From 2b920c39002cee0ec5b402581779bbaaf7c9138a Mon Sep 17 00:00:00 2001
From: Sam Bull <git@sambull.org>
Date: Sat, 3 Jan 2026 15:56:02 +0000
Subject: [PATCH] Use decompressor max_length parameter (#11898) (#11918)

(cherry picked from commit 92477c5a74c43dfe0474bd24f8de11875daa2298)

---------

Co-authored-by: J. Nick Koston <nick@koston.org>
---
 CHANGES/11898.breaking.rst      |   2 +
 aiohttp/compression_utils.py    | 121 ++++++++++++++++++++------------
 aiohttp/http_exceptions.py      |   4 ++
 aiohttp/http_parser.py          |  23 +++++-
 aiohttp/multipart.py            |  31 +++++---
 aiohttp/web_request.py          |   2 +-
 docs/spelling_wordlist.txt      |   1 +
 pyproject.toml                  |   4 +-
 requirements/runtime-deps.in    |   4 +-
 tests/test_client_functional.py | 114 +++++++++++++++++++++++++++++-
 tests/test_http_parser.py       |  34 +++++++++
 tests/test_multipart.py         |  83 ++++++++++++++++------
 12 files changed, 335 insertions(+), 88 deletions(-)
 create mode 100644 CHANGES/11898.breaking.rst

Index: aiohttp-3.9.3/aiohttp/compression_utils.py
===================================================================
--- aiohttp-3.9.3.orig/aiohttp/compression_utils.py
+++ aiohttp-3.9.3/aiohttp/compression_utils.py
@@ -1,5 +1,6 @@
 import asyncio
 import zlib
+from abc import ABC, abstractmethod
 from concurrent.futures import Executor
 from typing import Optional, cast
 
@@ -13,7 +14,11 @@ try:
 except ImportError:  # pragma: no cover
     HAS_BROTLI = False
 
-MAX_SYNC_CHUNK_SIZE = 1024
+MAX_SYNC_CHUNK_SIZE = 4096
+DEFAULT_MAX_DECOMPRESS_SIZE = 2**25  # 32MiB
+
+# Unlimited decompression constants - different libraries use different conventions
+ZLIB_MAX_LENGTH_UNLIMITED = 0  # zlib uses 0 to mean unlimited
 
 
 def encoding_to_mode(
@@ -26,19 +31,37 @@ def encoding_to_mode(
     return -zlib.MAX_WBITS if suppress_deflate_header else zlib.MAX_WBITS
 
 
-class ZlibBaseHandler:
+class DecompressionBaseHandler(ABC):
     def __init__(
         self,
-        mode: int,
         executor: Optional[Executor] = None,
         max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
     ):
-        self._mode = mode
+        """Base class for decompression handlers."""
         self._executor = executor
         self._max_sync_chunk_size = max_sync_chunk_size
 
+    @abstractmethod
+    def decompress_sync(
+        self, data: bytes, max_length: int = ZLIB_MAX_LENGTH_UNLIMITED
+    ) -> bytes:
+        """Decompress the given data."""
+
+    async def decompress(
+        self, data: bytes, max_length: int = ZLIB_MAX_LENGTH_UNLIMITED
+    ) -> bytes:
+        """Decompress the given data."""
+        if (
+            self._max_sync_chunk_size is not None
+            and len(data) > self._max_sync_chunk_size
+        ):
+            return await asyncio.get_event_loop().run_in_executor(
+                self._executor, self.decompress_sync, data, max_length
+            )
+        return self.decompress_sync(data, max_length)
+
 
-class ZLibCompressor(ZlibBaseHandler):
+class ZLibCompressor:
     def __init__(
         self,
         encoding: Optional[str] = None,
@@ -49,12 +72,12 @@ class ZLibCompressor(ZlibBaseHandler):
         executor: Optional[Executor] = None,
         max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
     ):
-        super().__init__(
-            mode=encoding_to_mode(encoding, suppress_deflate_header)
+        self._executor = executor
+        self._max_sync_chunk_size = max_sync_chunk_size
+        self._mode = (
+            encoding_to_mode(encoding, suppress_deflate_header)
             if wbits is None
-            else wbits,
-            executor=executor,
-            max_sync_chunk_size=max_sync_chunk_size,
+            else wbits
         )
         if level is None:
             self._compressor = zlib.compressobj(wbits=self._mode, strategy=strategy)
@@ -86,7 +109,7 @@ class ZLibCompressor(ZlibBaseHandler):
         return self._compressor.flush(mode)
 
 
-class ZLibDecompressor(ZlibBaseHandler):
+class ZLibDecompressor(DecompressionBaseHandler):
     def __init__(
         self,
         encoding: Optional[str] = None,
@@ -94,26 +117,15 @@ class ZLibDecompressor(ZlibBaseHandler):
         executor: Optional[Executor] = None,
         max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
     ):
-        super().__init__(
-            mode=encoding_to_mode(encoding, suppress_deflate_header),
-            executor=executor,
-            max_sync_chunk_size=max_sync_chunk_size,
-        )
+        super().__init__(executor=executor, max_sync_chunk_size=max_sync_chunk_size)
+        self._mode = encoding_to_mode(encoding, suppress_deflate_header)
         self._decompressor = zlib.decompressobj(wbits=self._mode)
 
-    def decompress_sync(self, data: bytes, max_length: int = 0) -> bytes:
+    def decompress_sync(
+        self, data: bytes, max_length: int = ZLIB_MAX_LENGTH_UNLIMITED
+    ) -> bytes:
         return self._decompressor.decompress(data, max_length)
 
-    async def decompress(self, data: bytes, max_length: int = 0) -> bytes:
-        if (
-            self._max_sync_chunk_size is not None
-            and len(data) > self._max_sync_chunk_size
-        ):
-            return await asyncio.get_event_loop().run_in_executor(
-                self._executor, self.decompress_sync, data, max_length
-            )
-        return self.decompress_sync(data, max_length)
-
     def flush(self, length: int = 0) -> bytes:
         return (
             self._decompressor.flush(length)
@@ -134,22 +146,31 @@ class ZLibDecompressor(ZlibBaseHandler):
         return self._decompressor.unused_data
 
 
-class BrotliDecompressor:
+class BrotliDecompressor(DecompressionBaseHandler):
     # Supports both 'brotlipy' and 'Brotli' packages
     # since they share an import name. The top branches
     # are for 'brotlipy' and bottom branches for 'Brotli'
-    def __init__(self) -> None:
+    def __init__(
+        self,
+        executor: Optional[Executor] = None,
+        max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
+    ) -> None:
+        """Decompress data using the Brotli library."""
         if not HAS_BROTLI:
             raise RuntimeError(
                 "The brotli decompression is not available. "
                 "Please install `Brotli` module"
             )
         self._obj = brotli.Decompressor()
+        super().__init__(executor=executor, max_sync_chunk_size=max_sync_chunk_size)
 
-    def decompress_sync(self, data: bytes) -> bytes:
+    def decompress_sync(
+        self, data: bytes, max_length: int = ZLIB_MAX_LENGTH_UNLIMITED
+    ) -> bytes:
+        """Decompress the given data."""
         if hasattr(self._obj, "decompress"):
-            return cast(bytes, self._obj.decompress(data))
-        return cast(bytes, self._obj.process(data))
+            return cast(bytes, self._obj.decompress(data, max_length))
+        return cast(bytes, self._obj.process(data, max_length))
 
     def flush(self) -> bytes:
         if hasattr(self._obj, "flush"):
Index: aiohttp-3.9.3/aiohttp/http_exceptions.py
===================================================================
--- aiohttp-3.9.3.orig/aiohttp/http_exceptions.py
+++ aiohttp-3.9.3/aiohttp/http_exceptions.py
@@ -75,6 +75,10 @@ class ContentLengthError(PayloadEncoding
     """Not enough data for satisfy content length header."""
 
 
+class DecompressSizeError(PayloadEncodingError):
+    """Decompressed size exceeds the configured limit."""
+
+
 class LineTooLong(BadHttpMessage):
     def __init__(
         self, line: str, limit: str = "Unknown", actual_size: str = "Unknown"
Index: aiohttp-3.9.3/aiohttp/http_parser.py
===================================================================
--- aiohttp-3.9.3.orig/aiohttp/http_parser.py
+++ aiohttp-3.9.3/aiohttp/http_parser.py
@@ -26,7 +26,12 @@ from yarl import URL
 
 from . import hdrs
 from .base_protocol import BaseProtocol
-from .compression_utils import HAS_BROTLI, BrotliDecompressor, ZLibDecompressor
+from .compression_utils import (
+    DEFAULT_MAX_DECOMPRESS_SIZE,
+    HAS_BROTLI,
+    BrotliDecompressor,
+    ZLibDecompressor,
+)
 from .helpers import (
     set_exception,
     DEBUG,
@@ -40,6 +45,7 @@ from .http_exceptions import (
     BadStatusLine,
     ContentEncodingError,
     ContentLengthError,
+    DecompressSizeError,
     InvalidHeader,
     InvalidURLError,
     LineTooLong,
@@ -931,7 +937,12 @@ class DeflateBuffer:
 
     decompressor: Any
 
-    def __init__(self, out: StreamReader, encoding: Optional[str]) -> None:
+    def __init__(
+        self,
+        out: StreamReader,
+        encoding: Optional[str],
+        max_decompress_size: int = DEFAULT_MAX_DECOMPRESS_SIZE,
+    ) -> None:
         self.out = out
         self.size = 0
         self.encoding = encoding
@@ -948,6 +959,8 @@ class DeflateBuffer:
         else:
             self.decompressor = ZLibDecompressor(encoding=encoding)
 
+        self._max_decompress_size = max_decompress_size
+
     def set_exception(self, exc: BaseException) -> None:
         self.out.set_exception(exc)
 
@@ -972,7 +985,10 @@ class DeflateBuffer:
             )
 
         try:
-            chunk = self.decompressor.decompress_sync(chunk)
+            # Decompress with limit + 1 so we can detect if output exceeds limit
+            chunk = self.decompressor.decompress_sync(
+                chunk, max_length=self._max_decompress_size + 1
+            )
         except Exception:
             raise ContentEncodingError(
                 "Can not decode content-encoding: %s" % self.encoding
@@ -980,6 +996,13 @@ class DeflateBuffer:
 
         self._started_decoding = True
 
+        # Check if decompression limit was exceeded
+        if len(chunk) > self._max_decompress_size:
+            raise DecompressSizeError(
+                "Decompressed data exceeds the configured limit of %d bytes"
+                % self._max_decompress_size
+            )
+
         if chunk:
             self.out.feed_data(chunk, len(chunk))
 
Index: aiohttp-3.9.3/aiohttp/multipart.py
===================================================================
--- aiohttp-3.9.3.orig/aiohttp/multipart.py
+++ aiohttp-3.9.3/aiohttp/multipart.py
@@ -27,7 +27,12 @@ from urllib.parse import parse_qsl, unqu
 
 from multidict import CIMultiDict, CIMultiDictProxy, MultiMapping
 
-from .compression_utils import ZLibCompressor, ZLibDecompressor
+from .abc import AbstractStreamWriter
+from .compression_utils import (
+    DEFAULT_MAX_DECOMPRESS_SIZE,
+    ZLibCompressor,
+    ZLibDecompressor,
+)
 from .hdrs import (
     CONTENT_DISPOSITION,
     CONTENT_ENCODING,
@@ -263,6 +268,7 @@ class BodyPartReader:
         *,
         subtype: str = "mixed",
         default_charset: Optional[str] = None,
+        max_decompress_size: int = DEFAULT_MAX_DECOMPRESS_SIZE,
     ) -> None:
         self.headers = headers
         self._boundary = boundary
@@ -278,6 +284,7 @@ class BodyPartReader:
         self._prev_chunk: Optional[bytes] = None
         self._content_eof = 0
         self._cache: Dict[str, Any] = {}
+        self._max_decompress_size = max_decompress_size
 
     def __aiter__(self) -> AsyncIterator["BodyPartReader"]:
         return self  # type: ignore[return-value]
@@ -307,7 +314,7 @@ class BodyPartReader:
         while not self._at_eof:
             data.extend(await self.read_chunk(self.chunk_size))
         if decode:
-            return self.decode(data)
+            return await self.decode(data)
         return data
 
     async def read_chunk(self, size: int = chunk_size) -> bytes:
@@ -452,7 +459,7 @@ class BodyPartReader:
         """Returns True if the boundary was reached or False otherwise."""
         return self._at_eof
 
-    def decode(self, data: bytes) -> bytes:
+    async def decode(self, data: bytes) -> bytes:
         """Decodes data.
 
         Decoding is done according the specified Content-Encoding
@@ -462,18 +469,18 @@ class BodyPartReader:
             data = self._decode_content_transfer(data)
         # https://datatracker.ietf.org/doc/html/rfc7578#section-4.8
         if not self._is_form_data and CONTENT_ENCODING in self.headers:
-            return self._decode_content(data)
+            return await self._decode_content(data)
         return data
 
-    def _decode_content(self, data: bytes) -> bytes:
+    async def _decode_content(self, data: bytes) -> bytes:
         encoding = self.headers.get(CONTENT_ENCODING, "").lower()
         if encoding == "identity":
             return data
         if encoding in {"deflate", "gzip"}:
-            return ZLibDecompressor(
+            return await ZLibDecompressor(
                 encoding=encoding,
                 suppress_deflate_header=True,
-            ).decompress_sync(data)
+            ).decompress(data, max_length=self._max_decompress_size)
 
         raise RuntimeError(f"unknown content encoding: {encoding}")
 
@@ -530,11 +537,11 @@ class BodyPartReaderPayload(Payload):
         if params:
             self.set_content_disposition("attachment", True, **params)
 
-    async def write(self, writer: Any) -> None:
+    async def write(self, writer: AbstractStreamWriter) -> None:
         field = self._value
         chunk = await field.read_chunk(size=2**16)
         while chunk:
-            await writer.write(field.decode(chunk))
+            await writer.write(await field.decode(chunk))
             chunk = await field.read_chunk(size=2**16)
 
 
@@ -929,7 +936,9 @@ class MultipartWriter(Payload):
         total += 2 + len(self._boundary) + 4  # b'--'+self._boundary+b'--\r\n'
         return total
 
-    async def write(self, writer: Any, close_boundary: bool = True) -> None:
+    async def write(
+        self, writer: AbstractStreamWriter, close_boundary: bool = True
+    ) -> None:
         """Write body."""
         for part, encoding, te_encoding in self._parts:
             if self._is_form_data:
@@ -958,7 +967,7 @@ class MultipartWriter(Payload):
 
 
 class MultipartPayloadWriter:
-    def __init__(self, writer: Any) -> None:
+    def __init__(self, writer: AbstractStreamWriter) -> None:
         self._writer = writer
         self._encoding: Optional[str] = None
         self._compress: Optional[ZLibCompressor] = None
Index: aiohttp-3.9.3/docs/spelling_wordlist.txt
===================================================================
--- aiohttp-3.9.3.orig/docs/spelling_wordlist.txt
+++ aiohttp-3.9.3/docs/spelling_wordlist.txt
@@ -182,6 +182,7 @@ lowercased
 Mako
 manylinux
 metadata
+MiB
 microservice
 middleware
 middlewares
Index: aiohttp-3.9.3/tests/test_client_functional.py
===================================================================
--- aiohttp-3.9.3.orig/tests/test_client_functional.py
+++ aiohttp-3.9.3/tests/test_client_functional.py
@@ -8,9 +8,18 @@ import json
 import pathlib
 import socket
 import ssl
+import zlib
 from typing import Any, AsyncIterator
 from unittest import mock
 
+try:
+    try:
+        import brotlicffi as brotli
+    except ImportError:
+        import brotli
+except ImportError:
+    brotli = None  # pragma: no cover
+
 import pytest
 from multidict import MultiDict
 from yarl import URL
@@ -19,6 +28,8 @@ import aiohttp
 from aiohttp import Fingerprint, ServerFingerprintMismatch, hdrs, web
 from aiohttp.abc import AbstractResolver
 from aiohttp.client_exceptions import TooManyRedirects
+from aiohttp.compression_utils import DEFAULT_MAX_DECOMPRESS_SIZE
+from aiohttp.http_exceptions import DecompressSizeError
 from aiohttp.pytest_plugin import AiohttpClient, TestClient
 from aiohttp.test_utils import unused_port
 
@@ -1903,8 +1914,71 @@ async def test_bad_payload_compression(a
     resp.close()
 
 
-async def test_bad_payload_chunked_encoding(aiohttp_client) -> None:
-    async def handler(request):
+async def test_payload_decompress_size_limit(aiohttp_client: AiohttpClient) -> None:
+    """Test that decompression size limit triggers DecompressSizeError.
+
+    When a compressed payload expands beyond the configured limit,
+    we raise DecompressSizeError.
+    """
+    # Create a highly compressible payload that exceeds the decompression limit.
+    # 64MiB of repeated bytes compresses to ~32KB but expands beyond the
+    # 32MiB per-call limit.
+    original = b"A" * (64 * 2**20)
+    compressed = zlib.compress(original)
+    assert len(original) > DEFAULT_MAX_DECOMPRESS_SIZE
+
+    async def handler(request: web.Request) -> web.Response:
+        # Send compressed data with Content-Encoding header
+        resp = web.Response(body=compressed)
+        resp.headers["Content-Encoding"] = "deflate"
+        return resp
+
+    app = web.Application()
+    app.router.add_get("/", handler)
+    client = await aiohttp_client(app)
+
+    async with client.get("/") as resp:
+        assert resp.status == 200
+
+        with pytest.raises(aiohttp.ClientPayloadError) as exc_info:
+            await resp.read()
+
+        assert isinstance(exc_info.value.__cause__, DecompressSizeError)
+        assert "Decompressed data exceeds" in str(exc_info.value.__cause__)
+
+
+@pytest.mark.skipif(brotli is None, reason="brotli is not installed")
+async def test_payload_decompress_size_limit_brotli(
+    aiohttp_client: AiohttpClient,
+) -> None:
+    """Test that brotli decompression size limit triggers DecompressSizeError."""
+    assert brotli is not None
+    # Create a highly compressible payload that exceeds the decompression limit.
+    original = b"A" * (64 * 2**20)
+    compressed = brotli.compress(original)
+    assert len(original) > DEFAULT_MAX_DECOMPRESS_SIZE
+
+    async def handler(request: web.Request) -> web.Response:
+        resp = web.Response(body=compressed)
+        resp.headers["Content-Encoding"] = "br"
+        return resp
+
+    app = web.Application()
+    app.router.add_get("/", handler)
+    client = await aiohttp_client(app)
+
+    async with client.get("/") as resp:
+        assert resp.status == 200
+
+        with pytest.raises(aiohttp.ClientPayloadError) as exc_info:
+            await resp.read()
+
+        assert isinstance(exc_info.value.__cause__, DecompressSizeError)
+        assert "Decompressed data exceeds" in str(exc_info.value.__cause__)
+
+
+async def test_bad_payload_chunked_encoding(aiohttp_client: AiohttpClient) -> None:
+    async def handler(request: web.Request) -> web.StreamResponse:
         resp = web.StreamResponse()
         resp.force_close()
         resp._length_check = False
Index: aiohttp-3.9.3/tests/test_http_parser.py
===================================================================
--- aiohttp-3.9.3.orig/tests/test_http_parser.py
+++ aiohttp-3.9.3/tests/test_http_parser.py
@@ -2,6 +2,7 @@
 
 import asyncio
 import re
+import zlib
 from contextlib import nullcontext
 from typing import Any, Dict, List
 from unittest import mock
@@ -1679,3 +1680,36 @@ class TestDeflateBuffer:
         dbuf.feed_eof()
 
         assert buf.at_eof()
+
+    @pytest.mark.parametrize(
+        "chunk_size",
+        [1024, 2**14, 2**16],  # 1KB, 16KB, 64KB
+        ids=["1KB", "16KB", "64KB"],
+    )
+    async def test_streaming_decompress_large_payload(
+        self, protocol: BaseProtocol, chunk_size: int
+    ) -> None:
+        """Test that large payloads decompress correctly when streamed in chunks.
+
+        This simulates real HTTP streaming where compressed data arrives in
+        small network chunks. Each chunk's decompressed output should be within
+        the max_decompress_size limit, allowing full recovery of the original data.
+        """
+        # Create a large payload (3MiB) that compresses well
+        original = b"A" * (3 * 2**20)
+        compressed = zlib.compress(original)
+
+        buf = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
+        dbuf = DeflateBuffer(buf, "deflate")
+
+        # Feed compressed data in chunks (simulating network streaming)
+        for i in range(0, len(compressed), chunk_size):
+            chunk = compressed[i : i + chunk_size]
+            dbuf.feed_data(chunk, len(chunk))
+
+        dbuf.feed_eof()
+
+        # Read all decompressed data
+        result = b"".join(buf._buffer)
+        assert len(result) == len(original)
+        assert result == original
Index: aiohttp-3.9.3/tests/test_multipart.py
===================================================================
--- aiohttp-3.9.3.orig/tests/test_multipart.py
+++ aiohttp-3.9.3/tests/test_multipart.py
@@ -9,6 +9,7 @@ import pytest
 
 import aiohttp
 from aiohttp import payload
+from aiohttp.abc import AbstractStreamWriter
 from aiohttp.hdrs import (
     CONTENT_DISPOSITION,
     CONTENT_ENCODING,
@@ -32,14 +33,14 @@ def buf():
 
 
 @pytest.fixture
-def stream(buf):
-    writer = mock.Mock()
+def stream(buf: bytearray) -> AbstractStreamWriter:
+    writer = mock.create_autospec(AbstractStreamWriter, instance=True, spec_set=True)
 
     async def write(chunk):
         buf.extend(chunk)
 
     writer.write.side_effect = write
-    return writer
+    return writer  # type: ignore[no-any-return]
 
 
 @pytest.fixture
@@ -1002,7 +1003,9 @@ async def test_writer(writer) -> None:
     assert writer.boundary == ":"
 
 
-async def test_writer_serialize_io_chunk(buf, stream, writer) -> None:
+async def test_writer_serialize_io_chunk(
+    buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter
+) -> None:
     with io.BytesIO(b"foobarbaz") as file_handle:
         writer.append(file_handle)
         await writer.write(stream)
@@ -1012,7 +1015,9 @@ async def test_writer_serialize_io_chunk
     )
 
 
-async def test_writer_serialize_json(buf, stream, writer) -> None:
+async def test_writer_serialize_json(
+    buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter
+) -> None:
     writer.append_json({"привет": "мир"})
     await writer.write(stream)
     assert (
@@ -1021,7 +1026,9 @@ async def test_writer_serialize_json(buf
     )
 
 
-async def test_writer_serialize_form(buf, stream, writer) -> None:
+async def test_writer_serialize_form(
+    buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter
+) -> None:
     data = [("foo", "bar"), ("foo", "baz"), ("boo", "zoo")]
     writer.append_form(data)
     await writer.write(stream)
@@ -1029,7 +1036,9 @@ async def test_writer_serialize_form(buf
     assert b"foo=bar&foo=baz&boo=zoo" in buf
 
 
-async def test_writer_serialize_form_dict(buf, stream, writer) -> None:
+async def test_writer_serialize_form_dict(
+    buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter
+) -> None:
     data = {"hello": "мир"}
     writer.append_form(data)
     await writer.write(stream)
@@ -1037,7 +1046,9 @@ async def test_writer_serialize_form_dic
     assert b"hello=%D0%BC%D0%B8%D1%80" in buf
 
 
-async def test_writer_write(buf, stream, writer) -> None:
+async def test_writer_write(
+    buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter
+) -> None:
     writer.append("foo-bar-baz")
     writer.append_json({"test": "passed"})
     writer.append_form({"test": "passed"})
@@ -1083,7 +1094,9 @@ async def test_writer_write(buf, stream,
     ) == bytes(buf)
 
 
-async def test_writer_write_no_close_boundary(buf, stream) -> None:
+async def test_writer_write_no_close_boundary(
+    buf: bytearray, stream: AbstractStreamWriter
+) -> None:
     writer = aiohttp.MultipartWriter(boundary=":")
     writer.append("foo-bar-baz")
     writer.append_json({"test": "passed"})
@@ -1136,7 +1149,9 @@ async def test_writer_serialize_with_con
     assert b"Time to Relax!" == data
 
 
-async def test_writer_serialize_with_content_encoding_deflate(buf, stream, writer):
+async def test_writer_serialize_with_content_encoding_deflate(
+    buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter
+) -> None:
     writer.append("Time to Relax!", {CONTENT_ENCODING: "deflate"})
     await writer.write(stream)
     headers, message = bytes(buf).split(b"\r\n\r\n", 1)
@@ -1150,7 +1165,9 @@ async def test_writer_serialize_with_con
     assert thing == message
 
 
-async def test_writer_serialize_with_content_encoding_identity(buf, stream, writer):
+async def test_writer_serialize_with_content_encoding_identity(
+    buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter
+) -> None:
     thing = b"\x0b\xc9\xccMU(\xc9W\x08J\xcdI\xacP\x04\x00"
     writer.append(thing, {CONTENT_ENCODING: "identity"})
     await writer.write(stream)
@@ -1165,12 +1182,16 @@ async def test_writer_serialize_with_con
     assert thing == message.split(b"\r\n")[0]
 
 
-def test_writer_serialize_with_content_encoding_unknown(buf, stream, writer):
+def test_writer_serialize_with_content_encoding_unknown(
+    buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter
+) -> None:
     with pytest.raises(RuntimeError):
         writer.append("Time to Relax!", {CONTENT_ENCODING: "snappy"})
 
 
-async def test_writer_with_content_transfer_encoding_base64(buf, stream, writer):
+async def test_writer_with_content_transfer_encoding_base64(
+    buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter
+) -> None:
     writer.append("Time to Relax!", {CONTENT_TRANSFER_ENCODING: "base64"})
     await writer.write(stream)
     headers, message = bytes(buf).split(b"\r\n\r\n", 1)
@@ -1183,7 +1204,9 @@ async def test_writer_with_content_trans
     assert b"VGltZSB0byBSZWxheCE=" == message.split(b"\r\n")[0]
 
 
-async def test_writer_content_transfer_encoding_quote_printable(buf, stream, writer):
+async def test_writer_content_transfer_encoding_quote_printable(
+    buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter
+) -> None:
     writer.append("Привет, мир!", {CONTENT_TRANSFER_ENCODING: "quoted-printable"})
     await writer.write(stream)
     headers, message = bytes(buf).split(b"\r\n\r\n", 1)
@@ -1199,7 +1222,9 @@ async def test_writer_content_transfer_e
     )
 
 
-def test_writer_content_transfer_encoding_unknown(buf, stream, writer) -> None:
+def test_writer_content_transfer_encoding_unknown(
+    buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter
+) -> None:
     with pytest.raises(RuntimeError):
         writer.append("Time to Relax!", {CONTENT_TRANSFER_ENCODING: "unknown"})
 
@@ -1323,7 +1348,9 @@ class TestMultipartWriter:
             with aiohttp.MultipartWriter(boundary=":") as writer:
                 writer.append(None)
 
-    async def test_write_preserves_content_disposition(self, buf, stream) -> None:
+    async def test_write_preserves_content_disposition(
+        self, buf: bytearray, stream: AbstractStreamWriter
+    ) -> None:
         with aiohttp.MultipartWriter(boundary=":") as writer:
             part = writer.append(b"foo", headers={CONTENT_TYPE: "test/passed"})
             part.set_content_disposition("form-data", filename="bug")
@@ -1340,7 +1367,9 @@ class TestMultipartWriter:
         )
         assert message == b"foo\r\n--:--\r\n"
 
-    async def test_preserve_content_disposition_header(self, buf, stream):
+    async def test_preserve_content_disposition_header(
+        self, buf: bytearray, stream: AbstractStreamWriter
+    ) -> None:
         # https://github.com/aio-libs/aiohttp/pull/3475#issuecomment-451072381
         with pathlib.Path(__file__).open("rb") as fobj:
             with aiohttp.MultipartWriter("form-data", boundary=":") as writer:
@@ -1364,7 +1393,9 @@ class TestMultipartWriter:
             b'Content-Disposition: attachments; filename="bug.py"'
         )
 
-    async def test_set_content_disposition_override(self, buf, stream):
+    async def test_set_content_disposition_override(
+        self, buf: bytearray, stream: AbstractStreamWriter
+    ) -> None:
         # https://github.com/aio-libs/aiohttp/pull/3475#issuecomment-451072381
         with pathlib.Path(__file__).open("rb") as fobj:
             with aiohttp.MultipartWriter("form-data", boundary=":") as writer:
@@ -1388,7 +1419,9 @@ class TestMultipartWriter:
             b'Content-Disposition: attachments; filename="bug.py"'
         )
 
-    async def test_reset_content_disposition_header(self, buf, stream):
+    async def test_reset_content_disposition_header(
+        self, buf: bytearray, stream: AbstractStreamWriter
+    ) -> None:
         # https://github.com/aio-libs/aiohttp/pull/3475#issuecomment-451072381
         with pathlib.Path(__file__).open("rb") as fobj:
             with aiohttp.MultipartWriter("form-data", boundary=":") as writer:
Index: aiohttp-3.9.3/CHANGES/11898.breaking.rst
===================================================================
--- /dev/null
+++ aiohttp-3.9.3/CHANGES/11898.breaking.rst
@@ -0,0 +1,2 @@
+``Brotli`` and ``brotlicffi`` minimum version is now 1.2.
+Decompression now has a default maximum output size of 32MiB per decompress call -- by :user:`Dreamsorcerer`.
Index: aiohttp-3.9.3/aiohttp/web_request.py
===================================================================
--- aiohttp-3.9.3.orig/aiohttp/web_request.py
+++ aiohttp-3.9.3/aiohttp/web_request.py
@@ -728,7 +728,7 @@ class BaseRequest(MutableMapping[str, An
                         tmp = tempfile.TemporaryFile()
                         chunk = await field.read_chunk(size=2**16)
                         while chunk:
-                            chunk = field.decode(chunk)
+                            chunk = await field.decode(chunk)
                             tmp.write(chunk)
                             size += len(chunk)
                             if 0 < max_size < size: