File CVE-2025-69223-auto_decompress-zip-bomb.patch of Package python-aiohttp.42478

From 586778f19b67a26a7d7ed9cd20add9b5b4dea96c Mon Sep 17 00:00:00 2001
From: Sam Bull <aa6bs0@sambull.org>
Date: Sat, 8 Apr 2023 21:42:02 +0100
Subject: [PATCH] refactor: Extract zlib-related logic into a single module
 (#7223) (#7250)

<!-- Thank you for your contribution! -->

Backport #7223

Addresses issue https://github.com/aio-libs/aiohttp/issues/7192
Refactors the logic to have the zlib-related stuff concentrated into a
single module

No

https://github.com/aio-libs/aiohttp/issues/7192

- [x] I think the code is well written
- [x] Unit tests for the changes exist
- [ ] Documentation reflects the changes
- [ ] If you provide code modification, please add yourself to
`CONTRIBUTORS.txt`
  * The format is <Name> <Surname>.
  * Please keep alphabetical order, the file is sorted by names.
- [ ] Add a new news fragment into the `CHANGES` folder
  * name it `<issue_id>.<type>` for example (588.bugfix)
* if you don't have an `issue_id` change it to the pr id after creating
the pr
* ensure type is one of the following: * `.feature`: Signifying a new
feature. * `.bugfix`: Signifying a bug fix. * `.doc`: Signifying a
documentation improvement. * `.removal`: Signifying a deprecation or
removal of public API.
* `.misc`: A ticket has been closed, but it is not of interest to users.
* Make sure to use full sentences with correct case and punctuation, for
example: "Fix issue with non-ascii contents in doctest text files."

---------

Co-authored-by: pre-commit-ci[bot]
<66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Sam Bull <aa6bs0@sambull.org>
(cherry picked from commit 3ff81dc9c9ce20efd3bf54cf52adaf438c483a92)

<!-- Thank you for your contribution! -->

## What do these changes do?

<!-- Please give a short brief about these changes. -->

## Are there changes in behavior for the user?

<!-- Outline any notable behaviour for the end users. -->

## Related issue number

<!-- Are there any issues opened that will be resolved by merging this
change? -->

## Checklist

- [ ] I think the code is well written
- [ ] Unit tests for the changes exist
- [ ] Documentation reflects the changes
- [ ] If you provide code modification, please add yourself to
`CONTRIBUTORS.txt`
  * The format is <Name> <Surname>.
  * Please keep alphabetical order, the file is sorted by names.
- [ ] Add a new news fragment into the `CHANGES` folder
  * name it `<issue_id>.<type>` for example (588.bugfix)
* if you don't have an `issue_id` change it to the pr id after creating
the pr
  * ensure type is one of the following:
    * `.feature`: Signifying a new feature.
    * `.bugfix`: Signifying a bug fix.
    * `.doc`: Signifying a documentation improvement.
    * `.removal`: Signifying a deprecation or removal of public API.
* `.misc`: A ticket has been closed, but it is not of interest to users.
* Make sure to use full sentences with correct case and punctuation, for
example: "Fix issue with non-ascii contents in doctest text files."

---------

Co-authored-by: Mykola Mokhnach <mokhnach@gmail.com>
---
 aiohttp/client_reqrep.py     |   2 +-
 aiohttp/compression_utils.py | 148 +++++++++++++++++++++++++++++++++++
 aiohttp/http_parser.py       |  41 ++--------
 aiohttp/http_websocket.py    |  16 ++--
 aiohttp/http_writer.py       |  12 +--
 aiohttp/multipart.py         |  28 ++++---
 aiohttp/web_response.py      |  41 ++++------
 tests/test_http_parser.py    |   4 +-
 8 files changed, 207 insertions(+), 85 deletions(-)
 create mode 100644 aiohttp/compression_utils.py

Index: aiohttp-3.6.0/aiohttp/compression_utils.py
===================================================================
--- /dev/null
+++ aiohttp-3.6.0/aiohttp/compression_utils.py
@@ -0,0 +1,168 @@
+import asyncio
+import zlib
+from abc import ABC, abstractmethod
+from concurrent.futures import Executor
+from typing import Optional, cast
+
+try:
+    import brotli
+
+    HAS_BROTLI = True
+except ImportError:  # pragma: no cover
+    HAS_BROTLI = False
+
+MAX_SYNC_CHUNK_SIZE = 4096
+DEFAULT_MAX_DECOMPRESS_SIZE = 2**25  # 32MiB
+
+# Unlimited decompression constants - different libraries use different conventions
+ZLIB_MAX_LENGTH_UNLIMITED = 0  # zlib uses 0 to mean unlimited
+
+
+def encoding_to_mode(
+    encoding: Optional[str] = None,
+    suppress_deflate_header: bool = False,
+) -> int:
+    if encoding == "gzip":
+        return 16 + zlib.MAX_WBITS
+
+    return -zlib.MAX_WBITS if suppress_deflate_header else zlib.MAX_WBITS
+
+
+class DecompressionBaseHandler(ABC):
+    def __init__(
+        self,
+        executor: Optional[Executor] = None,
+        max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
+    ):
+        """Base class for decompression handlers."""
+        self._executor = executor
+        self._max_sync_chunk_size = max_sync_chunk_size
+
+    @abstractmethod
+    def decompress_sync(
+        self, data: bytes, max_length: int = ZLIB_MAX_LENGTH_UNLIMITED
+    ) -> bytes:
+        """Decompress the given data."""
+
+    async def decompress(
+        self, data: bytes, max_length: int = ZLIB_MAX_LENGTH_UNLIMITED
+    ) -> bytes:
+        """Decompress the given data."""
+        if (
+            self._max_sync_chunk_size is not None
+            and len(data) > self._max_sync_chunk_size
+        ):
+            return await asyncio.get_event_loop().run_in_executor(
+                self._executor, self.decompress_sync, data, max_length
+            )
+        return self.decompress_sync(data, max_length)
+
+
+class ZLibCompressor:
+    def __init__(
+        self,
+        encoding: Optional[str] = None,
+        suppress_deflate_header: bool = False,
+        level: Optional[int] = None,
+        wbits: Optional[int] = None,
+        strategy: int = zlib.Z_DEFAULT_STRATEGY,
+        executor: Optional[Executor] = None,
+        max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
+    ):
+        self._executor = executor
+        self._max_sync_chunk_size = max_sync_chunk_size
+        self._mode = (
+            encoding_to_mode(encoding, suppress_deflate_header)
+            if wbits is None
+            else wbits
+        )
+        if level is None:
+            self._compressor = zlib.compressobj(wbits=self._mode, strategy=strategy)
+        else:
+            self._compressor = zlib.compressobj(
+                wbits=self._mode, strategy=strategy, level=level
+            )
+
+    def compress_sync(self, data: bytes) -> bytes:
+        return self._compressor.compress(data)
+
+    async def compress(self, data: bytes) -> bytes:
+        if (
+            self._max_sync_chunk_size is not None
+            and len(data) > self._max_sync_chunk_size
+        ):
+            return await asyncio.get_event_loop().run_in_executor(
+                self._executor, self.compress_sync, data
+            )
+        return self.compress_sync(data)
+
+    def flush(self, mode: int = zlib.Z_FINISH) -> bytes:
+        return self._compressor.flush(mode)
+
+
+class ZLibDecompressor(DecompressionBaseHandler):
+    def __init__(
+        self,
+        encoding: Optional[str] = None,
+        suppress_deflate_header: bool = False,
+        executor: Optional[Executor] = None,
+        max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
+    ):
+        super().__init__(executor=executor, max_sync_chunk_size=max_sync_chunk_size)
+        self._mode = encoding_to_mode(encoding, suppress_deflate_header)
+        self._decompressor = zlib.decompressobj(wbits=self._mode)
+
+    def decompress_sync(
+        self, data: bytes, max_length: int = ZLIB_MAX_LENGTH_UNLIMITED
+    ) -> bytes:
+        return self._decompressor.decompress(data, max_length)
+
+    def flush(self, length: int = 0) -> bytes:
+        return (
+            self._decompressor.flush(length)
+            if length > 0
+            else self._decompressor.flush()
+        )
+
+    @property
+    def eof(self) -> bool:
+        return self._decompressor.eof
+
+    @property
+    def unconsumed_tail(self) -> bytes:
+        return self._decompressor.unconsumed_tail
+
+    @property
+    def unused_data(self) -> bytes:
+        return self._decompressor.unused_data
+
+
+class BrotliDecompressor(DecompressionBaseHandler):
+    # Supports both 'brotlipy' and 'Brotli' packages
+    # since they share an import name. The top branches
+    # are for 'brotlipy' and bottom branches for 'Brotli'
+    def __init__(
+        self,
+        executor: Optional[Executor] = None,
+        max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
+    ) -> None:
+        """Decompress data using the Brotli library."""
+        if not HAS_BROTLI:
+            raise RuntimeError(
+                "The brotli decompression is not available. "
+                "Please install `Brotli` module"
+            )
+        self._obj = brotli.Decompressor()
+        super().__init__(executor=executor, max_sync_chunk_size=max_sync_chunk_size)
+
+    def decompress_sync(
+        self, data, max_length: int = ZLIB_MAX_LENGTH_UNLIMITED
+    ) -> bytes:
+        if hasattr(self._obj, "decompress"):
+            return cast(bytes, self._obj.decompress(data, max_length))
+        return cast(bytes, self._obj.process(data, max_length))
+
+    def flush(self) -> bytes:
+        if hasattr(self._obj, "flush"):
+            return cast(bytes, self._obj.flush())
+        return b""
Index: aiohttp-3.6.0/aiohttp/http_parser.py
===================================================================
--- aiohttp-3.6.0.orig/aiohttp/http_parser.py
+++ aiohttp-3.6.0/aiohttp/http_parser.py
@@ -3,7 +3,6 @@ import asyncio
 import collections
 import re
 import string
-import zlib
 from enum import IntEnum
 from typing import Any, List, Optional, Tuple, Type, Union  # noqa
 
@@ -12,12 +11,19 @@ from yarl import URL
 
 from . import hdrs
 from .base_protocol import BaseProtocol
+from .compression_utils import (
+    DEFAULT_MAX_DECOMPRESS_SIZE,
+    HAS_BROTLI,
+    BrotliDecompressor,
+    ZLibDecompressor,
+)
 from .helpers import NO_EXTENSIONS, BaseTimerContext, set_exception
 from .http_exceptions import (
     BadHttpMessage,
     BadStatusLine,
     ContentEncodingError,
     ContentLengthError,
+    DecompressSizeError,
     InvalidHeader,
     LineTooLong,
     TransferEncodingError,
@@ -27,12 +33,6 @@ from .log import internal_logger
 from .streams import EMPTY_PAYLOAD, StreamReader
 from .typedefs import RawHeaders
 
-try:
-    import brotli
-    HAS_BROTLI = True
-except ImportError:  # pragma: no cover
-    HAS_BROTLI = False
-
 
 __all__ = (
     'HeadersParser', 'HttpParser', 'HttpRequestParser', 'HttpResponseParser',
@@ -736,7 +736,12 @@ class HttpPayloadParser:
 class DeflateBuffer:
     """DeflateStream decompress stream and feed data into specified stream."""
 
-    def __init__(self, out: StreamReader, encoding: Optional[str]) -> None:
+    def __init__(
+        self,
+        out: StreamReader,
+        encoding: Optional[str],
+        max_decompress_size: int = DEFAULT_MAX_DECOMPRESS_SIZE,
+    ) -> None:
         self.out = out
         self.size = 0
         self.encoding = encoding
@@ -747,33 +752,50 @@ class DeflateBuffer:
                 raise ContentEncodingError(
                     'Can not decode content-encoding: brotli (br). '
                     'Please install `brotlipy`')
-            self.decompressor = brotli.Decompressor()
+            self.decompressor = BrotliDecompressor()
         else:
-            zlib_mode = (16 + zlib.MAX_WBITS
-                         if encoding == 'gzip' else -zlib.MAX_WBITS)
-            self.decompressor = zlib.decompressobj(wbits=zlib_mode)
+            self.decompressor = ZLibDecompressor(encoding=encoding)
+
+        self._max_decompress_size = max_decompress_size
 
     def set_exception(self, exc: BaseException) -> None:
         self.out.set_exception(exc)
 
     def feed_data(self, chunk: bytes, size: int) -> None:
         self.size += size
+
+        # RFC1950
+        # bits 0..3 = CM = 0b1000 = 8 = "deflate"
+        # bits 4..7 = CINFO = 1..7 = windows size.
+        if (
+            not self._started_decoding
+            and self.encoding == "deflate"
+            and chunk[0] & 0xF != 8
+        ):
+            # Change the decoder to decompress incorrectly compressed data
+            # Actually we should issue a warning about non-RFC-compliant data.
+            self.decompressor = ZLibDecompressor(
+                encoding=self.encoding, suppress_deflate_header=True
+            )
+
         try:
-            chunk = self.decompressor.decompress(chunk)
+            chunk = self.decompressor.decompress_sync(
+                chunk, max_length=self._max_decompress_size + 1
+            )
         except Exception:
-            if not self._started_decoding and self.encoding == 'deflate':
-                self.decompressor = zlib.decompressobj()
-                try:
-                    chunk = self.decompressor.decompress(chunk)
-                except Exception:
-                    raise ContentEncodingError(
-                        'Can not decode content-encoding: %s' % self.encoding)
-            else:
-                raise ContentEncodingError(
-                    'Can not decode content-encoding: %s' % self.encoding)
+            raise ContentEncodingError(
+                'Can not decode content-encoding: %s' % self.encoding)
+
+        self._started_decoding = True
+
+        # Check if decompression limit was exceeded
+        if len(chunk) > self._max_decompress_size:
+            raise DecompressSizeError(
+                "Decompressed data exceeds the configured limit of %d bytes"
+                % self._max_decompress_size
+            )
 
         if chunk:
-            self._started_decoding = True
             self.out.feed_data(chunk, len(chunk))
 
     def feed_eof(self) -> None:
Index: aiohttp-3.6.0/aiohttp/http_websocket.py
===================================================================
--- aiohttp-3.6.0.orig/aiohttp/http_websocket.py
+++ aiohttp-3.6.0/aiohttp/http_websocket.py
@@ -12,6 +12,7 @@ from struct import Struct
 from typing import Any, Callable, List, Optional, Tuple, Union
 
 from .base_protocol import BaseProtocol
+from .compression_utils import ZLibCompressor, ZLibDecompressor
 from .helpers import NO_EXTENSIONS
 from .log import ws_logger
 from .streams import DataQueue
@@ -280,7 +281,7 @@ class WebSocketReader:
     def _feed_data(self, data: bytes) -> Tuple[bool, bytes]:
         for fin, opcode, payload, compressed in self.parse_frame(data):
             if compressed and not self._decompressobj:
-                self._decompressobj = zlib.decompressobj(wbits=-zlib.MAX_WBITS)
+                self._decompressobj = ZLibDecompressor(suppress_deflate_header=True)
             if opcode == WSMsgType.CLOSE:
                 if len(payload) >= 2:
                     close_code = UNPACK_CLOSE_CODE(payload[:2])[0]
@@ -358,8 +359,9 @@ class WebSocketReader:
                     # Decompress process must to be done after all packets
                     # received.
                     if compressed:
+                        assert self._decompressobj is not None
                         self._partial.extend(_WS_DEFLATE_TRAILING)
-                        payload_merged = self._decompressobj.decompress(
+                        payload_merged = self._decompressobj.decompress_sync(
                             self._partial, self._max_msg_size)
                         if self._decompressobj.unconsumed_tail:
                             left = len(self._decompressobj.unconsumed_tail)
@@ -572,14 +574,14 @@ class WebSocketWriter:
         if (compress or self.compress) and opcode < 8:
             if compress:
                 # Do not set self._compress if compressing is for this frame
-                compressobj = zlib.compressobj(wbits=-compress)
+                compressobj = ZLibCompressor(wbits=-compress)
             else:  # self.compress
                 if not self._compressobj:
-                    self._compressobj = zlib.compressobj(wbits=-self.compress)
+                    self._compressobj = ZLibCompressor(wbits=-self.compress)
                 compressobj = self._compressobj
 
-            message = compressobj.compress(message)
-            message = message + compressobj.flush(
+            message = await compressobj.compress(message)
+            message += compressobj.flush(
                 zlib.Z_FULL_FLUSH if self.notakeover else zlib.Z_SYNC_FLUSH)
             if message.endswith(_WS_DEFLATE_TRAILING):
                 message = message[:-4]
Index: aiohttp-3.6.0/aiohttp/http_writer.py
===================================================================
--- aiohttp-3.6.0.orig/aiohttp/http_writer.py
+++ aiohttp-3.6.0/aiohttp/http_writer.py
@@ -9,6 +9,7 @@ from multidict import CIMultiDict  # noq
 
 from .abc import AbstractStreamWriter
 from .base_protocol import BaseProtocol
+from .compression_utils import ZLibCompressor
 from .helpers import NO_EXTENSIONS
 
 __all__ = ('StreamWriter', 'HttpVersion', 'HttpVersion10', 'HttpVersion11')
@@ -54,9 +55,10 @@ class StreamWriter(AbstractStreamWriter)
         self.chunked = True
 
     def enable_compression(self, encoding: str='deflate') -> None:
-        zlib_mode = (16 + zlib.MAX_WBITS
-                     if encoding == 'gzip' else -zlib.MAX_WBITS)
-        self._compress = zlib.compressobj(wbits=zlib_mode)
+        suppress_deflate_header = encoding == "deflate"
+        self._compress = ZLibCompressor(
+            encoding=encoding, suppress_deflate_header=suppress_deflate_header
+        )
 
     def _write(self, chunk: bytes) -> None:
         size = len(chunk)
@@ -79,7 +81,7 @@ class StreamWriter(AbstractStreamWriter)
             await self._on_chunk_sent(chunk)
 
         if self._compress is not None:
-            chunk = self._compress.compress(chunk)
+            chunk = await self._compress.compress(chunk)
             if not chunk:
                 return
 
@@ -120,9 +122,9 @@ class StreamWriter(AbstractStreamWriter)
 
         if self._compress:
             if chunk:
-                chunk = self._compress.compress(chunk)
+                chunk = await self._compress.compress(chunk)
 
-            chunk = chunk + self._compress.flush()
+            chunk += self._compress.flush()
             if chunk and self.chunked:
                 chunk_len = ('%x\r\n' % len(chunk)).encode('ascii')
                 chunk = chunk_len + chunk + b'\r\n0\r\n\r\n'
Index: aiohttp-3.6.0/aiohttp/multipart.py
===================================================================
--- aiohttp-3.6.0.orig/aiohttp/multipart.py
+++ aiohttp-3.6.0/aiohttp/multipart.py
@@ -24,6 +24,12 @@ from urllib.parse import parse_qsl, unqu
 
 from multidict import CIMultiDict, CIMultiDictProxy, MultiMapping  # noqa
 
+from .abc import AbstractStreamWriter
+from .compression_utils import (
+    DEFAULT_MAX_DECOMPRESS_SIZE,
+    ZLibCompressor,
+    ZLibDecompressor,
+)
 from .hdrs import (
     CONTENT_DISPOSITION,
     CONTENT_ENCODING,
@@ -251,6 +257,7 @@ class BodyPartReader:
         *,
         subtype: str = "mixed",
         default_charset: Optional[str] = None,
+        max_decompress_size: int = DEFAULT_MAX_DECOMPRESS_SIZE,
     ) -> None:
         self.headers = headers
         self._boundary = boundary
@@ -267,6 +274,7 @@ class BodyPartReader:
         self._prev_chunk = None  # type: Optional[bytes]
         self._content_eof = 0
         self._cache = {}  # type: Dict[str, Any]
+        self._max_decompress_size = max_decompress_size
 
     def __aiter__(self) -> 'BodyPartReader':
         return self
@@ -453,14 +461,16 @@ class BodyPartReader:
     def _decode_content(self, data: bytes) -> bytes:
         encoding = self.headers.get(CONTENT_ENCODING, '').lower()
 
-        if encoding == 'deflate':
-            return zlib.decompress(data, -zlib.MAX_WBITS)
-        elif encoding == 'gzip':
-            return zlib.decompress(data, 16 + zlib.MAX_WBITS)
-        elif encoding == 'identity':
+        if encoding == "identity":
             return data
-        else:
-            raise RuntimeError('unknown content encoding: {}'.format(encoding))
+        if encoding in ("deflate", "gzip"):
+            compressed = ZLibDecompressor(
+                encoding=encoding,
+                suppress_deflate_header=True,
+            ).decompress_sync(data, max_length=self._max_decompress_size)
+            return compressed
+
+        raise RuntimeError(f"unknown content encoding: {encoding}")
 
     def _decode_content_transfer(self, data: bytes) -> bytes:
         encoding = self.headers.get(CONTENT_TRANSFER_ENCODING, '').lower()
@@ -517,7 +527,7 @@ class BodyPartReaderPayload(Payload):
         if params:
             self.set_content_disposition('attachment', True, **params)
 
-    async def write(self, writer: Any) -> None:
+    async def write(self, writer: AbstractStreamWriter) -> None:
         field = self._value
         chunk = await field.read_chunk(size=2**16)
         while chunk:
@@ -964,9 +974,12 @@ class MultipartPayloadWriter:
             self._encoding = 'quoted-printable'
 
     def enable_compression(self, encoding: str='deflate') -> None:
-        zlib_mode = (16 + zlib.MAX_WBITS
-                     if encoding == 'gzip' else -zlib.MAX_WBITS)
-        self._compress = zlib.compressobj(wbits=zlib_mode)
+        strategy = zlib.Z_DEFAULT_STRATEGY
+        self._compress = ZLibCompressor(
+            encoding=encoding,
+            suppress_deflate_header=True,
+            strategy=strategy,
+        )
 
     async def write_eof(self) -> None:
         if self._compress is not None:
@@ -983,7 +996,7 @@ class MultipartPayloadWriter:
     async def write(self, chunk: bytes) -> None:
         if self._compress is not None:
             if chunk:
-                chunk = self._compress.compress(chunk)
+                chunk = await self._compress.compress(chunk)
                 if not chunk:
                     return
 
Index: aiohttp-3.6.0/aiohttp/web_response.py
===================================================================
--- aiohttp-3.6.0.orig/aiohttp/web_response.py
+++ aiohttp-3.6.0/aiohttp/web_response.py
@@ -6,7 +6,6 @@ import json
 import math
 import time
 import warnings
-import zlib
 from concurrent.futures import Executor
 from email.utils import parsedate
 from http.cookies import SimpleCookie
@@ -27,6 +26,7 @@ from multidict import CIMultiDict, istr
 
 from . import hdrs, payload
 from .abc import AbstractStreamWriter
+from .compression_utils import ZLibCompressor
 from .helpers import HeadersMixin, rfc822_formatted_time, sentinel
 from .http import RESPONSES, SERVER_SOFTWARE, HttpVersion10, HttpVersion11
 from .payload import Payload
@@ -666,13 +666,6 @@ class Response(StreamResponse):
 
         return await super()._start(request)
 
-    def _compress_body(self, zlib_mode: int) -> None:
-        compressobj = zlib.compressobj(wbits=zlib_mode)
-        body_in = self._body
-        assert body_in is not None
-        self._compressed_body = \
-            compressobj.compress(body_in) + compressobj.flush()
-
     async def _do_start_compression(self, coding: ContentCoding) -> None:
         if self._body_payload or self._chunked:
             return await super()._do_start_compression(coding)
@@ -680,22 +673,26 @@ class Response(StreamResponse):
         if coding != ContentCoding.identity:
             # Instead of using _payload_writer.enable_compression,
             # compress the whole body
-            zlib_mode = (16 + zlib.MAX_WBITS
-                         if coding == ContentCoding.gzip else -zlib.MAX_WBITS)
-            body_in = self._body
-            assert body_in is not None
-            if self._zlib_executor_size is not None and \
-                    len(body_in) > self._zlib_executor_size:
-                await asyncio.get_event_loop().run_in_executor(
-                    self._zlib_executor, self._compress_body, zlib_mode)
-            else:
-                self._compress_body(zlib_mode)
-
-            body_out = self._compressed_body
-            assert body_out is not None
+            compressor = ZLibCompressor(
+                encoding=str(coding.value),
+                max_sync_chunk_size=self._zlib_executor_size,
+                executor=self._zlib_executor,
+            )
+            assert self._body is not None
+            if self._zlib_executor_size is None and len(self._body) > 1024 * 1024:
+                warnings.warn(
+                    "Synchronous compression of large response bodies "
+                    f"({len(self._body)} bytes) might block the async event loop. "
+                    "Consider providing a custom value to zlib_executor_size/"
+                    "zlib_executor response properties or disabling compression on it."
+                )
+            self._compressed_body = (
+                await compressor.compress(self._body) + compressor.flush()
+            )
+            assert self._compressed_body is not None
 
             self._headers[hdrs.CONTENT_ENCODING] = coding.value
-            self._headers[hdrs.CONTENT_LENGTH] = str(len(body_out))
+            self._headers[hdrs.CONTENT_LENGTH] = str(len(self._compressed_body))
 
 
 def json_response(data: Any=sentinel, *,
Index: aiohttp-3.6.0/aiohttp/http_exceptions.py
===================================================================
--- aiohttp-3.6.0.orig/aiohttp/http_exceptions.py
+++ aiohttp-3.6.0/aiohttp/http_exceptions.py
@@ -66,6 +66,10 @@ class ContentLengthError(PayloadEncoding
     """Not enough data for satisfy content length header."""
 
 
+class DecompressSizeError(PayloadEncodingError):
+    """Decompressed size exceeds the configured limit."""
+
+
 class LineTooLong(BadHttpMessage):
 
     def __init__(self, line: str,
Index: aiohttp-3.6.0/docs/spelling_wordlist.txt
===================================================================
--- aiohttp-3.6.0.orig/docs/spelling_wordlist.txt
+++ aiohttp-3.6.0/docs/spelling_wordlist.txt
@@ -145,6 +145,7 @@ lossless
 Mako
 manylinux
 metadata
+MiB
 microservice
 middleware
 middlewares
Index: aiohttp-3.6.0/tests/test_client_functional.py
===================================================================
--- aiohttp-3.6.0.orig/tests/test_client_functional.py
+++ aiohttp-3.6.0/tests/test_client_functional.py
@@ -6,8 +6,17 @@ import io
 import json
 import pathlib
 import socket
+import zlib
 from unittest import mock
 
+try:
+    try:
+        import brotlicffi as brotli
+    except ImportError:
+        import brotli
+except ImportError:
+    brotli = None  # pragma: no cover
+
 import pytest
 from async_generator import async_generator, yield_
 from multidict import MultiDict
@@ -16,6 +25,8 @@ import aiohttp
 from aiohttp import Fingerprint, ServerFingerprintMismatch, hdrs, web
 from aiohttp.abc import AbstractResolver
 from aiohttp.client_exceptions import TooManyRedirects
+from aiohttp.compression_utils import DEFAULT_MAX_DECOMPRESS_SIZE
+from aiohttp.http_exceptions import DecompressSizeError
 from aiohttp.test_utils import unused_port
 
 
@@ -1713,8 +1724,66 @@ async def test_bad_payload_compression(a
     resp.close()
 
 
-async def test_bad_payload_chunked_encoding(aiohttp_client) -> None:
+async def test_payload_decompress_size_limit(aiohttp_client) -> None:
+    """Test that decompression size limit triggers DecompressSizeError.
+
+    When a compressed payload expands beyond the configured limit,
+    we raise DecompressSizeError.
+    """
+    # Create a highly compressible payload that exceeds the decompression limit.
+    # 64MiB of repeated bytes compresses to ~32KB but expands beyond the
+    # 32MiB per-call limit.
+    original = b"A" * (64 * 2**20)
+    compressed = zlib.compress(original)
+    assert len(original) > DEFAULT_MAX_DECOMPRESS_SIZE
+
+    async def handler(request: web.Request) -> web.Response:
+        # Send compressed data with Content-Encoding header
+        resp = web.Response(body=compressed)
+        resp.headers["Content-Encoding"] = "deflate"
+        return resp
+
+    app = web.Application()
+    app.router.add_get("/", handler)
+    client = await aiohttp_client(app)
+
+    async with client.get("/") as resp:
+        assert resp.status == 200
+
+        with pytest.raises(aiohttp.ClientPayloadError) as exc_info:
+            await resp.read()
+
+        assert "Decompressed data exceeds" in str(exc_info.value)
 
+
+@pytest.mark.skipif(brotli is None, reason="brotli is not installed")
+async def test_payload_decompress_size_limit_brotli(aiohttp_client) -> None:
+    """Test that brotli decompression size limit triggers DecompressSizeError."""
+    assert brotli is not None
+    # Create a highly compressible payload that exceeds the decompression limit.
+    original = b"A" * (64 * 2**20)
+    compressed = brotli.compress(original)
+    assert len(original) > DEFAULT_MAX_DECOMPRESS_SIZE
+
+    async def handler(request: web.Request) -> web.Response:
+        resp = web.Response(body=compressed)
+        resp.headers["Content-Encoding"] = "br"
+        return resp
+
+    app = web.Application()
+    app.router.add_get("/", handler)
+    client = await aiohttp_client(app)
+
+    async with client.get("/") as resp:
+        assert resp.status == 200
+
+        with pytest.raises(aiohttp.ClientPayloadError) as exc_info:
+            await resp.read()
+
+        assert "Decompressed data exceeds" in str(exc_info.value)
+
+
+async def test_bad_payload_chunked_encoding(aiohttp_client) -> None:
     async def handler(request):
         resp = web.StreamResponse()
         resp.force_close()
Index: aiohttp-3.6.0/tests/test_multipart.py
===================================================================
--- aiohttp-3.6.0.orig/tests/test_multipart.py
+++ aiohttp-3.6.0/tests/test_multipart.py
@@ -8,6 +8,7 @@ import pytest
 
 import aiohttp
 from aiohttp import payload
+from aiohttp.abc import AbstractStreamWriter
 from aiohttp.hdrs import (
     CONTENT_DISPOSITION,
     CONTENT_ENCODING,
@@ -29,14 +30,14 @@ def buf():
 
 
 @pytest.fixture
-def stream(buf):
-    writer = mock.Mock()
+def stream(buf: bytearray) -> AbstractStreamWriter:
+    writer = mock.create_autospec(AbstractStreamWriter, instance=True, spec_set=True)
 
     async def write(chunk):
         buf.extend(chunk)
 
     writer.write.side_effect = write
-    return writer
+    return writer  # type: ignore[no-any-return]
 
 
 @pytest.fixture
@@ -928,7 +929,9 @@ async def test_writer_content_transfer_e
             b' =D0=BC=D0=B8=D1=80!' == message.split(b'\r\n')[0])
 
 
-def test_writer_content_transfer_encoding_unknown(buf, stream, writer) -> None:
+def test_writer_content_transfer_encoding_unknown(
+    buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter
+) -> None:
     with pytest.raises(RuntimeError):
         writer.append('Time to Relax!', {CONTENT_TRANSFER_ENCODING: 'unknown'})
 
Index: aiohttp-3.6.0/tests/test_http_parser.py
===================================================================
--- aiohttp-3.6.0.orig/tests/test_http_parser.py
+++ aiohttp-3.6.0/tests/test_http_parser.py
@@ -919,9 +919,10 @@ class TestDeflateBuffer:
         dbuf = DeflateBuffer(buf, 'deflate')
 
         dbuf.decompressor = mock.Mock()
-        dbuf.decompressor.decompress.return_value = b'line'
+        dbuf.decompressor.decompress_sync.return_value = b"line"
 
-        dbuf.feed_data(b'data', 4)
+        # First byte should be b'x' in order code not to change the decoder.
+        dbuf.feed_data(b'xxxx', 4)
         assert [b'line'] == list(d for d, _ in buf._buffer)
 
     async def test_feed_data_err(self, stream) -> None:
@@ -931,10 +932,13 @@ class TestDeflateBuffer:
 
         exc = ValueError()
         dbuf.decompressor = mock.Mock()
-        dbuf.decompressor.decompress.side_effect = exc
+        dbuf.decompressor.decompress_sync.side_effect = exc
 
         with pytest.raises(http_exceptions.ContentEncodingError):
-            dbuf.feed_data(b'data', 4)
+            # Should be more than 4 bytes to trigger deflate FSM error.
+            # Should start with b'x', otherwise code switch mocked decoder.
+            data = b"xsomedata"
+            dbuf.feed_data(data, len(data))
 
     async def test_feed_eof(self, stream) -> None:
         buf = aiohttp.FlowControlDataQueue(stream,
@@ -991,3 +995,36 @@ class TestDeflateBuffer:
         dbuf.feed_eof()
 
         assert buf.at_eof()
+
+    @pytest.mark.parametrize(
+        "chunk_size",
+        [1024, 2**14, 2**16],  # 1KB, 16KB, 64KB
+        ids=["1KB", "16KB", "64KB"],
+    )
+    async def test_streaming_decompress_large_payload(
+        self, protocol: BaseProtocol, chunk_size: int
+    ) -> None:
+        """Test that large payloads decompress correctly when streamed in chunks.
+
+        This simulates real HTTP streaming where compressed data arrives in
+        small network chunks. Each chunk's decompressed output should be within
+        the max_decompress_size limit, allowing full recovery of the original data.
+        """
+        # Create a large payload (3MiB) that compresses well
+        original = b"A" * (3 * 2**20)
+        compressed = zlib.compress(original)
+
+        buf = streams.StreamReader(protocol, limit=2**16)
+        dbuf = DeflateBuffer(buf, "deflate")
+
+        # Feed compressed data in chunks (simulating network streaming)
+        for i in range(0, len(compressed), chunk_size):
+            chunk = compressed[i : i + chunk_size]
+            dbuf.feed_data(chunk, len(chunk))
+
+        dbuf.feed_eof()
+
+        # Read all decompressed data
+        result = b"".join(buf._buffer)
+        assert len(result) == len(original)
+        assert result == original