File CVE-2025-53643.patch of Package python-aiohttp.40456
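Backport of the upstream fix for CVE-2025-53643: the pure-Python HTTP parser
accepted the trailer section of a chunked body without parsing it, which made
HTTP request smuggling possible. With this patch the trailer lines are fed
through HeadersParser (so malformed trailers raise TransferEncodingError or
InvalidHeader instead of being silently skipped), an LF inside a chunk
extension now raises TransferEncodingError, parse_headers() no longer expects
the start line as its first element (callers pass lines[1:]), and
HttpPayloadParser takes a mandatory keyword-only headers_parser argument.

A minimal usage sketch of the new constructor requirement, mirroring the test
updates below; the FlowControlDataQueue/BaseProtocol wiring follows the
existing tests and assumes the patched 3.6 API, with loop/out as illustrative
local names:

    import asyncio

    import aiohttp
    from aiohttp.base_protocol import BaseProtocol
    from aiohttp.http_parser import HeadersParser, HttpPayloadParser

    loop = asyncio.new_event_loop()
    out = aiohttp.FlowControlDataQueue(BaseProtocol(loop), loop=loop)

    # headers_parser is now required; it parses and validates any trailer
    # section that follows the final zero-size chunk.
    p = HttpPayloadParser(out, chunked=True, headers_parser=HeadersParser())
    p.feed_data(b"4\r\ntest\r\n0\r\ntest: trailer\r\n\r\n")
    assert out.is_eof()
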
---
aiohttp/http_parser.py | 68 ++++++++++++++++++++++++++++------------------
aiohttp/multipart.py | 2 +-
tests/test_http_parser.py | 59 +++++++++++++++++----------------------
3 files changed, 69 insertions(+), 60 deletions(-)
Index: aiohttp-3.6.0/aiohttp/http_parser.py
===================================================================
--- aiohttp-3.6.0.orig/aiohttp/http_parser.py 2025-09-03 23:13:32.011607539 +0200
+++ aiohttp-3.6.0/aiohttp/http_parser.py 2025-09-03 23:13:40.827889952 +0200
@@ -94,8 +94,8 @@
headers = CIMultiDict() # type: CIMultiDict[str]
raw_headers = []
- lines_idx = 1
- line = lines[1]
+ lines_idx = 0
+ line = lines[lines_idx]
line_count = len(lines)
while line:
@@ -296,7 +296,9 @@
compression=msg.compression,
code=self.code, readall=self.readall,
response_with_body=self.response_with_body,
- auto_decompress=self._auto_decompress)
+ auto_decompress=self._auto_decompress,
+ headers_parser=self._headers_parser,
+ )
if not payload_parser.done:
self._payload_parser = payload_parser
elif method == METH_CONNECT:
@@ -306,7 +308,9 @@
self._payload_parser = HttpPayloadParser(
payload, method=msg.method,
compression=msg.compression, readall=True,
- auto_decompress=self._auto_decompress)
+ auto_decompress=self._auto_decompress,
+ headers_parser=self._headers_parser,
+ )
else:
if (getattr(msg, 'code', 100) >= 199 and
length is None and self.read_until_eof):
@@ -318,7 +322,9 @@
compression=msg.compression,
code=self.code, readall=True,
response_with_body=self.response_with_body,
- auto_decompress=self._auto_decompress)
+ auto_decompress=self._auto_decompress,
+ headers_parser=self._headers_parser,
+ )
if not payload_parser.done:
self._payload_parser = payload_parser
else:
@@ -351,6 +357,10 @@
eof = True
data = b''
+ if isinstance(
+ exc, (InvalidHeader, TransferEncodingError)
+ ):
+ raise
if eof:
start_pos = 0
@@ -455,7 +465,7 @@
# read headers
(headers, raw_headers,
- close, compression, upgrade, chunked) = self.parse_headers(lines)
+ close, compression, upgrade, chunked) = self.parse_headers(lines[1:])
if close is None: # then the headers weren't set in the request
if version_o <= HttpVersion10: # HTTP 1.0 must asks to not close
@@ -526,7 +536,7 @@
# read headers
(headers, raw_headers,
- close, compression, upgrade, chunked) = self.parse_headers(lines)
+ close, compression, upgrade, chunked) = self.parse_headers(lines[1:])
if close is None:
close = version_o <= HttpVersion10
@@ -546,13 +556,18 @@
method: Optional[str]=None,
readall: bool=False,
response_with_body: bool=True,
- auto_decompress: bool=True) -> None:
+ auto_decompress: bool=True,
+ *,
+ headers_parser: HeadersParser,
+ ) -> None:
self._length = 0
self._type = ParseState.PARSE_NONE
self._chunk = ChunkState.PARSE_CHUNKED_SIZE
self._chunk_size = 0
self._chunk_tail = b''
self._auto_decompress = auto_decompress
+ self._headers_parser = headers_parser
+ self._trailer_lines = []
self.done = False
# payload decompression wrapper
@@ -637,7 +652,7 @@
# Verify no LF in the chunk-extension
if b"\n" in chunk[i:pos]:
ext = repr(chunk[i:pos])
- exc = BadHttpMessage(
+ exc = TransferEncodingError(
"Unexpected LF in chunk-extension: %s" % ext
)
set_exception(self.payload, exc)
@@ -655,7 +670,7 @@
chunk = chunk[pos+2:]
if size == 0: # eof marker
- self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
+ self._chunk = ChunkState.PARSE_TRAILERS
else:
self._chunk = ChunkState.PARSE_CHUNKED_CHUNK
self._chunk_size = size
@@ -689,27 +704,28 @@
self._chunk_tail = chunk
return False, b''
- # if stream does not contain trailer, after 0\r\n
- # we should get another \r\n otherwise
- # trailers needs to be skiped until \r\n\r\n
- if self._chunk == ChunkState.PARSE_MAYBE_TRAILERS:
- if chunk[:2] == SEP:
- # end of stream
- self.payload.feed_eof()
- return True, chunk[2:]
- else:
- self._chunk = ChunkState.PARSE_TRAILERS
-
- # read and discard trailer up to the CRLF terminator
if self._chunk == ChunkState.PARSE_TRAILERS:
pos = chunk.find(SEP)
- if pos >= 0:
- chunk = chunk[pos+2:]
- self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
- else:
+ if pos < 0:
self._chunk_tail = chunk
return False, b''
+ line = chunk[:pos]
+ chunk = chunk[pos + len(SEP) :]
+ if SEP == b"\n":
+ line = line.rstrip(b"\r")
+ self._trailer_lines.append(line)
+
+ if self._trailer_lines[-1] == b"":
+ try:
+ trailers, raw_trailers = self._headers_parser.parse_headers(
+ self._trailer_lines
+ )
+ finally:
+ self._trailer_lines.clear()
+ self.payload.feed_eof()
+ return True, chunk
+
# Read all bytes until eof
elif self._type == ParseState.PARSE_UNTIL_EOF:
self.payload.feed_data(chunk, len(chunk))
Index: aiohttp-3.6.0/aiohttp/multipart.py
===================================================================
--- aiohttp-3.6.0.orig/aiohttp/multipart.py 2025-09-03 23:13:34.917857552 +0200
+++ aiohttp-3.6.0/aiohttp/multipart.py 2025-09-03 23:13:40.828170089 +0200
@@ -710,7 +710,7 @@
% (chunk, self._boundary))
async def _read_headers(self) -> 'CIMultiDictProxy[str]':
- lines = [b'']
+ lines = []
while True:
chunk = await self._content.readline()
chunk = chunk.strip()
Index: aiohttp-3.6.0/tests/test_http_parser.py
===================================================================
--- aiohttp-3.6.0.orig/tests/test_http_parser.py 2025-09-03 23:13:32.011805611 +0200
+++ aiohttp-3.6.0/tests/test_http_parser.py 2025-09-03 23:13:40.828437723 +0200
@@ -13,8 +13,10 @@
from aiohttp.base_protocol import BaseProtocol
from aiohttp.http_parser import (
NO_EXTENSIONS,
+ HeadersParser,
DeflateBuffer,
HttpPayloadParser,
+ HttpRequestParser,
HttpRequestParserPy,
HttpResponseParserPy,
)
@@ -734,6 +736,21 @@
assert [4, 8] == payload._http_chunk_splits
assert payload.is_eof()
+async def test_request_chunked_with_trailer(parser: HttpRequestParser) -> None:
+ text = b"GET /test HTTP/1.1\r\nTransfer-Encoding: chunked\r\n\r\n4\r\ntest\r\n0\r\ntest: trailer\r\nsecond: test trailer\r\n\r\n"
+ messages, upgraded, tail = parser.feed_data(text)
+ assert not tail
+ msg, payload = messages[0]
+ assert await payload.read() == b"test"
+
+ # TODO: Add assertion of trailers when API added.
+
+
+async def test_request_chunked_reject_bad_trailer(parser: HttpRequestParser) -> None:
+ text = b"GET /test HTTP/1.1\r\nTransfer-Encoding: chunked\r\n\r\n0\r\nbad\ntrailer\r\n\r\n"
+ with pytest.raises(http_exceptions.BadHttpMessage, match=r"b'bad\\ntrailer'"):
+ parser.feed_data(text)
+
def _test_parse_no_length_or_te_on_post(loop, protocol, request_cls):
parser = request_cls(protocol, loop, readall=True)
@@ -805,7 +822,7 @@
async def test_parse_eof_payload(self, stream) -> None:
out = aiohttp.FlowControlDataQueue(stream,
loop=asyncio.get_event_loop())
- p = HttpPayloadParser(out, readall=True)
+ p = HttpPayloadParser(out, readall=True, headers_parser=HeadersParser())
p.feed_data(b'data')
p.feed_eof()
@@ -815,7 +832,7 @@
async def test_parse_no_body(self, stream) -> None:
out = aiohttp.FlowControlDataQueue(stream,
loop=asyncio.get_event_loop())
- p = HttpPayloadParser(out, method='PUT')
+ p = HttpPayloadParser(out, method='PUT', headers_parser=HeadersParser())
assert out.is_eof()
assert p.done
@@ -824,7 +841,7 @@
out = aiohttp.FlowControlDataQueue(stream,
loop=asyncio.get_event_loop())
- p = HttpPayloadParser(out, length=4)
+ p = HttpPayloadParser(out, length=4, headers_parser=HeadersParser())
p.feed_data(b'da')
with pytest.raises(http_exceptions.ContentLengthError):
@@ -833,7 +850,7 @@
async def test_parse_chunked_payload_size_error(self, stream) -> None:
out = aiohttp.FlowControlDataQueue(stream,
loop=asyncio.get_event_loop())
- p = HttpPayloadParser(out, chunked=True)
+ p = HttpPayloadParser(out, chunked=True, headers_parser=HeadersParser())
with pytest.raises(http_exceptions.TransferEncodingError):
p.feed_data(b'blah\r\n')
assert isinstance(out.exception(),
@@ -842,7 +859,7 @@
async def test_http_payload_parser_length(self, stream) -> None:
out = aiohttp.FlowControlDataQueue(stream,
loop=asyncio.get_event_loop())
- p = HttpPayloadParser(out, length=2)
+ p = HttpPayloadParser(out, length=2, headers_parser=HeadersParser())
eof, tail = p.feed_data(b'1245')
assert eof
@@ -857,7 +874,7 @@
out = aiohttp.FlowControlDataQueue(stream,
loop=asyncio.get_event_loop())
p = HttpPayloadParser(
- out, length=length, compression='deflate')
+ out, length=length, compression='deflate', headers_parser=HeadersParser())
p.feed_data(self._COMPRESSED)
assert b'data' == b''.join(d for d, _ in out._buffer)
assert out.is_eof()
@@ -870,7 +887,7 @@
out = aiohttp.FlowControlDataQueue(stream,
loop=asyncio.get_event_loop())
p = HttpPayloadParser(
- out, length=length, compression='deflate')
+ out, length=length, compression='deflate', headers_parser=HeadersParser())
p.feed_data(COMPRESSED)
assert b'data' == b''.join(d for d, _ in out._buffer)
assert out.is_eof()
@@ -878,7 +895,7 @@
async def test_http_payload_parser_length_zero(self, stream) -> None:
out = aiohttp.FlowControlDataQueue(stream,
loop=asyncio.get_event_loop())
- p = HttpPayloadParser(out, length=0)
+ p = HttpPayloadParser(out, length=0, headers_parser=HeadersParser())
assert p.done
assert out.is_eof()
@@ -888,7 +905,7 @@
out = aiohttp.FlowControlDataQueue(stream,
loop=asyncio.get_event_loop())
p = HttpPayloadParser(
- out, length=len(compressed), compression='br')
+ out, length=len(compressed), compression='br', headers_parser=HeadersParser())
p.feed_data(compressed)
assert b'brotli data' == b''.join(d for d, _ in out._buffer)
assert out.is_eof()
@@ -974,27 +991,3 @@
dbuf.feed_eof()
assert buf.at_eof()
-
-
-async def test_parse_chunked_payload_with_lf_in_extensions_py_parser(
- loop: asyncio.AbstractEventLoop, protocol: BaseProtocol
-) -> None:
- """Test the py-parser with a chunked payload that has a LF in the chunk extensions."""
- # The py parser will not raise the BadHttpMessage directly, but instead
- # it will set the exception on the StreamReader.
- parser = HttpRequestParserPy(
- protocol,
- loop,
- max_line_size=8190,
- max_field_size=8190,
- )
- payload = (
- b"GET / HTTP/1.1\r\nHost: localhost:5001\r\n"
- b"Transfer-Encoding: chunked\r\n\r\n2;\nxx\r\n4c\r\n0\r\n\r\n"
- b"GET /admin HTTP/1.1\r\nHost: localhost:5001\r\n"
- b"Transfer-Encoding: chunked\r\n\r\n0\r\n\r\n"
- )
- messages, _, _ = parser.feed_data(payload)
- reader = messages[0][1]
- assert isinstance(reader.exception(), http_exceptions.BadHttpMessage)
- assert "\\nxx" in str(reader.exception())