File CVE-2024-41671.patch of Package python3-Twisted.34928

Index: Twisted-22.2.0/src/twisted/web/http.py
===================================================================
--- Twisted-22.2.0.orig/src/twisted/web/http.py
+++ Twisted-22.2.0/src/twisted/web/http.py
@@ -1897,6 +1897,10 @@ class _ChunkedTransferDecoder:
         self.finishCallback = finishCallback
         self._buffer = bytearray()
         self._start = 0
+        self._trailerHeaders: List[bytearray] = []
+        self._maxTrailerHeadersSize = 2**16
+        self._receivedTrailerHeadersSize = 0
+
 
     def _dataReceived_CHUNK_LENGTH(self) -> bool:
         """
@@ -1973,23 +1977,44 @@ class _ChunkedTransferDecoder:
 
     def _dataReceived_TRAILER(self) -> bool:
         """
-        Await the carriage return and line feed characters that follow the
-        terminal zero-length chunk. Then invoke C{finishCallback} and switch to
-        state C{'FINISHED'}.
+        Collect trailer headers, if any, that follow the terminal zero-length
+        chunk, then invoke C{finishCallback} and switch to state C{'FINISHED'}.
 
         @returns: C{False}, as there is either insufficient data to continue,
             or no data remains.
-
-        @raises _MalformedChunkedDataError: when anything other than CRLF is
-            received.
         """
-        if len(self._buffer) < 2:
+        eolIndex = self._buffer.find(b"\r\n", self._start)
+
+        if eolIndex == -1:
+            # No end-of-line (CRLF) marker has been found yet.
+            #
+            # Check if we've run up against the trailer size limit: if the next
+            # read contains the terminating CRLF then we'll have this many bytes
+            # of trailers (including the CRLFs).
+            minTrailerSize = (
+                self._receivedTrailerHeadersSize
+                + len(self._buffer)
+                + (1 if self._buffer.endswith(b"\r") else 2)
+            )
+            if minTrailerSize > self._maxTrailerHeadersSize:
+                raise _MalformedChunkedDataError("Trailer headers data is too long.")
+            # Continue processing more data.
             return False
 
-        if not self._buffer.startswith(b"\r\n"):
-            raise _MalformedChunkedDataError("Chunk did not end with CRLF")
+        if eolIndex > 0:
+            # A trailer header was detected.
+            self._trailerHeaders.append(self._buffer[0:eolIndex])
+            del self._buffer[0 : eolIndex + 2]
+            self._start = 0
+            self._receivedTrailerHeadersSize += eolIndex + 2
+            if self._receivedTrailerHeadersSize > self._maxTrailerHeadersSize:
+                raise _MalformedChunkedDataError("Trailer headers data is too long.")
+            return True
+
+        # At this point eolIndex is 0: the buffer starts with the terminating CRLF.
 
         data = memoryview(self._buffer)[2:].tobytes()
+
         del self._buffer[:]
         self.state = "FINISHED"
         self.finishCallback(data)
@@ -2307,8 +2332,8 @@ class HTTPChannel(basic.LineReceiver, po
             self.__header = line
 
     def _finishRequestBody(self, data):
-        self.allContentReceived()
         self._dataBuffer.append(data)
+        self.allContentReceived()
 
     def _maybeChooseTransferDecoder(self, header, data):
         """
Index: Twisted-22.2.0/src/twisted/web/newsfragments/12248.bugfix
===================================================================
--- /dev/null
+++ Twisted-22.2.0/src/twisted/web/newsfragments/12248.bugfix
@@ -0,0 +1 @@
+The HTTP 1.0 and 1.1 server provided by twisted.web could process pipelined HTTP requests out-of-order, possibly resulting in information disclosure (CVE-2024-41671/GHSA-c8m8-j448-xjx7)
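
To make the in-order handling concrete, a minimal standalone sketch (not part
of the patch) that feeds two pipelined requests to an HTTPChannel over a
StringTransport, in the spirit of the tests below; EchoRequest is a
hypothetical handler used only for illustration:

from twisted.test.proto_helpers import StringTransport
from twisted.web import http


class EchoRequest(http.Request):
    # Hypothetical synchronous handler: echo the request body straight back.
    def process(self):
        body = self.content.read()
        self.setResponseCode(200)
        self.setHeader(b"Content-Length", str(len(body)).encode())
        self.write(body)
        self.finish()


transport = StringTransport()
channel = http.HTTPChannel()
channel.requestFactory = EchoRequest
channel.makeConnection(transport)

# Two pipelined POSTs arriving in a single read.  With _finishRequestBody
# reordered, the second request is buffered until the first response is
# complete, so "first" is always written to the transport before "second".
channel.dataReceived(
    b"POST /a HTTP/1.1\r\nContent-Length: 5\r\n\r\nfirst"
    b"POST /b HTTP/1.1\r\nContent-Length: 6\r\n\r\nsecond"
)
assert transport.value().index(b"first") < transport.value().index(b"second")
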
Index: Twisted-22.2.0/src/twisted/web/test/test_http.py
===================================================================
--- Twisted-22.2.0.orig/src/twisted/web/test/test_http.py
+++ Twisted-22.2.0/src/twisted/web/test/test_http.py
@@ -136,7 +136,7 @@ class DummyHTTPHandler(http.Request):
         data = self.content.read()
         length = self.getHeader(b"content-length")
         if length is None:
-            length = networkString(str(length))
+            length = str(length).encode()
         request = b"'''\n" + length + b"\n" + data + b"'''\n"
         self.setResponseCode(200)
         self.setHeader(b"Request", self.uri)
@@ -567,17 +567,23 @@ class HTTP0_9Tests(HTTP1_0Tests):
 
 class PipeliningBodyTests(unittest.TestCase, ResponseTestMixin):
     """
-    Tests that multiple pipelined requests with bodies are correctly buffered.
+    Pipelined requests get buffered and executed in the order received,
+    not processed in parallel.
     """
 
     requests = (
         b"POST / HTTP/1.1\r\n"
         b"Content-Length: 10\r\n"
         b"\r\n"
-        b"0123456789POST / HTTP/1.1\r\n"
-        b"Content-Length: 10\r\n"
-        b"\r\n"
         b"0123456789"
+        # Chunk encoded request.
+        b"POST / HTTP/1.1\r\n"
+        b"Transfer-Encoding: chunked\r\n"
+        b"\r\n"
+        b"a\r\n"
+        b"0123456789\r\n"
+        b"0\r\n"
+        b"\r\n"
     )
 
     expectedResponses = [
@@ -594,14 +600,16 @@ class PipeliningBodyTests(unittest.TestC
             b"Request: /",
             b"Command: POST",
             b"Version: HTTP/1.1",
-            b"Content-Length: 21",
-            b"'''\n10\n0123456789'''\n",
+            b"Content-Length: 23",
+            b"'''\nNone\n0123456789'''\n",
         ),
     ]
 
-    def test_noPipelining(self):
+    def test_stepwiseTinyTube(self):
         """
-        Test that pipelined requests get buffered, not processed in parallel.
+        Imitate a slow connection that delivers one byte at a time.
+        The request handler (L{DelayedHTTPHandler}) is puppeted to
+        step through the handling of each request.
         """
         b = StringTransport()
         a = http.HTTPChannel()
@@ -610,10 +618,9 @@ class PipeliningBodyTests(unittest.TestC
         # one byte at a time, to stress it.
         for byte in iterbytes(self.requests):
             a.dataReceived(byte)
-        value = b.value()
 
         # So far only one request should have been dispatched.
-        self.assertEqual(value, b"")
+        self.assertEqual(b.value(), b"")
         self.assertEqual(1, len(a.requests))
 
         # Now, process each request one at a time.
@@ -622,8 +629,95 @@ class PipeliningBodyTests(unittest.TestC
             request = a.requests[0].original
             request.delayedProcess()
 
-        value = b.value()
-        self.assertResponseEquals(value, self.expectedResponses)
+        self.assertResponseEquals(b.value(), self.expectedResponses)
+
+    def test_stepwiseDumpTruck(self):
+        """
+        Imitate a fast connection where several pipelined
+        requests arrive in a single read. The request handler
+        (L{DelayedHTTPHandler}) is puppeted to step through the
+        handling of each request.
+        """
+        b = StringTransport()
+        a = http.HTTPChannel()
+        a.requestFactory = DelayedHTTPHandlerProxy
+        a.makeConnection(b)
+
+        a.dataReceived(self.requests)
+
+        # So far only one request should have been dispatched.
+        self.assertEqual(b.value(), b"")
+        self.assertEqual(1, len(a.requests))
+
+        # Now, process each request one at a time.
+        while a.requests:
+            self.assertEqual(1, len(a.requests))
+            request = a.requests[0].original
+            request.delayedProcess()
+
+        self.assertResponseEquals(b.value(), self.expectedResponses)
+
+    def test_immediateTinyTube(self):
+        """
+        Imitate a slow connection that delivers one byte at a time.
+
+        (L{DummyHTTPHandler}) immediately responds, but no more
+        than one
+        """
+        b = StringTransport()
+        a = http.HTTPChannel()
+        a.requestFactory = DummyHTTPHandlerProxy  # "sync"
+        a.makeConnection(b)
+
+        # one byte at a time, to stress it.
+        for byte in iterbytes(self.requests):
+            a.dataReceived(byte)
+            # There is never more than one request dispatched at a time:
+            self.assertLessEqual(len(a.requests), 1)
+
+        self.assertResponseEquals(b.value(), self.expectedResponses)
+
+    def test_immediateDumpTruck(self):
+        """
+        Imitate a fast connection where several pipelined
+        requests arrive in a single read. The request handler
+        (L{DummyHTTPHandler}) immediately responds.
+
+        This doesn't check the at-most-one pending request
+        invariant but exercises otherwise uncovered code paths.
+        See GHSA-c8m8-j448-xjx7.
+        """
+        b = StringTransport()
+        a = http.HTTPChannel()
+        a.requestFactory = DummyHTTPHandlerProxy
+        a.makeConnection(b)
+
+        # All bytes at once to ensure there's stuff to buffer.
+        a.dataReceived(self.requests)
+
+        self.assertResponseEquals(b.value(), self.expectedResponses)
+
+    def test_immediateABiggerTruck(self):
+        """
+        Imitate a fast connection where a so many pipelined
+        requests arrive in a single read that backpressure is indicated.
+        The request handler (L{DummyHTTPHandler}) immediately responds.
+
+        This doesn't check the at-most-one pending request
+        invariant but exercises otherwise uncovered code paths.
+        See GHSA-c8m8-j448-xjx7.
+
+        @see: L{http.HTTPChannel._optimisticEagerReadSize}
+        """
+        b = StringTransport()
+        a = http.HTTPChannel()
+        a.requestFactory = DummyHTTPHandlerProxy
+        a.makeConnection(b)
+
+        overLimitCount = a._optimisticEagerReadSize // len(self.requests) * 10
+        a.dataReceived(self.requests * overLimitCount)
+
+        self.assertResponseEquals(b.value(), self.expectedResponses * overLimitCount)
 
     def test_pipeliningReadLimit(self):
         """
@@ -1386,20 +1480,6 @@ class ChunkedTransferEncodingTests(unitt
             http._MalformedChunkedDataError, p.dataReceived, b"3\r\nabc!!!!"
         )
 
-    def test_malformedChunkEndFinal(self):
-        r"""
-        L{_ChunkedTransferDecoder.dataReceived} raises
-        L{_MalformedChunkedDataError} when the terminal zero-length chunk is
-        followed by characters other than C{\r\n}.
-        """
-        p = http._ChunkedTransferDecoder(
-            lambda b: None,
-            lambda b: None,  # pragma: nocov
-        )
-        self.assertRaises(
-            http._MalformedChunkedDataError, p.dataReceived, b"3\r\nabc\r\n0\r\n!!"
-        )
-
     def test_finish(self):
         """
         L{_ChunkedTransferDecoder.dataReceived} interprets a zero-length
@@ -1474,6 +1554,44 @@ class ChunkedTransferEncodingTests(unitt
         self.assertEqual(errors, [])
         self.assertEqual(successes, [True])
 
+    def test_tooLongTrailerHeader(self):
+        r"""
+        L{_ChunkedTransferDecoder.dataReceived} raises
+        L{_MalformedChunkedDataError} when the trailing headers data is too long.
+        """
+        p = http._ChunkedTransferDecoder(
+            lambda b: None,
+            lambda b: None,  # pragma: nocov
+        )
+        p._maxTrailerHeadersSize = 10
+        self.assertRaises(
+            http._MalformedChunkedDataError,
+            p.dataReceived,
+            b"3\r\nabc\r\n0\r\nTotal-Trailer: header;greater-then=10\r\n\r\n",
+        )
+
+    def test_unfinishedTrailerHeader(self):
+        r"""
+        L{_ChunkedTransferDecoder.dataReceived} raises
+        L{_MalformedChunkedDataError} when the trailing headers data is too long
+        and doesn't have final CRLF characters.
+        """
+        p = http._ChunkedTransferDecoder(
+            lambda b: None,
+            lambda b: None,  # pragma: nocov
+        )
+        p._maxTrailerHeadersSize = 10
+        # 9 bytes are received so far, in 2 packets.
+        # For now, all is ok.
+        p.dataReceived(b"3\r\nabc\r\n0\r\n01234567")
+        p.dataReceived(b"\r")
+        # Once the 10th byte is received, the processing fails.
+        self.assertRaises(
+            http._MalformedChunkedDataError,
+            p.dataReceived,
+            b"A",
+        )
+
 
 class ChunkingTests(unittest.TestCase, ResponseTestMixin):
 
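
For reference, a worked check of the trailer-size arithmetic exercised by
test_unfinishedTrailerHeader above (a standalone sketch; min_trailer_size is a
local helper mirroring the expression in _dataReceived_TRAILER, not a Twisted
API):

def min_trailer_size(received_so_far: int, buffer: bytes) -> int:
    # Smallest possible total trailer size once the terminating CRLF arrives:
    # bytes already consumed, bytes currently buffered, plus the 1 or 2 bytes
    # still needed to complete the final b"\r\n".
    return received_so_far + len(buffer) + (1 if buffer.endswith(b"\r") else 2)


# With _maxTrailerHeadersSize forced to 10, as in the test above:
assert min_trailer_size(0, b"01234567") == 10     # first packet: at the limit, allowed
assert min_trailer_size(0, b"01234567\r") == 10   # after b"\r": still allowed
assert min_trailer_size(0, b"01234567\rA") == 12  # after b"A": 12 > 10, decoder raises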