File CVE-2025-54121.patch of Package python-starlette.39899
From 9f7ec2eb512fcc3fe90b43cb9dd9e1d08696bec1 Mon Sep 17 00:00:00 2001
From: Michael Honaker <37811263+HonakerM@users.noreply.github.com>
Date: Mon, 21 Jul 2025 02:24:02 +0900
Subject: [PATCH] Make UploadFile check for future rollover (#2962)
Co-authored-by: Marcelo Trylesinski <marcelotryle@gmail.com>
---
starlette/datastructures.py | 22 ++++++++++---
tests/test_formparsers.py | 66 +++++++++++++++++++++++++++++++++++--
2 files changed, 82 insertions(+), 6 deletions(-)
Index: starlette-0.35.1/starlette/datastructures.py
===================================================================
--- starlette-0.35.1.orig/starlette/datastructures.py
+++ starlette-0.35.1/starlette/datastructures.py
@@ -446,6 +446,10 @@ class UploadFile:
self.size = size
self.headers = headers or Headers()
+ # Capture max size from SpooledTemporaryFile if one is provided. This slightly speeds up future checks.
+ # Note: 0 means unlimited, mirroring SpooledTemporaryFile's __init__
+ self._max_mem_size = getattr(self.file, "_max_size", 0)
+
@property
def content_type(self) -> typing.Optional[str]:
return self.headers.get("content-type", None)
@@ -456,14 +460,24 @@ class UploadFile:
rolled_to_disk = getattr(self.file, "_rolled", True)
return not rolled_to_disk
+ def _will_roll(self, size_to_add: int) -> bool:
+ # If we're not in_memory then we will always roll
+ if not self._in_memory:
+ return True
+
+ # Check for SpooledTemporaryFile._max_size
+ future_size = self.file.tell() + size_to_add
+ return bool(future_size > self._max_mem_size) if self._max_mem_size else False
+
async def write(self, data: bytes) -> None:
+ new_data_len = len(data)
if self.size is not None:
- self.size += len(data)
+ self.size += new_data_len
- if self._in_memory:
- self.file.write(data)
- else:
+ if self._will_roll(new_data_len):
await run_in_threadpool(self.file.write, data)
+ else:
+ self.file.write(data)
async def read(self, size: int = -1) -> bytes:
if self._in_memory:
Index: starlette-0.35.1/tests/test_formparsers.py
===================================================================
--- starlette-0.35.1.orig/tests/test_formparsers.py
+++ starlette-0.35.1/tests/test_formparsers.py
@@ -1,15 +1,23 @@
+import threading
+from collections.abc import Generator
import os
import typing
from contextlib import nullcontext as does_not_raise
+from io import BytesIO
import pytest
+from tempfile import SpooledTemporaryFile
+from typing import Any, ClassVar
+from unittest import mock
+
from starlette.applications import Starlette
from starlette.datastructures import UploadFile
-from starlette.formparsers import MultiPartException, _user_safe_decode
+from starlette.formparsers import MultiPartException, MultiPartParser, _user_safe_decode
from starlette.requests import Request
from starlette.responses import JSONResponse
from starlette.routing import Mount
+from starlette.types import Receive, Scope, Send
class ForceMultipartDict(typing.Dict[typing.Any, typing.Any]):
@@ -99,6 +107,22 @@ async def app_read_body(scope, receive,
await response(scope, receive, send)
+async def app_monitor_thread(scope: Scope, receive: Receive, send: Send) -> None:
+ """Helper app to monitor what thread the app was called on.
+
+ This can later be used to validate thread/event loop operations.
+ """
+ request = Request(scope, receive)
+
+ # Make sure we parse the form
+ await request.form()
+ await request.close()
+
+ # Send back the current thread id
+ response = JSONResponse({"thread_ident": threading.current_thread().ident})
+ await response(scope, receive, send)
+
+
def make_app_max_parts(max_files: int = 1000, max_fields: int = 1000):
async def app(scope, receive, send):
request = Request(scope, receive)
@@ -304,6 +328,52 @@ def test_multipart_request_mixed_files_a
}
+class ThreadTrackingSpooledTemporaryFile(SpooledTemporaryFile[bytes]):
+ """Helper class to track which threads performed the rollover operation.
+
+ This is not thread-safe and not safe to share across multiple tests.
+ """
+
+ rollover_threads: ClassVar[set[int | None]] = set()
+
+ def rollover(self) -> None:
+ ThreadTrackingSpooledTemporaryFile.rollover_threads.add(
+ threading.current_thread().ident
+ )
+ super().rollover()
+
+
+@pytest.fixture
+def mock_spooled_temporary_file() -> Generator[None]:
+ try:
+ with mock.patch(
+ "starlette.formparsers.SpooledTemporaryFile",
+ ThreadTrackingSpooledTemporaryFile,
+ ):
+ yield
+ finally:
+ ThreadTrackingSpooledTemporaryFile.rollover_threads.clear()
+
+
+def test_multipart_request_large_file_rollover_in_background_thread(
+ mock_spooled_temporary_file: None, test_client_factory
+) -> None:
+ """Test that Spooled file rollovers happen in background threads."""
+ data = BytesIO(b" " * (MultiPartParser.max_file_size + 1))
+
+ client = test_client_factory(app_monitor_thread)
+ response = client.post("/", files=[("test_large", data)])
+ assert response.status_code == 200
+
+ # Parse the event thread id from the API response and ensure we have one
+ app_thread_ident = response.json().get("thread_ident")
+ assert app_thread_ident is not None
+
+ # Ensure the app thread was not the same as the rollover one and that a rollover thread exists
+ assert app_thread_ident not in ThreadTrackingSpooledTemporaryFile.rollover_threads
+ assert len(ThreadTrackingSpooledTemporaryFile.rollover_threads) == 1
+
+
def test_multipart_request_with_charset_for_filename(tmpdir, test_client_factory):
client = test_client_factory(app)
response = client.post(