323 lines | 12 KiB | Diff
From ef2c755e9e9d57d58132af790bd2fd2b957b3fb1 Mon Sep 17 00:00:00 2001
|
|
From: Tom Most <twm@freecog.net>
|
|
Date: Mon, 22 Jul 2024 23:21:49 -0700
|
|
Subject: [PATCH] Tests and partial fix
|
|
|
|
---
|
|
src/twisted/web/http.py | 35 +++-
|
|
src/twisted/web/newsfragments/12248.bugfix | 1 +
|
|
src/twisted/web/test/test_http.py | 191 +++++++++++++++++++--
|
|
3 files changed, 212 insertions(+), 15 deletions(-)
|
|
create mode 100644 src/twisted/web/newsfragments/12248.bugfix
|
|
|
|
diff --git a/src/twisted/web/http.py b/src/twisted/web/http.py
|
|
index 23f8817..c7216d0 100644
|
|
--- a/src/twisted/web/http.py
|
|
+++ b/src/twisted/web/http.py
|
|
@@ -1921,6 +1921,9 @@ class _ChunkedTransferDecoder:
|
|
self.finishCallback = finishCallback
|
|
self._buffer = bytearray()
|
|
self._start = 0
|
|
+ self._trailerHeaders = []
|
|
+ self._maxTrailerHeadersSize = 2**16
|
|
+ self._receivedTrailerHeadersSize = 0
|
|
|
|
def _dataReceived_CHUNK_LENGTH(self) -> bool:
|
|
"""
|
|
@@ -2007,11 +2010,35 @@ class _ChunkedTransferDecoder:
|
|
@raises _MalformedChunkedDataError: when anything other than CRLF is
|
|
received.
|
|
"""
|
|
- if len(self._buffer) < 2:
|
|
+ eolIndex = self._buffer.find(b"\r\n", self._start)
|
|
+
|
|
+ if eolIndex == -1:
|
|
+ # Still no end of network line marker found.
|
|
+ #
|
|
+ # Check if we've run up against the trailer size limit: if the next
|
|
+ # read contains the terminating CRLF then we'll have this many bytes
|
|
+ # of trailers (including the CRLFs).
|
|
+ minTrailerSize = (
|
|
+ self._receivedTrailerHeadersSize
|
|
+ + len(self._buffer)
|
|
+ + (1 if self._buffer.endswith(b"\r") else 2)
|
|
+ )
|
|
+ if minTrailerSize > self._maxTrailerHeadersSize:
|
|
+ raise _MalformedChunkedDataError("Trailer headers data is too long.")
|
|
+ # Continue processing more data.
|
|
return False
|
|
|
|
- if not self._buffer.startswith(b"\r\n"):
|
|
- raise _MalformedChunkedDataError("Chunk did not end with CRLF")
|
|
+ if eolIndex > 0:
|
|
+ # A trailer header was detected.
|
|
+ self._trailerHeaders.append(self._buffer[0:eolIndex])
|
|
+ del self._buffer[0 : eolIndex + 2]
|
|
+ self._start = 0
|
|
+ self._receivedTrailerHeadersSize += eolIndex + 2
|
|
+ if self._receivedTrailerHeadersSize > self._maxTrailerHeadersSize:
|
|
+ raise _MalformedChunkedDataError("Trailer headers data is too long.")
|
|
+ return True
|
|
+
|
|
+        # At this point eolIndex == 0: the buffer begins with the chunk-terminating CRLF.
|
|
|
|
data = memoryview(self._buffer)[2:].tobytes()
|
|
del self._buffer[:]
|
|
@@ -2331,8 +2358,8 @@ class HTTPChannel(basic.LineReceiver, policies.TimeoutMixin):
|
|
self.__header = line
|
|
|
|
def _finishRequestBody(self, data):
|
|
- self.allContentReceived()
|
|
self._dataBuffer.append(data)
|
|
+ self.allContentReceived()
|
|
|
|
def _maybeChooseTransferDecoder(self, header, data):
|
|
"""
|
|
diff --git a/src/twisted/web/newsfragments/12248.bugfix b/src/twisted/web/newsfragments/12248.bugfix
|
|
new file mode 100644
|
|
index 0000000..2fb6067
|
|
--- /dev/null
|
|
+++ b/src/twisted/web/newsfragments/12248.bugfix
|
|
@@ -0,0 +1 @@
|
|
+The HTTP 1.0 and 1.1 server provided by twisted.web could process pipelined HTTP requests out-of-order, possibly resulting in information disclosure (CVE-2024-41671/GHSA-c8m8-j448-xjx7)
|
|
diff --git a/src/twisted/web/test/test_http.py b/src/twisted/web/test/test_http.py
|
|
index f304991..ae061cd 100644
|
|
--- a/src/twisted/web/test/test_http.py
|
|
+++ b/src/twisted/web/test/test_http.py
|
|
@@ -460,10 +460,9 @@ class HTTP1_0Tests(unittest.TestCase, ResponseTestMixin):
|
|
# one byte at a time, to stress it.
|
|
for byte in iterbytes(self.requests):
|
|
a.dataReceived(byte)
|
|
- value = b.value()
|
|
|
|
# So far only one request should have been dispatched.
|
|
- self.assertEqual(value, b"")
|
|
+ self.assertEqual(b.value(), b"")
|
|
self.assertEqual(1, len(a.requests))
|
|
|
|
# Now, process each request one at a time.
|
|
@@ -472,8 +471,95 @@ class HTTP1_0Tests(unittest.TestCase, ResponseTestMixin):
|
|
request = a.requests[0].original
|
|
request.delayedProcess()
|
|
|
|
- value = b.value()
|
|
- self.assertResponseEquals(value, self.expected_response)
|
|
+ self.assertResponseEquals(b.value(), self.expectedResponses)
|
|
+
|
|
+ def test_stepwiseDumpTruck(self):
|
|
+ """
|
|
+ Imitate a fast connection where several pipelined
|
|
+ requests arrive in a single read. The request handler
|
|
+ (L{DelayedHTTPHandler}) is puppeted to step through the
|
|
+ handling of each request.
|
|
+ """
|
|
+ b = StringTransport()
|
|
+ a = http.HTTPChannel()
|
|
+ a.requestFactory = DelayedHTTPHandlerProxy
|
|
+ a.makeConnection(b)
|
|
+
|
|
+ a.dataReceived(self.requests)
|
|
+
|
|
+ # So far only one request should have been dispatched.
|
|
+ self.assertEqual(b.value(), b"")
|
|
+ self.assertEqual(1, len(a.requests))
|
|
+
|
|
+ # Now, process each request one at a time.
|
|
+ while a.requests:
|
|
+ self.assertEqual(1, len(a.requests))
|
|
+ request = a.requests[0].original
|
|
+ request.delayedProcess()
|
|
+
|
|
+ self.assertResponseEquals(b.value(), self.expectedResponses)
|
|
+
|
|
+ def test_immediateTinyTube(self):
|
|
+ """
|
|
+ Imitate a slow connection that delivers one byte at a time.
|
|
+
|
|
+        The request handler (L{DummyHTTPHandler}) immediately responds, but no
+        more than one request is dispatched at a time.
|
|
+ """
|
|
+ b = StringTransport()
|
|
+ a = http.HTTPChannel()
|
|
+ a.requestFactory = DummyHTTPHandlerProxy # "sync"
|
|
+ a.makeConnection(b)
|
|
+
|
|
+ # one byte at a time, to stress it.
|
|
+ for byte in iterbytes(self.requests):
|
|
+ a.dataReceived(byte)
|
|
+ # There is never more than one request dispatched at a time:
|
|
+ self.assertLessEqual(len(a.requests), 1)
|
|
+
|
|
+ self.assertResponseEquals(b.value(), self.expectedResponses)
|
|
+
|
|
+ def test_immediateDumpTruck(self):
|
|
+ """
|
|
+ Imitate a fast connection where several pipelined
|
|
+ requests arrive in a single read. The request handler
|
|
+ (L{DummyHTTPHandler}) immediately responds.
|
|
+
|
|
+ This doesn't check the at-most-one pending request
|
|
+ invariant but exercises otherwise uncovered code paths.
|
|
+ See GHSA-c8m8-j448-xjx7.
|
|
+ """
|
|
+ b = StringTransport()
|
|
+ a = http.HTTPChannel()
|
|
+ a.requestFactory = DummyHTTPHandlerProxy
|
|
+ a.makeConnection(b)
|
|
+
|
|
+ # All bytes at once to ensure there's stuff to buffer.
|
|
+ a.dataReceived(self.requests)
|
|
+
|
|
+ self.assertResponseEquals(b.value(), self.expectedResponses)
|
|
+
|
|
+ def test_immediateABiggerTruck(self):
|
|
+ """
|
|
+        Imitate a fast connection where so many pipelined
|
|
+ requests arrive in a single read that backpressure is indicated.
|
|
+ The request handler (L{DummyHTTPHandler}) immediately responds.
|
|
+
|
|
+ This doesn't check the at-most-one pending request
|
|
+ invariant but exercises otherwise uncovered code paths.
|
|
+ See GHSA-c8m8-j448-xjx7.
|
|
+
|
|
+ @see: L{http.HTTPChannel._optimisticEagerReadSize}
|
|
+ """
|
|
+ b = StringTransport()
|
|
+ a = http.HTTPChannel()
|
|
+ a.requestFactory = DummyHTTPHandlerProxy
|
|
+ a.makeConnection(b)
|
|
+
|
|
+ overLimitCount = a._optimisticEagerReadSize // len(self.requests) * 10
|
|
+ a.dataReceived(self.requests * overLimitCount)
|
|
+
|
|
+ self.assertResponseEquals(b.value(), self.expectedResponses * overLimitCount)
|
|
|
|
|
|
class HTTP1_1Tests(HTTP1_0Tests):
|
|
@@ -574,10 +660,15 @@ class PipeliningBodyTests(unittest.TestCase, ResponseTestMixin):
|
|
b"POST / HTTP/1.1\r\n"
|
|
b"Content-Length: 10\r\n"
|
|
b"\r\n"
|
|
- b"0123456789POST / HTTP/1.1\r\n"
|
|
- b"Content-Length: 10\r\n"
|
|
- b"\r\n"
|
|
b"0123456789"
|
|
+ # Chunk encoded request.
|
|
+ b"POST / HTTP/1.1\r\n"
|
|
+ b"Transfer-Encoding: chunked\r\n"
|
|
+ b"\r\n"
|
|
+ b"a\r\n"
|
|
+ b"0123456789\r\n"
|
|
+ b"0\r\n"
|
|
+ b"\r\n"
|
|
)
|
|
|
|
expectedResponses = [
|
|
@@ -594,14 +685,16 @@ class PipeliningBodyTests(unittest.TestCase, ResponseTestMixin):
|
|
b"Request: /",
|
|
b"Command: POST",
|
|
b"Version: HTTP/1.1",
|
|
- b"Content-Length: 21",
|
|
- b"'''\n10\n0123456789'''\n",
|
|
+ b"Content-Length: 23",
|
|
+ b"'''\nNone\n0123456789'''\n",
|
|
),
|
|
]
|
|
|
|
- def test_noPipelining(self):
|
|
+ def test_stepwiseTinyTube(self):
|
|
"""
|
|
- Test that pipelined requests get buffered, not processed in parallel.
|
|
+ Imitate a slow connection that delivers one byte at a time.
|
|
+ The request handler (L{DelayedHTTPHandler}) is puppeted to
|
|
+ step through the handling of each request.
|
|
"""
|
|
b = StringTransport()
|
|
a = http.HTTPChannel()
|
|
@@ -1474,6 +1567,82 @@ class ChunkedTransferEncodingTests(unittest.TestCase):
|
|
self.assertEqual(errors, [])
|
|
self.assertEqual(successes, [True])
|
|
|
|
+ def test_trailerHeaders(self):
|
|
+ """
|
|
+ L{_ChunkedTransferDecoder.dataReceived} decodes chunked-encoded data
|
|
+ and ignores trailer headers which come after the terminating zero-length
|
|
+ chunk.
|
|
+ """
|
|
+ L = []
|
|
+ finished = []
|
|
+ p = http._ChunkedTransferDecoder(L.append, finished.append)
|
|
+ p.dataReceived(b"3\r\nabc\r\n5\r\n12345\r\n")
|
|
+ p.dataReceived(
|
|
+ b"a\r\n0123456789\r\n0\r\nServer-Timing: total;dur=123.4\r\nExpires: Wed, 21 Oct 2015 07:28:00 GMT\r\n\r\n"
|
|
+ )
|
|
+ self.assertEqual(L, [b"abc", b"12345", b"0123456789"])
|
|
+ self.assertEqual(finished, [b""])
|
|
+ self.assertEqual(
|
|
+ p._trailerHeaders,
|
|
+ [
|
|
+ b"Server-Timing: total;dur=123.4",
|
|
+ b"Expires: Wed, 21 Oct 2015 07:28:00 GMT",
|
|
+ ],
|
|
+ )
|
|
+
|
|
+ def test_shortTrailerHeader(self):
|
|
+ """
|
|
+ L{_ChunkedTransferDecoder.dataReceived} decodes chunks of input with
|
|
+        trailer header broken up and delivered in multiple calls.
|
|
+ """
|
|
+ L = []
|
|
+ finished = []
|
|
+ p = http._ChunkedTransferDecoder(L.append, finished.append)
|
|
+ for s in iterbytes(
|
|
+ b"3\r\nabc\r\n5\r\n12345\r\n0\r\nServer-Timing: total;dur=123.4\r\n\r\n"
|
|
+ ):
|
|
+ p.dataReceived(s)
|
|
+ self.assertEqual(L, [b"a", b"b", b"c", b"1", b"2", b"3", b"4", b"5"])
|
|
+ self.assertEqual(finished, [b""])
|
|
+ self.assertEqual(p._trailerHeaders, [b"Server-Timing: total;dur=123.4"])
|
|
+
|
|
+ def test_tooLongTrailerHeader(self):
|
|
+ r"""
|
|
+ L{_ChunkedTransferDecoder.dataReceived} raises
|
|
+ L{_MalformedChunkedDataError} when the trailing headers data is too long.
|
|
+ """
|
|
+ p = http._ChunkedTransferDecoder(
|
|
+ lambda b: None,
|
|
+ lambda b: None, # pragma: nocov
|
|
+ )
|
|
+ p._maxTrailerHeadersSize = 10
|
|
+ self.assertRaises(
|
|
+ http._MalformedChunkedDataError,
|
|
+ p.dataReceived,
|
|
+ b"3\r\nabc\r\n0\r\nTotal-Trailer: header;greater-then=10\r\n\r\n",
|
|
+ )
|
|
+
|
|
+ def test_unfinishedTrailerHeader(self):
|
|
+ r"""
|
|
+ L{_ChunkedTransferDecoder.dataReceived} raises
|
|
+ L{_MalformedChunkedDataError} when the trailing headers data is too long
|
|
+ and doesn't have final CRLF characters.
|
|
+ """
|
|
+ p = http._ChunkedTransferDecoder(
|
|
+ lambda b: None,
|
|
+ lambda b: None, # pragma: nocov
|
|
+ )
|
|
+ p._maxTrailerHeadersSize = 10
|
|
+ # 9 bytes are received so far, in 2 packets.
|
|
+ # For now, all is ok.
|
|
+ p.dataReceived(b"3\r\nabc\r\n0\r\n01234567")
|
|
+ p.dataReceived(b"\r")
|
|
+ # Once the 10th byte is received, the processing fails.
|
|
+ self.assertRaises(
|
|
+ http._MalformedChunkedDataError,
|
|
+ p.dataReceived,
|
|
+ b"A",
|
|
+ )
|
|
|
|
class ChunkingTests(unittest.TestCase, ResponseTestMixin):
|
|
|
|
--
|
|
2.41.0
|
|
|