diff options
Diffstat (limited to 'meta-python/recipes-devtools/python/python3-twisted/CVE-2024-41671.patch')
| -rw-r--r-- | meta-python/recipes-devtools/python/python3-twisted/CVE-2024-41671.patch | 255 |
1 files changed, 255 insertions, 0 deletions
diff --git a/meta-python/recipes-devtools/python/python3-twisted/CVE-2024-41671.patch b/meta-python/recipes-devtools/python/python3-twisted/CVE-2024-41671.patch new file mode 100644 index 0000000000..5c0d7b6a77 --- /dev/null +++ b/meta-python/recipes-devtools/python/python3-twisted/CVE-2024-41671.patch | |||
| @@ -0,0 +1,255 @@ | |||
| 1 | From 4a930de12fb67e88fefcb8822104152f42b27abc Mon Sep 17 00:00:00 2001 | ||
| 2 | From: Adi Roiban <adiroiban@gmail.com> | ||
| 3 | Date: Mon, 29 Jul 2024 14:27:23 +0100 | ||
| 4 | Subject: [PATCH] Merge commit from fork | ||
| 5 | |||
| 6 | Address GHSA-c8m8-j448-xjx7 | ||
| 7 | |||
| 8 | CVE: CVE-2024-41671 | ||
| 9 | |||
| 10 | Upstream-Status: Backport [https://github.com/twisted/twisted/commit/4a930de12fb67e88fefcb8822104152f42b27abc] | ||
| 11 | |||
| 12 | Signed-off-by: Soumya Sambu <soumya.sambu@windriver.com> | ||
| 13 | |||
| 14 | Dropped newsfragments change from the original commit. | ||
| 15 | |||
| 16 | Signed-off-by: Ankur Tyagi <ankur.tyagi85@gmail.com> | ||
| 17 | --- | ||
| 18 | src/twisted/web/http.py | 21 +++-- | ||
| 19 | src/twisted/web/test/test_http.py | 122 ++++++++++++++++++++++++++---- | ||
| 20 | 2 files changed, 122 insertions(+), 21 deletions(-) | ||
| 21 | |||
| 22 | diff --git a/src/twisted/web/http.py b/src/twisted/web/http.py | ||
| 23 | index 1c59838..3b784f5 100644 | ||
| 24 | --- a/src/twisted/web/http.py | ||
| 25 | +++ b/src/twisted/web/http.py | ||
| 26 | @@ -2000,16 +2000,21 @@ class _ChunkedTransferDecoder: | ||
| 27 | @returns: C{False}, as there is either insufficient data to continue, | ||
| 28 | or no data remains. | ||
| 29 | """ | ||
| 30 | - if ( | ||
| 31 | - self._receivedTrailerHeadersSize + len(self._buffer) | ||
| 32 | - > self._maxTrailerHeadersSize | ||
| 33 | - ): | ||
| 34 | - raise _MalformedChunkedDataError("Trailer headers data is too long.") | ||
| 35 | - | ||
| 36 | eolIndex = self._buffer.find(b"\r\n", self._start) | ||
| 37 | |||
| 38 | if eolIndex == -1: | ||
| 39 | # Still no end of network line marker found. | ||
| 40 | + # | ||
| 41 | + # Check if we've run up against the trailer size limit: if the next | ||
| 42 | + # read contains the terminating CRLF then we'll have this many bytes | ||
| 43 | + # of trailers (including the CRLFs). | ||
| 44 | + minTrailerSize = ( | ||
| 45 | + self._receivedTrailerHeadersSize | ||
| 46 | + + len(self._buffer) | ||
| 47 | + + (1 if self._buffer.endswith(b"\r") else 2) | ||
| 48 | + ) | ||
| 49 | + if minTrailerSize > self._maxTrailerHeadersSize: | ||
| 50 | + raise _MalformedChunkedDataError("Trailer headers data is too long.") | ||
| 51 | # Continue processing more data. | ||
| 52 | return False | ||
| 53 | |||
| 54 | @@ -2019,6 +2024,8 @@ class _ChunkedTransferDecoder: | ||
| 55 | del self._buffer[0 : eolIndex + 2] | ||
| 56 | self._start = 0 | ||
| 57 | self._receivedTrailerHeadersSize += eolIndex + 2 | ||
| 58 | + if self._receivedTrailerHeadersSize > self._maxTrailerHeadersSize: | ||
| 59 | + raise _MalformedChunkedDataError("Trailer headers data is too long.") | ||
| 60 | return True | ||
| 61 | |||
| 62 | # eolIndex in this part of code is equal to 0 | ||
| 63 | @@ -2342,8 +2349,8 @@ class HTTPChannel(basic.LineReceiver, policies.TimeoutMixin): | ||
| 64 | self.__header = line | ||
| 65 | |||
| 66 | def _finishRequestBody(self, data): | ||
| 67 | - self.allContentReceived() | ||
| 68 | self._dataBuffer.append(data) | ||
| 69 | + self.allContentReceived() | ||
| 70 | |||
| 71 | def _maybeChooseTransferDecoder(self, header, data): | ||
| 72 | """ | ||
| 73 | diff --git a/src/twisted/web/test/test_http.py b/src/twisted/web/test/test_http.py | ||
| 74 | index 33d0a49..1130d31 100644 | ||
| 75 | --- a/src/twisted/web/test/test_http.py | ||
| 76 | +++ b/src/twisted/web/test/test_http.py | ||
| 77 | @@ -135,7 +135,7 @@ class DummyHTTPHandler(http.Request): | ||
| 78 | data = self.content.read() | ||
| 79 | length = self.getHeader(b"content-length") | ||
| 80 | if length is None: | ||
| 81 | - length = networkString(str(length)) | ||
| 82 | + length = str(length).encode() | ||
| 83 | request = b"'''\n" + length + b"\n" + data + b"'''\n" | ||
| 84 | self.setResponseCode(200) | ||
| 85 | self.setHeader(b"Request", self.uri) | ||
| 86 | @@ -563,17 +563,23 @@ class HTTP0_9Tests(HTTP1_0Tests): | ||
| 87 | |||
| 88 | class PipeliningBodyTests(unittest.TestCase, ResponseTestMixin): | ||
| 89 | """ | ||
| 90 | - Tests that multiple pipelined requests with bodies are correctly buffered. | ||
| 91 | + Pipelined requests get buffered and executed in the order received, | ||
| 92 | + not processed in parallel. | ||
| 93 | """ | ||
| 94 | |||
| 95 | requests = ( | ||
| 96 | b"POST / HTTP/1.1\r\n" | ||
| 97 | b"Content-Length: 10\r\n" | ||
| 98 | b"\r\n" | ||
| 99 | - b"0123456789POST / HTTP/1.1\r\n" | ||
| 100 | - b"Content-Length: 10\r\n" | ||
| 101 | - b"\r\n" | ||
| 102 | b"0123456789" | ||
| 103 | + # Chunk encoded request. | ||
| 104 | + b"POST / HTTP/1.1\r\n" | ||
| 105 | + b"Transfer-Encoding: chunked\r\n" | ||
| 106 | + b"\r\n" | ||
| 107 | + b"a\r\n" | ||
| 108 | + b"0123456789\r\n" | ||
| 109 | + b"0\r\n" | ||
| 110 | + b"\r\n" | ||
| 111 | ) | ||
| 112 | |||
| 113 | expectedResponses = [ | ||
| 114 | @@ -590,14 +596,16 @@ class PipeliningBodyTests(unittest.TestCase, ResponseTestMixin): | ||
| 115 | b"Request: /", | ||
| 116 | b"Command: POST", | ||
| 117 | b"Version: HTTP/1.1", | ||
| 118 | - b"Content-Length: 21", | ||
| 119 | - b"'''\n10\n0123456789'''\n", | ||
| 120 | + b"Content-Length: 23", | ||
| 121 | + b"'''\nNone\n0123456789'''\n", | ||
| 122 | ), | ||
| 123 | ] | ||
| 124 | |||
| 125 | - def test_noPipelining(self): | ||
| 126 | + def test_stepwiseTinyTube(self): | ||
| 127 | """ | ||
| 128 | - Test that pipelined requests get buffered, not processed in parallel. | ||
| 129 | + Imitate a slow connection that delivers one byte at a time. | ||
| 130 | + The request handler (L{DelayedHTTPHandler}) is puppeted to | ||
| 131 | + step through the handling of each request. | ||
| 132 | """ | ||
| 133 | b = StringTransport() | ||
| 134 | a = http.HTTPChannel() | ||
| 135 | @@ -606,10 +614,9 @@ class PipeliningBodyTests(unittest.TestCase, ResponseTestMixin): | ||
| 136 | # one byte at a time, to stress it. | ||
| 137 | for byte in iterbytes(self.requests): | ||
| 138 | a.dataReceived(byte) | ||
| 139 | - value = b.value() | ||
| 140 | |||
| 141 | # So far only one request should have been dispatched. | ||
| 142 | - self.assertEqual(value, b"") | ||
| 143 | + self.assertEqual(b.value(), b"") | ||
| 144 | self.assertEqual(1, len(a.requests)) | ||
| 145 | |||
| 146 | # Now, process each request one at a time. | ||
| 147 | @@ -618,8 +625,91 @@ class PipeliningBodyTests(unittest.TestCase, ResponseTestMixin): | ||
| 148 | request = a.requests[0].original | ||
| 149 | request.delayedProcess() | ||
| 150 | |||
| 151 | - value = b.value() | ||
| 152 | - self.assertResponseEquals(value, self.expectedResponses) | ||
| 153 | + self.assertResponseEquals(b.value(), self.expectedResponses) | ||
| 154 | + | ||
| 155 | + def test_stepwiseDumpTruck(self): | ||
| 156 | + """ | ||
| 157 | + Imitate a fast connection where several pipelined | ||
| 158 | + requests arrive in a single read. The request handler | ||
| 159 | + (L{DelayedHTTPHandler}) is puppeted to step through the | ||
| 160 | + handling of each request. | ||
| 161 | + """ | ||
| 162 | + b = StringTransport() | ||
| 163 | + a = http.HTTPChannel() | ||
| 164 | + a.requestFactory = DelayedHTTPHandlerProxy | ||
| 165 | + a.makeConnection(b) | ||
| 166 | + | ||
| 167 | + a.dataReceived(self.requests) | ||
| 168 | + | ||
| 169 | + # So far only one request should have been dispatched. | ||
| 170 | + self.assertEqual(b.value(), b"") | ||
| 171 | + self.assertEqual(1, len(a.requests)) | ||
| 172 | + | ||
| 173 | + # Now, process each request one at a time. | ||
| 174 | + while a.requests: | ||
| 175 | + self.assertEqual(1, len(a.requests)) | ||
| 176 | + request = a.requests[0].original | ||
| 177 | + request.delayedProcess() | ||
| 178 | + | ||
| 179 | + self.assertResponseEquals(b.value(), self.expectedResponses) | ||
| 180 | + | ||
| 181 | + def test_immediateTinyTube(self): | ||
| 182 | + """ | ||
| 183 | + Imitate a slow connection that delivers one byte at a time. | ||
| 184 | + (L{DummyHTTPHandler}) immediately responds, but no more | ||
| 185 | + than one | ||
| 186 | + """ | ||
| 187 | + b = StringTransport() | ||
| 188 | + a = http.HTTPChannel() | ||
| 189 | + a.requestFactory = DummyHTTPHandlerProxy # "sync" | ||
| 190 | + a.makeConnection(b) | ||
| 191 | + | ||
| 192 | + # one byte at a time, to stress it. | ||
| 193 | + for byte in iterbytes(self.requests): | ||
| 194 | + a.dataReceived(byte) | ||
| 195 | + # There is never more than one request dispatched at a time: | ||
| 196 | + self.assertLessEqual(len(a.requests), 1) | ||
| 197 | + | ||
| 198 | + self.assertResponseEquals(b.value(), self.expectedResponses) | ||
| 199 | + | ||
| 200 | + def test_immediateDumpTruck(self): | ||
| 201 | + """ | ||
| 202 | + Imitate a fast connection where several pipelined | ||
| 203 | + requests arrive in a single read. The request handler | ||
| 204 | + (L{DummyHTTPHandler}) immediately responds. | ||
| 205 | + This doesn't check the at-most-one pending request | ||
| 206 | + invariant but exercises otherwise uncovered code paths. | ||
| 207 | + See GHSA-c8m8-j448-xjx7. | ||
| 208 | + """ | ||
| 209 | + b = StringTransport() | ||
| 210 | + a = http.HTTPChannel() | ||
| 211 | + a.requestFactory = DummyHTTPHandlerProxy | ||
| 212 | + a.makeConnection(b) | ||
| 213 | + | ||
| 214 | + # All bytes at once to ensure there's stuff to buffer. | ||
| 215 | + a.dataReceived(self.requests) | ||
| 216 | + | ||
| 217 | + self.assertResponseEquals(b.value(), self.expectedResponses) | ||
| 218 | + | ||
| 219 | + def test_immediateABiggerTruck(self): | ||
| 220 | + """ | ||
| 221 | + Imitate a fast connection where a so many pipelined | ||
| 222 | + requests arrive in a single read that backpressure is indicated. | ||
| 223 | + The request handler (L{DummyHTTPHandler}) immediately responds. | ||
| 224 | + This doesn't check the at-most-one pending request | ||
| 225 | + invariant but exercises otherwise uncovered code paths. | ||
| 226 | + See GHSA-c8m8-j448-xjx7. | ||
| 227 | + @see: L{http.HTTPChannel._optimisticEagerReadSize} | ||
| 228 | + """ | ||
| 229 | + b = StringTransport() | ||
| 230 | + a = http.HTTPChannel() | ||
| 231 | + a.requestFactory = DummyHTTPHandlerProxy | ||
| 232 | + a.makeConnection(b) | ||
| 233 | + | ||
| 234 | + overLimitCount = a._optimisticEagerReadSize // len(self.requests) * 10 | ||
| 235 | + a.dataReceived(self.requests * overLimitCount) | ||
| 236 | + | ||
| 237 | + self.assertResponseEquals(b.value(), self.expectedResponses * overLimitCount) | ||
| 238 | |||
| 239 | def test_pipeliningReadLimit(self): | ||
| 240 | """ | ||
| 241 | @@ -1522,7 +1612,11 @@ class ChunkedTransferEncodingTests(unittest.TestCase): | ||
| 242 | lambda b: None, # pragma: nocov | ||
| 243 | ) | ||
| 244 | p._maxTrailerHeadersSize = 10 | ||
| 245 | - p.dataReceived(b"3\r\nabc\r\n0\r\n0123456789") | ||
| 246 | + # 9 bytes are received so far, in 2 packets. | ||
| 247 | + # For now, all is ok. | ||
| 248 | + p.dataReceived(b"3\r\nabc\r\n0\r\n01234567") | ||
| 249 | + p.dataReceived(b"\r") | ||
| 250 | + # Once the 10th byte is received, the processing fails. | ||
| 251 | self.assertRaises( | ||
| 252 | http._MalformedChunkedDataError, | ||
| 253 | p.dataReceived, | ||
| 254 | -- | ||
| 255 | 2.40.0 | ||
