author     Mingli Yu <mingli.yu@windriver.com>     2023-11-02 16:46:06 +0800
committer  Steve Sakoman <steve@sakoman.com>       2023-11-11 08:23:01 -1000
commit     eefb8f69cebc052f2fe4f081ce6f74413df099f7 (patch)
tree       d33e445cada08353c68f650917c9ce9e8dd7dbd4
parent     7b04ca50eee2e1b431219419a4d9599d2d14c71b (diff)
download   poky-eefb8f69cebc052f2fe4f081ce6f74413df099f7.tar.gz
curl: Fix CVE-2023-38039
Backport patch [1] to fix CVE-2023-38039 and reference [2] and [3] to
fix the build error.

[1] https://github.com/curl/curl/commit/3ee79c1674fd6f9
[2] https://github.com/curl/curl/commit/2cb0d346aaa
[3] https://github.com/curl/curl/commit/83319e027179

(From OE-Core rev: 77a7921660e8da1cb618ba3634835790ae8adfdd)

Signed-off-by: Mingli Yu <mingli.yu@windriver.com>
Signed-off-by: Steve Sakoman <steve@sakoman.com>
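From an application's point of view, the backported limit means curl_easy_perform() now fails with CURLE_RECV_ERROR once a single response accumulates more than 300 KB of headers, instead of buffering them without bound. Below is a minimal sketch of a libcurl client that would observe the new behaviour; the URL is a hypothetical endpoint assumed to reply with an oversized header set:

#include <stdio.h>
#include <curl/curl.h>

int main(void)
{
  CURL *curl;
  CURLcode res;
  char errbuf[CURL_ERROR_SIZE] = "";

  curl_global_init(CURL_GLOBAL_DEFAULT);
  curl = curl_easy_init();
  if(curl) {
    /* hypothetical endpoint that replies with > 300 KB of response headers */
    curl_easy_setopt(curl, CURLOPT_URL, "http://example.test/huge-headers");
    curl_easy_setopt(curl, CURLOPT_ERRORBUFFER, errbuf);

    res = curl_easy_perform(curl);

    /* with the CVE-2023-38039 fix applied, an oversized header set makes the
       transfer fail instead of being accumulated indefinitely */
    if(res == CURLE_RECV_ERROR)
      printf("rejected as expected: %s\n",
             errbuf[0] ? errbuf : curl_easy_strerror(res));
    else
      printf("result: %s\n", curl_easy_strerror(res));

    curl_easy_cleanup(curl);
  }
  curl_global_cleanup();
  return 0;
}

If CURLOPT_ERRORBUFFER is set as above, the buffer would carry the "Too large response headers: ..." text that the patch emits via failf().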
-rw-r--r--   meta/recipes-support/curl/curl/CVE-2023-38039.patch   209
-rw-r--r--   meta/recipes-support/curl/curl_8.0.1.bb                  1
2 files changed, 210 insertions, 0 deletions
diff --git a/meta/recipes-support/curl/curl/CVE-2023-38039.patch b/meta/recipes-support/curl/curl/CVE-2023-38039.patch
new file mode 100644
index 0000000000..ef8b600413
--- /dev/null
+++ b/meta/recipes-support/curl/curl/CVE-2023-38039.patch
@@ -0,0 +1,209 @@
From daa73dbfa9d4dbaf5415cc14dcbf31e45ed77468 Mon Sep 17 00:00:00 2001
From: Daniel Stenberg <daniel@haxx.se>
Date: Thu, 2 Nov 2023 15:57:39 +0800
Subject: [PATCH] http: return error when receiving too large header set

To avoid abuse. The limit is set to 300 KB for the accumulated size of
all received HTTP headers for a single response. Incomplete research
suggests that Chrome uses a 256-300 KB limit, while Firefox allows up to
1MB.

Closes #11582

CVE: CVE-2023-38039

Upstream-Status: Backport [https://github.com/curl/curl/commit/3ee79c1674fd6f9]

Signed-off-by: Mingli Yu <mingli.yu@windriver.com>
---
 lib/c-hyper.c  | 12 +++++++-----
 lib/http.c     | 39 +++++++++++++++++++++++++++++++++++----
 lib/http.h     |  9 +++++++++
 lib/pingpong.c |  2 +-
 lib/urldata.h  | 18 ++++++++++--------
 5 files changed, 62 insertions(+), 18 deletions(-)

diff --git a/lib/c-hyper.c b/lib/c-hyper.c
index 9c7632d..28f64ef 100644
--- a/lib/c-hyper.c
+++ b/lib/c-hyper.c
@@ -174,8 +174,11 @@ static int hyper_each_header(void *userdata,
     }
   }
 
-  data->info.header_size += (curl_off_t)len;
-  data->req.headerbytecount += (curl_off_t)len;
+  result = Curl_bump_headersize(data, len, FALSE);
+  if(result) {
+    data->state.hresult = result;
+    return HYPER_ITER_BREAK;
+  }
   return HYPER_ITER_CONTINUE;
 }
 
@@ -305,9 +308,8 @@ static CURLcode status_line(struct Curl_easy *data,
     if(result)
       return result;
   }
-  data->info.header_size += (curl_off_t)len;
-  data->req.headerbytecount += (curl_off_t)len;
-  return CURLE_OK;
+  result = Curl_bump_headersize(data, len, FALSE);
+  return result;
 }
 
 /*
diff --git a/lib/http.c b/lib/http.c
index 400d2b0..d3efd60 100644
--- a/lib/http.c
+++ b/lib/http.c
@@ -3760,6 +3760,34 @@ static CURLcode verify_header(struct Curl_easy *data)
   return CURLE_OK;
 }
 
+CURLcode Curl_bump_headersize(struct Curl_easy *data,
+                              size_t delta,
+                              bool connect_only)
+{
+  size_t bad = 0;
+  unsigned int max = MAX_HTTP_RESP_HEADER_SIZE;
+  if(delta < MAX_HTTP_RESP_HEADER_SIZE) {
+    data->info.header_size += (unsigned int)delta;
+    data->req.allheadercount += (unsigned int)delta;
+    if(!connect_only)
+      data->req.headerbytecount += (unsigned int)delta;
+    if(data->req.allheadercount > max)
+      bad = data->req.allheadercount;
+    else if(data->info.header_size > (max * 20)) {
+      bad = data->info.header_size;
+      max *= 20;
+    }
+  }
+  else
+    bad = data->req.allheadercount + delta;
+  if(bad) {
+    failf(data, "Too large response headers: %zu > %u", bad, max);
+    return CURLE_RECV_ERROR;
+  }
+  return CURLE_OK;
+}
+
+
 /*
  * Read any HTTP header lines from the server and pass them to the client app.
  */
@@ -4007,8 +4035,9 @@ CURLcode Curl_http_readwrite_headers(struct Curl_easy *data,
       if(result)
         return result;
 
-      data->info.header_size += (long)headerlen;
-      data->req.headerbytecount += (long)headerlen;
+      result = Curl_bump_headersize(data, headerlen, FALSE);
+      if(result)
+        return result;
 
       /*
        * When all the headers have been parsed, see if we should give
@@ -4330,8 +4359,10 @@ CURLcode Curl_http_readwrite_headers(struct Curl_easy *data,
       if(result)
         return result;
 
-      data->info.header_size += Curl_dyn_len(&data->state.headerb);
-      data->req.headerbytecount += Curl_dyn_len(&data->state.headerb);
+      result = Curl_bump_headersize(data, Curl_dyn_len(&data->state.headerb),
+                                    FALSE);
+      if(result)
+        return result;
 
       Curl_dyn_reset(&data->state.headerb);
     }
diff --git a/lib/http.h b/lib/http.h
index 444abc0..ea3b37e 100644
--- a/lib/http.h
+++ b/lib/http.h
@@ -60,6 +60,10 @@ extern const struct Curl_handler Curl_handler_wss;
 #endif
 #endif /* websockets */
 
+CURLcode Curl_bump_headersize(struct Curl_easy *data,
+                              size_t delta,
+                              bool connect_only);
+
 
 /* Header specific functions */
 bool Curl_compareheader(const char *headerline, /* line to check */
@@ -176,6 +180,11 @@ CURLcode Curl_http_auth_act(struct Curl_easy *data);
 #define EXPECT_100_THRESHOLD (1024*1024)
 #endif
 
+/* MAX_HTTP_RESP_HEADER_SIZE is the maximum size of all response headers
+   combined that libcurl allows for a single HTTP response, any HTTP
+   version. This count includes CONNECT response headers. */
+#define MAX_HTTP_RESP_HEADER_SIZE (300*1024)
+
 #endif /* CURL_DISABLE_HTTP */
 
 #ifdef USE_NGHTTP3
diff --git a/lib/pingpong.c b/lib/pingpong.c
index 2f4aa1c..e53a506 100644
--- a/lib/pingpong.c
+++ b/lib/pingpong.c
@@ -341,7 +341,7 @@ CURLcode Curl_pp_readresp(struct Curl_easy *data,
   ssize_t clipamount = 0;
   bool restart = FALSE;
 
-  data->req.headerbytecount += (long)gotbytes;
+  data->req.headerbytecount += (unsigned int)gotbytes;
 
   pp->nread_resp += gotbytes;
   for(i = 0; i < gotbytes; ptr++, i++) {
diff --git a/lib/urldata.h b/lib/urldata.h
index f3e782a..748660f 100644
--- a/lib/urldata.h
+++ b/lib/urldata.h
@@ -619,17 +619,19 @@ struct SingleRequest {
   curl_off_t bytecount;         /* total number of bytes read */
   curl_off_t writebytecount;    /* number of bytes written */
 
-  curl_off_t headerbytecount;   /* only count received headers */
-  curl_off_t deductheadercount; /* this amount of bytes doesn't count when we
-                                   check if anything has been transferred at
-                                   the end of a connection. We use this
-                                   counter to make only a 100 reply (without a
-                                   following second response code) result in a
-                                   CURLE_GOT_NOTHING error code */
 
   curl_off_t pendingheader;      /* this many bytes left to send is actually
                                     header and not body */
   struct curltime start;         /* transfer started at this time */
+  unsigned int headerbytecount;  /* received server headers (not CONNECT
+                                    headers) */
+  unsigned int allheadercount;   /* all received headers (server + CONNECT) */
+  unsigned int deductheadercount; /* this amount of bytes doesn't count when
+                                     we check if anything has been transferred
+                                     at the end of a connection. We use this
+                                     counter to make only a 100 reply (without
+                                     a following second response code) result
+                                     in a CURLE_GOT_NOTHING error code */
   enum {
     HEADER_NORMAL,              /* no bad header at all */
     HEADER_PARTHEADER,          /* part of the chunk is a bad header, the rest
@@ -1076,7 +1078,6 @@ struct PureInfo {
   int httpversion; /* the http version number X.Y = X*10+Y */
   time_t filetime; /* If requested, this is might get set. Set to -1 if the
                       time was unretrievable. */
-  curl_off_t header_size; /* size of read header(s) in bytes */
   curl_off_t request_size; /* the amount of bytes sent in the request(s) */
   unsigned long proxyauthavail; /* what proxy auth types were announced */
   unsigned long httpauthavail; /* what host auth types were announced */
@@ -1084,6 +1085,7 @@ struct PureInfo {
   char *contenttype; /* the content type of the object */
   char *wouldredirect; /* URL this would've been redirected to if asked to */
   curl_off_t retry_after; /* info from Retry-After: header */
+  unsigned int header_size; /* size of read header(s) in bytes */
 
   /* PureInfo members 'conn_primary_ip', 'conn_primary_port', 'conn_local_ip'
      and, 'conn_local_port' are copied over from the connectdata struct in
-- 
2.25.1

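As an aside on the numbers the backport encodes: MAX_HTTP_RESP_HEADER_SIZE caps a single response's accumulated headers at 300*1024 bytes, while data->info.header_size keeps growing across the whole transfer (for example over redirects and CONNECT responses) and is allowed up to 20 times that before Curl_bump_headersize() returns CURLE_RECV_ERROR. A small stand-alone sketch of that arithmetic, not part of the patch itself:

#include <stdio.h>

int main(void)
{
  /* per-response cap, mirroring MAX_HTTP_RESP_HEADER_SIZE in lib/http.h */
  unsigned int per_response = 300 * 1024;       /* 307200 bytes */

  /* data->info.header_size spans the whole transfer, so the patch permits
     up to 20x the per-response cap there before failing */
  unsigned int per_transfer = per_response * 20; /* 6144000 bytes */

  printf("per-response header limit: %u bytes\n", per_response);
  printf("per-transfer header limit: %u bytes\n", per_transfer);
  return 0;
}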
diff --git a/meta/recipes-support/curl/curl_8.0.1.bb b/meta/recipes-support/curl/curl_8.0.1.bb
index 375b4d2f93..04da092ee9 100644
--- a/meta/recipes-support/curl/curl_8.0.1.bb
+++ b/meta/recipes-support/curl/curl_8.0.1.bb
@@ -21,6 +21,7 @@ SRC_URI = " \
     file://CVE-2023-28320-fol1.patch \
     file://CVE-2023-38545.patch \
     file://CVE-2023-38546.patch \
+    file://CVE-2023-38039.patch \
 "
 SRC_URI[sha256sum] = "0a381cd82f4d00a9a334438b8ca239afea5bfefcfa9a1025f2bf118e79e0b5f0"
 