author     Vijay Anusuri <vanusuri@mvista.com>  2024-01-31 08:04:59 +0530
committer  Armin Kuster <akuster808@gmail.com>  2024-03-03 16:38:27 -0500
commit     724f1e1a28e1ab45f8c223329e92bcc85a349ea2 (patch)
tree       739c38987441f5784c0e7470436e10226fdf2f62
parent     45ea2ed7593b82825e0342d5e3928f83b8e3a2ce (diff)
download   meta-openembedded-724f1e1a28e1ab45f8c223329e92bcc85a349ea2.tar.gz
squid: backport Debian patch for CVE-2023-46728 and CVE-2023-46846
Import patches from Ubuntu to fix CVE-2023-46728 and CVE-2023-46846.

Upstream-Status: Backport [import from ubuntu https://git.launchpad.net/ubuntu/+source/squid/tree/debian/patches?h=ubuntu/focal-security&id=9ccd217ca9428c9a6597e9310a99552026b245fa
Upstream commits https://github.com/squid-cache/squid/commit/6ea12e8fb590ac6959e9356a81aa3370576568c3 &
https://github.com/squid-cache/squid/commit/417da4006cf5c97d44e74431b816fc58fec9e270 &
https://github.com/squid-cache/squid/commit/05f6af2f4c85cc99323cfff6149c3d74af661b6d]

Signed-off-by: Vijay Anusuri <vanusuri@mvista.com>
Signed-off-by: Armin Kuster <akuster808@gmail.com>
-rw-r--r--  meta-networking/recipes-daemons/squid/files/CVE-2023-46728.patch      |  608
-rw-r--r--  meta-networking/recipes-daemons/squid/files/CVE-2023-46846-pre1.patch | 1154
-rw-r--r--  meta-networking/recipes-daemons/squid/files/CVE-2023-46846.patch      |  169
-rw-r--r--  meta-networking/recipes-daemons/squid/squid_4.9.bb                    |    3
4 files changed, 1934 insertions(+), 0 deletions(-)
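The squid_4.9.bb hunk is not reproduced in this excerpt, but a three-line recipe change of this kind normally just registers the new patch files in SRC_URI. A sketch of what that change presumably looks like (illustrative, not the literal hunk):

  SRC_URI += "file://CVE-2023-46728.patch \
              file://CVE-2023-46846-pre1.patch \
              file://CVE-2023-46846.patch \
             "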
diff --git a/meta-networking/recipes-daemons/squid/files/CVE-2023-46728.patch b/meta-networking/recipes-daemons/squid/files/CVE-2023-46728.patch
new file mode 100644
index 000000000..b11721041
--- /dev/null
+++ b/meta-networking/recipes-daemons/squid/files/CVE-2023-46728.patch
@@ -0,0 +1,608 @@
Partial backport of:

From 6ea12e8fb590ac6959e9356a81aa3370576568c3 Mon Sep 17 00:00:00 2001
From: Alex Rousskov <rousskov@measurement-factory.com>
Date: Tue, 26 Jul 2022 15:05:54 +0000
Subject: [PATCH] Remove support for Gopher protocol (#1092)

Gopher code quality remains too low for production use in most
environments. The code is a persistent source of vulnerabilities, and
fixing it requires significant effort. We should not be spending scarce
Project resources on improving that code, especially given the lack of
strong demand for Gopher support.

With this change, Gopher requests are handled like any other request
whose protocol is unknown to Squid. For example, HTTP requests with a
Gopher URI scheme result in ERR_UNSUP_REQ.

Default Squid configuration still considers TCP port 70 "safe". The
corresponding Safe_ports ACL rule has not been removed, for consistency's
sake: we consider the WAIS port safe even though Squid refuses to
forward WAIS requests:

  acl Safe_ports port 70          # gopher
  acl Safe_ports port 210         # wais

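As an illustrative operator-side note (not part of the upstream patch): since Squid now refuses to forward gopher:// requests anyway, a deployment that prefers to reject port 70 outright can drop that Safe_ports entry from squid.conf and let the stock access rule handle it:

  # remove (or comment out) the gopher entry from the default Safe_ports list
  #acl Safe_ports port 70         # gopher (no longer forwarded by Squid)
  acl Safe_ports port 210         # wais
  # the standard default rule then denies requests to port 70
  http_access deny !Safe_ports
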
Upstream-Status: Backport [import from ubuntu https://git.launchpad.net/ubuntu/+source/squid/tree/debian/patches/CVE-2023-46728.patch?h=ubuntu/focal-security&id=9ccd217ca9428c9a6597e9310a99552026b245fa
Upstream commit https://github.com/squid-cache/squid/commit/6ea12e8fb590ac6959e9356a81aa3370576568c3]
CVE: CVE-2023-46728
Signed-off-by: Vijay Anusuri <vanusuri@mvista.com>
---
31 doc/Programming-Guide/Groups.dox | 5 -
32 doc/debug-sections.txt | 1 -
33 doc/manuals/de.po | 2 +-
34 doc/manuals/en.po | 2 +-
35 doc/manuals/en_AU.po | 2 +-
36 doc/manuals/es.po | 2 +-
37 doc/manuals/fr.po | 2 +-
38 doc/manuals/it.po | 2 +-
39 errors/af.po | 6 +-
40 errors/az.po | 6 +-
41 errors/bg.po | 6 +-
42 errors/ca.po | 6 +-
43 errors/cs.po | 6 +-
44 errors/da.po | 6 +-
45 errors/de.po | 6 +-
46 errors/el.po | 4 +-
47 errors/en.po | 6 +-
48 errors/errorpage.css | 2 +-
49 errors/es-mx.po | 3 +-
50 errors/es.po | 4 +-
51 errors/et.po | 6 +-
52 errors/fi.po | 7 +-
53 errors/fr.po | 6 +-
54 errors/he.po | 6 +-
55 errors/hu.po | 6 +-
56 errors/hy.po | 6 +-
57 errors/it.po | 4 +-
58 errors/ja.po | 6 +-
59 errors/ko.po | 6 +-
60 errors/lt.po | 6 +-
61 errors/lv.po | 6 +-
62 errors/nl.po | 6 +-
63 errors/pl.po | 6 +-
64 errors/pt-br.po | 6 +-
65 errors/pt.po | 6 +-
66 errors/ro.po | 4 +-
67 errors/ru.po | 6 +-
68 errors/sk.po | 6 +-
69 errors/sl.po | 6 +-
70 errors/sr-latn.po | 4 +-
71 errors/sv.po | 6 +-
72 errors/templates/ERR_UNSUP_REQ | 2 +-
73 errors/tr.po | 6 +-
74 errors/uk.po | 6 +-
75 errors/vi.po | 4 +-
76 errors/zh-hans.po | 6 +-
77 errors/zh-hant.po | 7 +-
78 src/FwdState.cc | 5 -
79 src/HttpRequest.cc | 6 -
80 src/IoStats.h | 2 +-
81 src/Makefile.am | 8 -
82 src/adaptation/ecap/Host.cc | 1 -
83 src/adaptation/ecap/MessageRep.cc | 2 -
84 src/anyp/ProtocolType.h | 1 -
85 src/anyp/Uri.cc | 1 -
86 src/anyp/UriScheme.cc | 3 -
87 src/cf.data.pre | 5 +-
88 src/client_side_request.cc | 4 -
89 src/error/forward.h | 2 +-
90 src/gopher.cc | 993 -----------------------
91 src/gopher.h | 29 -
92 src/http/Message.h | 1 -
93 src/mgr/IoAction.cc | 3 -
94 src/mgr/IoAction.h | 2 -
95 src/squid.8.in | 2 +-
96 src/stat.cc | 19 -
97 src/tests/Stub.am | 1 -
98 src/tests/stub_gopher.cc | 17 -
99 test-suite/squidconf/regressions-3.4.0.1 | 1 -
100 69 files changed, 88 insertions(+), 1251 deletions(-)
101 delete mode 100644 src/gopher.cc
102 delete mode 100644 src/gopher.h
103 delete mode 100644 src/tests/stub_gopher.cc
104
105--- a/src/FwdState.cc
106+++ b/src/FwdState.cc
107@@ -28,7 +28,6 @@
108 #include "fde.h"
109 #include "FwdState.h"
110 #include "globals.h"
111-#include "gopher.h"
112 #include "hier_code.h"
113 #include "http.h"
114 #include "http/Stream.h"
115@@ -1004,10 +1003,6 @@ FwdState::dispatch()
116 httpStart(this);
117 break;
118
119- case AnyP::PROTO_GOPHER:
120- gopherStart(this);
121- break;
122-
123 case AnyP::PROTO_FTP:
124 if (request->flags.ftpNative)
125 Ftp::StartRelay(this);
126--- a/src/HttpRequest.cc
127+++ b/src/HttpRequest.cc
128@@ -18,7 +18,6 @@
129 #include "Downloader.h"
130 #include "err_detail_type.h"
131 #include "globals.h"
132-#include "gopher.h"
133 #include "http.h"
134 #include "http/one/RequestParser.h"
135 #include "http/Stream.h"
136@@ -556,11 +555,6 @@ HttpRequest::maybeCacheable()
137 return false;
138 break;
139
140- case AnyP::PROTO_GOPHER:
141- if (!gopherCachable(this))
142- return false;
143- break;
144-
145 case AnyP::PROTO_CACHE_OBJECT:
146 return false;
147
148--- a/src/IoStats.h
149+++ b/src/IoStats.h
150@@ -22,7 +22,7 @@ public:
151 int writes;
152 int write_hist[histSize];
153 }
154- Http, Ftp, Gopher;
155+ Http, Ftp;
156 };
157
158 #endif /* SQUID_IOSTATS_H_ */
159--- a/src/Makefile.am
160+++ b/src/Makefile.am
161@@ -306,8 +306,6 @@ squid_SOURCES = \
162 FwdState.h \
163 Generic.h \
164 globals.h \
165- gopher.h \
166- gopher.cc \
167 helper.cc \
168 helper.h \
169 hier_code.h \
170@@ -1259,8 +1257,6 @@ tests_testCacheManager_SOURCES = \
171 fqdncache.cc \
172 FwdState.cc \
173 FwdState.h \
174- gopher.h \
175- gopher.cc \
176 hier_code.h \
177 helper.cc \
178 $(HTCPSOURCE) \
179@@ -1678,8 +1674,6 @@ tests_testEvent_SOURCES = \
180 fqdncache.cc \
181 FwdState.cc \
182 FwdState.h \
183- gopher.h \
184- gopher.cc \
185 helper.cc \
186 hier_code.h \
187 $(HTCPSOURCE) \
188@@ -1914,8 +1908,6 @@ tests_testEventLoop_SOURCES = \
189 fqdncache.cc \
190 FwdState.cc \
191 FwdState.h \
192- gopher.h \
193- gopher.cc \
194 helper.cc \
195 hier_code.h \
196 $(HTCPSOURCE) \
197@@ -2145,8 +2137,6 @@ tests_test_http_range_SOURCES = \
198 fqdncache.cc \
199 FwdState.cc \
200 FwdState.h \
201- gopher.h \
202- gopher.cc \
203 helper.cc \
204 hier_code.h \
205 $(HTCPSOURCE) \
206@@ -2461,8 +2451,6 @@ tests_testHttpRequest_SOURCES = \
207 fqdncache.cc \
208 FwdState.cc \
209 FwdState.h \
210- gopher.h \
211- gopher.cc \
212 helper.cc \
213 hier_code.h \
214 $(HTCPSOURCE) \
215@@ -3307,8 +3295,6 @@ tests_testURL_SOURCES = \
216 fqdncache.cc \
217 FwdState.cc \
218 FwdState.h \
219- gopher.h \
220- gopher.cc \
221 helper.cc \
222 hier_code.h \
223 $(HTCPSOURCE) \
224--- a/src/adaptation/ecap/Host.cc
225+++ b/src/adaptation/ecap/Host.cc
226@@ -49,7 +49,6 @@ Adaptation::Ecap::Host::Host()
227 libecap::protocolHttp.assignHostId(AnyP::PROTO_HTTP);
228 libecap::protocolHttps.assignHostId(AnyP::PROTO_HTTPS);
229 libecap::protocolFtp.assignHostId(AnyP::PROTO_FTP);
230- libecap::protocolGopher.assignHostId(AnyP::PROTO_GOPHER);
231 libecap::protocolWais.assignHostId(AnyP::PROTO_WAIS);
232 libecap::protocolUrn.assignHostId(AnyP::PROTO_URN);
233 libecap::protocolWhois.assignHostId(AnyP::PROTO_WHOIS);
234--- a/src/adaptation/ecap/MessageRep.cc
235+++ b/src/adaptation/ecap/MessageRep.cc
236@@ -140,8 +140,6 @@ Adaptation::Ecap::FirstLineRep::protocol
237 return libecap::protocolHttps;
238 case AnyP::PROTO_FTP:
239 return libecap::protocolFtp;
240- case AnyP::PROTO_GOPHER:
241- return libecap::protocolGopher;
242 case AnyP::PROTO_WAIS:
243 return libecap::protocolWais;
244 case AnyP::PROTO_WHOIS:
245--- a/src/anyp/ProtocolType.h
246+++ b/src/anyp/ProtocolType.h
247@@ -27,7 +27,6 @@ typedef enum {
248 PROTO_HTTPS,
249 PROTO_COAP,
250 PROTO_COAPS,
251- PROTO_GOPHER,
252 PROTO_WAIS,
253 PROTO_CACHE_OBJECT,
254 PROTO_ICP,
255--- a/src/anyp/Uri.cc
256+++ b/src/anyp/Uri.cc
257@@ -852,8 +852,6 @@ urlCheckRequest(const HttpRequest * r)
258 if (r->method == Http::METHOD_PUT)
259 rc = 1;
260
261- case AnyP::PROTO_GOPHER:
262-
263 case AnyP::PROTO_WAIS:
264
265 case AnyP::PROTO_WHOIS:
266--- a/src/anyp/UriScheme.cc
267+++ b/src/anyp/UriScheme.cc
268@@ -87,9 +87,6 @@ AnyP::UriScheme::defaultPort() const
269 // Assuming IANA policy of allocating same port for base and TLS protocol versions will occur.
270 return 5683;
271
272- case AnyP::PROTO_GOPHER:
273- return 70;
274-
275 case AnyP::PROTO_WAIS:
276 return 210;
277
278--- a/src/client_side_request.cc
279+++ b/src/client_side_request.cc
280@@ -33,7 +33,6 @@
281 #include "fd.h"
282 #include "fde.h"
283 #include "format/Token.h"
284-#include "gopher.h"
285 #include "helper.h"
286 #include "helper/Reply.h"
287 #include "http.h"
288@@ -965,9 +964,6 @@ clientHierarchical(ClientHttpRequest * h
289 if (request->url.getScheme() == AnyP::PROTO_HTTP)
290 return method.respMaybeCacheable();
291
292- if (request->url.getScheme() == AnyP::PROTO_GOPHER)
293- return gopherCachable(request);
294-
295 if (request->url.getScheme() == AnyP::PROTO_CACHE_OBJECT)
296 return 0;
297
298--- a/src/err_type.h
299+++ b/src/err_type.h
300@@ -65,7 +65,7 @@ typedef enum {
301 ERR_GATEWAY_FAILURE,
302
303 /* Special Cases */
304- ERR_DIR_LISTING, /* Display of remote directory (FTP, Gopher) */
305+ ERR_DIR_LISTING, /* Display of remote directory (FTP) */
306 ERR_SQUID_SIGNATURE, /* not really an error */
307 ERR_SHUTTING_DOWN,
308 ERR_PROTOCOL_UNKNOWN,
309--- a/src/HttpMsg.h
310+++ b/src/HttpMsg.h
311@@ -38,7 +38,6 @@ public:
312 srcFtp = 1 << (16 + 1), ///< ftp_port or FTP server
313 srcIcap = 1 << (16 + 2), ///< traditional ICAP service without encryption
314 srcEcap = 1 << (16 + 3), ///< eCAP service that uses insecure libraries/daemons
315- srcGopher = 1 << (16 + 14), ///< Gopher server
316 srcWhois = 1 << (16 + 15), ///< Whois server
317 srcUnsafe = 0xFFFF0000, ///< Unsafe sources mask
318 srcSafe = 0x0000FFFF ///< Safe sources mask
319--- a/src/mgr/IoAction.cc
320+++ b/src/mgr/IoAction.cc
321@@ -35,9 +35,6 @@ Mgr::IoActionData::operator += (const Io
322 ftp_reads += stats.ftp_reads;
323 for (int i = 0; i < IoStats::histSize; ++i)
324 ftp_read_hist[i] += stats.ftp_read_hist[i];
325- gopher_reads += stats.gopher_reads;
326- for (int i = 0; i < IoStats::histSize; ++i)
327- gopher_read_hist[i] += stats.gopher_read_hist[i];
328
329 return *this;
330 }
331--- a/src/mgr/IoAction.h
332+++ b/src/mgr/IoAction.h
333@@ -27,10 +27,8 @@ public:
334 public:
335 double http_reads;
336 double ftp_reads;
337- double gopher_reads;
338 double http_read_hist[IoStats::histSize];
339 double ftp_read_hist[IoStats::histSize];
340- double gopher_read_hist[IoStats::histSize];
341 };
342
343 /// implement aggregated 'io' action
344--- a/src/stat.cc
345+++ b/src/stat.cc
346@@ -206,12 +206,6 @@ GetIoStats(Mgr::IoActionData& stats)
347 for (i = 0; i < IoStats::histSize; ++i) {
348 stats.ftp_read_hist[i] = IOStats.Ftp.read_hist[i];
349 }
350-
351- stats.gopher_reads = IOStats.Gopher.reads;
352-
353- for (i = 0; i < IoStats::histSize; ++i) {
354- stats.gopher_read_hist[i] = IOStats.Gopher.read_hist[i];
355- }
356 }
357
358 void
359@@ -245,19 +239,6 @@ DumpIoStats(Mgr::IoActionData& stats, St
360 }
361
362 storeAppendPrintf(sentry, "\n");
363- storeAppendPrintf(sentry, "Gopher I/O\n");
364- storeAppendPrintf(sentry, "number of reads: %.0f\n", stats.gopher_reads);
365- storeAppendPrintf(sentry, "Read Histogram:\n");
366-
367- for (i = 0; i < IoStats::histSize; ++i) {
368- storeAppendPrintf(sentry, "%5d-%5d: %9.0f %2.0f%%\n",
369- i ? (1 << (i - 1)) + 1 : 1,
370- 1 << i,
371- stats.gopher_read_hist[i],
372- Math::doublePercent(stats.gopher_read_hist[i], stats.gopher_reads));
373- }
374-
375- storeAppendPrintf(sentry, "\n");
376 }
377
378 static const char *
379--- a/src/Makefile.in
380+++ b/src/Makefile.in
381@@ -263,7 +263,7 @@ am__squid_SOURCES_DIST = AclRegs.cc Auth
382 ExternalACL.h ExternalACLEntry.cc ExternalACLEntry.h \
383 FadingCounter.h FadingCounter.cc fatal.h fatal.cc fd.h fd.cc \
384 fde.cc fde.h FileMap.h filemap.cc fqdncache.h fqdncache.cc \
385- FwdState.cc FwdState.h Generic.h globals.h gopher.h gopher.cc \
386+ FwdState.cc FwdState.h Generic.h globals.h \
387 helper.cc helper.h hier_code.h HierarchyLogEntry.h htcp.cc \
388 htcp.h http.cc http.h HttpHeaderFieldStat.h HttpHdrCc.h \
389 HttpHdrCc.cc HttpHdrCc.cci HttpHdrRange.cc HttpHdrSc.cc \
390@@ -352,7 +352,7 @@ am_squid_OBJECTS = $(am__objects_1) Acce
391 EventLoop.$(OBJEXT) external_acl.$(OBJEXT) \
392 ExternalACLEntry.$(OBJEXT) FadingCounter.$(OBJEXT) \
393 fatal.$(OBJEXT) fd.$(OBJEXT) fde.$(OBJEXT) filemap.$(OBJEXT) \
394- fqdncache.$(OBJEXT) FwdState.$(OBJEXT) gopher.$(OBJEXT) \
395+ fqdncache.$(OBJEXT) FwdState.$(OBJEXT) \
396 helper.$(OBJEXT) $(am__objects_5) http.$(OBJEXT) \
397 HttpHdrCc.$(OBJEXT) HttpHdrRange.$(OBJEXT) HttpHdrSc.$(OBJEXT) \
398 HttpHdrScTarget.$(OBJEXT) HttpHdrContRange.$(OBJEXT) \
399@@ -539,7 +539,7 @@ am__tests_testCacheManager_SOURCES_DIST
400 tests/stub_ETag.cc event.cc external_acl.cc \
401 ExternalACLEntry.cc fatal.h tests/stub_fatal.cc fd.h fd.cc \
402 fde.cc FileMap.h filemap.cc fqdncache.h fqdncache.cc \
403- FwdState.cc FwdState.h gopher.h gopher.cc hier_code.h \
404+ FwdState.cc FwdState.h hier_code.h \
405 helper.cc htcp.cc htcp.h http.cc HttpBody.h HttpBody.cc \
406 HttpHeader.h HttpHeader.cc HttpHeaderFieldInfo.h \
407 HttpHeaderTools.h HttpHeaderTools.cc HttpHeaderFieldStat.h \
408@@ -594,7 +594,7 @@ am_tests_testCacheManager_OBJECTS = Acce
409 event.$(OBJEXT) external_acl.$(OBJEXT) \
410 ExternalACLEntry.$(OBJEXT) tests/stub_fatal.$(OBJEXT) \
411 fd.$(OBJEXT) fde.$(OBJEXT) filemap.$(OBJEXT) \
412- fqdncache.$(OBJEXT) FwdState.$(OBJEXT) gopher.$(OBJEXT) \
413+ fqdncache.$(OBJEXT) FwdState.$(OBJEXT) \
414 helper.$(OBJEXT) $(am__objects_5) http.$(OBJEXT) \
415 HttpBody.$(OBJEXT) HttpHeader.$(OBJEXT) \
416 HttpHeaderTools.$(OBJEXT) HttpHdrCc.$(OBJEXT) \
417@@ -838,7 +838,7 @@ am__tests_testEvent_SOURCES_DIST = Acces
418 EventLoop.h EventLoop.cc external_acl.cc ExternalACLEntry.cc \
419 FadingCounter.cc fatal.h tests/stub_fatal.cc fd.h fd.cc fde.cc \
420 FileMap.h filemap.cc fqdncache.h fqdncache.cc FwdState.cc \
421- FwdState.h gopher.h gopher.cc helper.cc hier_code.h htcp.cc \
422+ FwdState.h helper.cc hier_code.h htcp.cc \
423 htcp.h http.cc HttpBody.h HttpBody.cc \
424 tests/stub_HttpControlMsg.cc HttpHeader.h HttpHeader.cc \
425 HttpHeaderFieldInfo.h HttpHeaderTools.h HttpHeaderTools.cc \
426@@ -891,7 +891,7 @@ am_tests_testEvent_OBJECTS = AccessLogEn
427 external_acl.$(OBJEXT) ExternalACLEntry.$(OBJEXT) \
428 FadingCounter.$(OBJEXT) tests/stub_fatal.$(OBJEXT) \
429 fd.$(OBJEXT) fde.$(OBJEXT) filemap.$(OBJEXT) \
430- fqdncache.$(OBJEXT) FwdState.$(OBJEXT) gopher.$(OBJEXT) \
431+ fqdncache.$(OBJEXT) FwdState.$(OBJEXT) \
432 helper.$(OBJEXT) $(am__objects_5) http.$(OBJEXT) \
433 HttpBody.$(OBJEXT) tests/stub_HttpControlMsg.$(OBJEXT) \
434 HttpHeader.$(OBJEXT) HttpHeaderTools.$(OBJEXT) \
435@@ -975,8 +975,8 @@ am__tests_testEventLoop_SOURCES_DIST = A
436 tests/stub_ETag.cc EventLoop.h EventLoop.cc event.cc \
437 external_acl.cc ExternalACLEntry.cc FadingCounter.cc fatal.h \
438 tests/stub_fatal.cc fd.h fd.cc fde.cc FileMap.h filemap.cc \
439- fqdncache.h fqdncache.cc FwdState.cc FwdState.h gopher.h \
440- gopher.cc helper.cc hier_code.h htcp.cc htcp.h http.cc \
441+ fqdncache.h fqdncache.cc FwdState.cc FwdState.h \
442+ helper.cc hier_code.h htcp.cc htcp.h http.cc \
443 HttpBody.h HttpBody.cc tests/stub_HttpControlMsg.cc \
444 HttpHeader.h HttpHeader.cc HttpHeaderFieldInfo.h \
445 HttpHeaderTools.h HttpHeaderTools.cc HttpHeaderFieldStat.h \
446@@ -1029,7 +1029,7 @@ am_tests_testEventLoop_OBJECTS = AccessL
447 external_acl.$(OBJEXT) ExternalACLEntry.$(OBJEXT) \
448 FadingCounter.$(OBJEXT) tests/stub_fatal.$(OBJEXT) \
449 fd.$(OBJEXT) fde.$(OBJEXT) filemap.$(OBJEXT) \
450- fqdncache.$(OBJEXT) FwdState.$(OBJEXT) gopher.$(OBJEXT) \
451+ fqdncache.$(OBJEXT) FwdState.$(OBJEXT) \
452 helper.$(OBJEXT) $(am__objects_5) http.$(OBJEXT) \
453 HttpBody.$(OBJEXT) tests/stub_HttpControlMsg.$(OBJEXT) \
454 HttpHeader.$(OBJEXT) HttpHeaderTools.$(OBJEXT) \
455@@ -1187,7 +1187,7 @@ am__tests_testHttpRequest_SOURCES_DIST =
456 fs_io.cc dlink.h dlink.cc dns_internal.cc errorpage.cc \
457 tests/stub_ETag.cc external_acl.cc ExternalACLEntry.cc fatal.h \
458 tests/stub_fatal.cc fd.h fd.cc fde.cc fqdncache.h fqdncache.cc \
459- FwdState.cc FwdState.h gopher.h gopher.cc helper.cc \
460+ FwdState.cc FwdState.h helper.cc \
461 hier_code.h htcp.cc htcp.h http.cc HttpBody.h HttpBody.cc \
462 tests/stub_HttpControlMsg.cc HttpHeader.h HttpHeader.cc \
463 HttpHeaderFieldInfo.h HttpHeaderTools.h HttpHeaderTools.cc \
464@@ -1243,7 +1243,7 @@ am_tests_testHttpRequest_OBJECTS = Acces
465 $(am__objects_4) errorpage.$(OBJEXT) tests/stub_ETag.$(OBJEXT) \
466 external_acl.$(OBJEXT) ExternalACLEntry.$(OBJEXT) \
467 tests/stub_fatal.$(OBJEXT) fd.$(OBJEXT) fde.$(OBJEXT) \
468- fqdncache.$(OBJEXT) FwdState.$(OBJEXT) gopher.$(OBJEXT) \
469+ fqdncache.$(OBJEXT) FwdState.$(OBJEXT) \
470 helper.$(OBJEXT) $(am__objects_5) http.$(OBJEXT) \
471 HttpBody.$(OBJEXT) tests/stub_HttpControlMsg.$(OBJEXT) \
472 HttpHeader.$(OBJEXT) HttpHeaderTools.$(OBJEXT) \
473@@ -1670,8 +1670,8 @@ am__tests_testURL_SOURCES_DIST = AccessL
474 fs_io.cc dlink.h dlink.cc dns_internal.cc errorpage.cc ETag.cc \
475 event.cc external_acl.cc ExternalACLEntry.cc fatal.h \
476 tests/stub_fatal.cc fd.h fd.cc fde.cc FileMap.h filemap.cc \
477- fqdncache.h fqdncache.cc FwdState.cc FwdState.h gopher.h \
478- gopher.cc helper.cc hier_code.h htcp.cc htcp.h http.cc \
479+ fqdncache.h fqdncache.cc FwdState.cc FwdState.h \
480+ helper.cc hier_code.h htcp.cc htcp.h http.cc \
481 HttpBody.h HttpBody.cc tests/stub_HttpControlMsg.cc \
482 HttpHeaderFieldStat.h HttpHdrCc.h HttpHdrCc.cc HttpHdrCc.cci \
483 HttpHdrContRange.cc HttpHdrRange.cc HttpHdrSc.cc \
484@@ -1725,7 +1725,7 @@ am_tests_testURL_OBJECTS = AccessLogEntr
485 event.$(OBJEXT) external_acl.$(OBJEXT) \
486 ExternalACLEntry.$(OBJEXT) tests/stub_fatal.$(OBJEXT) \
487 fd.$(OBJEXT) fde.$(OBJEXT) filemap.$(OBJEXT) \
488- fqdncache.$(OBJEXT) FwdState.$(OBJEXT) gopher.$(OBJEXT) \
489+ fqdncache.$(OBJEXT) FwdState.$(OBJEXT) \
490 helper.$(OBJEXT) $(am__objects_5) http.$(OBJEXT) \
491 HttpBody.$(OBJEXT) tests/stub_HttpControlMsg.$(OBJEXT) \
492 HttpHdrCc.$(OBJEXT) HttpHdrContRange.$(OBJEXT) \
493@@ -1925,8 +1925,8 @@ am__tests_test_http_range_SOURCES_DIST =
494 dns_internal.cc errorpage.cc tests/stub_ETag.cc event.cc \
495 FadingCounter.cc fatal.h tests/stub_libauth.cc \
496 tests/stub_fatal.cc fd.h fd.cc fde.cc FileMap.h filemap.cc \
497- fqdncache.h fqdncache.cc FwdState.cc FwdState.h gopher.h \
498- gopher.cc helper.cc hier_code.h htcp.cc htcp.h http.cc \
499+ fqdncache.h fqdncache.cc FwdState.cc FwdState.h \
500+ helper.cc hier_code.h htcp.cc htcp.h http.cc \
501 HttpBody.h HttpBody.cc tests/stub_HttpControlMsg.cc \
502 HttpHeaderFieldStat.h HttpHdrCc.h HttpHdrCc.cc HttpHdrCc.cci \
503 HttpHdrContRange.cc HttpHdrRange.cc HttpHdrSc.cc \
504@@ -1979,7 +1979,7 @@ am_tests_test_http_range_OBJECTS = Acces
505 FadingCounter.$(OBJEXT) tests/stub_libauth.$(OBJEXT) \
506 tests/stub_fatal.$(OBJEXT) fd.$(OBJEXT) fde.$(OBJEXT) \
507 filemap.$(OBJEXT) fqdncache.$(OBJEXT) FwdState.$(OBJEXT) \
508- gopher.$(OBJEXT) helper.$(OBJEXT) $(am__objects_5) \
509+ helper.$(OBJEXT) $(am__objects_5) \
510 http.$(OBJEXT) HttpBody.$(OBJEXT) \
511 tests/stub_HttpControlMsg.$(OBJEXT) HttpHdrCc.$(OBJEXT) \
512 HttpHdrContRange.$(OBJEXT) HttpHdrRange.$(OBJEXT) \
513@@ -2131,7 +2131,7 @@ am__depfiles_remade = ./$(DEPDIR)/Access
514 ./$(DEPDIR)/external_acl.Po ./$(DEPDIR)/fatal.Po \
515 ./$(DEPDIR)/fd.Po ./$(DEPDIR)/fde.Po ./$(DEPDIR)/filemap.Po \
516 ./$(DEPDIR)/fqdncache.Po ./$(DEPDIR)/fs_io.Po \
517- ./$(DEPDIR)/globals.Po ./$(DEPDIR)/gopher.Po \
518+ ./$(DEPDIR)/globals.Po \
519 ./$(DEPDIR)/helper.Po ./$(DEPDIR)/hier_code.Po \
520 ./$(DEPDIR)/htcp.Po ./$(DEPDIR)/http.Po \
521 ./$(DEPDIR)/icp_opcode.Po ./$(DEPDIR)/icp_v2.Po \
522@@ -3043,7 +3043,7 @@ squid_SOURCES = $(ACL_REGISTRATION_SOURC
523 ExternalACL.h ExternalACLEntry.cc ExternalACLEntry.h \
524 FadingCounter.h FadingCounter.cc fatal.h fatal.cc fd.h fd.cc \
525 fde.cc fde.h FileMap.h filemap.cc fqdncache.h fqdncache.cc \
526- FwdState.cc FwdState.h Generic.h globals.h gopher.h gopher.cc \
527+ FwdState.cc FwdState.h Generic.h globals.h \
528 helper.cc helper.h hier_code.h HierarchyLogEntry.h \
529 $(HTCPSOURCE) http.cc http.h HttpHeaderFieldStat.h HttpHdrCc.h \
530 HttpHdrCc.cc HttpHdrCc.cci HttpHdrRange.cc HttpHdrSc.cc \
531@@ -3708,8 +3708,6 @@ tests_testCacheManager_SOURCES = \
532 fqdncache.cc \
533 FwdState.cc \
534 FwdState.h \
535- gopher.h \
536- gopher.cc \
537 hier_code.h \
538 helper.cc \
539 $(HTCPSOURCE) \
540@@ -4134,8 +4132,6 @@ tests_testEvent_SOURCES = \
541 fqdncache.cc \
542 FwdState.cc \
543 FwdState.h \
544- gopher.h \
545- gopher.cc \
546 helper.cc \
547 hier_code.h \
548 $(HTCPSOURCE) \
549@@ -4371,8 +4367,6 @@ tests_testEventLoop_SOURCES = \
550 fqdncache.cc \
551 FwdState.cc \
552 FwdState.h \
553- gopher.h \
554- gopher.cc \
555 helper.cc \
556 hier_code.h \
557 $(HTCPSOURCE) \
558@@ -4604,8 +4598,6 @@ tests_test_http_range_SOURCES = \
559 fqdncache.cc \
560 FwdState.cc \
561 FwdState.h \
562- gopher.h \
563- gopher.cc \
564 helper.cc \
565 hier_code.h \
566 $(HTCPSOURCE) \
567@@ -4924,8 +4916,6 @@ tests_testHttpRequest_SOURCES = \
568 fqdncache.cc \
569 FwdState.cc \
570 FwdState.h \
571- gopher.h \
572- gopher.cc \
573 helper.cc \
574 hier_code.h \
575 $(HTCPSOURCE) \
576@@ -5777,8 +5767,6 @@ tests_testURL_SOURCES = \
577 fqdncache.cc \
578 FwdState.cc \
579 FwdState.h \
580- gopher.h \
581- gopher.cc \
582 helper.cc \
583 hier_code.h \
584 $(HTCPSOURCE) \
585@@ -6823,7 +6811,6 @@ distclean-compile:
586 @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/fqdncache.Po@am__quote@ # am--include-marker
587 @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/fs_io.Po@am__quote@ # am--include-marker
588 @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/globals.Po@am__quote@ # am--include-marker
589-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/gopher.Po@am__quote@ # am--include-marker
590 @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/helper.Po@am__quote@ # am--include-marker
591 @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/hier_code.Po@am__quote@ # am--include-marker
592 @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/htcp.Po@am__quote@ # am--include-marker
593@@ -7804,7 +7791,6 @@ distclean: distclean-recursive
594 -rm -f ./$(DEPDIR)/fqdncache.Po
595 -rm -f ./$(DEPDIR)/fs_io.Po
596 -rm -f ./$(DEPDIR)/globals.Po
597- -rm -f ./$(DEPDIR)/gopher.Po
598 -rm -f ./$(DEPDIR)/helper.Po
599 -rm -f ./$(DEPDIR)/hier_code.Po
600 -rm -f ./$(DEPDIR)/htcp.Po
601@@ -8129,7 +8115,6 @@ maintainer-clean: maintainer-clean-recur
602 -rm -f ./$(DEPDIR)/fqdncache.Po
603 -rm -f ./$(DEPDIR)/fs_io.Po
604 -rm -f ./$(DEPDIR)/globals.Po
605- -rm -f ./$(DEPDIR)/gopher.Po
606 -rm -f ./$(DEPDIR)/helper.Po
607 -rm -f ./$(DEPDIR)/hier_code.Po
608 -rm -f ./$(DEPDIR)/htcp.Po
diff --git a/meta-networking/recipes-daemons/squid/files/CVE-2023-46846-pre1.patch b/meta-networking/recipes-daemons/squid/files/CVE-2023-46846-pre1.patch
new file mode 100644
index 000000000..5b4e370d4
--- /dev/null
+++ b/meta-networking/recipes-daemons/squid/files/CVE-2023-46846-pre1.patch
@@ -0,0 +1,1154 @@
Backport of:

From 417da4006cf5c97d44e74431b816fc58fec9e270 Mon Sep 17 00:00:00 2001
From: Eduard Bagdasaryan <eduard.bagdasaryan@measurement-factory.com>
Date: Mon, 18 Mar 2019 17:48:21 +0000
Subject: [PATCH] Fix incremental parsing of chunked quoted extensions (#310)

Before this change, incremental parsing of quoted chunked extensions
was broken for two reasons:

* Http::One::Parser::skipLineTerminator() unexpectedly threw after a
  partially received quoted chunk extension value.

* When Http::One::Tokenizer was unable to parse a quoted extension,
  it incorrectly restored the input buffer to the beginning of the
  extension value (instead of the extension itself), thus making
  further incremental parsing iterations impossible.

IMO, the reason for this problem was that Http::One::Tokenizer::qdText()
could not distinguish two cases (returning false in both):

* the end of the quoted string has not yet been reached

* an input error, e.g., a wrong/unexpected character

A possible approach would be to improve Http::One::Tokenizer, making it
aware of a "needs more data" state. However, to be acceptable, these
improvements should be done in the base Parser::Tokenizer class instead.
Those changes seem to be non-trivial and can be done separately, later.

Another approach, used here, is to simplify the complex and error-prone
chunked-extensions parsing algorithm, fixing the incremental parsing bugs
while still parsing incrementally in almost all cases. A performance
regression can be expected only in the relatively rare cases of partially
received or malformed extensions.

Also:
* fixed parsing of partial use-original-body extension values
* do not treat an invalid use-original-body as an unknown extension
* optimization: parse the use-original-body extension only in ICAP context
  (i.e., where it is expected)
* improvement: added a new API to TeChunkedParser for specifying the list
  of known chunked extensions (see the usage sketch below)

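Illustrative usage of that new API (not part of the backported patch; modeled on the Adaptation::Icap::ModXact hunks below, with MyUobParser and the wiring comments being hypothetical):

  // a custom chunk-extension handler built on the new TeChunkedParser API
  #include "http/one/TeChunkedParser.h"
  #include "parser/Tokenizer.h"
  #include "sbuf/SBuf.h"

  class MyUobParser : public Http1::ChunkExtensionValueParser
  {
  public:
      /* Http1::ChunkExtensionValueParser API */
      virtual void parse(Tokenizer &tok, const SBuf &extName) override
      {
          static const SBuf UseOriginalBodyName("use-original-body");
          if (extName == UseOriginalBodyName)
              useOriginalBody_ = tok.udec64("use-original-body"); // may throw InsufficientInput
          else
              Ignore(tok, extName); // extract and discard any other chunk-ext-val
      }

      int64_t useOriginalBody_ = -1; ///< stays -1 until the extension is seen
  };

  // wiring, mirroring the ICAP ModXact change in this patch:
  //   MyUobParser extensionParser;
  //   auto bodyParser = new Http1::TeChunkedParser;
  //   bodyParser->parseExtensionValuesWith(&extensionParser);
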
Upstream-Status: Backport [import from ubuntu https://git.launchpad.net/ubuntu/+source/squid/tree/debian/patches/CVE-2023-46846-pre1.patch?h=ubuntu/focal-security&id=9ccd217ca9428c9a6597e9310a99552026b245fa
Upstream commit https://github.com/squid-cache/squid/commit/417da4006cf5c97d44e74431b816fc58fec9e270]
CVE: CVE-2023-46846 #Dependency Patch1
Signed-off-by: Vijay Anusuri <vanusuri@mvista.com>
---
51 src/adaptation/icap/ModXact.cc | 21 ++++-
52 src/adaptation/icap/ModXact.h | 20 +++++
53 src/http/one/Parser.cc | 35 ++++----
54 src/http/one/Parser.h | 10 ++-
55 src/http/one/RequestParser.cc | 16 ++--
56 src/http/one/RequestParser.h | 8 +-
57 src/http/one/ResponseParser.cc | 17 ++--
58 src/http/one/ResponseParser.h | 2 +-
59 src/http/one/TeChunkedParser.cc | 139 ++++++++++++++++++--------------
60 src/http/one/TeChunkedParser.h | 41 ++++++++--
61 src/http/one/Tokenizer.cc | 104 ++++++++++++------------
62 src/http/one/Tokenizer.h | 89 ++++++++------------
63 src/http/one/forward.h | 3 +
64 src/parser/BinaryTokenizer.h | 3 +-
65 src/parser/Makefile.am | 1 +
66 src/parser/Tokenizer.cc | 40 +++++++++
67 src/parser/Tokenizer.h | 13 +++
68 src/parser/forward.h | 22 +++++
69 18 files changed, 364 insertions(+), 220 deletions(-)
70 create mode 100644 src/parser/forward.h
71
72--- a/src/adaptation/icap/ModXact.cc
73+++ b/src/adaptation/icap/ModXact.cc
74@@ -25,12 +25,13 @@
75 #include "comm.h"
76 #include "comm/Connection.h"
77 #include "err_detail_type.h"
78-#include "http/one/TeChunkedParser.h"
79 #include "HttpHeaderTools.h"
80 #include "HttpMsg.h"
81 #include "HttpReply.h"
82 #include "HttpRequest.h"
83 #include "MasterXaction.h"
84+#include "parser/Tokenizer.h"
85+#include "sbuf/Stream.h"
86 #include "SquidTime.h"
87
88 // flow and terminology:
89@@ -44,6 +45,8 @@ CBDATA_NAMESPACED_CLASS_INIT(Adaptation:
90
91 static const size_t TheBackupLimit = BodyPipe::MaxCapacity;
92
93+const SBuf Adaptation::Icap::ChunkExtensionValueParser::UseOriginalBodyName("use-original-body");
94+
95 Adaptation::Icap::ModXact::State::State()
96 {
97 memset(this, 0, sizeof(*this));
98@@ -1108,6 +1111,7 @@ void Adaptation::Icap::ModXact::decideOn
99 state.parsing = State::psBody;
100 replyHttpBodySize = 0;
101 bodyParser = new Http1::TeChunkedParser;
102+ bodyParser->parseExtensionValuesWith(&extensionParser);
103 makeAdaptedBodyPipe("adapted response from the ICAP server");
104 Must(state.sending == State::sendingAdapted);
105 } else {
106@@ -1142,9 +1146,8 @@ void Adaptation::Icap::ModXact::parseBod
107 }
108
109 if (parsed) {
110- if (state.readyForUob && bodyParser->useOriginBody >= 0) {
111- prepPartialBodyEchoing(
112- static_cast<uint64_t>(bodyParser->useOriginBody));
113+ if (state.readyForUob && extensionParser.sawUseOriginalBody()) {
114+ prepPartialBodyEchoing(extensionParser.useOriginalBody());
115 stopParsing();
116 return;
117 }
118@@ -2014,3 +2017,14 @@ void Adaptation::Icap::ModXactLauncher::
119 }
120 }
121
122+void
123+Adaptation::Icap::ChunkExtensionValueParser::parse(Tokenizer &tok, const SBuf &extName)
124+{
125+ if (extName == UseOriginalBodyName) {
126+ useOriginalBody_ = tok.udec64("use-original-body");
127+ assert(useOriginalBody_ >= 0);
128+ } else {
129+ Ignore(tok, extName);
130+ }
131+}
132+
133--- a/src/adaptation/icap/ModXact.h
134+++ b/src/adaptation/icap/ModXact.h
135@@ -15,6 +15,7 @@
136 #include "adaptation/icap/Xaction.h"
137 #include "BodyPipe.h"
138 #include "http/one/forward.h"
139+#include "http/one/TeChunkedParser.h"
140
141 /*
142 * ICAPModXact implements ICAP REQMOD and RESPMOD transaction using
143@@ -105,6 +106,23 @@ private:
144 enum State { stDisabled, stWriting, stIeof, stDone } theState;
145 };
146
147+/// handles ICAP-specific chunk extensions supported by Squid
148+class ChunkExtensionValueParser: public Http1::ChunkExtensionValueParser
149+{
150+public:
151+ /* Http1::ChunkExtensionValueParser API */
152+ virtual void parse(Tokenizer &tok, const SBuf &extName) override;
153+
154+ bool sawUseOriginalBody() const { return useOriginalBody_ >= 0; }
155+ uint64_t useOriginalBody() const { assert(sawUseOriginalBody()); return static_cast<uint64_t>(useOriginalBody_); }
156+
157+private:
158+ static const SBuf UseOriginalBodyName;
159+
160+ /// the value of the parsed use-original-body chunk extension (or -1)
161+ int64_t useOriginalBody_ = -1;
162+};
163+
164 class ModXact: public Xaction, public BodyProducer, public BodyConsumer
165 {
166 CBDATA_CLASS(ModXact);
167@@ -270,6 +288,8 @@ private:
168
169 int adaptHistoryId; ///< adaptation history slot reservation
170
171+ ChunkExtensionValueParser extensionParser;
172+
173 class State
174 {
175
176--- a/src/http/one/Parser.cc
177+++ b/src/http/one/Parser.cc
178@@ -7,10 +7,11 @@
179 */
180
181 #include "squid.h"
182+#include "base/CharacterSet.h"
183 #include "Debug.h"
184 #include "http/one/Parser.h"
185-#include "http/one/Tokenizer.h"
186 #include "mime_header.h"
187+#include "parser/Tokenizer.h"
188 #include "SquidConfig.h"
189
190 /// RFC 7230 section 2.6 - 7 magic octets
191@@ -61,20 +62,19 @@ Http::One::Parser::DelimiterCharacters()
192 RelaxedDelimiterCharacters() : CharacterSet::SP;
193 }
194
195-bool
196-Http::One::Parser::skipLineTerminator(Http1::Tokenizer &tok) const
197+void
198+Http::One::Parser::skipLineTerminator(Tokenizer &tok) const
199 {
200 if (tok.skip(Http1::CrLf()))
201- return true;
202+ return;
203
204 if (Config.onoff.relaxed_header_parser && tok.skipOne(CharacterSet::LF))
205- return true;
206+ return;
207
208 if (tok.atEnd() || (tok.remaining().length() == 1 && tok.remaining().at(0) == '\r'))
209- return false; // need more data
210+ throw InsufficientInput();
211
212 throw TexcHere("garbage instead of CRLF line terminator");
213- return false; // unreachable, but make naive compilers happy
214 }
215
216 /// all characters except the LF line terminator
217@@ -102,7 +102,7 @@ LineCharacters()
218 void
219 Http::One::Parser::cleanMimePrefix()
220 {
221- Http1::Tokenizer tok(mimeHeaderBlock_);
222+ Tokenizer tok(mimeHeaderBlock_);
223 while (tok.skipOne(RelaxedDelimiterCharacters())) {
224 (void)tok.skipAll(LineCharacters()); // optional line content
225 // LF terminator is required.
226@@ -137,7 +137,7 @@ Http::One::Parser::cleanMimePrefix()
227 void
228 Http::One::Parser::unfoldMime()
229 {
230- Http1::Tokenizer tok(mimeHeaderBlock_);
231+ Tokenizer tok(mimeHeaderBlock_);
232 const auto szLimit = mimeHeaderBlock_.length();
233 mimeHeaderBlock_.clear();
234 // prevent the mime sender being able to make append() realloc/grow multiple times.
235@@ -228,7 +228,7 @@ Http::One::Parser::getHostHeaderField()
236 debugs(25, 5, "looking for " << name);
237
238 // while we can find more LF in the SBuf
239- Http1::Tokenizer tok(mimeHeaderBlock_);
240+ Tokenizer tok(mimeHeaderBlock_);
241 SBuf p;
242
243 while (tok.prefix(p, LineCharacters())) {
244@@ -250,7 +250,7 @@ Http::One::Parser::getHostHeaderField()
245 p.consume(namelen + 1);
246
247 // TODO: optimize SBuf::trim to take CharacterSet directly
248- Http1::Tokenizer t(p);
249+ Tokenizer t(p);
250 t.skipAll(CharacterSet::WSP);
251 p = t.remaining();
252
253@@ -278,10 +278,15 @@ Http::One::ErrorLevel()
254 }
255
256 // BWS = *( SP / HTAB ) ; WhitespaceCharacters() may relax this RFC 7230 rule
257-bool
258-Http::One::ParseBws(Tokenizer &tok)
259+void
260+Http::One::ParseBws(Parser::Tokenizer &tok)
261 {
262- if (const auto count = tok.skipAll(Parser::WhitespaceCharacters())) {
263+ const auto count = tok.skipAll(Parser::WhitespaceCharacters());
264+
265+ if (tok.atEnd())
266+ throw InsufficientInput(); // even if count is positive
267+
268+ if (count) {
269 // Generating BWS is a MUST-level violation so warn about it as needed.
270 debugs(33, ErrorLevel(), "found " << count << " BWS octets");
271 // RFC 7230 says we MUST parse BWS, so we fall through even if
272@@ -289,6 +294,6 @@ Http::One::ParseBws(Tokenizer &tok)
273 }
274 // else we successfully "parsed" an empty BWS sequence
275
276- return true;
277+ // success: no more BWS characters expected
278 }
279
280--- a/src/http/one/Parser.h
281+++ b/src/http/one/Parser.h
282@@ -12,6 +12,7 @@
283 #include "anyp/ProtocolVersion.h"
284 #include "http/one/forward.h"
285 #include "http/StatusCode.h"
286+#include "parser/forward.h"
287 #include "sbuf/SBuf.h"
288
289 namespace Http {
290@@ -40,6 +41,7 @@ class Parser : public RefCountable
291 {
292 public:
293 typedef SBuf::size_type size_type;
294+ typedef ::Parser::Tokenizer Tokenizer;
295
296 Parser() : parseStatusCode(Http::scNone), parsingStage_(HTTP_PARSE_NONE), hackExpectsMime_(false) {}
297 virtual ~Parser() {}
298@@ -118,11 +120,11 @@ protected:
299 * detect and skip the CRLF or (if tolerant) LF line terminator
300 * consume from the tokenizer.
301 *
302- * throws if non-terminator is detected.
303+ * \throws exception on bad or InsuffientInput.
304 * \retval true only if line terminator found.
305 * \retval false incomplete or missing line terminator, need more data.
306 */
307- bool skipLineTerminator(Http1::Tokenizer &tok) const;
308+ void skipLineTerminator(Tokenizer &) const;
309
310 /**
311 * Scan to find the mime headers block for current message.
312@@ -159,8 +161,8 @@ private:
313 };
314
315 /// skips and, if needed, warns about RFC 7230 BWS ("bad" whitespace)
316-/// \returns true (always; unlike all the skip*() functions)
317-bool ParseBws(Tokenizer &tok);
318+/// \throws InsufficientInput when the end of BWS cannot be confirmed
319+void ParseBws(Parser::Tokenizer &);
320
321 /// the right debugs() level for logging HTTP violation messages
322 int ErrorLevel();
323--- a/src/http/one/RequestParser.cc
324+++ b/src/http/one/RequestParser.cc
325@@ -9,8 +9,8 @@
326 #include "squid.h"
327 #include "Debug.h"
328 #include "http/one/RequestParser.h"
329-#include "http/one/Tokenizer.h"
330 #include "http/ProtocolVersion.h"
331+#include "parser/Tokenizer.h"
332 #include "profiler/Profiler.h"
333 #include "SquidConfig.h"
334
335@@ -64,7 +64,7 @@ Http::One::RequestParser::skipGarbageLin
336 * RFC 7230 section 2.6, 3.1 and 3.5
337 */
338 bool
339-Http::One::RequestParser::parseMethodField(Http1::Tokenizer &tok)
340+Http::One::RequestParser::parseMethodField(Tokenizer &tok)
341 {
342 // method field is a sequence of TCHAR.
343 // Limit to 32 characters to prevent overly long sequences of non-HTTP
344@@ -145,7 +145,7 @@ Http::One::RequestParser::RequestTargetC
345 }
346
347 bool
348-Http::One::RequestParser::parseUriField(Http1::Tokenizer &tok)
349+Http::One::RequestParser::parseUriField(Tokenizer &tok)
350 {
351 /* Arbitrary 64KB URI upper length limit.
352 *
353@@ -178,7 +178,7 @@ Http::One::RequestParser::parseUriField(
354 }
355
356 bool
357-Http::One::RequestParser::parseHttpVersionField(Http1::Tokenizer &tok)
358+Http::One::RequestParser::parseHttpVersionField(Tokenizer &tok)
359 {
360 static const SBuf http1p0("HTTP/1.0");
361 static const SBuf http1p1("HTTP/1.1");
362@@ -253,7 +253,7 @@ Http::One::RequestParser::skipDelimiter(
363
364 /// Parse CRs at the end of request-line, just before the terminating LF.
365 bool
366-Http::One::RequestParser::skipTrailingCrs(Http1::Tokenizer &tok)
367+Http::One::RequestParser::skipTrailingCrs(Tokenizer &tok)
368 {
369 if (Config.onoff.relaxed_header_parser) {
370 (void)tok.skipAllTrailing(CharacterSet::CR); // optional; multiple OK
371@@ -289,12 +289,12 @@ Http::One::RequestParser::parseRequestFi
372 // Earlier, skipGarbageLines() took care of any leading LFs (if allowed).
373 // Now, the request line has to end at the first LF.
374 static const CharacterSet lineChars = CharacterSet::LF.complement("notLF");
375- ::Parser::Tokenizer lineTok(buf_);
376+ Tokenizer lineTok(buf_);
377 if (!lineTok.prefix(line, lineChars) || !lineTok.skip('\n')) {
378 if (buf_.length() >= Config.maxRequestHeaderSize) {
379 /* who should we blame for our failure to parse this line? */
380
381- Http1::Tokenizer methodTok(buf_);
382+ Tokenizer methodTok(buf_);
383 if (!parseMethodField(methodTok))
384 return -1; // blame a bad method (or its delimiter)
385
386@@ -308,7 +308,7 @@ Http::One::RequestParser::parseRequestFi
387 return 0;
388 }
389
390- Http1::Tokenizer tok(line);
391+ Tokenizer tok(line);
392
393 if (!parseMethodField(tok))
394 return -1;
395--- a/src/http/one/RequestParser.h
396+++ b/src/http/one/RequestParser.h
397@@ -54,11 +54,11 @@ private:
398 bool doParse(const SBuf &aBuf);
399
400 /* all these return false and set parseStatusCode on parsing failures */
401- bool parseMethodField(Http1::Tokenizer &);
402- bool parseUriField(Http1::Tokenizer &);
403- bool parseHttpVersionField(Http1::Tokenizer &);
404+ bool parseMethodField(Tokenizer &);
405+ bool parseUriField(Tokenizer &);
406+ bool parseHttpVersionField(Tokenizer &);
407 bool skipDelimiter(const size_t count, const char *where);
408- bool skipTrailingCrs(Http1::Tokenizer &tok);
409+ bool skipTrailingCrs(Tokenizer &tok);
410
411 bool http0() const {return !msgProtocol_.major;}
412 static const CharacterSet &RequestTargetCharacters();
413--- a/src/http/one/ResponseParser.cc
414+++ b/src/http/one/ResponseParser.cc
415@@ -9,8 +9,8 @@
416 #include "squid.h"
417 #include "Debug.h"
418 #include "http/one/ResponseParser.h"
419-#include "http/one/Tokenizer.h"
420 #include "http/ProtocolVersion.h"
421+#include "parser/Tokenizer.h"
422 #include "profiler/Profiler.h"
423 #include "SquidConfig.h"
424
425@@ -47,7 +47,7 @@ Http::One::ResponseParser::firstLineSize
426 // NP: we found the protocol version and consumed it already.
427 // just need the status code and reason phrase
428 int
429-Http::One::ResponseParser::parseResponseStatusAndReason(Http1::Tokenizer &tok, const CharacterSet &WspDelim)
430+Http::One::ResponseParser::parseResponseStatusAndReason(Tokenizer &tok, const CharacterSet &WspDelim)
431 {
432 if (!completedStatus_) {
433 debugs(74, 9, "seek status-code in: " << tok.remaining().substr(0,10) << "...");
434@@ -87,14 +87,13 @@ Http::One::ResponseParser::parseResponse
435 static const CharacterSet phraseChars = CharacterSet::WSP + CharacterSet::VCHAR + CharacterSet::OBSTEXT;
436 (void)tok.prefix(reasonPhrase_, phraseChars); // optional, no error if missing
437 try {
438- if (skipLineTerminator(tok)) {
439- debugs(74, DBG_DATA, "parse remaining buf={length=" << tok.remaining().length() << ", data='" << tok.remaining() << "'}");
440- buf_ = tok.remaining(); // resume checkpoint
441- return 1;
442- }
443+ skipLineTerminator(tok);
444+ buf_ = tok.remaining(); // resume checkpoint
445+ debugs(74, DBG_DATA, Raw("leftovers", buf_.rawContent(), buf_.length()));
446+ return 1;
447+ } catch (const InsufficientInput &) {
448 reasonPhrase_.clear();
449 return 0; // need more to be sure we have it all
450-
451 } catch (const std::exception &ex) {
452 debugs(74, 6, "invalid status-line: " << ex.what());
453 }
454@@ -119,7 +118,7 @@ Http::One::ResponseParser::parseResponse
455 int
456 Http::One::ResponseParser::parseResponseFirstLine()
457 {
458- Http1::Tokenizer tok(buf_);
459+ Tokenizer tok(buf_);
460
461 const CharacterSet &WspDelim = DelimiterCharacters();
462
463--- a/src/http/one/ResponseParser.h
464+++ b/src/http/one/ResponseParser.h
465@@ -43,7 +43,7 @@ public:
466
467 private:
468 int parseResponseFirstLine();
469- int parseResponseStatusAndReason(Http1::Tokenizer&, const CharacterSet &);
470+ int parseResponseStatusAndReason(Tokenizer&, const CharacterSet &);
471
472 /// magic prefix for identifying ICY response messages
473 static const SBuf IcyMagic;
474--- a/src/http/one/TeChunkedParser.cc
475+++ b/src/http/one/TeChunkedParser.cc
476@@ -13,10 +13,13 @@
477 #include "http/one/Tokenizer.h"
478 #include "http/ProtocolVersion.h"
479 #include "MemBuf.h"
480+#include "parser/Tokenizer.h"
481 #include "Parsing.h"
482+#include "sbuf/Stream.h"
483 #include "SquidConfig.h"
484
485-Http::One::TeChunkedParser::TeChunkedParser()
486+Http::One::TeChunkedParser::TeChunkedParser():
487+ customExtensionValueParser(nullptr)
488 {
489 // chunked encoding only exists in HTTP/1.1
490 Http1::Parser::msgProtocol_ = Http::ProtocolVersion(1,1);
491@@ -31,7 +34,11 @@ Http::One::TeChunkedParser::clear()
492 buf_.clear();
493 theChunkSize = theLeftBodySize = 0;
494 theOut = NULL;
495- useOriginBody = -1;
496+ // XXX: We do not reset customExtensionValueParser here. Based on the
497+ // clear() API description, we must, but it makes little sense and could
498+ // break method callers if they appear because some of them may forget to
499+ // reset customExtensionValueParser. TODO: Remove Http1::Parser as our
500+ // parent class and this unnecessary method with it.
501 }
502
503 bool
504@@ -49,14 +56,14 @@ Http::One::TeChunkedParser::parse(const
505 if (parsingStage_ == Http1::HTTP_PARSE_NONE)
506 parsingStage_ = Http1::HTTP_PARSE_CHUNK_SZ;
507
508- Http1::Tokenizer tok(buf_);
509+ Tokenizer tok(buf_);
510
511 // loop for as many chunks as we can
512 // use do-while instead of while so that we can incrementally
513 // restart in the middle of a chunk/frame
514 do {
515
516- if (parsingStage_ == Http1::HTTP_PARSE_CHUNK_EXT && !parseChunkExtension(tok, theChunkSize))
517+ if (parsingStage_ == Http1::HTTP_PARSE_CHUNK_EXT && !parseChunkMetadataSuffix(tok))
518 return false;
519
520 if (parsingStage_ == Http1::HTTP_PARSE_CHUNK && !parseChunkBody(tok))
521@@ -80,7 +87,7 @@ Http::One::TeChunkedParser::needsMoreSpa
522
523 /// RFC 7230 section 4.1 chunk-size
524 bool
525-Http::One::TeChunkedParser::parseChunkSize(Http1::Tokenizer &tok)
526+Http::One::TeChunkedParser::parseChunkSize(Tokenizer &tok)
527 {
528 Must(theChunkSize <= 0); // Should(), really
529
530@@ -104,66 +111,75 @@ Http::One::TeChunkedParser::parseChunkSi
531 return false; // should not be reachable
532 }
533
534-/**
535- * Parses chunk metadata suffix, looking for interesting extensions and/or
536- * getting to the line terminator. RFC 7230 section 4.1.1 and its Errata #4667:
537- *
538- * chunk-ext = *( BWS ";" BWS chunk-ext-name [ BWS "=" BWS chunk-ext-val ] )
539- * chunk-ext-name = token
540- * chunk-ext-val = token / quoted-string
541- *
542- * ICAP 'use-original-body=N' extension is supported.
543- */
544-bool
545-Http::One::TeChunkedParser::parseChunkExtension(Http1::Tokenizer &tok, bool skipKnown)
546-{
547- SBuf ext;
548- SBuf value;
549- while (
550- ParseBws(tok) && // Bug 4492: IBM_HTTP_Server sends SP after chunk-size
551- tok.skip(';') &&
552- ParseBws(tok) && // Bug 4492: ICAP servers send SP before chunk-ext-name
553- tok.prefix(ext, CharacterSet::TCHAR)) { // chunk-ext-name
554-
555- // whole value part is optional. if no '=' expect next chunk-ext
556- if (ParseBws(tok) && tok.skip('=') && ParseBws(tok)) {
557-
558- if (!skipKnown) {
559- if (ext.cmp("use-original-body",17) == 0 && tok.int64(useOriginBody, 10)) {
560- debugs(94, 3, "Found chunk extension " << ext << "=" << useOriginBody);
561- buf_ = tok.remaining(); // parse checkpoint
562- continue;
563- }
564- }
565-
566- debugs(94, 5, "skipping unknown chunk extension " << ext);
567-
568- // unknown might have a value token or quoted-string
569- if (tok.quotedStringOrToken(value) && !tok.atEnd()) {
570- buf_ = tok.remaining(); // parse checkpoint
571- continue;
572- }
573-
574- // otherwise need more data OR corrupt syntax
575- break;
576- }
577-
578- if (!tok.atEnd())
579- buf_ = tok.remaining(); // parse checkpoint (unless there might be more token name)
580- }
581-
582- if (skipLineTerminator(tok)) {
583- buf_ = tok.remaining(); // checkpoint
584- // non-0 chunk means data, 0-size means optional Trailer follows
585+/// Parses "[chunk-ext] CRLF" from RFC 7230 section 4.1.1:
586+/// chunk = chunk-size [ chunk-ext ] CRLF chunk-data CRLF
587+/// last-chunk = 1*"0" [ chunk-ext ] CRLF
588+bool
589+Http::One::TeChunkedParser::parseChunkMetadataSuffix(Tokenizer &tok)
590+{
591+ // Code becomes much simpler when incremental parsing functions throw on
592+ // bad or insufficient input, like in the code below. TODO: Expand up.
593+ try {
594+ parseChunkExtensions(tok); // a possibly empty chunk-ext list
595+ skipLineTerminator(tok);
596+ buf_ = tok.remaining();
597 parsingStage_ = theChunkSize ? Http1::HTTP_PARSE_CHUNK : Http1::HTTP_PARSE_MIME;
598 return true;
599+ } catch (const InsufficientInput &) {
600+ tok.reset(buf_); // backtrack to the last commit point
601+ return false;
602 }
603+ // other exceptions bubble up to kill message parsing
604+}
605+
606+/// Parses the chunk-ext list (RFC 7230 section 4.1.1 and its Errata #4667):
607+/// chunk-ext = *( BWS ";" BWS chunk-ext-name [ BWS "=" BWS chunk-ext-val ] )
608+void
609+Http::One::TeChunkedParser::parseChunkExtensions(Tokenizer &tok)
610+{
611+ do {
612+ ParseBws(tok); // Bug 4492: IBM_HTTP_Server sends SP after chunk-size
613
614- return false;
615+ if (!tok.skip(';'))
616+ return; // reached the end of extensions (if any)
617+
618+ parseOneChunkExtension(tok);
619+ buf_ = tok.remaining(); // got one extension
620+ } while (true);
621+}
622+
623+void
624+Http::One::ChunkExtensionValueParser::Ignore(Tokenizer &tok, const SBuf &extName)
625+{
626+ const auto ignoredValue = tokenOrQuotedString(tok);
627+ debugs(94, 5, extName << " with value " << ignoredValue);
628+}
629+
630+/// Parses a single chunk-ext list element:
631+/// chunk-ext = *( BWS ";" BWS chunk-ext-name [ BWS "=" BWS chunk-ext-val ] )
632+void
633+Http::One::TeChunkedParser::parseOneChunkExtension(Tokenizer &tok)
634+{
635+ ParseBws(tok); // Bug 4492: ICAP servers send SP before chunk-ext-name
636+
637+ const auto extName = tok.prefix("chunk-ext-name", CharacterSet::TCHAR);
638+
639+ ParseBws(tok);
640+
641+ if (!tok.skip('='))
642+ return; // parsed a valueless chunk-ext
643+
644+ ParseBws(tok);
645+
646+ // optimization: the only currently supported extension needs last-chunk
647+ if (!theChunkSize && customExtensionValueParser)
648+ customExtensionValueParser->parse(tok, extName);
649+ else
650+ ChunkExtensionValueParser::Ignore(tok, extName);
651 }
652
653 bool
654-Http::One::TeChunkedParser::parseChunkBody(Http1::Tokenizer &tok)
655+Http::One::TeChunkedParser::parseChunkBody(Tokenizer &tok)
656 {
657 if (theLeftBodySize > 0) {
658 buf_ = tok.remaining(); // sync buffers before buf_ use
659@@ -188,17 +204,20 @@ Http::One::TeChunkedParser::parseChunkBo
660 }
661
662 bool
663-Http::One::TeChunkedParser::parseChunkEnd(Http1::Tokenizer &tok)
664+Http::One::TeChunkedParser::parseChunkEnd(Tokenizer &tok)
665 {
666 Must(theLeftBodySize == 0); // Should(), really
667
668- if (skipLineTerminator(tok)) {
669+ try {
670+ skipLineTerminator(tok);
671 buf_ = tok.remaining(); // parse checkpoint
672 theChunkSize = 0; // done with the current chunk
673 parsingStage_ = Http1::HTTP_PARSE_CHUNK_SZ;
674 return true;
675 }
676-
677- return false;
678+ catch (const InsufficientInput &) {
679+ return false;
680+ }
681+ // other exceptions bubble up to kill message parsing
682 }
683
684--- a/src/http/one/TeChunkedParser.h
685+++ b/src/http/one/TeChunkedParser.h
686@@ -18,6 +18,26 @@ namespace Http
687 namespace One
688 {
689
690+using ::Parser::InsufficientInput;
691+
692+// TODO: Move this class into http/one/ChunkExtensionValueParser.*
693+/// A customizable parser of a single chunk extension value (chunk-ext-val).
694+/// From RFC 7230 section 4.1.1 and its Errata #4667:
695+/// chunk-ext = *( BWS ";" BWS chunk-ext-name [ BWS "=" BWS chunk-ext-val ] )
696+/// chunk-ext-name = token
697+/// chunk-ext-val = token / quoted-string
698+class ChunkExtensionValueParser
699+{
700+public:
701+ typedef ::Parser::Tokenizer Tokenizer;
702+
703+ /// extracts and ignores the value of a named extension
704+ static void Ignore(Tokenizer &tok, const SBuf &extName);
705+
706+ /// extracts and then interprets (or ignores) the extension value
707+ virtual void parse(Tokenizer &tok, const SBuf &extName) = 0;
708+};
709+
710 /**
711 * An incremental parser for chunked transfer coding
712 * defined in RFC 7230 section 4.1.
713@@ -25,7 +45,7 @@ namespace One
714 *
715 * The parser shovels content bytes from the raw
716 * input buffer into the content output buffer, both caller-supplied.
717- * Ignores chunk extensions except for ICAP's ieof.
718+ * Chunk extensions like use-original-body are handled via parseExtensionValuesWith().
719 * Trailers are available via mimeHeader() if wanted.
720 */
721 class TeChunkedParser : public Http1::Parser
722@@ -37,6 +57,10 @@ public:
723 /// set the buffer to be used to store decoded chunk data
724 void setPayloadBuffer(MemBuf *parsedContent) {theOut = parsedContent;}
725
726+ /// Instead of ignoring all chunk extension values, give the supplied
727+ /// parser a chance to handle them. Only applied to last-chunk (for now).
728+ void parseExtensionValuesWith(ChunkExtensionValueParser *parser) { customExtensionValueParser = parser; }
729+
730 bool needsMoreSpace() const;
731
732 /* Http1::Parser API */
733@@ -45,17 +69,20 @@ public:
734 virtual Parser::size_type firstLineSize() const {return 0;} // has no meaning with multiple chunks
735
736 private:
737- bool parseChunkSize(Http1::Tokenizer &tok);
738- bool parseChunkExtension(Http1::Tokenizer &tok, bool skipKnown);
739- bool parseChunkBody(Http1::Tokenizer &tok);
740- bool parseChunkEnd(Http1::Tokenizer &tok);
741+ bool parseChunkSize(Tokenizer &tok);
742+ bool parseChunkMetadataSuffix(Tokenizer &);
743+ void parseChunkExtensions(Tokenizer &);
744+ void parseOneChunkExtension(Tokenizer &);
745+ bool parseChunkBody(Tokenizer &tok);
746+ bool parseChunkEnd(Tokenizer &tok);
747
748 MemBuf *theOut;
749 uint64_t theChunkSize;
750 uint64_t theLeftBodySize;
751
752-public:
753- int64_t useOriginBody;
754+ /// An optional plugin for parsing and interpreting custom chunk-ext-val.
755+ /// This "visitor" object is owned by our creator.
756+ ChunkExtensionValueParser *customExtensionValueParser;
757 };
758
759 } // namespace One
760--- a/src/http/one/Tokenizer.cc
761+++ b/src/http/one/Tokenizer.cc
762@@ -8,35 +8,18 @@
763
764 #include "squid.h"
765 #include "Debug.h"
766+#include "http/one/Parser.h"
767 #include "http/one/Tokenizer.h"
768+#include "parser/Tokenizer.h"
769+#include "sbuf/Stream.h"
770
771-bool
772-Http::One::Tokenizer::quotedString(SBuf &returnedToken, const bool http1p0)
773+/// Extracts quoted-string after the caller removes the initial '"'.
774+/// \param http1p0 whether to prohibit \-escaped characters in quoted strings
775+/// \throws InsufficientInput when input can be a token _prefix_
776+/// \returns extracted quoted string (without quotes and with chars unescaped)
777+static SBuf
778+parseQuotedStringSuffix(Parser::Tokenizer &tok, const bool http1p0)
779 {
780- checkpoint();
781-
782- if (!skip('"'))
783- return false;
784-
785- return qdText(returnedToken, http1p0);
786-}
787-
788-bool
789-Http::One::Tokenizer::quotedStringOrToken(SBuf &returnedToken, const bool http1p0)
790-{
791- checkpoint();
792-
793- if (!skip('"'))
794- return prefix(returnedToken, CharacterSet::TCHAR);
795-
796- return qdText(returnedToken, http1p0);
797-}
798-
799-bool
800-Http::One::Tokenizer::qdText(SBuf &returnedToken, const bool http1p0)
801-{
802- // the initial DQUOTE has been skipped by the caller
803-
804 /*
805 * RFC 1945 - defines qdtext:
806 * inclusive of LWS (which includes CR and LF)
807@@ -61,12 +44,17 @@ Http::One::Tokenizer::qdText(SBuf &retur
808 // best we can do is a conditional reference since http1p0 value may change per-client
809 const CharacterSet &tokenChars = (http1p0 ? qdtext1p0 : qdtext1p1);
810
811- for (;;) {
812- SBuf::size_type prefixLen = buf().findFirstNotOf(tokenChars);
813- returnedToken.append(consume(prefixLen));
814+ SBuf parsedToken;
815+
816+ while (!tok.atEnd()) {
817+ SBuf qdText;
818+ if (tok.prefix(qdText, tokenChars))
819+ parsedToken.append(qdText);
820+
821+ if (!http1p0 && tok.skip('\\')) { // HTTP/1.1 allows quoted-pair, HTTP/1.0 does not
822+ if (tok.atEnd())
823+ break;
824
825- // HTTP/1.1 allows quoted-pair, HTTP/1.0 does not
826- if (!http1p0 && skip('\\')) {
827 /* RFC 7230 section 3.2.6
828 *
829 * The backslash octet ("\") can be used as a single-octet quoting
830@@ -78,32 +66,42 @@ Http::One::Tokenizer::qdText(SBuf &retur
831 */
832 static const CharacterSet qPairChars = CharacterSet::HTAB + CharacterSet::SP + CharacterSet::VCHAR + CharacterSet::OBSTEXT;
833 SBuf escaped;
834- if (!prefix(escaped, qPairChars, 1)) {
835- returnedToken.clear();
836- restoreLastCheckpoint();
837- return false;
838- }
839- returnedToken.append(escaped);
840+ if (!tok.prefix(escaped, qPairChars, 1))
841+ throw TexcHere("invalid escaped character in quoted-pair");
842+
843+ parsedToken.append(escaped);
844 continue;
845+ }
846
847- } else if (skip('"')) {
848- break; // done
849+ if (tok.skip('"'))
850+ return parsedToken; // may be empty
851
852- } else if (atEnd()) {
853- // need more data
854- returnedToken.clear();
855- restoreLastCheckpoint();
856- return false;
857- }
858+ if (tok.atEnd())
859+ break;
860
861- // else, we have an error
862- debugs(24, 8, "invalid bytes for set " << tokenChars.name);
863- returnedToken.clear();
864- restoreLastCheckpoint();
865- return false;
866+ throw TexcHere(ToSBuf("invalid bytes for set ", tokenChars.name));
867 }
868
869- // found the whole string
870- return true;
871+ throw Http::One::InsufficientInput();
872+}
873+
874+SBuf
875+Http::One::tokenOrQuotedString(Parser::Tokenizer &tok, const bool http1p0)
876+{
877+ if (tok.skip('"'))
878+ return parseQuotedStringSuffix(tok, http1p0);
879+
880+ if (tok.atEnd())
881+ throw InsufficientInput();
882+
883+ SBuf parsedToken;
884+ if (!tok.prefix(parsedToken, CharacterSet::TCHAR))
885+ throw TexcHere("invalid input while expecting an HTTP token");
886+
887+ if (tok.atEnd())
888+ throw InsufficientInput();
889+
890+ // got the complete token
891+ return parsedToken;
892 }
893
894--- a/src/http/one/Tokenizer.h
895+++ b/src/http/one/Tokenizer.h
896@@ -9,68 +9,47 @@
897 #ifndef SQUID_SRC_HTTP_ONE_TOKENIZER_H
898 #define SQUID_SRC_HTTP_ONE_TOKENIZER_H
899
900-#include "parser/Tokenizer.h"
901+#include "parser/forward.h"
902+#include "sbuf/forward.h"
903
904 namespace Http {
905 namespace One {
906
907 /**
908- * Lexical processor extended to tokenize HTTP/1.x syntax.
909+ * Extracts either an HTTP/1 token or quoted-string while dealing with
910+ * possibly incomplete input typical for incremental text parsers.
911+ * Unescapes escaped characters in HTTP/1.1 quoted strings.
912 *
913- * \see ::Parser::Tokenizer for more detail
914+ * \param http1p0 whether to prohibit \-escaped characters in quoted strings
915+ * \throws InsufficientInput as appropriate, including on unterminated tokens
916+ * \returns extracted token or quoted string (without quotes)
917+ *
918+ * Governed by:
919+ * - RFC 1945 section 2.1
920+ * "
921+ * A string of text is parsed as a single word if it is quoted using
922+ * double-quote marks.
923+ *
924+ * quoted-string = ( <"> *(qdtext) <"> )
925+ *
926+ * qdtext = <any CHAR except <"> and CTLs,
927+ * but including LWS>
928+ *
929+ * Single-character quoting using the backslash ("\") character is not
930+ * permitted in HTTP/1.0.
931+ * "
932+ *
933+ * - RFC 7230 section 3.2.6
934+ * "
935+ * A string of text is parsed as a single value if it is quoted using
936+ * double-quote marks.
937+ *
938+ * quoted-string = DQUOTE *( qdtext / quoted-pair ) DQUOTE
939+ * qdtext = HTAB / SP /%x21 / %x23-5B / %x5D-7E / obs-text
940+ * obs-text = %x80-FF
941+ * "
942 */
943-class Tokenizer : public ::Parser::Tokenizer
944-{
945-public:
946- Tokenizer(SBuf &s) : ::Parser::Tokenizer(s), savedStats_(0) {}
947-
948- /**
949- * Attempt to parse a quoted-string lexical construct.
950- *
951- * Governed by:
952- * - RFC 1945 section 2.1
953- * "
954- * A string of text is parsed as a single word if it is quoted using
955- * double-quote marks.
956- *
957- * quoted-string = ( <"> *(qdtext) <"> )
958- *
959- * qdtext = <any CHAR except <"> and CTLs,
960- * but including LWS>
961- *
962- * Single-character quoting using the backslash ("\") character is not
963- * permitted in HTTP/1.0.
964- * "
965- *
966- * - RFC 7230 section 3.2.6
967- * "
968- * A string of text is parsed as a single value if it is quoted using
969- * double-quote marks.
970- *
971- * quoted-string = DQUOTE *( qdtext / quoted-pair ) DQUOTE
972- * qdtext = HTAB / SP /%x21 / %x23-5B / %x5D-7E / obs-text
973- * obs-text = %x80-FF
974- * "
975- *
976- * \param escaped HTTP/1.0 does not permit \-escaped characters
977- */
978- bool quotedString(SBuf &value, const bool http1p0 = false);
979-
980- /**
981- * Attempt to parse a (token / quoted-string ) lexical construct.
982- */
983- bool quotedStringOrToken(SBuf &value, const bool http1p0 = false);
984-
985-private:
986- /// parse the internal component of a quote-string, and terminal DQUOTE
987- bool qdText(SBuf &value, const bool http1p0);
988-
989- void checkpoint() { savedCheckpoint_ = buf(); savedStats_ = parsedSize(); }
990- void restoreLastCheckpoint() { undoParse(savedCheckpoint_, savedStats_); }
991-
992- SBuf savedCheckpoint_;
993- SBuf::size_type savedStats_;
994-};
995+SBuf tokenOrQuotedString(Parser::Tokenizer &tok, const bool http1p0 = false);
996
997 } // namespace One
998 } // namespace Http
999--- a/src/http/one/forward.h
1000+++ b/src/http/one/forward.h
1001@@ -10,6 +10,7 @@
1002 #define SQUID_SRC_HTTP_ONE_FORWARD_H
1003
1004 #include "base/RefCount.h"
1005+#include "parser/forward.h"
1006 #include "sbuf/forward.h"
1007
1008 namespace Http {
1009@@ -31,6 +32,8 @@ typedef RefCount<Http::One::ResponsePars
1010 /// CRLF textual representation
1011 const SBuf &CrLf();
1012
1013+using ::Parser::InsufficientInput;
1014+
1015 } // namespace One
1016 } // namespace Http
1017
1018--- a/src/parser/BinaryTokenizer.h
1019+++ b/src/parser/BinaryTokenizer.h
1020@@ -9,6 +9,7 @@
1021 #ifndef SQUID_SRC_PARSER_BINARYTOKENIZER_H
1022 #define SQUID_SRC_PARSER_BINARYTOKENIZER_H
1023
1024+#include "parser/forward.h"
1025 #include "sbuf/SBuf.h"
1026
1027 namespace Parser
1028@@ -44,7 +45,7 @@ public:
1029 class BinaryTokenizer
1030 {
1031 public:
1032- class InsufficientInput {}; // thrown when a method runs out of data
1033+ typedef ::Parser::InsufficientInput InsufficientInput;
1034 typedef uint64_t size_type; // enough for the largest supported offset
1035
1036 BinaryTokenizer();
1037--- a/src/parser/Makefile.am
1038+++ b/src/parser/Makefile.am
1039@@ -13,6 +13,7 @@ noinst_LTLIBRARIES = libparser.la
1040 libparser_la_SOURCES = \
1041 BinaryTokenizer.h \
1042 BinaryTokenizer.cc \
1043+ forward.h \
1044 Tokenizer.h \
1045 Tokenizer.cc
1046
1047--- a/src/parser/Tokenizer.cc
1048+++ b/src/parser/Tokenizer.cc
1049@@ -10,7 +10,9 @@
1050
1051 #include "squid.h"
1052 #include "Debug.h"
1053+#include "parser/forward.h"
1054 #include "parser/Tokenizer.h"
1055+#include "sbuf/Stream.h"
1056
1057 #include <cerrno>
1058 #if HAVE_CTYPE_H
1059@@ -96,6 +98,23 @@ Parser::Tokenizer::prefix(SBuf &returned
1060 return true;
1061 }
1062
1063+SBuf
1064+Parser::Tokenizer::prefix(const char *description, const CharacterSet &tokenChars, const SBuf::size_type limit)
1065+{
1066+ if (atEnd())
1067+ throw InsufficientInput();
1068+
1069+ SBuf result;
1070+
1071+ if (!prefix(result, tokenChars, limit))
1072+ throw TexcHere(ToSBuf("cannot parse ", description));
1073+
1074+ if (atEnd())
1075+ throw InsufficientInput();
1076+
1077+ return result;
1078+}
1079+
1080 bool
1081 Parser::Tokenizer::suffix(SBuf &returnedToken, const CharacterSet &tokenChars, const SBuf::size_type limit)
1082 {
1083@@ -283,3 +302,24 @@ Parser::Tokenizer::int64(int64_t & resul
1084 return success(s - range.rawContent());
1085 }
1086
1087+int64_t
1088+Parser::Tokenizer::udec64(const char *description, const SBuf::size_type limit)
1089+{
1090+ if (atEnd())
1091+ throw InsufficientInput();
1092+
1093+ int64_t result = 0;
1094+
1095+ // Since we only support unsigned decimals, a parsing failure with a
1096+ // non-empty input always implies invalid/malformed input (or a buggy
1097+ // limit=0 caller). TODO: Support signed and non-decimal integers by
1098+ // refactoring int64() to detect insufficient input.
1099+ if (!int64(result, 10, false, limit))
1100+ throw TexcHere(ToSBuf("cannot parse ", description));
1101+
1102+ if (atEnd())
1103+ throw InsufficientInput(); // more digits may be coming
1104+
1105+ return result;
1106+}
1107+
1108--- a/src/parser/Tokenizer.h
1109+++ b/src/parser/Tokenizer.h
1110@@ -143,6 +143,19 @@ public:
1111 */
1112 bool int64(int64_t &result, int base = 0, bool allowSign = true, SBuf::size_type limit = SBuf::npos);
1113
1114+ /*
1115+ * The methods below mimic their counterparts documented above, but they
1116+ * throw on errors, including InsufficientInput. The field description
1117+ * parameter is used for error reporting and debugging.
1118+ */
1119+
1120+ /// prefix() wrapper but throws InsufficientInput if input contains
1121+ /// nothing but the prefix (i.e. if the prefix is not "terminated")
1122+ SBuf prefix(const char *description, const CharacterSet &tokenChars, SBuf::size_type limit = SBuf::npos);
1123+
1124+ /// int64() wrapper but limited to unsigned decimal integers (for now)
1125+ int64_t udec64(const char *description, SBuf::size_type limit = SBuf::npos);
1126+
1127 protected:
1128 SBuf consume(const SBuf::size_type n);
1129 SBuf::size_type success(const SBuf::size_type n);
1130--- /dev/null
1131+++ b/src/parser/forward.h
1132@@ -0,0 +1,22 @@
1133+/*
1134+ * Copyright (C) 1996-2019 The Squid Software Foundation and contributors
1135+ *
1136+ * Squid software is distributed under GPLv2+ license and includes
1137+ * contributions from numerous individuals and organizations.
1138+ * Please see the COPYING and CONTRIBUTORS files for details.
1139+ */
1140+
1141+#ifndef SQUID_PARSER_FORWARD_H
1142+#define SQUID_PARSER_FORWARD_H
1143+
1144+namespace Parser {
1145+class Tokenizer;
1146+class BinaryTokenizer;
1147+
1148+// TODO: Move this declaration (to parser/Elements.h) if we need more like it.
1149+/// thrown by modern "incremental" parsers when they need more data
1150+class InsufficientInput {};
1151+} // namespace Parser
1152+
1153+#endif /* SQUID_PARSER_FORWARD_H */
1154+
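
The hunks above replace the rollback-based Http::One::Tokenizer with free functions plus Parser::Tokenizer helpers that throw Parser::InsufficientInput when the buffer may merely be short, and an ordinary error when the input is malformed. Below is a minimal standalone sketch of that calling pattern, using only the C++ standard library; InsufficientInput, parseTokenPrefix and the simplified token character set here are invented for the illustration and are not Squid APIs.

#include <cctype>
#include <cstddef>
#include <iostream>
#include <stdexcept>
#include <string>
#include <string_view>

// Thrown when the buffer may simply be missing bytes (the role played by
// Parser::InsufficientInput in the patch above).
struct InsufficientInput {};

// Extracts a run of "token" characters (simplified here to alphanumerics and
// '-'). Throws InsufficientInput when the run touches the end of the buffer,
// because more token bytes may still arrive; throws runtime_error when the
// buffer starts with a byte that can never belong to a token.
static std::string parseTokenPrefix(std::string_view &buf, const char *description)
{
    if (buf.empty())
        throw InsufficientInput();

    std::size_t n = 0;
    while (n < buf.size() &&
           (std::isalnum(static_cast<unsigned char>(buf[n])) || buf[n] == '-'))
        ++n;

    if (n == 0)
        throw std::runtime_error(std::string("cannot parse ") + description);

    if (n == buf.size())
        throw InsufficientInput(); // the token may continue in the next read

    std::string result(buf.substr(0, n));
    buf.remove_prefix(n); // consume only on success, like the patched helpers
    return result;
}

int main()
{
    std::string_view complete = "chunk-ext-name;rest";
    std::cout << parseTokenPrefix(complete, "chunk-ext-name") << "\n"; // prints chunk-ext-name

    std::string_view partial = "chunk-ext-na"; // delimiter not received yet
    try {
        parseTokenPrefix(partial, "chunk-ext-name");
    } catch (const InsufficientInput &) {
        std::cout << "need more data\n"; // caller keeps buffering and retries
    }
    return 0;
}

The point the patch relies on is the same: a prefix that runs into the end of the buffer is reported as "need more data" rather than silently accepted, so incremental callers retry once more bytes arrive instead of acting on a possibly truncated token.
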
diff --git a/meta-networking/recipes-daemons/squid/files/CVE-2023-46846.patch b/meta-networking/recipes-daemons/squid/files/CVE-2023-46846.patch
new file mode 100644
index 000000000..a6d0965e7
--- /dev/null
+++ b/meta-networking/recipes-daemons/squid/files/CVE-2023-46846.patch
@@ -0,0 +1,169 @@
1From 05f6af2f4c85cc99323cfff6149c3d74af661b6d Mon Sep 17 00:00:00 2001
2From: Amos Jeffries <yadij@users.noreply.github.com>
3Date: Fri, 13 Oct 2023 08:44:16 +0000
4Subject: [PATCH] RFC 9112: Improve HTTP chunked encoding compliance (#1498)
5
6Upstream-Status: Backport [import from ubuntu https://git.launchpad.net/ubuntu/+source/squid/tree/debian/patches/CVE-2023-46846.patch?h=ubuntu/focal-security&id=9ccd217ca9428c9a6597e9310a99552026b245fa
7Upstream commit https://github.com/squid-cache/squid/commit/05f6af2f4c85cc99323cfff6149c3d74af661b6d]
8CVE: CVE-2023-46846
9Signed-off-by: Vijay Anusuri <vanusuri@mvista.com>
10---
11 src/http/one/Parser.cc | 8 +-------
12 src/http/one/Parser.h | 4 +---
13 src/http/one/TeChunkedParser.cc | 23 ++++++++++++++++++-----
14 src/parser/Tokenizer.cc | 12 ++++++++++++
15 src/parser/Tokenizer.h | 7 +++++++
16 5 files changed, 39 insertions(+), 15 deletions(-)
17
18--- a/src/http/one/Parser.cc
19+++ b/src/http/one/Parser.cc
20@@ -65,16 +65,10 @@ Http::One::Parser::DelimiterCharacters()
21 void
22 Http::One::Parser::skipLineTerminator(Tokenizer &tok) const
23 {
24- if (tok.skip(Http1::CrLf()))
25- return;
26-
27 if (Config.onoff.relaxed_header_parser && tok.skipOne(CharacterSet::LF))
28 return;
29
30- if (tok.atEnd() || (tok.remaining().length() == 1 && tok.remaining().at(0) == '\r'))
31- throw InsufficientInput();
32-
33- throw TexcHere("garbage instead of CRLF line terminator");
34+ tok.skipRequired("line-terminating CRLF", Http1::CrLf());
35 }
36
37 /// all characters except the LF line terminator
38--- a/src/http/one/Parser.h
39+++ b/src/http/one/Parser.h
40@@ -120,9 +120,7 @@ protected:
41 * detect and skip the CRLF or (if tolerant) LF line terminator
42 * consume from the tokenizer.
43 *
44- * \throws exception on bad or InsuffientInput.
45- * \retval true only if line terminator found.
46- * \retval false incomplete or missing line terminator, need more data.
47+ * \throws exception on bad or InsufficientInput
48 */
49 void skipLineTerminator(Tokenizer &) const;
50
51--- a/src/http/one/TeChunkedParser.cc
52+++ b/src/http/one/TeChunkedParser.cc
53@@ -91,6 +91,11 @@ Http::One::TeChunkedParser::parseChunkSi
54 {
55 Must(theChunkSize <= 0); // Should(), really
56
57+ static const SBuf bannedHexPrefixLower("0x");
58+ static const SBuf bannedHexPrefixUpper("0X");
59+ if (tok.skip(bannedHexPrefixLower) || tok.skip(bannedHexPrefixUpper))
60+ throw TextException("chunk starts with 0x", Here());
61+
62 int64_t size = -1;
63 if (tok.int64(size, 16, false) && !tok.atEnd()) {
64 if (size < 0)
65@@ -121,7 +126,7 @@ Http::One::TeChunkedParser::parseChunkMe
66 // bad or insufficient input, like in the code below. TODO: Expand up.
67 try {
68 parseChunkExtensions(tok); // a possibly empty chunk-ext list
69- skipLineTerminator(tok);
70+ tok.skipRequired("CRLF after [chunk-ext]", Http1::CrLf());
71 buf_ = tok.remaining();
72 parsingStage_ = theChunkSize ? Http1::HTTP_PARSE_CHUNK : Http1::HTTP_PARSE_MIME;
73 return true;
74@@ -132,12 +137,14 @@ Http::One::TeChunkedParser::parseChunkMe
75 // other exceptions bubble up to kill message parsing
76 }
77
78-/// Parses the chunk-ext list (RFC 7230 section 4.1.1 and its Errata #4667):
79+/// Parses the chunk-ext list (RFC 9112 section 7.1.1):
80 /// chunk-ext = *( BWS ";" BWS chunk-ext-name [ BWS "=" BWS chunk-ext-val ] )
81 void
82-Http::One::TeChunkedParser::parseChunkExtensions(Tokenizer &tok)
83+Http::One::TeChunkedParser::parseChunkExtensions(Tokenizer &callerTok)
84 {
85 do {
86+ auto tok = callerTok;
87+
88 ParseBws(tok); // Bug 4492: IBM_HTTP_Server sends SP after chunk-size
89
90 if (!tok.skip(';'))
91@@ -145,6 +152,7 @@ Http::One::TeChunkedParser::parseChunkEx
92
93 parseOneChunkExtension(tok);
94 buf_ = tok.remaining(); // got one extension
95+ callerTok = tok;
96 } while (true);
97 }
98
99@@ -158,11 +166,14 @@ Http::One::ChunkExtensionValueParser::Ig
100 /// Parses a single chunk-ext list element:
101 /// chunk-ext = *( BWS ";" BWS chunk-ext-name [ BWS "=" BWS chunk-ext-val ] )
102 void
103-Http::One::TeChunkedParser::parseOneChunkExtension(Tokenizer &tok)
104+Http::One::TeChunkedParser::parseOneChunkExtension(Tokenizer &callerTok)
105 {
106+ auto tok = callerTok;
107+
108 ParseBws(tok); // Bug 4492: ICAP servers send SP before chunk-ext-name
109
110 const auto extName = tok.prefix("chunk-ext-name", CharacterSet::TCHAR);
111+ callerTok = tok; // in case we determine that this is a valueless chunk-ext
112
113 ParseBws(tok);
114
115@@ -176,6 +187,8 @@ Http::One::TeChunkedParser::parseOneChun
116 customExtensionValueParser->parse(tok, extName);
117 else
118 ChunkExtensionValueParser::Ignore(tok, extName);
119+
120+ callerTok = tok;
121 }
122
123 bool
124@@ -209,7 +222,7 @@ Http::One::TeChunkedParser::parseChunkEn
125 Must(theLeftBodySize == 0); // Should(), really
126
127 try {
128- skipLineTerminator(tok);
129+ tok.skipRequired("chunk CRLF", Http1::CrLf());
130 buf_ = tok.remaining(); // parse checkpoint
131 theChunkSize = 0; // done with the current chunk
132 parsingStage_ = Http1::HTTP_PARSE_CHUNK_SZ;
133--- a/src/parser/Tokenizer.cc
134+++ b/src/parser/Tokenizer.cc
135@@ -147,6 +147,18 @@ Parser::Tokenizer::skipAll(const Charact
136 return success(prefixLen);
137 }
138
139+void
140+Parser::Tokenizer::skipRequired(const char *description, const SBuf &tokenToSkip)
141+{
142+ if (skip(tokenToSkip) || tokenToSkip.isEmpty())
143+ return;
144+
145+ if (tokenToSkip.startsWith(buf_))
146+ throw InsufficientInput();
147+
148+ throw TextException(ToSBuf("cannot skip ", description), Here());
149+}
150+
151 bool
152 Parser::Tokenizer::skipOne(const CharacterSet &chars)
153 {
154--- a/src/parser/Tokenizer.h
155+++ b/src/parser/Tokenizer.h
156@@ -115,6 +115,13 @@ public:
157 */
158 SBuf::size_type skipAll(const CharacterSet &discardables);
159
160+ /** skips a given character sequence (string);
161+ * does nothing if the sequence is empty
162+ *
163+ * \throws exception on mismatching prefix or InsufficientInput
164+ */
165+ void skipRequired(const char *description, const SBuf &tokenToSkip);
166+
167 /** Removes a single trailing character from the set.
168 *
169 * \return whether a character was removed
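
The chunked-encoding hunks above tighten RFC 9112 compliance in two visible ways: chunk sizes may not carry a 0x/0X prefix, and chunk lines must end in a real CRLF (skipRequired throws instead of tolerating garbage). The following rough standalone sketch shows those two checks on a complete line; parseChunkSizeLine is invented for the illustration and makes no claim to match Squid's incremental Tokenizer-based implementation.

#include <cctype>
#include <cstddef>
#include <cstdint>
#include <iostream>
#include <stdexcept>
#include <string>

// Parses a complete "chunk-size CRLF" line (chunk extensions omitted for
// brevity) and rejects the shapes the patch above forbids: a 0x/0X prefix on
// the size and a missing or LF-only line terminator.
static std::uint64_t parseChunkSizeLine(const std::string &line)
{
    if (line.rfind("0x", 0) == 0 || line.rfind("0X", 0) == 0)
        throw std::runtime_error("chunk starts with 0x");

    std::size_t pos = 0;
    std::uint64_t size = 0;
    std::size_t digits = 0;
    while (pos < line.size() && std::isxdigit(static_cast<unsigned char>(line[pos]))) {
        const char c = static_cast<char>(std::tolower(static_cast<unsigned char>(line[pos])));
        size = size * 16 +
               static_cast<std::uint64_t>(std::isdigit(static_cast<unsigned char>(c)) ? c - '0' : c - 'a' + 10);
        ++digits;
        ++pos;
    }
    if (digits == 0) // also covers a leading sign or other junk
        throw std::runtime_error("cannot parse chunk-size");

    // Same intent as tok.skipRequired("chunk CRLF", Http1::CrLf()) above.
    if (line.compare(pos, std::string::npos, "\r\n") != 0)
        throw std::runtime_error("cannot skip chunk CRLF");

    return size;
}

int main()
{
    std::cout << parseChunkSizeLine("1a\r\n") << "\n"; // prints 26

    for (const std::string bad : {"0x1a\r\n", "1a\n"}) {
        try {
            parseChunkSizeLine(bad);
        } catch (const std::exception &e) {
            std::cout << "rejected: " << e.what() << "\n";
        }
    }
    return 0;
}

Rejecting these lax forms matters because a proxy and an origin server that disagree on where a chunk ends can be driven into request smuggling, which is the class of problem CVE-2023-46846 describes.
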
diff --git a/meta-networking/recipes-daemons/squid/squid_4.9.bb b/meta-networking/recipes-daemons/squid/squid_4.9.bb
index c9a92772d..98257e54c 100644
--- a/meta-networking/recipes-daemons/squid/squid_4.9.bb
+++ b/meta-networking/recipes-daemons/squid/squid_4.9.bb
@@ -25,6 +25,9 @@ SRC_URI = "http://www.squid-cache.org/Versions/v${MAJ_VER}/${BPN}-${PV}.tar.bz2
            file://0001-tools.cc-fixed-unused-result-warning.patch \
            file://0001-splay.cc-fix-bind-is-not-a-member-of-std.patch \
            file://CVE-2023-46847.patch \
+           file://CVE-2023-46728.patch \
+           file://CVE-2023-46846-pre1.patch \
+           file://CVE-2023-46846.patch \
            "
 
 SRC_URI_remove_toolchain-clang = "file://0001-configure-Check-for-Wno-error-format-truncation-comp.patch"