author     Dan Tran <dantran@microsoft.com>    2019-09-09 18:24:01 +0000
committer  Richard Purdie <richard.purdie@linuxfoundation.org>    2019-10-08 22:52:28 +0100
commit     90e5385568621b2a1eeb4ce206fa2fac96fe3149 (patch)
tree       eb479761ab10edab56293ee54fd6c3a5874cc9e2
parent     45cebeda6e501b9c31ab40030267ea1d6840f34b (diff)
download   poky-90e5385568621b2a1eeb4ce206fa2fac96fe3149.tar.gz
python: Fix 3 CVEs
Fixes CVE-2018-20852, CVE-2019-9740, and CVE-2019-9747.

(From OE-Core rev: 3f1c02aa7b7d485e64503d601124c335d4b7299f)

Signed-off-by: Dan Tran <dantran@microsoft.com>
Signed-off-by: Armin Kuster <akuster808@gmail.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
-rw-r--r--  meta/recipes-devtools/python/python/bpo-30458-cve-2019-9740.patch   219
-rw-r--r--  meta/recipes-devtools/python/python/bpo-35121-cve-2018-20852.patch  127
-rw-r--r--  meta/recipes-devtools/python/python_2.7.16.bb                         2
3 files changed, 348 insertions(+), 0 deletions(-)
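As a quick illustration of the behaviour change carried by the bpo-30458 patch below, here is a minimal sketch, assuming a Python 2.7 interpreter built with these patches applied. The host and port are arbitrary and are never contacted, because the patched httplib rejects the URL before a socket is opened.

import httplib
import urllib2

# A request target with an embedded space and CRLF, the classic header-injection shape.
url = "http://localhost:7777/a HTTP/1.1\r\nX-injected: header\r\n/b"
try:
    urllib2.urlopen(url)
except httplib.InvalidURL as exc:
    # With the patch applied, putrequest() raises before any data is sent.
    print("rejected: %s" % exc)

On an unpatched interpreter the same call attempts a real connection and, if a server were listening, the injected line would arrive as an extra header.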
diff --git a/meta/recipes-devtools/python/python/bpo-30458-cve-2019-9740.patch b/meta/recipes-devtools/python/python/bpo-30458-cve-2019-9740.patch
new file mode 100644
index 0000000000..f4c56bb828
--- /dev/null
+++ b/meta/recipes-devtools/python/python/bpo-30458-cve-2019-9740.patch
@@ -0,0 +1,219 @@
From 39815ee5bb7f2f9ca1f0d5e9f51e27a2877ec35b Mon Sep 17 00:00:00 2001
From: Victor Stinner <victor.stinner@gmail.com>
Date: Tue, 21 May 2019 15:12:33 +0200
Subject: [PATCH] bpo-30458: Disallow control chars in http URLs (GH-12755)
 (GH-13154) (GH-13315)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Disallow control chars in http URLs in urllib2.urlopen. This
addresses a potential security problem for applications that do not
sanity check their URLs where http request headers could be injected.

Disable https related urllib tests on a build without ssl (GH-13032)
These tests require an SSL enabled build. Skip these tests when
python is built without SSL to fix test failures.

Use httplib.InvalidURL instead of ValueError as the new error case's
exception. (GH-13044)

Backport Co-Authored-By: Miro Hrončok <miro@hroncok.cz>

(cherry picked from commit 7e200e0763f5b71c199aaf98bd5588f291585619)

Notes on backport to Python 2.7:

* test_urllib tests urllib.urlopen() which quotes the URL and so is
  not vulerable to HTTP Header Injection.
* Add tests to test_urllib2 on urllib2.urlopen().
* Reject non-ASCII characters: range 0x80-0xff.

CVE: CVE-2019-9740 CVE-2019-9747
Upstream-Status: Accepted

Signed-off-by: Dan Tran <dantran@microsoft.com>
---
 Lib/httplib.py                                | 16 ++++++
 Lib/test/test_urllib.py                       | 25 +++++++++
 Lib/test/test_urllib2.py                      | 51 ++++++++++++++++++-
 Lib/test/test_xmlrpc.py                       |  8 ++-
 .../2019-04-10-08-53-30.bpo-30458.51E-DA.rst  |  1 +
 5 files changed, 99 insertions(+), 2 deletions(-)
 create mode 100644 Misc/NEWS.d/next/Security/2019-04-10-08-53-30.bpo-30458.51E-DA.rst

diff --git a/Lib/httplib.py b/Lib/httplib.py
index 60a8fb4e35..1b41c346e0 100644
--- a/Lib/httplib.py
+++ b/Lib/httplib.py
@@ -247,6 +247,16 @@ _MAXHEADERS = 100
 _is_legal_header_name = re.compile(r'\A[^:\s][^:\r\n]*\Z').match
 _is_illegal_header_value = re.compile(r'\n(?![ \t])|\r(?![ \t\n])').search

+# These characters are not allowed within HTTP URL paths.
+# See https://tools.ietf.org/html/rfc3986#section-3.3 and the
+# https://tools.ietf.org/html/rfc3986#appendix-A pchar definition.
+# Prevents CVE-2019-9740. Includes control characters such as \r\n.
+# Restrict non-ASCII characters above \x7f (0x80-0xff).
+_contains_disallowed_url_pchar_re = re.compile('[\x00-\x20\x7f-\xff]')
+# Arguably only these _should_ allowed:
+# _is_allowed_url_pchars_re = re.compile(r"^[/!$&'()*+,;=:@%a-zA-Z0-9._~-]+$")
+# We are more lenient for assumed real world compatibility purposes.
+
 # We always set the Content-Length header for these methods because some
 # servers will otherwise respond with a 411
 _METHODS_EXPECTING_BODY = {'PATCH', 'POST', 'PUT'}
@@ -927,6 +937,12 @@ class HTTPConnection:
         self._method = method
         if not url:
             url = '/'
+        # Prevent CVE-2019-9740.
+        match = _contains_disallowed_url_pchar_re.search(url)
+        if match:
+            raise InvalidURL("URL can't contain control characters. %r "
+                             "(found at least %r)"
+                             % (url, match.group()))
         hdr = '%s %s %s' % (method, url, self._http_vsn_str)

         self._output(hdr)
diff --git a/Lib/test/test_urllib.py b/Lib/test/test_urllib.py
index 1ce9201c06..d7778d4194 100644
--- a/Lib/test/test_urllib.py
+++ b/Lib/test/test_urllib.py
@@ -257,6 +257,31 @@ class urlopen_HttpTests(unittest.TestCase, FakeHTTPMixin):
         finally:
             self.unfakehttp()

+    def test_url_with_control_char_rejected(self):
+        for char_no in range(0, 0x21) + range(0x7f, 0x100):
+            char = chr(char_no)
+            schemeless_url = "//localhost:7777/test%s/" % char
+            self.fakehttp(b"HTTP/1.1 200 OK\r\n\r\nHello.")
+            try:
+                # urllib quotes the URL so there is no injection.
+                resp = urllib.urlopen("http:" + schemeless_url)
+                self.assertNotIn(char, resp.geturl())
+            finally:
+                self.unfakehttp()
+
+    def test_url_with_newline_header_injection_rejected(self):
+        self.fakehttp(b"HTTP/1.1 200 OK\r\n\r\nHello.")
+        host = "localhost:7777?a=1 HTTP/1.1\r\nX-injected: header\r\nTEST: 123"
+        schemeless_url = "//" + host + ":8080/test/?test=a"
+        try:
+            # urllib quotes the URL so there is no injection.
+            resp = urllib.urlopen("http:" + schemeless_url)
+            self.assertNotIn(' ', resp.geturl())
+            self.assertNotIn('\r', resp.geturl())
+            self.assertNotIn('\n', resp.geturl())
+        finally:
+            self.unfakehttp()
+
     def test_read_bogus(self):
         # urlopen() should raise IOError for many error codes.
         self.fakehttp('''HTTP/1.1 401 Authentication Required
diff --git a/Lib/test/test_urllib2.py b/Lib/test/test_urllib2.py
index 6d24d5ddf8..9531818e16 100644
--- a/Lib/test/test_urllib2.py
+++ b/Lib/test/test_urllib2.py
@@ -15,6 +15,9 @@ try:
 except ImportError:
     ssl = None

+from test.test_urllib import FakeHTTPMixin
+
+
 # XXX
 # Request
 # CacheFTPHandler (hard to write)
@@ -1262,7 +1265,7 @@ class HandlerTests(unittest.TestCase):
         self.assertEqual(len(http_handler.requests), 1)
         self.assertFalse(http_handler.requests[0].has_header(auth_header))

-class MiscTests(unittest.TestCase):
+class MiscTests(unittest.TestCase, FakeHTTPMixin):

     def test_build_opener(self):
         class MyHTTPHandler(urllib2.HTTPHandler): pass
@@ -1317,6 +1320,52 @@ class MiscTests(unittest.TestCase):
             "Unsupported digest authentication algorithm 'invalid'"
         )

+    @unittest.skipUnless(ssl, "ssl module required")
+    def test_url_with_control_char_rejected(self):
+        for char_no in range(0, 0x21) + range(0x7f, 0x100):
+            char = chr(char_no)
+            schemeless_url = "//localhost:7777/test%s/" % char
+            self.fakehttp(b"HTTP/1.1 200 OK\r\n\r\nHello.")
+            try:
+                # We explicitly test urllib.request.urlopen() instead of the top
+                # level 'def urlopen()' function defined in this... (quite ugly)
+                # test suite. They use different url opening codepaths. Plain
+                # urlopen uses FancyURLOpener which goes via a codepath that
+                # calls urllib.parse.quote() on the URL which makes all of the
+                # above attempts at injection within the url _path_ safe.
+                escaped_char_repr = repr(char).replace('\\', r'\\')
+                InvalidURL = httplib.InvalidURL
+                with self.assertRaisesRegexp(
+                        InvalidURL, "contain control.*" + escaped_char_repr):
+                    urllib2.urlopen("http:" + schemeless_url)
+                with self.assertRaisesRegexp(
+                        InvalidURL, "contain control.*" + escaped_char_repr):
+                    urllib2.urlopen("https:" + schemeless_url)
+            finally:
+                self.unfakehttp()
+
+    @unittest.skipUnless(ssl, "ssl module required")
+    def test_url_with_newline_header_injection_rejected(self):
+        self.fakehttp(b"HTTP/1.1 200 OK\r\n\r\nHello.")
+        host = "localhost:7777?a=1 HTTP/1.1\r\nX-injected: header\r\nTEST: 123"
+        schemeless_url = "//" + host + ":8080/test/?test=a"
+        try:
+            # We explicitly test urllib2.urlopen() instead of the top
+            # level 'def urlopen()' function defined in this... (quite ugly)
+            # test suite. They use different url opening codepaths. Plain
+            # urlopen uses FancyURLOpener which goes via a codepath that
+            # calls urllib.parse.quote() on the URL which makes all of the
+            # above attempts at injection within the url _path_ safe.
+            InvalidURL = httplib.InvalidURL
+            with self.assertRaisesRegexp(
+                    InvalidURL, r"contain control.*\\r.*(found at least . .)"):
+                urllib2.urlopen("http:" + schemeless_url)
+            with self.assertRaisesRegexp(InvalidURL, r"contain control.*\\n"):
+                urllib2.urlopen("https:" + schemeless_url)
+        finally:
+            self.unfakehttp()
+
+

 class RequestTests(unittest.TestCase):

diff --git a/Lib/test/test_xmlrpc.py b/Lib/test/test_xmlrpc.py
index 36b3be67fd..90ccb30716 100644
--- a/Lib/test/test_xmlrpc.py
+++ b/Lib/test/test_xmlrpc.py
@@ -659,7 +659,13 @@ class SimpleServerTestCase(BaseServerTestCase):
     def test_partial_post(self):
         # Check that a partial POST doesn't make the server loop: issue #14001.
         conn = httplib.HTTPConnection(ADDR, PORT)
-        conn.request('POST', '/RPC2 HTTP/1.0\r\nContent-Length: 100\r\n\r\nbye')
+        conn.send('POST /RPC2 HTTP/1.0\r\n'
+                  'Content-Length: 100\r\n\r\n'
+                  'bye HTTP/1.1\r\n'
+                  'Host: %s:%s\r\n'
+                  'Accept-Encoding: identity\r\n'
+                  'Content-Length: 0\r\n\r\n'
+                  % (ADDR, PORT))
         conn.close()

 class SimpleServerEncodingTestCase(BaseServerTestCase):
diff --git a/Misc/NEWS.d/next/Security/2019-04-10-08-53-30.bpo-30458.51E-DA.rst b/Misc/NEWS.d/next/Security/2019-04-10-08-53-30.bpo-30458.51E-DA.rst
new file mode 100644
index 0000000000..47cb899df1
--- /dev/null
+++ b/Misc/NEWS.d/next/Security/2019-04-10-08-53-30.bpo-30458.51E-DA.rst
@@ -0,0 +1 @@
+Address CVE-2019-9740 by disallowing URL paths with embedded whitespace or control characters through into the underlying http client request. Such potentially malicious header injection URLs now cause an httplib.InvalidURL exception to be raised.
--
2.22.0.vfs.1.1.57.gbaf16c8

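The core of the check added to Lib/httplib.py above can be exercised on its own. The snippet below is illustrative only; it simply copies the regular expression from the patch.

import re

# Same pattern the patch adds to Lib/httplib.py: control characters, space,
# DEL and all non-ASCII bytes are disallowed in the request target.
_contains_disallowed_url_pchar_re = re.compile('[\x00-\x20\x7f-\xff]')

for path in ['/ok/path?q=1', '/bad path', '/bad\r\nX-injected: header']:
    match = _contains_disallowed_url_pchar_re.search(path)
    print('%r -> %s' % (path, 'rejected' if match else 'allowed'))

The first path is allowed; the other two are the injection shapes described by CVE-2019-9740 and CVE-2019-9747 and are now refused with httplib.InvalidURL.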
diff --git a/meta/recipes-devtools/python/python/bpo-35121-cve-2018-20852.patch b/meta/recipes-devtools/python/python/bpo-35121-cve-2018-20852.patch
new file mode 100644
index 0000000000..7ce7b1f9e0
--- /dev/null
+++ b/meta/recipes-devtools/python/python/bpo-35121-cve-2018-20852.patch
@@ -0,0 +1,127 @@
From 1bd50d351e508b8947e5813c5f925eb4b61c8d76 Mon Sep 17 00:00:00 2001
From: Xtreak <tir.karthi@gmail.com>
Date: Sat, 15 Jun 2019 20:59:43 +0530
Subject: [PATCH] [2.7] bpo-35121: prefix dot in domain for proper subdomain
 validation (GH-10258) (GH-13426)

This is a manual backport of ca7fe5063593958e5efdf90f068582837f07bd14 since 2.7 has `http.cookiejar` in `cookielib`

https://bugs.python.org/issue35121

CVE: CVE-2018-20852
Upstream-Status: Accepted

Signed-off-by: Dan Tran <dantran@microsoft.com>
---
 Lib/cookielib.py                              | 13 ++++++--
 Lib/test/test_cookielib.py                    | 30 +++++++++++++++++++
 .../2019-05-20-00-35-12.bpo-35121.RRi-HU.rst  |  4 +++
 3 files changed, 45 insertions(+), 2 deletions(-)
 create mode 100644 Misc/NEWS.d/next/Security/2019-05-20-00-35-12.bpo-35121.RRi-HU.rst

diff --git a/Lib/cookielib.py b/Lib/cookielib.py
index 2dd7c48728..0b471a42f2 100644
--- a/Lib/cookielib.py
+++ b/Lib/cookielib.py
@@ -1139,6 +1139,11 @@ class DefaultCookiePolicy(CookiePolicy):
         req_host, erhn = eff_request_host(request)
         domain = cookie.domain

+        if domain and not domain.startswith("."):
+            dotdomain = "." + domain
+        else:
+            dotdomain = domain
+
         # strict check of non-domain cookies: Mozilla does this, MSIE5 doesn't
         if (cookie.version == 0 and
             (self.strict_ns_domain & self.DomainStrictNonDomain) and
@@ -1151,7 +1156,7 @@ class DefaultCookiePolicy(CookiePolicy):
             _debug(" effective request-host name %s does not domain-match "
                    "RFC 2965 cookie domain %s", erhn, domain)
             return False
-        if cookie.version == 0 and not ("."+erhn).endswith(domain):
+        if cookie.version == 0 and not ("."+erhn).endswith(dotdomain):
             _debug(" request-host %s does not match Netscape cookie domain "
                    "%s", req_host, domain)
             return False
@@ -1165,7 +1170,11 @@ class DefaultCookiePolicy(CookiePolicy):
             req_host = "."+req_host
         if not erhn.startswith("."):
             erhn = "."+erhn
-        if not (req_host.endswith(domain) or erhn.endswith(domain)):
+        if domain and not domain.startswith("."):
+            dotdomain = "." + domain
+        else:
+            dotdomain = domain
+        if not (req_host.endswith(dotdomain) or erhn.endswith(dotdomain)):
             #_debug(" request domain %s does not match cookie domain %s",
             #       req_host, domain)
             return False
diff --git a/Lib/test/test_cookielib.py b/Lib/test/test_cookielib.py
index f2dd9727d1..7f7ff614d6 100644
--- a/Lib/test/test_cookielib.py
+++ b/Lib/test/test_cookielib.py
@@ -368,6 +368,7 @@ class CookieTests(TestCase):
             ("http://foo.bar.com/", ".foo.bar.com", True),
             ("http://foo.bar.com/", "foo.bar.com", True),
             ("http://foo.bar.com/", ".bar.com", True),
+            ("http://foo.bar.com/", "bar.com", True),
             ("http://foo.bar.com/", "com", True),
             ("http://foo.com/", "rhubarb.foo.com", False),
             ("http://foo.com/", ".foo.com", True),
@@ -378,6 +379,8 @@ class CookieTests(TestCase):
             ("http://foo/", "foo", True),
             ("http://foo/", "foo.local", True),
             ("http://foo/", ".local", True),
+            ("http://barfoo.com", ".foo.com", False),
+            ("http://barfoo.com", "foo.com", False),
             ]:
             request = urllib2.Request(url)
             r = pol.domain_return_ok(domain, request)
@@ -938,6 +941,33 @@ class CookieTests(TestCase):
         c.add_cookie_header(req)
         self.assertFalse(req.has_header("Cookie"))

+        c.clear()
+
+        pol.set_blocked_domains([])
+        req = Request("http://acme.com/")
+        res = FakeResponse(headers, "http://acme.com/")
+        cookies = c.make_cookies(res, req)
+        c.extract_cookies(res, req)
+        self.assertEqual(len(c), 1)
+
+        req = Request("http://acme.com/")
+        c.add_cookie_header(req)
+        self.assertTrue(req.has_header("Cookie"))
+
+        req = Request("http://badacme.com/")
+        c.add_cookie_header(req)
+        self.assertFalse(pol.return_ok(cookies[0], req))
+        self.assertFalse(req.has_header("Cookie"))
+
+        p = pol.set_blocked_domains(["acme.com"])
+        req = Request("http://acme.com/")
+        c.add_cookie_header(req)
+        self.assertFalse(req.has_header("Cookie"))
+
+        req = Request("http://badacme.com/")
+        c.add_cookie_header(req)
+        self.assertFalse(req.has_header("Cookie"))
+
     def test_secure(self):
         from cookielib import CookieJar, DefaultCookiePolicy

diff --git a/Misc/NEWS.d/next/Security/2019-05-20-00-35-12.bpo-35121.RRi-HU.rst b/Misc/NEWS.d/next/Security/2019-05-20-00-35-12.bpo-35121.RRi-HU.rst
new file mode 100644
index 0000000000..7725180616
--- /dev/null
+++ b/Misc/NEWS.d/next/Security/2019-05-20-00-35-12.bpo-35121.RRi-HU.rst
@@ -0,0 +1,4 @@
+Don't send cookies of domain A without Domain attribute to domain B when
+domain A is a suffix match of domain B while using a cookiejar with
+:class:`cookielib.DefaultCookiePolicy` policy. Patch by Karthikeyan
+Singaravelan.
--
2.22.0.vfs.1.1.57.gbaf16c8

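To see what the bpo-35121 change means in practice, the sketch below mirrors the new test rows added above. It is illustrative only, the hostnames are made up, and it assumes a Python 2.7 with this patch applied.

import cookielib
import urllib2

pol = cookielib.DefaultCookiePolicy()

# A cookie domain of "foo.com" still covers foo.com and its subdomains,
# but after the fix it no longer covers the unrelated host barfoo.com.
for url, domain in [("http://foo.bar.com/", "bar.com"),
                    ("http://barfoo.com/", "foo.com")]:
    req = urllib2.Request(url)
    print("%s vs %s -> %s" % (url, domain, pol.domain_return_ok(domain, req)))

With the patch the first pair prints True and the second prints False; before the patch both printed True because the suffix comparison did not require a dot boundary.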
diff --git a/meta/recipes-devtools/python/python_2.7.16.bb b/meta/recipes-devtools/python/python_2.7.16.bb
index 9c79faf9ed..16b1744704 100644
--- a/meta/recipes-devtools/python/python_2.7.16.bb
+++ b/meta/recipes-devtools/python/python_2.7.16.bb
@@ -35,6 +35,8 @@ SRC_URI += "\
            file://bpo-35907-cve-2019-9948-fix.patch \
            file://bpo-36216-cve-2019-9636.patch \
            file://bpo-36216-cve-2019-9636-fix.patch \
+           file://bpo-35121-cve-2018-20852.patch \
+           file://bpo-30458-cve-2019-9740.patch \
 "

 S = "${WORKDIR}/Python-${PV}"