author     Alexander Kanavin <alex.kanavin@gmail.com>              2019-11-18 15:28:40 +0100
committer  Richard Purdie <richard.purdie@linuxfoundation.org>     2019-11-21 23:08:19 +0000
commit     f681fd3b2fd69b6ecf71c317bc4a8e8786c0a38f (patch)
tree       006e8d9d1fe955de723c09bc398da2c11d5984d9 /meta/recipes-devtools
parent     d05b78b245e9a1b2743d3f649378dc3e3565b12d (diff)
download   poky-f681fd3b2fd69b6ecf71c317bc4a8e8786c0a38f.tar.gz
python: update to 2.7.17

Drop backports and rebase a couple of patches.
This is the second-to-last release of Python 2.x: upstream support ends on
1 January 2020, and there will be one final 2.x release after that.
Note that the only thing in oe-core that still needs Python 2.x is u-boot;
when the next u-boot update arrives, we should find out where that
component's Python 3 migration stands before merging the update.
(From OE-Core rev: 184b60eb905bb75ecc7a0c29a175e624d8555fac)
Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Diffstat (limited to 'meta/recipes-devtools')
14 files changed, 49 insertions, 904 deletions
diff --git a/meta/recipes-devtools/python/python-native/0001-python-native-fix-one-do_populate_sysroot-warning.patch b/meta/recipes-devtools/python/python-native/0001-python-native-fix-one-do_populate_sysroot-warning.patch
index 989818927d..707ee596fa 100644
--- a/meta/recipes-devtools/python/python-native/0001-python-native-fix-one-do_populate_sysroot-warning.patch
+++ b/meta/recipes-devtools/python/python-native/0001-python-native-fix-one-do_populate_sysroot-warning.patch
@@ -1,4 +1,4 @@ | |||
1 | From 12292444e1b3662b994bc223d92b8338fb0895ff Mon Sep 17 00:00:00 2001 | 1 | From 6cbb7529cf7ff0da3ca649fb3486facd9620d625 Mon Sep 17 00:00:00 2001 |
2 | From: Changqing Li <changqing.li@windriver.com> | 2 | From: Changqing Li <changqing.li@windriver.com> |
3 | Date: Thu, 25 Oct 2018 07:32:14 +0000 | 3 | Date: Thu, 25 Oct 2018 07:32:14 +0000 |
4 | Subject: [PATCH] python-native: fix one do_populate_sysroot warning | 4 | Subject: [PATCH] python-native: fix one do_populate_sysroot warning |
@@ -17,23 +17,24 @@ when do_populate_sysroot. use append to fix it. | |||
17 | Upstream-Status: Inappropriate [oe-specific] | 17 | Upstream-Status: Inappropriate [oe-specific] |
18 | 18 | ||
19 | Signed-off-by: Changqing Li <changqing.li@windriver.com> | 19 | Signed-off-by: Changqing Li <changqing.li@windriver.com> |
20 | |||
20 | --- | 21 | --- |
21 | setup.py | 2 +- | 22 | setup.py | 2 +- |
22 | 1 file changed, 1 insertion(+), 1 deletion(-) | 23 | 1 file changed, 1 insertion(+), 1 deletion(-) |
23 | 24 | ||
24 | diff --git a/setup.py b/setup.py | 25 | diff --git a/setup.py b/setup.py |
25 | index 7bf13ed..6c0f29b 100644 | 26 | index a2c8127..22f9e23 100644 |
26 | --- a/setup.py | 27 | --- a/setup.py |
27 | +++ b/setup.py | 28 | +++ b/setup.py |
28 | @@ -40,7 +40,7 @@ def add_dir_to_list(dirlist, dir): | 29 | @@ -47,7 +47,7 @@ def add_dir_to_list(dirlist, dir): |
29 | 1) 'dir' is not already in 'dirlist' | 30 | else: |
30 | 2) 'dir' actually exists, and is a directory.""" | 31 | dir_exists = os.path.isdir(dir) |
31 | if dir is not None and os.path.isdir(dir) and dir not in dirlist: | 32 | if dir_exists: |
32 | - dirlist.insert(0, dir) | 33 | - dirlist.insert(0, dir) |
33 | + dirlist.append(dir) | 34 | + dirlist.append(dir) |
34 | 35 | ||
35 | def macosx_sdk_root(): | 36 | MACOS_SDK_ROOT = None |
36 | """ | 37 | |
37 | -- | 38 | -- |
38 | 2.18.0 | 39 | 2.17.1 |
39 | 40 | ||
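
For context, the only functional change carried by this refreshed patch is the switch from insert(0, ...) to append() in setup.py's add_dir_to_list() helper. A minimal Python sketch of the patched behaviour (illustrative only, not the exact CPython 2.7.17 code; the guard condition is the one visible on the old side of the hunk):

    import os

    def add_dir_to_list(dirlist, dir):
        # Add 'dir' only if it exists and is not already present.  Upstream
        # inserts new directories at the front of the search list; the
        # OE-specific patch appends them instead, so directories discovered
        # later get the lowest search priority rather than the highest.
        if dir is not None and os.path.isdir(dir) and dir not in dirlist:
            dirlist.append(dir)        # patched behaviour
            # dirlist.insert(0, dir)   # original upstream behaviour

    include_dirs = ["/usr/include/python2.7"]
    add_dir_to_list(include_dirs, "/usr/include")
    print(include_dirs)
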
diff --git a/meta/recipes-devtools/python/python-native_2.7.16.bb b/meta/recipes-devtools/python/python-native_2.7.17.bb
index 90103af8be..936810d980 100644
--- a/meta/recipes-devtools/python/python-native_2.7.16.bb
+++ b/meta/recipes-devtools/python/python-native_2.7.17.bb
@@ -11,7 +11,7 @@ SRC_URI += "\ | |||
11 | file://nohostlibs.patch \ | 11 | file://nohostlibs.patch \ |
12 | file://multilib.patch \ | 12 | file://multilib.patch \ |
13 | file://add-md5module-support.patch \ | 13 | file://add-md5module-support.patch \ |
14 | file://builddir.patch \ | 14 | file://0001-python-Resolve-intermediate-staging-issues.patch \ |
15 | file://parallel-makeinst-create-bindir.patch \ | 15 | file://parallel-makeinst-create-bindir.patch \ |
16 | file://revert_use_of_sysconfigdata.patch \ | 16 | file://revert_use_of_sysconfigdata.patch \ |
17 | file://0001-python-native-fix-one-do_populate_sysroot-warning.patch \ | 17 | file://0001-python-native-fix-one-do_populate_sysroot-warning.patch \ |
diff --git a/meta/recipes-devtools/python/python.inc b/meta/recipes-devtools/python/python.inc
index 1462b779e9..a630c26e89 100644
--- a/meta/recipes-devtools/python/python.inc
+++ b/meta/recipes-devtools/python/python.inc
@@ -8,16 +8,10 @@ INC_PR = "r1" | |||
8 | LIC_FILES_CHKSUM = "file://LICENSE;md5=e466242989bd33c1bd2b6a526a742498" | 8 | LIC_FILES_CHKSUM = "file://LICENSE;md5=e466242989bd33c1bd2b6a526a742498" |
9 | 9 | ||
10 | SRC_URI = "http://www.python.org/ftp/python/${PV}/Python-${PV}.tar.xz \ | 10 | SRC_URI = "http://www.python.org/ftp/python/${PV}/Python-${PV}.tar.xz \ |
11 | file://bpo-35907-cve-2019-9948.patch \ | ||
12 | file://bpo-35907-cve-2019-9948-fix.patch \ | ||
13 | file://bpo-36216-cve-2019-9636.patch \ | ||
14 | file://bpo-36216-cve-2019-9636-fix.patch \ | ||
15 | file://CVE-2019-9740.patch \ | ||
16 | file://CVE-2018-20852.patch \ | ||
17 | " | 11 | " |
18 | 12 | ||
19 | SRC_URI[md5sum] = "30157d85a2c0479c09ea2cbe61f2aaf5" | 13 | SRC_URI[md5sum] = "b3b6d2c92f42a60667814358ab9f0cfd" |
20 | SRC_URI[sha256sum] = "f222ef602647eecb6853681156d32de4450a2c39f4de93bd5b20235f2e660ed7" | 14 | SRC_URI[sha256sum] = "4d43f033cdbd0aa7b7023c81b0e986fd11e653b5248dac9144d508f11812ba41" |
21 | 15 | ||
22 | # python recipe is actually python 2.x | 16 | # python recipe is actually python 2.x |
23 | # also, exclude pre-releases for both python 2.x and 3.x | 17 | # also, exclude pre-releases for both python 2.x and 3.x |
diff --git a/meta/recipes-devtools/python/python/0001-2.7-bpo-34155-Dont-parse-domains-containing-GH-13079.patch b/meta/recipes-devtools/python/python/0001-2.7-bpo-34155-Dont-parse-domains-containing-GH-13079.patch
deleted file mode 100644
index 5415472a35..0000000000
--- a/meta/recipes-devtools/python/python/0001-2.7-bpo-34155-Dont-parse-domains-containing-GH-13079.patch
+++ /dev/null
@@ -1,90 +0,0 @@ | |||
1 | From 532ed09c5454bb789a301bb6f1339a0818255610 Mon Sep 17 00:00:00 2001 | ||
2 | From: =?UTF-8?q?Roberto=20C=2E=20S=C3=A1nchez?= <roberto@connexer.com> | ||
3 | Date: Sat, 14 Sep 2019 13:26:38 -0400 | ||
4 | Subject: [PATCH] [2.7] bpo-34155: Dont parse domains containing @ (GH-13079) | ||
5 | (GH-16006) | ||
6 | |||
7 | This change skips parsing of email addresses where domains include a "@" character, which can be maliciously used since the local part is returned as a complete address. | ||
8 | |||
9 | (cherry picked from commit 8cb65d1381b027f0b09ee36bfed7f35bb4dec9a9) | ||
10 | |||
11 | Excludes changes to Lib/email/_header_value_parser.py, which did not | ||
12 | exist in 2.7. | ||
13 | |||
14 | Co-authored-by: jpic <jpic@users.noreply.github.com> | ||
15 | |||
16 | https://bugs.python.org/issue34155 | ||
17 | |||
18 | Upstream-Status: Backport [https://github.com/python/cpython/commit/8cb65d1381b027f0b09ee36bfed7f35bb4dec9a9] | ||
19 | |||
20 | CVE: CVE-2019-16056 | ||
21 | |||
22 | Signed-off-by: Chen Qi <Qi.Chen@windriver.com> | ||
23 | --- | ||
24 | Lib/email/_parseaddr.py | 11 ++++++++++- | ||
25 | Lib/email/test/test_email.py | 14 ++++++++++++++ | ||
26 | .../2019-05-04-13-33-37.bpo-34155.MJll68.rst | 1 + | ||
27 | 3 files changed, 25 insertions(+), 1 deletion(-) | ||
28 | create mode 100644 Misc/NEWS.d/next/Security/2019-05-04-13-33-37.bpo-34155.MJll68.rst | ||
29 | |||
30 | diff --git a/Lib/email/_parseaddr.py b/Lib/email/_parseaddr.py | ||
31 | index 690db2c22d..dc49d2e45a 100644 | ||
32 | --- a/Lib/email/_parseaddr.py | ||
33 | +++ b/Lib/email/_parseaddr.py | ||
34 | @@ -336,7 +336,12 @@ class AddrlistClass: | ||
35 | aslist.append('@') | ||
36 | self.pos += 1 | ||
37 | self.gotonext() | ||
38 | - return EMPTYSTRING.join(aslist) + self.getdomain() | ||
39 | + domain = self.getdomain() | ||
40 | + if not domain: | ||
41 | + # Invalid domain, return an empty address instead of returning a | ||
42 | + # local part to denote failed parsing. | ||
43 | + return EMPTYSTRING | ||
44 | + return EMPTYSTRING.join(aslist) + domain | ||
45 | |||
46 | def getdomain(self): | ||
47 | """Get the complete domain name from an address.""" | ||
48 | @@ -351,6 +356,10 @@ class AddrlistClass: | ||
49 | elif self.field[self.pos] == '.': | ||
50 | self.pos += 1 | ||
51 | sdlist.append('.') | ||
52 | + elif self.field[self.pos] == '@': | ||
53 | + # bpo-34155: Don't parse domains with two `@` like | ||
54 | + # `a@malicious.org@important.com`. | ||
55 | + return EMPTYSTRING | ||
56 | elif self.field[self.pos] in self.atomends: | ||
57 | break | ||
58 | else: | ||
59 | diff --git a/Lib/email/test/test_email.py b/Lib/email/test/test_email.py | ||
60 | index 4b4dee3d34..2efe44ac5a 100644 | ||
61 | --- a/Lib/email/test/test_email.py | ||
62 | +++ b/Lib/email/test/test_email.py | ||
63 | @@ -2306,6 +2306,20 @@ class TestMiscellaneous(TestEmailBase): | ||
64 | self.assertEqual(Utils.parseaddr('<>'), ('', '')) | ||
65 | self.assertEqual(Utils.formataddr(Utils.parseaddr('<>')), '') | ||
66 | |||
67 | + def test_parseaddr_multiple_domains(self): | ||
68 | + self.assertEqual( | ||
69 | + Utils.parseaddr('a@b@c'), | ||
70 | + ('', '') | ||
71 | + ) | ||
72 | + self.assertEqual( | ||
73 | + Utils.parseaddr('a@b.c@c'), | ||
74 | + ('', '') | ||
75 | + ) | ||
76 | + self.assertEqual( | ||
77 | + Utils.parseaddr('a@172.17.0.1@c'), | ||
78 | + ('', '') | ||
79 | + ) | ||
80 | + | ||
81 | def test_noquote_dump(self): | ||
82 | self.assertEqual( | ||
83 | Utils.formataddr(('A Silly Person', 'person@dom.ain')), | ||
84 | diff --git a/Misc/NEWS.d/next/Security/2019-05-04-13-33-37.bpo-34155.MJll68.rst b/Misc/NEWS.d/next/Security/2019-05-04-13-33-37.bpo-34155.MJll68.rst | ||
85 | new file mode 100644 | ||
86 | index 0000000000..50292e29ed | ||
87 | --- /dev/null | ||
88 | +++ b/Misc/NEWS.d/next/Security/2019-05-04-13-33-37.bpo-34155.MJll68.rst | ||
89 | @@ -0,0 +1 @@ | ||
90 | +Fix parsing of invalid email addresses with more than one ``@`` (e.g. a@b@c.com.) to not return the part before 2nd ``@`` as valid email address. Patch by maxking & jpic. | ||
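
The backport removed here is already contained in Python 2.7.17 (and in current Python 3 releases). A short demonstration of the behaviour it enforces, assuming an interpreter that includes the bpo-34155 fix:

    from email.utils import parseaddr

    # Addresses whose domain contains a second '@' are rejected outright
    # instead of being truncated to the part before the second '@'; the
    # tests above expect ('', '') for the first two candidates.
    for candidate in ("a@b@c", "a@malicious.org@important.com", "user@example.com"):
        print(candidate, "->", parseaddr(candidate))
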
diff --git a/meta/recipes-devtools/python/python/0001-2.7-bpo-38243-Escape-the-server-title-of-DocXMLRPCSe.patch b/meta/recipes-devtools/python/python/0001-2.7-bpo-38243-Escape-the-server-title-of-DocXMLRPCSe.patch
deleted file mode 100644
index 3025cf7bc8..0000000000
--- a/meta/recipes-devtools/python/python/0001-2.7-bpo-38243-Escape-the-server-title-of-DocXMLRPCSe.patch
+++ /dev/null
@@ -1,101 +0,0 @@ | |||
1 | From b161c89c8bd66fe928192e21364678c8e9b8fcc0 Mon Sep 17 00:00:00 2001 | ||
2 | From: Dong-hee Na <donghee.na92@gmail.com> | ||
3 | Date: Tue, 1 Oct 2019 19:58:01 +0900 | ||
4 | Subject: [PATCH] [2.7] bpo-38243: Escape the server title of DocXMLRPCServer | ||
5 | (GH-16447) | ||
6 | |||
7 | Escape the server title of DocXMLRPCServer.DocXMLRPCServer | ||
8 | when rendering the document page as HTML. | ||
9 | |||
10 | CVE: CVE-2019-16935 | ||
11 | |||
12 | Upstream-Status: Backport [https://github.com/python/cpython/pull/16447/commits/b41cde823d026f2adc21ef14b1c2e92b1006de06] | ||
13 | |||
14 | Signed-off-by: Chen Qi <Qi.Chen@windriver.com> | ||
15 | --- | ||
16 | Lib/DocXMLRPCServer.py | 13 +++++++++++- | ||
17 | Lib/test/test_docxmlrpc.py | 20 +++++++++++++++++++ | ||
18 | .../2019-09-25-13-21-09.bpo-38243.1pfz24.rst | 3 +++ | ||
19 | 3 files changed, 35 insertions(+), 1 deletion(-) | ||
20 | create mode 100644 Misc/NEWS.d/next/Security/2019-09-25-13-21-09.bpo-38243.1pfz24.rst | ||
21 | |||
22 | diff --git a/Lib/DocXMLRPCServer.py b/Lib/DocXMLRPCServer.py | ||
23 | index 4064ec2e48..90b037dd35 100644 | ||
24 | --- a/Lib/DocXMLRPCServer.py | ||
25 | +++ b/Lib/DocXMLRPCServer.py | ||
26 | @@ -20,6 +20,16 @@ from SimpleXMLRPCServer import (SimpleXMLRPCServer, | ||
27 | CGIXMLRPCRequestHandler, | ||
28 | resolve_dotted_attribute) | ||
29 | |||
30 | + | ||
31 | +def _html_escape_quote(s): | ||
31 | + s = s.replace("&", "&amp;") # Must be done first! | ||
32 | + s = s.replace("<", "&lt;") | ||
33 | + s = s.replace(">", "&gt;") | ||
34 | + s = s.replace('"', "&quot;") | ||
35 | + s = s.replace('\'', "&#x27;") | ||
37 | + return s | ||
38 | + | ||
39 | + | ||
40 | class ServerHTMLDoc(pydoc.HTMLDoc): | ||
41 | """Class used to generate pydoc HTML document for a server""" | ||
42 | |||
43 | @@ -210,7 +220,8 @@ class XMLRPCDocGenerator: | ||
44 | methods | ||
45 | ) | ||
46 | |||
47 | - return documenter.page(self.server_title, documentation) | ||
48 | + title = _html_escape_quote(self.server_title) | ||
49 | + return documenter.page(title, documentation) | ||
50 | |||
51 | class DocXMLRPCRequestHandler(SimpleXMLRPCRequestHandler): | ||
52 | """XML-RPC and documentation request handler class. | ||
53 | diff --git a/Lib/test/test_docxmlrpc.py b/Lib/test/test_docxmlrpc.py | ||
54 | index 4dff4159e2..c45b892b8b 100644 | ||
55 | --- a/Lib/test/test_docxmlrpc.py | ||
56 | +++ b/Lib/test/test_docxmlrpc.py | ||
57 | @@ -1,5 +1,6 @@ | ||
58 | from DocXMLRPCServer import DocXMLRPCServer | ||
59 | import httplib | ||
60 | +import re | ||
61 | import sys | ||
62 | from test import test_support | ||
63 | threading = test_support.import_module('threading') | ||
64 | @@ -176,6 +177,25 @@ class DocXMLRPCHTTPGETServer(unittest.TestCase): | ||
65 | self.assertIn("""Try self.<strong>add</strong>, too.""", | ||
66 | response.read()) | ||
67 | |||
68 | + def test_server_title_escape(self): | ||
69 | + """Test that the server title and documentation | ||
70 | + are escaped for HTML. | ||
71 | + """ | ||
72 | + self.serv.set_server_title('test_title<script>') | ||
73 | + self.serv.set_server_documentation('test_documentation<script>') | ||
74 | + self.assertEqual('test_title<script>', self.serv.server_title) | ||
75 | + self.assertEqual('test_documentation<script>', | ||
76 | + self.serv.server_documentation) | ||
77 | + | ||
78 | + generated = self.serv.generate_html_documentation() | ||
79 | + title = re.search(r'<title>(.+?)</title>', generated).group() | ||
80 | + documentation = re.search(r'<p><tt>(.+?)</tt></p>', generated).group() | ||
81 | + self.assertEqual('<title>Python: test_title&lt;script&gt;</title>', | ||
82 | + title) | ||
83 | + self.assertEqual('<p><tt>test_documentation&lt;script&gt;</tt></p>', | ||
84 | + documentation) | ||
85 | + | ||
86 | + | ||
87 | def test_main(): | ||
88 | test_support.run_unittest(DocXMLRPCHTTPGETServer) | ||
89 | |||
90 | diff --git a/Misc/NEWS.d/next/Security/2019-09-25-13-21-09.bpo-38243.1pfz24.rst b/Misc/NEWS.d/next/Security/2019-09-25-13-21-09.bpo-38243.1pfz24.rst | ||
91 | new file mode 100644 | ||
92 | index 0000000000..8f02baed9e | ||
93 | --- /dev/null | ||
94 | +++ b/Misc/NEWS.d/next/Security/2019-09-25-13-21-09.bpo-38243.1pfz24.rst | ||
95 | @@ -0,0 +1,3 @@ | ||
96 | +Escape the server title of :class:`DocXMLRPCServer.DocXMLRPCServer` | ||
97 | +when rendering the document page as HTML. | ||
98 | +(Contributed by Dong-hee Na in :issue:`38243`.) | ||
99 | -- | ||
100 | 2.17.1 | ||
101 | |||
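
The escaping helper added by this (now upstream) patch is small enough to reproduce standalone. A runnable sketch of the same logic:

    def html_escape_quote(s):
        # Replace '&' first so the entities produced by the later
        # substitutions are not escaped a second time.
        s = s.replace("&", "&amp;")
        s = s.replace("<", "&lt;")
        s = s.replace(">", "&gt;")
        s = s.replace('"', "&quot;")
        s = s.replace("'", "&#x27;")
        return s

    # A server title such as 'test_title<script>' is rendered harmless:
    print(html_escape_quote("test_title<script>"))  # test_title&lt;script&gt;
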
diff --git a/meta/recipes-devtools/python/python/builddir.patch b/meta/recipes-devtools/python/python/0001-python-Resolve-intermediate-staging-issues.patch
index ad629a022e..2ff2ccc43d 100644
--- a/meta/recipes-devtools/python/python/builddir.patch
+++ b/meta/recipes-devtools/python/python/0001-python-Resolve-intermediate-staging-issues.patch
@@ -1,5 +1,10 @@ | |||
1 | When cross compiling python, we used to need to install the Makefile, pyconfig.h | 1 | From 77bcb3238b2853d511714544e0f84a37be6c79bf Mon Sep 17 00:00:00 2001 |
2 | and the python library to their final location before being able to compile the | 2 | From: Richard Purdie <richard.purdie@linuxfoundation.org> |
3 | Date: Wed, 14 Nov 2012 14:31:24 +0000 | ||
4 | Subject: [PATCH] python: Resolve intermediate staging issues | ||
5 | |||
6 | When cross compiling python, we used to need to install the Makefile, pyconfig.h | ||
7 | and the python library to their final location before being able to compile the | ||
3 | rest of python. This change allows us to point python at its own source when | 8 | rest of python. This change allows us to point python at its own source when |
4 | building, avoiding a variety of sysroot staging issues and simplifying the main | 9 | building, avoiding a variety of sysroot staging issues and simplifying the main |
5 | python recipe. | 10 | python recipe. |
@@ -7,10 +12,29 @@ python recipe. | |||
7 | Upstream-Status: Inappropriate | 12 | Upstream-Status: Inappropriate |
8 | RP 2012/11/13 | 13 | RP 2012/11/13 |
9 | 14 | ||
10 | Index: Python-2.7.9/Lib/sysconfig.py | 15 | --- |
11 | =================================================================== | 16 | Lib/distutils/sysconfig.py | 3 +++ |
12 | --- Python-2.7.9.orig/Lib/sysconfig.py | 17 | Lib/sysconfig.py | 5 ++++- |
13 | +++ Python-2.7.9/Lib/sysconfig.py | 18 | 2 files changed, 7 insertions(+), 1 deletion(-) |
19 | |||
20 | diff --git a/Lib/distutils/sysconfig.py b/Lib/distutils/sysconfig.py | ||
21 | index 2f4b8ca..15bceb5 100644 | ||
22 | --- a/Lib/distutils/sysconfig.py | ||
23 | +++ b/Lib/distutils/sysconfig.py | ||
24 | @@ -31,6 +31,9 @@ else: | ||
25 | # sys.executable can be empty if argv[0] has been changed and Python is | ||
26 | # unable to retrieve the real program name | ||
27 | project_base = os.getcwd() | ||
28 | +_PYTHONBUILDDIR = os.environ.get("PYTHONBUILDDIR", None) | ||
29 | +if _PYTHONBUILDDIR: | ||
30 | + project_base = _PYTHONBUILDDIR | ||
31 | if os.name == "nt" and "pcbuild" in project_base[-8:].lower(): | ||
32 | project_base = os.path.abspath(os.path.join(project_base, os.path.pardir)) | ||
33 | # PC/VS7.1 | ||
34 | diff --git a/Lib/sysconfig.py b/Lib/sysconfig.py | ||
35 | index 9c8350d..bddbe2e 100644 | ||
36 | --- a/Lib/sysconfig.py | ||
37 | +++ b/Lib/sysconfig.py | ||
14 | @@ -93,6 +93,7 @@ _PREFIX = os.path.normpath(sys.prefix) | 38 | @@ -93,6 +93,7 @@ _PREFIX = os.path.normpath(sys.prefix) |
15 | _EXEC_PREFIX = os.path.normpath(sys.exec_prefix) | 39 | _EXEC_PREFIX = os.path.normpath(sys.exec_prefix) |
16 | _CONFIG_VARS = None | 40 | _CONFIG_VARS = None |
@@ -30,17 +54,6 @@ Index: Python-2.7.9/Lib/sysconfig.py | |||
30 | _PROJECT_BASE = os.path.dirname(_safe_realpath(sys.executable)) | 54 | _PROJECT_BASE = os.path.dirname(_safe_realpath(sys.executable)) |
31 | else: | 55 | else: |
32 | # sys.executable can be empty if argv[0] has been changed and Python is | 56 | # sys.executable can be empty if argv[0] has been changed and Python is |
33 | Index: Python-2.7.9/Lib/distutils/sysconfig.py | 57 | -- |
34 | =================================================================== | 58 | 2.17.1 |
35 | --- Python-2.7.9.orig/Lib/distutils/sysconfig.py | 59 | |
36 | +++ Python-2.7.9/Lib/distutils/sysconfig.py | ||
37 | @@ -26,6 +26,9 @@ EXEC_PREFIX = os.path.normpath(sys.exec_ | ||
38 | # live in project/PCBuild9. If we're dealing with an x64 Windows build, | ||
39 | # it'll live in project/PCbuild/amd64. | ||
40 | project_base = os.path.dirname(os.path.abspath(sys.executable)) | ||
41 | +_PYTHONBUILDDIR = os.environ.get("PYTHONBUILDDIR", None) | ||
42 | +if _PYTHONBUILDDIR: | ||
43 | + project_base = _PYTHONBUILDDIR | ||
44 | if os.name == "nt" and "pcbuild" in project_base[-8:].lower(): | ||
45 | project_base = os.path.abspath(os.path.join(project_base, os.path.pardir)) | ||
46 | # PC/VS7.1 | ||
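
Both hunks of the renamed patch apply the same pattern: if a PYTHONBUILDDIR environment variable is set, use it as the project base instead of the directory derived from sys.executable. A minimal standalone sketch of that override (not the sysconfig code itself):

    import os
    import sys

    # Default: derive the project base from the running interpreter.
    project_base = os.path.dirname(os.path.abspath(sys.executable))

    # OE override: point Python at its own build directory while staging.
    _PYTHONBUILDDIR = os.environ.get("PYTHONBUILDDIR", None)
    if _PYTHONBUILDDIR:
        project_base = _PYTHONBUILDDIR

    print(project_base)
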
diff --git a/meta/recipes-devtools/python/python/CVE-2018-20852.patch b/meta/recipes-devtools/python/python/CVE-2018-20852.patch
deleted file mode 100644
index 23c784a210..0000000000
--- a/meta/recipes-devtools/python/python/CVE-2018-20852.patch
+++ /dev/null
@@ -1,123 +0,0 @@ | |||
1 | From 979daae300916adb399ab5b51410b6ebd0888f13 Mon Sep 17 00:00:00 2001 | ||
2 | From: Xtreak <tir.karthi@gmail.com> | ||
3 | Date: Sat, 15 Jun 2019 20:59:43 +0530 | ||
4 | Subject: [PATCH] [2.7] bpo-35121: prefix dot in domain for proper subdomain | ||
5 | validation (GH-10258) (GH-13426) | ||
6 | |||
7 | This is a manual backport of ca7fe5063593958e5efdf90f068582837f07bd14 since 2.7 has `http.cookiejar` in `cookielib` | ||
8 | |||
9 | |||
10 | https://bugs.python.org/issue35121 | ||
11 | CVE: CVE-2018-20852 | ||
12 | Upstream-Status: Backport [https://github.com/python/cpython/pull/13426] | ||
13 | Signed-off-by: Anuj Mittal <anuj.mittal@intel.com> | ||
14 | --- | ||
15 | Lib/cookielib.py | 13 ++++++-- | ||
16 | Lib/test/test_cookielib.py | 30 +++++++++++++++++++ | ||
17 | .../2019-05-20-00-35-12.bpo-35121.RRi-HU.rst | 4 +++ | ||
18 | 3 files changed, 45 insertions(+), 2 deletions(-) | ||
19 | create mode 100644 Misc/NEWS.d/next/Security/2019-05-20-00-35-12.bpo-35121.RRi-HU.rst | ||
20 | |||
21 | diff --git a/Lib/cookielib.py b/Lib/cookielib.py | ||
22 | index 2dd7c48728e0..0b471a42f296 100644 | ||
23 | --- a/Lib/cookielib.py | ||
24 | +++ b/Lib/cookielib.py | ||
25 | @@ -1139,6 +1139,11 @@ def return_ok_domain(self, cookie, request): | ||
26 | req_host, erhn = eff_request_host(request) | ||
27 | domain = cookie.domain | ||
28 | |||
29 | + if domain and not domain.startswith("."): | ||
30 | + dotdomain = "." + domain | ||
31 | + else: | ||
32 | + dotdomain = domain | ||
33 | + | ||
34 | # strict check of non-domain cookies: Mozilla does this, MSIE5 doesn't | ||
35 | if (cookie.version == 0 and | ||
36 | (self.strict_ns_domain & self.DomainStrictNonDomain) and | ||
37 | @@ -1151,7 +1156,7 @@ def return_ok_domain(self, cookie, request): | ||
38 | _debug(" effective request-host name %s does not domain-match " | ||
39 | "RFC 2965 cookie domain %s", erhn, domain) | ||
40 | return False | ||
41 | - if cookie.version == 0 and not ("."+erhn).endswith(domain): | ||
42 | + if cookie.version == 0 and not ("."+erhn).endswith(dotdomain): | ||
43 | _debug(" request-host %s does not match Netscape cookie domain " | ||
44 | "%s", req_host, domain) | ||
45 | return False | ||
46 | @@ -1165,7 +1170,11 @@ def domain_return_ok(self, domain, request): | ||
47 | req_host = "."+req_host | ||
48 | if not erhn.startswith("."): | ||
49 | erhn = "."+erhn | ||
50 | - if not (req_host.endswith(domain) or erhn.endswith(domain)): | ||
51 | + if domain and not domain.startswith("."): | ||
52 | + dotdomain = "." + domain | ||
53 | + else: | ||
54 | + dotdomain = domain | ||
55 | + if not (req_host.endswith(dotdomain) or erhn.endswith(dotdomain)): | ||
56 | #_debug(" request domain %s does not match cookie domain %s", | ||
57 | # req_host, domain) | ||
58 | return False | ||
59 | diff --git a/Lib/test/test_cookielib.py b/Lib/test/test_cookielib.py | ||
60 | index f2dd9727d137..7f7ff614d61d 100644 | ||
61 | --- a/Lib/test/test_cookielib.py | ||
62 | +++ b/Lib/test/test_cookielib.py | ||
63 | @@ -368,6 +368,7 @@ def test_domain_return_ok(self): | ||
64 | ("http://foo.bar.com/", ".foo.bar.com", True), | ||
65 | ("http://foo.bar.com/", "foo.bar.com", True), | ||
66 | ("http://foo.bar.com/", ".bar.com", True), | ||
67 | + ("http://foo.bar.com/", "bar.com", True), | ||
68 | ("http://foo.bar.com/", "com", True), | ||
69 | ("http://foo.com/", "rhubarb.foo.com", False), | ||
70 | ("http://foo.com/", ".foo.com", True), | ||
71 | @@ -378,6 +379,8 @@ def test_domain_return_ok(self): | ||
72 | ("http://foo/", "foo", True), | ||
73 | ("http://foo/", "foo.local", True), | ||
74 | ("http://foo/", ".local", True), | ||
75 | + ("http://barfoo.com", ".foo.com", False), | ||
76 | + ("http://barfoo.com", "foo.com", False), | ||
77 | ]: | ||
78 | request = urllib2.Request(url) | ||
79 | r = pol.domain_return_ok(domain, request) | ||
80 | @@ -938,6 +941,33 @@ def test_domain_block(self): | ||
81 | c.add_cookie_header(req) | ||
82 | self.assertFalse(req.has_header("Cookie")) | ||
83 | |||
84 | + c.clear() | ||
85 | + | ||
86 | + pol.set_blocked_domains([]) | ||
87 | + req = Request("http://acme.com/") | ||
88 | + res = FakeResponse(headers, "http://acme.com/") | ||
89 | + cookies = c.make_cookies(res, req) | ||
90 | + c.extract_cookies(res, req) | ||
91 | + self.assertEqual(len(c), 1) | ||
92 | + | ||
93 | + req = Request("http://acme.com/") | ||
94 | + c.add_cookie_header(req) | ||
95 | + self.assertTrue(req.has_header("Cookie")) | ||
96 | + | ||
97 | + req = Request("http://badacme.com/") | ||
98 | + c.add_cookie_header(req) | ||
99 | + self.assertFalse(pol.return_ok(cookies[0], req)) | ||
100 | + self.assertFalse(req.has_header("Cookie")) | ||
101 | + | ||
102 | + p = pol.set_blocked_domains(["acme.com"]) | ||
103 | + req = Request("http://acme.com/") | ||
104 | + c.add_cookie_header(req) | ||
105 | + self.assertFalse(req.has_header("Cookie")) | ||
106 | + | ||
107 | + req = Request("http://badacme.com/") | ||
108 | + c.add_cookie_header(req) | ||
109 | + self.assertFalse(req.has_header("Cookie")) | ||
110 | + | ||
111 | def test_secure(self): | ||
112 | from cookielib import CookieJar, DefaultCookiePolicy | ||
113 | |||
114 | diff --git a/Misc/NEWS.d/next/Security/2019-05-20-00-35-12.bpo-35121.RRi-HU.rst b/Misc/NEWS.d/next/Security/2019-05-20-00-35-12.bpo-35121.RRi-HU.rst | ||
115 | new file mode 100644 | ||
116 | index 000000000000..77251806163b | ||
117 | --- /dev/null | ||
118 | +++ b/Misc/NEWS.d/next/Security/2019-05-20-00-35-12.bpo-35121.RRi-HU.rst | ||
119 | @@ -0,0 +1,4 @@ | ||
120 | +Don't send cookies of domain A without Domain attribute to domain B when | ||
121 | +domain A is a suffix match of domain B while using a cookiejar with | ||
122 | +:class:`cookielib.DefaultCookiePolicy` policy. Patch by Karthikeyan | ||
123 | +Singaravelan. | ||
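
The core of the dropped CVE-2018-20852 backport is a dot-prefix check before the suffix comparison, so a host whose name merely ends in the cookie domain no longer matches. A reduced sketch of that check (standalone helper, not the cookielib code):

    def domain_return_ok(request_host, cookie_domain):
        # Prefix the cookie domain with '.' before the endswith() test, so
        # 'barfoo.com' no longer matches a cookie set for 'foo.com', while
        # 'foo.bar.com' still matches 'bar.com'.
        if cookie_domain and not cookie_domain.startswith("."):
            dotdomain = "." + cookie_domain
        else:
            dotdomain = cookie_domain
        return ("." + request_host).endswith(dotdomain)

    print(domain_return_ok("foo.bar.com", "bar.com"))  # True
    print(domain_return_ok("barfoo.com", "foo.com"))   # False (True before the fix)
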
diff --git a/meta/recipes-devtools/python/python/CVE-2019-9740.patch b/meta/recipes-devtools/python/python/CVE-2019-9740.patch
deleted file mode 100644
index 95f43e0387..0000000000
--- a/meta/recipes-devtools/python/python/CVE-2019-9740.patch
+++ /dev/null
@@ -1,216 +0,0 @@ | |||
1 | From bb8071a4cae5ab3fe321481dd3d73662ffb26052 Mon Sep 17 00:00:00 2001 | ||
2 | From: Victor Stinner <victor.stinner@gmail.com> | ||
3 | Date: Tue, 21 May 2019 15:12:33 +0200 | ||
4 | Subject: [PATCH] bpo-30458: Disallow control chars in http URLs (GH-12755) | ||
5 | (GH-13154) (GH-13315) | ||
6 | MIME-Version: 1.0 | ||
7 | Content-Type: text/plain; charset=UTF-8 | ||
8 | Content-Transfer-Encoding: 8bit | ||
9 | |||
10 | Disallow control chars in http URLs in urllib2.urlopen. This | ||
11 | addresses a potential security problem for applications that do not | ||
12 | sanity check their URLs where http request headers could be injected. | ||
13 | |||
14 | Disable https related urllib tests on a build without ssl (GH-13032) | ||
15 | These tests require an SSL enabled build. Skip these tests when | ||
16 | python is built without SSL to fix test failures. | ||
17 | |||
18 | Use httplib.InvalidURL instead of ValueError as the new error case's | ||
19 | exception. (GH-13044) | ||
20 | |||
21 | Backport Co-Authored-By: Miro Hrončok <miro@hroncok.cz> | ||
22 | |||
23 | (cherry picked from commit 7e200e0763f5b71c199aaf98bd5588f291585619) | ||
24 | |||
25 | Notes on backport to Python 2.7: | ||
26 | |||
27 | * test_urllib tests urllib.urlopen() which quotes the URL and so is | ||
28 | not vulerable to HTTP Header Injection. | ||
29 | * Add tests to test_urllib2 on urllib2.urlopen(). | ||
30 | * Reject non-ASCII characters: range 0x80-0xff. | ||
31 | |||
32 | Upstream-Status: Backport | ||
33 | CVE: CVE-2019-9740 | ||
34 | CVE: CVE-2019-9947 | ||
35 | Signed-off-by: Anuj Mittal <anuj.mittal@intel.com> | ||
36 | --- | ||
37 | Lib/httplib.py | 16 ++++++ | ||
38 | Lib/test/test_urllib.py | 25 +++++++++ | ||
39 | Lib/test/test_urllib2.py | 51 ++++++++++++++++++- | ||
40 | Lib/test/test_xmlrpc.py | 8 ++- | ||
41 | .../2019-04-10-08-53-30.bpo-30458.51E-DA.rst | 1 + | ||
42 | 5 files changed, 99 insertions(+), 2 deletions(-) | ||
43 | create mode 100644 Misc/NEWS.d/next/Security/2019-04-10-08-53-30.bpo-30458.51E-DA.rst | ||
44 | |||
45 | diff --git a/Lib/httplib.py b/Lib/httplib.py | ||
46 | index 60a8fb4e355f..1b41c346e090 100644 | ||
47 | --- a/Lib/httplib.py | ||
48 | +++ b/Lib/httplib.py | ||
49 | @@ -247,6 +247,16 @@ | ||
50 | _is_legal_header_name = re.compile(r'\A[^:\s][^:\r\n]*\Z').match | ||
51 | _is_illegal_header_value = re.compile(r'\n(?![ \t])|\r(?![ \t\n])').search | ||
52 | |||
53 | +# These characters are not allowed within HTTP URL paths. | ||
54 | +# See https://tools.ietf.org/html/rfc3986#section-3.3 and the | ||
55 | +# https://tools.ietf.org/html/rfc3986#appendix-A pchar definition. | ||
56 | +# Prevents CVE-2019-9740. Includes control characters such as \r\n. | ||
57 | +# Restrict non-ASCII characters above \x7f (0x80-0xff). | ||
58 | +_contains_disallowed_url_pchar_re = re.compile('[\x00-\x20\x7f-\xff]') | ||
59 | +# Arguably only these _should_ allowed: | ||
60 | +# _is_allowed_url_pchars_re = re.compile(r"^[/!$&'()*+,;=:@%a-zA-Z0-9._~-]+$") | ||
61 | +# We are more lenient for assumed real world compatibility purposes. | ||
62 | + | ||
63 | # We always set the Content-Length header for these methods because some | ||
64 | # servers will otherwise respond with a 411 | ||
65 | _METHODS_EXPECTING_BODY = {'PATCH', 'POST', 'PUT'} | ||
66 | @@ -927,6 +937,12 @@ def putrequest(self, method, url, skip_host=0, skip_accept_encoding=0): | ||
67 | self._method = method | ||
68 | if not url: | ||
69 | url = '/' | ||
70 | + # Prevent CVE-2019-9740. | ||
71 | + match = _contains_disallowed_url_pchar_re.search(url) | ||
72 | + if match: | ||
73 | + raise InvalidURL("URL can't contain control characters. %r " | ||
74 | + "(found at least %r)" | ||
75 | + % (url, match.group())) | ||
76 | hdr = '%s %s %s' % (method, url, self._http_vsn_str) | ||
77 | |||
78 | self._output(hdr) | ||
79 | diff --git a/Lib/test/test_urllib.py b/Lib/test/test_urllib.py | ||
80 | index 1ce9201c0693..d7778d4194f3 100644 | ||
81 | --- a/Lib/test/test_urllib.py | ||
82 | +++ b/Lib/test/test_urllib.py | ||
83 | @@ -257,6 +257,31 @@ def test_url_fragment(self): | ||
84 | finally: | ||
85 | self.unfakehttp() | ||
86 | |||
87 | + def test_url_with_control_char_rejected(self): | ||
88 | + for char_no in range(0, 0x21) + range(0x7f, 0x100): | ||
89 | + char = chr(char_no) | ||
90 | + schemeless_url = "//localhost:7777/test%s/" % char | ||
91 | + self.fakehttp(b"HTTP/1.1 200 OK\r\n\r\nHello.") | ||
92 | + try: | ||
93 | + # urllib quotes the URL so there is no injection. | ||
94 | + resp = urllib.urlopen("http:" + schemeless_url) | ||
95 | + self.assertNotIn(char, resp.geturl()) | ||
96 | + finally: | ||
97 | + self.unfakehttp() | ||
98 | + | ||
99 | + def test_url_with_newline_header_injection_rejected(self): | ||
100 | + self.fakehttp(b"HTTP/1.1 200 OK\r\n\r\nHello.") | ||
101 | + host = "localhost:7777?a=1 HTTP/1.1\r\nX-injected: header\r\nTEST: 123" | ||
102 | + schemeless_url = "//" + host + ":8080/test/?test=a" | ||
103 | + try: | ||
104 | + # urllib quotes the URL so there is no injection. | ||
105 | + resp = urllib.urlopen("http:" + schemeless_url) | ||
106 | + self.assertNotIn(' ', resp.geturl()) | ||
107 | + self.assertNotIn('\r', resp.geturl()) | ||
108 | + self.assertNotIn('\n', resp.geturl()) | ||
109 | + finally: | ||
110 | + self.unfakehttp() | ||
111 | + | ||
112 | def test_read_bogus(self): | ||
113 | # urlopen() should raise IOError for many error codes. | ||
114 | self.fakehttp('''HTTP/1.1 401 Authentication Required | ||
115 | diff --git a/Lib/test/test_urllib2.py b/Lib/test/test_urllib2.py | ||
116 | index 6d24d5ddf83c..9531818e16b2 100644 | ||
117 | --- a/Lib/test/test_urllib2.py | ||
118 | +++ b/Lib/test/test_urllib2.py | ||
119 | @@ -15,6 +15,9 @@ | ||
120 | except ImportError: | ||
121 | ssl = None | ||
122 | |||
123 | +from test.test_urllib import FakeHTTPMixin | ||
124 | + | ||
125 | + | ||
126 | # XXX | ||
127 | # Request | ||
128 | # CacheFTPHandler (hard to write) | ||
129 | @@ -1262,7 +1265,7 @@ def _test_basic_auth(self, opener, auth_handler, auth_header, | ||
130 | self.assertEqual(len(http_handler.requests), 1) | ||
131 | self.assertFalse(http_handler.requests[0].has_header(auth_header)) | ||
132 | |||
133 | -class MiscTests(unittest.TestCase): | ||
134 | +class MiscTests(unittest.TestCase, FakeHTTPMixin): | ||
135 | |||
136 | def test_build_opener(self): | ||
137 | class MyHTTPHandler(urllib2.HTTPHandler): pass | ||
138 | @@ -1317,6 +1320,52 @@ def test_unsupported_algorithm(self): | ||
139 | "Unsupported digest authentication algorithm 'invalid'" | ||
140 | ) | ||
141 | |||
142 | + @unittest.skipUnless(ssl, "ssl module required") | ||
143 | + def test_url_with_control_char_rejected(self): | ||
144 | + for char_no in range(0, 0x21) + range(0x7f, 0x100): | ||
145 | + char = chr(char_no) | ||
146 | + schemeless_url = "//localhost:7777/test%s/" % char | ||
147 | + self.fakehttp(b"HTTP/1.1 200 OK\r\n\r\nHello.") | ||
148 | + try: | ||
149 | + # We explicitly test urllib.request.urlopen() instead of the top | ||
150 | + # level 'def urlopen()' function defined in this... (quite ugly) | ||
151 | + # test suite. They use different url opening codepaths. Plain | ||
152 | + # urlopen uses FancyURLOpener which goes via a codepath that | ||
153 | + # calls urllib.parse.quote() on the URL which makes all of the | ||
154 | + # above attempts at injection within the url _path_ safe. | ||
155 | + escaped_char_repr = repr(char).replace('\\', r'\\') | ||
156 | + InvalidURL = httplib.InvalidURL | ||
157 | + with self.assertRaisesRegexp( | ||
158 | + InvalidURL, "contain control.*" + escaped_char_repr): | ||
159 | + urllib2.urlopen("http:" + schemeless_url) | ||
160 | + with self.assertRaisesRegexp( | ||
161 | + InvalidURL, "contain control.*" + escaped_char_repr): | ||
162 | + urllib2.urlopen("https:" + schemeless_url) | ||
163 | + finally: | ||
164 | + self.unfakehttp() | ||
165 | + | ||
166 | + @unittest.skipUnless(ssl, "ssl module required") | ||
167 | + def test_url_with_newline_header_injection_rejected(self): | ||
168 | + self.fakehttp(b"HTTP/1.1 200 OK\r\n\r\nHello.") | ||
169 | + host = "localhost:7777?a=1 HTTP/1.1\r\nX-injected: header\r\nTEST: 123" | ||
170 | + schemeless_url = "//" + host + ":8080/test/?test=a" | ||
171 | + try: | ||
172 | + # We explicitly test urllib2.urlopen() instead of the top | ||
173 | + # level 'def urlopen()' function defined in this... (quite ugly) | ||
174 | + # test suite. They use different url opening codepaths. Plain | ||
175 | + # urlopen uses FancyURLOpener which goes via a codepath that | ||
176 | + # calls urllib.parse.quote() on the URL which makes all of the | ||
177 | + # above attempts at injection within the url _path_ safe. | ||
178 | + InvalidURL = httplib.InvalidURL | ||
179 | + with self.assertRaisesRegexp( | ||
180 | + InvalidURL, r"contain control.*\\r.*(found at least . .)"): | ||
181 | + urllib2.urlopen("http:" + schemeless_url) | ||
182 | + with self.assertRaisesRegexp(InvalidURL, r"contain control.*\\n"): | ||
183 | + urllib2.urlopen("https:" + schemeless_url) | ||
184 | + finally: | ||
185 | + self.unfakehttp() | ||
186 | + | ||
187 | + | ||
188 | |||
189 | class RequestTests(unittest.TestCase): | ||
190 | |||
191 | diff --git a/Lib/test/test_xmlrpc.py b/Lib/test/test_xmlrpc.py | ||
192 | index 36b3be67fd6b..90ccb30716ff 100644 | ||
193 | --- a/Lib/test/test_xmlrpc.py | ||
194 | +++ b/Lib/test/test_xmlrpc.py | ||
195 | @@ -659,7 +659,13 @@ def test_dotted_attribute(self): | ||
196 | def test_partial_post(self): | ||
197 | # Check that a partial POST doesn't make the server loop: issue #14001. | ||
198 | conn = httplib.HTTPConnection(ADDR, PORT) | ||
199 | - conn.request('POST', '/RPC2 HTTP/1.0\r\nContent-Length: 100\r\n\r\nbye') | ||
200 | + conn.send('POST /RPC2 HTTP/1.0\r\n' | ||
201 | + 'Content-Length: 100\r\n\r\n' | ||
202 | + 'bye HTTP/1.1\r\n' | ||
203 | + 'Host: %s:%s\r\n' | ||
204 | + 'Accept-Encoding: identity\r\n' | ||
205 | + 'Content-Length: 0\r\n\r\n' | ||
206 | + % (ADDR, PORT)) | ||
207 | conn.close() | ||
208 | |||
209 | class SimpleServerEncodingTestCase(BaseServerTestCase): | ||
210 | diff --git a/Misc/NEWS.d/next/Security/2019-04-10-08-53-30.bpo-30458.51E-DA.rst b/Misc/NEWS.d/next/Security/2019-04-10-08-53-30.bpo-30458.51E-DA.rst | ||
211 | new file mode 100644 | ||
212 | index 000000000000..47cb899df1af | ||
213 | --- /dev/null | ||
214 | +++ b/Misc/NEWS.d/next/Security/2019-04-10-08-53-30.bpo-30458.51E-DA.rst | ||
215 | @@ -0,0 +1 @@ | ||
216 | +Address CVE-2019-9740 by disallowing URL paths with embedded whitespace or control characters through into the underlying http client request. Such potentially malicious header injection URLs now cause an httplib.InvalidURL exception to be raised. | ||
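
The essential part of the dropped CVE-2019-9740/CVE-2019-9947 backport is a single regular expression applied to the URL path before the request line is built. A standalone sketch of that check, raising ValueError where httplib raises InvalidURL:

    import re

    # Reject control characters and non-ASCII bytes (0x00-0x20, 0x7f-0xff)
    # in the URL path; they would otherwise allow HTTP header injection.
    _disallowed_url_pchar = re.compile('[\x00-\x20\x7f-\xff]')

    def check_url_path(url):
        match = _disallowed_url_pchar.search(url)
        if match:
            raise ValueError("URL can't contain control characters. %r "
                             "(found at least %r)" % (url, match.group()))
        return url

    check_url_path("/RPC2?test=a")  # accepted
    try:
        check_url_path("/test HTTP/1.1\r\nX-injected: header")
    except ValueError as exc:
        print(exc)
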
diff --git a/meta/recipes-devtools/python/python/bpo-35907-cve-2019-9948-fix.patch b/meta/recipes-devtools/python/python/bpo-35907-cve-2019-9948-fix.patch
deleted file mode 100644
index b267237018..0000000000
--- a/meta/recipes-devtools/python/python/bpo-35907-cve-2019-9948-fix.patch
+++ /dev/null
@@ -1,55 +0,0 @@ | |||
1 | From 179a5f75f1121dab271fe8f90eb35145f9dcbbda Mon Sep 17 00:00:00 2001 | ||
2 | From: Sihoon Lee <push0ebp@gmail.com> | ||
3 | Date: Fri, 17 May 2019 02:41:06 +0900 | ||
4 | Subject: [PATCH] Update test_urllib.py and urllib.py\nchange assertEqual into | ||
5 | assertRasies in DummyURLopener test, and simplify mitigation | ||
6 | |||
7 | Upstream-Status: Submitted https://github.com/python/cpython/pull/11842 | ||
8 | |||
9 | CVE: CVE-2019-9948 | ||
10 | |||
11 | Signed-off-by: Martin Jansa <Martin.Jansa@gmail.com> | ||
12 | --- | ||
13 | Lib/test/test_urllib.py | 11 +++-------- | ||
14 | Lib/urllib.py | 4 ++-- | ||
15 | 2 files changed, 5 insertions(+), 10 deletions(-) | ||
16 | |||
17 | diff --git a/Lib/test/test_urllib.py b/Lib/test/test_urllib.py | ||
18 | index e5f210e62a18..1e23dfb0bb16 100644 | ||
19 | --- a/Lib/test/test_urllib.py | ||
20 | +++ b/Lib/test/test_urllib.py | ||
21 | @@ -1027,14 +1027,9 @@ def test_local_file_open(self): | ||
22 | class DummyURLopener(urllib.URLopener): | ||
23 | def open_local_file(self, url): | ||
24 | return url | ||
25 | - self.assertEqual(DummyURLopener().open( | ||
26 | - 'local-file://example'), '//example') | ||
27 | - self.assertEqual(DummyURLopener().open( | ||
28 | - 'local_file://example'), '//example') | ||
29 | - self.assertRaises(IOError, urllib.urlopen, | ||
30 | - 'local-file://example') | ||
31 | - self.assertRaises(IOError, urllib.urlopen, | ||
32 | - 'local_file://example') | ||
33 | + for url in ('local_file://example', 'local-file://example'): | ||
34 | + self.assertRaises(IOError, DummyURLopener().open, url) | ||
35 | + self.assertRaises(IOError, urllib.urlopen, url) | ||
36 | |||
37 | # Just commented them out. | ||
38 | # Can't really tell why keep failing in windows and sparc. | ||
39 | diff --git a/Lib/urllib.py b/Lib/urllib.py | ||
40 | index a24e9a5c68fb..39b834054e9e 100644 | ||
41 | --- a/Lib/urllib.py | ||
42 | +++ b/Lib/urllib.py | ||
43 | @@ -203,10 +203,10 @@ def open(self, fullurl, data=None): | ||
44 | name = 'open_' + urltype | ||
45 | self.type = urltype | ||
46 | name = name.replace('-', '_') | ||
47 | - | ||
48 | + | ||
49 | # bpo-35907: # disallow the file reading with the type not allowed | ||
50 | if not hasattr(self, name) or \ | ||
51 | - (self == _urlopener and name == 'open_local_file'): | ||
52 | + getattr(self, name) == self.open_local_file: | ||
53 | if proxy: | ||
54 | return self.open_unknown_proxy(proxy, fullurl, data) | ||
55 | else: | ||
diff --git a/meta/recipes-devtools/python/python/bpo-35907-cve-2019-9948.patch b/meta/recipes-devtools/python/python/bpo-35907-cve-2019-9948.patch
deleted file mode 100644
index f4c225d2fc..0000000000
--- a/meta/recipes-devtools/python/python/bpo-35907-cve-2019-9948.patch
+++ /dev/null
@@ -1,55 +0,0 @@ | |||
1 | From 8f99cc799e4393bf1112b9395b2342f81b3f45ef Mon Sep 17 00:00:00 2001 | ||
2 | From: push0ebp <push0ebp@shl-MacBook-Pro.local> | ||
3 | Date: Thu, 14 Feb 2019 02:05:46 +0900 | ||
4 | Subject: [PATCH] bpo-35907: Avoid file reading as disallowing the unnecessary | ||
5 | URL scheme in urllib | ||
6 | |||
7 | Upstream-Status: Submitted https://github.com/python/cpython/pull/11842 | ||
8 | |||
9 | CVE: CVE-2019-9948 | ||
10 | |||
11 | Signed-off-by: Martin Jansa <Martin.Jansa@gmail.com> | ||
12 | --- | ||
13 | Lib/test/test_urllib.py | 12 ++++++++++++ | ||
14 | Lib/urllib.py | 5 ++++- | ||
15 | 2 files changed, 16 insertions(+), 1 deletion(-) | ||
16 | |||
17 | diff --git a/Lib/test/test_urllib.py b/Lib/test/test_urllib.py | ||
18 | index 1ce9201c0693..e5f210e62a18 100644 | ||
19 | --- a/Lib/test/test_urllib.py | ||
20 | +++ b/Lib/test/test_urllib.py | ||
21 | @@ -1023,6 +1023,18 @@ def open_spam(self, url): | ||
22 | "spam://c:|windows%/:=&?~#+!$,;'@()*[]|/path/"), | ||
23 | "//c:|windows%/:=&?~#+!$,;'@()*[]|/path/") | ||
24 | |||
25 | + def test_local_file_open(self): | ||
26 | + class DummyURLopener(urllib.URLopener): | ||
27 | + def open_local_file(self, url): | ||
28 | + return url | ||
29 | + self.assertEqual(DummyURLopener().open( | ||
30 | + 'local-file://example'), '//example') | ||
31 | + self.assertEqual(DummyURLopener().open( | ||
32 | + 'local_file://example'), '//example') | ||
33 | + self.assertRaises(IOError, urllib.urlopen, | ||
34 | + 'local-file://example') | ||
35 | + self.assertRaises(IOError, urllib.urlopen, | ||
36 | + 'local_file://example') | ||
37 | |||
38 | # Just commented them out. | ||
39 | # Can't really tell why keep failing in windows and sparc. | ||
40 | diff --git a/Lib/urllib.py b/Lib/urllib.py | ||
41 | index d85504a5cb7e..a24e9a5c68fb 100644 | ||
42 | --- a/Lib/urllib.py | ||
43 | +++ b/Lib/urllib.py | ||
44 | @@ -203,7 +203,10 @@ def open(self, fullurl, data=None): | ||
45 | name = 'open_' + urltype | ||
46 | self.type = urltype | ||
47 | name = name.replace('-', '_') | ||
48 | - if not hasattr(self, name): | ||
49 | + | ||
50 | + # bpo-35907: # disallow the file reading with the type not allowed | ||
51 | + if not hasattr(self, name) or \ | ||
52 | + (self == _urlopener and name == 'open_local_file'): | ||
53 | if proxy: | ||
54 | return self.open_unknown_proxy(proxy, fullurl, data) | ||
55 | else: | ||
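
Both dropped bpo-35907 patches (the fix is part of 2.7.17) tighten URLopener.open() so that the local-file handler can no longer be reached through an explicit 'local_file://' or 'local-file://' scheme. A toy dispatcher sketch of that guard (hypothetical MiniOpener class, not the urllib code):

    class MiniOpener:
        """Toy scheme dispatcher mirroring the patched guard."""

        def open_http(self, rest):
            return "would fetch http:%s" % rest

        def open_local_file(self, rest):
            return "would read local file %s" % rest

        def open(self, scheme, rest):
            name = "open_" + scheme.replace("-", "_")
            # bpo-35907: refuse unknown schemes and any scheme that would
            # resolve to the local-file handler.
            if not hasattr(self, name) or getattr(self, name) == self.open_local_file:
                raise IOError("unknown url type: %r" % scheme)
            return getattr(self, name)(rest)

    opener = MiniOpener()
    print(opener.open("http", "//example.com/"))
    for scheme in ("local-file", "local_file"):
        try:
            opener.open(scheme, "//etc/passwd")
        except IOError as exc:
            print(exc)
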
diff --git a/meta/recipes-devtools/python/python/bpo-36216-cve-2019-9636-fix.patch b/meta/recipes-devtools/python/python/bpo-36216-cve-2019-9636-fix.patch
deleted file mode 100644
index 2ce4d2cde7..0000000000
--- a/meta/recipes-devtools/python/python/bpo-36216-cve-2019-9636-fix.patch
+++ /dev/null
@@ -1,28 +0,0 @@ | |||
1 | From 06b5ee585d6e76bdbb4002f642d864d860cbbd2b Mon Sep 17 00:00:00 2001 | ||
2 | From: Steve Dower <steve.dower@python.org> | ||
3 | Date: Tue, 12 Mar 2019 08:23:33 -0700 | ||
4 | Subject: [PATCH] bpo-36216: Only print test messages when verbose | ||
5 | |||
6 | CVE: CVE-2019-9636 | ||
7 | |||
8 | Upstream-Status: Backport https://github.com/python/cpython/pull/12291/commits/06b5ee585d6e76bdbb4002f642d864d860cbbd2b | ||
9 | |||
10 | Signed-off-by: Martin Jansa <Martin.Jansa@gmail.com> | ||
11 | --- | ||
12 | Lib/test/test_urlparse.py | 3 ++- | ||
13 | 1 file changed, 2 insertions(+), 1 deletion(-) | ||
14 | |||
15 | diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py | ||
16 | index 73b0228ea8e3..1830d0b28688 100644 | ||
17 | --- a/Lib/test/test_urlparse.py | ||
18 | +++ b/Lib/test/test_urlparse.py | ||
19 | @@ -644,7 +644,8 @@ def test_urlsplit_normalization(self): | ||
20 | for scheme in [u"http", u"https", u"ftp"]: | ||
21 | for c in denorm_chars: | ||
22 | url = u"{}://netloc{}false.netloc/path".format(scheme, c) | ||
23 | - print "Checking %r" % url | ||
24 | + if test_support.verbose: | ||
25 | + print "Checking %r" % url | ||
26 | with self.assertRaises(ValueError): | ||
27 | urlparse.urlsplit(url) | ||
28 | |||
diff --git a/meta/recipes-devtools/python/python/bpo-36216-cve-2019-9636.patch b/meta/recipes-devtools/python/python/bpo-36216-cve-2019-9636.patch
deleted file mode 100644
index 352b13ba9b..0000000000
--- a/meta/recipes-devtools/python/python/bpo-36216-cve-2019-9636.patch
+++ /dev/null
@@ -1,111 +0,0 @@ | |||
1 | From 3e3669c9c41a27e1466e2c28b3906e3dd0ce3e7e Mon Sep 17 00:00:00 2001 | ||
2 | From: Steve Dower <steve.dower@python.org> | ||
3 | Date: Thu, 7 Mar 2019 08:25:22 -0800 | ||
4 | Subject: [PATCH] bpo-36216: Add check for characters in netloc that normalize | ||
5 | to separators (GH-12201) | ||
6 | |||
7 | CVE: CVE-2019-9636 | ||
8 | |||
9 | Upstream-Status: Backport https://github.com/python/cpython/pull/12216/commits/3e3669c9c41a27e1466e2c28b3906e3dd0ce3e7e | ||
10 | |||
11 | Signed-off-by: Martin Jansa <Martin.Jansa@gmail.com> | ||
12 | --- | ||
13 | Doc/library/urlparse.rst | 20 ++++++++++++++++ | ||
14 | Lib/test/test_urlparse.py | 24 +++++++++++++++++++ | ||
15 | Lib/urlparse.py | 17 +++++++++++++ | ||
16 | .../2019-03-06-09-38-40.bpo-36216.6q1m4a.rst | 3 +++ | ||
17 | 4 files changed, 64 insertions(+) | ||
18 | create mode 100644 Misc/NEWS.d/next/Security/2019-03-06-09-38-40.bpo-36216.6q1m4a.rst | ||
19 | |||
20 | diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py | ||
21 | index 4e1ded73c266..73b0228ea8e3 100644 | ||
22 | --- a/Lib/test/test_urlparse.py | ||
23 | +++ b/Lib/test/test_urlparse.py | ||
24 | @@ -1,4 +1,6 @@ | ||
25 | from test import test_support | ||
26 | +import sys | ||
27 | +import unicodedata | ||
28 | import unittest | ||
29 | import urlparse | ||
30 | |||
31 | @@ -624,6 +626,28 @@ def test_portseparator(self): | ||
32 | self.assertEqual(urlparse.urlparse("http://www.python.org:80"), | ||
33 | ('http','www.python.org:80','','','','')) | ||
34 | |||
35 | + def test_urlsplit_normalization(self): | ||
36 | + # Certain characters should never occur in the netloc, | ||
37 | + # including under normalization. | ||
38 | + # Ensure that ALL of them are detected and cause an error | ||
39 | + illegal_chars = u'/:#?@' | ||
40 | + hex_chars = {'{:04X}'.format(ord(c)) for c in illegal_chars} | ||
41 | + denorm_chars = [ | ||
42 | + c for c in map(unichr, range(128, sys.maxunicode)) | ||
43 | + if (hex_chars & set(unicodedata.decomposition(c).split())) | ||
44 | + and c not in illegal_chars | ||
45 | + ] | ||
46 | + # Sanity check that we found at least one such character | ||
47 | + self.assertIn(u'\u2100', denorm_chars) | ||
48 | + self.assertIn(u'\uFF03', denorm_chars) | ||
49 | + | ||
50 | + for scheme in [u"http", u"https", u"ftp"]: | ||
51 | + for c in denorm_chars: | ||
52 | + url = u"{}://netloc{}false.netloc/path".format(scheme, c) | ||
53 | + print "Checking %r" % url | ||
54 | + with self.assertRaises(ValueError): | ||
55 | + urlparse.urlsplit(url) | ||
56 | + | ||
57 | def test_main(): | ||
58 | test_support.run_unittest(UrlParseTestCase) | ||
59 | |||
60 | diff --git a/Lib/urlparse.py b/Lib/urlparse.py | ||
61 | index f7c2b032b097..54eda08651ab 100644 | ||
62 | --- a/Lib/urlparse.py | ||
63 | +++ b/Lib/urlparse.py | ||
64 | @@ -165,6 +165,21 @@ def _splitnetloc(url, start=0): | ||
65 | delim = min(delim, wdelim) # use earliest delim position | ||
66 | return url[start:delim], url[delim:] # return (domain, rest) | ||
67 | |||
68 | +def _checknetloc(netloc): | ||
69 | + if not netloc or not isinstance(netloc, unicode): | ||
70 | + return | ||
71 | + # looking for characters like \u2100 that expand to 'a/c' | ||
72 | + # IDNA uses NFKC equivalence, so normalize for this check | ||
73 | + import unicodedata | ||
74 | + netloc2 = unicodedata.normalize('NFKC', netloc) | ||
75 | + if netloc == netloc2: | ||
76 | + return | ||
77 | + _, _, netloc = netloc.rpartition('@') # anything to the left of '@' is okay | ||
78 | + for c in '/?#@:': | ||
79 | + if c in netloc2: | ||
80 | + raise ValueError("netloc '" + netloc2 + "' contains invalid " + | ||
81 | + "characters under NFKC normalization") | ||
82 | + | ||
83 | def urlsplit(url, scheme='', allow_fragments=True): | ||
84 | """Parse a URL into 5 components: | ||
85 | <scheme>://<netloc>/<path>?<query>#<fragment> | ||
86 | @@ -193,6 +208,7 @@ def urlsplit(url, scheme='', allow_fragments=True): | ||
87 | url, fragment = url.split('#', 1) | ||
88 | if '?' in url: | ||
89 | url, query = url.split('?', 1) | ||
90 | + _checknetloc(netloc) | ||
91 | v = SplitResult(scheme, netloc, url, query, fragment) | ||
92 | _parse_cache[key] = v | ||
93 | return v | ||
94 | @@ -216,6 +232,7 @@ def urlsplit(url, scheme='', allow_fragments=True): | ||
95 | url, fragment = url.split('#', 1) | ||
96 | if '?' in url: | ||
97 | url, query = url.split('?', 1) | ||
98 | + _checknetloc(netloc) | ||
99 | v = SplitResult(scheme, netloc, url, query, fragment) | ||
100 | _parse_cache[key] = v | ||
101 | return v | ||
102 | diff --git a/Misc/NEWS.d/next/Security/2019-03-06-09-38-40.bpo-36216.6q1m4a.rst b/Misc/NEWS.d/next/Security/2019-03-06-09-38-40.bpo-36216.6q1m4a.rst | ||
103 | new file mode 100644 | ||
104 | index 000000000000..1e1ad92c6feb | ||
105 | --- /dev/null | ||
106 | +++ b/Misc/NEWS.d/next/Security/2019-03-06-09-38-40.bpo-36216.6q1m4a.rst | ||
107 | @@ -0,0 +1,3 @@ | ||
108 | +Changes urlsplit() to raise ValueError when the URL contains characters that | ||
109 | +decompose under IDNA encoding (NFKC-normalization) into characters that | ||
110 | +affect how the URL is parsed. | ||
111 | \ No newline at end of file | ||
diff --git a/meta/recipes-devtools/python/python/bpo-36742-cve-2019-10160.patch b/meta/recipes-devtools/python/python/bpo-36742-cve-2019-10160.patch
deleted file mode 100644
index 1b6cb8cf3e..0000000000
--- a/meta/recipes-devtools/python/python/bpo-36742-cve-2019-10160.patch
+++ /dev/null
@@ -1,81 +0,0 @@ | |||
1 | From 5a1033fe5be764a135adcfff2fdc14edc3e5f327 Mon Sep 17 00:00:00 2001 | ||
2 | From: Changqing Li <changqing.li@windriver.com> | ||
3 | Date: Thu, 10 Oct 2019 16:32:19 +0800 | ||
4 | Subject: [PATCH] bpo-36742: Fixes handling of pre-normalization characters in | ||
5 | urlsplit() bpo-36742: Corrects fix to handle decomposition in usernames | ||
6 | |||
7 | Upstream-Status: Backport | ||
8 | |||
9 | https://github.com/python/cpython/commit/98a4dcefbbc3bce5ab07e7c0830a183157250259 | ||
10 | https://github.com/python/cpython/commit/f61599b050c621386a3fc6bc480359e2d3bb93de#diff-b577545d73dd0cdb2c337a4c5f89e1d7 | ||
11 | |||
12 | CVE: CVE-2019-10160 | ||
13 | |||
14 | Signed-off-by: Changqing Li <changqing.li@windriver.com> | ||
15 | --- | ||
16 | Lib/test/test_urlparse.py | 19 +++++++++++++------ | ||
17 | Lib/urlparse.py | 14 +++++++++----- | ||
18 | 2 files changed, 22 insertions(+), 11 deletions(-) | ||
19 | |||
20 | diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py | ||
21 | index 1830d0b..857ed96 100644 | ||
22 | --- a/Lib/test/test_urlparse.py | ||
23 | +++ b/Lib/test/test_urlparse.py | ||
24 | @@ -641,13 +641,20 @@ class UrlParseTestCase(unittest.TestCase): | ||
25 | self.assertIn(u'\u2100', denorm_chars) | ||
26 | self.assertIn(u'\uFF03', denorm_chars) | ||
27 | |||
28 | + # bpo-36742: Verify port separators are ignored when they | ||
29 | + # existed prior to decomposition | ||
30 | + urlparse.urlsplit(u'http://\u30d5\u309a:80') | ||
31 | + with self.assertRaises(ValueError): | ||
32 | + urlparse.urlsplit(u'http://\u30d5\u309a\ufe1380') | ||
33 | + | ||
34 | for scheme in [u"http", u"https", u"ftp"]: | ||
35 | - for c in denorm_chars: | ||
36 | - url = u"{}://netloc{}false.netloc/path".format(scheme, c) | ||
37 | - if test_support.verbose: | ||
38 | - print "Checking %r" % url | ||
39 | - with self.assertRaises(ValueError): | ||
40 | - urlparse.urlsplit(url) | ||
41 | + for netloc in [u"netloc{}false.netloc", u"n{}user@netloc"]: | ||
42 | + for c in denorm_chars: | ||
43 | + url = u"{}://{}/path".format(scheme, netloc.format(c)) | ||
44 | + if test_support.verbose: | ||
45 | + print "Checking %r" % url | ||
46 | + with self.assertRaises(ValueError): | ||
47 | + urlparse.urlsplit(url) | ||
48 | |||
49 | def test_main(): | ||
50 | test_support.run_unittest(UrlParseTestCase) | ||
51 | diff --git a/Lib/urlparse.py b/Lib/urlparse.py | ||
52 | index 54eda08..e34b368 100644 | ||
53 | --- a/Lib/urlparse.py | ||
54 | +++ b/Lib/urlparse.py | ||
55 | @@ -171,14 +171,18 @@ def _checknetloc(netloc): | ||
56 | # looking for characters like \u2100 that expand to 'a/c' | ||
57 | # IDNA uses NFKC equivalence, so normalize for this check | ||
58 | import unicodedata | ||
59 | - netloc2 = unicodedata.normalize('NFKC', netloc) | ||
60 | - if netloc == netloc2: | ||
61 | + n = netloc.replace(u'@', u'') # ignore characters already included | ||
62 | + n = n.replace(u':', u'') # but not the surrounding text | ||
63 | + n = n.replace(u'#', u'') | ||
64 | + n = n.replace(u'?', u'') | ||
65 | + | ||
66 | + netloc2 = unicodedata.normalize('NFKC', n) | ||
67 | + if n == netloc2: | ||
68 | return | ||
69 | - _, _, netloc = netloc.rpartition('@') # anything to the left of '@' is okay | ||
70 | for c in '/?#@:': | ||
71 | if c in netloc2: | ||
72 | - raise ValueError("netloc '" + netloc2 + "' contains invalid " + | ||
73 | - "characters under NFKC normalization") | ||
74 | + raise ValueError(u"netloc '" + netloc + u"' contains invalid " + | ||
75 | + u"characters under NFKC normalization") | ||
76 | |||
77 | def urlsplit(url, scheme='', allow_fragments=True): | ||
78 | """Parse a URL into 5 components: | ||
79 | -- | ||
80 | 2.7.4 | ||
81 | |||
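
The two dropped bpo-36216/bpo-36742 backports converge on the final form of _checknetloc() shown above: remove the separator characters that are already present, NFKC-normalize the remainder, and reject the URL if normalization introduces new separators. A standalone sketch of that check:

    import unicodedata

    def checknetloc(netloc):
        if not netloc:
            return
        # Separators already present are legitimate; only separators that
        # appear *because of* NFKC normalization (e.g. u'\u2100' -> 'a/c')
        # make the netloc invalid.
        n = netloc.replace('@', '').replace(':', '')
        n = n.replace('#', '').replace('?', '')
        normalized = unicodedata.normalize('NFKC', n)
        if n == normalized:
            return
        for c in '/?#@:':
            if c in normalized:
                raise ValueError("netloc %r contains invalid characters "
                                 "under NFKC normalization" % netloc)

    checknetloc("www.python.org:80")             # accepted
    try:
        checknetloc("netloc\u2100false.netloc")  # u'\u2100' normalizes to 'a/c'
    except ValueError as exc:
        print(exc)
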
diff --git a/meta/recipes-devtools/python/python_2.7.16.bb b/meta/recipes-devtools/python/python_2.7.17.bb
index 625c5312a6..5b856a5097 100644
--- a/meta/recipes-devtools/python/python_2.7.16.bb
+++ b/meta/recipes-devtools/python/python_2.7.17.bb
@@ -30,9 +30,6 @@ SRC_URI += " \ | |||
30 | file://support_SOURCE_DATE_EPOCH_in_py_compile_2.7.patch \ | 30 | file://support_SOURCE_DATE_EPOCH_in_py_compile_2.7.patch \ |
31 | file://float-endian.patch \ | 31 | file://float-endian.patch \ |
32 | file://0001-python2-use-cc_basename-to-replace-CC-for-checking-c.patch \ | 32 | file://0001-python2-use-cc_basename-to-replace-CC-for-checking-c.patch \ |
33 | file://0001-2.7-bpo-34155-Dont-parse-domains-containing-GH-13079.patch \ | ||
34 | file://bpo-36742-cve-2019-10160.patch \ | ||
35 | file://0001-2.7-bpo-38243-Escape-the-server-title-of-DocXMLRPCSe.patch \ | ||
36 | " | 33 | " |
37 | 34 | ||
38 | S = "${WORKDIR}/Python-${PV}" | 35 | S = "${WORKDIR}/Python-${PV}" |