python3:
- Update setuptools to 40.8.0
- Update pip to 19.0.3
- Refreshed patches
- Removed 4 patches (two of them were included in 3.7.3 and the other two
  are included in this release)

python3 Makefile:
- Move PKG_MAINTAINER above PKG_LICENSE

Signed-off-by: Josef Schlehofer <pepe.schlehofer@gmail.com>
@@ -1,120 +0,0 @@
From be5de958e9052e322b0087c6dba81cdad0c3e031 Mon Sep 17 00:00:00 2001
From: "Miss Islington (bot)"
 <31488909+miss-islington@users.noreply.github.com>
Date: Tue, 15 Jan 2019 15:03:36 -0800
Subject: [PATCH] bpo-35746: Fix segfault in ssl's cert parser (GH-11569)
Fix a NULL pointer deref in ssl module. The cert parser did not handle CRL
distribution points with empty DP or URI correctly. A malicious or buggy
certificate can result into segfault.
Signed-off-by: Christian Heimes <christian@python.org>
https://bugs.python.org/issue35746
(cherry picked from commit a37f52436f9aa4b9292878b72f3ff1480e2606c3)
Co-authored-by: Christian Heimes <christian@python.org>
---
 Lib/test/talos-2019-0758.pem | 22 +++++++++++++++++++
 Lib/test/test_ssl.py | 22 +++++++++++++++++++
 .../2019-01-15-18-16-05.bpo-35746.nMSd0j.rst | 3 +++
 Modules/_ssl.c | 4 ++++
 4 files changed, 51 insertions(+)
 create mode 100644 Lib/test/talos-2019-0758.pem
 create mode 100644 Misc/NEWS.d/next/Security/2019-01-15-18-16-05.bpo-35746.nMSd0j.rst
diff --git a/Lib/test/talos-2019-0758.pem b/Lib/test/talos-2019-0758.pem
new file mode 100644
index 0000000000..13b95a77fd
--- /dev/null
+++ b/Lib/test/talos-2019-0758.pem
@@ -0,0 +1,22 @@
+-----BEGIN CERTIFICATE-----
+MIIDqDCCApKgAwIBAgIBAjALBgkqhkiG9w0BAQswHzELMAkGA1UEBhMCVUsxEDAO
+BgNVBAMTB2NvZHktY2EwHhcNMTgwNjE4MTgwMDU4WhcNMjgwNjE0MTgwMDU4WjA7
+MQswCQYDVQQGEwJVSzEsMCoGA1UEAxMjY29kZW5vbWljb24tdm0tMi50ZXN0Lmxh
+bC5jaXNjby5jb20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC63fGB
+J80A9Av1GB0bptslKRIUtJm8EeEu34HkDWbL6AJY0P8WfDtlXjlPaLqFa6sqH6ES
+V48prSm1ZUbDSVL8R6BYVYpOlK8/48xk4pGTgRzv69gf5SGtQLwHy8UPBKgjSZoD
+5a5k5wJXGswhKFFNqyyxqCvWmMnJWxXTt2XDCiWc4g4YAWi4O4+6SeeHVAV9rV7C
+1wxqjzKovVe2uZOHjKEzJbbIU6JBPb6TRfMdRdYOw98n1VXDcKVgdX2DuuqjCzHP
+WhU4Tw050M9NaK3eXp4Mh69VuiKoBGOLSOcS8reqHIU46Reg0hqeL8LIL6OhFHIF
+j7HR6V1X6F+BfRS/AgMBAAGjgdYwgdMwCQYDVR0TBAIwADAdBgNVHQ4EFgQUOktp
+HQjxDXXUg8prleY9jeLKeQ4wTwYDVR0jBEgwRoAUx6zgPygZ0ZErF9sPC4+5e2Io
+UU+hI6QhMB8xCzAJBgNVBAYTAlVLMRAwDgYDVQQDEwdjb2R5LWNhggkA1QEAuwb7
+2s0wCQYDVR0SBAIwADAuBgNVHREEJzAlgiNjb2Rlbm9taWNvbi12bS0yLnRlc3Qu
+bGFsLmNpc2NvLmNvbTAOBgNVHQ8BAf8EBAMCBaAwCwYDVR0fBAQwAjAAMAsGCSqG
+SIb3DQEBCwOCAQEAvqantx2yBlM11RoFiCfi+AfSblXPdrIrHvccepV4pYc/yO6p
+t1f2dxHQb8rWH3i6cWag/EgIZx+HJQvo0rgPY1BFJsX1WnYf1/znZpkUBGbVmlJr
+t/dW1gSkNS6sPsM0Q+7HPgEv8CPDNK5eo7vU2seE0iWOkxSyVUuiCEY9ZVGaLVit
+p0C78nZ35Pdv4I+1cosmHl28+es1WI22rrnmdBpH8J1eY6WvUw2xuZHLeNVN0TzV
+Q3qq53AaCWuLOD1AjESWuUCxMZTK9DPS4JKXTK8RLyDeqOvJGjsSWp3kL0y3GaQ+
+10T1rfkKJub2+m9A9duin1fn6tHc2wSvB7m3DA==
+-----END CERTIFICATE-----
diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py
index f1b9565c8d..b6794ce3a8 100644
--- a/Lib/test/test_ssl.py
+++ b/Lib/test/test_ssl.py
@@ -116,6 +116,7 @@ NONEXISTINGCERT = data_file("XXXnonexisting.pem")
BADKEY = data_file("badkey.pem")
NOKIACERT = data_file("nokia.pem")
NULLBYTECERT = data_file("nullbytecert.pem")
+TALOS_INVALID_CRLDP = data_file("talos-2019-0758.pem")
DHFILE = data_file("ffdh3072.pem")
BYTES_DHFILE = os.fsencode(DHFILE)
@@ -365,6 +366,27 @@ class BasicSocketTests(unittest.TestCase):
self.assertEqual(p['crlDistributionPoints'],
('http://SVRIntl-G3-crl.verisign.com/SVRIntlG3.crl',))
+ def test_parse_cert_CVE_2019_5010(self):
+ p = ssl._ssl._test_decode_cert(TALOS_INVALID_CRLDP)
+ if support.verbose:
+ sys.stdout.write("\n" + pprint.pformat(p) + "\n")
+ self.assertEqual(
+ p,
+ {
+ 'issuer': (
+ (('countryName', 'UK'),), (('commonName', 'cody-ca'),)),
+ 'notAfter': 'Jun 14 18:00:58 2028 GMT',
+ 'notBefore': 'Jun 18 18:00:58 2018 GMT',
+ 'serialNumber': '02',
+ 'subject': ((('countryName', 'UK'),),
+ (('commonName',
+ 'codenomicon-vm-2.test.lal.cisco.com'),)),
+ 'subjectAltName': (
+ ('DNS', 'codenomicon-vm-2.test.lal.cisco.com'),),
+ 'version': 3
+ }
+ )
+
 def test_parse_cert_CVE_2013_4238(self):
 p = ssl._ssl._test_decode_cert(NULLBYTECERT)
 if support.verbose:
diff --git a/Misc/NEWS.d/next/Security/2019-01-15-18-16-05.bpo-35746.nMSd0j.rst b/Misc/NEWS.d/next/Security/2019-01-15-18-16-05.bpo-35746.nMSd0j.rst
new file mode 100644
index 0000000000..dffe347eec
--- /dev/null
+++ b/Misc/NEWS.d/next/Security/2019-01-15-18-16-05.bpo-35746.nMSd0j.rst
@@ -0,0 +1,3 @@
+[CVE-2019-5010] Fix a NULL pointer deref in ssl module. The cert parser did
+not handle CRL distribution points with empty DP or URI correctly. A
+malicious or buggy certificate can result into segfault.
diff --git a/Modules/_ssl.c b/Modules/_ssl.c
index 9894ad821d..9baec8a9bc 100644
--- a/Modules/_ssl.c
+++ b/Modules/_ssl.c
@@ -1516,6 +1516,10 @@ _get_crl_dp(X509 *certificate) {
 STACK_OF(GENERAL_NAME) *gns;
 dp = sk_DIST_POINT_value(dps, i);
+ if (dp->distpoint == NULL) {
+ /* Ignore empty DP value, CVE-2019-5010 */
+ continue;
+ }
 gns = dp->distpoint->name.fullname;
 for (j=0; j < sk_GENERAL_NAME_num(gns); j++) {
--
2.17.1
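
Note: the backport above is already part of upstream Python 3.7.3, which is why the patch can be dropped. As a rough sanity check on the packaged interpreter (illustrative only; "talos-2019-0758.pem" is assumed to be a local copy of the fixture added by the upstream patch, and the sketch reuses the same private helper the upstream test uses):

    # Illustrative check only, not part of the package.
    import pprint
    import ssl

    info = ssl._ssl._test_decode_cert("talos-2019-0758.pem")

    # On 3.7.3 the empty distribution point is skipped, so no
    # 'crlDistributionPoints' key appears; unpatched 3.7.x could segfault here.
    pprint.pprint(info)
    print('crlDistributionPoints' in info)  # expected: False
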
@@ -1,150 +0,0 @@
From 30a779770fe690584456970b602ea16ec3f74ce7 Mon Sep 17 00:00:00 2001
From: Steve Dower <steve.dower@python.org>
Date: Thu, 7 Mar 2019 08:05:31 -0800
Subject: [PATCH] bpo-36216: Add check for characters in netloc that normalize
 to separators (GH-12201)
---
 Doc/library/urllib.parse.rst | 18 +++++++++++++++
 Lib/test/test_urlparse.py | 23 +++++++++++++++++++
 Lib/urllib/parse.py | 17 ++++++++++++++
 .../2019-03-06-09-38-40.bpo-36216.6q1m4a.rst | 3 +++
 4 files changed, 61 insertions(+)
 create mode 100644 Misc/NEWS.d/next/Security/2019-03-06-09-38-40.bpo-36216.6q1m4a.rst
diff --git a/Doc/library/urllib.parse.rst b/Doc/library/urllib.parse.rst
index 0c8f0f607314..b565e1edd321 100644
--- a/Doc/library/urllib.parse.rst
+++ b/Doc/library/urllib.parse.rst
@@ -124,6 +124,11 @@ or on combining URL components into a URL string.
 Unmatched square brackets in the :attr:`netloc` attribute will raise a
 :exc:`ValueError`.
+ Characters in the :attr:`netloc` attribute that decompose under NFKC
+ normalization (as used by the IDNA encoding) into any of ``/``, ``?``,
+ ``#``, ``@``, or ``:`` will raise a :exc:`ValueError`. If the URL is
+ decomposed before parsing, no error will be raised.
+
 .. versionchanged:: 3.2
 Added IPv6 URL parsing capabilities.
@@ -136,6 +141,10 @@ or on combining URL components into a URL string.
 Out-of-range port numbers now raise :exc:`ValueError`, instead of
 returning :const:`None`.
+ .. versionchanged:: 3.7.3
+ Characters that affect netloc parsing under NFKC normalization will
+ now raise :exc:`ValueError`.
+
 .. function:: parse_qs(qs, keep_blank_values=False, strict_parsing=False, encoding='utf-8', errors='replace', max_num_fields=None)
@@ -257,10 +266,19 @@ or on combining URL components into a URL string.
 Unmatched square brackets in the :attr:`netloc` attribute will raise a
 :exc:`ValueError`.
+ Characters in the :attr:`netloc` attribute that decompose under NFKC
+ normalization (as used by the IDNA encoding) into any of ``/``, ``?``,
+ ``#``, ``@``, or ``:`` will raise a :exc:`ValueError`. If the URL is
+ decomposed before parsing, no error will be raised.
+
 .. versionchanged:: 3.6
 Out-of-range port numbers now raise :exc:`ValueError`, instead of
 returning :const:`None`.
+ .. versionchanged:: 3.7.3
+ Characters that affect netloc parsing under NFKC normalization will
+ now raise :exc:`ValueError`.
+
 .. function:: urlunsplit(parts)
diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py
index be50b47603aa..e6638aee2244 100644
--- a/Lib/test/test_urlparse.py
+++ b/Lib/test/test_urlparse.py
@@ -1,3 +1,5 @@
+import sys
+import unicodedata
 import unittest
 import urllib.parse
@@ -984,6 +986,27 @@ def test_all(self):
 expected.append(name)
 self.assertCountEqual(urllib.parse.__all__, expected)
+ def test_urlsplit_normalization(self):
+ # Certain characters should never occur in the netloc,
+ # including under normalization.
+ # Ensure that ALL of them are detected and cause an error
+ illegal_chars = '/:#?@'
+ hex_chars = {'{:04X}'.format(ord(c)) for c in illegal_chars}
+ denorm_chars = [
+ c for c in map(chr, range(128, sys.maxunicode))
+ if (hex_chars & set(unicodedata.decomposition(c).split()))
+ and c not in illegal_chars
+ ]
+ # Sanity check that we found at least one such character
+ self.assertIn('\u2100', denorm_chars)
+ self.assertIn('\uFF03', denorm_chars)
+
+ for scheme in ["http", "https", "ftp"]:
+ for c in denorm_chars:
+ url = "{}://netloc{}false.netloc/path".format(scheme, c)
+ with self.subTest(url=url, char='{:04X}'.format(ord(c))):
+ with self.assertRaises(ValueError):
+ urllib.parse.urlsplit(url)
 class Utility_Tests(unittest.TestCase):
 """Testcase to test the various utility functions in the urllib."""
diff --git a/Lib/urllib/parse.py b/Lib/urllib/parse.py
index f691ab74f87f..39c5d6a80824 100644
--- a/Lib/urllib/parse.py
+++ b/Lib/urllib/parse.py
@@ -391,6 +391,21 @@ def _splitnetloc(url, start=0):
 delim = min(delim, wdelim) # use earliest delim position
 return url[start:delim], url[delim:] # return (domain, rest)
+def _checknetloc(netloc):
+ if not netloc or netloc.isascii():
+ return
+ # looking for characters like \u2100 that expand to 'a/c'
+ # IDNA uses NFKC equivalence, so normalize for this check
+ import unicodedata
+ netloc2 = unicodedata.normalize('NFKC', netloc)
+ if netloc == netloc2:
+ return
+ _, _, netloc = netloc.rpartition('@') # anything to the left of '@' is okay
+ for c in '/?#@:':
+ if c in netloc2:
+ raise ValueError("netloc '" + netloc2 + "' contains invalid " +
+ "characters under NFKC normalization")
+
 def urlsplit(url, scheme='', allow_fragments=True):
 """Parse a URL into 5 components:
 <scheme>://<netloc>/<path>?<query>#<fragment>
@@ -419,6 +434,7 @@ def urlsplit(url, scheme='', allow_fragments=True):
 url, fragment = url.split('#', 1)
 if '?' in url:
 url, query = url.split('?', 1)
+ _checknetloc(netloc)
 v = SplitResult('http', netloc, url, query, fragment)
 _parse_cache[key] = v
 return _coerce_result(v)
@@ -442,6 +458,7 @@ def urlsplit(url, scheme='', allow_fragments=True):
 url, fragment = url.split('#', 1)
 if '?' in url:
 url, query = url.split('?', 1)
+ _checknetloc(netloc)
 v = SplitResult(scheme, netloc, url, query, fragment)
 _parse_cache[key] = v
 return _coerce_result(v)
diff --git a/Misc/NEWS.d/next/Security/2019-03-06-09-38-40.bpo-36216.6q1m4a.rst b/Misc/NEWS.d/next/Security/2019-03-06-09-38-40.bpo-36216.6q1m4a.rst
new file mode 100644
index 000000000000..5546394157f9
--- /dev/null
+++ b/Misc/NEWS.d/next/Security/2019-03-06-09-38-40.bpo-36216.6q1m4a.rst
@@ -0,0 +1,3 @@
+Changes urlsplit() to raise ValueError when the URL contains characters that
+decompose under IDNA encoding (NFKC-normalization) into characters that
+affect how the URL is parsed.
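
Note: this backport is included in upstream 3.7.3, so the packaged urllib.parse already rejects hostnames that only turn into separator characters after NFKC normalization. A rough sketch of the behaviour on 3.7.3 or newer (the hostname is made up; U+2100 normalizes to 'a/c'):

    # Illustrative check only, not part of the package.
    from urllib.parse import urlsplit

    try:
        urlsplit("https://netloc\u2100false.example/path")
    except ValueError as exc:
        print("rejected:", exc)
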
@@ -1,146 +0,0 @@
From 233e3211cfdcca7310e25529e9115fbaddf47cca Mon Sep 17 00:00:00 2001
From: "Gregory P. Smith" <greg@krypto.org>
Date: Tue, 30 Apr 2019 19:12:21 -0700
Subject: [PATCH] bpo-30458: Disallow control chars in http URLs. (GH-12755)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Disallow control chars in http URLs in urllib.urlopen. This addresses a potential security problem for applications that do not sanity check their URLs where http request headers could be injected.
Disable https related urllib tests on a build without ssl (GH-13032)
These tests require an SSL enabled build. Skip these tests when python is built without SSL to fix test failures.
Use http.client.InvalidURL instead of ValueError as the new error case's exception. (GH-13044)
Co-Authored-By: Miro Hrončok <miro@hroncok.cz>
---
 Lib/http/client.py | 15 ++++++
 Lib/test/test_urllib.py | 53 +++++++++++++++++++
 Lib/test/test_xmlrpc.py | 7 ++-
 .../2019-04-10-08-53-30.bpo-30458.51E-DA.rst | 1 +
 4 files changed, 75 insertions(+), 1 deletion(-)
 create mode 100644 Misc/NEWS.d/next/Security/2019-04-10-08-53-30.bpo-30458.51E-DA.rst
diff --git a/Lib/http/client.py b/Lib/http/client.py
index 1de151c38e92..2afd452fe30f 100644
--- a/Lib/http/client.py
+++ b/Lib/http/client.py
@@ -140,6 +140,16 @@
 _is_legal_header_name = re.compile(rb'[^:\s][^:\r\n]*').fullmatch
 _is_illegal_header_value = re.compile(rb'\n(?![ \t])|\r(?![ \t\n])').search
+# These characters are not allowed within HTTP URL paths.
+# See https://tools.ietf.org/html/rfc3986#section-3.3 and the
+# https://tools.ietf.org/html/rfc3986#appendix-A pchar definition.
+# Prevents CVE-2019-9740. Includes control characters such as \r\n.
+# We don't restrict chars above \x7f as putrequest() limits us to ASCII.
+_contains_disallowed_url_pchar_re = re.compile('[\x00-\x20\x7f]')
+# Arguably only these _should_ allowed:
+# _is_allowed_url_pchars_re = re.compile(r"^[/!$&'()*+,;=:@%a-zA-Z0-9._~-]+$")
+# We are more lenient for assumed real world compatibility purposes.
+
 # We always set the Content-Length header for these methods because some
 # servers will otherwise respond with a 411
 _METHODS_EXPECTING_BODY = {'PATCH', 'POST', 'PUT'}
@@ -1101,6 +1111,11 @@ def putrequest(self, method, url, skip_host=False,
 self._method = method
 if not url:
 url = '/'
+ # Prevent CVE-2019-9740.
+ match = _contains_disallowed_url_pchar_re.search(url)
+ if match:
+ raise InvalidURL(f"URL can't contain control characters. {url!r} "
+ f"(found at least {match.group()!r})")
 request = '%s %s %s' % (method, url, self._http_vsn_str)
 # Non-ASCII characters should have been eliminated earlier
diff --git a/Lib/test/test_urllib.py b/Lib/test/test_urllib.py
index 2ac73b58d832..7214492eca9d 100644
--- a/Lib/test/test_urllib.py
+++ b/Lib/test/test_urllib.py
@@ -329,6 +329,59 @@ def test_willclose(self):
 finally:
 self.unfakehttp()
+ @unittest.skipUnless(ssl, "ssl module required")
+ def test_url_with_control_char_rejected(self):
+ for char_no in list(range(0, 0x21)) + [0x7f]:
+ char = chr(char_no)
+ schemeless_url = f"//localhost:7777/test{char}/"
+ self.fakehttp(b"HTTP/1.1 200 OK\r\n\r\nHello.")
+ try:
+ # We explicitly test urllib.request.urlopen() instead of the top
+ # level 'def urlopen()' function defined in this... (quite ugly)
+ # test suite. They use different url opening codepaths. Plain
+ # urlopen uses FancyURLOpener which goes via a codepath that
+ # calls urllib.parse.quote() on the URL which makes all of the
+ # above attempts at injection within the url _path_ safe.
+ escaped_char_repr = repr(char).replace('\\', r'\\')
+ InvalidURL = http.client.InvalidURL
+ with self.assertRaisesRegex(
+ InvalidURL, f"contain control.*{escaped_char_repr}"):
+ urllib.request.urlopen(f"http:{schemeless_url}")
+ with self.assertRaisesRegex(
+ InvalidURL, f"contain control.*{escaped_char_repr}"):
+ urllib.request.urlopen(f"https:{schemeless_url}")
+ # This code path quotes the URL so there is no injection.
+ resp = urlopen(f"http:{schemeless_url}")
+ self.assertNotIn(char, resp.geturl())
+ finally:
+ self.unfakehttp()
+
+ @unittest.skipUnless(ssl, "ssl module required")
+ def test_url_with_newline_header_injection_rejected(self):
+ self.fakehttp(b"HTTP/1.1 200 OK\r\n\r\nHello.")
+ host = "localhost:7777?a=1 HTTP/1.1\r\nX-injected: header\r\nTEST: 123"
+ schemeless_url = "//" + host + ":8080/test/?test=a"
+ try:
+ # We explicitly test urllib.request.urlopen() instead of the top
+ # level 'def urlopen()' function defined in this... (quite ugly)
+ # test suite. They use different url opening codepaths. Plain
+ # urlopen uses FancyURLOpener which goes via a codepath that
+ # calls urllib.parse.quote() on the URL which makes all of the
+ # above attempts at injection within the url _path_ safe.
+ InvalidURL = http.client.InvalidURL
+ with self.assertRaisesRegex(
+ InvalidURL, r"contain control.*\\r.*(found at least . .)"):
+ urllib.request.urlopen(f"http:{schemeless_url}")
+ with self.assertRaisesRegex(InvalidURL, r"contain control.*\\n"):
+ urllib.request.urlopen(f"https:{schemeless_url}")
+ # This code path quotes the URL so there is no injection.
+ resp = urlopen(f"http:{schemeless_url}")
+ self.assertNotIn(' ', resp.geturl())
+ self.assertNotIn('\r', resp.geturl())
+ self.assertNotIn('\n', resp.geturl())
+ finally:
+ self.unfakehttp()
+
 def test_read_0_9(self):
 # "0.9" response accepted (but not "simple responses" without
 # a status line)
diff --git a/Lib/test/test_xmlrpc.py b/Lib/test/test_xmlrpc.py
index 32263f7f0b3b..0e002ec4ef9f 100644
--- a/Lib/test/test_xmlrpc.py
+++ b/Lib/test/test_xmlrpc.py
@@ -945,7 +945,12 @@ def test_unicode_host(self):
 def test_partial_post(self):
 # Check that a partial POST doesn't make the server loop: issue #14001.
 conn = http.client.HTTPConnection(ADDR, PORT)
- conn.request('POST', '/RPC2 HTTP/1.0\r\nContent-Length: 100\r\n\r\nbye')
+ conn.send('POST /RPC2 HTTP/1.0\r\n'
+ 'Content-Length: 100\r\n\r\n'
+ 'bye HTTP/1.1\r\n'
+ f'Host: {ADDR}:{PORT}\r\n'
+ 'Accept-Encoding: identity\r\n'
+ 'Content-Length: 0\r\n\r\n'.encode('ascii'))
 conn.close()
 def test_context_manager(self):
diff --git a/Misc/NEWS.d/next/Security/2019-04-10-08-53-30.bpo-30458.51E-DA.rst b/Misc/NEWS.d/next/Security/2019-04-10-08-53-30.bpo-30458.51E-DA.rst
new file mode 100644
index 000000000000..ed8027fb4d64
--- /dev/null
+++ b/Misc/NEWS.d/next/Security/2019-04-10-08-53-30.bpo-30458.51E-DA.rst
@@ -0,0 +1 @@
+Address CVE-2019-9740 by disallowing URL paths with embedded whitespace or control characters through into the underlying http client request. Such potentially malicious header injection URLs now cause an http.client.InvalidURL exception to be raised.
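
Note: this fix also ships in upstream 3.7.3, so http.client refuses URLs containing control characters while the request line is still being built, before anything is sent. A minimal sketch of the expected behaviour; the host name below is made up and no traffic is generated because the URL is rejected up front:

    # Illustrative check only, not part of the package.
    import http.client
    import urllib.request

    url = "http://example.invalid/test\r\nX-injected: header"
    try:
        urllib.request.urlopen(url)
    except http.client.InvalidURL as exc:
        print("rejected:", exc)
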
@@ -1,76 +0,0 @@
From 3fa72516a390fa8e3552007814e8dc1248686eb5 Mon Sep 17 00:00:00 2001
From: Victor Stinner <victor.stinner@gmail.com>
Date: Wed, 22 May 2019 22:15:01 +0200
Subject: [PATCH] bpo-35907, CVE-2019-9948: urllib rejects local_file:// scheme
 (GH-13474)
CVE-2019-9948: Avoid file reading as disallowing the unnecessary URL
scheme in URLopener().open() and URLopener().retrieve()
of urllib.request.
Co-Authored-By: SH <push0ebp@gmail.com>
(cherry picked from commit 0c2b6a3943aa7b022e8eb4bfd9bffcddebf9a587)
---
 Lib/test/test_urllib.py | 18 ++++++++++++++++++
 Lib/urllib/request.py | 2 +-
 .../2019-05-21-23-20-18.bpo-35907.NC_zNK.rst | 2 ++
 3 files changed, 21 insertions(+), 1 deletion(-)
 create mode 100644 Misc/NEWS.d/next/Security/2019-05-21-23-20-18.bpo-35907.NC_zNK.rst
diff --git a/Lib/test/test_urllib.py b/Lib/test/test_urllib.py
index 7214492eca9d..7ec365b928a5 100644
--- a/Lib/test/test_urllib.py
+++ b/Lib/test/test_urllib.py
@@ -16,6 +16,7 @@
 ssl = None
 import sys
 import tempfile
+import warnings
 from nturl2path import url2pathname, pathname2url
 from base64 import b64encode
@@ -1463,6 +1464,23 @@ def open_spam(self, url):
 "spam://c:|windows%/:=&?~#+!$,;'@()*[]|/path/"),
 "//c:|windows%/:=&?~#+!$,;'@()*[]|/path/")
+ def test_local_file_open(self):
+ # bpo-35907, CVE-2019-9948: urllib must reject local_file:// scheme
+ class DummyURLopener(urllib.request.URLopener):
+ def open_local_file(self, url):
+ return url
+
+ with warnings.catch_warnings(record=True):
+ warnings.simplefilter("ignore", DeprecationWarning)
+
+ for url in ('local_file://example', 'local-file://example'):
+ self.assertRaises(OSError, urllib.request.urlopen, url)
+ self.assertRaises(OSError, urllib.request.URLopener().open, url)
+ self.assertRaises(OSError, urllib.request.URLopener().retrieve, url)
+ self.assertRaises(OSError, DummyURLopener().open, url)
+ self.assertRaises(OSError, DummyURLopener().retrieve, url)
+
+
 # Just commented them out.
 # Can't really tell why keep failing in windows and sparc.
 # Everywhere else they work ok, but on those machines, sometimes
diff --git a/Lib/urllib/request.py b/Lib/urllib/request.py
index d38f725d8e9f..37b254862887 100644
--- a/Lib/urllib/request.py
+++ b/Lib/urllib/request.py
@@ -1746,7 +1746,7 @@ def open(self, fullurl, data=None):
 name = 'open_' + urltype
 self.type = urltype
 name = name.replace('-', '_')
- if not hasattr(self, name):
+ if not hasattr(self, name) or name == 'open_local_file':
 if proxy:
 return self.open_unknown_proxy(proxy, fullurl, data)
 else:
diff --git a/Misc/NEWS.d/next/Security/2019-05-21-23-20-18.bpo-35907.NC_zNK.rst b/Misc/NEWS.d/next/Security/2019-05-21-23-20-18.bpo-35907.NC_zNK.rst
new file mode 100644
index 000000000000..16adc7a94e2f
--- /dev/null
+++ b/Misc/NEWS.d/next/Security/2019-05-21-23-20-18.bpo-35907.NC_zNK.rst
@@ -0,0 +1,2 @@
+CVE-2019-9948: Avoid file reading as disallowing the unnecessary URL scheme in
+``URLopener().open()`` and ``URLopener().retrieve()`` of :mod:`urllib.request`.
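
Note: this last removed patch is likewise part of upstream 3.7.3, where the legacy URLopener refuses the redundant local_file:// scheme so it can no longer be used to smuggle local file reads past scheme checks. A minimal sketch of the behaviour; the file path is only an example, and the warning filter is needed because URLopener is deprecated:

    # Illustrative check only, not part of the package.
    import urllib.request
    import warnings

    with warnings.catch_warnings():
        warnings.simplefilter("ignore", DeprecationWarning)
        opener = urllib.request.URLopener()
        try:
            opener.open("local_file:///etc/hostname")
        except OSError as exc:
            print("rejected:", exc)  # raised before the file is read
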