python: add a fix for CVE-2019-9948 and CVE-2019-9636

(From OE-Core rev: 9d23b982fa4e0290761b3d15f6959779fed72ad6)

Signed-off-by: Martin Jansa <Martin.Jansa@gmail.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Author:    Martin Jansa <Martin.Jansa@gmail.com>
Date:      2019-05-17 20:16:17 +00:00
Committer: Richard Purdie
Commit:    e5f6e0853e (parent 95a4691d15)
5 changed files with 254 additions and 1 deletion

@@ -0,0 +1,55 @@
From 179a5f75f1121dab271fe8f90eb35145f9dcbbda Mon Sep 17 00:00:00 2001
From: Sihoon Lee <push0ebp@gmail.com>
Date: Fri, 17 May 2019 02:41:06 +0900
Subject: [PATCH] Update test_urllib.py and urllib.py: change assertEqual into
 assertRaises in DummyURLopener test, and simplify mitigation
Upstream-Status: Submitted https://github.com/python/cpython/pull/11842
CVE: CVE-2019-9948
Signed-off-by: Martin Jansa <Martin.Jansa@gmail.com>
---
Lib/test/test_urllib.py | 11 +++--------
Lib/urllib.py | 4 ++--
2 files changed, 5 insertions(+), 10 deletions(-)
diff --git a/Lib/test/test_urllib.py b/Lib/test/test_urllib.py
index e5f210e62a18..1e23dfb0bb16 100644
--- a/Lib/test/test_urllib.py
+++ b/Lib/test/test_urllib.py
@@ -1027,14 +1027,9 @@ def test_local_file_open(self):
         class DummyURLopener(urllib.URLopener):
             def open_local_file(self, url):
                 return url
-        self.assertEqual(DummyURLopener().open(
-            'local-file://example'), '//example')
-        self.assertEqual(DummyURLopener().open(
-            'local_file://example'), '//example')
-        self.assertRaises(IOError, urllib.urlopen,
-            'local-file://example')
-        self.assertRaises(IOError, urllib.urlopen,
-            'local_file://example')
+        for url in ('local_file://example', 'local-file://example'):
+            self.assertRaises(IOError, DummyURLopener().open, url)
+            self.assertRaises(IOError, urllib.urlopen, url)
 
 # Just commented them out.
 # Can't really tell why keep failing in windows and sparc.
diff --git a/Lib/urllib.py b/Lib/urllib.py
index a24e9a5c68fb..39b834054e9e 100644
--- a/Lib/urllib.py
+++ b/Lib/urllib.py
@@ -203,10 +203,10 @@ def open(self, fullurl, data=None):
         name = 'open_' + urltype
         self.type = urltype
         name = name.replace('-', '_')
-
+
         # bpo-35907: # disallow the file reading with the type not allowed
         if not hasattr(self, name) or \
-            (self == _urlopener and name == 'open_local_file'):
+           getattr(self, name) == self.open_local_file:
             if proxy:
                 return self.open_unknown_proxy(proxy, fullurl, data)
             else:
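
With this revision the dispatch check becomes getattr(self, name) == self.open_local_file, which holds for any URLopener whose handler for the requested scheme resolves to open_local_file, so the local_file:// and local-file:// spellings are treated as unknown URL types even when a subclass overrides that handler. A minimal Python 2 sketch of the behaviour the revised check is meant to enforce (illustrative, not output captured from a patched build):

import urllib

class DummyURLopener(urllib.URLopener):
    def open_local_file(self, url):
        # Even with an override, getattr(self, 'open_local_file') is still
        # self.open_local_file, so the patched check rejects the scheme.
        return url

for url in ('local_file://example', 'local-file://example'):
    try:
        DummyURLopener().open(url)
    except IOError:
        # open() falls through to open_unknown(), which raises
        # IOError('url error', 'unknown url type', ...).
        print 'rejected %r' % url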

@@ -0,0 +1,55 @@
From 8f99cc799e4393bf1112b9395b2342f81b3f45ef Mon Sep 17 00:00:00 2001
From: push0ebp <push0ebp@shl-MacBook-Pro.local>
Date: Thu, 14 Feb 2019 02:05:46 +0900
Subject: [PATCH] bpo-35907: Avoid file reading as disallowing the unnecessary
URL scheme in urllib
Upstream-Status: Submitted https://github.com/python/cpython/pull/11842
CVE: CVE-2019-9948
Signed-off-by: Martin Jansa <Martin.Jansa@gmail.com>
---
Lib/test/test_urllib.py | 12 ++++++++++++
Lib/urllib.py | 5 ++++-
2 files changed, 16 insertions(+), 1 deletion(-)
diff --git a/Lib/test/test_urllib.py b/Lib/test/test_urllib.py
index 1ce9201c0693..e5f210e62a18 100644
--- a/Lib/test/test_urllib.py
+++ b/Lib/test/test_urllib.py
@@ -1023,6 +1023,18 @@ def open_spam(self, url):
             "spam://c:|windows%/:=&?~#+!$,;'@()*[]|/path/"),
             "//c:|windows%/:=&?~#+!$,;'@()*[]|/path/")
 
+    def test_local_file_open(self):
+        class DummyURLopener(urllib.URLopener):
+            def open_local_file(self, url):
+                return url
+        self.assertEqual(DummyURLopener().open(
+            'local-file://example'), '//example')
+        self.assertEqual(DummyURLopener().open(
+            'local_file://example'), '//example')
+        self.assertRaises(IOError, urllib.urlopen,
+            'local-file://example')
+        self.assertRaises(IOError, urllib.urlopen,
+            'local_file://example')
 
 # Just commented them out.
 # Can't really tell why keep failing in windows and sparc.
diff --git a/Lib/urllib.py b/Lib/urllib.py
index d85504a5cb7e..a24e9a5c68fb 100644
--- a/Lib/urllib.py
+++ b/Lib/urllib.py
@@ -203,7 +203,10 @@ def open(self, fullurl, data=None):
         name = 'open_' + urltype
         self.type = urltype
         name = name.replace('-', '_')
-        if not hasattr(self, name):
+
+        # bpo-35907: # disallow the file reading with the type not allowed
+        if not hasattr(self, name) or \
+            (self == _urlopener and name == 'open_local_file'):
             if proxy:
                 return self.open_unknown_proxy(proxy, fullurl, data)
             else:
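
The hunk above is the original mitigation: before it, the only dispatch check was hasattr(self, name), so urllib happily routed the local_file:// and local-file:// spellings to open_local_file, letting them slip past callers that blacklist only the canonical file:// scheme. A hedged Python 2 sketch of the pattern CVE-2019-9948 abuses (the fetch() wrapper and the path are illustrative):

import urllib

def fetch(url):
    # Naive blacklist of the kind this CVE bypasses: only the canonical
    # scheme name is checked, but urllib maps local_file:// and
    # local-file:// to the same open_local_file handler.
    if url.startswith('file:'):
        raise ValueError('local files are not allowed')
    return urllib.urlopen(url)

# On an unpatched interpreter the call below opens the local file; with
# this patch applied urllib raises IOError('url error', 'unknown url
# type', ...) instead.
# fetch('local_file:///etc/passwd')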

@@ -0,0 +1,28 @@
From 06b5ee585d6e76bdbb4002f642d864d860cbbd2b Mon Sep 17 00:00:00 2001
From: Steve Dower <steve.dower@python.org>
Date: Tue, 12 Mar 2019 08:23:33 -0700
Subject: [PATCH] bpo-36216: Only print test messages when verbose
CVE: CVE-2019-9636
Upstream-Status: Backport https://github.com/python/cpython/pull/12291/commits/06b5ee585d6e76bdbb4002f642d864d860cbbd2b
Signed-off-by: Martin Jansa <Martin.Jansa@gmail.com>
---
Lib/test/test_urlparse.py | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py
index 73b0228ea8e3..1830d0b28688 100644
--- a/Lib/test/test_urlparse.py
+++ b/Lib/test/test_urlparse.py
@@ -644,7 +644,8 @@ def test_urlsplit_normalization(self):
         for scheme in [u"http", u"https", u"ftp"]:
             for c in denorm_chars:
                 url = u"{}://netloc{}false.netloc/path".format(scheme, c)
-                print "Checking %r" % url
+                if test_support.verbose:
+                    print "Checking %r" % url
                 with self.assertRaises(ValueError):
                     urlparse.urlsplit(url)
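
test_support.verbose is the flag regrtest toggles with -v, so after this backport the per-URL progress lines only appear in verbose runs. A small sketch of the same pattern, reusing only names already imported by the test module:

from test import test_support

def check_quietly(url):
    # Mirror of the gated print above: chatty diagnostics only when the
    # suite is run verbosely, silence otherwise.
    if test_support.verbose:
        print "Checking %r" % url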

@@ -0,0 +1,111 @@
From 3e3669c9c41a27e1466e2c28b3906e3dd0ce3e7e Mon Sep 17 00:00:00 2001
From: Steve Dower <steve.dower@python.org>
Date: Thu, 7 Mar 2019 08:25:22 -0800
Subject: [PATCH] bpo-36216: Add check for characters in netloc that normalize
to separators (GH-12201)
CVE: CVE-2019-9636
Upstream-Status: Backport https://github.com/python/cpython/pull/12216/commits/3e3669c9c41a27e1466e2c28b3906e3dd0ce3e7e
Signed-off-by: Martin Jansa <Martin.Jansa@gmail.com>
---
Doc/library/urlparse.rst | 20 ++++++++++++++++
Lib/test/test_urlparse.py | 24 +++++++++++++++++++
Lib/urlparse.py | 17 +++++++++++++
.../2019-03-06-09-38-40.bpo-36216.6q1m4a.rst | 3 +++
4 files changed, 64 insertions(+)
create mode 100644 Misc/NEWS.d/next/Security/2019-03-06-09-38-40.bpo-36216.6q1m4a.rst
diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py
index 4e1ded73c266..73b0228ea8e3 100644
--- a/Lib/test/test_urlparse.py
+++ b/Lib/test/test_urlparse.py
@@ -1,4 +1,6 @@
from test import test_support
+import sys
+import unicodedata
import unittest
import urlparse
@@ -624,6 +626,28 @@ def test_portseparator(self):
         self.assertEqual(urlparse.urlparse("http://www.python.org:80"),
                          ('http','www.python.org:80','','','',''))
 
+    def test_urlsplit_normalization(self):
+        # Certain characters should never occur in the netloc,
+        # including under normalization.
+        # Ensure that ALL of them are detected and cause an error
+        illegal_chars = u'/:#?@'
+        hex_chars = {'{:04X}'.format(ord(c)) for c in illegal_chars}
+        denorm_chars = [
+            c for c in map(unichr, range(128, sys.maxunicode))
+            if (hex_chars & set(unicodedata.decomposition(c).split()))
+            and c not in illegal_chars
+        ]
+        # Sanity check that we found at least one such character
+        self.assertIn(u'\u2100', denorm_chars)
+        self.assertIn(u'\uFF03', denorm_chars)
+
+        for scheme in [u"http", u"https", u"ftp"]:
+            for c in denorm_chars:
+                url = u"{}://netloc{}false.netloc/path".format(scheme, c)
+                print "Checking %r" % url
+                with self.assertRaises(ValueError):
+                    urlparse.urlsplit(url)
+
 
 def test_main():
     test_support.run_unittest(UrlParseTestCase)
diff --git a/Lib/urlparse.py b/Lib/urlparse.py
index f7c2b032b097..54eda08651ab 100644
--- a/Lib/urlparse.py
+++ b/Lib/urlparse.py
@@ -165,6 +165,21 @@ def _splitnetloc(url, start=0):
             delim = min(delim, wdelim)     # use earliest delim position
     return url[start:delim], url[delim:]   # return (domain, rest)
 
+def _checknetloc(netloc):
+    if not netloc or not isinstance(netloc, unicode):
+        return
+    # looking for characters like \u2100 that expand to 'a/c'
+    # IDNA uses NFKC equivalence, so normalize for this check
+    import unicodedata
+    netloc2 = unicodedata.normalize('NFKC', netloc)
+    if netloc == netloc2:
+        return
+    _, _, netloc = netloc.rpartition('@') # anything to the left of '@' is okay
+    for c in '/?#@:':
+        if c in netloc2:
+            raise ValueError("netloc '" + netloc2 + "' contains invalid " +
+                             "characters under NFKC normalization")
+
 def urlsplit(url, scheme='', allow_fragments=True):
     """Parse a URL into 5 components:
     <scheme>://<netloc>/<path>?<query>#<fragment>
@@ -193,6 +208,7 @@ def urlsplit(url, scheme='', allow_fragments=True):
                 url, fragment = url.split('#', 1)
             if '?' in url:
                 url, query = url.split('?', 1)
+            _checknetloc(netloc)
             v = SplitResult(scheme, netloc, url, query, fragment)
             _parse_cache[key] = v
             return v
@@ -216,6 +232,7 @@ def urlsplit(url, scheme='', allow_fragments=True):
         url, fragment = url.split('#', 1)
     if '?' in url:
         url, query = url.split('?', 1)
+    _checknetloc(netloc)
     v = SplitResult(scheme, netloc, url, query, fragment)
     _parse_cache[key] = v
     return v
diff --git a/Misc/NEWS.d/next/Security/2019-03-06-09-38-40.bpo-36216.6q1m4a.rst b/Misc/NEWS.d/next/Security/2019-03-06-09-38-40.bpo-36216.6q1m4a.rst
new file mode 100644
index 000000000000..1e1ad92c6feb
--- /dev/null
+++ b/Misc/NEWS.d/next/Security/2019-03-06-09-38-40.bpo-36216.6q1m4a.rst
@@ -0,0 +1,3 @@
+Changes urlsplit() to raise ValueError when the URL contains characters that
+decompose under IDNA encoding (NFKC-normalization) into characters that
+affect how the URL is parsed.
\ No newline at end of file
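
To make the failure mode concrete: characters such as U+FF03 (FULLWIDTH NUMBER SIGN) NFKC-normalize to '#', so a unicode URL can carry a netloc that changes meaning once something downstream (IDNA encoding, for instance) normalizes it. A hedged Python 2 sketch of what the new _checknetloc() guards against (the host names are illustrative):

import urlparse

# U+FF03 normalizes to '#', so the netloc seen by urlsplit() and the host
# a normalizing consumer would contact are not the same thing.
url = u"https://example.com\uFF03.evil.invalid/path"

try:
    parts = urlparse.urlsplit(url)
    print "accepted netloc: %r" % parts.netloc      # unpatched behaviour
except ValueError as exc:
    print "rejected by _checknetloc: %s" % exc      # patched behaviour

Note that the check only applies to unicode inputs, matching the isinstance(netloc, unicode) guard in _checknetloc(), so byte-string URLs keep their previous behaviour.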