python: Update to 2.7.17, refresh patches

Patches already merged and so removed:
* 019-bpo-36216-Add-check-for-characters-in-netloc-that-normalize-to-separators-GH-12216.patch
* 020-bpo-36216-Only-print-test-messages-when-verbose-GH-12291.patch
* 021-2.7-bpo-35121-prefix-dot-in-domain-for-proper-subdom.patch
* 027-bpo-38243-Escape-the-server-title-of-DocXMLRPCServer.patch
* 028-bpo-34155-Dont-parse-domains-containing-GH-13079.patch

Patches no longer necessary and so removed:
* 017_lib2to3_fix_pyc_search.patch

Signed-off-by: Jeffery To <jeffery.to@gmail.com>
(cherry picked from 83b300aa83)
commit f184eb5f0e
parent 4d9282a4d6
14 changed files with 14 additions and 571 deletions
@@ -6,10 +6,10 @@
 #
 
 PYTHON_VERSION:=2.7
-PYTHON_VERSION_MICRO:=16
+PYTHON_VERSION_MICRO:=17
 
 PYTHON_SETUPTOOLS_PKG_RELEASE:=1
 PYTHON_PIP_PKG_RELEASE:=1
 
-PYTHON_SETUPTOOLS_VERSION:=40.6.2
-PYTHON_PIP_VERSION:=18.1
+PYTHON_SETUPTOOLS_VERSION:=41.2.0
+PYTHON_PIP_VERSION:=19.2.3
@@ -12,11 +12,11 @@ include ../python-version.mk
 
 PKG_NAME:=python
 PKG_VERSION:=$(PYTHON_VERSION).$(PYTHON_VERSION_MICRO)
-PKG_RELEASE:=4
+PKG_RELEASE:=1
 
 PKG_SOURCE:=Python-$(PKG_VERSION).tar.xz
 PKG_SOURCE_URL:=https://www.python.org/ftp/python/$(PKG_VERSION)
-PKG_HASH:=f222ef602647eecb6853681156d32de4450a2c39f4de93bd5b20235f2e660ed7
+PKG_HASH:=4d43f033cdbd0aa7b7023c81b0e986fd11e653b5248dac9144d508f11812ba41
 
 PKG_LICENSE:=PSF
 PKG_LICENSE_FILES:=LICENSE Modules/_ctypes/libffi_msvc/LICENSE Modules/_ctypes/darwin/LICENSE Modules/_ctypes/libffi/LICENSE Modules/_ctypes/libffi_osx/LICENSE Tools/pybench/LICENSE
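
Note (illustration only, not part of the commit): the new PKG_HASH above is the SHA-256 of Python-2.7.17.tar.xz and can be double-checked against a locally downloaded tarball. A minimal Python sketch, assuming the tarball sits in the current directory:

    import hashlib

    # Expected digest copied from the PKG_HASH line in the diff above.
    expected = "4d43f033cdbd0aa7b7023c81b0e986fd11e653b5248dac9144d508f11812ba41"

    h = hashlib.sha256()
    with open("Python-2.7.17.tar.xz", "rb") as f:
        # Hash in chunks so the whole tarball never has to sit in memory.
        for chunk in iter(lambda: f.read(65536), b""):
            h.update(chunk)

    print("OK" if h.hexdigest() == expected else "hash mismatch")
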
@@ -1,6 +1,6 @@
 --- a/setup.py
 +++ b/setup.py
-@@ -497,7 +497,8 @@ class PyBuildExt(build_ext):
+@@ -543,7 +543,8 @@ class PyBuildExt(build_ext):
 add_dir_to_list(dir_list, directory)
 
 if os.path.normpath(sys.prefix) != '/usr' \
@@ -1,6 +1,6 @@
 --- a/Makefile.pre.in
 +++ b/Makefile.pre.in
-@@ -1127,6 +1127,7 @@ libinstall: build_all $(srcdir)/Lib/$(PL
+@@ -1133,6 +1133,7 @@ libinstall: build_all $(srcdir)/Lib/$(PL
 done; \
 done
 $(INSTALL_DATA) $(srcdir)/LICENSE $(DESTDIR)$(LIBDEST)/LICENSE.txt
@@ -8,7 +8,7 @@
 if test -d $(DESTDIR)$(LIBDEST)/distutils/tests; then \
 $(INSTALL_DATA) $(srcdir)/Modules/xxmodule.c \
 $(DESTDIR)$(LIBDEST)/distutils/tests ; \
-@@ -1153,6 +1154,7 @@ libinstall: build_all $(srcdir)/Lib/$(PL
+@@ -1159,6 +1160,7 @@ libinstall: build_all $(srcdir)/Lib/$(PL
 $(PYTHON_FOR_BUILD) -m lib2to3.pgen2.driver $(DESTDIR)$(LIBDEST)/lib2to3/Grammar.txt
 -PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \
 $(PYTHON_FOR_BUILD) -m lib2to3.pgen2.driver $(DESTDIR)$(LIBDEST)/lib2to3/PatternGrammar.txt
@@ -3,7 +3,7 @@
 @@ -71,7 +71,7 @@ int Py_InteractiveFlag; /* Needed by Py_
  int Py_InspectFlag; /* Needed to determine whether to exit at SystemExit */
  int Py_NoSiteFlag; /* Suppress 'import site' */
- int Py_BytesWarningFlag; /* Warn on str(bytes) and str(buffer) */
+ int Py_BytesWarningFlag; /* Warn on comparison between bytearray and unicode */
 -int Py_DontWriteBytecodeFlag; /* Suppress writing bytecode files (*.py[co]) */
 +int Py_DontWriteBytecodeFlag = 1; /* Suppress writing bytecode files (*.py[co]) */
  int Py_UseClassExceptionsFlag = 1; /* Needed by bltinmodule.c: deprecated */
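
Note (illustration only): the patch refreshed above keeps defaulting Py_DontWriteBytecodeFlag to 1 so the interpreter never writes .py[co] files on the target. On a stock interpreter the same flag is switched on per run with -B (or PYTHONDONTWRITEBYTECODE), and sys.dont_write_bytecode mirrors its value; a small sketch:

    import subprocess
    import sys

    # -B sets Py_DontWriteBytecodeFlag at startup, which is the behaviour the
    # OpenWrt patch makes the default; sys.dont_write_bytecode reflects the C flag.
    out = subprocess.check_output(
        [sys.executable, "-B", "-c", "import sys; print(sys.dont_write_bytecode)"])
    print(out.decode().strip())  # True
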
@@ -1,6 +1,6 @@
 --- a/setup.py
 +++ b/setup.py
-@@ -454,13 +454,8 @@ class PyBuildExt(build_ext):
+@@ -500,13 +500,8 @@ class PyBuildExt(build_ext):
 os.unlink(tmpfile)
 
 def detect_modules(self):
@@ -1,6 +1,6 @@
 --- a/setup.py
 +++ b/setup.py
-@@ -1083,6 +1083,7 @@ class PyBuildExt(build_ext):
+@@ -1129,6 +1129,7 @@ class PyBuildExt(build_ext):
 if db_setup_debug: print "db lib: ", dblib, "not found"
 
 except db_found:
@@ -8,7 +8,7 @@
 if db_setup_debug:
 print "bsddb using BerkeleyDB lib:", db_ver, dblib
 print "bsddb lib dir:", dblib_dir, " inc dir:", db_incdir
-@@ -1097,7 +1098,7 @@ class PyBuildExt(build_ext):
+@@ -1143,7 +1144,7 @@ class PyBuildExt(build_ext):
 exts.append(Extension('_bsddb', ['_bsddb.c'],
 depends = ['bsddb.h'],
 library_dirs=dblib_dir,
@@ -17,7 +17,7 @@
 include_dirs=db_incs,
 libraries=dblibs))
 else:
-@@ -1308,10 +1309,11 @@ class PyBuildExt(build_ext):
+@@ -1354,10 +1355,11 @@ class PyBuildExt(build_ext):
 break
 elif cand == "bdb":
 if db_incs is not None:
@@ -1,6 +1,6 @@
 --- a/setup.py
 +++ b/setup.py
-@@ -463,8 +463,9 @@ class PyBuildExt(build_ext):
+@@ -509,8 +509,9 @@ class PyBuildExt(build_ext):
 # directly since an inconsistently reproducible issue comes up where
 # the environment variable is not set even though the value were passed
 # into configure and stored in the Makefile (issue found on OS X 10.3).
@@ -1,17 +0,0 @@
diff --git a/Lib/lib2to3/refactor.py b/Lib/lib2to3/refactor.py
index 7841b99..1e0d3b3 100644
--- a/Lib/lib2to3/refactor.py
+++ b/Lib/lib2to3/refactor.py
@@ -37,6 +37,12 @@ def get_all_fix_names(fixer_pkg, remove_prefix=True):
if remove_prefix:
name = name[4:]
fix_names.append(name[:-3])
+ if name.startswith("fix_") and name.endswith(".pyc"):
+ if remove_prefix:
+ name = name[4:]
+ name = name[:-4]
+ if name not in fix_names:
+ fix_names.append(name)
return fix_names
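
Note (illustration only): the dropped 017 patch taught lib2to3's get_all_fix_names() to also discover fix_*.pyc files when the fixer sources are stripped from the target; stock Python 2.7 scans fix_*.py files only. A short sketch of what the stock helper returns:

    from lib2to3.refactor import get_all_fix_names

    # Fixer names discovered as fix_*.py modules in the lib2to3.fixes package;
    # the removed patch extended this same scan to fix_*.pyc files.
    names = get_all_fix_names("lib2to3.fixes")
    print(len(names))
    print(names[:5])  # e.g. ['apply', 'basestring', 'buffer', 'callable', 'dict']
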
@@ -1,155 +0,0 @@
From 3e3669c9c41a27e1466e2c28b3906e3dd0ce3e7e Mon Sep 17 00:00:00 2001
From: Steve Dower <steve.dower@python.org>
Date: Thu, 7 Mar 2019 08:25:22 -0800
Subject: [PATCH] bpo-36216: Add check for characters in netloc that normalize
 to separators (GH-12201)

---
Doc/library/urlparse.rst | 20 ++++++++++++++++
Lib/test/test_urlparse.py | 24 +++++++++++++++++++
Lib/urlparse.py | 17 +++++++++++++
.../2019-03-06-09-38-40.bpo-36216.6q1m4a.rst | 3 +++
4 files changed, 64 insertions(+)
create mode 100644 Misc/NEWS.d/next/Security/2019-03-06-09-38-40.bpo-36216.6q1m4a.rst

diff --git a/Doc/library/urlparse.rst b/Doc/library/urlparse.rst
index 22249da54fbb..0989c88c3022 100644
--- a/Doc/library/urlparse.rst
+++ b/Doc/library/urlparse.rst
@@ -119,12 +119,22 @@ The :mod:`urlparse` module defines the following functions:
See section :ref:`urlparse-result-object` for more information on the result
object.
+ Characters in the :attr:`netloc` attribute that decompose under NFKC
+ normalization (as used by the IDNA encoding) into any of ``/``, ``?``,
+ ``#``, ``@``, or ``:`` will raise a :exc:`ValueError`. If the URL is
+ decomposed before parsing, or is not a Unicode string, no error will be
+ raised.
+
.. versionchanged:: 2.5
Added attributes to return value.
.. versionchanged:: 2.7
Added IPv6 URL parsing capabilities.
+ .. versionchanged:: 2.7.17
+ Characters that affect netloc parsing under NFKC normalization will
+ now raise :exc:`ValueError`.
+
.. function:: parse_qs(qs[, keep_blank_values[, strict_parsing[, max_num_fields]]])
@@ -232,11 +242,21 @@ The :mod:`urlparse` module defines the following functions:
See section :ref:`urlparse-result-object` for more information on the result
object.
+ Characters in the :attr:`netloc` attribute that decompose under NFKC
+ normalization (as used by the IDNA encoding) into any of ``/``, ``?``,
+ ``#``, ``@``, or ``:`` will raise a :exc:`ValueError`. If the URL is
+ decomposed before parsing, or is not a Unicode string, no error will be
+ raised.
+
.. versionadded:: 2.2
.. versionchanged:: 2.5
Added attributes to return value.
+ .. versionchanged:: 2.7.17
+ Characters that affect netloc parsing under NFKC normalization will
+ now raise :exc:`ValueError`.
+
.. function:: urlunsplit(parts)
diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py
index 4e1ded73c266..73b0228ea8e3 100644
--- a/Lib/test/test_urlparse.py
+++ b/Lib/test/test_urlparse.py
@@ -1,4 +1,6 @@
from test import test_support
+import sys
+import unicodedata
import unittest
import urlparse
@@ -624,6 +626,28 @@ def test_portseparator(self):
self.assertEqual(urlparse.urlparse("http://www.python.org:80"),
('http','www.python.org:80','','','',''))
+ def test_urlsplit_normalization(self):
+ # Certain characters should never occur in the netloc,
+ # including under normalization.
+ # Ensure that ALL of them are detected and cause an error
+ illegal_chars = u'/:#?@'
+ hex_chars = {'{:04X}'.format(ord(c)) for c in illegal_chars}
+ denorm_chars = [
+ c for c in map(unichr, range(128, sys.maxunicode))
+ if (hex_chars & set(unicodedata.decomposition(c).split()))
+ and c not in illegal_chars
+ ]
+ # Sanity check that we found at least one such character
+ self.assertIn(u'\u2100', denorm_chars)
+ self.assertIn(u'\uFF03', denorm_chars)
+
+ for scheme in [u"http", u"https", u"ftp"]:
+ for c in denorm_chars:
+ url = u"{}://netloc{}false.netloc/path".format(scheme, c)
+ print "Checking %r" % url
+ with self.assertRaises(ValueError):
+ urlparse.urlsplit(url)
+
def test_main():
test_support.run_unittest(UrlParseTestCase)
diff --git a/Lib/urlparse.py b/Lib/urlparse.py
index f7c2b032b097..54eda08651ab 100644
--- a/Lib/urlparse.py
+++ b/Lib/urlparse.py
@@ -165,6 +165,21 @@ def _splitnetloc(url, start=0):
delim = min(delim, wdelim) # use earliest delim position
return url[start:delim], url[delim:] # return (domain, rest)
+def _checknetloc(netloc):
+ if not netloc or not isinstance(netloc, unicode):
+ return
+ # looking for characters like \u2100 that expand to 'a/c'
+ # IDNA uses NFKC equivalence, so normalize for this check
+ import unicodedata
+ netloc2 = unicodedata.normalize('NFKC', netloc)
+ if netloc == netloc2:
+ return
+ _, _, netloc = netloc.rpartition('@') # anything to the left of '@' is okay
+ for c in '/?#@:':
+ if c in netloc2:
+ raise ValueError("netloc '" + netloc2 + "' contains invalid " +
+ "characters under NFKC normalization")
+
def urlsplit(url, scheme='', allow_fragments=True):
"""Parse a URL into 5 components:
<scheme>://<netloc>/<path>?<query>#<fragment>
@@ -193,6 +208,7 @@ def urlsplit(url, scheme='', allow_fragments=True):
url, fragment = url.split('#', 1)
if '?' in url:
url, query = url.split('?', 1)
+ _checknetloc(netloc)
v = SplitResult(scheme, netloc, url, query, fragment)
_parse_cache[key] = v
return v
@@ -216,6 +232,7 @@ def urlsplit(url, scheme='', allow_fragments=True):
url, fragment = url.split('#', 1)
if '?' in url:
url, query = url.split('?', 1)
+ _checknetloc(netloc)
v = SplitResult(scheme, netloc, url, query, fragment)
_parse_cache[key] = v
return v
diff --git a/Misc/NEWS.d/next/Security/2019-03-06-09-38-40.bpo-36216.6q1m4a.rst b/Misc/NEWS.d/next/Security/2019-03-06-09-38-40.bpo-36216.6q1m4a.rst
new file mode 100644
index 000000000000..1e1ad92c6feb
--- /dev/null
+++ b/Misc/NEWS.d/next/Security/2019-03-06-09-38-40.bpo-36216.6q1m4a.rst
@@ -0,0 +1,3 @@
+Changes urlsplit() to raise ValueError when the URL contains characters that
+decompose under IDNA encoding (NFKC-normalization) into characters that
+affect how the URL is parsed.
\ No newline at end of file
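
Note (illustration only): this patch is part of upstream 2.7.17, which is why it can be dropped here. The behaviour it adds is easy to exercise; for example, U+2100 (ACCOUNT OF) NFKC-normalizes to 'a/c', smuggling a '/' into the netloc. A minimal sketch under Python 2.7.17:

    # urlsplit() now rejects netloc characters that normalize to separators;
    # earlier 2.7 releases needed the patch above for this check.
    import urlparse

    url = u"https://netloc\u2100false.netloc/path"
    try:
        urlparse.urlsplit(url)
    except ValueError as exc:
        print("rejected: %s" % exc)
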
@@ -1,23 +0,0 @@
From 06b5ee585d6e76bdbb4002f642d864d860cbbd2b Mon Sep 17 00:00:00 2001
From: Steve Dower <steve.dower@python.org>
Date: Tue, 12 Mar 2019 08:23:33 -0700
Subject: [PATCH] bpo-36216: Only print test messages when verbose

---
Lib/test/test_urlparse.py | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py
index 73b0228ea8e3..1830d0b28688 100644
--- a/Lib/test/test_urlparse.py
+++ b/Lib/test/test_urlparse.py
@@ -644,7 +644,8 @@ def test_urlsplit_normalization(self):
for scheme in [u"http", u"https", u"ftp"]:
for c in denorm_chars:
url = u"{}://netloc{}false.netloc/path".format(scheme, c)
- print "Checking %r" % url
+ if test_support.verbose:
+ print "Checking %r" % url
with self.assertRaises(ValueError):
urlparse.urlsplit(url)
@@ -1,123 +0,0 @@
From 979daae300916adb399ab5b51410b6ebd0888f13 Mon Sep 17 00:00:00 2001
From: Xtreak <tir.karthi@gmail.com>
Date: Sat, 15 Jun 2019 20:59:43 +0530
Subject: [PATCH] [2.7] bpo-35121: prefix dot in domain for proper subdomain
 validation (GH-10258) (GH-13426)

This is a manual backport of ca7fe5063593958e5efdf90f068582837f07bd14 since 2.7 has `http.cookiejar` in `cookielib`

https://bugs.python.org/issue35121
---
Lib/cookielib.py | 13 ++++++--
Lib/test/test_cookielib.py | 30 +++++++++++++++++++
.../2019-05-20-00-35-12.bpo-35121.RRi-HU.rst | 4 +++
3 files changed, 45 insertions(+), 2 deletions(-)
create mode 100644 Misc/NEWS.d/next/Security/2019-05-20-00-35-12.bpo-35121.RRi-HU.rst

diff --git a/Lib/cookielib.py b/Lib/cookielib.py
index 2dd7c48728..0b471a42f2 100644
--- a/Lib/cookielib.py
+++ b/Lib/cookielib.py
@@ -1139,6 +1139,11 @@ class DefaultCookiePolicy(CookiePolicy):
req_host, erhn = eff_request_host(request)
domain = cookie.domain
+ if domain and not domain.startswith("."):
+ dotdomain = "." + domain
+ else:
+ dotdomain = domain
+
# strict check of non-domain cookies: Mozilla does this, MSIE5 doesn't
if (cookie.version == 0 and
(self.strict_ns_domain & self.DomainStrictNonDomain) and
@@ -1151,7 +1156,7 @@ class DefaultCookiePolicy(CookiePolicy):
_debug(" effective request-host name %s does not domain-match "
"RFC 2965 cookie domain %s", erhn, domain)
return False
- if cookie.version == 0 and not ("."+erhn).endswith(domain):
+ if cookie.version == 0 and not ("."+erhn).endswith(dotdomain):
_debug(" request-host %s does not match Netscape cookie domain "
"%s", req_host, domain)
return False
@@ -1165,7 +1170,11 @@ class DefaultCookiePolicy(CookiePolicy):
req_host = "."+req_host
if not erhn.startswith("."):
erhn = "."+erhn
- if not (req_host.endswith(domain) or erhn.endswith(domain)):
+ if domain and not domain.startswith("."):
+ dotdomain = "." + domain
+ else:
+ dotdomain = domain
+ if not (req_host.endswith(dotdomain) or erhn.endswith(dotdomain)):
#_debug(" request domain %s does not match cookie domain %s",
# req_host, domain)
return False
diff --git a/Lib/test/test_cookielib.py b/Lib/test/test_cookielib.py
index f2dd9727d1..7f7ff614d6 100644
--- a/Lib/test/test_cookielib.py
+++ b/Lib/test/test_cookielib.py
@@ -368,6 +368,7 @@ class CookieTests(TestCase):
("http://foo.bar.com/", ".foo.bar.com", True),
("http://foo.bar.com/", "foo.bar.com", True),
("http://foo.bar.com/", ".bar.com", True),
+ ("http://foo.bar.com/", "bar.com", True),
("http://foo.bar.com/", "com", True),
("http://foo.com/", "rhubarb.foo.com", False),
("http://foo.com/", ".foo.com", True),
@@ -378,6 +379,8 @@ class CookieTests(TestCase):
("http://foo/", "foo", True),
("http://foo/", "foo.local", True),
("http://foo/", ".local", True),
+ ("http://barfoo.com", ".foo.com", False),
+ ("http://barfoo.com", "foo.com", False),
]:
request = urllib2.Request(url)
r = pol.domain_return_ok(domain, request)
@@ -938,6 +941,33 @@ class CookieTests(TestCase):
c.add_cookie_header(req)
self.assertFalse(req.has_header("Cookie"))
+ c.clear()
+
+ pol.set_blocked_domains([])
+ req = Request("http://acme.com/")
+ res = FakeResponse(headers, "http://acme.com/")
+ cookies = c.make_cookies(res, req)
+ c.extract_cookies(res, req)
+ self.assertEqual(len(c), 1)
+
+ req = Request("http://acme.com/")
+ c.add_cookie_header(req)
+ self.assertTrue(req.has_header("Cookie"))
+
+ req = Request("http://badacme.com/")
+ c.add_cookie_header(req)
+ self.assertFalse(pol.return_ok(cookies[0], req))
+ self.assertFalse(req.has_header("Cookie"))
+
+ p = pol.set_blocked_domains(["acme.com"])
+ req = Request("http://acme.com/")
+ c.add_cookie_header(req)
+ self.assertFalse(req.has_header("Cookie"))
+
+ req = Request("http://badacme.com/")
+ c.add_cookie_header(req)
+ self.assertFalse(req.has_header("Cookie"))
+
def test_secure(self):
from cookielib import CookieJar, DefaultCookiePolicy
diff --git a/Misc/NEWS.d/next/Security/2019-05-20-00-35-12.bpo-35121.RRi-HU.rst b/Misc/NEWS.d/next/Security/2019-05-20-00-35-12.bpo-35121.RRi-HU.rst
new file mode 100644
index 0000000000..7725180616
--- /dev/null
+++ b/Misc/NEWS.d/next/Security/2019-05-20-00-35-12.bpo-35121.RRi-HU.rst
@@ -0,0 +1,4 @@
+Don't send cookies of domain A without Domain attribute to domain B when
+domain A is a suffix match of domain B while using a cookiejar with
+:class:`cookielib.DefaultCookiePolicy` policy. Patch by Karthikeyan
+Singaravelan.
--
2.20.1
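
Note (illustration only): with the fix above now shipped in 2.7.17's cookielib, a cookie Domain value without a leading dot no longer matches unrelated hosts that merely end in the same string. A short sketch against the patched module (Python 2.7):

    import cookielib
    import urllib2

    pol = cookielib.DefaultCookiePolicy()

    # "barfoo.com" is not a subdomain of "foo.com", it only ends with the same
    # text; per the test case added above, the first call returned True before
    # bpo-35121 and returns False on 2.7.17.
    print(pol.domain_return_ok("foo.com", urllib2.Request("http://barfoo.com/")))   # False
    print(pol.domain_return_ok("foo.com", urllib2.Request("http://www.foo.com/")))  # True
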
@@ -1,159 +0,0 @@
From b41cde823d026f2adc21ef14b1c2e92b1006de06 Mon Sep 17 00:00:00 2001
From: Dong-hee Na <donghee.na92@gmail.com>
Date: Sat, 28 Sep 2019 10:17:25 +0900
Subject: [PATCH 1/3] [2.7] bpo-38243: Escape the server title of
 DocXMLRPCServer when rendering

---
Lib/DocXMLRPCServer.py | 10 +++++++++-
Lib/test/test_docxmlrpc.py | 20 +++++++++++++++++++
.../2019-09-25-13-21-09.bpo-38243.1pfz24.rst | 3 +++
3 files changed, 32 insertions(+), 1 deletion(-)
create mode 100644 Misc/NEWS.d/next/Security/2019-09-25-13-21-09.bpo-38243.1pfz24.rst

diff --git a/Lib/DocXMLRPCServer.py b/Lib/DocXMLRPCServer.py
index 4064ec2e48d4d..a0e407b6318ad 100644
--- a/Lib/DocXMLRPCServer.py
+++ b/Lib/DocXMLRPCServer.py
@@ -210,7 +210,15 @@ def generate_html_documentation(self):
methods
)
- return documenter.page(self.server_title, documentation)
+ escape_table = {
+ "&": "&amp;",
+ '"': "&quot;",
+ "'": "&#x27;",
+ ">": "&gt;",
+ "<": "&lt;",
+ }
+ title = ''.join(escape_table.get(c, c) for c in self.server_title)
+ return documenter.page(title, documentation)
class DocXMLRPCRequestHandler(SimpleXMLRPCRequestHandler):
"""XML-RPC and documentation request handler class.
diff --git a/Lib/test/test_docxmlrpc.py b/Lib/test/test_docxmlrpc.py
index 4dff4159e2466..c45b892b8b3e7 100644
--- a/Lib/test/test_docxmlrpc.py
+++ b/Lib/test/test_docxmlrpc.py
@@ -1,5 +1,6 @@
from DocXMLRPCServer import DocXMLRPCServer
import httplib
+import re
import sys
from test import test_support
threading = test_support.import_module('threading')
@@ -176,6 +177,25 @@ def test_autolink_dotted_methods(self):
self.assertIn("""Try&nbsp;self.<strong>add</strong>,&nbsp;too.""",
response.read())
+ def test_server_title_escape(self):
+ """Test that the server title and documentation
+ are escaped for HTML.
+ """
+ self.serv.set_server_title('test_title<script>')
+ self.serv.set_server_documentation('test_documentation<script>')
+ self.assertEqual('test_title<script>', self.serv.server_title)
+ self.assertEqual('test_documentation<script>',
+ self.serv.server_documentation)
+
+ generated = self.serv.generate_html_documentation()
+ title = re.search(r'<title>(.+?)</title>', generated).group()
+ documentation = re.search(r'<p><tt>(.+?)</tt></p>', generated).group()
+ self.assertEqual('<title>Python: test_title&lt;script&gt;</title>',
+ title)
+ self.assertEqual('<p><tt>test_documentation<script></tt></p>',
+ documentation)
+
+
def test_main():
test_support.run_unittest(DocXMLRPCHTTPGETServer)
diff --git a/Misc/NEWS.d/next/Security/2019-09-25-13-21-09.bpo-38243.1pfz24.rst b/Misc/NEWS.d/next/Security/2019-09-25-13-21-09.bpo-38243.1pfz24.rst
new file mode 100644
index 0000000000000..8f02baed9ebe5
--- /dev/null
+++ b/Misc/NEWS.d/next/Security/2019-09-25-13-21-09.bpo-38243.1pfz24.rst
@@ -0,0 +1,3 @@
+Escape the server title of :class:`DocXMLRPCServer.DocXMLRPCServer`
+when rendering the document page as HTML.
+(Contributed by Dong-hee Na in :issue:`38243`.)

From 00251ae0244cfae1f5a77d15f3d0415c12b65ada Mon Sep 17 00:00:00 2001
From: Dong-hee Na <donghee.na92@gmail.com>
Date: Tue, 1 Oct 2019 09:31:33 +0900
Subject: [PATCH 2/3] bpo-38243:Refect victor's review

---
Lib/DocXMLRPCServer.py | 20 ++++++++++++--------
1 file changed, 12 insertions(+), 8 deletions(-)

diff --git a/Lib/DocXMLRPCServer.py b/Lib/DocXMLRPCServer.py
index a0e407b6318ad..6ab41c31b403e 100644
--- a/Lib/DocXMLRPCServer.py
+++ b/Lib/DocXMLRPCServer.py
@@ -20,6 +20,17 @@
CGIXMLRPCRequestHandler,
resolve_dotted_attribute)
+
+def _html_escape_quote(s, quote=True):
+ s = s.replace("&", "&amp;") # Must be done first!
+ s = s.replace("<", "&lt;")
+ s = s.replace(">", "&gt;")
+ if quote:
+ s = s.replace('"', "&quot;")
+ s = s.replace('\'', "&#x27;")
+ return s
+
+
class ServerHTMLDoc(pydoc.HTMLDoc):
"""Class used to generate pydoc HTML document for a server"""
@@ -210,14 +221,7 @@ def generate_html_documentation(self):
methods
)
- escape_table = {
- "&": "&amp;",
- '"': "&quot;",
- "'": "&#x27;",
- ">": "&gt;",
- "<": "&lt;",
- }
- title = ''.join(escape_table.get(c, c) for c in self.server_title)
+ title = _html_escape_quote(self.server_title)
return documenter.page(title, documentation)
class DocXMLRPCRequestHandler(SimpleXMLRPCRequestHandler):

From 09b17d8230a24586e417d52c332058f541d47999 Mon Sep 17 00:00:00 2001
From: Dong-hee Na <donghee.na92@gmail.com>
Date: Tue, 1 Oct 2019 19:35:34 +0900
Subject: [PATCH 3/3] bpo-38243: Update

---
Lib/DocXMLRPCServer.py | 7 +++----
1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/Lib/DocXMLRPCServer.py b/Lib/DocXMLRPCServer.py
index 6ab41c31b403e..90b037dd35d6b 100644
--- a/Lib/DocXMLRPCServer.py
+++ b/Lib/DocXMLRPCServer.py
@@ -21,13 +21,12 @@
resolve_dotted_attribute)
-def _html_escape_quote(s, quote=True):
+def _html_escape_quote(s):
s = s.replace("&", "&amp;") # Must be done first!
s = s.replace("<", "&lt;")
s = s.replace(">", "&gt;")
- if quote:
- s = s.replace('"', "&quot;")
- s = s.replace('\'', "&#x27;")
+ s = s.replace('"', "&quot;")
+ s = s.replace('\'', "&#x27;")
return s
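
Note (illustration only): the escaping introduced by the patches above (now upstream) boils down to five string replacements, with '&' handled first so already-produced entities are not escaped again. A standalone sketch of the same transformation:

    def html_escape(s):
        # Mirrors the _html_escape_quote() helper quoted in the removed patch;
        # '&' must be replaced first to avoid double-escaping.
        s = s.replace("&", "&amp;")
        s = s.replace("<", "&lt;")
        s = s.replace(">", "&gt;")
        s = s.replace('"', "&quot;")
        s = s.replace("'", "&#x27;")
        return s

    print(html_escape('test_title<script>alert("x")</script>'))
    # test_title&lt;script&gt;alert(&quot;x&quot;)&lt;/script&gt;
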
@@ -1,80 +0,0 @@
From c2828900ec85e1e2957016e1e078de3a9677a963 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Roberto=20C=2E=20S=C3=A1nchez?= <roberto@connexer.com>
Date: Tue, 10 Sep 2019 21:48:34 -0400
Subject: [PATCH] [2.7] bpo-34155: Dont parse domains containing @ (GH-13079)

https://bugs.python.org/issue34155
(cherry picked from commit 8cb65d1381b027f0b09ee36bfed7f35bb4dec9a9)

Excludes changes to Lib/email/_header_value_parser.py, which did not
exist in 2.7.

Co-authored-by: jpic <jpic@users.noreply.github.com>
---
Lib/email/_parseaddr.py | 11 ++++++++++-
Lib/email/test/test_email.py | 14 ++++++++++++++
.../2019-05-04-13-33-37.bpo-34155.MJll68.rst | 1 +
3 files changed, 25 insertions(+), 1 deletion(-)
create mode 100644 Misc/NEWS.d/next/Security/2019-05-04-13-33-37.bpo-34155.MJll68.rst

diff --git a/Lib/email/_parseaddr.py b/Lib/email/_parseaddr.py
index 690db2c22d34d..dc49d2e45a5eb 100644
--- a/Lib/email/_parseaddr.py
+++ b/Lib/email/_parseaddr.py
@@ -336,7 +336,12 @@ def getaddrspec(self):
aslist.append('@')
self.pos += 1
self.gotonext()
- return EMPTYSTRING.join(aslist) + self.getdomain()
+ domain = self.getdomain()
+ if not domain:
+ # Invalid domain, return an empty address instead of returning a
+ # local part to denote failed parsing.
+ return EMPTYSTRING
+ return EMPTYSTRING.join(aslist) + domain
def getdomain(self):
"""Get the complete domain name from an address."""
@@ -351,6 +356,10 @@ def getdomain(self):
elif self.field[self.pos] == '.':
self.pos += 1
sdlist.append('.')
+ elif self.field[self.pos] == '@':
+ # bpo-34155: Don't parse domains with two `@` like
+ # `a@malicious.org@important.com`.
+ return EMPTYSTRING
elif self.field[self.pos] in self.atomends:
break
else:
diff --git a/Lib/email/test/test_email.py b/Lib/email/test/test_email.py
index 4b4dee3d34644..2efe44ac5a73f 100644
--- a/Lib/email/test/test_email.py
+++ b/Lib/email/test/test_email.py
@@ -2306,6 +2306,20 @@ def test_parseaddr_empty(self):
self.assertEqual(Utils.parseaddr('<>'), ('', ''))
self.assertEqual(Utils.formataddr(Utils.parseaddr('<>')), '')
+ def test_parseaddr_multiple_domains(self):
+ self.assertEqual(
+ Utils.parseaddr('a@b@c'),
+ ('', '')
+ )
+ self.assertEqual(
+ Utils.parseaddr('a@b.c@c'),
+ ('', '')
+ )
+ self.assertEqual(
+ Utils.parseaddr('a@172.17.0.1@c'),
+ ('', '')
+ )
+
def test_noquote_dump(self):
self.assertEqual(
Utils.formataddr(('A Silly Person', 'person@dom.ain')),
diff --git a/Misc/NEWS.d/next/Security/2019-05-04-13-33-37.bpo-34155.MJll68.rst b/Misc/NEWS.d/next/Security/2019-05-04-13-33-37.bpo-34155.MJll68.rst
new file mode 100644
index 0000000000000..50292e29ed1d2
--- /dev/null
+++ b/Misc/NEWS.d/next/Security/2019-05-04-13-33-37.bpo-34155.MJll68.rst
@@ -0,0 +1 @@
+Fix parsing of invalid email addresses with more than one ``@`` (e.g. a@b@c.com.) to not return the part before 2nd ``@`` as valid email address. Patch by maxking & jpic.
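
Note (illustration only): with bpo-34155 now in 2.7.17, addresses containing a second '@' are treated as a failed parse instead of yielding the part before it. A short sketch (Python 2.7):

    from email.utils import parseaddr

    # Per the NEWS entry above, the part before the second '@' is no longer
    # returned as a valid address.
    print(parseaddr('a@malicious.org@important.com'))  # ('', '') on 2.7.17
    print(parseaddr('A Person <person@example.com>'))  # ('A Person', 'person@example.com')
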