Initialize for python3
This commit is contained in:
commit
832481301f
57 changed files with 22057 additions and 0 deletions
1
.gitignore
vendored
Normal file
1
.gitignore
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
Python-3.6.15.tar.xz
|
1
.python3.metadata
Normal file
1
.python3.metadata
Normal file
|
@ -0,0 +1 @@
|
|||
9080d18e67d5e605d18fbad97f24d3528bcf5ca41f212ae7f3508ab6632fa5c4 Python-3.6.15.tar.xz
|
41
0001-allow-for-reproducible-builds-of-python-packages.patch
Normal file
41
0001-allow-for-reproducible-builds-of-python-packages.patch
Normal file
|
@ -0,0 +1,41 @@
|
|||
From 0d6dde67a01878b0d708b4216e2b31f1c76544fb Mon Sep 17 00:00:00 2001
|
||||
From: "Bernhard M. Wiedemann" <bwiedemann@suse.de>
|
||||
Date: Sat, 25 Feb 2017 06:42:28 +0100
|
||||
Subject: [PATCH] allow for reproducible builds of python packages
|
||||
|
||||
See https://reproducible-builds.org/ for why this is good
|
||||
and https://reproducible-builds.org/specs/source-date-epoch/
|
||||
for the definition of this variable.
|
||||
|
||||
Background:
|
||||
In some distributions like openSUSE, binary rpms contain precompiled .pyc files.
|
||||
|
||||
And packages like amqp or twisted dynamically generate .py files at build time
|
||||
so those have the current time and that timestamp gets embedded
|
||||
into the .pyc file header.
|
||||
When we then adapt file timestamps in rpms to be constant,
|
||||
the timestamp in the .pyc header will no more match
|
||||
the .py timestamp in the filesystem.
|
||||
The software will still work, but it will not use the .pyc file as it should.
|
||||
---
|
||||
Lib/py_compile.py | 4 ++++
|
||||
1 file changed, 4 insertions(+)
|
||||
|
||||
Index: Python-3.6.15/Lib/py_compile.py
|
||||
===================================================================
|
||||
--- Python-3.6.15.orig/Lib/py_compile.py
|
||||
+++ Python-3.6.15/Lib/py_compile.py
|
||||
@@ -137,6 +137,13 @@ def compile(file, cfile=None, dfile=None
|
||||
except FileExistsError:
|
||||
pass
|
||||
source_stats = loader.path_stats(file)
|
||||
+ sde = os.environ.get('SOURCE_DATE_EPOCH')
|
||||
+ if sde and source_stats['mtime'] > int(sde):
|
||||
+ source_stats['mtime'] = int(sde)
|
||||
+ try:
|
||||
+ os.utime(file, (source_stats['mtime'], source_stats['mtime']))
|
||||
+ except PermissionError:
|
||||
+ pass
|
||||
bytecode = importlib._bootstrap_external._code_to_bytecode(
|
||||
code, source_stats['mtime'], source_stats['size'])
|
||||
mode = importlib._bootstrap_external._calc_mode(file)
|
43
99366-patch.dict-can-decorate-async.patch
Normal file
43
99366-patch.dict-can-decorate-async.patch
Normal file
|
@ -0,0 +1,43 @@
|
|||
---
|
||||
Lib/unittest/mock.py | 18 ++++++++++
|
||||
Misc/NEWS.d/next/Library/2022-10-08-19-39-27.gh-issue-98086.y---WC.rst | 1
|
||||
2 files changed, 19 insertions(+)
|
||||
|
||||
--- a/Lib/unittest/mock.py
|
||||
+++ b/Lib/unittest/mock.py
|
||||
@@ -1595,6 +1595,12 @@ class _patch_dict(object):
|
||||
def __call__(self, f):
|
||||
if isinstance(f, type):
|
||||
return self.decorate_class(f)
|
||||
+ if inspect.iscoroutinefunction(f):
|
||||
+ return self.decorate_async_callable(f)
|
||||
+ return self.decorate_callable(f)
|
||||
+
|
||||
+
|
||||
+ def decorate_callable(self, f):
|
||||
@wraps(f)
|
||||
def _inner(*args, **kw):
|
||||
self._patch_dict()
|
||||
@@ -1603,6 +1609,18 @@ class _patch_dict(object):
|
||||
finally:
|
||||
self._unpatch_dict()
|
||||
|
||||
+ return _inner
|
||||
+
|
||||
+
|
||||
+ def decorate_async_callable(self, f):
|
||||
+ @wraps(f)
|
||||
+ async def _inner(*args, **kw):
|
||||
+ self._patch_dict()
|
||||
+ try:
|
||||
+ return await f(*args, **kw)
|
||||
+ finally:
|
||||
+ self._unpatch_dict()
|
||||
+
|
||||
return _inner
|
||||
|
||||
|
||||
--- /dev/null
|
||||
+++ b/Misc/NEWS.d/next/Library/2022-10-08-19-39-27.gh-issue-98086.y---WC.rst
|
||||
@@ -0,0 +1 @@
|
||||
+Make sure ``patch.dict()`` can be applied on async functions.
|
2598
CVE-2007-4559-filter-tarfile_extractall.patch
Normal file
2598
CVE-2007-4559-filter-tarfile_extractall.patch
Normal file
File diff suppressed because it is too large
Load diff
136
CVE-2015-20107-mailcap-unsafe-filenames.patch
Normal file
136
CVE-2015-20107-mailcap-unsafe-filenames.patch
Normal file
|
@ -0,0 +1,136 @@
|
|||
From c3e7f139b440d7424986204e9f3fc2275aea3377 Mon Sep 17 00:00:00 2001
|
||||
From: Petr Viktorin <encukou@gmail.com>
|
||||
Date: Wed, 27 Apr 2022 18:17:33 +0200
|
||||
Subject: [PATCH 1/4] gh-68966: Make mailcap refuse to match unsafe
|
||||
filenames/types/params
|
||||
|
||||
---
|
||||
Doc/library/mailcap.rst | 12 ++++
|
||||
Lib/mailcap.py | 26 +++++++++-
|
||||
Lib/test/test_mailcap.py | 8 ++-
|
||||
Misc/NEWS.d/next/Security/2022-04-27-18-25-30.gh-issue-68966.gjS8zs.rst | 4 +
|
||||
4 files changed, 46 insertions(+), 4 deletions(-)
|
||||
|
||||
--- a/Doc/library/mailcap.rst
|
||||
+++ b/Doc/library/mailcap.rst
|
||||
@@ -54,6 +54,18 @@ standard. However, mailcap files are su
|
||||
use) to determine whether or not the mailcap line applies. :func:`findmatch`
|
||||
will automatically check such conditions and skip the entry if the check fails.
|
||||
|
||||
+ .. versionchanged:: 3.11
|
||||
+
|
||||
+ To prevent security issues with shell metacharacters (symbols that have
|
||||
+ special effects in a shell command line), ``findmatch`` will refuse
|
||||
+ to inject ASCII characters other than alphanumerics and ``@+=:,./-_``
|
||||
+ into the returned command line.
|
||||
+
|
||||
+ If a disallowed character appears in *filename*, ``findmatch`` will always
|
||||
+ return ``(None, None)`` as if no entry was found.
|
||||
+ If such a character appears elsewhere (a value in *plist* or in *MIMEtype*),
|
||||
+ ``findmatch`` will ignore all mailcap entries which use that value.
|
||||
+ A :mod:`warning <warnings>` will be raised in either case.
|
||||
|
||||
.. function:: getcaps()
|
||||
|
||||
--- a/Lib/mailcap.py
|
||||
+++ b/Lib/mailcap.py
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
import os
|
||||
import warnings
|
||||
+import re
|
||||
|
||||
__all__ = ["getcaps","findmatch"]
|
||||
|
||||
@@ -13,6 +14,11 @@ def lineno_sort_key(entry):
|
||||
else:
|
||||
return 1, 0
|
||||
|
||||
+_find_unsafe = re.compile(r'[^\xa1-\U0010FFFF\w@+=:,./-]').search
|
||||
+
|
||||
+class UnsafeMailcapInput(Warning):
|
||||
+ """Warning raised when refusing unsafe input"""
|
||||
+
|
||||
|
||||
# Part 1: top-level interface.
|
||||
|
||||
@@ -165,15 +171,22 @@ def findmatch(caps, MIMEtype, key='view'
|
||||
entry to use.
|
||||
|
||||
"""
|
||||
+ if _find_unsafe(filename):
|
||||
+ msg = "Refusing to use mailcap with filename %r. Use a safe temporary filename." % (filename,)
|
||||
+ warnings.warn(msg, UnsafeMailcapInput)
|
||||
+ return None, None
|
||||
entries = lookup(caps, MIMEtype, key)
|
||||
# XXX This code should somehow check for the needsterminal flag.
|
||||
for e in entries:
|
||||
if 'test' in e:
|
||||
test = subst(e['test'], filename, plist)
|
||||
+ if test is None:
|
||||
+ continue
|
||||
if test and os.system(test) != 0:
|
||||
continue
|
||||
command = subst(e[key], MIMEtype, filename, plist)
|
||||
- return command, e
|
||||
+ if command is not None:
|
||||
+ return command, e
|
||||
return None, None
|
||||
|
||||
def lookup(caps, MIMEtype, key=None):
|
||||
@@ -206,6 +219,10 @@ def subst(field, MIMEtype, filename, pli
|
||||
elif c == 's':
|
||||
res = res + filename
|
||||
elif c == 't':
|
||||
+ if _find_unsafe(MIMEtype):
|
||||
+ msg = "Refusing to substitute MIME type %r into a shell command." % (MIMEtype,)
|
||||
+ warnings.warn(msg, UnsafeMailcapInput)
|
||||
+ return None
|
||||
res = res + MIMEtype
|
||||
elif c == '{':
|
||||
start = i
|
||||
@@ -213,7 +230,12 @@ def subst(field, MIMEtype, filename, pli
|
||||
i = i+1
|
||||
name = field[start:i]
|
||||
i = i+1
|
||||
- res = res + findparam(name, plist)
|
||||
+ param = findparam(name, plist)
|
||||
+ if _find_unsafe(param):
|
||||
+ msg = "Refusing to substitute parameter %r (%s) into a shell command" % (param, name)
|
||||
+ warnings.warn(msg, UnsafeMailcapInput)
|
||||
+ return None
|
||||
+ res = res + param
|
||||
# XXX To do:
|
||||
# %n == number of parts if type is multipart/*
|
||||
# %F == list of alternating type and filename for parts
|
||||
--- a/Lib/test/test_mailcap.py
|
||||
+++ b/Lib/test/test_mailcap.py
|
||||
@@ -121,7 +121,8 @@ class HelperFunctionTest(unittest.TestCa
|
||||
(["", "audio/*", "foo.txt"], ""),
|
||||
(["echo foo", "audio/*", "foo.txt"], "echo foo"),
|
||||
(["echo %s", "audio/*", "foo.txt"], "echo foo.txt"),
|
||||
- (["echo %t", "audio/*", "foo.txt"], "echo audio/*"),
|
||||
+ (["echo %t", "audio/*", "foo.txt"], None),
|
||||
+ (["echo %t", "audio/wav", "foo.txt"], "echo audio/wav"),
|
||||
(["echo \\%t", "audio/*", "foo.txt"], "echo %t"),
|
||||
(["echo foo", "audio/*", "foo.txt", plist], "echo foo"),
|
||||
(["echo %{total}", "audio/*", "foo.txt", plist], "echo 3")
|
||||
@@ -205,7 +206,10 @@ class FindmatchTest(unittest.TestCase):
|
||||
('"An audio fragment"', audio_basic_entry)),
|
||||
([c, "audio/*"],
|
||||
{"filename": fname},
|
||||
- ("/usr/local/bin/showaudio audio/*", audio_entry)),
|
||||
+ (None, None)),
|
||||
+ ([c, "audio/wav"],
|
||||
+ {"filename": fname},
|
||||
+ ("/usr/local/bin/showaudio audio/wav", audio_entry)),
|
||||
([c, "message/external-body"],
|
||||
{"plist": plist},
|
||||
("showexternal /dev/null default john python.org /tmp foo bar", message_entry))
|
||||
--- /dev/null
|
||||
+++ b/Misc/NEWS.d/next/Security/2022-04-27-18-25-30.gh-issue-68966.gjS8zs.rst
|
||||
@@ -0,0 +1,4 @@
|
||||
+The deprecated mailcap module now refuses to inject unsafe text (filenames,
|
||||
+MIME types, parameters) into shell commands. Instead of using such text, it
|
||||
+will warn and act as if a match was not found (or for test commands, as if
|
||||
+the test failed).
|
69
CVE-2019-9674-zip-bomb.patch
Normal file
69
CVE-2019-9674-zip-bomb.patch
Normal file
|
@ -0,0 +1,69 @@
|
|||
From b73fe12d4d85fc92e4b9658e417046b68fb68ecc Mon Sep 17 00:00:00 2001
|
||||
From: nick sung <sungboss2004@gmail.com>
|
||||
Date: Fri, 17 May 2019 15:45:31 +0800
|
||||
Subject: [PATCH 1/4] bpo-36260: Add pitfalls to zipfile module documentation
|
||||
|
||||
We saw vulnerability warning description (including zip bomb) in Doc/library/xml.rst file.
|
||||
This gave us the idea of documentation improvement.
|
||||
|
||||
So, we moved a little bit forward :P
|
||||
And the doc patch can be found (pr).
|
||||
---
|
||||
Doc/library/zipfile.rst | 29 +++++++++++++++++++++++++++++
|
||||
1 file changed, 29 insertions(+)
|
||||
|
||||
--- a/Doc/library/zipfile.rst
|
||||
+++ b/Doc/library/zipfile.rst
|
||||
@@ -706,5 +706,47 @@ Command-line options
|
||||
|
||||
Test whether the zipfile is valid or not.
|
||||
|
||||
+Decompression pitfalls
|
||||
+----------------------
|
||||
|
||||
+The extraction in zipfile module might fail due to some pitfalls
|
||||
+listed below.
|
||||
+
|
||||
+From file itself
|
||||
+~~~~~~~~~~~~~~~~
|
||||
+
|
||||
+Decompression may fail due to incorrect password / CRC checksum
|
||||
+/ ZIP format or unsupported compression method / decryption.
|
||||
+
|
||||
+File System limitations
|
||||
+~~~~~~~~~~~~~~~~~~~~~~~
|
||||
+
|
||||
+Exceeding limitations on different file systems can cause
|
||||
+decompression failed. Such as allowable characters in the
|
||||
+directory entries, length of the file name, length of the
|
||||
+pathname, size of a single file, and number of files, etc.
|
||||
+
|
||||
+Resources limitations
|
||||
+~~~~~~~~~~~~~~~~~~~~~
|
||||
+
|
||||
+The lack of memory or disk volume would lead to decompression
|
||||
+failed. For example, decompression bombs (aka `ZIP bomb`_) apply
|
||||
+to zipfile library that can cause disk volume exhaustion.
|
||||
+
|
||||
+Interruption
|
||||
+~~~~~~~~~~~~
|
||||
+
|
||||
+Interruption during the decompression, such as pressing control-C
|
||||
+or killing the decompression process may result in incomplete
|
||||
+decompression of the archive.
|
||||
+
|
||||
+Default behaviors of extraction
|
||||
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
+
|
||||
+Not knowing the default extraction behaviors can cause unexpected
|
||||
+decompression results. For example, when extracting the same
|
||||
+archive twice, it overwrites files without asking.
|
||||
+
|
||||
+
|
||||
+.. _ZIP bomb: https://en.wikipedia.org/wiki/Zip_bomb
|
||||
.. _PKZIP Application Note: https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT
|
||||
--- /dev/null
|
||||
+++ b/Misc/NEWS.d/next/Documentation/2019-06-04-09-29-00.bpo-36260.WrGuc-.rst
|
||||
@@ -0,0 +1 @@
|
||||
+Add decompression pitfalls to zipfile module documentation.
|
||||
\ No newline at end of file
|
1359
CVE-2020-10735-DoS-no-limit-int-size.patch
Normal file
1359
CVE-2020-10735-DoS-no-limit-int-size.patch
Normal file
File diff suppressed because it is too large
Load diff
118
CVE-2021-28861-double-slash-path.patch
Normal file
118
CVE-2021-28861-double-slash-path.patch
Normal file
|
@ -0,0 +1,118 @@
|
|||
From d01648738934922d413b65f2f97951cbab66e0bd Mon Sep 17 00:00:00 2001
|
||||
From: "Gregory P. Smith" <greg@krypto.org>
|
||||
Date: Tue, 21 Jun 2022 13:16:57 -0700
|
||||
Subject: [PATCH] gh-87389: Fix an open redirection vulnerability in
|
||||
http.server. (GH-93879)
|
||||
|
||||
Fix an open redirection vulnerability in the `http.server` module when
|
||||
an URI path starts with `//` that could produce a 301 Location header
|
||||
with a misleading target. Vulnerability discovered, and logic fix
|
||||
proposed, by Hamza Avvan (@hamzaavvan).
|
||||
|
||||
Test and comments authored by Gregory P. Smith [Google].
|
||||
(cherry picked from commit 4abab6b603dd38bec1168e9a37c40a48ec89508e)
|
||||
|
||||
Co-authored-by: Gregory P. Smith <greg@krypto.org>
|
||||
---
|
||||
Lib/http/server.py | 7 +++
|
||||
Lib/test/test_httpservers.py | 53 ++++++++++++++++++-
|
||||
...2-06-15-20-09-23.gh-issue-87389.QVaC3f.rst | 3 ++
|
||||
3 files changed, 61 insertions(+), 2 deletions(-)
|
||||
create mode 100644 Misc/NEWS.d/next/Security/2022-06-15-20-09-23.gh-issue-87389.QVaC3f.rst
|
||||
|
||||
Index: Python-3.6.15/Lib/http/server.py
|
||||
===================================================================
|
||||
--- Python-3.6.15.orig/Lib/http/server.py
|
||||
+++ Python-3.6.15/Lib/http/server.py
|
||||
@@ -323,6 +323,13 @@ class BaseHTTPRequestHandler(socketserve
|
||||
return False
|
||||
self.command, self.path, self.request_version = command, path, version
|
||||
|
||||
+ # gh-87389: The purpose of replacing '//' with '/' is to protect
|
||||
+ # against open redirect attacks possibly triggered if the path starts
|
||||
+ # with '//' because http clients treat //path as an absolute URI
|
||||
+ # without scheme (similar to http://path) rather than a path.
|
||||
+ if self.path.startswith('//'):
|
||||
+ self.path = '/' + self.path.lstrip('/') # Reduce to a single /
|
||||
+
|
||||
# Examine the headers and look for a Connection directive.
|
||||
try:
|
||||
self.headers = http.client.parse_headers(self.rfile,
|
||||
Index: Python-3.6.15/Lib/test/test_httpservers.py
|
||||
===================================================================
|
||||
--- Python-3.6.15.orig/Lib/test/test_httpservers.py
|
||||
+++ Python-3.6.15/Lib/test/test_httpservers.py
|
||||
@@ -324,7 +324,7 @@ class SimpleHTTPServerTestCase(BaseTestC
|
||||
pass
|
||||
|
||||
def setUp(self):
|
||||
- BaseTestCase.setUp(self)
|
||||
+ super().setUp()
|
||||
self.cwd = os.getcwd()
|
||||
basetempdir = tempfile.gettempdir()
|
||||
os.chdir(basetempdir)
|
||||
@@ -343,7 +343,7 @@ class SimpleHTTPServerTestCase(BaseTestC
|
||||
except:
|
||||
pass
|
||||
finally:
|
||||
- BaseTestCase.tearDown(self)
|
||||
+ super().tearDown()
|
||||
|
||||
def check_status_and_reason(self, response, status, data=None):
|
||||
def close_conn():
|
||||
@@ -399,6 +399,55 @@ class SimpleHTTPServerTestCase(BaseTestC
|
||||
self.check_status_and_reason(response, HTTPStatus.OK,
|
||||
data=support.TESTFN_UNDECODABLE)
|
||||
|
||||
+ def test_get_dir_redirect_location_domain_injection_bug(self):
|
||||
+ """Ensure //evil.co/..%2f../../X does not put //evil.co/ in Location.
|
||||
+
|
||||
+ //netloc/ in a Location header is a redirect to a new host.
|
||||
+ https://github.com/python/cpython/issues/87389
|
||||
+
|
||||
+ This checks that a path resolving to a directory on our server cannot
|
||||
+ resolve into a redirect to another server.
|
||||
+ """
|
||||
+ os.mkdir(os.path.join(self.tempdir, 'existing_directory'))
|
||||
+ url = f'/python.org/..%2f..%2f..%2f..%2f..%2f../%0a%0d/../{self.tempdir_name}/existing_directory'
|
||||
+ expected_location = f'{url}/' # /python.org.../ single slash single prefix, trailing slash
|
||||
+ # Canonicalizes to /tmp/tempdir_name/existing_directory which does
|
||||
+ # exist and is a dir, triggering the 301 redirect logic.
|
||||
+ response = self.request(url)
|
||||
+ self.check_status_and_reason(response, HTTPStatus.MOVED_PERMANENTLY)
|
||||
+ location = response.getheader('Location')
|
||||
+ self.assertEqual(location, expected_location, msg='non-attack failed!')
|
||||
+
|
||||
+ # //python.org... multi-slash prefix, no trailing slash
|
||||
+ attack_url = f'/{url}'
|
||||
+ response = self.request(attack_url)
|
||||
+ self.check_status_and_reason(response, HTTPStatus.MOVED_PERMANENTLY)
|
||||
+ location = response.getheader('Location')
|
||||
+ self.assertFalse(location.startswith('//'), msg=location)
|
||||
+ self.assertEqual(location, expected_location,
|
||||
+ msg='Expected Location header to start with a single / and '
|
||||
+ 'end with a / as this is a directory redirect.')
|
||||
+
|
||||
+ # ///python.org... triple-slash prefix, no trailing slash
|
||||
+ attack3_url = f'//{url}'
|
||||
+ response = self.request(attack3_url)
|
||||
+ self.check_status_and_reason(response, HTTPStatus.MOVED_PERMANENTLY)
|
||||
+ self.assertEqual(response.getheader('Location'), expected_location)
|
||||
+
|
||||
+ # If the second word in the http request (Request-URI for the http
|
||||
+ # method) is a full URI, we don't worry about it, as that'll be parsed
|
||||
+ # and reassembled as a full URI within BaseHTTPRequestHandler.send_head
|
||||
+ # so no errant scheme-less //netloc//evil.co/ domain mixup can happen.
|
||||
+ attack_scheme_netloc_2slash_url = f'https://pypi.org/{url}'
|
||||
+ expected_scheme_netloc_location = f'{attack_scheme_netloc_2slash_url}/'
|
||||
+ response = self.request(attack_scheme_netloc_2slash_url)
|
||||
+ self.check_status_and_reason(response, HTTPStatus.MOVED_PERMANENTLY)
|
||||
+ location = response.getheader('Location')
|
||||
+ # We're just ensuring that the scheme and domain make it through, if
|
||||
+ # there are or aren't multiple slashes at the start of the path that
|
||||
+ # follows that isn't important in this Location: header.
|
||||
+ self.assertTrue(location.startswith('https://pypi.org/'), msg=location)
|
||||
+
|
||||
def test_get(self):
|
||||
#constructs the path relative to the root directory of the HTTPServer
|
||||
response = self.request(self.base_url + '/test')
|
93
CVE-2022-37454-sha3-buffer-overflow.patch
Normal file
93
CVE-2022-37454-sha3-buffer-overflow.patch
Normal file
|
@ -0,0 +1,93 @@
|
|||
From ede8c1b94d43c4611361a47335306bab44b5e2e3 Mon Sep 17 00:00:00 2001
|
||||
From: Theo Buehler <botovq@users.noreply.github.com>
|
||||
Date: Fri, 21 Oct 2022 21:26:01 +0200
|
||||
Subject: [PATCH] [3.10] gh-98517: Fix buffer overflows in _sha3 module
|
||||
(GH-98519)
|
||||
|
||||
This is a port of the applicable part of XKCP's fix [1] for
|
||||
CVE-2022-37454 and avoids the segmentation fault and the infinite
|
||||
loop in the test cases published in [2].
|
||||
|
||||
[1]: https://github.com/XKCP/XKCP/commit/fdc6fef075f4e81d6b1bc38364248975e08e340a
|
||||
[2]: https://mouha.be/sha-3-buffer-overflow/
|
||||
|
||||
Regression test added by: Gregory P. Smith [Google LLC] <greg@krypto.org>
|
||||
(cherry picked from commit 0e4e058602d93b88256ff90bbef501ba20be9dd3)
|
||||
|
||||
Co-authored-by: Theo Buehler <botovq@users.noreply.github.com>
|
||||
---
|
||||
Lib/test/test_hashlib.py | 9 ++++++
|
||||
Misc/NEWS.d/next/Security/2022-10-21-13-31-47.gh-issue-98517.SXXGfV.rst | 1
|
||||
Modules/_sha3/kcp/KeccakSponge.inc | 15 +++++-----
|
||||
3 files changed, 18 insertions(+), 7 deletions(-)
|
||||
create mode 100644 Misc/NEWS.d/next/Security/2022-10-21-13-31-47.gh-issue-98517.SXXGfV.rst
|
||||
|
||||
--- a/Lib/test/test_hashlib.py
|
||||
+++ b/Lib/test/test_hashlib.py
|
||||
@@ -418,6 +418,15 @@ class HashLibTestCase(unittest.TestCase)
|
||||
def test_case_md5_uintmax(self, size):
|
||||
self.check('md5', b'A'*size, '28138d306ff1b8281f1a9067e1a1a2b3')
|
||||
|
||||
+ @unittest.skipIf(sys.maxsize < _4G - 1, 'test cannot run on 32-bit systems')
|
||||
+ @bigmemtest(size=_4G - 1, memuse=1, dry_run=False)
|
||||
+ def test_sha3_update_overflow(self, size):
|
||||
+ """Regression test for gh-98517 CVE-2022-37454."""
|
||||
+ h = hashlib.sha3_224()
|
||||
+ h.update(b'\x01')
|
||||
+ h.update(b'\x01'*0xffff_ffff)
|
||||
+ self.assertEqual(h.hexdigest(), '80762e8ce6700f114fec0f621fd97c4b9c00147fa052215294cceeed')
|
||||
+
|
||||
# use the three examples from Federal Information Processing Standards
|
||||
# Publication 180-1, Secure Hash Standard, 1995 April 17
|
||||
# http://www.itl.nist.gov/div897/pubs/fip180-1.htm
|
||||
--- /dev/null
|
||||
+++ b/Misc/NEWS.d/next/Security/2022-10-21-13-31-47.gh-issue-98517.SXXGfV.rst
|
||||
@@ -0,0 +1 @@
|
||||
+Port XKCP's fix for the buffer overflows in SHA-3 (CVE-2022-37454).
|
||||
--- a/Modules/_sha3/kcp/KeccakSponge.inc
|
||||
+++ b/Modules/_sha3/kcp/KeccakSponge.inc
|
||||
@@ -171,7 +171,7 @@ int SpongeAbsorb(SpongeInstance *instanc
|
||||
i = 0;
|
||||
curData = data;
|
||||
while(i < dataByteLen) {
|
||||
- if ((instance->byteIOIndex == 0) && (dataByteLen >= (i + rateInBytes))) {
|
||||
+ if ((instance->byteIOIndex == 0) && (dataByteLen-i >= rateInBytes)) {
|
||||
#ifdef SnP_FastLoop_Absorb
|
||||
/* processing full blocks first */
|
||||
|
||||
@@ -199,10 +199,10 @@ int SpongeAbsorb(SpongeInstance *instanc
|
||||
}
|
||||
else {
|
||||
/* normal lane: using the message queue */
|
||||
-
|
||||
- partialBlock = (unsigned int)(dataByteLen - i);
|
||||
- if (partialBlock+instance->byteIOIndex > rateInBytes)
|
||||
+ if (dataByteLen-i > rateInBytes-instance->byteIOIndex)
|
||||
partialBlock = rateInBytes-instance->byteIOIndex;
|
||||
+ else
|
||||
+ partialBlock = (unsigned int)(dataByteLen - i);
|
||||
#ifdef KeccakReference
|
||||
displayBytes(1, "Block to be absorbed (part)", curData, partialBlock);
|
||||
#endif
|
||||
@@ -281,7 +281,7 @@ int SpongeSqueeze(SpongeInstance *instan
|
||||
i = 0;
|
||||
curData = data;
|
||||
while(i < dataByteLen) {
|
||||
- if ((instance->byteIOIndex == rateInBytes) && (dataByteLen >= (i + rateInBytes))) {
|
||||
+ if ((instance->byteIOIndex == rateInBytes) && (dataByteLen-i >= rateInBytes)) {
|
||||
for(j=dataByteLen-i; j>=rateInBytes; j-=rateInBytes) {
|
||||
SnP_Permute(instance->state);
|
||||
SnP_ExtractBytes(instance->state, curData, 0, rateInBytes);
|
||||
@@ -299,9 +299,10 @@ int SpongeSqueeze(SpongeInstance *instan
|
||||
SnP_Permute(instance->state);
|
||||
instance->byteIOIndex = 0;
|
||||
}
|
||||
- partialBlock = (unsigned int)(dataByteLen - i);
|
||||
- if (partialBlock+instance->byteIOIndex > rateInBytes)
|
||||
+ if (dataByteLen-i > rateInBytes-instance->byteIOIndex)
|
||||
partialBlock = rateInBytes-instance->byteIOIndex;
|
||||
+ else
|
||||
+ partialBlock = (unsigned int)(dataByteLen - i);
|
||||
i += partialBlock;
|
||||
|
||||
SnP_ExtractBytes(instance->state, curData, instance->byteIOIndex, partialBlock);
|
88
CVE-2022-45061-DoS-by-IDNA-decode.patch
Normal file
88
CVE-2022-45061-DoS-by-IDNA-decode.patch
Normal file
|
@ -0,0 +1,88 @@
|
|||
From fa792ddee55dc02c6392842c8194a464339f6f1b Mon Sep 17 00:00:00 2001
|
||||
From: "Miss Islington (bot)"
|
||||
<31488909+miss-islington@users.noreply.github.com>
|
||||
Date: Mon, 7 Nov 2022 18:57:10 -0800
|
||||
Subject: [PATCH] [3.11] gh-98433: Fix quadratic time idna decoding. (GH-99092)
|
||||
(GH-99222)
|
||||
|
||||
There was an unnecessary quadratic loop in idna decoding. This restores
|
||||
the behavior to linear.
|
||||
|
||||
(cherry picked from commit d315722564927c7202dd6e111dc79eaf14240b0d)
|
||||
|
||||
(cherry picked from commit a6f6c3a3d6f2b580f2d87885c9b8a9350ad7bf15)
|
||||
|
||||
Co-authored-by: Miss Islington (bot) <31488909+miss-islington@users.noreply.github.com>
|
||||
Co-authored-by: Gregory P. Smith <greg@krypto.org>
|
||||
---
|
||||
Lib/encodings/idna.py | 32 ++++------
|
||||
Lib/test/test_codecs.py | 6 +
|
||||
Misc/NEWS.d/next/Security/2022-11-04-09-29-36.gh-issue-98433.l76c5G.rst | 6 +
|
||||
3 files changed, 27 insertions(+), 17 deletions(-)
|
||||
create mode 100644 Misc/NEWS.d/next/Security/2022-11-04-09-29-36.gh-issue-98433.l76c5G.rst
|
||||
|
||||
--- a/Lib/encodings/idna.py
|
||||
+++ b/Lib/encodings/idna.py
|
||||
@@ -39,23 +39,21 @@ def nameprep(label):
|
||||
|
||||
# Check bidi
|
||||
RandAL = [stringprep.in_table_d1(x) for x in label]
|
||||
- for c in RandAL:
|
||||
- if c:
|
||||
- # There is a RandAL char in the string. Must perform further
|
||||
- # tests:
|
||||
- # 1) The characters in section 5.8 MUST be prohibited.
|
||||
- # This is table C.8, which was already checked
|
||||
- # 2) If a string contains any RandALCat character, the string
|
||||
- # MUST NOT contain any LCat character.
|
||||
- if any(stringprep.in_table_d2(x) for x in label):
|
||||
- raise UnicodeError("Violation of BIDI requirement 2")
|
||||
-
|
||||
- # 3) If a string contains any RandALCat character, a
|
||||
- # RandALCat character MUST be the first character of the
|
||||
- # string, and a RandALCat character MUST be the last
|
||||
- # character of the string.
|
||||
- if not RandAL[0] or not RandAL[-1]:
|
||||
- raise UnicodeError("Violation of BIDI requirement 3")
|
||||
+ if any(RandAL):
|
||||
+ # There is a RandAL char in the string. Must perform further
|
||||
+ # tests:
|
||||
+ # 1) The characters in section 5.8 MUST be prohibited.
|
||||
+ # This is table C.8, which was already checked
|
||||
+ # 2) If a string contains any RandALCat character, the string
|
||||
+ # MUST NOT contain any LCat character.
|
||||
+ if any(stringprep.in_table_d2(x) for x in label):
|
||||
+ raise UnicodeError("Violation of BIDI requirement 2")
|
||||
+ # 3) If a string contains any RandALCat character, a
|
||||
+ # RandALCat character MUST be the first character of the
|
||||
+ # string, and a RandALCat character MUST be the last
|
||||
+ # character of the string.
|
||||
+ if not RandAL[0] or not RandAL[-1]:
|
||||
+ raise UnicodeError("Violation of BIDI requirement 3")
|
||||
|
||||
return label
|
||||
|
||||
--- a/Lib/test/test_codecs.py
|
||||
+++ b/Lib/test/test_codecs.py
|
||||
@@ -1640,6 +1640,12 @@ class IDNACodecTest(unittest.TestCase):
|
||||
self.assertEqual("pyth\xf6n.org".encode("idna"), b"xn--pythn-mua.org")
|
||||
self.assertEqual("pyth\xf6n.org.".encode("idna"), b"xn--pythn-mua.org.")
|
||||
|
||||
+ def test_builtin_decode_length_limit(self):
|
||||
+ with self.assertRaisesRegex(UnicodeError, "too long"):
|
||||
+ (b"xn--016c"+b"a"*1100).decode("idna")
|
||||
+ with self.assertRaisesRegex(UnicodeError, "too long"):
|
||||
+ (b"xn--016c"+b"a"*70).decode("idna")
|
||||
+
|
||||
def test_stream(self):
|
||||
r = codecs.getreader("idna")(io.BytesIO(b"abc"))
|
||||
r.read(3)
|
||||
--- /dev/null
|
||||
+++ b/Misc/NEWS.d/next/Security/2022-11-04-09-29-36.gh-issue-98433.l76c5G.rst
|
||||
@@ -0,0 +1,6 @@
|
||||
+The IDNA codec decoder used on DNS hostnames by :mod:`socket` or :mod:`asyncio`
|
||||
+related name resolution functions no longer involves a quadratic algorithm.
|
||||
+This prevents a potential CPU denial of service if an out-of-spec excessive
|
||||
+length hostname involving bidirectional characters were decoded. Some protocols
|
||||
+such as :mod:`urllib` http ``3xx`` redirects potentially allow for an attacker
|
||||
+to supply such a name.
|
74
CVE-2023-24329-blank-URL-bypass.patch
Normal file
74
CVE-2023-24329-blank-URL-bypass.patch
Normal file
|
@ -0,0 +1,74 @@
|
|||
From a284d69de1d1a42714576d4a9562145a94e62127 Mon Sep 17 00:00:00 2001
|
||||
From: Ben Kallus <benjamin.p.kallus.gr@dartmouth.edu>
|
||||
Date: Sat, 12 Nov 2022 15:43:33 -0500
|
||||
Subject: [PATCH 1/2] gh-99418: Prevent urllib.parse.urlparse from accepting
|
||||
schemes that don't begin with an alphabetical ASCII character.
|
||||
|
||||
---
|
||||
Lib/test/test_urlparse.py | 18 ++++++++++
|
||||
Lib/urllib/parse.py | 7 +++
|
||||
Misc/NEWS.d/next/Library/2022-11-12-15-45-51.gh-issue-99418.FxfAXS.rst | 2 +
|
||||
3 files changed, 26 insertions(+), 1 deletion(-)
|
||||
|
||||
--- a/Lib/test/test_urlparse.py
|
||||
+++ b/Lib/test/test_urlparse.py
|
||||
@@ -676,6 +676,24 @@ class UrlParseTestCase(unittest.TestCase
|
||||
with self.assertRaises(ValueError):
|
||||
p.port
|
||||
|
||||
+ def test_attributes_bad_scheme(self):
|
||||
+ """Check handling of invalid schemes."""
|
||||
+ for bytes in (False, True):
|
||||
+ for parse in (urllib.parse.urlsplit, urllib.parse.urlparse):
|
||||
+ for scheme in (".", "+", "-", "0", "http&", "६http"):
|
||||
+ with self.subTest(bytes=bytes, parse=parse, scheme=scheme):
|
||||
+ url = scheme + "://www.example.net"
|
||||
+ if bytes:
|
||||
+ if urllib.parse.isascii(url):
|
||||
+ url = url.encode("ascii")
|
||||
+ else:
|
||||
+ continue
|
||||
+ p = parse(url)
|
||||
+ if bytes:
|
||||
+ self.assertEqual(p.scheme, b"")
|
||||
+ else:
|
||||
+ self.assertEqual(p.scheme, "")
|
||||
+
|
||||
def test_attributes_without_netloc(self):
|
||||
# This example is straight from RFC 3261. It looks like it
|
||||
# should allow the username, hostname, and port to be filled
|
||||
--- a/Lib/urllib/parse.py
|
||||
+++ b/Lib/urllib/parse.py
|
||||
@@ -35,6 +35,7 @@ __all__ = ["urlparse", "urlunparse", "ur
|
||||
"urlsplit", "urlunsplit", "urlencode", "parse_qs",
|
||||
"parse_qsl", "quote", "quote_plus", "quote_from_bytes",
|
||||
"unquote", "unquote_plus", "unquote_to_bytes",
|
||||
+ "isascii",
|
||||
"DefragResult", "ParseResult", "SplitResult",
|
||||
"DefragResultBytes", "ParseResultBytes", "SplitResultBytes"]
|
||||
|
||||
@@ -79,6 +80,10 @@ scheme_chars = ('abcdefghijklmnopqrstuvw
|
||||
# Unsafe bytes to be removed per WHATWG spec
|
||||
_UNSAFE_URL_BYTES_TO_REMOVE = ['\t', '\r', '\n']
|
||||
|
||||
+# Python >= 3.7 shim
|
||||
+def isascii(word):
|
||||
+ return all([ord(c) < 128 for c in word])
|
||||
+
|
||||
# XXX: Consider replacing with functools.lru_cache
|
||||
MAX_CACHE_SIZE = 20
|
||||
_parse_cache = {}
|
||||
@@ -435,7 +440,7 @@ def urlsplit(url, scheme='', allow_fragm
|
||||
clear_cache()
|
||||
netloc = query = fragment = ''
|
||||
i = url.find(':')
|
||||
- if i > 0:
|
||||
+ if i > 0 and isascii(url[0]) and url[0].isalpha():
|
||||
if url[:i] == 'http': # optimize the common case
|
||||
scheme = url[:i].lower()
|
||||
url = url[i+1:]
|
||||
--- /dev/null
|
||||
+++ b/Misc/NEWS.d/next/Library/2022-11-12-15-45-51.gh-issue-99418.FxfAXS.rst
|
||||
@@ -0,0 +1,2 @@
|
||||
+Fix bug in :func:`urllib.parse.urlparse` that causes URL schemes that begin
|
||||
+with a digit, a plus sign, or a minus sign to be parsed incorrectly.
|
470
CVE-2023-27043-email-parsing-errors.patch
Normal file
470
CVE-2023-27043-email-parsing-errors.patch
Normal file
|
@ -0,0 +1,470 @@
|
|||
---
|
||||
Doc/library/email.utils.rst | 19 -
|
||||
Lib/email/utils.py | 155 +++++++-
|
||||
Lib/test/test_email/test_email.py | 187 +++++++++-
|
||||
Misc/NEWS.d/next/Library/2023-10-20-15-28-08.gh-issue-102988.dStNO7.rst | 8
|
||||
4 files changed, 339 insertions(+), 30 deletions(-)
|
||||
|
||||
--- a/Doc/library/email.utils.rst
|
||||
+++ b/Doc/library/email.utils.rst
|
||||
@@ -60,13 +60,18 @@ of the new API.
|
||||
begins with angle brackets, they are stripped off.
|
||||
|
||||
|
||||
-.. function:: parseaddr(address)
|
||||
+.. function:: parseaddr(address, *, strict=True)
|
||||
|
||||
Parse address -- which should be the value of some address-containing field such
|
||||
as :mailheader:`To` or :mailheader:`Cc` -- into its constituent *realname* and
|
||||
*email address* parts. Returns a tuple of that information, unless the parse
|
||||
fails, in which case a 2-tuple of ``('', '')`` is returned.
|
||||
|
||||
+ If *strict* is true, use a strict parser which rejects malformed inputs.
|
||||
+
|
||||
+ .. versionchanged:: 3.13
|
||||
+ Add *strict* optional parameter and reject malformed inputs by default.
|
||||
+
|
||||
|
||||
.. function:: formataddr(pair, charset='utf-8')
|
||||
|
||||
@@ -84,12 +89,15 @@ of the new API.
|
||||
Added the *charset* option.
|
||||
|
||||
|
||||
-.. function:: getaddresses(fieldvalues)
|
||||
+.. function:: getaddresses(fieldvalues, *, strict=True)
|
||||
|
||||
This method returns a list of 2-tuples of the form returned by ``parseaddr()``.
|
||||
*fieldvalues* is a sequence of header field values as might be returned by
|
||||
- :meth:`Message.get_all <email.message.Message.get_all>`. Here's a simple
|
||||
- example that gets all the recipients of a message::
|
||||
+ :meth:`Message.get_all <email.message.Message.get_all>`.
|
||||
+
|
||||
+ If *strict* is true, use a strict parser which rejects malformed inputs.
|
||||
+
|
||||
+ Here's a simple example that gets all the recipients of a message::
|
||||
|
||||
from email.utils import getaddresses
|
||||
|
||||
@@ -99,6 +107,9 @@ of the new API.
|
||||
resent_ccs = msg.get_all('resent-cc', [])
|
||||
all_recipients = getaddresses(tos + ccs + resent_tos + resent_ccs)
|
||||
|
||||
+ .. versionchanged:: 3.13
|
||||
+ Add *strict* optional parameter and reject malformed inputs by default.
|
||||
+
|
||||
|
||||
.. function:: parsedate(date)
|
||||
|
||||
--- a/Lib/email/utils.py
|
||||
+++ b/Lib/email/utils.py
|
||||
@@ -48,6 +48,7 @@ TICK = "'"
|
||||
specialsre = re.compile(r'[][\\()<>@,:;".]')
|
||||
escapesre = re.compile(r'[\\"]')
|
||||
|
||||
+
|
||||
def _has_surrogates(s):
|
||||
"""Return True if s contains surrogate-escaped binary data."""
|
||||
# This check is based on the fact that unless there are surrogates, utf8
|
||||
@@ -105,26 +106,127 @@ def formataddr(pair, charset='utf-8'):
|
||||
return '%s%s%s <%s>' % (quotes, name, quotes, address)
|
||||
return address
|
||||
|
||||
+def _iter_escaped_chars(addr):
|
||||
+ pos = 0
|
||||
+ escape = False
|
||||
+ for pos, ch in enumerate(addr):
|
||||
+ if escape:
|
||||
+ yield (pos, '\\' + ch)
|
||||
+ escape = False
|
||||
+ elif ch == '\\':
|
||||
+ escape = True
|
||||
+ else:
|
||||
+ yield (pos, ch)
|
||||
+ if escape:
|
||||
+ yield (pos, '\\')
|
||||
+
|
||||
+
|
||||
+def _strip_quoted_realnames(addr):
|
||||
+ """Strip real names between quotes."""
|
||||
+ if '"' not in addr:
|
||||
+ # Fast path
|
||||
+ return addr
|
||||
+
|
||||
+ start = 0
|
||||
+ open_pos = None
|
||||
+ result = []
|
||||
+ for pos, ch in _iter_escaped_chars(addr):
|
||||
+ if ch == '"':
|
||||
+ if open_pos is None:
|
||||
+ open_pos = pos
|
||||
+ else:
|
||||
+ if start != open_pos:
|
||||
+ result.append(addr[start:open_pos])
|
||||
+ start = pos + 1
|
||||
+ open_pos = None
|
||||
+
|
||||
+ if start < len(addr):
|
||||
+ result.append(addr[start:])
|
||||
+
|
||||
+ return ''.join(result)
|
||||
|
||||
|
||||
-def getaddresses(fieldvalues):
|
||||
- """Return a list of (REALNAME, EMAIL) for each fieldvalue."""
|
||||
- all = COMMASPACE.join(fieldvalues)
|
||||
- a = _AddressList(all)
|
||||
- return a.addresslist
|
||||
+supports_strict_parsing = True
|
||||
|
||||
+def getaddresses(fieldvalues, *, strict=True):
|
||||
+ """Return a list of (REALNAME, EMAIL) or ('','') for each fieldvalue.
|
||||
|
||||
+ When parsing fails for a fieldvalue, a 2-tuple of ('', '') is returned in
|
||||
+ its place.
|
||||
|
||||
-ecre = re.compile(r'''
|
||||
- =\? # literal =?
|
||||
- (?P<charset>[^?]*?) # non-greedy up to the next ? is the charset
|
||||
- \? # literal ?
|
||||
- (?P<encoding>[qb]) # either a "q" or a "b", case insensitive
|
||||
- \? # literal ?
|
||||
- (?P<atom>.*?) # non-greedy up to the next ?= is the atom
|
||||
- \?= # literal ?=
|
||||
- ''', re.VERBOSE | re.IGNORECASE)
|
||||
+ If strict is true, use a strict parser which rejects malformed inputs.
|
||||
+ """
|
||||
+
|
||||
+ # If strict is true, if the resulting list of parsed addresses is greater
|
||||
+ # than the number of fieldvalues in the input list, a parsing error has
|
||||
+ # occurred and consequently a list containing a single empty 2-tuple [('',
|
||||
+ # '')] is returned in its place. This is done to avoid invalid output.
|
||||
+ #
|
||||
+ # Malformed input: getaddresses(['alice@example.com <bob@example.com>'])
|
||||
+ # Invalid output: [('', 'alice@example.com'), ('', 'bob@example.com')]
|
||||
+ # Safe output: [('', '')]
|
||||
+
|
||||
+ if not strict:
|
||||
+ all = COMMASPACE.join(str(v) for v in fieldvalues)
|
||||
+ a = _AddressList(all)
|
||||
+ return a.addresslist
|
||||
+
|
||||
+ fieldvalues = [str(v) for v in fieldvalues]
|
||||
+ fieldvalues = _pre_parse_validation(fieldvalues)
|
||||
+ addr = COMMASPACE.join(fieldvalues)
|
||||
+ a = _AddressList(addr)
|
||||
+ result = _post_parse_validation(a.addresslist)
|
||||
+
|
||||
+ # Treat output as invalid if the number of addresses is not equal to the
|
||||
+ # expected number of addresses.
|
||||
+ n = 0
|
||||
+ for v in fieldvalues:
|
||||
+ # When a comma is used in the Real Name part it is not a delimiter.
|
||||
+ # So strip those out before counting the commas.
|
||||
+ v = _strip_quoted_realnames(v)
|
||||
+ # Expected number of addresses: 1 + number of commas
|
||||
+ n += 1 + v.count(',')
|
||||
+ if len(result) != n:
|
||||
+ return [('', '')]
|
||||
+
|
||||
+ return result
|
||||
+
|
||||
+
|
||||
+def _check_parenthesis(addr):
|
||||
+ # Ignore parenthesis in quoted real names.
|
||||
+ addr = _strip_quoted_realnames(addr)
|
||||
+
|
||||
+ opens = 0
|
||||
+ for pos, ch in _iter_escaped_chars(addr):
|
||||
+ if ch == '(':
|
||||
+ opens += 1
|
||||
+ elif ch == ')':
|
||||
+ opens -= 1
|
||||
+ if opens < 0:
|
||||
+ return False
|
||||
+ return (opens == 0)
|
||||
+
|
||||
+
|
||||
+def _pre_parse_validation(email_header_fields):
|
||||
+ accepted_values = []
|
||||
+ for v in email_header_fields:
|
||||
+ if not _check_parenthesis(v):
|
||||
+ v = "('', '')"
|
||||
+ accepted_values.append(v)
|
||||
+
|
||||
+ return accepted_values
|
||||
+
|
||||
+
|
||||
+def _post_parse_validation(parsed_email_header_tuples):
|
||||
+ accepted_values = []
|
||||
+ # The parser would have parsed a correctly formatted domain-literal
|
||||
+ # The existence of an [ after parsing indicates a parsing failure
|
||||
+ for v in parsed_email_header_tuples:
|
||||
+ if '[' in v[1]:
|
||||
+ v = ('', '')
|
||||
+ accepted_values.append(v)
|
||||
|
||||
+ return accepted_values
|
||||
|
||||
def _format_timetuple_and_zone(timetuple, zone):
|
||||
return '%s, %02d %s %04d %02d:%02d:%02d %s' % (
|
||||
@@ -214,16 +316,33 @@ def parsedate_to_datetime(data):
|
||||
tzinfo=datetime.timezone(datetime.timedelta(seconds=tz)))
|
||||
|
||||
|
||||
-def parseaddr(addr):
|
||||
+def parseaddr(addr, *, strict=True):
|
||||
"""
|
||||
Parse addr into its constituent realname and email address parts.
|
||||
|
||||
Return a tuple of realname and email address, unless the parse fails, in
|
||||
which case return a 2-tuple of ('', '').
|
||||
+
|
||||
+ If strict is True, use a strict parser which rejects malformed inputs.
|
||||
"""
|
||||
- addrs = _AddressList(addr).addresslist
|
||||
- if not addrs:
|
||||
- return '', ''
|
||||
+ if not strict:
|
||||
+ addrs = _AddressList(addr).addresslist
|
||||
+ if not addrs:
|
||||
+ return ('', '')
|
||||
+ return addrs[0]
|
||||
+
|
||||
+ if isinstance(addr, list):
|
||||
+ addr = addr[0]
|
||||
+
|
||||
+ if not isinstance(addr, str):
|
||||
+ return ('', '')
|
||||
+
|
||||
+ addr = _pre_parse_validation([addr])[0]
|
||||
+ addrs = _post_parse_validation(_AddressList(addr).addresslist)
|
||||
+
|
||||
+ if not addrs or len(addrs) > 1:
|
||||
+ return ('', '')
|
||||
+
|
||||
return addrs[0]
|
||||
|
||||
|
||||
--- a/Lib/test/test_email/test_email.py
|
||||
+++ b/Lib/test/test_email/test_email.py
|
||||
@@ -19,6 +19,7 @@ except ImportError:
|
||||
|
||||
import email
|
||||
import email.policy
|
||||
+import email.utils
|
||||
|
||||
from email.charset import Charset
|
||||
from email.header import Header, decode_header, make_header
|
||||
@@ -3242,15 +3243,137 @@ Foo
|
||||
[('Al Person', 'aperson@dom.ain'),
|
||||
('Bud Person', 'bperson@dom.ain')])
|
||||
|
||||
+ def test_parsing_errors(self):
|
||||
+ """Test for parsing errors from CVE-2023-27043 and CVE-2019-16056"""
|
||||
+ alice = 'alice@example.org'
|
||||
+ bob = 'bob@example.com'
|
||||
+ empty = ('', '')
|
||||
+
|
||||
+ # Test utils.getaddresses() and utils.parseaddr() on malformed email
|
||||
+ # addresses: default behavior (strict=True) rejects malformed address,
|
||||
+ # and strict=False which tolerates malformed address.
|
||||
+ for invalid_separator, expected_non_strict in (
|
||||
+ ('(', [(f'<{bob}>', alice)]),
|
||||
+ (')', [('', alice), empty, ('', bob)]),
|
||||
+ ('<', [('', alice), empty, ('', bob), empty]),
|
||||
+ ('>', [('', alice), empty, ('', bob)]),
|
||||
+ ('[', [('', f'{alice}[<{bob}>]')]),
|
||||
+ (']', [('', alice), empty, ('', bob)]),
|
||||
+ ('@', [empty, empty, ('', bob)]),
|
||||
+ (';', [('', alice), empty, ('', bob)]),
|
||||
+ (':', [('', alice), ('', bob)]),
|
||||
+ ('.', [('', alice + '.'), ('', bob)]),
|
||||
+ ('"', [('', alice), ('', f'<{bob}>')]),
|
||||
+ ):
|
||||
+ address = f'{alice}{invalid_separator}<{bob}>'
|
||||
+ with self.subTest(address=address):
|
||||
+ self.assertEqual(utils.getaddresses([address]),
|
||||
+ [empty])
|
||||
+ self.assertEqual(utils.getaddresses([address], strict=False),
|
||||
+ expected_non_strict)
|
||||
+
|
||||
+ self.assertEqual(utils.parseaddr([address]),
|
||||
+ empty)
|
||||
+ self.assertEqual(utils.parseaddr([address], strict=False),
|
||||
+ ('', address))
|
||||
+
|
||||
+ # Comma (',') is treated differently depending on strict parameter.
|
||||
+ # Comma without quotes.
|
||||
+ address = f'{alice},<{bob}>'
|
||||
+ self.assertEqual(utils.getaddresses([address]),
|
||||
+ [('', alice), ('', bob)])
|
||||
+ self.assertEqual(utils.getaddresses([address], strict=False),
|
||||
+ [('', alice), ('', bob)])
|
||||
+ self.assertEqual(utils.parseaddr([address]),
|
||||
+ empty)
|
||||
+ self.assertEqual(utils.parseaddr([address], strict=False),
|
||||
+ ('', address))
|
||||
+
|
||||
+ # Real name between quotes containing comma.
|
||||
+ address = '"Alice, alice@example.org" <bob@example.com>'
|
||||
+ expected_strict = ('Alice, alice@example.org', 'bob@example.com')
|
||||
+ self.assertEqual(utils.getaddresses([address]), [expected_strict])
|
||||
+ self.assertEqual(utils.getaddresses([address], strict=False), [expected_strict])
|
||||
+ self.assertEqual(utils.parseaddr([address]), expected_strict)
|
||||
+ self.assertEqual(utils.parseaddr([address], strict=False),
|
||||
+ ('', address))
|
||||
+
|
||||
+ # Valid parenthesis in comments.
|
||||
+ address = 'alice@example.org (Alice)'
|
||||
+ expected_strict = ('Alice', 'alice@example.org')
|
||||
+ self.assertEqual(utils.getaddresses([address]), [expected_strict])
|
||||
+ self.assertEqual(utils.getaddresses([address], strict=False), [expected_strict])
|
||||
+ self.assertEqual(utils.parseaddr([address]), expected_strict)
|
||||
+ self.assertEqual(utils.parseaddr([address], strict=False),
|
||||
+ ('', address))
|
||||
+
|
||||
+ # Invalid parenthesis in comments.
|
||||
+ address = 'alice@example.org )Alice('
|
||||
+ self.assertEqual(utils.getaddresses([address]), [empty])
|
||||
+ self.assertEqual(utils.getaddresses([address], strict=False),
|
||||
+ [('', 'alice@example.org'), ('', ''), ('', 'Alice')])
|
||||
+ self.assertEqual(utils.parseaddr([address]), empty)
|
||||
+ self.assertEqual(utils.parseaddr([address], strict=False),
|
||||
+ ('', address))
|
||||
+
|
||||
+ # Two addresses with quotes separated by comma.
|
||||
+ address = '"Jane Doe" <jane@example.net>, "John Doe" <john@example.net>'
|
||||
+ self.assertEqual(utils.getaddresses([address]),
|
||||
+ [('Jane Doe', 'jane@example.net'),
|
||||
+ ('John Doe', 'john@example.net')])
|
||||
+ self.assertEqual(utils.getaddresses([address], strict=False),
|
||||
+ [('Jane Doe', 'jane@example.net'),
|
||||
+ ('John Doe', 'john@example.net')])
|
||||
+ self.assertEqual(utils.parseaddr([address]), empty)
|
||||
+ self.assertEqual(utils.parseaddr([address], strict=False),
|
||||
+ ('', address))
|
||||
+
|
||||
+ # Test email.utils.supports_strict_parsing attribute
|
||||
+ self.assertEqual(email.utils.supports_strict_parsing, True)
|
||||
+
|
||||
def test_getaddresses_nasty(self):
|
||||
- eq = self.assertEqual
|
||||
- eq(utils.getaddresses(['foo: ;']), [('', '')])
|
||||
- eq(utils.getaddresses(
|
||||
- ['[]*-- =~$']),
|
||||
- [('', ''), ('', ''), ('', '*--')])
|
||||
- eq(utils.getaddresses(
|
||||
- ['foo: ;', '"Jason R. Mastaler" <jason@dom.ain>']),
|
||||
- [('', ''), ('Jason R. Mastaler', 'jason@dom.ain')])
|
||||
+ for addresses, expected in (
|
||||
+ (['"Sürname, Firstname" <to@example.com>'],
|
||||
+ [('Sürname, Firstname', 'to@example.com')]),
|
||||
+
|
||||
+ (['foo: ;'],
|
||||
+ [('', '')]),
|
||||
+
|
||||
+ (['foo: ;', '"Jason R. Mastaler" <jason@dom.ain>'],
|
||||
+ [('', ''), ('Jason R. Mastaler', 'jason@dom.ain')]),
|
||||
+
|
||||
+ ([r'Pete(A nice \) chap) <pete(his account)@silly.test(his host)>'],
|
||||
+ [('Pete (A nice ) chap his account his host)', 'pete@silly.test')]),
|
||||
+
|
||||
+ (['(Empty list)(start)Undisclosed recipients :(nobody(I know))'],
|
||||
+ [('', '')]),
|
||||
+
|
||||
+ (['Mary <@machine.tld:mary@example.net>, , jdoe@test . example'],
|
||||
+ [('Mary', 'mary@example.net'), ('', ''), ('', 'jdoe@test.example')]),
|
||||
+
|
||||
+ (['John Doe <jdoe@machine(comment). example>'],
|
||||
+ [('John Doe (comment)', 'jdoe@machine.example')]),
|
||||
+
|
||||
+ (['"Mary Smith: Personal Account" <smith@home.example>'],
|
||||
+ [('Mary Smith: Personal Account', 'smith@home.example')]),
|
||||
+
|
||||
+ (['Undisclosed recipients:;'],
|
||||
+ [('', '')]),
|
||||
+
|
||||
+ ([r'<boss@nil.test>, "Giant; \"Big\" Box" <bob@example.net>'],
|
||||
+ [('', 'boss@nil.test'), ('Giant; "Big" Box', 'bob@example.net')]),
|
||||
+ ):
|
||||
+ with self.subTest(addresses=addresses):
|
||||
+ self.assertEqual(utils.getaddresses(addresses),
|
||||
+ expected)
|
||||
+ self.assertEqual(utils.getaddresses(addresses, strict=False),
|
||||
+ expected)
|
||||
+
|
||||
+ addresses = ['[]*-- =~$']
|
||||
+ self.assertEqual(utils.getaddresses(addresses),
|
||||
+ [('', '')])
|
||||
+ self.assertEqual(utils.getaddresses(addresses, strict=False),
|
||||
+ [('', ''), ('', ''), ('', '*--')])
|
||||
|
||||
def test_getaddresses_embedded_comment(self):
|
||||
"""Test proper handling of a nested comment"""
|
||||
@@ -3432,6 +3555,54 @@ multipart/report
|
||||
m = cls(*constructor, policy=email.policy.default)
|
||||
self.assertIs(m.policy, email.policy.default)
|
||||
|
||||
+ def test_iter_escaped_chars(self):
|
||||
+ self.assertEqual(list(utils._iter_escaped_chars(r'a\\b\"c\\"d')),
|
||||
+ [(0, 'a'),
|
||||
+ (2, '\\\\'),
|
||||
+ (3, 'b'),
|
||||
+ (5, '\\"'),
|
||||
+ (6, 'c'),
|
||||
+ (8, '\\\\'),
|
||||
+ (9, '"'),
|
||||
+ (10, 'd')])
|
||||
+ self.assertEqual(list(utils._iter_escaped_chars('a\\')),
|
||||
+ [(0, 'a'), (1, '\\')])
|
||||
+
|
||||
+ def test_strip_quoted_realnames(self):
|
||||
+ def check(addr, expected):
|
||||
+ self.assertEqual(utils._strip_quoted_realnames(addr), expected)
|
||||
+
|
||||
+ check('"Jane Doe" <jane@example.net>, "John Doe" <john@example.net>',
|
||||
+ ' <jane@example.net>, <john@example.net>')
|
||||
+ check(r'"Jane \"Doe\"." <jane@example.net>',
|
||||
+ ' <jane@example.net>')
|
||||
+
|
||||
+ # special cases
|
||||
+ check(r'before"name"after', 'beforeafter')
|
||||
+ check(r'before"name"', 'before')
|
||||
+ check(r'b"name"', 'b') # single char
|
||||
+ check(r'"name"after', 'after')
|
||||
+ check(r'"name"a', 'a') # single char
|
||||
+ check(r'"name"', '')
|
||||
+
|
||||
+ # no change
|
||||
+ for addr in (
|
||||
+ 'Jane Doe <jane@example.net>, John Doe <john@example.net>',
|
||||
+ 'lone " quote',
|
||||
+ ):
|
||||
+ self.assertEqual(utils._strip_quoted_realnames(addr), addr)
|
||||
+
|
||||
+
|
||||
+ def test_check_parenthesis(self):
|
||||
+ addr = 'alice@example.net'
|
||||
+ self.assertTrue(utils._check_parenthesis(f'{addr} (Alice)'))
|
||||
+ self.assertFalse(utils._check_parenthesis(f'{addr} )Alice('))
|
||||
+ self.assertFalse(utils._check_parenthesis(f'{addr} (Alice))'))
|
||||
+ self.assertFalse(utils._check_parenthesis(f'{addr} ((Alice)'))
|
||||
+
|
||||
+ # Ignore real name between quotes
|
||||
+ self.assertTrue(utils._check_parenthesis(f'")Alice((" {addr}'))
|
||||
+
|
||||
|
||||
# Test the iterator/generators
|
||||
class TestIterators(TestEmailBase):
|
||||
--- /dev/null
|
||||
+++ b/Misc/NEWS.d/next/Library/2023-10-20-15-28-08.gh-issue-102988.dStNO7.rst
|
||||
@@ -0,0 +1,8 @@
|
||||
+:func:`email.utils.getaddresses` and :func:`email.utils.parseaddr` now
|
||||
+return ``('', '')`` 2-tuples in more situations where invalid email
|
||||
+addresses are encountered instead of potentially inaccurate values. Add
|
||||
+optional *strict* parameter to these two functions: use ``strict=False`` to
|
||||
+get the old behavior, accept malformed inputs.
|
||||
+``getattr(email.utils, 'supports_strict_parsing', False)`` can be use to check
|
||||
+if the *strict* parameter is available. Patch by Thomas Dwyer and Victor
|
||||
+Stinner to improve the CVE-2023-27043 fix.
|
341
CVE-2023-40217-avoid-ssl-pre-close.patch
Normal file
341
CVE-2023-40217-avoid-ssl-pre-close.patch
Normal file
|
@ -0,0 +1,341 @@
|
|||
From f0c1e55dfd28970196768a6997a6dc0eab0f5259 Mon Sep 17 00:00:00 2001
|
||||
From: =?UTF-8?q?=C5=81ukasz=20Langa?= <lukasz@langa.pl>
|
||||
Date: Tue, 22 Aug 2023 17:39:17 +0200
|
||||
Subject: [PATCH] gh-108310: Fix CVE-2023-40217: Check for & avoid the ssl
|
||||
pre-close flaw
|
||||
MIME-Version: 1.0
|
||||
Content-Type: text/plain; charset=UTF-8
|
||||
Content-Transfer-Encoding: 8bit
|
||||
|
||||
Instances of `ssl.SSLSocket` were vulnerable to a bypass of the TLS handshake
|
||||
and included protections (like certificate verification) and treating sent
|
||||
unencrypted data as if it were post-handshake TLS encrypted data.
|
||||
|
||||
The vulnerability is caused when a socket is connected, data is sent by the
|
||||
malicious peer and stored in a buffer, and then the malicious peer closes the
|
||||
socket within a small timing window before the other peers’ TLS handshake can
|
||||
begin. After this sequence of events the closed socket will not immediately
|
||||
attempt a TLS handshake due to not being connected but will also allow the
|
||||
buffered data to be read as if a successful TLS handshake had occurred.
|
||||
|
||||
Co-Authored-By: Gregory P. Smith [Google LLC] <greg@krypto.org>
|
||||
---
|
||||
Lib/ssl.py | 31 ++-
|
||||
Lib/test/test_ssl.py | 215 ++++++++++++++++++
|
||||
...-08-22-17-39-12.gh-issue-108310.fVM3sg.rst | 7 +
|
||||
3 files changed, 252 insertions(+), 1 deletion(-)
|
||||
create mode 100644 Misc/NEWS.d/next/Security/2023-08-22-17-39-12.gh-issue-108310.fVM3sg.rst
|
||||
|
||||
Index: Python-3.6.15/Lib/ssl.py
|
||||
===================================================================
|
||||
--- Python-3.6.15.orig/Lib/ssl.py
|
||||
+++ Python-3.6.15/Lib/ssl.py
|
||||
@@ -771,6 +771,8 @@ class SSLSocket(socket):
|
||||
"client mode")
|
||||
if self._context.check_hostname and not server_hostname:
|
||||
raise ValueError("check_hostname requires server_hostname")
|
||||
+ self._closed = False
|
||||
+ self._sslobj = None
|
||||
self._session = _session
|
||||
self.server_side = server_side
|
||||
self.server_hostname = server_hostname
|
||||
@@ -782,7 +784,7 @@ class SSLSocket(socket):
|
||||
type=sock.type,
|
||||
proto=sock.proto,
|
||||
fileno=sock.fileno())
|
||||
- self.settimeout(sock.gettimeout())
|
||||
+ sock_timeout = sock.gettimeout()
|
||||
sock.detach()
|
||||
elif fileno is not None:
|
||||
socket.__init__(self, fileno=fileno)
|
||||
@@ -796,11 +798,38 @@ class SSLSocket(socket):
|
||||
if e.errno != errno.ENOTCONN:
|
||||
raise
|
||||
connected = False
|
||||
+ blocking = self.gettimeout() == 0
|
||||
+ self.setblocking(False)
|
||||
+ try:
|
||||
+ # We are not connected so this is not supposed to block, but
|
||||
+ # testing revealed otherwise on macOS and Windows so we do
|
||||
+ # the non-blocking dance regardless. Our raise when any data
|
||||
+ # is found means consuming the data is harmless.
|
||||
+ notconn_pre_handshake_data = self.recv(1)
|
||||
+ except OSError as e:
|
||||
+ # EINVAL occurs for recv(1) on non-connected on unix sockets.
|
||||
+ if e.errno not in (errno.ENOTCONN, errno.EINVAL):
|
||||
+ raise
|
||||
+ notconn_pre_handshake_data = b''
|
||||
+ self.setblocking(blocking)
|
||||
+ if notconn_pre_handshake_data:
|
||||
+ # This prevents pending data sent to the socket before it was
|
||||
+ # closed from escaping to the caller who could otherwise
|
||||
+ # presume it came through a successful TLS connection.
|
||||
+ reason = "Closed before TLS handshake with data in recv buffer."
|
||||
+ notconn_pre_handshake_data_error = SSLError(e.errno, reason)
|
||||
+ # Add the SSLError attributes that _ssl.c always adds.
|
||||
+ notconn_pre_handshake_data_error.reason = reason
|
||||
+ notconn_pre_handshake_data_error.library = None
|
||||
+ try:
|
||||
+ self.close()
|
||||
+ except OSError:
|
||||
+ pass
|
||||
+ raise notconn_pre_handshake_data_error
|
||||
else:
|
||||
connected = True
|
||||
|
||||
- self._closed = False
|
||||
- self._sslobj = None
|
||||
+ self.settimeout(sock_timeout) # Must come after setblocking() calls.
|
||||
self._connected = connected
|
||||
if connected:
|
||||
# create the SSL object
|
||||
Index: Python-3.6.15/Lib/test/test_ssl.py
|
||||
===================================================================
|
||||
--- Python-3.6.15.orig/Lib/test/test_ssl.py
|
||||
+++ Python-3.6.15/Lib/test/test_ssl.py
|
||||
@@ -3,11 +3,14 @@
|
||||
import sys
|
||||
import unittest
|
||||
from test import support
|
||||
+import re
|
||||
import socket
|
||||
import select
|
||||
+import struct
|
||||
import time
|
||||
import datetime
|
||||
import gc
|
||||
+import http.client
|
||||
import os
|
||||
import errno
|
||||
import pprint
|
||||
@@ -3918,6 +3921,218 @@ class TestPostHandshakeAuth(unittest.Tes
|
||||
self.assertIn(b'WRONG_SSL_VERSION', s.recv(1024))
|
||||
|
||||
|
||||
+def set_socket_so_linger_on_with_zero_timeout(sock):
|
||||
+ sock.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, struct.pack('ii', 1, 0))
|
||||
+
|
||||
+
|
||||
+class TestPreHandshakeClose(unittest.TestCase):
|
||||
+ """Verify behavior of close sockets with received data before to the handshake.
|
||||
+ """
|
||||
+
|
||||
+ class SingleConnectionTestServerThread(threading.Thread):
|
||||
+
|
||||
+ def __init__(self, *, name, call_after_accept):
|
||||
+ self.call_after_accept = call_after_accept
|
||||
+ self.received_data = b'' # set by .run()
|
||||
+ self.wrap_error = None # set by .run()
|
||||
+ self.listener = None # set by .start()
|
||||
+ self.port = None # set by .start()
|
||||
+ super().__init__(name=name)
|
||||
+
|
||||
+ def __enter__(self):
|
||||
+ self.start()
|
||||
+ return self
|
||||
+
|
||||
+ def __exit__(self, *args):
|
||||
+ try:
|
||||
+ if self.listener:
|
||||
+ self.listener.close()
|
||||
+ except OSError:
|
||||
+ pass
|
||||
+ self.join()
|
||||
+ self.wrap_error = None # avoid dangling references
|
||||
+
|
||||
+ def start(self):
|
||||
+ self.ssl_ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
|
||||
+ self.ssl_ctx.verify_mode = ssl.CERT_REQUIRED
|
||||
+ self.ssl_ctx.load_verify_locations(cafile=ONLYCERT)
|
||||
+ self.ssl_ctx.load_cert_chain(certfile=ONLYCERT, keyfile=ONLYKEY)
|
||||
+ self.listener = socket.socket()
|
||||
+ self.port = support.bind_port(self.listener)
|
||||
+ self.listener.settimeout(2.0)
|
||||
+ self.listener.listen(1)
|
||||
+ super().start()
|
||||
+
|
||||
+ def run(self):
|
||||
+ conn, address = self.listener.accept()
|
||||
+ self.listener.close()
|
||||
+ with conn:
|
||||
+ if self.call_after_accept(conn):
|
||||
+ return
|
||||
+ try:
|
||||
+ tls_socket = self.ssl_ctx.wrap_socket(conn, server_side=True)
|
||||
+ except OSError as err: # ssl.SSLError inherits from OSError
|
||||
+ self.wrap_error = err
|
||||
+ else:
|
||||
+ try:
|
||||
+ self.received_data = tls_socket.recv(400)
|
||||
+ except OSError:
|
||||
+ pass # closed, protocol error, etc.
|
||||
+
|
||||
+ def non_linux_skip_if_other_okay_error(self, err):
|
||||
+ if sys.platform == "linux":
|
||||
+ return # Expect the full test setup to always work on Linux.
|
||||
+ if (isinstance(err, ConnectionResetError) or
|
||||
+ (isinstance(err, OSError) and err.errno == errno.EINVAL) or
|
||||
+ re.search('wrong.version.number', getattr(err, "reason", ""), re.I)):
|
||||
+ # On Windows the TCP RST leads to a ConnectionResetError
|
||||
+ # (ECONNRESET) which Linux doesn't appear to surface to userspace.
|
||||
+ # If wrap_socket() winds up on the "if connected:" path and doing
|
||||
+ # the actual wrapping... we get an SSLError from OpenSSL. Typically
|
||||
+ # WRONG_VERSION_NUMBER. While appropriate, neither is the scenario
|
||||
+ # we're specifically trying to test. The way this test is written
|
||||
+ # is known to work on Linux. We'll skip it anywhere else that it
|
||||
+ # does not present as doing so.
|
||||
+ self.skipTest(f"Could not recreate conditions on {sys.platform}:"
|
||||
+ f" {err}")
|
||||
+ # If maintaining this conditional winds up being a problem.
|
||||
+ # just turn this into an unconditional skip anything but Linux.
|
||||
+ # The important thing is that our CI has the logic covered.
|
||||
+
|
||||
+ def test_preauth_data_to_tls_server(self):
|
||||
+ server_accept_called = threading.Event()
|
||||
+ ready_for_server_wrap_socket = threading.Event()
|
||||
+
|
||||
+ def call_after_accept(unused):
|
||||
+ server_accept_called.set()
|
||||
+ if not ready_for_server_wrap_socket.wait(2.0):
|
||||
+ raise RuntimeError("wrap_socket event never set, test may fail.")
|
||||
+ return False # Tell the server thread to continue.
|
||||
+
|
||||
+ server = self.SingleConnectionTestServerThread(
|
||||
+ call_after_accept=call_after_accept,
|
||||
+ name="preauth_data_to_tls_server")
|
||||
+ server.__enter__() # starts it
|
||||
+ self.addCleanup(server.__exit__) # ... & unittest.TestCase stops it.
|
||||
+
|
||||
+ with socket.socket() as client:
|
||||
+ client.connect(server.listener.getsockname())
|
||||
+ # This forces an immediate connection close via RST on .close().
|
||||
+ set_socket_so_linger_on_with_zero_timeout(client)
|
||||
+ client.setblocking(False)
|
||||
+
|
||||
+ server_accept_called.wait()
|
||||
+ client.send(b"DELETE /data HTTP/1.0\r\n\r\n")
|
||||
+ client.close() # RST
|
||||
+
|
||||
+ ready_for_server_wrap_socket.set()
|
||||
+ server.join()
|
||||
+ wrap_error = server.wrap_error
|
||||
+ self.assertEqual(b"", server.received_data)
|
||||
+ self.assertIsInstance(wrap_error, OSError) # All platforms.
|
||||
+ self.non_linux_skip_if_other_okay_error(wrap_error)
|
||||
+ self.assertIsInstance(wrap_error, ssl.SSLError)
|
||||
+ self.assertIn("before TLS handshake with data", wrap_error.args[1])
|
||||
+ self.assertIn("before TLS handshake with data", wrap_error.reason)
|
||||
+ self.assertNotEqual(0, wrap_error.args[0])
|
||||
+ self.assertIsNone(wrap_error.library, msg="attr must exist")
|
||||
+
|
||||
+ def test_preauth_data_to_tls_client(self):
|
||||
+ client_can_continue_with_wrap_socket = threading.Event()
|
||||
+
|
||||
+ def call_after_accept(conn_to_client):
|
||||
+ # This forces an immediate connection close via RST on .close().
|
||||
+ set_socket_so_linger_on_with_zero_timeout(conn_to_client)
|
||||
+ conn_to_client.send(
|
||||
+ b"HTTP/1.0 307 Temporary Redirect\r\n"
|
||||
+ b"Location: https://example.com/someone-elses-server\r\n"
|
||||
+ b"\r\n")
|
||||
+ conn_to_client.close() # RST
|
||||
+ client_can_continue_with_wrap_socket.set()
|
||||
+ return True # Tell the server to stop.
|
||||
+
|
||||
+ server = self.SingleConnectionTestServerThread(
|
||||
+ call_after_accept=call_after_accept,
|
||||
+ name="preauth_data_to_tls_client")
|
||||
+ server.__enter__() # starts it
|
||||
+ self.addCleanup(server.__exit__) # ... & unittest.TestCase stops it.
|
||||
+
|
||||
+ # Redundant; call_after_accept sets SO_LINGER on the accepted conn.
|
||||
+ set_socket_so_linger_on_with_zero_timeout(server.listener)
|
||||
+
|
||||
+ with socket.socket() as client:
|
||||
+ client.connect(server.listener.getsockname())
|
||||
+ if not client_can_continue_with_wrap_socket.wait(2.0):
|
||||
+ self.fail("test server took too long.")
|
||||
+ ssl_ctx = ssl.create_default_context()
|
||||
+ try:
|
||||
+ tls_client = ssl_ctx.wrap_socket(
|
||||
+ client, server_hostname="localhost")
|
||||
+ except OSError as err: # SSLError inherits from OSError
|
||||
+ wrap_error = err
|
||||
+ received_data = b""
|
||||
+ else:
|
||||
+ wrap_error = None
|
||||
+ received_data = tls_client.recv(400)
|
||||
+ tls_client.close()
|
||||
+
|
||||
+ server.join()
|
||||
+ self.assertEqual(b"", received_data)
|
||||
+ self.assertIsInstance(wrap_error, OSError) # All platforms.
|
||||
+ self.non_linux_skip_if_other_okay_error(wrap_error)
|
||||
+ self.assertIsInstance(wrap_error, ssl.SSLError)
|
||||
+ self.assertIn("before TLS handshake with data", wrap_error.args[1])
|
||||
+ self.assertIn("before TLS handshake with data", wrap_error.reason)
|
||||
+ self.assertNotEqual(0, wrap_error.args[0])
|
||||
+ self.assertIsNone(wrap_error.library, msg="attr must exist")
|
||||
+
|
||||
+ def test_https_client_non_tls_response_ignored(self):
|
||||
+
|
||||
+ server_responding = threading.Event()
|
||||
+
|
||||
+ class SynchronizedHTTPSConnection(http.client.HTTPSConnection):
|
||||
+ def connect(self):
|
||||
+ http.client.HTTPConnection.connect(self)
|
||||
+ # Wait for our fault injection server to have done its thing.
|
||||
+ if not server_responding.wait(1.0) and support.verbose:
|
||||
+ sys.stdout.write("server_responding event never set.")
|
||||
+ self.sock = self._context.wrap_socket(
|
||||
+ self.sock, server_hostname=self.host)
|
||||
+
|
||||
+ def call_after_accept(conn_to_client):
|
||||
+ # This forces an immediate connection close via RST on .close().
|
||||
+ set_socket_so_linger_on_with_zero_timeout(conn_to_client)
|
||||
+ conn_to_client.send(
|
||||
+ b"HTTP/1.0 402 Payment Required\r\n"
|
||||
+ b"\r\n")
|
||||
+ conn_to_client.close() # RST
|
||||
+ server_responding.set()
|
||||
+ return True # Tell the server to stop.
|
||||
+
|
||||
+ server = self.SingleConnectionTestServerThread(
|
||||
+ call_after_accept=call_after_accept,
|
||||
+ name="non_tls_http_RST_responder")
|
||||
+ server.__enter__() # starts it
|
||||
+ self.addCleanup(server.__exit__) # ... & unittest.TestCase stops it.
|
||||
+ # Redundant; call_after_accept sets SO_LINGER on the accepted conn.
|
||||
+ set_socket_so_linger_on_with_zero_timeout(server.listener)
|
||||
+
|
||||
+ connection = SynchronizedHTTPSConnection(
|
||||
+ f"localhost",
|
||||
+ port=server.port,
|
||||
+ context=ssl.create_default_context(),
|
||||
+ timeout=2.0,
|
||||
+ )
|
||||
+ # There are lots of reasons this raises as desired, long before this
|
||||
+ # test was added. Sending the request requires a successful TLS wrapped
|
||||
+ # socket; that fails if the connection is broken. It may seem pointless
|
||||
+ # to test this. It serves as an illustration of something that we never
|
||||
+ # want to happen... properly not happening.
|
||||
+ with self.assertRaises(OSError) as err_ctx:
|
||||
+ connection.request("HEAD", "/test", headers={"Host": "localhost"})
|
||||
+ response = connection.getresponse()
|
||||
+
|
||||
+
|
||||
def test_main(verbose=False):
|
||||
if support.verbose:
|
||||
import warnings
|
||||
Index: Python-3.6.15/Misc/NEWS.d/next/Security/2023-08-22-17-39-12.gh-issue-108310.fVM3sg.rst
|
||||
===================================================================
|
||||
--- /dev/null
|
||||
+++ Python-3.6.15/Misc/NEWS.d/next/Security/2023-08-22-17-39-12.gh-issue-108310.fVM3sg.rst
|
||||
@@ -0,0 +1,7 @@
|
||||
+Fixed an issue where instances of :class:`ssl.SSLSocket` were vulnerable to
|
||||
+a bypass of the TLS handshake and included protections (like certificate
|
||||
+verification) and treating sent unencrypted data as if it were
|
||||
+post-handshake TLS encrypted data. Security issue reported as
|
||||
+`CVE-2023-40217
|
||||
+<https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2023-40217>`_ by
|
||||
+Aapo Oksman. Patch by Gregory P. Smith.
|
26
PACKAGING-NOTES
Normal file
26
PACKAGING-NOTES
Normal file
|
@ -0,0 +1,26 @@
|
|||
Notes for packagers of Python3
|
||||
==============================
|
||||
|
||||
0. Faster build turnaround
|
||||
--------------------------
|
||||
|
||||
By default, python builds with profile-guided optimization. This needs
|
||||
an additional run of the test suite and it is generally slow.
|
||||
PGO build takes around 50 minutes.
|
||||
|
||||
For development, use "--without profileopt" option to disable PGO. This
|
||||
shortens the build time to ~5 minutes including test suite.
|
||||
|
||||
1. import_failed.map
|
||||
----------------------
|
||||
|
||||
This is a mechanism installed as part of python3-base, that places shim modules
|
||||
on python's path (through a generated zzzz-import-failed-hooks.pth file, so that
|
||||
it is imported as much at the end as makes sense; and an _import_failed subdir
|
||||
of /usr/lib/pythonX.Y). Then when the user tries to import a module that is part
|
||||
of a subpackage, the ImportError will contain a helpful message telling them
|
||||
which missing subpackage to install.
|
||||
|
||||
This can sometimes cause problems on non-standard configurations, if the pth
|
||||
gets included too early (for instance if you are using a script to include all
|
||||
pths by hand in some strange order). Just something to look out for.
|
82
Python-3.0b1-record-rpm.patch
Normal file
82
Python-3.0b1-record-rpm.patch
Normal file
|
@ -0,0 +1,82 @@
|
|||
--- a/Lib/distutils/command/install.py
|
||||
+++ b/Lib/distutils/command/install.py
|
||||
@@ -131,6 +131,8 @@ class install(Command):
|
||||
|
||||
('record=', None,
|
||||
"filename in which to record list of installed files"),
|
||||
+ ('record-rpm=', None,
|
||||
+ "filename in which to record list of installed files and directories suitable as filelist for rpm"),
|
||||
]
|
||||
|
||||
boolean_options = ['compile', 'force', 'skip-build']
|
||||
@@ -213,6 +215,7 @@ class install(Command):
|
||||
#self.install_info = None
|
||||
|
||||
self.record = None
|
||||
+ self.record_rpm = None
|
||||
|
||||
|
||||
# -- Option finalizing methods -------------------------------------
|
||||
@@ -560,12 +563,61 @@ class install(Command):
|
||||
self.create_path_file()
|
||||
|
||||
# write list of installed files, if requested.
|
||||
- if self.record:
|
||||
+ if self.record or self.record_rpm:
|
||||
outputs = self.get_outputs()
|
||||
if self.root: # strip any package prefix
|
||||
root_len = len(self.root)
|
||||
for counter in range(len(outputs)):
|
||||
outputs[counter] = outputs[counter][root_len:]
|
||||
+ if self.record_rpm: # add directories
|
||||
+ self.record = self.record_rpm
|
||||
+ dirs = []
|
||||
+ # directories to reject:
|
||||
+ rejectdirs = [
|
||||
+ '/etc',
|
||||
+ '/',
|
||||
+ '',
|
||||
+ self.prefix,
|
||||
+ self.exec_prefix,
|
||||
+ self.install_base,
|
||||
+ self.install_platbase,
|
||||
+ self.install_purelib,
|
||||
+ self.install_platlib,
|
||||
+ self.install_headers[:len(self.install_headers) - len(self.distribution.get_name()) - 1],
|
||||
+ self.install_libbase,
|
||||
+ self.install_scripts,
|
||||
+ self.install_data,
|
||||
+ os.path.join(self.install_data, 'share'),
|
||||
+ os.path.join(self.install_data, 'share', 'doc'),
|
||||
+ ]
|
||||
+ # directories whose childs reject:
|
||||
+ rejectdirs2 = [
|
||||
+ os.path.join(self.install_data, 'share', 'man'),
|
||||
+ ]
|
||||
+ # directories whose grandsons reject:
|
||||
+ rejectdirs3 = [
|
||||
+ os.path.join(self.install_data, 'share', 'man'),
|
||||
+ os.path.join(self.install_data, 'share', 'locale'),
|
||||
+ ]
|
||||
+ for counter in range(len(rejectdirs)):
|
||||
+ if len(rejectdirs[counter]) > root_len:
|
||||
+ rejectdirs[counter] = rejectdirs[counter][root_len:]
|
||||
+ for counter in range(len(rejectdirs2)):
|
||||
+ if len(rejectdirs2[counter]) > root_len:
|
||||
+ rejectdirs2[counter] = rejectdirs2[counter][root_len:]
|
||||
+ for counter in range(len(rejectdirs3)):
|
||||
+ if len(rejectdirs3[counter]) > root_len:
|
||||
+ rejectdirs3[counter] = rejectdirs3[counter][root_len:]
|
||||
+ for counter in range(len(outputs)):
|
||||
+ directory = os.path.dirname(outputs[counter])
|
||||
+ while directory not in rejectdirs and \
|
||||
+ os.path.dirname(directory) not in rejectdirs2 and \
|
||||
+ os.path.dirname(os.path.dirname(directory)) not in rejectdirs3:
|
||||
+ dirname = '%dir ' + directory
|
||||
+ if dirname not in dirs:
|
||||
+ dirs.append(dirname)
|
||||
+ directory = os.path.dirname(directory)
|
||||
+ outputs += dirs
|
||||
self.execute(write_file,
|
||||
(self.record, outputs),
|
||||
"writing list of installed files to '%s'" %
|
16
Python-3.6.15.tar.xz.asc
Normal file
16
Python-3.6.15.tar.xz.asc
Normal file
|
@ -0,0 +1,16 @@
|
|||
-----BEGIN PGP SIGNATURE-----
|
||||
|
||||
iQIzBAABCgAdFiEEDZbfTUEQ5cQ/v7F/LTR+pqplQh0FAmEzEKkACgkQLTR+pqpl
|
||||
Qh2VVA/+P7Lpu3yBKzEp0x6rzWnjTwHWV4l6F5JW/0wJRtmPM5mp50TLZSivpbf6
|
||||
jNDdgu5kKoHGCjF6YvwqxUF4VUdckm5SmMom9EV3PJUlcK0TULMwDTfVeCnjO1p8
|
||||
MGYK7R2rWYq/TM43z3xKwmqpsx0fZs3Usz0WNuR+LrvSh2cNN8Lzu0CjR5SON230
|
||||
0PJrS9362SOaqNz9ZXJz5/G2LHFf4DMkZE77kUD8+72yJn6ZVTs142QftRkVMAlS
|
||||
aMcVdWpy1WYQKdZhDVnaIQdbxuDKFEh5FO6mbQQ3ONtbMBKXjmg1e9FzdgwUuF3q
|
||||
NkitqFJpeOv2qvZ9FyMdjWuh0RzkMK8O+yj/c7x264e6TdXEZ237hG7GmTUhLfwW
|
||||
ucJNhgJ9t9FkZ8l/WaBQ5VLLDxIoZjqdLC7vzvHdhRrERBYIxTKMkFpyqUmd+6V8
|
||||
JKLQwegKMk3kyZuXvXTvfBbDjEBP0K748PU9Y84XJyc3rZ9W5Q/5hXALH2vjWO4/
|
||||
k2zImhAJy9FpLX3iNkpcwdoqZZtxA+Ktq9kF8qqyqKpcn8r+XJ+7TIpi+GC7KJWn
|
||||
+uAro69XvZQbtOl/bdIXDysPDQBqKKdqR2VLF9/V5/1HZiLLPxCaS63CmID0Ty1Y
|
||||
Leu1PsUX2MEHBjgUGBObnI1kYDNBK0C+KNtYxlKEw03fAk2Shho=
|
||||
=3GOp
|
||||
-----END PGP SIGNATURE-----
|
43
README.SUSE
Normal file
43
README.SUSE
Normal file
|
@ -0,0 +1,43 @@
|
|||
Python 3 in SUSE
|
||||
==============
|
||||
|
||||
* Subpackages *
|
||||
|
||||
Python 3 is split into several subpackages, based on external dependencies.
|
||||
The main package 'python3' has soft dependencies on all subpackages needed to
|
||||
assemble the standard library; however, these might not all be installed by default.
|
||||
|
||||
If you attempt to import a module that is currently not installed, an ImportError is thrown,
|
||||
with instructions to install the missing subpackage. Installing the subpackage might result
|
||||
in installing libraries that the subpackage requires to function.
|
||||
|
||||
|
||||
* ensurepip *
|
||||
|
||||
The 'ensurepip' module from Python 3 standard library (PEP 453) is supposed to deploy
|
||||
a bundled copy of the pip installer. This makes no sense in a managed distribution like SUSE.
|
||||
Instead, you need to install package 'python3-pip'. Usually this will be installed automatically
|
||||
with 'python3'.
|
||||
|
||||
Using 'ensurepip' when pip is not installed will result in an ImportError with instructions
|
||||
to install 'python3-pip'.
|
||||
|
||||
|
||||
* Documentation *
|
||||
|
||||
You can find documentation in seprarate packages: python3-doc and
|
||||
python3-doc-pdf. These contan following documents:
|
||||
|
||||
Tutorial, What's New in Python, Global Module Index, Library Reference,
|
||||
Macintosh Module Reference, Installing Python Modules, Distributing Python
|
||||
Modules, Language Reference, Extending and Embedding, Python/C API,
|
||||
Documenting Python
|
||||
|
||||
The python3-doc package constains many text files from source tarball.
|
||||
|
||||
|
||||
* Interactive mode *
|
||||
|
||||
Interactive mode is by default enhanced with of history and command completion.
|
||||
If you don't like these features, you can unset the PYTHONSTARTUP variable
|
||||
in your .profile or disable it system wide in /etc/profile.d/python.sh.
|
12
aarch64-prolong-timeout.patch
Normal file
12
aarch64-prolong-timeout.patch
Normal file
|
@ -0,0 +1,12 @@
|
|||
--- a/Lib/test/test_os.py
|
||||
+++ b/Lib/test/test_os.py
|
||||
@@ -614,7 +614,8 @@ class UtimeTests(unittest.TestCase):
|
||||
else:
|
||||
# bpo-30649: PPC64 Fedora 3.x buildbot requires
|
||||
# at least a delta of 14 ms
|
||||
- delta = 0.020
|
||||
+ # 0.020 is still too little for out slow buildbots.
|
||||
+ delta = 0.030
|
||||
st = os.stat(self.fname)
|
||||
msg = ("st_time=%r, current=%r, dt=%r"
|
||||
% (st.st_mtime, current, st.st_mtime - current))
|
3
baselibs.conf
Normal file
3
baselibs.conf
Normal file
|
@ -0,0 +1,3 @@
|
|||
python36-base
|
||||
python36
|
||||
libpython3_6m1_0
|
89
bpo-36576-skip_tests_for_OpenSSL-111.patch
Normal file
89
bpo-36576-skip_tests_for_OpenSSL-111.patch
Normal file
|
@ -0,0 +1,89 @@
|
|||
From 6f582ba86532d842f4b9fe55427488d2ac9dd229 Mon Sep 17 00:00:00 2001
|
||||
From: Victor Stinner <vstinner@redhat.com>
|
||||
Date: Fri, 5 Apr 2019 10:23:04 +0200
|
||||
Subject: [PATCH] bpo-36576: Skip test_ssl and test_asyncio tests failing with
|
||||
OpenSSL 1.1.1
|
||||
|
||||
Some test_ssl and test_asyncio are written for OpenSSL 1.0 and TLS
|
||||
1.0, but fail with OpenSSL 1.1.1 and TLS 1.3.
|
||||
|
||||
Fixing these needs require to backport new ssl flags like
|
||||
ssl.OP_NO_TLSv1_3 or ssl.OP_NO_COMPRESSION which cannot be done in a
|
||||
minor 3.5.x release. Moreover, it is not really worth it: the code
|
||||
works fine, issues are in the tests.
|
||||
---
|
||||
Lib/test/test_asyncio/test_events.py | 7 +++++++
|
||||
Lib/test/test_ssl.py | 5 +++++
|
||||
Misc/NEWS.d/next/Tests/2019-04-05-10-34-29.bpo-36576.7Cp2kK.rst | 1 +
|
||||
3 files changed, 13 insertions(+)
|
||||
create mode 100644 Misc/NEWS.d/next/Tests/2019-04-05-10-34-29.bpo-36576.7Cp2kK.rst
|
||||
|
||||
--- a/Lib/test/test_asyncio/test_events.py
|
||||
+++ b/Lib/test/test_asyncio/test_events.py
|
||||
@@ -39,6 +39,12 @@ except ImportError:
|
||||
from asyncio import test_support as support
|
||||
|
||||
|
||||
+if ssl is not None:
|
||||
+ IS_OPENSSL_1_1_1 = ssl.OPENSSL_VERSION_INFO >= (1, 1, 1)
|
||||
+else:
|
||||
+ IS_OPENSSL_1_1_1 = False
|
||||
+
|
||||
+
|
||||
def osx_tiger():
|
||||
"""Return True if the platform is Mac OS 10.4 or older."""
|
||||
if sys.platform != 'darwin':
|
||||
@@ -1159,6 +1165,7 @@ class EventLoopTestsMixin:
|
||||
self.test_create_unix_server_ssl_verify_failed()
|
||||
|
||||
@unittest.skipIf(ssl is None, 'No ssl module')
|
||||
+ @unittest.skipIf(IS_OPENSSL_1_1_1, "bpo-36576: fail on OpenSSL 1.1.1")
|
||||
def test_create_server_ssl_match_failed(self):
|
||||
proto = MyProto(loop=self.loop)
|
||||
server, host, port = self._make_ssl_server(
|
||||
--- a/Lib/test/test_ssl.py
|
||||
+++ b/Lib/test/test_ssl.py
|
||||
@@ -37,6 +37,7 @@ PROTOCOLS = sorted(ssl._PROTOCOL_NAMES)
|
||||
HOST = support.HOST
|
||||
IS_LIBRESSL = ssl.OPENSSL_VERSION.startswith('LibreSSL')
|
||||
IS_OPENSSL_1_1 = not IS_LIBRESSL and ssl.OPENSSL_VERSION_INFO >= (1, 1, 0)
|
||||
+IS_OPENSSL_1_1_1 = not IS_LIBRESSL and ssl.OPENSSL_VERSION_INFO >= (1, 1, 1)
|
||||
|
||||
|
||||
def data_file(*name):
|
||||
@@ -961,6 +962,7 @@ class ContextTests(unittest.TestCase):
|
||||
self.assertIn('AES128-GCM-SHA256', names)
|
||||
|
||||
@skip_if_broken_ubuntu_ssl
|
||||
+ @unittest.skipIf(IS_OPENSSL_1_1_1, "bpo-36576: fail on OpenSSL 1.1.1")
|
||||
def test_options(self):
|
||||
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
|
||||
# OP_ALL | OP_NO_SSLv2 | OP_NO_SSLv3 is the default value
|
||||
@@ -3240,6 +3242,7 @@ if _have_threads:
|
||||
])
|
||||
|
||||
@unittest.skipUnless(ssl.HAS_ECDH, "test requires ECDH-enabled OpenSSL")
|
||||
+ @unittest.skipIf(IS_OPENSSL_1_1_1, "bpo-36576: fail on OpenSSL 1.1.1")
|
||||
def test_default_ecdh_curve(self):
|
||||
# Issue #21015: elliptic curve-based Diffie Hellman key exchange
|
||||
# should be enabled by default on SSL contexts.
|
||||
@@ -3372,6 +3375,7 @@ if _have_threads:
|
||||
self.assertIs(stats['client_alpn_protocol'], None)
|
||||
|
||||
@unittest.skipUnless(ssl.HAS_ALPN, "ALPN support needed for this test")
|
||||
+ @unittest.skipIf(IS_OPENSSL_1_1_1, "bpo-36576: fail on OpenSSL 1.1.1")
|
||||
def test_alpn_protocols(self):
|
||||
server_protocols = ['foo', 'bar', 'milkshake']
|
||||
protocol_tests = [
|
||||
@@ -3553,6 +3557,7 @@ if _have_threads:
|
||||
self.assertEqual(cm.exception.reason, 'TLSV1_ALERT_INTERNAL_ERROR')
|
||||
self.assertIn("TypeError", stderr.getvalue())
|
||||
|
||||
+ @unittest.skipIf(IS_OPENSSL_1_1_1, "bpo-36576: fail on OpenSSL 1.1.1")
|
||||
def test_shared_ciphers(self):
|
||||
server_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
|
||||
server_context.load_cert_chain(SIGNED_CERTFILE)
|
||||
--- /dev/null
|
||||
+++ b/Misc/NEWS.d/next/Tests/2019-04-05-10-34-29.bpo-36576.7Cp2kK.rst
|
||||
@@ -0,0 +1 @@
|
||||
+Skip test_ssl and test_asyncio tests failing with OpenSSL 1.1.1.
|
30
bpo-44434-libgcc_s-for-pthread_cancel.patch
Normal file
30
bpo-44434-libgcc_s-for-pthread_cancel.patch
Normal file
|
@ -0,0 +1,30 @@
|
|||
---
|
||||
Modules/_testcapimodule.c | 2 --
|
||||
Modules/_threadmodule.c | 5 ++++-
|
||||
2 files changed, 4 insertions(+), 3 deletions(-)
|
||||
|
||||
--- a/Modules/_testcapimodule.c
|
||||
+++ b/Modules/_testcapimodule.c
|
||||
@@ -3651,8 +3651,6 @@ temporary_c_thread(void *data)
|
||||
PyGILState_Release(state);
|
||||
|
||||
PyThread_release_lock(test_c_thread->exit_event);
|
||||
-
|
||||
- PyThread_exit_thread();
|
||||
}
|
||||
|
||||
static PyObject *
|
||||
--- a/Modules/_threadmodule.c
|
||||
+++ b/Modules/_threadmodule.c
|
||||
@@ -1027,7 +1027,10 @@ t_bootstrap(void *boot_raw)
|
||||
nb_threads--;
|
||||
PyThreadState_Clear(tstate);
|
||||
PyThreadState_DeleteCurrent();
|
||||
- PyThread_exit_thread();
|
||||
+
|
||||
+ // bpo-44434: Don't call explicitly PyThread_exit_thread(). On Linux with
|
||||
+ // the glibc, pthread_exit() can abort the whole process if dlopen() fails
|
||||
+ // to open the libgcc_s.so library (ex: EMFILE error).
|
||||
}
|
||||
|
||||
static PyObject *
|
82
bpo-46623-skip-zlib-s390x.patch
Normal file
82
bpo-46623-skip-zlib-s390x.patch
Normal file
|
@ -0,0 +1,82 @@
|
|||
From 9475dc0b8d2a0db40278bbcb88a89b1265a77ec9 Mon Sep 17 00:00:00 2001
|
||||
From: Victor Stinner <vstinner@python.org>
|
||||
Date: Fri, 25 Feb 2022 00:32:02 +0100
|
||||
Subject: [PATCH] bpo-46623: Skip two test_zlib tests on s390x (GH-31096)
|
||||
|
||||
Skip test_pair() and test_speech128() of test_zlib on s390x since
|
||||
they fail if zlib uses the s390x hardware accelerator.
|
||||
---
|
||||
Lib/test/test_zlib.py | 34 ++++++++++
|
||||
Misc/NEWS.d/next/Tests/2022-02-03-09-45-26.bpo-46623.vxzuhV.rst | 2
|
||||
2 files changed, 36 insertions(+)
|
||||
create mode 100644 Misc/NEWS.d/next/Tests/2022-02-03-09-45-26.bpo-46623.vxzuhV.rst
|
||||
|
||||
--- a/Lib/test/test_zlib.py
|
||||
+++ b/Lib/test/test_zlib.py
|
||||
@@ -1,6 +1,7 @@
|
||||
import unittest
|
||||
from test import support
|
||||
import binascii
|
||||
+import os
|
||||
import pickle
|
||||
import random
|
||||
import sys
|
||||
@@ -15,6 +16,37 @@ requires_Decompress_copy = unittest.skip
|
||||
hasattr(zlib.decompressobj(), "copy"),
|
||||
'requires Decompress.copy()')
|
||||
|
||||
+# bpo-46623: On s390x, when a hardware accelerator is used, using different
|
||||
+# ways to compress data with zlib can produce different compressed data.
|
||||
+# Simplified test_pair() code:
|
||||
+#
|
||||
+# def func1(data):
|
||||
+# return zlib.compress(data)
|
||||
+#
|
||||
+# def func2(data)
|
||||
+# co = zlib.compressobj()
|
||||
+# x1 = co.compress(data)
|
||||
+# x2 = co.flush()
|
||||
+# return x1 + x2
|
||||
+#
|
||||
+# On s390x if zlib uses a hardware accelerator, func1() creates a single
|
||||
+# "final" compressed block whereas func2() produces 3 compressed blocks (the
|
||||
+# last one is a final block). On other platforms with no accelerator, func1()
|
||||
+# and func2() produce the same compressed data made of a single (final)
|
||||
+# compressed block.
|
||||
+#
|
||||
+# Only the compressed data is different, the decompression returns the original
|
||||
+# data:
|
||||
+#
|
||||
+# zlib.decompress(func1(data)) == zlib.decompress(func2(data)) == data
|
||||
+#
|
||||
+# Make the assumption that s390x always has an accelerator to simplify the skip
|
||||
+# condition. Windows doesn't have os.uname() but it doesn't support s390x.
|
||||
+skip_on_s390x = unittest.skipIf(
|
||||
+ hasattr(os, 'uname') and
|
||||
+ os.uname().machine in ['s390x', 's390'],
|
||||
+ 'skipped on s390x')
|
||||
+
|
||||
|
||||
class VersionTestCase(unittest.TestCase):
|
||||
|
||||
@@ -174,6 +206,7 @@ class CompressTestCase(BaseCompressTestC
|
||||
bufsize=zlib.DEF_BUF_SIZE),
|
||||
HAMLET_SCENE)
|
||||
|
||||
+ @skip_on_s390x
|
||||
def test_speech128(self):
|
||||
# compress more data
|
||||
data = HAMLET_SCENE * 128
|
||||
@@ -225,6 +258,7 @@ class CompressTestCase(BaseCompressTestC
|
||||
|
||||
class CompressObjectTestCase(BaseCompressTestCase, unittest.TestCase):
|
||||
# Test compression object
|
||||
+ @skip_on_s390x
|
||||
def test_pair(self):
|
||||
# straightforward compress/decompress objects
|
||||
datasrc = HAMLET_SCENE * 128
|
||||
--- /dev/null
|
||||
+++ b/Misc/NEWS.d/next/Tests/2022-02-03-09-45-26.bpo-46623.vxzuhV.rst
|
||||
@@ -0,0 +1,2 @@
|
||||
+Skip test_pair() and test_speech128() of test_zlib on s390x since they fail
|
||||
+if zlib uses the s390x hardware accelerator. Patch by Victor Stinner.
|
160
bpo23395-PyErr_SetInterrupt-signal.patch
Normal file
160
bpo23395-PyErr_SetInterrupt-signal.patch
Normal file
|
@ -0,0 +1,160 @@
|
|||
From d823eee619c0e7c1a9bc5b0caa0d0fa4734ac052 Mon Sep 17 00:00:00 2001
|
||||
From: =?UTF-8?q?Mat=C4=9Bj=20Cepl?= <mcepl@cepl.eu>
|
||||
Date: Thu, 23 May 2019 22:30:00 +0200
|
||||
Subject: [PATCH] bpo-23395: Fix PyErr_SetInterrupt if the SIGINT signal is
|
||||
ignored or not handled (GH-7778)
|
||||
MIME-Version: 1.0
|
||||
Content-Type: text/plain; charset=UTF-8
|
||||
Content-Transfer-Encoding: 8bit
|
||||
|
||||
``_thread.interrupt_main()`` now avoids setting the Python error status if the ``SIGINT`` signal is ignored or not handled by Python.
|
||||
(cherry picked from commit 608876b6b1eb59538e6c29671a733033fb8b5be7)
|
||||
|
||||
Co-authored-by: Matěj Cepl <mcepl@cepl.eu>
|
||||
---
|
||||
Doc/c-api/exceptions.rst | 12 +++----
|
||||
Doc/library/_thread.rst | 8 +++--
|
||||
Lib/test/test_threading.py | 35 +++++++++++++++++++
|
||||
Misc/ACKS | 2 +-
|
||||
.../2016-07-27-11-06-43.bpo-23395.MuCEX9.rst | 2 ++
|
||||
Modules/signalmodule.c | 13 ++++---
|
||||
6 files changed, 59 insertions(+), 13 deletions(-)
|
||||
create mode 100644 Misc/NEWS.d/next/Library/2016-07-27-11-06-43.bpo-23395.MuCEX9.rst
|
||||
|
||||
--- a/Doc/c-api/exceptions.rst
|
||||
+++ b/Doc/c-api/exceptions.rst
|
||||
@@ -504,13 +504,13 @@ Signal Handling
|
||||
single: SIGINT
|
||||
single: KeyboardInterrupt (built-in exception)
|
||||
|
||||
- This function simulates the effect of a :const:`SIGINT` signal arriving --- the
|
||||
- next time :c:func:`PyErr_CheckSignals` is called, :exc:`KeyboardInterrupt` will
|
||||
- be raised. It may be called without holding the interpreter lock.
|
||||
-
|
||||
- .. % XXX This was described as obsolete, but is used in
|
||||
- .. % _thread.interrupt_main() (used from IDLE), so it's still needed.
|
||||
+ Simulate the effect of a :const:`SIGINT` signal arriving. The next time
|
||||
+ :c:func:`PyErr_CheckSignals` is called, the Python signal handler for
|
||||
+ :const:`SIGINT` will be called.
|
||||
|
||||
+ If :const:`SIGINT` isn't handled by Python (it was set to
|
||||
+ :data:`signal.SIG_DFL` or :data:`signal.SIG_IGN`), this function does
|
||||
+ nothing.
|
||||
|
||||
.. c:function:: int PySignal_SetWakeupFd(int fd)
|
||||
|
||||
--- a/Doc/library/_thread.rst
|
||||
+++ b/Doc/library/_thread.rst
|
||||
@@ -57,8 +57,12 @@ It defines the following constants and f
|
||||
|
||||
.. function:: interrupt_main()
|
||||
|
||||
- Raise a :exc:`KeyboardInterrupt` exception in the main thread. A subthread can
|
||||
- use this function to interrupt the main thread.
|
||||
+ Simulate the effect of a :data:`signal.SIGINT` signal arriving in the main
|
||||
+ thread. A thread can use this function to interrupt the main thread.
|
||||
+
|
||||
+ If :data:`signal.SIGINT` isn't handled by Python (it was set to
|
||||
+ :data:`signal.SIG_DFL` or :data:`signal.SIG_IGN`), this function does
|
||||
+ nothing.
|
||||
|
||||
|
||||
.. function:: exit()
|
||||
--- a/Lib/test/test_threading.py
|
||||
+++ b/Lib/test/test_threading.py
|
||||
@@ -16,6 +16,7 @@ import unittest
|
||||
import weakref
|
||||
import os
|
||||
import subprocess
|
||||
+import signal
|
||||
|
||||
from test import lock_tests
|
||||
from test import support
|
||||
@@ -1162,6 +1163,7 @@ class BoundedSemaphoreTests(lock_tests.B
|
||||
class BarrierTests(lock_tests.BarrierTests):
|
||||
barriertype = staticmethod(threading.Barrier)
|
||||
|
||||
+
|
||||
class MiscTestCase(unittest.TestCase):
|
||||
def test__all__(self):
|
||||
extra = {"ThreadError"}
|
||||
@@ -1169,5 +1171,38 @@ class MiscTestCase(unittest.TestCase):
|
||||
support.check__all__(self, threading, ('threading', '_thread'),
|
||||
extra=extra, blacklist=blacklist)
|
||||
|
||||
+
|
||||
+class InterruptMainTests(unittest.TestCase):
|
||||
+ def test_interrupt_main_subthread(self):
|
||||
+ # Calling start_new_thread with a function that executes interrupt_main
|
||||
+ # should raise KeyboardInterrupt upon completion.
|
||||
+ def call_interrupt():
|
||||
+ _thread.interrupt_main()
|
||||
+ t = threading.Thread(target=call_interrupt)
|
||||
+ with self.assertRaises(KeyboardInterrupt):
|
||||
+ t.start()
|
||||
+ t.join()
|
||||
+ t.join()
|
||||
+
|
||||
+ def test_interrupt_main_mainthread(self):
|
||||
+ # Make sure that if interrupt_main is called in main thread that
|
||||
+ # KeyboardInterrupt is raised instantly.
|
||||
+ with self.assertRaises(KeyboardInterrupt):
|
||||
+ _thread.interrupt_main()
|
||||
+
|
||||
+ def test_interrupt_main_noerror(self):
|
||||
+ handler = signal.getsignal(signal.SIGINT)
|
||||
+ try:
|
||||
+ # No exception should arise.
|
||||
+ signal.signal(signal.SIGINT, signal.SIG_IGN)
|
||||
+ _thread.interrupt_main()
|
||||
+
|
||||
+ signal.signal(signal.SIGINT, signal.SIG_DFL)
|
||||
+ _thread.interrupt_main()
|
||||
+ finally:
|
||||
+ # Restore original handler
|
||||
+ signal.signal(signal.SIGINT, handler)
|
||||
+
|
||||
+
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
--- a/Misc/ACKS
|
||||
+++ b/Misc/ACKS
|
||||
@@ -248,7 +248,7 @@ Donn Cave
|
||||
Charles Cazabon
|
||||
Jesús Cea Avión
|
||||
Per Cederqvist
|
||||
-Matej Cepl
|
||||
+Matěj Cepl
|
||||
Carl Cerecke
|
||||
Octavian Cerna
|
||||
Michael Cetrulo
|
||||
--- /dev/null
|
||||
+++ b/Misc/NEWS.d/next/Library/2016-07-27-11-06-43.bpo-23395.MuCEX9.rst
|
||||
@@ -0,0 +1,2 @@
|
||||
+``_thread.interrupt_main()`` now avoids setting the Python error status
|
||||
+if the ``SIGINT`` signal is ignored or not handled by Python.
|
||||
--- a/Modules/signalmodule.c
|
||||
+++ b/Modules/signalmodule.c
|
||||
@@ -1561,13 +1561,18 @@ PyErr_CheckSignals(void)
|
||||
}
|
||||
|
||||
|
||||
-/* Replacements for intrcheck.c functionality
|
||||
- * Declared in pyerrors.h
|
||||
- */
|
||||
+/* Simulate the effect of a signal.SIGINT signal arriving. The next time
|
||||
+ PyErr_CheckSignals is called, the Python SIGINT signal handler will be
|
||||
+ raised.
|
||||
+
|
||||
+ Missing signal handler for the SIGINT signal is silently ignored. */
|
||||
void
|
||||
PyErr_SetInterrupt(void)
|
||||
{
|
||||
- trip_signal(SIGINT);
|
||||
+ if ((Handlers[SIGINT].func != IgnoreHandler) &&
|
||||
+ (Handlers[SIGINT].func != DefaultHandler)) {
|
||||
+ trip_signal(SIGINT);
|
||||
+ }
|
||||
}
|
||||
|
||||
void
|
87
bpo27321-email-no-replace-header.patch
Normal file
87
bpo27321-email-no-replace-header.patch
Normal file
|
@ -0,0 +1,87 @@
|
|||
From 72ce82abcf9051b18a05350936de7ecab7306662 Mon Sep 17 00:00:00 2001
|
||||
From: "Miss Skeleton (bot)" <31488909+miss-islington@users.noreply.github.com>
|
||||
Date: Mon, 19 Oct 2020 16:11:37 -0700
|
||||
Subject: [PATCH] bpo-27321 Fix email.generator.py to not replace a
|
||||
non-existent header. (GH-18074)
|
||||
|
||||
This PR replaces GH-1977. The reason for the replacement is two-fold.
|
||||
|
||||
The fix itself is different is that if the CTE header doesn't exist in the original message, it is inserted. This is important because the new CTE could be quoted-printable whereas the original is implicit 8bit.
|
||||
|
||||
Also the tests are different. The test_nonascii_as_string_without_cte test in GH-1977 doesn't actually test the issue in that it passes without the fix. The test_nonascii_as_string_without_content_type_and_cte test is improved here, and even though it doesn't fail without the fix, it is included for completeness.
|
||||
|
||||
Automerge-Triggered-By: @warsaw
|
||||
(cherry picked from commit bf838227c35212709dc43b3c3c57f8e1655c1d24)
|
||||
|
||||
Co-authored-by: Mark Sapiro <mark@msapiro.net>
|
||||
---
|
||||
Lib/email/generator.py | 6 +
|
||||
Lib/test/test_email/test_email.py | 35 ++++++++++
|
||||
Misc/NEWS.d/next/Library/2020-01-19-18-40-26.bpo-27321.8e6SpM.rst | 2
|
||||
3 files changed, 42 insertions(+), 1 deletion(-)
|
||||
create mode 100644 Misc/NEWS.d/next/Library/2020-01-19-18-40-26.bpo-27321.8e6SpM.rst
|
||||
|
||||
--- a/Lib/email/generator.py
|
||||
+++ b/Lib/email/generator.py
|
||||
@@ -186,7 +186,11 @@ class Generator:
|
||||
# If we munged the cte, copy the message again and re-fix the CTE.
|
||||
if munge_cte:
|
||||
msg = deepcopy(msg)
|
||||
- msg.replace_header('content-transfer-encoding', munge_cte[0])
|
||||
+ # Preserve the header order if the CTE header already exists.
|
||||
+ if msg.get('content-transfer-encoding') is None:
|
||||
+ msg['Content-Transfer-Encoding'] = munge_cte[0]
|
||||
+ else:
|
||||
+ msg.replace_header('content-transfer-encoding', munge_cte[0])
|
||||
msg.replace_header('content-type', munge_cte[1])
|
||||
# Write the headers. First we see if the message object wants to
|
||||
# handle that itself. If not, we'll do it generically.
|
||||
--- a/Lib/test/test_email/test_email.py
|
||||
+++ b/Lib/test/test_email/test_email.py
|
||||
@@ -314,6 +314,41 @@ class TestMessageAPI(TestEmailBase):
|
||||
g.flatten(msg)
|
||||
self.assertEqual(fullrepr, s.getvalue())
|
||||
|
||||
+ def test_nonascii_as_string_without_cte(self):
|
||||
+ m = textwrap.dedent("""\
|
||||
+ MIME-Version: 1.0
|
||||
+ Content-type: text/plain; charset="iso-8859-1"
|
||||
+
|
||||
+ Test if non-ascii messages with no Content-Transfer-Encoding set
|
||||
+ can be as_string'd:
|
||||
+ Föö bär
|
||||
+ """)
|
||||
+ source = m.encode('iso-8859-1')
|
||||
+ expected = textwrap.dedent("""\
|
||||
+ MIME-Version: 1.0
|
||||
+ Content-type: text/plain; charset="iso-8859-1"
|
||||
+ Content-Transfer-Encoding: quoted-printable
|
||||
+
|
||||
+ Test if non-ascii messages with no Content-Transfer-Encoding set
|
||||
+ can be as_string'd:
|
||||
+ F=F6=F6 b=E4r
|
||||
+ """)
|
||||
+ msg = email.message_from_bytes(source)
|
||||
+ self.assertEqual(msg.as_string(), expected)
|
||||
+
|
||||
+ def test_nonascii_as_string_without_content_type_and_cte(self):
|
||||
+ m = textwrap.dedent("""\
|
||||
+ MIME-Version: 1.0
|
||||
+
|
||||
+ Test if non-ascii messages with no Content-Type nor
|
||||
+ Content-Transfer-Encoding set can be as_string'd:
|
||||
+ Föö bär
|
||||
+ """)
|
||||
+ source = m.encode('iso-8859-1')
|
||||
+ expected = source.decode('ascii', 'replace')
|
||||
+ msg = email.message_from_bytes(source)
|
||||
+ self.assertEqual(msg.as_string(), expected)
|
||||
+
|
||||
def test_as_bytes(self):
|
||||
msg = self._msgobj('msg_01.txt')
|
||||
with openfile('msg_01.txt') as fp:
|
||||
--- /dev/null
|
||||
+++ b/Misc/NEWS.d/next/Library/2020-01-19-18-40-26.bpo-27321.8e6SpM.rst
|
||||
@@ -0,0 +1,2 @@
|
||||
+Fixed KeyError exception when flattening an email to a string attempts to
|
||||
+replace a non-existent Content-Transfer-Encoding header.
|
31
bpo36263-Fix_hashlib_scrypt.patch
Normal file
31
bpo36263-Fix_hashlib_scrypt.patch
Normal file
|
@ -0,0 +1,31 @@
|
|||
From b8dc92b32bc87b127b1679f4b4a4f987d1e1d080 Mon Sep 17 00:00:00 2001
|
||||
From: Victor Stinner <vstinner@redhat.com>
|
||||
Date: Mon, 11 Mar 2019 16:47:10 +0100
|
||||
Subject: [PATCH] bpo-36263: Fix hashlib.scrypt()
|
||||
|
||||
Fix hashlib.scrypt(): pass the salt when validating arguments.
|
||||
---
|
||||
.../next/Library/2019-03-11-16-52-09.bpo-36263.IzB4p5.rst | 1 +
|
||||
Modules/_hashopenssl.c | 6 +++++-
|
||||
2 files changed, 6 insertions(+), 1 deletion(-)
|
||||
create mode 100644 Misc/NEWS.d/next/Library/2019-03-11-16-52-09.bpo-36263.IzB4p5.rst
|
||||
|
||||
--- /dev/null
|
||||
+++ b/Misc/NEWS.d/next/Library/2019-03-11-16-52-09.bpo-36263.IzB4p5.rst
|
||||
@@ -0,0 +1 @@
|
||||
+Fix :func:`hashlib.scrypt`: pass the salt when validating arguments.
|
||||
--- a/Modules/_hashopenssl.c
|
||||
+++ b/Modules/_hashopenssl.c
|
||||
@@ -831,7 +831,11 @@ _hashlib_scrypt_impl(PyObject *module, P
|
||||
}
|
||||
|
||||
/* let OpenSSL validate the rest */
|
||||
- retval = EVP_PBE_scrypt(NULL, 0, NULL, 0, n, r, p, maxmem, NULL, 0);
|
||||
+ retval = EVP_PBE_scrypt(
|
||||
+ NULL, 0,
|
||||
+ (const unsigned char *)salt->buf, (size_t)salt->len,
|
||||
+ n, r, p, maxmem,
|
||||
+ NULL, 0);
|
||||
if (!retval) {
|
||||
/* sorry, can't do much better */
|
||||
PyErr_SetString(PyExc_ValueError,
|
55
bsc1188607-pythreadstate_clear-decref.patch
Normal file
55
bsc1188607-pythreadstate_clear-decref.patch
Normal file
|
@ -0,0 +1,55 @@
|
|||
From 35ab1e55aa0a359762c9eb6b2b5f7490440d6b1c Mon Sep 17 00:00:00 2001
|
||||
From: Victor Stinner <vstinner@python.org>
|
||||
Date: Tue, 24 Mar 2020 17:12:19 +0100
|
||||
Subject: [PATCH] bpo-20526: Fix PyThreadState_Clear(): don't decref frame
|
||||
(GH-19120) (GH-19136)
|
||||
|
||||
PyThreadState.frame is a borrowed reference, not a strong reference:
|
||||
PyThreadState_Clear() must not call Py_CLEAR(tstate->frame).
|
||||
|
||||
Remove test_threading.test_warnings_at_exit(): we cannot warranty
|
||||
that the Python thread state of daemon threads is cleared in a
|
||||
reliable way during Python shutdown.
|
||||
|
||||
(cherry picked from commit 5804f878e779712e803be927ca8a6df389d82cdf)
|
||||
(cherry picked from commit e97c8b0688bc62959ced477d842fcd37992ef649)
|
||||
---
|
||||
Include/pystate.h | 1 +
|
||||
Python/pystate.c | 12 ++++++++++--
|
||||
2 files changed, 11 insertions(+), 2 deletions(-)
|
||||
create mode 100644 Misc/NEWS.d/next/Core and Builtins/2020-03-23-18-08-34.bpo-20526.NHNZIv.rst
|
||||
|
||||
--- a/Include/pystate.h
|
||||
+++ b/Include/pystate.h
|
||||
@@ -88,6 +88,7 @@ typedef struct _ts {
|
||||
struct _ts *next;
|
||||
PyInterpreterState *interp;
|
||||
|
||||
+ /* Borrowed reference to the current frame (it can be NULL) */
|
||||
struct _frame *frame;
|
||||
int recursion_depth;
|
||||
char overflowed; /* The stack has overflowed. Allow 50 more calls
|
||||
--- a/Python/pystate.c
|
||||
+++ b/Python/pystate.c
|
||||
@@ -409,11 +409,19 @@ _PyState_ClearModules(void)
|
||||
void
|
||||
PyThreadState_Clear(PyThreadState *tstate)
|
||||
{
|
||||
- if (Py_VerboseFlag && tstate->frame != NULL)
|
||||
+ if (Py_VerboseFlag && tstate->frame != NULL) {
|
||||
+ /* bpo-20526: After the main thread calls
|
||||
+ _PyRuntimeState_SetFinalizing() in Py_FinalizeEx(), threads must
|
||||
+ exit when trying to take the GIL. If a thread exit in the middle of
|
||||
+ _PyEval_EvalFrameDefault(), tstate->frame is not reset to its
|
||||
+ previous value. It is more likely with daemon threads, but it can
|
||||
+ happen with regular threads if threading._shutdown() fails
|
||||
+ (ex: interrupted by CTRL+C). */
|
||||
fprintf(stderr,
|
||||
"PyThreadState_Clear: warning: thread still has a frame\n");
|
||||
+ }
|
||||
|
||||
- Py_CLEAR(tstate->frame);
|
||||
+ /* Don't clear tstate->frame: it is a borrowed reference */
|
||||
|
||||
Py_CLEAR(tstate->dict);
|
||||
Py_CLEAR(tstate->async_exc);
|
13
distutils-reproducible-compile.patch
Normal file
13
distutils-reproducible-compile.patch
Normal file
|
@ -0,0 +1,13 @@
|
|||
Index: Python-3.6.2/Lib/distutils/util.py
|
||||
===================================================================
|
||||
--- Python-3.6.2.orig/Lib/distutils/util.py
|
||||
+++ Python-3.6.2/Lib/distutils/util.py
|
||||
@@ -431,7 +431,7 @@ byte_compile(files, optimize=%r, force=%
|
||||
else:
|
||||
from py_compile import compile
|
||||
|
||||
- for file in py_files:
|
||||
+ for file in sorted(py_files):
|
||||
if file[-3:] != ".py":
|
||||
# This lets us be lazy and not filter filenames in
|
||||
# the "install_lib" command.
|
35
idle3.appdata.xml
Normal file
35
idle3.appdata.xml
Normal file
|
@ -0,0 +1,35 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
|
||||
<!-- Copyright 2017 Zbigniew Jędrzejewski-Szmek -->
|
||||
<application>
|
||||
<id type="desktop">idle3.desktop</id>
|
||||
<name>IDLE3</name>
|
||||
<metadata_licence>CC0</metadata_licence>
|
||||
<project_license>Python-2.0</project_license>
|
||||
<summary>Python 3 Integrated Development and Learning Environment</summary>
|
||||
<description>
|
||||
<p>
|
||||
IDLE is Python’s Integrated Development and Learning Environment.
|
||||
The GUI is uniform between Windows, Unix, and Mac OS X.
|
||||
IDLE provides an easy way to start writing, running, and debugging
|
||||
Python code.
|
||||
</p>
|
||||
<p>
|
||||
IDLE is written in pure Python, and uses the tkinter GUI toolkit.
|
||||
It provides:
|
||||
</p>
|
||||
<ul>
|
||||
<li>a Python shell window (interactive interpreter) with colorizing of code input, output, and error messages,</li>
|
||||
<li>a multi-window text editor with multiple undo, Python colorizing, smart indent, call tips, auto completion, and other features,</li>
|
||||
<li>search within any window, replace within editor windows, and search through multiple files (grep),</li>
|
||||
<li>a debugger with persistent breakpoints, stepping, and viewing of global and local namespaces.</li>
|
||||
</ul>
|
||||
</description>
|
||||
<url type="homepage">https://docs.python.org/3/library/idle.html</url>
|
||||
<screenshots>
|
||||
<screenshot type="default">http://in.waw.pl/~zbyszek/fedora/idle3-appdata/idle3-main-window.png</screenshot>
|
||||
<screenshot>http://in.waw.pl/~zbyszek/fedora/idle3-appdata/idle3-class-browser.png</screenshot>
|
||||
<screenshot>http://in.waw.pl/~zbyszek/fedora/idle3-appdata/idle3-code-viewer.png</screenshot>
|
||||
</screenshots>
|
||||
<update_contact>zbyszek@in.waw.pl</update_contact>
|
||||
</application>
|
12
idle3.desktop
Normal file
12
idle3.desktop
Normal file
|
@ -0,0 +1,12 @@
|
|||
[Desktop Entry]
|
||||
Version=1.0
|
||||
Name=IDLE 3
|
||||
GenericName=Python 3 IDE
|
||||
Comment=Python 3 Integrated Development and Learning Environment
|
||||
Exec=idle3 %F
|
||||
TryExec=idle3
|
||||
Terminal=false
|
||||
Type=Application
|
||||
Icon=idle3
|
||||
Categories=Development;IDE;
|
||||
MimeType=text/x-python;
|
10
ignore_pip_deprec_warn.patch
Normal file
10
ignore_pip_deprec_warn.patch
Normal file
|
@ -0,0 +1,10 @@
|
|||
--- a/Lib/test/test_venv.py
|
||||
+++ b/Lib/test/test_venv.py
|
||||
@@ -438,6 +438,7 @@ class EnsurePipTest(BaseTest):
|
||||
' module unconditionally')
|
||||
# Issue #26610: pip/pep425tags.py requires ctypes
|
||||
@unittest.skipUnless(ctypes, 'pip requires ctypes')
|
||||
+ @unittest.skip("Doesn't work with modified wheels")
|
||||
@requires_zlib
|
||||
def test_with_pip(self):
|
||||
self.do_test_with_pip(False)
|
7
import_failed.map
Normal file
7
import_failed.map
Normal file
|
@ -0,0 +1,7 @@
|
|||
python36-curses: curses _curses _curses_panel
|
||||
python36-dbm: dbm _dbm _gdbm
|
||||
python36-idle: idlelib
|
||||
python36-testsuite: test _ctypes_test _testbuffer _testcapi _testinternalcapi _testimportmultiple _testmultiphase xxlimited
|
||||
python36-tk: tkinter _tkinter
|
||||
python36-tools: turtledemo
|
||||
python36: sqlite3 readline _sqlite3 nis
|
23
import_failed.py
Normal file
23
import_failed.py
Normal file
|
@ -0,0 +1,23 @@
|
|||
import sys, os
|
||||
from sysconfig import get_path
|
||||
|
||||
failed_map_path = os.path.join(get_path('stdlib'), '_import_failed', 'import_failed.map')
|
||||
|
||||
if __spec__:
|
||||
failed_name = __spec__.name
|
||||
else:
|
||||
failed_name = __name__
|
||||
|
||||
with open(failed_map_path) as fd:
|
||||
for line in fd:
|
||||
package = line.split(':')[0]
|
||||
imports = line.split(':')[1]
|
||||
if failed_name in imports:
|
||||
raise ImportError(f"""Module '{failed_name}' is not installed.
|
||||
Use:
|
||||
sudo zypper install {package}
|
||||
to install it.""")
|
||||
|
||||
raise ImportError(f"""Module '{failed_name}' is not installed.
|
||||
It is supposed to be part of python3 distribution, but missing from failed import map.
|
||||
Please file a bug on the SUSE Bugzilla.""")
|
28
macros.python3
Normal file
28
macros.python3
Normal file
|
@ -0,0 +1,28 @@
|
|||
%have_python3 1
|
||||
|
||||
# commented out legacy macro definitions
|
||||
#py3_prefix /usr
|
||||
#py3_incdir /usr/include/python3.5m
|
||||
#py3_ver 3.5
|
||||
|
||||
# these should now be provided by macros.python_all
|
||||
#python3_sitearch /usr/lib64/python3.5/site-packages
|
||||
#python3_sitelib /usr/lib/python3.5/site-packages
|
||||
#python3_version 3.5
|
||||
|
||||
# hard to say if anyone ever used these?
|
||||
#py3_soflags cpython-35m-x86_64-linux-gnu
|
||||
#py3_abiflags m
|
||||
%cpython3_soabi %(python3 -c "import sysconfig; print(sysconfig.get_config_var('SOABI'))")
|
||||
%py3_soflags %cpython3_soabi
|
||||
|
||||
# compilation macros that might be in use somewhere
|
||||
%py3_compile(O) \
|
||||
find %1 -name '*.pyc' -exec rm -f {} ";"\
|
||||
python3 -c "import sys, os, compileall; br='%{buildroot}'; compileall.compile_dir(sys.argv[1], ddir=br and (sys.argv[1][len(os.path.abspath(br)):]+'/') or None)" %1\
|
||||
%{-O:\
|
||||
find %1 -name '*.pyo' -exec rm -f {} ";"\
|
||||
python3 -O -c "import sys, os, compileall; br='%{buildroot}'; compileall.compile_dir(sys.argv[1], ddir=br and (sys.argv[1][len(os.path.abspath(br)):]+'/') or None)" %1\
|
||||
}
|
||||
|
||||
|
880
pep538_coerce_legacy_c_locale.patch
Normal file
880
pep538_coerce_legacy_c_locale.patch
Normal file
|
@ -0,0 +1,880 @@
|
|||
--- a/Doc/using/cmdline.rst
|
||||
+++ b/Doc/using/cmdline.rst
|
||||
@@ -728,6 +728,45 @@ conflict.
|
||||
|
||||
.. versionadded:: 3.6
|
||||
|
||||
+
|
||||
+.. envvar:: PYTHONCOERCECLOCALE
|
||||
+
|
||||
+ If set to the value ``0``, causes the main Python command line application
|
||||
+ to skip coercing the legacy ASCII-based C locale to a more capable UTF-8
|
||||
+ based alternative. Note that this setting is checked even when the
|
||||
+ :option:`-E` or :option:`-I` options are used, as it is handled prior to
|
||||
+ the processing of command line options.
|
||||
+
|
||||
+ If this variable is *not* set, or is set to a value other than ``0``, and
|
||||
+ the current locale reported for the ``LC_CTYPE`` category is the default
|
||||
+ ``C`` locale, then the Python CLI will attempt to configure one of the
|
||||
+ following locales for the given locale categories before loading the
|
||||
+ interpreter runtime:
|
||||
+
|
||||
+ * ``C.UTF-8`` (``LC_ALL``)
|
||||
+ * ``C.utf8`` (``LC_ALL``)
|
||||
+ * ``UTF-8`` (``LC_CTYPE``)
|
||||
+
|
||||
+ If setting one of these locale categories succeeds, then the matching
|
||||
+ environment variables will be set (both ``LC_ALL`` and ``LANG`` for the
|
||||
+ ``LC_ALL`` category, and ``LC_CTYPE`` for the ``LC_CTYPE`` category) in
|
||||
+ the current process environment before the Python runtime is initialized.
|
||||
+
|
||||
+ Configuring one of these locales (either explicitly or via the above
|
||||
+ implicit locale coercion) will automatically set the error handler for
|
||||
+ :data:`sys.stdin` and :data:`sys.stdout` to ``surrogateescape``. This
|
||||
+ behavior can be overridden using :envvar:`PYTHONIOENCODING` as usual.
|
||||
+
|
||||
+ For debugging purposes, setting ``PYTHONCOERCECLOCALE=warn`` will cause
|
||||
+ Python to emit warning messages on ``stderr`` if either the locale coercion
|
||||
+ activates, or else if a locale that *would* have triggered coercion is
|
||||
+ still active when the Python runtime is initialized.
|
||||
+
|
||||
+ Availability: \*nix
|
||||
+
|
||||
+ .. versionadded:: 3.7
|
||||
+ See :pep:`538` for more details.
|
||||
+
|
||||
Debug-mode variables
|
||||
~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
--- a/Lib/test/support/script_helper.py
|
||||
+++ b/Lib/test/support/script_helper.py
|
||||
@@ -56,8 +56,35 @@ def interpreter_requires_environment():
|
||||
return __cached_interp_requires_environment
|
||||
|
||||
|
||||
-_PythonRunResult = collections.namedtuple("_PythonRunResult",
|
||||
- ("rc", "out", "err"))
|
||||
+class _PythonRunResult(collections.namedtuple("_PythonRunResult",
|
||||
+ ("rc", "out", "err"))):
|
||||
+ """Helper for reporting Python subprocess run results"""
|
||||
+ def fail(self, cmd_line):
|
||||
+ """Provide helpful details about failed subcommand runs"""
|
||||
+ # Limit to 80 lines to ASCII characters
|
||||
+ maxlen = 80 * 100
|
||||
+ out, err = self.out, self.err
|
||||
+ if len(out) > maxlen:
|
||||
+ out = b'(... truncated stdout ...)' + out[-maxlen:]
|
||||
+ if len(err) > maxlen:
|
||||
+ err = b'(... truncated stderr ...)' + err[-maxlen:]
|
||||
+ out = out.decode('ascii', 'replace').rstrip()
|
||||
+ err = err.decode('ascii', 'replace').rstrip()
|
||||
+ raise AssertionError("Process return code is %d\n"
|
||||
+ "command line: %r\n"
|
||||
+ "\n"
|
||||
+ "stdout:\n"
|
||||
+ "---\n"
|
||||
+ "%s\n"
|
||||
+ "---\n"
|
||||
+ "\n"
|
||||
+ "stderr:\n"
|
||||
+ "---\n"
|
||||
+ "%s\n"
|
||||
+ "---"
|
||||
+ % (self.rc, cmd_line,
|
||||
+ out,
|
||||
+ err))
|
||||
|
||||
|
||||
# Executing the interpreter in a subprocess
|
||||
@@ -115,30 +142,7 @@ def run_python_until_end(*args, **env_va
|
||||
def _assert_python(expected_success, *args, **env_vars):
|
||||
res, cmd_line = run_python_until_end(*args, **env_vars)
|
||||
if (res.rc and expected_success) or (not res.rc and not expected_success):
|
||||
- # Limit to 80 lines to ASCII characters
|
||||
- maxlen = 80 * 100
|
||||
- out, err = res.out, res.err
|
||||
- if len(out) > maxlen:
|
||||
- out = b'(... truncated stdout ...)' + out[-maxlen:]
|
||||
- if len(err) > maxlen:
|
||||
- err = b'(... truncated stderr ...)' + err[-maxlen:]
|
||||
- out = out.decode('ascii', 'replace').rstrip()
|
||||
- err = err.decode('ascii', 'replace').rstrip()
|
||||
- raise AssertionError("Process return code is %d\n"
|
||||
- "command line: %r\n"
|
||||
- "\n"
|
||||
- "stdout:\n"
|
||||
- "---\n"
|
||||
- "%s\n"
|
||||
- "---\n"
|
||||
- "\n"
|
||||
- "stderr:\n"
|
||||
- "---\n"
|
||||
- "%s\n"
|
||||
- "---"
|
||||
- % (res.rc, cmd_line,
|
||||
- out,
|
||||
- err))
|
||||
+ res.fail(cmd_line)
|
||||
return res
|
||||
|
||||
def assert_python_ok(*args, **env_vars):
|
||||
--- /dev/null
|
||||
+++ b/Lib/test/test_c_locale_coercion.py
|
||||
@@ -0,0 +1,371 @@
|
||||
+# Tests the attempted automatic coercion of the C locale to a UTF-8 locale
|
||||
+
|
||||
+import unittest
|
||||
+import locale
|
||||
+import os
|
||||
+import sys
|
||||
+import sysconfig
|
||||
+import shutil
|
||||
+import subprocess
|
||||
+from collections import namedtuple
|
||||
+
|
||||
+import test.support
|
||||
+from test.support.script_helper import (
|
||||
+ run_python_until_end,
|
||||
+ interpreter_requires_environment,
|
||||
+)
|
||||
+
|
||||
+# Set our expectation for the default encoding used in the C locale
|
||||
+# for the filesystem encoding and the standard streams
|
||||
+
|
||||
+# AIX uses iso8859-1 in the C locale, other *nix platforms use ASCII
|
||||
+if sys.platform.startswith("aix"):
|
||||
+ C_LOCALE_STREAM_ENCODING = "iso8859-1"
|
||||
+else:
|
||||
+ C_LOCALE_STREAM_ENCODING = "ascii"
|
||||
+
|
||||
+# FS encoding is UTF-8 on macOS, other *nix platforms use the locale encoding
|
||||
+if sys.platform == "darwin":
|
||||
+ C_LOCALE_FS_ENCODING = "utf-8"
|
||||
+else:
|
||||
+ C_LOCALE_FS_ENCODING = C_LOCALE_STREAM_ENCODING
|
||||
+
|
||||
+# Note that the above is probably still wrong in some cases, such as:
|
||||
+# * Windows when PYTHONLEGACYWINDOWSFSENCODING is set
|
||||
+# * AIX and any other platforms that use latin-1 in the C locale
|
||||
+#
|
||||
+# Options for dealing with this:
|
||||
+# * Don't set PYTHON_COERCE_C_LOCALE on such platforms (e.g. Windows doesn't)
|
||||
+# * Fix the test expectations to match the actual platform behaviour
|
||||
+
|
||||
+# In order to get the warning messages to match up as expected, the candidate
|
||||
+# order here must much the target locale order in Python/pylifecycle.c
|
||||
+_C_UTF8_LOCALES = ("C.UTF-8", "C.utf8", "UTF-8")
|
||||
+
|
||||
+# There's no reliable cross-platform way of checking locale alias
|
||||
+# lists, so the only way of knowing which of these locales will work
|
||||
+# is to try them with locale.setlocale(). We do that in a subprocess
|
||||
+# to avoid altering the locale of the test runner.
|
||||
+#
|
||||
+# If the relevant locale module attributes exist, and we're not on a platform
|
||||
+# where we expect it to always succeed, we also check that
|
||||
+# `locale.nl_langinfo(locale.CODESET)` works, as if it fails, the interpreter
|
||||
+# will skip locale coercion for that particular target locale
|
||||
+_check_nl_langinfo_CODESET = bool(
|
||||
+ sys.platform not in ("darwin", "linux") and
|
||||
+ hasattr(locale, "nl_langinfo") and
|
||||
+ hasattr(locale, "CODESET")
|
||||
+)
|
||||
+
|
||||
+def _set_locale_in_subprocess(locale_name):
|
||||
+ cmd_fmt = "import locale; print(locale.setlocale(locale.LC_CTYPE, '{}'))"
|
||||
+ if _check_nl_langinfo_CODESET:
|
||||
+ # If there's no valid CODESET, we expect coercion to be skipped
|
||||
+ cmd_fmt += "; import sys; sys.exit(not locale.nl_langinfo(locale.CODESET))"
|
||||
+ cmd = cmd_fmt.format(locale_name)
|
||||
+ result, py_cmd = run_python_until_end("-c", cmd, __isolated=True)
|
||||
+ return result.rc == 0
|
||||
+
|
||||
+
|
||||
+
|
||||
+_fields = "fsencoding stdin_info stdout_info stderr_info lang lc_ctype lc_all"
|
||||
+_EncodingDetails = namedtuple("EncodingDetails", _fields)
|
||||
+
|
||||
+class EncodingDetails(_EncodingDetails):
|
||||
+ # XXX (ncoghlan): Using JSON for child state reporting may be less fragile
|
||||
+ CHILD_PROCESS_SCRIPT = ";".join([
|
||||
+ "import sys, os",
|
||||
+ "print(sys.getfilesystemencoding())",
|
||||
+ "print(sys.stdin.encoding + ':' + sys.stdin.errors)",
|
||||
+ "print(sys.stdout.encoding + ':' + sys.stdout.errors)",
|
||||
+ "print(sys.stderr.encoding + ':' + sys.stderr.errors)",
|
||||
+ "print(os.environ.get('LANG', 'not set'))",
|
||||
+ "print(os.environ.get('LC_CTYPE', 'not set'))",
|
||||
+ "print(os.environ.get('LC_ALL', 'not set'))",
|
||||
+ ])
|
||||
+
|
||||
+ @classmethod
|
||||
+ def get_expected_details(cls, coercion_expected, fs_encoding, stream_encoding, env_vars):
|
||||
+ """Returns expected child process details for a given encoding"""
|
||||
+ _stream = stream_encoding + ":{}"
|
||||
+ # stdin and stdout should use surrogateescape either because the
|
||||
+ # coercion triggered, or because the C locale was detected
|
||||
+ stream_info = 2*[_stream.format("surrogateescape")]
|
||||
+ # stderr should always use backslashreplace
|
||||
+ stream_info.append(_stream.format("backslashreplace"))
|
||||
+ expected_lang = env_vars.get("LANG", "not set").lower()
|
||||
+ if coercion_expected:
|
||||
+ expected_lc_ctype = CLI_COERCION_TARGET.lower()
|
||||
+ else:
|
||||
+ expected_lc_ctype = env_vars.get("LC_CTYPE", "not set").lower()
|
||||
+ expected_lc_all = env_vars.get("LC_ALL", "not set").lower()
|
||||
+ env_info = expected_lang, expected_lc_ctype, expected_lc_all
|
||||
+ return dict(cls(fs_encoding, *stream_info, *env_info)._asdict())
|
||||
+
|
||||
+ @staticmethod
|
||||
+ def _handle_output_variations(data):
|
||||
+ """Adjust the output to handle platform specific idiosyncrasies
|
||||
+
|
||||
+ * Some platforms report ASCII as ANSI_X3.4-1968
|
||||
+ * Some platforms report ASCII as US-ASCII
|
||||
+ * Some platforms report UTF-8 instead of utf-8
|
||||
+ """
|
||||
+ data = data.replace(b"ANSI_X3.4-1968", b"ascii")
|
||||
+ data = data.replace(b"US-ASCII", b"ascii")
|
||||
+ data = data.lower()
|
||||
+ return data
|
||||
+
|
||||
+ @classmethod
|
||||
+ def get_child_details(cls, env_vars):
|
||||
+ """Retrieves fsencoding and standard stream details from a child process
|
||||
+
|
||||
+ Returns (encoding_details, stderr_lines):
|
||||
+
|
||||
+ - encoding_details: EncodingDetails for eager decoding
|
||||
+ - stderr_lines: result of calling splitlines() on the stderr output
|
||||
+
|
||||
+ The child is run in isolated mode if the current interpreter supports
|
||||
+ that.
|
||||
+ """
|
||||
+ result, py_cmd = run_python_until_end(
|
||||
+ "-c", cls.CHILD_PROCESS_SCRIPT,
|
||||
+ __isolated=True,
|
||||
+ **env_vars
|
||||
+ )
|
||||
+ if not result.rc == 0:
|
||||
+ result.fail(py_cmd)
|
||||
+ # All subprocess outputs in this test case should be pure ASCII
|
||||
+ adjusted_output = cls._handle_output_variations(result.out)
|
||||
+ stdout_lines = adjusted_output.decode("ascii").splitlines()
|
||||
+ child_encoding_details = dict(cls(*stdout_lines)._asdict())
|
||||
+ stderr_lines = result.err.decode("ascii").rstrip().splitlines()
|
||||
+ return child_encoding_details, stderr_lines
|
||||
+
|
||||
+
|
||||
+# Details of the shared library warning emitted at runtime
|
||||
+LEGACY_LOCALE_WARNING = (
|
||||
+ "Python runtime initialized with LC_CTYPE=C (a locale with default ASCII "
|
||||
+ "encoding), which may cause Unicode compatibility problems. Using C.UTF-8, "
|
||||
+ "C.utf8, or UTF-8 (if available) as alternative Unicode-compatible "
|
||||
+ "locales is recommended."
|
||||
+)
|
||||
+
|
||||
+# Details of the CLI locale coercion warning emitted at runtime
|
||||
+CLI_COERCION_WARNING_FMT = (
|
||||
+ "Python detected LC_CTYPE=C: LC_CTYPE coerced to {} (set another locale "
|
||||
+ "or PYTHONCOERCECLOCALE=0 to disable this locale coercion behavior)."
|
||||
+)
|
||||
+
|
||||
+
|
||||
+AVAILABLE_TARGETS = None
|
||||
+CLI_COERCION_TARGET = None
|
||||
+CLI_COERCION_WARNING = None
|
||||
+
|
||||
+def setUpModule():
|
||||
+ global AVAILABLE_TARGETS
|
||||
+ global CLI_COERCION_TARGET
|
||||
+ global CLI_COERCION_WARNING
|
||||
+
|
||||
+ if AVAILABLE_TARGETS is not None:
|
||||
+ # initialization already done
|
||||
+ return
|
||||
+ AVAILABLE_TARGETS = []
|
||||
+
|
||||
+ # Find the target locales available in the current system
|
||||
+ for target_locale in _C_UTF8_LOCALES:
|
||||
+ if _set_locale_in_subprocess(target_locale):
|
||||
+ AVAILABLE_TARGETS.append(target_locale)
|
||||
+
|
||||
+ if AVAILABLE_TARGETS:
|
||||
+ # Coercion is expected to use the first available target locale
|
||||
+ CLI_COERCION_TARGET = AVAILABLE_TARGETS[0]
|
||||
+ CLI_COERCION_WARNING = CLI_COERCION_WARNING_FMT.format(CLI_COERCION_TARGET)
|
||||
+
|
||||
+
|
||||
+class _LocaleHandlingTestCase(unittest.TestCase):
|
||||
+ # Base class to check expected locale handling behaviour
|
||||
+
|
||||
+ def _check_child_encoding_details(self,
|
||||
+ env_vars,
|
||||
+ expected_fs_encoding,
|
||||
+ expected_stream_encoding,
|
||||
+ expected_warnings,
|
||||
+ coercion_expected):
|
||||
+ """Check the C locale handling for the given process environment
|
||||
+
|
||||
+ Parameters:
|
||||
+ expected_fs_encoding: expected sys.getfilesystemencoding() result
|
||||
+ expected_stream_encoding: expected encoding for standard streams
|
||||
+ expected_warning: stderr output to expect (if any)
|
||||
+ """
|
||||
+ result = EncodingDetails.get_child_details(env_vars)
|
||||
+ encoding_details, stderr_lines = result
|
||||
+ expected_details = EncodingDetails.get_expected_details(
|
||||
+ coercion_expected,
|
||||
+ expected_fs_encoding,
|
||||
+ expected_stream_encoding,
|
||||
+ env_vars
|
||||
+ )
|
||||
+ self.assertEqual(encoding_details, expected_details)
|
||||
+ if expected_warnings is None:
|
||||
+ expected_warnings = []
|
||||
+ self.assertEqual(stderr_lines, expected_warnings)
|
||||
+
|
||||
+
|
||||
+class LocaleConfigurationTests(_LocaleHandlingTestCase):
|
||||
+ # Test explicit external configuration via the process environment
|
||||
+
|
||||
+ def setUpClass():
|
||||
+ # This relies on setupModule() having been run, so it can't be
|
||||
+ # handled via the @unittest.skipUnless decorator
|
||||
+ if not AVAILABLE_TARGETS:
|
||||
+ raise unittest.SkipTest("No C-with-UTF-8 locale available")
|
||||
+
|
||||
+ def test_external_target_locale_configuration(self):
|
||||
+
|
||||
+ # Explicitly setting a target locale should give the same behaviour as
|
||||
+ # is seen when implicitly coercing to that target locale
|
||||
+ self.maxDiff = None
|
||||
+
|
||||
+ expected_fs_encoding = "utf-8"
|
||||
+ expected_stream_encoding = "utf-8"
|
||||
+
|
||||
+ base_var_dict = {
|
||||
+ "LANG": "",
|
||||
+ "LC_CTYPE": "",
|
||||
+ "LC_ALL": "",
|
||||
+ }
|
||||
+ for env_var in ("LANG", "LC_CTYPE"):
|
||||
+ for locale_to_set in AVAILABLE_TARGETS:
|
||||
+ # XXX (ncoghlan): LANG=UTF-8 doesn't appear to work as
|
||||
+ # expected, so skip that combination for now
|
||||
+ # See https://bugs.python.org/issue30672 for discussion
|
||||
+ if env_var == "LANG" and locale_to_set == "UTF-8":
|
||||
+ continue
|
||||
+
|
||||
+ with self.subTest(env_var=env_var,
|
||||
+ configured_locale=locale_to_set):
|
||||
+ var_dict = base_var_dict.copy()
|
||||
+ var_dict[env_var] = locale_to_set
|
||||
+ self._check_child_encoding_details(var_dict,
|
||||
+ expected_fs_encoding,
|
||||
+ expected_stream_encoding,
|
||||
+ expected_warnings=None,
|
||||
+ coercion_expected=False)
|
||||
+
|
||||
+
|
||||
+
|
||||
+@test.support.cpython_only
|
||||
+@unittest.skipUnless(sysconfig.get_config_var("PY_COERCE_C_LOCALE"),
|
||||
+ "C locale coercion disabled at build time")
|
||||
+class LocaleCoercionTests(_LocaleHandlingTestCase):
|
||||
+ # Test implicit reconfiguration of the environment during CLI startup
|
||||
+
|
||||
+ def _check_c_locale_coercion(self,
|
||||
+ fs_encoding, stream_encoding,
|
||||
+ coerce_c_locale,
|
||||
+ expected_warnings=None,
|
||||
+ coercion_expected=True,
|
||||
+ **extra_vars):
|
||||
+ """Check the C locale handling for various configurations
|
||||
+
|
||||
+ Parameters:
|
||||
+ fs_encoding: expected sys.getfilesystemencoding() result
|
||||
+ stream_encoding: expected encoding for standard streams
|
||||
+ coerce_c_locale: setting to use for PYTHONCOERCECLOCALE
|
||||
+ None: don't set the variable at all
|
||||
+ str: the value set in the child's environment
|
||||
+ expected_warnings: expected warning lines on stderr
|
||||
+ extra_vars: additional environment variables to set in subprocess
|
||||
+ """
|
||||
+ self.maxDiff = None
|
||||
+
|
||||
+ if not AVAILABLE_TARGETS:
|
||||
+ # Locale coercion is disabled when there aren't any target locales
|
||||
+ fs_encoding = C_LOCALE_FS_ENCODING
|
||||
+ stream_encoding = C_LOCALE_STREAM_ENCODING
|
||||
+ coercion_expected = False
|
||||
+ if expected_warnings:
|
||||
+ expected_warnings = [LEGACY_LOCALE_WARNING]
|
||||
+
|
||||
+ base_var_dict = {
|
||||
+ "LANG": "",
|
||||
+ "LC_CTYPE": "",
|
||||
+ "LC_ALL": "",
|
||||
+ }
|
||||
+ base_var_dict.update(extra_vars)
|
||||
+ for env_var in ("LANG", "LC_CTYPE"):
|
||||
+ for locale_to_set in ("", "C", "POSIX", "invalid.ascii"):
|
||||
+ # XXX (ncoghlan): *BSD platforms don't behave as expected in the
|
||||
+ # POSIX locale, so we skip that for now
|
||||
+ # See https://bugs.python.org/issue30672 for discussion
|
||||
+ if locale_to_set == "POSIX":
|
||||
+ continue
|
||||
+ with self.subTest(env_var=env_var,
|
||||
+ nominal_locale=locale_to_set,
|
||||
+ PYTHONCOERCECLOCALE=coerce_c_locale):
|
||||
+ var_dict = base_var_dict.copy()
|
||||
+ var_dict[env_var] = locale_to_set
|
||||
+ if coerce_c_locale is not None:
|
||||
+ var_dict["PYTHONCOERCECLOCALE"] = coerce_c_locale
|
||||
+ # Check behaviour on successful coercion
|
||||
+ self._check_child_encoding_details(var_dict,
|
||||
+ fs_encoding,
|
||||
+ stream_encoding,
|
||||
+ expected_warnings,
|
||||
+ coercion_expected)
|
||||
+
|
||||
+ def test_test_PYTHONCOERCECLOCALE_not_set(self):
|
||||
+ # This should coerce to the first available target locale by default
|
||||
+ self._check_c_locale_coercion("utf-8", "utf-8", coerce_c_locale=None)
|
||||
+
|
||||
+ def test_PYTHONCOERCECLOCALE_not_zero(self):
|
||||
+ # *Any* string other than "0" is considered "set" for our purposes
|
||||
+ # and hence should result in the locale coercion being enabled
|
||||
+ for setting in ("", "1", "true", "false"):
|
||||
+ self._check_c_locale_coercion("utf-8", "utf-8", coerce_c_locale=setting)
|
||||
+
|
||||
+ def test_PYTHONCOERCECLOCALE_set_to_warn(self):
|
||||
+ # PYTHONCOERCECLOCALE=warn enables runtime warnings for legacy locales
|
||||
+ self._check_c_locale_coercion("utf-8", "utf-8",
|
||||
+ coerce_c_locale="warn",
|
||||
+ expected_warnings=[CLI_COERCION_WARNING])
|
||||
+
|
||||
+
|
||||
+ def test_PYTHONCOERCECLOCALE_set_to_zero(self):
|
||||
+ # The setting "0" should result in the locale coercion being disabled
|
||||
+ self._check_c_locale_coercion(C_LOCALE_FS_ENCODING,
|
||||
+ C_LOCALE_STREAM_ENCODING,
|
||||
+ coerce_c_locale="0",
|
||||
+ coercion_expected=False)
|
||||
+ # Setting LC_ALL=C shouldn't make any difference to the behaviour
|
||||
+ self._check_c_locale_coercion(C_LOCALE_FS_ENCODING,
|
||||
+ C_LOCALE_STREAM_ENCODING,
|
||||
+ coerce_c_locale="0",
|
||||
+ LC_ALL="C",
|
||||
+ coercion_expected=False)
|
||||
+
|
||||
+ def test_LC_ALL_set_to_C(self):
|
||||
+ # Setting LC_ALL should render the locale coercion ineffective
|
||||
+ self._check_c_locale_coercion(C_LOCALE_FS_ENCODING,
|
||||
+ C_LOCALE_STREAM_ENCODING,
|
||||
+ coerce_c_locale=None,
|
||||
+ LC_ALL="C",
|
||||
+ coercion_expected=False)
|
||||
+ # And result in a warning about a lack of locale compatibility
|
||||
+ self._check_c_locale_coercion(C_LOCALE_FS_ENCODING,
|
||||
+ C_LOCALE_STREAM_ENCODING,
|
||||
+ coerce_c_locale="warn",
|
||||
+ LC_ALL="C",
|
||||
+ expected_warnings=[LEGACY_LOCALE_WARNING],
|
||||
+ coercion_expected=False)
|
||||
+
|
||||
+def test_main():
|
||||
+ test.support.run_unittest(
|
||||
+ LocaleConfigurationTests,
|
||||
+ LocaleCoercionTests
|
||||
+ )
|
||||
+ test.support.reap_children()
|
||||
+
|
||||
+if __name__ == "__main__":
|
||||
+ test_main()
|
||||
--- a/Lib/test/test_cmd_line.py
|
||||
+++ b/Lib/test/test_cmd_line.py
|
||||
@@ -153,6 +153,7 @@ class CmdLineTest(unittest.TestCase):
|
||||
env = os.environ.copy()
|
||||
# Use C locale to get ascii for the locale encoding
|
||||
env['LC_ALL'] = 'C'
|
||||
+ env['PYTHONCOERCECLOCALE'] = '0'
|
||||
code = (
|
||||
b'import locale; '
|
||||
b'print(ascii("' + undecodable + b'"), '
|
||||
--- a/Lib/test/test_sys.py
|
||||
+++ b/Lib/test/test_sys.py
|
||||
@@ -680,6 +680,7 @@ class SysModuleTest(unittest.TestCase):
|
||||
# Force the POSIX locale
|
||||
env = os.environ.copy()
|
||||
env["LC_ALL"] = "C"
|
||||
+ env["PYTHONCOERCECLOCALE"] = "0"
|
||||
code = '\n'.join((
|
||||
'import sys',
|
||||
'def dump(name):',
|
||||
--- a/Modules/main.c
|
||||
+++ b/Modules/main.c
|
||||
@@ -107,7 +107,11 @@ static const char usage_6[] =
|
||||
" predictable seed.\n"
|
||||
"PYTHONMALLOC: set the Python memory allocators and/or install debug hooks\n"
|
||||
" on Python memory allocators. Use PYTHONMALLOC=debug to install debug\n"
|
||||
-" hooks.\n";
|
||||
+" hooks.\n"
|
||||
+
|
||||
+"PYTHONCOERCECLOCALE: if this variable is set to 0, it disables the locale\n"
|
||||
+" coercion behavior. Use PYTHONCOERCECLOCALE=warn to request display of\n"
|
||||
+" locale coercion and locale compatibility warnings on stderr.\n";
|
||||
|
||||
static int
|
||||
usage(int exitcode, const wchar_t* program)
|
||||
--- a/Programs/_testembed.c
|
||||
+++ b/Programs/_testembed.c
|
||||
@@ -1,4 +1,5 @@
|
||||
#include <Python.h>
|
||||
+#include "pyconfig.h"
|
||||
#include "pythread.h"
|
||||
#include <stdio.h>
|
||||
|
||||
--- a/Programs/python.c
|
||||
+++ b/Programs/python.c
|
||||
@@ -15,6 +15,21 @@ wmain(int argc, wchar_t **argv)
|
||||
}
|
||||
#else
|
||||
|
||||
+/* Access private pylifecycle helper API to better handle the legacy C locale
|
||||
+ *
|
||||
+ * The legacy C locale assumes ASCII as the default text encoding, which
|
||||
+ * causes problems not only for the CPython runtime, but also other
|
||||
+ * components like GNU readline.
|
||||
+ *
|
||||
+ * Accordingly, when the CLI detects it, it attempts to coerce it to a
|
||||
+ * more capable UTF-8 based alternative.
|
||||
+ *
|
||||
+ * See the documentation of the PYTHONCOERCECLOCALE setting for more details.
|
||||
+ *
|
||||
+ */
|
||||
+extern int _Py_LegacyLocaleDetected(void);
|
||||
+extern void _Py_CoerceLegacyLocale(void);
|
||||
+
|
||||
int
|
||||
main(int argc, char **argv)
|
||||
{
|
||||
@@ -25,7 +40,11 @@ main(int argc, char **argv)
|
||||
char *oldloc;
|
||||
|
||||
/* Force malloc() allocator to bootstrap Python */
|
||||
+#ifdef Py_DEBUG
|
||||
+ (void)_PyMem_SetupAllocators("malloc_debug");
|
||||
+# else
|
||||
(void)_PyMem_SetupAllocators("malloc");
|
||||
+# endif
|
||||
|
||||
argv_copy = (wchar_t **)PyMem_RawMalloc(sizeof(wchar_t*) * (argc+1));
|
||||
argv_copy2 = (wchar_t **)PyMem_RawMalloc(sizeof(wchar_t*) * (argc+1));
|
||||
@@ -49,7 +68,21 @@ main(int argc, char **argv)
|
||||
return 1;
|
||||
}
|
||||
|
||||
+#ifdef __ANDROID__
|
||||
+ /* Passing "" to setlocale() on Android requests the C locale rather
|
||||
+ * than checking environment variables, so request C.UTF-8 explicitly
|
||||
+ */
|
||||
+ setlocale(LC_ALL, "C.UTF-8");
|
||||
+#else
|
||||
+ /* Reconfigure the locale to the default for this process */
|
||||
setlocale(LC_ALL, "");
|
||||
+#endif
|
||||
+
|
||||
+ if (_Py_LegacyLocaleDetected()) {
|
||||
+ _Py_CoerceLegacyLocale();
|
||||
+ }
|
||||
+
|
||||
+ /* Convert from char to wchar_t based on the locale settings */
|
||||
for (i = 0; i < argc; i++) {
|
||||
argv_copy[i] = Py_DecodeLocale(argv[i], NULL);
|
||||
if (!argv_copy[i]) {
|
||||
@@ -70,7 +103,11 @@ main(int argc, char **argv)
|
||||
|
||||
/* Force again malloc() allocator to release memory blocks allocated
|
||||
before Py_Main() */
|
||||
+#ifdef Py_DEBUG
|
||||
+ (void)_PyMem_SetupAllocators("malloc_debug");
|
||||
+# else
|
||||
(void)_PyMem_SetupAllocators("malloc");
|
||||
+# endif
|
||||
|
||||
for (i = 0; i < argc; i++) {
|
||||
PyMem_RawFree(argv_copy2[i]);
|
||||
--- a/Python/pylifecycle.c
|
||||
+++ b/Python/pylifecycle.c
|
||||
@@ -167,6 +167,7 @@ Py_SetStandardStreamEncoding(const char
|
||||
return 0;
|
||||
}
|
||||
|
||||
+
|
||||
/* Global initializations. Can be undone by Py_FinalizeEx(). Don't
|
||||
call this twice without an intervening Py_FinalizeEx() call. When
|
||||
initializations fail, a fatal error is issued and the function does
|
||||
@@ -301,6 +302,183 @@ import_init(PyInterpreterState *interp,
|
||||
}
|
||||
|
||||
|
||||
+/* Helper functions to better handle the legacy C locale
|
||||
+ *
|
||||
+ * The legacy C locale assumes ASCII as the default text encoding, which
|
||||
+ * causes problems not only for the CPython runtime, but also other
|
||||
+ * components like GNU readline.
|
||||
+ *
|
||||
+ * Accordingly, when the CLI detects it, it attempts to coerce it to a
|
||||
+ * more capable UTF-8 based alternative as follows:
|
||||
+ *
|
||||
+ * if (_Py_LegacyLocaleDetected()) {
|
||||
+ * _Py_CoerceLegacyLocale();
|
||||
+ * }
|
||||
+ *
|
||||
+ * See the documentation of the PYTHONCOERCECLOCALE setting for more details.
|
||||
+ *
|
||||
+ * Locale coercion also impacts the default error handler for the standard
|
||||
+ * streams: while the usual default is "strict", the default for the legacy
|
||||
+ * C locale and for any of the coercion target locales is "surrogateescape".
|
||||
+ */
|
||||
+
|
||||
+int
|
||||
+_Py_LegacyLocaleDetected(void)
|
||||
+{
|
||||
+#ifndef MS_WINDOWS
|
||||
+ /* On non-Windows systems, the C locale is considered a legacy locale */
|
||||
+ /* XXX (ncoghlan): some platforms (notably Mac OS X) don't appear to treat
|
||||
+ * the POSIX locale as a simple alias for the C locale, so
|
||||
+ * we may also want to check for that explicitly.
|
||||
+ */
|
||||
+ const char *ctype_loc = setlocale(LC_CTYPE, NULL);
|
||||
+ return ctype_loc != NULL && strcmp(ctype_loc, "C") == 0;
|
||||
+#else
|
||||
+ /* Windows uses code pages instead of locales, so no locale is legacy */
|
||||
+ return 0;
|
||||
+#endif
|
||||
+}
|
||||
+
|
||||
+
|
||||
+static const char *_C_LOCALE_WARNING =
|
||||
+ "Python runtime initialized with LC_CTYPE=C (a locale with default ASCII "
|
||||
+ "encoding), which may cause Unicode compatibility problems. Using C.UTF-8, "
|
||||
+ "C.utf8, or UTF-8 (if available) as alternative Unicode-compatible "
|
||||
+ "locales is recommended.\n";
|
||||
+
|
||||
+static int
|
||||
+_legacy_locale_warnings_enabled(void)
|
||||
+{
|
||||
+ const char *coerce_c_locale = getenv("PYTHONCOERCECLOCALE");
|
||||
+ return (coerce_c_locale != NULL &&
|
||||
+ strncmp(coerce_c_locale, "warn", 5) == 0);
|
||||
+}
|
||||
+
|
||||
+static void
|
||||
+_emit_stderr_warning_for_legacy_locale(void)
|
||||
+{
|
||||
+ if (_legacy_locale_warnings_enabled()) {
|
||||
+ if (_Py_LegacyLocaleDetected()) {
|
||||
+ fprintf(stderr, "%s", _C_LOCALE_WARNING);
|
||||
+ }
|
||||
+ }
|
||||
+}
|
||||
+
|
||||
+typedef struct _CandidateLocale {
|
||||
+ const char *locale_name; /* The locale to try as a coercion target */
|
||||
+} _LocaleCoercionTarget;
|
||||
+
|
||||
+static _LocaleCoercionTarget _TARGET_LOCALES[] = {
|
||||
+ {"C.UTF-8"},
|
||||
+ {"C.utf8"},
|
||||
+ {"UTF-8"},
|
||||
+ {NULL}
|
||||
+};
|
||||
+
|
||||
+static char *
|
||||
+get_default_standard_stream_error_handler(void)
|
||||
+{
|
||||
+ const char *ctype_loc = setlocale(LC_CTYPE, NULL);
|
||||
+ if (ctype_loc != NULL) {
|
||||
+ /* "surrogateescape" is the default in the legacy C locale */
|
||||
+ if (strcmp(ctype_loc, "C") == 0) {
|
||||
+ return "surrogateescape";
|
||||
+ }
|
||||
+
|
||||
+#ifdef PY_COERCE_C_LOCALE
|
||||
+ /* "surrogateescape" is the default in locale coercion target locales */
|
||||
+ const _LocaleCoercionTarget *target = NULL;
|
||||
+ for (target = _TARGET_LOCALES; target->locale_name; target++) {
|
||||
+ if (strcmp(ctype_loc, target->locale_name) == 0) {
|
||||
+ return "surrogateescape";
|
||||
+ }
|
||||
+ }
|
||||
+#endif
|
||||
+ }
|
||||
+
|
||||
+ /* Otherwise return NULL to request the typical default error handler */
|
||||
+ return NULL;
|
||||
+}
|
||||
+
|
||||
+#ifdef PY_COERCE_C_LOCALE
|
||||
+static const char *_C_LOCALE_COERCION_WARNING =
|
||||
+ "Python detected LC_CTYPE=C: LC_CTYPE coerced to %.20s (set another locale "
|
||||
+ "or PYTHONCOERCECLOCALE=0 to disable this locale coercion behavior).\n";
|
||||
+
|
||||
+static void
|
||||
+_coerce_default_locale_settings(const _LocaleCoercionTarget *target)
|
||||
+{
|
||||
+
|
||||
+ const char *newloc = target->locale_name;
|
||||
+
|
||||
+ /* Reset locale back to currently configured defaults */
|
||||
+ setlocale(LC_ALL, "");
|
||||
+
|
||||
+ /* Set the relevant locale environment variable */
|
||||
+ if (setenv("LC_CTYPE", newloc, 1)) {
|
||||
+ fprintf(stderr,
|
||||
+ "Error setting LC_CTYPE, skipping C locale coercion\n");
|
||||
+ return;
|
||||
+ }
|
||||
+ if (_legacy_locale_warnings_enabled()) {
|
||||
+ fprintf(stderr, _C_LOCALE_COERCION_WARNING, newloc);
|
||||
+ }
|
||||
+
|
||||
+ /* Reconfigure with the overridden environment variables */
|
||||
+ setlocale(LC_ALL, "");
|
||||
+}
|
||||
+#endif
|
||||
+
|
||||
+
|
||||
+void
|
||||
+_Py_CoerceLegacyLocale(void)
|
||||
+{
|
||||
+#ifdef PY_COERCE_C_LOCALE
|
||||
+ /* We ignore the Python -E and -I flags here, as the CLI needs to sort out
|
||||
+ * the locale settings *before* we try to do anything with the command
|
||||
+ * line arguments. For cross-platform debugging purposes, we also need
|
||||
+ * to give end users a way to force even scripts that are otherwise
|
||||
+ * isolated from their environment to use the legacy ASCII-centric C
|
||||
+ * locale.
|
||||
+ *
|
||||
+ * Ignoring -E and -I is safe from a security perspective, as we only use
|
||||
+ * the setting to turn *off* the implicit locale coercion, and anyone with
|
||||
+ * access to the process environment already has the ability to set
|
||||
+ * `LC_ALL=C` to override the C level locale settings anyway.
|
||||
+ */
|
||||
+ const char *coerce_c_locale = getenv("PYTHONCOERCECLOCALE");
|
||||
+ if (coerce_c_locale == NULL || strncmp(coerce_c_locale, "0", 2) != 0) {
|
||||
+ /* PYTHONCOERCECLOCALE is not set, or is set to something other than "0" */
|
||||
+ const char *locale_override = getenv("LC_ALL");
|
||||
+ if (locale_override == NULL || *locale_override == '\0') {
|
||||
+ /* LC_ALL is also not set (or is set to an empty string) */
|
||||
+ const _LocaleCoercionTarget *target = NULL;
|
||||
+ for (target = _TARGET_LOCALES; target->locale_name; target++) {
|
||||
+ const char *new_locale = setlocale(LC_CTYPE,
|
||||
+ target->locale_name);
|
||||
+ if (new_locale != NULL) {
|
||||
+#if !defined(__APPLE__) && defined(HAVE_LANGINFO_H) && defined(CODESET)
|
||||
+ /* Also ensure that nl_langinfo works in this locale */
|
||||
+ char *codeset = nl_langinfo(CODESET);
|
||||
+ if (!codeset || *codeset == '\0') {
|
||||
+ /* CODESET is not set or empty, so skip coercion */
|
||||
+ new_locale = NULL;
|
||||
+ setlocale(LC_CTYPE, "");
|
||||
+ continue;
|
||||
+ }
|
||||
+#endif
|
||||
+ /* Successfully configured locale, so make it the default */
|
||||
+ _coerce_default_locale_settings(target);
|
||||
+ return;
|
||||
+ }
|
||||
+ }
|
||||
+ }
|
||||
+ }
|
||||
+ /* No C locale warning here, as Py_Initialize will emit one later */
|
||||
+#endif
|
||||
+}
|
||||
+
|
||||
+
|
||||
void
|
||||
_Py_InitializeEx_Private(int install_sigs, int install_importlib)
|
||||
{
|
||||
@@ -315,11 +493,19 @@ _Py_InitializeEx_Private(int install_sig
|
||||
initialized = 1;
|
||||
_Py_Finalizing = NULL;
|
||||
|
||||
-#ifdef HAVE_SETLOCALE
|
||||
+#ifdef __ANDROID__
|
||||
+ /* Passing "" to setlocale() on Android requests the C locale rather
|
||||
+ * than checking environment variables, so request C.UTF-8 explicitly
|
||||
+ */
|
||||
+ setlocale(LC_CTYPE, "C.UTF-8");
|
||||
+#else
|
||||
+#ifndef MS_WINDOWS
|
||||
/* Set up the LC_CTYPE locale, so we can obtain
|
||||
the locale's charset without having to switch
|
||||
locales. */
|
||||
setlocale(LC_CTYPE, "");
|
||||
+ _emit_stderr_warning_for_legacy_locale();
|
||||
+#endif
|
||||
#endif
|
||||
|
||||
if ((p = Py_GETENV("PYTHONDEBUG")) && *p != '\0')
|
||||
@@ -1250,12 +1436,8 @@ initstdio(void)
|
||||
}
|
||||
}
|
||||
if (!errors && !(pythonioencoding && *pythonioencoding)) {
|
||||
- /* When the LC_CTYPE locale is the POSIX locale ("C locale"),
|
||||
- stdin and stdout use the surrogateescape error handler by
|
||||
- default, instead of the strict error handler. */
|
||||
- char *loc = setlocale(LC_CTYPE, NULL);
|
||||
- if (loc != NULL && strcmp(loc, "C") == 0)
|
||||
- errors = "surrogateescape";
|
||||
+ /* Choose the default error handler based on the current locale */
|
||||
+ errors = get_default_standard_stream_error_handler();
|
||||
}
|
||||
}
|
||||
|
||||
--- a/configure.ac
|
||||
+++ b/configure.ac
|
||||
@@ -3417,6 +3417,40 @@ then
|
||||
fi
|
||||
AC_MSG_RESULT($with_pymalloc)
|
||||
|
||||
+# Check for --with-c-locale-coercion
|
||||
+AC_MSG_CHECKING(for --with-c-locale-coercion)
|
||||
+AC_ARG_WITH(c-locale-coercion,
|
||||
+ AS_HELP_STRING([--with(out)-c-locale-coercion],
|
||||
+ [disable/enable C locale coercion to a UTF-8 based locale]))
|
||||
+
|
||||
+if test -z "$with_c_locale_coercion"
|
||||
+then
|
||||
+ with_c_locale_coercion="yes"
|
||||
+fi
|
||||
+if test "$with_c_locale_coercion" != "no"
|
||||
+then
|
||||
+ AC_DEFINE(PY_COERCE_C_LOCALE, 1,
|
||||
+ [Define if you want to coerce the C locale to a UTF-8 based locale])
|
||||
+fi
|
||||
+AC_MSG_RESULT($with_c_locale_coercion)
|
||||
+
|
||||
+# Check for --with-c-locale-warning
|
||||
+AC_MSG_CHECKING(for --with-c-locale-warning)
|
||||
+AC_ARG_WITH(c-locale-warning,
|
||||
+ AS_HELP_STRING([--with(out)-c-locale-warning],
|
||||
+ [disable/enable locale compatibility warning in the C locale]))
|
||||
+
|
||||
+if test -z "$with_c_locale_warning"
|
||||
+then
|
||||
+ with_c_locale_warning="yes"
|
||||
+fi
|
||||
+if test "$with_c_locale_warning" != "no"
|
||||
+then
|
||||
+ AC_DEFINE(PY_WARN_ON_C_LOCALE, 1,
|
||||
+ [Define to emit a locale compatibility warning in the C locale])
|
||||
+fi
|
||||
+AC_MSG_RESULT($with_c_locale_warning)
|
||||
+
|
||||
# Check for Valgrind support
|
||||
AC_MSG_CHECKING([for --with-valgrind])
|
||||
AC_ARG_WITH([valgrind],
|
BIN
pip-20.0.2-py2.py3-none-any.whl
Normal file
BIN
pip-20.0.2-py2.py3-none-any.whl
Normal file
Binary file not shown.
13
python-3.3.0b1-curses-panel.patch
Normal file
13
python-3.3.0b1-curses-panel.patch
Normal file
|
@ -0,0 +1,13 @@
|
|||
Index: Modules/_curses_panel.c
|
||||
===================================================================
|
||||
--- Modules/_curses_panel.c.orig 2012-06-26 22:19:53.000000000 +0200
|
||||
+++ Modules/_curses_panel.c 2012-08-02 14:56:23.000000000 +0200
|
||||
@@ -14,7 +14,7 @@
|
||||
|
||||
#include "py_curses.h"
|
||||
|
||||
-#include <panel.h>
|
||||
+#include <ncurses/panel.h>
|
||||
|
||||
typedef struct {
|
||||
PyObject *PyCursesError;
|
21
python-3.3.0b1-fix_date_time_compiler.patch
Normal file
21
python-3.3.0b1-fix_date_time_compiler.patch
Normal file
|
@ -0,0 +1,21 @@
|
|||
--- a/Makefile.pre.in
|
||||
+++ b/Makefile.pre.in
|
||||
@@ -738,11 +738,18 @@ Modules/getbuildinfo.o: $(PARSER_OBJS) \
|
||||
$(MODOBJS) \
|
||||
$(srcdir)/Modules/getbuildinfo.c
|
||||
$(CC) -c $(PY_CORE_CFLAGS) \
|
||||
+ -DDATE="\"`date -u -r Makefile.pre.in +"%b %d %Y"`\"" \
|
||||
+ -DTIME="\"`date -u -r Makefile.pre.in +"%T"`\"" \
|
||||
-DGITVERSION="\"`LC_ALL=C $(GITVERSION)`\"" \
|
||||
-DGITTAG="\"`LC_ALL=C $(GITTAG)`\"" \
|
||||
-DGITBRANCH="\"`LC_ALL=C $(GITBRANCH)`\"" \
|
||||
-o $@ $(srcdir)/Modules/getbuildinfo.c
|
||||
|
||||
+Python/getcompiler.o: $(srcdir)/Python/getcompiler.c Makefile
|
||||
+ $(CC) -c $(PY_CORE_CFLAGS) \
|
||||
+ -DCOMPILER='"[GCC]"' \
|
||||
+ -o $@ $(srcdir)/Python/getcompiler.c
|
||||
+
|
||||
Modules/getpath.o: $(srcdir)/Modules/getpath.c Makefile
|
||||
$(CC) -c $(PY_CORE_CFLAGS) -DPYTHONPATH='"$(PYTHONPATH)"' \
|
||||
-DPREFIX='"$(prefix)"' \
|
13
python-3.3.0b1-localpath.patch
Normal file
13
python-3.3.0b1-localpath.patch
Normal file
|
@ -0,0 +1,13 @@
|
|||
Index: Lib/site.py
|
||||
===================================================================
|
||||
--- Lib/site.py.orig
|
||||
+++ Lib/site.py
|
||||
@@ -75,7 +75,7 @@ import builtins
|
||||
import _sitebuiltins
|
||||
|
||||
# Prefixes for site-packages; add additional prefixes like /usr/local here
|
||||
-PREFIXES = [sys.prefix, sys.exec_prefix]
|
||||
+PREFIXES = [sys.prefix, sys.exec_prefix, '/usr/local']
|
||||
# Enable per user site-packages directory
|
||||
# set it to False to disable the feature or True to force the feature
|
||||
ENABLE_USER_SITE = None
|
11
python-3.3.0b1-test-posix_fadvise.patch
Normal file
11
python-3.3.0b1-test-posix_fadvise.patch
Normal file
|
@ -0,0 +1,11 @@
|
|||
--- a/Lib/test/test_posix.py
|
||||
+++ b/Lib/test/test_posix.py
|
||||
@@ -270,7 +270,7 @@ class PosixTester(unittest.TestCase):
|
||||
def test_posix_fadvise(self):
|
||||
fd = os.open(support.TESTFN, os.O_RDONLY)
|
||||
try:
|
||||
- posix.posix_fadvise(fd, 0, 0, posix.POSIX_FADV_WILLNEED)
|
||||
+ posix.posix_fadvise(fd, 0, 0, posix.POSIX_FADV_RANDOM)
|
||||
finally:
|
||||
os.close(fd)
|
||||
|
10
python-3.3.3-skip-distutils-test_sysconfig_module.patch
Normal file
10
python-3.3.3-skip-distutils-test_sysconfig_module.patch
Normal file
|
@ -0,0 +1,10 @@
|
|||
--- a/Lib/distutils/tests/test_sysconfig.py
|
||||
+++ b/Lib/distutils/tests/test_sysconfig.py
|
||||
@@ -116,6 +116,7 @@ class SysconfigTestCase(support.EnvironG
|
||||
'OTHER': 'foo'})
|
||||
|
||||
|
||||
+ @unittest.skip("Either fix distutil's sysconfig everytime we add new compiler flags or we just skip this test")
|
||||
def test_sysconfig_module(self):
|
||||
import sysconfig as global_sysconfig
|
||||
self.assertEqual(global_sysconfig.get_config_var('CFLAGS'),
|
20
python-3.6-CVE-2017-18207.patch
Normal file
20
python-3.6-CVE-2017-18207.patch
Normal file
|
@ -0,0 +1,20 @@
|
|||
From ae0ed14794ced2c51c822fc6f0d3ca92064619dd Mon Sep 17 00:00:00 2001
|
||||
From: BT123 <abcdyzhang@163.com>
|
||||
Date: Fri, 17 Nov 2017 16:45:45 +0800
|
||||
Subject: [PATCH] bug in wave.py
|
||||
|
||||
---
|
||||
Lib/wave.py | 2 ++
|
||||
1 file changed, 2 insertions(+)
|
||||
|
||||
--- a/Lib/wave.py
|
||||
+++ b/Lib/wave.py
|
||||
@@ -258,6 +258,8 @@ class Wave_read:
|
||||
self._sampwidth = (sampwidth + 7) // 8
|
||||
else:
|
||||
raise Error('unknown format: %r' % (wFormatTag,))
|
||||
+ if self._nchannels == 0:
|
||||
+ raise ValueError("The audio file in wav format should have at least one channel!")
|
||||
self._framesize = self._nchannels * self._sampwidth
|
||||
self._comptype = 'NONE'
|
||||
self._compname = 'not compressed'
|
282
python-3.6.0-multilib-new.patch
Normal file
282
python-3.6.0-multilib-new.patch
Normal file
|
@ -0,0 +1,282 @@
|
|||
---
|
||||
Lib/distutils/command/install.py | 5 +++--
|
||||
Lib/distutils/sysconfig.py | 3 ++-
|
||||
Lib/pydoc.py | 5 ++---
|
||||
Lib/site.py | 8 +++++++-
|
||||
Lib/sysconfig.py | 6 +++---
|
||||
Lib/test/test_site.py | 5 +++--
|
||||
Lib/test/test_sysconfig.py | 14 +++++++++++++-
|
||||
Lib/trace.py | 6 +++---
|
||||
Makefile.pre.in | 6 +++++-
|
||||
Modules/getpath.c | 15 ++++++++-------
|
||||
configure.ac | 18 ++++++++++++++++--
|
||||
11 files changed, 65 insertions(+), 26 deletions(-)
|
||||
|
||||
--- a/Lib/distutils/command/install.py
|
||||
+++ b/Lib/distutils/command/install.py
|
||||
@@ -30,7 +30,7 @@ WINDOWS_SCHEME = {
|
||||
INSTALL_SCHEMES = {
|
||||
'unix_prefix': {
|
||||
'purelib': '$base/lib/python$py_version_short/site-packages',
|
||||
- 'platlib': '$platbase/lib/python$py_version_short/site-packages',
|
||||
+ 'platlib': '$platbase/$platlibdir/python$py_version_short/site-packages',
|
||||
'headers': '$base/include/python$py_version_short$abiflags/$dist_name',
|
||||
'scripts': '$base/bin',
|
||||
'data' : '$base',
|
||||
@@ -284,7 +284,7 @@ class install(Command):
|
||||
# about needing recursive variable expansion (shudder).
|
||||
|
||||
py_version = sys.version.split()[0]
|
||||
- (prefix, exec_prefix) = get_config_vars('prefix', 'exec_prefix')
|
||||
+ (prefix, exec_prefix, platlibdir) = get_config_vars('prefix', 'exec_prefix', 'platlibdir')
|
||||
try:
|
||||
abiflags = sys.abiflags
|
||||
except AttributeError:
|
||||
@@ -301,6 +301,7 @@ class install(Command):
|
||||
'sys_exec_prefix': exec_prefix,
|
||||
'exec_prefix': exec_prefix,
|
||||
'abiflags': abiflags,
|
||||
+ 'platlibdir': platlibdir,
|
||||
}
|
||||
|
||||
if HAS_USER_SITE:
|
||||
--- a/Lib/distutils/sysconfig.py
|
||||
+++ b/Lib/distutils/sysconfig.py
|
||||
@@ -129,8 +129,9 @@ def get_python_lib(plat_specific=0, stan
|
||||
prefix = plat_specific and EXEC_PREFIX or PREFIX
|
||||
|
||||
if os.name == "posix":
|
||||
+ libdir = plat_specific and get_config_var("platlibdir") or "lib"
|
||||
libpython = os.path.join(prefix,
|
||||
- "lib", "python" + get_python_version())
|
||||
+ libdir, "python" + get_python_version())
|
||||
if standard_lib:
|
||||
return libpython
|
||||
else:
|
||||
--- a/Lib/pydoc.py
|
||||
+++ b/Lib/pydoc.py
|
||||
@@ -62,6 +62,7 @@ import pkgutil
|
||||
import platform
|
||||
import re
|
||||
import sys
|
||||
+import sysconfig
|
||||
import time
|
||||
import tokenize
|
||||
import urllib.parse
|
||||
@@ -394,9 +395,7 @@ class Doc:
|
||||
|
||||
docmodule = docclass = docroutine = docother = docproperty = docdata = fail
|
||||
|
||||
- def getdocloc(self, object,
|
||||
- basedir=os.path.join(sys.base_exec_prefix, "lib",
|
||||
- "python%d.%d" % sys.version_info[:2])):
|
||||
+ def getdocloc(self, object, basedir=sysconfig.get_path('stdlib')):
|
||||
"""Return the location of module docs or None"""
|
||||
|
||||
try:
|
||||
--- a/Lib/site.py
|
||||
+++ b/Lib/site.py
|
||||
@@ -304,9 +304,15 @@ def getsitepackages(prefixes=None):
|
||||
seen.add(prefix)
|
||||
|
||||
if os.sep == '/':
|
||||
- sitepackages.append(os.path.join(prefix, "lib",
|
||||
+ from sysconfig import get_config_var
|
||||
+ platlibdir = get_config_var("platlibdir")
|
||||
+ sitepackages.append(os.path.join(prefix, platlibdir,
|
||||
"python%d.%d" % sys.version_info[:2],
|
||||
"site-packages"))
|
||||
+ if platlibdir != "lib":
|
||||
+ sitepackages.append(os.path.join(prefix, "lib",
|
||||
+ "python%d.%d" % sys.version_info[:2],
|
||||
+ "site-packages"))
|
||||
else:
|
||||
sitepackages.append(prefix)
|
||||
sitepackages.append(os.path.join(prefix, "lib", "site-packages"))
|
||||
--- a/Lib/sysconfig.py
|
||||
+++ b/Lib/sysconfig.py
|
||||
@@ -20,10 +20,10 @@ __all__ = [
|
||||
|
||||
_INSTALL_SCHEMES = {
|
||||
'posix_prefix': {
|
||||
- 'stdlib': '{installed_base}/lib/python{py_version_short}',
|
||||
- 'platstdlib': '{platbase}/lib/python{py_version_short}',
|
||||
+ 'stdlib': '{installed_base}/{platlibdir}/python{py_version_short}',
|
||||
+ 'platstdlib': '{platbase}/{platlibdir}/python{py_version_short}',
|
||||
'purelib': '{base}/lib/python{py_version_short}/site-packages',
|
||||
- 'platlib': '{platbase}/lib/python{py_version_short}/site-packages',
|
||||
+ 'platlib': '{platbase}/{platlibdir}/python{py_version_short}/site-packages',
|
||||
'include':
|
||||
'{installed_base}/include/python{py_version_short}{abiflags}',
|
||||
'platinclude':
|
||||
--- a/Lib/test/test_site.py
|
||||
+++ b/Lib/test/test_site.py
|
||||
@@ -264,8 +264,9 @@ class HelperFunctionsTests(unittest.Test
|
||||
self.assertEqual(dirs[1], wanted)
|
||||
elif os.sep == '/':
|
||||
# OS X non-framework builds, Linux, FreeBSD, etc
|
||||
- self.assertEqual(len(dirs), 1)
|
||||
- wanted = os.path.join('xoxo', 'lib',
|
||||
+ platlibdir = sysconfig.get_config_var('platlibdir')
|
||||
+ self.assertTrue(len(dirs) in (1,2))
|
||||
+ wanted = os.path.join('xoxo', platlibdir,
|
||||
'python%d.%d' % sys.version_info[:2],
|
||||
'site-packages')
|
||||
self.assertEqual(dirs[0], wanted)
|
||||
--- a/Lib/test/test_sysconfig.py
|
||||
+++ b/Lib/test/test_sysconfig.py
|
||||
@@ -279,6 +279,7 @@ class TestSysConfig(unittest.TestCase):
|
||||
# is similar to the global posix_prefix one
|
||||
base = get_config_var('base')
|
||||
user = get_config_var('userbase')
|
||||
+ platlibdir = get_config_var("platlibdir")
|
||||
# the global scheme mirrors the distinction between prefix and
|
||||
# exec-prefix but not the user scheme, so we have to adapt the paths
|
||||
# before comparing (issue #9100)
|
||||
@@ -293,8 +294,19 @@ class TestSysConfig(unittest.TestCase):
|
||||
# before comparing
|
||||
global_path = global_path.replace(sys.base_prefix, sys.prefix)
|
||||
base = base.replace(sys.base_prefix, sys.prefix)
|
||||
+
|
||||
+ if platlibdir != "lib":
|
||||
+ platbase = os.path.join(base, platlibdir)
|
||||
+ purebase = os.path.join(base, "lib")
|
||||
+ userlib = os.path.join(user, "lib")
|
||||
+ # replace platbase first because usually purebase is a prefix of platbase
|
||||
+ # /usr/lib is prefix of /usr/lib64 and would get replaced first
|
||||
+ modified_path = global_path.replace(platbase, userlib, 1).replace(purebase, userlib, 1)
|
||||
+ else:
|
||||
+ modified_path = global_path.replace(base, user, 1)
|
||||
+
|
||||
user_path = get_path(name, 'posix_user')
|
||||
- self.assertEqual(user_path, global_path.replace(base, user, 1))
|
||||
+ self.assertEqual(user_path, modified_path)
|
||||
|
||||
def test_main(self):
|
||||
# just making sure _main() runs and returns things in the stdout
|
||||
--- a/Lib/trace.py
|
||||
+++ b/Lib/trace.py
|
||||
@@ -52,6 +52,7 @@ import linecache
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
+import sysconfig
|
||||
import token
|
||||
import tokenize
|
||||
import inspect
|
||||
@@ -669,9 +670,8 @@ def main():
|
||||
opts = parser.parse_args()
|
||||
|
||||
if opts.ignore_dir:
|
||||
- rel_path = 'lib', 'python{0.major}.{0.minor}'.format(sys.version_info)
|
||||
- _prefix = os.path.join(sys.base_prefix, *rel_path)
|
||||
- _exec_prefix = os.path.join(sys.base_exec_prefix, *rel_path)
|
||||
+ _prefix = sysconfig.get_path("stdlib")
|
||||
+ _exec_prefix = sysconfig.get_path("platstdlib")
|
||||
|
||||
def parse_ignore_dir(s):
|
||||
s = os.path.expanduser(os.path.expandvars(s))
|
||||
--- a/Makefile.pre.in
|
||||
+++ b/Makefile.pre.in
|
||||
@@ -133,13 +133,16 @@ exec_prefix= @exec_prefix@
|
||||
# Install prefix for data files
|
||||
datarootdir= @datarootdir@
|
||||
|
||||
+# Name of "lib" directory under prefix
|
||||
+platlibdir= @platlibdir@
|
||||
+
|
||||
# Expanded directories
|
||||
BINDIR= @bindir@
|
||||
LIBDIR= @libdir@
|
||||
MANDIR= @mandir@
|
||||
INCLUDEDIR= @includedir@
|
||||
CONFINCLUDEDIR= $(exec_prefix)/include
|
||||
-SCRIPTDIR= $(prefix)/lib
|
||||
+SCRIPTDIR= @libdir@
|
||||
ABIFLAGS= @ABIFLAGS@
|
||||
|
||||
# Detailed destination directories
|
||||
@@ -746,6 +749,7 @@ Modules/getpath.o: $(srcdir)/Modules/get
|
||||
-DEXEC_PREFIX='"$(exec_prefix)"' \
|
||||
-DVERSION='"$(VERSION)"' \
|
||||
-DVPATH='"$(VPATH)"' \
|
||||
+ -DPLATLIBDIR='"$(platlibdir)"' \
|
||||
-o $@ $(srcdir)/Modules/getpath.c
|
||||
|
||||
Programs/python.o: $(srcdir)/Programs/python.c
|
||||
--- a/Modules/getpath.c
|
||||
+++ b/Modules/getpath.c
|
||||
@@ -51,9 +51,10 @@
|
||||
* pybuilddir.txt. If the landmark is found, we're done.
|
||||
*
|
||||
* For the remaining steps, the prefix landmark will always be
|
||||
- * lib/python$VERSION/os.py and the exec_prefix will always be
|
||||
- * lib/python$VERSION/lib-dynload, where $VERSION is Python's version
|
||||
- * number as supplied by the Makefile. Note that this means that no more
|
||||
+ * $lib/python$VERSION/os.py and the exec_prefix will always be
|
||||
+ * $lib/python$VERSION/lib-dynload, where $VERSION is Python's version
|
||||
+ * number and $lib is PLATLIBDIR as supplied by the Makefile. (usually
|
||||
+ * "lib", "lib32" or "lib64"). Note that this means that no more
|
||||
* build directory checking is performed; if the first step did not find
|
||||
* the landmarks, the assumption is that python is running from an
|
||||
* installed setup.
|
||||
@@ -82,7 +83,7 @@
|
||||
* containing the shared library modules is appended. The environment
|
||||
* variable $PYTHONPATH is inserted in front of it all. Finally, the
|
||||
* prefix and exec_prefix globals are tweaked so they reflect the values
|
||||
- * expected by other code, by stripping the "lib/python$VERSION/..." stuff
|
||||
+ * expected by other code, by stripping the "$lib/python$VERSION/..." stuff
|
||||
* off. If either points to the build directory, the globals are reset to
|
||||
* the corresponding preprocessor variables (so sys.prefix will reflect the
|
||||
* installation location, even though sys.path points into the build
|
||||
@@ -101,8 +102,8 @@
|
||||
#endif
|
||||
|
||||
|
||||
-#if !defined(PREFIX) || !defined(EXEC_PREFIX) || !defined(VERSION) || !defined(VPATH)
|
||||
-#error "PREFIX, EXEC_PREFIX, VERSION, and VPATH must be constant defined"
|
||||
+#if !defined(PREFIX) || !defined(EXEC_PREFIX) || !defined(VERSION) || !defined(VPATH) || !defined(PLATLIBDIR)
|
||||
+#error "PREFIX, EXEC_PREFIX, VERSION, VPATH and PLATLIBDIR must be constant defined"
|
||||
#endif
|
||||
|
||||
#ifndef LANDMARK
|
||||
@@ -494,7 +495,7 @@ calculate_path(void)
|
||||
_pythonpath = Py_DecodeLocale(PYTHONPATH, NULL);
|
||||
_prefix = Py_DecodeLocale(PREFIX, NULL);
|
||||
_exec_prefix = Py_DecodeLocale(EXEC_PREFIX, NULL);
|
||||
- lib_python = Py_DecodeLocale("lib/python" VERSION, NULL);
|
||||
+ lib_python = Py_DecodeLocale(PLATLIBDIR "/python" VERSION, NULL);
|
||||
|
||||
if (!_pythonpath || !_prefix || !_exec_prefix || !lib_python) {
|
||||
Py_FatalError(
|
||||
--- a/configure.ac
|
||||
+++ b/configure.ac
|
||||
@@ -4768,12 +4768,26 @@ AC_MSG_CHECKING(LDVERSION)
|
||||
LDVERSION='$(VERSION)$(ABIFLAGS)'
|
||||
AC_MSG_RESULT($LDVERSION)
|
||||
|
||||
+# platlibdir must be defined before LIBPL definition
|
||||
+AC_MSG_CHECKING(for custom platlibdir)
|
||||
+AC_ARG_WITH(custom-platlibdir,
|
||||
+ [AS_HELP_STRING([--with-custom-platlibdir=<libdirname>],
|
||||
+ [set the platlibdir name to a custom string])],
|
||||
+ [],
|
||||
+ [with_custom_platlibdir=yes])
|
||||
+AS_CASE($with_custom_platlibdir,
|
||||
+ [yes],[platlibdir=`basename ${libdir}`],
|
||||
+ [no],[platlibdir=lib],
|
||||
+ [platlibdir=$with_custom_platlibdir])
|
||||
+AC_MSG_RESULT($platlibdir)
|
||||
+AC_SUBST(platlibdir)
|
||||
+
|
||||
dnl define LIBPL after ABIFLAGS and LDVERSION is defined.
|
||||
AC_SUBST(PY_ENABLE_SHARED)
|
||||
if test x$PLATFORM_TRIPLET = x; then
|
||||
- LIBPL='$(prefix)'"/lib/python${VERSION}/config-${LDVERSION}"
|
||||
+ LIBPL='$(prefix)'"/${platlibdir}/python${VERSION}/config-${LDVERSION}"
|
||||
else
|
||||
- LIBPL='$(prefix)'"/lib/python${VERSION}/config-${LDVERSION}-${PLATFORM_TRIPLET}"
|
||||
+ LIBPL='$(prefix)'"/${platlibdir}/python${VERSION}/config-${LDVERSION}-${PLATFORM_TRIPLET}"
|
||||
fi
|
||||
AC_SUBST(LIBPL)
|
||||
|
11542
python.keyring
Normal file
11542
python.keyring
Normal file
File diff suppressed because it is too large
Load diff
135
python3-sorted_tar.patch
Normal file
135
python3-sorted_tar.patch
Normal file
|
@ -0,0 +1,135 @@
|
|||
commit c11b93fd5e04c2541954ba7bc7b17027742edad1
|
||||
Author: Bernhard M. Wiedemann <githubbmw@lsmod.de>
|
||||
Date: Wed Jan 31 11:17:10 2018 +0100
|
||||
|
||||
bpo-30693: zip+tarfile: sort directory listing (#2263)
|
||||
|
||||
tarfile and zipfile now sort directory listing to generate tar and zip archives
|
||||
in a more reproducible way.
|
||||
|
||||
See also https://reproducible-builds.org/docs/stable-inputs/ on that topic.
|
||||
|
||||
---
|
||||
Doc/library/tarfile.rst | 3 +
|
||||
Doc/library/zipfile.rst | 5 +-
|
||||
Lib/tarfile.py | 2
|
||||
Lib/test/test_tarfile.py | 24 ++++++++++
|
||||
Lib/zipfile.py | 6 +-
|
||||
Misc/NEWS.d/next/Library/2017-11-27-15-09-49.bpo-30693.yC4mJ7.rst | 1
|
||||
Misc/NEWS.d/next/Library/2017-11-27-15-09-49.bpo-30693.yC4mJ8.rst | 1
|
||||
7 files changed, 37 insertions(+), 5 deletions(-)
|
||||
|
||||
--- a/Doc/library/tarfile.rst
|
||||
+++ b/Doc/library/tarfile.rst
|
||||
@@ -467,6 +467,9 @@ be finalized; only the internally used f
|
||||
The *exclude* parameter is deprecated, please use the *filter* parameter
|
||||
instead.
|
||||
|
||||
+ .. versionchanged:: 3.6.4
|
||||
+ Recursion adds entries in sorted order.
|
||||
+
|
||||
|
||||
.. method:: TarFile.addfile(tarinfo, fileobj=None)
|
||||
|
||||
--- a/Doc/library/zipfile.rst
|
||||
+++ b/Doc/library/zipfile.rst
|
||||
@@ -466,7 +466,7 @@ The :class:`PyZipFile` constructor takes
|
||||
:file:`\*.pyc` are added at the top level. If the directory is a
|
||||
package directory, then all :file:`\*.pyc` are added under the package
|
||||
name as a file path, and if any subdirectories are package directories,
|
||||
- all of these are added recursively.
|
||||
+ all of these are added recursively in sorted order.
|
||||
|
||||
*basename* is intended for internal use only.
|
||||
|
||||
@@ -499,6 +499,9 @@ The :class:`PyZipFile` constructor takes
|
||||
.. versionchanged:: 3.6.2
|
||||
The *pathname* parameter accepts a :term:`path-like object`.
|
||||
|
||||
+ .. versionchanged:: 3.6.4
|
||||
+ Recursion sorts directory entries.
|
||||
+
|
||||
|
||||
.. _zipinfo-objects:
|
||||
|
||||
--- a/Lib/tarfile.py
|
||||
+++ b/Lib/tarfile.py
|
||||
@@ -1955,7 +1955,7 @@ class TarFile(object):
|
||||
elif tarinfo.isdir():
|
||||
self.addfile(tarinfo)
|
||||
if recursive:
|
||||
- for f in os.listdir(name):
|
||||
+ for f in sorted(os.listdir(name)):
|
||||
self.add(os.path.join(name, f), os.path.join(arcname, f),
|
||||
recursive, exclude, filter=filter)
|
||||
|
||||
--- a/Lib/test/test_tarfile.py
|
||||
+++ b/Lib/test/test_tarfile.py
|
||||
@@ -1136,6 +1136,30 @@ class WriteTest(WriteTestBase, unittest.
|
||||
finally:
|
||||
support.rmdir(path)
|
||||
|
||||
+ # mock the following:
|
||||
+ # os.listdir: so we know that files are in the wrong order
|
||||
+ @unittest.mock.patch('os.listdir')
|
||||
+ def test_ordered_recursion(self, mock_listdir):
|
||||
+ path = os.path.join(TEMPDIR, "directory")
|
||||
+ os.mkdir(path)
|
||||
+ open(os.path.join(path, "1"), "a").close()
|
||||
+ open(os.path.join(path, "2"), "a").close()
|
||||
+ mock_listdir.return_value = ["2", "1"]
|
||||
+ try:
|
||||
+ tar = tarfile.open(tmpname, self.mode)
|
||||
+ try:
|
||||
+ tar.add(path)
|
||||
+ paths = []
|
||||
+ for m in tar.getmembers():
|
||||
+ paths.append(os.path.split(m.name)[-1])
|
||||
+ self.assertEqual(paths, ["directory", "1", "2"]);
|
||||
+ finally:
|
||||
+ tar.close()
|
||||
+ finally:
|
||||
+ support.unlink(os.path.join(path, "1"))
|
||||
+ support.unlink(os.path.join(path, "2"))
|
||||
+ support.rmdir(path)
|
||||
+
|
||||
def test_gettarinfo_pathlike_name(self):
|
||||
with tarfile.open(tmpname, self.mode) as tar:
|
||||
path = pathlib.Path(TEMPDIR) / "file"
|
||||
--- a/Lib/zipfile.py
|
||||
+++ b/Lib/zipfile.py
|
||||
@@ -1860,7 +1860,7 @@ class PyZipFile(ZipFile):
|
||||
if self.debug:
|
||||
print("Adding", arcname)
|
||||
self.write(fname, arcname)
|
||||
- dirlist = os.listdir(pathname)
|
||||
+ dirlist = sorted(os.listdir(pathname))
|
||||
dirlist.remove("__init__.py")
|
||||
# Add all *.py files and package subdirectories
|
||||
for filename in dirlist:
|
||||
@@ -1885,7 +1885,7 @@ class PyZipFile(ZipFile):
|
||||
# This is NOT a package directory, add its files at top level
|
||||
if self.debug:
|
||||
print("Adding files from directory", pathname)
|
||||
- for filename in os.listdir(pathname):
|
||||
+ for filename in sorted(os.listdir(pathname)):
|
||||
path = os.path.join(pathname, filename)
|
||||
root, ext = os.path.splitext(filename)
|
||||
if ext == ".py":
|
||||
@@ -2042,7 +2042,7 @@ def main(args = None):
|
||||
elif os.path.isdir(path):
|
||||
if zippath:
|
||||
zf.write(path, zippath)
|
||||
- for nm in os.listdir(path):
|
||||
+ for nm in sorted(os.listdir(path)):
|
||||
addToZip(zf,
|
||||
os.path.join(path, nm), os.path.join(zippath, nm))
|
||||
# else: ignore
|
||||
--- /dev/null
|
||||
+++ b/Misc/NEWS.d/next/Library/2017-11-27-15-09-49.bpo-30693.yC4mJ7.rst
|
||||
@@ -0,0 +1 @@
|
||||
+The ZipFile class now recurses directories in a reproducible way.
|
||||
--- /dev/null
|
||||
+++ b/Misc/NEWS.d/next/Library/2017-11-27-15-09-49.bpo-30693.yC4mJ8.rst
|
||||
@@ -0,0 +1 @@
|
||||
+The TarFile class now recurses directories in a reproducible way.
|
1109
python36.changes
Normal file
1109
python36.changes
Normal file
File diff suppressed because it is too large
Load diff
1120
python36.spec
Normal file
1120
python36.spec
Normal file
File diff suppressed because it is too large
Load diff
92
remove-sphinx40-warning.patch
Normal file
92
remove-sphinx40-warning.patch
Normal file
|
@ -0,0 +1,92 @@
|
|||
From 02f1485b1a26b575ad3a2c957ea279fcff789f63 Mon Sep 17 00:00:00 2001
|
||||
From: Dong-hee Na <donghee.na92@gmail.com>
|
||||
Date: Fri, 11 Sep 2020 20:41:43 +0900
|
||||
Subject: [PATCH 1/3] bpo-35293: Remove RemovedInSphinx40Warning
|
||||
|
||||
---
|
||||
Doc/tools/extensions/pyspecific.py | 40 ++++++++++++-------
|
||||
.../2020-09-12-17-37-13.bpo-35293._cOwPD.rst | 1 +
|
||||
2 files changed, 26 insertions(+), 15 deletions(-)
|
||||
create mode 100644 Misc/NEWS.d/next/Documentation/2020-09-12-17-37-13.bpo-35293._cOwPD.rst
|
||||
|
||||
Index: Python-3.6.13/Doc/tools/extensions/pyspecific.py
|
||||
===================================================================
|
||||
--- Python-3.6.13.orig/Doc/tools/extensions/pyspecific.py
|
||||
+++ Python-3.6.13/Doc/tools/extensions/pyspecific.py
|
||||
@@ -27,7 +27,12 @@ from sphinx.util.nodes import split_expl
|
||||
from sphinx.writers.html import HTMLTranslator
|
||||
from sphinx.writers.text import TextWriter, TextTranslator
|
||||
from sphinx.writers.latex import LaTeXTranslator
|
||||
-from sphinx.domains.python import PyModulelevel, PyClassmember
|
||||
+
|
||||
+try:
|
||||
+ from sphinx.domains.python import PyFunction, PyMethod
|
||||
+except ImportError:
|
||||
+ from sphinx.domains.python import PyClassmember as PyMethod
|
||||
+ from sphinx.domains.python import PyModulelevel as PyFunction
|
||||
|
||||
# Support for checking for suspicious markup
|
||||
|
||||
@@ -142,17 +147,18 @@ class PyDecoratorMixin(object):
|
||||
return False
|
||||
|
||||
|
||||
-class PyDecoratorFunction(PyDecoratorMixin, PyModulelevel):
|
||||
+class PyDecoratorFunction(PyDecoratorMixin, PyFunction):
|
||||
def run(self):
|
||||
# a decorator function is a function after all
|
||||
self.name = 'py:function'
|
||||
- return PyModulelevel.run(self)
|
||||
+ return PyFunction.run(self)
|
||||
|
||||
|
||||
-class PyDecoratorMethod(PyDecoratorMixin, PyClassmember):
|
||||
+# TODO: Use sphinx.domains.python.PyDecoratorMethod when possible
|
||||
+class PyDecoratorMethod(PyDecoratorMixin, PyMethod):
|
||||
def run(self):
|
||||
self.name = 'py:method'
|
||||
- return PyClassmember.run(self)
|
||||
+ return PyMethod.run(self)
|
||||
|
||||
|
||||
class PyCoroutineMixin(object):
|
||||
@@ -162,19 +168,19 @@ class PyCoroutineMixin(object):
|
||||
return ret
|
||||
|
||||
|
||||
-class PyCoroutineFunction(PyCoroutineMixin, PyModulelevel):
|
||||
+class PyCoroutineFunction(PyCoroutineMixin, PyFunction):
|
||||
def run(self):
|
||||
self.name = 'py:function'
|
||||
- return PyModulelevel.run(self)
|
||||
+ return PyFunction.run(self)
|
||||
|
||||
|
||||
-class PyCoroutineMethod(PyCoroutineMixin, PyClassmember):
|
||||
+class PyCoroutineMethod(PyCoroutineMixin, PyMethod):
|
||||
def run(self):
|
||||
self.name = 'py:method'
|
||||
- return PyClassmember.run(self)
|
||||
+ return PyMethod.run(self)
|
||||
|
||||
|
||||
-class PyAbstractMethod(PyClassmember):
|
||||
+class PyAbstractMethod(PyMethod):
|
||||
|
||||
def handle_signature(self, sig, signode):
|
||||
ret = super(PyAbstractMethod, self).handle_signature(sig, signode)
|
||||
@@ -184,7 +190,7 @@ class PyAbstractMethod(PyClassmember):
|
||||
|
||||
def run(self):
|
||||
self.name = 'py:method'
|
||||
- return PyClassmember.run(self)
|
||||
+ return PyMethod.run(self)
|
||||
|
||||
|
||||
# Support for documenting version of removal in deprecations
|
||||
Index: Python-3.6.13/Misc/NEWS.d/next/Documentation/2020-09-12-17-37-13.bpo-35293._cOwPD.rst
|
||||
===================================================================
|
||||
--- /dev/null
|
||||
+++ Python-3.6.13/Misc/NEWS.d/next/Documentation/2020-09-12-17-37-13.bpo-35293._cOwPD.rst
|
||||
@@ -0,0 +1 @@
|
||||
+Fix RemovedInSphinx40Warning when building the documentation. Patch by Dong-hee Na.
|
30
riscv64-ctypes.patch
Normal file
30
riscv64-ctypes.patch
Normal file
|
@ -0,0 +1,30 @@
|
|||
From 742d768656512a469ce9571b1cbd777def7bc5ea Mon Sep 17 00:00:00 2001
|
||||
From: Andreas Schwab <schwab@linux-m68k.org>
|
||||
Date: Tue, 29 Jan 2019 17:16:10 +0100
|
||||
Subject: [PATCH] bpo-35847: RISC-V needs CTYPES_PASS_BY_REF_HACK (GH-11694)
|
||||
|
||||
This fixes the ctypes.test.test_structures.StructureTestCase test.
|
||||
|
||||
|
||||
https://bugs.python.org/issue35847
|
||||
---
|
||||
.../next/Library/2019-01-29-09-11-09.bpo-35847.eiSi4t.rst | 1 +
|
||||
Modules/_ctypes/callproc.c | 2 +-
|
||||
2 files changed, 2 insertions(+), 1 deletion(-)
|
||||
create mode 100644 Misc/NEWS.d/next/Library/2019-01-29-09-11-09.bpo-35847.eiSi4t.rst
|
||||
|
||||
--- /dev/null
|
||||
+++ b/Misc/NEWS.d/next/Library/2019-01-29-09-11-09.bpo-35847.eiSi4t.rst
|
||||
@@ -0,0 +1 @@
|
||||
+RISC-V needed the CTYPES_PASS_BY_REF_HACK. Fixes ctypes Structure test_pass_by_value.
|
||||
--- a/Modules/_ctypes/callproc.c
|
||||
+++ b/Modules/_ctypes/callproc.c
|
||||
@@ -1050,7 +1050,7 @@ GetComError(HRESULT errcode, GUID *riid,
|
||||
#endif
|
||||
|
||||
#if (defined(__x86_64__) && (defined(__MINGW64__) || defined(__CYGWIN__))) || \
|
||||
- defined(__aarch64__)
|
||||
+ defined(__aarch64__) || defined(__riscv)
|
||||
#define CTYPES_PASS_BY_REF_HACK
|
||||
#define POW2(x) (((x & ~(x - 1)) == x) ? x : 0)
|
||||
#define IS_PASS_BY_REF(x) (x > 8 || !POW2(x))
|
157
riscv64-support.patch
Normal file
157
riscv64-support.patch
Normal file
|
@ -0,0 +1,157 @@
|
|||
From ddbe976964933cb943c6383a776e800cc7e0f47d Mon Sep 17 00:00:00 2001
|
||||
From: Matthias Klose <doko42@users.noreply.github.com>
|
||||
Date: Mon, 30 Apr 2018 19:22:16 +0200
|
||||
Subject: [PATCH] bpo-33377: add triplets for mips-r6 and riscv (#6655)
|
||||
|
||||
* issue33377: add triplets for mips-r6 and riscv
|
||||
|
||||
* issue33377: add triplets for mips-r6 and riscv (NEWS entry)
|
||||
---
|
||||
.../2018-04-30-16-53-00.bpo-33377.QBh6vP.rst | 2 +
|
||||
configure | 42 ++++++++++++++++++-
|
||||
configure.ac | 28 +++++++++++++
|
||||
3 files changed, 71 insertions(+), 1 deletion(-)
|
||||
create mode 100644 Misc/NEWS.d/next/Build/2018-04-30-16-53-00.bpo-33377.QBh6vP.rst
|
||||
|
||||
--- /dev/null
|
||||
+++ b/Misc/NEWS.d/next/Build/2018-04-30-16-53-00.bpo-33377.QBh6vP.rst
|
||||
@@ -0,0 +1,2 @@
|
||||
+Add new triplets for mips r6 and riscv variants (used in extension
|
||||
+suffixes).
|
||||
--- a/configure
|
||||
+++ b/configure
|
||||
@@ -785,6 +785,7 @@ infodir
|
||||
docdir
|
||||
oldincludedir
|
||||
includedir
|
||||
+runstatedir
|
||||
localstatedir
|
||||
sharedstatedir
|
||||
sysconfdir
|
||||
@@ -898,6 +899,7 @@ datadir='${datarootdir}'
|
||||
sysconfdir='${prefix}/etc'
|
||||
sharedstatedir='${prefix}/com'
|
||||
localstatedir='${prefix}/var'
|
||||
+runstatedir='${localstatedir}/run'
|
||||
includedir='${prefix}/include'
|
||||
oldincludedir='/usr/include'
|
||||
docdir='${datarootdir}/doc/${PACKAGE_TARNAME}'
|
||||
@@ -1150,6 +1152,15 @@ do
|
||||
| -silent | --silent | --silen | --sile | --sil)
|
||||
silent=yes ;;
|
||||
|
||||
+ -runstatedir | --runstatedir | --runstatedi | --runstated \
|
||||
+ | --runstate | --runstat | --runsta | --runst | --runs \
|
||||
+ | --run | --ru | --r)
|
||||
+ ac_prev=runstatedir ;;
|
||||
+ -runstatedir=* | --runstatedir=* | --runstatedi=* | --runstated=* \
|
||||
+ | --runstate=* | --runstat=* | --runsta=* | --runst=* | --runs=* \
|
||||
+ | --run=* | --ru=* | --r=*)
|
||||
+ runstatedir=$ac_optarg ;;
|
||||
+
|
||||
-sbindir | --sbindir | --sbindi | --sbind | --sbin | --sbi | --sb)
|
||||
ac_prev=sbindir ;;
|
||||
-sbindir=* | --sbindir=* | --sbindi=* | --sbind=* | --sbin=* \
|
||||
@@ -1287,7 +1298,7 @@ fi
|
||||
for ac_var in exec_prefix prefix bindir sbindir libexecdir datarootdir \
|
||||
datadir sysconfdir sharedstatedir localstatedir includedir \
|
||||
oldincludedir docdir infodir htmldir dvidir pdfdir psdir \
|
||||
- libdir localedir mandir
|
||||
+ libdir localedir mandir runstatedir
|
||||
do
|
||||
eval ac_val=\$$ac_var
|
||||
# Remove trailing slashes.
|
||||
@@ -1440,6 +1451,7 @@ Fine tuning of the installation director
|
||||
--sysconfdir=DIR read-only single-machine data [PREFIX/etc]
|
||||
--sharedstatedir=DIR modifiable architecture-independent data [PREFIX/com]
|
||||
--localstatedir=DIR modifiable single-machine data [PREFIX/var]
|
||||
+ --runstatedir=DIR modifiable per-process data [LOCALSTATEDIR/run]
|
||||
--libdir=DIR object code libraries [EPREFIX/lib]
|
||||
--includedir=DIR C header files [PREFIX/include]
|
||||
--oldincludedir=DIR C header files for non-gcc [/usr/include]
|
||||
@@ -5261,6 +5273,26 @@ cat >> conftest.c <<EOF
|
||||
ia64-linux-gnu
|
||||
# elif defined(__m68k__) && !defined(__mcoldfire__)
|
||||
m68k-linux-gnu
|
||||
+# elif defined(__mips_hard_float) && defined(__mips_isa_rev) && (__mips_isa_rev >=6) && defined(_MIPSEL)
|
||||
+# if _MIPS_SIM == _ABIO32
|
||||
+ mipsisa32r6el-linux-gnu
|
||||
+# elif _MIPS_SIM == _ABIN32
|
||||
+ mipsisa64r6el-linux-gnuabin32
|
||||
+# elif _MIPS_SIM == _ABI64
|
||||
+ mipsisa64r6el-linux-gnuabi64
|
||||
+# else
|
||||
+# error unknown platform triplet
|
||||
+# endif
|
||||
+# elif defined(__mips_hard_float) && defined(__mips_isa_rev) && (__mips_isa_rev >=6)
|
||||
+# if _MIPS_SIM == _ABIO32
|
||||
+ mipsisa32r6-linux-gnu
|
||||
+# elif _MIPS_SIM == _ABIN32
|
||||
+ mipsisa64r6-linux-gnuabin32
|
||||
+# elif _MIPS_SIM == _ABI64
|
||||
+ mipsisa64r6-linux-gnuabi64
|
||||
+# else
|
||||
+# error unknown platform triplet
|
||||
+# endif
|
||||
# elif defined(__mips_hard_float) && defined(_MIPSEL)
|
||||
# if _MIPS_SIM == _ABIO32
|
||||
mipsel-linux-gnu
|
||||
@@ -5303,6 +5335,14 @@ cat >> conftest.c <<EOF
|
||||
sparc64-linux-gnu
|
||||
# elif defined(__sparc__)
|
||||
sparc-linux-gnu
|
||||
+# elif defined(__riscv)
|
||||
+# if __riscv_xlen == 32
|
||||
+ riscv32-linux-gnu
|
||||
+# elif __riscv_xlen == 64
|
||||
+ riscv64-linux-gnu
|
||||
+# else
|
||||
+# error unknown platform triplet
|
||||
+# endif
|
||||
# else
|
||||
# error unknown platform triplet
|
||||
# endif
|
||||
--- a/configure.ac
|
||||
+++ b/configure.ac
|
||||
@@ -804,6 +804,26 @@ cat >> conftest.c <<EOF
|
||||
ia64-linux-gnu
|
||||
# elif defined(__m68k__) && !defined(__mcoldfire__)
|
||||
m68k-linux-gnu
|
||||
+# elif defined(__mips_hard_float) && defined(__mips_isa_rev) && (__mips_isa_rev >=6) && defined(_MIPSEL)
|
||||
+# if _MIPS_SIM == _ABIO32
|
||||
+ mipsisa32r6el-linux-gnu
|
||||
+# elif _MIPS_SIM == _ABIN32
|
||||
+ mipsisa64r6el-linux-gnuabin32
|
||||
+# elif _MIPS_SIM == _ABI64
|
||||
+ mipsisa64r6el-linux-gnuabi64
|
||||
+# else
|
||||
+# error unknown platform triplet
|
||||
+# endif
|
||||
+# elif defined(__mips_hard_float) && defined(__mips_isa_rev) && (__mips_isa_rev >=6)
|
||||
+# if _MIPS_SIM == _ABIO32
|
||||
+ mipsisa32r6-linux-gnu
|
||||
+# elif _MIPS_SIM == _ABIN32
|
||||
+ mipsisa64r6-linux-gnuabin32
|
||||
+# elif _MIPS_SIM == _ABI64
|
||||
+ mipsisa64r6-linux-gnuabi64
|
||||
+# else
|
||||
+# error unknown platform triplet
|
||||
+# endif
|
||||
# elif defined(__mips_hard_float) && defined(_MIPSEL)
|
||||
# if _MIPS_SIM == _ABIO32
|
||||
mipsel-linux-gnu
|
||||
@@ -846,6 +866,14 @@ cat >> conftest.c <<EOF
|
||||
sparc64-linux-gnu
|
||||
# elif defined(__sparc__)
|
||||
sparc-linux-gnu
|
||||
+# elif defined(__riscv)
|
||||
+# if __riscv_xlen == 32
|
||||
+ riscv32-linux-gnu
|
||||
+# elif __riscv_xlen == 64
|
||||
+ riscv64-linux-gnu
|
||||
+# else
|
||||
+# error unknown platform triplet
|
||||
+# endif
|
||||
# else
|
||||
# error unknown platform triplet
|
||||
# endif
|
BIN
setuptools-44.1.1-py2.py3-none-any.whl
Normal file
BIN
setuptools-44.1.1-py2.py3-none-any.whl
Normal file
Binary file not shown.
17
skip_SSL_tests.patch
Normal file
17
skip_SSL_tests.patch
Normal file
|
@ -0,0 +1,17 @@
|
|||
---
|
||||
Lib/test/test_ssl.py | 4 +---
|
||||
1 file changed, 1 insertion(+), 3 deletions(-)
|
||||
|
||||
--- a/Lib/test/test_ssl.py
|
||||
+++ b/Lib/test/test_ssl.py
|
||||
@@ -138,9 +138,7 @@ def skip_if_broken_ubuntu_ssl(func):
|
||||
try:
|
||||
ssl.SSLContext(ssl.PROTOCOL_SSLv2)
|
||||
except ssl.SSLError:
|
||||
- if (ssl.OPENSSL_VERSION_INFO == (0, 9, 8, 15, 15) and
|
||||
- platform.linux_distribution() == ('debian', 'squeeze/sid', '')):
|
||||
- raise unittest.SkipTest("Patched Ubuntu OpenSSL breaks behaviour")
|
||||
+ raise unittest.SkipTest("Patched Ubuntu OpenSSL breaks behaviour")
|
||||
return func(*args, **kwargs)
|
||||
return f
|
||||
else:
|
135
skip_random_failing_tests.patch
Normal file
135
skip_random_failing_tests.patch
Normal file
|
@ -0,0 +1,135 @@
|
|||
From: Michel Normand <normand@linux.vnet.ibm.com>
|
||||
Subject: skip random failing tests
|
||||
Date: Thu, 18 Jan 2018 15:48:52 +0100
|
||||
|
||||
skip random failing tests:
|
||||
in _test_multiprocessing.py:
|
||||
test_async_timeout
|
||||
test_waitfor_timeout
|
||||
test_wait_integer
|
||||
in test_events.py:
|
||||
test_run_until_complete
|
||||
test_signal_handling_args
|
||||
test_call_later
|
||||
|
||||
Reported to fail on ppc64le host on multiple osc build trials:
|
||||
(all failed for ppc64le, except one for ppc)
|
||||
===
|
||||
[michel@abanc:~/work/home:michel_mno:branches:devel:languages:python:Factory/python3]
|
||||
$idx=1; while test 1; do echo "trial $idx:"; osc build \
|
||||
--vm-type kvm -j 8 --threads 4 openSUSE_Factory_PowerPC ppc64le \
|
||||
>/tmp/python3_trialx_${idx}.log 2>&1 || break; ((idx++)); done
|
||||
===
|
||||
FAIL: test_async_timeout (test.test_multiprocessing_fork.WithProcessesTestPool)
|
||||
----------------------------------------------------------------------
|
||||
Traceback (most recent call last):
|
||||
File "/home/abuild/rpmbuild/BUILD/Python-3.6.4/Lib/test/_test_multiprocessing.py", line 2017, in test_async_timeout
|
||||
self.assertRaises(multiprocessing.TimeoutError, get, timeout=TIMEOUT2)
|
||||
AssertionError: TimeoutError not raised by <test._test_multiprocessing.TimingWrapper object at 0x7fff89b45f28>
|
||||
===
|
||||
FAIL: test_waitfor_timeout (test.test_multiprocessing_spawn.WithManagerTestCondition)
|
||||
----------------------------------------------------------------------
|
||||
Traceback (most recent call last):
|
||||
File "/home/abuild/rpmbuild/BUILD/Python-3.6.4/Lib/test/_test_multiprocessing.py", line 1169, in test_waitfor_timeout
|
||||
self.assertTrue(success.value)
|
||||
AssertionError: False is not true
|
||||
===
|
||||
FAIL: test_run_until_complete (test.test_asyncio.test_events.SelectEventLoopTests)
|
||||
----------------------------------------------------------------------
|
||||
Traceback (most recent call last):
|
||||
File "/home/abuild/rpmbuild/BUILD/Python-3.6.4/Lib/test/test_asyncio/test_events.py", line 285, in test_run_until_complete
|
||||
self.assertTrue(0.08 <= t1-t0 <= 0.8, t1-t0)
|
||||
AssertionError: False is not true : 3.966844968999993
|
||||
===
|
||||
FAIL: test_signal_handling_args (test.test_asyncio.test_events.SelectEventLoopTests)
|
||||
----------------------------------------------------------------------
|
||||
Traceback (most recent call last):
|
||||
File "/home/abuild/rpmbuild/BUILD/Python-3.6.4/Lib/test/test_asyncio/test_events.py", line 566, in test_signal_handling_args
|
||||
self.assertEqual(caught, 1)
|
||||
AssertionError: 0 != 1
|
||||
=== (ppc)
|
||||
FAIL: test_wait_integer (test.test_multiprocessing_spawn.TestWait)
|
||||
----------------------------------------------------------------------
|
||||
Traceback (most recent call last):
|
||||
File "/home/abuild/rpmbuild/BUILD/Python-3.6.4/Lib/test/_test_multiprocessing.py", line 3762, in test_wait_integer
|
||||
self.assertLess(delta, expected + 2)
|
||||
AssertionError: 5.576360702514648 not less than 5
|
||||
===
|
||||
===
|
||||
======================================================================
|
||||
FAIL: test_call_later (test.test_asyncio.test_events.PollEventLoopTests)
|
||||
----------------------------------------------------------------------
|
||||
Traceback (most recent call last):
|
||||
File "/home/abuild/rpmbuild/BUILD/Python-3.6.4/Lib/test/test_asyncio/test_events.py", line 309, in test_call_later
|
||||
self.assertTrue(0.08 <= t1-t0 <= 0.8, t1-t0)
|
||||
AssertionError: False is not true : 2.7154626529999746
|
||||
|
||||
======================================================================
|
||||
FAIL: test_call_later (test.test_asyncio.test_events.SelectEventLoopTests)
|
||||
----------------------------------------------------------------------
|
||||
Traceback (most recent call last):
|
||||
File "/home/abuild/rpmbuild/BUILD/Python-3.6.4/Lib/test/test_asyncio/test_events.py", line 309, in test_call_later
|
||||
self.assertTrue(0.08 <= t1-t0 <= 0.8, t1-t0)
|
||||
AssertionError: False is not true : 4.137590406000015
|
||||
===
|
||||
|
||||
|
||||
Signed-off-by: Michel Normand <normand@linux.vnet.ibm.com>
|
||||
---
|
||||
Lib/test/_test_multiprocessing.py | 3 +++
|
||||
Lib/test/test_asyncio/test_events.py | 4 +++-
|
||||
2 files changed, 6 insertions(+), 1 deletion(-)
|
||||
|
||||
--- a/Lib/test/_test_multiprocessing.py
|
||||
+++ b/Lib/test/_test_multiprocessing.py
|
||||
@@ -1177,6 +1177,7 @@ class _TestCondition(BaseTestCase):
|
||||
success.value = True
|
||||
|
||||
@unittest.skipUnless(HAS_SHAREDCTYPES, 'needs sharedctypes')
|
||||
+ @unittest.skip("transient failure on PowerPC")
|
||||
def test_waitfor_timeout(self):
|
||||
# based on test in test/lock_tests.py
|
||||
cond = self.Condition()
|
||||
@@ -2066,6 +2067,7 @@ class _TestPool(BaseTestCase):
|
||||
self.assertEqual(get(), 49)
|
||||
self.assertTimingAlmostEqual(get.elapsed, TIMEOUT1)
|
||||
|
||||
+ @unittest.skip("transient failure on PowerPC")
|
||||
def test_async_timeout(self):
|
||||
res = self.pool.apply_async(sqr, (6, TIMEOUT2 + 1.0))
|
||||
get = TimingWrapper(res.get)
|
||||
@@ -3799,6 +3801,7 @@ class TestWait(unittest.TestCase):
|
||||
sem.release()
|
||||
time.sleep(period)
|
||||
|
||||
+ @unittest.skip("transient failure on PowerPC")
|
||||
def test_wait_integer(self):
|
||||
from multiprocessing.connection import wait
|
||||
|
||||
--- a/Lib/test/test_asyncio/test_events.py
|
||||
+++ b/Lib/test/test_asyncio/test_events.py
|
||||
@@ -272,6 +272,7 @@ class EventLoopTestsMixin:
|
||||
# Note: because of the default Windows timing granularity of
|
||||
# 15.6 msec, we use fairly long sleep times here (~100 msec).
|
||||
|
||||
+ @unittest.skip("transient failure on PowerPC")
|
||||
def test_run_until_complete(self):
|
||||
t0 = self.loop.time()
|
||||
self.loop.run_until_complete(asyncio.sleep(0.1, loop=self.loop))
|
||||
@@ -299,7 +300,7 @@ class EventLoopTestsMixin:
|
||||
self.loop.run_forever()
|
||||
t1 = time.monotonic()
|
||||
self.assertEqual(results, ['hello world'])
|
||||
- self.assertTrue(0.08 <= t1-t0 <= 0.8, t1-t0)
|
||||
+ self.assertTrue(0.08 <= t1-t0 <= 5.0, t1-t0)
|
||||
|
||||
def test_call_soon(self):
|
||||
results = []
|
||||
@@ -562,6 +563,7 @@ class EventLoopTestsMixin:
|
||||
self.assertEqual(caught, 1)
|
||||
|
||||
@unittest.skipUnless(hasattr(signal, 'SIGALRM'), 'No SIGALRM')
|
||||
+ @unittest.skip("transient failure on PowerPC")
|
||||
def test_signal_handling_args(self):
|
||||
some_args = (42,)
|
||||
caught = 0
|
69
skipped_tests.py
Normal file
69
skipped_tests.py
Normal file
|
@ -0,0 +1,69 @@
|
|||
#!/usr/bin/python3
"""
Simple regexp-based skipped test checker.

Scans the unpacked CPython source tree for the names of all shipped test
modules, collects every test name mentioned in the spec file (presumably
for exclusion), and reports discrepancies between the two sets, with
special attention to tests excluded only for QEMU user-space builds.
"""

MAIN = "python36.spec"

import glob
import re
from os.path import basename

# Every test module found under the unpacked Python-*/Lib/test directory,
# with any ".py" suffix stripped so the names match spec-file mentions.
alltests = set()
# Tests mentioned inside a QEMU-only conditional of the spec file;
# filled in as a side effect of find_tests_in_spec().
qemu_exclusions = set()

for entry in glob.glob("Python-*/Lib/test/test_*"):
    name = basename(entry)
    if name.endswith(".py"):
        name = name[:-3]
    alltests.add(name)

# A test name is any test_* word preceded by whitespace or a double quote.
test_pattern = re.compile(r'[\s"](test_\w+)\b')


def find_tests_in_spec(specname):
    """Return the set of test names mentioned anywhere in *specname*.

    As a side effect, accumulate into the module-level ``qemu_exclusions``
    every test mentioned between ``%if 0%{?qemu_user_space_build} > 0``
    and the matching ``%endif`` in the spec file.
    """
    global qemu_exclusions

    found_tests = set()
    with open(specname) as spec:
        in_qemu = False
        for raw in spec:
            stripped = raw.strip()
            # Drop trailing "#" comments before looking for test names.
            if "#" in stripped:
                stripped = stripped[:stripped.index("#")]
            tests = set(test_pattern.findall(stripped))
            found_tests |= tests
            if stripped == "%if 0%{?qemu_user_space_build} > 0":
                in_qemu = True
            if in_qemu:
                if stripped == "%endif":
                    in_qemu = False
                qemu_exclusions |= tests
    return found_tests


excluded = find_tests_in_spec(MAIN)

mentioned = excluded
nonexistent = mentioned - alltests
missing = excluded - qemu_exclusions

print("--- the following tests are excluded for QEMU and not tested in python")
print("--- (that probably means we don't need to worry about them)")
for test in sorted(qemu_exclusions - excluded):
    print(test)

print("--- the following tests might be excluded in python:")
for test in sorted(missing):
    print(test)

if nonexistent:
    print("--- the following tests don't exist:")
    for test in sorted(nonexistent):
        print(test)
|
29
sphinx-update-removed-function.patch
Normal file
29
sphinx-update-removed-function.patch
Normal file
|
@ -0,0 +1,29 @@
|
|||
From 960bb883769e5c64a63b014590d75654db87ffb0 Mon Sep 17 00:00:00 2001
|
||||
From: Pablo Galindo <Pablogsal@gmail.com>
|
||||
Date: Fri, 10 May 2019 22:58:17 +0100
|
||||
Subject: [PATCH] Fix sphinx deprecation warning about env.note_versionchange()
|
||||
(GH-13236)
|
||||
|
||||
---
|
||||
Doc/tools/extensions/pyspecific.py | 2 +-
|
||||
1 file changed, 1 insertion(+), 1 deletion(-)
|
||||
|
||||
--- a/Doc/tools/extensions/pyspecific.py
|
||||
+++ b/Doc/tools/extensions/pyspecific.py
|
||||
@@ -231,10 +231,14 @@ class DeprecatedRemoved(Directive):
|
||||
translatable=False)
|
||||
node.append(para)
|
||||
env = self.state.document.settings.env
|
||||
- env.note_versionchange('deprecated', version[0], node, self.lineno)
|
||||
+ # deprecated pre-Sphinx-2 method
|
||||
+ if hasattr(env, 'note_versionchange'):
|
||||
+ env.note_versionchange('deprecated', version[0], node, self.lineno)
|
||||
+ # new method
|
||||
+ else:
|
||||
+ env.get_domain('changeset').note_changeset(node)
|
||||
return [node] + messages
|
||||
|
||||
-
|
||||
# Support for including Misc/NEWS
|
||||
|
||||
issue_re = re.compile('(?:[Ii]ssue #|bpo-)([0-9]+)')
|
12
subprocess-raise-timeout.patch
Normal file
12
subprocess-raise-timeout.patch
Normal file
|
@ -0,0 +1,12 @@
|
|||
--- a/Lib/test/test_subprocess.py
|
||||
+++ b/Lib/test/test_subprocess.py
|
||||
@@ -1079,7 +1079,8 @@ class ProcessTestCase(BaseTestCase):
|
||||
self.assertIn("0.0001", str(c.exception)) # For coverage of __str__.
|
||||
# Some heavily loaded buildbots (sparc Debian 3.x) require this much
|
||||
# time to start.
|
||||
- self.assertEqual(p.wait(timeout=3), 0)
|
||||
+ # OBS might require even more
|
||||
+ self.assertEqual(p.wait(timeout=10), 0)
|
||||
|
||||
def test_wait_endtime(self):
|
||||
"""Confirm that the deprecated endtime parameter warns."""
|
84
support-expat-CVE-2022-25236-patched.patch
Normal file
84
support-expat-CVE-2022-25236-patched.patch
Normal file
|
@ -0,0 +1,84 @@
|
|||
From 7da97f61816f3cadaa6788804b22a2434b40e8c5 Mon Sep 17 00:00:00 2001
|
||||
From: "Miss Islington (bot)"
|
||||
<31488909+miss-islington@users.noreply.github.com>
|
||||
Date: Mon, 21 Feb 2022 08:16:09 -0800
|
||||
Subject: [PATCH] bpo-46811: Make test suite support Expat >=2.4.5 (GH-31453)
|
||||
(GH-31472)
|
||||
|
||||
Curly brackets were never allowed in namespace URIs
|
||||
according to RFC 3986, and so-called namespace-validating
|
||||
XML parsers have the right to reject them as invalid URIs.
|
||||
|
||||
libexpat >=2.4.5 has become stricter in that regard due to
|
||||
related security issues; with ET.XML instantiating a
|
||||
namespace-aware parser under the hood, this test has no
|
||||
future in CPython.
|
||||
|
||||
References:
|
||||
- https://datatracker.ietf.org/doc/html/rfc3986
|
||||
- https://www.w3.org/TR/xml-names/
|
||||
|
||||
Also, test_minidom.py: Support Expat >=2.4.5
|
||||
(cherry picked from commit 2cae93832f46b245847bdc252456ddf7742ef45e)
|
||||
|
||||
Co-authored-by: Sebastian Pipping <sebastian@pipping.org>
|
||||
---
|
||||
Lib/test/test_minidom.py | 16 ++++++++++++----
|
||||
Lib/test/test_xml_etree.py | 6 ------
|
||||
2 files changed, 12 insertions(+), 10 deletions(-)
|
||||
create mode 100644 Misc/NEWS.d/next/Library/2022-02-20-21-03-31.bpo-46811.8BxgdQ.rst
|
||||
|
||||
--- a/Lib/test/test_minidom.py
|
||||
+++ b/Lib/test/test_minidom.py
|
||||
@@ -9,6 +9,7 @@ import xml.dom.minidom
|
||||
|
||||
from xml.dom.minidom import parse, Node, Document, parseString
|
||||
from xml.dom.minidom import getDOMImplementation
|
||||
+from xml.parsers.expat import ExpatError
|
||||
|
||||
|
||||
tstfile = support.findfile("test.xml", subdir="xmltestdata")
|
||||
@@ -1156,8 +1157,12 @@ class MinidomTest(unittest.TestCase):
|
||||
|
||||
# Verify that character decoding errors raise exceptions instead
|
||||
# of crashing
|
||||
- self.assertRaises(UnicodeDecodeError, parseString,
|
||||
- b'<fran\xe7ais>Comment \xe7a va ? Tr\xe8s bien ?</fran\xe7ais>')
|
||||
+ # It doesn’t make any sense to insist on the exact text of the
|
||||
+ # error message, or even the exact Exception … it is enough that
|
||||
+ # the error has been discovered.
|
||||
+ with self.assertRaises((UnicodeDecodeError, ExpatError)):
|
||||
+ parseString(
|
||||
+ b'<fran\xe7ais>Comment \xe7a va ? Tr\xe8s bien ?</fran\xe7ais>')
|
||||
|
||||
doc.unlink()
|
||||
|
||||
@@ -1602,8 +1607,11 @@ class MinidomTest(unittest.TestCase):
|
||||
self.confirm(doc2.namespaceURI == xml.dom.EMPTY_NAMESPACE)
|
||||
|
||||
def testExceptionOnSpacesInXMLNSValue(self):
|
||||
- with self.assertRaisesRegex(ValueError, 'Unsupported syntax'):
|
||||
- parseString('<element xmlns:abc="http:abc.com/de f g/hi/j k"><abc:foo /></element>')
|
||||
+ # It doesn’t make any sense to insist on the exact text of the
|
||||
+ # error message, or even the exact Exception … it is enough that
|
||||
+ # the error has been discovered.
|
||||
+ with self.assertRaises((ExpatError, ValueError)):
|
||||
+ parseString('<element xmlns:abc="http:abc.com/de f g/hi/j k"><abc:foo /></element>')
|
||||
|
||||
def testDocRemoveChild(self):
|
||||
doc = parse(tstfile)
|
||||
--- a/Lib/test/test_xml_etree.py
|
||||
+++ b/Lib/test/test_xml_etree.py
|
||||
@@ -1668,12 +1668,6 @@ class BugsTest(unittest.TestCase):
|
||||
b"<?xml version='1.0' encoding='ascii'?>\n"
|
||||
b'<body>tãg</body>')
|
||||
|
||||
- def test_issue3151(self):
|
||||
- e = ET.XML('<prefix:localname xmlns:prefix="${stuff}"/>')
|
||||
- self.assertEqual(e.tag, '{${stuff}}localname')
|
||||
- t = ET.ElementTree(e)
|
||||
- self.assertEqual(ET.tostring(e), b'<ns0:localname xmlns:ns0="${stuff}" />')
|
||||
-
|
||||
def test_issue6565(self):
|
||||
elem = ET.XML("<body><tag/></body>")
|
||||
self.assertEqual(summarize_list(elem), ['tag'])
|
Loading…
Add table
Reference in a new issue