Initialize for python

commit 6d0197fab7

58 changed files with 5928 additions and 0 deletions
.gitignore (vendored, Normal file, 1 line added)
@@ -0,0 +1 @@
Python-2.7.18.tar.xz
.python.metadata (Normal file, 1 line added)
@@ -0,0 +1 @@
cfbd3c771d680101ad126910657417e8b8895d39ee38e856569afc9b57b22ec8 Python-2.7.18.tar.xz
CVE-2015-20107-mailcap-unsafe-filenames.patch (Normal file, 98 lines added)
@@ -0,0 +1,98 @@
---
Doc/library/mailcap.rst | 13 +++++++++++++
Lib/mailcap.py | 28 ++++++++++++++++++++++++++--
2 files changed, 39 insertions(+), 2 deletions(-)

--- a/Doc/library/mailcap.rst
+++ b/Doc/library/mailcap.rst
@@ -55,6 +55,19 @@ standard. However, mailcap files are su
will automatically check such conditions and skip the entry if the check fails.

+.. versionchanged:: 3.11
+
+ To prevent security issues with shell metacharacters (symbols that have
+ special effects in a shell command line), ``findmatch`` will refuse
+ to inject ASCII characters other than alphanumerics and ``@+=:,./-_``
+ into the returned command line.
+
+ If a disallowed character appears in *filename*, ``findmatch`` will always
+ return ``(None, None)`` as if no entry was found.
+ If such a character appears elsewhere (a value in *plist* or in *MIMEtype*),
+ ``findmatch`` will ignore all mailcap entries which use that value.
+ A :mod:`warning <warnings>` will be raised in either case.
+
.. function:: getcaps()

Returns a dictionary mapping MIME types to a list of mailcap file entries. This
--- a/Lib/mailcap.py
+++ b/Lib/mailcap.py
@@ -1,9 +1,17 @@
"""Mailcap file handling. See RFC 1524."""

import os
+import warnings
+import re

__all__ = ["getcaps","findmatch"]

+_find_unsafe = re.compile(ur'[^\xa1-\U0010FFFF\w@+=:,./-]').search
+
+class UnsafeMailcapInput(Warning):
+ """Warning raised when refusing unsafe input"""
+
+
# Part 1: top-level interface.

def getcaps():
@@ -18,6 +26,10 @@ def getcaps():
"""
caps = {}
for mailcap in listmailcapfiles():
+ if _find_unsafe(mailcap):
+ msg = "Refusing to use mailcap with filename %r. Use a safe temporary filename." % (mailcap,)
+ warnings.warn(msg, UnsafeMailcapInput)
+ return None, None
try:
fp = open(mailcap, 'r')
except IOError:
@@ -149,10 +161,13 @@ def findmatch(caps, MIMEtype, key='view'
for e in entries:
if 'test' in e:
test = subst(e['test'], filename, plist)
+ if test is None:
+ continue
if test and os.system(test) != 0:
continue
command = subst(e[key], MIMEtype, filename, plist)
- return command, e
+ if command is not None:
+ return command, e
return None, None

def lookup(caps, MIMEtype, key=None):
@@ -184,6 +199,10 @@ def subst(field, MIMEtype, filename, pli
elif c == 's':
res = res + filename
elif c == 't':
+ if _find_unsafe(MIMEtype):
+ msg = "Refusing to substitute MIME type %r into a shell command." % (MIMEtype,)
+ warnings.warn(msg, UnsafeMailcapInput)
+ return None
res = res + MIMEtype
elif c == '{':
start = i
@@ -191,7 +210,12 @@ def subst(field, MIMEtype, filename, pli
i = i+1
name = field[start:i]
i = i+1
- res = res + findparam(name, plist)
+ param = findparam(name, plist)
+ if _find_unsafe(param):
+ msg = "Refusing to substitute parameter %r (%s) into a shell command" % (param, name)
+ warnings.warn(msg, UnsafeMailcapInput)
+ return None
+ res = res + param
# XXX To do:
# %n == number of parts if type is multipart/*
# %F == list of alternating type and filename for parts
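As a quick illustration of what the patched subst()/findmatch() refuse, here is a minimal sketch; the caps dictionary and the xmpeg command are made up for the example, and the results shown are those of a 2.7 build with this patch applied:

    import mailcap

    # Hand-built mailcap dictionary (hypothetical entry), so the example does
    # not depend on the contents of /etc/mailcap.
    caps = {'video/mpeg': [{'view': 'xmpeg %s %{title}', 'lineno': 0}]}

    # A parameter value carrying shell metacharacters: the patched subst()
    # refuses to inject it, findmatch() skips the entry and falls through to
    # (None, None), and an UnsafeMailcapInput warning is emitted.
    command, entry = mailcap.findmatch(caps, 'video/mpeg',
                                       filename='/tmp/clip.mpeg',
                                       plist=['title=$(reboot)'])
    print command, entry   # None None on a patched interpreter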
CVE-2017-18207.patch (Normal file, 22 lines added)
@@ -0,0 +1,22 @@
From ae0ed14794ced2c51c822fc6f0d3ca92064619dd Mon Sep 17 00:00:00 2001
From: BT123 <abcdyzhang@163.com>
Date: Fri, 17 Nov 2017 16:45:45 +0800
Subject: [PATCH] bug in wave.py

---
Lib/wave.py | 2 ++
1 file changed, 2 insertions(+)

Index: Python-2.7.13/Lib/wave.py
===================================================================
--- Python-2.7.13.orig/Lib/wave.py 2018-06-07 17:00:25.370728844 +0000
+++ Python-2.7.13/Lib/wave.py 2018-06-07 17:02:51.768202800 +0000
@@ -272,6 +272,8 @@ class Wave_read:
self._sampwidth = (sampwidth + 7) // 8
else:
raise Error, 'unknown format: %r' % (wFormatTag,)
+ if self._nchannels == 0:
+ raise Error, "The audio file in wav format should have at least one channel!"
self._framesize = self._nchannels * self._sampwidth
self._comptype = 'NONE'
self._compname = 'not compressed'
CVE-2019-20907_tarfile-inf-loop.patch (Normal file, 42 lines added)
@@ -0,0 +1,42 @@
From 1fa6ef2bc7cee1c8e088dd8b397d9b2d54036dbc Mon Sep 17 00:00:00 2001
From: Rajarishi Devarajan <rishi93dev@gmail.com>
Date: Sun, 12 Jul 2020 23:47:42 +0200
Subject: [PATCH 1/4] bpo-39017 Fix infinite loop in the tarfile module

Add a check for length = 0 in the _proc_pax function to avoid running into an infinite loop
---
Lib/tarfile.py | 2 ++
Lib/test/test_tarfile.py | 5 +++++
Misc/NEWS.d/next/Library/2020-07-12-22-16-58.bpo-39017.x3Cg-9.rst | 1 +
3 files changed, 8 insertions(+)
create mode 100644 Lib/test/recursion.tar

--- a/Lib/tarfile.py
+++ b/Lib/tarfile.py
@@ -1400,6 +1400,8 @@ class TarInfo(object):

length, keyword = match.groups()
length = int(length)
+ if length == 0:
+ raise InvalidHeaderError("invalid header")
value = buf[match.end(2) + 1:match.start(1) + length - 1]

keyword = keyword.decode("utf8")
--- a/Lib/test/test_tarfile.py
+++ b/Lib/test/test_tarfile.py
@@ -321,6 +321,11 @@ class CommonReadTest(ReadTest):
with self.assertRaisesRegexp(tarfile.ReadError, "unexpected end of data"):
tar.extractfile(t).read()

+ def test_length_zero_header(self):
+ # bpo-39017 (CVE-2019-20907): reading a zero-length header should fail
+ # with an exception
+ self.assertRaises(tarfile.ReadError, tarfile.open, test_support.findfile('recursion.tar'))
+

class MiscReadTest(CommonReadTest):
taropen = tarfile.TarFile.taropen
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2020-07-12-22-16-58.bpo-39017.x3Cg-9.rst
@@ -0,0 +1 @@
+Avoid infinite loop when reading specially crafted TAR files using the tarfile module (CVE-2019-20907).
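For code that feeds untrusted archives to tarfile, the practical effect of this patch is that a crafted pax header now surfaces as tarfile.ReadError instead of hanging; a minimal defensive sketch (the path is illustrative):

    import tarfile

    def safe_list(path):
        # With the patch, a zero-length pax record raises InvalidHeaderError
        # internally, which tarfile.open() reports as ReadError instead of
        # spinning forever in _proc_pax().
        try:
            tar = tarfile.open(path)
        except tarfile.ReadError as exc:
            print 'rejected %s: %s' % (path, exc)
            return []
        try:
            return tar.getnames()
        finally:
            tar.close()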
CVE-2019-5010-null-defer-x509-cert-DOS.patch (Normal file, 58 lines added)
@@ -0,0 +1,58 @@
From 280917872027ee991416d2623fc16ff1eed48f50 Mon Sep 17 00:00:00 2001
From: Christian Heimes <christian@python.org>
Date: Tue, 15 Jan 2019 23:47:42 +0100
Subject: [PATCH] bpo-35746: Fix segfault in ssl's cert parser (GH-11569)

Fix a NULL pointer deref in ssl module. The cert parser did not handle CRL
distribution points with empty DP or URI correctly. A malicious or buggy
certificate can result into segfault.

Signed-off-by: Christian Heimes <christian@python.org>

https://bugs.python.org/issue35746
(cherry picked from commit a37f52436f9aa4b9292878b72f3ff1480e2606c3)

Co-authored-by: Christian Heimes <christian@python.org>
---
Lib/test/test_ssl.py | 21 ++++++++++
Misc/NEWS.d/next/Security/2019-01-15-18-16-05.bpo-35746.nMSd0j.rst | 3 +
2 files changed, 24 insertions(+)
create mode 100644 Lib/test/talos-2019-0758.pem
create mode 100644 Misc/NEWS.d/next/Security/2019-01-15-18-16-05.bpo-35746.nMSd0j.rst

--- a/Lib/test/test_ssl.py
+++ b/Lib/test/test_ssl.py
@@ -287,6 +287,27 @@ class BasicSocketTests(unittest.TestCase
}
)

+ def test_parse_cert_CVE_2019_5010(self):
+ p = ssl._ssl._test_decode_cert(TALOS_INVALID_CRLDP)
+ if support.verbose:
+ sys.stdout.write("\n" + pprint.pformat(p) + "\n")
+ self.assertEqual(
+ p,
+ {
+ 'issuer': (
+ (('countryName', 'UK'),), (('commonName', 'cody-ca'),)),
+ 'notAfter': 'Jun 14 18:00:58 2028 GMT',
+ 'notBefore': 'Jun 18 18:00:58 2018 GMT',
+ 'serialNumber': '02',
+ 'subject': ((('countryName', 'UK'),),
+ (('commonName',
+ 'codenomicon-vm-2.test.lal.cisco.com'),)),
+ 'subjectAltName': (
+ ('DNS', 'codenomicon-vm-2.test.lal.cisco.com'),),
+ 'version': 3
+ }
+ )
+
def test_parse_cert_CVE_2013_4238(self):
p = ssl._ssl._test_decode_cert(NULLBYTECERT)
if support.verbose:
--- /dev/null
+++ b/Misc/NEWS.d/next/Security/2019-01-15-18-16-05.bpo-35746.nMSd0j.rst
@@ -0,0 +1,3 @@
+[CVE-2019-5010] Fix a NULL pointer deref in ssl module. The cert parser did
+not handle CRL distribution points with empty DP or URI correctly. A
+malicious or buggy certificate can result into segfault.
CVE-2019-9674-zip-bomb.patch (Normal file, 69 lines added)
@@ -0,0 +1,69 @@
From b73fe12d4d85fc92e4b9658e417046b68fb68ecc Mon Sep 17 00:00:00 2001
From: nick sung <sungboss2004@gmail.com>
Date: Fri, 17 May 2019 15:45:31 +0800
Subject: [PATCH 1/4] bpo-36260: Add pitfalls to zipfile module documentation

We saw vulnerability warning description (including zip bomb) in Doc/library/xml.rst file.
This gave us the idea of documentation improvement.

So, we moved a little bit forward :P
And the doc patch can be found (pr).
---
Doc/library/zipfile.rst | 29 +++++++++++++++++++++++++++++
1 file changed, 29 insertions(+)

--- a/Doc/library/zipfile.rst
+++ b/Doc/library/zipfile.rst
@@ -553,5 +553,47 @@ Command-line options

Test whether the zipfile is valid or not.

+Decompression pitfalls
+----------------------
+
+The extraction in zipfile module might fail due to some pitfalls
+listed below.
+
+From file itself
+~~~~~~~~~~~~~~~~
+
+Decompression may fail due to incorrect password / CRC checksum
+/ ZIP format or unsupported compression method / decryption.
+
+File System limitations
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Exceeding limitations on different file systems can cause
+decompression failed. Such as allowable characters in the
+directory entries, length of the file name, length of the
+pathname, size of a single file, and number of files, etc.
+
+Resources limitations
+~~~~~~~~~~~~~~~~~~~~~
+
+The lack of memory or disk volume would lead to decompression
+failed. For example, decompression bombs (aka `ZIP bomb`_) apply
+to zipfile library that can cause disk volume exhaustion.
+
+Interruption
+~~~~~~~~~~~~
+
+Interruption during the decompression, such as pressing control-C
+or killing the decompression process may result in incomplete
+decompression of the archive.
+
+Default behaviors of extraction
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Not knowing the default extraction behaviors can cause unexpected
+decompression results. For example, when extracting the same
+archive twice, it overwrites files without asking.
+
+
+.. _ZIP bomb: https://en.wikipedia.org/wiki/Zip_bomb
.. _PKZIP Application Note: https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT
--- /dev/null
+++ b/Misc/NEWS.d/next/Documentation/2019-06-04-09-29-00.bpo-36260.WrGuc-.rst
@@ -0,0 +1 @@
+Add decompression pitfalls to zipfile module documentation.
\ No newline at end of file
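The pitfalls section above is documentation only; callers still have to guard against decompression bombs themselves. A minimal sketch of one such guard, using an arbitrary example budget of 100 MB of declared uncompressed data:

    import zipfile

    MAX_UNCOMPRESSED = 100 * 1024 * 1024  # example budget: 100 MB

    def extract_with_budget(path, dest):
        zf = zipfile.ZipFile(path)
        try:
            # ZipInfo.file_size is the declared uncompressed size; summing it
            # before extraction catches the classic disk-exhaustion zip bomb.
            total = sum(info.file_size for info in zf.infolist())
            if total > MAX_UNCOMPRESSED:
                raise RuntimeError('archive would expand to %d bytes' % total)
            zf.extractall(dest)
        finally:
            zf.close()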
CVE-2020-26116-httplib-header-injection.patch (Normal file, 77 lines added)
@@ -0,0 +1,77 @@
---
Lib/httplib.py | 15 +++++++++++++++
Lib/test/test_httplib.py | 22 +++++++++++++++++++++-
2 files changed, 36 insertions(+), 1 deletion(-)

--- a/Lib/httplib.py
+++ b/Lib/httplib.py
@@ -262,6 +262,10 @@ _contains_disallowed_url_pchar_re = re.c
_METHODS_EXPECTING_BODY = {'PATCH', 'POST', 'PUT'}


+# These characters are not allowed within HTTP method names
+# to prevent http header injection.
+_contains_disallowed_method_pchar_re = re.compile('[\x00-\x1f]')
+
class HTTPMessage(mimetools.Message):

def addheader(self, key, value):
@@ -940,6 +944,8 @@ class HTTPConnection:
else:
raise CannotSendRequest()

+ self._validate_method(method)
+
# Save the method for use later in the response phase
self._method = method

@@ -1179,6 +1185,15 @@ class HTTPConnection:
response.close()
raise

+ def _validate_method(self, method):
+ """Validate a method name for putrequest."""
+ # prevent http header injection
+ match = _contains_disallowed_method_pchar_re.search(method)
+ if match:
+ raise ValueError(
+ "method can't contain control characters. %r (found at "
+ "least %r)" % (method, match.group()))
+

class HTTP:
"Compatibility class with httplib.py from 1.5."
--- a/Lib/test/test_httplib.py
+++ b/Lib/test/test_httplib.py
@@ -1007,10 +1007,30 @@ class TunnelTests(TestCase):
self.assertTrue('Host: destination.com' in conn.sock.data)


+class HttpMethodTests(TestCase):
+ def test_invalid_method_names(self):
+ methods = (
+ 'GET\r',
+ 'POST\n',
+ 'PUT\n\r',
+ 'POST\nValue',
+ 'POST\nHOST:abc',
+ 'GET\nrHost:abc\n',
+ 'POST\rRemainder:\r',
+ 'GET\rHOST:\n',
+ '\nPUT'
+ )
+
+ for method in methods:
+ conn = httplib.HTTPConnection('example.com')
+ conn.sock = FakeSocket(None)
+ self.assertRaises(ValueError, conn.request, method=method, url="/")
+
+
@test_support.reap_threads
def test_main(verbose=None):
test_support.run_unittest(HeaderTests, OfflineTest, BasicTest, TimeoutTest,
- HTTPTest, HTTPSTest, SourceAddressTest,
+ HTTPTest, HttpMethodTests, HTTPSTest, SourceAddressTest,
TunnelTests)

if __name__ == '__main__':
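With the patch applied, the new _validate_method() check is visible from ordinary client code; a minimal sketch (the host name is illustrative and is never contacted, because the ValueError is raised before anything is sent):

    import httplib

    conn = httplib.HTTPConnection('example.com')
    try:
        # The embedded CR/LF would otherwise smuggle an extra header line.
        conn.request('GET\r\nHost: evil', '/')
    except ValueError as exc:
        print 'rejected:', exc   # "method can't contain control characters..."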
CVE-2020-8492-urllib-ReDoS.patch (Normal file, 35 lines added)
@@ -0,0 +1,35 @@
From 34e25a97709a05f7c804036dd1e16afda6bdfa33 Mon Sep 17 00:00:00 2001
From: Victor Stinner <vstinner@python.org>
Date: Thu, 30 Jan 2020 16:13:03 +0100
Subject: [PATCH 1/2] bpo-39503: Fix urllib basic auth regex

The AbstractBasicAuthHandler class of the urllib.request module uses
an inefficient regular expression which can be exploited by an
attacker to cause a denial of service. Fix the regex to prevent the
catastrophic backtracking.

Vulnerability reported by Matt Schwager.
---
Lib/urllib2.py | 2 +-
Misc/NEWS.d/next/Security/2020-01-30-16-15-29.bpo-39503.B299Yq.rst | 4 ++++
2 files changed, 5 insertions(+), 1 deletion(-)
create mode 100644 Misc/NEWS.d/next/Security/2020-01-30-16-15-29.bpo-39503.B299Yq.rst

--- /dev/null
+++ b/Misc/NEWS.d/next/Security/2020-01-30-16-15-29.bpo-39503.B299Yq.rst
@@ -0,0 +1,4 @@
+CVE-2020-8492: The :class:`~urllib.request.AbstractBasicAuthHandler` class of the
+:mod:`urllib.request` module uses an inefficient regular expression which can
+be exploited by an attacker to cause a denial of service. Fix the regex to
+prevent the catastrophic backtracking. Vulnerability reported by Matt Schwager.
--- a/Lib/urllib2.py
+++ b/Lib/urllib2.py
@@ -856,7 +856,7 @@ class AbstractBasicAuthHandler:

# allow for double- and single-quoted realm values
# (single quotes are a violation of the RFC, but appear in the wild)
- rx = re.compile('(?:.*,)*[ \t]*([^ \t]+)[ \t]+'
+ rx = re.compile('(?:[^,]*,)*[ \t]*([^ \t]+)[ \t]+'
'realm=(["\']?)([^"\']*)\\2', re.I)

# XXX could pre-emptively send auth info already accepted (RFC 2617,
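The fix only tightens the realm-matching regex; its behavior on well-formed challenges is unchanged. A small sketch of the pattern the handler parses (the pattern is copied from the patched line, the challenge string is an example):

    import re

    # Patched pattern: '(?:.*,)*' became '(?:[^,]*,)*', so a long comma-free
    # prefix can no longer trigger catastrophic backtracking.
    rx = re.compile('(?:[^,]*,)*[ \t]*([^ \t]+)[ \t]+'
                    'realm=(["\']?)([^"\']*)\\2', re.I)

    m = rx.search('Basic realm="users@example.com"')
    print m.group(1), m.group(3)   # Basic users@example.com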
CVE-2021-23336-only-amp-as-query-sep.patch (Normal file, 389 lines added)
@@ -0,0 +1,389 @@
From 5c17dfc5d70ce88be99bc5769b91ce79d7a90d61 Mon Sep 17 00:00:00 2001
From: Senthil Kumaran <senthil@uthcode.com>
Date: Mon, 15 Feb 2021 11:16:43 -0800
Subject: [PATCH] [3.6] bpo-42967: only use '&' as a query string separator
(GH-24297) (GH-24532)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

bpo-42967: [security] Address a web cache-poisoning issue reported in
urllib.parse.parse_qsl().

urllib.parse will only us "&" as query string separator by default
instead of both ";" and "&" as allowed in earlier versions. An optional
argument seperator with default value "&" is added to specify the
separator.

Co-authored-by: Éric Araujo <merwok@netwok.org>
Co-authored-by: Ken Jin <28750310+Fidget-Spinner@users.noreply.github.com>
Co-authored-by: Adam Goldschmidt <adamgold7@gmail.com>
---
Doc/library/cgi.rst | 8 ++-
Doc/library/urllib.parse.rst | 22 +++++-
Doc/whatsnew/3.6.rst | 13 ++++
Lib/cgi.py | 17 +++--
Lib/test/test_cgi.py | 29 ++++++--
Lib/test/test_urlparse.py | 68 +++++++++++++------
Lib/urllib/parse.py | 19 ++++--
.../2021-02-14-15-59-16.bpo-42967.YApqDS.rst | 1 +
8 files changed, 134 insertions(+), 43 deletions(-)
create mode 100644 Misc/NEWS.d/next/Security/2021-02-14-15-59-16.bpo-42967.YApqDS.rst

--- a/Doc/library/cgi.rst
+++ b/Doc/library/cgi.rst
@@ -287,10 +287,11 @@ algorithms implemented in this module in

.. function:: parse(fp[, environ[, keep_blank_values[, strict_parsing]]])

- Parse a query in the environment or from a file (the file defaults to
- ``sys.stdin`` and environment defaults to ``os.environ``). The *keep_blank_values* and *strict_parsing* parameters are
- passed to :func:`urlparse.parse_qs` unchanged.
-
+ Parse a query in the environment or from a file (the file
+ defaults to ``sys.stdin`` and environment defaults to
+ ``os.environ``). The *keep_blank_values*, *strict_parsing*,
+ and *separator* parameters are passed to
+ :func:`urlparse.parse_qs` unchanged.

.. function:: parse_qs(qs[, keep_blank_values[, strict_parsing[, max_num_fields]]])

@@ -316,6 +317,9 @@ algorithms implemented in this module in
Note that this does not parse nested multipart parts --- use
:class:`FieldStorage` for that.

+ .. versionchanged:: 3.6.13
+ Added the *separator* parameter.
+

.. function:: parse_header(string)

--- a/Lib/cgi.py
+++ b/Lib/cgi.py
@@ -121,7 +121,8 @@ log = initlog # The current lo
# 0 ==> unlimited input
maxlen = 0

-def parse(fp=None, environ=os.environ, keep_blank_values=0, strict_parsing=0):
+def parse(fp=None, environ=os.environ, keep_blank_values=0,
+ strict_parsing=0, separator='&'):
"""Parse a query in the environment or from a file (default stdin)

Arguments, all optional:
@@ -140,6 +141,9 @@ def parse(fp=None, environ=os.environ, k
strict_parsing: flag indicating what to do with parsing errors.
If false (the default), errors are silently ignored.
If true, errors raise a ValueError exception.
+
+ separator: str. The symbol to use for separating the query arguments.
+ Defaults to &.
"""
if fp is None:
fp = sys.stdin
@@ -171,7 +175,8 @@ def parse(fp=None, environ=os.environ, k
else:
qs = ""
environ['QUERY_STRING'] = qs # XXX Shouldn't, really
- return urlparse.parse_qs(qs, keep_blank_values, strict_parsing)
+ return urlparse.parse_qs(qs, keep_blank_values, strict_parsing,
+ separator=separator)


# parse query string function called from urlparse,
@@ -395,7 +400,7 @@ class FieldStorage:

def __init__(self, fp=None, headers=None, outerboundary="",
environ=os.environ, keep_blank_values=0, strict_parsing=0,
- max_num_fields=None):
+ max_num_fields=None, separator='&'):
"""Constructor. Read multipart/* until last part.

Arguments, all optional:
@@ -430,6 +435,7 @@ class FieldStorage:
self.keep_blank_values = keep_blank_values
self.strict_parsing = strict_parsing
self.max_num_fields = max_num_fields
+ self.separator = separator
if 'REQUEST_METHOD' in environ:
method = environ['REQUEST_METHOD'].upper()
self.qs_on_post = None
@@ -613,7 +619,9 @@ class FieldStorage:
if self.qs_on_post:
qs += '&' + self.qs_on_post
query = urlparse.parse_qsl(qs, self.keep_blank_values,
- self.strict_parsing, self.max_num_fields)
+ self.strict_parsing,
+ self.max_num_fields,
+ separator=self.separator)
self.list = [MiniFieldStorage(key, value) for key, value in query]
self.skip_lines()

@@ -629,7 +637,8 @@ class FieldStorage:
query = urlparse.parse_qsl(self.qs_on_post,
self.keep_blank_values,
self.strict_parsing,
- self.max_num_fields)
+ self.max_num_fields,
+ self.separator)
self.list.extend(MiniFieldStorage(key, value)
for key, value in query)
FieldStorageClass = None
@@ -642,7 +651,8 @@ class FieldStorage:
klass = self.FieldStorageClass or self.__class__
part = klass(self.fp, {}, ib,
environ, keep_blank_values, strict_parsing,
- max_num_fields)
+ max_num_fields,
+ self.separator)

# Throw first part away
while not part.done:
--- a/Lib/test/test_cgi.py
+++ b/Lib/test/test_cgi.py
@@ -61,12 +61,9 @@ parse_strict_test_cases = [
("", ValueError("bad query field: ''")),
("&", ValueError("bad query field: ''")),
("&&", ValueError("bad query field: ''")),
- (";", ValueError("bad query field: ''")),
- (";&;", ValueError("bad query field: ''")),
# Should the next few really be valid?
("=", {}),
("=&=", {}),
- ("=;=", {}),
# This rest seem to make sense
("=a", {'': ['a']}),
("&=a", ValueError("bad query field: ''")),
@@ -81,8 +78,6 @@ parse_strict_test_cases = [
("a=a+b&b=b+c", {'a': ['a b'], 'b': ['b c']}),
("a=a+b&a=b+a", {'a': ['a b', 'b a']}),
("x=1&y=2.0&z=2-3.%2b0", {'x': ['1'], 'y': ['2.0'], 'z': ['2-3.+0']}),
- ("x=1;y=2.0&z=2-3.%2b0", {'x': ['1'], 'y': ['2.0'], 'z': ['2-3.+0']}),
- ("x=1;y=2.0;z=2-3.%2b0", {'x': ['1'], 'y': ['2.0'], 'z': ['2-3.+0']}),
("Hbc5161168c542333633315dee1182227:key_store_seqid=400006&cuyer=r&view=bustomer&order_id=0bb2e248638833d48cb7fed300000f1b&expire=964546263&lobale=en-US&kid=130003.300038&ss=env",
{'Hbc5161168c542333633315dee1182227:key_store_seqid': ['400006'],
'cuyer': ['r'],
@@ -188,6 +183,30 @@ class CgiTests(unittest.TestCase):
self.assertEqual(expect[k], v)
self.assertItemsEqual(expect.values(), d.values())

+ def test_separator(self):
+ parse_semicolon = [
+ ("x=1;y=2.0", {'x': ['1'], 'y': ['2.0']}),
+ ("x=1;y=2.0;z=2-3.%2b0", {'x': ['1'], 'y': ['2.0'], 'z': ['2-3.+0']}),
+ (";", ValueError("bad query field: ''")),
+ (";;", ValueError("bad query field: ''")),
+ ("=;a", ValueError("bad query field: 'a'")),
+ (";b=a", ValueError("bad query field: ''")),
+ ("b;=a", ValueError("bad query field: 'b'")),
+ ("a=a+b;b=b+c", {'a': ['a b'], 'b': ['b c']}),
+ ("a=a+b;a=b+a", {'a': ['a b', 'b a']}),
+ ]
+ for orig, expect in parse_semicolon:
+ env = {'QUERY_STRING': orig}
+ fs = cgi.FieldStorage(separator=';', environ=env)
+ if isinstance(expect, dict):
+ for key in expect.keys():
+ expect_val = expect[key]
+ self.assertIn(key, fs)
+ if len(expect_val) > 1:
+ self.assertEqual(fs.getvalue(key), expect_val)
+ else:
+ self.assertEqual(fs.getvalue(key), expect_val[0])
+
def test_log(self):
cgi.log("Testing")

--- a/Lib/test/test_urlparse.py
+++ b/Lib/test/test_urlparse.py
@@ -24,16 +24,10 @@ parse_qsl_test_cases = [
("&a=b", [('a', 'b')]),
("a=a+b&b=b+c", [('a', 'a b'), ('b', 'b c')]),
("a=1&a=2", [('a', '1'), ('a', '2')]),
- (";", []),
- (";;", []),
- (";a=b", [('a', 'b')]),
- ("a=a+b;b=b+c", [('a', 'a b'), ('b', 'b c')]),
- ("a=1;a=2", [('a', '1'), ('a', '2')]),
- (b";", []),
- (b";;", []),
- (b";a=b", [(b'a', b'b')]),
- (b"a=a+b;b=b+c", [(b'a', b'a b'), (b'b', b'b c')]),
- (b"a=1;a=2", [(b'a', b'1'), (b'a', b'2')]),
+ (";a=b", [(';a', 'b')]),
+ ("a=a+b;b=b+c", [('a', 'a b;b=b c')]),
+ (b";a=b", [(b';a', b'b')]),
+ (b"a=a+b;b=b+c", [(b'a', b'a b;b=b c')]),
]

parse_qs_test_cases = [
@@ -57,16 +51,10 @@ parse_qs_test_cases = [
(b"&a=b", {b'a': [b'b']}),
(b"a=a+b&b=b+c", {b'a': [b'a b'], b'b': [b'b c']}),
(b"a=1&a=2", {b'a': [b'1', b'2']}),
- (";", {}),
- (";;", {}),
- (";a=b", {'a': ['b']}),
- ("a=a+b;b=b+c", {'a': ['a b'], 'b': ['b c']}),
- ("a=1;a=2", {'a': ['1', '2']}),
- (b";", {}),
- (b";;", {}),
- (b";a=b", {b'a': [b'b']}),
- (b"a=a+b;b=b+c", {b'a': [b'a b'], b'b': [b'b c']}),
- (b"a=1;a=2", {b'a': [b'1', b'2']}),
+ (";a=b", {';a': ['b']}),
+ ("a=a+b;b=b+c", {'a': ['a b;b=b c']}),
+ (b";a=b", {b';a': [b'b']}),
+ (b"a=a+b;b=b+c", {b'a':[ b'a b;b=b c']}),
]

class UrlParseTestCase(unittest.TestCase):
@@ -665,6 +653,43 @@ class UrlParseTestCase(unittest.TestCase
"under NFKC normalization")
self.assertIsInstance(cm.exception.args[0], str)

+ def test_parse_qs_separator(self):
+ parse_qs_semicolon_cases = [
+ (";", {}),
+ (";;", {}),
+ (";a=b", {'a': ['b']}),
+ ("a=a+b;b=b+c", {'a': ['a b'], 'b': ['b c']}),
+ ("a=1;a=2", {'a': ['1', '2']}),
+ (b";", {}),
+ (b";;", {}),
+ (b";a=b", {b'a': [b'b']}),
+ (b"a=a+b;b=b+c", {b'a': [b'a b'], b'b': [b'b c']}),
+ (b"a=1;a=2", {b'a': [b'1', b'2']}),
+ ]
+ for orig, expect in parse_qs_semicolon_cases:
+ result = urlparse.parse_qs(orig, separator=';')
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
+
+
+ def test_parse_qsl_separator(self):
+ parse_qsl_semicolon_cases = [
+ (";", []),
+ (";;", []),
+ (";a=b", [('a', 'b')]),
+ ("a=a+b;b=b+c", [('a', 'a b'), ('b', 'b c')]),
+ ("a=1;a=2", [('a', '1'), ('a', '2')]),
+ (b";", []),
+ (b";;", []),
+ (b";a=b", [(b'a', b'b')]),
+ (b"a=a+b;b=b+c", [(b'a', b'a b'), (b'b', b'b c')]),
+ (b"a=1;a=2", [(b'a', b'1'), (b'a', b'2')]),
+ ]
+ for orig, expect in parse_qsl_semicolon_cases:
+ result = urlparse.parse_qsl(orig, separator=';')
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
+
+
+
def test_main():
test_support.run_unittest(UrlParseTestCase)

--- /dev/null
+++ b/Misc/NEWS.d/next/Security/2021-02-14-15-59-16.bpo-42967.YApqDS.rst
@@ -0,0 +1 @@
+Fix web cache poisoning vulnerability by defaulting the query args separator to ``&``, and allowing the user to choose a custom separator.
--- a/Lib/test/test_urllib2.py
+++ b/Lib/test/test_urllib2.py
@@ -1331,7 +1331,7 @@ class MiscTests(unittest.TestCase, FakeH
# level 'def urlopen()' function defined in this... (quite ugly)
# test suite. They use different url opening codepaths. Plain
# urlopen uses FancyURLOpener which goes via a codepath that
- # calls urllib.parse.quote() on the URL which makes all of the
+ # calls urlparse.quote() on the URL which makes all of the
# above attempts at injection within the url _path_ safe.
escaped_char_repr = repr(char).replace('\\', r'\\')
InvalidURL = httplib.InvalidURL
@@ -1354,7 +1354,7 @@ class MiscTests(unittest.TestCase, FakeH
# level 'def urlopen()' function defined in this... (quite ugly)
# test suite. They use different url opening codepaths. Plain
# urlopen uses FancyURLOpener which goes via a codepath that
- # calls urllib.parse.quote() on the URL which makes all of the
+ # calls urlparse.quote() on the URL which makes all of the
# above attempts at injection within the url _path_ safe.
InvalidURL = httplib.InvalidURL
with self.assertRaisesRegexp(InvalidURL,
--- a/Misc/NEWS
+++ b/Misc/NEWS
@@ -4246,7 +4246,7 @@ Library
- bpo-18167: cgi.FieldStorage no longer fails to handle multipart/form-data
when \r\n appears at end of 65535 bytes without other newlines.

-- bpo-17403: urllib.parse.robotparser normalizes the urls before adding to
+- bpo-17403: urlparse.robotparser normalizes the urls before adding to
ruleline. This helps in handling certain types invalid urls in a
conservative manner. Patch contributed by Mher Movsisyan.

@@ -8271,7 +8271,7 @@ Core and Builtins
Library
-------

-- bpo-7904: Changes to urllib.parse.urlsplit to handle schemes as defined by
+- bpo-7904: Changes to urlparse.urlsplit to handle schemes as defined by
RFC3986. Anything before :// is considered a scheme and is followed by an
authority (or netloc) and by '/' led path, which is optional.

--- a/Lib/urlparse.py
+++ b/Lib/urlparse.py
@@ -382,7 +382,8 @@ def unquote(s):
append(item)
return ''.join(res)

-def parse_qs(qs, keep_blank_values=0, strict_parsing=0, max_num_fields=None):
+def parse_qs(qs, keep_blank_values=0, strict_parsing=0,
+ max_num_fields=None, separator='&'):
"""Parse a query given as a string argument.

Arguments:
@@ -402,17 +403,21 @@ def parse_qs(qs, keep_blank_values=0, st

max_num_fields: int. If set, then throws a ValueError if there
are more than n fields read by parse_qsl().
+
+ separator: str. The symbol to use for separating the query arguments.
+ Defaults to &.
"""
dict = {}
for name, value in parse_qsl(qs, keep_blank_values, strict_parsing,
- max_num_fields):
+ max_num_fields, separator=separator):
if name in dict:
dict[name].append(value)
else:
dict[name] = [value]
return dict

-def parse_qsl(qs, keep_blank_values=0, strict_parsing=0, max_num_fields=None):
+def parse_qsl(qs, keep_blank_values=0, strict_parsing=0,
+ max_num_fields=None, separator='&'):
"""Parse a query given as a string argument.

Arguments:
@@ -432,17 +437,23 @@ def parse_qsl(qs, keep_blank_values=0, s
max_num_fields: int. If set, then throws a ValueError if there
are more than n fields read by parse_qsl().

+ separator: str. The symbol to use for separating the query arguments.
+ Defaults to &.
+
Returns a list, as G-d intended.
"""
# If max_num_fields is defined then check that the number of fields
# is less than max_num_fields. This prevents a memory exhaustion DOS
# attack via post bodies with many fields.
+ if not separator or (not isinstance(separator, (str, bytes))):
+ raise ValueError("Separator must be of type string or bytes.")
+
if max_num_fields is not None:
- num_fields = 1 + qs.count('&') + qs.count(';')
+ num_fields = 1 + qs.count(separator)
if max_num_fields < num_fields:
raise ValueError('Max number of fields exceeded')

- pairs = [s2 for s1 in qs.split('&') for s2 in s1.split(';')]
+ pairs = [s1 for s1 in qs.split(separator)]
r = []
for name_value in pairs:
if not name_value and not strict_parsing:
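After this change, parse_qs/parse_qsl in the Python 2 urlparse module split on '&' only; callers that really receive semicolon-separated data must opt in explicitly. A minimal sketch of the patched behavior:

    import urlparse

    # Default: ';' is no longer a separator, so it stays inside the value.
    print urlparse.parse_qs('a=1;b=2&c=3')
    # {'a': ['1;b=2'], 'c': ['3']}

    # Explicit opt-in for legacy semicolon-separated queries.
    print urlparse.parse_qs('a=1;b=2', separator=';')
    # {'a': ['1'], 'b': ['2']}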
CVE-2021-28861-double-slash-path.patch (Normal file, 76 lines added)
@@ -0,0 +1,76 @@
Index: Python-2.7.18/Lib/BaseHTTPServer.py
===================================================================
--- Python-2.7.18.orig/Lib/BaseHTTPServer.py
+++ Python-2.7.18/Lib/BaseHTTPServer.py
@@ -287,6 +287,14 @@ class BaseHTTPRequestHandler(SocketServe
return False
self.command, self.path, self.request_version = command, path, version

+ # CVE-2021-28861: The purpose of replacing '//' with '/' is to
+ # protect against open redirect attacks possibly triggered if the
+ # path starts with '//' because http clients treat //path as an
+ # absolute URI without scheme (similar to http://path) rather than
+ # a path.
+ if self.path.startswith('//'):
+ self.path = '/' + self.path.lstrip('/') # Reduce to a single /
+
# Examine the headers and look for a Connection directive
self.headers = self.MessageClass(self.rfile, 0)

Index: Python-2.7.18/Lib/test/test_httpservers.py
===================================================================
--- Python-2.7.18.orig/Lib/test/test_httpservers.py
+++ Python-2.7.18/Lib/test/test_httpservers.py
@@ -417,6 +417,52 @@ class SimpleHTTPServerTestCase(BaseTestC
self.assertEqual(response.getheader("Location"),
self.tempdir_name + "/?hi=1")

+ def test_get_dir_redirect_location_domain_injection_bug(self):
+ """Ensure //evil.co/..%2f../../X does not put //evil.co/ in Location.
+ //netloc/ in a Location header is a redirect to a new host.
+ https://github.com/python/cpython/issues/87389
+ This checks that a path resolving to a directory on our server cannot
+ resolve into a redirect to another server.
+ """
+ os.mkdir(os.path.join(self.tempdir, 'existing_directory'))
+ url = '/python.org/..%2f..%2f..%2f..%2f..%2f../%0a%0d/../' + self.tempdir_name + '/existing_directory'
+ expected_location = url + '/' # /python.org.../ single slash single prefix, trailing slash
+ # Canonicalizes to /tmp/tempdir_name/existing_directory which does
+ # exist and is a dir, triggering the 301 redirect logic.
+ response = self.request(url)
+ self.check_status_and_reason(response, 301)
+ location = response.getheader('Location')
+ self.assertEqual(location, expected_location, msg='non-attack failed!')
+
+ # //python.org... multi-slash prefix, no trailing slash
+ attack_url = '/' + url
+ response = self.request(attack_url)
+ self.check_status_and_reason(response, 301)
+ location = response.getheader('Location')
+ self.assertFalse(location.startswith('//'), msg=location)
+ self.assertEqual(location, expected_location,
+ msg='Expected Location header to start with a single / and '
+ 'end with a / as this is a directory redirect.')
+ # ///python.org... triple-slash prefix, no trailing slash
+ attack3_url = '//' + url
+ response = self.request(attack3_url)
+ self.check_status_and_reason(response, 301)
+ self.assertEqual(response.getheader('Location'), expected_location)
+
+ # If the second word in the http request (Request-URI for the http
+ # method) is a full URI, we don't worry about it, as that'll be parsed
+ # and reassembled as a full URI within BaseHTTPRequestHandler.send_head
+ # so no errant scheme-less //netloc//evil.co/ domain mixup can happen.
+ attack_scheme_netloc_2slash_url = 'https://pypi.org/' + url
+ expected_scheme_netloc_location = attack_scheme_netloc_2slash_url + '/'
+ response = self.request(attack_scheme_netloc_2slash_url)
+ self.check_status_and_reason(response, 301)
+ location = response.getheader('Location')
+ # We're just ensuring that the scheme and domain make it through, if
+ # there are or aren't multiple slashes at the start of the path that
+ # follows that isn't important in this Location: header.
+ self.assertTrue(location.startswith('https://pypi.org/'), msg=location)
+

cgi_file1 = """\
#!%s
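The core of the BaseHTTPServer change is the one-line path rewrite; the same reduction can be checked in isolation (a minimal sketch, with an illustrative hostname):

    def collapse_leading_slashes(path):
        # Mirrors the patched handler: '//evil.example//x' -> '/evil.example//x',
        # so a later directory redirect can no longer start with '//host'.
        if path.startswith('//'):
            path = '/' + path.lstrip('/')
        return path

    print collapse_leading_slashes('//evil.example/redirect')   # /evil.example/redirect
    print collapse_leading_slashes('/normal/path')              # /normal/path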
CVE-2021-3177-buf_ovrfl_PyCArg_repr.patch (Normal file, 172 lines added)
@@ -0,0 +1,172 @@
From 34df10a9a16b38d54421eeeaf73ec89828563be7 Mon Sep 17 00:00:00 2001
From: Benjamin Peterson <benjamin@python.org>
Date: Mon, 18 Jan 2021 15:11:46 -0600
Subject: [PATCH] [3.6] closes bpo-42938: Replace snprintf with Python unicode
formatting in ctypes param reprs. (GH-24250)

(cherry picked from commit 916610ef90a0d0761f08747f7b0905541f0977c7)

Co-authored-by: Benjamin Peterson <benjamin@python.org>
---
Lib/ctypes/test/test_parameters.py | 43 +++++++++++++++
.../2021-01-18-09-27-31.bpo-42938.4Zn4Mp.rst | 2 +
Modules/_ctypes/callproc.c | 55 +++++++------------
3 files changed, 66 insertions(+), 34 deletions(-)
create mode 100644 Misc/NEWS.d/next/Security/2021-01-18-09-27-31.bpo-42938.4Zn4Mp.rst

--- a/Lib/ctypes/test/test_parameters.py
+++ b/Lib/ctypes/test/test_parameters.py
@@ -1,4 +1,6 @@
-import unittest, sys
+import platform
+import sys
+import unittest
from ctypes.test import need_symbol
import test.support

@@ -206,6 +208,50 @@ class SimpleTypesTestCase(unittest.TestC
with self.assertRaises(ZeroDivisionError):
WorseStruct().__setstate__({}, b'foo')

+ def test_parameter_repr(self):
+ from ctypes import (
+ c_bool,
+ c_char,
+ c_wchar,
+ c_byte,
+ c_ubyte,
+ c_short,
+ c_ushort,
+ c_int,
+ c_uint,
+ c_long,
+ c_ulong,
+ c_longlong,
+ c_ulonglong,
+ c_float,
+ c_double,
+ c_longdouble,
+ c_char_p,
+ c_wchar_p,
+ c_void_p,
+ )
+ self.assertRegexpMatches(repr(c_bool.from_param(True)), r"^<cparam '\?' at 0x[A-Fa-f0-9]+>$")
+ self.assertEqual(repr(c_char.from_param('a')), "<cparam 'c' ('a')>")
+ self.assertRegexpMatches(repr(c_wchar.from_param('a')), r"^<cparam 'u' at 0x[A-Fa-f0-9]+>$")
+ self.assertEqual(repr(c_byte.from_param(98)), "<cparam 'b' (98)>")
+ self.assertEqual(repr(c_ubyte.from_param(98)), "<cparam 'B' (98)>")
+ self.assertEqual(repr(c_short.from_param(511)), "<cparam 'h' (511)>")
+ self.assertEqual(repr(c_ushort.from_param(511)), "<cparam 'H' (511)>")
+ self.assertRegexpMatches(repr(c_int.from_param(20000)), r"^<cparam '[li]' \(20000\)>$")
+ self.assertRegexpMatches(repr(c_uint.from_param(20000)), r"^<cparam '[LI]' \(20000\)>$")
+ self.assertRegexpMatches(repr(c_long.from_param(20000)), r"^<cparam '[li]' \(20000\)>$")
+ self.assertRegexpMatches(repr(c_ulong.from_param(20000)), r"^<cparam '[LI]' \(20000\)>$")
+ if platform.architecture()[0].startswith("64"):
+ self.assertRegexpMatches(repr(c_longlong.from_param(20000)), r"^<cparam '[liq]' \(20000\)>$")
+ self.assertRegexpMatches(repr(c_ulonglong.from_param(20000)), r"^<cparam '[LIQ]' \(20000\)>$")
+ self.assertEqual(repr(c_float.from_param(1.5)), "<cparam 'f' (1.5)>")
+ self.assertEqual(repr(c_double.from_param(1.5)), "<cparam 'd' (1.5)>")
+ self.assertEqual(repr(c_double.from_param(1e300)), "<cparam 'd' (1e+300)>")
+ self.assertRegexpMatches(repr(c_longdouble.from_param(1.5)), r"^<cparam ('d' \(1.5\)|'g' at 0x[A-Fa-f0-9]+)>$")
+ self.assertRegexpMatches(repr(c_char_p.from_param(b'hihi')), "^<cparam 'z' \(0x[A-Fa-f0-9]+\)>$")
+ self.assertRegexpMatches(repr(c_wchar_p.from_param('hihi')), "^<cparam 'Z' \(0x[A-Fa-f0-9]+\)>$")
+ self.assertRegexpMatches(repr(c_void_p.from_param(0x12)), r"^<cparam 'P' \(0x0*12\)>$")
+
################################################################

if __name__ == '__main__':
--- /dev/null
+++ b/Misc/NEWS.d/next/Security/2021-01-18-09-27-31.bpo-42938.4Zn4Mp.rst
@@ -0,0 +1,2 @@
+Avoid static buffers when computing the repr of :class:`ctypes.c_double` and
+:class:`ctypes.c_longdouble` values.
--- a/Modules/_ctypes/callproc.c
+++ b/Modules/_ctypes/callproc.c
@@ -460,50 +460,44 @@ PyCArg_dealloc(PyCArgObject *self)
static PyObject *
PyCArg_repr(PyCArgObject *self)
{
- char buffer[256];
switch(self->tag) {
case 'b':
case 'B':
- sprintf(buffer, "<cparam '%c' (%d)>",
+ return PyUnicode_FromFormat("<cparam '%c' (%d)>",
self->tag, self->value.b);
- break;
case 'h':
case 'H':
- sprintf(buffer, "<cparam '%c' (%d)>",
+ return PyUnicode_FromFormat("<cparam '%c' (%d)>",
self->tag, self->value.h);
- break;
case 'i':
case 'I':
- sprintf(buffer, "<cparam '%c' (%d)>",
+ return PyUnicode_FromFormat("<cparam '%c' (%d)>",
self->tag, self->value.i);
- break;
case 'l':
case 'L':
- sprintf(buffer, "<cparam '%c' (%ld)>",
+ return PyUnicode_FromFormat("<cparam '%c' (%ld)>",
self->tag, self->value.l);
- break;

-#ifdef HAVE_LONG_LONG
+#if defined(HAVE_LONG_LONG) && defined(LLONG_MAX)
case 'q':
case 'Q':
- sprintf(buffer,
- "<cparam '%c' (%" PY_FORMAT_LONG_LONG "d)>",
+ return PyUnicode_FromFormat("<cparam '%c' (%lld)>",
self->tag, self->value.q);
- break;
#endif
case 'd':
- sprintf(buffer, "<cparam '%c' (%f)>",
- self->tag, self->value.d);
- break;
- case 'f':
- sprintf(buffer, "<cparam '%c' (%f)>",
- self->tag, self->value.f);
- break;
+ case 'f': {
+ PyObject *f = PyFloat_FromDouble((self->tag == 'f') ? self->value.f : self->value.d);
+ if (f == NULL) {
+ return NULL;
+ }
+ PyObject *result = PyUnicode_FromFormat("<cparam '%c' (%R)>", self->tag, f);
+ Py_DECREF(f);
+ return result;
+ }

case 'c':
- sprintf(buffer, "<cparam '%c' (%c)>",
- self->tag, self->value.c);
- break;
+ return PyUnicode_FromFormat("<cparam '%c' ('%c')>",
+ self->tag, self->value.c);

/* Hm, are these 'z' and 'Z' codes useful at all?
Shouldn't they be replaced by the functionality of c_string
@@ -512,16 +506,13 @@ PyCArg_repr(PyCArgObject *self)
case 'z':
case 'Z':
case 'P':
- sprintf(buffer, "<cparam '%c' (%p)>",
+ return PyUnicode_FromFormat("<cparam '%c' (%p)>",
self->tag, self->value.p);
- break;

default:
- sprintf(buffer, "<cparam '%c' at %p>",
- self->tag, self);
- break;
+ return PyUnicode_FromFormat("<cparam '%c' at %p>",
+ (unsigned char)self->tag, (void *)self);
}
- return PyString_FromString(buffer);
}

static PyMemberDef PyCArgType_members[] = {
CVE-2021-3733-fix-ReDoS-in-request.patch (Normal file, 15 lines added)
@@ -0,0 +1,15 @@
--- a/Lib/urllib2.py
+++ b/Lib/urllib2.py
@@ -856,7 +856,7 @@ class AbstractBasicAuthHandler:

# allow for double- and single-quoted realm values
# (single quotes are a violation of the RFC, but appear in the wild)
- rx = re.compile('(?:[^,]*,)*[ \t]*([^ \t]+)[ \t]+'
+ rx = re.compile('(?:[^,]*,)*[ \t]*([^ \t,]+)[ \t]+'
'realm=(["\']?)([^"\']*)\\2', re.I)

# XXX could pre-emptively send auth info already accepted (RFC 2617,
--- /dev/null
+++ b/Misc/NEWS.d/next/Security/2021-01-31-05-28-14.bpo-43075.DoAXqO.rst
@@ -0,0 +1 @@
+Fix Regular Expression Denial of Service (ReDoS) vulnerability in :class:`urllib.request.AbstractBasicAuthHandler`. The ReDoS-vulnerable regex has quadratic worst-case complexity and it allows cause a denial of service when identifying crafted invalid RFCs. This ReDoS issue is on the client side and needs remote attackers to control the HTTP server.
@@ -0,0 +1,26 @@
--- a/Lib/httplib.py
+++ b/Lib/httplib.py
@@ -449,6 +449,7 @@ class HTTPResponse:
if status != CONTINUE:
break
# skip the header from the 100 response
+ header_count = 0
while True:
skip = self.fp.readline(_MAXLINE + 1)
if len(skip) > _MAXLINE:
@@ -458,6 +459,10 @@ class HTTPResponse:
break
if self.debuglevel > 0:
print "header:", skip
+ # CVE-2021-3737: Fix infinitely reading potential HTTP headers on a 100 Continue status response from the server
+ header_count += 1
+ if header_count > _MAXHEADERS:
+ raise HTTPException("got more than %d headers" % _MAXHEADERS)

self.status = status
self.reason = reason.strip()
--- /dev/null
+++ b/Misc/NEWS.d/next/Security/2021-05-05-17-37-04.bpo-44022.bS3XJ9.rst
@@ -0,0 +1,2 @@
+mod:`http.client` now avoids infinitely reading potential HTTP headers after a
+``100 Continue`` status response from the server.
CVE-2021-4189-ftplib-trust-PASV-resp.patch (Normal file, 135 lines added)
@@ -0,0 +1,135 @@
commit 0ab152c6b5d95caa2dc1a30fa96e10258b5f188e
Author: Gregory P. Smith <greg@krypto.org>
Date: Mon Mar 15 11:39:31 2021 -0700

bpo-43285 Make ftplib not trust the PASV response. (GH-24838)

bpo-43285: Make ftplib not trust the PASV response.

The IPv4 address value returned from the server in response to the PASV command
should not be trusted. This prevents a malicious FTP server from using the
response to probe IPv4 address and port combinations on the client network.

Instead of using the returned address, we use the IP address we're
already connected to. This is the strategy other ftp clients adopted,
and matches the only strategy available for the modern IPv6 EPSV command
where the server response must return a port number and nothing else.

For the rare user who _wants_ this ugly behavior, set a `trust_server_pasv_ipv4_address`
attribute on your `ftplib.FTP` instance to True.

---
Doc/whatsnew/2.7.rst | 10 +++
Lib/ftplib.py | 11 +++-
Lib/test/test_ftplib.py | 27 +++++++++-
Misc/NEWS.d/next/Security/2021-03-13-03-48-14.bpo-43285.g-Hah3.rst | 8 ++
4 files changed, 53 insertions(+), 3 deletions(-)

--- a/Doc/whatsnew/2.7.rst
+++ b/Doc/whatsnew/2.7.rst
@@ -166,6 +166,16 @@ The ``unittest`` module also automatical
when running tests.


+Post-EOS fixes
+==============
+
+A security fix alters the :class:`ftplib.FTP` behavior to not trust the
+IPv4 address sent from the remote server when setting up a passive data
+channel. We reuse the ftp server IP address instead. For unusual code
+requiring the old behavior, set a ``trust_server_pasv_ipv4_address``
+attribute on your FTP instance to ``True``. (See :issue:`43285`)
+
+
Python 3.1 Features
=======================

--- a/Lib/ftplib.py
+++ b/Lib/ftplib.py
@@ -107,7 +107,9 @@ class FTP:
sock = None
file = None
welcome = None
- passiveserver = 1
+ passiveserver = True
+ # Disables https://bugs.python.org/issue43285 security if set to True.
+ trust_server_pasv_ipv4_address = False

# Initialization method (called by class instantiation).
# Initialize host to localhost, port to standard ftp port
@@ -310,8 +312,13 @@ class FTP:
return sock

def makepasv(self):
+ """Internal: Does the PASV or EPSV handshake -> (address, port)"""
if self.af == socket.AF_INET:
- host, port = parse227(self.sendcmd('PASV'))
+ untrusted_host, port = parse227(self.sendcmd('PASV'))
+ if self.trust_server_pasv_ipv4_address:
+ host = untrusted_host
+ else:
+ host = self.sock.getpeername()[0]
else:
host, port = parse229(self.sendcmd('EPSV'), self.sock.getpeername())
return host, port
--- a/Lib/test/test_ftplib.py
+++ b/Lib/test/test_ftplib.py
@@ -67,6 +67,10 @@ class DummyFTPHandler(asynchat.async_cha
self.rest = None
self.next_retr_data = RETR_DATA
self.push('220 welcome')
+ # We use this as the string IPv4 address to direct the client
+ # to in response to a PASV command. To test security behavior.
+ # https://bugs.python.org/issue43285/.
+ self.fake_pasv_server_ip = '252.253.254.255'

def collect_incoming_data(self, data):
self.in_buffer.append(data)
@@ -109,7 +113,8 @@ class DummyFTPHandler(asynchat.async_cha
sock.bind((self.socket.getsockname()[0], 0))
sock.listen(5)
sock.settimeout(10)
- ip, port = sock.getsockname()[:2]
+ port = sock.getsockname()[1]
+ ip = self.fake_pasv_server_ip
ip = ip.replace('.', ',')
p1, p2 = divmod(port, 256)
self.push('227 entering passive mode (%s,%d,%d)' %(ip, p1, p2))
@@ -577,6 +582,26 @@ class TestFTPClass(TestCase):
# IPv4 is in use, just make sure send_epsv has not been used
self.assertEqual(self.server.handler_instance.last_received_cmd, 'pasv')

+ def test_makepasv_issue43285_security_disabled(self):
+ """Test the opt-in to the old vulnerable behavior."""
+ self.client.trust_server_pasv_ipv4_address = True
+ bad_host, port = self.client.makepasv()
+ self.assertEqual(
+ bad_host, self.server.handler_instance.fake_pasv_server_ip)
+ # Opening and closing a connection keeps the dummy server happy
+ # instead of timing out on accept.
+ socket.create_connection((self.client.sock.getpeername()[0], port),
+ timeout=TIMEOUT).close()
+
+ def test_makepasv_issue43285_security_enabled_default(self):
+ self.assertFalse(self.client.trust_server_pasv_ipv4_address)
+ trusted_host, port = self.client.makepasv()
+ self.assertNotEqual(
+ trusted_host, self.server.handler_instance.fake_pasv_server_ip)
+ # Opening and closing a connection keeps the dummy server happy
+ # instead of timing out on accept.
+ socket.create_connection((trusted_host, port), timeout=TIMEOUT).close()
+
def test_line_too_long(self):
self.assertRaises(ftplib.Error, self.client.sendcmd,
'x' * self.client.maxline * 2)
--- /dev/null
+++ b/Misc/NEWS.d/next/Security/2021-03-13-03-48-14.bpo-43285.g-Hah3.rst
@@ -0,0 +1,8 @@
+:mod:`ftplib` no longer trusts the IP address value returned from the server
+in response to the PASV command by default. This prevents a malicious FTP
+server from using the response to probe IPv4 address and port combinations
+on the client network.
+
+Code that requires the former vulnerable behavior may set a
+``trust_server_pasv_ipv4_address`` attribute on their
+:class:`ftplib.FTP` instances to ``True`` to re-enable it.
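From the client side the change is invisible unless the old behavior is explicitly needed; a minimal sketch (host and credentials are placeholders, so this only runs against a real server):

    import ftplib

    ftp = ftplib.FTP('ftp.example.org')        # placeholder host
    ftp.login('anonymous', 'anonymous@')

    # Default (patched): makepasv() ignores the IP in the 227 reply and reuses
    # the address we are already connected to.
    host, port = ftp.makepasv()

    # Explicit, discouraged opt-in to the old behavior of trusting the server.
    ftp.trust_server_pasv_ipv4_address = True
    ftp.quit()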
CVE-2022-0391-urllib_parse-newline-parsing.patch (Normal file, 169 lines added)
@@ -0,0 +1,169 @@
---
Doc/library/urlparse.rst | 14 ++
Doc/whatsnew/2.7.rst | 7 +
Lib/test/test_urlparse.py | 49 ++++++++++
Lib/urlparse.py | 12 ++
Misc/NEWS.d/next/Security/2021-04-25-07-46-37.bpo-43882.Jpwx85.rst | 6 +
5 files changed, 88 insertions(+)

--- a/Doc/library/urlparse.rst
+++ b/Doc/library/urlparse.rst
@@ -248,6 +248,10 @@ The :mod:`urlparse` module defines the f
decomposed before parsing, or is not a Unicode string, no error will be
raised.

+ Following the `WHATWG spec`_ that updates RFC 3986, ASCII
+ newline ``\n``, ``\r`` and tab ``\t`` characters are stripped
+ from the URL.
+
.. versionadded:: 2.2

.. versionchanged:: 2.5
@@ -257,6 +261,10 @@ The :mod:`urlparse` module defines the f
Characters that affect netloc parsing under NFKC normalization will
now raise :exc:`ValueError`.

+ .. versionchanged:: 3.6.14
+ ASCII newline and tab characters are stripped from the URL.
+
+.. _WHATWG spec: https://url.spec.whatwg.org/#concept-basic-url-parser

.. function:: urlunsplit(parts)

@@ -308,6 +316,11 @@ The :mod:`urlparse` module defines the f

.. seealso::

+ `WHATWG`_ - URL Living standard
+ Working Group for the URL Standard that defines URLs,
+ domains, IP addresses, the application/x-www-form-urlencoded format,
+ and their API.
+
:rfc:`3986` - Uniform Resource Identifiers
This is the current standard (STD66). Any changes to urlparse module
should conform to this. Certain deviations could be observed, which are
@@ -332,6 +345,7 @@ The :mod:`urlparse` module defines the f
:rfc:`1738` - Uniform Resource Locators (URL)
This specifies the formal syntax and semantics of absolute URLs.

+.. _WHATWG: https://url.spec.whatwg.org/

.. _urlparse-result-object:

--- a/Doc/whatsnew/2.7.rst
+++ b/Doc/whatsnew/2.7.rst
@@ -175,6 +175,13 @@ channel. We reuse the ftp server IP add
requiring the old behavior, set a ``trust_server_pasv_ipv4_address``
attribute on your FTP instance to ``True``. (See :issue:`43285`)

+The presence of newline or tab characters in parts of a URL allows for some
+forms of attacks. Following the WHATWG specification that updates RFC 3986,
+ASCII newline ``\n``, ``\r`` and tab ``\t`` characters are stripped from the
+URL by the parser :func:`urlparse` preventing such attacks. The removal
+characters are controlled by a new module level variable
+``urlparse._UNSAFE_URL_BYTES_TO_REMOVE``. (See :issue:`43882`)
+

Python 3.1 Features
=======================
--- a/Lib/test/test_urlparse.py
+++ b/Lib/test/test_urlparse.py
@@ -492,6 +492,55 @@ class UrlParseTestCase(unittest.TestCase
p = urlparse.urlsplit(url)
self.assertEqual(p.port, None)

+ def test_urlsplit_remove_unsafe_bytes(self):
+ # Remove ASCII tabs and newlines from input, for http common case scenario.
+ url = "h\nttp://www.python\n.org\t/java\nscript:\talert('msg\r\n')/?query\n=\tsomething#frag\nment"
+ p = urlparse.urlsplit(url)
+ self.assertEqual(p.scheme, "http")
+ self.assertEqual(p.netloc, "www.python.org")
+ self.assertEqual(p.path, "/javascript:alert('msg')/")
+ self.assertEqual(p.query, "query=something")
+ self.assertEqual(p.fragment, "fragment")
+ self.assertEqual(p.username, None)
+ self.assertEqual(p.password, None)
+ self.assertEqual(p.hostname, "www.python.org")
+ self.assertEqual(p.port, None)
+ self.assertEqual(p.geturl(), "http://www.python.org/javascript:alert('msg')/?query=something#fragment")
+
+ # Remove ASCII tabs and newlines from input as bytes, for http common case scenario.
+ url = b"h\nttp://www.python\n.org\t/java\nscript:\talert('msg\r\n')/?query\n=\tsomething#frag\nment"
+ p = urlparse.urlsplit(url)
+ self.assertEqual(p.scheme, b"http")
+ self.assertEqual(p.netloc, b"www.python.org")
+ self.assertEqual(p.path, b"/javascript:alert('msg')/")
+ self.assertEqual(p.query, b"query=something")
+ self.assertEqual(p.fragment, b"fragment")
+ self.assertEqual(p.username, None)
+ self.assertEqual(p.password, None)
+ self.assertEqual(p.hostname, b"www.python.org")
+ self.assertEqual(p.port, None)
+ self.assertEqual(p.geturl(), b"http://www.python.org/javascript:alert('msg')/?query=something#fragment")
+
+ # any scheme
+ url = "x-new-scheme\t://www.python\n.org\t/java\nscript:\talert('msg\r\n')/?query\n=\tsomething#frag\nment"
+ p = urlparse.urlsplit(url)
+ self.assertEqual(p.geturl(), "x-new-scheme://www.python.org/javascript:alert('msg')/?query=something#fragment")
+
+ # Remove ASCII tabs and newlines from input as bytes, any scheme.
+ url = b"x-new-scheme\t://www.python\n.org\t/java\nscript:\talert('msg\r\n')/?query\n=\tsomething#frag\nment"
+ p = urlparse.urlsplit(url)
+ self.assertEqual(p.geturl(), b"x-new-scheme://www.python.org/javascript:alert('msg')/?query=something#fragment")
+
+ # Unsafe bytes is not returned from urlparse cache.
+ # scheme is stored after parsing, sending an scheme with unsafe bytes *will not* return an unsafe scheme
+ url = "https://www.python\n.org\t/java\nscript:\talert('msg\r\n')/?query\n=\tsomething#frag\nment"
+ scheme = "htt\nps"
+ for _ in range(2):
+ p = urlparse.urlsplit(url, scheme=scheme)
+ self.assertEqual(p.scheme, "https")
+ self.assertEqual(p.geturl(), "https://www.python.org/javascript:alert('msg')/?query=something#fragment")
+
+
def test_issue14072(self):
p1 = urlparse.urlsplit('tel:+31-641044153')
self.assertEqual(p1.scheme, 'tel')
--- a/Lib/urlparse.py
+++ b/Lib/urlparse.py
@@ -62,6 +62,9 @@ scheme_chars = ('abcdefghijklmnopqrstuvw
'0123456789'
'+-.')

+# Unsafe bytes to be removed per WHATWG spec
+_UNSAFE_URL_BYTES_TO_REMOVE = ['\t', '\r', '\n']
+
MAX_CACHE_SIZE = 20
_parse_cache = {}

@@ -184,12 +187,21 @@ def _checknetloc(netloc):
"under NFKC normalization"
% netloc)

+
+def _remove_unsafe_bytes_from_url(url):
|
||||
+ for b in _UNSAFE_URL_BYTES_TO_REMOVE:
|
||||
+ url = url.replace(b, "")
|
||||
+ return url
|
||||
+
|
||||
+
|
||||
def urlsplit(url, scheme='', allow_fragments=True):
|
||||
"""Parse a URL into 5 components:
|
||||
<scheme>://<netloc>/<path>?<query>#<fragment>
|
||||
Return a 5-tuple: (scheme, netloc, path, query, fragment).
|
||||
Note that we don't break the components up in smaller bits
|
||||
(e.g. netloc is a single string) and we don't expand % escapes."""
|
||||
+ url = _remove_unsafe_bytes_from_url(url)
|
||||
+ scheme = _remove_unsafe_bytes_from_url(scheme)
|
||||
allow_fragments = bool(allow_fragments)
|
||||
key = url, scheme, allow_fragments, type(url), type(scheme)
|
||||
cached = _parse_cache.get(key, None)
|
||||
--- /dev/null
|
||||
+++ b/Misc/NEWS.d/next/Security/2021-04-25-07-46-37.bpo-43882.Jpwx85.rst
|
||||
@@ -0,0 +1,6 @@
|
||||
+The presence of newline or tab characters in parts of a URL could allow
+some forms of attacks.
+
+Following the controlling specification for URLs defined by WHATWG
+:func:`urlparse` now removes ASCII newlines and tabs from URLs,
+preventing such attacks.
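A quick sketch of the stripping behaviour described above (assuming Python 2.7
with this patch applied):

    import urlparse

    url = "http://www.py\nthon.org/pa\tth?q=1\r"
    parts = urlparse.urlsplit(url)
    print(parts.netloc)   # 'www.python.org' -- tab, CR and LF were removed
    print(parts.path)     # '/path'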
|
88
CVE-2022-45061-DoS-by-IDNA-decode.patch
Normal file
@ -0,0 +1,88 @@
|
|||
From fa792ddee55dc02c6392842c8194a464339f6f1b Mon Sep 17 00:00:00 2001
|
||||
From: "Miss Islington (bot)"
|
||||
<31488909+miss-islington@users.noreply.github.com>
|
||||
Date: Mon, 7 Nov 2022 18:57:10 -0800
|
||||
Subject: [PATCH] [3.11] gh-98433: Fix quadratic time idna decoding. (GH-99092)
|
||||
(GH-99222)
|
||||
|
||||
There was an unnecessary quadratic loop in idna decoding. This restores
the behavior to linear.
|
||||
|
||||
(cherry picked from commit d315722564927c7202dd6e111dc79eaf14240b0d)
|
||||
|
||||
(cherry picked from commit a6f6c3a3d6f2b580f2d87885c9b8a9350ad7bf15)
|
||||
|
||||
Co-authored-by: Miss Islington (bot) <31488909+miss-islington@users.noreply.github.com>
|
||||
Co-authored-by: Gregory P. Smith <greg@krypto.org>
|
||||
---
|
||||
Lib/encodings/idna.py | 32 ++++------
|
||||
Lib/test/test_codecs.py | 6 +
|
||||
Misc/NEWS.d/next/Security/2022-11-04-09-29-36.gh-issue-98433.l76c5G.rst | 6 +
|
||||
3 files changed, 27 insertions(+), 17 deletions(-)
|
||||
create mode 100644 Misc/NEWS.d/next/Security/2022-11-04-09-29-36.gh-issue-98433.l76c5G.rst
|
||||
|
||||
--- a/Lib/encodings/idna.py
|
||||
+++ b/Lib/encodings/idna.py
|
||||
@@ -39,23 +39,21 @@ def nameprep(label):
|
||||
|
||||
# Check bidi
|
||||
RandAL = map(stringprep.in_table_d1, label)
|
||||
- for c in RandAL:
|
||||
- if c:
|
||||
- # There is a RandAL char in the string. Must perform further
|
||||
- # tests:
|
||||
- # 1) The characters in section 5.8 MUST be prohibited.
|
||||
- # This is table C.8, which was already checked
|
||||
- # 2) If a string contains any RandALCat character, the string
|
||||
- # MUST NOT contain any LCat character.
|
||||
- if filter(stringprep.in_table_d2, label):
|
||||
- raise UnicodeError("Violation of BIDI requirement 2")
|
||||
-
|
||||
- # 3) If a string contains any RandALCat character, a
|
||||
- # RandALCat character MUST be the first character of the
|
||||
- # string, and a RandALCat character MUST be the last
|
||||
- # character of the string.
|
||||
- if not RandAL[0] or not RandAL[-1]:
|
||||
- raise UnicodeError("Violation of BIDI requirement 3")
|
||||
+ if any(RandAL):
|
||||
+ # There is a RandAL char in the string. Must perform further
|
||||
+ # tests:
|
||||
+ # 1) The characters in section 5.8 MUST be prohibited.
|
||||
+ # This is table C.8, which was already checked
|
||||
+ # 2) If a string contains any RandALCat character, the string
|
||||
+ # MUST NOT contain any LCat character.
|
||||
+ if any(stringprep.in_table_d2(x) for x in label):
|
||||
+ raise UnicodeError("Violation of BIDI requirement 2")
|
||||
+ # 3) If a string contains any RandALCat character, a
|
||||
+ # RandALCat character MUST be the first character of the
|
||||
+ # string, and a RandALCat character MUST be the last
|
||||
+ # character of the string.
|
||||
+ if not RandAL[0] or not RandAL[-1]:
|
||||
+ raise UnicodeError("Violation of BIDI requirement 3")
|
||||
|
||||
return label
|
||||
|
||||
--- a/Lib/test/test_codecs.py
|
||||
+++ b/Lib/test/test_codecs.py
|
||||
@@ -1318,6 +1318,12 @@ class IDNACodecTest(unittest.TestCase):
|
||||
self.assertEqual(u"pyth\xf6n.org".encode("idna"), "xn--pythn-mua.org")
|
||||
self.assertEqual(u"pyth\xf6n.org.".encode("idna"), "xn--pythn-mua.org.")
|
||||
|
||||
+ def test_builtin_decode_length_limit(self):
|
||||
+ with self.assertRaisesRegexp(UnicodeError, "too long"):
|
||||
+ (b"xn--016c"+b"a"*1100).decode("idna")
|
||||
+ with self.assertRaisesRegexp(UnicodeError, "too long"):
|
||||
+ (b"xn--016c"+b"a"*70).decode("idna")
|
||||
+
|
||||
def test_stream(self):
|
||||
import StringIO
|
||||
r = codecs.getreader("idna")(StringIO.StringIO("abc"))
|
||||
--- /dev/null
|
||||
+++ b/Misc/NEWS.d/next/Security/2022-11-04-09-29-36.gh-issue-98433.l76c5G.rst
|
||||
@@ -0,0 +1,6 @@
|
||||
+The IDNA codec decoder used on DNS hostnames by :mod:`socket` or :mod:`asyncio`
+related name resolution functions no longer involves a quadratic algorithm.
+This prevents a potential CPU denial of service if an out-of-spec excessive
+length hostname involving bidirectional characters were decoded. Some protocols
+such as :mod:`urllib` http ``3xx`` redirects potentially allow for an attacker
+to supply such a name.
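The effect can be seen directly, mirroring the test added above: an
out-of-spec, overly long label is rejected up front instead of being fed to
the previously quadratic path.

    try:
        (b"xn--016c" + b"a" * 1100).decode("idna")
    except UnicodeError as exc:
        print(exc)   # rejected as too long before any expensive work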
|
80
CVE-2022-48565-plistlib-XML-vulns.patch
Normal file
@ -0,0 +1,80 @@
|
|||
From 4d8f9e2e4461de92bd1e0c92ed433480d761670f Mon Sep 17 00:00:00 2001
|
||||
From: Ned Deily <nad@python.org>
|
||||
Date: Mon, 19 Oct 2020 22:36:27 -0400
|
||||
Subject: [PATCH] bpo-42051: Reject XML entity declarations in plist files
|
||||
(GH-22760) (GH-22801)
|
||||
|
||||
Co-authored-by: Ronald Oussoren <ronaldoussoren@mac.com>
|
||||
(cherry picked from commit e512bc799e3864fe3b1351757261762d63471efc)
|
||||
|
||||
Co-authored-by: Ned Deily <nad@python.org>
|
||||
---
|
||||
Lib/plistlib.py | 10 +++++
|
||||
Lib/test/test_plistlib.py | 19 ++++++++++
|
||||
Misc/NEWS.d/next/Security/2020-10-19-10-56-27.bpo-42051.EU_B7u.rst | 3 +
|
||||
3 files changed, 32 insertions(+)
|
||||
create mode 100644 Misc/NEWS.d/next/Security/2020-10-19-10-56-27.bpo-42051.EU_B7u.rst
|
||||
|
||||
--- a/Lib/plistlib.py
|
||||
+++ b/Lib/plistlib.py
|
||||
@@ -403,9 +403,19 @@ class PlistParser:
|
||||
parser.StartElementHandler = self.handleBeginElement
|
||||
parser.EndElementHandler = self.handleEndElement
|
||||
parser.CharacterDataHandler = self.handleData
|
||||
+ parser.EntityDeclHandler = self.handle_entity_decl
|
||||
parser.ParseFile(fileobj)
|
||||
return self.root
|
||||
|
||||
+ def handle_entity_decl(self, entity_name, is_parameter_entity, value,
|
||||
+ base, system_id, public_id, notation_name):
|
||||
+ # Reject plist files with entity declarations to avoid XML
+ # vulnerabilities in expat. Regular plist files don't contain
+ # those declarations, and Apple's plutil tool does not accept
+ # them either.
|
||||
+ raise ValueError(
|
||||
+ "XML entity declarations are not supported in plist files")
|
||||
+
|
||||
def handleBeginElement(self, element, attrs):
|
||||
self.data = []
|
||||
handler = getattr(self, "begin_" + element, None)
|
||||
--- a/Lib/test/test_plistlib.py
|
||||
+++ b/Lib/test/test_plistlib.py
|
||||
@@ -86,6 +86,19 @@ TESTDATA = """<?xml version="1.0" encodi
|
||||
</plist>
|
||||
""".replace(" " * 8, "\t") # Apple as well as plistlib.py output hard tabs
|
||||
|
||||
+XML_PLIST_WITH_ENTITY=b'''\
|
||||
+<?xml version="1.0" encoding="UTF-8"?>
|
||||
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd" [
|
||||
+ <!ENTITY entity "replacement text">
|
||||
+ ]>
|
||||
+<plist version="1.0">
|
||||
+ <dict>
|
||||
+ <key>A</key>
|
||||
+ <string>&entity;</string>
|
||||
+ </dict>
|
||||
+</plist>
|
||||
+'''
|
||||
+
|
||||
|
||||
class TestPlistlib(unittest.TestCase):
|
||||
|
||||
@@ -195,6 +208,12 @@ class TestPlistlib(unittest.TestCase):
|
||||
self.assertEqual(test1, result1)
|
||||
self.assertEqual(test2, result2)
|
||||
|
||||
+ def test_xml_plist_with_entity_decl(self):
|
||||
+ with self.assertRaisesRegexp(ValueError,
|
||||
+ "XML entity declarations are not supported"):
|
||||
+ plistlib.readPlistFromString(XML_PLIST_WITH_ENTITY)
|
||||
+
|
||||
+
|
||||
|
||||
def test_main():
|
||||
test_support.run_unittest(TestPlistlib)
|
||||
--- /dev/null
|
||||
+++ b/Misc/NEWS.d/next/Security/2020-10-19-10-56-27.bpo-42051.EU_B7u.rst
|
||||
@@ -0,0 +1,3 @@
|
||||
+The :mod:`plistlib` module no longer accepts entity declarations in XML
|
||||
+plist files to avoid XML vulnerabilities. This should not affect users as
|
||||
+entity declarations are not used in regular plist files.
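A small sketch of the new refusal, built from the same kind of document as the
test above (assuming this patch is applied):

    import plistlib

    EVIL = '''<?xml version="1.0" encoding="UTF-8"?>
    <!DOCTYPE plist [ <!ENTITY entity "replacement text"> ]>
    <plist version="1.0"><dict>
        <key>A</key><string>&entity;</string>
    </dict></plist>'''

    try:
        plistlib.readPlistFromString(EVIL)
    except ValueError as exc:
        print(exc)   # "XML entity declarations are not supported in plist files"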
|
35
CVE-2022-48566-compare_digest-more-constant.patch
Normal file
@ -0,0 +1,35 @@
|
|||
From 8bef9ebb1b88cfa4b2a38b93fe4ea22015d8254a Mon Sep 17 00:00:00 2001
|
||||
From: "Miss Islington (bot)"
|
||||
<31488909+miss-islington@users.noreply.github.com>
|
||||
Date: Mon, 14 Dec 2020 09:04:57 -0800
|
||||
Subject: [PATCH] bpo-40791: Make compare_digest more constant-time. (GH-23438)
|
||||
(GH-23767)
|
||||
|
||||
The existing volatile `left`/`right` pointers guarantee that the reads will all occur, but does not guarantee that they will be _used_. So a compiler can still short-circuit the loop, saving e.g. the overhead of doing the xors and especially the overhead of the data dependency between `result` and the reads. That would change performance depending on where the first unequal byte occurs. This change removes that optimization.
|
||||
|
||||
(This is change GH-1 from https://bugs.python.org/issue40791 .)
|
||||
(cherry picked from commit 31729366e2bc09632e78f3896dbce0ae64914f28)
|
||||
|
||||
Co-authored-by: Devin Jeanpierre <jeanpierreda@google.com>
|
||||
---
|
||||
Misc/NEWS.d/next/Security/2020-05-28-06-06-47.bpo-40791.QGZClX.rst | 1 +
|
||||
Modules/operator.c | 2 +-
|
||||
2 files changed, 2 insertions(+), 1 deletion(-)
|
||||
create mode 100644 Misc/NEWS.d/next/Security/2020-05-28-06-06-47.bpo-40791.QGZClX.rst
|
||||
|
||||
--- /dev/null
|
||||
+++ b/Misc/NEWS.d/next/Security/2020-05-28-06-06-47.bpo-40791.QGZClX.rst
|
||||
@@ -0,0 +1 @@
|
||||
+Add ``volatile`` to the accumulator variable in ``hmac.compare_digest``, making constant-time-defeating optimizations less likely.
|
||||
\ No newline at end of file
|
||||
--- a/Modules/operator.c
|
||||
+++ b/Modules/operator.c
|
||||
@@ -259,7 +259,7 @@ _tscmp(const unsigned char *a, const uns
|
||||
volatile const unsigned char *left;
|
||||
volatile const unsigned char *right;
|
||||
Py_ssize_t i;
|
||||
- unsigned char result;
|
||||
+ volatile unsigned char result;
|
||||
|
||||
/* loop count depends on length of b */
|
||||
length = len_b;
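In 2.7 this C helper backs ``hmac.compare_digest``; usage is unchanged, the
patch only makes the comparison harder for the compiler to short-circuit
(the values below are hypothetical):

    import hmac

    expected_mac = "8a2d7c0f"   # hypothetical hex-encoded MAC computed locally
    received_mac = "8a2d7c0f"   # hypothetical value received from a peer
    if hmac.compare_digest(expected_mac, received_mac):
        print("MAC ok")   # timing does not depend on where a mismatch occurs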
|
84
CVE-2023-24329-blank-URL-bypass.patch
Normal file
@ -0,0 +1,84 @@
|
|||
---
|
||||
Lib/test/test_urlparse.py | 21 ++++++++++
|
||||
Lib/urlparse.py | 9 +++-
|
||||
Misc/NEWS.d/next/Library/2022-11-12-15-45-51.gh-issue-99418.FxfAXS.rs | 2
|
||||
3 files changed, 30 insertions(+), 2 deletions(-)
|
||||
|
||||
Index: Python-2.7.18/Lib/test/test_urlparse.py
|
||||
===================================================================
|
||||
--- Python-2.7.18.orig/Lib/test/test_urlparse.py
|
||||
+++ Python-2.7.18/Lib/test/test_urlparse.py
|
||||
@@ -1,4 +1,5 @@
|
||||
from test import test_support
|
||||
+from urlparse import isascii
|
||||
import sys
|
||||
import unicodedata
|
||||
import unittest
|
||||
@@ -592,6 +593,26 @@ class UrlParseTestCase(unittest.TestCase
|
||||
self.assertEqual(p.netloc, "www.example.net:foo")
|
||||
self.assertRaises(ValueError, lambda: p.port)
|
||||
|
||||
+ def do_attributes_bad_scheme(self, bytes, parse, scheme):
|
||||
+ url = scheme + "://www.example.net"
|
||||
+ if bytes:
|
||||
+ if isascii(url):
|
||||
+ url = url.encode("ascii")
|
||||
+ else:
|
||||
+ return
|
||||
+ p = parse(url)
|
||||
+ if bytes:
|
||||
+ self.assertEqual(p.scheme, b"")
|
||||
+ else:
|
||||
+ self.assertEqual(p.scheme, "")
|
||||
+
|
||||
+ def test_attributes_bad_scheme(self):
|
||||
+ """Check handling of invalid schemes."""
|
||||
+ for bytes in (False, True):
|
||||
+ for parse in (urlparse.urlsplit, urlparse.urlparse):
|
||||
+ for scheme in (".", "+", "-", "0", "http&"):
|
||||
+ self.do_attributes_bad_scheme(bytes, parse, scheme)
|
||||
+
|
||||
def test_attributes_without_netloc(self):
|
||||
# This example is straight from RFC 3261. It looks like it
|
||||
# should allow the username, hostname, and port to be filled
|
||||
Index: Python-2.7.18/Lib/urlparse.py
|
||||
===================================================================
|
||||
--- Python-2.7.18.orig/Lib/urlparse.py
|
||||
+++ Python-2.7.18/Lib/urlparse.py
|
||||
@@ -31,7 +31,8 @@ test_urlparse.py provides a good indicat
|
||||
import re
|
||||
|
||||
__all__ = ["urlparse", "urlunparse", "urljoin", "urldefrag",
|
||||
- "urlsplit", "urlunsplit", "parse_qs", "parse_qsl"]
|
||||
+ "urlsplit", "urlunsplit", "parse_qs", "parse_qsl",
|
||||
+ "isascii"]
|
||||
|
||||
# A classification of schemes ('' means apply by default)
|
||||
uses_relative = ['ftp', 'http', 'gopher', 'nntp', 'imap',
|
||||
@@ -68,6 +69,10 @@ _UNSAFE_URL_BYTES_TO_REMOVE = ['\t', '\r
|
||||
MAX_CACHE_SIZE = 20
|
||||
_parse_cache = {}
|
||||
|
||||
+# Py3k shim
|
||||
+def isascii(word):
|
||||
+ return all([ord(c) < 128 for c in word])
|
||||
+
|
||||
def clear_cache():
|
||||
"""Clear the parse cache."""
|
||||
_parse_cache.clear()
|
||||
@@ -211,7 +216,7 @@ def urlsplit(url, scheme='', allow_fragm
|
||||
clear_cache()
|
||||
netloc = query = fragment = ''
|
||||
i = url.find(':')
|
||||
- if i > 0:
|
||||
+ if i > 0 and isascii(url[0]) and url[0].isalpha():
|
||||
if url[:i] == 'http': # optimize the common case
|
||||
scheme = url[:i].lower()
|
||||
url = url[i+1:]
|
||||
Index: Python-2.7.18/Misc/NEWS.d/next/Library/2022-11-12-15-45-51.gh-issue-99418.FxfAXS.rs
|
||||
===================================================================
|
||||
--- /dev/null
|
||||
+++ Python-2.7.18/Misc/NEWS.d/next/Library/2022-11-12-15-45-51.gh-issue-99418.FxfAXS.rs
|
||||
@@ -0,0 +1,2 @@
|
||||
+Fix bug in :func:`urllib.parse.urlparse` that causes URL schemes that begin
|
||||
+with a digit, a plus sign, or a minus sign to be parsed incorrectly.
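The effect of the added check, mirroring the new test: a would-be scheme that
does not start with an ASCII letter is no longer split off.

    import urlparse

    p = urlparse.urlsplit("0://www.example.net")
    print(repr(p.scheme))   # '' -- "0" is not accepted as a scheme any more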
|
330
CVE-2023-40217-avoid-ssl-pre-close.patch
Normal file
@ -0,0 +1,330 @@
|
|||
From f0c1e55dfd28970196768a6997a6dc0eab0f5259 Mon Sep 17 00:00:00 2001
|
||||
From: =?UTF-8?q?=C5=81ukasz=20Langa?= <lukasz@langa.pl>
|
||||
Date: Tue, 22 Aug 2023 17:39:17 +0200
|
||||
Subject: [PATCH] gh-108310: Fix CVE-2023-40217: Check for & avoid the ssl
|
||||
pre-close flaw
|
||||
MIME-Version: 1.0
|
||||
Content-Type: text/plain; charset=UTF-8
|
||||
Content-Transfer-Encoding: 8bit
|
||||
|
||||
Instances of `ssl.SSLSocket` were vulnerable to a bypass of the TLS handshake
and included protections (like certificate verification) and treating sent
unencrypted data as if it were post-handshake TLS encrypted data.

The vulnerability is caused when a socket is connected, data is sent by the
malicious peer and stored in a buffer, and then the malicious peer closes the
socket within a small timing window before the other peers’ TLS handshake can
begin. After this sequence of events the closed socket will not immediately
attempt a TLS handshake due to not being connected but will also allow the
buffered data to be read as if a successful TLS handshake had occurred.
|
||||
|
||||
Co-Authored-By: Gregory P. Smith [Google LLC] <greg@krypto.org>
|
||||
---
|
||||
Lib/ssl.py | 31 ++-
|
||||
Lib/test/test_ssl.py | 215 ++++++++++++++++++
|
||||
...-08-22-17-39-12.gh-issue-108310.fVM3sg.rst | 7 +
|
||||
3 files changed, 252 insertions(+), 1 deletion(-)
|
||||
create mode 100644 Misc/NEWS.d/next/Security/2023-08-22-17-39-12.gh-issue-108310.fVM3sg.rst
|
||||
|
||||
Index: Python-2.7.18/Lib/ssl.py
|
||||
===================================================================
|
||||
--- Python-2.7.18.orig/Lib/ssl.py
|
||||
+++ Python-2.7.18/Lib/ssl.py
|
||||
@@ -576,10 +576,13 @@ class SSLSocket(socket):
|
||||
"in client mode")
|
||||
if self._context.check_hostname and not server_hostname:
|
||||
raise ValueError("check_hostname requires server_hostname")
|
||||
+ self._closed = False
|
||||
+ self._sslobj = None
|
||||
self.server_side = server_side
|
||||
self.server_hostname = server_hostname
|
||||
self.do_handshake_on_connect = do_handshake_on_connect
|
||||
self.suppress_ragged_eofs = suppress_ragged_eofs
|
||||
+ sock_timeout = sock.gettimeout()
|
||||
|
||||
# See if we are connected
|
||||
try:
|
||||
@@ -588,11 +591,38 @@ class SSLSocket(socket):
|
||||
if e.errno != errno.ENOTCONN:
|
||||
raise
|
||||
connected = False
|
||||
+ blocking = self.gettimeout() == 0
|
||||
+ self.setblocking(False)
|
||||
+ try:
|
||||
+ # We are not connected so this is not supposed to block, but
|
||||
+ # testing revealed otherwise on macOS and Windows so we do
|
||||
+ # the non-blocking dance regardless. Our raise when any data
|
||||
+ # is found means consuming the data is harmless.
|
||||
+ notconn_pre_handshake_data = self.recv(1)
|
||||
+ except socket_error as e:
|
||||
+ # EINVAL occurs for recv(1) on non-connected on unix sockets.
|
||||
+ if e.errno not in (errno.ENOTCONN, errno.EINVAL):
|
||||
+ raise
|
||||
+ notconn_pre_handshake_data = b''
|
||||
+ self.setblocking(blocking)
|
||||
+ if notconn_pre_handshake_data:
|
||||
+ # This prevents pending data sent to the socket before it was
|
||||
+ # closed from escaping to the caller who could otherwise
|
||||
+ # presume it came through a successful TLS connection.
|
||||
+ reason = "Closed before TLS handshake with data in recv buffer."
|
||||
+ notconn_pre_handshake_data_error = SSLError(e.errno, reason)
|
||||
+ # Add the SSLError attributes that _ssl.c always adds.
|
||||
+ notconn_pre_handshake_data_error.reason = reason
|
||||
+ notconn_pre_handshake_data_error.library = None
|
||||
+ try:
|
||||
+ self.close()
|
||||
+ except socket_error:
|
||||
+ pass
|
||||
+ raise notconn_pre_handshake_data_error
|
||||
else:
|
||||
connected = True
|
||||
|
||||
- self._closed = False
|
||||
- self._sslobj = None
|
||||
+ self.settimeout(sock_timeout) # Must come after setblocking() calls.
|
||||
self._connected = connected
|
||||
if connected:
|
||||
# create the SSL object
|
||||
Index: Python-2.7.18/Lib/test/test_ssl.py
|
||||
===================================================================
|
||||
--- Python-2.7.18.orig/Lib/test/test_ssl.py
|
||||
+++ Python-2.7.18/Lib/test/test_ssl.py
|
||||
@@ -20,6 +20,8 @@ import traceback
|
||||
import weakref
|
||||
import platform
|
||||
import re
|
||||
+import struct
|
||||
+import httplib
|
||||
import functools
|
||||
from contextlib import closing
|
||||
|
||||
@@ -3262,6 +3264,217 @@ else:
|
||||
self.assertRaises(ValueError, s.write, b'hello')
|
||||
|
||||
|
||||
+def set_socket_so_linger_on_with_zero_timeout(sock):
|
||||
+ sock.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, struct.pack('ii', 1, 0))
|
||||
+
|
||||
+
|
||||
+class TestPreHandshakeClose(unittest.TestCase):
|
||||
+ """Verify behavior of close sockets with received data before to the handshake.
|
||||
+ """
|
||||
+
|
||||
+ class SingleConnectionTestServerThread(threading.Thread):
|
||||
+
|
||||
+ def __init__(self, name=None, call_after_accept=None):
|
||||
+ self.call_after_accept = call_after_accept
|
||||
+ self.received_data = b'' # set by .run()
|
||||
+ self.wrap_error = None # set by .run()
|
||||
+ self.listener = None # set by .start()
|
||||
+ self.port = None # set by .start()
|
||||
+ super().__init__(name=name)
|
||||
+
|
||||
+ def __enter__(self):
|
||||
+ self.start()
|
||||
+ return self
|
||||
+
|
||||
+ def __exit__(self, *args):
|
||||
+ try:
|
||||
+ if self.listener:
|
||||
+ self.listener.close()
|
||||
+ except OSError:
|
||||
+ pass
|
||||
+ self.join()
|
||||
+ self.wrap_error = None # avoid dangling references
|
||||
+
|
||||
+ def start(self):
|
||||
+ self.ssl_ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
|
||||
+ self.ssl_ctx.verify_mode = ssl.CERT_REQUIRED
|
||||
+ self.ssl_ctx.load_verify_locations(cafile=ONLYCERT)
|
||||
+ self.ssl_ctx.load_cert_chain(certfile=ONLYCERT, keyfile=ONLYKEY)
|
||||
+ self.listener = socket.socket()
|
||||
+ self.port = support.bind_port(self.listener)
|
||||
+ self.listener.settimeout(2.0)
|
||||
+ self.listener.listen(1)
|
||||
+ super().start()
|
||||
+
|
||||
+ def run(self):
|
||||
+ conn, address = self.listener.accept()
|
||||
+ self.listener.close()
|
||||
+ with conn:
|
||||
+ if self.call_after_accept(conn):
|
||||
+ return
|
||||
+ try:
|
||||
+ tls_socket = self.ssl_ctx.wrap_socket(conn, server_side=True)
|
||||
+ except OSError as err: # ssl.SSLError inherits from OSError
|
||||
+ self.wrap_error = err
|
||||
+ else:
|
||||
+ try:
|
||||
+ self.received_data = tls_socket.recv(400)
|
||||
+ except OSError:
|
||||
+ pass # closed, protocol error, etc.
|
||||
+
|
||||
+ def non_linux_skip_if_other_okay_error(self, err):
|
||||
+ if sys.platform == "linux":
|
||||
+ return # Expect the full test setup to always work on Linux.
|
||||
+ if (isinstance(err, ConnectionResetError) or
|
||||
+ (isinstance(err, OSError) and err.errno == errno.EINVAL) or
|
||||
+ re.search('wrong.version.number', getattr(err, "reason", ""), re.I)):
|
||||
+ # On Windows the TCP RST leads to a ConnectionResetError
|
||||
+ # (ECONNRESET) which Linux doesn't appear to surface to userspace.
|
||||
+ # If wrap_socket() winds up on the "if connected:" path and doing
|
||||
+ # the actual wrapping... we get an SSLError from OpenSSL. Typically
|
||||
+ # WRONG_VERSION_NUMBER. While appropriate, neither is the scenario
|
||||
+ # we're specifically trying to test. The way this test is written
|
||||
+ # is known to work on Linux. We'll skip it anywhere else that it
|
||||
+ # does not present as doing so.
|
||||
+ self.skipTest("Could not recreate conditions on %s: %s" % (sys.platform, err))
|
||||
+ # If maintaining this conditional winds up being a problem.
|
||||
+ # just turn this into an unconditional skip anything but Linux.
|
||||
+ # The important thing is that our CI has the logic covered.
|
||||
+
|
||||
+ def test_preauth_data_to_tls_server(self):
|
||||
+ server_accept_called = threading.Event()
|
||||
+ ready_for_server_wrap_socket = threading.Event()
|
||||
+
|
||||
+ def call_after_accept(unused):
|
||||
+ server_accept_called.set()
|
||||
+ if not ready_for_server_wrap_socket.wait(2.0):
|
||||
+ raise RuntimeError("wrap_socket event never set, test may fail.")
|
||||
+ return False # Tell the server thread to continue.
|
||||
+
|
||||
+ server = self.SingleConnectionTestServerThread(
|
||||
+ call_after_accept=call_after_accept,
|
||||
+ name="preauth_data_to_tls_server")
|
||||
+ server.__enter__() # starts it
|
||||
+ self.addCleanup(server.__exit__) # ... & unittest.TestCase stops it.
|
||||
+
|
||||
+ with socket.socket() as client:
|
||||
+ client.connect(server.listener.getsockname())
|
||||
+ # This forces an immediate connection close via RST on .close().
|
||||
+ set_socket_so_linger_on_with_zero_timeout(client)
|
||||
+ client.setblocking(False)
|
||||
+
|
||||
+ server_accept_called.wait()
|
||||
+ client.send(b"DELETE /data HTTP/1.0\r\n\r\n")
|
||||
+ client.close() # RST
|
||||
+
|
||||
+ ready_for_server_wrap_socket.set()
|
||||
+ server.join()
|
||||
+ wrap_error = server.wrap_error
|
||||
+ self.assertEqual(b"", server.received_data)
|
||||
+ self.assertIsInstance(wrap_error, OSError) # All platforms.
|
||||
+ self.non_linux_skip_if_other_okay_error(wrap_error)
|
||||
+ self.assertIsInstance(wrap_error, ssl.SSLError)
|
||||
+ self.assertIn("before TLS handshake with data", wrap_error.args[1])
|
||||
+ self.assertIn("before TLS handshake with data", wrap_error.reason)
|
||||
+ self.assertNotEqual(0, wrap_error.args[0])
|
||||
+ self.assertIsNone(wrap_error.library, msg="attr must exist")
|
||||
+
|
||||
+ def test_preauth_data_to_tls_client(self):
|
||||
+ client_can_continue_with_wrap_socket = threading.Event()
|
||||
+
|
||||
+ def call_after_accept(conn_to_client):
|
||||
+ # This forces an immediate connection close via RST on .close().
|
||||
+ set_socket_so_linger_on_with_zero_timeout(conn_to_client)
|
||||
+ conn_to_client.send(
|
||||
+ b"HTTP/1.0 307 Temporary Redirect\r\n"
|
||||
+ b"Location: https://example.com/someone-elses-server\r\n"
|
||||
+ b"\r\n")
|
||||
+ conn_to_client.close() # RST
|
||||
+ client_can_continue_with_wrap_socket.set()
|
||||
+ return True # Tell the server to stop.
|
||||
+
|
||||
+ server = self.SingleConnectionTestServerThread(
|
||||
+ call_after_accept=call_after_accept,
|
||||
+ name="preauth_data_to_tls_client")
|
||||
+ server.__enter__() # starts it
|
||||
+ self.addCleanup(server.__exit__) # ... & unittest.TestCase stops it.
|
||||
+
|
||||
+ # Redundant; call_after_accept sets SO_LINGER on the accepted conn.
|
||||
+ set_socket_so_linger_on_with_zero_timeout(server.listener)
|
||||
+
|
||||
+ with socket.socket() as client:
|
||||
+ client.connect(server.listener.getsockname())
|
||||
+ if not client_can_continue_with_wrap_socket.wait(2.0):
|
||||
+ self.fail("test server took too long.")
|
||||
+ ssl_ctx = ssl.create_default_context()
|
||||
+ try:
|
||||
+ tls_client = ssl_ctx.wrap_socket(
|
||||
+ client, server_hostname="localhost")
|
||||
+ except OSError as err: # SSLError inherits from OSError
|
||||
+ wrap_error = err
|
||||
+ received_data = b""
|
||||
+ else:
|
||||
+ wrap_error = None
|
||||
+ received_data = tls_client.recv(400)
|
||||
+ tls_client.close()
|
||||
+
|
||||
+ server.join()
|
||||
+ self.assertEqual(b"", received_data)
|
||||
+ self.assertIsInstance(wrap_error, OSError) # All platforms.
|
||||
+ self.non_linux_skip_if_other_okay_error(wrap_error)
|
||||
+ self.assertIsInstance(wrap_error, ssl.SSLError)
|
||||
+ self.assertIn("before TLS handshake with data", wrap_error.args[1])
|
||||
+ self.assertIn("before TLS handshake with data", wrap_error.reason)
|
||||
+ self.assertNotEqual(0, wrap_error.args[0])
|
||||
+ self.assertIsNone(wrap_error.library, msg="attr must exist")
|
||||
+
|
||||
+ def test_https_client_non_tls_response_ignored(self):
|
||||
+
|
||||
+ server_responding = threading.Event()
|
||||
+
|
||||
+ class SynchronizedHTTPSConnection(httplib.HTTPSConnection):
|
||||
+ def connect(self):
|
||||
+ httplib.HTTPConnection.connect(self)
|
||||
+ # Wait for our fault injection server to have done its thing.
|
||||
+ if not server_responding.wait(1.0) and support.verbose:
|
||||
+ sys.stdout.write("server_responding event never set.")
|
||||
+ self.sock = self._context.wrap_socket(
|
||||
+ self.sock, server_hostname=self.host)
|
||||
+
|
||||
+ def call_after_accept(conn_to_client):
|
||||
+ # This forces an immediate connection close via RST on .close().
|
||||
+ set_socket_so_linger_on_with_zero_timeout(conn_to_client)
|
||||
+ conn_to_client.send(
|
||||
+ b"HTTP/1.0 402 Payment Required\r\n"
|
||||
+ b"\r\n")
|
||||
+ conn_to_client.close() # RST
|
||||
+ server_responding.set()
|
||||
+ return True # Tell the server to stop.
|
||||
+
|
||||
+ server = self.SingleConnectionTestServerThread(
|
||||
+ call_after_accept=call_after_accept,
|
||||
+ name="non_tls_http_RST_responder")
|
||||
+ server.__enter__() # starts it
|
||||
+ self.addCleanup(server.__exit__) # ... & unittest.TestCase stops it.
|
||||
+ # Redundant; call_after_accept sets SO_LINGER on the accepted conn.
|
||||
+ set_socket_so_linger_on_with_zero_timeout(server.listener)
|
||||
+
|
||||
+ connection = SynchronizedHTTPSConnection(
|
||||
+ "localhost",
|
||||
+ port=server.port,
|
||||
+ context=ssl.create_default_context(),
|
||||
+ timeout=2.0,
|
||||
+ )
|
||||
+ # There are lots of reasons this raises as desired, long before this
|
||||
+ # test was added. Sending the request requires a successful TLS wrapped
|
||||
+ # socket; that fails if the connection is broken. It may seem pointless
|
||||
+ # to test this. It serves as an illustration of something that we never
|
||||
+ # want to happen... properly not happening.
|
||||
+ with self.assertRaises(OSError) as err_ctx:
|
||||
+ connection.request("HEAD", "/test", headers={"Host": "localhost"})
|
||||
+ response = connection.getresponse()
|
||||
+
|
||||
+
|
||||
def test_main(verbose=False):
|
||||
if support.verbose:
|
||||
plats = {
|
||||
Index: Python-2.7.18/Misc/NEWS.d/next/Security/2023-08-22-17-39-12.gh-issue-108310.fVM3sg.rst
|
||||
===================================================================
|
||||
--- /dev/null
|
||||
+++ Python-2.7.18/Misc/NEWS.d/next/Security/2023-08-22-17-39-12.gh-issue-108310.fVM3sg.rst
|
||||
@@ -0,0 +1,7 @@
|
||||
+Fixed an issue where instances of :class:`ssl.SSLSocket` were vulnerable to
|
||||
+a bypass of the TLS handshake and included protections (like certificate
|
||||
+verification) and treating sent unencrypted data as if it were
|
||||
+post-handshake TLS encrypted data. Security issue reported as
|
||||
+`CVE-2023-40217
|
||||
+<https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2023-40217>`_ by
|
||||
+Aapo Oksman. Patch by Gregory P. Smith.
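With this patch, data sent before the handshake followed by a close surfaces
as an ``ssl.SSLError`` instead of being readable as if it were TLS-protected.
A minimal handling sketch (the host name is hypothetical):

    import socket
    import ssl

    ctx = ssl.create_default_context()
    sock = socket.create_connection(("server.example", 443))   # hypothetical peer
    try:
        tls = ctx.wrap_socket(sock, server_hostname="server.example")
    except ssl.SSLError as exc:
        # Raised when the peer sent data and closed before the handshake;
        # exc.reason is "Closed before TLS handshake with data in recv buffer."
        print(exc.reason)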
|
28
PygmentsBridge-trime_doctest_flags.patch
Normal file
@ -0,0 +1,28 @@
|
|||
---
|
||||
Doc/tools/extensions/pyspecific.py | 11 +++++++----
|
||||
1 file changed, 7 insertions(+), 4 deletions(-)
|
||||
|
||||
--- a/Doc/tools/extensions/pyspecific.py
|
||||
+++ b/Doc/tools/extensions/pyspecific.py
|
||||
@@ -31,14 +31,17 @@ Body.enum.converters['loweralpha'] = \
|
||||
# doctest docs themselves
|
||||
orig_visit_literal_block = HTMLTranslator.visit_literal_block
|
||||
def new_visit_literal_block(self, node):
|
||||
+ old_trim_doctest_flags = None
|
||||
meta = self.builder.env.metadata[self.builder.current_docname]
|
||||
- old_trim_doctest_flags = self.highlighter.trim_doctest_flags
|
||||
- if 'keepdoctest' in meta:
|
||||
- self.highlighter.trim_doctest_flags = False
|
||||
+ if hasattr(self.highlighter, 'trim_doctest_flags'):
|
||||
+ old_trim_doctest_flags = self.highlighter.trim_doctest_flags
|
||||
+ if 'keepdoctest' in meta:
|
||||
+ self.highlighter.trim_doctest_flags = False
|
||||
try:
|
||||
orig_visit_literal_block(self, node)
|
||||
finally:
|
||||
- self.highlighter.trim_doctest_flags = old_trim_doctest_flags
|
||||
+ if old_trim_doctest_flags is not None:
|
||||
+ self.highlighter.trim_doctest_flags = old_trim_doctest_flags
|
||||
|
||||
HTMLTranslator.visit_literal_block = new_visit_literal_block
|
||||
|
21
README.SUSE
Normal file
@ -0,0 +1,21 @@
|
|||
Python in SUSE
==============

* Documentation *

You can find documentation in separate packages: python-doc and
python-doc-pdf. These contain the following documents:

Tutorial, What's New in Python, Global Module Index, Library Reference,
Macintosh Module Reference, Installing Python Modules, Distributing Python
Modules, Language Reference, Extending and Embedding, Python/C API,
Documenting Python

The python-doc package contains many text files from the source tarball.


* Interactive mode *

Interactive mode is by default enhanced with history and command completion.
If you don't like these features, you can unset the PYTHONSTARTUP variable in
your .profile or disable it system wide in /etc/profile.d/python.sh.
|
40
adapted-from-F00251-change-user-install-location.patch
Normal file
@ -0,0 +1,40 @@
|
|||
Index: Python-2.7.17/Lib/distutils/command/install.py
|
||||
===================================================================
|
||||
--- Python-2.7.17.orig/Lib/distutils/command/install.py
|
||||
+++ Python-2.7.17/Lib/distutils/command/install.py
|
||||
@@ -431,8 +431,18 @@ class install (Command):
|
||||
raise DistutilsOptionError, \
|
||||
"must not supply exec-prefix without prefix"
|
||||
|
||||
- self.prefix = os.path.normpath(sys.prefix)
|
||||
- self.exec_prefix = os.path.normpath(sys.exec_prefix)
|
||||
+ # self.prefix is set to sys.prefix + /local/
|
||||
+ # if neither RPM build nor virtual environment is
|
||||
+ # detected to make pip and distutils install packages
|
||||
+ # into the separate location.
|
||||
+ if (not hasattr(sys, 'real_prefix') and
|
||||
+ 'RPM_BUILD_ROOT' not in os.environ):
|
||||
+ addition = "/local"
|
||||
+ else:
|
||||
+ addition = ""
|
||||
+
|
||||
+ self.prefix = os.path.normpath(sys.prefix) + addition
|
||||
+ self.exec_prefix = os.path.normpath(sys.exec_prefix) + addition
|
||||
|
||||
else:
|
||||
if self.exec_prefix is None:
|
||||
Index: Python-2.7.17/Lib/site.py
|
||||
===================================================================
|
||||
--- Python-2.7.17.orig/Lib/site.py
|
||||
+++ Python-2.7.17/Lib/site.py
|
||||
@@ -291,6 +291,10 @@ def getsitepackages():
|
||||
sitepackages = []
|
||||
seen = set()
|
||||
|
||||
+ # '/usr/local' is included in PREFIXES if RPM build is not detected
|
||||
+ # to make packages installed into this location visible.
|
||||
+ if ENABLE_USER_SITE and 'RPM_BUILD_ROOT' not in os.environ:
|
||||
+ PREFIXES.insert(0, "/usr/local")
|
||||
for prefix in PREFIXES:
|
||||
if not prefix or prefix in seen:
|
||||
continue
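With this change (outside an RPM build and outside a virtualenv), the extra
``/usr/local`` prefix becomes visible in the site-packages search path; a
quick check on a patched interpreter:

    import site

    # '/usr/local/...' entries are listed first when RPM_BUILD_ROOT is not
    # set and no virtual environment is active.
    for path in site.getsitepackages():
        print(path)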
|
142
bpo34990-2038-problem-compileall.patch
Normal file
@ -0,0 +1,142 @@
|
|||
From 9d3b6b2472f7c7ef841e652825de652bc8af85d7 Mon Sep 17 00:00:00 2001
|
||||
From: "Miss Islington (bot)"
|
||||
<31488909+miss-islington@users.noreply.github.com>
|
||||
Date: Tue, 24 Aug 2021 08:07:31 -0700
|
||||
Subject: [PATCH] [3.9] bpo-34990: Treat the pyc header's mtime in compileall
|
||||
as an unsigned int (GH-19708)
|
||||
MIME-Version: 1.0
|
||||
Content-Type: text/plain; charset=UTF-8
|
||||
Content-Transfer-Encoding: 8bit
|
||||
|
||||
(cherry picked from commit bb21e28fd08f894ceff2405544a2f257d42b1354)
|
||||
|
||||
Co-authored-by: Ammar Askar <ammar@ammaraskar.com>
|
||||
Co-authored-by: Stéphane Wirtel <stephane@wirtel.be>
|
||||
|
||||
ported to python-2.7 by Bernhard M. Wiedemann <bwiedemann suse de>
|
||||
|
||||
diff --git a/Lib/compileall.py b/Lib/compileall.py
|
||||
index 5cfa8be..193147e 100644
|
||||
--- a/Lib/compileall.py
|
||||
+++ b/Lib/compileall.py
|
||||
@@ -85,7 +85,7 @@ def compile_file(fullname, ddir=None, force=0, rx=None, quiet=0):
|
||||
if not force:
|
||||
try:
|
||||
mtime = int(os.stat(fullname).st_mtime)
|
||||
- expect = struct.pack('<4sl', imp.get_magic(), mtime)
|
||||
+ expect = struct.pack('<4sL', imp.get_magic(), mtime & 0xFFFFFFFF)
|
||||
cfile = fullname + (__debug__ and 'c' or 'o')
|
||||
with open(cfile, 'rb') as chandle:
|
||||
actual = chandle.read(8)
|
||||
diff --git a/Lib/test/test_compileall.py b/Lib/test/test_compileall.py
|
||||
index d3a26db..0907f59 100644
|
||||
--- a/Lib/test/test_compileall.py
|
||||
+++ b/Lib/test/test_compileall.py
|
||||
@@ -28,7 +28,7 @@ class CompileallTests(unittest.TestCase):
|
||||
with open(self.bc_path, 'rb') as file:
|
||||
data = file.read(8)
|
||||
mtime = int(os.stat(self.source_path).st_mtime)
|
||||
- compare = struct.pack('<4sl', imp.get_magic(), mtime)
|
||||
+ compare = struct.pack('<4sL', imp.get_magic(), mtime & 0xFFFFFFFF)
|
||||
return data, compare
|
||||
|
||||
@unittest.skipUnless(hasattr(os, 'stat'), 'test needs os.stat()')
|
||||
@@ -48,7 +48,7 @@ class CompileallTests(unittest.TestCase):
|
||||
|
||||
def test_mtime(self):
|
||||
# Test a change in mtime leads to a new .pyc.
|
||||
- self.recreation_check(struct.pack('<4sl', imp.get_magic(), 1))
|
||||
+ self.recreation_check(struct.pack('<4sL', imp.get_magic(), 1))
|
||||
|
||||
def test_magic_number(self):
|
||||
# Test a change in mtime leads to a new .pyc.
|
||||
diff --git a/Lib/test/test_zipimport.py b/Lib/test/test_zipimport.py
|
||||
index a66738a..e333582 100644
|
||||
--- a/Lib/test/test_zipimport.py
|
||||
+++ b/Lib/test/test_zipimport.py
|
||||
@@ -27,13 +27,7 @@ raise_src = 'def do_raise(): raise TypeError\n'
|
||||
|
||||
def make_pyc(co, mtime):
|
||||
data = marshal.dumps(co)
|
||||
- if type(mtime) is type(0.0):
|
||||
- # Mac mtimes need a bit of special casing
|
||||
- if mtime < 0x7fffffff:
|
||||
- mtime = int(mtime)
|
||||
- else:
|
||||
- mtime = int(-0x100000000L + long(mtime))
|
||||
- pyc = imp.get_magic() + struct.pack("<i", int(mtime)) + data
|
||||
+ pyc = imp.get_magic() + struct.pack("<L", int(mtime) & 0xFFFFFFFF) + data
|
||||
return pyc
|
||||
|
||||
def module_path_to_dotted_name(path):
|
||||
@@ -184,6 +178,14 @@ class UncompressedZipImportTestCase(ImportHooksBaseTestCase):
|
||||
TESTMOD + pyc_ext: (NOW, badtime_pyc)}
|
||||
self.doTest(".py", files, TESTMOD)
|
||||
|
||||
+ def test2038MTime(self):
|
||||
+ # Make sure we can handle mtimes larger than what a 32-bit signed number
|
||||
+ # can hold.
|
||||
+ twenty_thirty_eight_pyc = make_pyc(test_co, 2**32 - 1)
|
||||
+ files = {TESTMOD + ".py": (NOW, test_src),
|
||||
+ TESTMOD + pyc_ext: (NOW, twenty_thirty_eight_pyc)}
|
||||
+ self.doTest(".py", files, TESTMOD)
|
||||
+
|
||||
def testPackage(self):
|
||||
packdir = TESTPACK + os.sep
|
||||
files = {packdir + "__init__" + pyc_ext: (NOW, test_pyc),
|
||||
|
||||
==========
|
||||
|
||||
Author: Bernhard M. Wiedemann <bwiedemann suse de>
|
||||
Date: 2022-09-13
|
||||
|
||||
More y2038 fixes that are only needed for python2.7
|
||||
|
||||
diff --git a/Lib/compiler/pycodegen.py b/Lib/compiler/pycodegen.py
|
||||
index 6515945..21d52bb 100644
|
||||
--- a/Lib/compiler/pycodegen.py
|
||||
+++ b/Lib/compiler/pycodegen.py
|
||||
@@ -128,7 +128,7 @@ class Module(AbstractCompileMode):
|
||||
# to indicate the type of the value. simplest way to get the
|
||||
# same effect is to call marshal and then skip the code.
|
||||
mtime = os.path.getmtime(self.filename)
|
||||
- mtime = struct.pack('<i', mtime)
|
||||
+ mtime = struct.pack('<L', mtime & 0xFFFFFFFF)
|
||||
return self.MAGIC + mtime
|
||||
|
||||
class LocalNameFinder:
|
||||
diff --git a/Lib/test/test_gzip.py b/Lib/test/test_gzip.py
|
||||
index cdb1af5..6344ef2 100644
|
||||
--- a/Lib/test/test_gzip.py
|
||||
+++ b/Lib/test/test_gzip.py
|
||||
@@ -265,7 +265,7 @@ class TestGzip(unittest.TestCase):
|
||||
self.assertEqual(flagsByte, '\x08') # only the FNAME flag is set
|
||||
|
||||
mtimeBytes = fRead.read(4)
|
||||
- self.assertEqual(mtimeBytes, struct.pack('<i', mtime)) # little-endian
|
||||
+ self.assertEqual(mtimeBytes, struct.pack('<L', mtime & 0xFFFFFFFF)) # little-endian
|
||||
|
||||
xflByte = fRead.read(1)
|
||||
self.assertEqual(xflByte, '\x02') # maximum compression
|
||||
diff --git a/Python/import.c b/Python/import.c
|
||||
index b79354b..3efd17f 100644
|
||||
--- a/Python/import.c
|
||||
+++ b/Python/import.c
|
||||
@@ -810,7 +810,7 @@ check_compiled_module(char *pathname, time_t mtime, char *cpathname)
|
||||
{
|
||||
FILE *fp;
|
||||
long magic;
|
||||
- long pyc_mtime;
|
||||
+ unsigned long pyc_mtime;
|
||||
|
||||
fp = fopen(cpathname, "rb");
|
||||
if (fp == NULL)
|
||||
@@ -823,7 +823,7 @@ check_compiled_module(char *pathname, time_t mtime, char *cpathname)
|
||||
return NULL;
|
||||
}
|
||||
pyc_mtime = PyMarshal_ReadLongFromFile(fp);
|
||||
- if (pyc_mtime != mtime) {
|
||||
+ if ((pyc_mtime&0xFFFFFFFF) != (((unsigned long)mtime)&0xFFFFFFFF)) {
|
||||
if (Py_VerboseFlag)
|
||||
PySys_WriteStderr("# %s has bad mtime\n", cpathname);
|
||||
fclose(fp);
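The core of the change is packing the mtime as an unsigned 32-bit field so
that post-2038 timestamps still compare equal; the same pattern in plain
Python (the file name is hypothetical):

    import imp
    import os
    import struct

    mtime = int(os.stat("spam.py").st_mtime)
    header = struct.pack('<4sL', imp.get_magic(), mtime & 0xFFFFFFFF)
    # '<4sL' (unsigned) accepts values >= 2**31, unlike the old '<4sl'.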
|
29
bpo36160-init-sysconfig_vars.patch
Normal file
@ -0,0 +1,29 @@
|
|||
From 603a4461e3c889b06a5d78e57594ebbc580f1c03 Mon Sep 17 00:00:00 2001
|
||||
From: Ivan Pozdeev <vano@mail.mipt.ru>
|
||||
Date: Fri, 1 Mar 2019 21:44:24 +0300
|
||||
Subject: [PATCH] Fix AttributeError on sysconfig._CONFIG_VARS.clear() if
|
||||
test_site is run separately
|
||||
|
||||
---
|
||||
Lib/test/test_site.py | 4 +++-
|
||||
.../next/Tests/2019-03-01-21-45-13.bpo-36160.4JjrqB.rst | 2 ++
|
||||
2 files changed, 5 insertions(+), 1 deletion(-)
|
||||
create mode 100644 Misc/NEWS.d/next/Tests/2019-03-01-21-45-13.bpo-36160.4JjrqB.rst
|
||||
|
||||
--- a/Lib/test/test_site.py
|
||||
+++ b/Lib/test/test_site.py
|
||||
@@ -47,6 +47,9 @@ def setUpModule():
|
||||
else:
|
||||
raise
|
||||
|
||||
+ # sysconfig._CONFIG_VARS is None until the first call to this function
|
||||
+ sysconfig.get_config_vars()
|
||||
+
|
||||
|
||||
def tearDownModule():
|
||||
sys.path[:] = OLD_SYS_PATH
|
||||
--- /dev/null
|
||||
+++ b/Misc/NEWS.d/next/Tests/2019-03-01-21-45-13.bpo-36160.4JjrqB.rst
|
||||
@@ -0,0 +1,2 @@
|
||||
+Fix AttributeError on sysconfig._CONFIG_VARS.clear() if test_site is run
|
||||
+separately
|
13
bpo36302-sort-module-sources.patch
Normal file
@ -0,0 +1,13 @@
|
|||
diff --git a/Lib/distutils/command/build_ext.py b/Lib/distutils/command/build_ext.py
|
||||
index 86a85c1..66bf0c2 100644
|
||||
--- a/Lib/distutils/command/build_ext.py
|
||||
+++ b/Lib/distutils/command/build_ext.py
|
||||
@@ -455,7 +455,7 @@ class build_ext (Command):
|
||||
("in 'ext_modules' option (extension '%s'), " +
|
||||
"'sources' must be present and must be " +
|
||||
"a list of source filenames") % ext.name
|
||||
- sources = list(sources)
|
||||
+ sources = sorted(sources)
|
||||
|
||||
ext_path = self.get_ext_fullpath(ext.name)
|
||||
depends = sources + ext.depends
|
11
configure_PYTHON_FOR_REGEN.patch
Normal file
@ -0,0 +1,11 @@
|
|||
--- a/configure.ac
|
||||
+++ b/configure.ac
|
||||
@@ -19,7 +19,7 @@ AC_SUBST(host)
|
||||
# pybuilddir.txt will be created by --generate-posix-vars in the Makefile
|
||||
rm -f pybuilddir.txt
|
||||
|
||||
-AC_CHECK_PROGS(PYTHON_FOR_REGEN, python$PACKAGE_VERSION python3 python, python3)
|
||||
+AC_CHECK_PROGS(PYTHON_FOR_REGEN, python$PACKAGE_VERSION python python2, python)
|
||||
AC_SUBST(PYTHON_FOR_REGEN)
|
||||
|
||||
if test "$cross_compiling" = yes; then
|
15
do-not-use-non-ascii-in-test_ssl.patch
Normal file
@ -0,0 +1,15 @@
|
|||
---
|
||||
Lib/test/test_ssl.py | 2 +-
|
||||
1 file changed, 1 insertion(+), 1 deletion(-)
|
||||
|
||||
--- a/Lib/test/test_ssl.py
|
||||
+++ b/Lib/test/test_ssl.py
|
||||
@@ -1052,7 +1052,7 @@ class ContextTests(unittest.TestCase):
|
||||
|
||||
|
||||
def test_load_dh_params(self):
|
||||
- filename = u'dhpäräm.pem'
|
||||
+ filename = u'dhparam.pem'
|
||||
fs_encoding = sys.getfilesystemencoding()
|
||||
try:
|
||||
filename.encode(fs_encoding)
|
35
idle.appdata.xml
Normal file
@ -0,0 +1,35 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
|
||||
<!-- Copyright 2017 Zbigniew Jędrzejewski-Szmek -->
|
||||
<application>
|
||||
<id type="desktop">idle.desktop</id>
|
||||
<name>IDLE</name>
|
||||
<metadata_licence>CC0</metadata_licence>
|
||||
<project_license>Python-2.0</project_license>
|
||||
<summary>Python Integrated Development and Learning Environment</summary>
|
||||
<description>
|
||||
<p>
|
||||
IDLE is Python’s Integrated Development and Learning Environment.
|
||||
The GUI is uniform between Windows, Unix, and Mac OS X.
|
||||
IDLE provides an easy way to start writing, running, and debugging
|
||||
Python code.
|
||||
</p>
|
||||
<p>
|
||||
IDLE is written in pure Python, and uses the tkinter GUI toolkit.
|
||||
It provides:
|
||||
</p>
|
||||
<ul>
|
||||
<li>a Python shell window (interactive interpreter) with colorizing of code input, output, and error messages,</li>
|
||||
<li>a multi-window text editor with multiple undo, Python colorizing, smart indent, call tips, auto completion, and other features,</li>
|
||||
<li>search within any window, replace within editor windows, and search through multiple files (grep),</li>
|
||||
<li>a debugger with persistent breakpoints, stepping, and viewing of global and local namespaces.</li>
|
||||
</ul>
|
||||
</description>
|
||||
<url type="homepage">https://docs.python.org/2.7/library/idle.html</url>
|
||||
<screenshots>
|
||||
<screenshot type="default">http://in.waw.pl/~zbyszek/fedora/idle3-appdata/idle3-main-window.png</screenshot>
|
||||
<screenshot>http://in.waw.pl/~zbyszek/fedora/idle3-appdata/idle3-class-browser.png</screenshot>
|
||||
<screenshot>http://in.waw.pl/~zbyszek/fedora/idle3-appdata/idle3-code-viewer.png</screenshot>
|
||||
</screenshots>
|
||||
<update_contact>zbyszek@in.waw.pl</update_contact>
|
||||
</application>
|
12
idle.desktop
Normal file
@ -0,0 +1,12 @@
|
|||
[Desktop Entry]
|
||||
Version=1.0
|
||||
Name=IDLE
|
||||
GenericName=Python IDE
|
||||
Comment=Python 2.7 Integrated Development and Learning Environment
|
||||
Exec=idle %F
|
||||
TryExec=idle
|
||||
Terminal=false
|
||||
Type=Application
|
||||
Icon=idle
|
||||
Categories=Development;IDE;
|
||||
MimeType=text/x-python;
|
30
openssl-111-middlebox-compat.patch
Normal file
@ -0,0 +1,30 @@
|
|||
From 4fa35e8b1ebb2a8e88ba7c4c9cd2a17b35638ee6 Mon Sep 17 00:00:00 2001
|
||||
From: Dimitri John Ledkov <xnox@ubuntu.com>
|
||||
Date: Fri, 28 Sep 2018 16:34:16 +0100
|
||||
Subject: [PATCH] bpo-34834: Fix test_ssl.test_options to account for
|
||||
OP_ENABLE_MIDDLEBOX_COMPAT.
|
||||
|
||||
Signed-off-by: Dimitri John Ledkov <xnox@ubuntu.com>
|
||||
|
||||
https://bugs.python.org/issue34834
|
||||
---
|
||||
Lib/test/test_ssl.py | 8 ++++++--
|
||||
1 file changed, 6 insertions(+), 2 deletions(-)
|
||||
|
||||
--- a/Lib/test/test_ssl.py
|
||||
+++ b/Lib/test/test_ssl.py
|
||||
@@ -838,8 +838,12 @@ class ContextTests(unittest.TestCase):
|
||||
default = (ssl.OP_ALL | ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3)
|
||||
# SSLContext also enables these by default
|
||||
default |= (OP_NO_COMPRESSION | OP_CIPHER_SERVER_PREFERENCE |
|
||||
- OP_SINGLE_DH_USE | OP_SINGLE_ECDH_USE |
|
||||
- OP_ENABLE_MIDDLEBOX_COMPAT)
|
||||
+ ssl.OP_SINGLE_DH_USE | ssl.OP_SINGLE_ECDH_USE)
|
||||
+ if not IS_LIBRESSL and ssl.OPENSSL_VERSION_INFO >= (1, 1, 1):
|
||||
+ # define MIDDLEBOX constant, as python2.7 does not know about it
|
||||
+ # but it is used by default.
|
||||
+ OP_ENABLE_MIDDLEBOX_COMPAT = 1048576L
|
||||
+ default |= OP_ENABLE_MIDDLEBOX_COMPAT
|
||||
self.assertEqual(default, ctx.options)
|
||||
ctx.options |= ssl.OP_NO_TLSv1
|
||||
self.assertEqual(default | ssl.OP_NO_TLSv1, ctx.options)
|
21
openssl-111-ssl_options.patch
Normal file
@ -0,0 +1,21 @@
|
|||
---
|
||||
Lib/test/test_ssl.py | 8 ++------
|
||||
1 file changed, 2 insertions(+), 6 deletions(-)
|
||||
|
||||
--- a/Lib/test/test_ssl.py
|
||||
+++ b/Lib/test/test_ssl.py
|
||||
@@ -838,12 +838,8 @@ class ContextTests(unittest.TestCase):
|
||||
default = (ssl.OP_ALL | ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3)
|
||||
# SSLContext also enables these by default
|
||||
default |= (OP_NO_COMPRESSION | OP_CIPHER_SERVER_PREFERENCE |
|
||||
- ssl.OP_SINGLE_DH_USE | ssl.OP_SINGLE_ECDH_USE)
|
||||
- if not IS_LIBRESSL and ssl.OPENSSL_VERSION_INFO >= (1, 1, 1):
|
||||
- # define MIDDLEBOX constant, as python2.7 does not know about it
|
||||
- # but it is used by default.
|
||||
- OP_ENABLE_MIDDLEBOX_COMPAT = 1048576L
|
||||
- default |= OP_ENABLE_MIDDLEBOX_COMPAT
|
||||
+ OP_SINGLE_DH_USE | OP_SINGLE_ECDH_USE |
|
||||
+ OP_ENABLE_MIDDLEBOX_COMPAT)
|
||||
self.assertEqual(default, ctx.options)
|
||||
ctx.options |= ssl.OP_NO_TLSv1
|
||||
self.assertEqual(default | ssl.OP_NO_TLSv1, ctx.options)
|
12
python-2.5.1-sqlite.patch
Normal file
@ -0,0 +1,12 @@
|
|||
--- a/Modules/_sqlite/cursor.c
|
||||
+++ b/Modules/_sqlite/cursor.c
|
||||
@@ -829,6 +829,9 @@ PyObject* pysqlite_cursor_executescript(
|
||||
goto error;
|
||||
}
|
||||
|
||||
+ if (! statement)
|
||||
+ break;
|
||||
+
|
||||
/* execute statement, and ignore results of SELECT statements */
|
||||
rc = SQLITE_ROW;
|
||||
while (rc == SQLITE_ROW) {
|
14
python-2.6-gettext-plurals.patch
Normal file
@ -0,0 +1,14 @@
|
|||
--- a/Lib/gettext.py
|
||||
+++ b/Lib/gettext.py
|
||||
@@ -387,8 +387,9 @@ class GNUTranslations(NullTranslations):
|
||||
self._charset = v.split('charset=')[1]
|
||||
elif k == 'plural-forms':
|
||||
v = v.split(';')
|
||||
- plural = v[1].split('plural=')[1]
|
||||
- self.plural = c2py(plural)
|
||||
+ if len(v) > 1:
|
||||
+ plural = v[1].split('plural=')[1]
|
||||
+ self.plural = c2py(plural)
|
||||
# Note: we unconditionally convert both msgids and msgstrs to
|
||||
# Unicode using the character encoding specified in the charset
|
||||
# parameter of the Content-Type header. The gettext documentation
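The guarded parsing, extracted for illustration with a hypothetical
Plural-Forms value that lacks a ``plural=`` clause:

    value = "nplurals=2"          # hypothetical header value, no "plural=" part
    v = value.split(';')
    if len(v) > 1:
        plural = v[1].split('plural=')[1]
    # previously v[1] was indexed unconditionally and raised IndexError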
|
11
python-2.6b3-curses-panel.patch
Normal file
@ -0,0 +1,11 @@
|
|||
--- a/Modules/_curses_panel.c
|
||||
+++ b/Modules/_curses_panel.c
|
||||
@@ -14,7 +14,7 @@
|
||||
|
||||
#include "py_curses.h"
|
||||
|
||||
-#include <panel.h>
|
||||
+#include <ncurses/panel.h>
|
||||
|
||||
static PyObject *PyCursesError;
|
||||
|
11
python-2.7-dirs.patch
Normal file
11
python-2.7-dirs.patch
Normal file
|
@ -0,0 +1,11 @@
|
|||
--- a/Makefile.pre.in
|
||||
+++ b/Makefile.pre.in
|
||||
@@ -110,7 +110,7 @@ LIBDIR= @libdir@
|
||||
MANDIR= @mandir@
|
||||
INCLUDEDIR= @includedir@
|
||||
CONFINCLUDEDIR= $(exec_prefix)/include
|
||||
-SCRIPTDIR= $(prefix)/lib
|
||||
+SCRIPTDIR= @libdir@
|
||||
|
||||
# Detailed destination directories
|
||||
BINLIBDEST= $(LIBDIR)/python$(VERSION)
|
13
python-2.7-libffi-aarch64.patch
Normal file
13
python-2.7-libffi-aarch64.patch
Normal file
|
@ -0,0 +1,13 @@
|
|||
Index: Python-2.7.9/Modules/_ctypes/libffi/src/aarch64/ffi.c
|
||||
===================================================================
|
||||
--- Python-2.7.9.orig/Modules/_ctypes/libffi/src/aarch64/ffi.c
|
||||
+++ Python-2.7.9/Modules/_ctypes/libffi/src/aarch64/ffi.c
|
||||
@@ -728,7 +728,7 @@ aarch64_prep_args (struct call_context *
|
||||
state.ngrn = N_X_ARG_REG;
|
||||
|
||||
memcpy (allocate_to_stack (&state, stack, ty->alignment,
|
||||
- ty->size), ecif->avalue + i, ty->size);
|
||||
+ ty->size), ecif->avalue[i], ty->size);
|
||||
}
|
||||
break;
|
||||
|
25
python-2.7.17-switch-off-failing-SSL-tests.patch
Normal file
25
python-2.7.17-switch-off-failing-SSL-tests.patch
Normal file
|
@ -0,0 +1,25 @@
|
|||
---
|
||||
Lib/test/test_ssl.py | 5 ++---
|
||||
1 file changed, 2 insertions(+), 3 deletions(-)
|
||||
|
||||
--- a/Lib/test/test_ssl.py
|
||||
+++ b/Lib/test/test_ssl.py
|
||||
@@ -152,9 +152,7 @@ def skip_if_broken_ubuntu_ssl(func):
|
||||
try:
|
||||
ssl.SSLContext(ssl.PROTOCOL_SSLv2)
|
||||
except ssl.SSLError:
|
||||
- if (ssl.OPENSSL_VERSION_INFO == (0, 9, 8, 15, 15) and
|
||||
- platform.linux_distribution() == ('debian', 'squeeze/sid', '')):
|
||||
- raise unittest.SkipTest("Patched Ubuntu OpenSSL breaks behaviour")
|
||||
+ raise unittest.SkipTest("Test fails on SLE-12")
|
||||
return func(*args, **kwargs)
|
||||
return f
|
||||
else:
|
||||
@@ -1280,6 +1278,7 @@ class ContextTests(unittest.TestCase):
|
||||
self.assertEqual(ctx.verify_mode, ssl.CERT_NONE)
|
||||
self._assert_context_options(ctx)
|
||||
|
||||
+ @unittest.skip("Test fails on SLE-12")
|
||||
def test__https_verify_certificates(self):
|
||||
# Unit test to check the contect factory mapping
|
||||
# The factories themselves are tested above
|
18
python-2.7.2-fix_date_time_compiler.patch
Normal file
18
python-2.7.2-fix_date_time_compiler.patch
Normal file
|
@ -0,0 +1,18 @@
|
|||
--- a/Makefile.pre.in
|
||||
+++ b/Makefile.pre.in
|
||||
@@ -647,8 +647,15 @@ Modules/getbuildinfo.o: $(PARSER_OBJS) \
|
||||
-DGITVERSION="\"`LC_ALL=C $(GITVERSION)`\"" \
|
||||
-DGITTAG="\"`LC_ALL=C $(GITTAG)`\"" \
|
||||
-DGITBRANCH="\"`LC_ALL=C $(GITBRANCH)`\"" \
|
||||
+ -DDATE="\"`LC_ALL=C date -u -r Makefile.pre.in +"%b %d %Y"`\"" \
|
||||
+ -DTIME="\"`LC_ALL=C date -u -r Makefile.pre.in +"%T"`\"" \
|
||||
-o $@ $(srcdir)/Modules/getbuildinfo.c
|
||||
|
||||
+Python/getcompiler.o: $(srcdir)/Python/getcompiler.c Makefile
|
||||
+ $(CC) -c $(PY_CFLAGS) \
|
||||
+ -DCOMPILER='"[GCC]"' \
|
||||
+ -o $@ $(srcdir)/Python/getcompiler.c
|
||||
+
|
||||
Modules/getpath.o: $(srcdir)/Modules/getpath.c Makefile
|
||||
$(CC) -c $(PY_CFLAGS) -DPYTHONPATH='"$(PYTHONPATH)"' \
|
||||
-DPREFIX='"$(prefix)"' \
|
85
python-2.7.4-canonicalize2.patch
Normal file
85
python-2.7.4-canonicalize2.patch
Normal file
|
@ -0,0 +1,85 @@
|
|||
---
|
||||
Python/sysmodule.c | 26 +++++++++++++++++++-------
|
||||
configure.ac | 2 +-
|
||||
pyconfig.h.in | 3 +++
|
||||
3 files changed, 23 insertions(+), 8 deletions(-)
|
||||
|
||||
--- a/Python/sysmodule.c
|
||||
+++ b/Python/sysmodule.c
|
||||
@@ -1638,7 +1638,20 @@ PySys_SetArgvEx(int argc, char **argv, i
|
||||
char *p = NULL;
|
||||
Py_ssize_t n = 0;
|
||||
PyObject *a;
|
||||
-#ifdef HAVE_READLINK
|
||||
+#ifdef HAVE_CANONICALIZE_FILE_NAME
|
||||
+ int errnum;
|
||||
+
|
||||
+ if (argc > 0 && argv0 != NULL && strcmp(argv0, "-c") != 0) {
|
||||
+ argv0 = canonicalize_file_name(argv0);
|
||||
+ if (argv0 == NULL) argv0 = strdup(argv[0]);
|
||||
+ }
|
||||
+#elif defined(HAVE_REALPATH)
|
||||
+ if (argc > 0 && argv0 != NULL && strcmp(argv0, "-c") != 0) {
|
||||
+ if (realpath(argv0, fullpath)) {
|
||||
+ argv0 = fullpath;
|
||||
+ }
|
||||
+ }
|
||||
+#elif defined(HAVE_READLINK)
|
||||
char link[MAXPATHLEN+1];
|
||||
char argv0copy[2*MAXPATHLEN+1];
|
||||
int nr = 0;
|
||||
@@ -1665,7 +1678,8 @@ PySys_SetArgvEx(int argc, char **argv, i
|
||||
}
|
||||
}
|
||||
}
|
||||
-#endif /* HAVE_READLINK */
|
||||
+#endif /* resolve method selection */
|
||||
+
|
||||
#if SEP == '\\' /* Special case for MS filename syntax */
|
||||
if (argc > 0 && argv0 != NULL && strcmp(argv0, "-c") != 0) {
|
||||
char *q;
|
||||
@@ -1694,11 +1708,6 @@ PySys_SetArgvEx(int argc, char **argv, i
|
||||
}
|
||||
#else /* All other filename syntaxes */
|
||||
if (argc > 0 && argv0 != NULL && strcmp(argv0, "-c") != 0) {
|
||||
-#if defined(HAVE_REALPATH)
|
||||
- if (realpath(argv0, fullpath)) {
|
||||
- argv0 = fullpath;
|
||||
- }
|
||||
-#endif
|
||||
p = strrchr(argv0, SEP);
|
||||
}
|
||||
if (p != NULL) {
|
||||
@@ -1716,6 +1725,9 @@ PySys_SetArgvEx(int argc, char **argv, i
|
||||
a = PyString_FromStringAndSize(argv0, n);
|
||||
if (a == NULL)
|
||||
Py_FatalError("no mem for sys.path insertion");
|
||||
+#ifdef HAVE_CANONICALIZE_FILE_NAME
|
||||
+ if (argc > 0 && argv0 != NULL && strcmp(argv0, "-c") != 0) free(argv0);
|
||||
+#endif /* HAVE_CANONICALIZE_FILE_NAME */
|
||||
if (PyList_Insert(path, 0, a) < 0)
|
||||
Py_FatalError("sys.path.insert(0) failed");
|
||||
Py_DECREF(a);
|
||||
--- a/configure.ac
|
||||
+++ b/configure.ac
|
||||
@@ -3165,7 +3165,7 @@ AC_CHECK_FUNCS(alarm setitimer getitimer
|
||||
getpriority getresuid getresgid getpwent getspnam getspent getsid getwd \
|
||||
initgroups kill killpg lchown lstat mkfifo mknod mktime mmap \
|
||||
mremap nice pathconf pause plock poll pthread_init \
|
||||
- putenv readlink realpath \
|
||||
+ putenv readlink realpath canonicalize_file_name \
|
||||
select sem_open sem_timedwait sem_getvalue sem_unlink setegid seteuid \
|
||||
setgid \
|
||||
setlocale setregid setreuid setsid setpgid setpgrp setuid setvbuf snprintf \
|
||||
--- a/pyconfig.h.in
|
||||
+++ b/pyconfig.h.in
|
||||
@@ -109,6 +109,9 @@
|
||||
/* Define to 1 if you have the 'chflags' function. */
|
||||
#undef HAVE_CHFLAGS
|
||||
|
||||
+/* Define to 1 if you have the `canonicalize_file_name' function. */
|
||||
+#undef HAVE_CANONICALIZE_FILE_NAME
|
||||
+
|
||||
/* Define to 1 if you have the `chown' function. */
|
||||
#undef HAVE_CHOWN
|
||||
|
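For reference, this patch prefers glibc's canonicalize_file_name() (falling back to realpath()) when computing the directory that PySys_SetArgvEx prepends to sys.path, so a script started through a symlink resolves to its real location. A rough Python equivalent of the resulting behaviour, offered as an illustration only and not code from the patch:

import os
import sys

# Resolve the script path the way canonicalize_file_name()/realpath() would,
# then take its directory, which is what ends up as sys.path[0].
argv0 = sys.argv[0]
if argv0 and argv0 != "-c":
    print(os.path.dirname(os.path.realpath(argv0)))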
python-2.7.5-multilib.patch (new file, 436 lines)
@@ -0,0 +1,436 @@
---
|
||||
Include/pythonrun.h | 2 +
|
||||
Lib/distutils/command/install.py | 4 ++-
|
||||
Lib/distutils/sysconfig.py | 7 ++++--
|
||||
Lib/pydoc.py | 2 -
|
||||
Lib/site.py | 40 +++++++++++++++++++++++++++------------
|
||||
Lib/sysconfig.py | 12 +++++------
|
||||
Lib/test/test_dl.py | 5 ++--
|
||||
Lib/test/test_site.py | 16 +++++++++++----
|
||||
Lib/trace.py | 4 +--
|
||||
Makefile.pre.in | 5 +++-
|
||||
Modules/getpath.c | 4 ++-
|
||||
Python/getplatform.c | 20 +++++++++++++++++++
|
||||
Python/sysmodule.c | 4 +++
|
||||
configure.ac | 35 ++++++++++++++++++++++++++++++++++
|
||||
setup.py | 15 ++++++--------
|
||||
15 files changed, 135 insertions(+), 40 deletions(-)
|
||||
|
||||
--- a/Include/pythonrun.h
|
||||
+++ b/Include/pythonrun.h
|
||||
@@ -108,6 +108,8 @@ PyAPI_FUNC(char *) Py_GetPath(void);
|
||||
/* In their own files */
|
||||
PyAPI_FUNC(const char *) Py_GetVersion(void);
|
||||
PyAPI_FUNC(const char *) Py_GetPlatform(void);
|
||||
+PyAPI_FUNC(const char *) Py_GetArch(void);
|
||||
+PyAPI_FUNC(const char *) Py_GetLib(void);
|
||||
PyAPI_FUNC(const char *) Py_GetCopyright(void);
|
||||
PyAPI_FUNC(const char *) Py_GetCompiler(void);
|
||||
PyAPI_FUNC(const char *) Py_GetBuildInfo(void);
|
||||
--- a/Lib/distutils/command/install.py
|
||||
+++ b/Lib/distutils/command/install.py
|
||||
@@ -22,6 +22,8 @@ from site import USER_BASE
|
||||
from site import USER_SITE
|
||||
|
||||
|
||||
+libname = sys.lib
|
||||
+
|
||||
if sys.version < "2.2":
|
||||
WINDOWS_SCHEME = {
|
||||
'purelib': '$base',
|
||||
@@ -42,7 +44,7 @@ else:
|
||||
INSTALL_SCHEMES = {
|
||||
'unix_prefix': {
|
||||
'purelib': '$base/lib/python$py_version_short/site-packages',
|
||||
- 'platlib': '$platbase/lib/python$py_version_short/site-packages',
|
||||
+ 'platlib': '$platbase/'+libname+'/python$py_version_short/site-packages',
|
||||
'headers': '$base/include/python$py_version_short/$dist_name',
|
||||
'scripts': '$base/bin',
|
||||
'data' : '$base',
|
||||
--- a/Lib/distutils/sysconfig.py
|
||||
+++ b/Lib/distutils/sysconfig.py
|
||||
@@ -129,8 +129,11 @@ def get_python_lib(plat_specific=0, stan
|
||||
prefix = plat_specific and EXEC_PREFIX or PREFIX
|
||||
|
||||
if os.name == "posix":
|
||||
- libpython = os.path.join(prefix,
|
||||
- "lib", "python" + get_python_version())
|
||||
+ if plat_specific or standard_lib:
|
||||
+ lib = sys.lib
|
||||
+ else:
|
||||
+ lib = "lib"
|
||||
+ libpython = os.path.join(prefix, lib, "python" + get_python_version())
|
||||
if standard_lib:
|
||||
return libpython
|
||||
else:
|
||||
--- a/Lib/pydoc.py
|
||||
+++ b/Lib/pydoc.py
|
||||
@@ -375,7 +375,7 @@ class Doc:
|
||||
docmodule = docclass = docroutine = docother = docproperty = docdata = fail
|
||||
|
||||
def getdocloc(self, object,
|
||||
- basedir=os.path.join(sys.exec_prefix, "lib",
|
||||
+ basedir=os.path.join(sys.exec_prefix, sys.lib,
|
||||
"python"+sys.version[0:3])):
|
||||
"""Return the location of module docs or None"""
|
||||
|
||||
--- a/Lib/site.py
|
||||
+++ b/Lib/site.py
|
||||
@@ -231,29 +231,38 @@ def getuserbase():
|
||||
USER_BASE = get_config_var('userbase')
|
||||
return USER_BASE
|
||||
|
||||
-def getusersitepackages():
|
||||
+def getusersitepackages(lib_kind = 'purelib'):
|
||||
"""Returns the user-specific site-packages directory path.
|
||||
|
||||
If the global variable ``USER_SITE`` is not initialized yet, this
|
||||
function will also set it.
|
||||
"""
|
||||
+
|
||||
+ set_user_site = (lib_kind == 'purelib')
|
||||
+
|
||||
global USER_SITE
|
||||
user_base = getuserbase() # this will also set USER_BASE
|
||||
|
||||
- if USER_SITE is not None:
|
||||
+ if USER_SITE is not None and set_user_site:
|
||||
return USER_SITE
|
||||
|
||||
from sysconfig import get_path
|
||||
import os
|
||||
|
||||
+ user_site = None
|
||||
+
|
||||
if sys.platform == 'darwin':
|
||||
from sysconfig import get_config_var
|
||||
if get_config_var('PYTHONFRAMEWORK'):
|
||||
- USER_SITE = get_path('purelib', 'osx_framework_user')
|
||||
- return USER_SITE
|
||||
+ user_site = get_path(lib_kind, 'osx_framework_user')
|
||||
|
||||
- USER_SITE = get_path('purelib', '%s_user' % os.name)
|
||||
- return USER_SITE
|
||||
+ if user_site is None:
|
||||
+ user_site = get_path(lib_kind, '%s_user' % os.name)
|
||||
+
|
||||
+ if set_user_site:
|
||||
+ USER_SITE = user_site
|
||||
+
|
||||
+ return user_site
|
||||
|
||||
def addusersitepackages(known_paths):
|
||||
"""Add a per user site-package to sys.path
|
||||
@@ -263,10 +272,12 @@ def addusersitepackages(known_paths):
|
||||
"""
|
||||
# get the per user site-package path
|
||||
# this call will also make sure USER_BASE and USER_SITE are set
|
||||
- user_site = getusersitepackages()
|
||||
+ for kind in ('purelib', 'platlib'):
|
||||
+ user_site = getusersitepackages(kind)
|
||||
+
|
||||
+ if ENABLE_USER_SITE and os.path.isdir(user_site):
|
||||
+ addsitedir(user_site, known_paths)
|
||||
|
||||
- if ENABLE_USER_SITE and os.path.isdir(user_site):
|
||||
- addsitedir(user_site, known_paths)
|
||||
return known_paths
|
||||
|
||||
def getsitepackages():
|
||||
@@ -288,13 +299,18 @@ def getsitepackages():
|
||||
if sys.platform in ('os2emx', 'riscos'):
|
||||
sitepackages.append(os.path.join(prefix, "Lib", "site-packages"))
|
||||
elif os.sep == '/':
|
||||
- sitepackages.append(os.path.join(prefix, "lib",
|
||||
+ sitepackages.append(os.path.join(prefix, sys.lib,
|
||||
"python" + sys.version[:3],
|
||||
"site-packages"))
|
||||
- sitepackages.append(os.path.join(prefix, "lib", "site-python"))
|
||||
+ sitepackages.append(os.path.join(prefix, sys.lib, "site-python"))
|
||||
+ if sys.lib != "lib":
|
||||
+ sitepackages.append(os.path.join(prefix, "lib",
|
||||
+ "python" + sys.version[:3],
|
||||
+ "site-packages"))
|
||||
+ sitepackages.append(os.path.join(prefix, "lib", "site-python"))
|
||||
else:
|
||||
sitepackages.append(prefix)
|
||||
- sitepackages.append(os.path.join(prefix, "lib", "site-packages"))
|
||||
+ sitepackages.append(os.path.join(prefix, sys.lib, "site-packages"))
|
||||
return sitepackages
|
||||
|
||||
def addsitepackages(known_paths):
|
||||
--- a/Lib/sysconfig.py
|
||||
+++ b/Lib/sysconfig.py
|
||||
@@ -7,10 +7,10 @@ from os.path import pardir, realpath
|
||||
|
||||
_INSTALL_SCHEMES = {
|
||||
'posix_prefix': {
|
||||
- 'stdlib': '{base}/lib/python{py_version_short}',
|
||||
- 'platstdlib': '{platbase}/lib/python{py_version_short}',
|
||||
+ 'stdlib': '{base}/'+sys.lib+'/python{py_version_short}',
|
||||
+ 'platstdlib': '{platbase}/'+sys.lib+'/python{py_version_short}',
|
||||
'purelib': '{base}/lib/python{py_version_short}/site-packages',
|
||||
- 'platlib': '{platbase}/lib/python{py_version_short}/site-packages',
|
||||
+ 'platlib': '{platbase}/'+sys.lib+'/python{py_version_short}/site-packages',
|
||||
'include': '{base}/include/python{py_version_short}',
|
||||
'platinclude': '{platbase}/include/python{py_version_short}',
|
||||
'scripts': '{base}/bin',
|
||||
@@ -65,10 +65,10 @@ _INSTALL_SCHEMES = {
|
||||
'data' : '{userbase}',
|
||||
},
|
||||
'posix_user': {
|
||||
- 'stdlib': '{userbase}/lib/python{py_version_short}',
|
||||
- 'platstdlib': '{userbase}/lib/python{py_version_short}',
|
||||
+ 'stdlib': '{userbase}/'+sys.lib+'/python{py_version_short}',
|
||||
+ 'platstdlib': '{userbase}/'+sys.lib+'/python{py_version_short}',
|
||||
'purelib': '{userbase}/lib/python{py_version_short}/site-packages',
|
||||
- 'platlib': '{userbase}/lib/python{py_version_short}/site-packages',
|
||||
+ 'platlib': '{userbase}/'+sys.lib+'/python{py_version_short}/site-packages',
|
||||
'include': '{userbase}/include/python{py_version_short}',
|
||||
'scripts': '{userbase}/bin',
|
||||
'data' : '{userbase}',
|
||||
--- a/Lib/test/test_dl.py
|
||||
+++ b/Lib/test/test_dl.py
|
||||
@@ -4,10 +4,11 @@
|
||||
import unittest
|
||||
from test.test_support import verbose, import_module
|
||||
dl = import_module('dl', deprecated=True)
|
||||
+import sys
|
||||
|
||||
sharedlibs = [
|
||||
- ('/usr/lib/libc.so', 'getpid'),
|
||||
- ('/lib/libc.so.6', 'getpid'),
|
||||
+ ('/usr/'+sys.lib+'/libc.so', 'getpid'),
|
||||
+ ('/'+sys.lib+'/libc.so.6', 'getpid'),
|
||||
('/usr/bin/cygwin1.dll', 'getpid'),
|
||||
('/usr/lib/libc.dylib', 'getpid'),
|
||||
]
|
||||
--- a/Lib/test/test_site.py
|
||||
+++ b/Lib/test/test_site.py
|
||||
@@ -254,12 +254,16 @@ class HelperFunctionsTests(unittest.Test
|
||||
self.assertEqual(dirs[0], wanted)
|
||||
elif os.sep == '/':
|
||||
# OS X, Linux, FreeBSD, etc
|
||||
- self.assertEqual(len(dirs), 2)
|
||||
wanted = os.path.join('xoxo', 'lib', 'python' + sys.version[:3],
|
||||
'site-packages')
|
||||
- self.assertEqual(dirs[0], wanted)
|
||||
+ self.assertTrue(wanted in dirs)
|
||||
wanted = os.path.join('xoxo', 'lib', 'site-python')
|
||||
- self.assertEqual(dirs[1], wanted)
|
||||
+ self.assertTrue(wanted in dirs)
|
||||
+ wanted = os.path.join('xoxo', sys.lib, 'python' + sys.version[:3],
|
||||
+ 'site-packages')
|
||||
+ self.assertTrue(wanted in dirs)
|
||||
+ wanted = os.path.join('xoxo', sys.lib, 'site-python')
|
||||
+ self.assertTrue(wanted in dirs)
|
||||
else:
|
||||
# other platforms
|
||||
self.assertEqual(len(dirs), 2)
|
||||
@@ -305,7 +309,11 @@ class HelperFunctionsTests(unittest.Test
|
||||
known_paths = set()
|
||||
site.addusersitepackages(known_paths)
|
||||
|
||||
- self.assertEqual(fake_isdir.arg, user_site)
|
||||
+ # value of user_site cannot be used for comparison, because
|
||||
+ # the following assert mistakenly assumes 'platlib' and
|
||||
+ # 'purelib' directories are same.
|
||||
+ self.assertEqual(fake_isdir.arg,
|
||||
+ site.getusersitepackages('platlib'))
|
||||
self.assertFalse(known_paths)
|
||||
|
||||
|
||||
--- a/Lib/trace.py
|
||||
+++ b/Lib/trace.py
|
||||
@@ -754,10 +754,10 @@ def main(argv=None):
|
||||
# should I also call expanduser? (after all, could use $HOME)
|
||||
|
||||
s = s.replace("$prefix",
|
||||
- os.path.join(sys.prefix, "lib",
|
||||
+ os.path.join(sys.prefix, sys.lib,
|
||||
"python" + sys.version[:3]))
|
||||
s = s.replace("$exec_prefix",
|
||||
- os.path.join(sys.exec_prefix, "lib",
|
||||
+ os.path.join(sys.exec_prefix, sys.lib,
|
||||
"python" + sys.version[:3]))
|
||||
s = os.path.normpath(s)
|
||||
ignore_dirs.append(s)
|
||||
--- a/Makefile.pre.in
|
||||
+++ b/Makefile.pre.in
|
||||
@@ -91,6 +91,8 @@ PY_CFLAGS= $(CFLAGS) $(CPPFLAGS) $(CFLAG
|
||||
|
||||
# Machine-dependent subdirectories
|
||||
MACHDEP= @MACHDEP@
|
||||
+LIB= @LIB@
|
||||
+ARCH= @ARCH@
|
||||
|
||||
# Multiarch directory (may be empty)
|
||||
MULTIARCH= @MULTIARCH@
|
||||
@@ -653,6 +655,7 @@ Modules/getpath.o: $(srcdir)/Modules/get
|
||||
-DEXEC_PREFIX='"$(exec_prefix)"' \
|
||||
-DVERSION='"$(VERSION)"' \
|
||||
-DVPATH='"$(VPATH)"' \
|
||||
+ -DARCH='"$(ARCH)"' -DLIB='"$(LIB)"' \
|
||||
-o $@ $(srcdir)/Modules/getpath.c
|
||||
|
||||
Modules/python.o: $(srcdir)/Modules/python.c
|
||||
@@ -701,7 +704,7 @@ regen-ast:
|
||||
Python/compile.o Python/symtable.o Python/ast.o: $(srcdir)/Include/graminit.h $(srcdir)/Include/Python-ast.h
|
||||
|
||||
Python/getplatform.o: $(srcdir)/Python/getplatform.c
|
||||
- $(CC) -c $(PY_CFLAGS) -DPLATFORM='"$(MACHDEP)"' -o $@ $(srcdir)/Python/getplatform.c
|
||||
+ $(CC) -c $(PY_CFLAGS) -DPLATFORM='"$(MACHDEP)"' -DARCH='"$(ARCH)"' -DLIB='"$(LIB)"' -o $@ $(srcdir)/Python/getplatform.c
|
||||
|
||||
Python/importdl.o: $(srcdir)/Python/importdl.c
|
||||
$(CC) -c $(PY_CFLAGS) -I$(DLINCLDIR) -o $@ $(srcdir)/Python/importdl.c
|
||||
--- a/Modules/getpath.c
|
||||
+++ b/Modules/getpath.c
|
||||
@@ -100,6 +100,8 @@
|
||||
#error "PREFIX, EXEC_PREFIX, VERSION, and VPATH must be constant defined"
|
||||
#endif
|
||||
|
||||
+#define LIB_PYTHON LIB "/python" VERSION
|
||||
+
|
||||
#ifndef LANDMARK
|
||||
#define LANDMARK "os.py"
|
||||
#endif
|
||||
@@ -108,7 +110,7 @@ static char prefix[MAXPATHLEN+1];
|
||||
static char exec_prefix[MAXPATHLEN+1];
|
||||
static char progpath[MAXPATHLEN+1];
|
||||
static char *module_search_path = NULL;
|
||||
-static char lib_python[] = "lib/python" VERSION;
|
||||
+static char lib_python[] = LIB_PYTHON;
|
||||
|
||||
static void
|
||||
reduce(char *dir)
|
||||
--- a/Python/getplatform.c
|
||||
+++ b/Python/getplatform.c
|
||||
@@ -10,3 +10,23 @@ Py_GetPlatform(void)
|
||||
{
|
||||
return PLATFORM;
|
||||
}
|
||||
+
|
||||
+#ifndef ARCH
|
||||
+#define ARCH "unknown"
|
||||
+#endif
|
||||
+
|
||||
+const char *
|
||||
+Py_GetArch(void)
|
||||
+{
|
||||
+ return ARCH;
|
||||
+}
|
||||
+
|
||||
+#ifndef LIB
|
||||
+#define LIB "lib"
|
||||
+#endif
|
||||
+
|
||||
+const char *
|
||||
+Py_GetLib(void)
|
||||
+{
|
||||
+ return LIB;
|
||||
+}
|
||||
--- a/Python/sysmodule.c
|
||||
+++ b/Python/sysmodule.c
|
||||
@@ -1437,6 +1437,10 @@ _PySys_Init(void)
|
||||
PyString_FromString(Py_GetCopyright()));
|
||||
SET_SYS_FROM_STRING("platform",
|
||||
PyString_FromString(Py_GetPlatform()));
|
||||
+ SET_SYS_FROM_STRING("arch",
|
||||
+ PyString_FromString(Py_GetArch()));
|
||||
+ SET_SYS_FROM_STRING("lib",
|
||||
+ PyString_FromString(Py_GetLib()));
|
||||
SET_SYS_FROM_STRING("executable",
|
||||
PyString_FromString(Py_GetProgramFullPath()));
|
||||
SET_SYS_FROM_STRING("prefix",
|
||||
--- a/configure.ac
|
||||
+++ b/configure.ac
|
||||
@@ -773,6 +773,41 @@ SunOS*)
|
||||
;;
|
||||
esac
|
||||
|
||||
+AC_SUBST(ARCH)
|
||||
+AC_MSG_CHECKING(ARCH)
|
||||
+ARCH=`uname -m`
|
||||
+case $ARCH in
|
||||
+i?86) ARCH=i386;;
|
||||
+esac
|
||||
+AC_MSG_RESULT($ARCH)
|
||||
+
|
||||
+AC_SUBST(LIB)
|
||||
+AC_MSG_CHECKING(LIB)
|
||||
+case $ac_sys_system in
|
||||
+Linux*)
|
||||
+ # Test if the compiler is 64bit
|
||||
+ echo 'int i;' > conftest.$ac_ext
|
||||
+ python_cv_cc_64bit_output=no
|
||||
+ if AC_TRY_EVAL(ac_compile); then
|
||||
+ case `/usr/bin/file conftest.$ac_objext` in
|
||||
+ *"ELF 64"*)
|
||||
+ python_cv_cc_64bit_output=yes
|
||||
+ ;;
|
||||
+ esac
|
||||
+ fi
|
||||
+ rm -rf conftest*
|
||||
+ ;;
|
||||
+esac
|
||||
+
|
||||
+case $ARCH:$python_cv_cc_64bit_output in
|
||||
+aarch64:yes | ppc64:yes | ppc64le:yes | powerpc64:yes | riscv64:yes | s390x:yes | sparc64:yes | x86_64:yes)
|
||||
+ LIB="lib64"
|
||||
+ ;;
|
||||
+*:*)
|
||||
+ LIB="lib"
|
||||
+ ;;
|
||||
+esac
|
||||
+AC_MSG_RESULT($LIB)
|
||||
|
||||
AC_SUBST(LIBRARY)
|
||||
AC_MSG_CHECKING(LIBRARY)
|
||||
--- a/setup.py
|
||||
+++ b/setup.py
|
||||
@@ -502,7 +502,7 @@ class PyBuildExt(build_ext):
|
||||
def detect_modules(self):
|
||||
# Ensure that /usr/local is always used
|
||||
if not cross_compiling:
|
||||
- add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib')
|
||||
+ add_dir_to_list(self.compiler.library_dirs, '/usr/local/' + sys.lib)
|
||||
add_dir_to_list(self.compiler.include_dirs, '/usr/local/include')
|
||||
if cross_compiling:
|
||||
self.add_gcc_paths()
|
||||
@@ -828,11 +828,11 @@ class PyBuildExt(build_ext):
|
||||
elif curses_library:
|
||||
readline_libs.append(curses_library)
|
||||
elif self.compiler.find_library_file(lib_dirs +
|
||||
- ['/usr/lib/termcap'],
|
||||
+ ['/usr/'+sys.lib+'/termcap'],
|
||||
'termcap'):
|
||||
readline_libs.append('termcap')
|
||||
exts.append( Extension('readline', ['readline.c'],
|
||||
- library_dirs=['/usr/lib/termcap'],
|
||||
+ library_dirs=['/usr/'+sys.lib+'/termcap'],
|
||||
extra_link_args=readline_extra_link_args,
|
||||
libraries=readline_libs) )
|
||||
else:
|
||||
@@ -1979,18 +1979,17 @@ class PyBuildExt(build_ext):
|
||||
# Check for various platform-specific directories
|
||||
if host_platform == 'sunos5':
|
||||
include_dirs.append('/usr/openwin/include')
|
||||
- added_lib_dirs.append('/usr/openwin/lib')
|
||||
+ added_lib_dirs.append('/usr/openwin/' + sys.lib)
|
||||
elif os.path.exists('/usr/X11R6/include'):
|
||||
include_dirs.append('/usr/X11R6/include')
|
||||
- added_lib_dirs.append('/usr/X11R6/lib64')
|
||||
- added_lib_dirs.append('/usr/X11R6/lib')
|
||||
+ added_lib_dirs.append('/usr/X11R6/' + sys.lib)
|
||||
elif os.path.exists('/usr/X11R5/include'):
|
||||
include_dirs.append('/usr/X11R5/include')
|
||||
- added_lib_dirs.append('/usr/X11R5/lib')
|
||||
+ added_lib_dirs.append('/usr/X11R5/' + sys.lib)
|
||||
else:
|
||||
# Assume default location for X11
|
||||
include_dirs.append('/usr/X11/include')
|
||||
- added_lib_dirs.append('/usr/X11/lib')
|
||||
+ added_lib_dirs.append('/usr/X11/' + sys.lib)
|
||||
|
||||
# If Cygwin, then verify that X is installed before proceeding
|
||||
if host_platform == 'cygwin':
|
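This patch introduces sys.lib (and sys.arch) so that pure-Python path construction can follow the distribution's lib/lib64 split; both attributes are SUSE additions from this patch, not upstream CPython API. A small sketch of how packaging code can use it while staying portable; the getattr fallback is an assumption so the snippet also runs on an unpatched interpreter:

import os
import sys

# sys.lib comes from this multilib patch; fall back to "lib" elsewhere.
libdir = getattr(sys, "lib", "lib")
platlib = os.path.join(sys.prefix, libdir,
                       "python" + sys.version[:3], "site-packages")
print(libdir)    # "lib64" on x86_64, ppc64, s390x, ... builds; "lib" otherwise
print(platlib)   # e.g. /usr/lib64/python2.7/site-packages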
python-2.7.9-sles-disable-verification-by-default.patch (new file, 25 lines)
@@ -0,0 +1,25 @@
---
Lib/ssl.py | 12 +++++++++++-
1 file changed, 11 insertions(+), 1 deletion(-)

--- a/Lib/ssl.py
+++ b/Lib/ssl.py
@@ -497,7 +497,17 @@ def _get_https_context_factory():
return _create_unverified_context
return create_default_context

-_create_default_https_context = _get_https_context_factory()
+try:
+ # load the TLS checks policy from separate package
+ import sle_tls_checks_policy as policy
+ if policy.get_policy:
+ _create_default_https_context = policy.get_policy()
+ else:
+ # empty policy file means simply enable strict verification
+ _create_default_https_context = _get_https_context_factory()
+except ImportError:
+ # policy not present, disable verification for backwards compatibility
+ _create_default_https_context = _create_unverified_context

# PEP 493: "private" API to configure HTTPS defaults without monkeypatching
def _https_verify_certificates(enable=True):
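The import hook above means the default HTTPS context factory is decided by an optional sle_tls_checks_policy module: if it is installed and returns a factory, that factory wins; if it is missing, the old non-verifying behaviour is kept for backwards compatibility. A minimal sketch of what such a policy module could look like; the real python-strict-tls-check package may differ:

# sle_tls_checks_policy.py -- hypothetical policy module layout
import ssl

def get_policy():
    # Returning the stdlib factory re-enables strict certificate and
    # hostname verification for HTTPS clients using the default context.
    return ssl.create_default_context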
python-2.7.9-ssl_ca_path.patch (new file, 19 lines)
@@ -0,0 +1,19 @@
--- a/Lib/ssl.py
+++ b/Lib/ssl.py
@@ -537,7 +537,15 @@ class SSLSocket(socket):
self._context = SSLContext(ssl_version)
self._context.verify_mode = cert_reqs
if ca_certs:
- self._context.load_verify_locations(ca_certs)
+ capath = None
+ cafile = None
+ if os.path.isdir(ca_certs):
+ capath = ca_certs
+ else:
+ cafile = ca_certs
+ self._context.load_verify_locations(cafile=cafile, capath=capath)
+ elif cert_reqs != CERT_NONE:
+ self._context.set_default_verify_paths()
if certfile:
self._context.load_cert_chain(certfile, keyfile)
if npn_protocols:
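With this change, ca_certs may name either a PEM bundle file or a c_rehash-style directory; the wrapper then passes cafile= or capath= accordingly, and falls back to the system default verify paths when verification is requested without ca_certs. A hedged usage sketch with an illustrative directory path:

import socket
import ssl

# A hashed certificate directory now works as ca_certs, not only a bundle file.
raw = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
tls = ssl.wrap_socket(raw,
                      cert_reqs=ssl.CERT_REQUIRED,
                      ca_certs="/etc/ssl/certs")  # directory, handled via capath=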
python-bsddb6.patch (new file, 395 lines)
@@ -0,0 +1,395 @@
From: Jan Engelhardt <jengelh@inai.de>
|
||||
Date: 2013-07-06 16:07:31.146616589 +0200
|
||||
|
||||
This patch was partially autogenerated:
|
||||
- copying python-bsddb3-6.0.0 sources into the python-2.7.5 tree
|
||||
- creating a diff -w against the unmodified python-2.7.5
|
||||
- stripped all hunks that pertained to module renaming
|
||||
- manually added db6 searching to setup.py
|
||||
|
||||
---
|
||||
Lib/bsddb/test/test_all.py | 15 +++--
|
||||
Lib/bsddb/test/test_misc.py | 5 +
|
||||
Lib/bsddb/test/test_replication.py | 25 +-------
|
||||
Modules/_bsddb.c | 108 +++++++++++++++++++++++++++++++++----
|
||||
Modules/bsddb.h | 2
|
||||
setup.py | 4 +
|
||||
6 files changed, 118 insertions(+), 41 deletions(-)
|
||||
|
||||
--- a/Lib/bsddb/test/test_all.py
|
||||
+++ b/Lib/bsddb/test/test_all.py
|
||||
@@ -74,8 +74,9 @@ if sys.version_info[0] >= 3 :
|
||||
key = key.decode(charset)
|
||||
return (key, value.decode(charset))
|
||||
|
||||
- def __next__(self) :
|
||||
- v = getattr(self._dbcursor, "next")()
|
||||
+ def __next__(self, flags=0, dlen=-1, doff=-1) :
|
||||
+ v = getattr(self._dbcursor, "next")(flags=flags, dlen=dlen,
|
||||
+ doff=doff)
|
||||
return self._fix(v)
|
||||
|
||||
next = __next__
|
||||
@@ -128,8 +129,8 @@ if sys.version_info[0] >= 3 :
|
||||
v = self._dbcursor.current(flags=flags, dlen=dlen, doff=doff)
|
||||
return self._fix(v)
|
||||
|
||||
- def first(self) :
|
||||
- v = self._dbcursor.first()
|
||||
+ def first(self, flags=0, dlen=-1, doff=-1) :
|
||||
+ v = self._dbcursor.first(flags=flags, dlen=dlen, doff=doff)
|
||||
return self._fix(v)
|
||||
|
||||
def pget(self, key=None, data=None, flags=0) :
|
||||
@@ -489,7 +490,11 @@ def print_versions():
|
||||
print 'py module: %s' % getattr(bsddb, "__file"+suffix)
|
||||
print 'extension module: %s' % getattr(bsddb, "__file"+suffix)
|
||||
|
||||
- print 'python version: %s' % sys.version
|
||||
+ print 'Test working dir: %s' % get_test_path_prefix()
|
||||
+ import platform
|
||||
+ print 'python version: %s %s' % \
|
||||
+ (sys.version.replace("\r", "").replace("\n", ""), \
|
||||
+ platform.architecture()[0])
|
||||
print 'My pid: %s' % os.getpid()
|
||||
print '-=' * 38
|
||||
|
||||
--- a/Lib/bsddb/test/test_misc.py
|
||||
+++ b/Lib/bsddb/test/test_misc.py
|
||||
@@ -46,8 +46,9 @@ class MiscTestCase(unittest.TestCase):
|
||||
d[repr(i)] = repr(100*i)
|
||||
db.close()
|
||||
db = hashopen(self.filename)
|
||||
- rp = repr(db)
|
||||
- self.assertEqual(rp, repr(d))
|
||||
+ rp = repr(sorted(db.items()))
|
||||
+ rd = repr(sorted(d.items()))
|
||||
+ self.assertEqual(rp, rd)
|
||||
db.close()
|
||||
|
||||
# http://sourceforge.net/tracker/index.php?func=detail&aid=1708868&group_id=13900&atid=313900
|
||||
--- a/Lib/bsddb/test/test_replication.py
|
||||
+++ b/Lib/bsddb/test/test_replication.py
|
||||
@@ -165,21 +165,10 @@ class DBReplicationManager(DBReplication
|
||||
# is not generated if the master has no new transactions.
|
||||
# This is solved in BDB 4.6 (#15542).
|
||||
import time
|
||||
- timeout = time.time()+60
|
||||
+ timeout = time.time()+10
|
||||
while (time.time()<timeout) and not (self.confirmed_master and self.client_startupdone) :
|
||||
time.sleep(0.02)
|
||||
- # self.client_startupdone does not always get set to True within
|
||||
- # the timeout. On windows this may be a deep issue, on other
|
||||
- # platforms it is likely just a timing issue, especially on slow
|
||||
- # virthost buildbots (see issue 3892 for more). Even though
|
||||
- # the timeout triggers, the rest of this test method usually passes
|
||||
- # (but not all of it always, see below). So we just note the
|
||||
- # timeout on stderr and keep soldering on.
|
||||
- if time.time()>timeout:
|
||||
- import sys
|
||||
- print >> sys.stderr, ("XXX: timeout happened before"
|
||||
- "startup was confirmed - see issue 3892")
|
||||
- startup_timeout = True
|
||||
+ self.assertTrue(time.time()<timeout)
|
||||
|
||||
d = self.dbenvMaster.repmgr_site_list()
|
||||
self.assertEqual(len(d), 1)
|
||||
@@ -237,14 +226,6 @@ class DBReplicationManager(DBReplication
|
||||
txn.commit()
|
||||
if v is None :
|
||||
time.sleep(0.02)
|
||||
- # If startup did not happen before the timeout above, then this test
|
||||
- # sometimes fails. This happens randomly, which causes buildbot
|
||||
- # instability, but all the other bsddb tests pass. Since bsddb3 in the
|
||||
- # stdlib is currently not getting active maintenance, and is gone in
|
||||
- # py3k, we just skip the end of the test in that case.
|
||||
- if time.time()>=timeout and startup_timeout:
|
||||
- self.skipTest("replication test skipped due to random failure, "
|
||||
- "see issue 3892")
|
||||
self.assertLess(time.time(), timeout)
|
||||
self.assertEqual("123", v)
|
||||
|
||||
@@ -375,7 +356,7 @@ class DBBaseReplication(DBReplication) :
|
||||
# is not generated if the master has no new transactions.
|
||||
# This is solved in BDB 4.6 (#15542).
|
||||
import time
|
||||
- timeout = time.time()+60
|
||||
+ timeout = time.time()+10
|
||||
while (time.time()<timeout) and not (self.confirmed_master and
|
||||
self.client_startupdone) :
|
||||
time.sleep(0.02)
|
||||
--- a/Modules/_bsddb.c
|
||||
+++ b/Modules/_bsddb.c
|
||||
@@ -124,10 +124,14 @@ typedef int Py_ssize_t;
|
||||
#define NUMBER_Check PyLong_Check
|
||||
#define NUMBER_AsLong PyLong_AsLong
|
||||
#define NUMBER_FromLong PyLong_FromLong
|
||||
+#define NUMBER_FromUnsignedLong PyLong_FromUnsignedLong
|
||||
#else
|
||||
#define NUMBER_Check PyInt_Check
|
||||
#define NUMBER_AsLong PyInt_AsLong
|
||||
#define NUMBER_FromLong PyInt_FromLong
|
||||
+#if (PY_VERSION_HEX >= 0x02050000)
|
||||
+#define NUMBER_FromUnsignedLong PyInt_FromSize_t
|
||||
+#endif
|
||||
#endif
|
||||
|
||||
#ifdef WITH_THREAD
|
||||
@@ -853,6 +857,18 @@ static void _addIntToDict(PyObject* dict
|
||||
Py_XDECREF(v);
|
||||
}
|
||||
|
||||
+#if (DBVER >= 60) && (PY_VERSION_HEX >= 0x02050000)
|
||||
+/* add an unsigned integer to a dictionary using the given name as a key */
|
||||
+static void _addUnsignedIntToDict(PyObject* dict, char *name, unsigned int value)
|
||||
+{
|
||||
+ PyObject* v = NUMBER_FromUnsignedLong((unsigned long) value);
|
||||
+ if (!v || PyDict_SetItemString(dict, name, v))
|
||||
+ PyErr_Clear();
|
||||
+
|
||||
+ Py_XDECREF(v);
|
||||
+}
|
||||
+#endif
|
||||
+
|
||||
/* The same, when the value is a time_t */
|
||||
static void _addTimeTToDict(PyObject* dict, char *name, time_t value)
|
||||
{
|
||||
@@ -2677,13 +2693,21 @@ _default_cmp(const DBT *leftKey,
|
||||
static int
|
||||
_db_compareCallback(DB* db,
|
||||
const DBT *leftKey,
|
||||
- const DBT *rightKey)
|
||||
+ const DBT *rightKey
|
||||
+#if (DBVER >= 60)
|
||||
+ , size_t *locp
|
||||
+#endif
|
||||
+ )
|
||||
{
|
||||
int res = 0;
|
||||
PyObject *args;
|
||||
PyObject *result = NULL;
|
||||
DBObject *self = (DBObject *)db->app_private;
|
||||
|
||||
+# if (DBVER >= 60)
|
||||
+ locp = NULL; /* As required by documentation */
|
||||
+#endif
|
||||
+
|
||||
if (self == NULL || self->btCompareCallback == NULL) {
|
||||
MYDB_BEGIN_BLOCK_THREADS;
|
||||
PyErr_SetString(PyExc_TypeError,
|
||||
@@ -2791,13 +2815,21 @@ DB_set_bt_compare(DBObject* self, PyObje
|
||||
static int
|
||||
_db_dupCompareCallback(DB* db,
|
||||
const DBT *leftKey,
|
||||
- const DBT *rightKey)
|
||||
+ const DBT *rightKey
|
||||
+#if (DBVER >= 60)
|
||||
+ , size_t *locp
|
||||
+#endif
|
||||
+ )
|
||||
{
|
||||
int res = 0;
|
||||
PyObject *args;
|
||||
PyObject *result = NULL;
|
||||
DBObject *self = (DBObject *)db->app_private;
|
||||
|
||||
+#if (DBVER >= 60)
|
||||
+ locp = NULL; /* As required by documentation */
|
||||
+#endif
|
||||
+
|
||||
if (self == NULL || self->dupCompareCallback == NULL) {
|
||||
MYDB_BEGIN_BLOCK_THREADS;
|
||||
PyErr_SetString(PyExc_TypeError,
|
||||
@@ -3576,13 +3608,14 @@ Py_ssize_t DB_length(PyObject* _self)
|
||||
err = self->db->stat(self->db, /*txnid*/ NULL, &sp, 0);
|
||||
MYDB_END_ALLOW_THREADS;
|
||||
|
||||
+ if (makeDBError(err)) {
|
||||
+ return -1;
|
||||
+ }
|
||||
+
|
||||
/* All the stat structures have matching fields upto the ndata field,
|
||||
so we can use any of them for the type cast */
|
||||
size = ((DB_BTREE_STAT*)sp)->bt_ndata;
|
||||
|
||||
- if (err)
|
||||
- return -1;
|
||||
-
|
||||
free(sp);
|
||||
return size;
|
||||
}
|
||||
@@ -8420,12 +8453,22 @@ static PyObject*
|
||||
DBSequence_get(DBSequenceObject* self, PyObject* args, PyObject* kwargs)
|
||||
{
|
||||
int err, flags = 0;
|
||||
+#if (DBVER >= 60)
|
||||
+ unsigned
|
||||
+#endif
|
||||
int delta = 1;
|
||||
db_seq_t value;
|
||||
PyObject *txnobj = NULL;
|
||||
DB_TXN *txn = NULL;
|
||||
static char* kwnames[] = {"delta", "txn", "flags", NULL };
|
||||
- if (!PyArg_ParseTupleAndKeywords(args, kwargs, "|iOi:get", kwnames, &delta, &txnobj, &flags))
|
||||
+
|
||||
+ if (!PyArg_ParseTupleAndKeywords(args, kwargs,
|
||||
+#if (DBVER >=60)
|
||||
+ "|IOi:get",
|
||||
+#else
|
||||
+ "|iOi:get",
|
||||
+#endif
|
||||
+ kwnames, &delta, &txnobj, &flags))
|
||||
return NULL;
|
||||
CHECK_SEQUENCE_NOT_CLOSED(self)
|
||||
|
||||
@@ -8555,8 +8598,19 @@ DBSequence_remove(DBSequenceObject* self
|
||||
static PyObject*
|
||||
DBSequence_set_cachesize(DBSequenceObject* self, PyObject* args)
|
||||
{
|
||||
- int err, size;
|
||||
- if (!PyArg_ParseTuple(args,"i:set_cachesize", &size))
|
||||
+ int err;
|
||||
+#if (DBVER >= 60)
|
||||
+ unsigned
|
||||
+#endif
|
||||
+ int size;
|
||||
+
|
||||
+ if (!PyArg_ParseTuple(args,
|
||||
+#if (DBVER >= 60)
|
||||
+ "I:set_cachesize",
|
||||
+#else
|
||||
+ "i:set_cachesize",
|
||||
+#endif
|
||||
+ &size))
|
||||
return NULL;
|
||||
CHECK_SEQUENCE_NOT_CLOSED(self)
|
||||
|
||||
@@ -8571,7 +8625,11 @@ DBSequence_set_cachesize(DBSequenceObjec
|
||||
static PyObject*
|
||||
DBSequence_get_cachesize(DBSequenceObject* self)
|
||||
{
|
||||
- int err, size;
|
||||
+ int err;
|
||||
+#if (DBVER >= 60)
|
||||
+ unsigned
|
||||
+#endif
|
||||
+ int size;
|
||||
|
||||
CHECK_SEQUENCE_NOT_CLOSED(self)
|
||||
|
||||
@@ -8700,6 +8758,9 @@ DBSequence_stat(DBSequenceObject* self,
|
||||
|
||||
|
||||
#define MAKE_INT_ENTRY(name) _addIntToDict(dict_stat, #name, sp->st_##name)
|
||||
+#if (DBVER >= 60) && (PY_VERSION_HEX >= 0x02050000)
|
||||
+#define MAKE_UNSIGNED_INT_ENTRY(name) _addUnsignedIntToDict(dict_stat, #name, sp->st_##name)
|
||||
+#endif
|
||||
#define MAKE_LONG_LONG_ENTRY(name) _addDb_seq_tToDict(dict_stat, #name, sp->st_##name)
|
||||
|
||||
MAKE_INT_ENTRY(wait);
|
||||
@@ -8709,10 +8770,15 @@ DBSequence_stat(DBSequenceObject* self,
|
||||
MAKE_LONG_LONG_ENTRY(last_value);
|
||||
MAKE_LONG_LONG_ENTRY(min);
|
||||
MAKE_LONG_LONG_ENTRY(max);
|
||||
+#if (DBVER >= 60) && (PY_VERSION_HEX >= 0x02050000)
|
||||
+ MAKE_UNSIGNED_INT_ENTRY(cache_size);
|
||||
+#else
|
||||
MAKE_INT_ENTRY(cache_size);
|
||||
+#endif
|
||||
MAKE_INT_ENTRY(flags);
|
||||
|
||||
#undef MAKE_INT_ENTRY
|
||||
+#undef MAKE_UNSIGNED_INT_ENTRY
|
||||
#undef MAKE_LONG_LONG_ENTRY
|
||||
|
||||
free(sp);
|
||||
@@ -9014,7 +9080,7 @@ static PyMethodDef DBEnv_methods[] = {
|
||||
{"txn_recover", (PyCFunction)DBEnv_txn_recover, METH_NOARGS},
|
||||
#if (DBVER < 48)
|
||||
{"set_rpc_server", (PyCFunction)DBEnv_set_rpc_server,
|
||||
- METH_VARARGS|METH_KEYWORDS},
|
||||
+ METH_VARARGS||METH_KEYWORDS},
|
||||
#endif
|
||||
{"set_mp_max_openfd", (PyCFunction)DBEnv_set_mp_max_openfd, METH_VARARGS},
|
||||
{"get_mp_max_openfd", (PyCFunction)DBEnv_get_mp_max_openfd, METH_NOARGS},
|
||||
@@ -9986,6 +10052,10 @@ PyMODINIT_FUNC PyInit__bsddb(void) /
|
||||
ADD_INT(d, DB_LOG_ZERO);
|
||||
#endif
|
||||
|
||||
+#if (DBVER >= 60)
|
||||
+ ADD_INT(d, DB_LOG_BLOB);
|
||||
+#endif
|
||||
+
|
||||
#if (DBVER >= 44)
|
||||
ADD_INT(d, DB_DSYNC_DB);
|
||||
#endif
|
||||
@@ -10046,6 +10116,10 @@ PyMODINIT_FUNC PyInit__bsddb(void) /
|
||||
ADD_INT(d, DB_EVENT_REG_PANIC);
|
||||
#endif
|
||||
|
||||
+#if (DBVER >= 60)
|
||||
+ ADD_INT(d, DB_EVENT_REP_AUTOTAKEOVER_FAILED);
|
||||
+#endif
|
||||
+
|
||||
#if (DBVER >=52)
|
||||
ADD_INT(d, DB_EVENT_REP_SITE_ADDED);
|
||||
ADD_INT(d, DB_EVENT_REP_SITE_REMOVED);
|
||||
@@ -10150,6 +10224,20 @@ PyMODINIT_FUNC PyInit__bsddb(void) /
|
||||
ADD_INT(d, DB_REP_CONF_INMEM);
|
||||
#endif
|
||||
|
||||
+#if (DBVER >= 60)
|
||||
+ ADD_INT(d, DB_REPMGR_ISVIEW);
|
||||
+#endif
|
||||
+
|
||||
+#if (DBVER >= 60)
|
||||
+ ADD_INT(d, DB_DBT_BLOB);
|
||||
+#endif
|
||||
+
|
||||
+#if (DBVER >= 60)
|
||||
+ ADD_INT(d, DB_STREAM_READ);
|
||||
+ ADD_INT(d, DB_STREAM_WRITE);
|
||||
+ ADD_INT(d, DB_STREAM_SYNC_WRITE);
|
||||
+#endif
|
||||
+
|
||||
ADD_INT(d, DB_TIMEOUT);
|
||||
|
||||
#if (DBVER >= 50)
|
||||
--- a/Modules/bsddb.h
|
||||
+++ b/Modules/bsddb.h
|
||||
@@ -110,7 +110,7 @@
|
||||
#error "eek! DBVER can't handle minor versions > 9"
|
||||
#endif
|
||||
|
||||
-#define PY_BSDDB_VERSION "5.3.0"
|
||||
+#define PY_BSDDB_VERSION "6.0.0"
|
||||
|
||||
/* Python object definitions */
|
||||
|
||||
--- a/setup.py
|
||||
+++ b/setup.py
|
||||
@@ -951,7 +951,7 @@ class PyBuildExt(build_ext):
|
||||
# a release. Most open source OSes come with one or more
|
||||
# versions of BerkeleyDB already installed.
|
||||
|
||||
- max_db_ver = (5, 3)
|
||||
+ max_db_ver = (6, 0)
|
||||
min_db_ver = (4, 3)
|
||||
db_setup_debug = False # verbose debug prints from this script?
|
||||
|
||||
@@ -991,6 +991,7 @@ class PyBuildExt(build_ext):
|
||||
# construct a list of paths to look for the header file in on
|
||||
# top of the normal inc_dirs.
|
||||
db_inc_paths = [
|
||||
+ '/usr/include/db6',
|
||||
'/usr/include/db4',
|
||||
'/usr/local/include/db4',
|
||||
'/opt/sfw/include/db4',
|
||||
@@ -1030,6 +1031,7 @@ class PyBuildExt(build_ext):
|
||||
for dn in inc_dirs:
|
||||
std_variants.append(os.path.join(dn, 'db3'))
|
||||
std_variants.append(os.path.join(dn, 'db4'))
|
||||
+ std_variants.append(os.path.join(dn, 'db6'))
|
||||
for x in gen_db_minor_ver_nums(4):
|
||||
std_variants.append(os.path.join(dn, "db4%d"%x))
|
||||
std_variants.append(os.path.join(dn, "db4.%d"%x))
|
python-bundle-lang.patch (new file, 23 lines)
@@ -0,0 +1,23 @@
Index: Python-2.7.13/Lib/gettext.py
===================================================================
--- Python-2.7.13.orig/Lib/gettext.py
+++ Python-2.7.13/Lib/gettext.py
@@ -58,6 +58,7 @@ __all__ = ['NullTranslations', 'GNUTrans
]

_default_localedir = os.path.join(sys.prefix, 'share', 'locale')
+_default_bundlelocaledir = os.path.join(sys.prefix, 'share', 'locale-bundle')

# Expression parsing for plural form selection.
#
@@ -496,6 +497,10 @@ class GNUTranslations(NullTranslations):

# Locate a .mo file using the gettext strategy
def find(domain, localedir=None, languages=None, all=0):
+ if localedir in [None, _default_localedir]:
+ bundle = find(domain, localedir=_default_bundlelocaledir, languages=languages, all=all)
+ if bundle:
+ return bundle
# Get some reasonable defaults for arguments that were not supplied
if localedir is None:
localedir = _default_localedir
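The effect is that translations installed under $prefix/share/locale-bundle are consulted first whenever the caller does not pass an explicit localedir. For example; the domain name below is a placeholder, and fallback=True keeps the call from raising if no catalog is installed:

import gettext

# Looks in share/locale-bundle first, then share/locale, because no explicit
# localedir is given.
t = gettext.translation("example-domain", languages=["de"], fallback=True)
print(t.gettext("Hello"))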
python-distutils-rpm-8.patch (new file, 82 lines)
@@ -0,0 +1,82 @@
--- a/Lib/distutils/command/install.py
|
||||
+++ b/Lib/distutils/command/install.py
|
||||
@@ -154,6 +154,8 @@
|
||||
|
||||
('record=', None,
|
||||
"filename in which to record list of installed files"),
|
||||
+ ('record-rpm=', None,
|
||||
+ "filename in which to record list of installed files and directories suitable as filelist for rpm"),
|
||||
]
|
||||
|
||||
boolean_options = ['compile', 'force', 'skip-build', 'user']
|
||||
@@ -229,6 +231,7 @@
|
||||
#self.install_info = None
|
||||
|
||||
self.record = None
|
||||
+ self.record_rpm = None
|
||||
|
||||
|
||||
# -- Option finalizing methods -------------------------------------
|
||||
@@ -578,12 +581,61 @@
|
||||
self.create_path_file()
|
||||
|
||||
# write list of installed files, if requested.
|
||||
- if self.record:
|
||||
+ if self.record or self.record_rpm:
|
||||
outputs = self.get_outputs()
|
||||
if self.root: # strip any package prefix
|
||||
root_len = len(self.root)
|
||||
for counter in xrange(len(outputs)):
|
||||
outputs[counter] = outputs[counter][root_len:]
|
||||
+ if self.record_rpm: # add directories
|
||||
+ self.record = self.record_rpm
|
||||
+ dirs = []
|
||||
+ # directories to reject:
|
||||
+ rejectdirs = [
|
||||
+ '/etc',
|
||||
+ '/',
|
||||
+ '',
|
||||
+ self.prefix,
|
||||
+ self.exec_prefix,
|
||||
+ self.install_base,
|
||||
+ self.install_platbase,
|
||||
+ self.install_purelib,
|
||||
+ self.install_platlib,
|
||||
+ self.install_headers[:len(self.install_headers) - len(self.distribution.get_name()) - 1],
|
||||
+ self.install_libbase,
|
||||
+ self.install_scripts,
|
||||
+ self.install_data,
|
||||
+ os.path.join(self.install_data, 'share'),
|
||||
+ os.path.join(self.install_data, 'share', 'doc'),
|
||||
+ ]
|
||||
+ # directories whose childs reject:
|
||||
+ rejectdirs2 = [
|
||||
+ os.path.join(self.install_data, 'share', 'man'),
|
||||
+ ]
|
||||
+ # directories whose grandsons reject:
|
||||
+ rejectdirs3 = [
|
||||
+ os.path.join(self.install_data, 'share', 'man'),
|
||||
+ os.path.join(self.install_data, 'share', 'locale'),
|
||||
+ ]
|
||||
+ for counter in xrange(len(rejectdirs)):
|
||||
+ if len(rejectdirs[counter]) > root_len:
|
||||
+ rejectdirs[counter] = rejectdirs[counter][root_len:]
|
||||
+ for counter in xrange(len(rejectdirs2)):
|
||||
+ if len(rejectdirs2[counter]) > root_len:
|
||||
+ rejectdirs2[counter] = rejectdirs2[counter][root_len:]
|
||||
+ for counter in xrange(len(rejectdirs3)):
|
||||
+ if len(rejectdirs3[counter]) > root_len:
|
||||
+ rejectdirs3[counter] = rejectdirs3[counter][root_len:]
|
||||
+ for counter in xrange(len(outputs)):
|
||||
+ directory = os.path.dirname(outputs[counter])
|
||||
+ while directory not in rejectdirs and \
|
||||
+ os.path.dirname(directory) not in rejectdirs2 and \
|
||||
+ os.path.dirname(os.path.dirname(directory)) not in rejectdirs3:
|
||||
+ dirname = '%dir ' + directory
|
||||
+ if dirname not in dirs:
|
||||
+ dirs.append(dirname)
|
||||
+ directory = os.path.dirname(directory)
|
||||
+ outputs += dirs
|
||||
self.execute(write_file,
|
||||
(self.record, outputs),
|
||||
"writing list of installed files to '%s'" %
|
python-skip_random_failing_tests.patch (new file, 50 lines)
@@ -0,0 +1,50 @@
---
|
||||
Lib/test/test_multiprocessing.py | 1 +
|
||||
Lib/test/test_subprocess.py | 1 +
|
||||
Lib/test/test_telnetlib.py | 2 ++
|
||||
3 files changed, 4 insertions(+)
|
||||
|
||||
Index: Python-2.7.14/Lib/test/test_subprocess.py
|
||||
===================================================================
|
||||
--- Python-2.7.14.orig/Lib/test/test_subprocess.py
|
||||
+++ Python-2.7.14/Lib/test/test_subprocess.py
|
||||
@@ -654,6 +654,7 @@ class ProcessTestCase(BaseTestCase):
|
||||
'ab ""')
|
||||
|
||||
|
||||
+ @unittest.skip("transient failure on PowerPC")
|
||||
def test_poll(self):
|
||||
p = subprocess.Popen([sys.executable,
|
||||
"-c", "import time; time.sleep(1)"])
|
||||
Index: Python-2.7.14/Lib/test/test_telnetlib.py
|
||||
===================================================================
|
||||
--- Python-2.7.14.orig/Lib/test/test_telnetlib.py
|
||||
+++ Python-2.7.14/Lib/test/test_telnetlib.py
|
||||
@@ -134,6 +134,7 @@ class ReadTests(TestCase):
|
||||
data = telnet.read_until('match')
|
||||
self.assertEqual(data, ''.join(want[:-2]))
|
||||
|
||||
+ @unittest.skip("transient failure on PowerPC")
|
||||
def test_read_until_B(self):
|
||||
# test the timeout - it does NOT raise socket.timeout
|
||||
want = ['hello', self.block_long, 'not seen', EOF_sigil]
|
||||
@@ -420,6 +421,7 @@ class ExpectTests(TestCase):
|
||||
(_,_,data) = telnet.expect(['match'])
|
||||
self.assertEqual(data, ''.join(want[:-2]))
|
||||
|
||||
+ @unittest.skip("transient failure on PowerPC")
|
||||
def test_expect_B(self):
|
||||
# test the timeout - it does NOT raise socket.timeout
|
||||
want = ['hello', self.block_long, 'not seen', EOF_sigil]
|
||||
Index: Python-2.7.14/Lib/test/test_multiprocessing.py
|
||||
===================================================================
|
||||
--- Python-2.7.14.orig/Lib/test/test_multiprocessing.py
|
||||
+++ Python-2.7.14/Lib/test/test_multiprocessing.py
|
||||
@@ -1219,6 +1219,7 @@ class _TestPool(BaseTestCase):
|
||||
self.assertEqual(get(), 49)
|
||||
self.assertTimingAlmostEqual(get.elapsed, TIMEOUT1)
|
||||
|
||||
+ @unittest.skip("transient failure on PowerPC")
|
||||
def test_async_timeout(self):
|
||||
res = self.pool.apply_async(sqr, (6, TIMEOUT2 + 1.0))
|
||||
get = TimingWrapper(res.get)
|
python-sorted_tar.patch (new file, 21 lines)
@@ -0,0 +1,21 @@
commit 6936e36efcc0a75c7d5e67b949c9749d61fa5ead
Author: Bernhard M. Wiedemann <bwiedemann@suse.de>
Date: Sun Jun 18 03:50:25 2017 +0200

tarfile: sort directory listing

to generate tar files in a more reproducible way

diff --git a/Lib/tarfile.py b/Lib/tarfile.py
index adf91d5382..36f6ed7167 100644
--- a/Lib/tarfile.py
+++ b/Lib/tarfile.py
@@ -2027,7 +2027,7 @@ class TarFile(object):
elif tarinfo.isdir():
self.addfile(tarinfo)
if recursive:
- for f in os.listdir(name):
+ for f in sorted(os.listdir(name)):
self.add(os.path.join(name, f), os.path.join(arcname, f),
recursive, exclude, filter)
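Sorting the directory listing makes the member order of archives created with recursive add() independent of filesystem readdir order, one prerequisite for bit-identical rebuilds. A minimal demonstration; the paths are placeholders, and metadata such as mtimes would still need normalising for fully reproducible tarballs:

import tarfile

# With the sorted listdir() fix, two builds of the same tree emit members in
# the same order.
tar = tarfile.open("example.tar", "w")
tar.add("example-tree")   # recursive by default
tar.close()
print([m.name for m in tarfile.open("example.tar").getmembers()])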
python.changes (new file, 1514 lines; diff suppressed because it is too large)
python.spec (new file, 623 lines)
@@ -0,0 +1,623 @@
#
|
||||
# spec file for package python
|
||||
#
|
||||
# Copyright (c) 2022-2023 ZhuningOS
|
||||
#
|
||||
|
||||
Name: python
|
||||
Version: 2.7.18
|
||||
Release: 150000.57.1
|
||||
Summary: Python Interpreter
|
||||
License: Python-2.0
|
||||
Group: Development/Languages/Python
|
||||
URL: https://www.python.org/
|
||||
%define tarversion %{version}
|
||||
%define tarname Python-%{tarversion}
|
||||
Source0: https://www.python.org/ftp/python/%{version}/%{tarname}.tar.xz
|
||||
Source1: README.SUSE
|
||||
Source8: sle_tls_checks_policy.py
|
||||
#Source11: testfiles.tar.bz2
|
||||
Source50: idle.appdata.xml
|
||||
Source51: idle.desktop
|
||||
# issues with copyrighted Unicode testing files
|
||||
# For Patch 66
|
||||
Source66: recursion.tar
|
||||
|
||||
# !!!!!!!!!!!!!!
|
||||
# do not add or edit patches here. please edit python-base.spec
|
||||
# instead and run pre_checkin.sh
|
||||
# !!!!!!!!!!!!!!
|
||||
# COMMON-PATCH-BEGIN
|
||||
Patch1: python-2.7-dirs.patch
|
||||
Patch2: python-distutils-rpm-8.patch
|
||||
Patch3: python-2.7.5-multilib.patch
|
||||
Patch4: python-2.5.1-sqlite.patch
|
||||
Patch5: python-2.7.4-canonicalize2.patch
|
||||
Patch7: python-2.6-gettext-plurals.patch
|
||||
Patch8: python-2.6b3-curses-panel.patch
|
||||
Patch10: sparc_longdouble.patch
|
||||
Patch13: python-2.7.2-fix_date_time_compiler.patch
|
||||
Patch17: remove-static-libpython.patch
|
||||
# PATCH-FEATURE-OPENSUSE python-bundle-lang.patch bnc#617751 dimstar@opensuse.org -- gettext: when looking in default_localedir also check in locale-bundle.
|
||||
Patch20: python-bundle-lang.patch
|
||||
# PATCH-FIX-UPSTREAM Fix argument passing in libffi for aarch64
|
||||
Patch22: python-2.7-libffi-aarch64.patch
|
||||
Patch24: python-bsddb6.patch
|
||||
# PATCH-FIX-UPSTREAM accept directory-based CA paths as well
|
||||
Patch33: python-2.7.9-ssl_ca_path.patch
|
||||
# PATCH-FEATURE-SLE disable SSL verification-by-default in http clients
|
||||
Patch34: python-2.7.9-sles-disable-verification-by-default.patch
|
||||
# PATCH-FIX-UPSTREAM do not use non-ASCII filename in test_ssl.py
|
||||
Patch35: do-not-use-non-ascii-in-test_ssl.patch
|
||||
# PATCH-FIX-UPSTREAM bmwiedemann@suse.de -- allow python packages to build reproducibly
|
||||
Patch38: reproducible.patch
|
||||
# bypass boo#1078485 random failing tests
|
||||
Patch40: python-skip_random_failing_tests.patch
|
||||
# PATCH-FIX-UPSTREAM sorted tar https://github.com/python/cpython/pull/2263
|
||||
Patch41: python-sorted_tar.patch
|
||||
# https://github.com/python/cpython/pull/9624 (https://bugs.python.org/issue34834)
|
||||
Patch47: openssl-111-middlebox-compat.patch
|
||||
# PATCH-FIX-SUSE python default SSLContext doesn't contain OP_CIPHER_SERVER_PREFERENCE
|
||||
Patch48: openssl-111-ssl_options.patch
|
||||
# PATCH-FIX-UPSTREAM CVE-2019-5010-null-defer-x509-cert-DOS.patch bnc#1122191 mcepl@suse.com
|
||||
# gh#python/cpython#11569
|
||||
# Fix segfault in ssl's cert parser
|
||||
Patch49: CVE-2019-5010-null-defer-x509-cert-DOS.patch
|
||||
# PATCH-FIX-UPSTREAM bpo36160-init-sysconfig_vars.patch gh#python/cpython#12131 mcepl@suse.com
|
||||
# Initialize sysconfig variables in test_site.
|
||||
Patch50: bpo36160-init-sysconfig_vars.patch
|
||||
# PATCH-FIX-UPSTREAM CVE-2017-18207.patch gh#python/cpython#4437 psimons@suse.com
|
||||
# Add check for channels of wav file in Lib/wave.py
|
||||
Patch51: CVE-2017-18207.patch
|
||||
# PATCH-FIX-UPSTREAM gh#python/cpython#12341
|
||||
Patch55: bpo36302-sort-module-sources.patch
|
||||
# Fix installation in /usr/local (boo#1071941), adapted from Fedora
|
||||
# https://src.fedoraproject.org/rpms/python3/blob/master/f/00251-change-user-install-location.patch
|
||||
# Set values of prefix and exec_prefix in distutils install command
|
||||
# to /usr/local if executable is /usr/bin/python* and RPM build
|
||||
# is not detected to make pip and distutils install into separate location
|
||||
Patch56: adapted-from-F00251-change-user-install-location.patch
|
||||
# Switch off a couple of tests failing on ancient SLE-12
|
||||
Patch57: python-2.7.17-switch-off-failing-SSL-tests.patch
|
||||
# PATCH-FIX-UPSTREAM CVE-2020-8492-urllib-ReDoS.patch bsc#1162367 mcepl@suse.com
|
||||
# Fixes Python urrlib allowed an HTTP server to conduct Regular
|
||||
# Expression Denial of Service (ReDoS)
|
||||
Patch58: CVE-2020-8492-urllib-ReDoS.patch
|
||||
# PATCH-FIX-UPSTREAM CVE-2019-9674-zip-bomb.patch bsc#1162825 mcepl@suse.com
|
||||
# Improve documentation warning against the possible zip bombs
|
||||
Patch59: CVE-2019-9674-zip-bomb.patch
|
||||
# PATCH-FIX-UPSTREAM configure_PYTHON_FOR_REGEN.patch bsc#1078326 mcepl@suse.com
|
||||
# PYTHON_FOR_REGEN value is set very weird upstream
|
||||
Patch60: configure_PYTHON_FOR_REGEN.patch
|
||||
# PATCH-FIX-SLE CVE-2021-3177-buf_ovrfl_PyCArg_repr.patch bsc#1181126 mcepl@suse.com
|
||||
# buffer overflow in PyCArg_repr in _ctypes/callproc.c, which may lead to remote code execution
|
||||
Patch61: CVE-2021-3177-buf_ovrfl_PyCArg_repr.patch
|
||||
# PATCH-FIX-UPSTREAM CVE-2021-23336-only-amp-as-query-sep.patch bsc#[0-9]+ mcepl@suse.com
|
||||
# only use & (not ;) as the query string separator to avoid web cache poisoning
|
||||
Patch62: CVE-2021-23336-only-amp-as-query-sep.patch
|
||||
# PATCH-FIX-UPSTREAM CVE-2021-3737-fix-HTTP-client-infinite-line-reading-after-a-HTTP-100-Continue.patch boo#1189241 gh#python/cpython#25916
|
||||
Patch63: CVE-2021-3737-fix-HTTP-client-infinite-line-reading-after-a-HTTP-100-Continue.patch
|
||||
# PATCH-FIX-UPSTREAM CVE-2021-3733-fix-ReDoS-in-request.patch boo#1189287 gh#python/cpython#24391
|
||||
Patch64: CVE-2021-3733-fix-ReDoS-in-request.patch
|
||||
# PATCH-FIX-UPSTREAM sphinx-update-removed-function.patch bpo#35293 gh#python/cpython#22198 -- fix doc build
|
||||
Patch65: sphinx-update-removed-function.patch
|
||||
# PATCH-FIX-UPSTREAM CVE-2019-20907_tarfile-inf-loop.patch bsc#1174091 mcepl@suse.com
|
||||
# avoid possible infinite loop in specifically crafted tarball (CVE-2019-20907)
|
||||
# REQUIRES SOURCE 66
|
||||
Patch66: CVE-2019-20907_tarfile-inf-loop.patch
|
||||
# PATCH-FIX-UPSTREAM CVE-2020-26116-httplib-header-injection.patch bsc#1177211
|
||||
# Fixes httplib to disallow control characters in method to avoid header
|
||||
# injection
|
||||
Patch67: CVE-2020-26116-httplib-header-injection.patch
|
||||
# PATCH-FIX-UPSTREAM CVE-2021-4189-ftplib-trust-PASV-resp.patch bsc#1194146 mcepl@suse.com
|
||||
# Make ftplib not trust the PASV response. (gh#python/cpython#24838)
|
||||
Patch68: CVE-2021-4189-ftplib-trust-PASV-resp.patch
|
||||
# PATCH-FIX-UPSTREAM CVE-2022-0391-urllib_parse-newline-parsing.patch bsc#1195396 mcepl@suse.com
|
||||
# whole long discussion is on bpo#43882
|
||||
# fix for santization URLs containing ASCII newline and tabs in urllib.parse
|
||||
Patch69: CVE-2022-0391-urllib_parse-newline-parsing.patch
|
||||
# PATCH-FIX-UPSTREAM CVE-2015-20107-mailcap-unsafe-filenames.patch bsc#1198511 mcepl@suse.com
|
||||
# avoid the command injection in the mailcap module.
|
||||
Patch70: CVE-2015-20107-mailcap-unsafe-filenames.patch
|
||||
# PATCH-FIX-UPSTREAM CVE-2021-28861 bsc#1202624
|
||||
# Coerce // to / in Lib/BaseHTTPServer.py
|
||||
Patch71: CVE-2021-28861-double-slash-path.patch
|
||||
Patch72: bpo34990-2038-problem-compileall.patch
|
||||
# PATCH-FIX-UPSTREAM CVE-2022-45061-DoS-by-IDNA-decode.patch bsc#1205244 mcepl@suse.com
|
||||
# Avoid DoS by decoding IDNA for too long domain names
|
||||
Patch73: CVE-2022-45061-DoS-by-IDNA-decode.patch
|
||||
# PATCH-FIX-UPSTREAM skip_unverified_test.patch mcepl@suse.com
|
||||
# switching verification off on the old SLE doesn't work
|
||||
Patch74: skip_unverified_test.patch
|
||||
# PATCH-FIX-UPSTREAM CVE-2023-24329-blank-URL-bypass.patch bsc#1208471 mcepl@suse.com
|
||||
# blocklist bypass via the urllib.parse component when supplying
|
||||
# a URL that starts with blank characters
|
||||
Patch75: CVE-2023-24329-blank-URL-bypass.patch
|
||||
# PATCH-FIX-OPENSUSE PygmentsBridge-trime_doctest_flags.patch mcepl@suse.com
|
||||
# Build documentation even without PygmentsBridge.trim_doctest_flags
|
||||
Patch76: PygmentsBridge-trime_doctest_flags.patch
|
||||
# # PATCH-FIX-UPSTREAM CVE-2023-27043-email-parsing-errors.patch bsc#1210638 mcepl@suse.com
|
||||
# # Detect email address parsing errors and return empty tuple to
|
||||
# # indicate the parsing error (old API)
|
||||
# Patch77: CVE-2023-27043-email-parsing-errors.patch
|
||||
# PATCH-FIX-UPSTREAM CVE-2022-48565-plistlib-XML-vulns.patch bsc#1214685 mcepl@suse.com
|
||||
# Reject entity declarations in plists
|
||||
Patch78: CVE-2022-48565-plistlib-XML-vulns.patch
|
||||
# PATCH-FIX-UPSTREAM CVE-2023-40217-avoid-ssl-pre-close.patch gh#python/cpython#108315
|
||||
Patch79: CVE-2023-40217-avoid-ssl-pre-close.patch
|
||||
# PATCH-FIX-UPSTREAM CVE-2022-48566-compare_digest-more-constant.patch bsc#1214691 mcepl@suse.com
|
||||
# Make compare_digest more constant-time
|
||||
Patch80: CVE-2022-48566-compare_digest-more-constant.patch
|
||||
# COMMON-PATCH-END
|
||||
BuildRequires: automake
|
||||
BuildRequires: db-devel
|
||||
BuildRequires: fdupes
|
||||
BuildRequires: gdbm-devel
|
||||
BuildRequires: gmp-devel
|
||||
BuildRequires: libbz2-devel
|
||||
# This is NOT switching off NIS support on SLE < 15,
|
||||
# support for NIS used to be in the glibc itself
|
||||
%if 0%{?suse_version} >= 1500 && 0%{?suse_version} < 1599
|
||||
BuildRequires: libnsl-devel
|
||||
%endif
|
||||
%if 0%{?suse_version} >= 1500
|
||||
BuildRequires: libopenssl-1_1-devel
|
||||
%else
|
||||
BuildRequires: libopenssl-devel
|
||||
%endif
|
||||
BuildRequires: ncurses-devel
|
||||
BuildRequires: readline-devel
|
||||
BuildRequires: sqlite-devel
|
||||
BuildRequires: tk-devel
|
||||
BuildRequires: xz
|
||||
BuildRequires: pkgconfig(x11)
|
||||
# for %%{_datadir}/application and %%{_datadir}/mime/packages
|
||||
BuildRequires: filesystem
|
||||
BuildRequires: update-desktop-files
|
||||
# for %%{_datadir}/icons/hicolor directories
|
||||
BuildRequires: hicolor-icon-theme
|
||||
%define python_version %(echo %{tarversion} | head -c 3)
|
||||
%define idle_name idle
|
||||
Requires: python-base = %{version}
|
||||
%if %{suse_version} == 1315 && !0%{?is_opensuse}
|
||||
Recommends: python-strict-tls-check
|
||||
%endif
|
||||
Provides: %{name} = %{python_version}
|
||||
Provides: python2 = %{version}
|
||||
# To make older versions of this package to conflict with
|
||||
# shared-python-startup I need a symbol to conflict with
|
||||
Provides: python2_split_startup
|
||||
Obsoletes: python-elementtree
|
||||
Obsoletes: python-nothreads
|
||||
Obsoletes: python-sqlite
|
||||
Obsoletes: python21
|
||||
# bug437293
|
||||
%ifarch ppc64
|
||||
Obsoletes: python-64bit
|
||||
%endif
|
||||
BuildRoot: %{_tmppath}/%{name}-%{version}-build
|
||||
|
||||
%description
|
||||
Python is an interpreted, object-oriented programming language, and is
|
||||
often compared to Tcl, Perl, Scheme, or Java. You can find an overview
|
||||
of Python in the documentation and tutorials included in the python-doc
|
||||
(HTML) or python-doc-pdf (PDF) packages.
|
||||
|
||||
If you want to install third party modules using distutils, you need to
|
||||
install python-devel package.
|
||||
|
||||
%package idle
|
||||
Summary: An Integrated Development Environment for Python
|
||||
Group: Development/Languages/Python
|
||||
Requires: python-base = %{version}
|
||||
Requires: python-tk
|
||||
Provides: python2-idle = %{version}
|
||||
|
||||
%description idle
|
||||
IDLE is a Tkinter based integrated development environment for Python.
|
||||
It features a multi-window text editor with multiple undo, Python
|
||||
colorizing, and many other things, as well as a Python shell window and
|
||||
a debugger.
|
||||
|
||||
%package demo
|
||||
Summary: Python Demonstration Scripts
|
||||
Group: Development/Languages/Python
|
||||
Requires: python-base = %{version}
|
||||
Provides: pyth_dmo
|
||||
Obsoletes: pyth_dmo
|
||||
Provides: python2-demo = %{version}
|
||||
|
||||
%description demo
|
||||
Various demonstrations of what you can do with Python and a number of
|
||||
programs that are useful for building or extending Python.
|
||||
|
||||
%package tk
|
||||
Summary: TkInter - Python Tk Interface
|
||||
Group: Development/Libraries/Python
|
||||
Requires: python-base = %{version}
|
||||
Provides: pyth_tk
|
||||
Provides: pyth_tkl
|
||||
Provides: python-tkinter
|
||||
Provides: python_tkinter_lib
|
||||
#%ifarch %ix86
|
||||
#Provides: _tkinter.so
|
||||
#%endif
|
||||
Obsoletes: pyth_tk
|
||||
Obsoletes: pyth_tkl
|
||||
Obsoletes: python-tkinter
|
||||
Provides: python2-tk = %{version}
|
||||
|
||||
%description tk
|
||||
Python interface to Tk. Tk is the GUI toolkit that comes with Tcl.
|
||||
|
||||
%package curses
|
||||
Summary: Python Interface to the (N)Curses Library
|
||||
Group: Development/Libraries/Python
|
||||
Requires: python-base = %{version}
|
||||
Obsoletes: pyth_cur
|
||||
Provides: pyth_cur
|
||||
Provides: python2-curses = %{version}
|
||||
|
||||
%description curses
|
||||
An easy to use interface to the (n)curses CUI library. CUI stands for
|
||||
Console User Interface.

%package gdbm
Summary: Python Interface to the GDBM Library
Group: Development/Libraries/Python
Requires: python-base = %{version}
Obsoletes: pygdmod
Provides: pygdmod
Provides: python2-gdbm = %{version}
# Compat to allow BR on python_module dbm and have it properly
# pull in gdbm on py2 and dbm on py3
Provides: python-dbm = %{version}
Provides: python2-dbm = %{version}

%description gdbm
An easy-to-use interface for GDBM databases. GDBM is the GNU
implementation of the standard Unix DBM databases.
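A minimal sketch of the gdbm module this subpackage ships (the database path is made up):

    # Illustrative only; requires the python-gdbm subpackage.
    import gdbm

    db = gdbm.open('/tmp/example.gdbm', 'c')  # 'c' creates the file if it is missing
    db['key'] = 'value'
    print db['key']
    db.close()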

%if %{suse_version} == 1315 && !0%{?is_opensuse}
%package strict-tls-check
Summary: Enable secure verification of TLS certificates
Group: Development/Libraries/Python
Requires: %{name} = %{version}
Supplements: %{name}

%description strict-tls-check
When this package is present, Python performs strict verification of
TLS certificates, including hostname checks, by default. This is
the preferred, secure setting.

It is distributed as a separate package because this behavior can
cause verification errors in improperly written legacy scripts that
rely on the earlier non-verifying behavior.
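For illustration only (the host name is made up and none of this code ships with the package), this is the kind of difference scripts will see: with strict checking, a server whose certificate does not verify raises an exception, and a legacy script has to opt out explicitly using the Python 2.7.9+ ssl API:

    # Illustrative only.
    import ssl
    import urllib2

    try:
        urllib2.urlopen("https://untrusted-cert.example.invalid/")
    except (urllib2.URLError, ssl.SSLError) as exc:
        print "TLS verification failed:", exc

    # Explicit opt-out for scripts that really need the old, non-verifying behavior:
    insecure = ssl._create_unverified_context()
    # urllib2.urlopen("https://untrusted-cert.example.invalid/", context=insecure)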
%endif

%prep
%setup -q -n %{tarname}
# COMMON-PREP-BEGIN
%patch1 -p1
%patch2 -p1
%patch3 -p1
%patch4 -p1
%patch5 -p1
%patch7 -p1
%patch8 -p1
%patch10 -p1
%patch13 -p1
%patch17 -p1
%patch20 -p1
%patch22 -p1
%patch24 -p1
%patch33 -p1
%if %{suse_version} < 1500 && !0%{?is_opensuse}
%patch34 -p1
%endif
%patch35 -p1
%patch38 -p1
%ifarch ppc ppc64 ppc64le
%patch40 -p1
%endif
%patch41 -p1
%if %{suse_version} >= 1500 || (0%{?sle_version} && 0%{?sle_version} >= 120400)
%patch47 -p1
%patch48 -p1
%endif
# SLE-12 needs to skip more
%if %{suse_version} == 1315
%patch57 -p1
%endif
%patch49 -p1
%patch50 -p1
%patch51 -p1
%patch55 -p1
%patch56 -p1
%patch58 -p1
%patch59 -p1
%patch60 -p1
%patch61 -p1
%patch62 -p1
%patch63 -p1
%patch64 -p1
%patch65 -p1
%patch66 -p1
%patch67 -p1
%patch68 -p1
%patch69 -p1
%patch70 -p1
%patch71 -p1
%patch72 -p1
%patch73 -p1
%if 0%{?sle_version} && 0%{?sle_version} < 150000
%patch74 -p1
%endif
%patch75 -p1
%patch76 -p1
# %%patch77 -p1
%patch78 -p1
%patch79 -p1
%patch80 -p1

# For patch 66
cp -v %{SOURCE66} Lib/test/recursion.tar

# drop Autoconf version requirement
sed -i 's/^version_required/dnl version_required/' configure.ac
# COMMON-PREP-END

%if %{suse_version} == 1315 && !0%{?is_opensuse}
cp %{SOURCE8} Lib/
%endif

%build
%define _lto_cflags %{nil}
export OPT="%{optflags} -DOPENSSL_LOAD_CONF -fwrapv"

autoreconf -f -i . # Modules/_ctypes/libffi
# prevent make from trying to rebuild asdl stuff, which requires an existing
# python installation
touch Parser/asdl* Python/Python-ast.c Include/Python-ast.h

%configure \
    --docdir=%{_docdir}/python \
    --enable-ipv6 \
    --with-fpectl \
    --enable-shared \
    --enable-unicode=ucs4

LD_LIBRARY_PATH=$PWD:$LD_LIBRARY_PATH \
make %{?_smp_mflags}

%check
# On hppa, glibc threading is quite broken: the tests just stop at some
# point, and the machine does not build anything more until a timeout
# several hours later.
%ifnarch hppa
# Limit virtual memory to avoid spurious failures
if test $(ulimit -v) = unlimited || test $(ulimit -v) -gt 10000000; then
    ulimit -v 10000000 || :
fi
LIST="test_urllib test_ssl test_hashlib test_hmac test_unicodedata test_tarfile test_sqlite test_tcl test_dbm test_anydbm test_dumbdbm test_gdbm test_whichdb test_tk test_ttk_textonly test_bsddb test_bsddb3 test_readline"
make test TESTOPTS="-w $LIST" TESTPYTHONOPTS="-R"
%endif

%install
# Replace remaining /usr/local/bin/python or /usr/bin/python2.x shebangs with /usr/bin/python
find . -name '*.py' -type f | grep -vE "^./Parser/|^./Python/" \
    | xargs grep -lE '^#! *(/usr/.*bin/(env +)?)?python' \
    | xargs sed -r -i -e '1s@^#![[:space:]]*(/usr/(local/)?bin/(env +)?)?python([0-9]+\.[0-9]+)?@#!/usr/bin/python@'
# the grep in between makes it much faster
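# For example (hypothetical file), a first line such as
#   #!/usr/local/bin/python2.7
# or
#   #!/usr/bin/env python
# is rewritten by the sed expression above to
#   #!/usr/bin/python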
########################################
# install it
########################################
%make_install OPT="%{optflags} -fPIC"
########################################
# some cleanups
########################################
# remove hard links and replace them with symlinks
for dir in bin include %{_lib} ; do
    rm -f %{buildroot}/%{_prefix}/$dir/python
    ln -s python%{python_version} %{buildroot}/%{_prefix}/$dir/python
done
# kill imageop.so, it's insecure
rm -f %{buildroot}/%{_libdir}/python%{python_version}/lib-dynload/imageop.so
#cleanup for -base
rm %{buildroot}%{_bindir}/python%{python_version}
rm %{buildroot}%{_bindir}/python2
rm %{buildroot}%{_bindir}/python
rm %{buildroot}%{_bindir}/smtpd.py
rm %{buildroot}%{_bindir}/pydoc
rm %{buildroot}%{_bindir}/2to3
rm %{buildroot}%{_mandir}/man1/python*
rm %{buildroot}%{_libdir}/libpython*.so.*
rm %{buildroot}%{_libdir}/python
find %{buildroot}%{_libdir}/python%{python_version} -maxdepth 1 \
    ! \( -name "ssl.py*" -o -name "sle_tls_checks_policy.py*" \) \
    -exec rm {} ";"
rm %{buildroot}%{_bindir}/python%{python_version}-config
rm %{buildroot}%{_bindir}/python2-config
rm %{buildroot}%{_bindir}/python-config
rm %{buildroot}%{_libdir}/pkgconfig/*
rm -r %{buildroot}%{_includedir}/python
rm -r %{buildroot}%{_includedir}/python%{python_version}
rm -r %{buildroot}%{_libdir}/python%{python_version}/compiler
rm -r %{buildroot}%{_libdir}/python%{python_version}/config
rm -r %{buildroot}%{_libdir}/python%{python_version}/ctypes
rm -r %{buildroot}%{_libdir}/python%{python_version}/distutils
rm -r %{buildroot}%{_libdir}/python%{python_version}/email
rm -r %{buildroot}%{_libdir}/python%{python_version}/encodings
rm -r %{buildroot}%{_libdir}/python%{python_version}/ensurepip
rm -r %{buildroot}%{_libdir}/python%{python_version}/hotshot
rm -r %{buildroot}%{_libdir}/python%{python_version}/importlib
rm -r %{buildroot}%{_libdir}/python%{python_version}/json
rm -r %{buildroot}%{_libdir}/python%{python_version}/lib2to3
rm -r %{buildroot}%{_libdir}/python%{python_version}/logging
rm -r %{buildroot}%{_libdir}/python%{python_version}/multiprocessing
rm -r %{buildroot}%{_libdir}/python%{python_version}/plat-*
rm -r %{buildroot}%{_libdir}/python%{python_version}/pydoc_data
rm -r %{buildroot}%{_libdir}/python%{python_version}/test
rm -r %{buildroot}%{_libdir}/python%{python_version}/unittest
rm -r %{buildroot}%{_libdir}/python%{python_version}/wsgiref
rm -r %{buildroot}%{_libdir}/python%{python_version}/xml
rm %{buildroot}%{_libdir}/libpython%{python_version}.so
rm %{buildroot}%{_libdir}/python%{python_version}/site-packages/README
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/_bisect.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/_csv.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/_collections.so
rm -f %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/_ctypes.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/_ctypes_test.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/_elementtree.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/_functools.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/_heapq.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/_hotshot.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/_io.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/_json.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/_locale.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/_lsprof.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/_multiprocessing.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/_random.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/_socket.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/_struct.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/_testcapi.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/array.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/binascii.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/bz2.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/cPickle.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/cStringIO.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/cmath.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/crypt.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/datetime.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/fcntl.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/future_builtins.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/grp.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/itertools.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/linuxaudiodev.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/math.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/mmap.so
rm -f %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/nis.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/operator.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/ossaudiodev.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/parser.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/pyexpat.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/resource.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/select.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/spwd.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/strop.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/syslog.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/termios.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/time.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/unicodedata.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/zlib.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/_codecs*.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/_multibytecodec.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/audioop.so
rm -f %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/dl.so
rm %{buildroot}%{_libdir}/python%{python_version}/lib-dynload/Python-%{tarversion}-py%{python_version}.egg-info
# replace duplicate .pyo/.pyc with hardlinks
%fdupes %{buildroot}/%{_libdir}/python%{python_version}
########################################
# documentation
########################################
export PDOCS=%{buildroot}%{_docdir}/%{name}
install -d -m 755 $PDOCS
install -c -m 644 %{SOURCE1} $PDOCS/
install -c -m 644 LICENSE $PDOCS/
install -c -m 644 README $PDOCS/
########################################
# tools and demos
########################################
find Tools/ Demo/ -type f \( -regex ".*/.cvsignore" \) -exec rm -f \{\} \;
for x in `find Tools/ Demo/ \( -not -name Makefile \) -print | sort` ; do
    test -d $x && ( install -c -m 755 -d $PDOCS/$x ) \
        || ( install -c -m 644 $x $PDOCS/$x )
done
########################################
# idle
########################################
# move idle config into /etc
install -d -m755 %{buildroot}%{_sysconfdir}/%{idle_name}
(
cd %{buildroot}/%{_libdir}/python%{python_version}/idlelib/
for file in *.def ; do
    mv $file %{buildroot}%{_sysconfdir}/%{idle_name}/
    ln -sf /etc/%{idle_name}/$file %{buildroot}/%{_libdir}/python%{python_version}/idlelib/
done
)

# Install .desktop, mime and appdata files from upstream tarball
%if 0%{?suse_version} >= 1500
install -Dm0644 %{SOURCE50} %{buildroot}/%{_datadir}/mime/packages/idle.appdata.xml
%endif
install -D -m 0644 Lib/idlelib/Icons/idle_16.png %{buildroot}%{_datadir}/icons/hicolor/16x16/apps/idle.png
install -D -m 0644 Lib/idlelib/Icons/idle_32.png %{buildroot}%{_datadir}/icons/hicolor/32x32/apps/idle.png
install -D -m 0644 Lib/idlelib/Icons/idle_48.png %{buildroot}%{_datadir}/icons/hicolor/48x48/apps/idle.png
desktop-file-install --dir=%{buildroot}%{_datadir}/applications %{SOURCE51}

%post -p /sbin/ldconfig

%postun -p /sbin/ldconfig

%files idle
%defattr(644, root, root, 755)
%dir %{_sysconfdir}/%{idle_name}
%config %{_sysconfdir}/%{idle_name}/*
%doc Lib/idlelib/NEWS.txt
%doc Lib/idlelib/README.txt
%doc Lib/idlelib/TODO.txt
%doc Lib/idlelib/extend.txt
%doc Lib/idlelib/ChangeLog
%{_libdir}/python%{python_version}/idlelib
%attr(755, root, root) %{_bindir}/%{idle_name}
%if 0%{?suse_version} >= 1500
%{_datadir}/mime/packages/idle.appdata.xml
%endif
%{_datadir}/applications/idle.desktop
%{_datadir}/icons/hicolor/*/apps/idle.png

%files demo
%defattr(644, root, root, 755)
%doc %{_docdir}/%{name}/Demo
%doc %{_docdir}/%{name}/Tools

%files tk
%defattr(644, root, root, 755)
%{_libdir}/python%{python_version}/lib-tk/
%{_libdir}/python%{python_version}/lib-dynload/_tkinter.so

%files curses
%defattr(644, root, root, 755)
%{_libdir}/python%{python_version}/curses
%{_libdir}/python%{python_version}/lib-dynload/_curses.so
%{_libdir}/python%{python_version}/lib-dynload/_curses_panel.so

%files gdbm
%defattr(644, root, root, 755)
%{_libdir}/python%{python_version}/lib-dynload/gdbm.so
%{_libdir}/python%{python_version}/lib-dynload/dbm.so

%if %{suse_version} == 1315 && !0%{?is_opensuse}
%files strict-tls-check
%defattr(644, root, root, 755)
%{_libdir}/python%{python_version}/sle_tls_checks_policy.py*
%endif

%files
%defattr(644, root, root, 755)
%dir %{_docdir}/%{name}
%doc %{_docdir}/%{name}/README
%doc %{_docdir}/%{name}/LICENSE
%doc %{_docdir}/%{name}/README.SUSE
%dir %{_libdir}/python%{python_version}
%{_libdir}/python%{python_version}/ssl.py*
%{_libdir}/python%{python_version}/bsddb
%{_libdir}/python%{python_version}/sqlite3
%dir %{_libdir}/python%{python_version}/lib-dynload
%{_libdir}/python%{python_version}/lib-dynload/_bsddb.so
%{_libdir}/python%{python_version}/lib-dynload/_hashlib.so
%{_libdir}/python%{python_version}/lib-dynload/_sqlite3.so
%{_libdir}/python%{python_version}/lib-dynload/_ssl.so
%{_libdir}/python%{python_version}/lib-dynload/readline.so

%changelog
BIN
recursion.tar
Normal file
Binary file not shown.
53
remove-static-libpython.patch
Normal file
@ -0,0 +1,53 @@
---
Makefile.pre.in | 26 +-------------------------
1 file changed, 1 insertion(+), 25 deletions(-)

--- a/Makefile.pre.in
+++ b/Makefile.pre.in
@@ -488,7 +488,7 @@ coverage-report: regen-grammar


# Build the interpreter
-$(BUILDPYTHON): Modules/python.o $(LIBRARY) $(LDLIBRARY)
+$(BUILDPYTHON): Modules/python.o $(LDLIBRARY)
$(LINKCC) $(LDFLAGS) $(LINKFORSHARED) -o $@ \
Modules/python.o \
$(BLDLIBRARY) $(LIBS) $(MODLIBS) $(SYSLIBS) $(LDLAST)
@@ -529,18 +529,6 @@ sharedmods: $(BUILDPYTHON) pybuilddir.tx
_TCLTK_INCLUDES='$(TCLTK_INCLUDES)' _TCLTK_LIBS='$(TCLTK_LIBS)' \
$(PYTHON_FOR_BUILD) $(srcdir)/setup.py $$quiet build

-# Build static library
-# avoid long command lines, same as LIBRARY_OBJS
-$(LIBRARY): $(LIBRARY_OBJS)
- -rm -f $@
- $(AR) $(ARFLAGS) $@ Modules/getbuildinfo.o
- $(AR) $(ARFLAGS) $@ $(PARSER_OBJS)
- $(AR) $(ARFLAGS) $@ $(OBJECT_OBJS)
- $(AR) $(ARFLAGS) $@ $(PYTHON_OBJS)
- $(AR) $(ARFLAGS) $@ $(MODULE_OBJS) $(SIGNAL_OBJS)
- $(AR) $(ARFLAGS) $@ $(MODOBJS)
- $(RANLIB) $@
-
libpython$(VERSION).so: $(LIBRARY_OBJS)
if test $(INSTSONAME) != $(LDLIBRARY); then \
$(BLDSHARED) -Wl,-h$(INSTSONAME) -o $(INSTSONAME) $(LIBRARY_OBJS) $(MODLIBS) $(SHLIBS) $(LIBC) $(LIBM) $(LDLAST); \
@@ -1226,18 +1214,6 @@ libainstall: @DEF_MAKE_RULE@ python-conf
else true; \
fi; \
done
- @if test -d $(LIBRARY); then :; else \
- if test "$(PYTHONFRAMEWORKDIR)" = no-framework; then \
- if test "$(SO)" = .dll; then \
- $(INSTALL_DATA) $(LDLIBRARY) $(DESTDIR)$(LIBPL) ; \
- else \
- $(INSTALL_DATA) $(LIBRARY) $(DESTDIR)$(LIBPL)/$(LIBRARY) ; \
- $(RANLIB) $(DESTDIR)$(LIBPL)/$(LIBRARY) ; \
- fi; \
- else \
- echo Skip install of $(LIBRARY) - use make frameworkinstall; \
- fi; \
- fi
$(INSTALL_DATA) Modules/config.c $(DESTDIR)$(LIBPL)/config.c
$(INSTALL_DATA) Modules/python.o $(DESTDIR)$(LIBPL)/python.o
$(INSTALL_DATA) $(srcdir)/Modules/config.c.in $(DESTDIR)$(LIBPL)/config.c.in
15
reproducible.patch
Normal file
@ -0,0 +1,15 @@
Index: Python-2.7.13/Lib/py_compile.py
===================================================================
--- Python-2.7.13.orig/Lib/py_compile.py
+++ Python-2.7.13/Lib/py_compile.py
@@ -108,6 +108,10 @@ def compile(file, cfile=None, dfile=None
timestamp = long(os.fstat(f.fileno()).st_mtime)
except AttributeError:
timestamp = long(os.stat(file).st_mtime)
+ sde = os.environ.get('SOURCE_DATE_EPOCH')
+ if sde and timestamp > int(sde):
+ timestamp = int(sde)
+ os.utime(file, (timestamp, timestamp))
codestring = f.read()
try:
codeobject = __builtin__.compile(codestring, dfile or file,'exec')
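The hunk above makes byte-compilation reproducible: when SOURCE_DATE_EPOCH is set and is older than the source file's mtime, the timestamp embedded in the generated .pyc/.pyo is clamped to that epoch (and the source mtime is adjusted to match), so repeated builds emit identical files. A rough standalone sketch of the clamping logic (illustrative only, not part of the patch):

    # Illustrative Python 2 sketch of the clamping done in py_compile above.
    import os

    def clamped_timestamp(path):
        timestamp = long(os.stat(path).st_mtime)
        sde = os.environ.get('SOURCE_DATE_EPOCH')
        if sde and timestamp > int(sde):
            timestamp = int(sde)  # never embed a timestamp newer than the build epoch
        return timestamp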
19
skip_unverified_test.patch
Normal file
@ -0,0 +1,19 @@
---
Lib/test/test_ssl.py | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)

--- a/Lib/test/test_ssl.py
+++ b/Lib/test/test_ssl.py
@@ -1344,9 +1344,9 @@ class ContextTests(unittest.TestCase):
extra_env = {}
# Omitting it leaves verification on
assert_python_ok("-c", https_is_verified, **extra_env)
- # Setting it to zero turns verification off
- extra_env[ssl._https_verify_envvar] = "0"
- assert_python_ok("-c", https_is_not_verified, **extra_env)
+ ## # Setting it to zero turns verification off
+ ## extra_env[ssl._https_verify_envvar] = "0"
+ ## assert_python_ok("-c", https_is_not_verified, **extra_env)
# Any other value should also leave it on
for setting in ("", "1", "enabled", "foo"):
extra_env[ssl._https_verify_envvar] = setting
4
sle_tls_checks_policy.py
Normal file
@ -0,0 +1,4 @@
import ssl

def get_policy():
    return ssl.create_default_context
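Illustrative usage only (nothing below ships in the file itself): the factory returned by get_policy() is ssl.create_default_context from Python 2.7.9+, which produces contexts that verify certificates and check hostnames by default:

    # Illustrative only.
    import ssl
    import sle_tls_checks_policy

    factory = sle_tls_checks_policy.get_policy()  # ssl.create_default_context
    ctx = factory()
    print ctx.verify_mode == ssl.CERT_REQUIRED    # True
    print ctx.check_hostname                      # True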
21
sparc_longdouble.patch
Normal file
@ -0,0 +1,21 @@
Python ticket 6029

==== //tools/python/2.6.2/src/base/Modules/_ctypes/libffi/src/sparc/ffi.c#1 - /home/build/clifford/gpdb/tools/python/2.6.2/src/base/Modules/_ctypes/libffi/src/sparc/ffi.c ====
---
Modules/_ctypes/libffi/src/sparc/ffi.c | 5 +++++
1 file changed, 5 insertions(+)

--- a/Modules/_ctypes/libffi/src/sparc/ffi.c
+++ b/Modules/_ctypes/libffi/src/sparc/ffi.c
@@ -652,6 +652,11 @@
}
else
{
+#if FFI_TYPE_LONGDOUBLE != FFI_TYPE_DOUBLE
+ /* SparcV9 long double is 16-byte aligned; skip arg if necessary */
+ if (arg_types[i]->type == FFI_TYPE_LONGDOUBLE && (argn & 1))
+ argn++;
+#endif
/* Right-justify. */
argn += ALIGN(arg_types[i]->size, FFI_SIZEOF_ARG) / FFI_SIZEOF_ARG;
40
sphinx-update-removed-function.patch
Normal file
@ -0,0 +1,40 @@
--- a/Doc/tools/extensions/pyspecific.py
+++ b/Doc/tools/extensions/pyspecific.py
@@ -103,7 +103,11 @@ class ImplementationDetail(Directive):
# Support for documenting decorators

from sphinx import addnodes
-from sphinx.domains.python import PyModulelevel, PyClassmember
+try:
+ from sphinx.domains.python import PyFunction, PyMethod
+except ImportError:
+ from sphinx.domains.python import PyClassmember as PyMethod
+ from sphinx.domains.python import PyModulelevel as PyFunction

class PyDecoratorMixin(object):
def handle_signature(self, sig, signode):
@@ -114,16 +118,16 @@ class PyDecoratorMixin(object):
def needs_arglist(self):
return False

-class PyDecoratorFunction(PyDecoratorMixin, PyModulelevel):
+class PyDecoratorFunction(PyDecoratorMixin, PyFunction):
def run(self):
# a decorator function is a function after all
self.name = 'py:function'
- return PyModulelevel.run(self)
+ return PyFunction.run(self)

-class PyDecoratorMethod(PyDecoratorMixin, PyClassmember):
+class PyDecoratorMethod(PyDecoratorMixin, PyMethod):
def run(self):
self.name = 'py:method'
- return PyClassmember.run(self)
+ return PyMethod.run(self)


# Support for building "topic help" for pydoc
--- /dev/null
+++ b/Misc/NEWS.d/next/Documentation/2020-09-12-17-37-13.bpo-35293._cOwPD.rst
@@ -0,0 +1 @@
+Fix RemovedInSphinx40Warning when building the documentation. Patch by Dong-hee Na.