update version to 3.10.0

shixuantong 2021-11-13 10:52:43 +08:00
parent 1174a273a4
commit 36e7ad0769
23 changed files with 75 additions and 3901 deletions

@@ -1,7 +1,8 @@
diff -up Python-3.1.1/Lib/distutils/unixccompiler.py.rpath Python-3.1.1/Lib/distutils/unixccompiler.py
--- Python-3.1.1/Lib/distutils/unixccompiler.py.rpath 2009-09-04 17:29:34.000000000 -0400
+++ Python-3.1.1/Lib/distutils/unixccompiler.py 2009-09-04 17:49:54.000000000 -0400
@@ -141,6 +141,15 @@ class UnixCCompiler(CCompiler):
diff --git a/Lib/distutils/unixccompiler.py b/Lib/distutils/unixccompiler.py
index f0792de..4d83793 100644
--- a/Lib/distutils/unixccompiler.py
+++ b/Lib/distutils/unixccompiler.py
@@ -82,6 +82,15 @@ class UnixCCompiler(CCompiler):
if sys.platform == "cygwin":
exe_extension = ".exe"
@@ -17,3 +18,6 @@ diff -up Python-3.1.1/Lib/distutils/unixccompiler.py.rpath Python-3.1.1/Lib/dist
def preprocess(self, source, output_file=None, macros=None,
include_dirs=None, extra_preargs=None, extra_postargs=None):
fixed_args = self._fix_compile_args(None, macros, include_dirs)
--
1.8.3.1

@@ -1,227 +0,0 @@
diff --git a/Lib/distutils/command/install.py b/Lib/distutils/command/install.py
index c625c95..ae4f915 100644
--- a/Lib/distutils/command/install.py
+++ b/Lib/distutils/command/install.py
@@ -30,14 +30,14 @@ WINDOWS_SCHEME = {
INSTALL_SCHEMES = {
'unix_prefix': {
'purelib': '$base/lib/python$py_version_short/site-packages',
- 'platlib': '$platbase/lib/python$py_version_short/site-packages',
+ 'platlib': '$platbase/lib64/python$py_version_short/site-packages',
'headers': '$base/include/python$py_version_short$abiflags/$dist_name',
'scripts': '$base/bin',
'data' : '$base',
},
'unix_home': {
'purelib': '$base/lib/python',
- 'platlib': '$base/lib/python',
+ 'platlib': '$base/lib64/python',
'headers': '$base/include/python/$dist_name',
'scripts': '$base/bin',
'data' : '$base',
diff --git a/Lib/distutils/sysconfig.py b/Lib/distutils/sysconfig.py
index b51629e..9a4892a 100644
--- a/Lib/distutils/sysconfig.py
+++ b/Lib/distutils/sysconfig.py
@@ -146,8 +146,12 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
prefix = plat_specific and EXEC_PREFIX or PREFIX
if os.name == "posix":
+ if plat_specific or standard_lib:
+ lib = "lib64"
+ else:
+ lib = "lib"
libpython = os.path.join(prefix,
- "lib", "python" + get_python_version())
+ lib, "python" + get_python_version())
if standard_lib:
return libpython
else:
diff --git a/Lib/distutils/tests/test_install.py b/Lib/distutils/tests/test_install.py
index 287ab19..d4c05e0 100644
--- a/Lib/distutils/tests/test_install.py
+++ b/Lib/distutils/tests/test_install.py
@@ -57,8 +57,9 @@ class InstallTestCase(support.TempdirManager,
self.assertEqual(got, expected)
libdir = os.path.join(destination, "lib", "python")
+ platlibdir = os.path.join(destination, "lib64", "python")
check_path(cmd.install_lib, libdir)
- check_path(cmd.install_platlib, libdir)
+ check_path(cmd.install_platlib, platlibdir)
check_path(cmd.install_purelib, libdir)
check_path(cmd.install_headers,
os.path.join(destination, "include", "python", "foopkg"))
diff --git a/Lib/site.py b/Lib/site.py
index a065ab0..22d53fa 100644
--- a/Lib/site.py
+++ b/Lib/site.py
@@ -335,11 +335,15 @@ def getsitepackages(prefixes=None):
seen.add(prefix)
if os.sep == '/':
+ sitepackages.append(os.path.join(prefix, "lib64",
+ "python" + sys.version[:3],
+ "site-packages"))
sitepackages.append(os.path.join(prefix, "lib",
"python%d.%d" % sys.version_info[:2],
"site-packages"))
else:
sitepackages.append(prefix)
+ sitepackages.append(os.path.join(prefix, "lib64", "site-packages"))
sitepackages.append(os.path.join(prefix, "lib", "site-packages"))
return sitepackages
diff --git a/Lib/sysconfig.py b/Lib/sysconfig.py
index b9e2faf..0ae6d35 100644
--- a/Lib/sysconfig.py
+++ b/Lib/sysconfig.py
@@ -20,10 +20,10 @@ __all__ = [
_INSTALL_SCHEMES = {
'posix_prefix': {
- 'stdlib': '{installed_base}/lib/python{py_version_short}',
- 'platstdlib': '{platbase}/lib/python{py_version_short}',
+ 'stdlib': '{installed_base}/lib64/python{py_version_short}',
+ 'platstdlib': '{platbase}/lib64/python{py_version_short}',
'purelib': '{base}/lib/python{py_version_short}/site-packages',
- 'platlib': '{platbase}/lib/python{py_version_short}/site-packages',
+ 'platlib': '{platbase}/lib64/python{py_version_short}/site-packages',
'include':
'{installed_base}/include/python{py_version_short}{abiflags}',
'platinclude':
@@ -62,10 +62,10 @@ _INSTALL_SCHEMES = {
'data': '{userbase}',
},
'posix_user': {
- 'stdlib': '{userbase}/lib/python{py_version_short}',
- 'platstdlib': '{userbase}/lib/python{py_version_short}',
+ 'stdlib': '{userbase}/lib64/python{py_version_short}',
+ 'platstdlib': '{userbase}/lib64/python{py_version_short}',
'purelib': '{userbase}/lib/python{py_version_short}/site-packages',
- 'platlib': '{userbase}/lib/python{py_version_short}/site-packages',
+ 'platlib': '{userbase}/lib64/python{py_version_short}/site-packages',
'include': '{userbase}/include/python{py_version_short}',
'scripts': '{userbase}/bin',
'data': '{userbase}',
diff --git a/Lib/test/test_site.py b/Lib/test/test_site.py
index 1bbc697..9a7e80d 100644
--- a/Lib/test/test_site.py
+++ b/Lib/test/test_site.py
@@ -267,8 +267,8 @@ class HelperFunctionsTests(unittest.TestCase):
dirs = site.getsitepackages()
if os.sep == '/':
# OS X, Linux, FreeBSD, etc
- self.assertEqual(len(dirs), 1)
- wanted = os.path.join('xoxo', 'lib',
+ self.assertEqual(len(dirs), 2)
+ wanted = os.path.join('xoxo', 'lib64',
'python%d.%d' % sys.version_info[:2],
'site-packages')
self.assertEqual(dirs[0], wanted)
diff --git a/Makefile.pre.in b/Makefile.pre.in
index a914a9c..406a441 100644
--- a/Makefile.pre.in
+++ b/Makefile.pre.in
@@ -143,7 +143,7 @@ LIBDIR= @libdir@
MANDIR= @mandir@
INCLUDEDIR= @includedir@
CONFINCLUDEDIR= $(exec_prefix)/include
-SCRIPTDIR= $(prefix)/lib
+SCRIPTDIR= $(prefix)/lib64
ABIFLAGS= @ABIFLAGS@
# Detailed destination directories
diff --git a/Modules/getpath.c b/Modules/getpath.c
index b727f66..a0c5fb6 100644
--- a/Modules/getpath.c
+++ b/Modules/getpath.c
@@ -730,7 +730,7 @@ calculate_exec_prefix(PyCalculatePath *calculate, _PyPathConfig *pathconfig,
if (safe_wcscpy(exec_prefix, calculate->exec_prefix, exec_prefix_len) < 0) {
return PATHLEN_ERR();
}
- status = joinpath(exec_prefix, L"lib/lib-dynload", exec_prefix_len);
+ status = joinpath(exec_prefix, L"lib64/lib-dynload", exec_prefix_len);
if (_PyStatus_EXCEPTION(status)) {
return status;
}
@@ -1067,7 +1067,7 @@ calculate_zip_path(PyCalculatePath *calculate, const wchar_t *prefix,
return PATHLEN_ERR();
}
}
- status = joinpath(zip_path, L"lib/python00.zip", zip_path_len);
+ status = joinpath(zip_path, L"lib64/python00.zip", zip_path_len);
if (_PyStatus_EXCEPTION(status)) {
return status;
}
@@ -1197,7 +1197,7 @@ calculate_init(PyCalculatePath *calculate, const PyConfig *config)
if (!calculate->exec_prefix) {
return DECODE_LOCALE_ERR("EXEC_PREFIX define", len);
}
- calculate->lib_python = Py_DecodeLocale("lib/python" VERSION, &len);
+ calculate->lib_python = Py_DecodeLocale("lib64/python" VERSION, &len);
if (!calculate->lib_python) {
return DECODE_LOCALE_ERR("EXEC_PREFIX define", len);
}
diff --git a/configure b/configure
index 8886561..78867c6 100755
--- a/configure
+++ b/configure
@@ -15214,9 +15214,9 @@ fi
if test x$PLATFORM_TRIPLET = x; then
- LIBPL='$(prefix)'"/lib/python${VERSION}/config-${LDVERSION}"
+ LIBPL='$(prefix)'"/lib64/python${VERSION}/config-${LDVERSION}"
else
- LIBPL='$(prefix)'"/lib/python${VERSION}/config-${LDVERSION}-${PLATFORM_TRIPLET}"
+ LIBPL='$(prefix)'"/lib64/python${VERSION}/config-${LDVERSION}-${PLATFORM_TRIPLET}"
fi
diff --git a/configure.ac b/configure.ac
index d8de9d4..477a5ff 100644
--- a/configure.ac
+++ b/configure.ac
@@ -4689,9 +4689,9 @@ fi
dnl define LIBPL after ABIFLAGS and LDVERSION is defined.
AC_SUBST(PY_ENABLE_SHARED)
if test x$PLATFORM_TRIPLET = x; then
- LIBPL='$(prefix)'"/lib/python${VERSION}/config-${LDVERSION}"
+ LIBPL='$(prefix)'"/lib64/python${VERSION}/config-${LDVERSION}"
else
- LIBPL='$(prefix)'"/lib/python${VERSION}/config-${LDVERSION}-${PLATFORM_TRIPLET}"
+ LIBPL='$(prefix)'"/lib64/python${VERSION}/config-${LDVERSION}-${PLATFORM_TRIPLET}"
fi
AC_SUBST(LIBPL)
diff --git a/setup.py b/setup.py
index b168ed4..8628b9d 100644
--- a/setup.py
+++ b/setup.py
@@ -649,7 +649,7 @@ class PyBuildExt(build_ext):
# directories (i.e. '.' and 'Include') must be first. See issue
# 10520.
if not CROSS_COMPILING:
- add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib')
+ add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib64')
add_dir_to_list(self.compiler.include_dirs, '/usr/local/include')
# only change this for cross builds for 3.3, issues on Mageia
if CROSS_COMPILING:
@@ -953,11 +953,11 @@ class PyBuildExt(build_ext):
elif curses_library:
readline_libs.append(curses_library)
elif self.compiler.find_library_file(self.lib_dirs +
- ['/usr/lib/termcap'],
+ ['/usr/lib64/termcap'],
'termcap'):
readline_libs.append('termcap')
self.add(Extension('readline', ['readline.c'],
- library_dirs=['/usr/lib/termcap'],
+ library_dirs=['/usr/lib64/termcap'],
extra_link_args=readline_extra_link_args,
libraries=readline_libs))
else:
--
1.8.3.1
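
A quick way to see the effect of the lib64 scheme changes above on an installed interpreter is to print the install paths that sysconfig reports; this is an illustrative sketch, not part of the patch:

    import sysconfig

    # On an interpreter built with the lib64 patch above, the platform
    # directories are expected to live under .../lib64/...; on an unpatched
    # build they stay under .../lib/....
    for key in ("stdlib", "platstdlib", "purelib", "platlib"):
        print(key, "->", sysconfig.get_path(key))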

@@ -1,56 +0,0 @@
diff --git a/Makefile.pre.in b/Makefile.pre.in
index 406a441..917303d 100644
--- a/Makefile.pre.in
+++ b/Makefile.pre.in
@@ -562,7 +562,7 @@ clinic: check-clean-src $(srcdir)/Modules/_blake2/blake2s_impl.c
$(PYTHON_FOR_REGEN) $(srcdir)/Tools/clinic/clinic.py --make --srcdir $(srcdir)
# Build the interpreter
-$(BUILDPYTHON): Programs/python.o $(LIBRARY) $(LDLIBRARY) $(PY3LIBRARY)
+$(BUILDPYTHON): Programs/python.o $(LDLIBRARY) $(PY3LIBRARY)
$(LINKCC) $(PY_CORE_LDFLAGS) $(LINKFORSHARED) -o $@ Programs/python.o $(BLDLIBRARY) $(LIBS) $(MODLIBS) $(SYSLIBS)
platform: $(BUILDPYTHON) pybuilddir.txt
@@ -610,12 +610,6 @@ sharedmods: $(BUILDPYTHON) pybuilddir.txt Modules/_math.o
_TCLTK_INCLUDES='$(TCLTK_INCLUDES)' _TCLTK_LIBS='$(TCLTK_LIBS)' \
$(PYTHON_FOR_BUILD) $(srcdir)/setup.py $$quiet build
-
-# Build static library
-$(LIBRARY): $(LIBRARY_OBJS)
- -rm -f $@
- $(AR) $(ARFLAGS) $@ $(LIBRARY_OBJS)
-
libpython$(LDVERSION).so: $(LIBRARY_OBJS) $(DTRACE_OBJS)
if test $(INSTSONAME) != $(LDLIBRARY); then \
$(BLDSHARED) -Wl,-h$(INSTSONAME) -o $(INSTSONAME) $(LIBRARY_OBJS) $(MODLIBS) $(SHLIBS) $(LIBC) $(LIBM); \
@@ -693,7 +687,7 @@ Makefile Modules/config.c: Makefile.pre \
@echo "The Makefile was updated, you may need to re-run make."
-Programs/_testembed: Programs/_testembed.o $(LIBRARY) $(LDLIBRARY) $(PY3LIBRARY)
+Programs/_testembed: Programs/_testembed.o $(LDLIBRARY) $(PY3LIBRARY)
$(LINKCC) $(PY_CORE_LDFLAGS) $(LINKFORSHARED) -o $@ Programs/_testembed.o $(BLDLIBRARY) $(LIBS) $(MODLIBS) $(SYSLIBS)
############################################################################
@@ -1557,17 +1551,6 @@ libainstall: @DEF_MAKE_RULE@ python-config
else true; \
fi; \
done
- @if test -d $(LIBRARY); then :; else \
- if test "$(PYTHONFRAMEWORKDIR)" = no-framework; then \
- if test "$(SHLIB_SUFFIX)" = .dll; then \
- $(INSTALL_DATA) $(LDLIBRARY) $(DESTDIR)$(LIBPL) ; \
- else \
- $(INSTALL_DATA) $(LIBRARY) $(DESTDIR)$(LIBPL)/$(LIBRARY) ; \
- fi; \
- else \
- echo Skip install of $(LIBRARY) - use make frameworkinstall; \
- fi; \
- fi
$(INSTALL_DATA) Modules/config.c $(DESTDIR)$(LIBPL)/config.c
$(INSTALL_DATA) Programs/python.o $(DESTDIR)$(LIBPL)/python.o
$(INSTALL_DATA) $(srcdir)/Modules/config.c.in $(DESTDIR)$(LIBPL)/config.c.in
--
1.8.3.1

@@ -1,48 +0,0 @@
diff --git a/Lib/unittest/__init__.py b/Lib/unittest/__init__.py
index ace3a6f..a3f76a1 100644
--- a/Lib/unittest/__init__.py
+++ b/Lib/unittest/__init__.py
@@ -59,7 +59,7 @@ __unittest = True
from .result import TestResult
from .async_case import IsolatedAsyncioTestCase
from .case import (addModuleCleanup, TestCase, FunctionTestCase, SkipTest, skip,
- skipIf, skipUnless, expectedFailure)
+ skipIf, skipUnless, expectedFailure, _skipInRpmBuild)
from .suite import BaseTestSuite, TestSuite
from .loader import (TestLoader, defaultTestLoader, makeSuite, getTestCaseNames,
findTestCases)
diff --git a/Lib/unittest/case.py b/Lib/unittest/case.py
index e5734b6..b641710 100644
--- a/Lib/unittest/case.py
+++ b/Lib/unittest/case.py
@@ -3,6 +3,7 @@
import sys
import functools
import difflib
+import os
import logging
import pprint
import re
@@ -163,6 +164,22 @@ class _BaseTestCaseContext:
msg = self.test_case._formatMessage(self.msg, standardMsg)
raise self.test_case.failureException(msg)
+# Non-standard/downstream-only hooks for handling issues with specific test
+# cases:
+
+def _skipInRpmBuild(reason):
+ """
+ Non-standard/downstream-only decorator for marking a specific unit test
+ to be skipped when run within the %check of an rpmbuild.
+
+ Specifically, this takes effect when WITHIN_PYTHON_RPM_BUILD is set within
+ the environment, and has no effect otherwise.
+ """
+ if 'WITHIN_PYTHON_RPM_BUILD' in os.environ:
+ return skip(reason)
+ else:
+ return _id
+
class _AssertRaisesBaseContext(_BaseTestCaseContext):
def __init__(self, expected, test_case, expected_regex=None):
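
The downstream-only hook added above is used like the standard unittest skip decorators; a minimal sketch (the test class and method names here are made up):

    import unittest

    class ExampleTests(unittest.TestCase):

        # Skipped only when WITHIN_PYTHON_RPM_BUILD is set in the environment,
        # i.e. inside the %check phase of an rpmbuild; a no-op otherwise.
        @unittest._skipInRpmBuild("requires a writable build root")
        def test_something_fragile_in_rpmbuild(self):
            self.assertTrue(True)

    if __name__ == "__main__":
        unittest.main()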

@@ -1,11 +0,0 @@
diff -up cpython-59223da36dec/Lib/test/test_posix.py.disable-test_fs_holes-in-rpm-build cpython-59223da36dec/Lib/test/test_posix.py
--- cpython-59223da36dec/Lib/test/test_posix.py.disable-test_fs_holes-in-rpm-build 2012-08-07 17:15:59.000000000 -0400
+++ cpython-59223da36dec/Lib/test/test_posix.py 2012-08-07 17:16:53.528330330 -0400
@@ -973,6 +973,7 @@ class PosixTester(unittest.TestCase):
posix.RTLD_GLOBAL
posix.RTLD_LOCAL
+ @unittest._skipInRpmBuild('running kernel may not match kernel in chroot')
@unittest.skipUnless(hasattr(os, 'SEEK_HOLE'),
"test needs an OS that reports file holes")
def test_fs_holes(self):

@@ -1,7 +1,8 @@
diff -r 39b9b05c3085 Lib/distutils/sysconfig.py
--- a/Lib/distutils/sysconfig.py Wed Apr 10 00:27:23 2013 +0200
+++ b/Lib/distutils/sysconfig.py Wed Apr 10 10:14:18 2013 +0200
@@ -362,7 +362,10 @@
diff --git a/Lib/distutils/sysconfig.py b/Lib/distutils/sysconfig.py
index 3414a76..5cb3a89 100644
--- a/Lib/distutils/sysconfig.py
+++ b/Lib/distutils/sysconfig.py
@@ -146,7 +146,10 @@ def parse_makefile(fn, g=None):
done[n] = item = ""
if found:
after = value[m.end():]
@@ -13,10 +14,11 @@ diff -r 39b9b05c3085 Lib/distutils/sysconfig.py
if "$" in after:
notdone[name] = value
else:
diff -r 39b9b05c3085 Lib/sysconfig.py
--- a/Lib/sysconfig.py Wed Apr 10 00:27:23 2013 +0200
+++ b/Lib/sysconfig.py Wed Apr 10 10:14:18 2013 +0200
@@ -296,7 +296,10 @@
diff --git a/Lib/sysconfig.py b/Lib/sysconfig.py
index 95b48f6..90b4e27 100644
--- a/Lib/sysconfig.py
+++ b/Lib/sysconfig.py
@@ -346,7 +346,10 @@ def _parse_makefile(filename, vars=None, keep_unresolved=True):
if found:
after = value[m.end():]
@@ -28,3 +30,6 @@ diff -r 39b9b05c3085 Lib/sysconfig.py
if "$" in after:
notdone[name] = value
else:
--
1.8.3.1

@@ -1,71 +0,0 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Miro Hrončok <miro@hroncok.cz>
Date: Wed, 15 Aug 2018 15:36:29 +0200
Subject: [PATCH] 00189: Instead of bundled wheels, use our RPM packaged wheels
We keep them in /usr/share/python-wheels
Downstream only: upstream bundles
We might eventually pursue upstream support, but it's low priority
---
Lib/ensurepip/__init__.py | 32 ++++++++++++++++++++++----------
1 file changed, 22 insertions(+), 10 deletions(-)
diff --git a/Lib/ensurepip/__init__.py b/Lib/ensurepip/__init__.py
index f3152a55d4..f58dab1800 100644
--- a/Lib/ensurepip/__init__.py
+++ b/Lib/ensurepip/__init__.py
@@ -1,6 +1,7 @@
+import distutils.version
+import glob
import os
import os.path
-import pkgutil
import sys
import runpy
import tempfile
@@ -8,10 +9,24 @@ import tempfile
__all__ = ["version", "bootstrap"]
+_WHEEL_DIR = "/usr/share/python-wheels/"
-_SETUPTOOLS_VERSION = "47.1.0"
+_wheels = {}
-_PIP_VERSION = "20.1.1"
+def _get_most_recent_wheel_version(pkg):
+ prefix = os.path.join(_WHEEL_DIR, "{}-".format(pkg))
+ _wheels[pkg] = {}
+ for suffix in "-py2.py3-none-any.whl", "-py3-none-any.whl":
+ pattern = "{}*{}".format(prefix, suffix)
+ for path in glob.glob(pattern):
+ version_str = path[len(prefix):-len(suffix)]
+ _wheels[pkg][version_str] = os.path.basename(path)
+ return str(max(_wheels[pkg], key=distutils.version.LooseVersion))
+
+
+_SETUPTOOLS_VERSION = _get_most_recent_wheel_version("setuptools")
+
+_PIP_VERSION = _get_most_recent_wheel_version("pip")
_PROJECTS = [
("setuptools", _SETUPTOOLS_VERSION, "py3"),
@@ -105,13 +120,10 @@ def _bootstrap(*, root=None, upgrade=False, user=False,
# additional paths that need added to sys.path
additional_paths = []
for project, version, py_tag in _PROJECTS:
- wheel_name = "{}-{}-{}-none-any.whl".format(project, version, py_tag)
- whl = pkgutil.get_data(
- "ensurepip",
- "_bundled/{}".format(wheel_name),
- )
- with open(os.path.join(tmpdir, wheel_name), "wb") as fp:
- fp.write(whl)
+ wheel_name = _wheels[project][version]
+ with open(os.path.join(_WHEEL_DIR, wheel_name), "rb") as sfp:
+ with open(os.path.join(tmpdir, wheel_name), "wb") as fp:
+ fp.write(sfp.read())
additional_paths.append(os.path.join(tmpdir, wheel_name))
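
The patched ensurepip above discovers the newest setuptools and pip wheels under /usr/share/python-wheels instead of using bundled copies; a self-contained sketch of that lookup (simplified, file names illustrative):

    import glob
    import os

    WHEEL_DIR = "/usr/share/python-wheels/"  # RPM-provided wheels, as in the patch

    def newest_wheel(project):
        # Simplified to the py3 wheel tag; the patch also accepts py2.py3 wheels
        # and sorts versions with distutils.version.LooseVersion.
        paths = glob.glob(os.path.join(WHEEL_DIR, project + "-*-py3-none-any.whl"))
        if not paths:
            raise LookupError("no wheel found for " + project)
        def version_key(path):
            version = os.path.basename(path).split("-")[1]
            return tuple(int(p) for p in version.split(".") if p.isdigit())
        return max(paths, key=version_key)

    print(newest_wheel("pip"))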

@@ -1,7 +1,8 @@
diff -up Python-3.5.0/Makefile.pre.in.lib Python-3.5.0/Makefile.pre.in
--- Python-3.5.0/Makefile.pre.in.lib 2015-09-21 15:39:47.928286620 +0200
+++ Python-3.5.0/Makefile.pre.in 2015-09-21 15:42:58.004042762 +0200
@@ -1340,7 +1340,7 @@ inclinstall:
diff --git a/Makefile.pre.in b/Makefile.pre.in
index 790d974..2e25366 100644
--- a/Makefile.pre.in
+++ b/Makefile.pre.in
@@ -1689,7 +1689,7 @@ inclinstall:
# Install the library and miscellaneous stuff needed for extending/embedding
# This goes into $(exec_prefix)
@@ -10,3 +11,6 @@ diff -up Python-3.5.0/Makefile.pre.in.lib Python-3.5.0/Makefile.pre.in
# pkgconfig directory
LIBPC= $(LIBDIR)/pkgconfig
--
1.8.3.1

@@ -1,11 +1,11 @@
diff --git a/Lib/distutils/command/install.py b/Lib/distutils/command/install.py
index 0258d3d..4ebf50a 100644
index 26696cf..1826cbc 100644
--- a/Lib/distutils/command/install.py
+++ b/Lib/distutils/command/install.py
@@ -418,8 +418,19 @@ class install(Command):
@@ -441,8 +441,19 @@ class install(Command):
raise DistutilsOptionError(
"must not supply exec-prefix without prefix")
- self.prefix = os.path.normpath(sys.prefix)
- self.exec_prefix = os.path.normpath(sys.exec_prefix)
+ # self.prefix is set to sys.prefix + /local/
@@ -21,16 +21,16 @@ index 0258d3d..4ebf50a 100644
+
+ self.prefix = os.path.normpath(sys.prefix) + addition
+ self.exec_prefix = os.path.normpath(sys.exec_prefix) + addition
else:
if self.exec_prefix is None:
diff --git a/Lib/site.py b/Lib/site.py
index 0fc9200..c95202e 100644
index 939893e..9bc1a5f 100644
--- a/Lib/site.py
+++ b/Lib/site.py
@@ -322,7 +322,14 @@ def getsitepackages(prefixes=None):
@@ -380,7 +380,14 @@ def getsitepackages(prefixes=None):
return sitepackages
def addsitepackages(known_paths, prefixes=None):
- """Add site-packages to sys.path"""
+ """Add site-packages to sys.path
@@ -41,6 +41,9 @@ index 0fc9200..c95202e 100644
+ """
+ if ENABLE_USER_SITE and 'RPM_BUILD_ROOT' not in os.environ:
+ PREFIXES.insert(0, "/usr/local")
_trace("Processing global site-packages")
for sitedir in getsitepackages(prefixes):
if os.path.isdir(sitedir):
addsitedir(sitedir, known_paths)
--
1.8.3.1
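
The patch above makes a plain interpreter (outside an rpm build, i.e. with RPM_BUILD_ROOT unset) also search /usr/local, so pip-installed packages and distribution packages stay separated; an illustrative check, not part of the patch:

    import site
    import sys

    # On a patched interpreter, /usr/local is inserted into site.PREFIXES at
    # startup, so getsitepackages() lists both the /usr and /usr/local trees.
    print(sys.prefix)
    print(site.PREFIXES)
    print(site.getsitepackages())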

@@ -1,67 +0,0 @@
From 43e523103886af66d6c27cd72431b5d9d14cd2a9 Mon Sep 17 00:00:00 2001
From: "Miss Skeleton (bot)" <31488909+miss-islington@users.noreply.github.com>
Date: Mon, 19 Oct 2020 19:38:40 -0700
Subject: [PATCH] bpo-41944: No longer call eval() on content received via HTTP
in the CJK codec tests (GH-22566) (GH-22578)
(cherry picked from commit 2ef5caa58febc8968e670e39e3d37cf8eef3cab8)
https://github.com/python/cpython/commit/43e523103886af66d6c27cd72431b5d9d14cd2a9
reason:CVE-2020-27619
Co-authored-by: Serhiy Storchaka <storchaka@gmail.com>
---
Lib/test/multibytecodec_support.py | 22 +++++++------------
.../2020-10-05-17-43-46.bpo-41944.rf1dYb.rst | 1 +
2 files changed, 9 insertions(+), 14 deletions(-)
create mode 100644 Misc/NEWS.d/next/Tests/2020-10-05-17-43-46.bpo-41944.rf1dYb.rst
diff --git a/Lib/test/multibytecodec_support.py b/Lib/test/multibytecodec_support.py
index cca8af67d6d1d..f76c0153f5ecf 100644
--- a/Lib/test/multibytecodec_support.py
+++ b/Lib/test/multibytecodec_support.py
@@ -305,29 +305,23 @@ def test_mapping_file(self):
self._test_mapping_file_plain()
def _test_mapping_file_plain(self):
- unichrs = lambda s: ''.join(map(chr, map(eval, s.split('+'))))
+ def unichrs(s):
+ return ''.join(chr(int(x, 16)) for x in s.split('+'))
+
urt_wa = {}
with self.open_mapping_file() as f:
for line in f:
if not line:
break
- data = line.split('#')[0].strip().split()
+ data = line.split('#')[0].split()
if len(data) != 2:
continue
- csetval = eval(data[0])
- if csetval <= 0x7F:
- csetch = bytes([csetval & 0xff])
- elif csetval >= 0x1000000:
- csetch = bytes([(csetval >> 24), ((csetval >> 16) & 0xff),
- ((csetval >> 8) & 0xff), (csetval & 0xff)])
- elif csetval >= 0x10000:
- csetch = bytes([(csetval >> 16), ((csetval >> 8) & 0xff),
- (csetval & 0xff)])
- elif csetval >= 0x100:
- csetch = bytes([(csetval >> 8), (csetval & 0xff)])
- else:
+ if data[0][:2] != '0x':
+ self.fail(f"Invalid line: {line!r}")
+ csetch = bytes.fromhex(data[0][2:])
+ if len(csetch) == 1 and 0x80 <= csetch[0]:
continue
unich = unichrs(data[1])
diff --git a/Misc/NEWS.d/next/Tests/2020-10-05-17-43-46.bpo-41944.rf1dYb.rst b/Misc/NEWS.d/next/Tests/2020-10-05-17-43-46.bpo-41944.rf1dYb.rst
new file mode 100644
index 0000000000000..4f9782f1c85af
--- /dev/null
+++ b/Misc/NEWS.d/next/Tests/2020-10-05-17-43-46.bpo-41944.rf1dYb.rst
@@ -0,0 +1 @@
+Tests for CJK codecs no longer call ``eval()`` on content received via HTTP.
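
The core of the fix above is replacing eval() on fields fetched over HTTP with explicit hexadecimal parsing; roughly equivalent standalone code (the mapping-file line is made up):

    line = "0x8861 0x4E9C   # made-up mapping-file entry"

    data = line.split("#")[0].split()
    codeset_field, unicode_field = data

    # Old code ran eval() on untrusted text; the fix insists on a literal
    # "0x" prefix and decodes the hex digits directly.
    assert codeset_field[:2] == "0x"
    csetch = bytes.fromhex(codeset_field[2:])

    # int(x, 16) accepts the "0x" prefix, so chr()/int() replace eval() here too.
    unich = "".join(chr(int(part, 16)) for part in unicode_field.split("+"))

    print(csetch, unich)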

@@ -1,183 +0,0 @@
From 298cc0231a271b5d5c6d55273297b9541353ecc3 Mon Sep 17 00:00:00 2001
From: Benjamin Peterson <benjamin@python.org>
Date: Mon, 18 Jan 2021 09:28:15 -0600
Subject: [PATCH] closes bpo-42938: Replace snprintf with Python unicode
formatting in ctypes param reprs.
https://github.com/python/cpython/commit/916610ef90a0d0761f08747f7b0905541f0977c7
reason:CVE-2021-3177
---
Lib/ctypes/test/test_parameters.py | 43 ++++++++++++++++
.../2021-01-18-09-27-31.bpo-42938.4Zn4Mp.rst | 2 +
Modules/_ctypes/callproc.c | 51 +++++++------------
3 files changed, 64 insertions(+), 32 deletions(-)
create mode 100644 Misc/NEWS.d/next/Security/2021-01-18-09-27-31.bpo-42938.4Zn4Mp.rst
diff --git a/Lib/ctypes/test/test_parameters.py b/Lib/ctypes/test/test_parameters.py
index e4c25fd880cef..531894fdec838 100644
--- a/Lib/ctypes/test/test_parameters.py
+++ b/Lib/ctypes/test/test_parameters.py
@@ -201,6 +201,49 @@ def __dict__(self):
with self.assertRaises(ZeroDivisionError):
WorseStruct().__setstate__({}, b'foo')
+ def test_parameter_repr(self):
+ from ctypes import (
+ c_bool,
+ c_char,
+ c_wchar,
+ c_byte,
+ c_ubyte,
+ c_short,
+ c_ushort,
+ c_int,
+ c_uint,
+ c_long,
+ c_ulong,
+ c_longlong,
+ c_ulonglong,
+ c_float,
+ c_double,
+ c_longdouble,
+ c_char_p,
+ c_wchar_p,
+ c_void_p,
+ )
+ self.assertRegex(repr(c_bool.from_param(True)), r"^<cparam '\?' at 0x[A-Fa-f0-9]+>$")
+ self.assertEqual(repr(c_char.from_param(97)), "<cparam 'c' ('a')>")
+ self.assertRegex(repr(c_wchar.from_param('a')), r"^<cparam 'u' at 0x[A-Fa-f0-9]+>$")
+ self.assertEqual(repr(c_byte.from_param(98)), "<cparam 'b' (98)>")
+ self.assertEqual(repr(c_ubyte.from_param(98)), "<cparam 'B' (98)>")
+ self.assertEqual(repr(c_short.from_param(511)), "<cparam 'h' (511)>")
+ self.assertEqual(repr(c_ushort.from_param(511)), "<cparam 'H' (511)>")
+ self.assertRegex(repr(c_int.from_param(20000)), r"^<cparam '[li]' \(20000\)>$")
+ self.assertRegex(repr(c_uint.from_param(20000)), r"^<cparam '[LI]' \(20000\)>$")
+ self.assertRegex(repr(c_long.from_param(20000)), r"^<cparam '[li]' \(20000\)>$")
+ self.assertRegex(repr(c_ulong.from_param(20000)), r"^<cparam '[LI]' \(20000\)>$")
+ self.assertRegex(repr(c_longlong.from_param(20000)), r"^<cparam '[liq]' \(20000\)>$")
+ self.assertRegex(repr(c_ulonglong.from_param(20000)), r"^<cparam '[LIQ]' \(20000\)>$")
+ self.assertEqual(repr(c_float.from_param(1.5)), "<cparam 'f' (1.5)>")
+ self.assertEqual(repr(c_double.from_param(1.5)), "<cparam 'd' (1.5)>")
+ self.assertEqual(repr(c_double.from_param(1e300)), "<cparam 'd' (1e+300)>")
+ self.assertRegex(repr(c_longdouble.from_param(1.5)), r"^<cparam ('d' \(1.5\)|'g' at 0x[A-Fa-f0-9]+)>$")
+ self.assertRegex(repr(c_char_p.from_param(b'hihi')), "^<cparam 'z' \(0x[A-Fa-f0-9]+\)>$")
+ self.assertRegex(repr(c_wchar_p.from_param('hihi')), "^<cparam 'Z' \(0x[A-Fa-f0-9]+\)>$")
+ self.assertRegex(repr(c_void_p.from_param(0x12)), r"^<cparam 'P' \(0x0*12\)>$")
+
################################################################
if __name__ == '__main__':
diff --git a/Misc/NEWS.d/next/Security/2021-01-18-09-27-31.bpo-42938.4Zn4Mp.rst b/Misc/NEWS.d/next/Security/2021-01-18-09-27-31.bpo-42938.4Zn4Mp.rst
new file mode 100644
index 0000000000000..7df65a156feab
--- /dev/null
+++ b/Misc/NEWS.d/next/Security/2021-01-18-09-27-31.bpo-42938.4Zn4Mp.rst
@@ -0,0 +1,2 @@
+Avoid static buffers when computing the repr of :class:`ctypes.c_double` and
+:class:`ctypes.c_longdouble` values.
diff --git a/Modules/_ctypes/callproc.c b/Modules/_ctypes/callproc.c
index 40a05a44edd4c..56ccc2f1e0b5d 100644
--- a/Modules/_ctypes/callproc.c
+++ b/Modules/_ctypes/callproc.c
@@ -487,58 +487,47 @@ is_literal_char(unsigned char c)
static PyObject *
PyCArg_repr(PyCArgObject *self)
{
- char buffer[256];
switch(self->tag) {
case 'b':
case 'B':
- sprintf(buffer, "<cparam '%c' (%d)>",
+ return PyUnicode_FromFormat("<cparam '%c' (%d)>",
self->tag, self->value.b);
- break;
case 'h':
case 'H':
- sprintf(buffer, "<cparam '%c' (%d)>",
+ return PyUnicode_FromFormat("<cparam '%c' (%d)>",
self->tag, self->value.h);
- break;
case 'i':
case 'I':
- sprintf(buffer, "<cparam '%c' (%d)>",
+ return PyUnicode_FromFormat("<cparam '%c' (%d)>",
self->tag, self->value.i);
- break;
case 'l':
case 'L':
- sprintf(buffer, "<cparam '%c' (%ld)>",
+ return PyUnicode_FromFormat("<cparam '%c' (%ld)>",
self->tag, self->value.l);
- break;
case 'q':
case 'Q':
- sprintf(buffer,
-#ifdef MS_WIN32
- "<cparam '%c' (%I64d)>",
-#else
- "<cparam '%c' (%lld)>",
-#endif
+ return PyUnicode_FromFormat("<cparam '%c' (%lld)>",
self->tag, self->value.q);
- break;
case 'd':
- sprintf(buffer, "<cparam '%c' (%f)>",
- self->tag, self->value.d);
- break;
- case 'f':
- sprintf(buffer, "<cparam '%c' (%f)>",
- self->tag, self->value.f);
- break;
-
+ case 'f': {
+ PyObject *f = PyFloat_FromDouble((self->tag == 'f') ? self->value.f : self->value.d);
+ if (f == NULL) {
+ return NULL;
+ }
+ PyObject *result = PyUnicode_FromFormat("<cparam '%c' (%R)>", self->tag, f);
+ Py_DECREF(f);
+ return result;
+ }
case 'c':
if (is_literal_char((unsigned char)self->value.c)) {
- sprintf(buffer, "<cparam '%c' ('%c')>",
+ return PyUnicode_FromFormat("<cparam '%c' ('%c')>",
self->tag, self->value.c);
}
else {
- sprintf(buffer, "<cparam '%c' ('\\x%02x')>",
+ return PyUnicode_FromFormat("<cparam '%c' ('\\x%02x')>",
self->tag, (unsigned char)self->value.c);
}
- break;
/* Hm, are these 'z' and 'Z' codes useful at all?
Shouldn't they be replaced by the functionality of c_string
@@ -547,22 +536,20 @@ PyCArg_repr(PyCArgObject *self)
case 'z':
case 'Z':
case 'P':
- sprintf(buffer, "<cparam '%c' (%p)>",
+ return PyUnicode_FromFormat("<cparam '%c' (%p)>",
self->tag, self->value.p);
break;
default:
if (is_literal_char((unsigned char)self->tag)) {
- sprintf(buffer, "<cparam '%c' at %p>",
+ return PyUnicode_FromFormat("<cparam '%c' at %p>",
(unsigned char)self->tag, (void *)self);
}
else {
- sprintf(buffer, "<cparam 0x%02x at %p>",
+ return PyUnicode_FromFormat("<cparam 0x%02x at %p>",
(unsigned char)self->tag, (void *)self);
}
- break;
}
- return PyUnicode_FromString(buffer);
}
static PyMemberDef PyCArgType_members[] = {
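
With the fix above, ctypes parameter reprs are built via PyUnicode_FromFormat rather than a fixed 256-byte stack buffer, so oversized float reprs can no longer overflow it; the observable behaviour from Python looks like this (the values are ordinary examples taken from the tests):

    import ctypes

    print(repr(ctypes.c_double.from_param(1e300)))  # <cparam 'd' (1e+300)>
    print(repr(ctypes.c_char.from_param(97)))       # <cparam 'c' ('a')>
    print(repr(ctypes.c_int.from_param(20000)))     # <cparam 'i' (20000)> ('l' on some platforms)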

@@ -9,10 +9,10 @@ Signed-off-by: hanxinke <hanxinke@huawei.com>
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/Lib/crypt.py b/Lib/crypt.py
index 8846602..4c62280 100644
index 33dbc46..3a7cea1 100644
--- a/Lib/crypt.py
+++ b/Lib/crypt.py
@@ -54,7 +54,7 @@ def mksalt(method=None, *, rounds=None):
@@ -55,7 +55,7 @@ def mksalt(method=None, *, rounds=None):
if not 4 <= log_rounds <= 31:
raise ValueError('rounds out of the range 2**4 to 2**31')
s += f'{log_rounds:02d}$'
@@ -21,7 +21,7 @@ index 8846602..4c62280 100644
if rounds is not None:
if not 1000 <= rounds <= 999_999_999:
raise ValueError('rounds out of the range 1000 to 999_999_999')
@@ -109,4 +109,7 @@ for _v in 'b', 'y', 'a', '':
@@ -117,4 +117,7 @@ for _v in 'b', 'y', 'a', '':
_add_method('MD5', '1', 8, 34)
_add_method('CRYPT', None, 2, 13)
@@ -30,5 +30,5 @@ index 8846602..4c62280 100644
+
del _v, _add_method
--
2.23.0
1.8.3.1

@@ -1,536 +0,0 @@
From e3110c3cfbb7daa690d54d0eff6c264c870a71bf Mon Sep 17 00:00:00 2001
From: Senthil Kumaran <senthil@uthcode.com>
Date: Mon, 15 Feb 2021 10:15:02 -0800
Subject: [PATCH] [3.8] bpo-42967: only use '&' as a query string separator
(GH-24297) (#24529)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
* bpo-42967: only use '&' as a query string separator (#24297)
bpo-42967: [security] Address a web cache-poisoning issue reported in
urllib.parse.parse_qsl().
urllib.parse will only use "&" as query string separator by default
instead of both ";" and "&" as allowed in earlier versions. An optional
argument separator with default value "&" is added to specify the
separator.
Co-authored-by: Éric Araujo <merwok@netwok.org>
Co-authored-by: blurb-it[bot] <43283697+blurb-it[bot]@users.noreply.github.com>
Co-authored-by: Ken Jin <28750310+Fidget-Spinner@users.noreply.github.com>
Co-authored-by: Éric Araujo <merwok@netwok.org>
(cherry picked from commit fcbe0cb04d35189401c0c880ebfb4311e952d776)
* [3.8] bpo-42967: only use '&' as a query string separator (GH-24297)
bpo-42967: [security] Address a web cache-poisoning issue reported in urllib.parse.parse_qsl().
urllib.parse will only use "&" as query string separator by default instead of both ";" and "&" as allowed in earlier versions. An optional argument separator with default value "&" is added to specify the separator.
Co-authored-by: Éric Araujo <merwok@netwok.org>
Co-authored-by: blurb-it[bot] <43283697+blurb-it[bot]@users.noreply.github.com>
Co-authored-by: Ken Jin <28750310+Fidget-Spinner@users.noreply.github.com>
Co-authored-by: Éric Araujo <merwok@netwok.org>.
(cherry picked from commit fcbe0cb04d35189401c0c880ebfb4311e952d776)
Co-authored-by: Adam Goldschmidt <adamgold7@gmail.com>
* Update correct version information.
* fix docs and make logic clearer
Co-authored-by: Adam Goldschmidt <adamgold7@gmail.com>
Co-authored-by: Fidget-Spinner <28750310+Fidget-Spinner@users.noreply.github.com>
---
Doc/library/cgi.rst | 11 ++-
Doc/library/urllib.parse.rst | 22 +++++-
Doc/whatsnew/3.6.rst | 13 ++++
Doc/whatsnew/3.7.rst | 13 ++++
Doc/whatsnew/3.8.rst | 13 ++++
Lib/cgi.py | 23 ++++---
Lib/test/test_cgi.py | 29 ++++++--
Lib/test/test_urlparse.py | 68 +++++++++++++------
Lib/urllib/parse.py | 19 ++++--
.../2021-02-14-15-59-16.bpo-42967.YApqDS.rst | 1 +
10 files changed, 166 insertions(+), 46 deletions(-)
create mode 100644 Misc/NEWS.d/next/Security/2021-02-14-15-59-16.bpo-42967.YApqDS.rst
diff --git a/Doc/library/cgi.rst b/Doc/library/cgi.rst
index 4048592e7361f..880074bed6026 100644
--- a/Doc/library/cgi.rst
+++ b/Doc/library/cgi.rst
@@ -277,14 +277,16 @@ These are useful if you want more control, or if you want to employ some of the
algorithms implemented in this module in other circumstances.
-.. function:: parse(fp=None, environ=os.environ, keep_blank_values=False, strict_parsing=False)
+.. function:: parse(fp=None, environ=os.environ, keep_blank_values=False, strict_parsing=False, separator="&")
Parse a query in the environment or from a file (the file defaults to
- ``sys.stdin``). The *keep_blank_values* and *strict_parsing* parameters are
+ ``sys.stdin``). The *keep_blank_values*, *strict_parsing* and *separator* parameters are
passed to :func:`urllib.parse.parse_qs` unchanged.
+ .. versionchanged:: 3.8.8
+ Added the *separator* parameter.
-.. function:: parse_multipart(fp, pdict, encoding="utf-8", errors="replace")
+.. function:: parse_multipart(fp, pdict, encoding="utf-8", errors="replace", separator="&")
Parse input of type :mimetype:`multipart/form-data` (for file uploads).
Arguments are *fp* for the input file, *pdict* for a dictionary containing
@@ -303,6 +305,9 @@ algorithms implemented in this module in other circumstances.
Added the *encoding* and *errors* parameters. For non-file fields, the
value is now a list of strings, not bytes.
+ .. versionchanged:: 3.8.8
+ Added the *separator* parameter.
+
.. function:: parse_header(string)
diff --git a/Doc/library/urllib.parse.rst b/Doc/library/urllib.parse.rst
index 25e5cc1a6ce0b..fcad7076e6c77 100644
--- a/Doc/library/urllib.parse.rst
+++ b/Doc/library/urllib.parse.rst
@@ -165,7 +165,7 @@ or on combining URL components into a URL string.
now raise :exc:`ValueError`.
-.. function:: parse_qs(qs, keep_blank_values=False, strict_parsing=False, encoding='utf-8', errors='replace', max_num_fields=None)
+.. function:: parse_qs(qs, keep_blank_values=False, strict_parsing=False, encoding='utf-8', errors='replace', max_num_fields=None, separator='&')
Parse a query string given as a string argument (data of type
:mimetype:`application/x-www-form-urlencoded`). Data are returned as a
@@ -190,6 +190,9 @@ or on combining URL components into a URL string.
read. If set, then throws a :exc:`ValueError` if there are more than
*max_num_fields* fields read.
+ The optional argument *separator* is the symbol to use for separating the
+ query arguments. It defaults to ``&``.
+
Use the :func:`urllib.parse.urlencode` function (with the ``doseq``
parameter set to ``True``) to convert such dictionaries into query
strings.
@@ -201,8 +204,14 @@ or on combining URL components into a URL string.
.. versionchanged:: 3.8
Added *max_num_fields* parameter.
+ .. versionchanged:: 3.8.8
+ Added *separator* parameter with the default value of ``&``. Python
+ versions earlier than Python 3.8.8 allowed using both ``;`` and ``&`` as
+ query parameter separator. This has been changed to allow only a single
+ separator key, with ``&`` as the default separator.
+
-.. function:: parse_qsl(qs, keep_blank_values=False, strict_parsing=False, encoding='utf-8', errors='replace', max_num_fields=None)
+.. function:: parse_qsl(qs, keep_blank_values=False, strict_parsing=False, encoding='utf-8', errors='replace', max_num_fields=None, separator='&')
Parse a query string given as a string argument (data of type
:mimetype:`application/x-www-form-urlencoded`). Data are returned as a list of
@@ -226,6 +235,9 @@ or on combining URL components into a URL string.
read. If set, then throws a :exc:`ValueError` if there are more than
*max_num_fields* fields read.
+ The optional argument *separator* is the symbol to use for separating the
+ query arguments. It defaults to ``&``.
+
Use the :func:`urllib.parse.urlencode` function to convert such lists of pairs into
query strings.
@@ -235,6 +247,12 @@ or on combining URL components into a URL string.
.. versionchanged:: 3.8
Added *max_num_fields* parameter.
+ .. versionchanged:: 3.8.8
+ Added *separator* parameter with the default value of ``&``. Python
+ versions earlier than Python 3.8.8 allowed using both ``;`` and ``&`` as
+ query parameter separator. This has been changed to allow only a single
+ separator key, with ``&`` as the default separator.
+
.. function:: urlunparse(parts)
diff --git a/Doc/whatsnew/3.6.rst b/Doc/whatsnew/3.6.rst
index 85a6657fdfbda..03a877a3d9178 100644
--- a/Doc/whatsnew/3.6.rst
+++ b/Doc/whatsnew/3.6.rst
@@ -2443,3 +2443,16 @@ because of the behavior of the socket option ``SO_REUSEADDR`` in UDP. For more
details, see the documentation for ``loop.create_datagram_endpoint()``.
(Contributed by Kyle Stanley, Antoine Pitrou, and Yury Selivanov in
:issue:`37228`.)
+
+Notable changes in Python 3.6.13
+================================
+
+Earlier Python versions allowed using both ``;`` and ``&`` as
+query parameter separators in :func:`urllib.parse.parse_qs` and
+:func:`urllib.parse.parse_qsl`. Due to security concerns, and to conform with
+newer W3C recommendations, this has been changed to allow only a single
+separator key, with ``&`` as the default. This change also affects
+:func:`cgi.parse` and :func:`cgi.parse_multipart` as they use the affected
+functions internally. For more details, please see their respective
+documentation.
+(Contributed by Adam Goldschmidt, Senthil Kumaran and Ken Jin in :issue:`42967`.)
diff --git a/Doc/whatsnew/3.7.rst b/Doc/whatsnew/3.7.rst
index 4933cba3990b1..824dc13e0c6fd 100644
--- a/Doc/whatsnew/3.7.rst
+++ b/Doc/whatsnew/3.7.rst
@@ -2556,3 +2556,16 @@ because of the behavior of the socket option ``SO_REUSEADDR`` in UDP. For more
details, see the documentation for ``loop.create_datagram_endpoint()``.
(Contributed by Kyle Stanley, Antoine Pitrou, and Yury Selivanov in
:issue:`37228`.)
+
+Notable changes in Python 3.7.10
+================================
+
+Earlier Python versions allowed using both ``;`` and ``&`` as
+query parameter separators in :func:`urllib.parse.parse_qs` and
+:func:`urllib.parse.parse_qsl`. Due to security concerns, and to conform with
+newer W3C recommendations, this has been changed to allow only a single
+separator key, with ``&`` as the default. This change also affects
+:func:`cgi.parse` and :func:`cgi.parse_multipart` as they use the affected
+functions internally. For more details, please see their respective
+documentation.
+(Contributed by Adam Goldschmidt, Senthil Kumaran and Ken Jin in :issue:`42967`.)
diff --git a/Doc/whatsnew/3.8.rst b/Doc/whatsnew/3.8.rst
index 1a192800b2f02..632ccc1f2c40a 100644
--- a/Doc/whatsnew/3.8.rst
+++ b/Doc/whatsnew/3.8.rst
@@ -2251,3 +2251,16 @@ The constant values of future flags in the :mod:`__future__` module
are updated in order to prevent collision with compiler flags. Previously
``PyCF_ALLOW_TOP_LEVEL_AWAIT`` was clashing with ``CO_FUTURE_DIVISION``.
(Contributed by Batuhan Taskaya in :issue:`39562`)
+
+Notable changes in Python 3.8.8
+===============================
+
+Earlier Python versions allowed using both ``;`` and ``&`` as
+query parameter separators in :func:`urllib.parse.parse_qs` and
+:func:`urllib.parse.parse_qsl`. Due to security concerns, and to conform with
+newer W3C recommendations, this has been changed to allow only a single
+separator key, with ``&`` as the default. This change also affects
+:func:`cgi.parse` and :func:`cgi.parse_multipart` as they use the affected
+functions internally. For more details, please see their respective
+documentation.
+(Contributed by Adam Goldschmidt, Senthil Kumaran and Ken Jin in :issue:`42967`.)
diff --git a/Lib/cgi.py b/Lib/cgi.py
index 77ab703cc0360..1e880e51848af 100755
--- a/Lib/cgi.py
+++ b/Lib/cgi.py
@@ -115,7 +115,8 @@ def closelog():
# 0 ==> unlimited input
maxlen = 0
-def parse(fp=None, environ=os.environ, keep_blank_values=0, strict_parsing=0):
+def parse(fp=None, environ=os.environ, keep_blank_values=0,
+ strict_parsing=0, separator='&'):
"""Parse a query in the environment or from a file (default stdin)
Arguments, all optional:
@@ -134,6 +135,9 @@ def parse(fp=None, environ=os.environ, keep_blank_values=0, strict_parsing=0):
strict_parsing: flag indicating what to do with parsing errors.
If false (the default), errors are silently ignored.
If true, errors raise a ValueError exception.
+
+ separator: str. The symbol to use for separating the query arguments.
+ Defaults to &.
"""
if fp is None:
fp = sys.stdin
@@ -154,7 +158,7 @@ def parse(fp=None, environ=os.environ, keep_blank_values=0, strict_parsing=0):
if environ['REQUEST_METHOD'] == 'POST':
ctype, pdict = parse_header(environ['CONTENT_TYPE'])
if ctype == 'multipart/form-data':
- return parse_multipart(fp, pdict)
+ return parse_multipart(fp, pdict, separator=separator)
elif ctype == 'application/x-www-form-urlencoded':
clength = int(environ['CONTENT_LENGTH'])
if maxlen and clength > maxlen:
@@ -178,10 +182,10 @@ def parse(fp=None, environ=os.environ, keep_blank_values=0, strict_parsing=0):
qs = ""
environ['QUERY_STRING'] = qs # XXX Shouldn't, really
return urllib.parse.parse_qs(qs, keep_blank_values, strict_parsing,
- encoding=encoding)
+ encoding=encoding, separator=separator)
-def parse_multipart(fp, pdict, encoding="utf-8", errors="replace"):
+def parse_multipart(fp, pdict, encoding="utf-8", errors="replace", separator='&'):
"""Parse multipart input.
Arguments:
@@ -205,7 +209,7 @@ def parse_multipart(fp, pdict, encoding="utf-8", errors="replace"):
except KeyError:
pass
fs = FieldStorage(fp, headers=headers, encoding=encoding, errors=errors,
- environ={'REQUEST_METHOD': 'POST'})
+ environ={'REQUEST_METHOD': 'POST'}, separator=separator)
return {k: fs.getlist(k) for k in fs}
def _parseparam(s):
@@ -315,7 +319,7 @@ class FieldStorage:
def __init__(self, fp=None, headers=None, outerboundary=b'',
environ=os.environ, keep_blank_values=0, strict_parsing=0,
limit=None, encoding='utf-8', errors='replace',
- max_num_fields=None):
+ max_num_fields=None, separator='&'):
"""Constructor. Read multipart/* until last part.
Arguments, all optional:
@@ -363,6 +367,7 @@ def __init__(self, fp=None, headers=None, outerboundary=b'',
self.keep_blank_values = keep_blank_values
self.strict_parsing = strict_parsing
self.max_num_fields = max_num_fields
+ self.separator = separator
if 'REQUEST_METHOD' in environ:
method = environ['REQUEST_METHOD'].upper()
self.qs_on_post = None
@@ -589,7 +594,7 @@ def read_urlencoded(self):
query = urllib.parse.parse_qsl(
qs, self.keep_blank_values, self.strict_parsing,
encoding=self.encoding, errors=self.errors,
- max_num_fields=self.max_num_fields)
+ max_num_fields=self.max_num_fields, separator=self.separator)
self.list = [MiniFieldStorage(key, value) for key, value in query]
self.skip_lines()
@@ -605,7 +610,7 @@ def read_multi(self, environ, keep_blank_values, strict_parsing):
query = urllib.parse.parse_qsl(
self.qs_on_post, self.keep_blank_values, self.strict_parsing,
encoding=self.encoding, errors=self.errors,
- max_num_fields=self.max_num_fields)
+ max_num_fields=self.max_num_fields, separator=self.separator)
self.list.extend(MiniFieldStorage(key, value) for key, value in query)
klass = self.FieldStorageClass or self.__class__
@@ -649,7 +654,7 @@ def read_multi(self, environ, keep_blank_values, strict_parsing):
else self.limit - self.bytes_read
part = klass(self.fp, headers, ib, environ, keep_blank_values,
strict_parsing, limit,
- self.encoding, self.errors, max_num_fields)
+ self.encoding, self.errors, max_num_fields, self.separator)
if max_num_fields is not None:
max_num_fields -= 1
diff --git a/Lib/test/test_cgi.py b/Lib/test/test_cgi.py
index 101942de947fb..4e1506a6468b9 100644
--- a/Lib/test/test_cgi.py
+++ b/Lib/test/test_cgi.py
@@ -53,12 +53,9 @@ def do_test(buf, method):
("", ValueError("bad query field: ''")),
("&", ValueError("bad query field: ''")),
("&&", ValueError("bad query field: ''")),
- (";", ValueError("bad query field: ''")),
- (";&;", ValueError("bad query field: ''")),
# Should the next few really be valid?
("=", {}),
("=&=", {}),
- ("=;=", {}),
# This rest seem to make sense
("=a", {'': ['a']}),
("&=a", ValueError("bad query field: ''")),
@@ -73,8 +70,6 @@ def do_test(buf, method):
("a=a+b&b=b+c", {'a': ['a b'], 'b': ['b c']}),
("a=a+b&a=b+a", {'a': ['a b', 'b a']}),
("x=1&y=2.0&z=2-3.%2b0", {'x': ['1'], 'y': ['2.0'], 'z': ['2-3.+0']}),
- ("x=1;y=2.0&z=2-3.%2b0", {'x': ['1'], 'y': ['2.0'], 'z': ['2-3.+0']}),
- ("x=1;y=2.0;z=2-3.%2b0", {'x': ['1'], 'y': ['2.0'], 'z': ['2-3.+0']}),
("Hbc5161168c542333633315dee1182227:key_store_seqid=400006&cuyer=r&view=bustomer&order_id=0bb2e248638833d48cb7fed300000f1b&expire=964546263&lobale=en-US&kid=130003.300038&ss=env",
{'Hbc5161168c542333633315dee1182227:key_store_seqid': ['400006'],
'cuyer': ['r'],
@@ -201,6 +196,30 @@ def test_strict(self):
else:
self.assertEqual(fs.getvalue(key), expect_val[0])
+ def test_separator(self):
+ parse_semicolon = [
+ ("x=1;y=2.0", {'x': ['1'], 'y': ['2.0']}),
+ ("x=1;y=2.0;z=2-3.%2b0", {'x': ['1'], 'y': ['2.0'], 'z': ['2-3.+0']}),
+ (";", ValueError("bad query field: ''")),
+ (";;", ValueError("bad query field: ''")),
+ ("=;a", ValueError("bad query field: 'a'")),
+ (";b=a", ValueError("bad query field: ''")),
+ ("b;=a", ValueError("bad query field: 'b'")),
+ ("a=a+b;b=b+c", {'a': ['a b'], 'b': ['b c']}),
+ ("a=a+b;a=b+a", {'a': ['a b', 'b a']}),
+ ]
+ for orig, expect in parse_semicolon:
+ env = {'QUERY_STRING': orig}
+ fs = cgi.FieldStorage(separator=';', environ=env)
+ if isinstance(expect, dict):
+ for key in expect.keys():
+ expect_val = expect[key]
+ self.assertIn(key, fs)
+ if len(expect_val) > 1:
+ self.assertEqual(fs.getvalue(key), expect_val)
+ else:
+ self.assertEqual(fs.getvalue(key), expect_val[0])
+
def test_log(self):
cgi.log("Testing")
diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py
index 4ae6ed33858ce..90c8d6922629e 100644
--- a/Lib/test/test_urlparse.py
+++ b/Lib/test/test_urlparse.py
@@ -32,16 +32,10 @@
(b"&a=b", [(b'a', b'b')]),
(b"a=a+b&b=b+c", [(b'a', b'a b'), (b'b', b'b c')]),
(b"a=1&a=2", [(b'a', b'1'), (b'a', b'2')]),
- (";", []),
- (";;", []),
- (";a=b", [('a', 'b')]),
- ("a=a+b;b=b+c", [('a', 'a b'), ('b', 'b c')]),
- ("a=1;a=2", [('a', '1'), ('a', '2')]),
- (b";", []),
- (b";;", []),
- (b";a=b", [(b'a', b'b')]),
- (b"a=a+b;b=b+c", [(b'a', b'a b'), (b'b', b'b c')]),
- (b"a=1;a=2", [(b'a', b'1'), (b'a', b'2')]),
+ (";a=b", [(';a', 'b')]),
+ ("a=a+b;b=b+c", [('a', 'a b;b=b c')]),
+ (b";a=b", [(b';a', b'b')]),
+ (b"a=a+b;b=b+c", [(b'a', b'a b;b=b c')]),
]
# Each parse_qs testcase is a two-tuple that contains
@@ -68,16 +62,10 @@
(b"&a=b", {b'a': [b'b']}),
(b"a=a+b&b=b+c", {b'a': [b'a b'], b'b': [b'b c']}),
(b"a=1&a=2", {b'a': [b'1', b'2']}),
- (";", {}),
- (";;", {}),
- (";a=b", {'a': ['b']}),
- ("a=a+b;b=b+c", {'a': ['a b'], 'b': ['b c']}),
- ("a=1;a=2", {'a': ['1', '2']}),
- (b";", {}),
- (b";;", {}),
- (b";a=b", {b'a': [b'b']}),
- (b"a=a+b;b=b+c", {b'a': [b'a b'], b'b': [b'b c']}),
- (b"a=1;a=2", {b'a': [b'1', b'2']}),
+ (";a=b", {';a': ['b']}),
+ ("a=a+b;b=b+c", {'a': ['a b;b=b c']}),
+ (b";a=b", {b';a': [b'b']}),
+ (b"a=a+b;b=b+c", {b'a':[ b'a b;b=b c']}),
]
class UrlParseTestCase(unittest.TestCase):
@@ -884,10 +872,46 @@ def test_parse_qsl_encoding(self):
def test_parse_qsl_max_num_fields(self):
with self.assertRaises(ValueError):
urllib.parse.parse_qs('&'.join(['a=a']*11), max_num_fields=10)
- with self.assertRaises(ValueError):
- urllib.parse.parse_qs(';'.join(['a=a']*11), max_num_fields=10)
urllib.parse.parse_qs('&'.join(['a=a']*10), max_num_fields=10)
+ def test_parse_qs_separator(self):
+ parse_qs_semicolon_cases = [
+ (";", {}),
+ (";;", {}),
+ (";a=b", {'a': ['b']}),
+ ("a=a+b;b=b+c", {'a': ['a b'], 'b': ['b c']}),
+ ("a=1;a=2", {'a': ['1', '2']}),
+ (b";", {}),
+ (b";;", {}),
+ (b";a=b", {b'a': [b'b']}),
+ (b"a=a+b;b=b+c", {b'a': [b'a b'], b'b': [b'b c']}),
+ (b"a=1;a=2", {b'a': [b'1', b'2']}),
+ ]
+ for orig, expect in parse_qs_semicolon_cases:
+ with self.subTest(f"Original: {orig!r}, Expected: {expect!r}"):
+ result = urllib.parse.parse_qs(orig, separator=';')
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
+
+
+ def test_parse_qsl_separator(self):
+ parse_qsl_semicolon_cases = [
+ (";", []),
+ (";;", []),
+ (";a=b", [('a', 'b')]),
+ ("a=a+b;b=b+c", [('a', 'a b'), ('b', 'b c')]),
+ ("a=1;a=2", [('a', '1'), ('a', '2')]),
+ (b";", []),
+ (b";;", []),
+ (b";a=b", [(b'a', b'b')]),
+ (b"a=a+b;b=b+c", [(b'a', b'a b'), (b'b', b'b c')]),
+ (b"a=1;a=2", [(b'a', b'1'), (b'a', b'2')]),
+ ]
+ for orig, expect in parse_qsl_semicolon_cases:
+ with self.subTest(f"Original: {orig!r}, Expected: {expect!r}"):
+ result = urllib.parse.parse_qsl(orig, separator=';')
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
+
+
def test_urlencode_sequences(self):
# Other tests incidentally urlencode things; test non-covered cases:
# Sequence and object values.
diff --git a/Lib/urllib/parse.py b/Lib/urllib/parse.py
index 95be7181133b4..0c1c94f5fc986 100644
--- a/Lib/urllib/parse.py
+++ b/Lib/urllib/parse.py
@@ -650,7 +650,7 @@ def unquote(string, encoding='utf-8', errors='replace'):
def parse_qs(qs, keep_blank_values=False, strict_parsing=False,
- encoding='utf-8', errors='replace', max_num_fields=None):
+ encoding='utf-8', errors='replace', max_num_fields=None, separator='&'):
"""Parse a query given as a string argument.
Arguments:
@@ -674,12 +674,15 @@ def parse_qs(qs, keep_blank_values=False, strict_parsing=False,
max_num_fields: int. If set, then throws a ValueError if there
are more than n fields read by parse_qsl().
+ separator: str. The symbol to use for separating the query arguments.
+ Defaults to &.
+
Returns a dictionary.
"""
parsed_result = {}
pairs = parse_qsl(qs, keep_blank_values, strict_parsing,
encoding=encoding, errors=errors,
- max_num_fields=max_num_fields)
+ max_num_fields=max_num_fields, separator=separator)
for name, value in pairs:
if name in parsed_result:
parsed_result[name].append(value)
@@ -689,7 +692,7 @@ def parse_qs(qs, keep_blank_values=False, strict_parsing=False,
def parse_qsl(qs, keep_blank_values=False, strict_parsing=False,
- encoding='utf-8', errors='replace', max_num_fields=None):
+ encoding='utf-8', errors='replace', max_num_fields=None, separator='&'):
"""Parse a query given as a string argument.
Arguments:
@@ -712,19 +715,25 @@ def parse_qsl(qs, keep_blank_values=False, strict_parsing=False,
max_num_fields: int. If set, then throws a ValueError
if there are more than n fields read by parse_qsl().
+ separator: str. The symbol to use for separating the query arguments.
+ Defaults to &.
+
Returns a list, as G-d intended.
"""
qs, _coerce_result = _coerce_args(qs)
+ if not separator or (not isinstance(separator, (str, bytes))):
+ raise ValueError("Separator must be of type string or bytes.")
+
# If max_num_fields is defined then check that the number of fields
# is less than max_num_fields. This prevents a memory exhaustion DOS
# attack via post bodies with many fields.
if max_num_fields is not None:
- num_fields = 1 + qs.count('&') + qs.count(';')
+ num_fields = 1 + qs.count(separator)
if max_num_fields < num_fields:
raise ValueError('Max number of fields exceeded')
- pairs = [s2 for s1 in qs.split('&') for s2 in s1.split(';')]
+ pairs = [s1 for s1 in qs.split(separator)]
r = []
for name_value in pairs:
if not name_value and not strict_parsing:
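
With the patched urllib.parse above, ';' no longer splits query strings by default; callers that still need it must pass the new separator argument explicitly, for example:

    from urllib.parse import parse_qs, parse_qsl

    qs = "a=1;a=2&b=3"

    # Default: only '&' separates fields, so the ';' stays inside the first value.
    print(parse_qsl(qs))                        # [('a', '1;a=2'), ('b', '3')]
    print(parse_qs(qs))                         # {'a': ['1;a=2'], 'b': ['3']}

    # Opting back in to ';' as the separator:
    print(parse_qsl("a=1;a=2", separator=";"))  # [('a', '1'), ('a', '2')]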

@@ -1,126 +0,0 @@
From 5374fbc31446364bf5f12e5ab88c5493c35eaf04 Mon Sep 17 00:00:00 2001
From: "Miss Islington (bot)"
<31488909+miss-islington@users.noreply.github.com>
Date: Sun, 2 May 2021 06:49:03 -0700
Subject: [PATCH] bpo-36384: Leading zeros in IPv4 addresses are no longer
tolerated (GH-25099) (GH-25815)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Reverts commit e653d4d8e820a7a004ad399530af0135b45db27a and makes
parsing even more strict. Like socket.inet_pton() any leading zero
is now treated as invalid input.
Signed-off-by: Christian Heimes <christian@python.org>
Co-authored-by: Łukasz Langa <lukasz@langa.pl>
(cherry picked from commit 60ce8f0be6354ad565393ab449d8de5d713f35bc)
---
Doc/library/ipaddress.rst | 19 +++++++++++++++++--
Lib/ipaddress.py | 5 +++++
Lib/test/test_ipaddress.py | 21 +++++++++++++++++----
.../2021-03-30-16-29-51.bpo-36384.sCAmLs.rst | 6 ++++++
4 files changed, 45 insertions(+), 6 deletions(-)
create mode 100644 Misc/NEWS.d/next/Security/2021-03-30-16-29-51.bpo-36384.sCAmLs.rst
diff --git a/Doc/library/ipaddress.rst b/Doc/library/ipaddress.rst
index 140401d..7dba43f 100644
--- a/Doc/library/ipaddress.rst
+++ b/Doc/library/ipaddress.rst
@@ -104,8 +104,7 @@ write code that handles both IP versions correctly. Address objects are
1. A string in decimal-dot notation, consisting of four decimal integers in
the inclusive range 0--255, separated by dots (e.g. ``192.168.0.1``). Each
integer represents an octet (byte) in the address. Leading zeroes are
- tolerated only for values less than 8 (as there is no ambiguity
- between the decimal and octal interpretations of such strings).
+ not tolerated to prevent confusion with octal notation.
2. An integer that fits into 32 bits.
3. An integer packed into a :class:`bytes` object of length 4 (most
significant octet first).
@@ -117,6 +116,22 @@ write code that handles both IP versions correctly. Address objects are
>>> ipaddress.IPv4Address(b'\xC0\xA8\x00\x01')
IPv4Address('192.168.0.1')
+ .. versionchanged:: 3.8
+
+ Leading zeros are tolerated, even in ambiguous cases that look like
+ octal notation.
+
+ .. versionchanged:: 3.10
+
+ Leading zeros are no longer tolerated and are treated as an error.
+ IPv4 address strings are now parsed as strict as glibc
+ :func:`~socket.inet_pton`.
+
+ .. versionchanged:: 3.9.5
+
+ The above change was also included in Python 3.9 starting with
+ version 3.9.5.
+
.. attribute:: version
The appropriate version number: ``4`` for IPv4, ``6`` for IPv6.
diff --git a/Lib/ipaddress.py b/Lib/ipaddress.py
index a3a04f7..e1c346c 100644
--- a/Lib/ipaddress.py
+++ b/Lib/ipaddress.py
@@ -1173,6 +1173,11 @@ class _BaseV4:
if len(octet_str) > 3:
msg = "At most 3 characters permitted in %r"
raise ValueError(msg % octet_str)
+ # Handle leading zeros as strict as glibc's inet_pton()
+ # See security bug bpo-36384
+ if octet_str != '0' and octet_str[0] == '0':
+ msg = "Leading zeros are not permitted in %r"
+ raise ValueError(msg % octet_str)
# Convert to integer (we know digits are legal)
octet_int = int(octet_str, 10)
if octet_int > 255:
diff --git a/Lib/test/test_ipaddress.py b/Lib/test/test_ipaddress.py
index 2eba740..5205fb0 100644
--- a/Lib/test/test_ipaddress.py
+++ b/Lib/test/test_ipaddress.py
@@ -97,10 +97,23 @@ class CommonTestMixin:
class CommonTestMixin_v4(CommonTestMixin):
def test_leading_zeros(self):
- self.assertInstancesEqual("000.000.000.000", "0.0.0.0")
- self.assertInstancesEqual("192.168.000.001", "192.168.0.1")
- self.assertInstancesEqual("016.016.016.016", "16.16.16.16")
- self.assertInstancesEqual("001.000.008.016", "1.0.8.16")
+ # bpo-36384: no leading zeros to avoid ambiguity with octal notation
+ msg = "Leading zeros are not permitted in '\d+'"
+ addresses = [
+ "000.000.000.000",
+ "192.168.000.001",
+ "016.016.016.016",
+ "192.168.000.001",
+ "001.000.008.016",
+ "01.2.3.40",
+ "1.02.3.40",
+ "1.2.03.40",
+ "1.2.3.040",
+ ]
+ for address in addresses:
+ with self.subTest(address=address):
+ with self.assertAddressError(msg):
+ self.factory(address)
def test_int(self):
self.assertInstancesEqual(0, "0.0.0.0")
diff --git a/Misc/NEWS.d/next/Security/2021-03-30-16-29-51.bpo-36384.sCAmLs.rst b/Misc/NEWS.d/next/Security/2021-03-30-16-29-51.bpo-36384.sCAmLs.rst
new file mode 100644
index 0000000..f956cde
--- /dev/null
+++ b/Misc/NEWS.d/next/Security/2021-03-30-16-29-51.bpo-36384.sCAmLs.rst
@@ -0,0 +1,6 @@
+:mod:`ipaddress` module no longer accepts any leading zeros in IPv4 address
+strings. Leading zeros are ambiguous and interpreted as octal notation by
+some libraries. For example the legacy function :func:`socket.inet_aton`
+treats leading zeros as octal notatation. glibc implementation of modern
+:func:`~socket.inet_pton` does not accept any leading zeros. For a while
+the :mod:`ipaddress` module used to accept ambiguous leading zeros.
--
1.8.3.1
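
After the change above, any IPv4 octet with a leading zero is rejected, matching glibc inet_pton(); a quick check on a patched interpreter:

    import ipaddress

    print(ipaddress.IPv4Address("192.168.0.1"))    # still parses normally

    try:
        ipaddress.IPv4Address("192.168.000.001")   # leading zeros now rejected
    except ValueError as exc:
        print("rejected:", exc)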

@@ -1,100 +0,0 @@
From 7e38d3309e0a5a7b9e23ef933aef0079c6e317f7 Mon Sep 17 00:00:00 2001
From: "Miss Islington (bot)"
<31488909+miss-islington@users.noreply.github.com>
Date: Mon, 29 Mar 2021 06:02:40 -0700
Subject: [PATCH] bpo-42988: Remove the pydoc getfile feature (GH-25015)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
CVE-2021-3426: Remove the "getfile" feature of the pydoc module which
could be abused to read arbitrary files on the disk (directory
traversal vulnerability). Moreover, even source code of Python
modules can contain sensitive data like passwords. Vulnerability
reported by David Schwörer.
(cherry picked from commit 9b999479c0022edfc9835a8a1f06e046f3881048)
Co-authored-by: Victor Stinner <vstinner@python.org>
---
Lib/pydoc.py | 18 ------------------
Lib/test/test_pydoc.py | 6 ------
.../2021-03-24-14-16-56.bpo-42988.P2aNco.rst | 4 ++++
3 files changed, 4 insertions(+), 24 deletions(-)
create mode 100644 Misc/NEWS.d/next/Security/2021-03-24-14-16-56.bpo-42988.P2aNco.rst
diff --git a/Lib/pydoc.py b/Lib/pydoc.py
index dc3377d68f8caa..afec613dd85a06 100644
--- a/Lib/pydoc.py
+++ b/Lib/pydoc.py
@@ -2364,9 +2364,6 @@ def page(self, title, contents):
%s</head><body bgcolor="#f0f0f8">%s<div style="clear:both;padding-top:.5em;">%s</div>
</body></html>''' % (title, css_link, html_navbar(), contents)
- def filelink(self, url, path):
- return '<a href="getfile?key=%s">%s</a>' % (url, path)
-
html = _HTMLDoc()
@@ -2452,19 +2449,6 @@ def bltinlink(name):
'key = %s' % key, '#ffffff', '#ee77aa', '<br>'.join(results))
return 'Search Results', contents
- def html_getfile(path):
- """Get and display a source file listing safely."""
- path = urllib.parse.unquote(path)
- with tokenize.open(path) as fp:
- lines = html.escape(fp.read())
- body = '<pre>%s</pre>' % lines
- heading = html.heading(
- '<big><big><strong>File Listing</strong></big></big>',
- '#ffffff', '#7799ee')
- contents = heading + html.bigsection(
- 'File: %s' % path, '#ffffff', '#ee77aa', body)
- return 'getfile %s' % path, contents
-
def html_topics():
"""Index of topic texts available."""
@@ -2556,8 +2540,6 @@ def get_html_page(url):
op, _, url = url.partition('=')
if op == "search?key":
title, content = html_search(url)
- elif op == "getfile?key":
- title, content = html_getfile(url)
elif op == "topic?key":
# try topics first, then objects.
try:
diff --git a/Lib/test/test_pydoc.py b/Lib/test/test_pydoc.py
index c80477c50f0980..72ed8a93b712b0 100644
--- a/Lib/test/test_pydoc.py
+++ b/Lib/test/test_pydoc.py
@@ -1360,18 +1360,12 @@ def test_url_requests(self):
("topic?key=def", "Pydoc: KEYWORD def"),
("topic?key=STRINGS", "Pydoc: TOPIC STRINGS"),
("foobar", "Pydoc: Error - foobar"),
- ("getfile?key=foobar", "Pydoc: Error - getfile?key=foobar"),
]
with self.restrict_walk_packages():
for url, title in requests:
self.call_url_handler(url, title)
- path = string.__file__
- title = "Pydoc: getfile " + path
- url = "getfile?key=" + path
- self.call_url_handler(url, title)
-
class TestHelper(unittest.TestCase):
def test_keywords(self):
diff --git a/Misc/NEWS.d/next/Security/2021-03-24-14-16-56.bpo-42988.P2aNco.rst b/Misc/NEWS.d/next/Security/2021-03-24-14-16-56.bpo-42988.P2aNco.rst
new file mode 100644
index 00000000000000..4b42dd05305a83
--- /dev/null
+++ b/Misc/NEWS.d/next/Security/2021-03-24-14-16-56.bpo-42988.P2aNco.rst
@@ -0,0 +1,4 @@
+CVE-2021-3426: Remove the ``getfile`` feature of the :mod:`pydoc` module which
+could be abused to read arbitrary files on the disk (directory traversal
+vulnerability). Moreover, even source code of Python modules can contain
+sensitive data like passwords. Vulnerability reported by David Schwörer.
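
For illustration only, a rough check that the pydoc web server no longer serves
arbitrary files on a patched interpreter. pydoc._url_handler is a private helper
behind "python -m pydoc -b", and "/etc/passwd" is just an illustrative path; both
are assumptions used here purely to poke the URL dispatcher directly.

    import pydoc

    page = pydoc._url_handler("getfile?key=/etc/passwd", "text/html")
    # The dispatcher now renders an error page instead of the file contents.
    print("Error" in page)    # True
    print("root:" in page)    # False: the file is never read
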

View File

@ -1,40 +0,0 @@
From 7215d1ae25525c92b026166f9d5cac85fb1defe1 Mon Sep 17 00:00:00 2001
From: Yeting Li <liyt@ios.ac.cn>
Date: Wed, 7 Apr 2021 19:27:41 +0800
Subject: [PATCH] bpo-43075: Fix ReDoS in urllib AbstractBasicAuthHandler
(GH-24391)
Fix a Regular Expression Denial of Service (ReDoS) vulnerability in
urllib.request.AbstractBasicAuthHandler. The ReDoS-vulnerable regex
has quadratic worst-case complexity, allowing a denial of service when
the client parses a crafted, invalid WWW-Authenticate header. The issue
is on the client side and requires a remote attacker to control the HTTP server.
---
Lib/urllib/request.py | 2 +-
Misc/NEWS.d/next/Security/2021-01-31-05-28-14.bpo-43075.DoAXqO.rst | 1 +
2 files changed, 2 insertions(+), 1 deletion(-)
create mode 100644 Misc/NEWS.d/next/Security/2021-01-31-05-28-14.bpo-43075.DoAXqO.rst
diff --git a/Lib/urllib/request.py b/Lib/urllib/request.py
index e440738..f6ec148 100644
--- a/Lib/urllib/request.py
+++ b/Lib/urllib/request.py
@@ -947,7 +947,7 @@ class AbstractBasicAuthHandler:
# (single quotes are a violation of the RFC, but appear in the wild)
rx = re.compile('(?:^|,)' # start of the string or ','
'[ \t]*' # optional whitespaces
- '([^ \t]+)' # scheme like "Basic"
+ '([^ \t,]+)' # scheme like "Basic"
'[ \t]+' # mandatory whitespaces
# realm=xxx
# realm='xxx'
diff --git a/Misc/NEWS.d/next/Security/2021-01-31-05-28-14.bpo-43075.DoAXqO.rst b/Misc/NEWS.d/next/Security/2021-01-31-05-28-14.bpo-43075.DoAXqO.rst
new file mode 100644
index 0000000..1c9f727
--- /dev/null
+++ b/Misc/NEWS.d/next/Security/2021-01-31-05-28-14.bpo-43075.DoAXqO.rst
@@ -0,0 +1 @@
+Fix a Regular Expression Denial of Service (ReDoS) vulnerability in :class:`urllib.request.AbstractBasicAuthHandler`. The ReDoS-vulnerable regex has quadratic worst-case complexity, allowing a denial of service when the client parses a crafted, invalid WWW-Authenticate header. The issue is on the client side and requires a remote attacker to control the HTTP server.
--
1.8.3.1
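
A small, self-contained timing sketch of why the one-character change matters.
The input size and timings are illustrative only: the old pattern backtracks
quadratically on a header made of commas, while the new one fails fast because
the scheme group can no longer swallow commas.

    import re
    import time

    OLD = re.compile('(?:^|,)[ \t]*([^ \t]+)[ \t]+')    # scheme may swallow ','
    NEW = re.compile('(?:^|,)[ \t]*([^ \t,]+)[ \t]+')   # scheme stops at ','

    crafted = ',' * 5000   # a malicious WWW-Authenticate value

    for name, rx in (("old", OLD), ("new", NEW)):
        start = time.perf_counter()
        rx.findall(crafted)
        print(name, round(time.perf_counter() - start, 4), "seconds")
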

View File

@ -1,120 +0,0 @@
From 47895e31b6f626bc6ce47d175fe9d43c1098909d Mon Sep 17 00:00:00 2001
From: Gen Xu <xgbarry@gmail.com>
Date: Wed, 5 May 2021 15:42:41 -0700
Subject: [PATCH] bpo-44022: Fix http client infinite line reading (DoS) after
a HTTP 100 Continue (GH-25916)
Fixes a potential denial of service in http.client where it could get stuck reading lines from a malicious server after a 100 Continue response.
Co-authored-by: Gregory P. Smith <greg@krypto.org>
---
Lib/http/client.py | 38 ++++++++++++----------
Lib/test/test_httplib.py | 10 +++++-
.../2021-05-05-17-37-04.bpo-44022.bS3XJ9.rst | 2 ++
3 files changed, 32 insertions(+), 18 deletions(-)
create mode 100644 Misc/NEWS.d/next/Security/2021-05-05-17-37-04.bpo-44022.bS3XJ9.rst
diff --git a/Lib/http/client.py b/Lib/http/client.py
index c2ad047..2b88d43 100644
--- a/Lib/http/client.py
+++ b/Lib/http/client.py
@@ -201,15 +201,11 @@ class HTTPMessage(email.message.Message):
lst.append(line)
return lst
-def parse_headers(fp, _class=HTTPMessage):
- """Parses only RFC2822 headers from a file pointer.
-
- email Parser wants to see strings rather than bytes.
- But a TextIOWrapper around self.rfile would buffer too many bytes
- from the stream, bytes which we later need to read as bytes.
- So we read the correct bytes here, as bytes, for email Parser
- to parse.
+def _read_headers(fp):
+ """Reads potential header lines into a list from a file pointer.
+ Length of line is limited by _MAXLINE, and number of
+ headers is limited by _MAXHEADERS.
"""
headers = []
while True:
@@ -221,6 +217,19 @@ def parse_headers(fp, _class=HTTPMessage):
raise HTTPException("got more than %d headers" % _MAXHEADERS)
if line in (b'\r\n', b'\n', b''):
break
+ return headers
+
+def parse_headers(fp, _class=HTTPMessage):
+ """Parses only RFC2822 headers from a file pointer.
+
+ email Parser wants to see strings rather than bytes.
+ But a TextIOWrapper around self.rfile would buffer too many bytes
+ from the stream, bytes which we later need to read as bytes.
+ So we read the correct bytes here, as bytes, for email Parser
+ to parse.
+
+ """
+ headers = _read_headers(fp)
hstring = b''.join(headers).decode('iso-8859-1')
return email.parser.Parser(_class=_class).parsestr(hstring)
@@ -308,15 +317,10 @@ class HTTPResponse(io.BufferedIOBase):
if status != CONTINUE:
break
# skip the header from the 100 response
- while True:
- skip = self.fp.readline(_MAXLINE + 1)
- if len(skip) > _MAXLINE:
- raise LineTooLong("header line")
- skip = skip.strip()
- if not skip:
- break
- if self.debuglevel > 0:
- print("header:", skip)
+ skipped_headers = _read_headers(self.fp)
+ if self.debuglevel > 0:
+ print("headers:", skipped_headers)
+ del skipped_headers
self.code = self.status = status
self.reason = reason.strip()
diff --git a/Lib/test/test_httplib.py b/Lib/test/test_httplib.py
index 5a5fcec..2777969 100644
--- a/Lib/test/test_httplib.py
+++ b/Lib/test/test_httplib.py
@@ -999,6 +999,14 @@ class BasicTest(TestCase):
resp = client.HTTPResponse(FakeSocket(body))
self.assertRaises(client.LineTooLong, resp.begin)
+ def test_overflowing_header_limit_after_100(self):
+ body = (
+ 'HTTP/1.1 100 OK\r\n'
+ 'r\n' * 32768
+ )
+ resp = client.HTTPResponse(FakeSocket(body))
+ self.assertRaises(client.HTTPException, resp.begin)
+
def test_overflowing_chunked_line(self):
body = (
'HTTP/1.1 200 OK\r\n'
@@ -1400,7 +1408,7 @@ class Readliner:
class OfflineTest(TestCase):
def test_all(self):
# Documented objects defined in the module should be in __all__
- expected = {"responses"} # White-list documented dict() object
+ expected = {"responses"} # Allowlist documented dict() object
# HTTPMessage, parse_headers(), and the HTTP status code constants are
# intentionally omitted for simplicity
blacklist = {"HTTPMessage", "parse_headers"}
diff --git a/Misc/NEWS.d/next/Security/2021-05-05-17-37-04.bpo-44022.bS3XJ9.rst b/Misc/NEWS.d/next/Security/2021-05-05-17-37-04.bpo-44022.bS3XJ9.rst
new file mode 100644
index 0000000..cf6b63e
--- /dev/null
+++ b/Misc/NEWS.d/next/Security/2021-05-05-17-37-04.bpo-44022.bS3XJ9.rst
@@ -0,0 +1,2 @@
+:mod:`http.client` now avoids infinitely reading potential HTTP headers after a
+``100 Continue`` status response from the server.
--
1.8.3.1
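
A rough reproduction of the fixed behaviour, assuming a patched interpreter.
FakeSock is a hypothetical stand-in that provides only the makefile() call
HTTPResponse needs; the hostile server answers 100 Continue and then streams
bogus header lines.

    import http.client
    import io

    class FakeSock:
        """Hypothetical minimal socket: just enough for HTTPResponse.begin()."""
        def __init__(self, text):
            self._file = io.BytesIO(text.encode("latin-1"))
        def makefile(self, mode, bufsize=None):
            return self._file

    payload = "HTTP/1.1 100 Continue\r\n" + "x: y\r\n" * 200
    resp = http.client.HTTPResponse(FakeSock(payload))
    try:
        resp.begin()
    except http.client.HTTPException as exc:
        # The skipped headers now go through the same _MAXHEADERS cap
        # instead of being read line by line without limit.
        print(exc)   # got more than 100 headers
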

View File

@ -1,100 +0,0 @@
From b30b25b26663fb6070b8ed86fe3a20dcb557d05d Mon Sep 17 00:00:00 2001
From: Antoine Pitrou <antoine@python.org>
Date: Sat, 15 May 2021 11:51:20 +0200
Subject: [PATCH] [3.9] bpo-37788: Fix reference leak when Thread is never
joined (GH-26103) (GH-26142)
Reference: https://github.com/python/cpython/commit/b30b25b26663fb6070b8ed86fe3a20dcb557d05d
When a Thread is not joined after it has stopped, its lock may remain in the _shutdown_locks set until interpreter shutdown. If many threads are created this way, the _shutdown_locks set could grow endlessly. To avoid such a situation, purge expired locks each time a new one is added or removed.
(cherry picked from commit c10c2ec7a0e06975e8010c56c9c3270f8ea322ec)
Co-authored-by: Antoine Pitrou <antoine@python.org>
Automerge-Triggered-By: GH:pitrou
---
Lib/test/test_threading.py | 8 ++++++++
Lib/threading.py | 19 ++++++++++++++++++-
.../2021-05-13-19-07-28.bpo-37788.adeFcf.rst | 1 +
3 files changed, 27 insertions(+), 1 deletion(-)
create mode 100644 Misc/NEWS.d/next/Library/2021-05-13-19-07-28.bpo-37788.adeFcf.rst
diff --git a/Lib/test/test_threading.py b/Lib/test/test_threading.py
index ac4e7a7..f2e427c 100644
--- a/Lib/test/test_threading.py
+++ b/Lib/test/test_threading.py
@@ -762,6 +762,14 @@ class ThreadTests(BaseTestCase):
# Daemon threads must never add it to _shutdown_locks.
self.assertNotIn(tstate_lock, threading._shutdown_locks)
+ def test_leak_without_join(self):
+ # bpo-37788: Test that a thread which is not joined explicitly
+ # does not leak. Test written for reference leak checks.
+ def noop(): pass
+ with support.wait_threads_exit():
+ threading.Thread(target=noop).start()
+ # Thread.join() is not called
+
class ThreadJoinOnShutdown(BaseTestCase):
diff --git a/Lib/threading.py b/Lib/threading.py
index 813dae2..f8f5954 100644
--- a/Lib/threading.py
+++ b/Lib/threading.py
@@ -739,12 +739,27 @@ _active_limbo_lock = _allocate_lock()
_active = {} # maps thread id to Thread object
_limbo = {}
_dangling = WeakSet()
+
# Set of Thread._tstate_lock locks of non-daemon threads used by _shutdown()
# to wait until all Python thread states get deleted:
# see Thread._set_tstate_lock().
_shutdown_locks_lock = _allocate_lock()
_shutdown_locks = set()
+def _maintain_shutdown_locks():
+ """
+ Drop any shutdown locks that don't correspond to running threads anymore.
+
+ Calling this from time to time avoids an ever-growing _shutdown_locks
+ set when Thread objects are not joined explicitly. See bpo-37788.
+
+ This must be called with _shutdown_locks_lock acquired.
+ """
+ # If a lock was released, the corresponding thread has exited
+ to_remove = [lock for lock in _shutdown_locks if not lock.locked()]
+ _shutdown_locks.difference_update(to_remove)
+
+
# Main class for threads
class Thread:
@@ -910,6 +925,7 @@ class Thread:
if not self.daemon:
with _shutdown_locks_lock:
+ _maintain_shutdown_locks()
_shutdown_locks.add(self._tstate_lock)
def _bootstrap_inner(self):
@@ -965,7 +981,8 @@ class Thread:
self._tstate_lock = None
if not self.daemon:
with _shutdown_locks_lock:
- _shutdown_locks.discard(lock)
+ # Remove our lock and other released locks from _shutdown_locks
+ _maintain_shutdown_locks()
def _delete(self):
"Remove current thread from the dict of currently running threads."
diff --git a/Misc/NEWS.d/next/Library/2021-05-13-19-07-28.bpo-37788.adeFcf.rst b/Misc/NEWS.d/next/Library/2021-05-13-19-07-28.bpo-37788.adeFcf.rst
new file mode 100644
index 0000000..0c33923
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2021-05-13-19-07-28.bpo-37788.adeFcf.rst
@@ -0,0 +1 @@
+Fix a reference leak when a Thread object is never joined.
--
2.25.1
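
A sketch of the effect, assuming a patched interpreter. threading._shutdown_locks
is a private implementation detail and is inspected here only to show that it no
longer grows when threads are never joined.

    import threading
    import time

    def noop():
        pass

    for _ in range(50):
        threading.Thread(target=noop).start()
        time.sleep(0.01)   # let the previous thread finish; join() is never called

    # Each subsequent start() purges the locks of already-finished threads
    # (_maintain_shutdown_locks), so the set stays small instead of holding ~50.
    with threading._shutdown_locks_lock:
        print(len(threading._shutdown_locks))
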

View File

@ -1,156 +0,0 @@
From de0359e236bba03b4aabaeed6af08e15a55dd4d9 Mon Sep 17 00:00:00 2001
From: "Miss Islington (bot)"
<31488909+miss-islington@users.noreply.github.com>
Date: Thu, 4 Mar 2021 08:55:24 -0800
Subject: [PATCH] [3.8] bpo-37193: Remove thread objects which finished process
its request (GH-23127) (GH-24749)
This reverts commit aca67da4fe68d5420401ac1782203d302875eb27.
(cherry picked from commit b5711c940f70af89f2b4cf081a3fcd83924f3ae7)
Co-authored-by: Jason R. Coombs <jaraco@jaraco.com>
Automerge-Triggered-By: GH:jaraco
---
Lib/socketserver.py | 51 +++++++++++++++++-----
Lib/test/test_socketserver.py | 23 ++++++++++
.../2020-06-12-21-23-20.bpo-37193.wJximU.rst | 2 +
3 files changed, 64 insertions(+), 12 deletions(-)
create mode 100644 Misc/NEWS.d/next/Library/2020-06-12-21-23-20.bpo-37193.wJximU.rst
diff --git a/Lib/socketserver.py b/Lib/socketserver.py
index 1ad028f..9e94c76 100644
--- a/Lib/socketserver.py
+++ b/Lib/socketserver.py
@@ -628,6 +628,39 @@ if hasattr(os, "fork"):
self.collect_children(blocking=self.block_on_close)
+class _Threads(list):
+ """
+ Joinable list of all non-daemon threads.
+ """
+ def append(self, thread):
+ self.reap()
+ if thread.daemon:
+ return
+ super().append(thread)
+
+ def pop_all(self):
+ self[:], result = [], self[:]
+ return result
+
+ def join(self):
+ for thread in self.pop_all():
+ thread.join()
+
+ def reap(self):
+ self[:] = (thread for thread in self if thread.is_alive())
+
+
+class _NoThreads:
+ """
+ Degenerate version of _Threads.
+ """
+ def append(self, thread):
+ pass
+
+ def join(self):
+ pass
+
+
class ThreadingMixIn:
"""Mix-in class to handle each request in a new thread."""
@@ -636,9 +669,9 @@ class ThreadingMixIn:
daemon_threads = False
# If true, server_close() waits until all non-daemonic threads terminate.
block_on_close = True
- # For non-daemonic threads, list of threading.Threading objects
+ # Threads object
# used by server_close() to wait for all threads completion.
- _threads = None
+ _threads = _NoThreads()
def process_request_thread(self, request, client_address):
"""Same as in BaseServer but as a thread.
@@ -655,23 +688,17 @@ class ThreadingMixIn:
def process_request(self, request, client_address):
"""Start a new thread to process the request."""
+ if self.block_on_close:
+ vars(self).setdefault('_threads', _Threads())
t = threading.Thread(target = self.process_request_thread,
args = (request, client_address))
t.daemon = self.daemon_threads
- if not t.daemon and self.block_on_close:
- if self._threads is None:
- self._threads = []
- self._threads.append(t)
+ self._threads.append(t)
t.start()
def server_close(self):
super().server_close()
- if self.block_on_close:
- threads = self._threads
- self._threads = None
- if threads:
- for thread in threads:
- thread.join()
+ self._threads.join()
if hasattr(os, "fork"):
diff --git a/Lib/test/test_socketserver.py b/Lib/test/test_socketserver.py
index 8aed4b6..1e5224d 100644
--- a/Lib/test/test_socketserver.py
+++ b/Lib/test/test_socketserver.py
@@ -276,6 +276,13 @@ class SocketServerTest(unittest.TestCase):
t.join()
s.server_close()
+ def test_close_immediately(self):
+ class MyServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
+ pass
+
+ server = MyServer((HOST, 0), lambda: None)
+ server.server_close()
+
def test_tcpserver_bind_leak(self):
# Issue #22435: the server socket wouldn't be closed if bind()/listen()
# failed.
@@ -490,6 +497,22 @@ class MiscTestCase(unittest.TestCase):
self.assertEqual(server.shutdown_called, 1)
server.server_close()
+ def test_threads_reaped(self):
+ """
+ In #37193, users reported a memory leak
+ due to the saving of every request thread. Ensure that
+ not all threads are kept forever.
+ """
+ class MyServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
+ pass
+
+ server = MyServer((HOST, 0), socketserver.StreamRequestHandler)
+ for n in range(10):
+ with socket.create_connection(server.server_address):
+ server.handle_request()
+ self.assertLess(len(server._threads), 10)
+ server.server_close()
+
if __name__ == "__main__":
unittest.main()
diff --git a/Misc/NEWS.d/next/Library/2020-06-12-21-23-20.bpo-37193.wJximU.rst b/Misc/NEWS.d/next/Library/2020-06-12-21-23-20.bpo-37193.wJximU.rst
new file mode 100644
index 0000000..fbf56d3
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2020-06-12-21-23-20.bpo-37193.wJximU.rst
@@ -0,0 +1,2 @@
+Fixed memory leak in ``socketserver.ThreadingMixIn`` introduced in Python
+3.7.
--
1.8.3.1
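
A rough sketch mirroring the added regression test, assuming a patched
interpreter; EchoHandler and Server are hypothetical names. After ten requests
the self-reaping _threads list holds far fewer than ten entries.

    import socket
    import socketserver

    class EchoHandler(socketserver.StreamRequestHandler):
        def handle(self):
            self.wfile.write(self.rfile.readline())

    class Server(socketserver.ThreadingMixIn, socketserver.TCPServer):
        pass

    server = Server(("127.0.0.1", 0), EchoHandler)
    for _ in range(10):
        with socket.create_connection(server.server_address) as conn:
            conn.sendall(b"ping\n")
            server.handle_request()   # spawns one handler thread per request

    # _Threads.append() reaps finished threads, so the list does not retain
    # one entry per request as it did before the fix.
    print(len(server._threads))
    server.server_close()
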

View File

@ -1,32 +0,0 @@
From e60ab843cbb016fb6ff8b4f418641ac05a9b2fcc Mon Sep 17 00:00:00 2001
From: "Gregory P. Smith" <greg@krypto.org>
Date: Wed, 2 Jun 2021 20:43:38 -0700
Subject: [PATCH] bpo-44022: Improve the regression test. (GH-26503)
It wasn't actually detecting the regression due to the
assertion being too lenient.
---
Lib/test/test_httplib.py | 7 ++++++-
1 file changed, 6 insertions(+), 1 deletion(-)
diff --git a/Lib/test/test_httplib.py b/Lib/test/test_httplib.py
index 2777969..419904d 100644
--- a/Lib/test/test_httplib.py
+++ b/Lib/test/test_httplib.py
@@ -1005,7 +1005,12 @@ class BasicTest(TestCase):
'r\n' * 32768
)
resp = client.HTTPResponse(FakeSocket(body))
- self.assertRaises(client.HTTPException, resp.begin)
+ with self.assertRaises(client.HTTPException) as cm:
+ resp.begin()
+ # We must assert more because other reasonable errors that we
+ # do not want can also be HTTPException derived.
+ self.assertIn('got more than ', str(cm.exception))
+ self.assertIn('headers', str(cm.exception))
def test_overflowing_chunked_line(self):
body = (
--
1.8.3.1
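
The pattern applied here, as a generic sketch with illustrative names: capture
the exception with assertRaises as a context manager so the test can also assert
on its message, rather than accepting any exception of the right type for any
reason.

    import unittest

    class MessageCheckedExample(unittest.TestCase):
        def test_checks_why_it_failed(self):
            with self.assertRaises(ValueError) as cm:
                int("not a number")
            # Asserting on the message guards against passing for the wrong reason.
            self.assertIn("invalid literal", str(cm.exception))

    if __name__ == "__main__":
        unittest.main()
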

1955
python-385

File diff suppressed because it is too large

View File

@ -2,12 +2,12 @@ Name: python3
Summary: Interpreter of the Python3 programming language
URL: https://www.python.org/
Version: 3.8.5
Release: 14
Version: 3.10.0
Release: 1
License: Python
%global branchversion 3.8
%global pyshortver 38
%global branchversion 3.10
%global pyshortver 310
%ifarch %{ix86} x86_64
%bcond_without optimizations
@ -86,30 +86,15 @@ Source: https://www.python.org/ftp/python/%{version}/Python-%{version}.tar.xz
Source1: pyconfig.h
Patch1: 00001-rpath.patch
Patch102: 00102-lib64.patch
Patch111: 00111-no-static-lib.patch
Patch132: 00132-add-rpmbuild-hooks-to-unittest.patch
Patch160: 00160-disable-test_fs_holes-in-rpm-build.patch
Patch178: 00178-dont-duplicate-flags-in-sysconfig.patch
Patch189: 00189-use-rpm-wheels.patch
Patch205: 00205-make-libpl-respect-lib64.patch
Patch251: 00251-change-user-install-location.patch
Patch252: CVE-2020-27619.patch
Patch254: CVE-2021-3177.patch
Patch255: backport-CVE-2021-23336.patch
Patch256: backport-Remove-thread-objects-which-finished-process-its-request.patch
Patch257: backport-Fix-reference-leak-when-Thread-is-never-joined.patch
Patch6000: backport-CVE-2021-3426.patch
Patch6001: backport-CVE-2021-29921.patch
Patch6002: backport-CVE-2021-3733.patch
Patch6003: backport-CVE-2021-3737.patch
Patch6004: backport-bpo-44022-Improve-the-regression-test.patch
Patch9000: add-the-sm3-method-for-obtaining-the-salt-value.patch
Provides: python%{branchversion} = %{version}-%{release}
Provides: python(abi) = %{branchversion}
Provides: python(abi) = 3.7
Provides: python(abi) = 3.8
Provides: python%{pyshortver} = %{version}-%{release}
Obsoletes: python%{pyshortver}
@ -186,32 +171,13 @@ extension modules.
%setup -q -n Python-%{version}
find -name '*.exe' -print -delete
rm -r Modules/expat
rm Lib/ensurepip/_bundled/*.whl
rm configure pyconfig.h.in
%patch1 -p1
%patch102 -p1
%patch111 -p1
%patch132 -p1
%patch160 -p1
%patch178 -p1
%patch189 -p1
rm Lib/ensurepip/_bundled/*.whl
%patch205 -p1
%patch251 -p1
%patch252 -p1
%patch254 -p1
%patch255 -p1
%patch256 -p1
%patch257 -p1
%patch6000 -p1
%patch6001 -p1
%patch6002 -p1
%patch6003 -p1
%patch6004 -p1
%patch9000 -p1
rm configure pyconfig.h.in
%build
autoconf
autoheader
@ -244,6 +210,9 @@ pushd ${DebugBuildDir}
%global _configure $topdir/configure
%configure \
--with-platlibdir=%{_lib} \
--without-static-libpython \
--with-wheel-pkg-dir=%{_datadir}/python-wheels \
--enable-ipv6 \
--enable-shared \
--with-computed-gotos=yes \
@ -270,6 +239,9 @@ pushd ${OptimizedBuildDir}
%global _configure $topdir/configure
%configure \
--with-platlibdir=%{_lib} \
--without-static-libpython \
--with-wheel-pkg-dir=%{_datadir}/python-wheels \
--enable-ipv6 \
--enable-shared \
--with-computed-gotos=yes \
@ -396,7 +368,7 @@ ln -s %{_bindir}/python3 %{buildroot}%{_bindir}/python
mv %{buildroot}%{_bindir}/2to3-%{branchversion} %{buildroot}%{_bindir}/2to3
cp -a %{_libdir}/libpython3.7m.so.1.0 ${RPM_BUILD_ROOT}%{_libdir}
cp -a %{_libdir}/libpython3.8.so.1.0 ${RPM_BUILD_ROOT}%{_libdir}
%check
topdir=$(pwd)
@ -549,7 +521,6 @@ export BEP_GTDLIST="$BEP_GTDLIST_TMP"
%{dynload_dir}/mmap.%{SOABI_optimized}.so
%{dynload_dir}/nis.%{SOABI_optimized}.so
%{dynload_dir}/ossaudiodev.%{SOABI_optimized}.so
%{dynload_dir}/parser.%{SOABI_optimized}.so
%{dynload_dir}/_posixshmem.%{SOABI_optimized}.so
%{dynload_dir}/pyexpat.%{SOABI_optimized}.so
%{dynload_dir}/readline.%{SOABI_optimized}.so
@ -565,6 +536,8 @@ export BEP_GTDLIST="$BEP_GTDLIST_TMP"
%{dynload_dir}/zlib.%{SOABI_optimized}.so
%{dynload_dir}/_statistics.%{SOABI_optimized}.so
%{dynload_dir}/_xxsubinterpreters.%{SOABI_optimized}.so
%{dynload_dir}/_zoneinfo.%{SOABI_optimized}.so
%{dynload_dir}/xxlimited_35.%{SOABI_optimized}.so
%dir %{pylibdir}/site-packages/
%dir %{pylibdir}/site-packages/__pycache__/
@ -615,6 +588,10 @@ export BEP_GTDLIST="$BEP_GTDLIST_TMP"
%{pylibdir}/importlib/*.py
%{pylibdir}/importlib/__pycache__/*%{bytecode_suffixes}
%dir %{pylibdir}/importlib/metadata/
%dir %{pylibdir}/importlib/metadata/__pycache__/
%{pylibdir}/importlib/metadata/
%dir %{pylibdir}/json/
%dir %{pylibdir}/json/__pycache__/
%{pylibdir}/json/*.py
@ -633,6 +610,7 @@ export BEP_GTDLIST="$BEP_GTDLIST_TMP"
%{pylibdir}/urllib
%{pylibdir}/xml
%{pylibdir}/zoneinfo
%attr(0755,root,root) %dir %{_prefix}/lib/python%{branchversion}
%attr(0755,root,root) %dir %{_prefix}/lib/python%{branchversion}/site-packages
@ -645,7 +623,7 @@ export BEP_GTDLIST="$BEP_GTDLIST_TMP"
%{_libdir}/%{py_INSTSONAME_optimized}
%{_libdir}/libpython3.so
%{_libdir}/libpython3.7m.so.1.0
%{_libdir}/libpython3.8.so.1.0
%files -n python3-unversioned-command
%{_bindir}/python
@ -774,7 +752,6 @@ export BEP_GTDLIST="$BEP_GTDLIST_TMP"
%{dynload_dir}/mmap.%{SOABI_debug}.so
%{dynload_dir}/nis.%{SOABI_debug}.so
%{dynload_dir}/ossaudiodev.%{SOABI_debug}.so
%{dynload_dir}/parser.%{SOABI_debug}.so
%{dynload_dir}/pyexpat.%{SOABI_debug}.so
%{dynload_dir}/readline.%{SOABI_debug}.so
%{dynload_dir}/resource.%{SOABI_debug}.so
@ -790,6 +767,9 @@ export BEP_GTDLIST="$BEP_GTDLIST_TMP"
%{dynload_dir}/_statistics.%{SOABI_debug}.so
%{dynload_dir}/_xxsubinterpreters.%{SOABI_debug}.so
%{dynload_dir}/_posixshmem.%{SOABI_debug}.so
%{dynload_dir}/_zoneinfo.%{SOABI_debug}.so
%{dynload_dir}/xxlimited.%{SOABI_debug}.so
%{dynload_dir}/xxlimited_35.%{SOABI_debug}.so
%{_libdir}/%{py_INSTSONAME_debug}
@ -816,6 +796,12 @@ export BEP_GTDLIST="$BEP_GTDLIST_TMP"
%{_mandir}/*/*
%changelog
* Sat Nov 13 2021 shixuantong <shixuantong@huawei.com> - 3.10.0-1
- Type:enhancement
- ID:NA
- SUG:NA
- DESC:update version to 3.10.0
* Thu Oct 28 2021 hanxinke <hanxinke@huawei.com> - 3.8.5-14
- Type:requirement
- CVE:NA