diff -Nru python3-stdlib-extensions-3.9.7/3.10/Lib/distutils/ccompiler.py python3-stdlib-extensions-3.10.8/3.10/Lib/distutils/ccompiler.py --- python3-stdlib-extensions-3.9.7/3.10/Lib/distutils/ccompiler.py 2021-08-02 19:53:59.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.10/Lib/distutils/ccompiler.py 2022-10-11 11:21:44.000000000 +0000 @@ -392,7 +392,7 @@ return output_dir, macros, include_dirs def _prep_compile(self, sources, output_dir, depends=None): - """Decide which souce files must be recompiled. + """Decide which source files must be recompiled. Determine the list of object files corresponding to 'sources', and figure out which ones really need to be recompiled. diff -Nru python3-stdlib-extensions-3.9.7/3.10/Lib/distutils/command/check.py python3-stdlib-extensions-3.10.8/3.10/Lib/distutils/command/check.py --- python3-stdlib-extensions-3.9.7/3.10/Lib/distutils/command/check.py 2021-08-02 19:53:59.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.10/Lib/distutils/command/check.py 2022-10-11 11:21:44.000000000 +0000 @@ -83,7 +83,7 @@ name, version, URL Recommended fields: - (author and author_email) or (maintainer and maintainer_email)) + (author and author_email) or (maintainer and maintainer_email) Warns if any are missing. """ diff -Nru python3-stdlib-extensions-3.9.7/3.10/Lib/distutils/command/install.py python3-stdlib-extensions-3.10.8/3.10/Lib/distutils/command/install.py --- python3-stdlib-extensions-3.9.7/3.10/Lib/distutils/command/install.py 2021-08-02 19:53:59.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.10/Lib/distutils/command/install.py 2022-10-11 11:21:44.000000000 +0000 @@ -31,7 +31,7 @@ # while making the sysconfig module the single point of truth. # This makes it easier for OS distributions where they need to # alter locations for packages installations in a single place. -# Note that this module is depracated (PEP 632); all consumers +# Note that this module is deprecated (PEP 632); all consumers # of this information should switch to using sysconfig directly. INSTALL_SCHEMES = {"unix_prefix": {}, "unix_home": {}, "nt": {}} @@ -43,7 +43,7 @@ sys_key = key sys_scheme = sysconfig._INSTALL_SCHEMES[sys_scheme_name] if key == "headers" and key not in sys_scheme: - # On POSIX-y platofrms, Python will: + # On POSIX-y platforms, Python will: # - Build from .h files in 'headers' (only there when # building CPython) # - Install .h files to 'include' diff -Nru python3-stdlib-extensions-3.9.7/3.10/Lib/distutils/msvc9compiler.py python3-stdlib-extensions-3.10.8/3.10/Lib/distutils/msvc9compiler.py --- python3-stdlib-extensions-3.9.7/3.10/Lib/distutils/msvc9compiler.py 2021-08-02 19:53:59.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.10/Lib/distutils/msvc9compiler.py 2022-10-11 11:21:44.000000000 +0000 @@ -673,7 +673,7 @@ # If a manifest should be embedded, return a tuple of # (manifest_filename, resource_id). Returns None if no manifest # should be embedded. See http://bugs.python.org/issue7833 for why - # we want to avoid any manifest for extension modules if we can) + # we want to avoid any manifest for extension modules if we can. 
for arg in ld_args: if arg.startswith("/MANIFESTFILE:"): temp_manifest = arg.split(":", 1)[1] diff -Nru python3-stdlib-extensions-3.9.7/3.10/Lib/distutils/tests/test_bdist_rpm.py python3-stdlib-extensions-3.10.8/3.10/Lib/distutils/tests/test_bdist_rpm.py --- python3-stdlib-extensions-3.9.7/3.10/Lib/distutils/tests/test_bdist_rpm.py 2021-08-02 19:53:59.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.10/Lib/distutils/tests/test_bdist_rpm.py 2022-10-11 11:21:44.000000000 +0000 @@ -44,7 +44,7 @@ # spurious sdtout/stderr output under Mac OS X @unittest.skipUnless(sys.platform.startswith('linux'), 'spurious sdtout/stderr output under Mac OS X') - @requires_zlib + @requires_zlib() @unittest.skipIf(find_executable('rpm') is None, 'the rpm command is not found') @unittest.skipIf(find_executable('rpmbuild') is None, @@ -87,7 +87,7 @@ # spurious sdtout/stderr output under Mac OS X @unittest.skipUnless(sys.platform.startswith('linux'), 'spurious sdtout/stderr output under Mac OS X') - @requires_zlib + @requires_zlib() # http://bugs.python.org/issue1533164 @unittest.skipIf(find_executable('rpm') is None, 'the rpm command is not found') diff -Nru python3-stdlib-extensions-3.9.7/3.10/Lib/distutils/unixccompiler.py python3-stdlib-extensions-3.10.8/3.10/Lib/distutils/unixccompiler.py --- python3-stdlib-extensions-3.9.7/3.10/Lib/distutils/unixccompiler.py 2021-08-02 19:53:59.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.10/Lib/distutils/unixccompiler.py 2022-10-11 11:21:44.000000000 +0000 @@ -215,7 +215,8 @@ return "-L" + dir def _is_gcc(self, compiler_name): - return "gcc" in compiler_name or "g++" in compiler_name + # clang uses same syntax for rpath as gcc + return any(name in compiler_name for name in ("gcc", "g++", "clang")) def runtime_library_dir_option(self, dir): # XXX Hackish, at the very least. See Python bug #445902: diff -Nru python3-stdlib-extensions-3.9.7/3.10/Lib/lib2to3/fixes/fix_metaclass.py python3-stdlib-extensions-3.10.8/3.10/Lib/lib2to3/fixes/fix_metaclass.py --- python3-stdlib-extensions-3.9.7/3.10/Lib/lib2to3/fixes/fix_metaclass.py 2021-08-02 19:53:59.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.10/Lib/lib2to3/fixes/fix_metaclass.py 2022-10-11 11:21:44.000000000 +0000 @@ -51,7 +51,7 @@ # already in the preferred format, do nothing return - # !%@#! oneliners have no suite node, we have to fake one up + # !%@#! 
one-liners have no suite node, we have to fake one up for i, node in enumerate(cls_node.children): if node.type == token.COLON: break diff -Nru python3-stdlib-extensions-3.9.7/3.10/Lib/lib2to3/fixes/fix_paren.py python3-stdlib-extensions-3.10.8/3.10/Lib/lib2to3/fixes/fix_paren.py --- python3-stdlib-extensions-3.9.7/3.10/Lib/lib2to3/fixes/fix_paren.py 2021-08-02 19:53:59.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.10/Lib/lib2to3/fixes/fix_paren.py 2022-10-11 11:21:44.000000000 +0000 @@ -1,4 +1,4 @@ -"""Fixer that addes parentheses where they are required +"""Fixer that adds parentheses where they are required This converts ``[x for x in 1, 2]`` to ``[x for x in (1, 2)]``.""" diff -Nru python3-stdlib-extensions-3.9.7/3.10/Lib/lib2to3/pgen2/tokenize.py python3-stdlib-extensions-3.10.8/3.10/Lib/lib2to3/pgen2/tokenize.py --- python3-stdlib-extensions-3.9.7/3.10/Lib/lib2to3/pgen2/tokenize.py 2021-08-02 19:53:59.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.10/Lib/lib2to3/pgen2/tokenize.py 2022-10-11 11:21:44.000000000 +0000 @@ -512,13 +512,14 @@ stashed = tok continue - if token == 'def': + if token in ('def', 'for'): if (stashed and stashed[0] == NAME and stashed[1] == 'async'): - async_def = True - async_def_indent = indents[-1] + if token == 'def': + async_def = True + async_def_indent = indents[-1] yield (ASYNC, stashed[1], stashed[2], stashed[3], diff -Nru python3-stdlib-extensions-3.9.7/3.10/Lib/lib2to3/pytree.py python3-stdlib-extensions-3.10.8/3.10/Lib/lib2to3/pytree.py --- python3-stdlib-extensions-3.9.7/3.10/Lib/lib2to3/pytree.py 2021-08-02 19:53:59.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.10/Lib/lib2to3/pytree.py 2022-10-11 11:21:44.000000000 +0000 @@ -720,8 +720,8 @@ r[self.name] = nodes[:count] yield count, r except RuntimeError: - # We fall back to the iterative pattern matching scheme if the recursive - # scheme hits the recursion limit. + # Fall back to the iterative pattern matching scheme if the + # recursive scheme hits the recursion limit (RecursionError). for count, r in self._iterative_matches(nodes): if self.name: r[self.name] = nodes[:count] diff -Nru python3-stdlib-extensions-3.9.7/3.10/Lib/lib2to3/tests/data/infinite_recursion.py python3-stdlib-extensions-3.10.8/3.10/Lib/lib2to3/tests/data/infinite_recursion.py --- python3-stdlib-extensions-3.9.7/3.10/Lib/lib2to3/tests/data/infinite_recursion.py 2021-08-02 19:53:59.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.10/Lib/lib2to3/tests/data/infinite_recursion.py 2022-10-11 11:21:44.000000000 +0000 @@ -1,5 +1,5 @@ -# This file is used to verify that 2to3 falls back to a slower, iterative pattern matching -# scheme in the event that the faster recursive system fails due to infinite recursion. +# Verify that 2to3 falls back from the recursive pattern matching scheme to a +# slower, iterative scheme in the event of a RecursionError. from ctypes import * STRING = c_char_p diff -Nru python3-stdlib-extensions-3.9.7/3.10/Lib/lib2to3/tests/data/py2_test_grammar.py python3-stdlib-extensions-3.10.8/3.10/Lib/lib2to3/tests/data/py2_test_grammar.py --- python3-stdlib-extensions-3.9.7/3.10/Lib/lib2to3/tests/data/py2_test_grammar.py 2021-08-02 19:53:59.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.10/Lib/lib2to3/tests/data/py2_test_grammar.py 2022-10-11 11:21:44.000000000 +0000 @@ -8,7 +8,7 @@ # regression test, the filterwarnings() call has been added to # regrtest.py. 
-from test.test_support import run_unittest, check_syntax_error +from test.test_support import check_syntax_error import unittest import sys # testing import * @@ -967,8 +967,5 @@ self.assertEqual((6 < 4 if 0 else 2), 2) -def test_main(): - run_unittest(TokenTests, GrammarTests) - if __name__ == '__main__': - test_main() + unittest.main() diff -Nru python3-stdlib-extensions-3.9.7/3.10/Lib/lib2to3/tests/data/py3_test_grammar.py python3-stdlib-extensions-3.10.8/3.10/Lib/lib2to3/tests/data/py3_test_grammar.py --- python3-stdlib-extensions-3.9.7/3.10/Lib/lib2to3/tests/data/py3_test_grammar.py 2021-08-02 19:53:59.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.10/Lib/lib2to3/tests/data/py3_test_grammar.py 2022-10-11 11:21:44.000000000 +0000 @@ -8,7 +8,7 @@ # regression test, the filterwarnings() call has been added to # regrtest.py. -from test.support import run_unittest, check_syntax_error +from test.support import check_syntax_error import unittest import sys # testing import * @@ -952,8 +952,5 @@ self.assertEqual((6 < 4 if 0 else 2), 2) -def test_main(): - run_unittest(TokenTests, GrammarTests) - if __name__ == '__main__': - test_main() + unittest.main() diff -Nru python3-stdlib-extensions-3.9.7/3.10/Lib/lib2to3/tests/test_all_fixers.py python3-stdlib-extensions-3.10.8/3.10/Lib/lib2to3/tests/test_all_fixers.py --- python3-stdlib-extensions-3.9.7/3.10/Lib/lib2to3/tests/test_all_fixers.py 2021-08-02 19:53:59.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.10/Lib/lib2to3/tests/test_all_fixers.py 2022-10-11 11:21:44.000000000 +0000 @@ -6,8 +6,10 @@ # Author: Collin Winter # Python imports -import unittest +import os.path +import sys import test.support +import unittest # Local imports from . import support @@ -19,9 +21,22 @@ def setUp(self): self.refactor = support.get_refactorer() + def refactor_file(self, filepath): + if test.support.verbose: + print(f"Refactor file: {filepath}") + if os.path.basename(filepath) == 'infinite_recursion.py': + # bpo-46542: Processing infinite_recursion.py can crash Python + # if Python is built in debug mode: lower the recursion limit + # to prevent a crash. 
+ with test.support.infinite_recursion(150): + self.refactor.refactor_file(filepath) + else: + self.refactor.refactor_file(filepath) + def test_all_project_files(self): for filepath in support.all_project_files(): - self.refactor.refactor_file(filepath) + with self.subTest(filepath=filepath): + self.refactor_file(filepath) if __name__ == '__main__': unittest.main() diff -Nru python3-stdlib-extensions-3.9.7/3.10/Lib/lib2to3/tests/test_parser.py python3-stdlib-extensions-3.10.8/3.10/Lib/lib2to3/tests/test_parser.py --- python3-stdlib-extensions-3.9.7/3.10/Lib/lib2to3/tests/test_parser.py 2021-08-02 19:53:59.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.10/Lib/lib2to3/tests/test_parser.py 2022-10-11 11:21:44.000000000 +0000 @@ -20,6 +20,7 @@ import subprocess import sys import tempfile +import test.support import unittest # Local imports @@ -200,20 +201,27 @@ self.validate("""await = 1""") self.validate("""def async(): pass""") - def test_async_with(self): + def test_async_for(self): self.validate("""async def foo(): async for a in b: pass""") - self.invalid_syntax("""def foo(): - async for a in b: pass""") - - def test_async_for(self): + def test_async_with(self): self.validate("""async def foo(): async with a: pass""") self.invalid_syntax("""def foo(): async with a: pass""") + def test_async_generator(self): + self.validate( + """async def foo(): + return (i * 2 async for i in arange(42))""" + ) + self.validate( + """def foo(): + return (i * 2 async for i in arange(42))""" + ) + class TestRaiseChanges(GrammarTest): def test_2x_style_1(self): @@ -579,25 +587,31 @@ """A cut-down version of pytree_idempotency.py.""" + def parse_file(self, filepath): + if test.support.verbose: + print(f"Parse file: {filepath}") + with open(filepath, "rb") as fp: + encoding = tokenize.detect_encoding(fp.readline)[0] + self.assertIsNotNone(encoding, + "can't detect encoding for %s" % filepath) + with open(filepath, "r", encoding=encoding) as fp: + source = fp.read() + try: + tree = driver.parse_string(source) + except ParseError: + try: + tree = driver_no_print_statement.parse_string(source) + except ParseError as err: + self.fail('ParseError on file %s (%s)' % (filepath, err)) + new = str(tree) + if new != source: + print(diff_texts(source, new, filepath)) + self.fail("Idempotency failed: %s" % filepath) + def test_all_project_files(self): for filepath in support.all_project_files(): - with open(filepath, "rb") as fp: - encoding = tokenize.detect_encoding(fp.readline)[0] - self.assertIsNotNone(encoding, - "can't detect encoding for %s" % filepath) - with open(filepath, "r", encoding=encoding) as fp: - source = fp.read() - try: - tree = driver.parse_string(source) - except ParseError: - try: - tree = driver_no_print_statement.parse_string(source) - except ParseError as err: - self.fail('ParseError on file %s (%s)' % (filepath, err)) - new = str(tree) - if new != source: - print(diff_texts(source, new, filepath)) - self.fail("Idempotency failed: %s" % filepath) + with self.subTest(filepath=filepath): + self.parse_file(filepath) def test_extended_unpacking(self): driver.parse_string("a, *b, c = x\n") diff -Nru python3-stdlib-extensions-3.9.7/3.10/Lib/tkinter/__init__.py python3-stdlib-extensions-3.10.8/3.10/Lib/tkinter/__init__.py --- python3-stdlib-extensions-3.9.7/3.10/Lib/tkinter/__init__.py 2021-08-02 19:53:59.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.10/Lib/tkinter/__init__.py 2022-10-11 11:21:44.000000000 +0000 @@ -2099,7 +2099,7 @@ the bitmap if None is given. 
Under Windows, the DEFAULT parameter can be used to set the icon - for the widget and any descendents that don't have an icon set + for the widget and any descendants that don't have an icon set explicitly. DEFAULT can be the relative path to a .ico file (example: root.iconbitmap(default='myicon.ico') ). See Tk documentation for more information.""" @@ -2345,9 +2345,9 @@ _default_root = None def readprofile(self, baseName, className): - """Internal function. It reads BASENAME.tcl and CLASSNAME.tcl into - the Tcl Interpreter and calls exec on the contents of BASENAME.py and - CLASSNAME.py if such a file exists in the home directory.""" + """Internal function. It reads .BASENAME.tcl and .CLASSNAME.tcl into + the Tcl Interpreter and calls exec on the contents of .BASENAME.py and + .CLASSNAME.py if such a file exists in the home directory.""" import os if 'HOME' in os.environ: home = os.environ['HOME'] else: home = os.curdir @@ -2592,7 +2592,7 @@ if kw: cnf = _cnfmerge((cnf, kw)) self.widgetName = widgetName - BaseWidget._setup(self, master, cnf) + self._setup(master, cnf) if self._tclCommands is None: self._tclCommands = [] classes = [(k, v) for k, v in cnf.items() if isinstance(k, type)] @@ -2736,7 +2736,7 @@ """Add tag NEWTAG to item which is closest to pixel at X, Y. If several match take the top-most. All items closer than HALO are considered overlapping (all are - closests). If START is specified the next below this tag is taken.""" + closest). If START is specified the next below this tag is taken.""" self.addtag(newtag, 'closest', x, y, halo, start) def addtag_enclosed(self, newtag, x1, y1, x2, y2): @@ -3011,6 +3011,8 @@ return self.tk.call(self._w, 'type', tagOrId) or None +_checkbutton_count = 0 + class Checkbutton(Widget): """Checkbutton widget which is either in on- or off-state.""" @@ -3026,6 +3028,14 @@ underline, variable, width, wraplength.""" Widget.__init__(self, master, 'checkbutton', cnf, kw) + def _setup(self, master, cnf): + if not cnf.get('name'): + global _checkbutton_count + name = self.__class__.__name__.lower() + _checkbutton_count += 1 + cnf['name'] = f'!{name}{_checkbutton_count}' + super()._setup(master, cnf) + def deselect(self): """Put the button in off-state.""" self.tk.call(self._w, 'deselect') @@ -3331,7 +3341,7 @@ self.add('command', cnf or kw) def add_radiobutton(self, cnf={}, **kw): - """Addd radio menu item.""" + """Add radio menu item.""" self.add('radiobutton', cnf or kw) def add_separator(self, cnf={}, **kw): @@ -3356,7 +3366,7 @@ self.insert(index, 'command', cnf or kw) def insert_radiobutton(self, index, cnf={}, **kw): - """Addd radio menu item at INDEX.""" + """Add radio menu item at INDEX.""" self.insert(index, 'radiobutton', cnf or kw) def insert_separator(self, index, cnf={}, **kw): diff -Nru python3-stdlib-extensions-3.9.7/3.10/Lib/tkinter/dialog.py python3-stdlib-extensions-3.10.8/3.10/Lib/tkinter/dialog.py --- python3-stdlib-extensions-3.9.7/3.10/Lib/tkinter/dialog.py 2021-08-02 19:53:59.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.10/Lib/tkinter/dialog.py 2022-10-11 11:21:44.000000000 +0000 @@ -11,7 +11,7 @@ def __init__(self, master=None, cnf={}, **kw): cnf = _cnfmerge((cnf, kw)) self.widgetName = '__dialog__' - Widget._setup(self, master, cnf) + self._setup(master, cnf) self.num = self.tk.getint( self.tk.call( 'tk_dialog', self._w, diff -Nru python3-stdlib-extensions-3.9.7/3.10/Lib/tkinter/test/runtktests.py python3-stdlib-extensions-3.10.8/3.10/Lib/tkinter/test/runtktests.py --- 
python3-stdlib-extensions-3.9.7/3.10/Lib/tkinter/test/runtktests.py 2021-08-02 19:53:59.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.10/Lib/tkinter/test/runtktests.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,69 +0,0 @@ -""" -Use this module to get and run all tk tests. - -tkinter tests should live in a package inside the directory where this file -lives, like test_tkinter. -Extensions also should live in packages following the same rule as above. -""" - -import os -import importlib -import test.support - -this_dir_path = os.path.abspath(os.path.dirname(__file__)) - -def is_package(path): - for name in os.listdir(path): - if name in ('__init__.py', '__init__.pyc'): - return True - return False - -def get_tests_modules(basepath=this_dir_path, gui=True, packages=None): - """This will import and yield modules whose names start with test_ - and are inside packages found in the path starting at basepath. - - If packages is specified it should contain package names that - want their tests collected. - """ - py_ext = '.py' - - for dirpath, dirnames, filenames in os.walk(basepath): - for dirname in list(dirnames): - if dirname[0] == '.': - dirnames.remove(dirname) - - if is_package(dirpath) and filenames: - pkg_name = dirpath[len(basepath) + len(os.sep):].replace('/', '.') - if packages and pkg_name not in packages: - continue - - filenames = filter( - lambda x: x.startswith('test_') and x.endswith(py_ext), - filenames) - - for name in filenames: - try: - yield importlib.import_module( - ".%s.%s" % (pkg_name, name[:-len(py_ext)]), - "tkinter.test") - except test.support.ResourceDenied: - if gui: - raise - -def get_tests(text=True, gui=True, packages=None): - """Yield all the tests in the modules found by get_tests_modules. - - If nogui is True, only tests that do not require a GUI will be - returned.""" - attrs = [] - if text: - attrs.append('tests_nogui') - if gui: - attrs.append('tests_gui') - for module in get_tests_modules(gui=gui, packages=packages): - for attr in attrs: - for test in getattr(module, attr, ()): - yield test - -if __name__ == "__main__": - test.support.run_unittest(*get_tests()) diff -Nru python3-stdlib-extensions-3.9.7/3.10/Lib/tkinter/test/test_tkinter/test_colorchooser.py python3-stdlib-extensions-3.10.8/3.10/Lib/tkinter/test/test_tkinter/test_colorchooser.py --- python3-stdlib-extensions-3.9.7/3.10/Lib/tkinter/test/test_tkinter/test_colorchooser.py 2021-08-02 19:53:59.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.10/Lib/tkinter/test/test_tkinter/test_colorchooser.py 2022-10-11 11:21:44.000000000 +0000 @@ -1,6 +1,6 @@ import unittest import tkinter -from test.support import requires, run_unittest, swap_attr +from test.support import requires, swap_attr from tkinter.test.support import AbstractDefaultRootTest, AbstractTkTest from tkinter import colorchooser from tkinter.colorchooser import askcolor @@ -64,7 +64,5 @@ self.assertRaises(RuntimeError, askcolor) -tests_gui = (ChooserTest, DefaultRootTest,) - if __name__ == "__main__": - run_unittest(*tests_gui) + unittest.main() diff -Nru python3-stdlib-extensions-3.9.7/3.10/Lib/tkinter/test/test_tkinter/test_font.py python3-stdlib-extensions-3.10.8/3.10/Lib/tkinter/test/test_tkinter/test_font.py --- python3-stdlib-extensions-3.9.7/3.10/Lib/tkinter/test/test_tkinter/test_font.py 2021-08-02 19:53:59.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.10/Lib/tkinter/test/test_tkinter/test_font.py 2022-10-11 11:21:44.000000000 +0000 @@ -1,7 +1,7 @@ import unittest import tkinter from tkinter import font -from 
test.support import requires, run_unittest, gc_collect, ALWAYS_EQ +from test.support import requires, gc_collect, ALWAYS_EQ from tkinter.test.support import AbstractTkTest, AbstractDefaultRootTest requires('gui') @@ -159,7 +159,5 @@ self.assertRaises(RuntimeError, font.nametofont, fontname) -tests_gui = (FontTest, DefaultRootTest) - if __name__ == "__main__": - run_unittest(*tests_gui) + unittest.main() diff -Nru python3-stdlib-extensions-3.9.7/3.10/Lib/tkinter/test/test_tkinter/test_images.py python3-stdlib-extensions-3.10.8/3.10/Lib/tkinter/test/test_tkinter/test_images.py --- python3-stdlib-extensions-3.9.7/3.10/Lib/tkinter/test/test_tkinter/test_images.py 2021-08-02 19:53:59.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.10/Lib/tkinter/test/test_tkinter/test_images.py 2022-10-11 11:21:44.000000000 +0000 @@ -78,6 +78,7 @@ self.assertEqual(image.height(), 16) self.assertIn('::img::test', self.root.image_names()) del image + support.gc_collect() # For PyPy or other GCs. self.assertNotIn('::img::test', self.root.image_names()) def test_create_from_data(self): @@ -92,6 +93,7 @@ self.assertEqual(image.height(), 16) self.assertIn('::img::test', self.root.image_names()) del image + support.gc_collect() # For PyPy or other GCs. self.assertNotIn('::img::test', self.root.image_names()) def assertEqualStrList(self, actual, expected): @@ -172,6 +174,7 @@ self.assertEqual(image['file'], testfile) self.assertIn('::img::test', self.root.image_names()) del image + support.gc_collect() # For PyPy or other GCs. self.assertNotIn('::img::test', self.root.image_names()) def check_create_from_data(self, ext): @@ -189,6 +192,7 @@ self.assertEqual(image['file'], '') self.assertIn('::img::test', self.root.image_names()) del image + support.gc_collect() # For PyPy or other GCs. 
self.assertNotIn('::img::test', self.root.image_names()) def test_create_from_ppm_file(self): @@ -372,7 +376,5 @@ self.assertEqual(image.transparency_get(4, 6), False) -tests_gui = (MiscTest, DefaultRootTest, BitmapImageTest, PhotoImageTest,) - if __name__ == "__main__": - support.run_unittest(*tests_gui) + unittest.main() diff -Nru python3-stdlib-extensions-3.9.7/3.10/Lib/tkinter/test/test_tkinter/test_loadtk.py python3-stdlib-extensions-3.10.8/3.10/Lib/tkinter/test/test_tkinter/test_loadtk.py --- python3-stdlib-extensions-3.9.7/3.10/Lib/tkinter/test/test_tkinter/test_loadtk.py 2021-08-02 19:53:59.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.10/Lib/tkinter/test/test_tkinter/test_loadtk.py 2022-10-11 11:21:44.000000000 +0000 @@ -41,7 +41,6 @@ self.assertRaises(TclError, tcl.winfo_geometry) self.assertRaises(TclError, tcl.loadtk) -tests_gui = (TkLoadTest, ) if __name__ == "__main__": - test_support.run_unittest(*tests_gui) + unittest.main() diff -Nru python3-stdlib-extensions-3.9.7/3.10/Lib/tkinter/test/test_tkinter/test_messagebox.py python3-stdlib-extensions-3.10.8/3.10/Lib/tkinter/test/test_tkinter/test_messagebox.py --- python3-stdlib-extensions-3.9.7/3.10/Lib/tkinter/test/test_tkinter/test_messagebox.py 2021-08-02 19:53:59.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.10/Lib/tkinter/test/test_tkinter/test_messagebox.py 2022-10-11 11:21:44.000000000 +0000 @@ -1,6 +1,6 @@ import unittest import tkinter -from test.support import requires, run_unittest, swap_attr +from test.support import requires, swap_attr from tkinter.test.support import AbstractDefaultRootTest from tkinter.commondialog import Dialog from tkinter.messagebox import showinfo @@ -32,7 +32,5 @@ self.assertRaises(RuntimeError, showinfo, "Spam", "Egg Information") -tests_gui = (DefaultRootTest,) - if __name__ == "__main__": - run_unittest(*tests_gui) + unittest.main() diff -Nru python3-stdlib-extensions-3.9.7/3.10/Lib/tkinter/test/test_tkinter/test_misc.py python3-stdlib-extensions-3.10.8/3.10/Lib/tkinter/test/test_tkinter/test_misc.py --- python3-stdlib-extensions-3.9.7/3.10/Lib/tkinter/test/test_tkinter/test_misc.py 2021-08-02 19:53:59.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.10/Lib/tkinter/test/test_tkinter/test_misc.py 2022-10-11 11:21:44.000000000 +0000 @@ -200,6 +200,13 @@ root.clipboard_get() def test_winfo_rgb(self): + + def assertApprox(col1, col2): + # A small amount of flexibility is required (bpo-45496) + # 33 is ~0.05% of 65535, which is a reasonable margin + for col1_channel, col2_channel in zip(col1, col2): + self.assertAlmostEqual(col1_channel, col2_channel, delta=33) + root = self.root rgb = root.winfo_rgb @@ -209,9 +216,9 @@ # #RGB - extends each 4-bit hex value to be 16-bit. self.assertEqual(rgb('#F0F'), (0xFFFF, 0x0000, 0xFFFF)) # #RRGGBB - extends each 8-bit hex value to be 16-bit. - self.assertEqual(rgb('#4a3c8c'), (0x4a4a, 0x3c3c, 0x8c8c)) + assertApprox(rgb('#4a3c8c'), (0x4a4a, 0x3c3c, 0x8c8c)) # #RRRRGGGGBBBB - self.assertEqual(rgb('#dede14143939'), (0xdede, 0x1414, 0x3939)) + assertApprox(rgb('#dede14143939'), (0xdede, 0x1414, 0x3939)) # Invalid string. 
with self.assertRaises(tkinter.TclError): rgb('#123456789a') @@ -346,7 +353,5 @@ self.assertRaises(RuntimeError, tkinter.mainloop) -tests_gui = (MiscTest, DefaultRootTest) - if __name__ == "__main__": - support.run_unittest(*tests_gui) + unittest.main() diff -Nru python3-stdlib-extensions-3.9.7/3.10/Lib/tkinter/test/test_tkinter/test_simpledialog.py python3-stdlib-extensions-3.10.8/3.10/Lib/tkinter/test/test_tkinter/test_simpledialog.py --- python3-stdlib-extensions-3.9.7/3.10/Lib/tkinter/test/test_tkinter/test_simpledialog.py 2021-08-02 19:53:59.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.10/Lib/tkinter/test/test_tkinter/test_simpledialog.py 2022-10-11 11:21:44.000000000 +0000 @@ -1,6 +1,6 @@ import unittest import tkinter -from test.support import requires, run_unittest, swap_attr +from test.support import requires, swap_attr from tkinter.test.support import AbstractDefaultRootTest from tkinter.simpledialog import Dialog, askinteger @@ -31,7 +31,5 @@ self.assertRaises(RuntimeError, askinteger, "Go To Line", "Line number") -tests_gui = (DefaultRootTest,) - if __name__ == "__main__": - run_unittest(*tests_gui) + unittest.main() diff -Nru python3-stdlib-extensions-3.9.7/3.10/Lib/tkinter/test/test_tkinter/test_text.py python3-stdlib-extensions-3.10.8/3.10/Lib/tkinter/test/test_tkinter/test_text.py --- python3-stdlib-extensions-3.9.7/3.10/Lib/tkinter/test/test_tkinter/test_text.py 2021-08-02 19:53:59.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.10/Lib/tkinter/test/test_tkinter/test_text.py 2022-10-11 11:21:44.000000000 +0000 @@ -1,6 +1,6 @@ import unittest import tkinter -from test.support import requires, run_unittest +from test.support import requires from tkinter.test.support import AbstractTkTest requires('gui') @@ -41,7 +41,5 @@ self.assertEqual(text.search('test', '1.0', 'end'), '1.3') -tests_gui = (TextTest, ) - if __name__ == "__main__": - run_unittest(*tests_gui) + unittest.main() diff -Nru python3-stdlib-extensions-3.9.7/3.10/Lib/tkinter/test/test_tkinter/test_variables.py python3-stdlib-extensions-3.10.8/3.10/Lib/tkinter/test/test_tkinter/test_variables.py --- python3-stdlib-extensions-3.9.7/3.10/Lib/tkinter/test/test_tkinter/test_variables.py 2021-08-02 19:53:59.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.10/Lib/tkinter/test/test_tkinter/test_variables.py 2022-10-11 11:21:44.000000000 +0000 @@ -1,4 +1,6 @@ import unittest +from test import support + import gc import tkinter from tkinter import (Variable, StringVar, IntVar, DoubleVar, BooleanVar, Tcl, @@ -46,6 +48,7 @@ v = Variable(self.root, "sample string", "varname") self.assertTrue(self.info_exists("varname")) del v + support.gc_collect() # For PyPy or other GCs. self.assertFalse(self.info_exists("varname")) def test_dont_unset_not_existing(self): @@ -53,9 +56,11 @@ v1 = Variable(self.root, name="name") v2 = Variable(self.root, name="name") del v1 + support.gc_collect() # For PyPy or other GCs. self.assertFalse(self.info_exists("name")) # shouldn't raise exception del v2 + support.gc_collect() # For PyPy or other GCs. 
self.assertFalse(self.info_exists("name")) def test_equality(self): @@ -333,10 +338,5 @@ self.assertRaises(RuntimeError, Variable) -tests_gui = (TestVariable, TestStringVar, TestIntVar, - TestDoubleVar, TestBooleanVar, DefaultRootTest) - - if __name__ == "__main__": - from test.support import run_unittest - run_unittest(*tests_gui) + unittest.main() diff -Nru python3-stdlib-extensions-3.9.7/3.10/Lib/tkinter/test/test_tkinter/test_widgets.py python3-stdlib-extensions-3.10.8/3.10/Lib/tkinter/test/test_tkinter/test_widgets.py --- python3-stdlib-extensions-3.9.7/3.10/Lib/tkinter/test/test_tkinter/test_widgets.py 2021-08-02 19:53:59.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.10/Lib/tkinter/test/test_tkinter/test_widgets.py 2022-10-11 11:21:44.000000000 +0000 @@ -212,6 +212,32 @@ widget = self.create() self.checkParams(widget, 'onvalue', 1, 2.3, '', 'any string') + def test_unique_variables(self): + frames = [] + buttons = [] + for i in range(2): + f = tkinter.Frame(self.root) + f.pack() + frames.append(f) + for j in 'AB': + b = tkinter.Checkbutton(f, text=j) + b.pack() + buttons.append(b) + variables = [str(b['variable']) for b in buttons] + self.assertEqual(len(set(variables)), 4, variables) + + def test_same_name(self): + f1 = tkinter.Frame(self.root) + f2 = tkinter.Frame(self.root) + b1 = tkinter.Checkbutton(f1, name='test', text='Test1') + b2 = tkinter.Checkbutton(f2, name='test', text='Test2') + + v = tkinter.IntVar(self.root, name='test') + b1.select() + self.assertEqual(v.get(), 1) + b2.deselect() + self.assertEqual(v.get(), 0) + @add_standard_options(StandardOptionsTests) class RadiobuttonTest(AbstractLabelTest, unittest.TestCase): @@ -800,7 +826,7 @@ self.checkEnumParam(widget, 'activestyle', 'dotbox', 'none', 'underline') - test_justify = requires_tcl(8, 6, 5)(StandardOptionsTests.test_configure_justify) + test_configure_justify = requires_tcl(8, 6, 5)(StandardOptionsTests.test_configure_justify) def test_configure_listvariable(self): widget = self.create() @@ -939,7 +965,7 @@ def test_configure_from(self): widget = self.create() - conv = False if get_tk_patchlevel() >= (8, 6, 10) else float_round + conv = float if get_tk_patchlevel() >= (8, 6, 10) else float_round self.checkFloatParam(widget, 'from', 100, 14.9, 15.1, conv=conv) def test_configure_label(self): @@ -1241,8 +1267,11 @@ def test_configure_type(self): widget = self.create() - self.checkEnumParam(widget, 'type', - 'normal', 'tearoff', 'menubar') + self.checkEnumParam( + widget, 'type', + 'normal', 'tearoff', 'menubar', + errmsg='bad type "{}": must be normal, tearoff, or menubar', + ) def test_entryconfigure(self): m1 = self.create() diff -Nru python3-stdlib-extensions-3.9.7/3.10/Lib/tkinter/test/test_ttk/test_extensions.py python3-stdlib-extensions-3.10.8/3.10/Lib/tkinter/test/test_ttk/test_extensions.py --- python3-stdlib-extensions-3.9.7/3.10/Lib/tkinter/test/test_ttk/test_extensions.py 2021-08-02 19:53:59.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.10/Lib/tkinter/test/test_ttk/test_extensions.py 2022-10-11 11:21:44.000000000 +0000 @@ -2,7 +2,7 @@ import unittest import tkinter from tkinter import ttk -from test.support import requires, run_unittest +from test.support import requires, gc_collect from tkinter.test.support import AbstractTkTest, AbstractDefaultRootTest requires('gui') @@ -18,6 +18,7 @@ x = ttk.LabeledScale(self.root) var = x._variable._name x.destroy() + gc_collect() # For PyPy or other GCs. 
self.assertRaises(tkinter.TclError, x.tk.globalgetvar, var) # manually created variable @@ -30,6 +31,7 @@ else: self.assertEqual(float(x.tk.globalgetvar(name)), myvar.get()) del myvar + gc_collect() # For PyPy or other GCs. self.assertRaises(tkinter.TclError, x.tk.globalgetvar, name) # checking that the tracing callback is properly removed @@ -171,6 +173,7 @@ def test_resize(self): x = ttk.LabeledScale(self.root) x.pack(expand=True, fill='both') + gc_collect() # For PyPy or other GCs. x.update() width, height = x.master.winfo_width(), x.master.winfo_height() @@ -206,6 +209,7 @@ optmenu.destroy() self.assertEqual(optmenu.tk.globalgetvar(name), var.get()) del var + gc_collect() # For PyPy or other GCs. self.assertRaises(tkinter.TclError, optmenu.tk.globalgetvar, name) @@ -251,6 +255,7 @@ # check that variable is updated correctly optmenu.pack() + gc_collect() # For PyPy or other GCs. optmenu['menu'].invoke(0) self.assertEqual(optmenu._variable.get(), items[0]) @@ -296,6 +301,22 @@ optmenu.destroy() optmenu2.destroy() + def test_trace_variable(self): + # prior to bpo45160, tracing a variable would cause the callback to be made twice + success = [] + items = ('a', 'b', 'c') + textvar = tkinter.StringVar(self.root) + def cb_test(*args): + success.append(textvar.get()) + optmenu = ttk.OptionMenu(self.root, textvar, "a", *items) + optmenu.pack() + cb_name = textvar.trace_add("write", cb_test) + optmenu['menu'].invoke(1) + self.assertEqual(success, ['b']) + self.assertEqual(textvar.get(), 'b') + textvar.trace_remove("write", cb_name) + optmenu.destroy() + class DefaultRootTest(AbstractDefaultRootTest, unittest.TestCase): @@ -303,7 +324,5 @@ self._test_widget(ttk.LabeledScale) -tests_gui = (LabeledScaleTest, OptionMenuTest, DefaultRootTest) - if __name__ == "__main__": - run_unittest(*tests_gui) + unittest.main() diff -Nru python3-stdlib-extensions-3.9.7/3.10/Lib/tkinter/test/test_ttk/test_functions.py python3-stdlib-extensions-3.10.8/3.10/Lib/tkinter/test/test_ttk/test_functions.py --- python3-stdlib-extensions-3.9.7/3.10/Lib/tkinter/test/test_ttk/test_functions.py 2021-08-02 19:53:59.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.10/Lib/tkinter/test/test_ttk/test_functions.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,460 +0,0 @@ -# -*- encoding: utf-8 -*- -import unittest -from tkinter import ttk - -class MockTkApp: - - def splitlist(self, arg): - if isinstance(arg, tuple): - return arg - return arg.split(':') - - def wantobjects(self): - return True - - -class MockTclObj(object): - typename = 'test' - - def __init__(self, val): - self.val = val - - def __str__(self): - return str(self.val) - - -class MockStateSpec(object): - typename = 'StateSpec' - - def __init__(self, *args): - self.val = args - - def __str__(self): - return ' '.join(self.val) - - -class InternalFunctionsTest(unittest.TestCase): - - def test_format_optdict(self): - def check_against(fmt_opts, result): - for i in range(0, len(fmt_opts), 2): - self.assertEqual(result.pop(fmt_opts[i]), fmt_opts[i + 1]) - if result: - self.fail("result still got elements: %s" % result) - - # passing an empty dict should return an empty object (tuple here) - self.assertFalse(ttk._format_optdict({})) - - # check list formatting - check_against( - ttk._format_optdict({'fg': 'blue', 'padding': [1, 2, 3, 4]}), - {'-fg': 'blue', '-padding': '1 2 3 4'}) - - # check tuple formatting (same as list) - check_against( - ttk._format_optdict({'test': (1, 2, '', 0)}), - {'-test': '1 2 {} 0'}) - - # check untouched values - check_against( - 
ttk._format_optdict({'test': {'left': 'as is'}}), - {'-test': {'left': 'as is'}}) - - # check script formatting - check_against( - ttk._format_optdict( - {'test': [1, -1, '', '2m', 0], 'test2': 3, - 'test3': '', 'test4': 'abc def', - 'test5': '"abc"', 'test6': '{}', - 'test7': '} -spam {'}, script=True), - {'-test': '{1 -1 {} 2m 0}', '-test2': '3', - '-test3': '{}', '-test4': '{abc def}', - '-test5': '{"abc"}', '-test6': r'\{\}', - '-test7': r'\}\ -spam\ \{'}) - - opts = {'αβγ': True, 'á': False} - orig_opts = opts.copy() - # check if giving unicode keys is fine - check_against(ttk._format_optdict(opts), {'-αβγ': True, '-á': False}) - # opts should remain unchanged - self.assertEqual(opts, orig_opts) - - # passing values with spaces inside a tuple/list - check_against( - ttk._format_optdict( - {'option': ('one two', 'three')}), - {'-option': '{one two} three'}) - check_against( - ttk._format_optdict( - {'option': ('one\ttwo', 'three')}), - {'-option': '{one\ttwo} three'}) - - # passing empty strings inside a tuple/list - check_against( - ttk._format_optdict( - {'option': ('', 'one')}), - {'-option': '{} one'}) - - # passing values with braces inside a tuple/list - check_against( - ttk._format_optdict( - {'option': ('one} {two', 'three')}), - {'-option': r'one\}\ \{two three'}) - - # passing quoted strings inside a tuple/list - check_against( - ttk._format_optdict( - {'option': ('"one"', 'two')}), - {'-option': '{"one"} two'}) - check_against( - ttk._format_optdict( - {'option': ('{one}', 'two')}), - {'-option': r'\{one\} two'}) - - # ignore an option - amount_opts = len(ttk._format_optdict(opts, ignore=('á'))) / 2 - self.assertEqual(amount_opts, len(opts) - 1) - - # ignore non-existing options - amount_opts = len(ttk._format_optdict(opts, ignore=('á', 'b'))) / 2 - self.assertEqual(amount_opts, len(opts) - 1) - - # ignore every option - self.assertFalse(ttk._format_optdict(opts, ignore=list(opts.keys()))) - - - def test_format_mapdict(self): - opts = {'a': [('b', 'c', 'val'), ('d', 'otherval'), ('', 'single')]} - result = ttk._format_mapdict(opts) - self.assertEqual(len(result), len(list(opts.keys())) * 2) - self.assertEqual(result, ('-a', '{b c} val d otherval {} single')) - self.assertEqual(ttk._format_mapdict(opts, script=True), - ('-a', '{{b c} val d otherval {} single}')) - - self.assertEqual(ttk._format_mapdict({2: []}), ('-2', '')) - - opts = {'üñíćódè': [('á', 'vãl')]} - result = ttk._format_mapdict(opts) - self.assertEqual(result, ('-üñíćódè', 'á vãl')) - - self.assertEqual(ttk._format_mapdict({'opt': [('value',)]}), - ('-opt', '{} value')) - - # empty states - valid = {'opt': [('', '', 'hi')]} - self.assertEqual(ttk._format_mapdict(valid), ('-opt', '{ } hi')) - - # when passing multiple states, they all must be strings - invalid = {'opt': [(1, 2, 'valid val')]} - self.assertRaises(TypeError, ttk._format_mapdict, invalid) - invalid = {'opt': [([1], '2', 'valid val')]} - self.assertRaises(TypeError, ttk._format_mapdict, invalid) - # but when passing a single state, it can be anything - valid = {'opt': [[1, 'value']]} - self.assertEqual(ttk._format_mapdict(valid), ('-opt', '1 value')) - # special attention to single states which evaluate to False - for stateval in (None, 0, False, '', set()): # just some samples - valid = {'opt': [(stateval, 'value')]} - self.assertEqual(ttk._format_mapdict(valid), - ('-opt', '{} value')) - - # values must be iterable - opts = {'a': None} - self.assertRaises(TypeError, ttk._format_mapdict, opts) - - - def test_format_elemcreate(self): - 
self.assertTrue(ttk._format_elemcreate(None), (None, ())) - - ## Testing type = image - # image type expects at least an image name, so this should raise - # IndexError since it tries to access the index 0 of an empty tuple - self.assertRaises(IndexError, ttk._format_elemcreate, 'image') - - # don't format returned values as a tcl script - # minimum acceptable for image type - self.assertEqual(ttk._format_elemcreate('image', False, 'test'), - ("test ", ())) - # specifying a state spec - self.assertEqual(ttk._format_elemcreate('image', False, 'test', - ('', 'a')), ("test {} a", ())) - # state spec with multiple states - self.assertEqual(ttk._format_elemcreate('image', False, 'test', - ('a', 'b', 'c')), ("test {a b} c", ())) - # state spec and options - self.assertEqual(ttk._format_elemcreate('image', False, 'test', - ('a', 'b'), a='x'), ("test a b", ("-a", "x"))) - # format returned values as a tcl script - # state spec with multiple states and an option with a multivalue - self.assertEqual(ttk._format_elemcreate('image', True, 'test', - ('a', 'b', 'c', 'd'), x=[2, 3]), ("{test {a b c} d}", "-x {2 3}")) - - ## Testing type = vsapi - # vsapi type expects at least a class name and a part_id, so this - # should raise a ValueError since it tries to get two elements from - # an empty tuple - self.assertRaises(ValueError, ttk._format_elemcreate, 'vsapi') - - # don't format returned values as a tcl script - # minimum acceptable for vsapi - self.assertEqual(ttk._format_elemcreate('vsapi', False, 'a', 'b'), - ("a b ", ())) - # now with a state spec with multiple states - self.assertEqual(ttk._format_elemcreate('vsapi', False, 'a', 'b', - ('a', 'b', 'c')), ("a b {a b} c", ())) - # state spec and option - self.assertEqual(ttk._format_elemcreate('vsapi', False, 'a', 'b', - ('a', 'b'), opt='x'), ("a b a b", ("-opt", "x"))) - # format returned values as a tcl script - # state spec with a multivalue and an option - self.assertEqual(ttk._format_elemcreate('vsapi', True, 'a', 'b', - ('a', 'b', [1, 2]), opt='x'), ("{a b {a b} {1 2}}", "-opt x")) - - # Testing type = from - # from type expects at least a type name - self.assertRaises(IndexError, ttk._format_elemcreate, 'from') - - self.assertEqual(ttk._format_elemcreate('from', False, 'a'), - ('a', ())) - self.assertEqual(ttk._format_elemcreate('from', False, 'a', 'b'), - ('a', ('b', ))) - self.assertEqual(ttk._format_elemcreate('from', True, 'a', 'b'), - ('{a}', 'b')) - - - def test_format_layoutlist(self): - def sample(indent=0, indent_size=2): - return ttk._format_layoutlist( - [('a', {'other': [1, 2, 3], 'children': - [('b', {'children': - [('c', {'children': - [('d', {'nice': 'opt'})], 'something': (1, 2) - })] - })] - })], indent=indent, indent_size=indent_size)[0] - - def sample_expected(indent=0, indent_size=2): - spaces = lambda amount=0: ' ' * (amount + indent) - return ( - "%sa -other {1 2 3} -children {\n" - "%sb -children {\n" - "%sc -something {1 2} -children {\n" - "%sd -nice opt\n" - "%s}\n" - "%s}\n" - "%s}" % (spaces(), spaces(indent_size), - spaces(2 * indent_size), spaces(3 * indent_size), - spaces(2 * indent_size), spaces(indent_size), spaces())) - - # empty layout - self.assertEqual(ttk._format_layoutlist([])[0], '') - - # _format_layoutlist always expects the second item (in every item) - # to act like a dict (except when the value evaluates to False). 
- self.assertRaises(AttributeError, - ttk._format_layoutlist, [('a', 'b')]) - - smallest = ttk._format_layoutlist([('a', None)], indent=0) - self.assertEqual(smallest, - ttk._format_layoutlist([('a', '')], indent=0)) - self.assertEqual(smallest[0], 'a') - - # testing indentation levels - self.assertEqual(sample(), sample_expected()) - for i in range(4): - self.assertEqual(sample(i), sample_expected(i)) - self.assertEqual(sample(i, i), sample_expected(i, i)) - - # invalid layout format, different kind of exceptions will be - # raised by internal functions - - # plain wrong format - self.assertRaises(ValueError, ttk._format_layoutlist, - ['bad', 'format']) - # will try to use iteritems in the 'bad' string - self.assertRaises(AttributeError, ttk._format_layoutlist, - [('name', 'bad')]) - # bad children formatting - self.assertRaises(ValueError, ttk._format_layoutlist, - [('name', {'children': {'a': None}})]) - - - def test_script_from_settings(self): - # empty options - self.assertFalse(ttk._script_from_settings({'name': - {'configure': None, 'map': None, 'element create': None}})) - - # empty layout - self.assertEqual( - ttk._script_from_settings({'name': {'layout': None}}), - "ttk::style layout name {\nnull\n}") - - configdict = {'αβγ': True, 'á': False} - self.assertTrue( - ttk._script_from_settings({'name': {'configure': configdict}})) - - mapdict = {'üñíćódè': [('á', 'vãl')]} - self.assertTrue( - ttk._script_from_settings({'name': {'map': mapdict}})) - - # invalid image element - self.assertRaises(IndexError, - ttk._script_from_settings, {'name': {'element create': ['image']}}) - - # minimal valid image - self.assertTrue(ttk._script_from_settings({'name': - {'element create': ['image', 'name']}})) - - image = {'thing': {'element create': - ['image', 'name', ('state1', 'state2', 'val')]}} - self.assertEqual(ttk._script_from_settings(image), - "ttk::style element create thing image {name {state1 state2} val} ") - - image['thing']['element create'].append({'opt': 30}) - self.assertEqual(ttk._script_from_settings(image), - "ttk::style element create thing image {name {state1 state2} val} " - "-opt 30") - - image['thing']['element create'][-1]['opt'] = [MockTclObj(3), - MockTclObj('2m')] - self.assertEqual(ttk._script_from_settings(image), - "ttk::style element create thing image {name {state1 state2} val} " - "-opt {3 2m}") - - - def test_tclobj_to_py(self): - self.assertEqual( - ttk._tclobj_to_py((MockStateSpec('a', 'b'), 'val')), - [('a', 'b', 'val')]) - self.assertEqual( - ttk._tclobj_to_py([MockTclObj('1'), 2, MockTclObj('3m')]), - [1, 2, '3m']) - - - def test_list_from_statespec(self): - def test_it(sspec, value, res_value, states): - self.assertEqual(ttk._list_from_statespec( - (sspec, value)), [states + (res_value, )]) - - states_even = tuple('state%d' % i for i in range(6)) - statespec = MockStateSpec(*states_even) - test_it(statespec, 'val', 'val', states_even) - test_it(statespec, MockTclObj('val'), 'val', states_even) - - states_odd = tuple('state%d' % i for i in range(5)) - statespec = MockStateSpec(*states_odd) - test_it(statespec, 'val', 'val', states_odd) - - test_it(('a', 'b', 'c'), MockTclObj('val'), 'val', ('a', 'b', 'c')) - - - def test_list_from_layouttuple(self): - tk = MockTkApp() - - # empty layout tuple - self.assertFalse(ttk._list_from_layouttuple(tk, ())) - - # shortest layout tuple - self.assertEqual(ttk._list_from_layouttuple(tk, ('name', )), - [('name', {})]) - - # not so interesting ltuple - sample_ltuple = ('name', '-option', 'value') - 
self.assertEqual(ttk._list_from_layouttuple(tk, sample_ltuple), - [('name', {'option': 'value'})]) - - # empty children - self.assertEqual(ttk._list_from_layouttuple(tk, - ('something', '-children', ())), - [('something', {'children': []})] - ) - - # more interesting ltuple - ltuple = ( - 'name', '-option', 'niceone', '-children', ( - ('otherone', '-children', ( - ('child', )), '-otheropt', 'othervalue' - ) - ) - ) - self.assertEqual(ttk._list_from_layouttuple(tk, ltuple), - [('name', {'option': 'niceone', 'children': - [('otherone', {'otheropt': 'othervalue', 'children': - [('child', {})] - })] - })] - ) - - # bad tuples - self.assertRaises(ValueError, ttk._list_from_layouttuple, tk, - ('name', 'no_minus')) - self.assertRaises(ValueError, ttk._list_from_layouttuple, tk, - ('name', 'no_minus', 'value')) - self.assertRaises(ValueError, ttk._list_from_layouttuple, tk, - ('something', '-children')) # no children - - - def test_val_or_dict(self): - def func(res, opt=None, val=None): - if opt is None: - return res - if val is None: - return "test val" - return (opt, val) - - tk = MockTkApp() - tk.call = func - - self.assertEqual(ttk._val_or_dict(tk, {}, '-test:3'), - {'test': '3'}) - self.assertEqual(ttk._val_or_dict(tk, {}, ('-test', 3)), - {'test': 3}) - - self.assertEqual(ttk._val_or_dict(tk, {'test': None}, 'x:y'), - 'test val') - - self.assertEqual(ttk._val_or_dict(tk, {'test': 3}, 'x:y'), - {'test': 3}) - - - def test_convert_stringval(self): - tests = ( - (0, 0), ('09', 9), ('a', 'a'), ('áÚ', 'áÚ'), ([], '[]'), - (None, 'None') - ) - for orig, expected in tests: - self.assertEqual(ttk._convert_stringval(orig), expected) - - -class TclObjsToPyTest(unittest.TestCase): - - def test_unicode(self): - adict = {'opt': 'välúè'} - self.assertEqual(ttk.tclobjs_to_py(adict), {'opt': 'välúè'}) - - adict['opt'] = MockTclObj(adict['opt']) - self.assertEqual(ttk.tclobjs_to_py(adict), {'opt': 'välúè'}) - - def test_multivalues(self): - adict = {'opt': [1, 2, 3, 4]} - self.assertEqual(ttk.tclobjs_to_py(adict), {'opt': [1, 2, 3, 4]}) - - adict['opt'] = [1, 'xm', 3] - self.assertEqual(ttk.tclobjs_to_py(adict), {'opt': [1, 'xm', 3]}) - - adict['opt'] = (MockStateSpec('a', 'b'), 'válũè') - self.assertEqual(ttk.tclobjs_to_py(adict), - {'opt': [('a', 'b', 'válũè')]}) - - self.assertEqual(ttk.tclobjs_to_py({'x': ['y z']}), - {'x': ['y z']}) - - def test_nosplit(self): - self.assertEqual(ttk.tclobjs_to_py({'text': 'some text'}), - {'text': 'some text'}) - -tests_nogui = (InternalFunctionsTest, TclObjsToPyTest) - -if __name__ == "__main__": - from test.support import run_unittest - run_unittest(*tests_nogui) diff -Nru python3-stdlib-extensions-3.9.7/3.10/Lib/tkinter/test/test_ttk/test_style.py python3-stdlib-extensions-3.10.8/3.10/Lib/tkinter/test/test_ttk/test_style.py --- python3-stdlib-extensions-3.9.7/3.10/Lib/tkinter/test/test_ttk/test_style.py 2021-08-02 19:53:59.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.10/Lib/tkinter/test/test_ttk/test_style.py 2022-10-11 11:21:44.000000000 +0000 @@ -3,8 +3,8 @@ import tkinter from tkinter import ttk from test import support -from test.support import requires, run_unittest -from tkinter.test.support import AbstractTkTest +from test.support import requires +from tkinter.test.support import AbstractTkTest, get_tk_patchlevel requires('gui') @@ -170,12 +170,12 @@ newname = f'C.{name}' self.assertEqual(style.map(newname), {}) style.map(newname, **default) + if theme == 'alt' and name == '.' 
and get_tk_patchlevel() < (8, 6, 1): + default['embossed'] = [('disabled', '1')] self.assertEqual(style.map(newname), default) for key, value in default.items(): self.assertEqual(style.map(newname, key), value) -tests_gui = (StyleTest, ) - if __name__ == "__main__": - run_unittest(*tests_gui) + unittest.main() diff -Nru python3-stdlib-extensions-3.9.7/3.10/Lib/tkinter/test/test_ttk/test_widgets.py python3-stdlib-extensions-3.10.8/3.10/Lib/tkinter/test/test_ttk/test_widgets.py --- python3-stdlib-extensions-3.9.7/3.10/Lib/tkinter/test/test_ttk/test_widgets.py 2021-08-02 19:53:59.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.10/Lib/tkinter/test/test_ttk/test_widgets.py 2022-10-11 11:21:44.000000000 +0000 @@ -1,10 +1,10 @@ import unittest import tkinter from tkinter import ttk, TclError -from test.support import requires +from test.support import requires, gc_collect import sys -from tkinter.test.test_ttk.test_functions import MockTclObj +from test.test_ttk_textonly import MockTclObj from tkinter.test.support import (AbstractTkTest, tcl_version, get_tk_patchlevel, simulate_mouse_click, AbstractDefaultRootTest) from tkinter.test.widget_tests import (add_standard_options, noconv, @@ -61,7 +61,6 @@ self.widget = ttk.Button(self.root, width=0, text="Text") self.widget.pack() - def test_identify(self): self.widget.update() self.assertEqual(self.widget.identify( @@ -74,7 +73,6 @@ self.assertRaises(tkinter.TclError, self.widget.identify, 5, None) self.assertRaises(tkinter.TclError, self.widget.identify, 5, '') - def test_widget_state(self): # XXX not sure about the portability of all these tests self.assertEqual(self.widget.state(), ()) @@ -169,10 +167,13 @@ errmsg='image "spam" doesn\'t exist') def test_configure_compound(self): + options = 'none text image center top bottom left right'.split() + errmsg = ( + 'bad compound "{}": must be' + f' {", ".join(options[:-1])}, or {options[-1]}' + ) widget = self.create() - self.checkEnumParam(widget, 'compound', - 'none', 'text', 'image', 'center', - 'top', 'bottom', 'left', 'right') + self.checkEnumParam(widget, 'compound', *options, errmsg=errmsg) def test_configure_state(self): widget = self.create() @@ -274,6 +275,21 @@ self.assertEqual(cbtn['offvalue'], cbtn.tk.globalgetvar(cbtn['variable'])) + def test_unique_variables(self): + frames = [] + buttons = [] + for i in range(2): + f = ttk.Frame(self.root) + f.pack() + frames.append(f) + for j in 'AB': + b = ttk.Checkbutton(f, text=j) + b.pack() + buttons.append(b) + variables = [str(b['variable']) for b in buttons] + print(variables) + self.assertEqual(len(set(variables)), 4, variables) + @add_standard_options(IntegerSizeTests, StandardTtkOptionsTests) class EntryTest(AbstractWidgetTest, unittest.TestCase): @@ -284,6 +300,7 @@ 'show', 'state', 'style', 'takefocus', 'textvariable', 'validate', 'validatecommand', 'width', 'xscrollcommand', ) + IDENTIFY_AS = 'Entry.field' if sys.platform == 'darwin' else 'textarea' def setUp(self): super().setUp() @@ -316,30 +333,23 @@ widget = self.create() self.checkCommandParam(widget, 'validatecommand') - def test_bbox(self): self.assertIsBoundingBox(self.entry.bbox(0)) self.assertRaises(tkinter.TclError, self.entry.bbox, 'noindex') self.assertRaises(tkinter.TclError, self.entry.bbox, None) - def test_identify(self): self.entry.pack() self.entry.update() # bpo-27313: macOS Cocoa widget differs from X, allow either - if sys.platform == 'darwin': - self.assertIn(self.entry.identify(5, 5), - ("textarea", "Combobox.button") ) - else: - 
self.assertEqual(self.entry.identify(5, 5), "textarea") + self.assertEqual(self.entry.identify(5, 5), self.IDENTIFY_AS) self.assertEqual(self.entry.identify(-1, -1), "") self.assertRaises(tkinter.TclError, self.entry.identify, None, 5) self.assertRaises(tkinter.TclError, self.entry.identify, 5, None) self.assertRaises(tkinter.TclError, self.entry.identify, 5, '') - def test_validation_options(self): success = [] test_invalid = lambda: success.append(True) @@ -367,7 +377,6 @@ self.entry['validatecommand'] = True self.assertRaises(tkinter.TclError, self.entry.validate) - def test_validation(self): validation = [] def validate(to_insert): @@ -385,7 +394,6 @@ self.assertEqual(validation, [False, True]) self.assertEqual(self.entry.get(), 'a') - def test_revalidation(self): def validate(content): for letter in content: @@ -421,6 +429,7 @@ 'validate', 'validatecommand', 'values', 'width', 'xscrollcommand', ) + IDENTIFY_AS = 'Combobox.button' if sys.platform == 'darwin' else 'textarea' def setUp(self): super().setUp() @@ -436,7 +445,8 @@ def _show_drop_down_listbox(self): width = self.combo.winfo_width() x, y = width - 5, 5 - self.assertRegex(self.combo.identify(x, y), r'.*downarrow\Z') + if sys.platform != 'darwin': # there's no down arrow on macOS + self.assertRegex(self.combo.identify(x, y), r'.*downarrow\Z') self.combo.event_generate('', x=x, y=y) self.combo.event_generate('', x=x, y=y) self.combo.update_idletasks() @@ -458,7 +468,6 @@ self.assertTrue(success) - def test_configure_postcommand(self): success = [] @@ -474,7 +483,6 @@ self._show_drop_down_listbox() self.assertEqual(len(success), 1) - def test_configure_values(self): def check_get_current(getval, currval): self.assertEqual(self.combo.get(), getval) @@ -589,7 +597,6 @@ other_child.destroy() self.assertRaises(tkinter.TclError, self.paned.pane, 0) - def test_forget(self): self.assertRaises(tkinter.TclError, self.paned.forget, None) self.assertRaises(tkinter.TclError, self.paned.forget, 0) @@ -598,7 +605,6 @@ self.paned.forget(0) self.assertRaises(tkinter.TclError, self.paned.forget, 0) - def test_insert(self): self.assertRaises(tkinter.TclError, self.paned.insert, None, 0) self.assertRaises(tkinter.TclError, self.paned.insert, 0, None) @@ -633,7 +639,6 @@ self.assertEqual(self.paned.panes(), (str(child3), str(child2), str(child))) - def test_pane(self): self.assertRaises(tkinter.TclError, self.paned.pane, 0) @@ -650,7 +655,6 @@ self.assertRaises(tkinter.TclError, self.paned.pane, 0, badoption='somevalue') - def test_sashpos(self): self.assertRaises(tkinter.TclError, self.paned.sashpos, None) self.assertRaises(tkinter.TclError, self.paned.sashpos, '') @@ -798,7 +802,6 @@ self.assertFalse(failure) - def test_get(self): if self.wantobjects: conv = lambda x: x @@ -816,7 +819,6 @@ self.assertRaises(tkinter.TclError, self.scale.get, '', 0) self.assertRaises(tkinter.TclError, self.scale.get, 0, '') - def test_set(self): if self.wantobjects: conv = lambda x: x @@ -839,6 +841,7 @@ self.assertEqual(conv(self.scale.get()), var.get()) self.assertEqual(conv(self.scale.get()), max + 5) del var + gc_collect() # For PyPy or other GCs. 
# the same happens with the value option self.scale['value'] = max + 10 @@ -948,7 +951,6 @@ else: self.fail("Tab with text 'a' not found") - def test_add_and_hidden(self): self.assertRaises(tkinter.TclError, self.nb.hide, -1) self.assertRaises(tkinter.TclError, self.nb.hide, 'hi') @@ -967,7 +969,7 @@ tabs = self.nb.tabs() curr = self.nb.index('current') - # verify that the tab gets readded at its previous position + # verify that the tab gets re-added at its previous position child2_index = self.nb.index(self.child2) self.nb.hide(self.child2) self.nb.add(self.child2) @@ -977,7 +979,6 @@ # but the tab next to it (not hidden) is the one selected now self.assertEqual(self.nb.index('current'), curr + 1) - def test_forget(self): self.assertRaises(tkinter.TclError, self.nb.forget, -1) self.assertRaises(tkinter.TclError, self.nb.forget, 'hi') @@ -993,7 +994,6 @@ self.assertEqual(self.nb.index(self.child1), 1) self.assertNotEqual(child1_index, self.nb.index(self.child1)) - def test_index(self): self.assertRaises(tkinter.TclError, self.nb.index, -1) self.assertRaises(tkinter.TclError, self.nb.index, None) @@ -1003,7 +1003,6 @@ self.assertEqual(self.nb.index(self.child2), 1) self.assertEqual(self.nb.index('end'), 2) - def test_insert(self): # moving tabs tabs = self.nb.tabs() @@ -1036,7 +1035,6 @@ self.assertRaises(tkinter.TclError, self.nb.insert, None, 0) self.assertRaises(tkinter.TclError, self.nb.insert, None, None) - def test_select(self): self.nb.pack() self.nb.update() @@ -1056,7 +1054,6 @@ self.nb.update() self.assertTrue(tab_changed) - def test_tab(self): self.assertRaises(tkinter.TclError, self.nb.tab, -1) self.assertRaises(tkinter.TclError, self.nb.tab, 'notab') @@ -1070,7 +1067,6 @@ self.assertEqual(self.nb.tab(self.child1, text=None), 'abc') self.assertEqual(self.nb.tab(self.child1, 'text'), 'abc') - def test_configure_tabs(self): self.assertEqual(len(self.nb.tabs()), 2) @@ -1079,14 +1075,14 @@ self.assertEqual(self.nb.tabs(), ()) - def test_traversal(self): self.nb.pack() self.nb.update() self.nb.select(0) - self.assertEqual(self.nb.identify(5, 5), 'focus') + focus_identify_as = 'focus' if sys.platform != 'darwin' else '' + self.assertEqual(self.nb.identify(5, 5), focus_identify_as) simulate_mouse_click(self.nb, 5, 5) self.nb.focus_force() self.nb.event_generate('') @@ -1099,15 +1095,24 @@ self.assertEqual(self.nb.select(), str(self.child2)) self.nb.tab(self.child1, text='a', underline=0) + self.nb.tab(self.child2, text='e', underline=0) self.nb.enable_traversal() self.nb.focus_force() - self.assertEqual(self.nb.identify(5, 5), 'focus') + self.assertEqual(self.nb.identify(5, 5), focus_identify_as) simulate_mouse_click(self.nb, 5, 5) + # on macOS Emacs-style keyboard shortcuts are region-dependent; + # let's use the regular arrow keys instead if sys.platform == 'darwin': - self.nb.event_generate('') + begin = '' + end = '' else: - self.nb.event_generate('') + begin = '' + end = '' + self.nb.event_generate(begin) self.assertEqual(self.nb.select(), str(self.child1)) + self.nb.event_generate(end) + self.assertEqual(self.nb.select(), str(self.child2)) + @add_standard_options(IntegerSizeTests, StandardTtkOptionsTests) class SpinboxTest(EntryTest, unittest.TestCase): @@ -1118,6 +1123,7 @@ 'takefocus', 'textvariable', 'to', 'validate', 'validatecommand', 'values', 'width', 'wrap', 'xscrollcommand', ) + IDENTIFY_AS = 'Spinbox.field' if sys.platform == 'darwin' else 'textarea' def setUp(self): super().setUp() @@ -1366,7 +1372,6 @@ child1 = self.tv.insert(item_id, 'end') 
self.assertEqual(self.tv.bbox(child1), '') - def test_children(self): # no children yet, should get an empty tuple self.assertEqual(self.tv.get_children(), ()) @@ -1394,7 +1399,6 @@ self.tv.set_children('') self.assertEqual(self.tv.get_children(), ()) - def test_column(self): # return a dict with all options/values self.assertIsInstance(self.tv.column('#0'), dict) @@ -1420,7 +1424,6 @@ self.assertRaises(tkinter.TclError, self.tv.column, '#0', **kw) - def test_delete(self): self.assertRaises(tkinter.TclError, self.tv.delete, '#0') @@ -1444,7 +1447,6 @@ self.tv.delete(item1, item2) self.assertFalse(self.tv.get_children()) - def test_detach_reattach(self): item_id = self.tv.insert('', 'end') item2 = self.tv.insert(item_id, 'end') @@ -1486,7 +1488,6 @@ self.assertEqual(self.tv.get_children(), ()) self.assertEqual(self.tv.get_children(item_id), ()) - def test_exists(self): self.assertEqual(self.tv.exists('something'), False) self.assertEqual(self.tv.exists(''), True) @@ -1497,7 +1498,6 @@ # in the tcl interpreter since tk requires an item. self.assertRaises(tkinter.TclError, self.tv.exists, None) - def test_focus(self): # nothing is focused right now self.assertEqual(self.tv.focus(), '') @@ -1512,7 +1512,6 @@ # try focusing inexistent item self.assertRaises(tkinter.TclError, self.tv.focus, 'hi') - def test_heading(self): # check a dict is returned self.assertIsInstance(self.tv.heading('#0'), dict) @@ -1564,7 +1563,6 @@ #self.tv.heading('#0', command='I dont exist') #simulate_heading_click(5, 5) - def test_index(self): # item 'what' doesn't exist self.assertRaises(tkinter.TclError, self.tv.index, 'what') @@ -1595,7 +1593,6 @@ self.tv.delete(item1) self.assertRaises(tkinter.TclError, self.tv.index, c2) - def test_insert_item(self): # parent 'none' doesn't exist self.assertRaises(tkinter.TclError, self.tv.insert, 'none', 'end') @@ -1672,7 +1669,6 @@ self.assertRaises(tkinter.TclError, self.tv.insert, '', 'end', False) self.assertRaises(tkinter.TclError, self.tv.insert, '', 'end', '') - def test_selection(self): self.assertRaises(TypeError, self.tv.selection, 'spam') # item 'none' doesn't exist @@ -1743,7 +1739,6 @@ self.tv.selection_toggle((c1, c3)) self.assertEqual(self.tv.selection(), (c3, item2)) - def test_set(self): self.tv['columns'] = ['A', 'B'] item = self.tv.insert('', 'end', values=['a', 'b']) @@ -1776,7 +1771,6 @@ # inexistent item self.assertRaises(tkinter.TclError, self.tv.set, 'notme') - def test_tag_bind(self): events = [] item1 = self.tv.insert('', 'end', tags=['call']) @@ -1809,7 +1803,6 @@ for evt in zip(events[::2], events[1::2]): self.assertEqual(evt, (1, 2)) - def test_tag_configure(self): # Just testing parameter passing for now self.assertRaises(TypeError, self.tv.tag_configure) diff -Nru python3-stdlib-extensions-3.9.7/3.10/Lib/tkinter/tix.py python3-stdlib-extensions-3.10.8/3.10/Lib/tkinter/tix.py --- python3-stdlib-extensions-3.9.7/3.10/Lib/tkinter/tix.py 2021-08-02 19:53:59.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.10/Lib/tkinter/tix.py 2022-10-11 11:21:44.000000000 +0000 @@ -310,7 +310,7 @@ del cnf[k] self.widgetName = widgetName - Widget._setup(self, master, cnf) + self._setup(master, cnf) # If widgetName is None, this is a dummy creation call where the # corresponding Tk widget has already been created by Tix diff -Nru python3-stdlib-extensions-3.9.7/3.10/Lib/tkinter/ttk.py python3-stdlib-extensions-3.10.8/3.10/Lib/tkinter/ttk.py --- python3-stdlib-extensions-3.9.7/3.10/Lib/tkinter/ttk.py 2021-08-02 19:53:59.000000000 +0000 +++ 
python3-stdlib-extensions-3.10.8/3.10/Lib/tkinter/ttk.py 2022-10-11 11:21:44.000000000 +0000 @@ -1643,7 +1643,10 @@ menu.delete(0, 'end') for val in values: menu.add_radiobutton(label=val, - command=tkinter._setit(self._variable, val, self._callback), + command=( + None if self._callback is None + else lambda val=val: self._callback(val) + ), variable=self._variable) if default: diff -Nru python3-stdlib-extensions-3.9.7/3.10/Modules/_gdbmmodule.c python3-stdlib-extensions-3.10.8/3.10/Modules/_gdbmmodule.c --- python3-stdlib-extensions-3.9.7/3.10/Modules/_gdbmmodule.c 2021-08-02 19:53:59.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.10/Modules/_gdbmmodule.c 2022-10-11 11:21:44.000000000 +0000 @@ -450,7 +450,7 @@ to create a list in memory that contains them all: k = db.firstkey() - while k != None: + while k is not None: print(k) k = db.nextkey(k) [clinic start generated code]*/ @@ -458,7 +458,7 @@ static PyObject * _gdbm_gdbm_nextkey_impl(gdbmobject *self, PyTypeObject *cls, const char *key, Py_ssize_clean_t key_length) -/*[clinic end generated code: output=204964441fdbaf02 input=fcf6a51a96ce0172]*/ +/*[clinic end generated code: output=204964441fdbaf02 input=365e297bc0b3db48]*/ { PyObject *v; datum dbm_key, nextkey; diff -Nru python3-stdlib-extensions-3.9.7/3.10/Modules/_tkinter.c python3-stdlib-extensions-3.10.8/3.10/Modules/_tkinter.c --- python3-stdlib-extensions-3.9.7/3.10/Modules/_tkinter.c 2021-08-02 19:53:59.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.10/Modules/_tkinter.c 2022-10-11 11:21:44.000000000 +0000 @@ -936,7 +936,7 @@ Py_INCREF(self->string); return self->string; } - /* XXX Could chache result if it is non-ASCII. */ + /* XXX Could cache result if it is non-ASCII. */ return unicodeFromTclObj(self->value); } diff -Nru python3-stdlib-extensions-3.9.7/3.10/Modules/clinic/_gdbmmodule.c.h python3-stdlib-extensions-3.10.8/3.10/Modules/clinic/_gdbmmodule.c.h --- python3-stdlib-extensions-3.9.7/3.10/Modules/clinic/_gdbmmodule.c.h 2021-08-02 19:53:59.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.10/Modules/clinic/_gdbmmodule.c.h 2022-10-11 11:21:44.000000000 +0000 @@ -104,18 +104,11 @@ static PyObject * _gdbm_gdbm_keys(gdbmobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { - PyObject *return_value = NULL; - static const char * const _keywords[] = { NULL}; - static _PyArg_Parser _parser = {":keys", _keywords, 0}; - - if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser - )) { - goto exit; + if (nargs) { + PyErr_SetString(PyExc_TypeError, "keys() takes no arguments"); + return NULL; } - return_value = _gdbm_gdbm_keys_impl(self, cls); - -exit: - return return_value; + return _gdbm_gdbm_keys_impl(self, cls); } PyDoc_STRVAR(_gdbm_gdbm_firstkey__doc__, @@ -137,18 +130,11 @@ static PyObject * _gdbm_gdbm_firstkey(gdbmobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { - PyObject *return_value = NULL; - static const char * const _keywords[] = { NULL}; - static _PyArg_Parser _parser = {":firstkey", _keywords, 0}; - - if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser - )) { - goto exit; + if (nargs) { + PyErr_SetString(PyExc_TypeError, "firstkey() takes no arguments"); + return NULL; } - return_value = _gdbm_gdbm_firstkey_impl(self, cls); - -exit: - return return_value; + return _gdbm_gdbm_firstkey_impl(self, cls); } PyDoc_STRVAR(_gdbm_gdbm_nextkey__doc__, @@ -161,7 +147,7 @@ "to create a list in memory that contains them all:\n" "\n" " k = 
db.firstkey()\n" -" while k != None:\n" +" while k is not None:\n" " print(k)\n" " k = db.nextkey(k)"); @@ -212,18 +198,11 @@ static PyObject * _gdbm_gdbm_reorganize(gdbmobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { - PyObject *return_value = NULL; - static const char * const _keywords[] = { NULL}; - static _PyArg_Parser _parser = {":reorganize", _keywords, 0}; - - if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser - )) { - goto exit; + if (nargs) { + PyErr_SetString(PyExc_TypeError, "reorganize() takes no arguments"); + return NULL; } - return_value = _gdbm_gdbm_reorganize_impl(self, cls); - -exit: - return return_value; + return _gdbm_gdbm_reorganize_impl(self, cls); } PyDoc_STRVAR(_gdbm_gdbm_sync__doc__, @@ -244,18 +223,11 @@ static PyObject * _gdbm_gdbm_sync(gdbmobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { - PyObject *return_value = NULL; - static const char * const _keywords[] = { NULL}; - static _PyArg_Parser _parser = {":sync", _keywords, 0}; - - if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser - )) { - goto exit; + if (nargs) { + PyErr_SetString(PyExc_TypeError, "sync() takes no arguments"); + return NULL; } - return_value = _gdbm_gdbm_sync_impl(self, cls); - -exit: - return return_value; + return _gdbm_gdbm_sync_impl(self, cls); } PyDoc_STRVAR(dbmopen__doc__, @@ -340,4 +312,4 @@ exit: return return_value; } -/*[clinic end generated code: output=e84bc6ac82fcb6d4 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=125f5bc685304744 input=a9049054013a1b77]*/ diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/README python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/README --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/README 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/README 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,11 @@ +This directory contains the Distutils package. + +There's a full documentation available at: + + https://docs.python.org/distutils/ + +The Distutils-SIG web page is also a good starting point: + + https://www.python.org/sigs/distutils-sig/ + +$Id$ diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/__init__.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/__init__.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/__init__.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/__init__.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,20 @@ +"""distutils + +The main package for the Python Module Distribution Utilities. Normally +used from a setup script as + + from distutils.core import setup + + setup (...) +""" + +import sys +import warnings + +__version__ = sys.version[:sys.version.index(' ')] + +_DEPRECATION_MESSAGE = ("The distutils package is deprecated and slated for " + "removal in Python 3.12. 
Use setuptools or check " + "PEP 632 for potential alternatives") +warnings.warn(_DEPRECATION_MESSAGE, + DeprecationWarning, 2) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/_msvccompiler.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/_msvccompiler.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/_msvccompiler.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/_msvccompiler.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,539 @@ +"""distutils._msvccompiler + +Contains MSVCCompiler, an implementation of the abstract CCompiler class +for Microsoft Visual Studio 2015. + +The module is compatible with VS 2015 and later. You can find legacy support +for older versions in distutils.msvc9compiler and distutils.msvccompiler. +""" + +# Written by Perry Stoll +# hacked by Robin Becker and Thomas Heller to do a better job of +# finding DevStudio (through the registry) +# ported to VS 2005 and VS 2008 by Christian Heimes +# ported to VS 2015 by Steve Dower + +import os +import subprocess +import winreg + +from distutils.errors import DistutilsExecError, DistutilsPlatformError, \ + CompileError, LibError, LinkError +from distutils.ccompiler import CCompiler, gen_lib_options +from distutils import log +from distutils.util import get_platform + +from itertools import count + +def _find_vc2015(): + try: + key = winreg.OpenKeyEx( + winreg.HKEY_LOCAL_MACHINE, + r"Software\Microsoft\VisualStudio\SxS\VC7", + access=winreg.KEY_READ | winreg.KEY_WOW64_32KEY + ) + except OSError: + log.debug("Visual C++ is not registered") + return None, None + + best_version = 0 + best_dir = None + with key: + for i in count(): + try: + v, vc_dir, vt = winreg.EnumValue(key, i) + except OSError: + break + if v and vt == winreg.REG_SZ and os.path.isdir(vc_dir): + try: + version = int(float(v)) + except (ValueError, TypeError): + continue + if version >= 14 and version > best_version: + best_version, best_dir = version, vc_dir + return best_version, best_dir + +def _find_vc2017(): + """Returns "15, path" based on the result of invoking vswhere.exe + If no install is found, returns "None, None" + + The version is returned to avoid unnecessarily changing the function + result. It may be ignored when the path is not None. + + If vswhere.exe is not available, by definition, VS 2017 is not + installed. 
+ """ + root = os.environ.get("ProgramFiles(x86)") or os.environ.get("ProgramFiles") + if not root: + return None, None + + try: + path = subprocess.check_output([ + os.path.join(root, "Microsoft Visual Studio", "Installer", "vswhere.exe"), + "-latest", + "-prerelease", + "-requires", "Microsoft.VisualStudio.Component.VC.Tools.x86.x64", + "-property", "installationPath", + "-products", "*", + ], encoding="mbcs", errors="strict").strip() + except (subprocess.CalledProcessError, OSError, UnicodeDecodeError): + return None, None + + path = os.path.join(path, "VC", "Auxiliary", "Build") + if os.path.isdir(path): + return 15, path + + return None, None + +PLAT_SPEC_TO_RUNTIME = { + 'x86' : 'x86', + 'x86_amd64' : 'x64', + 'x86_arm' : 'arm', + 'x86_arm64' : 'arm64' +} + +def _find_vcvarsall(plat_spec): + # bpo-38597: Removed vcruntime return value + _, best_dir = _find_vc2017() + + if not best_dir: + best_version, best_dir = _find_vc2015() + + if not best_dir: + log.debug("No suitable Visual C++ version found") + return None, None + + vcvarsall = os.path.join(best_dir, "vcvarsall.bat") + if not os.path.isfile(vcvarsall): + log.debug("%s cannot be found", vcvarsall) + return None, None + + return vcvarsall, None + +def _get_vc_env(plat_spec): + if os.getenv("DISTUTILS_USE_SDK"): + return { + key.lower(): value + for key, value in os.environ.items() + } + + vcvarsall, _ = _find_vcvarsall(plat_spec) + if not vcvarsall: + raise DistutilsPlatformError("Unable to find vcvarsall.bat") + + try: + out = subprocess.check_output( + 'cmd /u /c "{}" {} && set'.format(vcvarsall, plat_spec), + stderr=subprocess.STDOUT, + ).decode('utf-16le', errors='replace') + except subprocess.CalledProcessError as exc: + log.error(exc.output) + raise DistutilsPlatformError("Error executing {}" + .format(exc.cmd)) + + env = { + key.lower(): value + for key, _, value in + (line.partition('=') for line in out.splitlines()) + if key and value + } + + return env + +def _find_exe(exe, paths=None): + """Return path to an MSVC executable program. + + Tries to find the program in several places: first, one of the + MSVC program search paths from the registry; next, the directories + in the PATH environment variable. If any of those work, return an + absolute path that is known to exist. If none of them work, just + return the original program name, 'exe'. + """ + if not paths: + paths = os.getenv('path').split(os.pathsep) + for p in paths: + fn = os.path.join(os.path.abspath(p), exe) + if os.path.isfile(fn): + return fn + return exe + +# A map keyed by get_platform() return values to values accepted by +# 'vcvarsall.bat'. Always cross-compile from x86 to work with the +# lighter-weight MSVC installs that do not include native 64-bit tools. +PLAT_TO_VCVARS = { + 'win32' : 'x86', + 'win-amd64' : 'x86_amd64', + 'win-arm32' : 'x86_arm', + 'win-arm64' : 'x86_arm64' +} + +class MSVCCompiler(CCompiler) : + """Concrete class that implements an interface to Microsoft Visual C++, + as defined by the CCompiler abstract class.""" + + compiler_type = 'msvc' + + # Just set this so CCompiler's constructor doesn't barf. We currently + # don't use the 'set_executables()' bureaucracy provided by CCompiler, + # as it really isn't necessary for this sort of single-compiler class. + # Would be nice to have a consistent interface with UnixCCompiler, + # though, so it's worth thinking about. 
+ executables = {} + + # Private class data (need to distinguish C from C++ source for compiler) + _c_extensions = ['.c'] + _cpp_extensions = ['.cc', '.cpp', '.cxx'] + _rc_extensions = ['.rc'] + _mc_extensions = ['.mc'] + + # Needed for the filename generation methods provided by the + # base class, CCompiler. + src_extensions = (_c_extensions + _cpp_extensions + + _rc_extensions + _mc_extensions) + res_extension = '.res' + obj_extension = '.obj' + static_lib_extension = '.lib' + shared_lib_extension = '.dll' + static_lib_format = shared_lib_format = '%s%s' + exe_extension = '.exe' + + + def __init__(self, verbose=0, dry_run=0, force=0): + CCompiler.__init__ (self, verbose, dry_run, force) + # target platform (.plat_name is consistent with 'bdist') + self.plat_name = None + self.initialized = False + + def initialize(self, plat_name=None): + # multi-init means we would need to check platform same each time... + assert not self.initialized, "don't init multiple times" + if plat_name is None: + plat_name = get_platform() + # sanity check for platforms to prevent obscure errors later. + if plat_name not in PLAT_TO_VCVARS: + raise DistutilsPlatformError("--plat-name must be one of {}" + .format(tuple(PLAT_TO_VCVARS))) + + # Get the vcvarsall.bat spec for the requested platform. + plat_spec = PLAT_TO_VCVARS[plat_name] + + vc_env = _get_vc_env(plat_spec) + if not vc_env: + raise DistutilsPlatformError("Unable to find a compatible " + "Visual Studio installation.") + + self._paths = vc_env.get('path', '') + paths = self._paths.split(os.pathsep) + self.cc = _find_exe("cl.exe", paths) + self.linker = _find_exe("link.exe", paths) + self.lib = _find_exe("lib.exe", paths) + self.rc = _find_exe("rc.exe", paths) # resource compiler + self.mc = _find_exe("mc.exe", paths) # message compiler + self.mt = _find_exe("mt.exe", paths) # message compiler + + for dir in vc_env.get('include', '').split(os.pathsep): + if dir: + self.add_include_dir(dir.rstrip(os.sep)) + + for dir in vc_env.get('lib', '').split(os.pathsep): + if dir: + self.add_library_dir(dir.rstrip(os.sep)) + + self.preprocess_options = None + # bpo-38597: Always compile with dynamic linking + # Future releases of Python 3.x will include all past + # versions of vcruntime*.dll for compatibility. 
+ self.compile_options = [ + '/nologo', '/Ox', '/W3', '/GL', '/DNDEBUG', '/MD' + ] + + self.compile_options_debug = [ + '/nologo', '/Od', '/MDd', '/Zi', '/W3', '/D_DEBUG' + ] + + ldflags = [ + '/nologo', '/INCREMENTAL:NO', '/LTCG' + ] + + ldflags_debug = [ + '/nologo', '/INCREMENTAL:NO', '/LTCG', '/DEBUG:FULL' + ] + + self.ldflags_exe = [*ldflags, '/MANIFEST:EMBED,ID=1'] + self.ldflags_exe_debug = [*ldflags_debug, '/MANIFEST:EMBED,ID=1'] + self.ldflags_shared = [*ldflags, '/DLL', '/MANIFEST:EMBED,ID=2', '/MANIFESTUAC:NO'] + self.ldflags_shared_debug = [*ldflags_debug, '/DLL', '/MANIFEST:EMBED,ID=2', '/MANIFESTUAC:NO'] + self.ldflags_static = [*ldflags] + self.ldflags_static_debug = [*ldflags_debug] + + self._ldflags = { + (CCompiler.EXECUTABLE, None): self.ldflags_exe, + (CCompiler.EXECUTABLE, False): self.ldflags_exe, + (CCompiler.EXECUTABLE, True): self.ldflags_exe_debug, + (CCompiler.SHARED_OBJECT, None): self.ldflags_shared, + (CCompiler.SHARED_OBJECT, False): self.ldflags_shared, + (CCompiler.SHARED_OBJECT, True): self.ldflags_shared_debug, + (CCompiler.SHARED_LIBRARY, None): self.ldflags_static, + (CCompiler.SHARED_LIBRARY, False): self.ldflags_static, + (CCompiler.SHARED_LIBRARY, True): self.ldflags_static_debug, + } + + self.initialized = True + + # -- Worker methods ------------------------------------------------ + + def object_filenames(self, + source_filenames, + strip_dir=0, + output_dir=''): + ext_map = { + **{ext: self.obj_extension for ext in self.src_extensions}, + **{ext: self.res_extension for ext in self._rc_extensions + self._mc_extensions}, + } + + output_dir = output_dir or '' + + def make_out_path(p): + base, ext = os.path.splitext(p) + if strip_dir: + base = os.path.basename(base) + else: + _, base = os.path.splitdrive(base) + if base.startswith((os.path.sep, os.path.altsep)): + base = base[1:] + try: + # XXX: This may produce absurdly long paths. We should check + # the length of the result and trim base until we fit within + # 260 characters. 
+ return os.path.join(output_dir, base + ext_map[ext]) + except LookupError: + # Better to raise an exception instead of silently continuing + # and later complain about sources and targets having + # different lengths + raise CompileError("Don't know how to compile {}".format(p)) + + return list(map(make_out_path, source_filenames)) + + + def compile(self, sources, + output_dir=None, macros=None, include_dirs=None, debug=0, + extra_preargs=None, extra_postargs=None, depends=None): + + if not self.initialized: + self.initialize() + compile_info = self._setup_compile(output_dir, macros, include_dirs, + sources, depends, extra_postargs) + macros, objects, extra_postargs, pp_opts, build = compile_info + + compile_opts = extra_preargs or [] + compile_opts.append('/c') + if debug: + compile_opts.extend(self.compile_options_debug) + else: + compile_opts.extend(self.compile_options) + + + add_cpp_opts = False + + for obj in objects: + try: + src, ext = build[obj] + except KeyError: + continue + if debug: + # pass the full pathname to MSVC in debug mode, + # this allows the debugger to find the source file + # without asking the user to browse for it + src = os.path.abspath(src) + + if ext in self._c_extensions: + input_opt = "/Tc" + src + elif ext in self._cpp_extensions: + input_opt = "/Tp" + src + add_cpp_opts = True + elif ext in self._rc_extensions: + # compile .RC to .RES file + input_opt = src + output_opt = "/fo" + obj + try: + self.spawn([self.rc] + pp_opts + [output_opt, input_opt]) + except DistutilsExecError as msg: + raise CompileError(msg) + continue + elif ext in self._mc_extensions: + # Compile .MC to .RC file to .RES file. + # * '-h dir' specifies the directory for the + # generated include file + # * '-r dir' specifies the target directory of the + # generated RC file and the binary message resource + # it includes + # + # For now (since there are no options to change this), + # we use the source-directory for the include file and + # the build directory for the RC file and message + # resources. This works at least for win32all. + h_dir = os.path.dirname(src) + rc_dir = os.path.dirname(obj) + try: + # first compile .MC to .RC and .H file + self.spawn([self.mc, '-h', h_dir, '-r', rc_dir, src]) + base, _ = os.path.splitext(os.path.basename (src)) + rc_file = os.path.join(rc_dir, base + '.rc') + # then compile .RC to .RES file + self.spawn([self.rc, "/fo" + obj, rc_file]) + + except DistutilsExecError as msg: + raise CompileError(msg) + continue + else: + # how to handle this file? + raise CompileError("Don't know how to compile {} to {}" + .format(src, obj)) + + args = [self.cc] + compile_opts + pp_opts + if add_cpp_opts: + args.append('/EHsc') + args.append(input_opt) + args.append("/Fo" + obj) + args.extend(extra_postargs) + + try: + self.spawn(args) + except DistutilsExecError as msg: + raise CompileError(msg) + + return objects + + + def create_static_lib(self, + objects, + output_libname, + output_dir=None, + debug=0, + target_lang=None): + + if not self.initialized: + self.initialize() + objects, output_dir = self._fix_object_args(objects, output_dir) + output_filename = self.library_filename(output_libname, + output_dir=output_dir) + + if self._need_link(objects, output_filename): + lib_args = objects + ['/OUT:' + output_filename] + if debug: + pass # XXX what goes here? 
+ try: + log.debug('Executing "%s" %s', self.lib, ' '.join(lib_args)) + self.spawn([self.lib] + lib_args) + except DistutilsExecError as msg: + raise LibError(msg) + else: + log.debug("skipping %s (up-to-date)", output_filename) + + + def link(self, + target_desc, + objects, + output_filename, + output_dir=None, + libraries=None, + library_dirs=None, + runtime_library_dirs=None, + export_symbols=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + build_temp=None, + target_lang=None): + + if not self.initialized: + self.initialize() + objects, output_dir = self._fix_object_args(objects, output_dir) + fixed_args = self._fix_lib_args(libraries, library_dirs, + runtime_library_dirs) + libraries, library_dirs, runtime_library_dirs = fixed_args + + if runtime_library_dirs: + self.warn("I don't know what to do with 'runtime_library_dirs': " + + str(runtime_library_dirs)) + + lib_opts = gen_lib_options(self, + library_dirs, runtime_library_dirs, + libraries) + if output_dir is not None: + output_filename = os.path.join(output_dir, output_filename) + + if self._need_link(objects, output_filename): + ldflags = self._ldflags[target_desc, debug] + + export_opts = ["/EXPORT:" + sym for sym in (export_symbols or [])] + + ld_args = (ldflags + lib_opts + export_opts + + objects + ['/OUT:' + output_filename]) + + # The MSVC linker generates .lib and .exp files, which cannot be + # suppressed by any linker switches. The .lib files may even be + # needed! Make sure they are generated in the temporary build + # directory. Since they have different names for debug and release + # builds, they can go into the same directory. + build_temp = os.path.dirname(objects[0]) + if export_symbols is not None: + (dll_name, dll_ext) = os.path.splitext( + os.path.basename(output_filename)) + implib_file = os.path.join( + build_temp, + self.library_filename(dll_name)) + ld_args.append ('/IMPLIB:' + implib_file) + + if extra_preargs: + ld_args[:0] = extra_preargs + if extra_postargs: + ld_args.extend(extra_postargs) + + output_dir = os.path.dirname(os.path.abspath(output_filename)) + self.mkpath(output_dir) + try: + log.debug('Executing "%s" %s', self.linker, ' '.join(ld_args)) + self.spawn([self.linker] + ld_args) + except DistutilsExecError as msg: + raise LinkError(msg) + else: + log.debug("skipping %s (up-to-date)", output_filename) + + def spawn(self, cmd): + old_path = os.getenv('path') + try: + os.environ['path'] = self._paths + return super().spawn(cmd) + finally: + os.environ['path'] = old_path + + # -- Miscellaneous methods ----------------------------------------- + # These are all used by the 'gen_lib_options() function, in + # ccompiler.py. + + def library_dir_option(self, dir): + return "/LIBPATH:" + dir + + def runtime_library_dir_option(self, dir): + raise DistutilsPlatformError( + "don't know how to set runtime library search path for MSVC") + + def library_option(self, lib): + return self.library_filename(lib) + + def find_library_file(self, dirs, lib, debug=0): + # Prefer a debugging library if found (and requested), but deal + # with it if we don't have one. 
+ if debug: + try_names = [lib + "_d", lib] + else: + try_names = [lib] + for dir in dirs: + for name in try_names: + libfile = os.path.join(dir, self.library_filename(name)) + if os.path.isfile(libfile): + return libfile + else: + # Oops, didn't find it in *any* of 'dirs' + return None diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/archive_util.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/archive_util.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/archive_util.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/archive_util.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,256 @@ +"""distutils.archive_util + +Utility functions for creating archive files (tarballs, zip files, +that sort of thing).""" + +import os +from warnings import warn +import sys + +try: + import zipfile +except ImportError: + zipfile = None + + +from distutils.errors import DistutilsExecError +from distutils.spawn import spawn +from distutils.dir_util import mkpath +from distutils import log + +try: + from pwd import getpwnam +except ImportError: + getpwnam = None + +try: + from grp import getgrnam +except ImportError: + getgrnam = None + +def _get_gid(name): + """Returns a gid, given a group name.""" + if getgrnam is None or name is None: + return None + try: + result = getgrnam(name) + except KeyError: + result = None + if result is not None: + return result[2] + return None + +def _get_uid(name): + """Returns an uid, given a user name.""" + if getpwnam is None or name is None: + return None + try: + result = getpwnam(name) + except KeyError: + result = None + if result is not None: + return result[2] + return None + +def make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0, + owner=None, group=None): + """Create a (possibly compressed) tar file from all the files under + 'base_dir'. + + 'compress' must be "gzip" (the default), "bzip2", "xz", "compress", or + None. ("compress" will be deprecated in Python 3.2) + + 'owner' and 'group' can be used to define an owner and a group for the + archive that is being built. If not provided, the current owner and group + will be used. + + The output tar file will be named 'base_dir' + ".tar", possibly plus + the appropriate compression extension (".gz", ".bz2", ".xz" or ".Z"). + + Returns the output filename. 
+ """ + tar_compression = {'gzip': 'gz', 'bzip2': 'bz2', 'xz': 'xz', None: '', + 'compress': ''} + compress_ext = {'gzip': '.gz', 'bzip2': '.bz2', 'xz': '.xz', + 'compress': '.Z'} + + # flags for compression program, each element of list will be an argument + if compress is not None and compress not in compress_ext.keys(): + raise ValueError( + "bad value for 'compress': must be None, 'gzip', 'bzip2', " + "'xz' or 'compress'") + + archive_name = base_name + '.tar' + if compress != 'compress': + archive_name += compress_ext.get(compress, '') + + mkpath(os.path.dirname(archive_name), dry_run=dry_run) + + # creating the tarball + import tarfile # late import so Python build itself doesn't break + + log.info('Creating tar archive') + + uid = _get_uid(owner) + gid = _get_gid(group) + + def _set_uid_gid(tarinfo): + if gid is not None: + tarinfo.gid = gid + tarinfo.gname = group + if uid is not None: + tarinfo.uid = uid + tarinfo.uname = owner + return tarinfo + + if not dry_run: + tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress]) + try: + tar.add(base_dir, filter=_set_uid_gid) + finally: + tar.close() + + # compression using `compress` + if compress == 'compress': + warn("'compress' will be deprecated.", PendingDeprecationWarning) + # the option varies depending on the platform + compressed_name = archive_name + compress_ext[compress] + if sys.platform == 'win32': + cmd = [compress, archive_name, compressed_name] + else: + cmd = [compress, '-f', archive_name] + spawn(cmd, dry_run=dry_run) + return compressed_name + + return archive_name + +def make_zipfile(base_name, base_dir, verbose=0, dry_run=0): + """Create a zip file from all the files under 'base_dir'. + + The output zip file will be named 'base_name' + ".zip". Uses either the + "zipfile" Python module (if available) or the InfoZIP "zip" utility + (if installed and found on the default search path). If neither tool is + available, raises DistutilsExecError. Returns the name of the output zip + file. + """ + zip_filename = base_name + ".zip" + mkpath(os.path.dirname(zip_filename), dry_run=dry_run) + + # If zipfile module is not available, try spawning an external + # 'zip' command. + if zipfile is None: + if verbose: + zipoptions = "-r" + else: + zipoptions = "-rq" + + try: + spawn(["zip", zipoptions, zip_filename, base_dir], + dry_run=dry_run) + except DistutilsExecError: + # XXX really should distinguish between "couldn't find + # external 'zip' command" and "zip failed". 
+ raise DistutilsExecError(("unable to create zip file '%s': " + "could neither import the 'zipfile' module nor " + "find a standalone zip utility") % zip_filename) + + else: + log.info("creating '%s' and adding '%s' to it", + zip_filename, base_dir) + + if not dry_run: + try: + zip = zipfile.ZipFile(zip_filename, "w", + compression=zipfile.ZIP_DEFLATED) + except RuntimeError: + zip = zipfile.ZipFile(zip_filename, "w", + compression=zipfile.ZIP_STORED) + + with zip: + if base_dir != os.curdir: + path = os.path.normpath(os.path.join(base_dir, '')) + zip.write(path, path) + log.info("adding '%s'", path) + for dirpath, dirnames, filenames in os.walk(base_dir): + for name in dirnames: + path = os.path.normpath(os.path.join(dirpath, name, '')) + zip.write(path, path) + log.info("adding '%s'", path) + for name in filenames: + path = os.path.normpath(os.path.join(dirpath, name)) + if os.path.isfile(path): + zip.write(path, path) + log.info("adding '%s'", path) + + return zip_filename + +ARCHIVE_FORMATS = { + 'gztar': (make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"), + 'bztar': (make_tarball, [('compress', 'bzip2')], "bzip2'ed tar-file"), + 'xztar': (make_tarball, [('compress', 'xz')], "xz'ed tar-file"), + 'ztar': (make_tarball, [('compress', 'compress')], "compressed tar file"), + 'tar': (make_tarball, [('compress', None)], "uncompressed tar file"), + 'zip': (make_zipfile, [],"ZIP file") + } + +def check_archive_formats(formats): + """Returns the first format from the 'format' list that is unknown. + + If all formats are known, returns None + """ + for format in formats: + if format not in ARCHIVE_FORMATS: + return format + return None + +def make_archive(base_name, format, root_dir=None, base_dir=None, verbose=0, + dry_run=0, owner=None, group=None): + """Create an archive file (eg. zip or tar). + + 'base_name' is the name of the file to create, minus any format-specific + extension; 'format' is the archive format: one of "zip", "tar", "gztar", + "bztar", "xztar", or "ztar". + + 'root_dir' is a directory that will be the root directory of the + archive; ie. we typically chdir into 'root_dir' before creating the + archive. 'base_dir' is the directory where we start archiving from; + ie. 'base_dir' will be the common prefix of all files and + directories in the archive. 'root_dir' and 'base_dir' both default + to the current directory. Returns the name of the archive file. + + 'owner' and 'group' are used when creating a tar archive. By default, + uses the current owner and group. 
+ """ + save_cwd = os.getcwd() + if root_dir is not None: + log.debug("changing into '%s'", root_dir) + base_name = os.path.abspath(base_name) + if not dry_run: + os.chdir(root_dir) + + if base_dir is None: + base_dir = os.curdir + + kwargs = {'dry_run': dry_run} + + try: + format_info = ARCHIVE_FORMATS[format] + except KeyError: + raise ValueError("unknown archive format '%s'" % format) + + func = format_info[0] + for arg, val in format_info[1]: + kwargs[arg] = val + + if format != 'zip': + kwargs['owner'] = owner + kwargs['group'] = group + + try: + filename = func(base_name, base_dir, **kwargs) + finally: + if root_dir is not None: + log.debug("changing back to '%s'", save_cwd) + os.chdir(save_cwd) + + return filename diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/bcppcompiler.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/bcppcompiler.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/bcppcompiler.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/bcppcompiler.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,393 @@ +"""distutils.bcppcompiler + +Contains BorlandCCompiler, an implementation of the abstract CCompiler class +for the Borland C++ compiler. +""" + +# This implementation by Lyle Johnson, based on the original msvccompiler.py +# module and using the directions originally published by Gordon Williams. + +# XXX looks like there's a LOT of overlap between these two classes: +# someone should sit down and factor out the common code as +# WindowsCCompiler! --GPW + + +import os +from distutils.errors import \ + DistutilsExecError, \ + CompileError, LibError, LinkError, UnknownFileError +from distutils.ccompiler import \ + CCompiler, gen_preprocess_options +from distutils.file_util import write_file +from distutils.dep_util import newer +from distutils import log + +class BCPPCompiler(CCompiler) : + """Concrete class that implements an interface to the Borland C/C++ + compiler, as defined by the CCompiler abstract class. + """ + + compiler_type = 'bcpp' + + # Just set this so CCompiler's constructor doesn't barf. We currently + # don't use the 'set_executables()' bureaucracy provided by CCompiler, + # as it really isn't necessary for this sort of single-compiler class. + # Would be nice to have a consistent interface with UnixCCompiler, + # though, so it's worth thinking about. + executables = {} + + # Private class data (need to distinguish C from C++ source for compiler) + _c_extensions = ['.c'] + _cpp_extensions = ['.cc', '.cpp', '.cxx'] + + # Needed for the filename generation methods provided by the + # base class, CCompiler. + src_extensions = _c_extensions + _cpp_extensions + obj_extension = '.obj' + static_lib_extension = '.lib' + shared_lib_extension = '.dll' + static_lib_format = shared_lib_format = '%s%s' + exe_extension = '.exe' + + + def __init__ (self, + verbose=0, + dry_run=0, + force=0): + + CCompiler.__init__ (self, verbose, dry_run, force) + + # These executables are assumed to all be in the path. + # Borland doesn't seem to use any special registry settings to + # indicate their installation locations. 
+ + self.cc = "bcc32.exe" + self.linker = "ilink32.exe" + self.lib = "tlib.exe" + + self.preprocess_options = None + self.compile_options = ['/tWM', '/O2', '/q', '/g0'] + self.compile_options_debug = ['/tWM', '/Od', '/q', '/g0'] + + self.ldflags_shared = ['/Tpd', '/Gn', '/q', '/x'] + self.ldflags_shared_debug = ['/Tpd', '/Gn', '/q', '/x'] + self.ldflags_static = [] + self.ldflags_exe = ['/Gn', '/q', '/x'] + self.ldflags_exe_debug = ['/Gn', '/q', '/x','/r'] + + + # -- Worker methods ------------------------------------------------ + + def compile(self, sources, + output_dir=None, macros=None, include_dirs=None, debug=0, + extra_preargs=None, extra_postargs=None, depends=None): + + macros, objects, extra_postargs, pp_opts, build = \ + self._setup_compile(output_dir, macros, include_dirs, sources, + depends, extra_postargs) + compile_opts = extra_preargs or [] + compile_opts.append ('-c') + if debug: + compile_opts.extend (self.compile_options_debug) + else: + compile_opts.extend (self.compile_options) + + for obj in objects: + try: + src, ext = build[obj] + except KeyError: + continue + # XXX why do the normpath here? + src = os.path.normpath(src) + obj = os.path.normpath(obj) + # XXX _setup_compile() did a mkpath() too but before the normpath. + # Is it possible to skip the normpath? + self.mkpath(os.path.dirname(obj)) + + if ext == '.res': + # This is already a binary file -- skip it. + continue # the 'for' loop + if ext == '.rc': + # This needs to be compiled to a .res file -- do it now. + try: + self.spawn (["brcc32", "-fo", obj, src]) + except DistutilsExecError as msg: + raise CompileError(msg) + continue # the 'for' loop + + # The next two are both for the real compiler. + if ext in self._c_extensions: + input_opt = "" + elif ext in self._cpp_extensions: + input_opt = "-P" + else: + # Unknown file type -- no extra options. The compiler + # will probably fail, but let it just in case this is a + # file the compiler recognizes even if we don't. + input_opt = "" + + output_opt = "-o" + obj + + # Compiler command line syntax is: "bcc32 [options] file(s)". + # Note that the source file names must appear at the end of + # the command line. + try: + self.spawn ([self.cc] + compile_opts + pp_opts + + [input_opt, output_opt] + + extra_postargs + [src]) + except DistutilsExecError as msg: + raise CompileError(msg) + + return objects + + # compile () + + + def create_static_lib (self, + objects, + output_libname, + output_dir=None, + debug=0, + target_lang=None): + + (objects, output_dir) = self._fix_object_args (objects, output_dir) + output_filename = \ + self.library_filename (output_libname, output_dir=output_dir) + + if self._need_link (objects, output_filename): + lib_args = [output_filename, '/u'] + objects + if debug: + pass # XXX what goes here? + try: + self.spawn ([self.lib] + lib_args) + except DistutilsExecError as msg: + raise LibError(msg) + else: + log.debug("skipping %s (up-to-date)", output_filename) + + # create_static_lib () + + + def link (self, + target_desc, + objects, + output_filename, + output_dir=None, + libraries=None, + library_dirs=None, + runtime_library_dirs=None, + export_symbols=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + build_temp=None, + target_lang=None): + + # XXX this ignores 'build_temp'! 
should follow the lead of + # msvccompiler.py + + (objects, output_dir) = self._fix_object_args (objects, output_dir) + (libraries, library_dirs, runtime_library_dirs) = \ + self._fix_lib_args (libraries, library_dirs, runtime_library_dirs) + + if runtime_library_dirs: + log.warn("I don't know what to do with 'runtime_library_dirs': %s", + str(runtime_library_dirs)) + + if output_dir is not None: + output_filename = os.path.join (output_dir, output_filename) + + if self._need_link (objects, output_filename): + + # Figure out linker args based on type of target. + if target_desc == CCompiler.EXECUTABLE: + startup_obj = 'c0w32' + if debug: + ld_args = self.ldflags_exe_debug[:] + else: + ld_args = self.ldflags_exe[:] + else: + startup_obj = 'c0d32' + if debug: + ld_args = self.ldflags_shared_debug[:] + else: + ld_args = self.ldflags_shared[:] + + + # Create a temporary exports file for use by the linker + if export_symbols is None: + def_file = '' + else: + head, tail = os.path.split (output_filename) + modname, ext = os.path.splitext (tail) + temp_dir = os.path.dirname(objects[0]) # preserve tree structure + def_file = os.path.join (temp_dir, '%s.def' % modname) + contents = ['EXPORTS'] + for sym in (export_symbols or []): + contents.append(' %s=_%s' % (sym, sym)) + self.execute(write_file, (def_file, contents), + "writing %s" % def_file) + + # Borland C++ has problems with '/' in paths + objects2 = map(os.path.normpath, objects) + # split objects in .obj and .res files + # Borland C++ needs them at different positions in the command line + objects = [startup_obj] + resources = [] + for file in objects2: + (base, ext) = os.path.splitext(os.path.normcase(file)) + if ext == '.res': + resources.append(file) + else: + objects.append(file) + + + for l in library_dirs: + ld_args.append("/L%s" % os.path.normpath(l)) + ld_args.append("/L.") # we sometimes use relative paths + + # list of object files + ld_args.extend(objects) + + # XXX the command-line syntax for Borland C++ is a bit wonky; + # certain filenames are jammed together in one big string, but + # comma-delimited. This doesn't mesh too well with the + # Unix-centric attitude (with a DOS/Windows quoting hack) of + # 'spawn()', so constructing the argument list is a bit + # awkward. Note that doing the obvious thing and jamming all + # the filenames and commas into one argument would be wrong, + # because 'spawn()' would quote any filenames with spaces in + # them. Arghghh!. Apparently it works fine as coded... 
+ + # name of dll/exe file + ld_args.extend([',',output_filename]) + # no map file and start libraries + ld_args.append(',,') + + for lib in libraries: + # see if we find it and if there is a bcpp specific lib + # (xxx_bcpp.lib) + libfile = self.find_library_file(library_dirs, lib, debug) + if libfile is None: + ld_args.append(lib) + # probably a BCPP internal library -- don't warn + else: + # full name which prefers bcpp_xxx.lib over xxx.lib + ld_args.append(libfile) + + # some default libraries + ld_args.append ('import32') + ld_args.append ('cw32mt') + + # def file for export symbols + ld_args.extend([',',def_file]) + # add resource files + ld_args.append(',') + ld_args.extend(resources) + + + if extra_preargs: + ld_args[:0] = extra_preargs + if extra_postargs: + ld_args.extend(extra_postargs) + + self.mkpath (os.path.dirname (output_filename)) + try: + self.spawn ([self.linker] + ld_args) + except DistutilsExecError as msg: + raise LinkError(msg) + + else: + log.debug("skipping %s (up-to-date)", output_filename) + + # link () + + # -- Miscellaneous methods ----------------------------------------- + + + def find_library_file (self, dirs, lib, debug=0): + # List of effective library names to try, in order of preference: + # xxx_bcpp.lib is better than xxx.lib + # and xxx_d.lib is better than xxx.lib if debug is set + # + # The "_bcpp" suffix is to handle a Python installation for people + # with multiple compilers (primarily Distutils hackers, I suspect + # ;-). The idea is they'd have one static library for each + # compiler they care about, since (almost?) every Windows compiler + # seems to have a different format for static libraries. + if debug: + dlib = (lib + "_d") + try_names = (dlib + "_bcpp", lib + "_bcpp", dlib, lib) + else: + try_names = (lib + "_bcpp", lib) + + for dir in dirs: + for name in try_names: + libfile = os.path.join(dir, self.library_filename(name)) + if os.path.exists(libfile): + return libfile + else: + # Oops, didn't find it in *any* of 'dirs' + return None + + # overwrite the one from CCompiler to support rc and res-files + def object_filenames (self, + source_filenames, + strip_dir=0, + output_dir=''): + if output_dir is None: output_dir = '' + obj_names = [] + for src_name in source_filenames: + # use normcase to make sure '.rc' is really '.rc' and not '.RC' + (base, ext) = os.path.splitext (os.path.normcase(src_name)) + if ext not in (self.src_extensions + ['.rc','.res']): + raise UnknownFileError("unknown file type '%s' (from '%s')" % \ + (ext, src_name)) + if strip_dir: + base = os.path.basename (base) + if ext == '.res': + # these can go unchanged + obj_names.append (os.path.join (output_dir, base + ext)) + elif ext == '.rc': + # these need to be compiled to .res-files + obj_names.append (os.path.join (output_dir, base + '.res')) + else: + obj_names.append (os.path.join (output_dir, + base + self.obj_extension)) + return obj_names + + # object_filenames () + + def preprocess (self, + source, + output_file=None, + macros=None, + include_dirs=None, + extra_preargs=None, + extra_postargs=None): + + (_, macros, include_dirs) = \ + self._fix_compile_args(None, macros, include_dirs) + pp_opts = gen_preprocess_options(macros, include_dirs) + pp_args = ['cpp32.exe'] + pp_opts + if output_file is not None: + pp_args.append('-o' + output_file) + if extra_preargs: + pp_args[:0] = extra_preargs + if extra_postargs: + pp_args.extend(extra_postargs) + pp_args.append(source) + + # We need to preprocess: either we're being forced to, or the + # source file is newer 
than the target (or the target doesn't + # exist). + if self.force or output_file is None or newer(source, output_file): + if output_file: + self.mkpath(os.path.dirname(output_file)) + try: + self.spawn(pp_args) + except DistutilsExecError as msg: + print(msg) + raise CompileError(msg) + + # preprocess() diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/ccompiler.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/ccompiler.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/ccompiler.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/ccompiler.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,1116 @@ +"""distutils.ccompiler + +Contains CCompiler, an abstract base class that defines the interface +for the Distutils compiler abstraction model.""" + +import sys, os, re +from distutils.errors import * +from distutils.spawn import spawn +from distutils.file_util import move_file +from distutils.dir_util import mkpath +from distutils.dep_util import newer_group +from distutils.util import split_quoted, execute +from distutils import log + +class CCompiler: + """Abstract base class to define the interface that must be implemented + by real compiler classes. Also has some utility methods used by + several compiler classes. + + The basic idea behind a compiler abstraction class is that each + instance can be used for all the compile/link steps in building a + single project. Thus, attributes common to all of those compile and + link steps -- include directories, macros to define, libraries to link + against, etc. -- are attributes of the compiler instance. To allow for + variability in how individual files are treated, most of those + attributes may be varied on a per-compilation or per-link basis. + """ + + # 'compiler_type' is a class attribute that identifies this class. It + # keeps code that wants to know what kind of compiler it's dealing with + # from having to import all possible compiler classes just to do an + # 'isinstance'. In concrete CCompiler subclasses, 'compiler_type' + # should really, really be one of the keys of the 'compiler_class' + # dictionary (see below -- used by the 'new_compiler()' factory + # function) -- authors of new compiler interface classes are + # responsible for updating 'compiler_class'! + compiler_type = None + + # XXX things not handled by this compiler abstraction model: + # * client can't provide additional options for a compiler, + # e.g. warning, optimization, debugging flags. Perhaps this + # should be the domain of concrete compiler abstraction classes + # (UnixCCompiler, MSVCCompiler, etc.) -- or perhaps the base + # class should have methods for the common ones. + # * can't completely override the include or library searchg + # path, ie. no "cc -I -Idir1 -Idir2" or "cc -L -Ldir1 -Ldir2". + # I'm not sure how widely supported this is even by Unix + # compilers, much less on other platforms. And I'm even less + # sure how useful it is; maybe for cross-compiling, but + # support for that is a ways off. (And anyways, cross + # compilers probably have a dedicated binary with the + # right paths compiled in. I hope.) + # * can't do really freaky things with the library list/library + # dirs, e.g. "-Ldir1 -lfoo -Ldir2 -lfoo" to link against + # different versions of libfoo.a in different locations. I + # think this is useless without the ability to null out the + # library search path anyways. 
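The class docstring above describes a per-instance workflow: bookkeeping calls configure the compiler object once, and the compile/link steps then reuse that state. A minimal sketch of that workflow through the new_compiler() factory mentioned above, assuming a working C toolchain and a hypothetical example.c in the current directory:

from distutils.ccompiler import new_compiler

cc = new_compiler()                      # concrete subclass picked for this platform
cc.define_macro("EXAMPLE_BUILD", "1")    # applies to every compile driven by cc
cc.add_include_dir("include")            # hypothetical include directory
objects = cc.compile(["example.c"], output_dir="build")
cc.link_shared_object(objects, "example.so", output_dir="build")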
+ + + # Subclasses that rely on the standard filename generation methods + # implemented below should override these; see the comment near + # those methods ('object_filenames()' et. al.) for details: + src_extensions = None # list of strings + obj_extension = None # string + static_lib_extension = None + shared_lib_extension = None # string + static_lib_format = None # format string + shared_lib_format = None # prob. same as static_lib_format + exe_extension = None # string + + # Default language settings. language_map is used to detect a source + # file or Extension target language, checking source filenames. + # language_order is used to detect the language precedence, when deciding + # what language to use when mixing source types. For example, if some + # extension has two files with ".c" extension, and one with ".cpp", it + # is still linked as c++. + language_map = {".c" : "c", + ".cc" : "c++", + ".cpp" : "c++", + ".cxx" : "c++", + ".m" : "objc", + } + language_order = ["c++", "objc", "c"] + + def __init__(self, verbose=0, dry_run=0, force=0): + self.dry_run = dry_run + self.force = force + self.verbose = verbose + + # 'output_dir': a common output directory for object, library, + # shared object, and shared library files + self.output_dir = None + + # 'macros': a list of macro definitions (or undefinitions). A + # macro definition is a 2-tuple (name, value), where the value is + # either a string or None (no explicit value). A macro + # undefinition is a 1-tuple (name,). + self.macros = [] + + # 'include_dirs': a list of directories to search for include files + self.include_dirs = [] + + # 'libraries': a list of libraries to include in any link + # (library names, not filenames: eg. "foo" not "libfoo.a") + self.libraries = [] + + # 'library_dirs': a list of directories to search for libraries + self.library_dirs = [] + + # 'runtime_library_dirs': a list of directories to search for + # shared libraries/objects at runtime + self.runtime_library_dirs = [] + + # 'objects': a list of object files (or similar, such as explicitly + # named library files) to include on any link + self.objects = [] + + for key in self.executables.keys(): + self.set_executable(key, self.executables[key]) + + def set_executables(self, **kwargs): + """Define the executables (and options for them) that will be run + to perform the various stages of compilation. The exact set of + executables that may be specified here depends on the compiler + class (via the 'executables' class attribute), but most will have: + compiler the C/C++ compiler + linker_so linker used to create shared objects and libraries + linker_exe linker used to create binary executables + archiver static library creator + + On platforms with a command-line (Unix, DOS/Windows), each of these + is a string that will be split into executable name and (optional) + list of arguments. (Splitting the string is done similarly to how + Unix shells operate: words are delimited by spaces, but quotes and + backslashes can override this. See + 'distutils.util.split_quoted()'.) + """ + + # Note that some CCompiler implementation classes will define class + # attributes 'cpp', 'cc', etc. with hard-coded executable names; + # this is appropriate when a compiler class is for exactly one + # compiler/OS combination (eg. MSVCCompiler). Other compiler + # classes (UnixCCompiler, in particular) are driven by information + # discovered at run-time, since there are many different ways to do + # basically the same things with Unix C compilers. 
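As a quick illustration of the string handling that set_executables() documents (a sketch, assuming the Unix compiler class is importable on the host): each string value is split like a shell command line via distutils.util.split_quoted(), so quoting keeps an argument together.

from distutils.ccompiler import new_compiler

cc = new_compiler(compiler="unix")
cc.set_executables(
    compiler="gcc -O2 -Wall",            # stored as ['gcc', '-O2', '-Wall']
    linker_so='gcc -shared "-Wl,-O1"',   # the quoted piece stays one argument
)
print(cc.compiler)                       # ['gcc', '-O2', '-Wall']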
+ + for key in kwargs: + if key not in self.executables: + raise ValueError("unknown executable '%s' for class %s" % + (key, self.__class__.__name__)) + self.set_executable(key, kwargs[key]) + + def set_executable(self, key, value): + if isinstance(value, str): + setattr(self, key, split_quoted(value)) + else: + setattr(self, key, value) + + def _find_macro(self, name): + i = 0 + for defn in self.macros: + if defn[0] == name: + return i + i += 1 + return None + + def _check_macro_definitions(self, definitions): + """Ensures that every element of 'definitions' is a valid macro + definition, ie. either (name,value) 2-tuple or a (name,) tuple. Do + nothing if all definitions are OK, raise TypeError otherwise. + """ + for defn in definitions: + if not (isinstance(defn, tuple) and + (len(defn) in (1, 2) and + (isinstance (defn[1], str) or defn[1] is None)) and + isinstance (defn[0], str)): + raise TypeError(("invalid macro definition '%s': " % defn) + \ + "must be tuple (string,), (string, string), or " + \ + "(string, None)") + + + # -- Bookkeeping methods ------------------------------------------- + + def define_macro(self, name, value=None): + """Define a preprocessor macro for all compilations driven by this + compiler object. The optional parameter 'value' should be a + string; if it is not supplied, then the macro will be defined + without an explicit value and the exact outcome depends on the + compiler used (XXX true? does ANSI say anything about this?) + """ + # Delete from the list of macro definitions/undefinitions if + # already there (so that this one will take precedence). + i = self._find_macro (name) + if i is not None: + del self.macros[i] + + self.macros.append((name, value)) + + def undefine_macro(self, name): + """Undefine a preprocessor macro for all compilations driven by + this compiler object. If the same macro is defined by + 'define_macro()' and undefined by 'undefine_macro()' the last call + takes precedence (including multiple redefinitions or + undefinitions). If the macro is redefined/undefined on a + per-compilation basis (ie. in the call to 'compile()'), then that + takes precedence. + """ + # Delete from the list of macro definitions/undefinitions if + # already there (so that this one will take precedence). + i = self._find_macro (name) + if i is not None: + del self.macros[i] + + undefn = (name,) + self.macros.append(undefn) + + def add_include_dir(self, dir): + """Add 'dir' to the list of directories that will be searched for + header files. The compiler is instructed to search directories in + the order in which they are supplied by successive calls to + 'add_include_dir()'. + """ + self.include_dirs.append(dir) + + def set_include_dirs(self, dirs): + """Set the list of directories that will be searched to 'dirs' (a + list of strings). Overrides any preceding calls to + 'add_include_dir()'; subsequence calls to 'add_include_dir()' add + to the list passed to 'set_include_dirs()'. This does not affect + any list of standard include directories that the compiler may + search by default. + """ + self.include_dirs = dirs[:] + + def add_library(self, libname): + """Add 'libname' to the list of libraries that will be included in + all links driven by this compiler object. Note that 'libname' + should *not* be the name of a file containing a library, but the + name of the library itself: the actual filename will be inferred by + the linker, the compiler, or the compiler class (depending on the + platform). 
+ + The linker will be instructed to link against libraries in the + order they were supplied to 'add_library()' and/or + 'set_libraries()'. It is perfectly valid to duplicate library + names; the linker will be instructed to link against libraries as + many times as they are mentioned. + """ + self.libraries.append(libname) + + def set_libraries(self, libnames): + """Set the list of libraries to be included in all links driven by + this compiler object to 'libnames' (a list of strings). This does + not affect any standard system libraries that the linker may + include by default. + """ + self.libraries = libnames[:] + + def add_library_dir(self, dir): + """Add 'dir' to the list of directories that will be searched for + libraries specified to 'add_library()' and 'set_libraries()'. The + linker will be instructed to search for libraries in the order they + are supplied to 'add_library_dir()' and/or 'set_library_dirs()'. + """ + self.library_dirs.append(dir) + + def set_library_dirs(self, dirs): + """Set the list of library search directories to 'dirs' (a list of + strings). This does not affect any standard library search path + that the linker may search by default. + """ + self.library_dirs = dirs[:] + + def add_runtime_library_dir(self, dir): + """Add 'dir' to the list of directories that will be searched for + shared libraries at runtime. + """ + self.runtime_library_dirs.append(dir) + + def set_runtime_library_dirs(self, dirs): + """Set the list of directories to search for shared libraries at + runtime to 'dirs' (a list of strings). This does not affect any + standard search path that the runtime linker may search by + default. + """ + self.runtime_library_dirs = dirs[:] + + def add_link_object(self, object): + """Add 'object' to the list of object files (or analogues, such as + explicitly named library files or the output of "resource + compilers") to be included in every link driven by this compiler + object. + """ + self.objects.append(object) + + def set_link_objects(self, objects): + """Set the list of object files (or analogues) to be included in + every link to 'objects'. This does not affect any standard object + files that the linker may include by default (such as system + libraries). 
+ """ + self.objects = objects[:] + + + # -- Private utility methods -------------------------------------- + # (here for the convenience of subclasses) + + # Helper method to prep compiler in subclass compile() methods + + def _setup_compile(self, outdir, macros, incdirs, sources, depends, + extra): + """Process arguments and decide which source files to compile.""" + if outdir is None: + outdir = self.output_dir + elif not isinstance(outdir, str): + raise TypeError("'output_dir' must be a string or None") + + if macros is None: + macros = self.macros + elif isinstance(macros, list): + macros = macros + (self.macros or []) + else: + raise TypeError("'macros' (if supplied) must be a list of tuples") + + if incdirs is None: + incdirs = self.include_dirs + elif isinstance(incdirs, (list, tuple)): + incdirs = list(incdirs) + (self.include_dirs or []) + else: + raise TypeError( + "'include_dirs' (if supplied) must be a list of strings") + + if extra is None: + extra = [] + + # Get the list of expected output (object) files + objects = self.object_filenames(sources, strip_dir=0, + output_dir=outdir) + assert len(objects) == len(sources) + + pp_opts = gen_preprocess_options(macros, incdirs) + + build = {} + for i in range(len(sources)): + src = sources[i] + obj = objects[i] + ext = os.path.splitext(src)[1] + self.mkpath(os.path.dirname(obj)) + build[obj] = (src, ext) + + return macros, objects, extra, pp_opts, build + + def _get_cc_args(self, pp_opts, debug, before): + # works for unixccompiler, cygwinccompiler + cc_args = pp_opts + ['-c'] + if debug: + cc_args[:0] = ['-g'] + if before: + cc_args[:0] = before + return cc_args + + def _fix_compile_args(self, output_dir, macros, include_dirs): + """Typecheck and fix-up some of the arguments to the 'compile()' + method, and return fixed-up values. Specifically: if 'output_dir' + is None, replaces it with 'self.output_dir'; ensures that 'macros' + is a list, and augments it with 'self.macros'; ensures that + 'include_dirs' is a list, and augments it with 'self.include_dirs'. + Guarantees that the returned values are of the correct type, + i.e. for 'output_dir' either string or None, and for 'macros' and + 'include_dirs' either list or None. + """ + if output_dir is None: + output_dir = self.output_dir + elif not isinstance(output_dir, str): + raise TypeError("'output_dir' must be a string or None") + + if macros is None: + macros = self.macros + elif isinstance(macros, list): + macros = macros + (self.macros or []) + else: + raise TypeError("'macros' (if supplied) must be a list of tuples") + + if include_dirs is None: + include_dirs = self.include_dirs + elif isinstance(include_dirs, (list, tuple)): + include_dirs = list(include_dirs) + (self.include_dirs or []) + else: + raise TypeError( + "'include_dirs' (if supplied) must be a list of strings") + + return output_dir, macros, include_dirs + + def _prep_compile(self, sources, output_dir, depends=None): + """Decide which source files must be recompiled. + + Determine the list of object files corresponding to 'sources', + and figure out which ones really need to be recompiled. + Return a list of all object files and a dictionary telling + which source files can be skipped. + """ + # Get the list of expected output (object) files + objects = self.object_filenames(sources, output_dir=output_dir) + assert len(objects) == len(sources) + + # Return an empty dict for the "which source files can be skipped" + # return value to preserve API compatibility. 
+ return objects, {} + + def _fix_object_args(self, objects, output_dir): + """Typecheck and fix up some arguments supplied to various methods. + Specifically: ensure that 'objects' is a list; if output_dir is + None, replace with self.output_dir. Return fixed versions of + 'objects' and 'output_dir'. + """ + if not isinstance(objects, (list, tuple)): + raise TypeError("'objects' must be a list or tuple of strings") + objects = list(objects) + + if output_dir is None: + output_dir = self.output_dir + elif not isinstance(output_dir, str): + raise TypeError("'output_dir' must be a string or None") + + return (objects, output_dir) + + def _fix_lib_args(self, libraries, library_dirs, runtime_library_dirs): + """Typecheck and fix up some of the arguments supplied to the + 'link_*' methods. Specifically: ensure that all arguments are + lists, and augment them with their permanent versions + (eg. 'self.libraries' augments 'libraries'). Return a tuple with + fixed versions of all arguments. + """ + if libraries is None: + libraries = self.libraries + elif isinstance(libraries, (list, tuple)): + libraries = list (libraries) + (self.libraries or []) + else: + raise TypeError( + "'libraries' (if supplied) must be a list of strings") + + if library_dirs is None: + library_dirs = self.library_dirs + elif isinstance(library_dirs, (list, tuple)): + library_dirs = list (library_dirs) + (self.library_dirs or []) + else: + raise TypeError( + "'library_dirs' (if supplied) must be a list of strings") + + if runtime_library_dirs is None: + runtime_library_dirs = self.runtime_library_dirs + elif isinstance(runtime_library_dirs, (list, tuple)): + runtime_library_dirs = (list(runtime_library_dirs) + + (self.runtime_library_dirs or [])) + else: + raise TypeError("'runtime_library_dirs' (if supplied) " + "must be a list of strings") + + return (libraries, library_dirs, runtime_library_dirs) + + def _need_link(self, objects, output_file): + """Return true if we need to relink the files listed in 'objects' + to recreate 'output_file'. + """ + if self.force: + return True + else: + if self.dry_run: + newer = newer_group (objects, output_file, missing='newer') + else: + newer = newer_group (objects, output_file) + return newer + + def detect_language(self, sources): + """Detect the language of a given file, or list of files. Uses + language_map, and language_order to do the job. + """ + if not isinstance(sources, list): + sources = [sources] + lang = None + index = len(self.language_order) + for source in sources: + base, ext = os.path.splitext(source) + extlang = self.language_map.get(ext) + try: + extindex = self.language_order.index(extlang) + if extindex < index: + lang = extlang + index = extindex + except ValueError: + pass + return lang + + + # -- Worker methods ------------------------------------------------ + # (must be implemented by subclasses) + + def preprocess(self, source, output_file=None, macros=None, + include_dirs=None, extra_preargs=None, extra_postargs=None): + """Preprocess a single C/C++ source file, named in 'source'. + Output will be written to file named 'output_file', or stdout if + 'output_file' not supplied. 'macros' is a list of macro + definitions as for 'compile()', which will augment the macros set + with 'define_macro()' and 'undefine_macro()'. 'include_dirs' is a + list of directory names that will be added to the default list. + + Raises PreprocessError on failure. 
+ """ + pass + + def compile(self, sources, output_dir=None, macros=None, + include_dirs=None, debug=0, extra_preargs=None, + extra_postargs=None, depends=None): + """Compile one or more source files. + + 'sources' must be a list of filenames, most likely C/C++ + files, but in reality anything that can be handled by a + particular compiler and compiler class (eg. MSVCCompiler can + handle resource files in 'sources'). Return a list of object + filenames, one per source filename in 'sources'. Depending on + the implementation, not all source files will necessarily be + compiled, but all corresponding object filenames will be + returned. + + If 'output_dir' is given, object files will be put under it, while + retaining their original path component. That is, "foo/bar.c" + normally compiles to "foo/bar.o" (for a Unix implementation); if + 'output_dir' is "build", then it would compile to + "build/foo/bar.o". + + 'macros', if given, must be a list of macro definitions. A macro + definition is either a (name, value) 2-tuple or a (name,) 1-tuple. + The former defines a macro; if the value is None, the macro is + defined without an explicit value. The 1-tuple case undefines a + macro. Later definitions/redefinitions/ undefinitions take + precedence. + + 'include_dirs', if given, must be a list of strings, the + directories to add to the default include file search path for this + compilation only. + + 'debug' is a boolean; if true, the compiler will be instructed to + output debug symbols in (or alongside) the object file(s). + + 'extra_preargs' and 'extra_postargs' are implementation- dependent. + On platforms that have the notion of a command-line (e.g. Unix, + DOS/Windows), they are most likely lists of strings: extra + command-line arguments to prepend/append to the compiler command + line. On other platforms, consult the implementation class + documentation. In any event, they are intended as an escape hatch + for those occasions when the abstract compiler framework doesn't + cut the mustard. + + 'depends', if given, is a list of filenames that all targets + depend on. If a source file is older than any file in + depends, then the source file will be recompiled. This + supports dependency tracking, but only at a coarse + granularity. + + Raises CompileError on failure. + """ + # A concrete compiler class can either override this method + # entirely or implement _compile(). + macros, objects, extra_postargs, pp_opts, build = \ + self._setup_compile(output_dir, macros, include_dirs, sources, + depends, extra_postargs) + cc_args = self._get_cc_args(pp_opts, debug, extra_preargs) + + for obj in objects: + try: + src, ext = build[obj] + except KeyError: + continue + self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts) + + # Return *all* object filenames, not just the ones we just built. + return objects + + def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): + """Compile 'src' to product 'obj'.""" + # A concrete compiler class that does not override compile() + # should implement _compile(). + pass + + def create_static_lib(self, objects, output_libname, output_dir=None, + debug=0, target_lang=None): + """Link a bunch of stuff together to create a static library file. + The "bunch of stuff" consists of the list of object files supplied + as 'objects', the extra object files supplied to + 'add_link_object()' and/or 'set_link_objects()', the libraries + supplied to 'add_library()' and/or 'set_libraries()', and the + libraries supplied as 'libraries' (if any). 
+ + 'output_libname' should be a library name, not a filename; the + filename will be inferred from the library name. 'output_dir' is + the directory where the library file will be put. + + 'debug' is a boolean; if true, debugging information will be + included in the library (note that on most platforms, it is the + compile step where this matters: the 'debug' flag is included here + just for consistency). + + 'target_lang' is the target language for which the given objects + are being compiled. This allows specific linkage time treatment of + certain languages. + + Raises LibError on failure. + """ + pass + + + # values for target_desc parameter in link() + SHARED_OBJECT = "shared_object" + SHARED_LIBRARY = "shared_library" + EXECUTABLE = "executable" + + def link(self, + target_desc, + objects, + output_filename, + output_dir=None, + libraries=None, + library_dirs=None, + runtime_library_dirs=None, + export_symbols=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + build_temp=None, + target_lang=None): + """Link a bunch of stuff together to create an executable or + shared library file. + + The "bunch of stuff" consists of the list of object files supplied + as 'objects'. 'output_filename' should be a filename. If + 'output_dir' is supplied, 'output_filename' is relative to it + (i.e. 'output_filename' can provide directory components if + needed). + + 'libraries' is a list of libraries to link against. These are + library names, not filenames, since they're translated into + filenames in a platform-specific way (eg. "foo" becomes "libfoo.a" + on Unix and "foo.lib" on DOS/Windows). However, they can include a + directory component, which means the linker will look in that + specific directory rather than searching all the normal locations. + + 'library_dirs', if supplied, should be a list of directories to + search for libraries that were specified as bare library names + (ie. no directory component). These are on top of the system + default and those supplied to 'add_library_dir()' and/or + 'set_library_dirs()'. 'runtime_library_dirs' is a list of + directories that will be embedded into the shared library and used + to search for other shared libraries that *it* depends on at + run-time. (This may only be relevant on Unix.) + + 'export_symbols' is a list of symbols that the shared library will + export. (This appears to be relevant only on Windows.) + + 'debug' is as for 'compile()' and 'create_static_lib()', with the + slight distinction that it actually matters on most platforms (as + opposed to 'create_static_lib()', which includes a 'debug' flag + mostly for form's sake). + + 'extra_preargs' and 'extra_postargs' are as for 'compile()' (except + of course that they supply command-line arguments for the + particular linker being used). + + 'target_lang' is the target language for which the given objects + are being compiled. This allows specific linkage time treatment of + certain languages. + + Raises LinkError on failure. + """ + raise NotImplementedError + + + # Old 'link_*()' methods, rewritten to use the new 'link()' method. 
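Continuing the earlier sketch ('cc' and 'objs' as above; the library name 'foo'
and the directory '/opt/foo/lib' are hypothetical), the distinction drawn above
between library names and filenames means a call like this asks the linker to
search for libfoo rather than naming a file directly:

    cc.link(CCompiler.EXECUTABLE, objs, "demo",
            libraries=["foo"],                      # becomes -lfoo on Unix
            library_dirs=["/opt/foo/lib"],          # becomes -L/opt/foo/lib
            runtime_library_dirs=["/opt/foo/lib"])  # embedded run-time search path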
+ + def link_shared_lib(self, + objects, + output_libname, + output_dir=None, + libraries=None, + library_dirs=None, + runtime_library_dirs=None, + export_symbols=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + build_temp=None, + target_lang=None): + self.link(CCompiler.SHARED_LIBRARY, objects, + self.library_filename(output_libname, lib_type='shared'), + output_dir, + libraries, library_dirs, runtime_library_dirs, + export_symbols, debug, + extra_preargs, extra_postargs, build_temp, target_lang) + + + def link_shared_object(self, + objects, + output_filename, + output_dir=None, + libraries=None, + library_dirs=None, + runtime_library_dirs=None, + export_symbols=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + build_temp=None, + target_lang=None): + self.link(CCompiler.SHARED_OBJECT, objects, + output_filename, output_dir, + libraries, library_dirs, runtime_library_dirs, + export_symbols, debug, + extra_preargs, extra_postargs, build_temp, target_lang) + + + def link_executable(self, + objects, + output_progname, + output_dir=None, + libraries=None, + library_dirs=None, + runtime_library_dirs=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + target_lang=None): + self.link(CCompiler.EXECUTABLE, objects, + self.executable_filename(output_progname), output_dir, + libraries, library_dirs, runtime_library_dirs, None, + debug, extra_preargs, extra_postargs, None, target_lang) + + + # -- Miscellaneous methods ----------------------------------------- + # These are all used by the 'gen_lib_options() function; there is + # no appropriate default implementation so subclasses should + # implement all of these. + + def library_dir_option(self, dir): + """Return the compiler option to add 'dir' to the list of + directories searched for libraries. + """ + raise NotImplementedError + + def runtime_library_dir_option(self, dir): + """Return the compiler option to add 'dir' to the list of + directories searched for runtime libraries. + """ + raise NotImplementedError + + def library_option(self, lib): + """Return the compiler option to add 'lib' to the list of libraries + linked into the shared library or executable. + """ + raise NotImplementedError + + def has_function(self, funcname, includes=None, include_dirs=None, + libraries=None, library_dirs=None): + """Return a boolean indicating whether funcname is supported on + the current platform. The optional arguments can be used to + augment the compilation environment. + """ + # this can't be included at module scope because it tries to + # import math which might not be available at that point - maybe + # the necessary logic should just be inlined? 
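        # Illustration (the macro name is hypothetical, not something this
        # module defines): a build step might guard an optional feature with
        #     if compiler.has_function('sin', libraries=['m']):
        #         compiler.define_macro('HAVE_SIN')
        # i.e. compile and link the tiny probe program below and only define
        # the macro when both steps succeed.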
+ import tempfile + if includes is None: + includes = [] + if include_dirs is None: + include_dirs = [] + if libraries is None: + libraries = [] + if library_dirs is None: + library_dirs = [] + fd, fname = tempfile.mkstemp(".c", funcname, text=True) + f = os.fdopen(fd, "w") + try: + for incl in includes: + f.write("""#include "%s"\n""" % incl) + f.write("""\ +int main (int argc, char **argv) { + %s(); + return 0; +} +""" % funcname) + finally: + f.close() + try: + objects = self.compile([fname], include_dirs=include_dirs) + except CompileError: + return False + + try: + self.link_executable(objects, "a.out", + libraries=libraries, + library_dirs=library_dirs) + except (LinkError, TypeError): + return False + return True + + def find_library_file (self, dirs, lib, debug=0): + """Search the specified list of directories for a static or shared + library file 'lib' and return the full path to that file. If + 'debug' true, look for a debugging version (if that makes sense on + the current platform). Return None if 'lib' wasn't found in any of + the specified directories. + """ + raise NotImplementedError + + # -- Filename generation methods ----------------------------------- + + # The default implementation of the filename generating methods are + # prejudiced towards the Unix/DOS/Windows view of the world: + # * object files are named by replacing the source file extension + # (eg. .c/.cpp -> .o/.obj) + # * library files (shared or static) are named by plugging the + # library name and extension into a format string, eg. + # "lib%s.%s" % (lib_name, ".a") for Unix static libraries + # * executables are named by appending an extension (possibly + # empty) to the program name: eg. progname + ".exe" for + # Windows + # + # To reduce redundant code, these methods expect to find + # several attributes in the current object (presumably defined + # as class attributes): + # * src_extensions - + # list of C/C++ source file extensions, eg. ['.c', '.cpp'] + # * obj_extension - + # object file extension, eg. '.o' or '.obj' + # * static_lib_extension - + # extension for static library files, eg. '.a' or '.lib' + # * shared_lib_extension - + # extension for shared library/object files, eg. '.so', '.dll' + # * static_lib_format - + # format string for generating static library filenames, + # eg. 'lib%s.%s' or '%s.%s' + # * shared_lib_format + # format string for generating shared library filenames + # (probably same as static_lib_format, since the extension + # is one of the intended parameters to the format string) + # * exe_extension - + # extension for executable files, eg. 
'' or '.exe' + + def object_filenames(self, source_filenames, strip_dir=0, output_dir=''): + if output_dir is None: + output_dir = '' + obj_names = [] + for src_name in source_filenames: + base, ext = os.path.splitext(src_name) + base = os.path.splitdrive(base)[1] # Chop off the drive + base = base[os.path.isabs(base):] # If abs, chop off leading / + if ext not in self.src_extensions: + raise UnknownFileError( + "unknown file type '%s' (from '%s')" % (ext, src_name)) + if strip_dir: + base = os.path.basename(base) + obj_names.append(os.path.join(output_dir, + base + self.obj_extension)) + return obj_names + + def shared_object_filename(self, basename, strip_dir=0, output_dir=''): + assert output_dir is not None + if strip_dir: + basename = os.path.basename(basename) + return os.path.join(output_dir, basename + self.shared_lib_extension) + + def executable_filename(self, basename, strip_dir=0, output_dir=''): + assert output_dir is not None + if strip_dir: + basename = os.path.basename(basename) + return os.path.join(output_dir, basename + (self.exe_extension or '')) + + def library_filename(self, libname, lib_type='static', # or 'shared' + strip_dir=0, output_dir=''): + assert output_dir is not None + if lib_type not in ("static", "shared", "dylib", "xcode_stub"): + raise ValueError( + "'lib_type' must be \"static\", \"shared\", \"dylib\", or \"xcode_stub\"") + fmt = getattr(self, lib_type + "_lib_format") + ext = getattr(self, lib_type + "_lib_extension") + + dir, base = os.path.split(libname) + filename = fmt % (base, ext) + if strip_dir: + dir = '' + + return os.path.join(output_dir, dir, filename) + + + # -- Utility methods ----------------------------------------------- + + def announce(self, msg, level=1): + log.debug(msg) + + def debug_print(self, msg): + from distutils.debug import DEBUG + if DEBUG: + print(msg) + + def warn(self, msg): + sys.stderr.write("warning: %s\n" % msg) + + def execute(self, func, args, msg=None, level=1): + execute(func, args, msg, self.dry_run) + + def spawn(self, cmd): + spawn(cmd, dry_run=self.dry_run) + + def move_file(self, src, dst): + return move_file(src, dst, dry_run=self.dry_run) + + def mkpath (self, name, mode=0o777): + mkpath(name, mode, dry_run=self.dry_run) + + +# Map a sys.platform/os.name ('posix', 'nt') to the default compiler +# type for that platform. Keys are interpreted as re match +# patterns. Order is important; platform mappings are preferred over +# OS names. +_default_compilers = ( + + # Platform string mappings + + # on a cygwin built python we can use gcc like an ordinary UNIXish + # compiler + ('cygwin.*', 'unix'), + + # OS name mappings + ('posix', 'unix'), + ('nt', 'msvc'), + + ) + +def get_default_compiler(osname=None, platform=None): + """Determine the default compiler to use for the given platform. + + osname should be one of the standard Python OS names (i.e. the + ones returned by os.name) and platform the common value + returned by sys.platform for the platform in question. + + The default values are os.name and sys.platform in case the + parameters are not given. + """ + if osname is None: + osname = os.name + if platform is None: + platform = sys.platform + for pattern, compiler in _default_compilers: + if re.match(pattern, platform) is not None or \ + re.match(pattern, osname) is not None: + return compiler + # Default to Unix compiler + return 'unix' + +# Map compiler types to (module_name, class_name) pairs -- ie. where to +# find the code that implements an interface to this compiler. 
(The module +# is assumed to be in the 'distutils' package.) +compiler_class = { 'unix': ('unixccompiler', 'UnixCCompiler', + "standard UNIX-style compiler"), + 'msvc': ('_msvccompiler', 'MSVCCompiler', + "Microsoft Visual C++"), + 'cygwin': ('cygwinccompiler', 'CygwinCCompiler', + "Cygwin port of GNU C Compiler for Win32"), + 'mingw32': ('cygwinccompiler', 'Mingw32CCompiler', + "Mingw32 port of GNU C Compiler for Win32"), + 'bcpp': ('bcppcompiler', 'BCPPCompiler', + "Borland C++ Compiler"), + } + +def show_compilers(): + """Print list of available compilers (used by the "--help-compiler" + options to "build", "build_ext", "build_clib"). + """ + # XXX this "knows" that the compiler option it's describing is + # "--compiler", which just happens to be the case for the three + # commands that use it. + from distutils.fancy_getopt import FancyGetopt + compilers = [] + for compiler in compiler_class.keys(): + compilers.append(("compiler="+compiler, None, + compiler_class[compiler][2])) + compilers.sort() + pretty_printer = FancyGetopt(compilers) + pretty_printer.print_help("List of available compilers:") + + +def new_compiler(plat=None, compiler=None, verbose=0, dry_run=0, force=0): + """Generate an instance of some CCompiler subclass for the supplied + platform/compiler combination. 'plat' defaults to 'os.name' + (eg. 'posix', 'nt'), and 'compiler' defaults to the default compiler + for that platform. Currently only 'posix' and 'nt' are supported, and + the default compilers are "traditional Unix interface" (UnixCCompiler + class) and Visual C++ (MSVCCompiler class). Note that it's perfectly + possible to ask for a Unix compiler object under Windows, and a + Microsoft compiler object under Unix -- if you supply a value for + 'compiler', 'plat' is ignored. + """ + if plat is None: + plat = os.name + + try: + if compiler is None: + compiler = get_default_compiler(plat) + + (module_name, class_name, long_description) = compiler_class[compiler] + except KeyError: + msg = "don't know how to compile C/C++ code on platform '%s'" % plat + if compiler is not None: + msg = msg + " with '%s' compiler" % compiler + raise DistutilsPlatformError(msg) + + try: + module_name = "distutils." + module_name + __import__ (module_name) + module = sys.modules[module_name] + klass = vars(module)[class_name] + except ImportError: + raise DistutilsModuleError( + "can't compile C/C++ code: unable to load module '%s'" % \ + module_name) + except KeyError: + raise DistutilsModuleError( + "can't compile C/C++ code: unable to find class '%s' " + "in module '%s'" % (class_name, module_name)) + + # XXX The None is necessary to preserve backwards compatibility + # with classes that expect verbose to be the first positional + # argument. + return klass(None, dry_run, force) + + +def gen_preprocess_options(macros, include_dirs): + """Generate C pre-processor options (-D, -U, -I) as used by at least + two types of compilers: the typical Unix compiler and Visual C++. + 'macros' is the usual thing, a list of 1- or 2-tuples, where (name,) + means undefine (-U) macro 'name', and (name,value) means define (-D) + macro 'name' to 'value'. 'include_dirs' is just a list of directory + names to be added to the header file search path (-I). Returns a list + of command-line options suitable for either Unix compilers or Visual + C++. + """ + # XXX it would be nice (mainly aesthetic, and so we don't generate + # stupid-looking command lines) to go over 'macros' and eliminate + # redundant definitions/undefinitions (ie. 
ensure that only the + # latest mention of a particular macro winds up on the command + # line). I don't think it's essential, though, since most (all?) + # Unix C compilers only pay attention to the latest -D or -U + # mention of a macro on their command line. Similar situation for + # 'include_dirs'. I'm punting on both for now. Anyways, weeding out + # redundancies like this should probably be the province of + # CCompiler, since the data structures used are inherited from it + # and therefore common to all CCompiler classes. + pp_opts = [] + for macro in macros: + if not (isinstance(macro, tuple) and 1 <= len(macro) <= 2): + raise TypeError( + "bad macro definition '%s': " + "each element of 'macros' list must be a 1- or 2-tuple" + % macro) + + if len(macro) == 1: # undefine this macro + pp_opts.append("-U%s" % macro[0]) + elif len(macro) == 2: + if macro[1] is None: # define with no explicit value + pp_opts.append("-D%s" % macro[0]) + else: + # XXX *don't* need to be clever about quoting the + # macro value here, because we're going to avoid the + # shell at all costs when we spawn the command! + pp_opts.append("-D%s=%s" % macro) + + for dir in include_dirs: + pp_opts.append("-I%s" % dir) + return pp_opts + + +def gen_lib_options (compiler, library_dirs, runtime_library_dirs, libraries): + """Generate linker options for searching library directories and + linking with specific libraries. 'libraries' and 'library_dirs' are, + respectively, lists of library names (not filenames!) and search + directories. Returns a list of command-line options suitable for use + with some compiler (depending on the two format strings passed in). + """ + lib_opts = [] + + for dir in library_dirs: + lib_opts.append(compiler.library_dir_option(dir)) + + for dir in runtime_library_dirs: + opt = compiler.runtime_library_dir_option(dir) + if isinstance(opt, list): + lib_opts = lib_opts + opt + else: + lib_opts.append(opt) + + # XXX it's important that we *not* remove redundant library mentions! + # sometimes you really do have to say "-lfoo -lbar -lfoo" in order to + # resolve all symbols. I just hope we never have to say "-lfoo obj.o + # -lbar" to get things to work -- that's certainly a possibility, but a + # pretty nasty way to arrange your C code. + + for lib in libraries: + (lib_dir, lib_name) = os.path.split(lib) + if lib_dir: + lib_file = compiler.find_library_file([lib_dir], lib_name) + if lib_file: + lib_opts.append(lib_file) + else: + compiler.warn("no library file corresponding to " + "'%s' found (skipping)" % lib) + else: + lib_opts.append(compiler.library_option (lib)) + return lib_opts diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/cmd.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/cmd.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/cmd.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/cmd.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,403 @@ +"""distutils.cmd + +Provides the Command class, the base class for the command classes +in the distutils.command package. +""" + +import sys, os, re +from distutils.errors import DistutilsOptionError +from distutils import util, dir_util, file_util, archive_util, dep_util +from distutils import log + +class Command: + """Abstract base class for defining command classes, the "worker bees" + of the Distutils. A useful analogy for command classes is to think of + them as subroutines with local variables called "options". 
The options + are "declared" in 'initialize_options()' and "defined" (given their + final values, aka "finalized") in 'finalize_options()', both of which + must be defined by every command class. The distinction between the + two is necessary because option values might come from the outside + world (command line, config file, ...), and any options dependent on + other options must be computed *after* these outside influences have + been processed -- hence 'finalize_options()'. The "body" of the + subroutine, where it does all its work based on the values of its + options, is the 'run()' method, which must also be implemented by every + command class. + """ + + # 'sub_commands' formalizes the notion of a "family" of commands, + # eg. "install" as the parent with sub-commands "install_lib", + # "install_headers", etc. The parent of a family of commands + # defines 'sub_commands' as a class attribute; it's a list of + # (command_name : string, predicate : unbound_method | string | None) + # tuples, where 'predicate' is a method of the parent command that + # determines whether the corresponding command is applicable in the + # current situation. (Eg. we "install_headers" is only applicable if + # we have any C header files to install.) If 'predicate' is None, + # that command is always applicable. + # + # 'sub_commands' is usually defined at the *end* of a class, because + # predicates can be unbound methods, so they must already have been + # defined. The canonical example is the "install" command. + sub_commands = [] + + + # -- Creation/initialization methods ------------------------------- + + def __init__(self, dist): + """Create and initialize a new Command object. Most importantly, + invokes the 'initialize_options()' method, which is the real + initializer and depends on the actual command being + instantiated. + """ + # late import because of mutual dependence between these classes + from distutils.dist import Distribution + + if not isinstance(dist, Distribution): + raise TypeError("dist must be a Distribution instance") + if self.__class__ is Command: + raise RuntimeError("Command is an abstract class") + + self.distribution = dist + self.initialize_options() + + # Per-command versions of the global flags, so that the user can + # customize Distutils' behaviour command-by-command and let some + # commands fall back on the Distribution's behaviour. None means + # "not defined, check self.distribution's copy", while 0 or 1 mean + # false and true (duh). Note that this means figuring out the real + # value of each flag is a touch complicated -- hence "self._dry_run" + # will be handled by __getattr__, below. + # XXX This needs to be fixed. + self._dry_run = None + + # verbose is largely ignored, but needs to be set for + # backwards compatibility (I think)? + self.verbose = dist.verbose + + # Some commands define a 'self.force' option to ignore file + # timestamps, but methods defined *here* assume that + # 'self.force' exists for all commands. So define it here + # just to be safe. + self.force = None + + # The 'help' flag is just used for command-line parsing, so + # none of that complicated bureaucracy is needed. + self.help = 0 + + # 'finalized' records whether or not 'finalize_options()' has been + # called. 'finalize_options()' itself should not pay attention to + # this flag: it is the business of 'ensure_finalized()', which + # always calls 'finalize_options()', to respect/update it. + self.finalized = 0 + + # XXX A more explicit way to customize dry_run would be better. 
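As a sketch of the 'sub_commands' protocol described in the comment above (the
'build_all' command, the 'build_docs' sub-command and the predicate are
hypothetical; 'build_py' is a real command):

    from distutils.cmd import Command

    class build_all(Command):                      # hypothetical parent command
        description = "hypothetical aggregate command"

        def initialize_options(self):
            pass

        def finalize_options(self):
            pass

        def has_docs(self):
            # any predicate taking only 'self' will do
            return bool(self.distribution.data_files)

        def run(self):
            for name in self.get_sub_commands():
                self.run_command(name)

        sub_commands = [('build_py',   None),      # always applicable
                        ('build_docs', has_docs)]  # only when has_docs() is true

get_sub_commands() would then return either ['build_py'] or
['build_py', 'build_docs'] for a given distribution.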
+ def __getattr__(self, attr): + if attr == 'dry_run': + myval = getattr(self, "_" + attr) + if myval is None: + return getattr(self.distribution, attr) + else: + return myval + else: + raise AttributeError(attr) + + def ensure_finalized(self): + if not self.finalized: + self.finalize_options() + self.finalized = 1 + + # Subclasses must define: + # initialize_options() + # provide default values for all options; may be customized by + # setup script, by options from config file(s), or by command-line + # options + # finalize_options() + # decide on the final values for all options; this is called + # after all possible intervention from the outside world + # (command-line, option file, etc.) has been processed + # run() + # run the command: do whatever it is we're here to do, + # controlled by the command's various option values + + def initialize_options(self): + """Set default values for all the options that this command + supports. Note that these defaults may be overridden by other + commands, by the setup script, by config files, or by the + command-line. Thus, this is not the place to code dependencies + between options; generally, 'initialize_options()' implementations + are just a bunch of "self.foo = None" assignments. + + This method must be implemented by all command classes. + """ + raise RuntimeError("abstract method -- subclass %s must override" + % self.__class__) + + def finalize_options(self): + """Set final values for all the options that this command supports. + This is always called as late as possible, ie. after any option + assignments from the command-line or from other commands have been + done. Thus, this is the place to code option dependencies: if + 'foo' depends on 'bar', then it is safe to set 'foo' from 'bar' as + long as 'foo' still has the same value it was assigned in + 'initialize_options()'. + + This method must be implemented by all command classes. + """ + raise RuntimeError("abstract method -- subclass %s must override" + % self.__class__) + + + def dump_options(self, header=None, indent=""): + from distutils.fancy_getopt import longopt_xlate + if header is None: + header = "command options for '%s':" % self.get_command_name() + self.announce(indent + header, level=log.INFO) + indent = indent + " " + for (option, _, _) in self.user_options: + option = option.translate(longopt_xlate) + if option[-1] == "=": + option = option[:-1] + value = getattr(self, option) + self.announce(indent + "%s = %s" % (option, value), + level=log.INFO) + + def run(self): + """A command's raison d'etre: carry out the action it exists to + perform, controlled by the options initialized in + 'initialize_options()', customized by other commands, the setup + script, the command-line, and config files, and finalized in + 'finalize_options()'. All terminal output and filesystem + interaction should be done by 'run()'. + + This method must be implemented by all command classes. + """ + raise RuntimeError("abstract method -- subclass %s must override" + % self.__class__) + + def announce(self, msg, level=1): + """If the current verbosity level is of greater than or equal to + 'level' print 'msg' to stdout. + """ + log.log(level, msg) + + def debug_print(self, msg): + """Print 'msg' to stdout if the global DEBUG (taken from the + DISTUTILS_DEBUG environment variable) flag is true. 
+ """ + from distutils.debug import DEBUG + if DEBUG: + print(msg) + sys.stdout.flush() + + + # -- Option validation methods ------------------------------------- + # (these are very handy in writing the 'finalize_options()' method) + # + # NB. the general philosophy here is to ensure that a particular option + # value meets certain type and value constraints. If not, we try to + # force it into conformance (eg. if we expect a list but have a string, + # split the string on comma and/or whitespace). If we can't force the + # option into conformance, raise DistutilsOptionError. Thus, command + # classes need do nothing more than (eg.) + # self.ensure_string_list('foo') + # and they can be guaranteed that thereafter, self.foo will be + # a list of strings. + + def _ensure_stringlike(self, option, what, default=None): + val = getattr(self, option) + if val is None: + setattr(self, option, default) + return default + elif not isinstance(val, str): + raise DistutilsOptionError("'%s' must be a %s (got `%s`)" + % (option, what, val)) + return val + + def ensure_string(self, option, default=None): + """Ensure that 'option' is a string; if not defined, set it to + 'default'. + """ + self._ensure_stringlike(option, "string", default) + + def ensure_string_list(self, option): + r"""Ensure that 'option' is a list of strings. If 'option' is + currently a string, we split it either on /,\s*/ or /\s+/, so + "foo bar baz", "foo,bar,baz", and "foo, bar baz" all become + ["foo", "bar", "baz"]. + """ + val = getattr(self, option) + if val is None: + return + elif isinstance(val, str): + setattr(self, option, re.split(r',\s*|\s+', val)) + else: + if isinstance(val, list): + ok = all(isinstance(v, str) for v in val) + else: + ok = False + if not ok: + raise DistutilsOptionError( + "'%s' must be a list of strings (got %r)" + % (option, val)) + + def _ensure_tested_string(self, option, tester, what, error_fmt, + default=None): + val = self._ensure_stringlike(option, what, default) + if val is not None and not tester(val): + raise DistutilsOptionError(("error in '%s' option: " + error_fmt) + % (option, val)) + + def ensure_filename(self, option): + """Ensure that 'option' is the name of an existing file.""" + self._ensure_tested_string(option, os.path.isfile, + "filename", + "'%s' does not exist or is not a file") + + def ensure_dirname(self, option): + self._ensure_tested_string(option, os.path.isdir, + "directory name", + "'%s' does not exist or is not a directory") + + + # -- Convenience methods for commands ------------------------------ + + def get_command_name(self): + if hasattr(self, 'command_name'): + return self.command_name + else: + return self.__class__.__name__ + + def set_undefined_options(self, src_cmd, *option_pairs): + """Set the values of any "undefined" options from corresponding + option values in some other command object. "Undefined" here means + "is None", which is the convention used to indicate that an option + has not been changed between 'initialize_options()' and + 'finalize_options()'. Usually called from 'finalize_options()' for + options that depend on some other command rather than another + option of the same command. 'src_cmd' is the other command from + which option values will be taken (a command object will be created + for it if necessary); the remaining arguments are + '(src_option,dst_option)' tuples which mean "take the value of + 'src_option' in the 'src_cmd' command object, and copy it to + 'dst_option' in the current command object". 
+ """ + # Option_pairs: list of (src_option, dst_option) tuples + src_cmd_obj = self.distribution.get_command_obj(src_cmd) + src_cmd_obj.ensure_finalized() + for (src_option, dst_option) in option_pairs: + if getattr(self, dst_option) is None: + setattr(self, dst_option, getattr(src_cmd_obj, src_option)) + + def get_finalized_command(self, command, create=1): + """Wrapper around Distribution's 'get_command_obj()' method: find + (create if necessary and 'create' is true) the command object for + 'command', call its 'ensure_finalized()' method, and return the + finalized command object. + """ + cmd_obj = self.distribution.get_command_obj(command, create) + cmd_obj.ensure_finalized() + return cmd_obj + + # XXX rename to 'get_reinitialized_command()'? (should do the + # same in dist.py, if so) + def reinitialize_command(self, command, reinit_subcommands=0): + return self.distribution.reinitialize_command(command, + reinit_subcommands) + + def run_command(self, command): + """Run some other command: uses the 'run_command()' method of + Distribution, which creates and finalizes the command object if + necessary and then invokes its 'run()' method. + """ + self.distribution.run_command(command) + + def get_sub_commands(self): + """Determine the sub-commands that are relevant in the current + distribution (ie., that need to be run). This is based on the + 'sub_commands' class attribute: each tuple in that list may include + a method that we call to determine if the subcommand needs to be + run for the current distribution. Return a list of command names. + """ + commands = [] + for (cmd_name, method) in self.sub_commands: + if method is None or method(self): + commands.append(cmd_name) + return commands + + + # -- External world manipulation ----------------------------------- + + def warn(self, msg): + log.warn("warning: %s: %s\n", self.get_command_name(), msg) + + def execute(self, func, args, msg=None, level=1): + util.execute(func, args, msg, dry_run=self.dry_run) + + def mkpath(self, name, mode=0o777): + dir_util.mkpath(name, mode, dry_run=self.dry_run) + + def copy_file(self, infile, outfile, preserve_mode=1, preserve_times=1, + link=None, level=1): + """Copy a file respecting verbose, dry-run and force flags. (The + former two default to whatever is in the Distribution object, and + the latter defaults to false for commands that don't define it.)""" + return file_util.copy_file(infile, outfile, preserve_mode, + preserve_times, not self.force, link, + dry_run=self.dry_run) + + def copy_tree(self, infile, outfile, preserve_mode=1, preserve_times=1, + preserve_symlinks=0, level=1): + """Copy an entire directory tree respecting verbose, dry-run, + and force flags. 
+ """ + return dir_util.copy_tree(infile, outfile, preserve_mode, + preserve_times, preserve_symlinks, + not self.force, dry_run=self.dry_run) + + def move_file (self, src, dst, level=1): + """Move a file respecting dry-run flag.""" + return file_util.move_file(src, dst, dry_run=self.dry_run) + + def spawn(self, cmd, search_path=1, level=1): + """Spawn an external command respecting dry-run flag.""" + from distutils.spawn import spawn + spawn(cmd, search_path, dry_run=self.dry_run) + + def make_archive(self, base_name, format, root_dir=None, base_dir=None, + owner=None, group=None): + return archive_util.make_archive(base_name, format, root_dir, base_dir, + dry_run=self.dry_run, + owner=owner, group=group) + + def make_file(self, infiles, outfile, func, args, + exec_msg=None, skip_msg=None, level=1): + """Special case of 'execute()' for operations that process one or + more input files and generate one output file. Works just like + 'execute()', except the operation is skipped and a different + message printed if 'outfile' already exists and is newer than all + files listed in 'infiles'. If the command defined 'self.force', + and it is true, then the command is unconditionally run -- does no + timestamp checks. + """ + if skip_msg is None: + skip_msg = "skipping %s (inputs unchanged)" % outfile + + # Allow 'infiles' to be a single string + if isinstance(infiles, str): + infiles = (infiles,) + elif not isinstance(infiles, (list, tuple)): + raise TypeError( + "'infiles' must be a string, or a list or tuple of strings") + + if exec_msg is None: + exec_msg = "generating %s from %s" % (outfile, ', '.join(infiles)) + + # If 'outfile' must be regenerated (either because it doesn't + # exist, is out-of-date, or the 'force' flag is true) then + # perform the action that presumably regenerates it + if self.force or dep_util.newer_group(infiles, outfile): + self.execute(func, args, exec_msg, level) + # Otherwise, print the "skip" message + else: + log.debug(skip_msg) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/command/__init__.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/command/__init__.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/command/__init__.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/command/__init__.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,30 @@ +"""distutils.command + +Package containing implementation of all the standard Distutils +commands.""" + +__all__ = ['build', + 'build_py', + 'build_ext', + 'build_clib', + 'build_scripts', + 'clean', + 'install', + 'install_lib', + 'install_headers', + 'install_scripts', + 'install_data', + 'sdist', + 'register', + 'bdist', + 'bdist_dumb', + 'bdist_rpm', + 'check', + 'upload', + # These two are reserved for future use: + #'bdist_sdux', + #'bdist_pkgtool', + # Note: + # bdist_packager is not included because it only provides + # an abstract base class + ] diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/command/bdist.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/command/bdist.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/command/bdist.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/command/bdist.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,138 @@ +"""distutils.command.bdist + +Implements the Distutils 'bdist' command (create a built [binary] +distribution).""" + +import os +from distutils.core import Command +from distutils.errors import * +from distutils.util import 
get_platform + + +def show_formats(): + """Print list of available formats (arguments to "--format" option). + """ + from distutils.fancy_getopt import FancyGetopt + formats = [] + for format in bdist.format_commands: + formats.append(("formats=" + format, None, + bdist.format_command[format][1])) + pretty_printer = FancyGetopt(formats) + pretty_printer.print_help("List of available distribution formats:") + + +class bdist(Command): + + description = "create a built (binary) distribution" + + user_options = [('bdist-base=', 'b', + "temporary directory for creating built distributions"), + ('plat-name=', 'p', + "platform name to embed in generated filenames " + "(default: %s)" % get_platform()), + ('formats=', None, + "formats for distribution (comma-separated list)"), + ('dist-dir=', 'd', + "directory to put final built distributions in " + "[default: dist]"), + ('skip-build', None, + "skip rebuilding everything (for testing/debugging)"), + ('owner=', 'u', + "Owner name used when creating a tar file" + " [default: current user]"), + ('group=', 'g', + "Group name used when creating a tar file" + " [default: current group]"), + ] + + boolean_options = ['skip-build'] + + help_options = [ + ('help-formats', None, + "lists available distribution formats", show_formats), + ] + + # The following commands do not take a format option from bdist + no_format_option = ('bdist_rpm',) + + # This won't do in reality: will need to distinguish RPM-ish Linux, + # Debian-ish Linux, Solaris, FreeBSD, ..., Windows, Mac OS. + default_format = {'posix': 'gztar', + 'nt': 'zip'} + + # Establish the preferred order (for the --help-formats option). + format_commands = ['rpm', 'gztar', 'bztar', 'xztar', 'ztar', 'tar', 'zip'] + + # And the real information. + format_command = {'rpm': ('bdist_rpm', "RPM distribution"), + 'gztar': ('bdist_dumb', "gzip'ed tar file"), + 'bztar': ('bdist_dumb', "bzip2'ed tar file"), + 'xztar': ('bdist_dumb', "xz'ed tar file"), + 'ztar': ('bdist_dumb', "compressed tar file"), + 'tar': ('bdist_dumb', "tar file"), + 'zip': ('bdist_dumb', "ZIP file"), + } + + def initialize_options(self): + self.bdist_base = None + self.plat_name = None + self.formats = None + self.dist_dir = None + self.skip_build = 0 + self.group = None + self.owner = None + + def finalize_options(self): + # have to finalize 'plat_name' before 'bdist_base' + if self.plat_name is None: + if self.skip_build: + self.plat_name = get_platform() + else: + self.plat_name = self.get_finalized_command('build').plat_name + + # 'bdist_base' -- parent of per-built-distribution-format + # temporary directories (eg. we'll probably have + # "build/bdist./dumb", "build/bdist./rpm", etc.) + if self.bdist_base is None: + build_base = self.get_finalized_command('build').build_base + self.bdist_base = os.path.join(build_base, + 'bdist.' + self.plat_name) + + self.ensure_string_list('formats') + if self.formats is None: + try: + self.formats = [self.default_format[os.name]] + except KeyError: + raise DistutilsPlatformError( + "don't know how to create built distributions " + "on platform %s" % os.name) + + if self.dist_dir is None: + self.dist_dir = "dist" + + def run(self): + # Figure out which sub-commands we need to run. + commands = [] + for format in self.formats: + try: + commands.append(self.format_command[format][0]) + except KeyError: + raise DistutilsOptionError("invalid format '%s'" % format) + + # Reinitialize and run each command. 
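        # Illustration: if self.formats were ['gztar', 'rpm'], the loop above
        # would yield commands == ['bdist_dumb', 'bdist_rpm']; the loop below
        # then reruns each sub-command, setting its 'format' only for commands
        # not listed in no_format_option.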
+ for i in range(len(self.formats)): + cmd_name = commands[i] + sub_cmd = self.reinitialize_command(cmd_name) + if cmd_name not in self.no_format_option: + sub_cmd.format = self.formats[i] + + # passing the owner and group names for tar archiving + if cmd_name == 'bdist_dumb': + sub_cmd.owner = self.owner + sub_cmd.group = self.group + + # If we're going to need to run this command again, tell it to + # keep its temporary files around so subsequent runs go faster. + if cmd_name in commands[i+1:]: + sub_cmd.keep_temp = 1 + self.run_command(cmd_name) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/command/bdist_dumb.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/command/bdist_dumb.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/command/bdist_dumb.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/command/bdist_dumb.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,123 @@ +"""distutils.command.bdist_dumb + +Implements the Distutils 'bdist_dumb' command (create a "dumb" built +distribution -- i.e., just an archive to be unpacked under $prefix or +$exec_prefix).""" + +import os +from distutils.core import Command +from distutils.util import get_platform +from distutils.dir_util import remove_tree, ensure_relative +from distutils.errors import * +from distutils.sysconfig import get_python_version +from distutils import log + +class bdist_dumb(Command): + + description = "create a \"dumb\" built distribution" + + user_options = [('bdist-dir=', 'd', + "temporary directory for creating the distribution"), + ('plat-name=', 'p', + "platform name to embed in generated filenames " + "(default: %s)" % get_platform()), + ('format=', 'f', + "archive format to create (tar, gztar, bztar, xztar, " + "ztar, zip)"), + ('keep-temp', 'k', + "keep the pseudo-installation tree around after " + + "creating the distribution archive"), + ('dist-dir=', 'd', + "directory to put final built distributions in"), + ('skip-build', None, + "skip rebuilding everything (for testing/debugging)"), + ('relative', None, + "build the archive using relative paths " + "(default: false)"), + ('owner=', 'u', + "Owner name used when creating a tar file" + " [default: current user]"), + ('group=', 'g', + "Group name used when creating a tar file" + " [default: current group]"), + ] + + boolean_options = ['keep-temp', 'skip-build', 'relative'] + + default_format = { 'posix': 'gztar', + 'nt': 'zip' } + + def initialize_options(self): + self.bdist_dir = None + self.plat_name = None + self.format = None + self.keep_temp = 0 + self.dist_dir = None + self.skip_build = None + self.relative = 0 + self.owner = None + self.group = None + + def finalize_options(self): + if self.bdist_dir is None: + bdist_base = self.get_finalized_command('bdist').bdist_base + self.bdist_dir = os.path.join(bdist_base, 'dumb') + + if self.format is None: + try: + self.format = self.default_format[os.name] + except KeyError: + raise DistutilsPlatformError( + "don't know how to create dumb built distributions " + "on platform %s" % os.name) + + self.set_undefined_options('bdist', + ('dist_dir', 'dist_dir'), + ('plat_name', 'plat_name'), + ('skip_build', 'skip_build')) + + def run(self): + if not self.skip_build: + self.run_command('build') + + install = self.reinitialize_command('install', reinit_subcommands=1) + install.root = self.bdist_dir + install.skip_build = self.skip_build + install.warn_dir = 0 + + log.info("installing to %s", self.bdist_dir) + self.run_command('install') + + # And make 
an archive relative to the root of the + # pseudo-installation tree. + archive_basename = "%s.%s" % (self.distribution.get_fullname(), + self.plat_name) + + pseudoinstall_root = os.path.join(self.dist_dir, archive_basename) + if not self.relative: + archive_root = self.bdist_dir + else: + if (self.distribution.has_ext_modules() and + (install.install_base != install.install_platbase)): + raise DistutilsPlatformError( + "can't make a dumb built distribution where " + "base and platbase are different (%s, %s)" + % (repr(install.install_base), + repr(install.install_platbase))) + else: + archive_root = os.path.join(self.bdist_dir, + ensure_relative(install.install_base)) + + # Make the archive + filename = self.make_archive(pseudoinstall_root, + self.format, root_dir=archive_root, + owner=self.owner, group=self.group) + if self.distribution.has_ext_modules(): + pyversion = get_python_version() + else: + pyversion = 'any' + self.distribution.dist_files.append(('bdist_dumb', pyversion, + filename)) + + if not self.keep_temp: + remove_tree(self.bdist_dir, dry_run=self.dry_run) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/command/bdist_rpm.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/command/bdist_rpm.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/command/bdist_rpm.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/command/bdist_rpm.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,579 @@ +"""distutils.command.bdist_rpm + +Implements the Distutils 'bdist_rpm' command (create RPM source and binary +distributions).""" + +import subprocess, sys, os +from distutils.core import Command +from distutils.debug import DEBUG +from distutils.file_util import write_file +from distutils.errors import * +from distutils.sysconfig import get_python_version +from distutils import log + +class bdist_rpm(Command): + + description = "create an RPM distribution" + + user_options = [ + ('bdist-base=', None, + "base directory for creating built distributions"), + ('rpm-base=', None, + "base directory for creating RPMs (defaults to \"rpm\" under " + "--bdist-base; must be specified for RPM 2)"), + ('dist-dir=', 'd', + "directory to put final RPM files in " + "(and .spec files if --spec-only)"), + ('python=', None, + "path to Python interpreter to hard-code in the .spec file " + "(default: \"python\")"), + ('fix-python', None, + "hard-code the exact path to the current Python interpreter in " + "the .spec file"), + ('spec-only', None, + "only regenerate spec file"), + ('source-only', None, + "only generate source RPM"), + ('binary-only', None, + "only generate binary RPM"), + ('use-bzip2', None, + "use bzip2 instead of gzip to create source distribution"), + + # More meta-data: too RPM-specific to put in the setup script, + # but needs to go in the .spec file -- so we make these options + # to "bdist_rpm". The idea is that packagers would put this + # info in setup.cfg, although they are of course free to + # supply it on the command line. + ('distribution-name=', None, + "name of the (Linux) distribution to which this " + "RPM applies (*not* the name of the module distribution!)"), + ('group=', None, + "package classification [default: \"Development/Libraries\"]"), + ('release=', None, + "RPM release number"), + ('serial=', None, + "RPM serial number"), + ('vendor=', None, + "RPM \"vendor\" (eg. \"Joe Blow \") " + "[default: maintainer or author from setup script]"), + ('packager=', None, + "RPM packager (eg. 
\"Jane Doe \") " + "[default: vendor]"), + ('doc-files=', None, + "list of documentation files (space or comma-separated)"), + ('changelog=', None, + "RPM changelog"), + ('icon=', None, + "name of icon file"), + ('provides=', None, + "capabilities provided by this package"), + ('requires=', None, + "capabilities required by this package"), + ('conflicts=', None, + "capabilities which conflict with this package"), + ('build-requires=', None, + "capabilities required to build this package"), + ('obsoletes=', None, + "capabilities made obsolete by this package"), + ('no-autoreq', None, + "do not automatically calculate dependencies"), + + # Actions to take when building RPM + ('keep-temp', 'k', + "don't clean up RPM build directory"), + ('no-keep-temp', None, + "clean up RPM build directory [default]"), + ('use-rpm-opt-flags', None, + "compile with RPM_OPT_FLAGS when building from source RPM"), + ('no-rpm-opt-flags', None, + "do not pass any RPM CFLAGS to compiler"), + ('rpm3-mode', None, + "RPM 3 compatibility mode (default)"), + ('rpm2-mode', None, + "RPM 2 compatibility mode"), + + # Add the hooks necessary for specifying custom scripts + ('prep-script=', None, + "Specify a script for the PREP phase of RPM building"), + ('build-script=', None, + "Specify a script for the BUILD phase of RPM building"), + + ('pre-install=', None, + "Specify a script for the pre-INSTALL phase of RPM building"), + ('install-script=', None, + "Specify a script for the INSTALL phase of RPM building"), + ('post-install=', None, + "Specify a script for the post-INSTALL phase of RPM building"), + + ('pre-uninstall=', None, + "Specify a script for the pre-UNINSTALL phase of RPM building"), + ('post-uninstall=', None, + "Specify a script for the post-UNINSTALL phase of RPM building"), + + ('clean-script=', None, + "Specify a script for the CLEAN phase of RPM building"), + + ('verify-script=', None, + "Specify a script for the VERIFY phase of the RPM build"), + + # Allow a packager to explicitly force an architecture + ('force-arch=', None, + "Force an architecture onto the RPM build process"), + + ('quiet', 'q', + "Run the INSTALL phase of RPM building in quiet mode"), + ] + + boolean_options = ['keep-temp', 'use-rpm-opt-flags', 'rpm3-mode', + 'no-autoreq', 'quiet'] + + negative_opt = {'no-keep-temp': 'keep-temp', + 'no-rpm-opt-flags': 'use-rpm-opt-flags', + 'rpm2-mode': 'rpm3-mode'} + + + def initialize_options(self): + self.bdist_base = None + self.rpm_base = None + self.dist_dir = None + self.python = None + self.fix_python = None + self.spec_only = None + self.binary_only = None + self.source_only = None + self.use_bzip2 = None + + self.distribution_name = None + self.group = None + self.release = None + self.serial = None + self.vendor = None + self.packager = None + self.doc_files = None + self.changelog = None + self.icon = None + + self.prep_script = None + self.build_script = None + self.install_script = None + self.clean_script = None + self.verify_script = None + self.pre_install = None + self.post_install = None + self.pre_uninstall = None + self.post_uninstall = None + self.prep = None + self.provides = None + self.requires = None + self.conflicts = None + self.build_requires = None + self.obsoletes = None + + self.keep_temp = 0 + self.use_rpm_opt_flags = 1 + self.rpm3_mode = 1 + self.no_autoreq = 0 + + self.force_arch = None + self.quiet = 0 + + def finalize_options(self): + self.set_undefined_options('bdist', ('bdist_base', 'bdist_base')) + if self.rpm_base is None: + if not self.rpm3_mode: + raise 
DistutilsOptionError( + "you must specify --rpm-base in RPM 2 mode") + self.rpm_base = os.path.join(self.bdist_base, "rpm") + + if self.python is None: + if self.fix_python: + self.python = sys.executable + else: + self.python = "python3" + elif self.fix_python: + raise DistutilsOptionError( + "--python and --fix-python are mutually exclusive options") + + if os.name != 'posix': + raise DistutilsPlatformError("don't know how to create RPM " + "distributions on platform %s" % os.name) + if self.binary_only and self.source_only: + raise DistutilsOptionError( + "cannot supply both '--source-only' and '--binary-only'") + + # don't pass CFLAGS to pure python distributions + if not self.distribution.has_ext_modules(): + self.use_rpm_opt_flags = 0 + + self.set_undefined_options('bdist', ('dist_dir', 'dist_dir')) + self.finalize_package_data() + + def finalize_package_data(self): + self.ensure_string('group', "Development/Libraries") + self.ensure_string('vendor', + "%s <%s>" % (self.distribution.get_contact(), + self.distribution.get_contact_email())) + self.ensure_string('packager') + self.ensure_string_list('doc_files') + if isinstance(self.doc_files, list): + for readme in ('README', 'README.txt'): + if os.path.exists(readme) and readme not in self.doc_files: + self.doc_files.append(readme) + + self.ensure_string('release', "1") + self.ensure_string('serial') # should it be an int? + + self.ensure_string('distribution_name') + + self.ensure_string('changelog') + # Format changelog correctly + self.changelog = self._format_changelog(self.changelog) + + self.ensure_filename('icon') + + self.ensure_filename('prep_script') + self.ensure_filename('build_script') + self.ensure_filename('install_script') + self.ensure_filename('clean_script') + self.ensure_filename('verify_script') + self.ensure_filename('pre_install') + self.ensure_filename('post_install') + self.ensure_filename('pre_uninstall') + self.ensure_filename('post_uninstall') + + # XXX don't forget we punted on summaries and descriptions -- they + # should be handled here eventually! + + # Now *this* is some meta-data that belongs in the setup script... + self.ensure_string_list('provides') + self.ensure_string_list('requires') + self.ensure_string_list('conflicts') + self.ensure_string_list('build_requires') + self.ensure_string_list('obsoletes') + + self.ensure_string('force_arch') + + def run(self): + if DEBUG: + print("before _get_package_data():") + print("vendor =", self.vendor) + print("packager =", self.packager) + print("doc_files =", self.doc_files) + print("changelog =", self.changelog) + + # make directories + if self.spec_only: + spec_dir = self.dist_dir + self.mkpath(spec_dir) + else: + rpm_dir = {} + for d in ('SOURCES', 'SPECS', 'BUILD', 'RPMS', 'SRPMS'): + rpm_dir[d] = os.path.join(self.rpm_base, d) + self.mkpath(rpm_dir[d]) + spec_dir = rpm_dir['SPECS'] + + # Spec file goes into 'dist_dir' if '--spec-only specified', + # build/rpm. otherwise. + spec_path = os.path.join(spec_dir, + "%s.spec" % self.distribution.get_name()) + self.execute(write_file, + (spec_path, + self._make_spec_file()), + "writing '%s'" % spec_path) + + if self.spec_only: # stop if requested + return + + # Make a source distribution and copy to SOURCES directory with + # optional icon. 
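A minimal sketch of the --python/--fix-python decision made above, restated as a standalone function; pick_spec_interpreter and the plain ValueError are illustrative stand-ins, not part of the patch.

    import sys

    def pick_spec_interpreter(python=None, fix_python=False):
        # --fix-python hard-codes the running interpreter's path into the .spec
        # file; otherwise the spec keeps a portable "python3".
        if python is None:
            return sys.executable if fix_python else "python3"
        if fix_python:
            raise ValueError("--python and --fix-python are mutually exclusive")
        return python

    print(pick_spec_interpreter())                 # 'python3'
    print(pick_spec_interpreter(fix_python=True))  # e.g. '/usr/bin/python3'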
+ saved_dist_files = self.distribution.dist_files[:] + sdist = self.reinitialize_command('sdist') + if self.use_bzip2: + sdist.formats = ['bztar'] + else: + sdist.formats = ['gztar'] + self.run_command('sdist') + self.distribution.dist_files = saved_dist_files + + source = sdist.get_archive_files()[0] + source_dir = rpm_dir['SOURCES'] + self.copy_file(source, source_dir) + + if self.icon: + if os.path.exists(self.icon): + self.copy_file(self.icon, source_dir) + else: + raise DistutilsFileError( + "icon file '%s' does not exist" % self.icon) + + # build package + log.info("building RPMs") + rpm_cmd = ['rpmbuild'] + + if self.source_only: # what kind of RPMs? + rpm_cmd.append('-bs') + elif self.binary_only: + rpm_cmd.append('-bb') + else: + rpm_cmd.append('-ba') + rpm_cmd.extend(['--define', '__python %s' % self.python]) + if self.rpm3_mode: + rpm_cmd.extend(['--define', + '_topdir %s' % os.path.abspath(self.rpm_base)]) + if not self.keep_temp: + rpm_cmd.append('--clean') + + if self.quiet: + rpm_cmd.append('--quiet') + + rpm_cmd.append(spec_path) + # Determine the binary rpm names that should be built out of this spec + # file + # Note that some of these may not be really built (if the file + # list is empty) + nvr_string = "%{name}-%{version}-%{release}" + src_rpm = nvr_string + ".src.rpm" + non_src_rpm = "%{arch}/" + nvr_string + ".%{arch}.rpm" + q_cmd = r"rpm -q --qf '%s %s\n' --specfile '%s'" % ( + src_rpm, non_src_rpm, spec_path) + + out = os.popen(q_cmd) + try: + binary_rpms = [] + source_rpm = None + while True: + line = out.readline() + if not line: + break + l = line.strip().split() + assert(len(l) == 2) + binary_rpms.append(l[1]) + # The source rpm is named after the first entry in the spec file + if source_rpm is None: + source_rpm = l[0] + + status = out.close() + if status: + raise DistutilsExecError("Failed to execute: %s" % repr(q_cmd)) + + finally: + out.close() + + self.spawn(rpm_cmd) + + if not self.dry_run: + if self.distribution.has_ext_modules(): + pyversion = get_python_version() + else: + pyversion = 'any' + + if not self.binary_only: + srpm = os.path.join(rpm_dir['SRPMS'], source_rpm) + assert(os.path.exists(srpm)) + self.move_file(srpm, self.dist_dir) + filename = os.path.join(self.dist_dir, source_rpm) + self.distribution.dist_files.append( + ('bdist_rpm', pyversion, filename)) + + if not self.source_only: + for rpm in binary_rpms: + rpm = os.path.join(rpm_dir['RPMS'], rpm) + if os.path.exists(rpm): + self.move_file(rpm, self.dist_dir) + filename = os.path.join(self.dist_dir, + os.path.basename(rpm)) + self.distribution.dist_files.append( + ('bdist_rpm', pyversion, filename)) + + def _dist_path(self, path): + return os.path.join(self.dist_dir, os.path.basename(path)) + + def _make_spec_file(self): + """Generate the text of an RPM spec file and return it as a + list of strings (one per line). 
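The rpm query above still goes through os.popen; below is a rough equivalent using subprocess, assuming rpm is installed and spec_path points at a real spec file (query_rpm_names is an illustrative name, not part of the patch).

    import subprocess

    def query_rpm_names(spec_path):
        nvr = "%{name}-%{version}-%{release}"
        fmt = nvr + ".src.rpm %{arch}/" + nvr + ".%{arch}.rpm\\n"
        out = subprocess.run(["rpm", "-q", "--qf", fmt, "--specfile", spec_path],
                             check=True, capture_output=True, text=True).stdout
        source_rpm, binary_rpms = None, []
        for line in out.splitlines():
            src, binary = line.split()
            binary_rpms.append(binary)
            if source_rpm is None:
                source_rpm = src   # the first entry names the source rpm
        return source_rpm, binary_rpms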
+ """ + # definitions and headers + spec_file = [ + '%define name ' + self.distribution.get_name(), + '%define version ' + self.distribution.get_version().replace('-','_'), + '%define unmangled_version ' + self.distribution.get_version(), + '%define release ' + self.release.replace('-','_'), + '', + 'Summary: ' + self.distribution.get_description(), + ] + + # Workaround for #14443 which affects some RPM based systems such as + # RHEL6 (and probably derivatives) + vendor_hook = subprocess.getoutput('rpm --eval %{__os_install_post}') + # Generate a potential replacement value for __os_install_post (whilst + # normalizing the whitespace to simplify the test for whether the + # invocation of brp-python-bytecompile passes in __python): + vendor_hook = '\n'.join([' %s \\' % line.strip() + for line in vendor_hook.splitlines()]) + problem = "brp-python-bytecompile \\\n" + fixed = "brp-python-bytecompile %{__python} \\\n" + fixed_hook = vendor_hook.replace(problem, fixed) + if fixed_hook != vendor_hook: + spec_file.append('# Workaround for http://bugs.python.org/issue14443') + spec_file.append('%define __os_install_post ' + fixed_hook + '\n') + + # put locale summaries into spec file + # XXX not supported for now (hard to put a dictionary + # in a config file -- arg!) + #for locale in self.summaries.keys(): + # spec_file.append('Summary(%s): %s' % (locale, + # self.summaries[locale])) + + spec_file.extend([ + 'Name: %{name}', + 'Version: %{version}', + 'Release: %{release}',]) + + # XXX yuck! this filename is available from the "sdist" command, + # but only after it has run: and we create the spec file before + # running "sdist", in case of --spec-only. + if self.use_bzip2: + spec_file.append('Source0: %{name}-%{unmangled_version}.tar.bz2') + else: + spec_file.append('Source0: %{name}-%{unmangled_version}.tar.gz') + + spec_file.extend([ + 'License: ' + self.distribution.get_license(), + 'Group: ' + self.group, + 'BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-buildroot', + 'Prefix: %{_prefix}', ]) + + if not self.force_arch: + # noarch if no extension modules + if not self.distribution.has_ext_modules(): + spec_file.append('BuildArch: noarch') + else: + spec_file.append( 'BuildArch: %s' % self.force_arch ) + + for field in ('Vendor', + 'Packager', + 'Provides', + 'Requires', + 'Conflicts', + 'Obsoletes', + ): + val = getattr(self, field.lower()) + if isinstance(val, list): + spec_file.append('%s: %s' % (field, ' '.join(val))) + elif val is not None: + spec_file.append('%s: %s' % (field, val)) + + + if self.distribution.get_url() != 'UNKNOWN': + spec_file.append('Url: ' + self.distribution.get_url()) + + if self.distribution_name: + spec_file.append('Distribution: ' + self.distribution_name) + + if self.build_requires: + spec_file.append('BuildRequires: ' + + ' '.join(self.build_requires)) + + if self.icon: + spec_file.append('Icon: ' + os.path.basename(self.icon)) + + if self.no_autoreq: + spec_file.append('AutoReq: 0') + + spec_file.extend([ + '', + '%description', + self.distribution.get_long_description() + ]) + + # put locale descriptions into spec file + # XXX again, suppressed because config file syntax doesn't + # easily support this ;-( + #for locale in self.descriptions.keys(): + # spec_file.extend([ + # '', + # '%description -l ' + locale, + # self.descriptions[locale], + # ]) + + # rpm scripts + # figure out default build script + def_setup_call = "%s %s" % (self.python,os.path.basename(sys.argv[0])) + def_build = "%s build" % def_setup_call + if self.use_rpm_opt_flags: + def_build 
= 'env CFLAGS="$RPM_OPT_FLAGS" ' + def_build + + # insert contents of files + + # XXX this is kind of misleading: user-supplied options are files + # that we open and interpolate into the spec file, but the defaults + # are just text that we drop in as-is. Hmmm. + + install_cmd = ('%s install -O1 --root=$RPM_BUILD_ROOT ' + '--record=INSTALLED_FILES') % def_setup_call + + script_options = [ + ('prep', 'prep_script', "%setup -n %{name}-%{unmangled_version}"), + ('build', 'build_script', def_build), + ('install', 'install_script', install_cmd), + ('clean', 'clean_script', "rm -rf $RPM_BUILD_ROOT"), + ('verifyscript', 'verify_script', None), + ('pre', 'pre_install', None), + ('post', 'post_install', None), + ('preun', 'pre_uninstall', None), + ('postun', 'post_uninstall', None), + ] + + for (rpm_opt, attr, default) in script_options: + # Insert contents of file referred to, if no file is referred to + # use 'default' as contents of script + val = getattr(self, attr) + if val or default: + spec_file.extend([ + '', + '%' + rpm_opt,]) + if val: + with open(val) as f: + spec_file.extend(f.read().split('\n')) + else: + spec_file.append(default) + + + # files section + spec_file.extend([ + '', + '%files -f INSTALLED_FILES', + '%defattr(-,root,root)', + ]) + + if self.doc_files: + spec_file.append('%doc ' + ' '.join(self.doc_files)) + + if self.changelog: + spec_file.extend([ + '', + '%changelog',]) + spec_file.extend(self.changelog) + + return spec_file + + def _format_changelog(self, changelog): + """Format the changelog correctly and convert it to a list of strings + """ + if not changelog: + return changelog + new_changelog = [] + for line in changelog.strip().split('\n'): + line = line.strip() + if line[0] == '*': + new_changelog.extend(['', line]) + elif line[0] == '-': + new_changelog.append(line) + else: + new_changelog.append(' ' + line) + + # strip trailing newline inserted by first changelog entry + if not new_changelog[0]: + del new_changelog[0] + + return new_changelog diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/command/build.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/command/build.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/command/build.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/command/build.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,157 @@ +"""distutils.command.build + +Implements the Distutils 'build' command.""" + +import sys, os +from distutils.core import Command +from distutils.errors import DistutilsOptionError +from distutils.util import get_platform + + +def show_compilers(): + from distutils.ccompiler import show_compilers + show_compilers() + + +class build(Command): + + description = "build everything needed to install" + + user_options = [ + ('build-base=', 'b', + "base directory for build library"), + ('build-purelib=', None, + "build directory for platform-neutral distributions"), + ('build-platlib=', None, + "build directory for platform-specific distributions"), + ('build-lib=', None, + "build directory for all distribution (defaults to either " + + "build-purelib or build-platlib"), + ('build-scripts=', None, + "build directory for scripts"), + ('build-temp=', 't', + "temporary build directory"), + ('plat-name=', 'p', + "platform name to build for, if supported " + "(default: %s)" % get_platform()), + ('compiler=', 'c', + "specify the compiler type"), + ('parallel=', 'j', + "number of parallel build jobs"), + ('debug', 'g', + "compile extensions and libraries with 
debugging information"), + ('force', 'f', + "forcibly build everything (ignore file timestamps)"), + ('executable=', 'e', + "specify final destination interpreter path (build.py)"), + ] + + boolean_options = ['debug', 'force'] + + help_options = [ + ('help-compiler', None, + "list available compilers", show_compilers), + ] + + def initialize_options(self): + self.build_base = 'build' + # these are decided only after 'build_base' has its final value + # (unless overridden by the user or client) + self.build_purelib = None + self.build_platlib = None + self.build_lib = None + self.build_temp = None + self.build_scripts = None + self.compiler = None + self.plat_name = None + self.debug = None + self.force = 0 + self.executable = None + self.parallel = None + + def finalize_options(self): + if self.plat_name is None: + self.plat_name = get_platform() + else: + # plat-name only supported for windows (other platforms are + # supported via ./configure flags, if at all). Avoid misleading + # other platforms. + if os.name != 'nt': + raise DistutilsOptionError( + "--plat-name only supported on Windows (try " + "using './configure --help' on your platform)") + + plat_specifier = ".%s-%d.%d" % (self.plat_name, *sys.version_info[:2]) + + # Make it so Python 2.x and Python 2.x with --with-pydebug don't + # share the same build directories. Doing so confuses the build + # process for C modules + if hasattr(sys, 'gettotalrefcount'): + plat_specifier += '-pydebug' + + # 'build_purelib' and 'build_platlib' just default to 'lib' and + # 'lib.' under the base build directory. We only use one of + # them for a given distribution, though -- + if self.build_purelib is None: + self.build_purelib = os.path.join(self.build_base, 'lib') + if self.build_platlib is None: + self.build_platlib = os.path.join(self.build_base, + 'lib' + plat_specifier) + + # 'build_lib' is the actual directory that we will use for this + # particular module distribution -- if user didn't supply it, pick + # one of 'build_purelib' or 'build_platlib'. + if self.build_lib is None: + if self.distribution.ext_modules: + self.build_lib = self.build_platlib + else: + self.build_lib = self.build_purelib + + # 'build_temp' -- temporary directory for compiler turds, + # "build/temp." + if self.build_temp is None: + self.build_temp = os.path.join(self.build_base, + 'temp' + plat_specifier) + if self.build_scripts is None: + self.build_scripts = os.path.join(self.build_base, + 'scripts-%d.%d' % sys.version_info[:2]) + + if self.executable is None and sys.executable: + self.executable = os.path.normpath(sys.executable) + + if isinstance(self.parallel, str): + try: + self.parallel = int(self.parallel) + except ValueError: + raise DistutilsOptionError("parallel should be an integer") + + def run(self): + # Run all relevant sub-commands. 
This will be some subset of: + # - build_py - pure Python modules + # - build_clib - standalone C libraries + # - build_ext - Python extensions + # - build_scripts - (Python) scripts + for cmd_name in self.get_sub_commands(): + self.run_command(cmd_name) + + + # -- Predicates for the sub-command list --------------------------- + + def has_pure_modules(self): + return self.distribution.has_pure_modules() + + def has_c_libraries(self): + return self.distribution.has_c_libraries() + + def has_ext_modules(self): + return self.distribution.has_ext_modules() + + def has_scripts(self): + return self.distribution.has_scripts() + + + sub_commands = [('build_py', has_pure_modules), + ('build_clib', has_c_libraries), + ('build_ext', has_ext_modules), + ('build_scripts', has_scripts), + ] diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/command/build_clib.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/command/build_clib.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/command/build_clib.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/command/build_clib.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,209 @@ +"""distutils.command.build_clib + +Implements the Distutils 'build_clib' command, to build a C/C++ library +that is included in the module distribution and needed by an extension +module.""" + + +# XXX this module has *lots* of code ripped-off quite transparently from +# build_ext.py -- not surprisingly really, as the work required to build +# a static library from a collection of C source files is not really all +# that different from what's required to build a shared object file from +# a collection of C source files. Nevertheless, I haven't done the +# necessary refactoring to account for the overlap in code between the +# two modules, mainly because a number of subtle details changed in the +# cut 'n paste. Sigh. + +import os +from distutils.core import Command +from distutils.errors import * +from distutils.sysconfig import customize_compiler +from distutils import log + +def show_compilers(): + from distutils.ccompiler import show_compilers + show_compilers() + + +class build_clib(Command): + + description = "build C/C++ libraries used by Python extensions" + + user_options = [ + ('build-clib=', 'b', + "directory to build C/C++ libraries to"), + ('build-temp=', 't', + "directory to put temporary build by-products"), + ('debug', 'g', + "compile with debugging information"), + ('force', 'f', + "forcibly build everything (ignore file timestamps)"), + ('compiler=', 'c', + "specify the compiler type"), + ] + + boolean_options = ['debug', 'force'] + + help_options = [ + ('help-compiler', None, + "list available compilers", show_compilers), + ] + + def initialize_options(self): + self.build_clib = None + self.build_temp = None + + # List of libraries to build + self.libraries = None + + # Compilation options for all libraries + self.include_dirs = None + self.define = None + self.undef = None + self.debug = None + self.force = 0 + self.compiler = None + + + def finalize_options(self): + # This might be confusing: both build-clib and build-temp default + # to build-temp as defined by the "build" command. This is because + # I think that C libraries are really just temporary build + # by-products, at least from the point of view of building Python + # extensions -- but I want to keep my options open. 
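The sub_commands table above is the whole dispatch mechanism: each entry pairs a command name with a predicate, and a step runs only when its predicate reports work to do. A toy version of that pattern (FakeDist and the lambdas are stand-ins, not distutils API):

    class FakeDist:
        packages = ["spam"]    # pure-Python package  -> build_py runs
        ext_modules = []       # no C extensions      -> build_ext is skipped

    sub_commands = [
        ("build_py",  lambda dist: bool(dist.packages)),
        ("build_ext", lambda dist: bool(dist.ext_modules)),
    ]

    def plan(dist):
        return [name for name, predicate in sub_commands if predicate(dist)]

    print(plan(FakeDist()))    # ['build_py']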
+ self.set_undefined_options('build', + ('build_temp', 'build_clib'), + ('build_temp', 'build_temp'), + ('compiler', 'compiler'), + ('debug', 'debug'), + ('force', 'force')) + + self.libraries = self.distribution.libraries + if self.libraries: + self.check_library_list(self.libraries) + + if self.include_dirs is None: + self.include_dirs = self.distribution.include_dirs or [] + if isinstance(self.include_dirs, str): + self.include_dirs = self.include_dirs.split(os.pathsep) + + # XXX same as for build_ext -- what about 'self.define' and + # 'self.undef' ? + + + def run(self): + if not self.libraries: + return + + # Yech -- this is cut 'n pasted from build_ext.py! + from distutils.ccompiler import new_compiler + self.compiler = new_compiler(compiler=self.compiler, + dry_run=self.dry_run, + force=self.force) + customize_compiler(self.compiler) + + if self.include_dirs is not None: + self.compiler.set_include_dirs(self.include_dirs) + if self.define is not None: + # 'define' option is a list of (name,value) tuples + for (name,value) in self.define: + self.compiler.define_macro(name, value) + if self.undef is not None: + for macro in self.undef: + self.compiler.undefine_macro(macro) + + self.build_libraries(self.libraries) + + + def check_library_list(self, libraries): + """Ensure that the list of libraries is valid. + + `library` is presumably provided as a command option 'libraries'. + This method checks that it is a list of 2-tuples, where the tuples + are (library_name, build_info_dict). + + Raise DistutilsSetupError if the structure is invalid anywhere; + just returns otherwise. + """ + if not isinstance(libraries, list): + raise DistutilsSetupError( + "'libraries' option must be a list of tuples") + + for lib in libraries: + if not isinstance(lib, tuple) and len(lib) != 2: + raise DistutilsSetupError( + "each element of 'libraries' must a 2-tuple") + + name, build_info = lib + + if not isinstance(name, str): + raise DistutilsSetupError( + "first element of each tuple in 'libraries' " + "must be a string (the library name)") + + if '/' in name or (os.sep != '/' and os.sep in name): + raise DistutilsSetupError("bad library name '%s': " + "may not contain directory separators" % lib[0]) + + if not isinstance(build_info, dict): + raise DistutilsSetupError( + "second element of each tuple in 'libraries' " + "must be a dictionary (build info)") + + + def get_library_names(self): + # Assume the library list is valid -- 'check_library_list()' is + # called from 'finalize_options()', so it should be! 
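check_library_list above only validates shape, so it helps to see the shape it wants: a list of (name, build_info) pairs whose info dict at least names its sources. The library below is made up for illustration; the asserts mirror the command's own checks.

    import os

    libraries = [
        ("spamutil", {"sources": ["spamutil.c", "spamhash.c"],
                      "include_dirs": ["include"],
                      "macros": [("NDEBUG", "1")]}),
    ]

    for name, build_info in libraries:
        # A plain string name with no path separators, plus a dict of build info.
        assert isinstance(name, str)
        assert "/" not in name and os.sep not in name
        assert isinstance(build_info, dict)
        assert isinstance(build_info.get("sources"), (list, tuple))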
+ if not self.libraries: + return None + + lib_names = [] + for (lib_name, build_info) in self.libraries: + lib_names.append(lib_name) + return lib_names + + + def get_source_files(self): + self.check_library_list(self.libraries) + filenames = [] + for (lib_name, build_info) in self.libraries: + sources = build_info.get('sources') + if sources is None or not isinstance(sources, (list, tuple)): + raise DistutilsSetupError( + "in 'libraries' option (library '%s'), " + "'sources' must be present and must be " + "a list of source filenames" % lib_name) + + filenames.extend(sources) + return filenames + + + def build_libraries(self, libraries): + for (lib_name, build_info) in libraries: + sources = build_info.get('sources') + if sources is None or not isinstance(sources, (list, tuple)): + raise DistutilsSetupError( + "in 'libraries' option (library '%s'), " + "'sources' must be present and must be " + "a list of source filenames" % lib_name) + sources = list(sources) + + log.info("building '%s' library", lib_name) + + # First, compile the source code to object files in the library + # directory. (This should probably change to putting object + # files in a temporary build directory.) + macros = build_info.get('macros') + include_dirs = build_info.get('include_dirs') + objects = self.compiler.compile(sources, + output_dir=self.build_temp, + macros=macros, + include_dirs=include_dirs, + debug=self.debug) + + # Now "link" the object files together into a static library. + # (On Unix at least, this isn't really linking -- it just + # builds an archive. Whatever.) + self.compiler.create_static_lib(objects, lib_name, + output_dir=self.build_clib, + debug=self.debug) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/command/build_ext.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/command/build_ext.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/command/build_ext.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/command/build_ext.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,754 @@ +"""distutils.command.build_ext + +Implements the Distutils 'build_ext' command, for building extension +modules (currently limited to C extensions, should accommodate C++ +extensions ASAP).""" + +import contextlib +import os +import re +import sys +from distutils.core import Command +from distutils.errors import * +from distutils.sysconfig import customize_compiler, get_python_version +from distutils.sysconfig import get_config_h_filename +from distutils.dep_util import newer_group +from distutils.extension import Extension +from distutils.util import get_platform +from distutils import log + +from site import USER_BASE + +# An extension name is just a dot-separated list of Python NAMEs (ie. +# the same as a fully-qualified module name). +extension_name_re = re.compile \ + (r'^[a-zA-Z_][a-zA-Z_0-9]*(\.[a-zA-Z_][a-zA-Z_0-9]*)*$') + + +def show_compilers (): + from distutils.ccompiler import show_compilers + show_compilers() + + +class build_ext(Command): + + description = "build C/C++ extensions (compile/link to build directory)" + + # XXX thoughts on how to deal with complex command-line options like + # these, i.e. how to make it so fancy_getopt can suck them off the + # command line and make it look like setup.py defined the appropriate + # lists of tuples of what-have-you. 
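build_libraries above boils down to one compile step and one archive step. A minimal sketch of those two CCompiler calls in isolation, assuming a real spamutil.c sits next to the script (and still using the distutils API this patch ships, which PEP 632 deprecates):

    from distutils.ccompiler import new_compiler
    from distutils.sysconfig import customize_compiler

    cc = new_compiler()
    customize_compiler(cc)        # pick up CC/CFLAGS the way distutils does

    # Step 1: C sources -> object files in a temporary build directory.
    objects = cc.compile(["spamutil.c"],
                         output_dir="build/temp",
                         macros=[("NDEBUG", "1")],
                         include_dirs=["include"])

    # Step 2: object files -> a static archive (libspamutil.a on Unix).
    cc.create_static_lib(objects, "spamutil", output_dir="build/clib")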
+ # - each command needs a callback to process its command-line options + # - Command.__init__() needs access to its share of the whole + # command line (must ultimately come from + # Distribution.parse_command_line()) + # - it then calls the current command class' option-parsing + # callback to deal with weird options like -D, which have to + # parse the option text and churn out some custom data + # structure + # - that data structure (in this case, a list of 2-tuples) + # will then be present in the command object by the time + # we get to finalize_options() (i.e. the constructor + # takes care of both command-line and client options + # in between initialize_options() and finalize_options()) + + sep_by = " (separated by '%s')" % os.pathsep + user_options = [ + ('build-lib=', 'b', + "directory for compiled extension modules"), + ('build-temp=', 't', + "directory for temporary files (build by-products)"), + ('plat-name=', 'p', + "platform name to cross-compile for, if supported " + "(default: %s)" % get_platform()), + ('inplace', 'i', + "ignore build-lib and put compiled extensions into the source " + + "directory alongside your pure Python modules"), + ('include-dirs=', 'I', + "list of directories to search for header files" + sep_by), + ('define=', 'D', + "C preprocessor macros to define"), + ('undef=', 'U', + "C preprocessor macros to undefine"), + ('libraries=', 'l', + "external C libraries to link with"), + ('library-dirs=', 'L', + "directories to search for external C libraries" + sep_by), + ('rpath=', 'R', + "directories to search for shared C libraries at runtime"), + ('link-objects=', 'O', + "extra explicit link objects to include in the link"), + ('debug', 'g', + "compile/link with debugging information"), + ('force', 'f', + "forcibly build everything (ignore file timestamps)"), + ('compiler=', 'c', + "specify the compiler type"), + ('parallel=', 'j', + "number of parallel build jobs"), + ('swig-cpp', None, + "make SWIG create C++ files (default is C)"), + ('swig-opts=', None, + "list of SWIG command line options"), + ('swig=', None, + "path to the SWIG executable"), + ('user', None, + "add user include, library and rpath") + ] + + boolean_options = ['inplace', 'debug', 'force', 'swig-cpp', 'user'] + + help_options = [ + ('help-compiler', None, + "list available compilers", show_compilers), + ] + + def initialize_options(self): + self.extensions = None + self.build_lib = None + self.plat_name = None + self.build_temp = None + self.inplace = 0 + self.package = None + + self.include_dirs = None + self.define = None + self.undef = None + self.libraries = None + self.library_dirs = None + self.rpath = None + self.link_objects = None + self.debug = None + self.force = None + self.compiler = None + self.swig = None + self.swig_cpp = None + self.swig_opts = None + self.user = None + self.parallel = None + + def finalize_options(self): + from distutils import sysconfig + + self.set_undefined_options('build', + ('build_lib', 'build_lib'), + ('build_temp', 'build_temp'), + ('compiler', 'compiler'), + ('debug', 'debug'), + ('force', 'force'), + ('parallel', 'parallel'), + ('plat_name', 'plat_name'), + ) + + if self.package is None: + self.package = self.distribution.ext_package + + self.extensions = self.distribution.ext_modules + + # Make sure Python's include directories (for Python.h, pyconfig.h, + # etc.) are in the include search path. 
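The comment above states the ordering rule for header search paths: project-local include directories first, the interpreter's own include directories appended last so they never shadow local ones. Roughly the same assembly using the maintained sysconfig module ("include" is a placeholder project directory):

    import sysconfig

    include_dirs = ["include"]                               # local headers first
    include_dirs.append(sysconfig.get_path("include"))       # Python.h
    include_dirs.append(sysconfig.get_path("platinclude"))   # pyconfig.h on split layouts

    # Drop duplicates while keeping the precedence order intact.
    include_dirs = list(dict.fromkeys(d for d in include_dirs if d))
    print(include_dirs)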
+ py_include = sysconfig.get_python_inc() + plat_py_include = sysconfig.get_python_inc(plat_specific=1) + if self.include_dirs is None: + self.include_dirs = self.distribution.include_dirs or [] + if isinstance(self.include_dirs, str): + self.include_dirs = self.include_dirs.split(os.pathsep) + + # If in a virtualenv, add its include directory + # Issue 16116 + if sys.exec_prefix != sys.base_exec_prefix: + self.include_dirs.append(os.path.join(sys.exec_prefix, 'include')) + + # Put the Python "system" include dir at the end, so that + # any local include dirs take precedence. + self.include_dirs.extend(py_include.split(os.path.pathsep)) + if plat_py_include != py_include: + self.include_dirs.extend( + plat_py_include.split(os.path.pathsep)) + + self.ensure_string_list('libraries') + self.ensure_string_list('link_objects') + + # Life is easier if we're not forever checking for None, so + # simplify these options to empty lists if unset + if self.libraries is None: + self.libraries = [] + if self.library_dirs is None: + self.library_dirs = [] + elif isinstance(self.library_dirs, str): + self.library_dirs = self.library_dirs.split(os.pathsep) + + if self.rpath is None: + self.rpath = [] + elif isinstance(self.rpath, str): + self.rpath = self.rpath.split(os.pathsep) + + # for extensions under windows use different directories + # for Release and Debug builds. + # also Python's library directory must be appended to library_dirs + if os.name == 'nt': + # the 'libs' directory is for binary installs - we assume that + # must be the *native* platform. But we don't really support + # cross-compiling via a binary install anyway, so we let it go. + self.library_dirs.append(os.path.join(sys.exec_prefix, 'libs')) + if sys.base_exec_prefix != sys.prefix: # Issue 16116 + self.library_dirs.append(os.path.join(sys.base_exec_prefix, 'libs')) + if self.debug: + self.build_temp = os.path.join(self.build_temp, "Debug") + else: + self.build_temp = os.path.join(self.build_temp, "Release") + + # Append the source distribution include and library directories, + # this allows distutils on windows to work in the source tree + self.include_dirs.append(os.path.dirname(get_config_h_filename())) + _sys_home = getattr(sys, '_home', None) + if _sys_home: + self.library_dirs.append(_sys_home) + + # Use the .lib files for the correct architecture + if self.plat_name == 'win32': + suffix = 'win32' + else: + # win-amd64 + suffix = self.plat_name[4:] + new_lib = os.path.join(sys.exec_prefix, 'PCbuild') + if suffix: + new_lib = os.path.join(new_lib, suffix) + self.library_dirs.append(new_lib) + + # For extensions under Cygwin, Python's library directory must be + # appended to library_dirs + if sys.platform[:6] == 'cygwin': + if sys.executable.startswith(os.path.join(sys.exec_prefix, "bin")): + # building third party extensions + self.library_dirs.append(os.path.join(sys.prefix, "lib", + "python" + get_python_version(), + "config")) + else: + # building python standard extensions + self.library_dirs.append('.') + + # For building extensions with a shared Python library, + # Python's library directory must be appended to library_dirs + # See Issues: #1600860, #4366 + if (sysconfig.get_config_var('Py_ENABLE_SHARED')): + if not sysconfig.python_build: + # building third party extensions + self.library_dirs.append(sysconfig.get_config_var('LIBDIR')) + else: + # building python standard extensions + self.library_dirs.append('.') + + # The argument parsing will result in self.define being a string, but + # it has to be a list of 
2-tuples. All the preprocessor symbols + # specified by the 'define' option will be set to '1'. Multiple + # symbols can be separated with commas. + + if self.define: + defines = self.define.split(',') + self.define = [(symbol, '1') for symbol in defines] + + # The option for macros to undefine is also a string from the + # option parsing, but has to be a list. Multiple symbols can also + # be separated with commas here. + if self.undef: + self.undef = self.undef.split(',') + + if self.swig_opts is None: + self.swig_opts = [] + else: + self.swig_opts = self.swig_opts.split(' ') + + # Finally add the user include and library directories if requested + if self.user: + user_include = os.path.join(USER_BASE, "include") + user_lib = os.path.join(USER_BASE, "lib") + if os.path.isdir(user_include): + self.include_dirs.append(user_include) + if os.path.isdir(user_lib): + self.library_dirs.append(user_lib) + self.rpath.append(user_lib) + + if isinstance(self.parallel, str): + try: + self.parallel = int(self.parallel) + except ValueError: + raise DistutilsOptionError("parallel should be an integer") + + def run(self): + from distutils.ccompiler import new_compiler + + # 'self.extensions', as supplied by setup.py, is a list of + # Extension instances. See the documentation for Extension (in + # distutils.extension) for details. + # + # For backwards compatibility with Distutils 0.8.2 and earlier, we + # also allow the 'extensions' list to be a list of tuples: + # (ext_name, build_info) + # where build_info is a dictionary containing everything that + # Extension instances do except the name, with a few things being + # differently named. We convert these 2-tuples to Extension + # instances as needed. + + if not self.extensions: + return + + # If we were asked to build any C/C++ libraries, make sure that the + # directory where we put them is in the library search path for + # linking extensions. + if self.distribution.has_c_libraries(): + build_clib = self.get_finalized_command('build_clib') + self.libraries.extend(build_clib.get_library_names() or []) + self.library_dirs.append(build_clib.build_clib) + + # Setup the CCompiler object that we'll use to do all the + # compiling and linking + self.compiler = new_compiler(compiler=self.compiler, + verbose=self.verbose, + dry_run=self.dry_run, + force=self.force) + customize_compiler(self.compiler) + # If we are cross-compiling, init the compiler now (if we are not + # cross-compiling, init would not hurt, but people may rely on + # late initialization of compiler even if they shouldn't...) + if os.name == 'nt' and self.plat_name != get_platform(): + self.compiler.initialize(self.plat_name) + + # And make sure that any compile/link-related options (which might + # come from the command-line or from the setup script) are set in + # that CCompiler object -- that way, they automatically apply to + # all compiling and linking done here. 
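The --define/--undef handling described above turns comma-separated strings into the structures the compiler wants; pulled out on its own it is just this (parse_macros is an illustrative name):

    def parse_macros(define=None, undef=None):
        # "FOO,BAR" given to --define becomes [('FOO', '1'), ('BAR', '1')];
        # "BAZ" given to --undef becomes ['BAZ'].
        defines = [(symbol, "1") for symbol in define.split(",")] if define else []
        undefs = undef.split(",") if undef else []
        return defines, undefs

    print(parse_macros("WITH_SPAM,NDEBUG", "DEBUG"))
    # ([('WITH_SPAM', '1'), ('NDEBUG', '1')], ['DEBUG'])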
+ if self.include_dirs is not None: + self.compiler.set_include_dirs(self.include_dirs) + if self.define is not None: + # 'define' option is a list of (name,value) tuples + for (name, value) in self.define: + self.compiler.define_macro(name, value) + if self.undef is not None: + for macro in self.undef: + self.compiler.undefine_macro(macro) + if self.libraries is not None: + self.compiler.set_libraries(self.libraries) + if self.library_dirs is not None: + self.compiler.set_library_dirs(self.library_dirs) + if self.rpath is not None: + self.compiler.set_runtime_library_dirs(self.rpath) + if self.link_objects is not None: + self.compiler.set_link_objects(self.link_objects) + + # Now actually compile and link everything. + self.build_extensions() + + def check_extensions_list(self, extensions): + """Ensure that the list of extensions (presumably provided as a + command option 'extensions') is valid, i.e. it is a list of + Extension objects. We also support the old-style list of 2-tuples, + where the tuples are (ext_name, build_info), which are converted to + Extension instances here. + + Raise DistutilsSetupError if the structure is invalid anywhere; + just returns otherwise. + """ + if not isinstance(extensions, list): + raise DistutilsSetupError( + "'ext_modules' option must be a list of Extension instances") + + for i, ext in enumerate(extensions): + if isinstance(ext, Extension): + continue # OK! (assume type-checking done + # by Extension constructor) + + if not isinstance(ext, tuple) or len(ext) != 2: + raise DistutilsSetupError( + "each element of 'ext_modules' option must be an " + "Extension instance or 2-tuple") + + ext_name, build_info = ext + + log.warn("old-style (ext_name, build_info) tuple found in " + "ext_modules for extension '%s' " + "-- please convert to Extension instance", ext_name) + + if not (isinstance(ext_name, str) and + extension_name_re.match(ext_name)): + raise DistutilsSetupError( + "first element of each tuple in 'ext_modules' " + "must be the extension name (a string)") + + if not isinstance(build_info, dict): + raise DistutilsSetupError( + "second element of each tuple in 'ext_modules' " + "must be a dictionary (build info)") + + # OK, the (ext_name, build_info) dict is type-safe: convert it + # to an Extension instance. + ext = Extension(ext_name, build_info['sources']) + + # Easy stuff: one-to-one mapping from dict elements to + # instance attributes. + for key in ('include_dirs', 'library_dirs', 'libraries', + 'extra_objects', 'extra_compile_args', + 'extra_link_args'): + val = build_info.get(key) + if val is not None: + setattr(ext, key, val) + + # Medium-easy stuff: same syntax/semantics, different names. + ext.runtime_library_dirs = build_info.get('rpath') + if 'def_file' in build_info: + log.warn("'def_file' element of build info dict " + "no longer supported") + + # Non-trivial stuff: 'macros' split into 'define_macros' + # and 'undef_macros'. + macros = build_info.get('macros') + if macros: + ext.define_macros = [] + ext.undef_macros = [] + for macro in macros: + if not (isinstance(macro, tuple) and len(macro) in (1, 2)): + raise DistutilsSetupError( + "'macros' element of build info dict " + "must be 1- or 2-tuple") + if len(macro) == 1: + ext.undef_macros.append(macro[0]) + elif len(macro) == 2: + ext.define_macros.append(macro) + + extensions[i] = ext + + def get_source_files(self): + self.check_extensions_list(self.extensions) + filenames = [] + + # Wouldn't it be neat if we knew the names of header files too... 
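check_extensions_list above upgrades legacy (ext_name, build_info) tuples into Extension objects. The conversion, compressed to its essentials with a made-up extension name and sources:

    from distutils.extension import Extension

    old_style = ("pkg._speedups", {"sources": ["pkg/_speedups.c"],
                                   "libraries": ["m"],
                                   "macros": [("WITH_SPAM", "1"), ("DEBUG",)]})

    name, build_info = old_style
    ext = Extension(name, build_info["sources"],
                    libraries=build_info.get("libraries", []))
    # Same split as above: 2-tuples define macros, 1-tuples undefine them.
    ext.define_macros = [m for m in build_info.get("macros", []) if len(m) == 2]
    ext.undef_macros = [m[0] for m in build_info.get("macros", []) if len(m) == 1]
    print(ext.name, ext.define_macros, ext.undef_macros)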
+ for ext in self.extensions: + filenames.extend(ext.sources) + return filenames + + def get_outputs(self): + # Sanity check the 'extensions' list -- can't assume this is being + # done in the same run as a 'build_extensions()' call (in fact, we + # can probably assume that it *isn't*!). + self.check_extensions_list(self.extensions) + + # And build the list of output (built) filenames. Note that this + # ignores the 'inplace' flag, and assumes everything goes in the + # "build" tree. + outputs = [] + for ext in self.extensions: + outputs.append(self.get_ext_fullpath(ext.name)) + return outputs + + def build_extensions(self): + # First, sanity-check the 'extensions' list + self.check_extensions_list(self.extensions) + if self.parallel: + self._build_extensions_parallel() + else: + self._build_extensions_serial() + + def _build_extensions_parallel(self): + workers = self.parallel + if self.parallel is True: + workers = os.cpu_count() # may return None + try: + from concurrent.futures import ThreadPoolExecutor + except ImportError: + workers = None + + if workers is None: + self._build_extensions_serial() + return + + with ThreadPoolExecutor(max_workers=workers) as executor: + futures = [executor.submit(self.build_extension, ext) + for ext in self.extensions] + for ext, fut in zip(self.extensions, futures): + with self._filter_build_errors(ext): + fut.result() + + def _build_extensions_serial(self): + for ext in self.extensions: + with self._filter_build_errors(ext): + self.build_extension(ext) + + @contextlib.contextmanager + def _filter_build_errors(self, ext): + try: + yield + except (CCompilerError, DistutilsError, CompileError) as e: + if not ext.optional: + raise + self.warn('building extension "%s" failed: %s' % + (ext.name, e)) + + def build_extension(self, ext): + sources = ext.sources + if sources is None or not isinstance(sources, (list, tuple)): + raise DistutilsSetupError( + "in 'ext_modules' option (extension '%s'), " + "'sources' must be present and must be " + "a list of source filenames" % ext.name) + # sort to make the resulting .so file build reproducible + sources = sorted(sources) + + ext_path = self.get_ext_fullpath(ext.name) + depends = sources + ext.depends + if not (self.force or newer_group(depends, ext_path, 'newer')): + log.debug("skipping '%s' extension (up-to-date)", ext.name) + return + else: + log.info("building '%s' extension", ext.name) + + # First, scan the sources for SWIG definition files (.i), run + # SWIG on 'em to create .c files, and modify the sources list + # accordingly. + sources = self.swig_sources(sources, ext) + + # Next, compile the source code to object files. + + # XXX not honouring 'define_macros' or 'undef_macros' -- the + # CCompiler API needs to change to accommodate this, and I + # want to do one thing at a time! + + # Two possible sources for extra compiler arguments: + # - 'extra_compile_args' in Extension object + # - CFLAGS environment variable (not particularly + # elegant, but people seem to expect it and I + # guess it's useful) + # The environment variable should take precedence, and + # any sensible compiler will give precedence to later + # command line args. 
Hence we combine them in order: + extra_args = ext.extra_compile_args or [] + + macros = ext.define_macros[:] + for undef in ext.undef_macros: + macros.append((undef,)) + + objects = self.compiler.compile(sources, + output_dir=self.build_temp, + macros=macros, + include_dirs=ext.include_dirs, + debug=self.debug, + extra_postargs=extra_args, + depends=ext.depends) + + # XXX outdated variable, kept here in case third-part code + # needs it. + self._built_objects = objects[:] + + # Now link the object files together into a "shared object" -- + # of course, first we have to figure out all the other things + # that go into the mix. + if ext.extra_objects: + objects.extend(ext.extra_objects) + extra_args = ext.extra_link_args or [] + + # Detect target language, if not provided + language = ext.language or self.compiler.detect_language(sources) + + self.compiler.link_shared_object( + objects, ext_path, + libraries=self.get_libraries(ext), + library_dirs=ext.library_dirs, + runtime_library_dirs=ext.runtime_library_dirs, + extra_postargs=extra_args, + export_symbols=self.get_export_symbols(ext), + debug=self.debug, + build_temp=self.build_temp, + target_lang=language) + + def swig_sources(self, sources, extension): + """Walk the list of source files in 'sources', looking for SWIG + interface (.i) files. Run SWIG on all that are found, and + return a modified 'sources' list with SWIG source files replaced + by the generated C (or C++) files. + """ + new_sources = [] + swig_sources = [] + swig_targets = {} + + # XXX this drops generated C/C++ files into the source tree, which + # is fine for developers who want to distribute the generated + # source -- but there should be an option to put SWIG output in + # the temp dir. + + if self.swig_cpp: + log.warn("--swig-cpp is deprecated - use --swig-opts=-c++") + + if self.swig_cpp or ('-c++' in self.swig_opts) or \ + ('-c++' in extension.swig_opts): + target_ext = '.cpp' + else: + target_ext = '.c' + + for source in sources: + (base, ext) = os.path.splitext(source) + if ext == ".i": # SWIG interface file + new_sources.append(base + '_wrap' + target_ext) + swig_sources.append(source) + swig_targets[source] = new_sources[-1] + else: + new_sources.append(source) + + if not swig_sources: + return new_sources + + swig = self.swig or self.find_swig() + swig_cmd = [swig, "-python"] + swig_cmd.extend(self.swig_opts) + if self.swig_cpp: + swig_cmd.append("-c++") + + # Do not override commandline arguments + if not self.swig_opts: + for o in extension.swig_opts: + swig_cmd.append(o) + + for source in swig_sources: + target = swig_targets[source] + log.info("swigging %s to %s", source, target) + self.spawn(swig_cmd + ["-o", target, source]) + + return new_sources + + def find_swig(self): + """Return the name of the SWIG executable. On Unix, this is + just "swig" -- it should be in the PATH. Tries a bit harder on + Windows. + """ + if os.name == "posix": + return "swig" + elif os.name == "nt": + # Look for SWIG in its standard installation directory on + # Windows (or so I presume!). If we find it there, great; + # if not, act like Unix and assume it's in the PATH. 
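swig_sources above rewrites the source list so every .i interface file is replaced by the wrapper SWIG will generate for it. Stripped of the actual SWIG invocation, the rewrite is only this (swigged_sources is an illustrative name):

    import os

    def swigged_sources(sources, cpp=False):
        target_ext = ".cpp" if cpp else ".c"
        rewritten = []
        for src in sources:
            base, ext = os.path.splitext(src)
            rewritten.append(base + "_wrap" + target_ext if ext == ".i" else src)
        return rewritten

    print(swigged_sources(["spam.c", "spam_api.i"]))
    # ['spam.c', 'spam_api_wrap.c']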
+ for vers in ("1.3", "1.2", "1.1"): + fn = os.path.join("c:\\swig%s" % vers, "swig.exe") + if os.path.isfile(fn): + return fn + else: + return "swig.exe" + else: + raise DistutilsPlatformError( + "I don't know how to find (much less run) SWIG " + "on platform '%s'" % os.name) + + # -- Name generators ----------------------------------------------- + # (extension names, filenames, whatever) + def get_ext_fullpath(self, ext_name): + """Returns the path of the filename for a given extension. + + The file is located in `build_lib` or directly in the package + (inplace option). + """ + fullname = self.get_ext_fullname(ext_name) + modpath = fullname.split('.') + filename = self.get_ext_filename(modpath[-1]) + + if not self.inplace: + # no further work needed + # returning : + # build_dir/package/path/filename + filename = os.path.join(*modpath[:-1]+[filename]) + return os.path.join(self.build_lib, filename) + + # the inplace option requires to find the package directory + # using the build_py command for that + package = '.'.join(modpath[0:-1]) + build_py = self.get_finalized_command('build_py') + package_dir = os.path.abspath(build_py.get_package_dir(package)) + + # returning + # package_dir/filename + return os.path.join(package_dir, filename) + + def get_ext_fullname(self, ext_name): + """Returns the fullname of a given extension name. + + Adds the `package.` prefix""" + if self.package is None: + return ext_name + else: + return self.package + '.' + ext_name + + def get_ext_filename(self, ext_name): + r"""Convert the name of an extension (eg. "foo.bar") into the name + of the file from which it will be loaded (eg. "foo/bar.so", or + "foo\bar.pyd"). + """ + from distutils.sysconfig import get_config_var + ext_path = ext_name.split('.') + ext_suffix = get_config_var('EXT_SUFFIX') + return os.path.join(*ext_path) + ext_suffix + + def get_export_symbols(self, ext): + """Return the list of symbols that a shared extension has to + export. This either uses 'ext.export_symbols' or, if it's not + provided, "PyInit_" + module_name. Only relevant on Windows, where + the .pyd file (DLL) must export the module "PyInit_" function. + """ + suffix = '_' + ext.name.split('.')[-1] + try: + # Unicode module name support as defined in PEP-489 + # https://peps.python.org/pep-0489/#export-hook-name + suffix.encode('ascii') + except UnicodeEncodeError: + suffix = 'U' + suffix.encode('punycode').replace(b'-', b'_').decode('ascii') + + initfunc_name = "PyInit" + suffix + if initfunc_name not in ext.export_symbols: + ext.export_symbols.append(initfunc_name) + return ext.export_symbols + + def get_libraries(self, ext): + """Return the list of libraries to link against when building a + shared extension. On most platforms, this is just 'ext.libraries'; + on Windows, we add the Python library (eg. python20.dll). + """ + # The python library is always needed on Windows. For MSVC, this + # is redundant, since the library is mentioned in a pragma in + # pyconfig.h that MSVC groks. The other Windows compilers all seem + # to need it mentioned explicitly, though, so that's what we do. + # Append '_d' to the python import library on debug builds. 
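get_export_symbols above is where PEP 489's unicode module names surface: a non-ASCII final name component is punycode-encoded into the PyInit symbol. The same logic as a free function (init_symbol is an illustrative name):

    def init_symbol(module_name):
        suffix = "_" + module_name.split(".")[-1]
        try:
            suffix.encode("ascii")
        except UnicodeEncodeError:
            suffix = "U" + suffix.encode("punycode").replace(b"-", b"_").decode("ascii")
        return "PyInit" + suffix

    print(init_symbol("pkg._speedups"))   # PyInit__speedups
    print(init_symbol("pkg.módulo"))      # a PyInitU_... name, punycode-encoded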
+ if sys.platform == "win32": + from distutils._msvccompiler import MSVCCompiler + if not isinstance(self.compiler, MSVCCompiler): + template = "python%d%d" + if self.debug: + template = template + '_d' + pythonlib = (template % + (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) + # don't extend ext.libraries, it may be shared with other + # extensions, it is a reference to the original list + return ext.libraries + [pythonlib] + else: + # On Android only the main executable and LD_PRELOADs are considered + # to be RTLD_GLOBAL, all the dependencies of the main executable + # remain RTLD_LOCAL and so the shared libraries must be linked with + # libpython when python is built with a shared python library (issue + # bpo-21536). + # On Cygwin (and if required, other POSIX-like platforms based on + # Windows like MinGW) it is simply necessary that all symbols in + # shared libraries are resolved at link time. + from distutils.sysconfig import get_config_var + link_libpython = False + if get_config_var('Py_ENABLE_SHARED'): + # A native build on an Android device or on Cygwin + if hasattr(sys, 'getandroidapilevel'): + link_libpython = True + elif sys.platform == 'cygwin': + link_libpython = True + elif '_PYTHON_HOST_PLATFORM' in os.environ: + # We are cross-compiling for one of the relevant platforms + if get_config_var('ANDROID_API_LEVEL') != 0: + link_libpython = True + elif get_config_var('MACHDEP') == 'cygwin': + link_libpython = True + + if link_libpython: + ldversion = get_config_var('LDVERSION') + return ext.libraries + ['python' + ldversion] + + return ext.libraries diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/command/build_py.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/command/build_py.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/command/build_py.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/command/build_py.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,416 @@ +"""distutils.command.build_py + +Implements the Distutils 'build_py' command.""" + +import os +import importlib.util +import sys +import glob + +from distutils.core import Command +from distutils.errors import * +from distutils.util import convert_path, Mixin2to3 +from distutils import log + +class build_py (Command): + + description = "\"build\" pure Python modules (copy to build directory)" + + user_options = [ + ('build-lib=', 'd', "directory to \"build\" (copy) to"), + ('compile', 'c', "compile .py to .pyc"), + ('no-compile', None, "don't compile .py files [default]"), + ('optimize=', 'O', + "also compile with optimization: -O1 for \"python -O\", " + "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"), + ('force', 'f', "forcibly build everything (ignore file timestamps)"), + ] + + boolean_options = ['compile', 'force'] + negative_opt = {'no-compile' : 'compile'} + + def initialize_options(self): + self.build_lib = None + self.py_modules = None + self.package = None + self.package_data = None + self.package_dir = None + self.compile = 0 + self.optimize = 0 + self.force = None + + def finalize_options(self): + self.set_undefined_options('build', + ('build_lib', 'build_lib'), + ('force', 'force')) + + # Get the distribution options that are aliases for build_py + # options -- list of packages and list of modules. 
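The Windows import-library name computed in get_libraries above comes from sys.hexversion rather than sys.version_info; the bit arithmetic is easy to verify on any platform (windows_pythonlib is an illustrative helper, not distutils API).

    import sys

    def windows_pythonlib(debug=False):
        # Top byte of sys.hexversion is the major version, the next byte the minor.
        major = sys.hexversion >> 24
        minor = (sys.hexversion >> 16) & 0xff
        return "python%d%d%s" % (major, minor, "_d" if debug else "")

    print(windows_pythonlib())            # e.g. 'python311'
    print(windows_pythonlib(debug=True))  # e.g. 'python311_d'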
+ self.packages = self.distribution.packages + self.py_modules = self.distribution.py_modules + self.package_data = self.distribution.package_data + self.package_dir = {} + if self.distribution.package_dir: + for name, path in self.distribution.package_dir.items(): + self.package_dir[name] = convert_path(path) + self.data_files = self.get_data_files() + + # Ick, copied straight from install_lib.py (fancy_getopt needs a + # type system! Hell, *everything* needs a type system!!!) + if not isinstance(self.optimize, int): + try: + self.optimize = int(self.optimize) + assert 0 <= self.optimize <= 2 + except (ValueError, AssertionError): + raise DistutilsOptionError("optimize must be 0, 1, or 2") + + def run(self): + # XXX copy_file by default preserves atime and mtime. IMHO this is + # the right thing to do, but perhaps it should be an option -- in + # particular, a site administrator might want installed files to + # reflect the time of installation rather than the last + # modification time before the installed release. + + # XXX copy_file by default preserves mode, which appears to be the + # wrong thing to do: if a file is read-only in the working + # directory, we want it to be installed read/write so that the next + # installation of the same module distribution can overwrite it + # without problems. (This might be a Unix-specific issue.) Thus + # we turn off 'preserve_mode' when copying to the build directory, + # since the build directory is supposed to be exactly what the + # installation will look like (ie. we preserve mode when + # installing). + + # Two options control which modules will be installed: 'packages' + # and 'py_modules'. The former lets us work with whole packages, not + # specifying individual modules at all; the latter is for + # specifying modules one-at-a-time. 
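A minimal sketch of the two spellings as they reach the Distribution object that build_py reads back (all names are invented):

from distutils.dist import Distribution

# 'packages' pulls in every .py inside demopkg/, while 'py_modules' names
# top-level modules such as ./standalone_helper.py one at a time.
dist = Distribution({"packages": ["demopkg"],
                     "py_modules": ["standalone_helper"]})
assert dist.packages == ["demopkg"]
assert dist.py_modules == ["standalone_helper"]
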
+ + if self.py_modules: + self.build_modules() + if self.packages: + self.build_packages() + self.build_package_data() + + self.byte_compile(self.get_outputs(include_bytecode=0)) + + def get_data_files(self): + """Generate list of '(package,src_dir,build_dir,filenames)' tuples""" + data = [] + if not self.packages: + return data + for package in self.packages: + # Locate package source directory + src_dir = self.get_package_dir(package) + + # Compute package build directory + build_dir = os.path.join(*([self.build_lib] + package.split('.'))) + + # Length of path to strip from found files + plen = 0 + if src_dir: + plen = len(src_dir)+1 + + # Strip directory from globbed filenames + filenames = [ + file[plen:] for file in self.find_data_files(package, src_dir) + ] + data.append((package, src_dir, build_dir, filenames)) + return data + + def find_data_files(self, package, src_dir): + """Return filenames for package's data files in 'src_dir'""" + globs = (self.package_data.get('', []) + + self.package_data.get(package, [])) + files = [] + for pattern in globs: + # Each pattern has to be converted to a platform-specific path + filelist = glob.glob(os.path.join(glob.escape(src_dir), convert_path(pattern))) + # Files that match more than one pattern are only added once + files.extend([fn for fn in filelist if fn not in files + and os.path.isfile(fn)]) + return files + + def build_package_data(self): + """Copy data files into build directory""" + lastdir = None + for package, src_dir, build_dir, filenames in self.data_files: + for filename in filenames: + target = os.path.join(build_dir, filename) + self.mkpath(os.path.dirname(target)) + self.copy_file(os.path.join(src_dir, filename), target, + preserve_mode=False) + + def get_package_dir(self, package): + """Return the directory, relative to the top of the source + distribution, where package 'package' should be found + (at least according to the 'package_dir' option, if any).""" + path = package.split('.') + + if not self.package_dir: + if path: + return os.path.join(*path) + else: + return '' + else: + tail = [] + while path: + try: + pdir = self.package_dir['.'.join(path)] + except KeyError: + tail.insert(0, path[-1]) + del path[-1] + else: + tail.insert(0, pdir) + return os.path.join(*tail) + else: + # Oops, got all the way through 'path' without finding a + # match in package_dir. If package_dir defines a directory + # for the root (nameless) package, then fallback on it; + # otherwise, we might as well have not consulted + # package_dir at all, as we just use the directory implied + # by 'tail' (which should be the same as the original value + # of 'path' at this point). + pdir = self.package_dir.get('') + if pdir is not None: + tail.insert(0, pdir) + + if tail: + return os.path.join(*tail) + else: + return '' + + def check_package(self, package, package_dir): + # Empty dir name means current directory, which we can probably + # assume exists. Also, os.path.exists and isdir don't know about + # my "empty string means current dir" convention, so we have to + # circumvent them. 
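The directory checked here comes from get_package_dir() above; a short sketch of the lookup it performs, assuming a hypothetical package_dir mapping:

import os
from distutils.dist import Distribution
from distutils.command.build_py import build_py

# Hypothetical mapping: the root package lives under src/, and foo.bar has
# its own explicit directory.
cmd = build_py(Distribution())
cmd.package_dir = {"": "src", "foo.bar": "lib/bar"}
assert cmd.get_package_dir("foo.bar.baz") == os.path.join("lib/bar", "baz")
assert cmd.get_package_dir("other.pkg") == os.path.join("src", "other", "pkg")
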
+ if package_dir != "": + if not os.path.exists(package_dir): + raise DistutilsFileError( + "package directory '%s' does not exist" % package_dir) + if not os.path.isdir(package_dir): + raise DistutilsFileError( + "supposed package directory '%s' exists, " + "but is not a directory" % package_dir) + + # Require __init__.py for all but the "root package" + if package: + init_py = os.path.join(package_dir, "__init__.py") + if os.path.isfile(init_py): + return init_py + else: + log.warn(("package init file '%s' not found " + + "(or not a regular file)"), init_py) + + # Either not in a package at all (__init__.py not expected), or + # __init__.py doesn't exist -- so don't return the filename. + return None + + def check_module(self, module, module_file): + if not os.path.isfile(module_file): + log.warn("file %s (for module %s) not found", module_file, module) + return False + else: + return True + + def find_package_modules(self, package, package_dir): + self.check_package(package, package_dir) + module_files = glob.glob(os.path.join(glob.escape(package_dir), "*.py")) + modules = [] + setup_script = os.path.abspath(self.distribution.script_name) + + for f in module_files: + abs_f = os.path.abspath(f) + if abs_f != setup_script: + module = os.path.splitext(os.path.basename(f))[0] + modules.append((package, module, f)) + else: + self.debug_print("excluding %s" % setup_script) + return modules + + def find_modules(self): + """Finds individually-specified Python modules, ie. those listed by + module name in 'self.py_modules'. Returns a list of tuples (package, + module_base, filename): 'package' is a tuple of the path through + package-space to the module; 'module_base' is the bare (no + packages, no dots) module name, and 'filename' is the path to the + ".py" file (relative to the distribution root) that implements the + module. + """ + # Map package names to tuples of useful info about the package: + # (package_dir, checked) + # package_dir - the directory where we'll find source files for + # this package + # checked - true if we have checked that the package directory + # is valid (exists, contains __init__.py, ... ?) + packages = {} + + # List of (package, module, filename) tuples to return + modules = [] + + # We treat modules-in-packages almost the same as toplevel modules, + # just the "package" for a toplevel is empty (either an empty + # string or empty list, depending on context). Differences: + # - don't check for __init__.py in directory for empty package + for module in self.py_modules: + path = module.split('.') + package = '.'.join(path[0:-1]) + module_base = path[-1] + + try: + (package_dir, checked) = packages[package] + except KeyError: + package_dir = self.get_package_dir(package) + checked = 0 + + if not checked: + init_py = self.check_package(package, package_dir) + packages[package] = (package_dir, 1) + if init_py: + modules.append((package, "__init__", init_py)) + + # XXX perhaps we should also check for just .pyc files + # (so greedy closed-source bastards can distribute Python + # modules too) + module_file = os.path.join(package_dir, module_base + ".py") + if not self.check_module(module, module_file): + continue + + modules.append((package, module_base, module_file)) + + return modules + + def find_all_modules(self): + """Compute the list of all modules that will be built, whether + they are specified one-module-at-a-time ('self.py_modules') or + by whole packages ('self.packages'). 
Return a list of tuples + (package, module, module_file), just like 'find_modules()' and + 'find_package_modules()' do.""" + modules = [] + if self.py_modules: + modules.extend(self.find_modules()) + if self.packages: + for package in self.packages: + package_dir = self.get_package_dir(package) + m = self.find_package_modules(package, package_dir) + modules.extend(m) + return modules + + def get_source_files(self): + return [module[-1] for module in self.find_all_modules()] + + def get_module_outfile(self, build_dir, package, module): + outfile_path = [build_dir] + list(package) + [module + ".py"] + return os.path.join(*outfile_path) + + def get_outputs(self, include_bytecode=1): + modules = self.find_all_modules() + outputs = [] + for (package, module, module_file) in modules: + package = package.split('.') + filename = self.get_module_outfile(self.build_lib, package, module) + outputs.append(filename) + if include_bytecode: + if self.compile: + outputs.append(importlib.util.cache_from_source( + filename, optimization='')) + if self.optimize > 0: + outputs.append(importlib.util.cache_from_source( + filename, optimization=self.optimize)) + + outputs += [ + os.path.join(build_dir, filename) + for package, src_dir, build_dir, filenames in self.data_files + for filename in filenames + ] + + return outputs + + def build_module(self, module, module_file, package): + if isinstance(package, str): + package = package.split('.') + elif not isinstance(package, (list, tuple)): + raise TypeError( + "'package' must be a string (dot-separated), list, or tuple") + + # Now put the module source file into the "build" area -- this is + # easy, we just copy it somewhere under self.build_lib (the build + # directory for Python source). + outfile = self.get_module_outfile(self.build_lib, package, module) + dir = os.path.dirname(outfile) + self.mkpath(dir) + return self.copy_file(module_file, outfile, preserve_mode=0) + + def build_modules(self): + modules = self.find_modules() + for (package, module, module_file) in modules: + # Now "build" the module -- ie. copy the source file to + # self.build_lib (the build directory for Python source). + # (Actually, it gets copied to the directory for this package + # under self.build_lib.) + self.build_module(module, module_file, package) + + def build_packages(self): + for package in self.packages: + # Get list of (package, module, module_file) tuples based on + # scanning the package directory. 'package' is only included + # in the tuple so that 'find_modules()' and + # 'find_package_tuples()' have a consistent interface; it's + # ignored here (apart from a sanity check). Also, 'module' is + # the *unqualified* module name (ie. no dots, no package -- we + # already know its package!), and 'module_file' is the path to + # the .py file, relative to the current directory + # (ie. including 'package_dir'). + package_dir = self.get_package_dir(package) + modules = self.find_package_modules(package, package_dir) + + # Now loop over the modules we found, "building" each one (just + # copy it to self.build_lib). 
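get_outputs() above also records the byte-compiled companions of each copied module via importlib.util.cache_from_source(); a small sketch of the paths that call produces (the module path is hypothetical):

import importlib.util

# For a module copied to build/lib/demopkg/api.py, the recorded bytecode
# files live under __pycache__, e.g. api.cpython-310.pyc and
# api.cpython-310.opt-1.pyc (the tag depends on the running interpreter).
src = "build/lib/demopkg/api.py"
print(importlib.util.cache_from_source(src, optimization=""))
print(importlib.util.cache_from_source(src, optimization=1))
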
+ for (package_, module, module_file) in modules: + assert package == package_ + self.build_module(module, module_file, package) + + def byte_compile(self, files): + if sys.dont_write_bytecode: + self.warn('byte-compiling is disabled, skipping.') + return + + from distutils.util import byte_compile + prefix = self.build_lib + if prefix[-1] != os.sep: + prefix = prefix + os.sep + + # XXX this code is essentially the same as the 'byte_compile() + # method of the "install_lib" command, except for the determination + # of the 'prefix' string. Hmmm. + if self.compile: + byte_compile(files, optimize=0, + force=self.force, prefix=prefix, dry_run=self.dry_run) + if self.optimize > 0: + byte_compile(files, optimize=self.optimize, + force=self.force, prefix=prefix, dry_run=self.dry_run) + +class build_py_2to3(build_py, Mixin2to3): + def run(self): + self.updated_files = [] + + # Base class code + if self.py_modules: + self.build_modules() + if self.packages: + self.build_packages() + self.build_package_data() + + # 2to3 + self.run_2to3(self.updated_files) + + # Remaining base class code + self.byte_compile(self.get_outputs(include_bytecode=0)) + + def build_module(self, module, module_file, package): + res = build_py.build_module(self, module, module_file, package) + if res[1]: + # file was copied + self.updated_files.append(res[0]) + return res diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/command/build_scripts.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/command/build_scripts.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/command/build_scripts.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/command/build_scripts.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,160 @@ +"""distutils.command.build_scripts + +Implements the Distutils 'build_scripts' command.""" + +import os, re +from stat import ST_MODE +from distutils import sysconfig +from distutils.core import Command +from distutils.dep_util import newer +from distutils.util import convert_path, Mixin2to3 +from distutils import log +import tokenize + +# check if Python is called on the first line with this expression +first_line_re = re.compile(b'^#!.*python[0-9.]*([ \t].*)?$') + +class build_scripts(Command): + + description = "\"build\" scripts (copy and fixup #! line)" + + user_options = [ + ('build-dir=', 'd', "directory to \"build\" (copy) to"), + ('force', 'f', "forcibly build everything (ignore file timestamps"), + ('executable=', 'e', "specify final destination interpreter path"), + ] + + boolean_options = ['force'] + + + def initialize_options(self): + self.build_dir = None + self.scripts = None + self.force = None + self.executable = None + self.outfiles = None + + def finalize_options(self): + self.set_undefined_options('build', + ('build_scripts', 'build_dir'), + ('force', 'force'), + ('executable', 'executable')) + self.scripts = self.distribution.scripts + + def get_source_files(self): + return self.scripts + + def run(self): + if not self.scripts: + return + self.copy_scripts() + + + def copy_scripts(self): + r"""Copy each script listed in 'self.scripts'; if it's marked as a + Python script in the Unix way (first line matches 'first_line_re', + ie. starts with "\#!" and contains "python"), then adjust the first + line to refer to the current Python interpreter as we copy. 
+ """ + self.mkpath(self.build_dir) + outfiles = [] + updated_files = [] + for script in self.scripts: + adjust = False + script = convert_path(script) + outfile = os.path.join(self.build_dir, os.path.basename(script)) + outfiles.append(outfile) + + if not self.force and not newer(script, outfile): + log.debug("not copying %s (up-to-date)", script) + continue + + # Always open the file, but ignore failures in dry-run mode -- + # that way, we'll get accurate feedback if we can read the + # script. + try: + f = open(script, "rb") + except OSError: + if not self.dry_run: + raise + f = None + else: + encoding, lines = tokenize.detect_encoding(f.readline) + f.seek(0) + first_line = f.readline() + if not first_line: + self.warn("%s is an empty file (skipping)" % script) + continue + + match = first_line_re.match(first_line) + if match: + adjust = True + post_interp = match.group(1) or b'' + + if adjust: + log.info("copying and adjusting %s -> %s", script, + self.build_dir) + updated_files.append(outfile) + if not self.dry_run: + if not sysconfig.python_build: + executable = self.executable + else: + executable = os.path.join( + sysconfig.get_config_var("BINDIR"), + "python%s%s" % (sysconfig.get_config_var("VERSION"), + sysconfig.get_config_var("EXE"))) + executable = os.fsencode(executable) + shebang = b"#!" + executable + post_interp + b"\n" + # Python parser starts to read a script using UTF-8 until + # it gets a #coding:xxx cookie. The shebang has to be the + # first line of a file, the #coding:xxx cookie cannot be + # written before. So the shebang has to be decodable from + # UTF-8. + try: + shebang.decode('utf-8') + except UnicodeDecodeError: + raise ValueError( + "The shebang ({!r}) is not decodable " + "from utf-8".format(shebang)) + # If the script is encoded to a custom encoding (use a + # #coding:xxx cookie), the shebang has to be decodable from + # the script encoding too. + try: + shebang.decode(encoding) + except UnicodeDecodeError: + raise ValueError( + "The shebang ({!r}) is not decodable " + "from the script encoding ({})" + .format(shebang, encoding)) + with open(outfile, "wb") as outf: + outf.write(shebang) + outf.writelines(f.readlines()) + if f: + f.close() + else: + if f: + f.close() + updated_files.append(outfile) + self.copy_file(script, outfile) + + if os.name == 'posix': + for file in outfiles: + if self.dry_run: + log.info("changing mode of %s", file) + else: + oldmode = os.stat(file)[ST_MODE] & 0o7777 + newmode = (oldmode | 0o555) & 0o7777 + if newmode != oldmode: + log.info("changing mode of %s from %o to %o", + file, oldmode, newmode) + os.chmod(file, newmode) + # XXX should we modify self.outfiles? + return outfiles, updated_files + +class build_scripts_2to3(build_scripts, Mixin2to3): + + def copy_scripts(self): + outfiles, updated_files = build_scripts.copy_scripts(self) + if not self.dry_run: + self.run_2to3(updated_files) + return outfiles, updated_files diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/command/check.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/command/check.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/command/check.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/command/check.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,148 @@ +"""distutils.command.check + +Implements the Distutils 'check' command. 
+""" +from distutils.core import Command +from distutils.errors import DistutilsSetupError + +try: + # docutils is installed + from docutils.utils import Reporter + from docutils.parsers.rst import Parser + from docutils import frontend + from docutils import nodes + + class SilentReporter(Reporter): + + def __init__(self, source, report_level, halt_level, stream=None, + debug=0, encoding='ascii', error_handler='replace'): + self.messages = [] + Reporter.__init__(self, source, report_level, halt_level, stream, + debug, encoding, error_handler) + + def system_message(self, level, message, *children, **kwargs): + self.messages.append((level, message, children, kwargs)) + return nodes.system_message(message, level=level, + type=self.levels[level], + *children, **kwargs) + + HAS_DOCUTILS = True +except Exception: + # Catch all exceptions because exceptions besides ImportError probably + # indicate that docutils is not ported to Py3k. + HAS_DOCUTILS = False + +class check(Command): + """This command checks the meta-data of the package. + """ + description = ("perform some checks on the package") + user_options = [('metadata', 'm', 'Verify meta-data'), + ('restructuredtext', 'r', + ('Checks if long string meta-data syntax ' + 'are reStructuredText-compliant')), + ('strict', 's', + 'Will exit with an error if a check fails')] + + boolean_options = ['metadata', 'restructuredtext', 'strict'] + + def initialize_options(self): + """Sets default values for options.""" + self.restructuredtext = 0 + self.metadata = 1 + self.strict = 0 + self._warnings = 0 + + def finalize_options(self): + pass + + def warn(self, msg): + """Counts the number of warnings that occurs.""" + self._warnings += 1 + return Command.warn(self, msg) + + def run(self): + """Runs the command.""" + # perform the various tests + if self.metadata: + self.check_metadata() + if self.restructuredtext: + if HAS_DOCUTILS: + self.check_restructuredtext() + elif self.strict: + raise DistutilsSetupError('The docutils package is needed.') + + # let's raise an error in strict mode, if we have at least + # one warning + if self.strict and self._warnings > 0: + raise DistutilsSetupError('Please correct your package.') + + def check_metadata(self): + """Ensures that all required elements of meta-data are supplied. + + Required fields: + name, version, URL + + Recommended fields: + (author and author_email) or (maintainer and maintainer_email) + + Warns if any are missing. 
+ """ + metadata = self.distribution.metadata + + missing = [] + for attr in ('name', 'version', 'url'): + if not (hasattr(metadata, attr) and getattr(metadata, attr)): + missing.append(attr) + + if missing: + self.warn("missing required meta-data: %s" % ', '.join(missing)) + if metadata.author: + if not metadata.author_email: + self.warn("missing meta-data: if 'author' supplied, " + + "'author_email' should be supplied too") + elif metadata.maintainer: + if not metadata.maintainer_email: + self.warn("missing meta-data: if 'maintainer' supplied, " + + "'maintainer_email' should be supplied too") + else: + self.warn("missing meta-data: either (author and author_email) " + + "or (maintainer and maintainer_email) " + + "should be supplied") + + def check_restructuredtext(self): + """Checks if the long string fields are reST-compliant.""" + data = self.distribution.get_long_description() + for warning in self._check_rst_data(data): + line = warning[-1].get('line') + if line is None: + warning = warning[1] + else: + warning = '%s (line %s)' % (warning[1], line) + self.warn(warning) + + def _check_rst_data(self, data): + """Returns warnings when the provided data doesn't compile.""" + # the include and csv_table directives need this to be a path + source_path = self.distribution.script_name or 'setup.py' + parser = Parser() + settings = frontend.OptionParser(components=(Parser,)).get_default_values() + settings.tab_width = 4 + settings.pep_references = None + settings.rfc_references = None + reporter = SilentReporter(source_path, + settings.report_level, + settings.halt_level, + stream=settings.warning_stream, + debug=settings.debug, + encoding=settings.error_encoding, + error_handler=settings.error_encoding_error_handler) + + document = nodes.document(settings, reporter, source=source_path) + document.note_source(source_path, -1) + try: + parser.parse(data, document) + except AttributeError as e: + reporter.messages.append( + (-1, 'Could not finish the parsing: %s.' 
% e, '', {})) + + return reporter.messages diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/command/clean.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/command/clean.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/command/clean.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/command/clean.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,76 @@ +"""distutils.command.clean + +Implements the Distutils 'clean' command.""" + +# contributed by Bastian Kleineidam , added 2000-03-18 + +import os +from distutils.core import Command +from distutils.dir_util import remove_tree +from distutils import log + +class clean(Command): + + description = "clean up temporary files from 'build' command" + user_options = [ + ('build-base=', 'b', + "base build directory (default: 'build.build-base')"), + ('build-lib=', None, + "build directory for all modules (default: 'build.build-lib')"), + ('build-temp=', 't', + "temporary build directory (default: 'build.build-temp')"), + ('build-scripts=', None, + "build directory for scripts (default: 'build.build-scripts')"), + ('bdist-base=', None, + "temporary directory for built distributions"), + ('all', 'a', + "remove all build output, not just temporary by-products") + ] + + boolean_options = ['all'] + + def initialize_options(self): + self.build_base = None + self.build_lib = None + self.build_temp = None + self.build_scripts = None + self.bdist_base = None + self.all = None + + def finalize_options(self): + self.set_undefined_options('build', + ('build_base', 'build_base'), + ('build_lib', 'build_lib'), + ('build_scripts', 'build_scripts'), + ('build_temp', 'build_temp')) + self.set_undefined_options('bdist', + ('bdist_base', 'bdist_base')) + + def run(self): + # remove the build/temp. directory (unless it's already + # gone) + if os.path.exists(self.build_temp): + remove_tree(self.build_temp, dry_run=self.dry_run) + else: + log.debug("'%s' does not exist -- can't clean it", + self.build_temp) + + if self.all: + # remove build directories + for directory in (self.build_lib, + self.bdist_base, + self.build_scripts): + if os.path.exists(directory): + remove_tree(directory, dry_run=self.dry_run) + else: + log.warn("'%s' does not exist -- can't clean it", + directory) + + # just for the heck of it, try to remove the base build directory: + # we might have emptied it right now, but if not we don't care + if not self.dry_run: + try: + os.rmdir(self.build_base) + log.info("removing '%s'", self.build_base) + except OSError: + pass diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/command/command_template python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/command/command_template --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/command/command_template 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/command/command_template 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,33 @@ +"""distutils.command.x + +Implements the Distutils 'x' command. +""" + +# created 2000/mm/dd, John Doe + +__revision__ = "$Id$" + +from distutils.core import Command + + +class x(Command): + + # Brief (40-50 characters) description of the command + description = "" + + # List of option tuples: long name, short name (None if no short + # name), and help string. + user_options = [('', '', + ""), + ] + + def initialize_options(self): + self. = None + self. = None + self. 
= None + + def finalize_options(self): + if self.x is None: + self.x = + + def run(self): diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/command/config.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/command/config.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/command/config.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/command/config.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,344 @@ +"""distutils.command.config + +Implements the Distutils 'config' command, a (mostly) empty command class +that exists mainly to be sub-classed by specific module distributions and +applications. The idea is that while every "config" command is different, +at least they're all named the same, and users always see "config" in the +list of standard commands. Also, this is a good place to put common +configure-like tasks: "try to compile this C code", or "figure out where +this header file lives". +""" + +import os, re + +from distutils.core import Command +from distutils.errors import DistutilsExecError +from distutils.sysconfig import customize_compiler +from distutils import log + +LANG_EXT = {"c": ".c", "c++": ".cxx"} + +class config(Command): + + description = "prepare to build" + + user_options = [ + ('compiler=', None, + "specify the compiler type"), + ('cc=', None, + "specify the compiler executable"), + ('include-dirs=', 'I', + "list of directories to search for header files"), + ('define=', 'D', + "C preprocessor macros to define"), + ('undef=', 'U', + "C preprocessor macros to undefine"), + ('libraries=', 'l', + "external C libraries to link with"), + ('library-dirs=', 'L', + "directories to search for external C libraries"), + + ('noisy', None, + "show every action (compile, link, run, ...) taken"), + ('dump-source', None, + "dump generated source files before attempting to compile them"), + ] + + + # The three standard command methods: since the "config" command + # does nothing by default, these are empty. + + def initialize_options(self): + self.compiler = None + self.cc = None + self.include_dirs = None + self.libraries = None + self.library_dirs = None + + # maximal output for now + self.noisy = 1 + self.dump_source = 1 + + # list of temporary files generated along-the-way that we have + # to clean at some point + self.temp_files = [] + + def finalize_options(self): + if self.include_dirs is None: + self.include_dirs = self.distribution.include_dirs or [] + elif isinstance(self.include_dirs, str): + self.include_dirs = self.include_dirs.split(os.pathsep) + + if self.libraries is None: + self.libraries = [] + elif isinstance(self.libraries, str): + self.libraries = [self.libraries] + + if self.library_dirs is None: + self.library_dirs = [] + elif isinstance(self.library_dirs, str): + self.library_dirs = self.library_dirs.split(os.pathsep) + + def run(self): + pass + + # Utility methods for actual "config" commands. The interfaces are + # loosely based on Autoconf macros of similar names. Sub-classes + # may use these freely. + + def _check_compiler(self): + """Check that 'self.compiler' really is a CCompiler object; + if not, make it one. + """ + # We do this late, and only on-demand, because this is an expensive + # import. 
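A minimal sketch of the lazy compiler setup performed just below, outside of any command (dry_run keeps it from touching the filesystem; the include directory is hypothetical):

from distutils.ccompiler import new_compiler
from distutils.sysconfig import customize_compiler

# Build a platform-default compiler object, apply the usual sysconfig
# customizations, and point it at an (invented) include directory.
cc = new_compiler(dry_run=1, force=1)
customize_compiler(cc)
cc.set_include_dirs(["/opt/foo/include"])
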
+ from distutils.ccompiler import CCompiler, new_compiler + if not isinstance(self.compiler, CCompiler): + self.compiler = new_compiler(compiler=self.compiler, + dry_run=self.dry_run, force=1) + customize_compiler(self.compiler) + if self.include_dirs: + self.compiler.set_include_dirs(self.include_dirs) + if self.libraries: + self.compiler.set_libraries(self.libraries) + if self.library_dirs: + self.compiler.set_library_dirs(self.library_dirs) + + def _gen_temp_sourcefile(self, body, headers, lang): + filename = "_configtest" + LANG_EXT[lang] + with open(filename, "w") as file: + if headers: + for header in headers: + file.write("#include <%s>\n" % header) + file.write("\n") + file.write(body) + if body[-1] != "\n": + file.write("\n") + return filename + + def _preprocess(self, body, headers, include_dirs, lang): + src = self._gen_temp_sourcefile(body, headers, lang) + out = "_configtest.i" + self.temp_files.extend([src, out]) + self.compiler.preprocess(src, out, include_dirs=include_dirs) + return (src, out) + + def _compile(self, body, headers, include_dirs, lang): + src = self._gen_temp_sourcefile(body, headers, lang) + if self.dump_source: + dump_file(src, "compiling '%s':" % src) + (obj,) = self.compiler.object_filenames([src]) + self.temp_files.extend([src, obj]) + self.compiler.compile([src], include_dirs=include_dirs) + return (src, obj) + + def _link(self, body, headers, include_dirs, libraries, library_dirs, + lang): + (src, obj) = self._compile(body, headers, include_dirs, lang) + prog = os.path.splitext(os.path.basename(src))[0] + self.compiler.link_executable([obj], prog, + libraries=libraries, + library_dirs=library_dirs, + target_lang=lang) + + if self.compiler.exe_extension is not None: + prog = prog + self.compiler.exe_extension + self.temp_files.append(prog) + + return (src, obj, prog) + + def _clean(self, *filenames): + if not filenames: + filenames = self.temp_files + self.temp_files = [] + log.info("removing: %s", ' '.join(filenames)) + for filename in filenames: + try: + os.remove(filename) + except OSError: + pass + + + # XXX these ignore the dry-run flag: what to do, what to do? even if + # you want a dry-run build, you still need some sort of configuration + # info. My inclination is to make it up to the real config command to + # consult 'dry_run', and assume a default (minimal) configuration if + # true. The problem with trying to do it here is that you'd have to + # return either true or false from all the 'try' methods, neither of + # which is correct. + + # XXX need access to the header search path and maybe default macros. + + def try_cpp(self, body=None, headers=None, include_dirs=None, lang="c"): + """Construct a source file from 'body' (a string containing lines + of C/C++ code) and 'headers' (a list of header files to include) + and run it through the preprocessor. Return true if the + preprocessor succeeded, false if there were any errors. + ('body' probably isn't of much use, but what the heck.) + """ + from distutils.ccompiler import CompileError + self._check_compiler() + ok = True + try: + self._preprocess(body, headers, include_dirs, lang) + except CompileError: + ok = False + + self._clean() + return ok + + def search_cpp(self, pattern, body=None, headers=None, include_dirs=None, + lang="c"): + """Construct a source file (just like 'try_cpp()'), run it through + the preprocessor, and return true if any line of the output matches + 'pattern'. 'pattern' should either be a compiled regex object or a + string containing a regex. 
If both 'body' and 'headers' are None, + preprocesses an empty file -- which can be useful to determine the + symbols the preprocessor and compiler set by default. + """ + self._check_compiler() + src, out = self._preprocess(body, headers, include_dirs, lang) + + if isinstance(pattern, str): + pattern = re.compile(pattern) + + with open(out) as file: + match = False + while True: + line = file.readline() + if line == '': + break + if pattern.search(line): + match = True + break + + self._clean() + return match + + def try_compile(self, body, headers=None, include_dirs=None, lang="c"): + """Try to compile a source file built from 'body' and 'headers'. + Return true on success, false otherwise. + """ + from distutils.ccompiler import CompileError + self._check_compiler() + try: + self._compile(body, headers, include_dirs, lang) + ok = True + except CompileError: + ok = False + + log.info(ok and "success!" or "failure.") + self._clean() + return ok + + def try_link(self, body, headers=None, include_dirs=None, libraries=None, + library_dirs=None, lang="c"): + """Try to compile and link a source file, built from 'body' and + 'headers', to executable form. Return true on success, false + otherwise. + """ + from distutils.ccompiler import CompileError, LinkError + self._check_compiler() + try: + self._link(body, headers, include_dirs, + libraries, library_dirs, lang) + ok = True + except (CompileError, LinkError): + ok = False + + log.info(ok and "success!" or "failure.") + self._clean() + return ok + + def try_run(self, body, headers=None, include_dirs=None, libraries=None, + library_dirs=None, lang="c"): + """Try to compile, link to an executable, and run a program + built from 'body' and 'headers'. Return true on success, false + otherwise. + """ + from distutils.ccompiler import CompileError, LinkError + self._check_compiler() + try: + src, obj, exe = self._link(body, headers, include_dirs, + libraries, library_dirs, lang) + self.spawn([exe]) + ok = True + except (CompileError, LinkError, DistutilsExecError): + ok = False + + log.info(ok and "success!" or "failure.") + self._clean() + return ok + + + # -- High-level methods -------------------------------------------- + # (these are the ones that are actually likely to be useful + # when implementing a real-world config command!) + + def check_func(self, func, headers=None, include_dirs=None, + libraries=None, library_dirs=None, decl=0, call=0): + """Determine if function 'func' is available by constructing a + source file that refers to 'func', and compiles and links it. + If everything succeeds, returns true; otherwise returns false. + + The constructed source file starts out by including the header + files listed in 'headers'. If 'decl' is true, it then declares + 'func' (as "int func()"); you probably shouldn't supply 'headers' + and set 'decl' true in the same call, or you might get errors about + a conflicting declarations for 'func'. Finally, the constructed + 'main()' function either references 'func' or (if 'call' is true) + calls it. 'libraries' and 'library_dirs' are used when + linking. 
+ """ + self._check_compiler() + body = [] + if decl: + body.append("int %s ();" % func) + body.append("int main () {") + if call: + body.append(" %s();" % func) + else: + body.append(" %s;" % func) + body.append("}") + body = "\n".join(body) + "\n" + + return self.try_link(body, headers, include_dirs, + libraries, library_dirs) + + def check_lib(self, library, library_dirs=None, headers=None, + include_dirs=None, other_libraries=[]): + """Determine if 'library' is available to be linked against, + without actually checking that any particular symbols are provided + by it. 'headers' will be used in constructing the source file to + be compiled, but the only effect of this is to check if all the + header files listed are available. Any libraries listed in + 'other_libraries' will be included in the link, in case 'library' + has symbols that depend on other libraries. + """ + self._check_compiler() + return self.try_link("int main (void) { }", headers, include_dirs, + [library] + other_libraries, library_dirs) + + def check_header(self, header, include_dirs=None, library_dirs=None, + lang="c"): + """Determine if the system header file named by 'header_file' + exists and can be found by the preprocessor; return true if so, + false otherwise. + """ + return self.try_cpp(body="/* No body */", headers=[header], + include_dirs=include_dirs) + +def dump_file(filename, head=None): + """Dumps a file content into log.info. + + If head is not None, will be dumped before the file content. + """ + if head is None: + log.info('%s', filename) + else: + log.info(head) + file = open(filename) + try: + log.info(file.read()) + finally: + file.close() diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/command/install.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/command/install.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/command/install.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/command/install.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,679 @@ +"""distutils.command.install + +Implements the Distutils 'install' command.""" + +import sys +import sysconfig +import os +import re + +from distutils import log +from distutils.core import Command +from distutils.debug import DEBUG +from distutils.sysconfig import get_config_vars +from distutils.errors import DistutilsPlatformError +from distutils.file_util import write_file +from distutils.util import convert_path, subst_vars, change_root +from distutils.util import get_platform +from distutils.errors import DistutilsOptionError + +from site import USER_BASE +from site import USER_SITE + +HAS_USER_SITE = (USER_SITE is not None) + +# The keys to an installation scheme; if any new types of files are to be +# installed, be sure to add an entry to every scheme in +# sysconfig._INSTALL_SCHEMES, and to SCHEME_KEYS here. +SCHEME_KEYS = ('purelib', 'platlib', 'headers', 'scripts', 'data') + +# The following code provides backward-compatible INSTALL_SCHEMES +# while making the sysconfig module the single point of truth. +# This makes it easier for OS distributions where they need to +# alter locations for packages installations in a single place. +# Note that this module is deprecated (PEP 632); all consumers +# of this information should switch to using sysconfig directly. 
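For reference, a minimal sketch of the sysconfig interface referred to above; the scheme chosen and the resulting paths depend on the platform and build:

import sysconfig

# get_paths() returns the same kind of mapping these INSTALL_SCHEMES encode;
# the default scheme is e.g. 'posix_prefix' on Unix and 'nt' on Windows.
paths = sysconfig.get_paths()
for key in ("purelib", "platlib", "include", "scripts", "data"):
    print(key, "->", paths[key])
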
+INSTALL_SCHEMES = {"unix_prefix": {}, "unix_home": {}, "nt": {}} + +# Copy from sysconfig._INSTALL_SCHEMES +for key in SCHEME_KEYS: + for distutils_scheme_name, sys_scheme_name in ( + ("unix_prefix", "posix_prefix"), ("unix_home", "posix_home"), + ("nt", "nt")): + sys_key = key + sys_scheme = sysconfig._INSTALL_SCHEMES[sys_scheme_name] + if key == "headers" and key not in sys_scheme: + # On POSIX-y platforms, Python will: + # - Build from .h files in 'headers' (only there when + # building CPython) + # - Install .h files to 'include' + # When 'headers' is missing, fall back to 'include' + sys_key = 'include' + INSTALL_SCHEMES[distutils_scheme_name][key] = sys_scheme[sys_key] + +# Transformation to different template format +for main_key in INSTALL_SCHEMES: + for key, value in INSTALL_SCHEMES[main_key].items(): + # Change all ocurences of {variable} to $variable + value = re.sub(r"\{(.+?)\}", r"$\g<1>", value) + value = value.replace("$installed_base", "$base") + value = value.replace("$py_version_nodot_plat", "$py_version_nodot") + if key == "headers": + value += "/$dist_name" + if sys.version_info >= (3, 9) and key == "platlib": + # platlibdir is available since 3.9: bpo-1294959 + value = value.replace("/lib/", "/$platlibdir/") + INSTALL_SCHEMES[main_key][key] = value + +# The following part of INSTALL_SCHEMES has a different definition +# than the one in sysconfig, but because both depend on the site module, +# the outcomes should be the same. +if HAS_USER_SITE: + INSTALL_SCHEMES['nt_user'] = { + 'purelib': '$usersite', + 'platlib': '$usersite', + 'headers': '$userbase/Python$py_version_nodot/Include/$dist_name', + 'scripts': '$userbase/Python$py_version_nodot/Scripts', + 'data' : '$userbase', + } + + INSTALL_SCHEMES['unix_user'] = { + 'purelib': '$usersite', + 'platlib': '$usersite', + 'headers': + '$userbase/include/python$py_version_short$abiflags/$dist_name', + 'scripts': '$userbase/bin', + 'data' : '$userbase', + } + + +class install(Command): + + description = "install everything from build directory" + + user_options = [ + # Select installation scheme and set base director(y|ies) + ('prefix=', None, + "installation prefix"), + ('exec-prefix=', None, + "(Unix only) prefix for platform-specific files"), + ('home=', None, + "(Unix only) home directory to install under"), + + # Or, just set the base director(y|ies) + ('install-base=', None, + "base installation directory (instead of --prefix or --home)"), + ('install-platbase=', None, + "base installation directory for platform-specific files " + + "(instead of --exec-prefix or --home)"), + ('root=', None, + "install everything relative to this alternate root directory"), + + # Or, explicitly set the installation scheme + ('install-purelib=', None, + "installation directory for pure Python module distributions"), + ('install-platlib=', None, + "installation directory for non-pure module distributions"), + ('install-lib=', None, + "installation directory for all module distributions " + + "(overrides --install-purelib and --install-platlib)"), + + ('install-headers=', None, + "installation directory for C/C++ headers"), + ('install-scripts=', None, + "installation directory for Python scripts"), + ('install-data=', None, + "installation directory for data files"), + + # Byte-compilation options -- see install_lib.py for details, as + # these are duplicated from there (but only install_lib does + # anything with them). 
+ ('compile', 'c', "compile .py to .pyc [default]"), + ('no-compile', None, "don't compile .py files"), + ('optimize=', 'O', + "also compile with optimization: -O1 for \"python -O\", " + "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"), + + # Miscellaneous control options + ('force', 'f', + "force installation (overwrite any existing files)"), + ('skip-build', None, + "skip rebuilding everything (for testing/debugging)"), + + # Where to install documentation (eventually!) + #('doc-format=', None, "format of documentation to generate"), + #('install-man=', None, "directory for Unix man pages"), + #('install-html=', None, "directory for HTML documentation"), + #('install-info=', None, "directory for GNU info files"), + + ('record=', None, + "filename in which to record list of installed files"), + ] + + boolean_options = ['compile', 'force', 'skip-build'] + + if HAS_USER_SITE: + user_options.append(('user', None, + "install in user site-package '%s'" % USER_SITE)) + boolean_options.append('user') + + negative_opt = {'no-compile' : 'compile'} + + + def initialize_options(self): + """Initializes options.""" + # High-level options: these select both an installation base + # and scheme. + self.prefix = None + self.exec_prefix = None + self.home = None + self.user = 0 + + # These select only the installation base; it's up to the user to + # specify the installation scheme (currently, that means supplying + # the --install-{platlib,purelib,scripts,data} options). + self.install_base = None + self.install_platbase = None + self.root = None + + # These options are the actual installation directories; if not + # supplied by the user, they are filled in using the installation + # scheme implied by prefix/exec-prefix/home and the contents of + # that installation scheme. + self.install_purelib = None # for pure module distributions + self.install_platlib = None # non-pure (dists w/ extensions) + self.install_headers = None # for C/C++ headers + self.install_lib = None # set to either purelib or platlib + self.install_scripts = None + self.install_data = None + if HAS_USER_SITE: + self.install_userbase = USER_BASE + self.install_usersite = USER_SITE + + self.compile = None + self.optimize = None + + # Deprecated + # These two are for putting non-packagized distributions into their + # own directory and creating a .pth file if it makes sense. + # 'extra_path' comes from the setup file; 'install_path_file' can + # be turned off if it makes no sense to install a .pth file. (But + # better to install it uselessly than to guess wrong and not + # install it when it's necessary and would be used!) Currently, + # 'install_path_file' is always true unless some outsider meddles + # with it. + self.extra_path = None + self.install_path_file = 1 + + # 'force' forces installation, even if target files are not + # out-of-date. 'skip_build' skips running the "build" command, + # handy if you know it's not necessary. 'warn_dir' (which is *not* + # a user option, it's just there so the bdist_* commands can turn + # it off) determines whether we warn about installing to a + # directory not in sys.path. + self.force = 0 + self.skip_build = 0 + self.warn_dir = 1 + + # These are only here as a conduit from the 'build' command to the + # 'install_*' commands that do the real work. ('build_base' isn't + # actually used anywhere, but it might be useful in future.) They + # are not user options, because if the user told the install + # command where the build directory is, that wouldn't affect the + # build command. 
+ self.build_base = None + self.build_lib = None + + # Not defined yet because we don't know anything about + # documentation yet. + #self.install_man = None + #self.install_html = None + #self.install_info = None + + self.record = None + + + # -- Option finalizing methods ------------------------------------- + # (This is rather more involved than for most commands, + # because this is where the policy for installing third- + # party Python modules on various platforms given a wide + # array of user input is decided. Yes, it's quite complex!) + + def finalize_options(self): + """Finalizes options.""" + # This method (and its helpers, like 'finalize_unix()', + # 'finalize_other()', and 'select_scheme()') is where the default + # installation directories for modules, extension modules, and + # anything else we care to install from a Python module + # distribution. Thus, this code makes a pretty important policy + # statement about how third-party stuff is added to a Python + # installation! Note that the actual work of installation is done + # by the relatively simple 'install_*' commands; they just take + # their orders from the installation directory options determined + # here. + + # Check for errors/inconsistencies in the options; first, stuff + # that's wrong on any platform. + + if ((self.prefix or self.exec_prefix or self.home) and + (self.install_base or self.install_platbase)): + raise DistutilsOptionError( + "must supply either prefix/exec-prefix/home or " + + "install-base/install-platbase -- not both") + + if self.home and (self.prefix or self.exec_prefix): + raise DistutilsOptionError( + "must supply either home or prefix/exec-prefix -- not both") + + if self.user and (self.prefix or self.exec_prefix or self.home or + self.install_base or self.install_platbase): + raise DistutilsOptionError("can't combine user with prefix, " + "exec_prefix/home, or install_(plat)base") + + # Next, stuff that's wrong (or dubious) only on certain platforms. + if os.name != "posix": + if self.exec_prefix: + self.warn("exec-prefix option ignored on this platform") + self.exec_prefix = None + + # Now the interesting logic -- so interesting that we farm it out + # to other methods. The goal of these methods is to set the final + # values for the install_{lib,scripts,data,...} options, using as + # input a heady brew of prefix, exec_prefix, home, install_base, + # install_platbase, user-supplied versions of + # install_{purelib,platlib,lib,scripts,data,...}, and the + # INSTALL_SCHEME dictionary above. Phew! + + self.dump_dirs("pre-finalize_{unix,other}") + + if os.name == 'posix': + self.finalize_unix() + else: + self.finalize_other() + + self.dump_dirs("post-finalize_{unix,other}()") + + # Expand configuration variables, tilde, etc. in self.install_base + # and self.install_platbase -- that way, we can use $base or + # $platbase in the other installation directories and not worry + # about needing recursive variable expansion (shudder). + + py_version = sys.version.split()[0] + (prefix, exec_prefix) = get_config_vars('prefix', 'exec_prefix') + try: + abiflags = sys.abiflags + except AttributeError: + # sys.abiflags may not be defined on all platforms. 
+ abiflags = '' + self.config_vars = {'dist_name': self.distribution.get_name(), + 'dist_version': self.distribution.get_version(), + 'dist_fullname': self.distribution.get_fullname(), + 'py_version': py_version, + 'py_version_short': '%d.%d' % sys.version_info[:2], + 'py_version_nodot': '%d%d' % sys.version_info[:2], + 'sys_prefix': prefix, + 'prefix': prefix, + 'sys_exec_prefix': exec_prefix, + 'exec_prefix': exec_prefix, + 'abiflags': abiflags, + 'platlibdir': sys.platlibdir, + } + + if HAS_USER_SITE: + self.config_vars['userbase'] = self.install_userbase + self.config_vars['usersite'] = self.install_usersite + + if sysconfig.is_python_build(True): + self.config_vars['srcdir'] = sysconfig.get_config_var('srcdir') + + self.expand_basedirs() + + self.dump_dirs("post-expand_basedirs()") + + # Now define config vars for the base directories so we can expand + # everything else. + self.config_vars['base'] = self.install_base + self.config_vars['platbase'] = self.install_platbase + + if DEBUG: + from pprint import pprint + print("config vars:") + pprint(self.config_vars) + + # Expand "~" and configuration variables in the installation + # directories. + self.expand_dirs() + + self.dump_dirs("post-expand_dirs()") + + # Create directories in the home dir: + if self.user: + self.create_home_path() + + # Pick the actual directory to install all modules to: either + # install_purelib or install_platlib, depending on whether this + # module distribution is pure or not. Of course, if the user + # already specified install_lib, use their selection. + if self.install_lib is None: + if self.distribution.ext_modules: # has extensions: non-pure + self.install_lib = self.install_platlib + else: + self.install_lib = self.install_purelib + + + # Convert directories from Unix /-separated syntax to the local + # convention. + self.convert_paths('lib', 'purelib', 'platlib', + 'scripts', 'data', 'headers') + if HAS_USER_SITE: + self.convert_paths('userbase', 'usersite') + + # Deprecated + # Well, we're not actually fully completely finalized yet: we still + # have to deal with 'extra_path', which is the hack for allowing + # non-packagized module distributions (hello, Numerical Python!) to + # get their own directories. + self.handle_extra_path() + self.install_libbase = self.install_lib # needed for .pth file + self.install_lib = os.path.join(self.install_lib, self.extra_dirs) + + # If a new root directory was supplied, make all the installation + # dirs relative to it. + if self.root is not None: + self.change_roots('libbase', 'lib', 'purelib', 'platlib', + 'scripts', 'data', 'headers') + + self.dump_dirs("after prepending root") + + # Find out the build directories, ie. where to install from. + self.set_undefined_options('build', + ('build_base', 'build_base'), + ('build_lib', 'build_lib')) + + # Punt on doc directories for now -- after all, we're punting on + # documentation completely! 
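The expansion steps above rely on distutils.util.subst_vars(); a minimal sketch with a made-up scheme template and config_vars dictionary:

from distutils.util import subst_vars

# Scheme templates such as this one are expanded against config_vars; both
# values below are invented for illustration.
template = "$base/lib/python$py_version_short/site-packages"
config_vars = {"base": "/usr/local", "py_version_short": "3.10"}
assert (subst_vars(template, config_vars)
        == "/usr/local/lib/python3.10/site-packages")
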
+ + def dump_dirs(self, msg): + """Dumps the list of user options.""" + if not DEBUG: + return + from distutils.fancy_getopt import longopt_xlate + log.debug(msg + ":") + for opt in self.user_options: + opt_name = opt[0] + if opt_name[-1] == "=": + opt_name = opt_name[0:-1] + if opt_name in self.negative_opt: + opt_name = self.negative_opt[opt_name] + opt_name = opt_name.translate(longopt_xlate) + val = not getattr(self, opt_name) + else: + opt_name = opt_name.translate(longopt_xlate) + val = getattr(self, opt_name) + log.debug(" %s: %s", opt_name, val) + + def finalize_unix(self): + """Finalizes options for posix platforms.""" + if self.install_base is not None or self.install_platbase is not None: + if ((self.install_lib is None and + self.install_purelib is None and + self.install_platlib is None) or + self.install_headers is None or + self.install_scripts is None or + self.install_data is None): + raise DistutilsOptionError( + "install-base or install-platbase supplied, but " + "installation scheme is incomplete") + return + + if self.user: + if self.install_userbase is None: + raise DistutilsPlatformError( + "User base directory is not specified") + self.install_base = self.install_platbase = self.install_userbase + self.select_scheme("unix_user") + elif self.home is not None: + self.install_base = self.install_platbase = self.home + self.select_scheme("unix_home") + else: + if self.prefix is None: + if self.exec_prefix is not None: + raise DistutilsOptionError( + "must not supply exec-prefix without prefix") + + self.prefix = os.path.normpath(sys.prefix) + self.exec_prefix = os.path.normpath(sys.exec_prefix) + + else: + if self.exec_prefix is None: + self.exec_prefix = self.prefix + + self.install_base = self.prefix + self.install_platbase = self.exec_prefix + self.select_scheme("unix_prefix") + + def finalize_other(self): + """Finalizes options for non-posix platforms""" + if self.user: + if self.install_userbase is None: + raise DistutilsPlatformError( + "User base directory is not specified") + self.install_base = self.install_platbase = self.install_userbase + self.select_scheme(os.name + "_user") + elif self.home is not None: + self.install_base = self.install_platbase = self.home + self.select_scheme("unix_home") + else: + if self.prefix is None: + self.prefix = os.path.normpath(sys.prefix) + + self.install_base = self.install_platbase = self.prefix + try: + self.select_scheme(os.name) + except KeyError: + raise DistutilsPlatformError( + "I don't know how to install stuff on '%s'" % os.name) + + def select_scheme(self, name): + """Sets the install directories by applying the install schemes.""" + # it's the caller's problem if they supply a bad name! 
+ scheme = INSTALL_SCHEMES[name] + for key in SCHEME_KEYS: + attrname = 'install_' + key + if getattr(self, attrname) is None: + setattr(self, attrname, scheme[key]) + + def _expand_attrs(self, attrs): + for attr in attrs: + val = getattr(self, attr) + if val is not None: + if os.name == 'posix' or os.name == 'nt': + val = os.path.expanduser(val) + val = subst_vars(val, self.config_vars) + setattr(self, attr, val) + + def expand_basedirs(self): + """Calls `os.path.expanduser` on install_base, install_platbase and + root.""" + self._expand_attrs(['install_base', 'install_platbase', 'root']) + + def expand_dirs(self): + """Calls `os.path.expanduser` on install dirs.""" + self._expand_attrs(['install_purelib', 'install_platlib', + 'install_lib', 'install_headers', + 'install_scripts', 'install_data',]) + + def convert_paths(self, *names): + """Call `convert_path` over `names`.""" + for name in names: + attr = "install_" + name + setattr(self, attr, convert_path(getattr(self, attr))) + + def handle_extra_path(self): + """Set `path_file` and `extra_dirs` using `extra_path`.""" + if self.extra_path is None: + self.extra_path = self.distribution.extra_path + + if self.extra_path is not None: + log.warn( + "Distribution option extra_path is deprecated. " + "See issue27919 for details." + ) + if isinstance(self.extra_path, str): + self.extra_path = self.extra_path.split(',') + + if len(self.extra_path) == 1: + path_file = extra_dirs = self.extra_path[0] + elif len(self.extra_path) == 2: + path_file, extra_dirs = self.extra_path + else: + raise DistutilsOptionError( + "'extra_path' option must be a list, tuple, or " + "comma-separated string with 1 or 2 elements") + + # convert to local form in case Unix notation used (as it + # should be in setup scripts) + extra_dirs = convert_path(extra_dirs) + else: + path_file = None + extra_dirs = '' + + # XXX should we warn if path_file and not extra_dirs? (in which + # case the path file would be harmless but pointless) + self.path_file = path_file + self.extra_dirs = extra_dirs + + def change_roots(self, *names): + """Change the install directories pointed by name using root.""" + for name in names: + attr = "install_" + name + setattr(self, attr, change_root(self.root, getattr(self, attr))) + + def create_home_path(self): + """Create directories under ~.""" + if not self.user: + return + home = convert_path(os.path.expanduser("~")) + for name, path in self.config_vars.items(): + if path.startswith(home) and not os.path.isdir(path): + self.debug_print("os.makedirs('%s', 0o700)" % path) + os.makedirs(path, 0o700) + + # -- Command execution methods ------------------------------------- + + def run(self): + """Runs the command.""" + # Obviously have to build before we can install + if not self.skip_build: + self.run_command('build') + # If we built for any other platform, we can't install. + build_plat = self.distribution.get_command_obj('build').plat_name + # check warn_dir - it is a clue that the 'install' is happening + # internally, and not to sys.path, so we don't check the platform + # matches what we are running. + if self.warn_dir and build_plat != get_platform(): + raise DistutilsPlatformError("Can't install when " + "cross-compiling") + + # Run all sub-commands (at least those that need to be run) + for cmd_name in self.get_sub_commands(): + self.run_command(cmd_name) + + if self.path_file: + self.create_path_file() + + # write list of installed files, if requested. 
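A small sketch of the --record bookkeeping that follows, showing how an alternate --root prefix is stripped from the recorded paths (all paths are hypothetical):

# When installing under e.g. --root=/tmp/stage, the record file should list
# the final (post-root) locations, so the root prefix is sliced off.
root = "/tmp/stage"
outputs = ["/tmp/stage/usr/lib/python3.10/site-packages/demopkg/__init__.py"]
outputs = [path[len(root):] for path in outputs]
assert outputs == ["/usr/lib/python3.10/site-packages/demopkg/__init__.py"]
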
+        if self.record:
+            outputs = self.get_outputs()
+            if self.root:               # strip any package prefix
+                root_len = len(self.root)
+                for counter in range(len(outputs)):
+                    outputs[counter] = outputs[counter][root_len:]
+            self.execute(write_file,
+                         (self.record, outputs),
+                         "writing list of installed files to '%s'" %
+                         self.record)
+
+        sys_path = map(os.path.normpath, sys.path)
+        sys_path = map(os.path.normcase, sys_path)
+        install_lib = os.path.normcase(os.path.normpath(self.install_lib))
+        if (self.warn_dir and
+            not (self.path_file and self.install_path_file) and
+            install_lib not in sys_path):
+            log.debug(("modules installed to '%s', which is not in "
+                       "Python's module search path (sys.path) -- "
+                       "you'll have to change the search path yourself"),
+                       self.install_lib)
+
+    def create_path_file(self):
+        """Creates the .pth file"""
+        filename = os.path.join(self.install_libbase,
+                                self.path_file + ".pth")
+        if self.install_path_file:
+            self.execute(write_file,
+                         (filename, [self.extra_dirs]),
+                         "creating %s" % filename)
+        else:
+            self.warn("path file '%s' not created" % filename)
+
+
+    # -- Reporting methods ---------------------------------------------
+
+    def get_outputs(self):
+        """Assembles the outputs of all the sub-commands."""
+        outputs = []
+        for cmd_name in self.get_sub_commands():
+            cmd = self.get_finalized_command(cmd_name)
+            # Add the contents of cmd.get_outputs(), ensuring
+            # that outputs doesn't contain duplicate entries
+            for filename in cmd.get_outputs():
+                if filename not in outputs:
+                    outputs.append(filename)
+
+        if self.path_file and self.install_path_file:
+            outputs.append(os.path.join(self.install_libbase,
+                                        self.path_file + ".pth"))
+
+        return outputs
+
+    def get_inputs(self):
+        """Returns the inputs of all the sub-commands"""
+        # XXX gee, this looks familiar ;-(
+        inputs = []
+        for cmd_name in self.get_sub_commands():
+            cmd = self.get_finalized_command(cmd_name)
+            inputs.extend(cmd.get_inputs())
+
+        return inputs
+
+    # -- Predicates for sub-command list -------------------------------
+
+    def has_lib(self):
+        """Returns true if the current distribution has any Python
+        modules to install."""
+        return (self.distribution.has_pure_modules() or
+                self.distribution.has_ext_modules())
+
+    def has_headers(self):
+        """Returns true if the current distribution has any headers to
+        install."""
+        return self.distribution.has_headers()
+
+    def has_scripts(self):
+        """Returns true if the current distribution has any scripts to
+        install."""
+        return self.distribution.has_scripts()
+
+    def has_data(self):
+        """Returns true if the current distribution has any data to
+        install."""
+        return self.distribution.has_data_files()
+
+    # 'sub_commands': a list of commands this command might have to run to
+    # get its work done. See cmd.py for more info.
+ sub_commands = [('install_lib', has_lib), + ('install_headers', has_headers), + ('install_scripts', has_scripts), + ('install_data', has_data), + ('install_egg_info', lambda self:True), + ] diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/command/install_data.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/command/install_data.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/command/install_data.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/command/install_data.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,79 @@ +"""distutils.command.install_data + +Implements the Distutils 'install_data' command, for installing +platform-independent data files.""" + +# contributed by Bastian Kleineidam + +import os +from distutils.core import Command +from distutils.util import change_root, convert_path + +class install_data(Command): + + description = "install data files" + + user_options = [ + ('install-dir=', 'd', + "base directory for installing data files " + "(default: installation base dir)"), + ('root=', None, + "install everything relative to this alternate root directory"), + ('force', 'f', "force installation (overwrite existing files)"), + ] + + boolean_options = ['force'] + + def initialize_options(self): + self.install_dir = None + self.outfiles = [] + self.root = None + self.force = 0 + self.data_files = self.distribution.data_files + self.warn_dir = 1 + + def finalize_options(self): + self.set_undefined_options('install', + ('install_data', 'install_dir'), + ('root', 'root'), + ('force', 'force'), + ) + + def run(self): + self.mkpath(self.install_dir) + for f in self.data_files: + if isinstance(f, str): + # it's a simple file, so copy it + f = convert_path(f) + if self.warn_dir: + self.warn("setup script did not provide a directory for " + "'%s' -- installing right in '%s'" % + (f, self.install_dir)) + (out, _) = self.copy_file(f, self.install_dir) + self.outfiles.append(out) + else: + # it's a tuple with path to install to and a list of files + dir = convert_path(f[0]) + if not os.path.isabs(dir): + dir = os.path.join(self.install_dir, dir) + elif self.root: + dir = change_root(self.root, dir) + self.mkpath(dir) + + if f[1] == []: + # If there are no files listed, the user must be + # trying to create an empty directory, so add the + # directory to the list of output files. + self.outfiles.append(dir) + else: + # Copy files, adding them to the list of output files. 
+ for data in f[1]: + data = convert_path(data) + (out, _) = self.copy_file(data, dir) + self.outfiles.append(out) + + def get_inputs(self): + return self.data_files or [] + + def get_outputs(self): + return self.outfiles diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/command/install_egg_info.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/command/install_egg_info.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/command/install_egg_info.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/command/install_egg_info.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,77 @@ +"""distutils.command.install_egg_info + +Implements the Distutils 'install_egg_info' command, for installing +a package's PKG-INFO metadata.""" + + +from distutils.cmd import Command +from distutils import log, dir_util +import os, sys, re + +class install_egg_info(Command): + """Install an .egg-info file for the package""" + + description = "Install package's PKG-INFO metadata as an .egg-info file" + user_options = [ + ('install-dir=', 'd', "directory to install to"), + ] + + def initialize_options(self): + self.install_dir = None + + def finalize_options(self): + self.set_undefined_options('install_lib',('install_dir','install_dir')) + basename = "%s-%s-py%d.%d.egg-info" % ( + to_filename(safe_name(self.distribution.get_name())), + to_filename(safe_version(self.distribution.get_version())), + *sys.version_info[:2] + ) + self.target = os.path.join(self.install_dir, basename) + self.outputs = [self.target] + + def run(self): + target = self.target + if os.path.isdir(target) and not os.path.islink(target): + dir_util.remove_tree(target, dry_run=self.dry_run) + elif os.path.exists(target): + self.execute(os.unlink,(self.target,),"Removing "+target) + elif not os.path.isdir(self.install_dir): + self.execute(os.makedirs, (self.install_dir,), + "Creating "+self.install_dir) + log.info("Writing %s", target) + if not self.dry_run: + with open(target, 'w', encoding='UTF-8') as f: + self.distribution.metadata.write_pkg_file(f) + + def get_outputs(self): + return self.outputs + + +# The following routines are taken from setuptools' pkg_resources module and +# can be replaced by importing them from pkg_resources once it is included +# in the stdlib. + +def safe_name(name): + """Convert an arbitrary string to a standard distribution name + + Any runs of non-alphanumeric/. characters are replaced with a single '-'. + """ + return re.sub('[^A-Za-z0-9.]+', '-', name) + + +def safe_version(version): + """Convert an arbitrary string to a standard version string + + Spaces become dots, and all other non-alphanumeric characters become + dashes, with runs of multiple dashes condensed to a single dash. + """ + version = version.replace(' ','.') + return re.sub('[^A-Za-z0-9.]+', '-', version) + + +def to_filename(name): + """Convert a project or version name to its filename-escaped form + + Any '-' characters are currently replaced with '_'. 
+ """ + return name.replace('-','_') diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/command/install_headers.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/command/install_headers.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/command/install_headers.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/command/install_headers.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,47 @@ +"""distutils.command.install_headers + +Implements the Distutils 'install_headers' command, to install C/C++ header +files to the Python include directory.""" + +from distutils.core import Command + + +# XXX force is never used +class install_headers(Command): + + description = "install C/C++ header files" + + user_options = [('install-dir=', 'd', + "directory to install header files to"), + ('force', 'f', + "force installation (overwrite existing files)"), + ] + + boolean_options = ['force'] + + def initialize_options(self): + self.install_dir = None + self.force = 0 + self.outfiles = [] + + def finalize_options(self): + self.set_undefined_options('install', + ('install_headers', 'install_dir'), + ('force', 'force')) + + + def run(self): + headers = self.distribution.headers + if not headers: + return + + self.mkpath(self.install_dir) + for header in headers: + (out, _) = self.copy_file(header, self.install_dir) + self.outfiles.append(out) + + def get_inputs(self): + return self.distribution.headers or [] + + def get_outputs(self): + return self.outfiles diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/command/install_lib.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/command/install_lib.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/command/install_lib.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/command/install_lib.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,217 @@ +"""distutils.command.install_lib + +Implements the Distutils 'install_lib' command +(install all Python modules).""" + +import os +import importlib.util +import sys + +from distutils.core import Command +from distutils.errors import DistutilsOptionError + + +# Extension for Python source files. +PYTHON_SOURCE_EXTENSION = ".py" + +class install_lib(Command): + + description = "install all Python modules (extensions and pure Python)" + + # The byte-compilation options are a tad confusing. Here are the + # possible scenarios: + # 1) no compilation at all (--no-compile --no-optimize) + # 2) compile .pyc only (--compile --no-optimize; default) + # 3) compile .pyc and "opt-1" .pyc (--compile --optimize) + # 4) compile "opt-1" .pyc only (--no-compile --optimize) + # 5) compile .pyc and "opt-2" .pyc (--compile --optimize-more) + # 6) compile "opt-2" .pyc only (--no-compile --optimize-more) + # + # The UI for this is two options, 'compile' and 'optimize'. + # 'compile' is strictly boolean, and only decides whether to + # generate .pyc files. 'optimize' is three-way (0, 1, or 2), and + # decides both whether to generate .pyc files and what level of + # optimization to use. 
+ + user_options = [ + ('install-dir=', 'd', "directory to install to"), + ('build-dir=','b', "build directory (where to install from)"), + ('force', 'f', "force installation (overwrite existing files)"), + ('compile', 'c', "compile .py to .pyc [default]"), + ('no-compile', None, "don't compile .py files"), + ('optimize=', 'O', + "also compile with optimization: -O1 for \"python -O\", " + "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"), + ('skip-build', None, "skip the build steps"), + ] + + boolean_options = ['force', 'compile', 'skip-build'] + negative_opt = {'no-compile' : 'compile'} + + def initialize_options(self): + # let the 'install' command dictate our installation directory + self.install_dir = None + self.build_dir = None + self.force = 0 + self.compile = None + self.optimize = None + self.skip_build = None + + def finalize_options(self): + # Get all the information we need to install pure Python modules + # from the umbrella 'install' command -- build (source) directory, + # install (target) directory, and whether to compile .py files. + self.set_undefined_options('install', + ('build_lib', 'build_dir'), + ('install_lib', 'install_dir'), + ('force', 'force'), + ('compile', 'compile'), + ('optimize', 'optimize'), + ('skip_build', 'skip_build'), + ) + + if self.compile is None: + self.compile = True + if self.optimize is None: + self.optimize = False + + if not isinstance(self.optimize, int): + try: + self.optimize = int(self.optimize) + if self.optimize not in (0, 1, 2): + raise AssertionError + except (ValueError, AssertionError): + raise DistutilsOptionError("optimize must be 0, 1, or 2") + + def run(self): + # Make sure we have built everything we need first + self.build() + + # Install everything: simply dump the entire contents of the build + # directory to the installation directory (that's the beauty of + # having a build directory!) + outfiles = self.install() + + # (Optionally) compile .py to .pyc + if outfiles is not None and self.distribution.has_pure_modules(): + self.byte_compile(outfiles) + + # -- Top-level worker functions ------------------------------------ + # (called from 'run()') + + def build(self): + if not self.skip_build: + if self.distribution.has_pure_modules(): + self.run_command('build_py') + if self.distribution.has_ext_modules(): + self.run_command('build_ext') + + def install(self): + if os.path.isdir(self.build_dir): + outfiles = self.copy_tree(self.build_dir, self.install_dir) + else: + self.warn("'%s' does not exist -- no Python modules to install" % + self.build_dir) + return + return outfiles + + def byte_compile(self, files): + if sys.dont_write_bytecode: + self.warn('byte-compiling is disabled, skipping.') + return + + from distutils.util import byte_compile + + # Get the "--root" directory supplied to the "install" command, + # and use it as a prefix to strip off the purported filename + # encoded in bytecode files. This is far from complete, but it + # should at least generate usable bytecode in RPM distributions. 
+ install_root = self.get_finalized_command('install').root + + if self.compile: + byte_compile(files, optimize=0, + force=self.force, prefix=install_root, + dry_run=self.dry_run) + if self.optimize > 0: + byte_compile(files, optimize=self.optimize, + force=self.force, prefix=install_root, + verbose=self.verbose, dry_run=self.dry_run) + + + # -- Utility methods ----------------------------------------------- + + def _mutate_outputs(self, has_any, build_cmd, cmd_option, output_dir): + if not has_any: + return [] + + build_cmd = self.get_finalized_command(build_cmd) + build_files = build_cmd.get_outputs() + build_dir = getattr(build_cmd, cmd_option) + + prefix_len = len(build_dir) + len(os.sep) + outputs = [] + for file in build_files: + outputs.append(os.path.join(output_dir, file[prefix_len:])) + + return outputs + + def _bytecode_filenames(self, py_filenames): + bytecode_files = [] + for py_file in py_filenames: + # Since build_py handles package data installation, the + # list of outputs can contain more than just .py files. + # Make sure we only report bytecode for the .py files. + ext = os.path.splitext(os.path.normcase(py_file))[1] + if ext != PYTHON_SOURCE_EXTENSION: + continue + if self.compile: + bytecode_files.append(importlib.util.cache_from_source( + py_file, optimization='')) + if self.optimize > 0: + bytecode_files.append(importlib.util.cache_from_source( + py_file, optimization=self.optimize)) + + return bytecode_files + + + # -- External interface -------------------------------------------- + # (called by outsiders) + + def get_outputs(self): + """Return the list of files that would be installed if this command + were actually run. Not affected by the "dry-run" flag or whether + modules have actually been built yet. + """ + pure_outputs = \ + self._mutate_outputs(self.distribution.has_pure_modules(), + 'build_py', 'build_lib', + self.install_dir) + if self.compile: + bytecode_outputs = self._bytecode_filenames(pure_outputs) + else: + bytecode_outputs = [] + + ext_outputs = \ + self._mutate_outputs(self.distribution.has_ext_modules(), + 'build_ext', 'build_lib', + self.install_dir) + + return pure_outputs + bytecode_outputs + ext_outputs + + def get_inputs(self): + """Get the list of files that are input to this command, ie. the + files that get installed as they are named in the build tree. + The files in this list correspond one-to-one to the output + filenames returned by 'get_outputs()'. 
+ """ + inputs = [] + + if self.distribution.has_pure_modules(): + build_py = self.get_finalized_command('build_py') + inputs.extend(build_py.get_outputs()) + + if self.distribution.has_ext_modules(): + build_ext = self.get_finalized_command('build_ext') + inputs.extend(build_ext.get_outputs()) + + return inputs diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/command/install_scripts.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/command/install_scripts.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/command/install_scripts.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/command/install_scripts.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,60 @@ +"""distutils.command.install_scripts + +Implements the Distutils 'install_scripts' command, for installing +Python scripts.""" + +# contributed by Bastian Kleineidam + +import os +from distutils.core import Command +from distutils import log +from stat import ST_MODE + + +class install_scripts(Command): + + description = "install scripts (Python or otherwise)" + + user_options = [ + ('install-dir=', 'd', "directory to install scripts to"), + ('build-dir=','b', "build directory (where to install from)"), + ('force', 'f', "force installation (overwrite existing files)"), + ('skip-build', None, "skip the build steps"), + ] + + boolean_options = ['force', 'skip-build'] + + def initialize_options(self): + self.install_dir = None + self.force = 0 + self.build_dir = None + self.skip_build = None + + def finalize_options(self): + self.set_undefined_options('build', ('build_scripts', 'build_dir')) + self.set_undefined_options('install', + ('install_scripts', 'install_dir'), + ('force', 'force'), + ('skip_build', 'skip_build'), + ) + + def run(self): + if not self.skip_build: + self.run_command('build_scripts') + self.outfiles = self.copy_tree(self.build_dir, self.install_dir) + if os.name == 'posix': + # Set the executable bits (owner, group, and world) on + # all the scripts we just installed. + for file in self.get_outputs(): + if self.dry_run: + log.info("changing mode of %s", file) + else: + mode = ((os.stat(file)[ST_MODE]) | 0o555) & 0o7777 + log.info("changing mode of %s to %o", file, mode) + os.chmod(file, mode) + + def get_inputs(self): + return self.distribution.scripts or [] + + def get_outputs(self): + return self.outfiles or [] diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/command/register.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/command/register.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/command/register.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/command/register.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,304 @@ +"""distutils.command.register + +Implements the Distutils 'register' command (register with the repository). 
+""" + +# created 2002/10/21, Richard Jones + +import getpass +import io +import urllib.parse, urllib.request +from warnings import warn + +from distutils.core import PyPIRCCommand +from distutils.errors import * +from distutils import log + +class register(PyPIRCCommand): + + description = ("register the distribution with the Python package index") + user_options = PyPIRCCommand.user_options + [ + ('list-classifiers', None, + 'list the valid Trove classifiers'), + ('strict', None , + 'Will stop the registering if the meta-data are not fully compliant') + ] + boolean_options = PyPIRCCommand.boolean_options + [ + 'verify', 'list-classifiers', 'strict'] + + sub_commands = [('check', lambda self: True)] + + def initialize_options(self): + PyPIRCCommand.initialize_options(self) + self.list_classifiers = 0 + self.strict = 0 + + def finalize_options(self): + PyPIRCCommand.finalize_options(self) + # setting options for the `check` subcommand + check_options = {'strict': ('register', self.strict), + 'restructuredtext': ('register', 1)} + self.distribution.command_options['check'] = check_options + + def run(self): + self.finalize_options() + self._set_config() + + # Run sub commands + for cmd_name in self.get_sub_commands(): + self.run_command(cmd_name) + + if self.dry_run: + self.verify_metadata() + elif self.list_classifiers: + self.classifiers() + else: + self.send_metadata() + + def check_metadata(self): + """Deprecated API.""" + warn("distutils.command.register.check_metadata is deprecated, \ + use the check command instead", PendingDeprecationWarning) + check = self.distribution.get_command_obj('check') + check.ensure_finalized() + check.strict = self.strict + check.restructuredtext = 1 + check.run() + + def _set_config(self): + ''' Reads the configuration file and set attributes. + ''' + config = self._read_pypirc() + if config != {}: + self.username = config['username'] + self.password = config['password'] + self.repository = config['repository'] + self.realm = config['realm'] + self.has_config = True + else: + if self.repository not in ('pypi', self.DEFAULT_REPOSITORY): + raise ValueError('%s not found in .pypirc' % self.repository) + if self.repository == 'pypi': + self.repository = self.DEFAULT_REPOSITORY + self.has_config = False + + def classifiers(self): + ''' Fetch the list of classifiers from the server. + ''' + url = self.repository+'?:action=list_classifiers' + response = urllib.request.urlopen(url) + log.info(self._read_pypi_response(response)) + + def verify_metadata(self): + ''' Send the metadata to the package index server to be checked. + ''' + # send the info to the server and report the result + (code, result) = self.post_to_server(self.build_post_data('verify')) + log.info('Server response (%s): %s', code, result) + + def send_metadata(self): + ''' Send the metadata to the package index server. + + Well, do the following: + 1. figure who the user is, and then + 2. send the data as a Basic auth'ed POST. + + First we try to read the username/password from $HOME/.pypirc, + which is a ConfigParser-formatted file with a section + [distutils] containing username and password entries (both + in clear text). Eg: + + [distutils] + index-servers = + pypi + + [pypi] + username: fred + password: sekrit + + Otherwise, to figure who the user is, we offer the user three + choices: + + 1. use existing login, + 2. register as a new user, or + 3. set the password to a random string and email the user. 
+ + ''' + # see if we can short-cut and get the username/password from the + # config + if self.has_config: + choice = '1' + username = self.username + password = self.password + else: + choice = 'x' + username = password = '' + + # get the user's login info + choices = '1 2 3 4'.split() + while choice not in choices: + self.announce('''\ +We need to know who you are, so please choose either: + 1. use your existing login, + 2. register as a new user, + 3. have the server generate a new password for you (and email it to you), or + 4. quit +Your selection [default 1]: ''', log.INFO) + choice = input() + if not choice: + choice = '1' + elif choice not in choices: + print('Please choose one of the four options!') + + if choice == '1': + # get the username and password + while not username: + username = input('Username: ') + while not password: + password = getpass.getpass('Password: ') + + # set up the authentication + auth = urllib.request.HTTPPasswordMgr() + host = urllib.parse.urlparse(self.repository)[1] + auth.add_password(self.realm, host, username, password) + # send the info to the server and report the result + code, result = self.post_to_server(self.build_post_data('submit'), + auth) + self.announce('Server response (%s): %s' % (code, result), + log.INFO) + + # possibly save the login + if code == 200: + if self.has_config: + # sharing the password in the distribution instance + # so the upload command can reuse it + self.distribution.password = password + else: + self.announce(('I can store your PyPI login so future ' + 'submissions will be faster.'), log.INFO) + self.announce('(the login will be stored in %s)' % \ + self._get_rc_file(), log.INFO) + choice = 'X' + while choice.lower() not in 'yn': + choice = input('Save your login (y/N)?') + if not choice: + choice = 'n' + if choice.lower() == 'y': + self._store_pypirc(username, password) + + elif choice == '2': + data = {':action': 'user'} + data['name'] = data['password'] = data['email'] = '' + data['confirm'] = None + while not data['name']: + data['name'] = input('Username: ') + while data['password'] != data['confirm']: + while not data['password']: + data['password'] = getpass.getpass('Password: ') + while not data['confirm']: + data['confirm'] = getpass.getpass(' Confirm: ') + if data['password'] != data['confirm']: + data['password'] = '' + data['confirm'] = None + print("Password and confirm don't match!") + while not data['email']: + data['email'] = input(' EMail: ') + code, result = self.post_to_server(data) + if code != 200: + log.info('Server response (%s): %s', code, result) + else: + log.info('You will receive an email shortly.') + log.info(('Follow the instructions in it to ' + 'complete registration.')) + elif choice == '3': + data = {':action': 'password_reset'} + data['email'] = '' + while not data['email']: + data['email'] = input('Your email address: ') + code, result = self.post_to_server(data) + log.info('Server response (%s): %s', code, result) + + def build_post_data(self, action): + # figure the data to send - the metadata plus some additional + # information used by the package server + meta = self.distribution.metadata + data = { + ':action': action, + 'metadata_version' : '1.0', + 'name': meta.get_name(), + 'version': meta.get_version(), + 'summary': meta.get_description(), + 'home_page': meta.get_url(), + 'author': meta.get_contact(), + 'author_email': meta.get_contact_email(), + 'license': meta.get_licence(), + 'description': meta.get_long_description(), + 'keywords': meta.get_keywords(), + 'platform': 
meta.get_platforms(), + 'classifiers': meta.get_classifiers(), + 'download_url': meta.get_download_url(), + # PEP 314 + 'provides': meta.get_provides(), + 'requires': meta.get_requires(), + 'obsoletes': meta.get_obsoletes(), + } + if data['provides'] or data['requires'] or data['obsoletes']: + data['metadata_version'] = '1.1' + return data + + def post_to_server(self, data, auth=None): + ''' Post a query to the server, and return a string response. + ''' + if 'name' in data: + self.announce('Registering %s to %s' % (data['name'], + self.repository), + log.INFO) + # Build up the MIME payload for the urllib2 POST data + boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254' + sep_boundary = '\n--' + boundary + end_boundary = sep_boundary + '--' + body = io.StringIO() + for key, value in data.items(): + # handle multiple entries for the same name + if type(value) not in (type([]), type( () )): + value = [value] + for value in value: + value = str(value) + body.write(sep_boundary) + body.write('\nContent-Disposition: form-data; name="%s"'%key) + body.write("\n\n") + body.write(value) + if value and value[-1] == '\r': + body.write('\n') # write an extra newline (lurve Macs) + body.write(end_boundary) + body.write("\n") + body = body.getvalue().encode("utf-8") + + # build the Request + headers = { + 'Content-type': 'multipart/form-data; boundary=%s; charset=utf-8'%boundary, + 'Content-length': str(len(body)) + } + req = urllib.request.Request(self.repository, body, headers) + + # handle HTTP and include the Basic Auth handler + opener = urllib.request.build_opener( + urllib.request.HTTPBasicAuthHandler(password_mgr=auth) + ) + data = '' + try: + result = opener.open(req) + except urllib.error.HTTPError as e: + if self.show_response: + data = e.fp.read() + result = e.code, e.msg + except urllib.error.URLError as e: + result = 500, str(e) + else: + if self.show_response: + data = self._read_pypi_response(result) + result = 200, 'OK' + if self.show_response: + msg = '\n'.join(('-' * 75, data, '-' * 75)) + self.announce(msg, log.INFO) + return result diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/command/sdist.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/command/sdist.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/command/sdist.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/command/sdist.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,494 @@ +"""distutils.command.sdist + +Implements the Distutils 'sdist' command (create a source distribution).""" + +import os +import sys +from glob import glob +from warnings import warn + +from distutils.core import Command +from distutils import dir_util +from distutils import file_util +from distutils import archive_util +from distutils.text_file import TextFile +from distutils.filelist import FileList +from distutils import log +from distutils.util import convert_path +from distutils.errors import DistutilsTemplateError, DistutilsOptionError + + +def show_formats(): + """Print all possible values for the 'formats' option (used by + the "--help-formats" command-line option). 
+ """ + from distutils.fancy_getopt import FancyGetopt + from distutils.archive_util import ARCHIVE_FORMATS + formats = [] + for format in ARCHIVE_FORMATS.keys(): + formats.append(("formats=" + format, None, + ARCHIVE_FORMATS[format][2])) + formats.sort() + FancyGetopt(formats).print_help( + "List of available source distribution formats:") + + +class sdist(Command): + + description = "create a source distribution (tarball, zip file, etc.)" + + def checking_metadata(self): + """Callable used for the check sub-command. + + Placed here so user_options can view it""" + return self.metadata_check + + user_options = [ + ('template=', 't', + "name of manifest template file [default: MANIFEST.in]"), + ('manifest=', 'm', + "name of manifest file [default: MANIFEST]"), + ('use-defaults', None, + "include the default file set in the manifest " + "[default; disable with --no-defaults]"), + ('no-defaults', None, + "don't include the default file set"), + ('prune', None, + "specifically exclude files/directories that should not be " + "distributed (build tree, RCS/CVS dirs, etc.) " + "[default; disable with --no-prune]"), + ('no-prune', None, + "don't automatically exclude anything"), + ('manifest-only', 'o', + "just regenerate the manifest and then stop " + "(implies --force-manifest)"), + ('force-manifest', 'f', + "forcibly regenerate the manifest and carry on as usual. " + "Deprecated: now the manifest is always regenerated."), + ('formats=', None, + "formats for source distribution (comma-separated list)"), + ('keep-temp', 'k', + "keep the distribution tree around after creating " + + "archive file(s)"), + ('dist-dir=', 'd', + "directory to put the source distribution archive(s) in " + "[default: dist]"), + ('metadata-check', None, + "Ensure that all required elements of meta-data " + "are supplied. Warn if any missing. [default]"), + ('owner=', 'u', + "Owner name used when creating a tar file [default: current user]"), + ('group=', 'g', + "Group name used when creating a tar file [default: current group]"), + ] + + boolean_options = ['use-defaults', 'prune', + 'manifest-only', 'force-manifest', + 'keep-temp', 'metadata-check'] + + help_options = [ + ('help-formats', None, + "list available distribution formats", show_formats), + ] + + negative_opt = {'no-defaults': 'use-defaults', + 'no-prune': 'prune' } + + sub_commands = [('check', checking_metadata)] + + READMES = ('README', 'README.txt', 'README.rst') + + def initialize_options(self): + # 'template' and 'manifest' are, respectively, the names of + # the manifest template and manifest file. 
+ self.template = None + self.manifest = None + + # 'use_defaults': if true, we will include the default file set + # in the manifest + self.use_defaults = 1 + self.prune = 1 + + self.manifest_only = 0 + self.force_manifest = 0 + + self.formats = ['gztar'] + self.keep_temp = 0 + self.dist_dir = None + + self.archive_files = None + self.metadata_check = 1 + self.owner = None + self.group = None + + def finalize_options(self): + if self.manifest is None: + self.manifest = "MANIFEST" + if self.template is None: + self.template = "MANIFEST.in" + + self.ensure_string_list('formats') + + bad_format = archive_util.check_archive_formats(self.formats) + if bad_format: + raise DistutilsOptionError( + "unknown archive format '%s'" % bad_format) + + if self.dist_dir is None: + self.dist_dir = "dist" + + def run(self): + # 'filelist' contains the list of files that will make up the + # manifest + self.filelist = FileList() + + # Run sub commands + for cmd_name in self.get_sub_commands(): + self.run_command(cmd_name) + + # Do whatever it takes to get the list of files to process + # (process the manifest template, read an existing manifest, + # whatever). File list is accumulated in 'self.filelist'. + self.get_file_list() + + # If user just wanted us to regenerate the manifest, stop now. + if self.manifest_only: + return + + # Otherwise, go ahead and create the source distribution tarball, + # or zipfile, or whatever. + self.make_distribution() + + def check_metadata(self): + """Deprecated API.""" + warn("distutils.command.sdist.check_metadata is deprecated, \ + use the check command instead", PendingDeprecationWarning) + check = self.distribution.get_command_obj('check') + check.ensure_finalized() + check.run() + + def get_file_list(self): + """Figure out the list of files to include in the source + distribution, and put it in 'self.filelist'. This might involve + reading the manifest template (and writing the manifest), or just + reading the manifest, or just using the default file set -- it all + depends on the user's options. + """ + # new behavior when using a template: + # the file list is recalculated every time because + # even if MANIFEST.in or setup.py are not changed + # the user might have added some files in the tree that + # need to be included. + # + # This makes --force the default and only behavior with templates. + template_exists = os.path.isfile(self.template) + if not template_exists and self._manifest_is_not_generated(): + self.read_manifest() + self.filelist.sort() + self.filelist.remove_duplicates() + return + + if not template_exists: + self.warn(("manifest template '%s' does not exist " + + "(using default file list)") % + self.template) + self.filelist.findall() + + if self.use_defaults: + self.add_defaults() + + if template_exists: + self.read_template() + + if self.prune: + self.prune_file_list() + + self.filelist.sort() + self.filelist.remove_duplicates() + self.write_manifest() + + def add_defaults(self): + """Add all the default files to self.filelist: + - README or README.txt + - setup.py + - test/test*.py + - all pure Python modules mentioned in setup script + - all files pointed by package_data (build_py) + - all files defined in data_files. + - all files defined as scripts. + - all C sources listed as part of extensions or C libraries + in the setup script (doesn't catch C headers!) + Warns if (README or README.txt) or setup.py are missing; everything + else is optional. 
+ """ + self._add_defaults_standards() + self._add_defaults_optional() + self._add_defaults_python() + self._add_defaults_data_files() + self._add_defaults_ext() + self._add_defaults_c_libs() + self._add_defaults_scripts() + + @staticmethod + def _cs_path_exists(fspath): + """ + Case-sensitive path existence check + + >>> sdist._cs_path_exists(__file__) + True + >>> sdist._cs_path_exists(__file__.upper()) + False + """ + if not os.path.exists(fspath): + return False + # make absolute so we always have a directory + abspath = os.path.abspath(fspath) + directory, filename = os.path.split(abspath) + return filename in os.listdir(directory) + + def _add_defaults_standards(self): + standards = [self.READMES, self.distribution.script_name] + for fn in standards: + if isinstance(fn, tuple): + alts = fn + got_it = False + for fn in alts: + if self._cs_path_exists(fn): + got_it = True + self.filelist.append(fn) + break + + if not got_it: + self.warn("standard file not found: should have one of " + + ', '.join(alts)) + else: + if self._cs_path_exists(fn): + self.filelist.append(fn) + else: + self.warn("standard file '%s' not found" % fn) + + def _add_defaults_optional(self): + optional = ['test/test*.py', 'setup.cfg'] + for pattern in optional: + files = filter(os.path.isfile, glob(pattern)) + self.filelist.extend(files) + + def _add_defaults_python(self): + # build_py is used to get: + # - python modules + # - files defined in package_data + build_py = self.get_finalized_command('build_py') + + # getting python files + if self.distribution.has_pure_modules(): + self.filelist.extend(build_py.get_source_files()) + + # getting package_data files + # (computed in build_py.data_files by build_py.finalize_options) + for pkg, src_dir, build_dir, filenames in build_py.data_files: + for filename in filenames: + self.filelist.append(os.path.join(src_dir, filename)) + + def _add_defaults_data_files(self): + # getting distribution.data_files + if self.distribution.has_data_files(): + for item in self.distribution.data_files: + if isinstance(item, str): + # plain file + item = convert_path(item) + if os.path.isfile(item): + self.filelist.append(item) + else: + # a (dirname, filenames) tuple + dirname, filenames = item + for f in filenames: + f = convert_path(f) + if os.path.isfile(f): + self.filelist.append(f) + + def _add_defaults_ext(self): + if self.distribution.has_ext_modules(): + build_ext = self.get_finalized_command('build_ext') + self.filelist.extend(build_ext.get_source_files()) + + def _add_defaults_c_libs(self): + if self.distribution.has_c_libraries(): + build_clib = self.get_finalized_command('build_clib') + self.filelist.extend(build_clib.get_source_files()) + + def _add_defaults_scripts(self): + if self.distribution.has_scripts(): + build_scripts = self.get_finalized_command('build_scripts') + self.filelist.extend(build_scripts.get_source_files()) + + def read_template(self): + """Read and parse manifest template file named by self.template. + + (usually "MANIFEST.in") The parsing and processing is done by + 'self.filelist', which updates itself accordingly. 
+ """ + log.info("reading manifest template '%s'", self.template) + template = TextFile(self.template, strip_comments=1, skip_blanks=1, + join_lines=1, lstrip_ws=1, rstrip_ws=1, + collapse_join=1) + + try: + while True: + line = template.readline() + if line is None: # end of file + break + + try: + self.filelist.process_template_line(line) + # the call above can raise a DistutilsTemplateError for + # malformed lines, or a ValueError from the lower-level + # convert_path function + except (DistutilsTemplateError, ValueError) as msg: + self.warn("%s, line %d: %s" % (template.filename, + template.current_line, + msg)) + finally: + template.close() + + def prune_file_list(self): + """Prune off branches that might slip into the file list as created + by 'read_template()', but really don't belong there: + * the build tree (typically "build") + * the release tree itself (only an issue if we ran "sdist" + previously with --keep-temp, or it aborted) + * any RCS, CVS, .svn, .hg, .git, .bzr, _darcs directories + """ + build = self.get_finalized_command('build') + base_dir = self.distribution.get_fullname() + + self.filelist.exclude_pattern(None, prefix=build.build_base) + self.filelist.exclude_pattern(None, prefix=base_dir) + + if sys.platform == 'win32': + seps = r'/|\\' + else: + seps = '/' + + vcs_dirs = ['RCS', 'CVS', r'\.svn', r'\.hg', r'\.git', r'\.bzr', + '_darcs'] + vcs_ptrn = r'(^|%s)(%s)(%s).*' % (seps, '|'.join(vcs_dirs), seps) + self.filelist.exclude_pattern(vcs_ptrn, is_regex=1) + + def write_manifest(self): + """Write the file list in 'self.filelist' (presumably as filled in + by 'add_defaults()' and 'read_template()') to the manifest file + named by 'self.manifest'. + """ + if self._manifest_is_not_generated(): + log.info("not writing to manually maintained " + "manifest file '%s'" % self.manifest) + return + + content = self.filelist.files[:] + content.insert(0, '# file GENERATED by distutils, do NOT edit') + self.execute(file_util.write_file, (self.manifest, content), + "writing manifest file '%s'" % self.manifest) + + def _manifest_is_not_generated(self): + # check for special comment used in 3.1.3 and higher + if not os.path.isfile(self.manifest): + return False + + fp = open(self.manifest) + try: + first_line = fp.readline() + finally: + fp.close() + return first_line != '# file GENERATED by distutils, do NOT edit\n' + + def read_manifest(self): + """Read the manifest file (named by 'self.manifest') and use it to + fill in 'self.filelist', the list of files to include in the source + distribution. + """ + log.info("reading manifest file '%s'", self.manifest) + with open(self.manifest) as manifest: + for line in manifest: + # ignore comments and blank lines + line = line.strip() + if line.startswith('#') or not line: + continue + self.filelist.append(line) + + def make_release_tree(self, base_dir, files): + """Create the directory tree that will become the source + distribution archive. All directories implied by the filenames in + 'files' are created under 'base_dir', and then we hard link or copy + (if hard linking is unavailable) those files into place. + Essentially, this duplicates the developer's source tree, but in a + directory named after the distribution, containing only the files + to be distributed. + """ + # Create all the directories under 'base_dir' necessary to + # put 'files' there; the 'mkpath()' is just so we don't die + # if the manifest happens to be empty. 
+ self.mkpath(base_dir) + dir_util.create_tree(base_dir, files, dry_run=self.dry_run) + + # And walk over the list of files, either making a hard link (if + # os.link exists) to each one that doesn't already exist in its + # corresponding location under 'base_dir', or copying each file + # that's out-of-date in 'base_dir'. (Usually, all files will be + # out-of-date, because by default we blow away 'base_dir' when + # we're done making the distribution archives.) + + if hasattr(os, 'link'): # can make hard links on this system + link = 'hard' + msg = "making hard links in %s..." % base_dir + else: # nope, have to copy + link = None + msg = "copying files to %s..." % base_dir + + if not files: + log.warn("no files to distribute -- empty manifest?") + else: + log.info(msg) + for file in files: + if not os.path.isfile(file): + log.warn("'%s' not a regular file -- skipping", file) + else: + dest = os.path.join(base_dir, file) + self.copy_file(file, dest, link=link) + + self.distribution.metadata.write_pkg_info(base_dir) + + def make_distribution(self): + """Create the source distribution(s). First, we create the release + tree with 'make_release_tree()'; then, we create all required + archive files (according to 'self.formats') from the release tree. + Finally, we clean up by blowing away the release tree (unless + 'self.keep_temp' is true). The list of archive files created is + stored so it can be retrieved later by 'get_archive_files()'. + """ + # Don't warn about missing meta-data here -- should be (and is!) + # done elsewhere. + base_dir = self.distribution.get_fullname() + base_name = os.path.join(self.dist_dir, base_dir) + + self.make_release_tree(base_dir, self.filelist.files) + archive_files = [] # remember names of files we create + # tar archive must be created last to avoid overwrite and remove + if 'tar' in self.formats: + self.formats.append(self.formats.pop(self.formats.index('tar'))) + + for fmt in self.formats: + file = self.make_archive(base_name, fmt, base_dir=base_dir, + owner=self.owner, group=self.group) + archive_files.append(file) + self.distribution.dist_files.append(('sdist', '', file)) + + self.archive_files = archive_files + + if not self.keep_temp: + dir_util.remove_tree(base_dir, dry_run=self.dry_run) + + def get_archive_files(self): + """Return the list of archive files created when the command + was run, or None if the command hasn't run yet. + """ + return self.archive_files diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/command/upload.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/command/upload.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/command/upload.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/command/upload.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,215 @@ +""" +distutils.command.upload + +Implements the Distutils 'upload' subcommand (upload package to a package +index). 
+""" + +import os +import io +import hashlib +from base64 import standard_b64encode +from urllib.error import HTTPError +from urllib.request import urlopen, Request +from urllib.parse import urlparse +from distutils.errors import DistutilsError, DistutilsOptionError +from distutils.core import PyPIRCCommand +from distutils.spawn import spawn +from distutils import log + + +# PyPI Warehouse supports MD5, SHA256, and Blake2 (blake2-256) +# https://bugs.python.org/issue40698 +_FILE_CONTENT_DIGESTS = { + "md5_digest": getattr(hashlib, "md5", None), + "sha256_digest": getattr(hashlib, "sha256", None), + "blake2_256_digest": getattr(hashlib, "blake2b", None), +} + + +class upload(PyPIRCCommand): + + description = "upload binary package to PyPI" + + user_options = PyPIRCCommand.user_options + [ + ('sign', 's', + 'sign files to upload using gpg'), + ('identity=', 'i', 'GPG identity used to sign files'), + ] + + boolean_options = PyPIRCCommand.boolean_options + ['sign'] + + def initialize_options(self): + PyPIRCCommand.initialize_options(self) + self.username = '' + self.password = '' + self.show_response = 0 + self.sign = False + self.identity = None + + def finalize_options(self): + PyPIRCCommand.finalize_options(self) + if self.identity and not self.sign: + raise DistutilsOptionError( + "Must use --sign for --identity to have meaning" + ) + config = self._read_pypirc() + if config != {}: + self.username = config['username'] + self.password = config['password'] + self.repository = config['repository'] + self.realm = config['realm'] + + # getting the password from the distribution + # if previously set by the register command + if not self.password and self.distribution.password: + self.password = self.distribution.password + + def run(self): + if not self.distribution.dist_files: + msg = ("Must create and upload files in one command " + "(e.g. 
setup.py sdist upload)") + raise DistutilsOptionError(msg) + for command, pyversion, filename in self.distribution.dist_files: + self.upload_file(command, pyversion, filename) + + def upload_file(self, command, pyversion, filename): + # Makes sure the repository URL is compliant + schema, netloc, url, params, query, fragments = \ + urlparse(self.repository) + if params or query or fragments: + raise AssertionError("Incompatible url %s" % self.repository) + + if schema not in ('http', 'https'): + raise AssertionError("unsupported schema " + schema) + + # Sign if requested + if self.sign: + gpg_args = ["gpg", "--detach-sign", "-a", filename] + if self.identity: + gpg_args[2:2] = ["--local-user", self.identity] + spawn(gpg_args, + dry_run=self.dry_run) + + # Fill in the data - send all the meta-data in case we need to + # register a new release + f = open(filename,'rb') + try: + content = f.read() + finally: + f.close() + + meta = self.distribution.metadata + data = { + # action + ':action': 'file_upload', + 'protocol_version': '1', + + # identify release + 'name': meta.get_name(), + 'version': meta.get_version(), + + # file content + 'content': (os.path.basename(filename),content), + 'filetype': command, + 'pyversion': pyversion, + + # additional meta-data + 'metadata_version': '1.0', + 'summary': meta.get_description(), + 'home_page': meta.get_url(), + 'author': meta.get_contact(), + 'author_email': meta.get_contact_email(), + 'license': meta.get_licence(), + 'description': meta.get_long_description(), + 'keywords': meta.get_keywords(), + 'platform': meta.get_platforms(), + 'classifiers': meta.get_classifiers(), + 'download_url': meta.get_download_url(), + # PEP 314 + 'provides': meta.get_provides(), + 'requires': meta.get_requires(), + 'obsoletes': meta.get_obsoletes(), + } + + data['comment'] = '' + + # file content digests + for digest_name, digest_cons in _FILE_CONTENT_DIGESTS.items(): + if digest_cons is None: + continue + try: + data[digest_name] = digest_cons(content).hexdigest() + except ValueError: + # hash digest not available or blocked by security policy + pass + + if self.sign: + with open(filename + ".asc", "rb") as f: + data['gpg_signature'] = (os.path.basename(filename) + ".asc", + f.read()) + + # set up the authentication + user_pass = (self.username + ":" + self.password).encode('ascii') + # The exact encoding of the authentication string is debated. + # Anyway PyPI only accepts ascii for both username or password. 
+ auth = "Basic " + standard_b64encode(user_pass).decode('ascii') + + # Build up the MIME payload for the POST data + boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254' + sep_boundary = b'\r\n--' + boundary.encode('ascii') + end_boundary = sep_boundary + b'--\r\n' + body = io.BytesIO() + for key, value in data.items(): + title = '\r\nContent-Disposition: form-data; name="%s"' % key + # handle multiple entries for the same name + if not isinstance(value, list): + value = [value] + for value in value: + if type(value) is tuple: + title += '; filename="%s"' % value[0] + value = value[1] + else: + value = str(value).encode('utf-8') + body.write(sep_boundary) + body.write(title.encode('utf-8')) + body.write(b"\r\n\r\n") + body.write(value) + body.write(end_boundary) + body = body.getvalue() + + msg = "Submitting %s to %s" % (filename, self.repository) + self.announce(msg, log.INFO) + + # build the Request + headers = { + 'Content-type': 'multipart/form-data; boundary=%s' % boundary, + 'Content-length': str(len(body)), + 'Authorization': auth, + } + + request = Request(self.repository, data=body, + headers=headers) + # send the data + try: + result = urlopen(request) + status = result.getcode() + reason = result.msg + except HTTPError as e: + status = e.code + reason = e.msg + except OSError as e: + self.announce(str(e), log.ERROR) + raise + + if status == 200: + self.announce('Server response (%s): %s' % (status, reason), + log.INFO) + if self.show_response: + text = self._read_pypi_response(result) + msg = '\n'.join(('-' * 75, text, '-' * 75)) + self.announce(msg, log.INFO) + else: + msg = 'Upload failed (%s): %s' % (status, reason) + self.announce(msg, log.ERROR) + raise DistutilsError(msg) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/config.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/config.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/config.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/config.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,133 @@ +"""distutils.pypirc + +Provides the PyPIRCCommand class, the base class for the command classes +that uses .pypirc in the distutils.command package. 
+""" +import os +from configparser import RawConfigParser +import warnings + +from distutils.cmd import Command + +DEFAULT_PYPIRC = """\ +[distutils] +index-servers = + pypi + +[pypi] +username:%s +password:%s +""" + +class PyPIRCCommand(Command): + """Base command that knows how to handle the .pypirc file + """ + DEFAULT_REPOSITORY = 'https://upload.pypi.org/legacy/' + DEFAULT_REALM = 'pypi' + repository = None + realm = None + + user_options = [ + ('repository=', 'r', + "url of repository [default: %s]" % \ + DEFAULT_REPOSITORY), + ('show-response', None, + 'display full response text from server')] + + boolean_options = ['show-response'] + + def _get_rc_file(self): + """Returns rc file path.""" + return os.path.join(os.path.expanduser('~'), '.pypirc') + + def _store_pypirc(self, username, password): + """Creates a default .pypirc file.""" + rc = self._get_rc_file() + with os.fdopen(os.open(rc, os.O_CREAT | os.O_WRONLY, 0o600), 'w') as f: + f.write(DEFAULT_PYPIRC % (username, password)) + + def _read_pypirc(self): + """Reads the .pypirc file.""" + rc = self._get_rc_file() + if os.path.exists(rc): + self.announce('Using PyPI login from %s' % rc) + repository = self.repository or self.DEFAULT_REPOSITORY + + config = RawConfigParser() + config.read(rc) + sections = config.sections() + if 'distutils' in sections: + # let's get the list of servers + index_servers = config.get('distutils', 'index-servers') + _servers = [server.strip() for server in + index_servers.split('\n') + if server.strip() != ''] + if _servers == []: + # nothing set, let's try to get the default pypi + if 'pypi' in sections: + _servers = ['pypi'] + else: + # the file is not properly defined, returning + # an empty dict + return {} + for server in _servers: + current = {'server': server} + current['username'] = config.get(server, 'username') + + # optional params + for key, default in (('repository', + self.DEFAULT_REPOSITORY), + ('realm', self.DEFAULT_REALM), + ('password', None)): + if config.has_option(server, key): + current[key] = config.get(server, key) + else: + current[key] = default + + # work around people having "repository" for the "pypi" + # section of their config set to the HTTP (rather than + # HTTPS) URL + if (server == 'pypi' and + repository in (self.DEFAULT_REPOSITORY, 'pypi')): + current['repository'] = self.DEFAULT_REPOSITORY + return current + + if (current['server'] == repository or + current['repository'] == repository): + return current + elif 'server-login' in sections: + # old format + server = 'server-login' + if config.has_option(server, 'repository'): + repository = config.get(server, 'repository') + else: + repository = self.DEFAULT_REPOSITORY + return {'username': config.get(server, 'username'), + 'password': config.get(server, 'password'), + 'repository': repository, + 'server': server, + 'realm': self.DEFAULT_REALM} + + return {} + + def _read_pypi_response(self, response): + """Read and decode a PyPI HTTP response.""" + with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + import cgi + content_type = response.getheader('content-type', 'text/plain') + encoding = cgi.parse_header(content_type)[1].get('charset', 'ascii') + return response.read().decode(encoding) + + def initialize_options(self): + """Initialize options.""" + self.repository = None + self.realm = None + self.show_response = 0 + + def finalize_options(self): + """Finalizes options.""" + if self.repository is None: + self.repository = self.DEFAULT_REPOSITORY + if self.realm is None: + 
self.realm = self.DEFAULT_REALM diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/core.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/core.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/core.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/core.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,234 @@ +"""distutils.core + +The only module that needs to be imported to use the Distutils; provides +the 'setup' function (which is to be called from the setup script). Also +indirectly provides the Distribution and Command classes, although they are +really defined in distutils.dist and distutils.cmd. +""" + +import os +import sys + +from distutils.debug import DEBUG +from distutils.errors import * + +# Mainly import these so setup scripts can "from distutils.core import" them. +from distutils.dist import Distribution +from distutils.cmd import Command +from distutils.config import PyPIRCCommand +from distutils.extension import Extension + +# This is a barebones help message generated displayed when the user +# runs the setup script with no arguments at all. More useful help +# is generated with various --help options: global help, list commands, +# and per-command help. +USAGE = """\ +usage: %(script)s [global_opts] cmd1 [cmd1_opts] [cmd2 [cmd2_opts] ...] + or: %(script)s --help [cmd1 cmd2 ...] + or: %(script)s --help-commands + or: %(script)s cmd --help +""" + +def gen_usage (script_name): + script = os.path.basename(script_name) + return USAGE % vars() + + +# Some mild magic to control the behaviour of 'setup()' from 'run_setup()'. +_setup_stop_after = None +_setup_distribution = None + +# Legal keyword arguments for the setup() function +setup_keywords = ('distclass', 'script_name', 'script_args', 'options', + 'name', 'version', 'author', 'author_email', + 'maintainer', 'maintainer_email', 'url', 'license', + 'description', 'long_description', 'keywords', + 'platforms', 'classifiers', 'download_url', + 'requires', 'provides', 'obsoletes', + ) + +# Legal keyword arguments for the Extension constructor +extension_keywords = ('name', 'sources', 'include_dirs', + 'define_macros', 'undef_macros', + 'library_dirs', 'libraries', 'runtime_library_dirs', + 'extra_objects', 'extra_compile_args', 'extra_link_args', + 'swig_opts', 'export_symbols', 'depends', 'language') + +def setup (**attrs): + """The gateway to the Distutils: do everything your setup script needs + to do, in a highly flexible and user-driven way. Briefly: create a + Distribution instance; find and parse config files; parse the command + line; run each Distutils command found there, customized by the options + supplied to 'setup()' (as keyword arguments), in config files, and on + the command line. + + The Distribution instance might be an instance of a class supplied via + the 'distclass' keyword argument to 'setup'; if no such class is + supplied, then the Distribution class (in dist.py) is instantiated. + All other arguments to 'setup' (except for 'cmdclass') are used to set + attributes of the Distribution instance. + + The 'cmdclass' argument, if supplied, is a dictionary mapping command + names to command classes. Each command encountered on the command line + will be turned into a command class, which is in turn instantiated; any + class found in 'cmdclass' is used in place of the default, which is + (for command 'foo_bar') class 'foo_bar' in module + 'distutils.command.foo_bar'. 
The command class must provide a + 'user_options' attribute which is a list of option specifiers for + 'distutils.fancy_getopt'. Any command-line options between the current + and the next command are used to set attributes of the current command + object. + + When the entire command-line has been successfully parsed, calls the + 'run()' method on each command object in turn. This method will be + driven entirely by the Distribution object (which each command object + has a reference to, thanks to its constructor), and the + command-specific options that became attributes of each command + object. + """ + + global _setup_stop_after, _setup_distribution + + # Determine the distribution class -- either caller-supplied or + # our Distribution (see below). + klass = attrs.get('distclass') + if klass: + del attrs['distclass'] + else: + klass = Distribution + + if 'script_name' not in attrs: + attrs['script_name'] = os.path.basename(sys.argv[0]) + if 'script_args' not in attrs: + attrs['script_args'] = sys.argv[1:] + + # Create the Distribution instance, using the remaining arguments + # (ie. everything except distclass) to initialize it + try: + _setup_distribution = dist = klass(attrs) + except DistutilsSetupError as msg: + if 'name' not in attrs: + raise SystemExit("error in setup command: %s" % msg) + else: + raise SystemExit("error in %s setup command: %s" % \ + (attrs['name'], msg)) + + if _setup_stop_after == "init": + return dist + + # Find and parse the config file(s): they will override options from + # the setup script, but be overridden by the command line. + dist.parse_config_files() + + if DEBUG: + print("options (after parsing config files):") + dist.dump_option_dicts() + + if _setup_stop_after == "config": + return dist + + # Parse the command line and override config files; any + # command-line errors are the end user's fault, so turn them into + # SystemExit to suppress tracebacks. + try: + ok = dist.parse_command_line() + except DistutilsArgError as msg: + raise SystemExit(gen_usage(dist.script_name) + "\nerror: %s" % msg) + + if DEBUG: + print("options (after parsing command line):") + dist.dump_option_dicts() + + if _setup_stop_after == "commandline": + return dist + + # And finally, run all the commands found on the command line. + if ok: + try: + dist.run_commands() + except KeyboardInterrupt: + raise SystemExit("interrupted") + except OSError as exc: + if DEBUG: + sys.stderr.write("error: %s\n" % (exc,)) + raise + else: + raise SystemExit("error: %s" % (exc,)) + + except (DistutilsError, + CCompilerError) as msg: + if DEBUG: + raise + else: + raise SystemExit("error: " + str(msg)) + + return dist + +# setup () + + +def run_setup (script_name, script_args=None, stop_after="run"): + """Run a setup script in a somewhat controlled environment, and + return the Distribution instance that drives things. This is useful + if you need to find out the distribution meta-data (passed as + keyword args from 'script' to 'setup()', or the contents of the + config files or command-line. + + 'script_name' is a file that will be read and run with 'exec()'; + 'sys.argv[0]' will be replaced with 'script' for the duration of the + call. 'script_args' is a list of strings; if supplied, + 'sys.argv[1:]' will be replaced by 'script_args' for the duration of + the call. 
+ + 'stop_after' tells 'setup()' when to stop processing; possible + values: + init + stop after the Distribution instance has been created and + populated with the keyword arguments to 'setup()' + config + stop after config files have been parsed (and their data + stored in the Distribution instance) + commandline + stop after the command-line ('sys.argv[1:]' or 'script_args') + have been parsed (and the data stored in the Distribution) + run [default] + stop after all commands have been run (the same as if 'setup()' + had been called in the usual way + + Returns the Distribution instance, which provides all information + used to drive the Distutils. + """ + if stop_after not in ('init', 'config', 'commandline', 'run'): + raise ValueError("invalid value for 'stop_after': %r" % (stop_after,)) + + global _setup_stop_after, _setup_distribution + _setup_stop_after = stop_after + + save_argv = sys.argv.copy() + g = {'__file__': script_name} + try: + try: + sys.argv[0] = script_name + if script_args is not None: + sys.argv[1:] = script_args + with open(script_name, 'rb') as f: + exec(f.read(), g) + finally: + sys.argv = save_argv + _setup_stop_after = None + except SystemExit: + # Hmm, should we do something if exiting with a non-zero code + # (ie. error)? + pass + + if _setup_distribution is None: + raise RuntimeError(("'distutils.core.setup()' was never called -- " + "perhaps '%s' is not a Distutils setup script?") % \ + script_name) + + # I wonder if the setup script's namespace -- g and l -- would be of + # any interest to callers? + #print "_setup_distribution:", _setup_distribution + return _setup_distribution + +# run_setup () diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/cygwinccompiler.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/cygwinccompiler.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/cygwinccompiler.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/cygwinccompiler.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,403 @@ +"""distutils.cygwinccompiler + +Provides the CygwinCCompiler class, a subclass of UnixCCompiler that +handles the Cygwin port of the GNU C compiler to Windows. It also contains +the Mingw32CCompiler class which handles the mingw32 port of GCC (same as +cygwin in no-cygwin mode). +""" + +# problems: +# +# * if you use a msvc compiled python version (1.5.2) +# 1. you have to insert a __GNUC__ section in its config.h +# 2. you have to generate an import library for its dll +# - create a def-file for python??.dll +# - create an import library using +# dlltool --dllname python15.dll --def python15.def \ +# --output-lib libpython15.a +# +# see also http://starship.python.net/crew/kernr/mingw32/Notes.html +# +# * We put export_symbols in a def-file, and don't use +# --export-all-symbols because it doesn't worked reliable in some +# tested configurations. And because other windows compilers also +# need their symbols specified this no serious problem. 
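# A minimal illustrative sketch of the def-file approach described above;
# the DLL and symbol names are hypothetical, and the dlltool command simply
# mirrors the note earlier in this comment block.
def sketch_def_contents(dll_name, export_symbols):
    """Build the "LIBRARY ... / EXPORTS / <symbols>" text that link() writes below."""
    lines = ["LIBRARY %s" % dll_name, "EXPORTS"]
    lines.extend(export_symbols)
    return "\n".join(lines) + "\n"

# sketch_def_contents("spam.pyd", ["PyInit_spam"]) returns:
#     LIBRARY spam.pyd
#     EXPORTS
#     PyInit_spam
# An import library for an existing DLL can then be produced roughly as in
# the note above:
#     dlltool --dllname spam.pyd --def spam.def --output-lib libspam.a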
+# +# tested configurations: +# +# * cygwin gcc 2.91.57/ld 2.9.4/dllwrap 0.2.4 works +# (after patching python's config.h and for C++ some other include files) +# see also http://starship.python.net/crew/kernr/mingw32/Notes.html +# * mingw32 gcc 2.95.2/ld 2.9.4/dllwrap 0.2.4 works +# (ld doesn't support -shared, so we use dllwrap) +# * cygwin gcc 2.95.2/ld 2.10.90/dllwrap 2.10.90 works now +# - its dllwrap doesn't work, there is a bug in binutils 2.10.90 +# see also http://sources.redhat.com/ml/cygwin/2000-06/msg01274.html +# - using gcc -mdll instead dllwrap doesn't work without -static because +# it tries to link against dlls instead their import libraries. (If +# it finds the dll first.) +# By specifying -static we force ld to link against the import libraries, +# this is windows standard and there are normally not the necessary symbols +# in the dlls. +# *** only the version of June 2000 shows these problems +# * cygwin gcc 3.2/ld 2.13.90 works +# (ld supports -shared) +# * mingw gcc 3.2/ld 2.13 works +# (ld supports -shared) + +import os +import sys +import copy +from subprocess import Popen, PIPE, check_output +import re + +from distutils.unixccompiler import UnixCCompiler +from distutils.file_util import write_file +from distutils.errors import (DistutilsExecError, CCompilerError, + CompileError, UnknownFileError) +from distutils.version import LooseVersion +from distutils.spawn import find_executable + +def get_msvcr(): + """Include the appropriate MSVC runtime library if Python was built + with MSVC 7.0 or later. + """ + msc_pos = sys.version.find('MSC v.') + if msc_pos != -1: + msc_ver = sys.version[msc_pos+6:msc_pos+10] + if msc_ver == '1300': + # MSVC 7.0 + return ['msvcr70'] + elif msc_ver == '1310': + # MSVC 7.1 + return ['msvcr71'] + elif msc_ver == '1400': + # VS2005 / MSVC 8.0 + return ['msvcr80'] + elif msc_ver == '1500': + # VS2008 / MSVC 9.0 + return ['msvcr90'] + elif msc_ver == '1600': + # VS2010 / MSVC 10.0 + return ['msvcr100'] + else: + raise ValueError("Unknown MS Compiler version %s " % msc_ver) + + +class CygwinCCompiler(UnixCCompiler): + """ Handles the Cygwin port of the GNU C compiler to Windows. + """ + compiler_type = 'cygwin' + obj_extension = ".o" + static_lib_extension = ".a" + shared_lib_extension = ".dll" + static_lib_format = "lib%s%s" + shared_lib_format = "%s%s" + exe_extension = ".exe" + + def __init__(self, verbose=0, dry_run=0, force=0): + + UnixCCompiler.__init__(self, verbose, dry_run, force) + + status, details = check_config_h() + self.debug_print("Python's GCC status: %s (details: %s)" % + (status, details)) + if status is not CONFIG_H_OK: + self.warn( + "Python's pyconfig.h doesn't seem to support your compiler. " + "Reason: %s. " + "Compiling may fail because of undefined preprocessor macros." 
+ % details) + + self.gcc_version, self.ld_version, self.dllwrap_version = \ + get_versions() + self.debug_print(self.compiler_type + ": gcc %s, ld %s, dllwrap %s\n" % + (self.gcc_version, + self.ld_version, + self.dllwrap_version) ) + + # ld_version >= "2.10.90" and < "2.13" should also be able to use + # gcc -mdll instead of dllwrap + # Older dllwraps had own version numbers, newer ones use the + # same as the rest of binutils ( also ld ) + # dllwrap 2.10.90 is buggy + if self.ld_version >= "2.10.90": + self.linker_dll = "gcc" + else: + self.linker_dll = "dllwrap" + + # ld_version >= "2.13" support -shared so use it instead of + # -mdll -static + if self.ld_version >= "2.13": + shared_option = "-shared" + else: + shared_option = "-mdll -static" + + # Hard-code GCC because that's what this is all about. + # XXX optimization, warnings etc. should be customizable. + self.set_executables(compiler='gcc -mcygwin -O -Wall', + compiler_so='gcc -mcygwin -mdll -O -Wall', + compiler_cxx='g++ -mcygwin -O -Wall', + linker_exe='gcc -mcygwin', + linker_so=('%s -mcygwin %s' % + (self.linker_dll, shared_option))) + + # cygwin and mingw32 need different sets of libraries + if self.gcc_version == "2.91.57": + # cygwin shouldn't need msvcrt, but without the dlls will crash + # (gcc version 2.91.57) -- perhaps something about initialization + self.dll_libraries=["msvcrt"] + self.warn( + "Consider upgrading to a newer version of gcc") + else: + # Include the appropriate MSVC runtime library if Python was built + # with MSVC 7.0 or later. + self.dll_libraries = get_msvcr() + + def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): + """Compiles the source by spawning GCC and windres if needed.""" + if ext == '.rc' or ext == '.res': + # gcc needs '.res' and '.rc' compiled to object files !!! + try: + self.spawn(["windres", "-i", src, "-o", obj]) + except DistutilsExecError as msg: + raise CompileError(msg) + else: # for other files use the C-compiler + try: + self.spawn(self.compiler_so + cc_args + [src, '-o', obj] + + extra_postargs) + except DistutilsExecError as msg: + raise CompileError(msg) + + def link(self, target_desc, objects, output_filename, output_dir=None, + libraries=None, library_dirs=None, runtime_library_dirs=None, + export_symbols=None, debug=0, extra_preargs=None, + extra_postargs=None, build_temp=None, target_lang=None): + """Link the objects.""" + # use separate copies, so we can modify the lists + extra_preargs = copy.copy(extra_preargs or []) + libraries = copy.copy(libraries or []) + objects = copy.copy(objects or []) + + # Additional libraries + libraries.extend(self.dll_libraries) + + # handle export symbols by creating a def-file + # with executables this only works with gcc/ld as linker + if ((export_symbols is not None) and + (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")): + # (The linker doesn't do anything if output is up-to-date. + # So it would probably better to check if we really need this, + # but for this we had to insert some unchanged parts of + # UnixCCompiler, and this is not what we want.) 
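# A small standalone sketch of the binutils-version checks made in __init__
# above (the helper is hypothetical and assumes a plain dotted version
# string): ld >= 2.10.90 lets gcc link the DLL instead of dllwrap, and
# ld >= 2.13 understands -shared.
def sketch_choose_linker(ld_version):
    """Return (linker_dll, shared_option) for a dotted ld version string."""
    parts = tuple(int(p) for p in ld_version.split("."))
    linker_dll = "gcc" if parts >= (2, 10, 90) else "dllwrap"
    shared_option = "-shared" if parts >= (2, 13) else "-mdll -static"
    return linker_dll, shared_option

# sketch_choose_linker("2.13.90") -> ("gcc", "-shared")
# sketch_choose_linker("2.9.4")   -> ("dllwrap", "-mdll -static")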
+ + # we want to put some files in the same directory as the + # object files are, build_temp doesn't help much + # where are the object files + temp_dir = os.path.dirname(objects[0]) + # name of dll to give the helper files the same base name + (dll_name, dll_extension) = os.path.splitext( + os.path.basename(output_filename)) + + # generate the filenames for these files + def_file = os.path.join(temp_dir, dll_name + ".def") + lib_file = os.path.join(temp_dir, 'lib' + dll_name + ".a") + + # Generate .def file + contents = [ + "LIBRARY %s" % os.path.basename(output_filename), + "EXPORTS"] + for sym in export_symbols: + contents.append(sym) + self.execute(write_file, (def_file, contents), + "writing %s" % def_file) + + # next add options for def-file and to creating import libraries + + # dllwrap uses different options than gcc/ld + if self.linker_dll == "dllwrap": + extra_preargs.extend(["--output-lib", lib_file]) + # for dllwrap we have to use a special option + extra_preargs.extend(["--def", def_file]) + # we use gcc/ld here and can be sure ld is >= 2.9.10 + else: + # doesn't work: bfd_close build\...\libfoo.a: Invalid operation + #extra_preargs.extend(["-Wl,--out-implib,%s" % lib_file]) + # for gcc/ld the def-file is specified as any object files + objects.append(def_file) + + #end: if ((export_symbols is not None) and + # (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")): + + # who wants symbols and a many times larger output file + # should explicitly switch the debug mode on + # otherwise we let dllwrap/ld strip the output file + # (On my machine: 10KiB < stripped_file < ??100KiB + # unstripped_file = stripped_file + XXX KiB + # ( XXX=254 for a typical python extension)) + if not debug: + extra_preargs.append("-s") + + UnixCCompiler.link(self, target_desc, objects, output_filename, + output_dir, libraries, library_dirs, + runtime_library_dirs, + None, # export_symbols, we do this in our def-file + debug, extra_preargs, extra_postargs, build_temp, + target_lang) + + # -- Miscellaneous methods ----------------------------------------- + + def object_filenames(self, source_filenames, strip_dir=0, output_dir=''): + """Adds supports for rc and res files.""" + if output_dir is None: + output_dir = '' + obj_names = [] + for src_name in source_filenames: + # use normcase to make sure '.rc' is really '.rc' and not '.RC' + base, ext = os.path.splitext(os.path.normcase(src_name)) + if ext not in (self.src_extensions + ['.rc','.res']): + raise UnknownFileError("unknown file type '%s' (from '%s')" % \ + (ext, src_name)) + if strip_dir: + base = os.path.basename (base) + if ext in ('.res', '.rc'): + # these need to be compiled to object files + obj_names.append (os.path.join(output_dir, + base + ext + self.obj_extension)) + else: + obj_names.append (os.path.join(output_dir, + base + self.obj_extension)) + return obj_names + +# the same as cygwin plus some additional parameters +class Mingw32CCompiler(CygwinCCompiler): + """ Handles the Mingw32 port of the GNU C compiler to Windows. + """ + compiler_type = 'mingw32' + + def __init__(self, verbose=0, dry_run=0, force=0): + + CygwinCCompiler.__init__ (self, verbose, dry_run, force) + + # ld_version >= "2.13" support -shared so use it instead of + # -mdll -static + if self.ld_version >= "2.13": + shared_option = "-shared" + else: + shared_option = "-mdll -static" + + # A real mingw32 doesn't need to specify a different entry point, + # but cygwin 2.91.57 in no-cygwin-mode needs it. 
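# A standalone sketch of the filename mapping implemented by
# object_filenames() above: .rc/.res sources keep their extension in front
# of the object suffix so windres output cannot collide with a C file of the
# same base name.  The helper name and inputs are hypothetical; POSIX-style
# paths are shown.
import os

def sketch_object_name(src_name, output_dir="", obj_extension=".o"):
    base, ext = os.path.splitext(os.path.normcase(src_name))
    if ext in (".rc", ".res"):
        return os.path.join(output_dir, base + ext + obj_extension)
    return os.path.join(output_dir, base + obj_extension)

# sketch_object_name("app.rc", "build")  -> "build/app.rc.o"  (compiled via windres)
# sketch_object_name("spam.c", "build")  -> "build/spam.o"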
+ if self.gcc_version <= "2.91.57": + entry_point = '--entry _DllMain@12' + else: + entry_point = '' + + if is_cygwingcc(): + raise CCompilerError( + 'Cygwin gcc cannot be used with --compiler=mingw32') + + self.set_executables(compiler='gcc -O -Wall', + compiler_so='gcc -mdll -O -Wall', + compiler_cxx='g++ -O -Wall', + linker_exe='gcc', + linker_so='%s %s %s' + % (self.linker_dll, shared_option, + entry_point)) + # Maybe we should also append -mthreads, but then the finished + # dlls need another dll (mingwm10.dll see Mingw32 docs) + # (-mthreads: Support thread-safe exception handling on `Mingw32') + + # no additional libraries needed + self.dll_libraries=[] + + # Include the appropriate MSVC runtime library if Python was built + # with MSVC 7.0 or later. + self.dll_libraries = get_msvcr() + +# Because these compilers aren't configured in Python's pyconfig.h file by +# default, we should at least warn the user if he is using an unmodified +# version. + +CONFIG_H_OK = "ok" +CONFIG_H_NOTOK = "not ok" +CONFIG_H_UNCERTAIN = "uncertain" + +def check_config_h(): + """Check if the current Python installation appears amenable to building + extensions with GCC. + + Returns a tuple (status, details), where 'status' is one of the following + constants: + + - CONFIG_H_OK: all is well, go ahead and compile + - CONFIG_H_NOTOK: doesn't look good + - CONFIG_H_UNCERTAIN: not sure -- unable to read pyconfig.h + + 'details' is a human-readable string explaining the situation. + + Note there are two ways to conclude "OK": either 'sys.version' contains + the string "GCC" (implying that this Python was built with GCC), or the + installed "pyconfig.h" contains the string "__GNUC__". + """ + + # XXX since this function also checks sys.version, it's not strictly a + # "pyconfig.h" check -- should probably be renamed... + + from distutils import sysconfig + + # if sys.version contains GCC then python was compiled with GCC, and the + # pyconfig.h file should be OK + if "GCC" in sys.version: + return CONFIG_H_OK, "sys.version mentions 'GCC'" + + # let's see if __GNUC__ is mentioned in python.h + fn = sysconfig.get_config_h_filename() + try: + config_h = open(fn) + try: + if "__GNUC__" in config_h.read(): + return CONFIG_H_OK, "'%s' mentions '__GNUC__'" % fn + else: + return CONFIG_H_NOTOK, "'%s' does not mention '__GNUC__'" % fn + finally: + config_h.close() + except OSError as exc: + return (CONFIG_H_UNCERTAIN, + "couldn't read '%s': %s" % (fn, exc.strerror)) + +RE_VERSION = re.compile(br'(\d+\.\d+(\.\d+)*)') + +def _find_exe_version(cmd): + """Find the version of an executable by running `cmd` in the shell. + + If the command is not found, or the output does not match + `RE_VERSION`, returns None. + """ + executable = cmd.split()[0] + if find_executable(executable) is None: + return None + out = Popen(cmd, shell=True, stdout=PIPE).stdout + try: + out_string = out.read() + finally: + out.close() + result = RE_VERSION.search(out_string) + if result is None: + return None + # LooseVersion works with strings + # so we need to decode our bytes + return LooseVersion(result.group(1).decode()) + +def get_versions(): + """ Try to find out the versions of gcc, ld and dllwrap. + + If not possible it returns None for it. 
+ """ + commands = ['gcc -dumpversion', 'ld -v', 'dllwrap --version'] + return tuple([_find_exe_version(cmd) for cmd in commands]) + +def is_cygwingcc(): + '''Try to determine if the gcc that would be used is from cygwin.''' + out_string = check_output(['gcc', '-dumpmachine']) + return out_string.strip().endswith(b'cygwin') diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/debug.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/debug.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/debug.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/debug.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,5 @@ +import os + +# If DISTUTILS_DEBUG is anything other than the empty string, we run in +# debug mode. +DEBUG = os.environ.get('DISTUTILS_DEBUG') diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/dep_util.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/dep_util.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/dep_util.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/dep_util.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,92 @@ +"""distutils.dep_util + +Utility functions for simple, timestamp-based dependency of files +and groups of files; also, function based entirely on such +timestamp dependency analysis.""" + +import os +from distutils.errors import DistutilsFileError + + +def newer (source, target): + """Return true if 'source' exists and is more recently modified than + 'target', or if 'source' exists and 'target' doesn't. Return false if + both exist and 'target' is the same age or younger than 'source'. + Raise DistutilsFileError if 'source' does not exist. + """ + if not os.path.exists(source): + raise DistutilsFileError("file '%s' does not exist" % + os.path.abspath(source)) + if not os.path.exists(target): + return 1 + + from stat import ST_MTIME + mtime1 = os.stat(source)[ST_MTIME] + mtime2 = os.stat(target)[ST_MTIME] + + return mtime1 > mtime2 + +# newer () + + +def newer_pairwise (sources, targets): + """Walk two filename lists in parallel, testing if each source is newer + than its corresponding target. Return a pair of lists (sources, + targets) where source is newer than target, according to the semantics + of 'newer()'. + """ + if len(sources) != len(targets): + raise ValueError("'sources' and 'targets' must be same length") + + # build a pair of lists (sources, targets) where source is newer + n_sources = [] + n_targets = [] + for i in range(len(sources)): + if newer(sources[i], targets[i]): + n_sources.append(sources[i]) + n_targets.append(targets[i]) + + return (n_sources, n_targets) + +# newer_pairwise () + + +def newer_group (sources, target, missing='error'): + """Return true if 'target' is out-of-date with respect to any file + listed in 'sources'. In other words, if 'target' exists and is newer + than every file in 'sources', return false; otherwise return true. + 'missing' controls what we do when a source file is missing; the + default ("error") is to blow up with an OSError from inside 'stat()'; + if it is "ignore", we silently drop any missing source files; if it is + "newer", any missing source files make us assume that 'target' is + out-of-date (this is handy in "dry-run" mode: it'll make you pretend to + carry out commands that wouldn't work because inputs are missing, but + that doesn't matter because you're not actually going to run the + commands). 
+ """ + # If the target doesn't even exist, then it's definitely out-of-date. + if not os.path.exists(target): + return 1 + + # Otherwise we have to find out the hard way: if *any* source file + # is more recent than 'target', then 'target' is out-of-date and + # we can immediately return true. If we fall through to the end + # of the loop, then 'target' is up-to-date and we return false. + from stat import ST_MTIME + target_mtime = os.stat(target)[ST_MTIME] + for source in sources: + if not os.path.exists(source): + if missing == 'error': # blow up when we stat() the file + pass + elif missing == 'ignore': # missing source dropped from + continue # target's dependency list + elif missing == 'newer': # missing source means target is + return 1 # out-of-date + + source_mtime = os.stat(source)[ST_MTIME] + if source_mtime > target_mtime: + return 1 + else: + return 0 + +# newer_group () diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/dir_util.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/dir_util.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/dir_util.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/dir_util.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,210 @@ +"""distutils.dir_util + +Utility functions for manipulating directories and directory trees.""" + +import os +import errno +from distutils.errors import DistutilsFileError, DistutilsInternalError +from distutils import log + +# cache for by mkpath() -- in addition to cheapening redundant calls, +# eliminates redundant "creating /foo/bar/baz" messages in dry-run mode +_path_created = {} + +# I don't use os.makedirs because a) it's new to Python 1.5.2, and +# b) it blows up if the directory already exists (I want to silently +# succeed in that case). +def mkpath(name, mode=0o777, verbose=1, dry_run=0): + """Create a directory and any missing ancestor directories. + + If the directory already exists (or if 'name' is the empty string, which + means the current directory, which of course exists), then do nothing. + Raise DistutilsFileError if unable to create some directory along the way + (eg. some sub-path exists, but is a file rather than a directory). + If 'verbose' is true, print a one-line summary of each mkdir to stdout. + Return the list of directories actually created. + """ + + global _path_created + + # Detect a common bug -- name is None + if not isinstance(name, str): + raise DistutilsInternalError( + "mkpath: 'name' must be a string (got %r)" % (name,)) + + # XXX what's the better way to handle verbosity? print as we create + # each directory in the path (the current behaviour), or only announce + # the creation of the whole path? 
(quite easy to do the latter since + # we're not using a recursive algorithm) + + name = os.path.normpath(name) + created_dirs = [] + if os.path.isdir(name) or name == '': + return created_dirs + if _path_created.get(os.path.abspath(name)): + return created_dirs + + (head, tail) = os.path.split(name) + tails = [tail] # stack of lone dirs to create + + while head and tail and not os.path.isdir(head): + (head, tail) = os.path.split(head) + tails.insert(0, tail) # push next higher dir onto stack + + # now 'head' contains the deepest directory that already exists + # (that is, the child of 'head' in 'name' is the highest directory + # that does *not* exist) + for d in tails: + #print "head = %s, d = %s: " % (head, d), + head = os.path.join(head, d) + abs_head = os.path.abspath(head) + + if _path_created.get(abs_head): + continue + + if verbose >= 1: + log.info("creating %s", head) + + if not dry_run: + try: + os.mkdir(head, mode) + except OSError as exc: + if not (exc.errno == errno.EEXIST and os.path.isdir(head)): + raise DistutilsFileError( + "could not create '%s': %s" % (head, exc.args[-1])) + created_dirs.append(head) + + _path_created[abs_head] = 1 + return created_dirs + +def create_tree(base_dir, files, mode=0o777, verbose=1, dry_run=0): + """Create all the empty directories under 'base_dir' needed to put 'files' + there. + + 'base_dir' is just the name of a directory which doesn't necessarily + exist yet; 'files' is a list of filenames to be interpreted relative to + 'base_dir'. 'base_dir' + the directory portion of every file in 'files' + will be created if it doesn't already exist. 'mode', 'verbose' and + 'dry_run' flags are as for 'mkpath()'. + """ + # First get the list of directories to create + need_dir = set() + for file in files: + need_dir.add(os.path.join(base_dir, os.path.dirname(file))) + + # Now create them + for dir in sorted(need_dir): + mkpath(dir, mode, verbose=verbose, dry_run=dry_run) + +def copy_tree(src, dst, preserve_mode=1, preserve_times=1, + preserve_symlinks=0, update=0, verbose=1, dry_run=0): + """Copy an entire directory tree 'src' to a new location 'dst'. + + Both 'src' and 'dst' must be directory names. If 'src' is not a + directory, raise DistutilsFileError. If 'dst' does not exist, it is + created with 'mkpath()'. The end result of the copy is that every + file in 'src' is copied to 'dst', and directories under 'src' are + recursively copied to 'dst'. Return the list of files that were + copied or might have been copied, using their output name. The + return value is unaffected by 'update' or 'dry_run': it is simply + the list of all files under 'src', with the names changed to be + under 'dst'. + + 'preserve_mode' and 'preserve_times' are the same as for + 'copy_file'; note that they only apply to regular files, not to + directories. If 'preserve_symlinks' is true, symlinks will be + copied as symlinks (on platforms that support them!); otherwise + (the default), the destination of the symlink will be copied. + 'update' and 'verbose' are the same as for 'copy_file'. 
+ """ + from distutils.file_util import copy_file + + if not dry_run and not os.path.isdir(src): + raise DistutilsFileError( + "cannot copy tree '%s': not a directory" % src) + try: + names = os.listdir(src) + except OSError as e: + if dry_run: + names = [] + else: + raise DistutilsFileError( + "error listing files in '%s': %s" % (src, e.strerror)) + + if not dry_run: + mkpath(dst, verbose=verbose) + + outputs = [] + + for n in names: + src_name = os.path.join(src, n) + dst_name = os.path.join(dst, n) + + if n.startswith('.nfs'): + # skip NFS rename files + continue + + if preserve_symlinks and os.path.islink(src_name): + link_dest = os.readlink(src_name) + if verbose >= 1: + log.info("linking %s -> %s", dst_name, link_dest) + if not dry_run: + os.symlink(link_dest, dst_name) + outputs.append(dst_name) + + elif os.path.isdir(src_name): + outputs.extend( + copy_tree(src_name, dst_name, preserve_mode, + preserve_times, preserve_symlinks, update, + verbose=verbose, dry_run=dry_run)) + else: + copy_file(src_name, dst_name, preserve_mode, + preserve_times, update, verbose=verbose, + dry_run=dry_run) + outputs.append(dst_name) + + return outputs + +def _build_cmdtuple(path, cmdtuples): + """Helper for remove_tree().""" + for f in os.listdir(path): + real_f = os.path.join(path,f) + if os.path.isdir(real_f) and not os.path.islink(real_f): + _build_cmdtuple(real_f, cmdtuples) + else: + cmdtuples.append((os.remove, real_f)) + cmdtuples.append((os.rmdir, path)) + +def remove_tree(directory, verbose=1, dry_run=0): + """Recursively remove an entire directory tree. + + Any errors are ignored (apart from being reported to stdout if 'verbose' + is true). + """ + global _path_created + + if verbose >= 1: + log.info("removing '%s' (and everything under it)", directory) + if dry_run: + return + cmdtuples = [] + _build_cmdtuple(directory, cmdtuples) + for cmd in cmdtuples: + try: + cmd[0](cmd[1]) + # remove dir from cache if it's already there + abspath = os.path.abspath(cmd[1]) + if abspath in _path_created: + del _path_created[abspath] + except OSError as exc: + log.warn("error removing %s: %s", directory, exc) + +def ensure_relative(path): + """Take the full path 'path', and make it a relative path. + + This is useful to make 'path' the second argument to os.path.join(). + """ + drive, path = os.path.splitdrive(path) + if path[0:1] == os.sep: + path = drive + path[1:] + return path diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/dist.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/dist.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/dist.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/dist.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,1256 @@ +"""distutils.dist + +Provides the Distribution class, which represents the module distribution +being built/installed/distributed. +""" + +import sys +import os +import re +from email import message_from_file + +try: + import warnings +except ImportError: + warnings = None + +from distutils.errors import * +from distutils.fancy_getopt import FancyGetopt, translate_longopt +from distutils.util import check_environ, strtobool, rfc822_escape +from distutils import log +from distutils.debug import DEBUG + +# Regex to define acceptable Distutils command names. This is not *quite* +# the same as a Python NAME -- I don't allow leading underscores. The fact +# that they're very similar is no coincidence; the default naming scheme is +# to look for a Python module named after the command. 
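# A quick standalone illustration of the naming rule described above; the
# pattern is the same as the 'command_re' defined just below, and the example
# command names are arbitrary.
import re

sketch_command_re = re.compile(r'^[a-zA-Z]([a-zA-Z0-9_]*)$')

# sketch_command_re.match('build_ext')  -> match object (valid command name)
# sketch_command_re.match('_private')   -> None (leading underscore rejected)
# sketch_command_re.match('bdist-rpm')  -> None ('-' not allowed; the module
#                                         and command are named bdist_rpm)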
+command_re = re.compile(r'^[a-zA-Z]([a-zA-Z0-9_]*)$') + + +def _ensure_list(value, fieldname): + if isinstance(value, str): + # a string containing comma separated values is okay. It will + # be converted to a list by Distribution.finalize_options(). + pass + elif not isinstance(value, list): + # passing a tuple or an iterator perhaps, warn and convert + typename = type(value).__name__ + msg = f"Warning: '{fieldname}' should be a list, got type '{typename}'" + log.log(log.WARN, msg) + value = list(value) + return value + + +class Distribution: + """The core of the Distutils. Most of the work hiding behind 'setup' + is really done within a Distribution instance, which farms the work out + to the Distutils commands specified on the command line. + + Setup scripts will almost never instantiate Distribution directly, + unless the 'setup()' function is totally inadequate to their needs. + However, it is conceivable that a setup script might wish to subclass + Distribution for some specialized purpose, and then pass the subclass + to 'setup()' as the 'distclass' keyword argument. If so, it is + necessary to respect the expectations that 'setup' has of Distribution. + See the code for 'setup()', in core.py, for details. + """ + + # 'global_options' describes the command-line options that may be + # supplied to the setup script prior to any actual commands. + # Eg. "./setup.py -n" or "./setup.py --quiet" both take advantage of + # these global options. This list should be kept to a bare minimum, + # since every global option is also valid as a command option -- and we + # don't want to pollute the commands with too many options that they + # have minimal control over. + # The fourth entry for verbose means that it can be repeated. + global_options = [ + ('verbose', 'v', "run verbosely (default)", 1), + ('quiet', 'q', "run quietly (turns verbosity off)"), + ('dry-run', 'n', "don't actually do anything"), + ('help', 'h', "show detailed help message"), + ('no-user-cfg', None, + 'ignore pydistutils.cfg in your home directory'), + ] + + # 'common_usage' is a short (2-3 line) string describing the common + # usage of the setup script. 
+ common_usage = """\ +Common commands: (see '--help-commands' for more) + + setup.py build will build the package underneath 'build/' + setup.py install will install the package +""" + + # options that are not propagated to the commands + display_options = [ + ('help-commands', None, + "list all available commands"), + ('name', None, + "print package name"), + ('version', 'V', + "print package version"), + ('fullname', None, + "print -"), + ('author', None, + "print the author's name"), + ('author-email', None, + "print the author's email address"), + ('maintainer', None, + "print the maintainer's name"), + ('maintainer-email', None, + "print the maintainer's email address"), + ('contact', None, + "print the maintainer's name if known, else the author's"), + ('contact-email', None, + "print the maintainer's email address if known, else the author's"), + ('url', None, + "print the URL for this package"), + ('license', None, + "print the license of the package"), + ('licence', None, + "alias for --license"), + ('description', None, + "print the package description"), + ('long-description', None, + "print the long package description"), + ('platforms', None, + "print the list of platforms"), + ('classifiers', None, + "print the list of classifiers"), + ('keywords', None, + "print the list of keywords"), + ('provides', None, + "print the list of packages/modules provided"), + ('requires', None, + "print the list of packages/modules required"), + ('obsoletes', None, + "print the list of packages/modules made obsolete") + ] + display_option_names = [translate_longopt(x[0]) for x in display_options] + + # negative options are options that exclude other options + negative_opt = {'quiet': 'verbose'} + + # -- Creation/initialization methods ------------------------------- + + def __init__(self, attrs=None): + """Construct a new Distribution instance: initialize all the + attributes of a Distribution, and then use 'attrs' (a dictionary + mapping attribute names to values) to assign some of those + attributes their "real" values. (Any attributes not mentioned in + 'attrs' will be assigned to some null value: 0, None, an empty list + or dictionary, etc.) Most importantly, initialize the + 'command_obj' attribute to the empty dictionary; this will be + filled in with real command objects by 'parse_command_line()'. + """ + + # Default values for our command-line options + self.verbose = 1 + self.dry_run = 0 + self.help = 0 + for attr in self.display_option_names: + setattr(self, attr, 0) + + # Store the distribution meta-data (name, version, author, and so + # forth) in a separate object -- we're getting to have enough + # information here (and enough command-line options) that it's + # worth it. Also delegate 'get_XXX()' methods to the 'metadata' + # object in a sneaky and underhanded (but efficient!) way. + self.metadata = DistributionMetadata() + for basename in self.metadata._METHOD_BASENAMES: + method_name = "get_" + basename + setattr(self, method_name, getattr(self.metadata, method_name)) + + # 'cmdclass' maps command names to class objects, so we + # can 1) quickly figure out which class to instantiate when + # we need to create a new command object, and 2) have a way + # for the setup script to override command classes + self.cmdclass = {} + + # 'command_packages' is a list of packages in which commands + # are searched for. The factory for command 'foo' is expected + # to be named 'foo' in the module 'foo' in one of the packages + # named here. 
This list is searched from the left; an error + # is raised if no named package provides the command being + # searched for. (Always access using get_command_packages().) + self.command_packages = None + + # 'script_name' and 'script_args' are usually set to sys.argv[0] + # and sys.argv[1:], but they can be overridden when the caller is + # not necessarily a setup script run from the command-line. + self.script_name = None + self.script_args = None + + # 'command_options' is where we store command options between + # parsing them (from config files, the command-line, etc.) and when + # they are actually needed -- ie. when the command in question is + # instantiated. It is a dictionary of dictionaries of 2-tuples: + # command_options = { command_name : { option : (source, value) } } + self.command_options = {} + + # 'dist_files' is the list of (command, pyversion, file) that + # have been created by any dist commands run so far. This is + # filled regardless of whether the run is dry or not. pyversion + # gives sysconfig.get_python_version() if the dist file is + # specific to a Python version, 'any' if it is good for all + # Python versions on the target platform, and '' for a source + # file. pyversion should not be used to specify minimum or + # maximum required Python versions; use the metainfo for that + # instead. + self.dist_files = [] + + # These options are really the business of various commands, rather + # than of the Distribution itself. We provide aliases for them in + # Distribution as a convenience to the developer. + self.packages = None + self.package_data = {} + self.package_dir = None + self.py_modules = None + self.libraries = None + self.headers = None + self.ext_modules = None + self.ext_package = None + self.include_dirs = None + self.extra_path = None + self.scripts = None + self.data_files = None + self.password = '' + + # And now initialize bookkeeping stuff that can't be supplied by + # the caller at all. 'command_obj' maps command names to + # Command instances -- that's how we enforce that every command + # class is a singleton. + self.command_obj = {} + + # 'have_run' maps command names to boolean values; it keeps track + # of whether we have actually run a particular command, to make it + # cheap to "run" a command whenever we think we might need to -- if + # it's already been done, no need for expensive filesystem + # operations, we just check the 'have_run' dictionary and carry on. + # It's only safe to query 'have_run' for a command class that has + # been instantiated -- a false value will be inserted when the + # command object is created, and replaced with a true value when + # the command is successfully run. Thus it's probably best to use + # '.get()' rather than a straight lookup. + self.have_run = {} + + # Now we'll use the attrs dictionary (ultimately, keyword args from + # the setup script) to possibly override any or all of these + # distribution options. + + if attrs: + # Pull out the set of command options and work on them + # specifically. Note that this order guarantees that aliased + # command options will override any supplied redundantly + # through the general options dictionary. 
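# An illustrative sketch (hypothetical command and option values) of how the
# 'options' keyword handled just below ends up in 'command_options': every
# option value is stored as a (source, value) pair, as described in the
# comment on 'command_options' earlier in __init__.
sketch_setup_options = {'build_ext': {'inplace': '1'}}
sketch_command_options = {
    command: {opt: ("setup script", val) for opt, val in cmd_opts.items()}
    for command, cmd_opts in sketch_setup_options.items()
}
# sketch_command_options == {'build_ext': {'inplace': ('setup script', '1')}}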
+ options = attrs.get('options') + if options is not None: + del attrs['options'] + for (command, cmd_options) in options.items(): + opt_dict = self.get_option_dict(command) + for (opt, val) in cmd_options.items(): + opt_dict[opt] = ("setup script", val) + + if 'licence' in attrs: + attrs['license'] = attrs['licence'] + del attrs['licence'] + msg = "'licence' distribution option is deprecated; use 'license'" + if warnings is not None: + warnings.warn(msg) + else: + sys.stderr.write(msg + "\n") + + # Now work on the rest of the attributes. Any attribute that's + # not already defined is invalid! + for (key, val) in attrs.items(): + if hasattr(self.metadata, "set_" + key): + getattr(self.metadata, "set_" + key)(val) + elif hasattr(self.metadata, key): + setattr(self.metadata, key, val) + elif hasattr(self, key): + setattr(self, key, val) + else: + msg = "Unknown distribution option: %s" % repr(key) + warnings.warn(msg) + + # no-user-cfg is handled before other command line args + # because other args override the config files, and this + # one is needed before we can load the config files. + # If attrs['script_args'] wasn't passed, assume false. + # + # This also make sure we just look at the global options + self.want_user_cfg = True + + if self.script_args is not None: + for arg in self.script_args: + if not arg.startswith('-'): + break + if arg == '--no-user-cfg': + self.want_user_cfg = False + break + + self.finalize_options() + + def get_option_dict(self, command): + """Get the option dictionary for a given command. If that + command's option dictionary hasn't been created yet, then create it + and return the new dictionary; otherwise, return the existing + option dictionary. + """ + dict = self.command_options.get(command) + if dict is None: + dict = self.command_options[command] = {} + return dict + + def dump_option_dicts(self, header=None, commands=None, indent=""): + from pprint import pformat + + if commands is None: # dump all command option dicts + commands = sorted(self.command_options.keys()) + + if header is not None: + self.announce(indent + header) + indent = indent + " " + + if not commands: + self.announce(indent + "no commands known yet") + return + + for cmd_name in commands: + opt_dict = self.command_options.get(cmd_name) + if opt_dict is None: + self.announce(indent + + "no option dict for '%s' command" % cmd_name) + else: + self.announce(indent + + "option dict for '%s' command:" % cmd_name) + out = pformat(opt_dict) + for line in out.split('\n'): + self.announce(indent + " " + line) + + # -- Config file finding/parsing methods --------------------------- + + def find_config_files(self): + """Find as many configuration files as should be processed for this + platform, and return a list of filenames in the order in which they + should be parsed. The filenames returned are guaranteed to exist + (modulo nasty race conditions). + + There are three possible config files: distutils.cfg in the + Distutils installation directory (ie. where the top-level + Distutils __inst__.py file lives), a file in the user's home + directory named .pydistutils.cfg on Unix and pydistutils.cfg + on Windows/Mac; and setup.cfg in the current directory. + + The file in the user's home directory can be disabled with the + --no-user-cfg option. 
+ """ + files = [] + check_environ() + + # Where to look for the system-wide Distutils config file + sys_dir = os.path.dirname(sys.modules['distutils'].__file__) + + # Look for the system config file + sys_file = os.path.join(sys_dir, "distutils.cfg") + if os.path.isfile(sys_file): + files.append(sys_file) + + # What to call the per-user config file + if os.name == 'posix': + user_filename = ".pydistutils.cfg" + else: + user_filename = "pydistutils.cfg" + + # And look for the user config file + if self.want_user_cfg: + user_file = os.path.join(os.path.expanduser('~'), user_filename) + if os.path.isfile(user_file): + files.append(user_file) + + # All platforms support local setup.cfg + local_file = "setup.cfg" + if os.path.isfile(local_file): + files.append(local_file) + + if DEBUG: + self.announce("using config files: %s" % ', '.join(files)) + + return files + + def parse_config_files(self, filenames=None): + from configparser import ConfigParser + + # Ignore install directory options if we have a venv + if sys.prefix != sys.base_prefix: + ignore_options = [ + 'install-base', 'install-platbase', 'install-lib', + 'install-platlib', 'install-purelib', 'install-headers', + 'install-scripts', 'install-data', 'prefix', 'exec-prefix', + 'home', 'user', 'root'] + else: + ignore_options = [] + + ignore_options = frozenset(ignore_options) + + if filenames is None: + filenames = self.find_config_files() + + if DEBUG: + self.announce("Distribution.parse_config_files():") + + parser = ConfigParser() + for filename in filenames: + if DEBUG: + self.announce(" reading %s" % filename) + parser.read(filename) + for section in parser.sections(): + options = parser.options(section) + opt_dict = self.get_option_dict(section) + + for opt in options: + if opt != '__name__' and opt not in ignore_options: + val = parser.get(section,opt) + opt = opt.replace('-', '_') + opt_dict[opt] = (filename, val) + + # Make the ConfigParser forget everything (so we retain + # the original filenames that options come from) + parser.__init__() + + # If there was a "global" section in the config file, use it + # to set Distribution options. + + if 'global' in self.command_options: + for (opt, (src, val)) in self.command_options['global'].items(): + alias = self.negative_opt.get(opt) + try: + if alias: + setattr(self, alias, not strtobool(val)) + elif opt in ('verbose', 'dry_run'): # ugh! + setattr(self, opt, strtobool(val)) + else: + setattr(self, opt, val) + except ValueError as msg: + raise DistutilsOptionError(msg) + + # -- Command-line parsing methods ---------------------------------- + + def parse_command_line(self): + """Parse the setup script's command line, taken from the + 'script_args' instance attribute (which defaults to 'sys.argv[1:]' + -- see 'setup()' in core.py). This list is first processed for + "global options" -- options that set attributes of the Distribution + instance. Then, it is alternately scanned for Distutils commands + and options for that command. Each new command terminates the + options for the previous command. The allowed options for a + command are determined by the 'user_options' attribute of the + command class -- thus, we have to be able to load command classes + in order to parse the command line. Any error in that 'options' + attribute raises DistutilsGetoptError; any error on the + command-line raises DistutilsArgError. If no Distutils commands + were found on the command line, raises DistutilsArgError. 
Return + true if command-line was successfully parsed and we should carry + on with executing commands; false if no errors but we shouldn't + execute commands (currently, this only happens if user asks for + help). + """ + # + # We now have enough information to show the Macintosh dialog + # that allows the user to interactively specify the "command line". + # + toplevel_options = self._get_toplevel_options() + + # We have to parse the command line a bit at a time -- global + # options, then the first command, then its options, and so on -- + # because each command will be handled by a different class, and + # the options that are valid for a particular class aren't known + # until we have loaded the command class, which doesn't happen + # until we know what the command is. + + self.commands = [] + parser = FancyGetopt(toplevel_options + self.display_options) + parser.set_negative_aliases(self.negative_opt) + parser.set_aliases({'licence': 'license'}) + args = parser.getopt(args=self.script_args, object=self) + option_order = parser.get_option_order() + log.set_verbosity(self.verbose) + + # for display options we return immediately + if self.handle_display_options(option_order): + return + while args: + args = self._parse_command_opts(parser, args) + if args is None: # user asked for help (and got it) + return + + # Handle the cases of --help as a "global" option, ie. + # "setup.py --help" and "setup.py --help command ...". For the + # former, we show global options (--verbose, --dry-run, etc.) + # and display-only options (--name, --version, etc.); for the + # latter, we omit the display-only options and show help for + # each command listed on the command line. + if self.help: + self._show_help(parser, + display_options=len(self.commands) == 0, + commands=self.commands) + return + + # Oops, no commands found -- an end-user error + if not self.commands: + raise DistutilsArgError("no commands supplied") + + # All is well: return true + return True + + def _get_toplevel_options(self): + """Return the non-display options recognized at the top level. + + This includes options that are recognized *only* at the top + level as well as options recognized for commands. + """ + return self.global_options + [ + ("command-packages=", None, + "list of packages that provide distutils commands"), + ] + + def _parse_command_opts(self, parser, args): + """Parse the command-line options for a single command. + 'parser' must be a FancyGetopt instance; 'args' must be the list + of arguments, starting with the current command (whose options + we are about to parse). Returns a new version of 'args' with + the next command at the front of the list; will be the empty + list if there are no more commands on the command line. Returns + None if the user asked for help on this command. + """ + # late import because of mutual dependence between these modules + from distutils.cmd import Command + + # Pull the current command from the head of the command line + command = args[0] + if not command_re.match(command): + raise SystemExit("invalid command name '%s'" % command) + self.commands.append(command) + + # Dig up the command class that implements this command, so we + # 1) know that it's a valid command, and 2) know which options + # it takes. + try: + cmd_class = self.get_command_class(command) + except DistutilsModuleError as msg: + raise DistutilsArgError(msg) + + # Require that the command class be derived from Command -- want + # to be sure that the basic "command" interface is implemented. 
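# A minimal sketch of a setup-script command that satisfies the checks made
# just below: it subclasses Command and provides a 'user_options' list of
# option tuples.  The class name, option name, and registration call are
# hypothetical examples.
from distutils.cmd import Command

class sketch_hello(Command):
    description = "print a greeting (illustrative custom command)"
    user_options = [('name=', None, "who to greet")]

    def initialize_options(self):
        self.name = None

    def finalize_options(self):
        if self.name is None:
            self.name = "world"

    def run(self):
        print("hello, %s" % self.name)

# Registered via e.g. setup(..., cmdclass={'hello': sketch_hello}) and then
# invoked as "./setup.py hello --name=Debian".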
+ if not issubclass(cmd_class, Command): + raise DistutilsClassError( + "command class %s must subclass Command" % cmd_class) + + # Also make sure that the command object provides a list of its + # known options. + if not (hasattr(cmd_class, 'user_options') and + isinstance(cmd_class.user_options, list)): + msg = ("command class %s must provide " + "'user_options' attribute (a list of tuples)") + raise DistutilsClassError(msg % cmd_class) + + # If the command class has a list of negative alias options, + # merge it in with the global negative aliases. + negative_opt = self.negative_opt + if hasattr(cmd_class, 'negative_opt'): + negative_opt = negative_opt.copy() + negative_opt.update(cmd_class.negative_opt) + + # Check for help_options in command class. They have a different + # format (tuple of four) so we need to preprocess them here. + if (hasattr(cmd_class, 'help_options') and + isinstance(cmd_class.help_options, list)): + help_options = fix_help_options(cmd_class.help_options) + else: + help_options = [] + + # All commands support the global options too, just by adding + # in 'global_options'. + parser.set_option_table(self.global_options + + cmd_class.user_options + + help_options) + parser.set_negative_aliases(negative_opt) + (args, opts) = parser.getopt(args[1:]) + if hasattr(opts, 'help') and opts.help: + self._show_help(parser, display_options=0, commands=[cmd_class]) + return + + if (hasattr(cmd_class, 'help_options') and + isinstance(cmd_class.help_options, list)): + help_option_found=0 + for (help_option, short, desc, func) in cmd_class.help_options: + if hasattr(opts, parser.get_attr_name(help_option)): + help_option_found=1 + if callable(func): + func() + else: + raise DistutilsClassError( + "invalid help function %r for help option '%s': " + "must be a callable object (function, etc.)" + % (func, help_option)) + + if help_option_found: + return + + # Put the options from the command-line into their official + # holding pen, the 'command_options' dictionary. + opt_dict = self.get_option_dict(command) + for (name, value) in vars(opts).items(): + opt_dict[name] = ("command line", value) + + return args + + def finalize_options(self): + """Set final values for all the options on the Distribution + instance, analogous to the .finalize_options() method of Command + objects. + """ + for attr in ('keywords', 'platforms'): + value = getattr(self.metadata, attr) + if value is None: + continue + if isinstance(value, str): + value = [elm.strip() for elm in value.split(',')] + setattr(self.metadata, attr, value) + + def _show_help(self, parser, global_options=1, display_options=1, + commands=[]): + """Show help for the setup script command-line in the form of + several lists of command-line options. 'parser' should be a + FancyGetopt instance; do not expect it to be returned in the + same state, as its option table will be reset to make it + generate the correct help text. + + If 'global_options' is true, lists the global options: + --verbose, --dry-run, etc. If 'display_options' is true, lists + the "display-only" options: --name, --version, etc. Finally, + lists per-command help for every command name or command class + in 'commands'. 
+ """ + # late import because of mutual dependence between these modules + from distutils.core import gen_usage + from distutils.cmd import Command + + if global_options: + if display_options: + options = self._get_toplevel_options() + else: + options = self.global_options + parser.set_option_table(options) + parser.print_help(self.common_usage + "\nGlobal options:") + print('') + + if display_options: + parser.set_option_table(self.display_options) + parser.print_help( + "Information display options (just display " + + "information, ignore any commands)") + print('') + + for command in self.commands: + if isinstance(command, type) and issubclass(command, Command): + klass = command + else: + klass = self.get_command_class(command) + if (hasattr(klass, 'help_options') and + isinstance(klass.help_options, list)): + parser.set_option_table(klass.user_options + + fix_help_options(klass.help_options)) + else: + parser.set_option_table(klass.user_options) + parser.print_help("Options for '%s' command:" % klass.__name__) + print('') + + print(gen_usage(self.script_name)) + + def handle_display_options(self, option_order): + """If there were any non-global "display-only" options + (--help-commands or the metadata display options) on the command + line, display the requested info and return true; else return + false. + """ + from distutils.core import gen_usage + + # User just wants a list of commands -- we'll print it out and stop + # processing now (ie. if they ran "setup --help-commands foo bar", + # we ignore "foo bar"). + if self.help_commands: + self.print_commands() + print('') + print(gen_usage(self.script_name)) + return 1 + + # If user supplied any of the "display metadata" options, then + # display that metadata in the order in which the user supplied the + # metadata options. + any_display_options = 0 + is_display_option = {} + for option in self.display_options: + is_display_option[option[0]] = 1 + + for (opt, val) in option_order: + if val and is_display_option.get(opt): + opt = translate_longopt(opt) + value = getattr(self.metadata, "get_"+opt)() + if opt in ['keywords', 'platforms']: + print(','.join(value)) + elif opt in ('classifiers', 'provides', 'requires', + 'obsoletes'): + print('\n'.join(value)) + else: + print(value) + any_display_options = 1 + + return any_display_options + + def print_command_list(self, commands, header, max_length): + """Print a subset of the list of all commands -- used by + 'print_commands()'. + """ + print(header + ":") + + for cmd in commands: + klass = self.cmdclass.get(cmd) + if not klass: + klass = self.get_command_class(cmd) + try: + description = klass.description + except AttributeError: + description = "(no description available)" + + print(" %-*s %s" % (max_length, cmd, description)) + + def print_commands(self): + """Print out a help message listing all available commands with a + description of each. The list is divided into "standard commands" + (listed in distutils.command.__all__) and "extra commands" + (mentioned in self.cmdclass, but not a standard command). The + descriptions come from the command class attribute + 'description'. 
+ """ + import distutils.command + std_commands = distutils.command.__all__ + is_std = {} + for cmd in std_commands: + is_std[cmd] = 1 + + extra_commands = [] + for cmd in self.cmdclass.keys(): + if not is_std.get(cmd): + extra_commands.append(cmd) + + max_length = 0 + for cmd in (std_commands + extra_commands): + if len(cmd) > max_length: + max_length = len(cmd) + + self.print_command_list(std_commands, + "Standard commands", + max_length) + if extra_commands: + print() + self.print_command_list(extra_commands, + "Extra commands", + max_length) + + def get_command_list(self): + """Get a list of (command, description) tuples. + The list is divided into "standard commands" (listed in + distutils.command.__all__) and "extra commands" (mentioned in + self.cmdclass, but not a standard command). The descriptions come + from the command class attribute 'description'. + """ + # Currently this is only used on Mac OS, for the Mac-only GUI + # Distutils interface (by Jack Jansen) + import distutils.command + std_commands = distutils.command.__all__ + is_std = {} + for cmd in std_commands: + is_std[cmd] = 1 + + extra_commands = [] + for cmd in self.cmdclass.keys(): + if not is_std.get(cmd): + extra_commands.append(cmd) + + rv = [] + for cmd in (std_commands + extra_commands): + klass = self.cmdclass.get(cmd) + if not klass: + klass = self.get_command_class(cmd) + try: + description = klass.description + except AttributeError: + description = "(no description available)" + rv.append((cmd, description)) + return rv + + # -- Command class/object methods ---------------------------------- + + def get_command_packages(self): + """Return a list of packages from which commands are loaded.""" + pkgs = self.command_packages + if not isinstance(pkgs, list): + if pkgs is None: + pkgs = '' + pkgs = [pkg.strip() for pkg in pkgs.split(',') if pkg != ''] + if "distutils.command" not in pkgs: + pkgs.insert(0, "distutils.command") + self.command_packages = pkgs + return pkgs + + def get_command_class(self, command): + """Return the class that implements the Distutils command named by + 'command'. First we check the 'cmdclass' dictionary; if the + command is mentioned there, we fetch the class object from the + dictionary and return it. Otherwise we load the command module + ("distutils.command." + command) and fetch the command class from + the module. The loaded class is also stored in 'cmdclass' + to speed future calls to 'get_command_class()'. + + Raises DistutilsModuleError if the expected module could not be + found, or if that module does not define the expected class. + """ + klass = self.cmdclass.get(command) + if klass: + return klass + + for pkgname in self.get_command_packages(): + module_name = "%s.%s" % (pkgname, command) + klass_name = command + + try: + __import__(module_name) + module = sys.modules[module_name] + except ImportError: + continue + + try: + klass = getattr(module, klass_name) + except AttributeError: + raise DistutilsModuleError( + "invalid command '%s' (no class '%s' in module '%s')" + % (command, klass_name, module_name)) + + self.cmdclass[command] = klass + return klass + + raise DistutilsModuleError("invalid command '%s'" % command) + + def get_command_obj(self, command, create=1): + """Return the command object for 'command'. Normally this object + is cached on a previous call to 'get_command_obj()'; if no command + object for 'command' is in the cache, then we either create and + return it (if 'create' is true) or return None. 
+ """ + cmd_obj = self.command_obj.get(command) + if not cmd_obj and create: + if DEBUG: + self.announce("Distribution.get_command_obj(): " + "creating '%s' command object" % command) + + klass = self.get_command_class(command) + cmd_obj = self.command_obj[command] = klass(self) + self.have_run[command] = 0 + + # Set any options that were supplied in config files + # or on the command line. (NB. support for error + # reporting is lame here: any errors aren't reported + # until 'finalize_options()' is called, which means + # we won't report the source of the error.) + options = self.command_options.get(command) + if options: + self._set_command_options(cmd_obj, options) + + return cmd_obj + + def _set_command_options(self, command_obj, option_dict=None): + """Set the options for 'command_obj' from 'option_dict'. Basically + this means copying elements of a dictionary ('option_dict') to + attributes of an instance ('command'). + + 'command_obj' must be a Command instance. If 'option_dict' is not + supplied, uses the standard option dictionary for this command + (from 'self.command_options'). + """ + command_name = command_obj.get_command_name() + if option_dict is None: + option_dict = self.get_option_dict(command_name) + + if DEBUG: + self.announce(" setting options for '%s' command:" % command_name) + for (option, (source, value)) in option_dict.items(): + if DEBUG: + self.announce(" %s = %s (from %s)" % (option, value, + source)) + try: + bool_opts = [translate_longopt(o) + for o in command_obj.boolean_options] + except AttributeError: + bool_opts = [] + try: + neg_opt = command_obj.negative_opt + except AttributeError: + neg_opt = {} + + try: + is_string = isinstance(value, str) + if option in neg_opt and is_string: + setattr(command_obj, neg_opt[option], not strtobool(value)) + elif option in bool_opts and is_string: + setattr(command_obj, option, strtobool(value)) + elif hasattr(command_obj, option): + setattr(command_obj, option, value) + else: + raise DistutilsOptionError( + "error in %s: command '%s' has no such option '%s'" + % (source, command_name, option)) + except ValueError as msg: + raise DistutilsOptionError(msg) + + def reinitialize_command(self, command, reinit_subcommands=0): + """Reinitializes a command to the state it was in when first + returned by 'get_command_obj()': ie., initialized but not yet + finalized. This provides the opportunity to sneak option + values in programmatically, overriding or supplementing + user-supplied values from the config files and command line. + You'll have to re-finalize the command object (by calling + 'finalize_options()' or 'ensure_finalized()') before using it for + real. + + 'command' should be a command name (string) or command object. If + 'reinit_subcommands' is true, also reinitializes the command's + sub-commands, as declared by the 'sub_commands' class attribute (if + it has one). See the "install" command for an example. Only + reinitializes the sub-commands that actually matter, ie. those + whose test predicates return true. + + Returns the reinitialized command object. 
+ """ + from distutils.cmd import Command + if not isinstance(command, Command): + command_name = command + command = self.get_command_obj(command_name) + else: + command_name = command.get_command_name() + + if not command.finalized: + return command + command.initialize_options() + command.finalized = 0 + self.have_run[command_name] = 0 + self._set_command_options(command) + + if reinit_subcommands: + for sub in command.get_sub_commands(): + self.reinitialize_command(sub, reinit_subcommands) + + return command + + # -- Methods that operate on the Distribution ---------------------- + + def announce(self, msg, level=log.INFO): + log.log(level, msg) + + def run_commands(self): + """Run each command that was seen on the setup script command line. + Uses the list of commands found and cache of command objects + created by 'get_command_obj()'. + """ + for cmd in self.commands: + self.run_command(cmd) + + # -- Methods that operate on its Commands -------------------------- + + def run_command(self, command): + """Do whatever it takes to run a command (including nothing at all, + if the command has already been run). Specifically: if we have + already created and run the command named by 'command', return + silently without doing anything. If the command named by 'command' + doesn't even have a command object yet, create one. Then invoke + 'run()' on that command object (or an existing one). + """ + # Already been here, done that? then return silently. + if self.have_run.get(command): + return + + log.info("running %s", command) + cmd_obj = self.get_command_obj(command) + cmd_obj.ensure_finalized() + cmd_obj.run() + self.have_run[command] = 1 + + # -- Distribution query methods ------------------------------------ + + def has_pure_modules(self): + return len(self.packages or self.py_modules or []) > 0 + + def has_ext_modules(self): + return self.ext_modules and len(self.ext_modules) > 0 + + def has_c_libraries(self): + return self.libraries and len(self.libraries) > 0 + + def has_modules(self): + return self.has_pure_modules() or self.has_ext_modules() + + def has_headers(self): + return self.headers and len(self.headers) > 0 + + def has_scripts(self): + return self.scripts and len(self.scripts) > 0 + + def has_data_files(self): + return self.data_files and len(self.data_files) > 0 + + def is_pure(self): + return (self.has_pure_modules() and + not self.has_ext_modules() and + not self.has_c_libraries()) + + # -- Metadata query methods ---------------------------------------- + + # If you're looking for 'get_name()', 'get_version()', and so forth, + # they are defined in a sneaky way: the constructor binds self.get_XXX + # to self.metadata.get_XXX. The actual code is in the + # DistributionMetadata class, below. + +class DistributionMetadata: + """Dummy class to hold the distribution meta-data: name, version, + author, and so forth. 
+ """ + + _METHOD_BASENAMES = ("name", "version", "author", "author_email", + "maintainer", "maintainer_email", "url", + "license", "description", "long_description", + "keywords", "platforms", "fullname", "contact", + "contact_email", "classifiers", "download_url", + # PEP 314 + "provides", "requires", "obsoletes", + ) + + def __init__(self, path=None): + if path is not None: + self.read_pkg_file(open(path)) + else: + self.name = None + self.version = None + self.author = None + self.author_email = None + self.maintainer = None + self.maintainer_email = None + self.url = None + self.license = None + self.description = None + self.long_description = None + self.keywords = None + self.platforms = None + self.classifiers = None + self.download_url = None + # PEP 314 + self.provides = None + self.requires = None + self.obsoletes = None + + def read_pkg_file(self, file): + """Reads the metadata values from a file object.""" + msg = message_from_file(file) + + def _read_field(name): + value = msg[name] + if value == 'UNKNOWN': + return None + return value + + def _read_list(name): + values = msg.get_all(name, None) + if values == []: + return None + return values + + metadata_version = msg['metadata-version'] + self.name = _read_field('name') + self.version = _read_field('version') + self.description = _read_field('summary') + # we are filling author only. + self.author = _read_field('author') + self.maintainer = None + self.author_email = _read_field('author-email') + self.maintainer_email = None + self.url = _read_field('home-page') + self.license = _read_field('license') + + if 'download-url' in msg: + self.download_url = _read_field('download-url') + else: + self.download_url = None + + self.long_description = _read_field('description') + self.description = _read_field('summary') + + if 'keywords' in msg: + self.keywords = _read_field('keywords').split(',') + + self.platforms = _read_list('platform') + self.classifiers = _read_list('classifier') + + # PEP 314 - these fields only exist in 1.1 + if metadata_version == '1.1': + self.requires = _read_list('requires') + self.provides = _read_list('provides') + self.obsoletes = _read_list('obsoletes') + else: + self.requires = None + self.provides = None + self.obsoletes = None + + def write_pkg_info(self, base_dir): + """Write the PKG-INFO file into the release tree. + """ + with open(os.path.join(base_dir, 'PKG-INFO'), 'w', + encoding='UTF-8') as pkg_info: + self.write_pkg_file(pkg_info) + + def write_pkg_file(self, file): + """Write the PKG-INFO format data to a file object. 
+ """ + version = '1.0' + if (self.provides or self.requires or self.obsoletes or + self.classifiers or self.download_url): + version = '1.1' + + file.write('Metadata-Version: %s\n' % version) + file.write('Name: %s\n' % self.get_name()) + file.write('Version: %s\n' % self.get_version()) + file.write('Summary: %s\n' % self.get_description()) + file.write('Home-page: %s\n' % self.get_url()) + file.write('Author: %s\n' % self.get_contact()) + file.write('Author-email: %s\n' % self.get_contact_email()) + file.write('License: %s\n' % self.get_license()) + if self.download_url: + file.write('Download-URL: %s\n' % self.download_url) + + long_desc = rfc822_escape(self.get_long_description()) + file.write('Description: %s\n' % long_desc) + + keywords = ','.join(self.get_keywords()) + if keywords: + file.write('Keywords: %s\n' % keywords) + + self._write_list(file, 'Platform', self.get_platforms()) + self._write_list(file, 'Classifier', self.get_classifiers()) + + # PEP 314 + self._write_list(file, 'Requires', self.get_requires()) + self._write_list(file, 'Provides', self.get_provides()) + self._write_list(file, 'Obsoletes', self.get_obsoletes()) + + def _write_list(self, file, name, values): + for value in values: + file.write('%s: %s\n' % (name, value)) + + # -- Metadata query methods ---------------------------------------- + + def get_name(self): + return self.name or "UNKNOWN" + + def get_version(self): + return self.version or "0.0.0" + + def get_fullname(self): + return "%s-%s" % (self.get_name(), self.get_version()) + + def get_author(self): + return self.author or "UNKNOWN" + + def get_author_email(self): + return self.author_email or "UNKNOWN" + + def get_maintainer(self): + return self.maintainer or "UNKNOWN" + + def get_maintainer_email(self): + return self.maintainer_email or "UNKNOWN" + + def get_contact(self): + return self.maintainer or self.author or "UNKNOWN" + + def get_contact_email(self): + return self.maintainer_email or self.author_email or "UNKNOWN" + + def get_url(self): + return self.url or "UNKNOWN" + + def get_license(self): + return self.license or "UNKNOWN" + get_licence = get_license + + def get_description(self): + return self.description or "UNKNOWN" + + def get_long_description(self): + return self.long_description or "UNKNOWN" + + def get_keywords(self): + return self.keywords or [] + + def set_keywords(self, value): + self.keywords = _ensure_list(value, 'keywords') + + def get_platforms(self): + return self.platforms or ["UNKNOWN"] + + def set_platforms(self, value): + self.platforms = _ensure_list(value, 'platforms') + + def get_classifiers(self): + return self.classifiers or [] + + def set_classifiers(self, value): + self.classifiers = _ensure_list(value, 'classifiers') + + def get_download_url(self): + return self.download_url or "UNKNOWN" + + # PEP 314 + def get_requires(self): + return self.requires or [] + + def set_requires(self, value): + import distutils.versionpredicate + for v in value: + distutils.versionpredicate.VersionPredicate(v) + self.requires = list(value) + + def get_provides(self): + return self.provides or [] + + def set_provides(self, value): + value = [v.strip() for v in value] + for v in value: + import distutils.versionpredicate + distutils.versionpredicate.split_provision(v) + self.provides = value + + def get_obsoletes(self): + return self.obsoletes or [] + + def set_obsoletes(self, value): + import distutils.versionpredicate + for v in value: + distutils.versionpredicate.VersionPredicate(v) + self.obsoletes = list(value) + +def 
fix_help_options(options):
+    """Convert a 4-tuple 'help_options' list as found in various command
+    classes to the 3-tuple form required by FancyGetopt.
+    """
+    new_options = []
+    for help_tuple in options:
+        new_options.append(help_tuple[0:3])
+    return new_options
diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/errors.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/errors.py
--- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/errors.py 1970-01-01 00:00:00.000000000 +0000
+++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/errors.py 2022-10-24 17:35:39.000000000 +0000
@@ -0,0 +1,97 @@
+"""distutils.errors
+
+Provides exceptions used by the Distutils modules. Note that Distutils
+modules may raise standard exceptions; in particular, SystemExit is
+usually raised for errors that are obviously the end-user's fault
+(eg. bad command-line arguments).
+
+This module is safe to use in "from ... import *" mode; it only exports
+symbols whose names start with "Distutils" and end with "Error"."""
+
+class DistutilsError (Exception):
+    """The root of all Distutils evil."""
+    pass
+
+class DistutilsModuleError (DistutilsError):
+    """Unable to load an expected module, or to find an expected class
+    within some module (in particular, command modules and classes)."""
+    pass
+
+class DistutilsClassError (DistutilsError):
+    """Some command class (or possibly distribution class, if anyone
+    feels a need to subclass Distribution) is found not to be holding
+    up its end of the bargain, ie. implementing some part of the
+    "command" interface."""
+    pass
+
+class DistutilsGetoptError (DistutilsError):
+    """The option table provided to 'fancy_getopt()' is bogus."""
+    pass
+
+class DistutilsArgError (DistutilsError):
+    """Raised by fancy_getopt in response to getopt.error -- ie. an
+    error in the command line usage."""
+    pass
+
+class DistutilsFileError (DistutilsError):
+    """Any problems in the filesystem: expected file not found, etc.
+    Typically this is for problems that we detect before OSError
+    could be raised."""
+    pass
+
+class DistutilsOptionError (DistutilsError):
+    """Syntactic/semantic errors in command options, such as use of
+    mutually conflicting options, or inconsistent options,
+    badly-spelled values, etc. No distinction is made between option
+    values originating in the setup script, the command line, config
+    files, or what-have-you -- but if we *know* something originated in
+    the setup script, we'll raise DistutilsSetupError instead."""
+    pass
+
+class DistutilsSetupError (DistutilsError):
+    """For errors that can be definitely blamed on the setup script,
+    such as invalid keyword arguments to 'setup()'."""
+    pass
+
+class DistutilsPlatformError (DistutilsError):
+    """We don't know how to do something on the current platform (but
+    we do know how to do it on some platform) -- eg.
trying to compile + C files on a platform not supported by a CCompiler subclass.""" + pass + +class DistutilsExecError (DistutilsError): + """Any problems executing an external program (such as the C + compiler, when compiling C files).""" + pass + +class DistutilsInternalError (DistutilsError): + """Internal inconsistencies or impossibilities (obviously, this + should never be seen if the code is working!).""" + pass + +class DistutilsTemplateError (DistutilsError): + """Syntax error in a file list template.""" + +class DistutilsByteCompileError(DistutilsError): + """Byte compile error.""" + +# Exception classes used by the CCompiler implementation classes +class CCompilerError (Exception): + """Some compile/link operation failed.""" + +class PreprocessError (CCompilerError): + """Failure to preprocess one or more C/C++ files.""" + +class CompileError (CCompilerError): + """Failure to compile one or more C/C++ source files.""" + +class LibError (CCompilerError): + """Failure to create a static library from one or more C/C++ object + files.""" + +class LinkError (CCompilerError): + """Failure to link one or more C/C++ object files into an executable + or shared library file.""" + +class UnknownFileError (CCompilerError): + """Attempt to process an unknown file type.""" diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/extension.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/extension.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/extension.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/extension.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,241 @@ +"""distutils.extension + +Provides the Extension class, used to describe C/C++ extension +modules in setup scripts.""" + +import os +import re +import warnings + +# This class is really only used by the "build_ext" command, so it might +# make sense to put it in distutils.command.build_ext. However, that +# module is already big enough, and I want to make this class a bit more +# complex to simplify some common cases ("foo" module in "foo.c") and do +# better error-checking ("foo.c" actually exists). +# +# Also, putting this in build_ext.py means every setup script would have to +# import that large-ish module (indirectly, through distutils.core) in +# order to do anything. + +class Extension: + """Just a collection of attributes that describes an extension + module and everything needed to build it (hopefully in a portable + way, but there are hooks that let you be as unportable as you need). + + Instance attributes: + name : string + the full name of the extension, including any packages -- ie. + *not* a filename or pathname, but Python dotted name + sources : [string] + list of source filenames, relative to the distribution root + (where the setup script lives), in Unix form (slash-separated) + for portability. Source files may be C, C++, SWIG (.i), + platform-specific resource files, or whatever else is recognized + by the "build_ext" command as source for a Python extension. 
+ include_dirs : [string] + list of directories to search for C/C++ header files (in Unix + form for portability) + define_macros : [(name : string, value : string|None)] + list of macros to define; each macro is defined using a 2-tuple, + where 'value' is either the string to define it to or None to + define it without a particular value (equivalent of "#define + FOO" in source or -DFOO on Unix C compiler command line) + undef_macros : [string] + list of macros to undefine explicitly + library_dirs : [string] + list of directories to search for C/C++ libraries at link time + libraries : [string] + list of library names (not filenames or paths) to link against + runtime_library_dirs : [string] + list of directories to search for C/C++ libraries at run time + (for shared extensions, this is when the extension is loaded) + extra_objects : [string] + list of extra files to link with (eg. object files not implied + by 'sources', static library that must be explicitly specified, + binary resource files, etc.) + extra_compile_args : [string] + any extra platform- and compiler-specific information to use + when compiling the source files in 'sources'. For platforms and + compilers where "command line" makes sense, this is typically a + list of command-line arguments, but for other platforms it could + be anything. + extra_link_args : [string] + any extra platform- and compiler-specific information to use + when linking object files together to create the extension (or + to create a new static Python interpreter). Similar + interpretation as for 'extra_compile_args'. + export_symbols : [string] + list of symbols to be exported from a shared extension. Not + used on all platforms, and not generally necessary for Python + extensions, which typically export exactly one symbol: "init" + + extension_name. + swig_opts : [string] + any extra options to pass to SWIG if a source file has the .i + extension. + depends : [string] + list of files that the extension depends on + language : string + extension language (i.e. "c", "c++", "objc"). Will be detected + from the source extensions if not provided. + optional : boolean + specifies that a build failure in the extension should not abort the + build process, but simply not install the failing extension. + """ + + # When adding arguments to this constructor, be sure to update + # setup_keywords in core.py. 
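+    # A minimal usage sketch (hypothetical module and file names), shown as
+    # a comment so that the class body itself is unchanged:
+    #
+    #     from distutils.extension import Extension
+    #     ext = Extension("demo._speedups", ["src/_speedups.c"],
+    #                     define_macros=[("NDEBUG", None)],
+    #                     libraries=["m"])
+    #
+    # Unknown keyword arguments are not rejected outright; __init__ below
+    # collects them in **kw and reports them with a warning.
+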
+ def __init__(self, name, sources, + include_dirs=None, + define_macros=None, + undef_macros=None, + library_dirs=None, + libraries=None, + runtime_library_dirs=None, + extra_objects=None, + extra_compile_args=None, + extra_link_args=None, + export_symbols=None, + swig_opts = None, + depends=None, + language=None, + optional=None, + **kw # To catch unknown keywords + ): + if not isinstance(name, str): + raise AssertionError("'name' must be a string") + if not (isinstance(sources, list) and + all(isinstance(v, str) for v in sources)): + raise AssertionError("'sources' must be a list of strings") + + self.name = name + self.sources = sources + self.include_dirs = include_dirs or [] + self.define_macros = define_macros or [] + self.undef_macros = undef_macros or [] + self.library_dirs = library_dirs or [] + self.libraries = libraries or [] + self.runtime_library_dirs = runtime_library_dirs or [] + self.extra_objects = extra_objects or [] + self.extra_compile_args = extra_compile_args or [] + self.extra_link_args = extra_link_args or [] + self.export_symbols = export_symbols or [] + self.swig_opts = swig_opts or [] + self.depends = depends or [] + self.language = language + self.optional = optional + + # If there are unknown keyword options, warn about them + if len(kw) > 0: + options = [repr(option) for option in kw] + options = ', '.join(sorted(options)) + msg = "Unknown Extension options: %s" % options + warnings.warn(msg) + + def __repr__(self): + return '<%s.%s(%r) at %#x>' % ( + self.__class__.__module__, + self.__class__.__qualname__, + self.name, + id(self)) + + +def read_setup_file(filename): + """Reads a Setup file and returns Extension instances.""" + from distutils.sysconfig import (parse_makefile, expand_makefile_vars, + _variable_rx) + + from distutils.text_file import TextFile + from distutils.util import split_quoted + + # First pass over the file to gather "VAR = VALUE" assignments. + vars = parse_makefile(filename) + + # Second pass to gobble up the real content: lines of the form + # ... [ ...] [ ...] [ ...] + file = TextFile(filename, + strip_comments=1, skip_blanks=1, join_lines=1, + lstrip_ws=1, rstrip_ws=1) + try: + extensions = [] + + while True: + line = file.readline() + if line is None: # eof + break + if re.match(_variable_rx, line): # VAR=VALUE, handled in first pass + continue + + if line[0] == line[-1] == "*": + file.warn("'%s' lines not handled yet" % line) + continue + + line = expand_makefile_vars(line, vars) + words = split_quoted(line) + + # NB. this parses a slightly different syntax than the old + # makesetup script: here, there must be exactly one extension per + # line, and it must be the first word of the line. I have no idea + # why the old syntax supported multiple extensions per line, as + # they all wind up being the same. + + module = words[0] + ext = Extension(module, []) + append_next_word = None + + for word in words[1:]: + if append_next_word is not None: + append_next_word.append(word) + append_next_word = None + continue + + suffix = os.path.splitext(word)[1] + switch = word[0:2] ; value = word[2:] + + if suffix in (".c", ".cc", ".cpp", ".cxx", ".c++", ".m", ".mm"): + # hmm, should we do something about C vs. C++ sources? + # or leave it up to the CCompiler implementation to + # worry about? 
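+                    # For example, given the (hypothetical) Setup line
+                    #     spam spammodule.c -DNDEBUG -lm
+                    # 'spammodule.c' is recorded here as a source file, while
+                    # the -D and -l switches are handled by the branches below.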
+ ext.sources.append(word) + elif switch == "-I": + ext.include_dirs.append(value) + elif switch == "-D": + equals = value.find("=") + if equals == -1: # bare "-DFOO" -- no value + ext.define_macros.append((value, None)) + else: # "-DFOO=blah" + ext.define_macros.append((value[0:equals], + value[equals+2:])) + elif switch == "-U": + ext.undef_macros.append(value) + elif switch == "-C": # only here 'cause makesetup has it! + ext.extra_compile_args.append(word) + elif switch == "-l": + ext.libraries.append(value) + elif switch == "-L": + ext.library_dirs.append(value) + elif switch == "-R": + ext.runtime_library_dirs.append(value) + elif word == "-rpath": + append_next_word = ext.runtime_library_dirs + elif word == "-Xlinker": + append_next_word = ext.extra_link_args + elif word == "-Xcompiler": + append_next_word = ext.extra_compile_args + elif switch == "-u": + ext.extra_link_args.append(word) + if not value: + append_next_word = ext.extra_link_args + elif suffix in (".a", ".so", ".sl", ".o", ".dylib"): + # NB. a really faithful emulation of makesetup would + # append a .o file to extra_objects only if it + # had a slash in it; otherwise, it would s/.o/.c/ + # and append it to sources. Hmmmm. + ext.extra_objects.append(word) + else: + file.warn("unrecognized argument '%s'" % word) + + extensions.append(ext) + finally: + file.close() + + return extensions diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/fancy_getopt.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/fancy_getopt.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/fancy_getopt.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/fancy_getopt.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,457 @@ +"""distutils.fancy_getopt + +Wrapper around the standard getopt module that provides the following +additional features: + * short and long options are tied together + * options have help strings, so fancy_getopt could potentially + create a complete usage summary + * options set attributes of a passed-in object +""" + +import sys, string, re +import getopt +from distutils.errors import * + +# Much like command_re in distutils.core, this is close to but not quite +# the same as a Python NAME -- except, in the spirit of most GNU +# utilities, we use '-' in place of '_'. (The spirit of LISP lives on!) +# The similarities to NAME are again not a coincidence... +longopt_pat = r'[a-zA-Z](?:[a-zA-Z0-9-]*)' +longopt_re = re.compile(r'^%s$' % longopt_pat) + +# For recognizing "negative alias" options, eg. "quiet=!verbose" +neg_alias_re = re.compile("^(%s)=!(%s)$" % (longopt_pat, longopt_pat)) + +# This is used to translate long options to legitimate Python identifiers +# (for use as attributes of some object). +longopt_xlate = str.maketrans('-', '_') + +class FancyGetopt: + """Wrapper around the standard 'getopt()' module that provides some + handy extra functionality: + * short and long options are tied together + * options have help strings, and help text can be assembled + from them + * options set attributes of a passed-in object + * boolean options can have "negative aliases" -- eg. if + --quiet is the "negative alias" of --verbose, then "--quiet" + on the command line sets 'verbose' to false + """ + + def __init__(self, option_table=None): + # The option table is (currently) a list of tuples. 
The + # tuples may have 3 or four values: + # (long_option, short_option, help_string [, repeatable]) + # if an option takes an argument, its long_option should have '=' + # appended; short_option should just be a single character, no ':' + # in any case. If a long_option doesn't have a corresponding + # short_option, short_option should be None. All option tuples + # must have long options. + self.option_table = option_table + + # 'option_index' maps long option names to entries in the option + # table (ie. those 3-tuples). + self.option_index = {} + if self.option_table: + self._build_index() + + # 'alias' records (duh) alias options; {'foo': 'bar'} means + # --foo is an alias for --bar + self.alias = {} + + # 'negative_alias' keeps track of options that are the boolean + # opposite of some other option + self.negative_alias = {} + + # These keep track of the information in the option table. We + # don't actually populate these structures until we're ready to + # parse the command-line, since the 'option_table' passed in here + # isn't necessarily the final word. + self.short_opts = [] + self.long_opts = [] + self.short2long = {} + self.attr_name = {} + self.takes_arg = {} + + # And 'option_order' is filled up in 'getopt()'; it records the + # original order of options (and their values) on the command-line, + # but expands short options, converts aliases, etc. + self.option_order = [] + + def _build_index(self): + self.option_index.clear() + for option in self.option_table: + self.option_index[option[0]] = option + + def set_option_table(self, option_table): + self.option_table = option_table + self._build_index() + + def add_option(self, long_option, short_option=None, help_string=None): + if long_option in self.option_index: + raise DistutilsGetoptError( + "option conflict: already an option '%s'" % long_option) + else: + option = (long_option, short_option, help_string) + self.option_table.append(option) + self.option_index[long_option] = option + + def has_option(self, long_option): + """Return true if the option table for this parser has an + option with long name 'long_option'.""" + return long_option in self.option_index + + def get_attr_name(self, long_option): + """Translate long option name 'long_option' to the form it + has as an attribute of some object: ie., translate hyphens + to underscores.""" + return long_option.translate(longopt_xlate) + + def _check_alias_dict(self, aliases, what): + assert isinstance(aliases, dict) + for (alias, opt) in aliases.items(): + if alias not in self.option_index: + raise DistutilsGetoptError(("invalid %s '%s': " + "option '%s' not defined") % (what, alias, alias)) + if opt not in self.option_index: + raise DistutilsGetoptError(("invalid %s '%s': " + "aliased option '%s' not defined") % (what, alias, opt)) + + def set_aliases(self, alias): + """Set the aliases for this option parser.""" + self._check_alias_dict(alias, "alias") + self.alias = alias + + def set_negative_aliases(self, negative_alias): + """Set the negative aliases for this option parser. + 'negative_alias' should be a dictionary mapping option names to + option names, both the key and value must already be defined + in the option table.""" + self._check_alias_dict(negative_alias, "negative alias") + self.negative_alias = negative_alias + + def _grok_option_table(self): + """Populate the various data structures that keep tabs on the + option table. Called by 'getopt()' before it can do anything + worthwhile. 
+ """ + self.long_opts = [] + self.short_opts = [] + self.short2long.clear() + self.repeat = {} + + for option in self.option_table: + if len(option) == 3: + long, short, help = option + repeat = 0 + elif len(option) == 4: + long, short, help, repeat = option + else: + # the option table is part of the code, so simply + # assert that it is correct + raise ValueError("invalid option tuple: %r" % (option,)) + + # Type- and value-check the option names + if not isinstance(long, str) or len(long) < 2: + raise DistutilsGetoptError(("invalid long option '%s': " + "must be a string of length >= 2") % long) + + if (not ((short is None) or + (isinstance(short, str) and len(short) == 1))): + raise DistutilsGetoptError("invalid short option '%s': " + "must a single character or None" % short) + + self.repeat[long] = repeat + self.long_opts.append(long) + + if long[-1] == '=': # option takes an argument? + if short: short = short + ':' + long = long[0:-1] + self.takes_arg[long] = 1 + else: + # Is option is a "negative alias" for some other option (eg. + # "quiet" == "!verbose")? + alias_to = self.negative_alias.get(long) + if alias_to is not None: + if self.takes_arg[alias_to]: + raise DistutilsGetoptError( + "invalid negative alias '%s': " + "aliased option '%s' takes a value" + % (long, alias_to)) + + self.long_opts[-1] = long # XXX redundant?! + self.takes_arg[long] = 0 + + # If this is an alias option, make sure its "takes arg" flag is + # the same as the option it's aliased to. + alias_to = self.alias.get(long) + if alias_to is not None: + if self.takes_arg[long] != self.takes_arg[alias_to]: + raise DistutilsGetoptError( + "invalid alias '%s': inconsistent with " + "aliased option '%s' (one of them takes a value, " + "the other doesn't" + % (long, alias_to)) + + # Now enforce some bondage on the long option name, so we can + # later translate it to an attribute name on some object. Have + # to do this a bit late to make sure we've removed any trailing + # '='. + if not longopt_re.match(long): + raise DistutilsGetoptError( + "invalid long option name '%s' " + "(must be letters, numbers, hyphens only" % long) + + self.attr_name[long] = self.get_attr_name(long) + if short: + self.short_opts.append(short) + self.short2long[short[0]] = long + + def getopt(self, args=None, object=None): + """Parse command-line options in args. Store as attributes on object. + + If 'args' is None or not supplied, uses 'sys.argv[1:]'. If + 'object' is None or not supplied, creates a new OptionDummy + object, stores option values there, and returns a tuple (args, + object). If 'object' is supplied, it is modified in place and + 'getopt()' just returns 'args'; in both cases, the returned + 'args' is a modified copy of the passed-in 'args' list, which + is left untouched. + """ + if args is None: + args = sys.argv[1:] + if object is None: + object = OptionDummy() + created_object = True + else: + created_object = False + + self._grok_option_table() + + short_opts = ' '.join(self.short_opts) + try: + opts, args = getopt.getopt(args, short_opts, self.long_opts) + except getopt.error as msg: + raise DistutilsArgError(msg) + + for opt, val in opts: + if len(opt) == 2 and opt[0] == '-': # it's a short option + opt = self.short2long[opt[1]] + else: + assert len(opt) > 2 and opt[:2] == '--' + opt = opt[2:] + + alias = self.alias.get(opt) + if alias: + opt = alias + + if not self.takes_arg[opt]: # boolean option? 
+ assert val == '', "boolean option can't have value" + alias = self.negative_alias.get(opt) + if alias: + opt = alias + val = 0 + else: + val = 1 + + attr = self.attr_name[opt] + # The only repeating option at the moment is 'verbose'. + # It has a negative option -q quiet, which should set verbose = 0. + if val and self.repeat.get(attr) is not None: + val = getattr(object, attr, 0) + 1 + setattr(object, attr, val) + self.option_order.append((opt, val)) + + # for opts + if created_object: + return args, object + else: + return args + + def get_option_order(self): + """Returns the list of (option, value) tuples processed by the + previous run of 'getopt()'. Raises RuntimeError if + 'getopt()' hasn't been called yet. + """ + if self.option_order is None: + raise RuntimeError("'getopt()' hasn't been called yet") + else: + return self.option_order + + def generate_help(self, header=None): + """Generate help text (a list of strings, one per suggested line of + output) from the option table for this FancyGetopt object. + """ + # Blithely assume the option table is good: probably wouldn't call + # 'generate_help()' unless you've already called 'getopt()'. + + # First pass: determine maximum length of long option names + max_opt = 0 + for option in self.option_table: + long = option[0] + short = option[1] + l = len(long) + if long[-1] == '=': + l = l - 1 + if short is not None: + l = l + 5 # " (-x)" where short == 'x' + if l > max_opt: + max_opt = l + + opt_width = max_opt + 2 + 2 + 2 # room for indent + dashes + gutter + + # Typical help block looks like this: + # --foo controls foonabulation + # Help block for longest option looks like this: + # --flimflam set the flim-flam level + # and with wrapped text: + # --flimflam set the flim-flam level (must be between + # 0 and 100, except on Tuesdays) + # Options with short names will have the short name shown (but + # it doesn't contribute to max_opt): + # --foo (-f) controls foonabulation + # If adding the short option would make the left column too wide, + # we push the explanation off to the next line + # --flimflam (-l) + # set the flim-flam level + # Important parameters: + # - 2 spaces before option block start lines + # - 2 dashes for each long option name + # - min. 2 spaces between option and explanation (gutter) + # - 5 characters (incl. space) for short option name + + # Now generate lines of help text. (If 80 columns were good enough + # for Jesus, then 78 columns are good enough for me!) 
+ line_width = 78 + text_width = line_width - opt_width + big_indent = ' ' * opt_width + if header: + lines = [header] + else: + lines = ['Option summary:'] + + for option in self.option_table: + long, short, help = option[:3] + text = wrap_text(help, text_width) + if long[-1] == '=': + long = long[0:-1] + + # Case 1: no short option at all (makes life easy) + if short is None: + if text: + lines.append(" --%-*s %s" % (max_opt, long, text[0])) + else: + lines.append(" --%-*s " % (max_opt, long)) + + # Case 2: we have a short option, so we have to include it + # just after the long option + else: + opt_names = "%s (-%s)" % (long, short) + if text: + lines.append(" --%-*s %s" % + (max_opt, opt_names, text[0])) + else: + lines.append(" --%-*s" % opt_names) + + for l in text[1:]: + lines.append(big_indent + l) + return lines + + def print_help(self, header=None, file=None): + if file is None: + file = sys.stdout + for line in self.generate_help(header): + file.write(line + "\n") + + +def fancy_getopt(options, negative_opt, object, args): + parser = FancyGetopt(options) + parser.set_negative_aliases(negative_opt) + return parser.getopt(args, object) + + +WS_TRANS = {ord(_wschar) : ' ' for _wschar in string.whitespace} + +def wrap_text(text, width): + """wrap_text(text : string, width : int) -> [string] + + Split 'text' into multiple lines of no more than 'width' characters + each, and return the list of strings that results. + """ + if text is None: + return [] + if len(text) <= width: + return [text] + + text = text.expandtabs() + text = text.translate(WS_TRANS) + chunks = re.split(r'( +|-+)', text) + chunks = [ch for ch in chunks if ch] # ' - ' results in empty strings + lines = [] + + while chunks: + cur_line = [] # list of chunks (to-be-joined) + cur_len = 0 # length of current line + + while chunks: + l = len(chunks[0]) + if cur_len + l <= width: # can squeeze (at least) this chunk in + cur_line.append(chunks[0]) + del chunks[0] + cur_len = cur_len + l + else: # this line is full + # drop last chunk if all space + if cur_line and cur_line[-1][0] == ' ': + del cur_line[-1] + break + + if chunks: # any chunks left to process? + # if the current line is still empty, then we had a single + # chunk that's too big too fit on a line -- so we break + # down and break it up at the line width + if cur_len == 0: + cur_line.append(chunks[0][0:width]) + chunks[0] = chunks[0][width:] + + # all-whitespace chunks at the end of a line can be discarded + # (and we know from the re.split above that if a chunk has + # *any* whitespace, it is *all* whitespace) + if chunks[0][0] == ' ': + del chunks[0] + + # and store this line in the list-of-all-lines -- as a single + # string, of course! + lines.append(''.join(cur_line)) + + return lines + + +def translate_longopt(opt): + """Convert a long option name to a valid Python identifier by + changing "-" to "_". + """ + return opt.translate(longopt_xlate) + + +class OptionDummy: + """Dummy class just used as a place to hold command-line option + values as instance attributes.""" + + def __init__(self, options=[]): + """Create a new OptionDummy instance. The attributes listed in + 'options' will be initialized to None.""" + for opt in options: + setattr(self, opt, None) + + +if __name__ == "__main__": + text = """\ +Tra-la-la, supercalifragilisticexpialidocious. +How *do* you spell that odd word, anyways? 
+(Someone ask Mary -- she'll know [or she'll +say, "How should I know?"].)""" + + for w in (10, 20, 30, 40): + print("width: %d" % w) + print("\n".join(wrap_text(text, w))) + print() diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/file_util.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/file_util.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/file_util.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/file_util.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,238 @@ +"""distutils.file_util + +Utility functions for operating on single files. +""" + +import os +from distutils.errors import DistutilsFileError +from distutils import log + +# for generating verbose output in 'copy_file()' +_copy_action = { None: 'copying', + 'hard': 'hard linking', + 'sym': 'symbolically linking' } + + +def _copy_file_contents(src, dst, buffer_size=16*1024): + """Copy the file 'src' to 'dst'; both must be filenames. Any error + opening either file, reading from 'src', or writing to 'dst', raises + DistutilsFileError. Data is read/written in chunks of 'buffer_size' + bytes (default 16k). No attempt is made to handle anything apart from + regular files. + """ + # Stolen from shutil module in the standard library, but with + # custom error-handling added. + fsrc = None + fdst = None + try: + try: + fsrc = open(src, 'rb') + except OSError as e: + raise DistutilsFileError("could not open '%s': %s" % (src, e.strerror)) + + if os.path.exists(dst): + try: + os.unlink(dst) + except OSError as e: + raise DistutilsFileError( + "could not delete '%s': %s" % (dst, e.strerror)) + + try: + fdst = open(dst, 'wb') + except OSError as e: + raise DistutilsFileError( + "could not create '%s': %s" % (dst, e.strerror)) + + while True: + try: + buf = fsrc.read(buffer_size) + except OSError as e: + raise DistutilsFileError( + "could not read from '%s': %s" % (src, e.strerror)) + + if not buf: + break + + try: + fdst.write(buf) + except OSError as e: + raise DistutilsFileError( + "could not write to '%s': %s" % (dst, e.strerror)) + finally: + if fdst: + fdst.close() + if fsrc: + fsrc.close() + +def copy_file(src, dst, preserve_mode=1, preserve_times=1, update=0, + link=None, verbose=1, dry_run=0): + """Copy a file 'src' to 'dst'. If 'dst' is a directory, then 'src' is + copied there with the same name; otherwise, it must be a filename. (If + the file exists, it will be ruthlessly clobbered.) If 'preserve_mode' + is true (the default), the file's mode (type and permission bits, or + whatever is analogous on the current platform) is copied. If + 'preserve_times' is true (the default), the last-modified and + last-access times are copied as well. If 'update' is true, 'src' will + only be copied if 'dst' does not exist, or if 'dst' does exist but is + older than 'src'. + + 'link' allows you to make hard links (os.link) or symbolic links + (os.symlink) instead of copying: set it to "hard" or "sym"; if it is + None (the default), files are copied. Don't set 'link' on systems that + don't support it: 'copy_file()' doesn't check if hard or symbolic + linking is available. If hardlink fails, falls back to + _copy_file_contents(). + + Under Mac OS, uses the native file copy function in macostools; on + other systems, uses '_copy_file_contents()' to copy file contents. + + Return a tuple (dest_name, copied): 'dest_name' is the actual name of + the output file, and 'copied' is true if the file was copied (or would + have been copied, if 'dry_run' true). 
+ """ + # XXX if the destination file already exists, we clobber it if + # copying, but blow up if linking. Hmmm. And I don't know what + # macostools.copyfile() does. Should definitely be consistent, and + # should probably blow up if destination exists and we would be + # changing it (ie. it's not already a hard/soft link to src OR + # (not update) and (src newer than dst). + + from distutils.dep_util import newer + from stat import ST_ATIME, ST_MTIME, ST_MODE, S_IMODE + + if not os.path.isfile(src): + raise DistutilsFileError( + "can't copy '%s': doesn't exist or not a regular file" % src) + + if os.path.isdir(dst): + dir = dst + dst = os.path.join(dst, os.path.basename(src)) + else: + dir = os.path.dirname(dst) + + if update and not newer(src, dst): + if verbose >= 1: + log.debug("not copying %s (output up-to-date)", src) + return (dst, 0) + + try: + action = _copy_action[link] + except KeyError: + raise ValueError("invalid value '%s' for 'link' argument" % link) + + if verbose >= 1: + if os.path.basename(dst) == os.path.basename(src): + log.info("%s %s -> %s", action, src, dir) + else: + log.info("%s %s -> %s", action, src, dst) + + if dry_run: + return (dst, 1) + + # If linking (hard or symbolic), use the appropriate system call + # (Unix only, of course, but that's the caller's responsibility) + elif link == 'hard': + if not (os.path.exists(dst) and os.path.samefile(src, dst)): + try: + os.link(src, dst) + return (dst, 1) + except OSError: + # If hard linking fails, fall back on copying file + # (some special filesystems don't support hard linking + # even under Unix, see issue #8876). + pass + elif link == 'sym': + if not (os.path.exists(dst) and os.path.samefile(src, dst)): + os.symlink(src, dst) + return (dst, 1) + + # Otherwise (non-Mac, not linking), copy the file contents and + # (optionally) copy the times and mode. + _copy_file_contents(src, dst) + if preserve_mode or preserve_times: + st = os.stat(src) + + # According to David Ascher , utime() should be done + # before chmod() (at least under NT). + if preserve_times: + os.utime(dst, (st[ST_ATIME], st[ST_MTIME])) + if preserve_mode: + os.chmod(dst, S_IMODE(st[ST_MODE])) + + return (dst, 1) + + +# XXX I suspect this is Unix-specific -- need porting help! +def move_file (src, dst, + verbose=1, + dry_run=0): + + """Move a file 'src' to 'dst'. If 'dst' is a directory, the file will + be moved into it with the same name; otherwise, 'src' is just renamed + to 'dst'. Return the new full name of the file. + + Handles cross-device moves on Unix using 'copy_file()'. What about + other systems??? 
+ """ + from os.path import exists, isfile, isdir, basename, dirname + import errno + + if verbose >= 1: + log.info("moving %s -> %s", src, dst) + + if dry_run: + return dst + + if not isfile(src): + raise DistutilsFileError("can't move '%s': not a regular file" % src) + + if isdir(dst): + dst = os.path.join(dst, basename(src)) + elif exists(dst): + raise DistutilsFileError( + "can't move '%s': destination '%s' already exists" % + (src, dst)) + + if not isdir(dirname(dst)): + raise DistutilsFileError( + "can't move '%s': destination '%s' not a valid path" % + (src, dst)) + + copy_it = False + try: + os.rename(src, dst) + except OSError as e: + (num, msg) = e.args + if num == errno.EXDEV: + copy_it = True + else: + raise DistutilsFileError( + "couldn't move '%s' to '%s': %s" % (src, dst, msg)) + + if copy_it: + copy_file(src, dst, verbose=verbose) + try: + os.unlink(src) + except OSError as e: + (num, msg) = e.args + try: + os.unlink(dst) + except OSError: + pass + raise DistutilsFileError( + "couldn't move '%s' to '%s' by copy/delete: " + "delete '%s' failed: %s" + % (src, dst, src, msg)) + return dst + + +def write_file (filename, contents): + """Create a file with the specified name and write 'contents' (a + sequence of strings without line terminators) to it. + """ + f = open(filename, "w") + try: + for line in contents: + f.write(line + "\n") + finally: + f.close() diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/filelist.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/filelist.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/filelist.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/filelist.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,327 @@ +"""distutils.filelist + +Provides the FileList class, used for poking about the filesystem +and building lists of files. +""" + +import os, re +import fnmatch +import functools +from distutils.util import convert_path +from distutils.errors import DistutilsTemplateError, DistutilsInternalError +from distutils import log + +class FileList: + """A list of files built by on exploring the filesystem and filtered by + applying various patterns to what we find there. + + Instance attributes: + dir + directory from which files will be taken -- only used if + 'allfiles' not supplied to constructor + files + list of filenames currently being built/filtered/manipulated + allfiles + complete list of files under consideration (ie. without any + filtering applied) + """ + + def __init__(self, warn=None, debug_print=None): + # ignore argument to FileList, but keep them for backwards + # compatibility + self.allfiles = None + self.files = [] + + def set_allfiles(self, allfiles): + self.allfiles = allfiles + + def findall(self, dir=os.curdir): + self.allfiles = findall(dir) + + def debug_print(self, msg): + """Print 'msg' to stdout if the global DEBUG (taken from the + DISTUTILS_DEBUG environment variable) flag is true. + """ + from distutils.debug import DEBUG + if DEBUG: + print(msg) + + # -- List-like methods --------------------------------------------- + + def append(self, item): + self.files.append(item) + + def extend(self, items): + self.files.extend(items) + + def sort(self): + # Not a strict lexical sort! 
+        sortable_files = sorted(map(os.path.split, self.files))
+        self.files = []
+        for sort_tuple in sortable_files:
+            self.files.append(os.path.join(*sort_tuple))
+
+
+    # -- Other miscellaneous utility methods ---------------------------
+
+    def remove_duplicates(self):
+        # Assumes list has been sorted!
+        for i in range(len(self.files) - 1, 0, -1):
+            if self.files[i] == self.files[i - 1]:
+                del self.files[i]
+
+
+    # -- "File template" methods ---------------------------------------
+
+    def _parse_template_line(self, line):
+        words = line.split()
+        action = words[0]
+
+        patterns = dir = dir_pattern = None
+
+        if action in ('include', 'exclude',
+                      'global-include', 'global-exclude'):
+            if len(words) < 2:
+                raise DistutilsTemplateError(
+                      "'%s' expects <pattern1> <pattern2> ..." % action)
+            patterns = [convert_path(w) for w in words[1:]]
+        elif action in ('recursive-include', 'recursive-exclude'):
+            if len(words) < 3:
+                raise DistutilsTemplateError(
+                      "'%s' expects <dir> <pattern1> <pattern2> ..." % action)
+            dir = convert_path(words[1])
+            patterns = [convert_path(w) for w in words[2:]]
+        elif action in ('graft', 'prune'):
+            if len(words) != 2:
+                raise DistutilsTemplateError(
+                      "'%s' expects a single <dir_pattern>" % action)
+            dir_pattern = convert_path(words[1])
+        else:
+            raise DistutilsTemplateError("unknown action '%s'" % action)
+
+        return (action, patterns, dir, dir_pattern)
+
+    def process_template_line(self, line):
+        # Parse the line: split it up, make sure the right number of words
+        # is there, and return the relevant words. 'action' is always
+        # defined: it's the first word of the line. Which of the other
+        # three are defined depends on the action; it'll be either
+        # patterns, (dir and patterns), or (dir_pattern).
+        (action, patterns, dir, dir_pattern) = self._parse_template_line(line)
+
+        # OK, now we know that the action is valid and we have the
+        # right number of words on the line for that action -- so we
+        # can proceed with minimal error-checking.
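+        # For example, the (hypothetical) MANIFEST.in line
+        #     recursive-include docs *.txt *.rst
+        # reaches this point as action='recursive-include', dir='docs',
+        # patterns=['*.txt', '*.rst'] and dir_pattern=None.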
+ if action == 'include': + self.debug_print("include " + ' '.join(patterns)) + for pattern in patterns: + if not self.include_pattern(pattern, anchor=1): + log.warn("warning: no files found matching '%s'", + pattern) + + elif action == 'exclude': + self.debug_print("exclude " + ' '.join(patterns)) + for pattern in patterns: + if not self.exclude_pattern(pattern, anchor=1): + log.warn(("warning: no previously-included files " + "found matching '%s'"), pattern) + + elif action == 'global-include': + self.debug_print("global-include " + ' '.join(patterns)) + for pattern in patterns: + if not self.include_pattern(pattern, anchor=0): + log.warn(("warning: no files found matching '%s' " + "anywhere in distribution"), pattern) + + elif action == 'global-exclude': + self.debug_print("global-exclude " + ' '.join(patterns)) + for pattern in patterns: + if not self.exclude_pattern(pattern, anchor=0): + log.warn(("warning: no previously-included files matching " + "'%s' found anywhere in distribution"), + pattern) + + elif action == 'recursive-include': + self.debug_print("recursive-include %s %s" % + (dir, ' '.join(patterns))) + for pattern in patterns: + if not self.include_pattern(pattern, prefix=dir): + log.warn(("warning: no files found matching '%s' " + "under directory '%s'"), + pattern, dir) + + elif action == 'recursive-exclude': + self.debug_print("recursive-exclude %s %s" % + (dir, ' '.join(patterns))) + for pattern in patterns: + if not self.exclude_pattern(pattern, prefix=dir): + log.warn(("warning: no previously-included files matching " + "'%s' found under directory '%s'"), + pattern, dir) + + elif action == 'graft': + self.debug_print("graft " + dir_pattern) + if not self.include_pattern(None, prefix=dir_pattern): + log.warn("warning: no directories found matching '%s'", + dir_pattern) + + elif action == 'prune': + self.debug_print("prune " + dir_pattern) + if not self.exclude_pattern(None, prefix=dir_pattern): + log.warn(("no previously-included directories found " + "matching '%s'"), dir_pattern) + else: + raise DistutilsInternalError( + "this cannot happen: invalid action '%s'" % action) + + + # -- Filtering/selection methods ----------------------------------- + + def include_pattern(self, pattern, anchor=1, prefix=None, is_regex=0): + """Select strings (presumably filenames) from 'self.files' that + match 'pattern', a Unix-style wildcard (glob) pattern. Patterns + are not quite the same as implemented by the 'fnmatch' module: '*' + and '?' match non-special characters, where "special" is platform- + dependent: slash on Unix; colon, slash, and backslash on + DOS/Windows; and colon on Mac OS. + + If 'anchor' is true (the default), then the pattern match is more + stringent: "*.py" will match "foo.py" but not "foo/bar.py". If + 'anchor' is false, both of these will match. + + If 'prefix' is supplied, then only filenames starting with 'prefix' + (itself a pattern) and ending with 'pattern', with anything in between + them, will match. 'anchor' is ignored in this case. + + If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and + 'pattern' is assumed to be either a string containing a regex or a + regex object -- no translation is done, the regex is just compiled + and used as-is. + + Selected strings will be added to self.files. + + Return True if files are found, False otherwise. + """ + # XXX docstring lying about what the special chars are? 
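+        # Example: include_pattern('*.py', anchor=1) selects 'setup.py' but
+        # not 'src/setup.py'; with anchor=0 both are selected, and with
+        # prefix='src' only filenames starting with 'src' are considered.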
+ files_found = False + pattern_re = translate_pattern(pattern, anchor, prefix, is_regex) + self.debug_print("include_pattern: applying regex r'%s'" % + pattern_re.pattern) + + # delayed loading of allfiles list + if self.allfiles is None: + self.findall() + + for name in self.allfiles: + if pattern_re.search(name): + self.debug_print(" adding " + name) + self.files.append(name) + files_found = True + return files_found + + + def exclude_pattern (self, pattern, + anchor=1, prefix=None, is_regex=0): + """Remove strings (presumably filenames) from 'files' that match + 'pattern'. Other parameters are the same as for + 'include_pattern()', above. + The list 'self.files' is modified in place. + Return True if files are found, False otherwise. + """ + files_found = False + pattern_re = translate_pattern(pattern, anchor, prefix, is_regex) + self.debug_print("exclude_pattern: applying regex r'%s'" % + pattern_re.pattern) + for i in range(len(self.files)-1, -1, -1): + if pattern_re.search(self.files[i]): + self.debug_print(" removing " + self.files[i]) + del self.files[i] + files_found = True + return files_found + + +# ---------------------------------------------------------------------- +# Utility functions + +def _find_all_simple(path): + """ + Find all files under 'path' + """ + results = ( + os.path.join(base, file) + for base, dirs, files in os.walk(path, followlinks=True) + for file in files + ) + return filter(os.path.isfile, results) + + +def findall(dir=os.curdir): + """ + Find all files under 'dir' and return the list of full filenames. + Unless dir is '.', return full filenames with dir prepended. + """ + files = _find_all_simple(dir) + if dir == os.curdir: + make_rel = functools.partial(os.path.relpath, start=dir) + files = map(make_rel, files) + return list(files) + + +def glob_to_re(pattern): + """Translate a shell-like glob pattern to a regular expression; return + a string containing the regex. Differs from 'fnmatch.translate()' in + that '*' does not match "special characters" (which are + platform-specific). + """ + pattern_re = fnmatch.translate(pattern) + + # '?' and '*' in the glob pattern become '.' and '.*' in the RE, which + # IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix, + # and by extension they shouldn't match such "special characters" under + # any OS. So change all non-escaped dots in the RE to match any + # character except the special characters (currently: just os.sep). 
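
A short demonstration of that point, assuming a POSIX host where the only "special" character is the slash:

    import fnmatch
    import re
    from distutils.filelist import glob_to_re

    print(fnmatch.translate('*.py'))   # uses '.*', which would happily cross '/'
    print(glob_to_re('*.py'))          # rewritten to use '[^/]*' instead
    print(bool(re.match(glob_to_re('*.py'), 'pkg/mod.py')))          # False
    print(bool(re.match(fnmatch.translate('*.py'), 'pkg/mod.py')))   # True
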
+ sep = os.sep + if os.sep == '\\': + # we're using a regex to manipulate a regex, so we need + # to escape the backslash twice + sep = r'\\\\' + escaped = r'\1[^%s]' % sep + pattern_re = re.sub(r'((?= self.threshold: + if args: + msg = msg % args + if level in (WARN, ERROR, FATAL): + stream = sys.stderr + else: + stream = sys.stdout + try: + stream.write('%s\n' % msg) + except UnicodeEncodeError: + # emulate backslashreplace error handler + encoding = stream.encoding + msg = msg.encode(encoding, "backslashreplace").decode(encoding) + stream.write('%s\n' % msg) + stream.flush() + + def log(self, level, msg, *args): + self._log(level, msg, args) + + def debug(self, msg, *args): + self._log(DEBUG, msg, args) + + def info(self, msg, *args): + self._log(INFO, msg, args) + + def warn(self, msg, *args): + self._log(WARN, msg, args) + + def error(self, msg, *args): + self._log(ERROR, msg, args) + + def fatal(self, msg, *args): + self._log(FATAL, msg, args) + +_global_log = Log() +log = _global_log.log +debug = _global_log.debug +info = _global_log.info +warn = _global_log.warn +error = _global_log.error +fatal = _global_log.fatal + +def set_threshold(level): + # return the old threshold for use from tests + old = _global_log.threshold + _global_log.threshold = level + return old + +def set_verbosity(v): + if v <= 0: + set_threshold(WARN) + elif v == 1: + set_threshold(INFO) + elif v >= 2: + set_threshold(DEBUG) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/msvc9compiler.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/msvc9compiler.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/msvc9compiler.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/msvc9compiler.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,788 @@ +"""distutils.msvc9compiler + +Contains MSVCCompiler, an implementation of the abstract CCompiler class +for the Microsoft Visual Studio 2008. + +The module is compatible with VS 2005 and VS 2008. You can find legacy support +for older versions of VS in distutils.msvccompiler. +""" + +# Written by Perry Stoll +# hacked by Robin Becker and Thomas Heller to do a better job of +# finding DevStudio (through the registry) +# ported to VS2005 and VS 2008 by Christian Heimes + +import os +import subprocess +import sys +import re + +from distutils.errors import DistutilsExecError, DistutilsPlatformError, \ + CompileError, LibError, LinkError +from distutils.ccompiler import CCompiler, gen_lib_options +from distutils import log +from distutils.util import get_platform + +import winreg + +RegOpenKeyEx = winreg.OpenKeyEx +RegEnumKey = winreg.EnumKey +RegEnumValue = winreg.EnumValue +RegError = winreg.error + +HKEYS = (winreg.HKEY_USERS, + winreg.HKEY_CURRENT_USER, + winreg.HKEY_LOCAL_MACHINE, + winreg.HKEY_CLASSES_ROOT) + +NATIVE_WIN64 = (sys.platform == 'win32' and sys.maxsize > 2**32) +if NATIVE_WIN64: + # Visual C++ is a 32-bit application, so we need to look in + # the corresponding registry branch, if we're running a + # 64-bit Python on Win64 + VS_BASE = r"Software\Wow6432Node\Microsoft\VisualStudio\%0.1f" + WINSDK_BASE = r"Software\Wow6432Node\Microsoft\Microsoft SDKs\Windows" + NET_BASE = r"Software\Wow6432Node\Microsoft\.NETFramework" +else: + VS_BASE = r"Software\Microsoft\VisualStudio\%0.1f" + WINSDK_BASE = r"Software\Microsoft\Microsoft SDKs\Windows" + NET_BASE = r"Software\Microsoft\.NETFramework" + +# A map keyed by get_platform() return values to values accepted by +# 'vcvarsall.bat'. 
Note a cross-compile may combine these (eg, 'x86_amd64' is +# the param to cross-compile on x86 targeting amd64.) +PLAT_TO_VCVARS = { + 'win32' : 'x86', + 'win-amd64' : 'amd64', +} + +class Reg: + """Helper class to read values from the registry + """ + + def get_value(cls, path, key): + for base in HKEYS: + d = cls.read_values(base, path) + if d and key in d: + return d[key] + raise KeyError(key) + get_value = classmethod(get_value) + + def read_keys(cls, base, key): + """Return list of registry keys.""" + try: + handle = RegOpenKeyEx(base, key) + except RegError: + return None + L = [] + i = 0 + while True: + try: + k = RegEnumKey(handle, i) + except RegError: + break + L.append(k) + i += 1 + return L + read_keys = classmethod(read_keys) + + def read_values(cls, base, key): + """Return dict of registry keys and values. + + All names are converted to lowercase. + """ + try: + handle = RegOpenKeyEx(base, key) + except RegError: + return None + d = {} + i = 0 + while True: + try: + name, value, type = RegEnumValue(handle, i) + except RegError: + break + name = name.lower() + d[cls.convert_mbcs(name)] = cls.convert_mbcs(value) + i += 1 + return d + read_values = classmethod(read_values) + + def convert_mbcs(s): + dec = getattr(s, "decode", None) + if dec is not None: + try: + s = dec("mbcs") + except UnicodeError: + pass + return s + convert_mbcs = staticmethod(convert_mbcs) + +class MacroExpander: + + def __init__(self, version): + self.macros = {} + self.vsbase = VS_BASE % version + self.load_macros(version) + + def set_macro(self, macro, path, key): + self.macros["$(%s)" % macro] = Reg.get_value(path, key) + + def load_macros(self, version): + self.set_macro("VCInstallDir", self.vsbase + r"\Setup\VC", "productdir") + self.set_macro("VSInstallDir", self.vsbase + r"\Setup\VS", "productdir") + self.set_macro("FrameworkDir", NET_BASE, "installroot") + try: + if version >= 8.0: + self.set_macro("FrameworkSDKDir", NET_BASE, + "sdkinstallrootv2.0") + else: + raise KeyError("sdkinstallrootv2.0") + except KeyError: + raise DistutilsPlatformError( + """Python was built with Visual Studio 2008; +extensions must be built with a compiler than can generate compatible binaries. +Visual Studio 2008 was not found on this system. If you have Cygwin installed, +you can try compiling with MingW32, by passing "-c mingw32" to setup.py.""") + + if version >= 9.0: + self.set_macro("FrameworkVersion", self.vsbase, "clr version") + self.set_macro("WindowsSdkDir", WINSDK_BASE, "currentinstallfolder") + else: + p = r"Software\Microsoft\NET Framework Setup\Product" + for base in HKEYS: + try: + h = RegOpenKeyEx(base, p) + except RegError: + continue + key = RegEnumKey(h, 0) + d = Reg.get_value(base, r"%s\%s" % (p, key)) + self.macros["$(FrameworkVersion)"] = d["version"] + + def sub(self, s): + for k, v in self.macros.items(): + s = s.replace(k, v) + return s + +def get_build_version(): + """Return the version of MSVC that was used to build Python. + + For Python 2.3 and up, the version number is included in + sys.version. For earlier versions, assume the compiler is MSVC 6. + """ + prefix = "MSC v." 
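
A worked instance of the arithmetic that follows, under the assumption of a Windows interpreter whose sys.version reports "MSC v.1929 64 bit (AMD64)" (a VS 2019 toolset):

    from distutils.msvc9compiler import get_build_version

    # s = "1929" -> majorVersion = 19 - 6 = 13, bumped to 14 (v13 was skipped)
    # minorVersion = 2 / 10.0 = 0.2
    print(get_build_version())   # 14.2 on such an interpreter
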
+ i = sys.version.find(prefix) + if i == -1: + return 6 + i = i + len(prefix) + s, rest = sys.version[i:].split(" ", 1) + majorVersion = int(s[:-2]) - 6 + if majorVersion >= 13: + # v13 was skipped and should be v14 + majorVersion += 1 + minorVersion = int(s[2:3]) / 10.0 + # I don't think paths are affected by minor version in version 6 + if majorVersion == 6: + minorVersion = 0 + if majorVersion >= 6: + return majorVersion + minorVersion + # else we don't know what version of the compiler this is + return None + +def normalize_and_reduce_paths(paths): + """Return a list of normalized paths with duplicates removed. + + The current order of paths is maintained. + """ + # Paths are normalized so things like: /a and /a/ aren't both preserved. + reduced_paths = [] + for p in paths: + np = os.path.normpath(p) + # XXX(nnorwitz): O(n**2), if reduced_paths gets long perhaps use a set. + if np not in reduced_paths: + reduced_paths.append(np) + return reduced_paths + +def removeDuplicates(variable): + """Remove duplicate values of an environment variable. + """ + oldList = variable.split(os.pathsep) + newList = [] + for i in oldList: + if i not in newList: + newList.append(i) + newVariable = os.pathsep.join(newList) + return newVariable + +def find_vcvarsall(version): + """Find the vcvarsall.bat file + + At first it tries to find the productdir of VS 2008 in the registry. If + that fails it falls back to the VS90COMNTOOLS env var. + """ + vsbase = VS_BASE % version + try: + productdir = Reg.get_value(r"%s\Setup\VC" % vsbase, + "productdir") + except KeyError: + log.debug("Unable to find productdir in registry") + productdir = None + + if not productdir or not os.path.isdir(productdir): + toolskey = "VS%0.f0COMNTOOLS" % version + toolsdir = os.environ.get(toolskey, None) + + if toolsdir and os.path.isdir(toolsdir): + productdir = os.path.join(toolsdir, os.pardir, os.pardir, "VC") + productdir = os.path.abspath(productdir) + if not os.path.isdir(productdir): + log.debug("%s is not a valid directory" % productdir) + return None + else: + log.debug("Env var %s is not set or invalid" % toolskey) + if not productdir: + log.debug("No productdir found") + return None + vcvarsall = os.path.join(productdir, "vcvarsall.bat") + if os.path.isfile(vcvarsall): + return vcvarsall + log.debug("Unable to find vcvarsall.bat") + return None + +def query_vcvarsall(version, arch="x86"): + """Launch vcvarsall.bat and read the settings from its environment + """ + vcvarsall = find_vcvarsall(version) + interesting = {"include", "lib", "libpath", "path"} + result = {} + + if vcvarsall is None: + raise DistutilsPlatformError("Unable to find vcvarsall.bat") + log.debug("Calling 'vcvarsall.bat %s' (version=%s)", arch, version) + popen = subprocess.Popen('"%s" %s & set' % (vcvarsall, arch), + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + try: + stdout, stderr = popen.communicate() + if popen.wait() != 0: + raise DistutilsPlatformError(stderr.decode("mbcs")) + + stdout = stdout.decode("mbcs") + for line in stdout.split("\n"): + line = Reg.convert_mbcs(line) + if '=' not in line: + continue + line = line.strip() + key, value = line.split('=', 1) + key = key.lower() + if key in interesting: + if value.endswith(os.pathsep): + value = value[:-1] + result[key] = removeDuplicates(value) + + finally: + popen.stdout.close() + popen.stderr.close() + + if len(result) != len(interesting): + raise ValueError(str(list(result.keys()))) + + return result + +# More globals +VERSION = get_build_version() +if VERSION < 8.0: + raise 
DistutilsPlatformError("VC %0.1f is not supported by this module" % VERSION) +# MACROS = MacroExpander(VERSION) + +class MSVCCompiler(CCompiler) : + """Concrete class that implements an interface to Microsoft Visual C++, + as defined by the CCompiler abstract class.""" + + compiler_type = 'msvc' + + # Just set this so CCompiler's constructor doesn't barf. We currently + # don't use the 'set_executables()' bureaucracy provided by CCompiler, + # as it really isn't necessary for this sort of single-compiler class. + # Would be nice to have a consistent interface with UnixCCompiler, + # though, so it's worth thinking about. + executables = {} + + # Private class data (need to distinguish C from C++ source for compiler) + _c_extensions = ['.c'] + _cpp_extensions = ['.cc', '.cpp', '.cxx'] + _rc_extensions = ['.rc'] + _mc_extensions = ['.mc'] + + # Needed for the filename generation methods provided by the + # base class, CCompiler. + src_extensions = (_c_extensions + _cpp_extensions + + _rc_extensions + _mc_extensions) + res_extension = '.res' + obj_extension = '.obj' + static_lib_extension = '.lib' + shared_lib_extension = '.dll' + static_lib_format = shared_lib_format = '%s%s' + exe_extension = '.exe' + + def __init__(self, verbose=0, dry_run=0, force=0): + CCompiler.__init__ (self, verbose, dry_run, force) + self.__version = VERSION + self.__root = r"Software\Microsoft\VisualStudio" + # self.__macros = MACROS + self.__paths = [] + # target platform (.plat_name is consistent with 'bdist') + self.plat_name = None + self.__arch = None # deprecated name + self.initialized = False + + def initialize(self, plat_name=None): + # multi-init means we would need to check platform same each time... + assert not self.initialized, "don't init multiple times" + if plat_name is None: + plat_name = get_platform() + # sanity check for platforms to prevent obscure errors later. + ok_plats = 'win32', 'win-amd64' + if plat_name not in ok_plats: + raise DistutilsPlatformError("--plat-name must be one of %s" % + (ok_plats,)) + + if "DISTUTILS_USE_SDK" in os.environ and "MSSdk" in os.environ and self.find_exe("cl.exe"): + # Assume that the SDK set up everything alright; don't try to be + # smarter + self.cc = "cl.exe" + self.linker = "link.exe" + self.lib = "lib.exe" + self.rc = "rc.exe" + self.mc = "mc.exe" + else: + # On x86, 'vcvars32.bat amd64' creates an env that doesn't work; + # to cross compile, you use 'x86_amd64'. + # On AMD64, 'vcvars32.bat amd64' is a native build env; to cross + # compile use 'x86' (ie, it runs the x86 compiler directly) + if plat_name == get_platform() or plat_name == 'win32': + # native build or cross-compile to win32 + plat_spec = PLAT_TO_VCVARS[plat_name] + else: + # cross compile from win32 -> some 64bit + plat_spec = PLAT_TO_VCVARS[get_platform()] + '_' + \ + PLAT_TO_VCVARS[plat_name] + + vc_env = query_vcvarsall(VERSION, plat_spec) + + self.__paths = vc_env['path'].split(os.pathsep) + os.environ['lib'] = vc_env['lib'] + os.environ['include'] = vc_env['include'] + + if len(self.__paths) == 0: + raise DistutilsPlatformError("Python was built with %s, " + "and extensions need to be built with the same " + "version of the compiler, but it isn't installed." 
+ % self.__product) + + self.cc = self.find_exe("cl.exe") + self.linker = self.find_exe("link.exe") + self.lib = self.find_exe("lib.exe") + self.rc = self.find_exe("rc.exe") # resource compiler + self.mc = self.find_exe("mc.exe") # message compiler + #self.set_path_env_var('lib') + #self.set_path_env_var('include') + + # extend the MSVC path with the current path + try: + for p in os.environ['path'].split(';'): + self.__paths.append(p) + except KeyError: + pass + self.__paths = normalize_and_reduce_paths(self.__paths) + os.environ['path'] = ";".join(self.__paths) + + self.preprocess_options = None + if self.__arch == "x86": + self.compile_options = [ '/nologo', '/Ox', '/MD', '/W3', + '/DNDEBUG'] + self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', + '/Z7', '/D_DEBUG'] + else: + # Win64 + self.compile_options = [ '/nologo', '/Ox', '/MD', '/W3', '/GS-' , + '/DNDEBUG'] + self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GS-', + '/Z7', '/D_DEBUG'] + + self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO'] + if self.__version >= 7: + self.ldflags_shared_debug = [ + '/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG' + ] + self.ldflags_static = [ '/nologo'] + + self.initialized = True + + # -- Worker methods ------------------------------------------------ + + def object_filenames(self, + source_filenames, + strip_dir=0, + output_dir=''): + # Copied from ccompiler.py, extended to return .res as 'object'-file + # for .rc input file + if output_dir is None: output_dir = '' + obj_names = [] + for src_name in source_filenames: + (base, ext) = os.path.splitext (src_name) + base = os.path.splitdrive(base)[1] # Chop off the drive + base = base[os.path.isabs(base):] # If abs, chop off leading / + if ext not in self.src_extensions: + # Better to raise an exception instead of silently continuing + # and later complain about sources and targets having + # different lengths + raise CompileError ("Don't know how to compile %s" % src_name) + if strip_dir: + base = os.path.basename (base) + if ext in self._rc_extensions: + obj_names.append (os.path.join (output_dir, + base + self.res_extension)) + elif ext in self._mc_extensions: + obj_names.append (os.path.join (output_dir, + base + self.res_extension)) + else: + obj_names.append (os.path.join (output_dir, + base + self.obj_extension)) + return obj_names + + + def compile(self, sources, + output_dir=None, macros=None, include_dirs=None, debug=0, + extra_preargs=None, extra_postargs=None, depends=None): + + if not self.initialized: + self.initialize() + compile_info = self._setup_compile(output_dir, macros, include_dirs, + sources, depends, extra_postargs) + macros, objects, extra_postargs, pp_opts, build = compile_info + + compile_opts = extra_preargs or [] + compile_opts.append ('/c') + if debug: + compile_opts.extend(self.compile_options_debug) + else: + compile_opts.extend(self.compile_options) + + for obj in objects: + try: + src, ext = build[obj] + except KeyError: + continue + if debug: + # pass the full pathname to MSVC in debug mode, + # this allows the debugger to find the source file + # without asking the user to browse for it + src = os.path.abspath(src) + + if ext in self._c_extensions: + input_opt = "/Tc" + src + elif ext in self._cpp_extensions: + input_opt = "/Tp" + src + elif ext in self._rc_extensions: + # compile .RC to .RES file + input_opt = src + output_opt = "/fo" + obj + try: + self.spawn([self.rc] + pp_opts + + [output_opt] + [input_opt]) + except DistutilsExecError as msg: + raise CompileError(msg) + 
continue + elif ext in self._mc_extensions: + # Compile .MC to .RC file to .RES file. + # * '-h dir' specifies the directory for the + # generated include file + # * '-r dir' specifies the target directory of the + # generated RC file and the binary message resource + # it includes + # + # For now (since there are no options to change this), + # we use the source-directory for the include file and + # the build directory for the RC file and message + # resources. This works at least for win32all. + h_dir = os.path.dirname(src) + rc_dir = os.path.dirname(obj) + try: + # first compile .MC to .RC and .H file + self.spawn([self.mc] + + ['-h', h_dir, '-r', rc_dir] + [src]) + base, _ = os.path.splitext (os.path.basename (src)) + rc_file = os.path.join (rc_dir, base + '.rc') + # then compile .RC to .RES file + self.spawn([self.rc] + + ["/fo" + obj] + [rc_file]) + + except DistutilsExecError as msg: + raise CompileError(msg) + continue + else: + # how to handle this file? + raise CompileError("Don't know how to compile %s to %s" + % (src, obj)) + + output_opt = "/Fo" + obj + try: + self.spawn([self.cc] + compile_opts + pp_opts + + [input_opt, output_opt] + + extra_postargs) + except DistutilsExecError as msg: + raise CompileError(msg) + + return objects + + + def create_static_lib(self, + objects, + output_libname, + output_dir=None, + debug=0, + target_lang=None): + + if not self.initialized: + self.initialize() + (objects, output_dir) = self._fix_object_args(objects, output_dir) + output_filename = self.library_filename(output_libname, + output_dir=output_dir) + + if self._need_link(objects, output_filename): + lib_args = objects + ['/OUT:' + output_filename] + if debug: + pass # XXX what goes here? + try: + self.spawn([self.lib] + lib_args) + except DistutilsExecError as msg: + raise LibError(msg) + else: + log.debug("skipping %s (up-to-date)", output_filename) + + + def link(self, + target_desc, + objects, + output_filename, + output_dir=None, + libraries=None, + library_dirs=None, + runtime_library_dirs=None, + export_symbols=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + build_temp=None, + target_lang=None): + + if not self.initialized: + self.initialize() + (objects, output_dir) = self._fix_object_args(objects, output_dir) + fixed_args = self._fix_lib_args(libraries, library_dirs, + runtime_library_dirs) + (libraries, library_dirs, runtime_library_dirs) = fixed_args + + if runtime_library_dirs: + self.warn ("I don't know what to do with 'runtime_library_dirs': " + + str (runtime_library_dirs)) + + lib_opts = gen_lib_options(self, + library_dirs, runtime_library_dirs, + libraries) + if output_dir is not None: + output_filename = os.path.join(output_dir, output_filename) + + if self._need_link(objects, output_filename): + if target_desc == CCompiler.EXECUTABLE: + if debug: + ldflags = self.ldflags_shared_debug[1:] + else: + ldflags = self.ldflags_shared[1:] + else: + if debug: + ldflags = self.ldflags_shared_debug + else: + ldflags = self.ldflags_shared + + export_opts = [] + for sym in (export_symbols or []): + export_opts.append("/EXPORT:" + sym) + + ld_args = (ldflags + lib_opts + export_opts + + objects + ['/OUT:' + output_filename]) + + # The MSVC linker generates .lib and .exp files, which cannot be + # suppressed by any linker switches. The .lib files may even be + # needed! Make sure they are generated in the temporary build + # directory. Since they have different names for debug and release + # builds, they can go into the same directory. 
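
A small usage sketch for the object_filenames() mapping that compile() above relies on; it assumes a Windows host with a suitable Visual Studio, since importing this module needs winreg and MSVC >= 8:

    from distutils.msvc9compiler import MSVCCompiler

    cc = MSVCCompiler()
    print(cc.object_filenames(['foo.c', 'ui.rc', 'events.mc'], output_dir='build'))
    # -> ['build\\foo.obj', 'build\\ui.res', 'build\\events.res']
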
+            build_temp = os.path.dirname(objects[0])
+            if export_symbols is not None:
+                (dll_name, dll_ext) = os.path.splitext(
+                    os.path.basename(output_filename))
+                implib_file = os.path.join(
+                    build_temp,
+                    self.library_filename(dll_name))
+                ld_args.append ('/IMPLIB:' + implib_file)
+
+            self.manifest_setup_ldargs(output_filename, build_temp, ld_args)
+
+            if extra_preargs:
+                ld_args[:0] = extra_preargs
+            if extra_postargs:
+                ld_args.extend(extra_postargs)
+
+            self.mkpath(os.path.dirname(output_filename))
+            try:
+                self.spawn([self.linker] + ld_args)
+            except DistutilsExecError as msg:
+                raise LinkError(msg)
+
+            # embed the manifest
+            # XXX - this is somewhat fragile - if mt.exe fails, distutils
+            # will still consider the DLL up-to-date, but it will not have a
+            # manifest. Maybe we should link to a temp file? OTOH, that
+            # implies a build environment error that shouldn't go undetected.
+            mfinfo = self.manifest_get_embed_info(target_desc, ld_args)
+            if mfinfo is not None:
+                mffilename, mfid = mfinfo
+                out_arg = '-outputresource:%s;%s' % (output_filename, mfid)
+                try:
+                    self.spawn(['mt.exe', '-nologo', '-manifest',
+                                mffilename, out_arg])
+                except DistutilsExecError as msg:
+                    raise LinkError(msg)
+        else:
+            log.debug("skipping %s (up-to-date)", output_filename)
+
+    def manifest_setup_ldargs(self, output_filename, build_temp, ld_args):
+        # If we need a manifest at all, an embedded manifest is recommended.
+        # See MSDN article titled
+        # "How to: Embed a Manifest Inside a C/C++ Application"
+        # (currently at http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx)
+        # Ask the linker to generate the manifest in the temp dir, so
+        # we can check it, and possibly embed it, later.
+        temp_manifest = os.path.join(
+                build_temp,
+                os.path.basename(output_filename) + ".manifest")
+        ld_args.append('/MANIFESTFILE:' + temp_manifest)
+
+    def manifest_get_embed_info(self, target_desc, ld_args):
+        # If a manifest should be embedded, return a tuple of
+        # (manifest_filename, resource_id). Returns None if no manifest
+        # should be embedded. See http://bugs.python.org/issue7833 for why
+        # we want to avoid any manifest for extension modules if we can.
+        for arg in ld_args:
+            if arg.startswith("/MANIFESTFILE:"):
+                temp_manifest = arg.split(":", 1)[1]
+                break
+        else:
+            # no /MANIFESTFILE so nothing to do.
+            return None
+        if target_desc == CCompiler.EXECUTABLE:
+            # by default, executables always get the manifest with the
+            # CRT referenced.
+            mfid = 1
+        else:
+            # Extension modules try and avoid any manifest if possible.
+            mfid = 2
+        temp_manifest = self._remove_visual_c_ref(temp_manifest)
+        if temp_manifest is None:
+            return None
+        return temp_manifest, mfid
+
+    def _remove_visual_c_ref(self, manifest_file):
+        try:
+            # Remove references to the Visual C runtime, so they will
+            # fall through to the Visual C dependency of Python.exe.
+            # This way, when installed for a restricted user (e.g.
+            # runtimes are not in WinSxS folder, but in Python's own
+            # folder), the runtimes do not need to be in every folder
+            # with .pyd's.
+            # Returns either the filename of the modified manifest or
+            # None if no manifest should be embedded.
+            manifest_f = open(manifest_file)
+            try:
+                manifest_buf = manifest_f.read()
+            finally:
+                manifest_f.close()
+            pattern = re.compile(
+                r"""<assemblyIdentity.*?name=("|')Microsoft\."""
+                r"""VC\d{2}\.CRT("|').*?(/>|</assemblyIdentity>)""",
+                re.DOTALL)
+            manifest_buf = re.sub(pattern, "", manifest_buf)
+            pattern = r"<DependentAssembly>\s*</DependentAssembly>"
+            manifest_buf = re.sub(pattern, "", manifest_buf)
+            # Now see if any other assemblies are referenced - if not, we
+            # don't want a manifest embedded.
+            pattern = re.compile(
+                r"""<assemblyIdentity.*?name=(?:"|')(.+?)(?:"|')"""
+                r""".*?(?:/>|</assemblyIdentity>)""", re.DOTALL)
+            if re.search(pattern, manifest_buf) is None:
+                return None
+
+            manifest_f = open(manifest_file, 'w')
+            try:
+                manifest_f.write(manifest_buf)
+                return manifest_file
+            finally:
+                manifest_f.close()
+        except OSError:
+            pass
+
+    # -- Miscellaneous methods -----------------------------------------
+    # These are all used by the 'gen_lib_options() function, in
+    # ccompiler.py.
+
+    def library_dir_option(self, dir):
+        return "/LIBPATH:" + dir
+
+    def runtime_library_dir_option(self, dir):
+        raise DistutilsPlatformError(
+              "don't know how to set runtime library search path for MSVC++")
+
+    def library_option(self, lib):
+        return self.library_filename(lib)
+
+
+    def find_library_file(self, dirs, lib, debug=0):
+        # Prefer a debugging library if found (and requested), but deal
+        # with it if we don't have one.
+        if debug:
+            try_names = [lib + "_d", lib]
+        else:
+            try_names = [lib]
+        for dir in dirs:
+            for name in try_names:
+                libfile = os.path.join(dir, self.library_filename (name))
+                if os.path.exists(libfile):
+                    return libfile
+        else:
+            # Oops, didn't find it in *any* of 'dirs'
+            return None
+
+    # Helper methods for using the MSVC registry settings
+
+    def find_exe(self, exe):
+        """Return path to an MSVC executable program.
+
+        Tries to find the program in several places: first, one of the
+        MSVC program search paths from the registry; next, the directories
+        in the PATH environment variable. If any of those work, return an
+        absolute path that is known to exist. If none of them work, just
+        return the original program name, 'exe'.
+        """
+        for p in self.__paths:
+            fn = os.path.join(os.path.abspath(p), exe)
+            if os.path.isfile(fn):
+                return fn
+
+        # didn't find it; try existing path
+        for p in os.environ['Path'].split(';'):
+            fn = os.path.join(os.path.abspath(p),exe)
+            if os.path.isfile(fn):
+                return fn
+
+        return exe
diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/msvccompiler.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/msvccompiler.py
--- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/msvccompiler.py 1970-01-01 00:00:00.000000000 +0000
+++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/msvccompiler.py 2022-10-24 17:35:39.000000000 +0000
@@ -0,0 +1,642 @@
+"""distutils.msvccompiler
+
+Contains MSVCCompiler, an implementation of the abstract CCompiler class
+for the Microsoft Visual Studio.
+""" + +# Written by Perry Stoll +# hacked by Robin Becker and Thomas Heller to do a better job of +# finding DevStudio (through the registry) + +import sys, os +from distutils.errors import \ + DistutilsExecError, DistutilsPlatformError, \ + CompileError, LibError, LinkError +from distutils.ccompiler import \ + CCompiler, gen_lib_options +from distutils import log + +_can_read_reg = False +try: + import winreg + + _can_read_reg = True + hkey_mod = winreg + + RegOpenKeyEx = winreg.OpenKeyEx + RegEnumKey = winreg.EnumKey + RegEnumValue = winreg.EnumValue + RegError = winreg.error + +except ImportError: + try: + import win32api + import win32con + _can_read_reg = True + hkey_mod = win32con + + RegOpenKeyEx = win32api.RegOpenKeyEx + RegEnumKey = win32api.RegEnumKey + RegEnumValue = win32api.RegEnumValue + RegError = win32api.error + except ImportError: + log.info("Warning: Can't read registry to find the " + "necessary compiler setting\n" + "Make sure that Python modules winreg, " + "win32api or win32con are installed.") + +if _can_read_reg: + HKEYS = (hkey_mod.HKEY_USERS, + hkey_mod.HKEY_CURRENT_USER, + hkey_mod.HKEY_LOCAL_MACHINE, + hkey_mod.HKEY_CLASSES_ROOT) + +def read_keys(base, key): + """Return list of registry keys.""" + try: + handle = RegOpenKeyEx(base, key) + except RegError: + return None + L = [] + i = 0 + while True: + try: + k = RegEnumKey(handle, i) + except RegError: + break + L.append(k) + i += 1 + return L + +def read_values(base, key): + """Return dict of registry keys and values. + + All names are converted to lowercase. + """ + try: + handle = RegOpenKeyEx(base, key) + except RegError: + return None + d = {} + i = 0 + while True: + try: + name, value, type = RegEnumValue(handle, i) + except RegError: + break + name = name.lower() + d[convert_mbcs(name)] = convert_mbcs(value) + i += 1 + return d + +def convert_mbcs(s): + dec = getattr(s, "decode", None) + if dec is not None: + try: + s = dec("mbcs") + except UnicodeError: + pass + return s + +class MacroExpander: + def __init__(self, version): + self.macros = {} + self.load_macros(version) + + def set_macro(self, macro, path, key): + for base in HKEYS: + d = read_values(base, path) + if d: + self.macros["$(%s)" % macro] = d[key] + break + + def load_macros(self, version): + vsbase = r"Software\Microsoft\VisualStudio\%0.1f" % version + self.set_macro("VCInstallDir", vsbase + r"\Setup\VC", "productdir") + self.set_macro("VSInstallDir", vsbase + r"\Setup\VS", "productdir") + net = r"Software\Microsoft\.NETFramework" + self.set_macro("FrameworkDir", net, "installroot") + try: + if version > 7.0: + self.set_macro("FrameworkSDKDir", net, "sdkinstallrootv1.1") + else: + self.set_macro("FrameworkSDKDir", net, "sdkinstallroot") + except KeyError as exc: # + raise DistutilsPlatformError( + """Python was built with Visual Studio 2003; +extensions must be built with a compiler than can generate compatible binaries. +Visual Studio 2003 was not found on this system. If you have Cygwin installed, +you can try compiling with MingW32, by passing "-c mingw32" to setup.py.""") + + p = r"Software\Microsoft\NET Framework Setup\Product" + for base in HKEYS: + try: + h = RegOpenKeyEx(base, p) + except RegError: + continue + key = RegEnumKey(h, 0) + d = read_values(base, r"%s\%s" % (p, key)) + self.macros["$(FrameworkVersion)"] = d["version"] + + def sub(self, s): + for k, v in self.macros.items(): + s = s.replace(k, v) + return s + +def get_build_version(): + """Return the version of MSVC that was used to build Python. 
+ + For Python 2.3 and up, the version number is included in + sys.version. For earlier versions, assume the compiler is MSVC 6. + """ + prefix = "MSC v." + i = sys.version.find(prefix) + if i == -1: + return 6 + i = i + len(prefix) + s, rest = sys.version[i:].split(" ", 1) + majorVersion = int(s[:-2]) - 6 + if majorVersion >= 13: + # v13 was skipped and should be v14 + majorVersion += 1 + minorVersion = int(s[2:3]) / 10.0 + # I don't think paths are affected by minor version in version 6 + if majorVersion == 6: + minorVersion = 0 + if majorVersion >= 6: + return majorVersion + minorVersion + # else we don't know what version of the compiler this is + return None + +def get_build_architecture(): + """Return the processor architecture. + + Possible results are "Intel" or "AMD64". + """ + + prefix = " bit (" + i = sys.version.find(prefix) + if i == -1: + return "Intel" + j = sys.version.find(")", i) + return sys.version[i+len(prefix):j] + +def normalize_and_reduce_paths(paths): + """Return a list of normalized paths with duplicates removed. + + The current order of paths is maintained. + """ + # Paths are normalized so things like: /a and /a/ aren't both preserved. + reduced_paths = [] + for p in paths: + np = os.path.normpath(p) + # XXX(nnorwitz): O(n**2), if reduced_paths gets long perhaps use a set. + if np not in reduced_paths: + reduced_paths.append(np) + return reduced_paths + + +class MSVCCompiler(CCompiler) : + """Concrete class that implements an interface to Microsoft Visual C++, + as defined by the CCompiler abstract class.""" + + compiler_type = 'msvc' + + # Just set this so CCompiler's constructor doesn't barf. We currently + # don't use the 'set_executables()' bureaucracy provided by CCompiler, + # as it really isn't necessary for this sort of single-compiler class. + # Would be nice to have a consistent interface with UnixCCompiler, + # though, so it's worth thinking about. + executables = {} + + # Private class data (need to distinguish C from C++ source for compiler) + _c_extensions = ['.c'] + _cpp_extensions = ['.cc', '.cpp', '.cxx'] + _rc_extensions = ['.rc'] + _mc_extensions = ['.mc'] + + # Needed for the filename generation methods provided by the + # base class, CCompiler. + src_extensions = (_c_extensions + _cpp_extensions + + _rc_extensions + _mc_extensions) + res_extension = '.res' + obj_extension = '.obj' + static_lib_extension = '.lib' + shared_lib_extension = '.dll' + static_lib_format = shared_lib_format = '%s%s' + exe_extension = '.exe' + + def __init__(self, verbose=0, dry_run=0, force=0): + CCompiler.__init__ (self, verbose, dry_run, force) + self.__version = get_build_version() + self.__arch = get_build_architecture() + if self.__arch == "Intel": + # x86 + if self.__version >= 7: + self.__root = r"Software\Microsoft\VisualStudio" + self.__macros = MacroExpander(self.__version) + else: + self.__root = r"Software\Microsoft\Devstudio" + self.__product = "Visual Studio version %s" % self.__version + else: + # Win64. 
Assume this was built with the platform SDK + self.__product = "Microsoft SDK compiler %s" % (self.__version + 6) + + self.initialized = False + + def initialize(self): + self.__paths = [] + if "DISTUTILS_USE_SDK" in os.environ and "MSSdk" in os.environ and self.find_exe("cl.exe"): + # Assume that the SDK set up everything alright; don't try to be + # smarter + self.cc = "cl.exe" + self.linker = "link.exe" + self.lib = "lib.exe" + self.rc = "rc.exe" + self.mc = "mc.exe" + else: + self.__paths = self.get_msvc_paths("path") + + if len(self.__paths) == 0: + raise DistutilsPlatformError("Python was built with %s, " + "and extensions need to be built with the same " + "version of the compiler, but it isn't installed." + % self.__product) + + self.cc = self.find_exe("cl.exe") + self.linker = self.find_exe("link.exe") + self.lib = self.find_exe("lib.exe") + self.rc = self.find_exe("rc.exe") # resource compiler + self.mc = self.find_exe("mc.exe") # message compiler + self.set_path_env_var('lib') + self.set_path_env_var('include') + + # extend the MSVC path with the current path + try: + for p in os.environ['path'].split(';'): + self.__paths.append(p) + except KeyError: + pass + self.__paths = normalize_and_reduce_paths(self.__paths) + os.environ['path'] = ";".join(self.__paths) + + self.preprocess_options = None + if self.__arch == "Intel": + self.compile_options = [ '/nologo', '/Ox', '/MD', '/W3', '/GX' , + '/DNDEBUG'] + self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GX', + '/Z7', '/D_DEBUG'] + else: + # Win64 + self.compile_options = [ '/nologo', '/Ox', '/MD', '/W3', '/GS-' , + '/DNDEBUG'] + self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GS-', + '/Z7', '/D_DEBUG'] + + self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO'] + if self.__version >= 7: + self.ldflags_shared_debug = [ + '/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG' + ] + else: + self.ldflags_shared_debug = [ + '/DLL', '/nologo', '/INCREMENTAL:no', '/pdb:None', '/DEBUG' + ] + self.ldflags_static = [ '/nologo'] + + self.initialized = True + + # -- Worker methods ------------------------------------------------ + + def object_filenames(self, + source_filenames, + strip_dir=0, + output_dir=''): + # Copied from ccompiler.py, extended to return .res as 'object'-file + # for .rc input file + if output_dir is None: output_dir = '' + obj_names = [] + for src_name in source_filenames: + (base, ext) = os.path.splitext (src_name) + base = os.path.splitdrive(base)[1] # Chop off the drive + base = base[os.path.isabs(base):] # If abs, chop off leading / + if ext not in self.src_extensions: + # Better to raise an exception instead of silently continuing + # and later complain about sources and targets having + # different lengths + raise CompileError ("Don't know how to compile %s" % src_name) + if strip_dir: + base = os.path.basename (base) + if ext in self._rc_extensions: + obj_names.append (os.path.join (output_dir, + base + self.res_extension)) + elif ext in self._mc_extensions: + obj_names.append (os.path.join (output_dir, + base + self.res_extension)) + else: + obj_names.append (os.path.join (output_dir, + base + self.obj_extension)) + return obj_names + + + def compile(self, sources, + output_dir=None, macros=None, include_dirs=None, debug=0, + extra_preargs=None, extra_postargs=None, depends=None): + + if not self.initialized: + self.initialize() + compile_info = self._setup_compile(output_dir, macros, include_dirs, + sources, depends, extra_postargs) + macros, objects, extra_postargs, pp_opts, build 
= compile_info + + compile_opts = extra_preargs or [] + compile_opts.append ('/c') + if debug: + compile_opts.extend(self.compile_options_debug) + else: + compile_opts.extend(self.compile_options) + + for obj in objects: + try: + src, ext = build[obj] + except KeyError: + continue + if debug: + # pass the full pathname to MSVC in debug mode, + # this allows the debugger to find the source file + # without asking the user to browse for it + src = os.path.abspath(src) + + if ext in self._c_extensions: + input_opt = "/Tc" + src + elif ext in self._cpp_extensions: + input_opt = "/Tp" + src + elif ext in self._rc_extensions: + # compile .RC to .RES file + input_opt = src + output_opt = "/fo" + obj + try: + self.spawn([self.rc] + pp_opts + + [output_opt] + [input_opt]) + except DistutilsExecError as msg: + raise CompileError(msg) + continue + elif ext in self._mc_extensions: + # Compile .MC to .RC file to .RES file. + # * '-h dir' specifies the directory for the + # generated include file + # * '-r dir' specifies the target directory of the + # generated RC file and the binary message resource + # it includes + # + # For now (since there are no options to change this), + # we use the source-directory for the include file and + # the build directory for the RC file and message + # resources. This works at least for win32all. + h_dir = os.path.dirname(src) + rc_dir = os.path.dirname(obj) + try: + # first compile .MC to .RC and .H file + self.spawn([self.mc] + + ['-h', h_dir, '-r', rc_dir] + [src]) + base, _ = os.path.splitext (os.path.basename (src)) + rc_file = os.path.join (rc_dir, base + '.rc') + # then compile .RC to .RES file + self.spawn([self.rc] + + ["/fo" + obj] + [rc_file]) + + except DistutilsExecError as msg: + raise CompileError(msg) + continue + else: + # how to handle this file? + raise CompileError("Don't know how to compile %s to %s" + % (src, obj)) + + output_opt = "/Fo" + obj + try: + self.spawn([self.cc] + compile_opts + pp_opts + + [input_opt, output_opt] + + extra_postargs) + except DistutilsExecError as msg: + raise CompileError(msg) + + return objects + + + def create_static_lib(self, + objects, + output_libname, + output_dir=None, + debug=0, + target_lang=None): + + if not self.initialized: + self.initialize() + (objects, output_dir) = self._fix_object_args(objects, output_dir) + output_filename = self.library_filename(output_libname, + output_dir=output_dir) + + if self._need_link(objects, output_filename): + lib_args = objects + ['/OUT:' + output_filename] + if debug: + pass # XXX what goes here? 
+ try: + self.spawn([self.lib] + lib_args) + except DistutilsExecError as msg: + raise LibError(msg) + else: + log.debug("skipping %s (up-to-date)", output_filename) + + + def link(self, + target_desc, + objects, + output_filename, + output_dir=None, + libraries=None, + library_dirs=None, + runtime_library_dirs=None, + export_symbols=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + build_temp=None, + target_lang=None): + + if not self.initialized: + self.initialize() + (objects, output_dir) = self._fix_object_args(objects, output_dir) + fixed_args = self._fix_lib_args(libraries, library_dirs, + runtime_library_dirs) + (libraries, library_dirs, runtime_library_dirs) = fixed_args + + if runtime_library_dirs: + self.warn ("I don't know what to do with 'runtime_library_dirs': " + + str (runtime_library_dirs)) + + lib_opts = gen_lib_options(self, + library_dirs, runtime_library_dirs, + libraries) + if output_dir is not None: + output_filename = os.path.join(output_dir, output_filename) + + if self._need_link(objects, output_filename): + if target_desc == CCompiler.EXECUTABLE: + if debug: + ldflags = self.ldflags_shared_debug[1:] + else: + ldflags = self.ldflags_shared[1:] + else: + if debug: + ldflags = self.ldflags_shared_debug + else: + ldflags = self.ldflags_shared + + export_opts = [] + for sym in (export_symbols or []): + export_opts.append("/EXPORT:" + sym) + + ld_args = (ldflags + lib_opts + export_opts + + objects + ['/OUT:' + output_filename]) + + # The MSVC linker generates .lib and .exp files, which cannot be + # suppressed by any linker switches. The .lib files may even be + # needed! Make sure they are generated in the temporary build + # directory. Since they have different names for debug and release + # builds, they can go into the same directory. + if export_symbols is not None: + (dll_name, dll_ext) = os.path.splitext( + os.path.basename(output_filename)) + implib_file = os.path.join( + os.path.dirname(objects[0]), + self.library_filename(dll_name)) + ld_args.append ('/IMPLIB:' + implib_file) + + if extra_preargs: + ld_args[:0] = extra_preargs + if extra_postargs: + ld_args.extend(extra_postargs) + + self.mkpath(os.path.dirname(output_filename)) + try: + self.spawn([self.linker] + ld_args) + except DistutilsExecError as msg: + raise LinkError(msg) + + else: + log.debug("skipping %s (up-to-date)", output_filename) + + + # -- Miscellaneous methods ----------------------------------------- + # These are all used by the 'gen_lib_options() function, in + # ccompiler.py. + + def library_dir_option(self, dir): + return "/LIBPATH:" + dir + + def runtime_library_dir_option(self, dir): + raise DistutilsPlatformError( + "don't know how to set runtime library search path for MSVC++") + + def library_option(self, lib): + return self.library_filename(lib) + + + def find_library_file(self, dirs, lib, debug=0): + # Prefer a debugging library if found (and requested), but deal + # with it if we don't have one. + if debug: + try_names = [lib + "_d", lib] + else: + try_names = [lib] + for dir in dirs: + for name in try_names: + libfile = os.path.join(dir, self.library_filename (name)) + if os.path.exists(libfile): + return libfile + else: + # Oops, didn't find it in *any* of 'dirs' + return None + + # Helper methods for using the MSVC registry settings + + def find_exe(self, exe): + """Return path to an MSVC executable program. 
+ + Tries to find the program in several places: first, one of the + MSVC program search paths from the registry; next, the directories + in the PATH environment variable. If any of those work, return an + absolute path that is known to exist. If none of them work, just + return the original program name, 'exe'. + """ + for p in self.__paths: + fn = os.path.join(os.path.abspath(p), exe) + if os.path.isfile(fn): + return fn + + # didn't find it; try existing path + for p in os.environ['Path'].split(';'): + fn = os.path.join(os.path.abspath(p),exe) + if os.path.isfile(fn): + return fn + + return exe + + def get_msvc_paths(self, path, platform='x86'): + """Get a list of devstudio directories (include, lib or path). + + Return a list of strings. The list will be empty if unable to + access the registry or appropriate registry keys not found. + """ + if not _can_read_reg: + return [] + + path = path + " dirs" + if self.__version >= 7: + key = (r"%s\%0.1f\VC\VC_OBJECTS_PLATFORM_INFO\Win32\Directories" + % (self.__root, self.__version)) + else: + key = (r"%s\6.0\Build System\Components\Platforms" + r"\Win32 (%s)\Directories" % (self.__root, platform)) + + for base in HKEYS: + d = read_values(base, key) + if d: + if self.__version >= 7: + return self.__macros.sub(d[path]).split(";") + else: + return d[path].split(";") + # MSVC 6 seems to create the registry entries we need only when + # the GUI is run. + if self.__version == 6: + for base in HKEYS: + if read_values(base, r"%s\6.0" % self.__root) is not None: + self.warn("It seems you have Visual Studio 6 installed, " + "but the expected registry settings are not present.\n" + "You must at least run the Visual Studio GUI once " + "so that these entries are created.") + break + return [] + + def set_path_env_var(self, name): + """Set environment variable 'name' to an MSVC path type value. + + This is equivalent to a SET command prior to execution of spawned + commands. + """ + + if name == "lib": + p = self.get_msvc_paths("library") + else: + p = self.get_msvc_paths(name) + if p: + os.environ[name] = ';'.join(p) + + +if get_build_version() >= 8.0: + log.debug("Importing new compiler from distutils.msvc9compiler") + OldMSVCCompiler = MSVCCompiler + from distutils.msvc9compiler import MSVCCompiler + # get_build_architecture not really relevant now we support cross-compile + from distutils.msvc9compiler import MacroExpander diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/spawn.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/spawn.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/spawn.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/spawn.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,129 @@ +"""distutils.spawn + +Provides the 'spawn()' function, a front-end to various platform- +specific functions for launching another program in a sub-process. +Also provides the 'find_executable()' to search the path for a given +executable name. +""" + +import sys +import os +import subprocess + +from distutils.errors import DistutilsPlatformError, DistutilsExecError +from distutils.debug import DEBUG +from distutils import log + + +if sys.platform == 'darwin': + _cfg_target = None + _cfg_target_split = None + + +def spawn(cmd, search_path=1, verbose=0, dry_run=0): + """Run another program, specified as a command list 'cmd', in a new process. + + 'cmd' is just the argument list for the new process, ie. + cmd[0] is the program to run and cmd[1:] are the rest of its arguments. 
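
One consequence of the conditional re-export at the end of msvccompiler.py above, sketched here under the assumption of a Windows CPython built with MSVC >= 8, is that both module paths expose the same compiler class:

    import distutils.msvccompiler as msvccompiler
    import distutils.msvc9compiler as msvc9compiler

    print(msvccompiler.MSVCCompiler is msvc9compiler.MSVCCompiler)  # True
    print(msvccompiler.OldMSVCCompiler.__module__)                  # 'distutils.msvccompiler'
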
+ There is no way to run a program with a name different from that of its + executable. + + If 'search_path' is true (the default), the system's executable + search path will be used to find the program; otherwise, cmd[0] + must be the exact path to the executable. If 'dry_run' is true, + the command will not actually be run. + + Raise DistutilsExecError if running the program fails in any way; just + return on success. + """ + # cmd is documented as a list, but just in case some code passes a tuple + # in, protect our %-formatting code against horrible death + cmd = list(cmd) + + log.info(' '.join(cmd)) + if dry_run: + return + + if search_path: + executable = find_executable(cmd[0]) + if executable is not None: + cmd[0] = executable + + env = None + if sys.platform == 'darwin': + global _cfg_target, _cfg_target_split + if _cfg_target is None: + from distutils import sysconfig + _cfg_target = sysconfig.get_config_var( + 'MACOSX_DEPLOYMENT_TARGET') or '' + if _cfg_target: + _cfg_target_split = [int(x) for x in _cfg_target.split('.')] + if _cfg_target: + # Ensure that the deployment target of the build process is not + # less than 10.3 if the interpreter was built for 10.3 or later. + # This ensures extension modules are built with correct + # compatibility values, specifically LDSHARED which can use + # '-undefined dynamic_lookup' which only works on >= 10.3. + cur_target = os.environ.get('MACOSX_DEPLOYMENT_TARGET', _cfg_target) + cur_target_split = [int(x) for x in cur_target.split('.')] + if _cfg_target_split[:2] >= [10, 3] and cur_target_split[:2] < [10, 3]: + my_msg = ('$MACOSX_DEPLOYMENT_TARGET mismatch: ' + 'now "%s" but "%s" during configure;' + 'must use 10.3 or later' + % (cur_target, _cfg_target)) + raise DistutilsPlatformError(my_msg) + env = dict(os.environ, + MACOSX_DEPLOYMENT_TARGET=cur_target) + + try: + proc = subprocess.Popen(cmd, env=env) + proc.wait() + exitcode = proc.returncode + except OSError as exc: + if not DEBUG: + cmd = cmd[0] + raise DistutilsExecError( + "command %r failed: %s" % (cmd, exc.args[-1])) from exc + + if exitcode: + if not DEBUG: + cmd = cmd[0] + raise DistutilsExecError( + "command %r failed with exit code %s" % (cmd, exitcode)) + + +def find_executable(executable, path=None): + """Tries to find 'executable' in the directories listed in 'path'. + + A string listing directories separated by 'os.pathsep'; defaults to + os.environ['PATH']. Returns the complete filename or None if not found. 
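
A minimal usage sketch for spawn() and the find_executable() helper documented here; the 'ls' command is only an example and assumes a POSIX host:

    from distutils.spawn import spawn, find_executable

    print(find_executable('ls'))    # e.g. '/bin/ls', or None if not on PATH
    spawn(['ls', '-l'], dry_run=1)  # only logs the command
    spawn(['ls', '-l'])             # runs it; raises DistutilsExecError on failure
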
+ """ + _, ext = os.path.splitext(executable) + if (sys.platform == 'win32') and (ext != '.exe'): + executable = executable + '.exe' + + if os.path.isfile(executable): + return executable + + if path is None: + path = os.environ.get('PATH', None) + if path is None: + try: + path = os.confstr("CS_PATH") + except (AttributeError, ValueError): + # os.confstr() or CS_PATH is not available + path = os.defpath + # bpo-35755: Don't use os.defpath if the PATH environment variable is + # set to an empty string + + # PATH='' doesn't match, whereas PATH=':' looks in the current directory + if not path: + return None + + paths = path.split(os.pathsep) + for p in paths: + f = os.path.join(p, executable) + if os.path.isfile(f): + # the file exists, we have a shot at spawn working + return f + return None diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/sysconfig.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/sysconfig.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/sysconfig.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/sysconfig.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,346 @@ +"""Provide access to Python's configuration information. The specific +configuration variables available depend heavily on the platform and +configuration. The values may be retrieved using +get_config_var(name), and the list of variables is available via +get_config_vars().keys(). Additional convenience functions are also +available. + +Written by: Fred L. Drake, Jr. +Email: +""" + +import _imp +import os +import re +import sys +import warnings + +from functools import partial + +from .errors import DistutilsPlatformError + +from sysconfig import ( + _PREFIX as PREFIX, + _BASE_PREFIX as BASE_PREFIX, + _EXEC_PREFIX as EXEC_PREFIX, + _BASE_EXEC_PREFIX as BASE_EXEC_PREFIX, + _PROJECT_BASE as project_base, + _PYTHON_BUILD as python_build, + _init_posix as sysconfig_init_posix, + parse_config_h as sysconfig_parse_config_h, + + _init_non_posix, + + _variable_rx, + _findvar1_rx, + _findvar2_rx, + + expand_makefile_vars, + is_python_build, + get_config_h_filename, + get_config_var, + get_config_vars, + get_makefile_filename, + get_python_version, +) + +# This is better than +# from sysconfig import _CONFIG_VARS as _config_vars +# because it makes sure that the global dictionary is initialized +# which might not be true in the time of import. +_config_vars = get_config_vars() + +warnings.warn( + 'The distutils.sysconfig module is deprecated, use sysconfig instead', + DeprecationWarning, + stacklevel=2 +) + + +# Following functions are the same as in sysconfig but with different API +def parse_config_h(fp, g=None): + return sysconfig_parse_config_h(fp, vars=g) + + +_python_build = partial(is_python_build, check_home=True) +_init_posix = partial(sysconfig_init_posix, _config_vars) +_init_nt = partial(_init_non_posix, _config_vars) + + +# Similar function is also implemented in sysconfig as _parse_makefile +# but without the parsing capabilities of distutils.text_file.TextFile. +def parse_makefile(fn, g=None): + """Parse a Makefile-style file. + A dictionary containing name/value pairs is returned. If an + optional dictionary is passed in as the second argument, it is + used instead of a new dictionary. 
+ """ + from distutils.text_file import TextFile + fp = TextFile(fn, strip_comments=1, skip_blanks=1, join_lines=1, errors="surrogateescape") + + if g is None: + g = {} + done = {} + notdone = {} + + while True: + line = fp.readline() + if line is None: # eof + break + m = re.match(_variable_rx, line) + if m: + n, v = m.group(1, 2) + v = v.strip() + # `$$' is a literal `$' in make + tmpv = v.replace('$$', '') + + if "$" in tmpv: + notdone[n] = v + else: + try: + v = int(v) + except ValueError: + # insert literal `$' + done[n] = v.replace('$$', '$') + else: + done[n] = v + + # Variables with a 'PY_' prefix in the makefile. These need to + # be made available without that prefix through sysconfig. + # Special care is needed to ensure that variable expansion works, even + # if the expansion uses the name without a prefix. + renamed_variables = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS') + + # do variable interpolation here + while notdone: + for name in list(notdone): + value = notdone[name] + m = re.search(_findvar1_rx, value) or re.search(_findvar2_rx, value) + if m: + n = m.group(1) + found = True + if n in done: + item = str(done[n]) + elif n in notdone: + # get it on a subsequent round + found = False + elif n in os.environ: + # do it like make: fall back to environment + item = os.environ[n] + + elif n in renamed_variables: + if name.startswith('PY_') and name[3:] in renamed_variables: + item = "" + + elif 'PY_' + n in notdone: + found = False + + else: + item = str(done['PY_' + n]) + else: + done[n] = item = "" + if found: + after = value[m.end():] + value = value[:m.start()] + item + after + if "$" in after: + notdone[name] = value + else: + try: value = int(value) + except ValueError: + done[name] = value.strip() + else: + done[name] = value + del notdone[name] + + if name.startswith('PY_') \ + and name[3:] in renamed_variables: + + name = name[3:] + if name not in done: + done[name] = value + else: + # bogus variable reference; just drop it since we can't deal + del notdone[name] + + fp.close() + + # strip spurious spaces + for k, v in done.items(): + if isinstance(v, str): + done[k] = v.strip() + + # save the results in the global dictionary + g.update(done) + return g + + +# Following functions are deprecated together with this module and they +# have no direct replacement + +# Calculate the build qualifier flags if they are defined. Adding the flags +# to the include and lib directories only makes sense for an installation, not +# an in-source build. +build_flags = '' +try: + if not python_build: + build_flags = sys.abiflags +except AttributeError: + # It's not a configure-based build, so the sys module doesn't have + # this attribute, which is fine. + pass + + +def customize_compiler(compiler): + """Do any platform-specific customization of a CCompiler instance. + + Mainly needed on Unix, so we can plug in the information that + varies across Unices and is stored in Python's Makefile. + """ + if compiler.compiler_type == "unix": + if sys.platform == "darwin": + # Perform first-time customization of compiler-related + # config vars on OS X now that we know we need a compiler. + # This is primarily to support Pythons from binary + # installers. The kind and paths to build tools on + # the user system may vary significantly from the system + # that Python itself was built on. Also the user OS + # version and build tools may not support the same set + # of CPU architectures for universal builds. 
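
A sketch of the interpolation performed by parse_makefile(); the file name and its contents are invented for the example, and importing distutils.sysconfig triggers the DeprecationWarning registered above:

    import os
    from distutils.sysconfig import parse_makefile

    with open('demo.mk', 'w') as f:
        f.write("PY_CFLAGS=-O2 $(EXTRA)\nEXTRA=-Wall\nOPT=$(PY_CFLAGS)\n")

    mkvars = parse_makefile('demo.mk')
    print(mkvars['CFLAGS'])   # '-O2 -Wall'  (PY_ prefix stripped for renamed variables)
    print(mkvars['OPT'])      # '-O2 -Wall'  ($(PY_CFLAGS) expanded)
    os.remove('demo.mk')
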
+ if not _config_vars.get('CUSTOMIZED_OSX_COMPILER'): + import _osx_support + _osx_support.customize_compiler(_config_vars) + _config_vars['CUSTOMIZED_OSX_COMPILER'] = 'True' + + (cc, cxx, cflags, ccshared, ldshared, shlib_suffix, ar, ar_flags) = \ + get_config_vars('CC', 'CXX', 'CFLAGS', + 'CCSHARED', 'LDSHARED', 'SHLIB_SUFFIX', 'AR', 'ARFLAGS') + + if 'CC' in os.environ: + newcc = os.environ['CC'] + if (sys.platform == 'darwin' + and 'LDSHARED' not in os.environ + and ldshared.startswith(cc)): + # On OS X, if CC is overridden, use that as the default + # command for LDSHARED as well + ldshared = newcc + ldshared[len(cc):] + cc = newcc + if 'CXX' in os.environ: + cxx = os.environ['CXX'] + if 'LDSHARED' in os.environ: + ldshared = os.environ['LDSHARED'] + if 'CPP' in os.environ: + cpp = os.environ['CPP'] + else: + cpp = cc + " -E" # not always + if 'LDFLAGS' in os.environ: + ldshared = ldshared + ' ' + os.environ['LDFLAGS'] + if 'CFLAGS' in os.environ: + cflags = cflags + ' ' + os.environ['CFLAGS'] + ldshared = ldshared + ' ' + os.environ['CFLAGS'] + if 'CPPFLAGS' in os.environ: + cpp = cpp + ' ' + os.environ['CPPFLAGS'] + cflags = cflags + ' ' + os.environ['CPPFLAGS'] + ldshared = ldshared + ' ' + os.environ['CPPFLAGS'] + if 'AR' in os.environ: + ar = os.environ['AR'] + if 'ARFLAGS' in os.environ: + archiver = ar + ' ' + os.environ['ARFLAGS'] + else: + archiver = ar + ' ' + ar_flags + + cc_cmd = cc + ' ' + cflags + compiler.set_executables( + preprocessor=cpp, + compiler=cc_cmd, + compiler_so=cc_cmd + ' ' + ccshared, + compiler_cxx=cxx, + linker_so=ldshared, + linker_exe=cc, + archiver=archiver) + + compiler.shared_lib_extension = shlib_suffix + + +def get_python_inc(plat_specific=0, prefix=None): + """Return the directory containing installed Python header files. + + If 'plat_specific' is false (the default), this is the path to the + non-platform-specific header files, i.e. Python.h and so on; + otherwise, this is the path to platform-specific header files + (namely pyconfig.h). + + If 'prefix' is supplied, use it instead of sys.base_prefix or + sys.base_exec_prefix -- i.e., ignore 'plat_specific'. + """ + if prefix is None: + prefix = plat_specific and BASE_EXEC_PREFIX or BASE_PREFIX + if os.name == "posix": + if python_build: + # Assume the executable is in the build directory. The + # pyconfig.h file should be in the same directory. Since + # the build directory may not be the source directory, we + # must use "srcdir" from the makefile to find the "Include" + # directory. + if plat_specific: + return project_base + else: + incdir = os.path.join(get_config_var('srcdir'), 'Include') + return os.path.normpath(incdir) + python_dir = 'python' + get_python_version() + build_flags + return os.path.join(prefix, "include", python_dir) + elif os.name == "nt": + if python_build: + # Include both the include and PC dir to ensure we can find + # pyconfig.h + return (os.path.join(prefix, "include") + os.path.pathsep + + os.path.join(prefix, "PC")) + return os.path.join(prefix, "include") + else: + raise DistutilsPlatformError( + "I don't know where Python installs its C header files " + "on platform '%s'" % os.name) + + +def get_python_lib(plat_specific=0, standard_lib=0, prefix=None): + """Return the directory containing the Python library (standard or + site additions). + + If 'plat_specific' is true, return the directory containing + platform-specific modules, i.e. any module from a non-pure-Python + module distribution; otherwise, return the platform-shared library + directory. 
If 'standard_lib' is true, return the directory + containing standard Python library modules; otherwise, return the + directory for site-specific modules. + + If 'prefix' is supplied, use it instead of sys.base_prefix or + sys.base_exec_prefix -- i.e., ignore 'plat_specific'. + """ + if prefix is None: + if standard_lib: + prefix = plat_specific and BASE_EXEC_PREFIX or BASE_PREFIX + else: + prefix = plat_specific and EXEC_PREFIX or PREFIX + + if os.name == "posix": + if plat_specific or standard_lib: + # Platform-specific modules (any module from a non-pure-Python + # module distribution) or standard Python library modules. + libdir = sys.platlibdir + else: + # Pure Python + libdir = "lib" + libpython = os.path.join(prefix, libdir, + "python" + get_python_version()) + if standard_lib: + return libpython + else: + return os.path.join(libpython, "site-packages") + elif os.name == "nt": + if standard_lib: + return os.path.join(prefix, "Lib") + else: + return os.path.join(prefix, "Lib", "site-packages") + else: + raise DistutilsPlatformError( + "I don't know where Python installs its library " + "on platform '%s'" % os.name) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/Setup.sample python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/Setup.sample --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/Setup.sample 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/Setup.sample 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,67 @@ +# Setup file from the pygame project + +#--StartConfig +SDL = -I/usr/include/SDL -D_REENTRANT -lSDL +FONT = -lSDL_ttf +IMAGE = -lSDL_image +MIXER = -lSDL_mixer +SMPEG = -lsmpeg +PNG = -lpng +JPEG = -ljpeg +SCRAP = -lX11 +PORTMIDI = -lportmidi +PORTTIME = -lporttime +#--EndConfig + +#DEBUG = -C-W -C-Wall +DEBUG = + +#the following modules are optional. you will want to compile +#everything you can, but you can ignore ones you don't have +#dependencies for, just comment them out + +imageext src/imageext.c $(SDL) $(IMAGE) $(PNG) $(JPEG) $(DEBUG) +font src/font.c $(SDL) $(FONT) $(DEBUG) +mixer src/mixer.c $(SDL) $(MIXER) $(DEBUG) +mixer_music src/music.c $(SDL) $(MIXER) $(DEBUG) +_numericsurfarray src/_numericsurfarray.c $(SDL) $(DEBUG) +_numericsndarray src/_numericsndarray.c $(SDL) $(MIXER) $(DEBUG) +movie src/movie.c $(SDL) $(SMPEG) $(DEBUG) +scrap src/scrap.c $(SDL) $(SCRAP) $(DEBUG) +_camera src/_camera.c src/camera_v4l2.c src/camera_v4l.c $(SDL) $(DEBUG) +pypm src/pypm.c $(SDL) $(PORTMIDI) $(PORTTIME) $(DEBUG) + +GFX = src/SDL_gfx/SDL_gfxPrimitives.c +#GFX = src/SDL_gfx/SDL_gfxBlitFunc.c src/SDL_gfx/SDL_gfxPrimitives.c +gfxdraw src/gfxdraw.c $(SDL) $(GFX) $(DEBUG) + + + +#these modules are required for pygame to run. they only require +#SDL as a dependency. 
these should not be altered + +base src/base.c $(SDL) $(DEBUG) +cdrom src/cdrom.c $(SDL) $(DEBUG) +color src/color.c $(SDL) $(DEBUG) +constants src/constants.c $(SDL) $(DEBUG) +display src/display.c $(SDL) $(DEBUG) +event src/event.c $(SDL) $(DEBUG) +fastevent src/fastevent.c src/fastevents.c $(SDL) $(DEBUG) +key src/key.c $(SDL) $(DEBUG) +mouse src/mouse.c $(SDL) $(DEBUG) +rect src/rect.c $(SDL) $(DEBUG) +rwobject src/rwobject.c $(SDL) $(DEBUG) +surface src/surface.c src/alphablit.c src/surface_fill.c $(SDL) $(DEBUG) +surflock src/surflock.c $(SDL) $(DEBUG) +time src/time.c $(SDL) $(DEBUG) +joystick src/joystick.c $(SDL) $(DEBUG) +draw src/draw.c $(SDL) $(DEBUG) +image src/image.c $(SDL) $(DEBUG) +overlay src/overlay.c $(SDL) $(DEBUG) +transform src/transform.c src/rotozoom.c src/scale2x.c src/scale_mmx.c $(SDL) $(DEBUG) +mask src/mask.c src/bitmask.c $(SDL) $(DEBUG) +bufferproxy src/bufferproxy.c $(SDL) $(DEBUG) +pixelarray src/pixelarray.c $(SDL) $(DEBUG) +_arraysurfarray src/_arraysurfarray.c $(SDL) $(DEBUG) + + diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/__init__.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/__init__.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/__init__.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/__init__.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,41 @@ +"""Test suite for distutils. + +This test suite consists of a collection of test modules in the +distutils.tests package. Each test module has a name starting with +'test' and contains a function test_suite(). The function is expected +to return an initialized unittest.TestSuite instance. + +Tests for the command classes in the distutils.command package are +included in distutils.tests as well, instead of using a separate +distutils.command.tests package, since command identification is done +by import rather than matching pre-defined names. + +""" + +import os +import sys +import unittest +from test.support import run_unittest +from test.support.warnings_helper import save_restore_warnings_filters + + +here = os.path.dirname(__file__) or os.curdir + + +def test_suite(): + suite = unittest.TestSuite() + for fn in os.listdir(here): + if fn.startswith("test") and fn.endswith(".py"): + modname = "distutils.tests." + fn[:-3] + # bpo-40055: Save/restore warnings filters to leave them unchanged. + # Importing tests imports docutils which imports pkg_resources + # which adds a warnings filter. + with save_restore_warnings_filters(): + __import__(modname) + module = sys.modules[modname] + suite.addTest(module.test_suite()) + return suite + + +if __name__ == "__main__": + run_unittest(test_suite()) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/includetest.rst python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/includetest.rst --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/includetest.rst 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/includetest.rst 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1 @@ +This should be included. 
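The distutils.sysconfig shim added above is explicitly deprecated in favour of the standard sysconfig module (it raises a DeprecationWarning saying exactly that), and the paths and configuration values its helpers return can also be read directly from sysconfig. A minimal sketch of those counterparts, assuming a default installation scheme; the path keys 'include', 'platinclude', 'purelib' and 'platlib' are standard sysconfig scheme keys and are not taken from this patch:

    import sysconfig

    # Rough counterparts of the deprecated helpers defined in the new
    # distutils/sysconfig.py shim above (behaviour can differ for
    # in-tree/source builds, which the shim handles specially):
    include_dir = sysconfig.get_path('include')       # ~ get_python_inc()
    plat_include = sysconfig.get_path('platinclude')  # ~ get_python_inc(plat_specific=1)
    purelib_dir = sysconfig.get_path('purelib')       # ~ get_python_lib()
    platlib_dir = sysconfig.get_path('platlib')       # ~ get_python_lib(plat_specific=1)
    cflags = sysconfig.get_config_var('CFLAGS')       # same spelling via get_config_var()

    print(include_dir, plat_include, purelib_dir, platlib_dir, cflags)
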
diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/support.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/support.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/support.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/support.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,209 @@ +"""Support code for distutils test cases.""" +import os +import sys +import shutil +import tempfile +import unittest +import sysconfig +from copy import deepcopy +from test.support import os_helper + +from distutils import log +from distutils.log import DEBUG, INFO, WARN, ERROR, FATAL +from distutils.core import Distribution + + +class LoggingSilencer(object): + + def setUp(self): + super().setUp() + self.threshold = log.set_threshold(log.FATAL) + # catching warnings + # when log will be replaced by logging + # we won't need such monkey-patch anymore + self._old_log = log.Log._log + log.Log._log = self._log + self.logs = [] + + def tearDown(self): + log.set_threshold(self.threshold) + log.Log._log = self._old_log + super().tearDown() + + def _log(self, level, msg, args): + if level not in (DEBUG, INFO, WARN, ERROR, FATAL): + raise ValueError('%s wrong log level' % str(level)) + if not isinstance(msg, str): + raise TypeError("msg should be str, not '%.200s'" + % (type(msg).__name__)) + self.logs.append((level, msg, args)) + + def get_logs(self, *levels): + return [msg % args for level, msg, args + in self.logs if level in levels] + + def clear_logs(self): + self.logs = [] + + +class TempdirManager(object): + """Mix-in class that handles temporary directories for test cases. + + This is intended to be used with unittest.TestCase. + """ + + def setUp(self): + super().setUp() + self.old_cwd = os.getcwd() + self.tempdirs = [] + + def tearDown(self): + # Restore working dir, for Solaris and derivatives, where rmdir() + # on the current directory fails. + os.chdir(self.old_cwd) + super().tearDown() + while self.tempdirs: + tmpdir = self.tempdirs.pop() + os_helper.rmtree(tmpdir) + + def mkdtemp(self): + """Create a temporary directory that will be cleaned up. + + Returns the path of the directory. + """ + d = tempfile.mkdtemp() + self.tempdirs.append(d) + return d + + def write_file(self, path, content='xxx'): + """Writes a file in the given path. + + + path can be a string or a sequence. + """ + if isinstance(path, (list, tuple)): + path = os.path.join(*path) + f = open(path, 'w') + try: + f.write(content) + finally: + f.close() + + def create_dist(self, pkg_name='foo', **kw): + """Will generate a test environment. + + This function creates: + - a Distribution instance using keywords + - a temporary directory with a package structure + + It returns the package directory and the distribution + instance. 
+ """ + tmp_dir = self.mkdtemp() + pkg_dir = os.path.join(tmp_dir, pkg_name) + os.mkdir(pkg_dir) + dist = Distribution(attrs=kw) + + return pkg_dir, dist + + +class DummyCommand: + """Class to store options for retrieval via set_undefined_options().""" + + def __init__(self, **kwargs): + for kw, val in kwargs.items(): + setattr(self, kw, val) + + def ensure_finalized(self): + pass + + +class EnvironGuard(object): + + def setUp(self): + super(EnvironGuard, self).setUp() + self.old_environ = deepcopy(os.environ) + + def tearDown(self): + for key, value in self.old_environ.items(): + if os.environ.get(key) != value: + os.environ[key] = value + + for key in tuple(os.environ.keys()): + if key not in self.old_environ: + del os.environ[key] + + super(EnvironGuard, self).tearDown() + + +def copy_xxmodule_c(directory): + """Helper for tests that need the xxmodule.c source file. + + Example use: + + def test_compile(self): + copy_xxmodule_c(self.tmpdir) + self.assertIn('xxmodule.c', os.listdir(self.tmpdir)) + + If the source file can be found, it will be copied to *directory*. If not, + the test will be skipped. Errors during copy are not caught. + """ + filename = _get_xxmodule_path() + if filename is None: + raise unittest.SkipTest('cannot find xxmodule.c (test must run in ' + 'the python build dir)') + shutil.copy(filename, directory) + + +def _get_xxmodule_path(): + srcdir = sysconfig.get_config_var('srcdir') + candidates = [ + # use installed copy if available + os.path.join(os.path.dirname(__file__), 'xxmodule.c'), + # otherwise try using copy from build directory + os.path.join(srcdir, 'Modules', 'xxmodule.c'), + # srcdir mysteriously can be $srcdir/Lib/distutils/tests when + # this file is run from its parent directory, so walk up the + # tree to find the real srcdir + os.path.join(srcdir, '..', '..', '..', 'Modules', 'xxmodule.c'), + ] + for path in candidates: + if os.path.exists(path): + return path + + +def fixup_build_ext(cmd): + """Function needed to make build_ext tests pass. + + When Python was built with --enable-shared on Unix, -L. is not enough to + find libpython.so, because regrtest runs in a tempdir, not in the + source directory where the .so lives. + + When Python was built with in debug mode on Windows, build_ext commands + need their debug attribute set, and it is not done automatically for + some reason. + + This function handles both of these things. Example use: + + cmd = build_ext(dist) + support.fixup_build_ext(cmd) + cmd.ensure_finalized() + + Unlike most other Unix platforms, Mac OS X embeds absolute paths + to shared libraries into executables, so the fixup is not needed there. + """ + if os.name == 'nt': + cmd.debug = sys.executable.endswith('_d.exe') + elif sysconfig.get_config_var('Py_ENABLE_SHARED'): + # To further add to the shared builds fun on Unix, we can't just add + # library_dirs to the Extension() instance because that doesn't get + # plumbed through to the final compiler command. 
+ runshared = sysconfig.get_config_var('RUNSHARED') + if runshared is None: + cmd.library_dirs = ['.'] + else: + if sys.platform == 'darwin': + cmd.library_dirs = [] + else: + name, equals, value = runshared.partition('=') + cmd.library_dirs = [d for d in value.split(os.pathsep) if d] diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_archive_util.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_archive_util.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_archive_util.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_archive_util.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,396 @@ +# -*- coding: utf-8 -*- +"""Tests for distutils.archive_util.""" +import unittest +import os +import sys +import tarfile +from os.path import splitdrive +import warnings + +from distutils import archive_util +from distutils.archive_util import (check_archive_formats, make_tarball, + make_zipfile, make_archive, + ARCHIVE_FORMATS) +from distutils.spawn import find_executable, spawn +from distutils.tests import support +from test.support import run_unittest, patch +from test.support.os_helper import change_cwd +from test.support.warnings_helper import check_warnings + +try: + import grp + import pwd + UID_GID_SUPPORT = True +except ImportError: + UID_GID_SUPPORT = False + +try: + import zipfile + ZIP_SUPPORT = True +except ImportError: + ZIP_SUPPORT = find_executable('zip') + +try: + import zlib + ZLIB_SUPPORT = True +except ImportError: + ZLIB_SUPPORT = False + +try: + import bz2 +except ImportError: + bz2 = None + +try: + import lzma +except ImportError: + lzma = None + +def can_fs_encode(filename): + """ + Return True if the filename can be saved in the file system. + """ + if os.path.supports_unicode_filenames: + return True + try: + filename.encode(sys.getfilesystemencoding()) + except UnicodeEncodeError: + return False + return True + + +class ArchiveUtilTestCase(support.TempdirManager, + support.LoggingSilencer, + unittest.TestCase): + + @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') + def test_make_tarball(self, name='archive'): + # creating something to tar + tmpdir = self._create_files() + self._make_tarball(tmpdir, name, '.tar.gz') + # trying an uncompressed one + self._make_tarball(tmpdir, name, '.tar', compress=None) + + @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') + def test_make_tarball_gzip(self): + tmpdir = self._create_files() + self._make_tarball(tmpdir, 'archive', '.tar.gz', compress='gzip') + + @unittest.skipUnless(bz2, 'Need bz2 support to run') + def test_make_tarball_bzip2(self): + tmpdir = self._create_files() + self._make_tarball(tmpdir, 'archive', '.tar.bz2', compress='bzip2') + + @unittest.skipUnless(lzma, 'Need lzma support to run') + def test_make_tarball_xz(self): + tmpdir = self._create_files() + self._make_tarball(tmpdir, 'archive', '.tar.xz', compress='xz') + + @unittest.skipUnless(can_fs_encode('årchiv'), + 'File system cannot handle this filename') + def test_make_tarball_latin1(self): + """ + Mirror test_make_tarball, except filename contains latin characters. + """ + self.test_make_tarball('årchiv') # note this isn't a real word + + @unittest.skipUnless(can_fs_encode('のアーカイブ'), + 'File system cannot handle this filename') + def test_make_tarball_extended(self): + """ + Mirror test_make_tarball, except filename contains extended + characters outside the latin charset. 
+ """ + self.test_make_tarball('のアーカイブ') # japanese for archive + + def _make_tarball(self, tmpdir, target_name, suffix, **kwargs): + tmpdir2 = self.mkdtemp() + unittest.skipUnless(splitdrive(tmpdir)[0] == splitdrive(tmpdir2)[0], + "source and target should be on same drive") + + base_name = os.path.join(tmpdir2, target_name) + + # working with relative paths to avoid tar warnings + with change_cwd(tmpdir): + make_tarball(splitdrive(base_name)[1], 'dist', **kwargs) + + # check if the compressed tarball was created + tarball = base_name + suffix + self.assertTrue(os.path.exists(tarball)) + self.assertEqual(self._tarinfo(tarball), self._created_files) + + def _tarinfo(self, path): + tar = tarfile.open(path) + try: + names = tar.getnames() + names.sort() + return names + finally: + tar.close() + + _zip_created_files = ['dist/', 'dist/file1', 'dist/file2', + 'dist/sub/', 'dist/sub/file3', 'dist/sub2/'] + _created_files = [p.rstrip('/') for p in _zip_created_files] + + def _create_files(self): + # creating something to tar + tmpdir = self.mkdtemp() + dist = os.path.join(tmpdir, 'dist') + os.mkdir(dist) + self.write_file([dist, 'file1'], 'xxx') + self.write_file([dist, 'file2'], 'xxx') + os.mkdir(os.path.join(dist, 'sub')) + self.write_file([dist, 'sub', 'file3'], 'xxx') + os.mkdir(os.path.join(dist, 'sub2')) + return tmpdir + + @unittest.skipUnless(find_executable('tar') and find_executable('gzip') + and ZLIB_SUPPORT, + 'Need the tar, gzip and zlib command to run') + def test_tarfile_vs_tar(self): + tmpdir = self._create_files() + tmpdir2 = self.mkdtemp() + base_name = os.path.join(tmpdir2, 'archive') + old_dir = os.getcwd() + os.chdir(tmpdir) + try: + make_tarball(base_name, 'dist') + finally: + os.chdir(old_dir) + + # check if the compressed tarball was created + tarball = base_name + '.tar.gz' + self.assertTrue(os.path.exists(tarball)) + + # now create another tarball using `tar` + tarball2 = os.path.join(tmpdir, 'archive2.tar.gz') + tar_cmd = ['tar', '-cf', 'archive2.tar', 'dist'] + gzip_cmd = ['gzip', '-f', '-9', 'archive2.tar'] + old_dir = os.getcwd() + os.chdir(tmpdir) + try: + spawn(tar_cmd) + spawn(gzip_cmd) + finally: + os.chdir(old_dir) + + self.assertTrue(os.path.exists(tarball2)) + # let's compare both tarballs + self.assertEqual(self._tarinfo(tarball), self._created_files) + self.assertEqual(self._tarinfo(tarball2), self._created_files) + + # trying an uncompressed one + base_name = os.path.join(tmpdir2, 'archive') + old_dir = os.getcwd() + os.chdir(tmpdir) + try: + make_tarball(base_name, 'dist', compress=None) + finally: + os.chdir(old_dir) + tarball = base_name + '.tar' + self.assertTrue(os.path.exists(tarball)) + + # now for a dry_run + base_name = os.path.join(tmpdir2, 'archive') + old_dir = os.getcwd() + os.chdir(tmpdir) + try: + make_tarball(base_name, 'dist', compress=None, dry_run=True) + finally: + os.chdir(old_dir) + tarball = base_name + '.tar' + self.assertTrue(os.path.exists(tarball)) + + @unittest.skipUnless(find_executable('compress'), + 'The compress program is required') + def test_compress_deprecated(self): + tmpdir = self._create_files() + base_name = os.path.join(self.mkdtemp(), 'archive') + + # using compress and testing the PendingDeprecationWarning + old_dir = os.getcwd() + os.chdir(tmpdir) + try: + with check_warnings() as w: + warnings.simplefilter("always") + make_tarball(base_name, 'dist', compress='compress') + finally: + os.chdir(old_dir) + tarball = base_name + '.tar.Z' + self.assertTrue(os.path.exists(tarball)) + self.assertEqual(len(w.warnings), 1) 
+ + # same test with dry_run + os.remove(tarball) + old_dir = os.getcwd() + os.chdir(tmpdir) + try: + with check_warnings() as w: + warnings.simplefilter("always") + make_tarball(base_name, 'dist', compress='compress', + dry_run=True) + finally: + os.chdir(old_dir) + self.assertFalse(os.path.exists(tarball)) + self.assertEqual(len(w.warnings), 1) + + @unittest.skipUnless(ZIP_SUPPORT and ZLIB_SUPPORT, + 'Need zip and zlib support to run') + def test_make_zipfile(self): + # creating something to tar + tmpdir = self._create_files() + base_name = os.path.join(self.mkdtemp(), 'archive') + with change_cwd(tmpdir): + make_zipfile(base_name, 'dist') + + # check if the compressed tarball was created + tarball = base_name + '.zip' + self.assertTrue(os.path.exists(tarball)) + with zipfile.ZipFile(tarball) as zf: + self.assertEqual(sorted(zf.namelist()), self._zip_created_files) + + @unittest.skipUnless(ZIP_SUPPORT, 'Need zip support to run') + def test_make_zipfile_no_zlib(self): + patch(self, archive_util.zipfile, 'zlib', None) # force zlib ImportError + + called = [] + zipfile_class = zipfile.ZipFile + def fake_zipfile(*a, **kw): + if kw.get('compression', None) == zipfile.ZIP_STORED: + called.append((a, kw)) + return zipfile_class(*a, **kw) + + patch(self, archive_util.zipfile, 'ZipFile', fake_zipfile) + + # create something to tar and compress + tmpdir = self._create_files() + base_name = os.path.join(self.mkdtemp(), 'archive') + with change_cwd(tmpdir): + make_zipfile(base_name, 'dist') + + tarball = base_name + '.zip' + self.assertEqual(called, + [((tarball, "w"), {'compression': zipfile.ZIP_STORED})]) + self.assertTrue(os.path.exists(tarball)) + with zipfile.ZipFile(tarball) as zf: + self.assertEqual(sorted(zf.namelist()), self._zip_created_files) + + def test_check_archive_formats(self): + self.assertEqual(check_archive_formats(['gztar', 'xxx', 'zip']), + 'xxx') + self.assertIsNone(check_archive_formats(['gztar', 'bztar', 'xztar', + 'ztar', 'tar', 'zip'])) + + def test_make_archive(self): + tmpdir = self.mkdtemp() + base_name = os.path.join(tmpdir, 'archive') + self.assertRaises(ValueError, make_archive, base_name, 'xxx') + + def test_make_archive_cwd(self): + current_dir = os.getcwd() + def _breaks(*args, **kw): + raise RuntimeError() + ARCHIVE_FORMATS['xxx'] = (_breaks, [], 'xxx file') + try: + try: + make_archive('xxx', 'xxx', root_dir=self.mkdtemp()) + except: + pass + self.assertEqual(os.getcwd(), current_dir) + finally: + del ARCHIVE_FORMATS['xxx'] + + def test_make_archive_tar(self): + base_dir = self._create_files() + base_name = os.path.join(self.mkdtemp() , 'archive') + res = make_archive(base_name, 'tar', base_dir, 'dist') + self.assertTrue(os.path.exists(res)) + self.assertEqual(os.path.basename(res), 'archive.tar') + self.assertEqual(self._tarinfo(res), self._created_files) + + @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') + def test_make_archive_gztar(self): + base_dir = self._create_files() + base_name = os.path.join(self.mkdtemp() , 'archive') + res = make_archive(base_name, 'gztar', base_dir, 'dist') + self.assertTrue(os.path.exists(res)) + self.assertEqual(os.path.basename(res), 'archive.tar.gz') + self.assertEqual(self._tarinfo(res), self._created_files) + + @unittest.skipUnless(bz2, 'Need bz2 support to run') + def test_make_archive_bztar(self): + base_dir = self._create_files() + base_name = os.path.join(self.mkdtemp() , 'archive') + res = make_archive(base_name, 'bztar', base_dir, 'dist') + self.assertTrue(os.path.exists(res)) + 
self.assertEqual(os.path.basename(res), 'archive.tar.bz2') + self.assertEqual(self._tarinfo(res), self._created_files) + + @unittest.skipUnless(lzma, 'Need xz support to run') + def test_make_archive_xztar(self): + base_dir = self._create_files() + base_name = os.path.join(self.mkdtemp() , 'archive') + res = make_archive(base_name, 'xztar', base_dir, 'dist') + self.assertTrue(os.path.exists(res)) + self.assertEqual(os.path.basename(res), 'archive.tar.xz') + self.assertEqual(self._tarinfo(res), self._created_files) + + def test_make_archive_owner_group(self): + # testing make_archive with owner and group, with various combinations + # this works even if there's not gid/uid support + if UID_GID_SUPPORT: + group = grp.getgrgid(0)[0] + owner = pwd.getpwuid(0)[0] + else: + group = owner = 'root' + + base_dir = self._create_files() + root_dir = self.mkdtemp() + base_name = os.path.join(self.mkdtemp() , 'archive') + res = make_archive(base_name, 'zip', root_dir, base_dir, owner=owner, + group=group) + self.assertTrue(os.path.exists(res)) + + res = make_archive(base_name, 'zip', root_dir, base_dir) + self.assertTrue(os.path.exists(res)) + + res = make_archive(base_name, 'tar', root_dir, base_dir, + owner=owner, group=group) + self.assertTrue(os.path.exists(res)) + + res = make_archive(base_name, 'tar', root_dir, base_dir, + owner='kjhkjhkjg', group='oihohoh') + self.assertTrue(os.path.exists(res)) + + @unittest.skipUnless(ZLIB_SUPPORT, "Requires zlib") + @unittest.skipUnless(UID_GID_SUPPORT, "Requires grp and pwd support") + def test_tarfile_root_owner(self): + tmpdir = self._create_files() + base_name = os.path.join(self.mkdtemp(), 'archive') + old_dir = os.getcwd() + os.chdir(tmpdir) + group = grp.getgrgid(0)[0] + owner = pwd.getpwuid(0)[0] + try: + archive_name = make_tarball(base_name, 'dist', compress=None, + owner=owner, group=group) + finally: + os.chdir(old_dir) + + # check if the compressed tarball was created + self.assertTrue(os.path.exists(archive_name)) + + # now checks the rights + archive = tarfile.open(archive_name) + try: + for member in archive.getmembers(): + self.assertEqual(member.uid, 0) + self.assertEqual(member.gid, 0) + finally: + archive.close() + +def test_suite(): + return unittest.TestLoader().loadTestsFromTestCase(ArchiveUtilTestCase) + +if __name__ == "__main__": + run_unittest(test_suite()) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_bdist.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_bdist.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_bdist.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_bdist.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,52 @@ +"""Tests for distutils.command.bdist.""" +import os +import unittest +from test.support import run_unittest + +import warnings +with warnings.catch_warnings(): + warnings.simplefilter('ignore', DeprecationWarning) + from distutils.command.bdist import bdist + from distutils.tests import support + + +class BuildTestCase(support.TempdirManager, + unittest.TestCase): + + def test_formats(self): + # let's create a command and make sure + # we can set the format + dist = self.create_dist()[1] + cmd = bdist(dist) + cmd.formats = ['tar'] + cmd.ensure_finalized() + self.assertEqual(cmd.formats, ['tar']) + + # what formats does bdist offer? 
+ formats = ['bztar', 'gztar', 'rpm', 'tar', 'xztar', 'zip', 'ztar'] + found = sorted(cmd.format_command) + self.assertEqual(found, formats) + + def test_skip_build(self): + # bug #10946: bdist --skip-build should trickle down to subcommands + dist = self.create_dist()[1] + cmd = bdist(dist) + cmd.skip_build = 1 + cmd.ensure_finalized() + dist.command_obj['bdist'] = cmd + + for name in ['bdist_dumb']: # bdist_rpm does not support --skip-build + subcmd = cmd.get_finalized_command(name) + if getattr(subcmd, '_unsupported', False): + # command is not supported on this build + continue + self.assertTrue(subcmd.skip_build, + '%s should take --skip-build from bdist' % name) + + +def test_suite(): + return unittest.TestLoader().loadTestsFromTestCase(BuildTestCase) + + +if __name__ == '__main__': + run_unittest(test_suite()) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_bdist_dumb.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_bdist_dumb.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_bdist_dumb.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_bdist_dumb.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,97 @@ +"""Tests for distutils.command.bdist_dumb.""" + +import os +import sys +import zipfile +import unittest +from test.support import run_unittest + +from distutils.core import Distribution +from distutils.command.bdist_dumb import bdist_dumb +from distutils.tests import support + +SETUP_PY = """\ +from distutils.core import setup +import foo + +setup(name='foo', version='0.1', py_modules=['foo'], + url='xxx', author='xxx', author_email='xxx') + +""" + +try: + import zlib + ZLIB_SUPPORT = True +except ImportError: + ZLIB_SUPPORT = False + + +class BuildDumbTestCase(support.TempdirManager, + support.LoggingSilencer, + support.EnvironGuard, + unittest.TestCase): + + def setUp(self): + super(BuildDumbTestCase, self).setUp() + self.old_location = os.getcwd() + self.old_sys_argv = sys.argv, sys.argv[:] + + def tearDown(self): + os.chdir(self.old_location) + sys.argv = self.old_sys_argv[0] + sys.argv[:] = self.old_sys_argv[1] + super(BuildDumbTestCase, self).tearDown() + + @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') + def test_simple_built(self): + + # let's create a simple package + tmp_dir = self.mkdtemp() + pkg_dir = os.path.join(tmp_dir, 'foo') + os.mkdir(pkg_dir) + self.write_file((pkg_dir, 'setup.py'), SETUP_PY) + self.write_file((pkg_dir, 'foo.py'), '#') + self.write_file((pkg_dir, 'MANIFEST.in'), 'include foo.py') + self.write_file((pkg_dir, 'README'), '') + + dist = Distribution({'name': 'foo', 'version': '0.1', + 'py_modules': ['foo'], + 'url': 'xxx', 'author': 'xxx', + 'author_email': 'xxx'}) + dist.script_name = 'setup.py' + os.chdir(pkg_dir) + + sys.argv = ['setup.py'] + cmd = bdist_dumb(dist) + + # so the output is the same no matter + # what is the platform + cmd.format = 'zip' + + cmd.ensure_finalized() + cmd.run() + + # see what we have + dist_created = os.listdir(os.path.join(pkg_dir, 'dist')) + base = "%s.%s.zip" % (dist.get_fullname(), cmd.plat_name) + + self.assertEqual(dist_created, [base]) + + # now let's check what we have in the zip file + fp = zipfile.ZipFile(os.path.join('dist', base)) + try: + contents = fp.namelist() + finally: + fp.close() + + contents = sorted(filter(None, map(os.path.basename, contents))) + wanted = ['foo-0.1-py%s.%s.egg-info' % sys.version_info[:2], 'foo.py'] + if not sys.dont_write_bytecode: + 
wanted.append('foo.%s.pyc' % sys.implementation.cache_tag) + self.assertEqual(contents, sorted(wanted)) + +def test_suite(): + return unittest.TestLoader().loadTestsFromTestCase(BuildDumbTestCase) + +if __name__ == '__main__': + run_unittest(test_suite()) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_bdist_rpm.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_bdist_rpm.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_bdist_rpm.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_bdist_rpm.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,141 @@ +"""Tests for distutils.command.bdist_rpm.""" + +import unittest +import sys +import os +from test.support import run_unittest, requires_zlib + +from distutils.core import Distribution +from distutils.command.bdist_rpm import bdist_rpm +from distutils.tests import support +from distutils.spawn import find_executable + +SETUP_PY = """\ +from distutils.core import setup +import foo + +setup(name='foo', version='0.1', py_modules=['foo'], + url='xxx', author='xxx', author_email='xxx') + +""" + +class BuildRpmTestCase(support.TempdirManager, + support.EnvironGuard, + support.LoggingSilencer, + unittest.TestCase): + + def setUp(self): + try: + sys.executable.encode("UTF-8") + except UnicodeEncodeError: + raise unittest.SkipTest("sys.executable is not encodable to UTF-8") + + super(BuildRpmTestCase, self).setUp() + self.old_location = os.getcwd() + self.old_sys_argv = sys.argv, sys.argv[:] + + def tearDown(self): + os.chdir(self.old_location) + sys.argv = self.old_sys_argv[0] + sys.argv[:] = self.old_sys_argv[1] + super(BuildRpmTestCase, self).tearDown() + + # XXX I am unable yet to make this test work without + # spurious sdtout/stderr output under Mac OS X + @unittest.skipUnless(sys.platform.startswith('linux'), + 'spurious sdtout/stderr output under Mac OS X') + @requires_zlib() + @unittest.skipIf(find_executable('rpm') is None, + 'the rpm command is not found') + @unittest.skipIf(find_executable('rpmbuild') is None, + 'the rpmbuild command is not found') + # import foo fails with safe path + @unittest.skipIf(sys.flags.safe_path, + 'PYTHONSAFEPATH changes default sys.path') + def test_quiet(self): + # let's create a package + tmp_dir = self.mkdtemp() + os.environ['HOME'] = tmp_dir # to confine dir '.rpmdb' creation + pkg_dir = os.path.join(tmp_dir, 'foo') + os.mkdir(pkg_dir) + self.write_file((pkg_dir, 'setup.py'), SETUP_PY) + self.write_file((pkg_dir, 'foo.py'), '#') + self.write_file((pkg_dir, 'MANIFEST.in'), 'include foo.py') + self.write_file((pkg_dir, 'README'), '') + + dist = Distribution({'name': 'foo', 'version': '0.1', + 'py_modules': ['foo'], + 'url': 'xxx', 'author': 'xxx', + 'author_email': 'xxx'}) + dist.script_name = 'setup.py' + os.chdir(pkg_dir) + + sys.argv = ['setup.py'] + cmd = bdist_rpm(dist) + cmd.fix_python = True + + # running in quiet mode + cmd.quiet = 1 + cmd.ensure_finalized() + cmd.run() + + dist_created = os.listdir(os.path.join(pkg_dir, 'dist')) + self.assertIn('foo-0.1-1.noarch.rpm', dist_created) + + # bug #2945: upload ignores bdist_rpm files + self.assertIn(('bdist_rpm', 'any', 'dist/foo-0.1-1.src.rpm'), dist.dist_files) + self.assertIn(('bdist_rpm', 'any', 'dist/foo-0.1-1.noarch.rpm'), dist.dist_files) + + # XXX I am unable yet to make this test work without + # spurious sdtout/stderr output under Mac OS X + @unittest.skipUnless(sys.platform.startswith('linux'), + 'spurious sdtout/stderr output under Mac 
OS X') + @requires_zlib() + # http://bugs.python.org/issue1533164 + @unittest.skipIf(find_executable('rpm') is None, + 'the rpm command is not found') + @unittest.skipIf(find_executable('rpmbuild') is None, + 'the rpmbuild command is not found') + # import foo fails with safe path + @unittest.skipIf(sys.flags.safe_path, + 'PYTHONSAFEPATH changes default sys.path') + def test_no_optimize_flag(self): + # let's create a package that breaks bdist_rpm + tmp_dir = self.mkdtemp() + os.environ['HOME'] = tmp_dir # to confine dir '.rpmdb' creation + pkg_dir = os.path.join(tmp_dir, 'foo') + os.mkdir(pkg_dir) + self.write_file((pkg_dir, 'setup.py'), SETUP_PY) + self.write_file((pkg_dir, 'foo.py'), '#') + self.write_file((pkg_dir, 'MANIFEST.in'), 'include foo.py') + self.write_file((pkg_dir, 'README'), '') + + dist = Distribution({'name': 'foo', 'version': '0.1', + 'py_modules': ['foo'], + 'url': 'xxx', 'author': 'xxx', + 'author_email': 'xxx'}) + dist.script_name = 'setup.py' + os.chdir(pkg_dir) + + sys.argv = ['setup.py'] + cmd = bdist_rpm(dist) + cmd.fix_python = True + + cmd.quiet = 1 + cmd.ensure_finalized() + cmd.run() + + dist_created = os.listdir(os.path.join(pkg_dir, 'dist')) + self.assertIn('foo-0.1-1.noarch.rpm', dist_created) + + # bug #2945: upload ignores bdist_rpm files + self.assertIn(('bdist_rpm', 'any', 'dist/foo-0.1-1.src.rpm'), dist.dist_files) + self.assertIn(('bdist_rpm', 'any', 'dist/foo-0.1-1.noarch.rpm'), dist.dist_files) + + os.remove(os.path.join(pkg_dir, 'dist', 'foo-0.1-1.noarch.rpm')) + +def test_suite(): + return unittest.TestLoader().loadTestsFromTestCase(BuildRpmTestCase) + +if __name__ == '__main__': + run_unittest(test_suite()) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_build.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_build.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_build.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_build.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,57 @@ +"""Tests for distutils.command.build.""" +import unittest +import os +import sys +from test.support import run_unittest + +from distutils.command.build import build +from distutils.tests import support +from sysconfig import get_platform + +class BuildTestCase(support.TempdirManager, + support.LoggingSilencer, + unittest.TestCase): + + @unittest.skipUnless(sys.executable, "test requires sys.executable") + def test_finalize_options(self): + pkg_dir, dist = self.create_dist() + cmd = build(dist) + cmd.finalize_options() + + # if not specified, plat_name gets the current platform + self.assertEqual(cmd.plat_name, get_platform()) + + # build_purelib is build + lib + wanted = os.path.join(cmd.build_base, 'lib') + self.assertEqual(cmd.build_purelib, wanted) + + # build_platlib is 'build/lib.platform-x.x[-pydebug]' + # examples: + # build/lib.macosx-10.3-i386-2.7 + plat_spec = '.%s-%d.%d' % (cmd.plat_name, *sys.version_info[:2]) + if hasattr(sys, 'gettotalrefcount'): + self.assertTrue(cmd.build_platlib.endswith('-pydebug')) + plat_spec += '-pydebug' + wanted = os.path.join(cmd.build_base, 'lib' + plat_spec) + self.assertEqual(cmd.build_platlib, wanted) + + # by default, build_lib = build_purelib + self.assertEqual(cmd.build_lib, cmd.build_purelib) + + # build_temp is build/temp. 
+ wanted = os.path.join(cmd.build_base, 'temp' + plat_spec) + self.assertEqual(cmd.build_temp, wanted) + + # build_scripts is build/scripts-x.x + wanted = os.path.join(cmd.build_base, + 'scripts-%d.%d' % sys.version_info[:2]) + self.assertEqual(cmd.build_scripts, wanted) + + # executable is os.path.normpath(sys.executable) + self.assertEqual(cmd.executable, os.path.normpath(sys.executable)) + +def test_suite(): + return unittest.TestLoader().loadTestsFromTestCase(BuildTestCase) + +if __name__ == "__main__": + run_unittest(test_suite()) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_build_clib.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_build_clib.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_build_clib.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_build_clib.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,147 @@ +"""Tests for distutils.command.build_clib.""" +import unittest +import os +import sys +import sysconfig + +from test.support import ( + run_unittest, missing_compiler_executable, requires_subprocess +) + +from distutils.command.build_clib import build_clib +from distutils.errors import DistutilsSetupError +from distutils.tests import support + +class BuildCLibTestCase(support.TempdirManager, + support.LoggingSilencer, + unittest.TestCase): + + def setUp(self): + super().setUp() + self._backup_CONFIG_VARS = dict(sysconfig._CONFIG_VARS) + + def tearDown(self): + super().tearDown() + sysconfig._CONFIG_VARS.clear() + sysconfig._CONFIG_VARS.update(self._backup_CONFIG_VARS) + + def test_check_library_dist(self): + pkg_dir, dist = self.create_dist() + cmd = build_clib(dist) + + # 'libraries' option must be a list + self.assertRaises(DistutilsSetupError, cmd.check_library_list, 'foo') + + # each element of 'libraries' must a 2-tuple + self.assertRaises(DistutilsSetupError, cmd.check_library_list, + ['foo1', 'foo2']) + + # first element of each tuple in 'libraries' + # must be a string (the library name) + self.assertRaises(DistutilsSetupError, cmd.check_library_list, + [(1, 'foo1'), ('name', 'foo2')]) + + # library name may not contain directory separators + self.assertRaises(DistutilsSetupError, cmd.check_library_list, + [('name', 'foo1'), + ('another/name', 'foo2')]) + + # second element of each tuple must be a dictionary (build info) + self.assertRaises(DistutilsSetupError, cmd.check_library_list, + [('name', {}), + ('another', 'foo2')]) + + # those work + libs = [('name', {}), ('name', {'ok': 'good'})] + cmd.check_library_list(libs) + + def test_get_source_files(self): + pkg_dir, dist = self.create_dist() + cmd = build_clib(dist) + + # "in 'libraries' option 'sources' must be present and must be + # a list of source filenames + cmd.libraries = [('name', {})] + self.assertRaises(DistutilsSetupError, cmd.get_source_files) + + cmd.libraries = [('name', {'sources': 1})] + self.assertRaises(DistutilsSetupError, cmd.get_source_files) + + cmd.libraries = [('name', {'sources': ['a', 'b']})] + self.assertEqual(cmd.get_source_files(), ['a', 'b']) + + cmd.libraries = [('name', {'sources': ('a', 'b')})] + self.assertEqual(cmd.get_source_files(), ['a', 'b']) + + cmd.libraries = [('name', {'sources': ('a', 'b')}), + ('name2', {'sources': ['c', 'd']})] + self.assertEqual(cmd.get_source_files(), ['a', 'b', 'c', 'd']) + + def test_build_libraries(self): + + pkg_dir, dist = self.create_dist() + cmd = build_clib(dist) + class FakeCompiler: + def compile(*args, **kw): + pass 
+ create_static_lib = compile + + cmd.compiler = FakeCompiler() + + # build_libraries is also doing a bit of typo checking + lib = [('name', {'sources': 'notvalid'})] + self.assertRaises(DistutilsSetupError, cmd.build_libraries, lib) + + lib = [('name', {'sources': list()})] + cmd.build_libraries(lib) + + lib = [('name', {'sources': tuple()})] + cmd.build_libraries(lib) + + def test_finalize_options(self): + pkg_dir, dist = self.create_dist() + cmd = build_clib(dist) + + cmd.include_dirs = 'one-dir' + cmd.finalize_options() + self.assertEqual(cmd.include_dirs, ['one-dir']) + + cmd.include_dirs = None + cmd.finalize_options() + self.assertEqual(cmd.include_dirs, []) + + cmd.distribution.libraries = 'WONTWORK' + self.assertRaises(DistutilsSetupError, cmd.finalize_options) + + @unittest.skipIf(sys.platform == 'win32', "can't test on Windows") + @requires_subprocess() + def test_run(self): + pkg_dir, dist = self.create_dist() + cmd = build_clib(dist) + + foo_c = os.path.join(pkg_dir, 'foo.c') + self.write_file(foo_c, 'int main(void) { return 1;}\n') + cmd.libraries = [('foo', {'sources': [foo_c]})] + + build_temp = os.path.join(pkg_dir, 'build') + os.mkdir(build_temp) + cmd.build_temp = build_temp + cmd.build_clib = build_temp + + # Before we run the command, we want to make sure + # all commands are present on the system. + ccmd = missing_compiler_executable() + if ccmd is not None: + self.skipTest('The %r command is not found' % ccmd) + + # this should work + cmd.run() + + # let's check the result + self.assertIn('libfoo.a', os.listdir(build_temp)) + +def test_suite(): + return unittest.TestLoader().loadTestsFromTestCase(BuildCLibTestCase) + +if __name__ == "__main__": + run_unittest(test_suite()) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_build_ext.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_build_ext.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_build_ext.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_build_ext.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,555 @@ +import sys +import os +from io import StringIO +import textwrap + +from distutils.core import Distribution +from distutils.command.build_ext import build_ext +from distutils import sysconfig +from distutils.tests.support import (TempdirManager, LoggingSilencer, + copy_xxmodule_c, fixup_build_ext) +from distutils.extension import Extension +from distutils.errors import ( + CompileError, DistutilsPlatformError, DistutilsSetupError, + UnknownFileError) + +import unittest +from test import support +from test.support import os_helper +from test.support.script_helper import assert_python_ok +from test.support import threading_helper + +# http://bugs.python.org/issue4373 +# Don't load the xx module more than once. +ALREADY_TESTED = False + + +class BuildExtTestCase(TempdirManager, + LoggingSilencer, + unittest.TestCase): + def setUp(self): + # Create a simple test environment + super(BuildExtTestCase, self).setUp() + self.tmp_dir = self.mkdtemp() + import site + self.old_user_base = site.USER_BASE + site.USER_BASE = self.mkdtemp() + from distutils.command import build_ext + build_ext.USER_BASE = site.USER_BASE + self.old_config_vars = dict(sysconfig._config_vars) + + # bpo-30132: On Windows, a .pdb file may be created in the current + # working directory. Create a temporary working directory to cleanup + # everything at the end of the test. 
+ self.enterContext(os_helper.change_cwd(self.tmp_dir)) + + def tearDown(self): + import site + site.USER_BASE = self.old_user_base + from distutils.command import build_ext + build_ext.USER_BASE = self.old_user_base + sysconfig._config_vars.clear() + sysconfig._config_vars.update(self.old_config_vars) + super(BuildExtTestCase, self).tearDown() + + def build_ext(self, *args, **kwargs): + return build_ext(*args, **kwargs) + + @support.requires_subprocess() + def test_build_ext(self): + cmd = support.missing_compiler_executable() + if cmd is not None: + self.skipTest('The %r command is not found' % cmd) + global ALREADY_TESTED + copy_xxmodule_c(self.tmp_dir) + xx_c = os.path.join(self.tmp_dir, 'xxmodule.c') + xx_ext = Extension('xx', [xx_c]) + dist = Distribution({'name': 'xx', 'ext_modules': [xx_ext]}) + dist.package_dir = self.tmp_dir + cmd = self.build_ext(dist) + fixup_build_ext(cmd) + cmd.build_lib = self.tmp_dir + cmd.build_temp = self.tmp_dir + + old_stdout = sys.stdout + if not support.verbose: + # silence compiler output + sys.stdout = StringIO() + try: + cmd.ensure_finalized() + cmd.run() + finally: + sys.stdout = old_stdout + + if ALREADY_TESTED: + self.skipTest('Already tested in %s' % ALREADY_TESTED) + else: + ALREADY_TESTED = type(self).__name__ + + code = textwrap.dedent(f""" + tmp_dir = {self.tmp_dir!r} + + import sys + import unittest + from test import support + + sys.path.insert(0, tmp_dir) + import xx + + class Tests(unittest.TestCase): + def test_xx(self): + for attr in ('error', 'foo', 'new', 'roj'): + self.assertTrue(hasattr(xx, attr)) + + self.assertEqual(xx.foo(2, 5), 7) + self.assertEqual(xx.foo(13,15), 28) + self.assertEqual(xx.new().demo(), None) + if support.HAVE_DOCSTRINGS: + doc = 'This is a template module just for instruction.' + self.assertEqual(xx.__doc__, doc) + self.assertIsInstance(xx.Null(), xx.Null) + self.assertIsInstance(xx.Str(), xx.Str) + + + unittest.main() + """) + assert_python_ok('-c', code) + + def test_solaris_enable_shared(self): + dist = Distribution({'name': 'xx'}) + cmd = self.build_ext(dist) + old = sys.platform + + sys.platform = 'sunos' # fooling finalize_options + from distutils.sysconfig import _config_vars + old_var = _config_vars.get('Py_ENABLE_SHARED') + _config_vars['Py_ENABLE_SHARED'] = 1 + try: + cmd.ensure_finalized() + finally: + sys.platform = old + if old_var is None: + del _config_vars['Py_ENABLE_SHARED'] + else: + _config_vars['Py_ENABLE_SHARED'] = old_var + + # make sure we get some library dirs under solaris + self.assertGreater(len(cmd.library_dirs), 0) + + def test_user_site(self): + import site + dist = Distribution({'name': 'xx'}) + cmd = self.build_ext(dist) + + # making sure the user option is there + options = [name for name, short, lable in + cmd.user_options] + self.assertIn('user', options) + + # setting a value + cmd.user = 1 + + # setting user based lib and include + lib = os.path.join(site.USER_BASE, 'lib') + incl = os.path.join(site.USER_BASE, 'include') + os.mkdir(lib) + os.mkdir(incl) + + # let's run finalize + cmd.ensure_finalized() + + # see if include_dirs and library_dirs + # were set + self.assertIn(lib, cmd.library_dirs) + self.assertIn(lib, cmd.rpath) + self.assertIn(incl, cmd.include_dirs) + + @threading_helper.requires_working_threading() + def test_optional_extension(self): + + # this extension will fail, but let's ignore this failure + # with the optional argument. 
+ modules = [Extension('foo', ['xxx'], optional=False)] + dist = Distribution({'name': 'xx', 'ext_modules': modules}) + cmd = self.build_ext(dist) + cmd.ensure_finalized() + self.assertRaises((UnknownFileError, CompileError), + cmd.run) # should raise an error + + modules = [Extension('foo', ['xxx'], optional=True)] + dist = Distribution({'name': 'xx', 'ext_modules': modules}) + cmd = self.build_ext(dist) + cmd.ensure_finalized() + cmd.run() # should pass + + def test_finalize_options(self): + # Make sure Python's include directories (for Python.h, pyconfig.h, + # etc.) are in the include search path. + modules = [Extension('foo', ['xxx'], optional=False)] + dist = Distribution({'name': 'xx', 'ext_modules': modules}) + cmd = self.build_ext(dist) + cmd.finalize_options() + + py_include = sysconfig.get_python_inc() + for p in py_include.split(os.path.pathsep): + self.assertIn(p, cmd.include_dirs) + + plat_py_include = sysconfig.get_python_inc(plat_specific=1) + for p in plat_py_include.split(os.path.pathsep): + self.assertIn(p, cmd.include_dirs) + + # make sure cmd.libraries is turned into a list + # if it's a string + cmd = self.build_ext(dist) + cmd.libraries = 'my_lib, other_lib lastlib' + cmd.finalize_options() + self.assertEqual(cmd.libraries, ['my_lib', 'other_lib', 'lastlib']) + + # make sure cmd.library_dirs is turned into a list + # if it's a string + cmd = self.build_ext(dist) + cmd.library_dirs = 'my_lib_dir%sother_lib_dir' % os.pathsep + cmd.finalize_options() + self.assertIn('my_lib_dir', cmd.library_dirs) + self.assertIn('other_lib_dir', cmd.library_dirs) + + # make sure rpath is turned into a list + # if it's a string + cmd = self.build_ext(dist) + cmd.rpath = 'one%stwo' % os.pathsep + cmd.finalize_options() + self.assertEqual(cmd.rpath, ['one', 'two']) + + # make sure cmd.link_objects is turned into a list + # if it's a string + cmd = build_ext(dist) + cmd.link_objects = 'one two,three' + cmd.finalize_options() + self.assertEqual(cmd.link_objects, ['one', 'two', 'three']) + + # XXX more tests to perform for win32 + + # make sure define is turned into 2-tuples + # strings if they are ','-separated strings + cmd = self.build_ext(dist) + cmd.define = 'one,two' + cmd.finalize_options() + self.assertEqual(cmd.define, [('one', '1'), ('two', '1')]) + + # make sure undef is turned into a list of + # strings if they are ','-separated strings + cmd = self.build_ext(dist) + cmd.undef = 'one,two' + cmd.finalize_options() + self.assertEqual(cmd.undef, ['one', 'two']) + + # make sure swig_opts is turned into a list + cmd = self.build_ext(dist) + cmd.swig_opts = None + cmd.finalize_options() + self.assertEqual(cmd.swig_opts, []) + + cmd = self.build_ext(dist) + cmd.swig_opts = '1 2' + cmd.finalize_options() + self.assertEqual(cmd.swig_opts, ['1', '2']) + + def test_check_extensions_list(self): + dist = Distribution() + cmd = self.build_ext(dist) + cmd.finalize_options() + + #'extensions' option must be a list of Extension instances + self.assertRaises(DistutilsSetupError, + cmd.check_extensions_list, 'foo') + + # each element of 'ext_modules' option must be an + # Extension instance or 2-tuple + exts = [('bar', 'foo', 'bar'), 'foo'] + self.assertRaises(DistutilsSetupError, cmd.check_extensions_list, exts) + + # first element of each tuple in 'ext_modules' + # must be the extension name (a string) and match + # a python dotted-separated name + exts = [('foo-bar', '')] + self.assertRaises(DistutilsSetupError, cmd.check_extensions_list, exts) + + # second element of each tuple in 'ext_modules' 
+ # must be a dictionary (build info) + exts = [('foo.bar', '')] + self.assertRaises(DistutilsSetupError, cmd.check_extensions_list, exts) + + # ok this one should pass + exts = [('foo.bar', {'sources': [''], 'libraries': 'foo', + 'some': 'bar'})] + cmd.check_extensions_list(exts) + ext = exts[0] + self.assertIsInstance(ext, Extension) + + # check_extensions_list adds in ext the values passed + # when they are in ('include_dirs', 'library_dirs', 'libraries' + # 'extra_objects', 'extra_compile_args', 'extra_link_args') + self.assertEqual(ext.libraries, 'foo') + self.assertFalse(hasattr(ext, 'some')) + + # 'macros' element of build info dict must be 1- or 2-tuple + exts = [('foo.bar', {'sources': [''], 'libraries': 'foo', + 'some': 'bar', 'macros': [('1', '2', '3'), 'foo']})] + self.assertRaises(DistutilsSetupError, cmd.check_extensions_list, exts) + + exts[0][1]['macros'] = [('1', '2'), ('3',)] + cmd.check_extensions_list(exts) + self.assertEqual(exts[0].undef_macros, ['3']) + self.assertEqual(exts[0].define_macros, [('1', '2')]) + + def test_get_source_files(self): + modules = [Extension('foo', ['xxx'], optional=False)] + dist = Distribution({'name': 'xx', 'ext_modules': modules}) + cmd = self.build_ext(dist) + cmd.ensure_finalized() + self.assertEqual(cmd.get_source_files(), ['xxx']) + + def test_unicode_module_names(self): + modules = [ + Extension('foo', ['aaa'], optional=False), + Extension('föö', ['uuu'], optional=False), + ] + dist = Distribution({'name': 'xx', 'ext_modules': modules}) + cmd = self.build_ext(dist) + cmd.ensure_finalized() + self.assertRegex(cmd.get_ext_filename(modules[0].name), r'foo(_d)?\..*') + self.assertRegex(cmd.get_ext_filename(modules[1].name), r'föö(_d)?\..*') + self.assertEqual(cmd.get_export_symbols(modules[0]), ['PyInit_foo']) + self.assertEqual(cmd.get_export_symbols(modules[1]), ['PyInitU_f_gkaa']) + + def test_compiler_option(self): + # cmd.compiler is an option and + # should not be overridden by a compiler instance + # when the command is run + dist = Distribution() + cmd = self.build_ext(dist) + cmd.compiler = 'unix' + cmd.ensure_finalized() + cmd.run() + self.assertEqual(cmd.compiler, 'unix') + + @support.requires_subprocess() + def test_get_outputs(self): + cmd = support.missing_compiler_executable() + if cmd is not None: + self.skipTest('The %r command is not found' % cmd) + tmp_dir = self.mkdtemp() + c_file = os.path.join(tmp_dir, 'foo.c') + self.write_file(c_file, 'void PyInit_foo(void) {}\n') + ext = Extension('foo', [c_file], optional=False) + dist = Distribution({'name': 'xx', + 'ext_modules': [ext]}) + cmd = self.build_ext(dist) + fixup_build_ext(cmd) + cmd.ensure_finalized() + self.assertEqual(len(cmd.get_outputs()), 1) + + cmd.build_lib = os.path.join(self.tmp_dir, 'build') + cmd.build_temp = os.path.join(self.tmp_dir, 'tempt') + + # issue #5977 : distutils build_ext.get_outputs + # returns wrong result with --inplace + other_tmp_dir = os.path.realpath(self.mkdtemp()) + old_wd = os.getcwd() + os.chdir(other_tmp_dir) + try: + cmd.inplace = 1 + cmd.run() + so_file = cmd.get_outputs()[0] + finally: + os.chdir(old_wd) + self.assertTrue(os.path.exists(so_file)) + ext_suffix = sysconfig.get_config_var('EXT_SUFFIX') + self.assertTrue(so_file.endswith(ext_suffix)) + so_dir = os.path.dirname(so_file) + self.assertEqual(so_dir, other_tmp_dir) + + cmd.inplace = 0 + cmd.compiler = None + cmd.run() + so_file = cmd.get_outputs()[0] + self.assertTrue(os.path.exists(so_file)) + self.assertTrue(so_file.endswith(ext_suffix)) + so_dir = 
os.path.dirname(so_file) + self.assertEqual(so_dir, cmd.build_lib) + + # inplace = 0, cmd.package = 'bar' + build_py = cmd.get_finalized_command('build_py') + build_py.package_dir = {'': 'bar'} + path = cmd.get_ext_fullpath('foo') + # checking that the last directory is the build_dir + path = os.path.split(path)[0] + self.assertEqual(path, cmd.build_lib) + + # inplace = 1, cmd.package = 'bar' + cmd.inplace = 1 + other_tmp_dir = os.path.realpath(self.mkdtemp()) + old_wd = os.getcwd() + os.chdir(other_tmp_dir) + try: + path = cmd.get_ext_fullpath('foo') + finally: + os.chdir(old_wd) + # checking that the last directory is bar + path = os.path.split(path)[0] + lastdir = os.path.split(path)[-1] + self.assertEqual(lastdir, 'bar') + + def test_ext_fullpath(self): + ext = sysconfig.get_config_var('EXT_SUFFIX') + # building lxml.etree inplace + #etree_c = os.path.join(self.tmp_dir, 'lxml.etree.c') + #etree_ext = Extension('lxml.etree', [etree_c]) + #dist = Distribution({'name': 'lxml', 'ext_modules': [etree_ext]}) + dist = Distribution() + cmd = self.build_ext(dist) + cmd.inplace = 1 + cmd.distribution.package_dir = {'': 'src'} + cmd.distribution.packages = ['lxml', 'lxml.html'] + curdir = os.getcwd() + wanted = os.path.join(curdir, 'src', 'lxml', 'etree' + ext) + path = cmd.get_ext_fullpath('lxml.etree') + self.assertEqual(wanted, path) + + # building lxml.etree not inplace + cmd.inplace = 0 + cmd.build_lib = os.path.join(curdir, 'tmpdir') + wanted = os.path.join(curdir, 'tmpdir', 'lxml', 'etree' + ext) + path = cmd.get_ext_fullpath('lxml.etree') + self.assertEqual(wanted, path) + + # building twisted.runner.portmap not inplace + build_py = cmd.get_finalized_command('build_py') + build_py.package_dir = {} + cmd.distribution.packages = ['twisted', 'twisted.runner.portmap'] + path = cmd.get_ext_fullpath('twisted.runner.portmap') + wanted = os.path.join(curdir, 'tmpdir', 'twisted', 'runner', + 'portmap' + ext) + self.assertEqual(wanted, path) + + # building twisted.runner.portmap inplace + cmd.inplace = 1 + path = cmd.get_ext_fullpath('twisted.runner.portmap') + wanted = os.path.join(curdir, 'twisted', 'runner', 'portmap' + ext) + self.assertEqual(wanted, path) + + + @unittest.skipUnless(sys.platform == 'darwin', 'test only relevant for MacOSX') + def test_deployment_target_default(self): + # Issue 9516: Test that, in the absence of the environment variable, + # an extension module is compiled with the same deployment target as + # the interpreter. + self._try_compile_deployment_target('==', None) + + @unittest.skipUnless(sys.platform == 'darwin', 'test only relevant for MacOSX') + def test_deployment_target_too_low(self): + # Issue 9516: Test that an extension module is not allowed to be + # compiled with a deployment target less than that of the interpreter. + self.assertRaises(DistutilsPlatformError, + self._try_compile_deployment_target, '>', '10.1') + + @unittest.skipUnless(sys.platform == 'darwin', 'test only relevant for MacOSX') + def test_deployment_target_higher_ok(self): + # Issue 9516: Test that an extension module can be compiled with a + # deployment target higher than that of the interpreter: the ext + # module may depend on some newer OS feature. + deptarget = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET') + if deptarget: + # increment the minor version number (i.e. 
10.6 -> 10.7) + deptarget = [int(x) for x in deptarget.split('.')] + deptarget[-1] += 1 + deptarget = '.'.join(str(i) for i in deptarget) + self._try_compile_deployment_target('<', deptarget) + + def _try_compile_deployment_target(self, operator, target): + orig_environ = os.environ + os.environ = orig_environ.copy() + self.addCleanup(setattr, os, 'environ', orig_environ) + + if target is None: + if os.environ.get('MACOSX_DEPLOYMENT_TARGET'): + del os.environ['MACOSX_DEPLOYMENT_TARGET'] + else: + os.environ['MACOSX_DEPLOYMENT_TARGET'] = target + + deptarget_c = os.path.join(self.tmp_dir, 'deptargetmodule.c') + + with open(deptarget_c, 'w') as fp: + fp.write(textwrap.dedent('''\ + #include + + int dummy; + + #if TARGET %s MAC_OS_X_VERSION_MIN_REQUIRED + #else + #error "Unexpected target" + #endif + + ''' % operator)) + + # get the deployment target that the interpreter was built with + target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET') + target = tuple(map(int, target.split('.')[0:2])) + # format the target value as defined in the Apple + # Availability Macros. We can't use the macro names since + # at least one value we test with will not exist yet. + if target[:2] < (10, 10): + # for 10.1 through 10.9.x -> "10n0" + target = '%02d%01d0' % target + else: + # for 10.10 and beyond -> "10nn00" + if len(target) >= 2: + target = '%02d%02d00' % target + else: + # 11 and later can have no minor version (11 instead of 11.0) + target = '%02d0000' % target + deptarget_ext = Extension( + 'deptarget', + [deptarget_c], + extra_compile_args=['-DTARGET=%s'%(target,)], + ) + dist = Distribution({ + 'name': 'deptarget', + 'ext_modules': [deptarget_ext] + }) + dist.package_dir = self.tmp_dir + cmd = self.build_ext(dist) + cmd.build_lib = self.tmp_dir + cmd.build_temp = self.tmp_dir + + try: + old_stdout = sys.stdout + if not support.verbose: + # silence compiler output + sys.stdout = StringIO() + try: + cmd.ensure_finalized() + cmd.run() + finally: + sys.stdout = old_stdout + + except CompileError: + self.fail("Wrong deployment target during compilation") + + +class ParallelBuildExtTestCase(BuildExtTestCase): + + def build_ext(self, *args, **kwargs): + build_ext = super().build_ext(*args, **kwargs) + build_ext.parallel = True + return build_ext + + +def test_suite(): + suite = unittest.TestSuite() + suite.addTest(unittest.TestLoader().loadTestsFromTestCase(BuildExtTestCase)) + suite.addTest(unittest.TestLoader().loadTestsFromTestCase(ParallelBuildExtTestCase)) + return suite + +if __name__ == '__main__': + support.run_unittest(__name__) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_build_py.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_build_py.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_build_py.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_build_py.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,181 @@ +"""Tests for distutils.command.build_py.""" + +import os +import sys +import unittest + +from distutils.command.build_py import build_py +from distutils.core import Distribution +from distutils.errors import DistutilsFileError + +from distutils.tests import support +from test.support import run_unittest, requires_subprocess + + +class BuildPyTestCase(support.TempdirManager, + support.LoggingSilencer, + unittest.TestCase): + + def test_package_data(self): + sources = self.mkdtemp() + f = open(os.path.join(sources, "__init__.py"), "w") + try: + f.write("# Pretend this 
is a package.") + finally: + f.close() + f = open(os.path.join(sources, "README.txt"), "w") + try: + f.write("Info about this package") + finally: + f.close() + + destination = self.mkdtemp() + + dist = Distribution({"packages": ["pkg"], + "package_dir": {"pkg": sources}}) + # script_name need not exist, it just need to be initialized + dist.script_name = os.path.join(sources, "setup.py") + dist.command_obj["build"] = support.DummyCommand( + force=0, + build_lib=destination) + dist.packages = ["pkg"] + dist.package_data = {"pkg": ["README.txt"]} + dist.package_dir = {"pkg": sources} + + cmd = build_py(dist) + cmd.compile = 1 + cmd.ensure_finalized() + self.assertEqual(cmd.package_data, dist.package_data) + + cmd.run() + + # This makes sure the list of outputs includes byte-compiled + # files for Python modules but not for package data files + # (there shouldn't *be* byte-code files for those!). + self.assertEqual(len(cmd.get_outputs()), 3) + pkgdest = os.path.join(destination, "pkg") + files = os.listdir(pkgdest) + pycache_dir = os.path.join(pkgdest, "__pycache__") + self.assertIn("__init__.py", files) + self.assertIn("README.txt", files) + if sys.dont_write_bytecode: + self.assertFalse(os.path.exists(pycache_dir)) + else: + pyc_files = os.listdir(pycache_dir) + self.assertIn("__init__.%s.pyc" % sys.implementation.cache_tag, + pyc_files) + + def test_empty_package_dir(self): + # See bugs #1668596/#1720897 + sources = self.mkdtemp() + open(os.path.join(sources, "__init__.py"), "w").close() + + testdir = os.path.join(sources, "doc") + os.mkdir(testdir) + open(os.path.join(testdir, "testfile"), "w").close() + + os.chdir(sources) + dist = Distribution({"packages": ["pkg"], + "package_dir": {"pkg": ""}, + "package_data": {"pkg": ["doc/*"]}}) + # script_name need not exist, it just need to be initialized + dist.script_name = os.path.join(sources, "setup.py") + dist.script_args = ["build"] + dist.parse_command_line() + + try: + dist.run_commands() + except DistutilsFileError: + self.fail("failed package_data test when package_dir is ''") + + @unittest.skipIf(sys.dont_write_bytecode, 'byte-compile disabled') + @requires_subprocess() + def test_byte_compile(self): + project_dir, dist = self.create_dist(py_modules=['boiledeggs']) + os.chdir(project_dir) + self.write_file('boiledeggs.py', 'import antigravity') + cmd = build_py(dist) + cmd.compile = 1 + cmd.build_lib = 'here' + cmd.finalize_options() + cmd.run() + + found = os.listdir(cmd.build_lib) + self.assertEqual(sorted(found), ['__pycache__', 'boiledeggs.py']) + found = os.listdir(os.path.join(cmd.build_lib, '__pycache__')) + self.assertEqual(found, + ['boiledeggs.%s.pyc' % sys.implementation.cache_tag]) + + @unittest.skipIf(sys.dont_write_bytecode, 'byte-compile disabled') + @requires_subprocess() + def test_byte_compile_optimized(self): + project_dir, dist = self.create_dist(py_modules=['boiledeggs']) + os.chdir(project_dir) + self.write_file('boiledeggs.py', 'import antigravity') + cmd = build_py(dist) + cmd.compile = 0 + cmd.optimize = 1 + cmd.build_lib = 'here' + cmd.finalize_options() + cmd.run() + + found = os.listdir(cmd.build_lib) + self.assertEqual(sorted(found), ['__pycache__', 'boiledeggs.py']) + found = os.listdir(os.path.join(cmd.build_lib, '__pycache__')) + expect = 'boiledeggs.{}.opt-1.pyc'.format(sys.implementation.cache_tag) + self.assertEqual(sorted(found), [expect]) + + def test_dir_in_package_data(self): + """ + A directory in package_data should not be added to the filelist. 
+ """ + # See bug 19286 + sources = self.mkdtemp() + pkg_dir = os.path.join(sources, "pkg") + + os.mkdir(pkg_dir) + open(os.path.join(pkg_dir, "__init__.py"), "w").close() + + docdir = os.path.join(pkg_dir, "doc") + os.mkdir(docdir) + open(os.path.join(docdir, "testfile"), "w").close() + + # create the directory that could be incorrectly detected as a file + os.mkdir(os.path.join(docdir, 'otherdir')) + + os.chdir(sources) + dist = Distribution({"packages": ["pkg"], + "package_data": {"pkg": ["doc/*"]}}) + # script_name need not exist, it just need to be initialized + dist.script_name = os.path.join(sources, "setup.py") + dist.script_args = ["build"] + dist.parse_command_line() + + try: + dist.run_commands() + except DistutilsFileError: + self.fail("failed package_data when data dir includes a dir") + + def test_dont_write_bytecode(self): + # makes sure byte_compile is not used + dist = self.create_dist()[1] + cmd = build_py(dist) + cmd.compile = 1 + cmd.optimize = 1 + + old_dont_write_bytecode = sys.dont_write_bytecode + sys.dont_write_bytecode = True + try: + cmd.byte_compile([]) + finally: + sys.dont_write_bytecode = old_dont_write_bytecode + + self.assertIn('byte-compiling is disabled', + self.logs[0][1] % self.logs[0][2]) + + +def test_suite(): + return unittest.TestLoader().loadTestsFromTestCase(BuildPyTestCase) + +if __name__ == "__main__": + run_unittest(test_suite()) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_build_scripts.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_build_scripts.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_build_scripts.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_build_scripts.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,112 @@ +"""Tests for distutils.command.build_scripts.""" + +import os +import unittest + +from distutils.command.build_scripts import build_scripts +from distutils.core import Distribution +from distutils import sysconfig + +from distutils.tests import support +from test.support import run_unittest + + +class BuildScriptsTestCase(support.TempdirManager, + support.LoggingSilencer, + unittest.TestCase): + + def test_default_settings(self): + cmd = self.get_build_scripts_cmd("/foo/bar", []) + self.assertFalse(cmd.force) + self.assertIsNone(cmd.build_dir) + + cmd.finalize_options() + + self.assertTrue(cmd.force) + self.assertEqual(cmd.build_dir, "/foo/bar") + + def test_build(self): + source = self.mkdtemp() + target = self.mkdtemp() + expected = self.write_sample_scripts(source) + + cmd = self.get_build_scripts_cmd(target, + [os.path.join(source, fn) + for fn in expected]) + cmd.finalize_options() + cmd.run() + + built = os.listdir(target) + for name in expected: + self.assertIn(name, built) + + def get_build_scripts_cmd(self, target, scripts): + import sys + dist = Distribution() + dist.scripts = scripts + dist.command_obj["build"] = support.DummyCommand( + build_scripts=target, + force=1, + executable=sys.executable + ) + return build_scripts(dist) + + def write_sample_scripts(self, dir): + expected = [] + expected.append("script1.py") + self.write_script(dir, "script1.py", + ("#! 
/usr/bin/env python2.3\n" + "# bogus script w/ Python sh-bang\n" + "pass\n")) + expected.append("script2.py") + self.write_script(dir, "script2.py", + ("#!/usr/bin/python\n" + "# bogus script w/ Python sh-bang\n" + "pass\n")) + expected.append("shell.sh") + self.write_script(dir, "shell.sh", + ("#!/bin/sh\n" + "# bogus shell script w/ sh-bang\n" + "exit 0\n")) + return expected + + def write_script(self, dir, name, text): + f = open(os.path.join(dir, name), "w") + try: + f.write(text) + finally: + f.close() + + def test_version_int(self): + source = self.mkdtemp() + target = self.mkdtemp() + expected = self.write_sample_scripts(source) + + + cmd = self.get_build_scripts_cmd(target, + [os.path.join(source, fn) + for fn in expected]) + cmd.finalize_options() + + # http://bugs.python.org/issue4524 + # + # On linux-g++-32 with command line `./configure --enable-ipv6 + # --with-suffix=3`, python is compiled okay but the build scripts + # failed when writing the name of the executable + old = sysconfig.get_config_vars().get('VERSION') + sysconfig._config_vars['VERSION'] = 4 + try: + cmd.run() + finally: + if old is not None: + sysconfig._config_vars['VERSION'] = old + + built = os.listdir(target) + for name in expected: + self.assertIn(name, built) + +def test_suite(): + return unittest.TestLoader().loadTestsFromTestCase(BuildScriptsTestCase) + +if __name__ == "__main__": + run_unittest(test_suite()) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_check.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_check.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_check.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_check.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,163 @@ +"""Tests for distutils.command.check.""" +import os +import textwrap +import unittest +from test.support import run_unittest + +from distutils.command.check import check, HAS_DOCUTILS +from distutils.tests import support +from distutils.errors import DistutilsSetupError + +try: + import pygments +except ImportError: + pygments = None + + +HERE = os.path.dirname(__file__) + + +class CheckTestCase(support.LoggingSilencer, + support.TempdirManager, + unittest.TestCase): + + def _run(self, metadata=None, cwd=None, **options): + if metadata is None: + metadata = {} + if cwd is not None: + old_dir = os.getcwd() + os.chdir(cwd) + pkg_info, dist = self.create_dist(**metadata) + cmd = check(dist) + cmd.initialize_options() + for name, value in options.items(): + setattr(cmd, name, value) + cmd.ensure_finalized() + cmd.run() + if cwd is not None: + os.chdir(old_dir) + return cmd + + def test_check_metadata(self): + # let's run the command with no metadata at all + # by default, check is checking the metadata + # should have some warnings + cmd = self._run() + self.assertEqual(cmd._warnings, 2) + + # now let's add the required fields + # and run it again, to make sure we don't get + # any warning anymore + metadata = {'url': 'xxx', 'author': 'xxx', + 'author_email': 'xxx', + 'name': 'xxx', 'version': 'xxx'} + cmd = self._run(metadata) + self.assertEqual(cmd._warnings, 0) + + # now with the strict mode, we should + # get an error if there are missing metadata + self.assertRaises(DistutilsSetupError, self._run, {}, **{'strict': 1}) + + # and of course, no error when all metadata are present + cmd = self._run(metadata, strict=1) + self.assertEqual(cmd._warnings, 0) + + # now a test with non-ASCII characters + metadata = 
{'url': 'xxx', 'author': '\u00c9ric', + 'author_email': 'xxx', 'name': 'xxx', + 'version': 'xxx', + 'description': 'Something about esszet \u00df', + 'long_description': 'More things about esszet \u00df'} + cmd = self._run(metadata) + self.assertEqual(cmd._warnings, 0) + + @unittest.skipUnless(HAS_DOCUTILS, "won't test without docutils") + def test_check_document(self): + pkg_info, dist = self.create_dist() + cmd = check(dist) + + # let's see if it detects broken rest + broken_rest = 'title\n===\n\ntest' + msgs = cmd._check_rst_data(broken_rest) + self.assertEqual(len(msgs), 1) + + # and non-broken rest + rest = 'title\n=====\n\ntest' + msgs = cmd._check_rst_data(rest) + self.assertEqual(len(msgs), 0) + + @unittest.skipUnless(HAS_DOCUTILS, "won't test without docutils") + def test_check_restructuredtext(self): + # let's see if it detects broken rest in long_description + broken_rest = 'title\n===\n\ntest' + pkg_info, dist = self.create_dist(long_description=broken_rest) + cmd = check(dist) + cmd.check_restructuredtext() + self.assertEqual(cmd._warnings, 1) + + # let's see if we have an error with strict=1 + metadata = {'url': 'xxx', 'author': 'xxx', + 'author_email': 'xxx', + 'name': 'xxx', 'version': 'xxx', + 'long_description': broken_rest} + self.assertRaises(DistutilsSetupError, self._run, metadata, + **{'strict': 1, 'restructuredtext': 1}) + + # and non-broken rest, including a non-ASCII character to test #12114 + metadata['long_description'] = 'title\n=====\n\ntest \u00df' + cmd = self._run(metadata, strict=1, restructuredtext=1) + self.assertEqual(cmd._warnings, 0) + + # check that includes work to test #31292 + metadata['long_description'] = 'title\n=====\n\n.. include:: includetest.rst' + cmd = self._run(metadata, cwd=HERE, strict=1, restructuredtext=1) + self.assertEqual(cmd._warnings, 0) + + @unittest.skipUnless(HAS_DOCUTILS, "won't test without docutils") + def test_check_restructuredtext_with_syntax_highlight(self): + # Don't fail if there is a `code` or `code-block` directive + + example_rst_docs = [] + example_rst_docs.append(textwrap.dedent("""\ + Here's some code: + + .. code:: python + + def foo(): + pass + """)) + example_rst_docs.append(textwrap.dedent("""\ + Here's some code: + + .. code-block:: python + + def foo(): + pass + """)) + + for rest_with_code in example_rst_docs: + pkg_info, dist = self.create_dist(long_description=rest_with_code) + cmd = check(dist) + cmd.check_restructuredtext() + msgs = cmd._check_rst_data(rest_with_code) + if pygments is not None: + self.assertEqual(len(msgs), 0) + else: + self.assertEqual(len(msgs), 1) + self.assertEqual( + str(msgs[0][1]), + 'Cannot analyze code. Pygments package not found.' 
+ ) + + def test_check_all(self): + + metadata = {'url': 'xxx', 'author': 'xxx'} + self.assertRaises(DistutilsSetupError, self._run, + {}, **{'strict': 1, + 'restructuredtext': 1}) + +def test_suite(): + return unittest.TestLoader().loadTestsFromTestCase(CheckTestCase) + +if __name__ == "__main__": + run_unittest(test_suite()) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_clean.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_clean.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_clean.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_clean.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,49 @@ +"""Tests for distutils.command.clean.""" +import os +import unittest + +from distutils.command.clean import clean +from distutils.tests import support +from test.support import run_unittest + +class cleanTestCase(support.TempdirManager, + support.LoggingSilencer, + unittest.TestCase): + + def test_simple_run(self): + pkg_dir, dist = self.create_dist() + cmd = clean(dist) + + # let's add some elements clean should remove + dirs = [(d, os.path.join(pkg_dir, d)) + for d in ('build_temp', 'build_lib', 'bdist_base', + 'build_scripts', 'build_base')] + + for name, path in dirs: + os.mkdir(path) + setattr(cmd, name, path) + if name == 'build_base': + continue + for f in ('one', 'two', 'three'): + self.write_file(os.path.join(path, f)) + + # let's run the command + cmd.all = 1 + cmd.ensure_finalized() + cmd.run() + + # make sure the files where removed + for name, path in dirs: + self.assertFalse(os.path.exists(path), + '%s was not removed' % path) + + # let's run the command again (should spit warnings but succeed) + cmd.all = 1 + cmd.ensure_finalized() + cmd.run() + +def test_suite(): + return unittest.TestLoader().loadTestsFromTestCase(cleanTestCase) + +if __name__ == "__main__": + run_unittest(test_suite()) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_cmd.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_cmd.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_cmd.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_cmd.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,126 @@ +"""Tests for distutils.cmd.""" +import unittest +import os +from test.support import captured_stdout, run_unittest + +from distutils.cmd import Command +from distutils.dist import Distribution +from distutils.errors import DistutilsOptionError +from distutils import debug + +class MyCmd(Command): + def initialize_options(self): + pass + +class CommandTestCase(unittest.TestCase): + + def setUp(self): + dist = Distribution() + self.cmd = MyCmd(dist) + + def test_ensure_string_list(self): + + cmd = self.cmd + cmd.not_string_list = ['one', 2, 'three'] + cmd.yes_string_list = ['one', 'two', 'three'] + cmd.not_string_list2 = object() + cmd.yes_string_list2 = 'ok' + cmd.ensure_string_list('yes_string_list') + cmd.ensure_string_list('yes_string_list2') + + self.assertRaises(DistutilsOptionError, + cmd.ensure_string_list, 'not_string_list') + + self.assertRaises(DistutilsOptionError, + cmd.ensure_string_list, 'not_string_list2') + + cmd.option1 = 'ok,dok' + cmd.ensure_string_list('option1') + self.assertEqual(cmd.option1, ['ok', 'dok']) + + cmd.option2 = ['xxx', 'www'] + cmd.ensure_string_list('option2') + + cmd.option3 = ['ok', 2] + self.assertRaises(DistutilsOptionError, cmd.ensure_string_list, + 'option3') + 
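For reference, a minimal standalone sketch of the ensure_string_list() behaviour that test_ensure_string_list above exercises; DemoCmd and the literal option values are illustrative only and are not part of the patch:

    from distutils.cmd import Command
    from distutils.dist import Distribution
    from distutils.errors import DistutilsOptionError

    class DemoCmd(Command):
        # throwaway subclass, analogous to MyCmd in the test above
        def initialize_options(self):
            pass

    cmd = DemoCmd(Distribution())
    cmd.libs = 'my_lib, other_lib lastlib'   # commas and/or whitespace both split
    cmd.ensure_string_list('libs')
    assert cmd.libs == ['my_lib', 'other_lib', 'lastlib']

    cmd.bad = ['ok', 2]                      # a list containing non-strings is rejected
    try:
        cmd.ensure_string_list('bad')
    except DistutilsOptionError:
        pass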
+ + def test_make_file(self): + + cmd = self.cmd + + # making sure it raises when infiles is not a string or a list/tuple + self.assertRaises(TypeError, cmd.make_file, + infiles=1, outfile='', func='func', args=()) + + # making sure execute gets called properly + def _execute(func, args, exec_msg, level): + self.assertEqual(exec_msg, 'generating out from in') + cmd.force = True + cmd.execute = _execute + cmd.make_file(infiles='in', outfile='out', func='func', args=()) + + def test_dump_options(self): + + msgs = [] + def _announce(msg, level): + msgs.append(msg) + cmd = self.cmd + cmd.announce = _announce + cmd.option1 = 1 + cmd.option2 = 1 + cmd.user_options = [('option1', '', ''), ('option2', '', '')] + cmd.dump_options() + + wanted = ["command options for 'MyCmd':", ' option1 = 1', + ' option2 = 1'] + self.assertEqual(msgs, wanted) + + def test_ensure_string(self): + cmd = self.cmd + cmd.option1 = 'ok' + cmd.ensure_string('option1') + + cmd.option2 = None + cmd.ensure_string('option2', 'xxx') + self.assertTrue(hasattr(cmd, 'option2')) + + cmd.option3 = 1 + self.assertRaises(DistutilsOptionError, cmd.ensure_string, 'option3') + + def test_ensure_filename(self): + cmd = self.cmd + cmd.option1 = __file__ + cmd.ensure_filename('option1') + cmd.option2 = 'xxx' + self.assertRaises(DistutilsOptionError, cmd.ensure_filename, 'option2') + + def test_ensure_dirname(self): + cmd = self.cmd + cmd.option1 = os.path.dirname(__file__) or os.curdir + cmd.ensure_dirname('option1') + cmd.option2 = 'xxx' + self.assertRaises(DistutilsOptionError, cmd.ensure_dirname, 'option2') + + def test_debug_print(self): + cmd = self.cmd + with captured_stdout() as stdout: + cmd.debug_print('xxx') + stdout.seek(0) + self.assertEqual(stdout.read(), '') + + debug.DEBUG = True + try: + with captured_stdout() as stdout: + cmd.debug_print('xxx') + stdout.seek(0) + self.assertEqual(stdout.read(), 'xxx\n') + finally: + debug.DEBUG = False + +def test_suite(): + return unittest.TestLoader().loadTestsFromTestCase(CommandTestCase) + +if __name__ == '__main__': + run_unittest(test_suite()) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_config.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_config.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_config.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_config.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,141 @@ +"""Tests for distutils.pypirc.pypirc.""" +import os +import unittest + +from distutils.core import PyPIRCCommand +from distutils.core import Distribution +from distutils.log import set_threshold +from distutils.log import WARN + +from distutils.tests import support +from test.support import run_unittest + +PYPIRC = """\ +[distutils] + +index-servers = + server1 + server2 + server3 + +[server1] +username:me +password:secret + +[server2] +username:meagain +password: secret +realm:acme +repository:http://another.pypi/ + +[server3] +username:cbiggles +password:yh^%#rest-of-my-password +""" + +PYPIRC_OLD = """\ +[server-login] +username:tarek +password:secret +""" + +WANTED = """\ +[distutils] +index-servers = + pypi + +[pypi] +username:tarek +password:xxx +""" + + +class BasePyPIRCCommandTestCase(support.TempdirManager, + support.LoggingSilencer, + support.EnvironGuard, + unittest.TestCase): + + def setUp(self): + """Patches the environment.""" + super(BasePyPIRCCommandTestCase, self).setUp() + self.tmp_dir = self.mkdtemp() + os.environ['HOME'] = 
self.tmp_dir + os.environ['USERPROFILE'] = self.tmp_dir + self.rc = os.path.join(self.tmp_dir, '.pypirc') + self.dist = Distribution() + + class command(PyPIRCCommand): + def __init__(self, dist): + PyPIRCCommand.__init__(self, dist) + def initialize_options(self): + pass + finalize_options = initialize_options + + self._cmd = command + self.old_threshold = set_threshold(WARN) + + def tearDown(self): + """Removes the patch.""" + set_threshold(self.old_threshold) + super(BasePyPIRCCommandTestCase, self).tearDown() + + +class PyPIRCCommandTestCase(BasePyPIRCCommandTestCase): + + def test_server_registration(self): + # This test makes sure PyPIRCCommand knows how to: + # 1. handle several sections in .pypirc + # 2. handle the old format + + # new format + self.write_file(self.rc, PYPIRC) + cmd = self._cmd(self.dist) + config = cmd._read_pypirc() + + config = list(sorted(config.items())) + waited = [('password', 'secret'), ('realm', 'pypi'), + ('repository', 'https://upload.pypi.org/legacy/'), + ('server', 'server1'), ('username', 'me')] + self.assertEqual(config, waited) + + # old format + self.write_file(self.rc, PYPIRC_OLD) + config = cmd._read_pypirc() + config = list(sorted(config.items())) + waited = [('password', 'secret'), ('realm', 'pypi'), + ('repository', 'https://upload.pypi.org/legacy/'), + ('server', 'server-login'), ('username', 'tarek')] + self.assertEqual(config, waited) + + def test_server_empty_registration(self): + cmd = self._cmd(self.dist) + rc = cmd._get_rc_file() + self.assertFalse(os.path.exists(rc)) + cmd._store_pypirc('tarek', 'xxx') + self.assertTrue(os.path.exists(rc)) + f = open(rc) + try: + content = f.read() + self.assertEqual(content, WANTED) + finally: + f.close() + + def test_config_interpolation(self): + # using the % character in .pypirc should not raise an error (#20120) + self.write_file(self.rc, PYPIRC) + cmd = self._cmd(self.dist) + cmd.repository = 'server3' + config = cmd._read_pypirc() + + config = list(sorted(config.items())) + waited = [('password', 'yh^%#rest-of-my-password'), ('realm', 'pypi'), + ('repository', 'https://upload.pypi.org/legacy/'), + ('server', 'server3'), ('username', 'cbiggles')] + self.assertEqual(config, waited) + + +def test_suite(): + return unittest.TestLoader().loadTestsFromTestCase(PyPIRCCommandTestCase) + +if __name__ == "__main__": + run_unittest(test_suite()) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_config_cmd.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_config_cmd.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_config_cmd.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_config_cmd.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,103 @@ +"""Tests for distutils.command.config.""" +import unittest +import os +import sys +import sysconfig +from test.support import ( + run_unittest, missing_compiler_executable, requires_subprocess +) + +from distutils.command.config import dump_file, config +from distutils.tests import support +from distutils import log + +class ConfigTestCase(support.LoggingSilencer, + support.TempdirManager, + unittest.TestCase): + + def _info(self, msg, *args): + for line in msg.splitlines(): + self._logs.append(line) + + def setUp(self): + super(ConfigTestCase, self).setUp() + self._logs = [] + self.old_log = log.info + log.info = self._info + self.old_config_vars = dict(sysconfig._CONFIG_VARS) + + def tearDown(self): + log.info = self.old_log + 
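As a side note to test_server_registration above, a self-contained sketch of the legacy [server-login] fallback in _read_pypirc(); the DemoPyPIRC subclass, the temporary HOME and the credentials are illustrative, not part of the patch, and the HOME trick assumes a POSIX-style expanduser():

    import os, tempfile
    from distutils.core import Distribution, PyPIRCCommand

    class DemoPyPIRC(PyPIRCCommand):
        # throwaway subclass, like the nested command class in setUp above
        def initialize_options(self):
            pass
        finalize_options = initialize_options

    os.environ['HOME'] = tempfile.mkdtemp()          # _get_rc_file() looks under ~
    with open(os.path.join(os.environ['HOME'], '.pypirc'), 'w') as f:
        f.write('[server-login]\nusername:tarek\npassword:secret\n')

    cfg = DemoPyPIRC(Distribution())._read_pypirc()
    assert cfg['server'] == 'server-login'
    assert cfg['username'] == 'tarek'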
sysconfig._CONFIG_VARS.clear() + sysconfig._CONFIG_VARS.update(self.old_config_vars) + super(ConfigTestCase, self).tearDown() + + def test_dump_file(self): + this_file = os.path.splitext(__file__)[0] + '.py' + f = open(this_file) + try: + numlines = len(f.readlines()) + finally: + f.close() + + dump_file(this_file, 'I am the header') + self.assertEqual(len(self._logs), numlines+1) + + @unittest.skipIf(sys.platform == 'win32', "can't test on Windows") + @requires_subprocess() + def test_search_cpp(self): + cmd = missing_compiler_executable(['preprocessor']) + if cmd is not None: + self.skipTest('The %r command is not found' % cmd) + pkg_dir, dist = self.create_dist() + cmd = config(dist) + cmd._check_compiler() + compiler = cmd.compiler + if sys.platform[:3] == "aix" and "xlc" in compiler.preprocessor[0].lower(): + self.skipTest('xlc: The -E option overrides the -P, -o, and -qsyntaxonly options') + + # simple pattern searches + match = cmd.search_cpp(pattern='xxx', body='/* xxx */') + self.assertEqual(match, 0) + + match = cmd.search_cpp(pattern='_configtest', body='/* xxx */') + self.assertEqual(match, 1) + + def test_finalize_options(self): + # finalize_options does a bit of transformation + # on options + pkg_dir, dist = self.create_dist() + cmd = config(dist) + cmd.include_dirs = 'one%stwo' % os.pathsep + cmd.libraries = 'one' + cmd.library_dirs = 'three%sfour' % os.pathsep + cmd.ensure_finalized() + + self.assertEqual(cmd.include_dirs, ['one', 'two']) + self.assertEqual(cmd.libraries, ['one']) + self.assertEqual(cmd.library_dirs, ['three', 'four']) + + def test_clean(self): + # _clean removes files + tmp_dir = self.mkdtemp() + f1 = os.path.join(tmp_dir, 'one') + f2 = os.path.join(tmp_dir, 'two') + + self.write_file(f1, 'xxx') + self.write_file(f2, 'xxx') + + for f in (f1, f2): + self.assertTrue(os.path.exists(f)) + + pkg_dir, dist = self.create_dist() + cmd = config(dist) + cmd._clean(f1, f2) + + for f in (f1, f2): + self.assertFalse(os.path.exists(f)) + +def test_suite(): + return unittest.TestLoader().loadTestsFromTestCase(ConfigTestCase) + +if __name__ == "__main__": + run_unittest(test_suite()) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_core.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_core.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_core.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_core.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,140 @@ +"""Tests for distutils.core.""" + +import io +import distutils.core +import os +import shutil +import sys +from test.support import captured_stdout, run_unittest +from test.support import os_helper +import unittest +from distutils.tests import support +from distutils import log + +# setup script that uses __file__ +setup_using___file__ = """\ + +__file__ + +from distutils.core import setup +setup() +""" + +setup_prints_cwd = """\ + +import os +print(os.getcwd()) + +from distutils.core import setup +setup() +""" + +setup_does_nothing = """\ +from distutils.core import setup +setup() +""" + + +setup_defines_subclass = """\ +from distutils.core import setup +from distutils.command.install import install as _install + +class install(_install): + sub_commands = _install.sub_commands + ['cmd'] + +setup(cmdclass={'install': install}) +""" + +class CoreTestCase(support.EnvironGuard, unittest.TestCase): + + def setUp(self): + super(CoreTestCase, self).setUp() + self.old_stdout = sys.stdout + self.cleanup_testfn() + 
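A compact sketch of the option normalisation that test_finalize_options in test_config_cmd.py above checks: the config command splits os.pathsep-separated strings and wraps a bare library name in a list. The snippet is illustrative and not part of the patch:

    import os
    from distutils.command.config import config
    from distutils.core import Distribution

    cmd = config(Distribution())
    cmd.include_dirs = 'one%stwo' % os.pathsep
    cmd.libraries = 'one'
    cmd.library_dirs = 'three%sfour' % os.pathsep
    cmd.ensure_finalized()

    assert cmd.include_dirs == ['one', 'two']
    assert cmd.libraries == ['one']
    assert cmd.library_dirs == ['three', 'four']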
self.old_argv = sys.argv, sys.argv[:] + self.addCleanup(log.set_threshold, log._global_log.threshold) + + def tearDown(self): + sys.stdout = self.old_stdout + self.cleanup_testfn() + sys.argv = self.old_argv[0] + sys.argv[:] = self.old_argv[1] + super(CoreTestCase, self).tearDown() + + def cleanup_testfn(self): + path = os_helper.TESTFN + if os.path.isfile(path): + os.remove(path) + elif os.path.isdir(path): + shutil.rmtree(path) + + def write_setup(self, text, path=os_helper.TESTFN): + f = open(path, "w") + try: + f.write(text) + finally: + f.close() + return path + + def test_run_setup_provides_file(self): + # Make sure the script can use __file__; if that's missing, the test + # setup.py script will raise NameError. + distutils.core.run_setup( + self.write_setup(setup_using___file__)) + + def test_run_setup_preserves_sys_argv(self): + # Make sure run_setup does not clobber sys.argv + argv_copy = sys.argv.copy() + distutils.core.run_setup( + self.write_setup(setup_does_nothing)) + self.assertEqual(sys.argv, argv_copy) + + def test_run_setup_defines_subclass(self): + # Make sure the script can use __file__; if that's missing, the test + # setup.py script will raise NameError. + dist = distutils.core.run_setup( + self.write_setup(setup_defines_subclass)) + install = dist.get_command_obj('install') + self.assertIn('cmd', install.sub_commands) + + def test_run_setup_uses_current_dir(self): + # This tests that the setup script is run with the current directory + # as its own current directory; this was temporarily broken by a + # previous patch when TESTFN did not use the current directory. + sys.stdout = io.StringIO() + cwd = os.getcwd() + + # Create a directory and write the setup.py file there: + os.mkdir(os_helper.TESTFN) + setup_py = os.path.join(os_helper.TESTFN, "setup.py") + distutils.core.run_setup( + self.write_setup(setup_prints_cwd, path=setup_py)) + + output = sys.stdout.getvalue() + if output.endswith("\n"): + output = output[:-1] + self.assertEqual(cwd, output) + + def test_debug_mode(self): + # this covers the code called when DEBUG is set + sys.argv = ['setup.py', '--name'] + with captured_stdout() as stdout: + distutils.core.setup(name='bar') + stdout.seek(0) + self.assertEqual(stdout.read(), 'bar\n') + + distutils.core.DEBUG = True + try: + with captured_stdout() as stdout: + distutils.core.setup(name='bar') + finally: + distutils.core.DEBUG = False + stdout.seek(0) + wanted = "options (after parsing config files):\n" + self.assertEqual(stdout.readlines()[0], wanted) + +def test_suite(): + return unittest.TestLoader().loadTestsFromTestCase(CoreTestCase) + +if __name__ == "__main__": + run_unittest(test_suite()) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_cygwinccompiler.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_cygwinccompiler.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_cygwinccompiler.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_cygwinccompiler.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,154 @@ +"""Tests for distutils.cygwinccompiler.""" +import unittest +import sys +import os +from io import BytesIO +from test.support import run_unittest + +from distutils import cygwinccompiler +from distutils.cygwinccompiler import (check_config_h, + CONFIG_H_OK, CONFIG_H_NOTOK, + CONFIG_H_UNCERTAIN, get_versions, + get_msvcr) +from distutils.tests import support + +class FakePopen(object): + test_class = None + + def __init__(self, cmd, shell, 
stdout): + self.cmd = cmd.split()[0] + exes = self.test_class._exes + if self.cmd in exes: + # issue #6438 in Python 3.x, Popen returns bytes + self.stdout = BytesIO(exes[self.cmd]) + else: + self.stdout = os.popen(cmd, 'r') + + +class CygwinCCompilerTestCase(support.TempdirManager, + unittest.TestCase): + + def setUp(self): + super(CygwinCCompilerTestCase, self).setUp() + self.version = sys.version + self.python_h = os.path.join(self.mkdtemp(), 'python.h') + from distutils import sysconfig + self.old_get_config_h_filename = sysconfig.get_config_h_filename + sysconfig.get_config_h_filename = self._get_config_h_filename + self.old_find_executable = cygwinccompiler.find_executable + cygwinccompiler.find_executable = self._find_executable + self._exes = {} + self.old_popen = cygwinccompiler.Popen + FakePopen.test_class = self + cygwinccompiler.Popen = FakePopen + + def tearDown(self): + sys.version = self.version + from distutils import sysconfig + sysconfig.get_config_h_filename = self.old_get_config_h_filename + cygwinccompiler.find_executable = self.old_find_executable + cygwinccompiler.Popen = self.old_popen + super(CygwinCCompilerTestCase, self).tearDown() + + def _get_config_h_filename(self): + return self.python_h + + def _find_executable(self, name): + if name in self._exes: + return name + return None + + def test_check_config_h(self): + + # check_config_h looks for "GCC" in sys.version first + # returns CONFIG_H_OK if found + sys.version = ('2.6.1 (r261:67515, Dec 6 2008, 16:42:21) \n[GCC ' + '4.0.1 (Apple Computer, Inc. build 5370)]') + + self.assertEqual(check_config_h()[0], CONFIG_H_OK) + + # then it tries to see if it can find "__GNUC__" in pyconfig.h + sys.version = 'something without the *CC word' + + # if the file doesn't exist it returns CONFIG_H_UNCERTAIN + self.assertEqual(check_config_h()[0], CONFIG_H_UNCERTAIN) + + # if it exists but does not contain __GNUC__, it returns CONFIG_H_NOTOK + self.write_file(self.python_h, 'xxx') + self.assertEqual(check_config_h()[0], CONFIG_H_NOTOK) + + # and CONFIG_H_OK if __GNUC__ is found + self.write_file(self.python_h, 'xxx __GNUC__ xxx') + self.assertEqual(check_config_h()[0], CONFIG_H_OK) + + def test_get_versions(self): + + # get_versions calls distutils.spawn.find_executable on + # 'gcc', 'ld' and 'dllwrap' + self.assertEqual(get_versions(), (None, None, None)) + + # Let's fake we have 'gcc' and it returns '3.4.5' + self._exes['gcc'] = b'gcc (GCC) 3.4.5 (mingw special)\nFSF' + res = get_versions() + self.assertEqual(str(res[0]), '3.4.5') + + # and let's see what happens when the version + # doesn't match the regular expression + # (\d+\.\d+(\.\d+)*) + self._exes['gcc'] = b'very strange output' + res = get_versions() + self.assertEqual(res[0], None) + + # same thing for ld + self._exes['ld'] = b'GNU ld version 2.17.50 20060824' + res = get_versions() + self.assertEqual(str(res[1]), '2.17.50') + self._exes['ld'] = b'@(#)PROGRAM:ld PROJECT:ld64-77' + res = get_versions() + self.assertEqual(res[1], None) + + # and dllwrap + self._exes['dllwrap'] = b'GNU dllwrap 2.17.50 20060824\nFSF' + res = get_versions() + self.assertEqual(str(res[2]), '2.17.50') + self._exes['dllwrap'] = b'Cheese Wrap' + res = get_versions() + self.assertEqual(res[2], None) + + def test_get_msvcr(self): + + # none + sys.version = ('2.6.1 (r261:67515, Dec 6 2008, 16:42:21) ' + '\n[GCC 4.0.1 (Apple Computer, Inc. 
build 5370)]') + self.assertEqual(get_msvcr(), None) + + # MSVC 7.0 + sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' + '[MSC v.1300 32 bits (Intel)]') + self.assertEqual(get_msvcr(), ['msvcr70']) + + # MSVC 7.1 + sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' + '[MSC v.1310 32 bits (Intel)]') + self.assertEqual(get_msvcr(), ['msvcr71']) + + # VS2005 / MSVC 8.0 + sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' + '[MSC v.1400 32 bits (Intel)]') + self.assertEqual(get_msvcr(), ['msvcr80']) + + # VS2008 / MSVC 9.0 + sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' + '[MSC v.1500 32 bits (Intel)]') + self.assertEqual(get_msvcr(), ['msvcr90']) + + # unknown + sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' + '[MSC v.1999 32 bits (Intel)]') + self.assertRaises(ValueError, get_msvcr) + +def test_suite(): + return unittest.TestLoader().loadTestsFromTestCase(CygwinCCompilerTestCase) + +if __name__ == '__main__': + run_unittest(test_suite()) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_dep_util.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_dep_util.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_dep_util.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_dep_util.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,80 @@ +"""Tests for distutils.dep_util.""" +import unittest +import os + +from distutils.dep_util import newer, newer_pairwise, newer_group +from distutils.errors import DistutilsFileError +from distutils.tests import support +from test.support import run_unittest + +class DepUtilTestCase(support.TempdirManager, unittest.TestCase): + + def test_newer(self): + + tmpdir = self.mkdtemp() + new_file = os.path.join(tmpdir, 'new') + old_file = os.path.abspath(__file__) + + # Raise DistutilsFileError if 'new_file' does not exist. + self.assertRaises(DistutilsFileError, newer, new_file, old_file) + + # Return true if 'new_file' exists and is more recently modified than + # 'old_file', or if 'new_file' exists and 'old_file' doesn't. + self.write_file(new_file) + self.assertTrue(newer(new_file, 'I_dont_exist')) + self.assertTrue(newer(new_file, old_file)) + + # Return false if both exist and 'old_file' is the same age or younger + # than 'new_file'. + self.assertFalse(newer(old_file, new_file)) + + def test_newer_pairwise(self): + tmpdir = self.mkdtemp() + sources = os.path.join(tmpdir, 'sources') + targets = os.path.join(tmpdir, 'targets') + os.mkdir(sources) + os.mkdir(targets) + one = os.path.join(sources, 'one') + two = os.path.join(sources, 'two') + three = os.path.abspath(__file__) # I am the old file + four = os.path.join(targets, 'four') + self.write_file(one) + self.write_file(two) + self.write_file(four) + + self.assertEqual(newer_pairwise([one, two], [three, four]), + ([one],[three])) + + def test_newer_group(self): + tmpdir = self.mkdtemp() + sources = os.path.join(tmpdir, 'sources') + os.mkdir(sources) + one = os.path.join(sources, 'one') + two = os.path.join(sources, 'two') + three = os.path.join(sources, 'three') + old_file = os.path.abspath(__file__) + + # return true if 'old_file' is out-of-date with respect to any file + # listed in 'sources'. 
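A standalone sketch of the dep_util contract that test_newer and test_newer_group above rely on (modification-time comparison, a missing target means rebuild, a missing source is an error); the temporary files are illustrative and not part of the patch:

    import os, tempfile
    from distutils.dep_util import newer, newer_group
    from distutils.errors import DistutilsFileError

    tmp = tempfile.mkdtemp()
    old = os.path.join(tmp, 'old')
    new = os.path.join(tmp, 'new')
    open(old, 'w').close()
    open(new, 'w').close()
    os.utime(old, (0, 0))                    # force an ancient mtime on 'old'

    assert newer(new, old)                   # source more recent than target
    assert newer(new, 'I_dont_exist')        # missing target: always rebuild
    assert newer_group([old, new], old)      # any newer member of the group wins
    try:
        newer(os.path.join(tmp, 'missing'), old)
    except DistutilsFileError:
        pass                                 # a missing source raises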
+ self.write_file(one) + self.write_file(two) + self.write_file(three) + self.assertTrue(newer_group([one, two, three], old_file)) + self.assertFalse(newer_group([one, two, old_file], three)) + + # missing handling + os.remove(one) + self.assertRaises(OSError, newer_group, [one, two, old_file], three) + + self.assertFalse(newer_group([one, two, old_file], three, + missing='ignore')) + + self.assertTrue(newer_group([one, two, old_file], three, + missing='newer')) + + +def test_suite(): + return unittest.TestLoader().loadTestsFromTestCase(DepUtilTestCase) + +if __name__ == "__main__": + run_unittest(test_suite()) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_dir_util.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_dir_util.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_dir_util.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_dir_util.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,143 @@ +"""Tests for distutils.dir_util.""" +import unittest +import os +import stat +import sys +from unittest.mock import patch + +from distutils import dir_util, errors +from distutils.dir_util import (mkpath, remove_tree, create_tree, copy_tree, + ensure_relative) + +from distutils import log +from distutils.tests import support +from test.support import run_unittest, is_emscripten, is_wasi + + +class DirUtilTestCase(support.TempdirManager, unittest.TestCase): + + def _log(self, msg, *args): + if len(args) > 0: + self._logs.append(msg % args) + else: + self._logs.append(msg) + + def setUp(self): + super(DirUtilTestCase, self).setUp() + self._logs = [] + tmp_dir = self.mkdtemp() + self.root_target = os.path.join(tmp_dir, 'deep') + self.target = os.path.join(self.root_target, 'here') + self.target2 = os.path.join(tmp_dir, 'deep2') + self.old_log = log.info + log.info = self._log + + def tearDown(self): + log.info = self.old_log + super(DirUtilTestCase, self).tearDown() + + def test_mkpath_remove_tree_verbosity(self): + + mkpath(self.target, verbose=0) + wanted = [] + self.assertEqual(self._logs, wanted) + remove_tree(self.root_target, verbose=0) + + mkpath(self.target, verbose=1) + wanted = ['creating %s' % self.root_target, + 'creating %s' % self.target] + self.assertEqual(self._logs, wanted) + self._logs = [] + + remove_tree(self.root_target, verbose=1) + wanted = ["removing '%s' (and everything under it)" % self.root_target] + self.assertEqual(self._logs, wanted) + + @unittest.skipIf(sys.platform.startswith('win'), + "This test is only appropriate for POSIX-like systems.") + @unittest.skipIf( + is_emscripten or is_wasi, + "Emscripten's/WASI's umask is a stub." + ) + def test_mkpath_with_custom_mode(self): + # Get and set the current umask value for testing mode bits. 
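An illustrative, POSIX-only sketch of the mkpath() mode handling that the surrounding test_mkpath_with_custom_mode asserts: intermediate directories are created, and the mode actually applied is the requested mode masked by the process umask. The paths and the 0o700 mode are examples, not part of the patch:

    import os, stat, tempfile
    from distutils.dir_util import mkpath, remove_tree

    base = tempfile.mkdtemp()
    target = os.path.join(base, 'deep', 'here')
    mkpath(target, 0o700, verbose=0)

    umask = os.umask(0o002)                  # read the current umask ...
    os.umask(umask)                          # ... and restore it immediately
    assert stat.S_IMODE(os.stat(target).st_mode) == 0o700 & ~umask

    remove_tree(base, verbose=0)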
+ umask = os.umask(0o002) + os.umask(umask) + mkpath(self.target, 0o700) + self.assertEqual( + stat.S_IMODE(os.stat(self.target).st_mode), 0o700 & ~umask) + mkpath(self.target2, 0o555) + self.assertEqual( + stat.S_IMODE(os.stat(self.target2).st_mode), 0o555 & ~umask) + + def test_create_tree_verbosity(self): + + create_tree(self.root_target, ['one', 'two', 'three'], verbose=0) + self.assertEqual(self._logs, []) + remove_tree(self.root_target, verbose=0) + + wanted = ['creating %s' % self.root_target] + create_tree(self.root_target, ['one', 'two', 'three'], verbose=1) + self.assertEqual(self._logs, wanted) + + remove_tree(self.root_target, verbose=0) + + def test_copy_tree_verbosity(self): + + mkpath(self.target, verbose=0) + + copy_tree(self.target, self.target2, verbose=0) + self.assertEqual(self._logs, []) + + remove_tree(self.root_target, verbose=0) + + mkpath(self.target, verbose=0) + a_file = os.path.join(self.target, 'ok.txt') + with open(a_file, 'w') as f: + f.write('some content') + + wanted = ['copying %s -> %s' % (a_file, self.target2)] + copy_tree(self.target, self.target2, verbose=1) + self.assertEqual(self._logs, wanted) + + remove_tree(self.root_target, verbose=0) + remove_tree(self.target2, verbose=0) + + def test_copy_tree_skips_nfs_temp_files(self): + mkpath(self.target, verbose=0) + + a_file = os.path.join(self.target, 'ok.txt') + nfs_file = os.path.join(self.target, '.nfs123abc') + for f in a_file, nfs_file: + with open(f, 'w') as fh: + fh.write('some content') + + copy_tree(self.target, self.target2) + self.assertEqual(os.listdir(self.target2), ['ok.txt']) + + remove_tree(self.root_target, verbose=0) + remove_tree(self.target2, verbose=0) + + def test_ensure_relative(self): + if os.sep == '/': + self.assertEqual(ensure_relative('/home/foo'), 'home/foo') + self.assertEqual(ensure_relative('some/path'), 'some/path') + else: # \\ + self.assertEqual(ensure_relative('c:\\home\\foo'), 'c:home\\foo') + self.assertEqual(ensure_relative('home\\foo'), 'home\\foo') + + def test_copy_tree_exception_in_listdir(self): + """ + An exception in listdir should raise a DistutilsFileError + """ + with patch("os.listdir", side_effect=OSError()), \ + self.assertRaises(errors.DistutilsFileError): + src = self.tempdirs[-1] + dir_util.copy_tree(src, None) + + +def test_suite(): + return unittest.TestLoader().loadTestsFromTestCase(DirUtilTestCase) + +if __name__ == "__main__": + run_unittest(test_suite()) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_dist.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_dist.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_dist.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_dist.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,529 @@ +"""Tests for distutils.dist.""" +import os +import io +import sys +import unittest +import warnings +import textwrap + +from unittest import mock + +from distutils.dist import Distribution, fix_help_options +from distutils.cmd import Command + +from test.support import ( + captured_stdout, captured_stderr, run_unittest +) +from test.support.os_helper import TESTFN +from distutils.tests import support +from distutils import log + + +class test_dist(Command): + """Sample distutils extension command.""" + + user_options = [ + ("sample-option=", "S", "help text"), + ] + + def initialize_options(self): + self.sample_option = None + + +class TestDistribution(Distribution): + """Distribution subclasses that avoids the 
default search for + configuration files. + + The ._config_files attribute must be set before + .parse_config_files() is called. + """ + + def find_config_files(self): + return self._config_files + + +class DistributionTestCase(support.LoggingSilencer, + support.TempdirManager, + support.EnvironGuard, + unittest.TestCase): + + def setUp(self): + super(DistributionTestCase, self).setUp() + self.argv = sys.argv, sys.argv[:] + del sys.argv[1:] + + def tearDown(self): + sys.argv = self.argv[0] + sys.argv[:] = self.argv[1] + super(DistributionTestCase, self).tearDown() + + def create_distribution(self, configfiles=()): + d = TestDistribution() + d._config_files = configfiles + d.parse_config_files() + d.parse_command_line() + return d + + def test_command_packages_unspecified(self): + sys.argv.append("build") + d = self.create_distribution() + self.assertEqual(d.get_command_packages(), ["distutils.command"]) + + def test_command_packages_cmdline(self): + from distutils.tests.test_dist import test_dist + sys.argv.extend(["--command-packages", + "foo.bar,distutils.tests", + "test_dist", + "-Ssometext", + ]) + d = self.create_distribution() + # let's actually try to load our test command: + self.assertEqual(d.get_command_packages(), + ["distutils.command", "foo.bar", "distutils.tests"]) + cmd = d.get_command_obj("test_dist") + self.assertIsInstance(cmd, test_dist) + self.assertEqual(cmd.sample_option, "sometext") + + def test_venv_install_options(self): + sys.argv.append("install") + self.addCleanup(os.unlink, TESTFN) + + fakepath = '/somedir' + + with open(TESTFN, "w") as f: + print(("[install]\n" + "install-base = {0}\n" + "install-platbase = {0}\n" + "install-lib = {0}\n" + "install-platlib = {0}\n" + "install-purelib = {0}\n" + "install-headers = {0}\n" + "install-scripts = {0}\n" + "install-data = {0}\n" + "prefix = {0}\n" + "exec-prefix = {0}\n" + "home = {0}\n" + "user = {0}\n" + "root = {0}").format(fakepath), file=f) + + # Base case: Not in a Virtual Environment + with mock.patch.multiple(sys, prefix='/a', base_prefix='/a') as values: + d = self.create_distribution([TESTFN]) + + option_tuple = (TESTFN, fakepath) + + result_dict = { + 'install_base': option_tuple, + 'install_platbase': option_tuple, + 'install_lib': option_tuple, + 'install_platlib': option_tuple, + 'install_purelib': option_tuple, + 'install_headers': option_tuple, + 'install_scripts': option_tuple, + 'install_data': option_tuple, + 'prefix': option_tuple, + 'exec_prefix': option_tuple, + 'home': option_tuple, + 'user': option_tuple, + 'root': option_tuple, + } + + self.assertEqual( + sorted(d.command_options.get('install').keys()), + sorted(result_dict.keys())) + + for (key, value) in d.command_options.get('install').items(): + self.assertEqual(value, result_dict[key]) + + # Test case: In a Virtual Environment + with mock.patch.multiple(sys, prefix='/a', base_prefix='/b') as values: + d = self.create_distribution([TESTFN]) + + for key in result_dict.keys(): + self.assertNotIn(key, d.command_options.get('install', {})) + + def test_command_packages_configfile(self): + sys.argv.append("build") + self.addCleanup(os.unlink, TESTFN) + f = open(TESTFN, "w") + try: + print("[global]", file=f) + print("command_packages = foo.bar, splat", file=f) + finally: + f.close() + + d = self.create_distribution([TESTFN]) + self.assertEqual(d.get_command_packages(), + ["distutils.command", "foo.bar", "splat"]) + + # ensure command line overrides config: + sys.argv[1:] = ["--command-packages", "spork", "build"] + d = 
self.create_distribution([TESTFN]) + self.assertEqual(d.get_command_packages(), + ["distutils.command", "spork"]) + + # Setting --command-packages to '' should cause the default to + # be used even if a config file specified something else: + sys.argv[1:] = ["--command-packages", "", "build"] + d = self.create_distribution([TESTFN]) + self.assertEqual(d.get_command_packages(), ["distutils.command"]) + + def test_empty_options(self): + # an empty options dictionary should not stay in the + # list of attributes + + # catching warnings + warns = [] + + def _warn(msg): + warns.append(msg) + + self.addCleanup(setattr, warnings, 'warn', warnings.warn) + warnings.warn = _warn + dist = Distribution(attrs={'author': 'xxx', 'name': 'xxx', + 'version': 'xxx', 'url': 'xxxx', + 'options': {}}) + + self.assertEqual(len(warns), 0) + self.assertNotIn('options', dir(dist)) + + def test_finalize_options(self): + attrs = {'keywords': 'one,two', + 'platforms': 'one,two'} + + dist = Distribution(attrs=attrs) + dist.finalize_options() + + # finalize_option splits platforms and keywords + self.assertEqual(dist.metadata.platforms, ['one', 'two']) + self.assertEqual(dist.metadata.keywords, ['one', 'two']) + + attrs = {'keywords': 'foo bar', + 'platforms': 'foo bar'} + dist = Distribution(attrs=attrs) + dist.finalize_options() + self.assertEqual(dist.metadata.platforms, ['foo bar']) + self.assertEqual(dist.metadata.keywords, ['foo bar']) + + def test_get_command_packages(self): + dist = Distribution() + self.assertEqual(dist.command_packages, None) + cmds = dist.get_command_packages() + self.assertEqual(cmds, ['distutils.command']) + self.assertEqual(dist.command_packages, + ['distutils.command']) + + dist.command_packages = 'one,two' + cmds = dist.get_command_packages() + self.assertEqual(cmds, ['distutils.command', 'one', 'two']) + + def test_announce(self): + # make sure the level is known + dist = Distribution() + args = ('ok',) + kwargs = {'level': 'ok2'} + self.assertRaises(ValueError, dist.announce, args, kwargs) + + + def test_find_config_files_disable(self): + # Ticket #1180: Allow user to disable their home config file. 
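Before the config-file handling continues, a quick sketch of what test_finalize_options above asserts for Distribution metadata: comma-separated 'keywords' and 'platforms' strings are split into lists when the distribution is finalized. The snippet is illustrative and not part of the patch:

    from distutils.dist import Distribution

    dist = Distribution(attrs={'keywords': 'one,two', 'platforms': 'one,two'})
    dist.finalize_options()
    assert dist.metadata.keywords == ['one', 'two']
    assert dist.metadata.platforms == ['one', 'two']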
+ temp_home = self.mkdtemp() + if os.name == 'posix': + user_filename = os.path.join(temp_home, ".pydistutils.cfg") + else: + user_filename = os.path.join(temp_home, "pydistutils.cfg") + + with open(user_filename, 'w') as f: + f.write('[distutils]\n') + + def _expander(path): + return temp_home + + old_expander = os.path.expanduser + os.path.expanduser = _expander + try: + d = Distribution() + all_files = d.find_config_files() + + d = Distribution(attrs={'script_args': ['--no-user-cfg']}) + files = d.find_config_files() + finally: + os.path.expanduser = old_expander + + # make sure --no-user-cfg disables the user cfg file + self.assertEqual(len(all_files)-1, len(files)) + +class MetadataTestCase(support.TempdirManager, support.EnvironGuard, + unittest.TestCase): + + def setUp(self): + super(MetadataTestCase, self).setUp() + self.argv = sys.argv, sys.argv[:] + + def tearDown(self): + sys.argv = self.argv[0] + sys.argv[:] = self.argv[1] + super(MetadataTestCase, self).tearDown() + + def format_metadata(self, dist): + sio = io.StringIO() + dist.metadata.write_pkg_file(sio) + return sio.getvalue() + + def test_simple_metadata(self): + attrs = {"name": "package", + "version": "1.0"} + dist = Distribution(attrs) + meta = self.format_metadata(dist) + self.assertIn("Metadata-Version: 1.0", meta) + self.assertNotIn("provides:", meta.lower()) + self.assertNotIn("requires:", meta.lower()) + self.assertNotIn("obsoletes:", meta.lower()) + + def test_provides(self): + attrs = {"name": "package", + "version": "1.0", + "provides": ["package", "package.sub"]} + dist = Distribution(attrs) + self.assertEqual(dist.metadata.get_provides(), + ["package", "package.sub"]) + self.assertEqual(dist.get_provides(), + ["package", "package.sub"]) + meta = self.format_metadata(dist) + self.assertIn("Metadata-Version: 1.1", meta) + self.assertNotIn("requires:", meta.lower()) + self.assertNotIn("obsoletes:", meta.lower()) + + def test_provides_illegal(self): + self.assertRaises(ValueError, Distribution, + {"name": "package", + "version": "1.0", + "provides": ["my.pkg (splat)"]}) + + def test_requires(self): + attrs = {"name": "package", + "version": "1.0", + "requires": ["other", "another (==1.0)"]} + dist = Distribution(attrs) + self.assertEqual(dist.metadata.get_requires(), + ["other", "another (==1.0)"]) + self.assertEqual(dist.get_requires(), + ["other", "another (==1.0)"]) + meta = self.format_metadata(dist) + self.assertIn("Metadata-Version: 1.1", meta) + self.assertNotIn("provides:", meta.lower()) + self.assertIn("Requires: other", meta) + self.assertIn("Requires: another (==1.0)", meta) + self.assertNotIn("obsoletes:", meta.lower()) + + def test_requires_illegal(self): + self.assertRaises(ValueError, Distribution, + {"name": "package", + "version": "1.0", + "requires": ["my.pkg (splat)"]}) + + def test_requires_to_list(self): + attrs = {"name": "package", + "requires": iter(["other"])} + dist = Distribution(attrs) + self.assertIsInstance(dist.metadata.requires, list) + + + def test_obsoletes(self): + attrs = {"name": "package", + "version": "1.0", + "obsoletes": ["other", "another (<1.0)"]} + dist = Distribution(attrs) + self.assertEqual(dist.metadata.get_obsoletes(), + ["other", "another (<1.0)"]) + self.assertEqual(dist.get_obsoletes(), + ["other", "another (<1.0)"]) + meta = self.format_metadata(dist) + self.assertIn("Metadata-Version: 1.1", meta) + self.assertNotIn("provides:", meta.lower()) + self.assertNotIn("requires:", meta.lower()) + self.assertIn("Obsoletes: other", meta) + self.assertIn("Obsoletes: 
another (<1.0)", meta) + + def test_obsoletes_illegal(self): + self.assertRaises(ValueError, Distribution, + {"name": "package", + "version": "1.0", + "obsoletes": ["my.pkg (splat)"]}) + + def test_obsoletes_to_list(self): + attrs = {"name": "package", + "obsoletes": iter(["other"])} + dist = Distribution(attrs) + self.assertIsInstance(dist.metadata.obsoletes, list) + + def test_classifier(self): + attrs = {'name': 'Boa', 'version': '3.0', + 'classifiers': ['Programming Language :: Python :: 3']} + dist = Distribution(attrs) + self.assertEqual(dist.get_classifiers(), + ['Programming Language :: Python :: 3']) + meta = self.format_metadata(dist) + self.assertIn('Metadata-Version: 1.1', meta) + + def test_classifier_invalid_type(self): + attrs = {'name': 'Boa', 'version': '3.0', + 'classifiers': ('Programming Language :: Python :: 3',)} + with captured_stderr() as error: + d = Distribution(attrs) + # should have warning about passing a non-list + self.assertIn('should be a list', error.getvalue()) + # should be converted to a list + self.assertIsInstance(d.metadata.classifiers, list) + self.assertEqual(d.metadata.classifiers, + list(attrs['classifiers'])) + + def test_keywords(self): + attrs = {'name': 'Monty', 'version': '1.0', + 'keywords': ['spam', 'eggs', 'life of brian']} + dist = Distribution(attrs) + self.assertEqual(dist.get_keywords(), + ['spam', 'eggs', 'life of brian']) + + def test_keywords_invalid_type(self): + attrs = {'name': 'Monty', 'version': '1.0', + 'keywords': ('spam', 'eggs', 'life of brian')} + with captured_stderr() as error: + d = Distribution(attrs) + # should have warning about passing a non-list + self.assertIn('should be a list', error.getvalue()) + # should be converted to a list + self.assertIsInstance(d.metadata.keywords, list) + self.assertEqual(d.metadata.keywords, list(attrs['keywords'])) + + def test_platforms(self): + attrs = {'name': 'Monty', 'version': '1.0', + 'platforms': ['GNU/Linux', 'Some Evil Platform']} + dist = Distribution(attrs) + self.assertEqual(dist.get_platforms(), + ['GNU/Linux', 'Some Evil Platform']) + + def test_platforms_invalid_types(self): + attrs = {'name': 'Monty', 'version': '1.0', + 'platforms': ('GNU/Linux', 'Some Evil Platform')} + with captured_stderr() as error: + d = Distribution(attrs) + # should have warning about passing a non-list + self.assertIn('should be a list', error.getvalue()) + # should be converted to a list + self.assertIsInstance(d.metadata.platforms, list) + self.assertEqual(d.metadata.platforms, list(attrs['platforms'])) + + def test_download_url(self): + attrs = {'name': 'Boa', 'version': '3.0', + 'download_url': 'http://example.org/boa'} + dist = Distribution(attrs) + meta = self.format_metadata(dist) + self.assertIn('Metadata-Version: 1.1', meta) + + def test_long_description(self): + long_desc = textwrap.dedent("""\ + example:: + We start here + and continue here + and end here.""") + attrs = {"name": "package", + "version": "1.0", + "long_description": long_desc} + + dist = Distribution(attrs) + meta = self.format_metadata(dist) + meta = meta.replace('\n' + 8 * ' ', '\n') + self.assertIn(long_desc, meta) + + def test_custom_pydistutils(self): + # fixes #2166 + # make sure pydistutils.cfg is found + if os.name == 'posix': + user_filename = ".pydistutils.cfg" + else: + user_filename = "pydistutils.cfg" + + temp_dir = self.mkdtemp() + user_filename = os.path.join(temp_dir, user_filename) + f = open(user_filename, 'w') + try: + f.write('.') + finally: + f.close() + + try: + dist = Distribution() + + # 
linux-style + if sys.platform in ('linux', 'darwin'): + os.environ['HOME'] = temp_dir + files = dist.find_config_files() + self.assertIn(user_filename, files) + + # win32-style + if sys.platform == 'win32': + # home drive should be found + os.environ['USERPROFILE'] = temp_dir + files = dist.find_config_files() + self.assertIn(user_filename, files, + '%r not found in %r' % (user_filename, files)) + finally: + os.remove(user_filename) + + def test_fix_help_options(self): + help_tuples = [('a', 'b', 'c', 'd'), (1, 2, 3, 4)] + fancy_options = fix_help_options(help_tuples) + self.assertEqual(fancy_options[0], ('a', 'b', 'c')) + self.assertEqual(fancy_options[1], (1, 2, 3)) + + def test_show_help(self): + # smoke test, just makes sure some help is displayed + self.addCleanup(log.set_threshold, log._global_log.threshold) + dist = Distribution() + sys.argv = [] + dist.help = 1 + dist.script_name = 'setup.py' + with captured_stdout() as s: + dist.parse_command_line() + + output = [line for line in s.getvalue().split('\n') + if line.strip() != ''] + self.assertTrue(output) + + + def test_read_metadata(self): + attrs = {"name": "package", + "version": "1.0", + "long_description": "desc", + "description": "xxx", + "download_url": "http://example.com", + "keywords": ['one', 'two'], + "requires": ['foo']} + + dist = Distribution(attrs) + metadata = dist.metadata + + # write it then reloads it + PKG_INFO = io.StringIO() + metadata.write_pkg_file(PKG_INFO) + PKG_INFO.seek(0) + metadata.read_pkg_file(PKG_INFO) + + self.assertEqual(metadata.name, "package") + self.assertEqual(metadata.version, "1.0") + self.assertEqual(metadata.description, "xxx") + self.assertEqual(metadata.download_url, 'http://example.com') + self.assertEqual(metadata.keywords, ['one', 'two']) + self.assertEqual(metadata.platforms, ['UNKNOWN']) + self.assertEqual(metadata.obsoletes, None) + self.assertEqual(metadata.requires, ['foo']) + +def test_suite(): + suite = unittest.TestSuite() + suite.addTest(unittest.TestLoader().loadTestsFromTestCase(DistributionTestCase)) + suite.addTest(unittest.TestLoader().loadTestsFromTestCase(MetadataTestCase)) + return suite + +if __name__ == "__main__": + run_unittest(test_suite()) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_extension.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_extension.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_extension.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_extension.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,70 @@ +"""Tests for distutils.extension.""" +import unittest +import os +import warnings + +from test.support import run_unittest +from test.support.warnings_helper import check_warnings +from distutils.extension import read_setup_file, Extension + +class ExtensionTestCase(unittest.TestCase): + + def test_read_setup_file(self): + # trying to read a Setup file + # (sample extracted from the PyGame project) + setup = os.path.join(os.path.dirname(__file__), 'Setup.sample') + + exts = read_setup_file(setup) + names = [ext.name for ext in exts] + names.sort() + + # here are the extensions read_setup_file should have created + # out of the file + wanted = ['_arraysurfarray', '_camera', '_numericsndarray', + '_numericsurfarray', 'base', 'bufferproxy', 'cdrom', + 'color', 'constants', 'display', 'draw', 'event', + 'fastevent', 'font', 'gfxdraw', 'image', 'imageext', + 'joystick', 'key', 'mask', 'mixer', 'mixer_music', + 'mouse', 'movie', 
'overlay', 'pixelarray', 'pypm', + 'rect', 'rwobject', 'scrap', 'surface', 'surflock', + 'time', 'transform'] + + self.assertEqual(names, wanted) + + def test_extension_init(self): + # the first argument, which is the name, must be a string + self.assertRaises(AssertionError, Extension, 1, []) + ext = Extension('name', []) + self.assertEqual(ext.name, 'name') + + # the second argument, which is the list of files, must + # be a list of strings + self.assertRaises(AssertionError, Extension, 'name', 'file') + self.assertRaises(AssertionError, Extension, 'name', ['file', 1]) + ext = Extension('name', ['file1', 'file2']) + self.assertEqual(ext.sources, ['file1', 'file2']) + + # others arguments have defaults + for attr in ('include_dirs', 'define_macros', 'undef_macros', + 'library_dirs', 'libraries', 'runtime_library_dirs', + 'extra_objects', 'extra_compile_args', 'extra_link_args', + 'export_symbols', 'swig_opts', 'depends'): + self.assertEqual(getattr(ext, attr), []) + + self.assertEqual(ext.language, None) + self.assertEqual(ext.optional, None) + + # if there are unknown keyword options, warn about them + with check_warnings() as w: + warnings.simplefilter('always') + ext = Extension('name', ['file1', 'file2'], chic=True) + + self.assertEqual(len(w.warnings), 1) + self.assertEqual(str(w.warnings[0].message), + "Unknown Extension options: 'chic'") + +def test_suite(): + return unittest.TestLoader().loadTestsFromTestCase(ExtensionTestCase) + +if __name__ == "__main__": + run_unittest(test_suite()) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_file_util.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_file_util.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_file_util.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_file_util.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,126 @@ +"""Tests for distutils.file_util.""" +import unittest +import os +import errno +from unittest.mock import patch + +from distutils.file_util import move_file, copy_file +from distutils import log +from distutils.tests import support +from distutils.errors import DistutilsFileError +from test.support import run_unittest +from test.support.os_helper import unlink + + +class FileUtilTestCase(support.TempdirManager, unittest.TestCase): + + def _log(self, msg, *args): + if len(args) > 0: + self._logs.append(msg % args) + else: + self._logs.append(msg) + + def setUp(self): + super(FileUtilTestCase, self).setUp() + self._logs = [] + self.old_log = log.info + log.info = self._log + tmp_dir = self.mkdtemp() + self.source = os.path.join(tmp_dir, 'f1') + self.target = os.path.join(tmp_dir, 'f2') + self.target_dir = os.path.join(tmp_dir, 'd1') + + def tearDown(self): + log.info = self.old_log + super(FileUtilTestCase, self).tearDown() + + def test_move_file_verbosity(self): + f = open(self.source, 'w') + try: + f.write('some content') + finally: + f.close() + + move_file(self.source, self.target, verbose=0) + wanted = [] + self.assertEqual(self._logs, wanted) + + # back to original state + move_file(self.target, self.source, verbose=0) + + move_file(self.source, self.target, verbose=1) + wanted = ['moving %s -> %s' % (self.source, self.target)] + self.assertEqual(self._logs, wanted) + + # back to original state + move_file(self.target, self.source, verbose=0) + + self._logs = [] + # now the target is a dir + os.mkdir(self.target_dir) + move_file(self.source, self.target_dir, verbose=1) + wanted = ['moving %s 
-> %s' % (self.source, self.target_dir)] + self.assertEqual(self._logs, wanted) + + def test_move_file_exception_unpacking_rename(self): + # see issue 22182 + with patch("os.rename", side_effect=OSError("wrong", 1)), \ + self.assertRaises(DistutilsFileError): + with open(self.source, 'w') as fobj: + fobj.write('spam eggs') + move_file(self.source, self.target, verbose=0) + + def test_move_file_exception_unpacking_unlink(self): + # see issue 22182 + with patch("os.rename", side_effect=OSError(errno.EXDEV, "wrong")), \ + patch("os.unlink", side_effect=OSError("wrong", 1)), \ + self.assertRaises(DistutilsFileError): + with open(self.source, 'w') as fobj: + fobj.write('spam eggs') + move_file(self.source, self.target, verbose=0) + + @unittest.skipUnless(hasattr(os, 'link'), 'requires os.link') + def test_copy_file_hard_link(self): + with open(self.source, 'w') as f: + f.write('some content') + # Check first that copy_file() will not fall back on copying the file + # instead of creating the hard link. + try: + os.link(self.source, self.target) + except OSError as e: + self.skipTest('os.link: %s' % e) + else: + unlink(self.target) + st = os.stat(self.source) + copy_file(self.source, self.target, link='hard') + st2 = os.stat(self.source) + st3 = os.stat(self.target) + self.assertTrue(os.path.samestat(st, st2), (st, st2)) + self.assertTrue(os.path.samestat(st2, st3), (st2, st3)) + with open(self.source, 'r') as f: + self.assertEqual(f.read(), 'some content') + + @unittest.skipUnless(hasattr(os, 'link'), 'requires os.link') + def test_copy_file_hard_link_failure(self): + # If hard linking fails, copy_file() falls back on copying file + # (some special filesystems don't support hard linking even under + # Unix, see issue #8876). + with open(self.source, 'w') as f: + f.write('some content') + st = os.stat(self.source) + with patch("os.link", side_effect=OSError(0, "linking unsupported")): + copy_file(self.source, self.target, link='hard') + st2 = os.stat(self.source) + st3 = os.stat(self.target) + self.assertTrue(os.path.samestat(st, st2), (st, st2)) + self.assertFalse(os.path.samestat(st2, st3), (st2, st3)) + for fn in (self.source, self.target): + with open(fn, 'r') as f: + self.assertEqual(f.read(), 'some content') + + +def test_suite(): + return unittest.TestLoader().loadTestsFromTestCase(FileUtilTestCase) + +if __name__ == "__main__": + run_unittest(test_suite()) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_filelist.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_filelist.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_filelist.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_filelist.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,340 @@ +"""Tests for distutils.filelist.""" +import os +import re +import unittest +from distutils import debug +from distutils.log import WARN +from distutils.errors import DistutilsTemplateError +from distutils.filelist import glob_to_re, translate_pattern, FileList +from distutils import filelist + +from test.support import os_helper +from test.support import captured_stdout, run_unittest +from distutils.tests import support + +MANIFEST_IN = """\ +include ok +include xo +exclude xo +include foo.tmp +include buildout.cfg +global-include *.x +global-include *.txt +global-exclude *.tmp +recursive-include f *.oo +recursive-exclude global *.x +graft dir +prune dir3 +""" + + +def make_local_path(s): + """Converts '/' in a string to os.sep""" + 
return s.replace('/', os.sep) + + +class FileListTestCase(support.LoggingSilencer, + unittest.TestCase): + + def assertNoWarnings(self): + self.assertEqual(self.get_logs(WARN), []) + self.clear_logs() + + def assertWarnings(self): + self.assertGreater(len(self.get_logs(WARN)), 0) + self.clear_logs() + + def test_glob_to_re(self): + sep = os.sep + if os.sep == '\\': + sep = re.escape(os.sep) + + for glob, regex in ( + # simple cases + ('foo*', r'(?s:foo[^%(sep)s]*)\Z'), + ('foo?', r'(?s:foo[^%(sep)s])\Z'), + ('foo??', r'(?s:foo[^%(sep)s][^%(sep)s])\Z'), + # special cases + (r'foo\\*', r'(?s:foo\\\\[^%(sep)s]*)\Z'), + (r'foo\\\*', r'(?s:foo\\\\\\[^%(sep)s]*)\Z'), + ('foo????', r'(?s:foo[^%(sep)s][^%(sep)s][^%(sep)s][^%(sep)s])\Z'), + (r'foo\\??', r'(?s:foo\\\\[^%(sep)s][^%(sep)s])\Z')): + regex = regex % {'sep': sep} + self.assertEqual(glob_to_re(glob), regex) + + def test_process_template_line(self): + # testing all MANIFEST.in template patterns + file_list = FileList() + l = make_local_path + + # simulated file list + file_list.allfiles = ['foo.tmp', 'ok', 'xo', 'four.txt', + 'buildout.cfg', + # filelist does not filter out VCS directories, + # it's sdist that does + l('.hg/last-message.txt'), + l('global/one.txt'), + l('global/two.txt'), + l('global/files.x'), + l('global/here.tmp'), + l('f/o/f.oo'), + l('dir/graft-one'), + l('dir/dir2/graft2'), + l('dir3/ok'), + l('dir3/sub/ok.txt'), + ] + + for line in MANIFEST_IN.split('\n'): + if line.strip() == '': + continue + file_list.process_template_line(line) + + wanted = ['ok', + 'buildout.cfg', + 'four.txt', + l('.hg/last-message.txt'), + l('global/one.txt'), + l('global/two.txt'), + l('f/o/f.oo'), + l('dir/graft-one'), + l('dir/dir2/graft2'), + ] + + self.assertEqual(file_list.files, wanted) + + def test_debug_print(self): + file_list = FileList() + with captured_stdout() as stdout: + file_list.debug_print('xxx') + self.assertEqual(stdout.getvalue(), '') + + debug.DEBUG = True + try: + with captured_stdout() as stdout: + file_list.debug_print('xxx') + self.assertEqual(stdout.getvalue(), 'xxx\n') + finally: + debug.DEBUG = False + + def test_set_allfiles(self): + file_list = FileList() + files = ['a', 'b', 'c'] + file_list.set_allfiles(files) + self.assertEqual(file_list.allfiles, files) + + def test_remove_duplicates(self): + file_list = FileList() + file_list.files = ['a', 'b', 'a', 'g', 'c', 'g'] + # files must be sorted beforehand (sdist does it) + file_list.sort() + file_list.remove_duplicates() + self.assertEqual(file_list.files, ['a', 'b', 'c', 'g']) + + def test_translate_pattern(self): + # not regex + self.assertTrue(hasattr( + translate_pattern('a', anchor=True, is_regex=False), + 'search')) + + # is a regex + regex = re.compile('a') + self.assertEqual( + translate_pattern(regex, anchor=True, is_regex=True), + regex) + + # plain string flagged as regex + self.assertTrue(hasattr( + translate_pattern('a', anchor=True, is_regex=True), + 'search')) + + # glob support + self.assertTrue(translate_pattern( + '*.py', anchor=True, is_regex=False).search('filelist.py')) + + def test_exclude_pattern(self): + # return False if no match + file_list = FileList() + self.assertFalse(file_list.exclude_pattern('*.py')) + + # return True if files match + file_list = FileList() + file_list.files = ['a.py', 'b.py'] + self.assertTrue(file_list.exclude_pattern('*.py')) + + # test excludes + file_list = FileList() + file_list.files = ['a.py', 'a.txt'] + file_list.exclude_pattern('*.py') + self.assertEqual(file_list.files, ['a.txt']) + + def 
test_include_pattern(self): + # return False if no match + file_list = FileList() + file_list.set_allfiles([]) + self.assertFalse(file_list.include_pattern('*.py')) + + # return True if files match + file_list = FileList() + file_list.set_allfiles(['a.py', 'b.txt']) + self.assertTrue(file_list.include_pattern('*.py')) + + # test * matches all files + file_list = FileList() + self.assertIsNone(file_list.allfiles) + file_list.set_allfiles(['a.py', 'b.txt']) + file_list.include_pattern('*') + self.assertEqual(file_list.allfiles, ['a.py', 'b.txt']) + + def test_process_template(self): + l = make_local_path + # invalid lines + file_list = FileList() + for action in ('include', 'exclude', 'global-include', + 'global-exclude', 'recursive-include', + 'recursive-exclude', 'graft', 'prune', 'blarg'): + self.assertRaises(DistutilsTemplateError, + file_list.process_template_line, action) + + # include + file_list = FileList() + file_list.set_allfiles(['a.py', 'b.txt', l('d/c.py')]) + + file_list.process_template_line('include *.py') + self.assertEqual(file_list.files, ['a.py']) + self.assertNoWarnings() + + file_list.process_template_line('include *.rb') + self.assertEqual(file_list.files, ['a.py']) + self.assertWarnings() + + # exclude + file_list = FileList() + file_list.files = ['a.py', 'b.txt', l('d/c.py')] + + file_list.process_template_line('exclude *.py') + self.assertEqual(file_list.files, ['b.txt', l('d/c.py')]) + self.assertNoWarnings() + + file_list.process_template_line('exclude *.rb') + self.assertEqual(file_list.files, ['b.txt', l('d/c.py')]) + self.assertWarnings() + + # global-include + file_list = FileList() + file_list.set_allfiles(['a.py', 'b.txt', l('d/c.py')]) + + file_list.process_template_line('global-include *.py') + self.assertEqual(file_list.files, ['a.py', l('d/c.py')]) + self.assertNoWarnings() + + file_list.process_template_line('global-include *.rb') + self.assertEqual(file_list.files, ['a.py', l('d/c.py')]) + self.assertWarnings() + + # global-exclude + file_list = FileList() + file_list.files = ['a.py', 'b.txt', l('d/c.py')] + + file_list.process_template_line('global-exclude *.py') + self.assertEqual(file_list.files, ['b.txt']) + self.assertNoWarnings() + + file_list.process_template_line('global-exclude *.rb') + self.assertEqual(file_list.files, ['b.txt']) + self.assertWarnings() + + # recursive-include + file_list = FileList() + file_list.set_allfiles(['a.py', l('d/b.py'), l('d/c.txt'), + l('d/d/e.py')]) + + file_list.process_template_line('recursive-include d *.py') + self.assertEqual(file_list.files, [l('d/b.py'), l('d/d/e.py')]) + self.assertNoWarnings() + + file_list.process_template_line('recursive-include e *.py') + self.assertEqual(file_list.files, [l('d/b.py'), l('d/d/e.py')]) + self.assertWarnings() + + # recursive-exclude + file_list = FileList() + file_list.files = ['a.py', l('d/b.py'), l('d/c.txt'), l('d/d/e.py')] + + file_list.process_template_line('recursive-exclude d *.py') + self.assertEqual(file_list.files, ['a.py', l('d/c.txt')]) + self.assertNoWarnings() + + file_list.process_template_line('recursive-exclude e *.py') + self.assertEqual(file_list.files, ['a.py', l('d/c.txt')]) + self.assertWarnings() + + # graft + file_list = FileList() + file_list.set_allfiles(['a.py', l('d/b.py'), l('d/d/e.py'), + l('f/f.py')]) + + file_list.process_template_line('graft d') + self.assertEqual(file_list.files, [l('d/b.py'), l('d/d/e.py')]) + self.assertNoWarnings() + + file_list.process_template_line('graft e') + self.assertEqual(file_list.files, [l('d/b.py'), 
l('d/d/e.py')]) + self.assertWarnings() + + # prune + file_list = FileList() + file_list.files = ['a.py', l('d/b.py'), l('d/d/e.py'), l('f/f.py')] + + file_list.process_template_line('prune d') + self.assertEqual(file_list.files, ['a.py', l('f/f.py')]) + self.assertNoWarnings() + + file_list.process_template_line('prune e') + self.assertEqual(file_list.files, ['a.py', l('f/f.py')]) + self.assertWarnings() + + +class FindAllTestCase(unittest.TestCase): + @os_helper.skip_unless_symlink + def test_missing_symlink(self): + with os_helper.temp_cwd(): + os.symlink('foo', 'bar') + self.assertEqual(filelist.findall(), []) + + def test_basic_discovery(self): + """ + When findall is called with no parameters or with + '.' as the parameter, the dot should be omitted from + the results. + """ + with os_helper.temp_cwd(): + os.mkdir('foo') + file1 = os.path.join('foo', 'file1.txt') + os_helper.create_empty_file(file1) + os.mkdir('bar') + file2 = os.path.join('bar', 'file2.txt') + os_helper.create_empty_file(file2) + expected = [file2, file1] + self.assertEqual(sorted(filelist.findall()), expected) + + def test_non_local_discovery(self): + """ + When findall is called with another path, the full + path name should be returned. + """ + with os_helper.temp_dir() as temp_dir: + file1 = os.path.join(temp_dir, 'file1.txt') + os_helper.create_empty_file(file1) + expected = [file1] + self.assertEqual(filelist.findall(temp_dir), expected) + + +def test_suite(): + return unittest.TestSuite([ + unittest.TestLoader().loadTestsFromTestCase(FileListTestCase), + unittest.TestLoader().loadTestsFromTestCase(FindAllTestCase), + ]) + + +if __name__ == "__main__": + run_unittest(test_suite()) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_install.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_install.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_install.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_install.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,261 @@ +"""Tests for distutils.command.install.""" + +import os +import sys +import unittest +import site + +from test.support import captured_stdout, run_unittest, requires_subprocess + +from distutils import sysconfig +from distutils.command.install import install, HAS_USER_SITE +from distutils.command import install as install_module +from distutils.command.build_ext import build_ext +from distutils.command.install import INSTALL_SCHEMES +from distutils.core import Distribution +from distutils.errors import DistutilsOptionError +from distutils.extension import Extension + +from distutils.tests import support +from test import support as test_support + + +def _make_ext_name(modname): + return modname + sysconfig.get_config_var('EXT_SUFFIX') + + +class InstallTestCase(support.TempdirManager, + support.EnvironGuard, + support.LoggingSilencer, + unittest.TestCase): + + def setUp(self): + super().setUp() + self._backup_config_vars = dict(sysconfig._config_vars) + + def tearDown(self): + super().tearDown() + sysconfig._config_vars.clear() + sysconfig._config_vars.update(self._backup_config_vars) + + def test_home_installation_scheme(self): + # This ensure two things: + # - that --home generates the desired set of directory names + # - test --home is supported on all platforms + builddir = self.mkdtemp() + destination = os.path.join(builddir, "installation") + + dist = Distribution({"name": "foopkg"}) + # script_name need not exist, it just need to 
be initialized + dist.script_name = os.path.join(builddir, "setup.py") + dist.command_obj["build"] = support.DummyCommand( + build_base=builddir, + build_lib=os.path.join(builddir, "lib"), + ) + + cmd = install(dist) + cmd.home = destination + cmd.ensure_finalized() + + self.assertEqual(cmd.install_base, destination) + self.assertEqual(cmd.install_platbase, destination) + + def check_path(got, expected): + got = os.path.normpath(got) + expected = os.path.normpath(expected) + self.assertEqual(got, expected) + + libdir = os.path.join(destination, "lib", "python") + check_path(cmd.install_lib, libdir) + platlibdir = os.path.join(destination, sys.platlibdir, "python") + check_path(cmd.install_platlib, platlibdir) + check_path(cmd.install_purelib, libdir) + check_path(cmd.install_headers, + os.path.join(destination, "include", "python", "foopkg")) + check_path(cmd.install_scripts, os.path.join(destination, "bin")) + check_path(cmd.install_data, destination) + + @unittest.skipUnless(HAS_USER_SITE, 'need user site') + def test_user_site(self): + # test install with --user + # preparing the environment for the test + self.old_user_base = site.USER_BASE + self.old_user_site = site.USER_SITE + self.tmpdir = self.mkdtemp() + self.user_base = os.path.join(self.tmpdir, 'B') + self.user_site = os.path.join(self.tmpdir, 'S') + site.USER_BASE = self.user_base + site.USER_SITE = self.user_site + install_module.USER_BASE = self.user_base + install_module.USER_SITE = self.user_site + + def _expanduser(path): + return self.tmpdir + self.old_expand = os.path.expanduser + os.path.expanduser = _expanduser + + def cleanup(): + site.USER_BASE = self.old_user_base + site.USER_SITE = self.old_user_site + install_module.USER_BASE = self.old_user_base + install_module.USER_SITE = self.old_user_site + os.path.expanduser = self.old_expand + + self.addCleanup(cleanup) + + if HAS_USER_SITE: + for key in ('nt_user', 'unix_user'): + self.assertIn(key, INSTALL_SCHEMES) + + dist = Distribution({'name': 'xx'}) + cmd = install(dist) + + # making sure the user option is there + options = [name for name, short, lable in + cmd.user_options] + self.assertIn('user', options) + + # setting a value + cmd.user = 1 + + # user base and site shouldn't be created yet + self.assertFalse(os.path.exists(self.user_base)) + self.assertFalse(os.path.exists(self.user_site)) + + # let's run finalize + cmd.ensure_finalized() + + # now they should + self.assertTrue(os.path.exists(self.user_base)) + self.assertTrue(os.path.exists(self.user_site)) + + self.assertIn('userbase', cmd.config_vars) + self.assertIn('usersite', cmd.config_vars) + + def test_handle_extra_path(self): + dist = Distribution({'name': 'xx', 'extra_path': 'path,dirs'}) + cmd = install(dist) + + # two elements + cmd.handle_extra_path() + self.assertEqual(cmd.extra_path, ['path', 'dirs']) + self.assertEqual(cmd.extra_dirs, 'dirs') + self.assertEqual(cmd.path_file, 'path') + + # one element + cmd.extra_path = ['path'] + cmd.handle_extra_path() + self.assertEqual(cmd.extra_path, ['path']) + self.assertEqual(cmd.extra_dirs, 'path') + self.assertEqual(cmd.path_file, 'path') + + # none + dist.extra_path = cmd.extra_path = None + cmd.handle_extra_path() + self.assertEqual(cmd.extra_path, None) + self.assertEqual(cmd.extra_dirs, '') + self.assertEqual(cmd.path_file, None) + + # three elements (no way !) 
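For orientation, a minimal sketch of the --home layout that test_home_installation_scheme asserts above (illustrative only; the destination path is hypothetical and the layout shown is the POSIX 'unix_home' scheme):

    from distutils.core import Distribution
    from distutils.command.install import install

    dist = Distribution({'name': 'foopkg'})
    dist.script_name = 'setup.py'       # only needs to be set, not to exist
    cmd = install(dist)
    cmd.home = '/tmp/example-home'      # hypothetical --home destination
    cmd.ensure_finalized()
    print(cmd.install_lib)              # /tmp/example-home/lib/python
    print(cmd.install_scripts)          # /tmp/example-home/bin
    print(cmd.install_data)             # /tmp/example-home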
+ cmd.extra_path = 'path,dirs,again' + self.assertRaises(DistutilsOptionError, cmd.handle_extra_path) + + def test_finalize_options(self): + dist = Distribution({'name': 'xx'}) + cmd = install(dist) + + # must supply either prefix/exec-prefix/home or + # install-base/install-platbase -- not both + cmd.prefix = 'prefix' + cmd.install_base = 'base' + self.assertRaises(DistutilsOptionError, cmd.finalize_options) + + # must supply either home or prefix/exec-prefix -- not both + cmd.install_base = None + cmd.home = 'home' + self.assertRaises(DistutilsOptionError, cmd.finalize_options) + + # can't combine user with prefix/exec_prefix/home or + # install_(plat)base + cmd.prefix = None + cmd.user = 'user' + self.assertRaises(DistutilsOptionError, cmd.finalize_options) + + def test_record(self): + install_dir = self.mkdtemp() + project_dir, dist = self.create_dist(py_modules=['hello'], + scripts=['sayhi']) + os.chdir(project_dir) + self.write_file('hello.py', "def main(): print('o hai')") + self.write_file('sayhi', 'from hello import main; main()') + + cmd = install(dist) + dist.command_obj['install'] = cmd + cmd.root = install_dir + cmd.record = os.path.join(project_dir, 'filelist') + cmd.ensure_finalized() + cmd.run() + + f = open(cmd.record) + try: + content = f.read() + finally: + f.close() + + found = [os.path.basename(line) for line in content.splitlines()] + expected = ['hello.py', 'hello.%s.pyc' % sys.implementation.cache_tag, + 'sayhi', + 'UNKNOWN-0.0.0-py%s.%s.egg-info' % sys.version_info[:2]] + self.assertEqual(found, expected) + + @requires_subprocess() + def test_record_extensions(self): + cmd = test_support.missing_compiler_executable() + if cmd is not None: + self.skipTest('The %r command is not found' % cmd) + install_dir = self.mkdtemp() + project_dir, dist = self.create_dist(ext_modules=[ + Extension('xx', ['xxmodule.c'])]) + os.chdir(project_dir) + support.copy_xxmodule_c(project_dir) + + buildextcmd = build_ext(dist) + support.fixup_build_ext(buildextcmd) + buildextcmd.ensure_finalized() + + cmd = install(dist) + dist.command_obj['install'] = cmd + dist.command_obj['build_ext'] = buildextcmd + cmd.root = install_dir + cmd.record = os.path.join(project_dir, 'filelist') + cmd.ensure_finalized() + cmd.run() + + f = open(cmd.record) + try: + content = f.read() + finally: + f.close() + + found = [os.path.basename(line) for line in content.splitlines()] + expected = [_make_ext_name('xx'), + 'UNKNOWN-0.0.0-py%s.%s.egg-info' % sys.version_info[:2]] + self.assertEqual(found, expected) + + def test_debug_mode(self): + # this covers the code called when DEBUG is set + old_logs_len = len(self.logs) + install_module.DEBUG = True + try: + with captured_stdout(): + self.test_record() + finally: + install_module.DEBUG = False + self.assertGreater(len(self.logs), old_logs_len) + + +def test_suite(): + return unittest.TestLoader().loadTestsFromTestCase(InstallTestCase) + +if __name__ == "__main__": + run_unittest(test_suite()) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_install_data.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_install_data.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_install_data.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_install_data.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,75 @@ +"""Tests for distutils.command.install_data.""" +import os +import unittest + +from distutils.command.install_data import install_data +from distutils.tests 
import support +from test.support import run_unittest + +class InstallDataTestCase(support.TempdirManager, + support.LoggingSilencer, + support.EnvironGuard, + unittest.TestCase): + + def test_simple_run(self): + pkg_dir, dist = self.create_dist() + cmd = install_data(dist) + cmd.install_dir = inst = os.path.join(pkg_dir, 'inst') + + # data_files can contain + # - simple files + # - a tuple with a path, and a list of file + one = os.path.join(pkg_dir, 'one') + self.write_file(one, 'xxx') + inst2 = os.path.join(pkg_dir, 'inst2') + two = os.path.join(pkg_dir, 'two') + self.write_file(two, 'xxx') + + cmd.data_files = [one, (inst2, [two])] + self.assertEqual(cmd.get_inputs(), [one, (inst2, [two])]) + + # let's run the command + cmd.ensure_finalized() + cmd.run() + + # let's check the result + self.assertEqual(len(cmd.get_outputs()), 2) + rtwo = os.path.split(two)[-1] + self.assertTrue(os.path.exists(os.path.join(inst2, rtwo))) + rone = os.path.split(one)[-1] + self.assertTrue(os.path.exists(os.path.join(inst, rone))) + cmd.outfiles = [] + + # let's try with warn_dir one + cmd.warn_dir = 1 + cmd.ensure_finalized() + cmd.run() + + # let's check the result + self.assertEqual(len(cmd.get_outputs()), 2) + self.assertTrue(os.path.exists(os.path.join(inst2, rtwo))) + self.assertTrue(os.path.exists(os.path.join(inst, rone))) + cmd.outfiles = [] + + # now using root and empty dir + cmd.root = os.path.join(pkg_dir, 'root') + inst3 = os.path.join(cmd.install_dir, 'inst3') + inst4 = os.path.join(pkg_dir, 'inst4') + three = os.path.join(cmd.install_dir, 'three') + self.write_file(three, 'xx') + cmd.data_files = [one, (inst2, [two]), + ('inst3', [three]), + (inst4, [])] + cmd.ensure_finalized() + cmd.run() + + # let's check the result + self.assertEqual(len(cmd.get_outputs()), 4) + self.assertTrue(os.path.exists(os.path.join(inst2, rtwo))) + self.assertTrue(os.path.exists(os.path.join(inst, rone))) + +def test_suite(): + return unittest.TestLoader().loadTestsFromTestCase(InstallDataTestCase) + +if __name__ == "__main__": + run_unittest(test_suite()) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_install_headers.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_install_headers.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_install_headers.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_install_headers.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,39 @@ +"""Tests for distutils.command.install_headers.""" +import os +import unittest + +from distutils.command.install_headers import install_headers +from distutils.tests import support +from test.support import run_unittest + +class InstallHeadersTestCase(support.TempdirManager, + support.LoggingSilencer, + support.EnvironGuard, + unittest.TestCase): + + def test_simple_run(self): + # we have two headers + header_list = self.mkdtemp() + header1 = os.path.join(header_list, 'header1') + header2 = os.path.join(header_list, 'header2') + self.write_file(header1) + self.write_file(header2) + headers = [header1, header2] + + pkg_dir, dist = self.create_dist(headers=headers) + cmd = install_headers(dist) + self.assertEqual(cmd.get_inputs(), headers) + + # let's run the command + cmd.install_dir = os.path.join(pkg_dir, 'inst') + cmd.ensure_finalized() + cmd.run() + + # let's check the results + self.assertEqual(len(cmd.get_outputs()), 2) + +def test_suite(): + return unittest.TestLoader().loadTestsFromTestCase(InstallHeadersTestCase) + +if __name__ 
== "__main__": + run_unittest(test_suite()) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_install_lib.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_install_lib.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_install_lib.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_install_lib.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,117 @@ +"""Tests for distutils.command.install_data.""" +import sys +import os +import importlib.util +import unittest + +from distutils.command.install_lib import install_lib +from distutils.extension import Extension +from distutils.tests import support +from distutils.errors import DistutilsOptionError +from test.support import run_unittest, requires_subprocess + + +class InstallLibTestCase(support.TempdirManager, + support.LoggingSilencer, + support.EnvironGuard, + unittest.TestCase): + + def test_finalize_options(self): + dist = self.create_dist()[1] + cmd = install_lib(dist) + + cmd.finalize_options() + self.assertEqual(cmd.compile, 1) + self.assertEqual(cmd.optimize, 0) + + # optimize must be 0, 1, or 2 + cmd.optimize = 'foo' + self.assertRaises(DistutilsOptionError, cmd.finalize_options) + cmd.optimize = '4' + self.assertRaises(DistutilsOptionError, cmd.finalize_options) + + cmd.optimize = '2' + cmd.finalize_options() + self.assertEqual(cmd.optimize, 2) + + @unittest.skipIf(sys.dont_write_bytecode, 'byte-compile disabled') + @requires_subprocess() + def test_byte_compile(self): + project_dir, dist = self.create_dist() + os.chdir(project_dir) + cmd = install_lib(dist) + cmd.compile = cmd.optimize = 1 + + f = os.path.join(project_dir, 'foo.py') + self.write_file(f, '# python file') + cmd.byte_compile([f]) + pyc_file = importlib.util.cache_from_source('foo.py', optimization='') + pyc_opt_file = importlib.util.cache_from_source('foo.py', + optimization=cmd.optimize) + self.assertTrue(os.path.exists(pyc_file)) + self.assertTrue(os.path.exists(pyc_opt_file)) + + def test_get_outputs(self): + project_dir, dist = self.create_dist() + os.chdir(project_dir) + os.mkdir('spam') + cmd = install_lib(dist) + + # setting up a dist environment + cmd.compile = cmd.optimize = 1 + cmd.install_dir = self.mkdtemp() + f = os.path.join(project_dir, 'spam', '__init__.py') + self.write_file(f, '# python package') + cmd.distribution.ext_modules = [Extension('foo', ['xxx'])] + cmd.distribution.packages = ['spam'] + cmd.distribution.script_name = 'setup.py' + + # get_outputs should return 4 elements: spam/__init__.py and .pyc, + # foo.import-tag-abiflags.so / foo.pyd + outputs = cmd.get_outputs() + self.assertEqual(len(outputs), 4, outputs) + + def test_get_inputs(self): + project_dir, dist = self.create_dist() + os.chdir(project_dir) + os.mkdir('spam') + cmd = install_lib(dist) + + # setting up a dist environment + cmd.compile = cmd.optimize = 1 + cmd.install_dir = self.mkdtemp() + f = os.path.join(project_dir, 'spam', '__init__.py') + self.write_file(f, '# python package') + cmd.distribution.ext_modules = [Extension('foo', ['xxx'])] + cmd.distribution.packages = ['spam'] + cmd.distribution.script_name = 'setup.py' + + # get_inputs should return 2 elements: spam/__init__.py and + # foo.import-tag-abiflags.so / foo.pyd + inputs = cmd.get_inputs() + self.assertEqual(len(inputs), 2, inputs) + + @requires_subprocess() + def test_dont_write_bytecode(self): + # makes sure byte_compile is not used + dist = self.create_dist()[1] + cmd = install_lib(dist) + cmd.compile = 
1 + cmd.optimize = 1 + + old_dont_write_bytecode = sys.dont_write_bytecode + sys.dont_write_bytecode = True + try: + cmd.byte_compile([]) + finally: + sys.dont_write_bytecode = old_dont_write_bytecode + + self.assertIn('byte-compiling is disabled', + self.logs[0][1] % self.logs[0][2]) + + +def test_suite(): + return unittest.TestLoader().loadTestsFromTestCase(InstallLibTestCase) + +if __name__ == "__main__": + run_unittest(test_suite()) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_install_scripts.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_install_scripts.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_install_scripts.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_install_scripts.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,82 @@ +"""Tests for distutils.command.install_scripts.""" + +import os +import unittest + +from distutils.command.install_scripts import install_scripts +from distutils.core import Distribution + +from distutils.tests import support +from test.support import run_unittest + + +class InstallScriptsTestCase(support.TempdirManager, + support.LoggingSilencer, + unittest.TestCase): + + def test_default_settings(self): + dist = Distribution() + dist.command_obj["build"] = support.DummyCommand( + build_scripts="/foo/bar") + dist.command_obj["install"] = support.DummyCommand( + install_scripts="/splat/funk", + force=1, + skip_build=1, + ) + cmd = install_scripts(dist) + self.assertFalse(cmd.force) + self.assertFalse(cmd.skip_build) + self.assertIsNone(cmd.build_dir) + self.assertIsNone(cmd.install_dir) + + cmd.finalize_options() + + self.assertTrue(cmd.force) + self.assertTrue(cmd.skip_build) + self.assertEqual(cmd.build_dir, "/foo/bar") + self.assertEqual(cmd.install_dir, "/splat/funk") + + def test_installation(self): + source = self.mkdtemp() + expected = [] + + def write_script(name, text): + expected.append(name) + f = open(os.path.join(source, name), "w") + try: + f.write(text) + finally: + f.close() + + write_script("script1.py", ("#! 
/usr/bin/env python2.3\n" + "# bogus script w/ Python sh-bang\n" + "pass\n")) + write_script("script2.py", ("#!/usr/bin/python\n" + "# bogus script w/ Python sh-bang\n" + "pass\n")) + write_script("shell.sh", ("#!/bin/sh\n" + "# bogus shell script w/ sh-bang\n" + "exit 0\n")) + + target = self.mkdtemp() + dist = Distribution() + dist.command_obj["build"] = support.DummyCommand(build_scripts=source) + dist.command_obj["install"] = support.DummyCommand( + install_scripts=target, + force=1, + skip_build=1, + ) + cmd = install_scripts(dist) + cmd.finalize_options() + cmd.run() + + installed = os.listdir(target) + for name in expected: + self.assertIn(name, installed) + + +def test_suite(): + return unittest.TestLoader().loadTestsFromTestCase(InstallScriptsTestCase) + +if __name__ == "__main__": + run_unittest(test_suite()) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_log.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_log.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_log.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_log.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,46 @@ +"""Tests for distutils.log""" + +import io +import sys +import unittest +from test.support import swap_attr, run_unittest + +from distutils import log + +class TestLog(unittest.TestCase): + def test_non_ascii(self): + # Issues #8663, #34421: test that non-encodable text is escaped with + # backslashreplace error handler and encodable non-ASCII text is + # output as is. + for errors in ('strict', 'backslashreplace', 'surrogateescape', + 'replace', 'ignore'): + with self.subTest(errors=errors): + stdout = io.TextIOWrapper(io.BytesIO(), + encoding='cp437', errors=errors) + stderr = io.TextIOWrapper(io.BytesIO(), + encoding='cp437', errors=errors) + old_threshold = log.set_threshold(log.DEBUG) + try: + with swap_attr(sys, 'stdout', stdout), \ + swap_attr(sys, 'stderr', stderr): + log.debug('Dεbug\tMėssãge') + log.fatal('Fαtal\tÈrrōr') + finally: + log.set_threshold(old_threshold) + + stdout.seek(0) + self.assertEqual(stdout.read().rstrip(), + 'Dεbug\tM?ss?ge' if errors == 'replace' else + 'Dεbug\tMssge' if errors == 'ignore' else + 'Dεbug\tM\\u0117ss\\xe3ge') + stderr.seek(0) + self.assertEqual(stderr.read().rstrip(), + 'Fαtal\t?rr?r' if errors == 'replace' else + 'Fαtal\trrr' if errors == 'ignore' else + 'Fαtal\t\\xc8rr\\u014dr') + +def test_suite(): + return unittest.TestLoader().loadTestsFromTestCase(TestLog) + +if __name__ == "__main__": + run_unittest(test_suite()) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_msvc9compiler.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_msvc9compiler.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_msvc9compiler.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_msvc9compiler.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,184 @@ +"""Tests for distutils.msvc9compiler.""" +import sys +import unittest +import os + +from distutils.errors import DistutilsPlatformError +from distutils.tests import support +from test.support import run_unittest + +# A manifest with the only assembly reference being the msvcrt assembly, so +# should have the assembly completely stripped. 
Note that although the +# assembly has a reference the assembly is removed - that is +# currently a "feature", not a bug :) +_MANIFEST_WITH_ONLY_MSVC_REFERENCE = """\ + + + + + + + + + + + + + + + + + +""" + +# A manifest with references to assemblies other than msvcrt. When processed, +# this assembly should be returned with just the msvcrt part removed. +_MANIFEST_WITH_MULTIPLE_REFERENCES = """\ + + + + + + + + + + + + + + + + + + + + + + +""" + +_CLEANED_MANIFEST = """\ + + + + + + + + + + + + + + + + + + +""" + +if sys.platform=="win32": + from distutils.msvccompiler import get_build_version + if get_build_version()>=8.0: + SKIP_MESSAGE = None + else: + SKIP_MESSAGE = "These tests are only for MSVC8.0 or above" +else: + SKIP_MESSAGE = "These tests are only for win32" + +@unittest.skipUnless(SKIP_MESSAGE is None, SKIP_MESSAGE) +class msvc9compilerTestCase(support.TempdirManager, + unittest.TestCase): + + def test_no_compiler(self): + # makes sure query_vcvarsall raises + # a DistutilsPlatformError if the compiler + # is not found + from distutils.msvc9compiler import query_vcvarsall + def _find_vcvarsall(version): + return None + + from distutils import msvc9compiler + old_find_vcvarsall = msvc9compiler.find_vcvarsall + msvc9compiler.find_vcvarsall = _find_vcvarsall + try: + self.assertRaises(DistutilsPlatformError, query_vcvarsall, + 'wont find this version') + finally: + msvc9compiler.find_vcvarsall = old_find_vcvarsall + + def test_reg_class(self): + from distutils.msvc9compiler import Reg + self.assertRaises(KeyError, Reg.get_value, 'xxx', 'xxx') + + # looking for values that should exist on all + # windows registry versions. + path = r'Control Panel\Desktop' + v = Reg.get_value(path, 'dragfullwindows') + self.assertIn(v, ('0', '1', '2')) + + import winreg + HKCU = winreg.HKEY_CURRENT_USER + keys = Reg.read_keys(HKCU, 'xxxx') + self.assertEqual(keys, None) + + keys = Reg.read_keys(HKCU, r'Control Panel') + self.assertIn('Desktop', keys) + + def test_remove_visual_c_ref(self): + from distutils.msvc9compiler import MSVCCompiler + tempdir = self.mkdtemp() + manifest = os.path.join(tempdir, 'manifest') + f = open(manifest, 'w') + try: + f.write(_MANIFEST_WITH_MULTIPLE_REFERENCES) + finally: + f.close() + + compiler = MSVCCompiler() + compiler._remove_visual_c_ref(manifest) + + # see what we got + f = open(manifest) + try: + # removing trailing spaces + content = '\n'.join([line.rstrip() for line in f.readlines()]) + finally: + f.close() + + # makes sure the manifest was properly cleaned + self.assertEqual(content, _CLEANED_MANIFEST) + + def test_remove_entire_manifest(self): + from distutils.msvc9compiler import MSVCCompiler + tempdir = self.mkdtemp() + manifest = os.path.join(tempdir, 'manifest') + f = open(manifest, 'w') + try: + f.write(_MANIFEST_WITH_ONLY_MSVC_REFERENCE) + finally: + f.close() + + compiler = MSVCCompiler() + got = compiler._remove_visual_c_ref(manifest) + self.assertIsNone(got) + + +def test_suite(): + return unittest.TestLoader().loadTestsFromTestCase(msvc9compilerTestCase) + +if __name__ == "__main__": + run_unittest(test_suite()) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_msvccompiler.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_msvccompiler.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_msvccompiler.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_msvccompiler.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,81 @@ +"""Tests for 
distutils._msvccompiler.""" +import sys +import unittest +import os + +from distutils.errors import DistutilsPlatformError +from distutils.tests import support +from test.support import run_unittest + + +SKIP_MESSAGE = (None if sys.platform == "win32" else + "These tests are only for win32") + +@unittest.skipUnless(SKIP_MESSAGE is None, SKIP_MESSAGE) +class msvccompilerTestCase(support.TempdirManager, + unittest.TestCase): + + def test_no_compiler(self): + import distutils._msvccompiler as _msvccompiler + # makes sure query_vcvarsall raises + # a DistutilsPlatformError if the compiler + # is not found + def _find_vcvarsall(plat_spec): + return None, None + + old_find_vcvarsall = _msvccompiler._find_vcvarsall + _msvccompiler._find_vcvarsall = _find_vcvarsall + try: + self.assertRaises(DistutilsPlatformError, + _msvccompiler._get_vc_env, + 'wont find this version') + finally: + _msvccompiler._find_vcvarsall = old_find_vcvarsall + + def test_get_vc_env_unicode(self): + import distutils._msvccompiler as _msvccompiler + + test_var = 'ṰḖṤṪ┅ṼẨṜ' + test_value = '₃⁴₅' + + # Ensure we don't early exit from _get_vc_env + old_distutils_use_sdk = os.environ.pop('DISTUTILS_USE_SDK', None) + os.environ[test_var] = test_value + try: + env = _msvccompiler._get_vc_env('x86') + self.assertIn(test_var.lower(), env) + self.assertEqual(test_value, env[test_var.lower()]) + finally: + os.environ.pop(test_var) + if old_distutils_use_sdk: + os.environ['DISTUTILS_USE_SDK'] = old_distutils_use_sdk + + def test_get_vc2017(self): + import distutils._msvccompiler as _msvccompiler + + # This function cannot be mocked, so pass it if we find VS 2017 + # and mark it skipped if we do not. + version, path = _msvccompiler._find_vc2017() + if version: + self.assertGreaterEqual(version, 15) + self.assertTrue(os.path.isdir(path)) + else: + raise unittest.SkipTest("VS 2017 is not installed") + + def test_get_vc2015(self): + import distutils._msvccompiler as _msvccompiler + + # This function cannot be mocked, so pass it if we find VS 2015 + # and mark it skipped if we do not. 
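Purely as a usage sketch of the two lookup helpers these tests exercise (illustrative; the module only imports on Windows, and both helpers return a falsy version when the corresponding Visual Studio is absent):

    import sys

    if sys.platform == 'win32':
        import distutils._msvccompiler as _msvccompiler
        version, path = _msvccompiler._find_vc2017()
        if not version:
            version, path = _msvccompiler._find_vc2015()
        if version:
            print(version, path)    # e.g. 15+ for VS 2017, 14 for VS 2015, plus the VC directory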
+ version, path = _msvccompiler._find_vc2015() + if version: + self.assertGreaterEqual(version, 14) + self.assertTrue(os.path.isdir(path)) + else: + raise unittest.SkipTest("VS 2015 is not installed") + +def test_suite(): + return unittest.TestLoader().loadTestsFromTestCase(msvccompilerTestCase) + +if __name__ == "__main__": + run_unittest(test_suite()) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_register.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_register.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_register.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_register.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,324 @@ +"""Tests for distutils.command.register.""" +import os +import unittest +import getpass +import urllib +import warnings + +from test.support import run_unittest +from test.support.warnings_helper import check_warnings + +from distutils.command import register as register_module +from distutils.command.register import register +from distutils.errors import DistutilsSetupError +from distutils.log import INFO + +from distutils.tests.test_config import BasePyPIRCCommandTestCase + +try: + import docutils +except ImportError: + docutils = None + +PYPIRC_NOPASSWORD = """\ +[distutils] + +index-servers = + server1 + +[server1] +username:me +""" + +WANTED_PYPIRC = """\ +[distutils] +index-servers = + pypi + +[pypi] +username:tarek +password:password +""" + +class Inputs(object): + """Fakes user inputs.""" + def __init__(self, *answers): + self.answers = answers + self.index = 0 + + def __call__(self, prompt=''): + try: + return self.answers[self.index] + finally: + self.index += 1 + +class FakeOpener(object): + """Fakes a PyPI server""" + def __init__(self): + self.reqs = [] + + def __call__(self, *args): + return self + + def open(self, req, data=None, timeout=None): + self.reqs.append(req) + return self + + def read(self): + return b'xxx' + + def getheader(self, name, default=None): + return { + 'content-type': 'text/plain; charset=utf-8', + }.get(name.lower(), default) + + +class RegisterTestCase(BasePyPIRCCommandTestCase): + + def setUp(self): + super(RegisterTestCase, self).setUp() + # patching the password prompt + self._old_getpass = getpass.getpass + def _getpass(prompt): + return 'password' + getpass.getpass = _getpass + urllib.request._opener = None + self.old_opener = urllib.request.build_opener + self.conn = urllib.request.build_opener = FakeOpener() + + def tearDown(self): + getpass.getpass = self._old_getpass + urllib.request._opener = None + urllib.request.build_opener = self.old_opener + super(RegisterTestCase, self).tearDown() + + def _get_cmd(self, metadata=None): + if metadata is None: + metadata = {'url': 'xxx', 'author': 'xxx', + 'author_email': 'xxx', + 'name': 'xxx', 'version': 'xxx'} + pkg_info, dist = self.create_dist(**metadata) + return register(dist) + + def test_create_pypirc(self): + # this test makes sure a .pypirc file + # is created when requested. + + # let's create a register instance + cmd = self._get_cmd() + + # we shouldn't have a .pypirc file yet + self.assertFalse(os.path.exists(self.rc)) + + # patching input and getpass.getpass + # so register gets happy + # + # Here's what we are faking : + # use your existing login (choice 1.) + # Username : 'tarek' + # Password : 'password' + # Save your login (y/N)? 
: 'y' + inputs = Inputs('1', 'tarek', 'y') + register_module.input = inputs.__call__ + # let's run the command + try: + cmd.run() + finally: + del register_module.input + + # we should have a brand new .pypirc file + self.assertTrue(os.path.exists(self.rc)) + + # with the content similar to WANTED_PYPIRC + f = open(self.rc) + try: + content = f.read() + self.assertEqual(content, WANTED_PYPIRC) + finally: + f.close() + + # now let's make sure the .pypirc file generated + # really works : we shouldn't be asked anything + # if we run the command again + def _no_way(prompt=''): + raise AssertionError(prompt) + register_module.input = _no_way + + cmd.show_response = 1 + cmd.run() + + # let's see what the server received : we should + # have 2 similar requests + self.assertEqual(len(self.conn.reqs), 2) + req1 = dict(self.conn.reqs[0].headers) + req2 = dict(self.conn.reqs[1].headers) + + self.assertEqual(req1['Content-length'], '1374') + self.assertEqual(req2['Content-length'], '1374') + self.assertIn(b'xxx', self.conn.reqs[1].data) + + def test_password_not_in_file(self): + + self.write_file(self.rc, PYPIRC_NOPASSWORD) + cmd = self._get_cmd() + cmd._set_config() + cmd.finalize_options() + cmd.send_metadata() + + # dist.password should be set + # therefore used afterwards by other commands + self.assertEqual(cmd.distribution.password, 'password') + + def test_registering(self): + # this test runs choice 2 + cmd = self._get_cmd() + inputs = Inputs('2', 'tarek', 'tarek@ziade.org') + register_module.input = inputs.__call__ + try: + # let's run the command + cmd.run() + finally: + del register_module.input + + # we should have send a request + self.assertEqual(len(self.conn.reqs), 1) + req = self.conn.reqs[0] + headers = dict(req.headers) + self.assertEqual(headers['Content-length'], '608') + self.assertIn(b'tarek', req.data) + + def test_password_reset(self): + # this test runs choice 3 + cmd = self._get_cmd() + inputs = Inputs('3', 'tarek@ziade.org') + register_module.input = inputs.__call__ + try: + # let's run the command + cmd.run() + finally: + del register_module.input + + # we should have send a request + self.assertEqual(len(self.conn.reqs), 1) + req = self.conn.reqs[0] + headers = dict(req.headers) + self.assertEqual(headers['Content-length'], '290') + self.assertIn(b'tarek', req.data) + + @unittest.skipUnless(docutils is not None, 'needs docutils') + def test_strict(self): + # testing the script option + # when on, the register command stops if + # the metadata is incomplete or if + # long_description is not reSt compliant + + # empty metadata + cmd = self._get_cmd({}) + cmd.ensure_finalized() + cmd.strict = 1 + self.assertRaises(DistutilsSetupError, cmd.run) + + # metadata are OK but long_description is broken + metadata = {'url': 'xxx', 'author': 'xxx', + 'author_email': 'éxéxé', + 'name': 'xxx', 'version': 'xxx', + 'long_description': 'title\n==\n\ntext'} + + cmd = self._get_cmd(metadata) + cmd.ensure_finalized() + cmd.strict = 1 + self.assertRaises(DistutilsSetupError, cmd.run) + + # now something that works + metadata['long_description'] = 'title\n=====\n\ntext' + cmd = self._get_cmd(metadata) + cmd.ensure_finalized() + cmd.strict = 1 + inputs = Inputs('1', 'tarek', 'y') + register_module.input = inputs.__call__ + # let's run the command + try: + cmd.run() + finally: + del register_module.input + + # strict is not by default + cmd = self._get_cmd() + cmd.ensure_finalized() + inputs = Inputs('1', 'tarek', 'y') + register_module.input = inputs.__call__ + # let's run the command + try: + 
cmd.run() + finally: + del register_module.input + + # and finally a Unicode test (bug #12114) + metadata = {'url': 'xxx', 'author': '\u00c9ric', + 'author_email': 'xxx', 'name': 'xxx', + 'version': 'xxx', + 'description': 'Something about esszet \u00df', + 'long_description': 'More things about esszet \u00df'} + + cmd = self._get_cmd(metadata) + cmd.ensure_finalized() + cmd.strict = 1 + inputs = Inputs('1', 'tarek', 'y') + register_module.input = inputs.__call__ + # let's run the command + try: + cmd.run() + finally: + del register_module.input + + @unittest.skipUnless(docutils is not None, 'needs docutils') + def test_register_invalid_long_description(self): + description = ':funkie:`str`' # mimic Sphinx-specific markup + metadata = {'url': 'xxx', 'author': 'xxx', + 'author_email': 'xxx', + 'name': 'xxx', 'version': 'xxx', + 'long_description': description} + cmd = self._get_cmd(metadata) + cmd.ensure_finalized() + cmd.strict = True + inputs = Inputs('2', 'tarek', 'tarek@ziade.org') + register_module.input = inputs + self.addCleanup(delattr, register_module, 'input') + + self.assertRaises(DistutilsSetupError, cmd.run) + + def test_check_metadata_deprecated(self): + # makes sure make_metadata is deprecated + cmd = self._get_cmd() + with check_warnings() as w: + warnings.simplefilter("always") + cmd.check_metadata() + self.assertEqual(len(w.warnings), 1) + + def test_list_classifiers(self): + cmd = self._get_cmd() + cmd.list_classifiers = 1 + cmd.run() + results = self.get_logs(INFO) + self.assertEqual(results, ['running check', 'xxx']) + + def test_show_response(self): + # test that the --show-response option return a well formatted response + cmd = self._get_cmd() + inputs = Inputs('1', 'tarek', 'y') + register_module.input = inputs.__call__ + cmd.show_response = 1 + try: + cmd.run() + finally: + del register_module.input + + results = self.get_logs(INFO) + self.assertEqual(results[3], 75 * '-' + '\nxxx\n' + 75 * '-') + + +def test_suite(): + return unittest.TestLoader().loadTestsFromTestCase(RegisterTestCase) + +if __name__ == "__main__": + run_unittest(test_suite()) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_sdist.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_sdist.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_sdist.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_sdist.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,493 @@ +"""Tests for distutils.command.sdist.""" +import os +import tarfile +import unittest +import warnings +import zipfile +from os.path import join +from textwrap import dedent +from test.support import captured_stdout, run_unittest +from test.support.warnings_helper import check_warnings + +try: + import zlib + ZLIB_SUPPORT = True +except ImportError: + ZLIB_SUPPORT = False + +try: + import grp + import pwd + UID_GID_SUPPORT = True +except ImportError: + UID_GID_SUPPORT = False + +from distutils.command.sdist import sdist, show_formats +from distutils.core import Distribution +from distutils.tests.test_config import BasePyPIRCCommandTestCase +from distutils.errors import DistutilsOptionError +from distutils.spawn import find_executable +from distutils.log import WARN +from distutils.filelist import FileList +from distutils.archive_util import ARCHIVE_FORMATS + +SETUP_PY = """ +from distutils.core import setup +import somecode + +setup(name='fake') +""" + +MANIFEST = """\ +# file GENERATED by distutils, do NOT edit +README +buildout.cfg 
+inroot.txt +setup.py +data%(sep)sdata.dt +scripts%(sep)sscript.py +some%(sep)sfile.txt +some%(sep)sother_file.txt +somecode%(sep)s__init__.py +somecode%(sep)sdoc.dat +somecode%(sep)sdoc.txt +""" + +class SDistTestCase(BasePyPIRCCommandTestCase): + + def setUp(self): + # PyPIRCCommandTestCase creates a temp dir already + # and put it in self.tmp_dir + super(SDistTestCase, self).setUp() + # setting up an environment + self.old_path = os.getcwd() + os.mkdir(join(self.tmp_dir, 'somecode')) + os.mkdir(join(self.tmp_dir, 'dist')) + # a package, and a README + self.write_file((self.tmp_dir, 'README'), 'xxx') + self.write_file((self.tmp_dir, 'somecode', '__init__.py'), '#') + self.write_file((self.tmp_dir, 'setup.py'), SETUP_PY) + os.chdir(self.tmp_dir) + + def tearDown(self): + # back to normal + os.chdir(self.old_path) + super(SDistTestCase, self).tearDown() + + def get_cmd(self, metadata=None): + """Returns a cmd""" + if metadata is None: + metadata = {'name': 'fake', 'version': '1.0', + 'url': 'xxx', 'author': 'xxx', + 'author_email': 'xxx'} + dist = Distribution(metadata) + dist.script_name = 'setup.py' + dist.packages = ['somecode'] + dist.include_package_data = True + cmd = sdist(dist) + cmd.dist_dir = 'dist' + return dist, cmd + + @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') + def test_prune_file_list(self): + # this test creates a project with some VCS dirs and an NFS rename + # file, then launches sdist to check they get pruned on all systems + + # creating VCS directories with some files in them + os.mkdir(join(self.tmp_dir, 'somecode', '.svn')) + self.write_file((self.tmp_dir, 'somecode', '.svn', 'ok.py'), 'xxx') + + os.mkdir(join(self.tmp_dir, 'somecode', '.hg')) + self.write_file((self.tmp_dir, 'somecode', '.hg', + 'ok'), 'xxx') + + os.mkdir(join(self.tmp_dir, 'somecode', '.git')) + self.write_file((self.tmp_dir, 'somecode', '.git', + 'ok'), 'xxx') + + self.write_file((self.tmp_dir, 'somecode', '.nfs0001'), 'xxx') + + # now building a sdist + dist, cmd = self.get_cmd() + + # zip is available universally + # (tar might not be installed under win32) + cmd.formats = ['zip'] + + cmd.ensure_finalized() + cmd.run() + + # now let's check what we have + dist_folder = join(self.tmp_dir, 'dist') + files = os.listdir(dist_folder) + self.assertEqual(files, ['fake-1.0.zip']) + + zip_file = zipfile.ZipFile(join(dist_folder, 'fake-1.0.zip')) + try: + content = zip_file.namelist() + finally: + zip_file.close() + + # making sure everything has been pruned correctly + expected = ['', 'PKG-INFO', 'README', 'setup.py', + 'somecode/', 'somecode/__init__.py'] + self.assertEqual(sorted(content), ['fake-1.0/' + x for x in expected]) + + @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') + @unittest.skipIf(find_executable('tar') is None, + "The tar command is not found") + @unittest.skipIf(find_executable('gzip') is None, + "The gzip command is not found") + def test_make_distribution(self): + # now building a sdist + dist, cmd = self.get_cmd() + + # creating a gztar then a tar + cmd.formats = ['gztar', 'tar'] + cmd.ensure_finalized() + cmd.run() + + # making sure we have two files + dist_folder = join(self.tmp_dir, 'dist') + result = os.listdir(dist_folder) + result.sort() + self.assertEqual(result, ['fake-1.0.tar', 'fake-1.0.tar.gz']) + + os.remove(join(dist_folder, 'fake-1.0.tar')) + os.remove(join(dist_folder, 'fake-1.0.tar.gz')) + + # now trying a tar then a gztar + cmd.formats = ['tar', 'gztar'] + + cmd.ensure_finalized() + cmd.run() + + result = os.listdir(dist_folder) + 
result.sort()
+ self.assertEqual(result, ['fake-1.0.tar', 'fake-1.0.tar.gz'])
+
+ @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run')
+ def test_add_defaults(self):
+
+ # http://bugs.python.org/issue2279
+
+ # add_defaults should also include
+ # data_files and package_data
+ dist, cmd = self.get_cmd()
+
+ # filling data_files by pointing files
+ # in package_data
+ dist.package_data = {'': ['*.cfg', '*.dat'],
+ 'somecode': ['*.txt']}
+ self.write_file((self.tmp_dir, 'somecode', 'doc.txt'), '#')
+ self.write_file((self.tmp_dir, 'somecode', 'doc.dat'), '#')
+
+ # adding some data in data_files
+ data_dir = join(self.tmp_dir, 'data')
+ os.mkdir(data_dir)
+ self.write_file((data_dir, 'data.dt'), '#')
+ some_dir = join(self.tmp_dir, 'some')
+ os.mkdir(some_dir)
+ # make sure VCS directories are pruned (#14004)
+ hg_dir = join(self.tmp_dir, '.hg')
+ os.mkdir(hg_dir)
+ self.write_file((hg_dir, 'last-message.txt'), '#')
+ # a buggy regex used to prevent this from working on Windows (#6884)
+ self.write_file((self.tmp_dir, 'buildout.cfg'), '#')
+ self.write_file((self.tmp_dir, 'inroot.txt'), '#')
+ self.write_file((some_dir, 'file.txt'), '#')
+ self.write_file((some_dir, 'other_file.txt'), '#')
+
+ dist.data_files = [('data', ['data/data.dt',
+ 'buildout.cfg',
+ 'inroot.txt',
+ 'notexisting']),
+ 'some/file.txt',
+ 'some/other_file.txt']
+
+ # adding a script
+ script_dir = join(self.tmp_dir, 'scripts')
+ os.mkdir(script_dir)
+ self.write_file((script_dir, 'script.py'), '#')
+ dist.scripts = [join('scripts', 'script.py')]
+
+ cmd.formats = ['zip']
+ cmd.use_defaults = True
+
+ cmd.ensure_finalized()
+ cmd.run()
+
+ # now let's check what we have
+ dist_folder = join(self.tmp_dir, 'dist')
+ files = os.listdir(dist_folder)
+ self.assertEqual(files, ['fake-1.0.zip'])
+
+ zip_file = zipfile.ZipFile(join(dist_folder, 'fake-1.0.zip'))
+ try:
+ content = zip_file.namelist()
+ finally:
+ zip_file.close()
+
+ # making sure everything was added
+ expected = ['', 'PKG-INFO', 'README', 'buildout.cfg',
+ 'data/', 'data/data.dt', 'inroot.txt',
+ 'scripts/', 'scripts/script.py', 'setup.py',
+ 'some/', 'some/file.txt', 'some/other_file.txt',
+ 'somecode/', 'somecode/__init__.py', 'somecode/doc.dat',
+ 'somecode/doc.txt']
+ self.assertEqual(sorted(content), ['fake-1.0/' + x for x in expected])
+
+ # checking the MANIFEST
+ f = open(join(self.tmp_dir, 'MANIFEST'))
+ try:
+ manifest = f.read()
+ finally:
+ f.close()
+ self.assertEqual(manifest, MANIFEST % {'sep': os.sep})
+
+ @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run')
+ def test_metadata_check_option(self):
+ # testing the `metadata-check` option
+ dist, cmd = self.get_cmd(metadata={})
+
+ # this should raise some warnings!
+ # with the `check` subcommand + cmd.ensure_finalized() + cmd.run() + warnings = [msg for msg in self.get_logs(WARN) if + msg.startswith('warning: check:')] + self.assertEqual(len(warnings), 2) + + # trying with a complete set of metadata + self.clear_logs() + dist, cmd = self.get_cmd() + cmd.ensure_finalized() + cmd.metadata_check = 0 + cmd.run() + warnings = [msg for msg in self.get_logs(WARN) if + msg.startswith('warning: check:')] + self.assertEqual(len(warnings), 0) + + def test_check_metadata_deprecated(self): + # makes sure make_metadata is deprecated + dist, cmd = self.get_cmd() + with check_warnings() as w: + warnings.simplefilter("always") + cmd.check_metadata() + self.assertEqual(len(w.warnings), 1) + + def test_show_formats(self): + with captured_stdout() as stdout: + show_formats() + + # the output should be a header line + one line per format + num_formats = len(ARCHIVE_FORMATS.keys()) + output = [line for line in stdout.getvalue().split('\n') + if line.strip().startswith('--formats=')] + self.assertEqual(len(output), num_formats) + + def test_finalize_options(self): + dist, cmd = self.get_cmd() + cmd.finalize_options() + + # default options set by finalize + self.assertEqual(cmd.manifest, 'MANIFEST') + self.assertEqual(cmd.template, 'MANIFEST.in') + self.assertEqual(cmd.dist_dir, 'dist') + + # formats has to be a string splitable on (' ', ',') or + # a stringlist + cmd.formats = 1 + self.assertRaises(DistutilsOptionError, cmd.finalize_options) + cmd.formats = ['zip'] + cmd.finalize_options() + + # formats has to be known + cmd.formats = 'supazipa' + self.assertRaises(DistutilsOptionError, cmd.finalize_options) + + # the following tests make sure there is a nice error message instead + # of a traceback when parsing an invalid manifest template + + def _check_template(self, content): + dist, cmd = self.get_cmd() + os.chdir(self.tmp_dir) + self.write_file('MANIFEST.in', content) + cmd.ensure_finalized() + cmd.filelist = FileList() + cmd.read_template() + warnings = self.get_logs(WARN) + self.assertEqual(len(warnings), 1) + + def test_invalid_template_unknown_command(self): + self._check_template('taunt knights *') + + def test_invalid_template_wrong_arguments(self): + # this manifest command takes one argument + self._check_template('prune') + + @unittest.skipIf(os.name != 'nt', 'test relevant for Windows only') + def test_invalid_template_wrong_path(self): + # on Windows, trailing slashes are not allowed + # this used to crash instead of raising a warning: #8286 + self._check_template('include examples/') + + @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') + def test_get_file_list(self): + # make sure MANIFEST is recalculated + dist, cmd = self.get_cmd() + + # filling data_files by pointing files in package_data + dist.package_data = {'somecode': ['*.txt']} + self.write_file((self.tmp_dir, 'somecode', 'doc.txt'), '#') + cmd.formats = ['gztar'] + cmd.ensure_finalized() + cmd.run() + + f = open(cmd.manifest) + try: + manifest = [line.strip() for line in f.read().split('\n') + if line.strip() != ''] + finally: + f.close() + + self.assertEqual(len(manifest), 5) + + # adding a file + self.write_file((self.tmp_dir, 'somecode', 'doc2.txt'), '#') + + # make sure build_py is reinitialized, like a fresh run + build_py = dist.get_command_obj('build_py') + build_py.finalized = False + build_py.ensure_finalized() + + cmd.run() + + f = open(cmd.manifest) + try: + manifest2 = [line.strip() for line in f.read().split('\n') + if line.strip() != ''] + finally: + f.close() + + # 
do we have the new file in MANIFEST ? + self.assertEqual(len(manifest2), 6) + self.assertIn('doc2.txt', manifest2[-1]) + + @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') + def test_manifest_marker(self): + # check that autogenerated MANIFESTs have a marker + dist, cmd = self.get_cmd() + cmd.ensure_finalized() + cmd.run() + + f = open(cmd.manifest) + try: + manifest = [line.strip() for line in f.read().split('\n') + if line.strip() != ''] + finally: + f.close() + + self.assertEqual(manifest[0], + '# file GENERATED by distutils, do NOT edit') + + @unittest.skipUnless(ZLIB_SUPPORT, "Need zlib support to run") + def test_manifest_comments(self): + # make sure comments don't cause exceptions or wrong includes + contents = dedent("""\ + # bad.py + #bad.py + good.py + """) + dist, cmd = self.get_cmd() + cmd.ensure_finalized() + self.write_file((self.tmp_dir, cmd.manifest), contents) + self.write_file((self.tmp_dir, 'good.py'), '# pick me!') + self.write_file((self.tmp_dir, 'bad.py'), "# don't pick me!") + self.write_file((self.tmp_dir, '#bad.py'), "# don't pick me!") + cmd.run() + self.assertEqual(cmd.filelist.files, ['good.py']) + + @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') + def test_manual_manifest(self): + # check that a MANIFEST without a marker is left alone + dist, cmd = self.get_cmd() + cmd.formats = ['gztar'] + cmd.ensure_finalized() + self.write_file((self.tmp_dir, cmd.manifest), 'README.manual') + self.write_file((self.tmp_dir, 'README.manual'), + 'This project maintains its MANIFEST file itself.') + cmd.run() + self.assertEqual(cmd.filelist.files, ['README.manual']) + + f = open(cmd.manifest) + try: + manifest = [line.strip() for line in f.read().split('\n') + if line.strip() != ''] + finally: + f.close() + + self.assertEqual(manifest, ['README.manual']) + + archive_name = join(self.tmp_dir, 'dist', 'fake-1.0.tar.gz') + archive = tarfile.open(archive_name) + try: + filenames = [tarinfo.name for tarinfo in archive] + finally: + archive.close() + self.assertEqual(sorted(filenames), ['fake-1.0', 'fake-1.0/PKG-INFO', + 'fake-1.0/README.manual']) + + @unittest.skipUnless(ZLIB_SUPPORT, "requires zlib") + @unittest.skipUnless(UID_GID_SUPPORT, "Requires grp and pwd support") + @unittest.skipIf(find_executable('tar') is None, + "The tar command is not found") + @unittest.skipIf(find_executable('gzip') is None, + "The gzip command is not found") + def test_make_distribution_owner_group(self): + # now building a sdist + dist, cmd = self.get_cmd() + + # creating a gztar and specifying the owner+group + cmd.formats = ['gztar'] + cmd.owner = pwd.getpwuid(0)[0] + cmd.group = grp.getgrgid(0)[0] + cmd.ensure_finalized() + cmd.run() + + # making sure we have the good rights + archive_name = join(self.tmp_dir, 'dist', 'fake-1.0.tar.gz') + archive = tarfile.open(archive_name) + try: + for member in archive.getmembers(): + self.assertEqual(member.uid, 0) + self.assertEqual(member.gid, 0) + finally: + archive.close() + + # building a sdist again + dist, cmd = self.get_cmd() + + # creating a gztar + cmd.formats = ['gztar'] + cmd.ensure_finalized() + cmd.run() + + # making sure we have the good rights + archive_name = join(self.tmp_dir, 'dist', 'fake-1.0.tar.gz') + archive = tarfile.open(archive_name) + + # note that we are not testing the group ownership here + # because, depending on the platforms and the container + # rights (see #7408) + try: + for member in archive.getmembers(): + self.assertEqual(member.uid, os.getuid()) + finally: + archive.close() + +def 
test_suite(): + return unittest.TestLoader().loadTestsFromTestCase(SDistTestCase) + +if __name__ == "__main__": + run_unittest(test_suite()) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_spawn.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_spawn.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_spawn.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_spawn.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,139 @@ +"""Tests for distutils.spawn.""" +import os +import stat +import sys +import unittest.mock +from test.support import run_unittest, unix_shell, requires_subprocess +from test.support import os_helper + +from distutils.spawn import find_executable +from distutils.spawn import spawn +from distutils.errors import DistutilsExecError +from distutils.tests import support + + +@requires_subprocess() +class SpawnTestCase(support.TempdirManager, + support.LoggingSilencer, + unittest.TestCase): + + @unittest.skipUnless(os.name in ('nt', 'posix'), + 'Runs only under posix or nt') + def test_spawn(self): + tmpdir = self.mkdtemp() + + # creating something executable + # through the shell that returns 1 + if sys.platform != 'win32': + exe = os.path.join(tmpdir, 'foo.sh') + self.write_file(exe, '#!%s\nexit 1' % unix_shell) + else: + exe = os.path.join(tmpdir, 'foo.bat') + self.write_file(exe, 'exit 1') + + os.chmod(exe, 0o777) + self.assertRaises(DistutilsExecError, spawn, [exe]) + + # now something that works + if sys.platform != 'win32': + exe = os.path.join(tmpdir, 'foo.sh') + self.write_file(exe, '#!%s\nexit 0' % unix_shell) + else: + exe = os.path.join(tmpdir, 'foo.bat') + self.write_file(exe, 'exit 0') + + os.chmod(exe, 0o777) + spawn([exe]) # should work without any error + + def test_find_executable(self): + with os_helper.temp_dir() as tmp_dir: + # use TESTFN to get a pseudo-unique filename + program_noeext = os_helper.TESTFN + # Give the temporary program an ".exe" suffix for all. + # It's needed on Windows and not harmful on other platforms. 
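For orientation while reading the lookups tested below, here is a minimal, hedged sketch of how distutils.spawn.find_executable behaves; the program name and path used here are hypothetical, and on Windows a missing ".exe" suffix is appended before the search, which is why the test gives its temporary program that suffix.

# Illustrative sketch only; "mytool" and "/opt/tools" are hypothetical.
from distutils.spawn import find_executable

find_executable("mytool")                     # searched on os.environ["PATH"]
find_executable("mytool", path="/opt/tools")  # restricted to the given path
# On Windows the lookup is effectively for "mytool.exe".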
+ program = program_noeext + ".exe" + + filename = os.path.join(tmp_dir, program) + with open(filename, "wb"): + pass + os.chmod(filename, stat.S_IXUSR) + + # test path parameter + rv = find_executable(program, path=tmp_dir) + self.assertEqual(rv, filename) + + if sys.platform == 'win32': + # test without ".exe" extension + rv = find_executable(program_noeext, path=tmp_dir) + self.assertEqual(rv, filename) + + # test find in the current directory + with os_helper.change_cwd(tmp_dir): + rv = find_executable(program) + self.assertEqual(rv, program) + + # test non-existent program + dont_exist_program = "dontexist_" + program + rv = find_executable(dont_exist_program , path=tmp_dir) + self.assertIsNone(rv) + + # PATH='': no match, except in the current directory + with os_helper.EnvironmentVarGuard() as env: + env['PATH'] = '' + with unittest.mock.patch('distutils.spawn.os.confstr', + return_value=tmp_dir, create=True), \ + unittest.mock.patch('distutils.spawn.os.defpath', + tmp_dir): + rv = find_executable(program) + self.assertIsNone(rv) + + # look in current directory + with os_helper.change_cwd(tmp_dir): + rv = find_executable(program) + self.assertEqual(rv, program) + + # PATH=':': explicitly looks in the current directory + with os_helper.EnvironmentVarGuard() as env: + env['PATH'] = os.pathsep + with unittest.mock.patch('distutils.spawn.os.confstr', + return_value='', create=True), \ + unittest.mock.patch('distutils.spawn.os.defpath', ''): + rv = find_executable(program) + self.assertIsNone(rv) + + # look in current directory + with os_helper.change_cwd(tmp_dir): + rv = find_executable(program) + self.assertEqual(rv, program) + + # missing PATH: test os.confstr("CS_PATH") and os.defpath + with os_helper.EnvironmentVarGuard() as env: + env.pop('PATH', None) + + # without confstr + with unittest.mock.patch('distutils.spawn.os.confstr', + side_effect=ValueError, + create=True), \ + unittest.mock.patch('distutils.spawn.os.defpath', + tmp_dir): + rv = find_executable(program) + self.assertEqual(rv, filename) + + # with confstr + with unittest.mock.patch('distutils.spawn.os.confstr', + return_value=tmp_dir, create=True), \ + unittest.mock.patch('distutils.spawn.os.defpath', ''): + rv = find_executable(program) + self.assertEqual(rv, filename) + + def test_spawn_missing_exe(self): + with self.assertRaises(DistutilsExecError) as ctx: + spawn(['does-not-exist']) + self.assertIn("command 'does-not-exist' failed", str(ctx.exception)) + + +def test_suite(): + return unittest.TestLoader().loadTestsFromTestCase(SpawnTestCase) + +if __name__ == "__main__": + run_unittest(test_suite()) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_sysconfig.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_sysconfig.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_sysconfig.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_sysconfig.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,263 @@ +"""Tests for distutils.sysconfig.""" +import contextlib +import os +import shutil +import subprocess +import sys +import textwrap +import unittest + +from distutils import sysconfig +from distutils.ccompiler import get_default_compiler +from distutils.tests import support +from test.support import run_unittest, swap_item, requires_subprocess, is_wasi +from test.support.os_helper import TESTFN +from test.support.warnings_helper import check_warnings + + +class SysconfigTestCase(support.EnvironGuard, 
unittest.TestCase): + def setUp(self): + super(SysconfigTestCase, self).setUp() + self.makefile = None + + def tearDown(self): + if self.makefile is not None: + os.unlink(self.makefile) + self.cleanup_testfn() + super(SysconfigTestCase, self).tearDown() + + def cleanup_testfn(self): + if os.path.isfile(TESTFN): + os.remove(TESTFN) + elif os.path.isdir(TESTFN): + shutil.rmtree(TESTFN) + + @unittest.skipIf(is_wasi, "Incompatible with WASI mapdir and OOT builds") + def test_get_config_h_filename(self): + config_h = sysconfig.get_config_h_filename() + self.assertTrue(os.path.isfile(config_h), config_h) + + def test_get_python_lib(self): + # XXX doesn't work on Linux when Python was never installed before + #self.assertTrue(os.path.isdir(lib_dir), lib_dir) + # test for pythonxx.lib? + self.assertNotEqual(sysconfig.get_python_lib(), + sysconfig.get_python_lib(prefix=TESTFN)) + + def test_get_config_vars(self): + cvars = sysconfig.get_config_vars() + self.assertIsInstance(cvars, dict) + self.assertTrue(cvars) + + def test_srcdir(self): + # See Issues #15322, #15364. + srcdir = sysconfig.get_config_var('srcdir') + + self.assertTrue(os.path.isabs(srcdir), srcdir) + self.assertTrue(os.path.isdir(srcdir), srcdir) + + if sysconfig.python_build: + # The python executable has not been installed so srcdir + # should be a full source checkout. + Python_h = os.path.join(srcdir, 'Include', 'Python.h') + self.assertTrue(os.path.exists(Python_h), Python_h) + # /PC/pyconfig.h always exists even if unused on POSIX. + pyconfig_h = os.path.join(srcdir, 'PC', 'pyconfig.h') + self.assertTrue(os.path.exists(pyconfig_h), pyconfig_h) + pyconfig_h_in = os.path.join(srcdir, 'pyconfig.h.in') + self.assertTrue(os.path.exists(pyconfig_h_in), pyconfig_h_in) + elif os.name == 'posix': + self.assertEqual( + os.path.dirname(sysconfig.get_makefile_filename()), + srcdir) + + def test_srcdir_independent_of_cwd(self): + # srcdir should be independent of the current working directory + # See Issues #15322, #15364. 
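As a quick companion to the srcdir assertions that follow, a small illustrative snippet that prints the sysconfig values these tests depend on; the exact output naturally differs between an installed interpreter and an in-tree build.

# Illustrative only: inspect the values the surrounding assertions rely on.
from distutils import sysconfig

print(sysconfig.get_config_var('srcdir'))   # absolute, existing directory
print(sysconfig.get_config_h_filename())    # path to pyconfig.h
print(sysconfig.get_python_lib())           # default library directory
print(sysconfig.python_build)               # True only for an uninstalled build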
+ srcdir = sysconfig.get_config_var('srcdir') + cwd = os.getcwd() + try: + os.chdir('..') + srcdir2 = sysconfig.get_config_var('srcdir') + finally: + os.chdir(cwd) + self.assertEqual(srcdir, srcdir2) + + def customize_compiler(self): + # make sure AR gets caught + class compiler: + compiler_type = 'unix' + + def set_executables(self, **kw): + self.exes = kw + + sysconfig_vars = { + 'AR': 'sc_ar', + 'CC': 'sc_cc', + 'CXX': 'sc_cxx', + 'ARFLAGS': '--sc-arflags', + 'CFLAGS': '--sc-cflags', + 'CCSHARED': '--sc-ccshared', + 'LDSHARED': 'sc_ldshared', + 'SHLIB_SUFFIX': 'sc_shutil_suffix', + + # On macOS, disable _osx_support.customize_compiler() + 'CUSTOMIZED_OSX_COMPILER': 'True', + } + + comp = compiler() + with contextlib.ExitStack() as cm: + for key, value in sysconfig_vars.items(): + cm.enter_context(swap_item(sysconfig._config_vars, key, value)) + sysconfig.customize_compiler(comp) + + return comp + + @unittest.skipUnless(get_default_compiler() == 'unix', + 'not testing if default compiler is not unix') + def test_customize_compiler(self): + # Make sure that sysconfig._config_vars is initialized + sysconfig.get_config_vars() + + os.environ['AR'] = 'env_ar' + os.environ['CC'] = 'env_cc' + os.environ['CPP'] = 'env_cpp' + os.environ['CXX'] = 'env_cxx --env-cxx-flags' + os.environ['LDSHARED'] = 'env_ldshared' + os.environ['LDFLAGS'] = '--env-ldflags' + os.environ['ARFLAGS'] = '--env-arflags' + os.environ['CFLAGS'] = '--env-cflags' + os.environ['CPPFLAGS'] = '--env-cppflags' + + comp = self.customize_compiler() + self.assertEqual(comp.exes['archiver'], + 'env_ar --env-arflags') + self.assertEqual(comp.exes['preprocessor'], + 'env_cpp --env-cppflags') + self.assertEqual(comp.exes['compiler'], + 'env_cc --sc-cflags --env-cflags --env-cppflags') + self.assertEqual(comp.exes['compiler_so'], + ('env_cc --sc-cflags ' + '--env-cflags ''--env-cppflags --sc-ccshared')) + self.assertEqual(comp.exes['compiler_cxx'], + 'env_cxx --env-cxx-flags') + self.assertEqual(comp.exes['linker_exe'], + 'env_cc') + self.assertEqual(comp.exes['linker_so'], + ('env_ldshared --env-ldflags --env-cflags' + ' --env-cppflags')) + self.assertEqual(comp.shared_lib_extension, 'sc_shutil_suffix') + + del os.environ['AR'] + del os.environ['CC'] + del os.environ['CPP'] + del os.environ['CXX'] + del os.environ['LDSHARED'] + del os.environ['LDFLAGS'] + del os.environ['ARFLAGS'] + del os.environ['CFLAGS'] + del os.environ['CPPFLAGS'] + + comp = self.customize_compiler() + self.assertEqual(comp.exes['archiver'], + 'sc_ar --sc-arflags') + self.assertEqual(comp.exes['preprocessor'], + 'sc_cc -E') + self.assertEqual(comp.exes['compiler'], + 'sc_cc --sc-cflags') + self.assertEqual(comp.exes['compiler_so'], + 'sc_cc --sc-cflags --sc-ccshared') + self.assertEqual(comp.exes['compiler_cxx'], + 'sc_cxx') + self.assertEqual(comp.exes['linker_exe'], + 'sc_cc') + self.assertEqual(comp.exes['linker_so'], + 'sc_ldshared') + self.assertEqual(comp.shared_lib_extension, 'sc_shutil_suffix') + + def test_parse_makefile_base(self): + self.makefile = TESTFN + fd = open(self.makefile, 'w') + try: + fd.write(r"CONFIG_ARGS= '--arg1=optarg1' 'ENV=LIB'" '\n') + fd.write('VAR=$OTHER\nOTHER=foo') + finally: + fd.close() + d = sysconfig.parse_makefile(self.makefile) + self.assertEqual(d, {'CONFIG_ARGS': "'--arg1=optarg1' 'ENV=LIB'", + 'OTHER': 'foo'}) + + def test_parse_makefile_literal_dollar(self): + self.makefile = TESTFN + fd = open(self.makefile, 'w') + try: + fd.write(r"CONFIG_ARGS= '--arg1=optarg1' 'ENV=\$$LIB'" '\n') + fd.write('VAR=$OTHER\nOTHER=foo') + 
finally: + fd.close() + d = sysconfig.parse_makefile(self.makefile) + self.assertEqual(d, {'CONFIG_ARGS': r"'--arg1=optarg1' 'ENV=\$LIB'", + 'OTHER': 'foo'}) + + + def test_sysconfig_module(self): + import sysconfig as global_sysconfig + self.assertEqual(global_sysconfig.get_config_var('CFLAGS'), + sysconfig.get_config_var('CFLAGS')) + self.assertEqual(global_sysconfig.get_config_var('LDFLAGS'), + sysconfig.get_config_var('LDFLAGS')) + + @unittest.skipIf(sysconfig.get_config_var('CUSTOMIZED_OSX_COMPILER'), + 'compiler flags customized') + def test_sysconfig_compiler_vars(self): + # On OS X, binary installers support extension module building on + # various levels of the operating system with differing Xcode + # configurations. This requires customization of some of the + # compiler configuration directives to suit the environment on + # the installed machine. Some of these customizations may require + # running external programs and, so, are deferred until needed by + # the first extension module build. With Python 3.3, only + # the Distutils version of sysconfig is used for extension module + # builds, which happens earlier in the Distutils tests. This may + # cause the following tests to fail since no tests have caused + # the global version of sysconfig to call the customization yet. + # The solution for now is to simply skip this test in this case. + # The longer-term solution is to only have one version of sysconfig. + + import sysconfig as global_sysconfig + if sysconfig.get_config_var('CUSTOMIZED_OSX_COMPILER'): + self.skipTest('compiler flags customized') + self.assertEqual(global_sysconfig.get_config_var('LDSHARED'), + sysconfig.get_config_var('LDSHARED')) + self.assertEqual(global_sysconfig.get_config_var('CC'), + sysconfig.get_config_var('CC')) + + @requires_subprocess() + def test_customize_compiler_before_get_config_vars(self): + # Issue #21923: test that a Distribution compiler + # instance can be called without an explicit call to + # get_config_vars(). + with open(TESTFN, 'w') as f: + f.writelines(textwrap.dedent('''\ + from distutils.core import Distribution + config = Distribution().get_command_obj('config') + # try_compile may pass or it may fail if no compiler + # is found but it should not raise an exception. 
+ rc = config.try_compile('int x;') + ''')) + p = subprocess.Popen([str(sys.executable), TESTFN], + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + universal_newlines=True) + outs, errs = p.communicate() + self.assertEqual(0, p.returncode, "Subprocess failed: " + outs) + + +def test_suite(): + suite = unittest.TestSuite() + suite.addTest(unittest.TestLoader().loadTestsFromTestCase(SysconfigTestCase)) + return suite + + +if __name__ == '__main__': + run_unittest(test_suite()) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_text_file.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_text_file.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_text_file.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_text_file.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,107 @@ +"""Tests for distutils.text_file.""" +import os +import unittest +from distutils.text_file import TextFile +from distutils.tests import support +from test.support import run_unittest + +TEST_DATA = """# test file + +line 3 \\ +# intervening comment + continues on next line +""" + +class TextFileTestCase(support.TempdirManager, unittest.TestCase): + + def test_class(self): + # old tests moved from text_file.__main__ + # so they are really called by the buildbots + + # result 1: no fancy options + result1 = ['# test file\n', '\n', 'line 3 \\\n', + '# intervening comment\n', + ' continues on next line\n'] + + # result 2: just strip comments + result2 = ["\n", + "line 3 \\\n", + " continues on next line\n"] + + # result 3: just strip blank lines + result3 = ["# test file\n", + "line 3 \\\n", + "# intervening comment\n", + " continues on next line\n"] + + # result 4: default, strip comments, blank lines, + # and trailing whitespace + result4 = ["line 3 \\", + " continues on next line"] + + # result 5: strip comments and blanks, plus join lines (but don't + # "collapse" joined lines + result5 = ["line 3 continues on next line"] + + # result 6: strip comments and blanks, plus join lines (and + # "collapse" joined lines + result6 = ["line 3 continues on next line"] + + def test_input(count, description, file, expected_result): + result = file.readlines() + self.assertEqual(result, expected_result) + + tmpdir = self.mkdtemp() + filename = os.path.join(tmpdir, "test.txt") + out_file = open(filename, "w") + try: + out_file.write(TEST_DATA) + finally: + out_file.close() + + in_file = TextFile(filename, strip_comments=0, skip_blanks=0, + lstrip_ws=0, rstrip_ws=0) + try: + test_input(1, "no processing", in_file, result1) + finally: + in_file.close() + + in_file = TextFile(filename, strip_comments=1, skip_blanks=0, + lstrip_ws=0, rstrip_ws=0) + try: + test_input(2, "strip comments", in_file, result2) + finally: + in_file.close() + + in_file = TextFile(filename, strip_comments=0, skip_blanks=1, + lstrip_ws=0, rstrip_ws=0) + try: + test_input(3, "strip blanks", in_file, result3) + finally: + in_file.close() + + in_file = TextFile(filename) + try: + test_input(4, "default processing", in_file, result4) + finally: + in_file.close() + + in_file = TextFile(filename, strip_comments=1, skip_blanks=1, + join_lines=1, rstrip_ws=1) + try: + test_input(5, "join lines without collapsing", in_file, result5) + finally: + in_file.close() + + in_file = TextFile(filename, strip_comments=1, skip_blanks=1, + join_lines=1, rstrip_ws=1, collapse_join=1) + try: + test_input(6, "join lines with collapsing", in_file, result6) + finally: + 
in_file.close() + +def test_suite(): + return unittest.TestLoader().loadTestsFromTestCase(TextFileTestCase) + +if __name__ == "__main__": + run_unittest(test_suite()) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_unixccompiler.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_unixccompiler.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_unixccompiler.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_unixccompiler.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,145 @@ +"""Tests for distutils.unixccompiler.""" +import sys +import unittest +from test.support import run_unittest +from test.support.os_helper import EnvironmentVarGuard + +from distutils import sysconfig +from distutils.unixccompiler import UnixCCompiler + +class UnixCCompilerTestCase(unittest.TestCase): + + def setUp(self): + self._backup_platform = sys.platform + self._backup_get_config_var = sysconfig.get_config_var + self._backup_config_vars = dict(sysconfig._config_vars) + class CompilerWrapper(UnixCCompiler): + def rpath_foo(self): + return self.runtime_library_dir_option('/foo') + self.cc = CompilerWrapper() + + def tearDown(self): + sys.platform = self._backup_platform + sysconfig.get_config_var = self._backup_get_config_var + sysconfig._config_vars.clear() + sysconfig._config_vars.update(self._backup_config_vars) + + @unittest.skipIf(sys.platform == 'win32', "can't test on Windows") + def test_runtime_libdir_option(self): + # Issue#5900 + # + # Ensure RUNPATH is added to extension modules with RPATH if + # GNU ld is used + + # darwin + sys.platform = 'darwin' + self.assertEqual(self.cc.rpath_foo(), '-L/foo') + + # hp-ux + sys.platform = 'hp-ux' + old_gcv = sysconfig.get_config_var + def gcv(v): + return 'xxx' + sysconfig.get_config_var = gcv + self.assertEqual(self.cc.rpath_foo(), ['+s', '-L/foo']) + + def gcv(v): + return 'gcc' + sysconfig.get_config_var = gcv + self.assertEqual(self.cc.rpath_foo(), ['-Wl,+s', '-L/foo']) + + def gcv(v): + return 'g++' + sysconfig.get_config_var = gcv + self.assertEqual(self.cc.rpath_foo(), ['-Wl,+s', '-L/foo']) + + sysconfig.get_config_var = old_gcv + + # GCC GNULD + sys.platform = 'bar' + def gcv(v): + if v == 'CC': + return 'gcc' + elif v == 'GNULD': + return 'yes' + sysconfig.get_config_var = gcv + self.assertEqual(self.cc.rpath_foo(), '-Wl,--enable-new-dtags,-R/foo') + + # GCC non-GNULD + sys.platform = 'bar' + def gcv(v): + if v == 'CC': + return 'gcc' + elif v == 'GNULD': + return 'no' + sysconfig.get_config_var = gcv + self.assertEqual(self.cc.rpath_foo(), '-Wl,-R/foo') + + # GCC GNULD with fully qualified configuration prefix + # see #7617 + sys.platform = 'bar' + def gcv(v): + if v == 'CC': + return 'x86_64-pc-linux-gnu-gcc-4.4.2' + elif v == 'GNULD': + return 'yes' + sysconfig.get_config_var = gcv + self.assertEqual(self.cc.rpath_foo(), '-Wl,--enable-new-dtags,-R/foo') + + # non-GCC GNULD + sys.platform = 'bar' + def gcv(v): + if v == 'CC': + return 'cc' + elif v == 'GNULD': + return 'yes' + sysconfig.get_config_var = gcv + self.assertEqual(self.cc.rpath_foo(), '-R/foo') + + # non-GCC non-GNULD + sys.platform = 'bar' + def gcv(v): + if v == 'CC': + return 'cc' + elif v == 'GNULD': + return 'no' + sysconfig.get_config_var = gcv + self.assertEqual(self.cc.rpath_foo(), '-R/foo') + + @unittest.skipUnless(sys.platform == 'darwin', 'test only relevant for OS X') + def test_osx_cc_overrides_ldshared(self): + # Issue #18080: + # ensure that setting CC env variable 
also changes default linker + def gcv(v): + if v == 'LDSHARED': + return 'gcc-4.2 -bundle -undefined dynamic_lookup ' + return 'gcc-4.2' + sysconfig.get_config_var = gcv + with EnvironmentVarGuard() as env: + env['CC'] = 'my_cc' + del env['LDSHARED'] + sysconfig.customize_compiler(self.cc) + self.assertEqual(self.cc.linker_so[0], 'my_cc') + + @unittest.skipUnless(sys.platform == 'darwin', 'test only relevant for OS X') + def test_osx_explicit_ldshared(self): + # Issue #18080: + # ensure that setting CC env variable does not change + # explicit LDSHARED setting for linker + def gcv(v): + if v == 'LDSHARED': + return 'gcc-4.2 -bundle -undefined dynamic_lookup ' + return 'gcc-4.2' + sysconfig.get_config_var = gcv + with EnvironmentVarGuard() as env: + env['CC'] = 'my_cc' + env['LDSHARED'] = 'my_ld -bundle -dynamic' + sysconfig.customize_compiler(self.cc) + self.assertEqual(self.cc.linker_so[0], 'my_ld') + + +def test_suite(): + return unittest.TestLoader().loadTestsFromTestCase(UnixCCompilerTestCase) + +if __name__ == "__main__": + run_unittest(test_suite()) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_upload.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_upload.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_upload.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_upload.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,223 @@ +"""Tests for distutils.command.upload.""" +import os +import unittest +import unittest.mock as mock +from urllib.error import HTTPError + +from test.support import run_unittest + +from distutils.command import upload as upload_mod +from distutils.command.upload import upload +from distutils.core import Distribution +from distutils.errors import DistutilsError +from distutils.log import ERROR, INFO + +from distutils.tests.test_config import PYPIRC, BasePyPIRCCommandTestCase + +PYPIRC_LONG_PASSWORD = """\ +[distutils] + +index-servers = + server1 + server2 + +[server1] +username:me +password:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa + +[server2] +username:meagain +password: secret +realm:acme +repository:http://another.pypi/ +""" + + +PYPIRC_NOPASSWORD = """\ +[distutils] + +index-servers = + server1 + +[server1] +username:me +""" + +class FakeOpen(object): + + def __init__(self, url, msg=None, code=None): + self.url = url + if not isinstance(url, str): + self.req = url + else: + self.req = None + self.msg = msg or 'OK' + self.code = code or 200 + + def getheader(self, name, default=None): + return { + 'content-type': 'text/plain; charset=utf-8', + }.get(name.lower(), default) + + def read(self): + return b'xyzzy' + + def getcode(self): + return self.code + + +class uploadTestCase(BasePyPIRCCommandTestCase): + + def setUp(self): + super(uploadTestCase, self).setUp() + self.old_open = upload_mod.urlopen + upload_mod.urlopen = self._urlopen + self.last_open = None + self.next_msg = None + self.next_code = None + + def tearDown(self): + upload_mod.urlopen = self.old_open + super(uploadTestCase, self).tearDown() + + def _urlopen(self, url): + self.last_open = FakeOpen(url, msg=self.next_msg, code=self.next_code) + return self.last_open + + def test_finalize_options(self): + + # new format + self.write_file(self.rc, PYPIRC) + dist = Distribution() + cmd = upload(dist) + cmd.finalize_options() + for attr, waited in (('username', 'me'), ('password', 'secret'), + ('realm', 'pypi'), + ('repository', 
'https://upload.pypi.org/legacy/')): + self.assertEqual(getattr(cmd, attr), waited) + + def test_saved_password(self): + # file with no password + self.write_file(self.rc, PYPIRC_NOPASSWORD) + + # make sure it passes + dist = Distribution() + cmd = upload(dist) + cmd.finalize_options() + self.assertEqual(cmd.password, None) + + # make sure we get it as well, if another command + # initialized it at the dist level + dist.password = 'xxx' + cmd = upload(dist) + cmd.finalize_options() + self.assertEqual(cmd.password, 'xxx') + + def test_upload(self): + tmp = self.mkdtemp() + path = os.path.join(tmp, 'xxx') + self.write_file(path) + command, pyversion, filename = 'xxx', '2.6', path + dist_files = [(command, pyversion, filename)] + self.write_file(self.rc, PYPIRC_LONG_PASSWORD) + + # lets run it + pkg_dir, dist = self.create_dist(dist_files=dist_files) + cmd = upload(dist) + cmd.show_response = 1 + cmd.ensure_finalized() + cmd.run() + + # what did we send ? + headers = dict(self.last_open.req.headers) + self.assertGreaterEqual(int(headers['Content-length']), 2162) + content_type = headers['Content-type'] + self.assertTrue(content_type.startswith('multipart/form-data')) + self.assertEqual(self.last_open.req.get_method(), 'POST') + expected_url = 'https://upload.pypi.org/legacy/' + self.assertEqual(self.last_open.req.get_full_url(), expected_url) + data = self.last_open.req.data + self.assertIn(b'xxx',data) + self.assertIn(b'protocol_version', data) + self.assertIn(b'sha256_digest', data) + self.assertIn( + b'cd2eb0837c9b4c962c22d2ff8b5441b7b45805887f051d39bf133b583baf' + b'6860', + data + ) + if b'md5_digest' in data: + self.assertIn(b'f561aaf6ef0bf14d4208bb46a4ccb3ad', data) + if b'blake2_256_digest' in data: + self.assertIn( + b'b6f289a27d4fe90da63c503bfe0a9b761a8f76bb86148565065f040be' + b'6d1c3044cf7ded78ef800509bccb4b648e507d88dc6383d67642aadcc' + b'ce443f1534330a', + data + ) + + # The PyPI response body was echoed + results = self.get_logs(INFO) + self.assertEqual(results[-1], 75 * '-' + '\nxyzzy\n' + 75 * '-') + + # bpo-32304: archives whose last byte was b'\r' were corrupted due to + # normalization intended for Mac OS 9. + def test_upload_correct_cr(self): + # content that ends with \r should not be modified. + tmp = self.mkdtemp() + path = os.path.join(tmp, 'xxx') + self.write_file(path, content='yy\r') + command, pyversion, filename = 'xxx', '2.6', path + dist_files = [(command, pyversion, filename)] + self.write_file(self.rc, PYPIRC_LONG_PASSWORD) + + # other fields that ended with \r used to be modified, now are + # preserved. 
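To make the intent of the trailing-\r checks concrete, a generic illustration (not the actual distutils upload code) of why any end-of-line normalization of the request body is unsafe once binary archive data is embedded in it:

# Generic illustration only; the payload below is hypothetical.
archive = b"fake-archive-bytes\r"          # binary data that happens to end in b'\r'
mangled = archive.replace(b"\r", b"\r\n")  # a Mac OS 9 style line-ending fix-up
assert mangled != archive                  # the uploaded file would be corrupted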
+ pkg_dir, dist = self.create_dist( + dist_files=dist_files, + description='long description\r' + ) + cmd = upload(dist) + cmd.show_response = 1 + cmd.ensure_finalized() + cmd.run() + + headers = dict(self.last_open.req.headers) + self.assertGreaterEqual(int(headers['Content-length']), 2172) + self.assertIn(b'long description\r', self.last_open.req.data) + + def test_upload_fails(self): + self.next_msg = "Not Found" + self.next_code = 404 + self.assertRaises(DistutilsError, self.test_upload) + + def test_wrong_exception_order(self): + tmp = self.mkdtemp() + path = os.path.join(tmp, 'xxx') + self.write_file(path) + dist_files = [('xxx', '2.6', path)] # command, pyversion, filename + self.write_file(self.rc, PYPIRC_LONG_PASSWORD) + + pkg_dir, dist = self.create_dist(dist_files=dist_files) + tests = [ + (OSError('oserror'), 'oserror', OSError), + (HTTPError('url', 400, 'httperror', {}, None), + 'Upload failed (400): httperror', DistutilsError), + ] + for exception, expected, raised_exception in tests: + with self.subTest(exception=type(exception).__name__): + with mock.patch('distutils.command.upload.urlopen', + new=mock.Mock(side_effect=exception)): + with self.assertRaises(raised_exception): + cmd = upload(dist) + cmd.ensure_finalized() + cmd.run() + results = self.get_logs(ERROR) + self.assertIn(expected, results[-1]) + self.clear_logs() + + +def test_suite(): + return unittest.TestLoader().loadTestsFromTestCase(uploadTestCase) + +if __name__ == "__main__": + run_unittest(test_suite()) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_util.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_util.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_util.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_util.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,313 @@ +"""Tests for distutils.util.""" +import os +import sys +import unittest +from copy import copy +from test.support import run_unittest +from unittest import mock + +from distutils.errors import DistutilsPlatformError, DistutilsByteCompileError +from distutils.util import (get_platform, convert_path, change_root, + check_environ, split_quoted, strtobool, + rfc822_escape, byte_compile, + grok_environment_error) +from distutils import util # used to patch _environ_checked +from distutils.sysconfig import get_config_vars +from distutils import sysconfig +from distutils.tests import support +import _osx_support + +class UtilTestCase(support.EnvironGuard, unittest.TestCase): + + def setUp(self): + super(UtilTestCase, self).setUp() + # saving the environment + self.name = os.name + self.platform = sys.platform + self.version = sys.version + self.sep = os.sep + self.join = os.path.join + self.isabs = os.path.isabs + self.splitdrive = os.path.splitdrive + self._config_vars = copy(sysconfig._config_vars) + + # patching os.uname + if hasattr(os, 'uname'): + self.uname = os.uname + self._uname = os.uname() + else: + self.uname = None + self._uname = None + + os.uname = self._get_uname + + def tearDown(self): + # getting back the environment + os.name = self.name + sys.platform = self.platform + sys.version = self.version + os.sep = self.sep + os.path.join = self.join + os.path.isabs = self.isabs + os.path.splitdrive = self.splitdrive + if self.uname is not None: + os.uname = self.uname + else: + del os.uname + sysconfig._config_vars.clear() + sysconfig._config_vars.update(self._config_vars) + super(UtilTestCase, self).tearDown() + + def 
_set_uname(self, uname): + self._uname = uname + + def _get_uname(self): + return self._uname + + def test_get_platform(self): + + # windows XP, 32bits + os.name = 'nt' + sys.version = ('2.4.4 (#71, Oct 18 2006, 08:34:43) ' + '[MSC v.1310 32 bit (Intel)]') + sys.platform = 'win32' + self.assertEqual(get_platform(), 'win32') + + # windows XP, amd64 + os.name = 'nt' + sys.version = ('2.4.4 (#71, Oct 18 2006, 08:34:43) ' + '[MSC v.1310 32 bit (Amd64)]') + sys.platform = 'win32' + self.assertEqual(get_platform(), 'win-amd64') + + # macbook + os.name = 'posix' + sys.version = ('2.5 (r25:51918, Sep 19 2006, 08:49:13) ' + '\n[GCC 4.0.1 (Apple Computer, Inc. build 5341)]') + sys.platform = 'darwin' + self._set_uname(('Darwin', 'macziade', '8.11.1', + ('Darwin Kernel Version 8.11.1: ' + 'Wed Oct 10 18:23:28 PDT 2007; ' + 'root:xnu-792.25.20~1/RELEASE_I386'), 'i386')) + _osx_support._remove_original_values(get_config_vars()) + get_config_vars()['MACOSX_DEPLOYMENT_TARGET'] = '10.3' + + get_config_vars()['CFLAGS'] = ('-fno-strict-aliasing -DNDEBUG -g ' + '-fwrapv -O3 -Wall -Wstrict-prototypes') + + cursize = sys.maxsize + sys.maxsize = (2 ** 31)-1 + try: + self.assertEqual(get_platform(), 'macosx-10.3-i386') + finally: + sys.maxsize = cursize + + # macbook with fat binaries (fat, universal or fat64) + _osx_support._remove_original_values(get_config_vars()) + get_config_vars()['MACOSX_DEPLOYMENT_TARGET'] = '10.4' + get_config_vars()['CFLAGS'] = ('-arch ppc -arch i386 -isysroot ' + '/Developer/SDKs/MacOSX10.4u.sdk ' + '-fno-strict-aliasing -fno-common ' + '-dynamic -DNDEBUG -g -O3') + + self.assertEqual(get_platform(), 'macosx-10.4-fat') + + _osx_support._remove_original_values(get_config_vars()) + os.environ['MACOSX_DEPLOYMENT_TARGET'] = '10.1' + self.assertEqual(get_platform(), 'macosx-10.4-fat') + + + _osx_support._remove_original_values(get_config_vars()) + get_config_vars()['CFLAGS'] = ('-arch x86_64 -arch i386 -isysroot ' + '/Developer/SDKs/MacOSX10.4u.sdk ' + '-fno-strict-aliasing -fno-common ' + '-dynamic -DNDEBUG -g -O3') + + self.assertEqual(get_platform(), 'macosx-10.4-intel') + + _osx_support._remove_original_values(get_config_vars()) + get_config_vars()['CFLAGS'] = ('-arch x86_64 -arch ppc -arch i386 -isysroot ' + '/Developer/SDKs/MacOSX10.4u.sdk ' + '-fno-strict-aliasing -fno-common ' + '-dynamic -DNDEBUG -g -O3') + self.assertEqual(get_platform(), 'macosx-10.4-fat3') + + _osx_support._remove_original_values(get_config_vars()) + get_config_vars()['CFLAGS'] = ('-arch ppc64 -arch x86_64 -arch ppc -arch i386 -isysroot ' + '/Developer/SDKs/MacOSX10.4u.sdk ' + '-fno-strict-aliasing -fno-common ' + '-dynamic -DNDEBUG -g -O3') + self.assertEqual(get_platform(), 'macosx-10.4-universal') + + _osx_support._remove_original_values(get_config_vars()) + get_config_vars()['CFLAGS'] = ('-arch x86_64 -arch ppc64 -isysroot ' + '/Developer/SDKs/MacOSX10.4u.sdk ' + '-fno-strict-aliasing -fno-common ' + '-dynamic -DNDEBUG -g -O3') + + self.assertEqual(get_platform(), 'macosx-10.4-fat64') + + for arch in ('ppc', 'i386', 'x86_64', 'ppc64'): + _osx_support._remove_original_values(get_config_vars()) + get_config_vars()['CFLAGS'] = ('-arch %s -isysroot ' + '/Developer/SDKs/MacOSX10.4u.sdk ' + '-fno-strict-aliasing -fno-common ' + '-dynamic -DNDEBUG -g -O3'%(arch,)) + + self.assertEqual(get_platform(), 'macosx-10.4-%s'%(arch,)) + + + # linux debian sarge + os.name = 'posix' + sys.version = ('2.3.5 (#1, Jul 4 2007, 17:28:59) ' + '\n[GCC 4.1.2 20061115 (prerelease) (Debian 4.1.1-21)]') + sys.platform = 'linux2' + 
self._set_uname(('Linux', 'aglae', '2.6.21.1dedibox-r7', + '#1 Mon Apr 30 17:25:38 CEST 2007', 'i686')) + + self.assertEqual(get_platform(), 'linux-i686') + + # XXX more platforms to tests here + + def test_convert_path(self): + # linux/mac + os.sep = '/' + def _join(path): + return '/'.join(path) + os.path.join = _join + + self.assertEqual(convert_path('/home/to/my/stuff'), + '/home/to/my/stuff') + + # win + os.sep = '\\' + def _join(*path): + return '\\'.join(path) + os.path.join = _join + + self.assertRaises(ValueError, convert_path, '/home/to/my/stuff') + self.assertRaises(ValueError, convert_path, 'home/to/my/stuff/') + + self.assertEqual(convert_path('home/to/my/stuff'), + 'home\\to\\my\\stuff') + self.assertEqual(convert_path('.'), + os.curdir) + + def test_change_root(self): + # linux/mac + os.name = 'posix' + def _isabs(path): + return path[0] == '/' + os.path.isabs = _isabs + def _join(*path): + return '/'.join(path) + os.path.join = _join + + self.assertEqual(change_root('/root', '/old/its/here'), + '/root/old/its/here') + self.assertEqual(change_root('/root', 'its/here'), + '/root/its/here') + + # windows + os.name = 'nt' + def _isabs(path): + return path.startswith('c:\\') + os.path.isabs = _isabs + def _splitdrive(path): + if path.startswith('c:'): + return ('', path.replace('c:', '')) + return ('', path) + os.path.splitdrive = _splitdrive + def _join(*path): + return '\\'.join(path) + os.path.join = _join + + self.assertEqual(change_root('c:\\root', 'c:\\old\\its\\here'), + 'c:\\root\\old\\its\\here') + self.assertEqual(change_root('c:\\root', 'its\\here'), + 'c:\\root\\its\\here') + + # BugsBunny os (it's a great os) + os.name = 'BugsBunny' + self.assertRaises(DistutilsPlatformError, + change_root, 'c:\\root', 'its\\here') + + # XXX platforms to be covered: mac + + def test_check_environ(self): + util._environ_checked = 0 + os.environ.pop('HOME', None) + + check_environ() + + self.assertEqual(os.environ['PLAT'], get_platform()) + self.assertEqual(util._environ_checked, 1) + + @unittest.skipUnless(os.name == 'posix', 'specific to posix') + def test_check_environ_getpwuid(self): + util._environ_checked = 0 + os.environ.pop('HOME', None) + + try: + import pwd + except ImportError: + raise unittest.SkipTest("Test requires pwd module.") + + # only set pw_dir field, other fields are not used + result = pwd.struct_passwd((None, None, None, None, None, + '/home/distutils', None)) + with mock.patch.object(pwd, 'getpwuid', return_value=result): + check_environ() + self.assertEqual(os.environ['HOME'], '/home/distutils') + + util._environ_checked = 0 + os.environ.pop('HOME', None) + + # bpo-10496: Catch pwd.getpwuid() error + with mock.patch.object(pwd, 'getpwuid', side_effect=KeyError): + check_environ() + self.assertNotIn('HOME', os.environ) + + def test_split_quoted(self): + self.assertEqual(split_quoted('""one"" "two" \'three\' \\four'), + ['one', 'two', 'three', 'four']) + + def test_strtobool(self): + yes = ('y', 'Y', 'yes', 'True', 't', 'true', 'True', 'On', 'on', '1') + no = ('n', 'no', 'f', 'false', 'off', '0', 'Off', 'No', 'N') + + for y in yes: + self.assertTrue(strtobool(y)) + + for n in no: + self.assertFalse(strtobool(n)) + + def test_rfc822_escape(self): + header = 'I am a\npoor\nlonesome\nheader\n' + res = rfc822_escape(header) + wanted = ('I am a%(8s)spoor%(8s)slonesome%(8s)s' + 'header%(8s)s') % {'8s': '\n'+8*' '} + self.assertEqual(res, wanted) + + def test_dont_write_bytecode(self): + # makes sure byte_compile raise a DistutilsError + # if sys.dont_write_bytecode is 
True + old_dont_write_bytecode = sys.dont_write_bytecode + sys.dont_write_bytecode = True + try: + self.assertRaises(DistutilsByteCompileError, byte_compile, []) + finally: + sys.dont_write_bytecode = old_dont_write_bytecode + + def test_grok_environment_error(self): + # test obsolete function to ensure backward compat (#4931) + exc = IOError("Unable to find batch file") + msg = grok_environment_error(exc) + self.assertEqual(msg, "error: Unable to find batch file") + + +def test_suite(): + return unittest.TestLoader().loadTestsFromTestCase(UtilTestCase) + +if __name__ == "__main__": + run_unittest(test_suite()) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_version.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_version.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_version.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_version.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,87 @@ +"""Tests for distutils.version.""" +import unittest +from distutils.version import LooseVersion +from distutils.version import StrictVersion +from test.support import run_unittest + +class VersionTestCase(unittest.TestCase): + + def test_prerelease(self): + version = StrictVersion('1.2.3a1') + self.assertEqual(version.version, (1, 2, 3)) + self.assertEqual(version.prerelease, ('a', 1)) + self.assertEqual(str(version), '1.2.3a1') + + version = StrictVersion('1.2.0') + self.assertEqual(str(version), '1.2') + + def test_cmp_strict(self): + versions = (('1.5.1', '1.5.2b2', -1), + ('161', '3.10a', ValueError), + ('8.02', '8.02', 0), + ('3.4j', '1996.07.12', ValueError), + ('3.2.pl0', '3.1.1.6', ValueError), + ('2g6', '11g', ValueError), + ('0.9', '2.2', -1), + ('1.2.1', '1.2', 1), + ('1.1', '1.2.2', -1), + ('1.2', '1.1', 1), + ('1.2.1', '1.2.2', -1), + ('1.2.2', '1.2', 1), + ('1.2', '1.2.2', -1), + ('0.4.0', '0.4', 0), + ('1.13++', '5.5.kw', ValueError)) + + for v1, v2, wanted in versions: + try: + res = StrictVersion(v1)._cmp(StrictVersion(v2)) + except ValueError: + if wanted is ValueError: + continue + else: + raise AssertionError(("cmp(%s, %s) " + "shouldn't raise ValueError") + % (v1, v2)) + self.assertEqual(res, wanted, + 'cmp(%s, %s) should be %s, got %s' % + (v1, v2, wanted, res)) + res = StrictVersion(v1)._cmp(v2) + self.assertEqual(res, wanted, + 'cmp(%s, %s) should be %s, got %s' % + (v1, v2, wanted, res)) + res = StrictVersion(v1)._cmp(object()) + self.assertIs(res, NotImplemented, + 'cmp(%s, %s) should be NotImplemented, got %s' % + (v1, v2, res)) + + + def test_cmp(self): + versions = (('1.5.1', '1.5.2b2', -1), + ('161', '3.10a', 1), + ('8.02', '8.02', 0), + ('3.4j', '1996.07.12', -1), + ('3.2.pl0', '3.1.1.6', 1), + ('2g6', '11g', -1), + ('0.960923', '2.2beta29', -1), + ('1.13++', '5.5.kw', -1)) + + + for v1, v2, wanted in versions: + res = LooseVersion(v1)._cmp(LooseVersion(v2)) + self.assertEqual(res, wanted, + 'cmp(%s, %s) should be %s, got %s' % + (v1, v2, wanted, res)) + res = LooseVersion(v1)._cmp(v2) + self.assertEqual(res, wanted, + 'cmp(%s, %s) should be %s, got %s' % + (v1, v2, wanted, res)) + res = LooseVersion(v1)._cmp(object()) + self.assertIs(res, NotImplemented, + 'cmp(%s, %s) should be NotImplemented, got %s' % + (v1, v2, res)) + +def test_suite(): + return unittest.TestLoader().loadTestsFromTestCase(VersionTestCase) + +if __name__ == "__main__": + run_unittest(test_suite()) diff -Nru 
python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_versionpredicate.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_versionpredicate.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/tests/test_versionpredicate.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/tests/test_versionpredicate.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,13 @@ +"""Tests harness for distutils.versionpredicate. + +""" + +import distutils.versionpredicate +import doctest +from test.support import run_unittest + +def test_suite(): + return doctest.DocTestSuite(distutils.versionpredicate) + +if __name__ == '__main__': + run_unittest(test_suite()) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/text_file.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/text_file.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/text_file.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/text_file.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,286 @@ +"""text_file + +provides the TextFile class, which gives an interface to text files +that (optionally) takes care of stripping comments, ignoring blank +lines, and joining lines with backslashes.""" + +import sys, io + + +class TextFile: + """Provides a file-like object that takes care of all the things you + commonly want to do when processing a text file that has some + line-by-line syntax: strip comments (as long as "#" is your + comment character), skip blank lines, join adjacent lines by + escaping the newline (ie. backslash at end of line), strip + leading and/or trailing whitespace. All of these are optional + and independently controllable. + + Provides a 'warn()' method so you can generate warning messages that + report physical line number, even if the logical line in question + spans multiple physical lines. Also provides 'unreadline()' for + implementing line-at-a-time lookahead. + + Constructor is called as: + + TextFile (filename=None, file=None, **options) + + It bombs (RuntimeError) if both 'filename' and 'file' are None; + 'filename' should be a string, and 'file' a file object (or + something that provides 'readline()' and 'close()' methods). It is + recommended that you supply at least 'filename', so that TextFile + can include it in warning messages. If 'file' is not supplied, + TextFile creates its own using 'io.open()'. + + The options are all boolean, and affect the value returned by + 'readline()': + strip_comments [default: true] + strip from "#" to end-of-line, as well as any whitespace + leading up to the "#" -- unless it is escaped by a backslash + lstrip_ws [default: false] + strip leading whitespace from each line before returning it + rstrip_ws [default: true] + strip trailing whitespace (including line terminator!) from + each line before returning it + skip_blanks [default: true} + skip lines that are empty *after* stripping comments and + whitespace. (If both lstrip_ws and rstrip_ws are false, + then some lines may consist of solely whitespace: these will + *not* be skipped, even if 'skip_blanks' is true.) + join_lines [default: false] + if a backslash is the last non-newline character on a line + after stripping comments and whitespace, join the following line + to it to form one "logical line"; if N consecutive lines end + with a backslash, then N+1 physical lines will be joined to + form one logical line. 
+ collapse_join [default: false] + strip leading whitespace from lines that are joined to their + predecessor; only matters if (join_lines and not lstrip_ws) + errors [default: 'strict'] + error handler used to decode the file content + + Note that since 'rstrip_ws' can strip the trailing newline, the + semantics of 'readline()' must differ from those of the builtin file + object's 'readline()' method! In particular, 'readline()' returns + None for end-of-file: an empty string might just be a blank line (or + an all-whitespace line), if 'rstrip_ws' is true but 'skip_blanks' is + not.""" + + default_options = { 'strip_comments': 1, + 'skip_blanks': 1, + 'lstrip_ws': 0, + 'rstrip_ws': 1, + 'join_lines': 0, + 'collapse_join': 0, + 'errors': 'strict', + } + + def __init__(self, filename=None, file=None, **options): + """Construct a new TextFile object. At least one of 'filename' + (a string) and 'file' (a file-like object) must be supplied. + They keyword argument options are described above and affect + the values returned by 'readline()'.""" + if filename is None and file is None: + raise RuntimeError("you must supply either or both of 'filename' and 'file'") + + # set values for all options -- either from client option hash + # or fallback to default_options + for opt in self.default_options.keys(): + if opt in options: + setattr(self, opt, options[opt]) + else: + setattr(self, opt, self.default_options[opt]) + + # sanity check client option hash + for opt in options.keys(): + if opt not in self.default_options: + raise KeyError("invalid TextFile option '%s'" % opt) + + if file is None: + self.open(filename) + else: + self.filename = filename + self.file = file + self.current_line = 0 # assuming that file is at BOF! + + # 'linebuf' is a stack of lines that will be emptied before we + # actually read from the file; it's only populated by an + # 'unreadline()' operation + self.linebuf = [] + + def open(self, filename): + """Open a new file named 'filename'. This overrides both the + 'filename' and 'file' arguments to the constructor.""" + self.filename = filename + self.file = io.open(self.filename, 'r', errors=self.errors) + self.current_line = 0 + + def close(self): + """Close the current file and forget everything we know about it + (filename, current line number).""" + file = self.file + self.file = None + self.filename = None + self.current_line = None + file.close() + + def gen_error(self, msg, line=None): + outmsg = [] + if line is None: + line = self.current_line + outmsg.append(self.filename + ", ") + if isinstance(line, (list, tuple)): + outmsg.append("lines %d-%d: " % tuple(line)) + else: + outmsg.append("line %d: " % line) + outmsg.append(str(msg)) + return "".join(outmsg) + + def error(self, msg, line=None): + raise ValueError("error: " + self.gen_error(msg, line)) + + def warn(self, msg, line=None): + """Print (to stderr) a warning message tied to the current logical + line in the current file. If the current logical line in the + file spans multiple physical lines, the warning refers to the + whole range, eg. "lines 3-5". If 'line' supplied, it overrides + the current line number; it may be a list or tuple to indicate a + range of physical lines, or an integer for a single physical + line.""" + sys.stderr.write("warning: " + self.gen_error(msg, line) + "\n") + + def readline(self): + """Read and return a single logical line from the current file (or + from an internal buffer if lines have previously been "unread" + with 'unreadline()'). 
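# [Editor's illustration -- not part of the diff.]  A minimal sketch of the
# TextFile option handling documented in the hunk above, feeding the class an
# io.StringIO object instead of a real file (both the sample text and the
# option combination are assumptions chosen for the demo).
import io
from distutils.text_file import TextFile

src = io.StringIO("a = 1  # trailing comment\n"
                  "\n"
                  "long = value \\\n"
                  "       continued\n")
tf = TextFile(file=src, strip_comments=1, skip_blanks=1,
              join_lines=1, collapse_join=1)
print(tf.readlines())   # -> ['a = 1', 'long = value continued']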
If the 'join_lines' option is true, this + may involve reading multiple physical lines concatenated into a + single string. Updates the current line number, so calling + 'warn()' after 'readline()' emits a warning about the physical + line(s) just read. Returns None on end-of-file, since the empty + string can occur if 'rstrip_ws' is true but 'strip_blanks' is + not.""" + # If any "unread" lines waiting in 'linebuf', return the top + # one. (We don't actually buffer read-ahead data -- lines only + # get put in 'linebuf' if the client explicitly does an + # 'unreadline()'. + if self.linebuf: + line = self.linebuf[-1] + del self.linebuf[-1] + return line + + buildup_line = '' + + while True: + # read the line, make it None if EOF + line = self.file.readline() + if line == '': + line = None + + if self.strip_comments and line: + + # Look for the first "#" in the line. If none, never + # mind. If we find one and it's the first character, or + # is not preceded by "\", then it starts a comment -- + # strip the comment, strip whitespace before it, and + # carry on. Otherwise, it's just an escaped "#", so + # unescape it (and any other escaped "#"'s that might be + # lurking in there) and otherwise leave the line alone. + + pos = line.find("#") + if pos == -1: # no "#" -- no comments + pass + + # It's definitely a comment -- either "#" is the first + # character, or it's elsewhere and unescaped. + elif pos == 0 or line[pos-1] != "\\": + # Have to preserve the trailing newline, because it's + # the job of a later step (rstrip_ws) to remove it -- + # and if rstrip_ws is false, we'd better preserve it! + # (NB. this means that if the final line is all comment + # and has no trailing newline, we will think that it's + # EOF; I think that's OK.) + eol = (line[-1] == '\n') and '\n' or '' + line = line[0:pos] + eol + + # If all that's left is whitespace, then skip line + # *now*, before we try to join it to 'buildup_line' -- + # that way constructs like + # hello \\ + # # comment that should be ignored + # there + # result in "hello there". + if line.strip() == "": + continue + else: # it's an escaped "#" + line = line.replace("\\#", "#") + + # did previous line end with a backslash? then accumulate + if self.join_lines and buildup_line: + # oops: end of file + if line is None: + self.warn("continuation line immediately precedes " + "end-of-file") + return buildup_line + + if self.collapse_join: + line = line.lstrip() + line = buildup_line + line + + # careful: pay attention to line number when incrementing it + if isinstance(self.current_line, list): + self.current_line[1] = self.current_line[1] + 1 + else: + self.current_line = [self.current_line, + self.current_line + 1] + # just an ordinary line, read it as usual + else: + if line is None: # eof + return None + + # still have to be careful about incrementing the line number! + if isinstance(self.current_line, list): + self.current_line = self.current_line[1] + 1 + else: + self.current_line = self.current_line + 1 + + # strip whitespace however the client wants (leading and + # trailing, or one or the other, or neither) + if self.lstrip_ws and self.rstrip_ws: + line = line.strip() + elif self.lstrip_ws: + line = line.lstrip() + elif self.rstrip_ws: + line = line.rstrip() + + # blank line (whether we rstrip'ed or not)? 
skip to next line + # if appropriate + if (line == '' or line == '\n') and self.skip_blanks: + continue + + if self.join_lines: + if line[-1] == '\\': + buildup_line = line[:-1] + continue + + if line[-2:] == '\\\n': + buildup_line = line[0:-2] + '\n' + continue + + # well, I guess there's some actual content there: return it + return line + + def readlines(self): + """Read and return the list of all logical lines remaining in the + current file.""" + lines = [] + while True: + line = self.readline() + if line is None: + return lines + lines.append(line) + + def unreadline(self, line): + """Push 'line' (a string) onto an internal buffer that will be + checked by future 'readline()' calls. Handy for implementing + a parser with line-at-a-time lookahead.""" + self.linebuf.append(line) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/unixccompiler.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/unixccompiler.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/unixccompiler.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/unixccompiler.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,329 @@ +"""distutils.unixccompiler + +Contains the UnixCCompiler class, a subclass of CCompiler that handles +the "typical" Unix-style command-line C compiler: + * macros defined with -Dname[=value] + * macros undefined with -Uname + * include search directories specified with -Idir + * libraries specified with -lllib + * library search directories specified with -Ldir + * compile handled by 'cc' (or similar) executable with -c option: + compiles .c to .o + * link static library handled by 'ar' command (possibly with 'ranlib') + * link shared library handled by 'cc -shared' +""" + +import os, sys, re + +from distutils import sysconfig +from distutils.dep_util import newer +from distutils.ccompiler import \ + CCompiler, gen_preprocess_options, gen_lib_options +from distutils.errors import \ + DistutilsExecError, CompileError, LibError, LinkError +from distutils import log + +if sys.platform == 'darwin': + import _osx_support + +# XXX Things not currently handled: +# * optimization/debug/warning flags; we just use whatever's in Python's +# Makefile and live with it. Is this adequate? If not, we might +# have to have a bunch of subclasses GNUCCompiler, SGICCompiler, +# SunCCompiler, and I suspect down that road lies madness. +# * even if we don't know a warning flag from an optimization flag, +# we need some way for outsiders to feed preprocessor/compiler/linker +# flags in to us -- eg. a sysadmin might want to mandate certain flags +# via a site config file, or a user might want to set something for +# compiling this module distribution only via the setup.py command +# line, whatever. As long as these options come from something on the +# current system, they can be as system-dependent as they like, and we +# should just happily stuff them into the preprocessor/compiler/linker +# options and carry on. + + +class UnixCCompiler(CCompiler): + + compiler_type = 'unix' + + # These are used by CCompiler in two places: the constructor sets + # instance attributes 'preprocessor', 'compiler', etc. from them, and + # 'set_executable()' allows any of these to be set. The defaults here + # are pretty generic; they will probably have to be set by an outsider + # (eg. using information discovered by the sysconfig about building + # Python extensions). 
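# [Editor's illustration -- not part of the diff.]  How the generic executable
# defaults described above (and listed just below) normally get filled in,
# sketched assuming a POSIX host where new_compiler() returns a UnixCCompiler;
# the clang override at the end is purely illustrative.
from distutils.ccompiler import new_compiler
from distutils.sysconfig import customize_compiler

cc = new_compiler()          # UnixCCompiler on POSIX platforms
customize_compiler(cc)       # copy CC/CFLAGS/LDSHARED/... from sysconfig
print(cc.compiler_so)        # e.g. ['gcc', '-pthread', '-fPIC', ...] on Linux
cc.set_executables(compiler_so=["clang", "-O2", "-fPIC"])   # explicit override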
+ executables = {'preprocessor' : None, + 'compiler' : ["cc"], + 'compiler_so' : ["cc"], + 'compiler_cxx' : ["cc"], + 'linker_so' : ["cc", "-shared"], + 'linker_exe' : ["cc"], + 'archiver' : ["ar", "-cr"], + 'ranlib' : None, + } + + if sys.platform[:6] == "darwin": + executables['ranlib'] = ["ranlib"] + + # Needed for the filename generation methods provided by the base + # class, CCompiler. NB. whoever instantiates/uses a particular + # UnixCCompiler instance should set 'shared_lib_ext' -- we set a + # reasonable common default here, but it's not necessarily used on all + # Unices! + + src_extensions = [".c",".C",".cc",".cxx",".cpp",".m"] + obj_extension = ".o" + static_lib_extension = ".a" + shared_lib_extension = ".so" + dylib_lib_extension = ".dylib" + xcode_stub_lib_extension = ".tbd" + static_lib_format = shared_lib_format = dylib_lib_format = "lib%s%s" + xcode_stub_lib_format = dylib_lib_format + if sys.platform == "cygwin": + exe_extension = ".exe" + + def preprocess(self, source, output_file=None, macros=None, + include_dirs=None, extra_preargs=None, extra_postargs=None): + fixed_args = self._fix_compile_args(None, macros, include_dirs) + ignore, macros, include_dirs = fixed_args + pp_opts = gen_preprocess_options(macros, include_dirs) + pp_args = self.preprocessor + pp_opts + if output_file: + pp_args.extend(['-o', output_file]) + if extra_preargs: + pp_args[:0] = extra_preargs + if extra_postargs: + pp_args.extend(extra_postargs) + pp_args.append(source) + + # We need to preprocess: either we're being forced to, or we're + # generating output to stdout, or there's a target output file and + # the source file is newer than the target (or the target doesn't + # exist). + if self.force or output_file is None or newer(source, output_file): + if output_file: + self.mkpath(os.path.dirname(output_file)) + try: + self.spawn(pp_args) + except DistutilsExecError as msg: + raise CompileError(msg) + + def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): + compiler_so = self.compiler_so + if sys.platform == 'darwin': + compiler_so = _osx_support.compiler_fixup(compiler_so, + cc_args + extra_postargs) + try: + self.spawn(compiler_so + cc_args + [src, '-o', obj] + + extra_postargs) + except DistutilsExecError as msg: + raise CompileError(msg) + + def create_static_lib(self, objects, output_libname, + output_dir=None, debug=0, target_lang=None): + objects, output_dir = self._fix_object_args(objects, output_dir) + + output_filename = \ + self.library_filename(output_libname, output_dir=output_dir) + + if self._need_link(objects, output_filename): + self.mkpath(os.path.dirname(output_filename)) + self.spawn(self.archiver + + [output_filename] + + objects + self.objects) + + # Not many Unices required ranlib anymore -- SunOS 4.x is, I + # think the only major Unix that does. Maybe we need some + # platform intelligence here to skip ranlib if it's not + # needed -- or maybe Python's configure script took care of + # it for us, hence the check for leading colon. 
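# [Editor's illustration -- not part of the diff.]  The compile-then-archive
# path implemented by this class, sketched assuming a POSIX host with a
# working C toolchain and a hypothetical hello.c in the current directory.
from distutils.ccompiler import new_compiler
from distutils.sysconfig import customize_compiler

cc = new_compiler()
customize_compiler(cc)
objs = cc.compile(["hello.c"], output_dir="build")        # cc -c ... -o build/hello.o
cc.create_static_lib(objs, "hello", output_dir="build")   # ar -cr build/libhello.a ... (+ ranlib where needed)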
+ if self.ranlib: + try: + self.spawn(self.ranlib + [output_filename]) + except DistutilsExecError as msg: + raise LibError(msg) + else: + log.debug("skipping %s (up-to-date)", output_filename) + + def link(self, target_desc, objects, + output_filename, output_dir=None, libraries=None, + library_dirs=None, runtime_library_dirs=None, + export_symbols=None, debug=0, extra_preargs=None, + extra_postargs=None, build_temp=None, target_lang=None): + objects, output_dir = self._fix_object_args(objects, output_dir) + fixed_args = self._fix_lib_args(libraries, library_dirs, + runtime_library_dirs) + libraries, library_dirs, runtime_library_dirs = fixed_args + + lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs, + libraries) + if not isinstance(output_dir, (str, type(None))): + raise TypeError("'output_dir' must be a string or None") + if output_dir is not None: + output_filename = os.path.join(output_dir, output_filename) + + if self._need_link(objects, output_filename): + ld_args = (objects + self.objects + + lib_opts + ['-o', output_filename]) + if debug: + ld_args[:0] = ['-g'] + if extra_preargs: + ld_args[:0] = extra_preargs + if extra_postargs: + ld_args.extend(extra_postargs) + self.mkpath(os.path.dirname(output_filename)) + try: + if target_desc == CCompiler.EXECUTABLE: + linker = self.linker_exe[:] + else: + linker = self.linker_so[:] + if target_lang == "c++" and self.compiler_cxx: + # skip over environment variable settings if /usr/bin/env + # is used to set up the linker's environment. + # This is needed on OSX. Note: this assumes that the + # normal and C++ compiler have the same environment + # settings. + i = 0 + if os.path.basename(linker[0]) == "env": + i = 1 + while '=' in linker[i]: + i += 1 + + if os.path.basename(linker[i]) == 'ld_so_aix': + # AIX platforms prefix the compiler with the ld_so_aix + # script, so we need to adjust our linker index + offset = 1 + else: + offset = 0 + + linker[i+offset] = self.compiler_cxx[i] + + if sys.platform == 'darwin': + linker = _osx_support.compiler_fixup(linker, ld_args) + + self.spawn(linker + ld_args) + except DistutilsExecError as msg: + raise LinkError(msg) + else: + log.debug("skipping %s (up-to-date)", output_filename) + + # -- Miscellaneous methods ----------------------------------------- + # These are all used by the 'gen_lib_options() function, in + # ccompiler.py. + + def library_dir_option(self, dir): + return "-L" + dir + + def _is_gcc(self, compiler_name): + # clang uses same syntax for rpath as gcc + return any(name in compiler_name for name in ("gcc", "g++", "clang")) + + def runtime_library_dir_option(self, dir): + # XXX Hackish, at the very least. See Python bug #445902: + # http://sourceforge.net/tracker/index.php + # ?func=detail&aid=445902&group_id=5470&atid=105470 + # Linkers on different platforms need different options to + # specify that directories need to be added to the list of + # directories searched for dependencies when a dynamic library + # is sought. GCC on GNU systems (Linux, FreeBSD, ...) has to + # be told to pass the -R option through to the linker, whereas + # other compilers and gcc on other systems just know this. + # Other compilers may need something slightly different. At + # this time, there's no way to determine this information from + # the configuration data stored in the Python installation, so + # we use this hack. 
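# [Editor's illustration -- not part of the diff.]  The per-platform rpath
# handling described in the comment above can be probed directly; the printed
# flag is platform-dependent (the sample outputs assume Linux with gcc/clang
# and GNU ld, i.e. GNULD == "yes").
from distutils.ccompiler import new_compiler

cc = new_compiler()
print(cc.runtime_library_dir_option("/opt/mylibs"))
# e.g. '-Wl,--enable-new-dtags,-R/opt/mylibs' here, '-L/opt/mylibs' on macOS,
#      '-Wl,-rpath=/opt/mylibs' on FreeBSD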
+ compiler = os.path.basename(sysconfig.get_config_var("CC")) + if sys.platform[:6] == "darwin": + # MacOSX's linker doesn't understand the -R flag at all + return "-L" + dir + elif sys.platform[:7] == "freebsd": + return "-Wl,-rpath=" + dir + elif sys.platform[:5] == "hp-ux": + if self._is_gcc(compiler): + return ["-Wl,+s", "-L" + dir] + return ["+s", "-L" + dir] + else: + if self._is_gcc(compiler): + # gcc on non-GNU systems does not need -Wl, but can + # use it anyway. Since distutils has always passed in + # -Wl whenever gcc was used in the past it is probably + # safest to keep doing so. + if sysconfig.get_config_var("GNULD") == "yes": + # GNU ld needs an extra option to get a RUNPATH + # instead of just an RPATH. + return "-Wl,--enable-new-dtags,-R" + dir + else: + return "-Wl,-R" + dir + else: + # No idea how --enable-new-dtags would be passed on to + # ld if this system was using GNU ld. Don't know if a + # system like this even exists. + return "-R" + dir + + def library_option(self, lib): + return "-l" + lib + + def find_library_file(self, dirs, lib, debug=0): + shared_f = self.library_filename(lib, lib_type='shared') + dylib_f = self.library_filename(lib, lib_type='dylib') + xcode_stub_f = self.library_filename(lib, lib_type='xcode_stub') + static_f = self.library_filename(lib, lib_type='static') + + if sys.platform == 'darwin': + # On OSX users can specify an alternate SDK using + # '-isysroot', calculate the SDK root if it is specified + # (and use it further on) + # + # Note that, as of Xcode 7, Apple SDKs may contain textual stub + # libraries with .tbd extensions rather than the normal .dylib + # shared libraries installed in /. The Apple compiler tool + # chain handles this transparently but it can cause problems + # for programs that are being built with an SDK and searching + # for specific libraries. Callers of find_library_file need to + # keep in mind that the base filename of the returned SDK library + # file might have a different extension from that of the library + # file installed on the running system, for example: + # /Applications/Xcode.app/Contents/Developer/Platforms/ + # MacOSX.platform/Developer/SDKs/MacOSX10.11.sdk/ + # usr/lib/libedit.tbd + # vs + # /usr/lib/libedit.dylib + cflags = sysconfig.get_config_var('CFLAGS') + m = re.search(r'-isysroot\s*(\S+)', cflags) + if m is None: + sysroot = _osx_support._default_sysroot(sysconfig.get_config_var('CC')) + else: + sysroot = m.group(1) + + + + for dir in dirs: + shared = os.path.join(dir, shared_f) + dylib = os.path.join(dir, dylib_f) + static = os.path.join(dir, static_f) + xcode_stub = os.path.join(dir, xcode_stub_f) + + if sys.platform == 'darwin' and ( + dir.startswith('/System/') or ( + dir.startswith('/usr/') and not dir.startswith('/usr/local/'))): + + shared = os.path.join(sysroot, dir[1:], shared_f) + dylib = os.path.join(sysroot, dir[1:], dylib_f) + static = os.path.join(sysroot, dir[1:], static_f) + xcode_stub = os.path.join(sysroot, dir[1:], xcode_stub_f) + + # We're second-guessing the linker here, with not much hard + # data to go on: GCC seems to prefer the shared library, so I'm + # assuming that *all* Unix C compilers do. And of course I'm + # ignoring even GCC's "-static" option. So sue me. 
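# [Editor's illustration -- not part of the diff.]  find_library_file()
# prefers a shared library over a static one, as the comment above admits; a
# quick check on a typical Linux layout (the directory names are assumptions,
# and the result is None if no matching library file exists in them).
from distutils.ccompiler import new_compiler

cc = new_compiler()
print(cc.find_library_file(["/usr/lib/x86_64-linux-gnu", "/usr/lib"], "m"))
# -> '/usr/lib/x86_64-linux-gnu/libm.so' when the shared library is present,
#    a 'libm.a' only as a fallback, or None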
+ if os.path.exists(dylib): + return dylib + elif os.path.exists(xcode_stub): + return xcode_stub + elif os.path.exists(shared): + return shared + elif os.path.exists(static): + return static + + # Oops, didn't find it in *any* of 'dirs' + return None diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/util.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/util.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/util.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/util.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,562 @@ +"""distutils.util + +Miscellaneous utility functions -- anything that doesn't fit into +one of the other *util.py modules. +""" + +import os +import re +import importlib.util +import string +import sys +import distutils +from distutils.errors import DistutilsPlatformError +from distutils.dep_util import newer +from distutils.spawn import spawn +from distutils import log +from distutils.errors import DistutilsByteCompileError + +def get_host_platform(): + """Return a string that identifies the current platform. This is used mainly to + distinguish platform-specific build directories and platform-specific built + distributions. Typically includes the OS name and version and the + architecture (as supplied by 'os.uname()'), although the exact information + included depends on the OS; eg. on Linux, the kernel version isn't + particularly important. + + Examples of returned values: + linux-i586 + linux-alpha (?) + solaris-2.6-sun4u + + Windows will return one of: + win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc) + win32 (all others - specifically, sys.platform is returned) + + For other non-POSIX platforms, currently just returns 'sys.platform'. + + """ + if os.name == 'nt': + if 'amd64' in sys.version.lower(): + return 'win-amd64' + if '(arm)' in sys.version.lower(): + return 'win-arm32' + if '(arm64)' in sys.version.lower(): + return 'win-arm64' + return sys.platform + + # Set for cross builds explicitly + if "_PYTHON_HOST_PLATFORM" in os.environ: + return os.environ["_PYTHON_HOST_PLATFORM"] + + if os.name != "posix" or not hasattr(os, 'uname'): + # XXX what about the architecture? NT is Intel or Alpha, + # Mac OS is M68k or PPC, etc. + return sys.platform + + # Try to distinguish various flavours of Unix + + (osname, host, release, version, machine) = os.uname() + + # Convert the OS name to lowercase, remove '/' characters, and translate + # spaces (for "Power Macintosh") + osname = osname.lower().replace('/', '') + machine = machine.replace(' ', '_') + machine = machine.replace('/', '-') + + if osname[:5] == "linux": + # At least on Linux/Intel, 'machine' is the processor -- + # i386, etc. + # XXX what about Alpha, SPARC, etc? + return "%s-%s" % (osname, machine) + elif osname[:5] == "sunos": + if release[0] >= "5": # SunOS 5 == Solaris 2 + osname = "solaris" + release = "%d.%s" % (int(release[0]) - 3, release[2:]) + # We can't use "platform.architecture()[0]" because a + # bootstrap problem. We use a dict to get an error + # if some suspicious happens. 
+ bitness = {2147483647:"32bit", 9223372036854775807:"64bit"} + machine += ".%s" % bitness[sys.maxsize] + # fall through to standard osname-release-machine representation + elif osname[:3] == "aix": + from _aix_support import aix_platform + return aix_platform() + elif osname[:6] == "cygwin": + osname = "cygwin" + rel_re = re.compile (r'[\d.]+', re.ASCII) + m = rel_re.match(release) + if m: + release = m.group() + elif osname[:6] == "darwin": + import _osx_support, distutils.sysconfig + osname, release, machine = _osx_support.get_platform_osx( + distutils.sysconfig.get_config_vars(), + osname, release, machine) + + return "%s-%s-%s" % (osname, release, machine) + +def get_platform(): + if os.name == 'nt': + TARGET_TO_PLAT = { + 'x86' : 'win32', + 'x64' : 'win-amd64', + 'arm' : 'win-arm32', + } + return TARGET_TO_PLAT.get(os.environ.get('VSCMD_ARG_TGT_ARCH')) or get_host_platform() + else: + return get_host_platform() + +def convert_path (pathname): + """Return 'pathname' as a name that will work on the native filesystem, + i.e. split it on '/' and put it back together again using the current + directory separator. Needed because filenames in the setup script are + always supplied in Unix style, and have to be converted to the local + convention before we can actually use them in the filesystem. Raises + ValueError on non-Unix-ish systems if 'pathname' either starts or + ends with a slash. + """ + if os.sep == '/': + return pathname + if not pathname: + return pathname + if pathname[0] == '/': + raise ValueError("path '%s' cannot be absolute" % pathname) + if pathname[-1] == '/': + raise ValueError("path '%s' cannot end with '/'" % pathname) + + paths = pathname.split('/') + while '.' in paths: + paths.remove('.') + if not paths: + return os.curdir + return os.path.join(*paths) + +# convert_path () + + +def change_root (new_root, pathname): + """Return 'pathname' with 'new_root' prepended. If 'pathname' is + relative, this is equivalent to "os.path.join(new_root,pathname)". + Otherwise, it requires making 'pathname' relative and then joining the + two, which is tricky on DOS/Windows and Mac OS. + """ + if os.name == 'posix': + if not os.path.isabs(pathname): + return os.path.join(new_root, pathname) + else: + return os.path.join(new_root, pathname[1:]) + + elif os.name == 'nt': + (drive, path) = os.path.splitdrive(pathname) + if path[0] == '\\': + path = path[1:] + return os.path.join(new_root, path) + + else: + raise DistutilsPlatformError("nothing known about platform '%s'" % os.name) + + +_environ_checked = 0 +def check_environ (): + """Ensure that 'os.environ' has all the environment variables we + guarantee that users can use in config files, command-line options, + etc. Currently this includes: + HOME - user's home directory (Unix only) + PLAT - description of the current platform, including hardware + and OS (see 'get_platform()') + """ + global _environ_checked + if _environ_checked: + return + + if os.name == 'posix' and 'HOME' not in os.environ: + try: + import pwd + os.environ['HOME'] = pwd.getpwuid(os.getuid())[5] + except (ImportError, KeyError): + # bpo-10496: if the current user identifier doesn't exist in the + # password database, do nothing + pass + + if 'PLAT' not in os.environ: + os.environ['PLAT'] = get_platform() + + _environ_checked = 1 + + +def subst_vars (s, local_vars): + """Perform shell/Perl-style variable substitution on 'string'. 
Every + occurrence of '$' followed by a name is considered a variable, and + variable is substituted by the value found in the 'local_vars' + dictionary, or in 'os.environ' if it's not in 'local_vars'. + 'os.environ' is first checked/augmented to guarantee that it contains + certain values: see 'check_environ()'. Raise ValueError for any + variables not found in either 'local_vars' or 'os.environ'. + """ + check_environ() + def _subst (match, local_vars=local_vars): + var_name = match.group(1) + if var_name in local_vars: + return str(local_vars[var_name]) + else: + return os.environ[var_name] + + try: + return re.sub(r'\$([a-zA-Z_][a-zA-Z_0-9]*)', _subst, s) + except KeyError as var: + raise ValueError("invalid variable '$%s'" % var) + +# subst_vars () + + +def grok_environment_error (exc, prefix="error: "): + # Function kept for backward compatibility. + # Used to try clever things with EnvironmentErrors, + # but nowadays str(exception) produces good messages. + return prefix + str(exc) + + +# Needed by 'split_quoted()' +_wordchars_re = _squote_re = _dquote_re = None +def _init_regex(): + global _wordchars_re, _squote_re, _dquote_re + _wordchars_re = re.compile(r'[^\\\'\"%s ]*' % string.whitespace) + _squote_re = re.compile(r"'(?:[^'\\]|\\.)*'") + _dquote_re = re.compile(r'"(?:[^"\\]|\\.)*"') + +def split_quoted (s): + """Split a string up according to Unix shell-like rules for quotes and + backslashes. In short: words are delimited by spaces, as long as those + spaces are not escaped by a backslash, or inside a quoted string. + Single and double quotes are equivalent, and the quote characters can + be backslash-escaped. The backslash is stripped from any two-character + escape sequence, leaving only the escaped character. The quote + characters are stripped from any quoted string. Returns a list of + words. + """ + + # This is a nice algorithm for splitting up a single string, since it + # doesn't require character-by-character examination. It was a little + # bit of a brain-bender to get it working right, though... + if _wordchars_re is None: _init_regex() + + s = s.strip() + words = [] + pos = 0 + + while s: + m = _wordchars_re.match(s, pos) + end = m.end() + if end == len(s): + words.append(s[:end]) + break + + if s[end] in string.whitespace: # unescaped, unquoted whitespace: now + words.append(s[:end]) # we definitely have a word delimiter + s = s[end:].lstrip() + pos = 0 + + elif s[end] == '\\': # preserve whatever is being escaped; + # will become part of the current word + s = s[:end] + s[end+1:] + pos = end+1 + + else: + if s[end] == "'": # slurp singly-quoted string + m = _squote_re.match(s, end) + elif s[end] == '"': # slurp doubly-quoted string + m = _dquote_re.match(s, end) + else: + raise RuntimeError("this can't happen (bad char '%c')" % s[end]) + + if m is None: + raise ValueError("bad string (mismatched %s quotes?)" % s[end]) + + (beg, end) = m.span() + s = s[:beg] + s[beg+1:end-1] + s[end:] + pos = m.end() - 2 + + if pos >= len(s): + words.append(s) + break + + return words + +# split_quoted () + + +def execute (func, args, msg=None, verbose=0, dry_run=0): + """Perform some action that affects the outside world (eg. by + writing to the filesystem). Such actions are special because they + are disabled by the 'dry_run' flag. This method takes care of all + that bureaucracy for you; all you have to do is supply the + function to call and an argument tuple for it (to embody the + "external action" being performed), and an optional message to + print. 
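# [Editor's illustration -- not part of the diff.]  subst_vars() as defined
# above, with a hypothetical 'py_version' local variable; names missing from
# local_vars fall back to os.environ, and $PLAT is available because
# subst_vars() calls check_environ() first.
from distutils.util import subst_vars

print(subst_vars("lib/python$py_version/$PLAT", {"py_version": "3.10"}))
# e.g. 'lib/python3.10/linux-x86_64' on a 64-bit Linux host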
+ """ + if msg is None: + msg = "%s%r" % (func.__name__, args) + if msg[-2:] == ',)': # correct for singleton tuple + msg = msg[0:-2] + ')' + + log.info(msg) + if not dry_run: + func(*args) + + +def strtobool (val): + """Convert a string representation of truth to true (1) or false (0). + + True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values + are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if + 'val' is anything else. + """ + val = val.lower() + if val in ('y', 'yes', 't', 'true', 'on', '1'): + return 1 + elif val in ('n', 'no', 'f', 'false', 'off', '0'): + return 0 + else: + raise ValueError("invalid truth value %r" % (val,)) + + +def byte_compile (py_files, + optimize=0, force=0, + prefix=None, base_dir=None, + verbose=1, dry_run=0, + direct=None): + """Byte-compile a collection of Python source files to .pyc + files in a __pycache__ subdirectory. 'py_files' is a list + of files to compile; any files that don't end in ".py" are silently + skipped. 'optimize' must be one of the following: + 0 - don't optimize + 1 - normal optimization (like "python -O") + 2 - extra optimization (like "python -OO") + If 'force' is true, all files are recompiled regardless of + timestamps. + + The source filename encoded in each bytecode file defaults to the + filenames listed in 'py_files'; you can modify these with 'prefix' and + 'basedir'. 'prefix' is a string that will be stripped off of each + source filename, and 'base_dir' is a directory name that will be + prepended (after 'prefix' is stripped). You can supply either or both + (or neither) of 'prefix' and 'base_dir', as you wish. + + If 'dry_run' is true, doesn't actually do anything that would + affect the filesystem. + + Byte-compilation is either done directly in this interpreter process + with the standard py_compile module, or indirectly by writing a + temporary script and executing it. Normally, you should let + 'byte_compile()' figure out to use direct compilation or not (see + the source for details). The 'direct' flag is used by the script + generated in indirect mode; unless you know what you're doing, leave + it set to None. + """ + + # Late import to fix a bootstrap issue: _posixsubprocess is built by + # setup.py, but setup.py uses distutils. + import subprocess + + # nothing is done if sys.dont_write_bytecode is True + if sys.dont_write_bytecode: + raise DistutilsByteCompileError('byte-compiling is disabled.') + + # First, if the caller didn't force us into direct or indirect mode, + # figure out which mode we should be in. We take a conservative + # approach: choose direct mode *only* if the current interpreter is + # in debug mode and optimize is 0. If we're not in debug mode (-O + # or -OO), we don't know which level of optimization this + # interpreter is running with, so we can't do direct + # byte-compilation and be certain that it's the right thing. Thus, + # always compile indirectly if the current interpreter is in either + # optimize mode, or if either optimization level was requested by + # the caller. + if direct is None: + direct = (__debug__ and optimize == 0) + + # "Indirect" byte-compilation: write a temporary script and then + # run it with the appropriate flags. 
+ if not direct: + try: + from tempfile import mkstemp + (script_fd, script_name) = mkstemp(".py") + except ImportError: + from tempfile import mktemp + (script_fd, script_name) = None, mktemp(".py") + log.info("writing byte-compilation script '%s'", script_name) + if not dry_run: + if script_fd is not None: + script = os.fdopen(script_fd, "w") + else: + script = open(script_name, "w") + + with script: + script.write("""\ +from distutils.util import byte_compile +files = [ +""") + + # XXX would be nice to write absolute filenames, just for + # safety's sake (script should be more robust in the face of + # chdir'ing before running it). But this requires abspath'ing + # 'prefix' as well, and that breaks the hack in build_lib's + # 'byte_compile()' method that carefully tacks on a trailing + # slash (os.sep really) to make sure the prefix here is "just + # right". This whole prefix business is rather delicate -- the + # problem is that it's really a directory, but I'm treating it + # as a dumb string, so trailing slashes and so forth matter. + + #py_files = map(os.path.abspath, py_files) + #if prefix: + # prefix = os.path.abspath(prefix) + + script.write(",\n".join(map(repr, py_files)) + "]\n") + script.write(""" +byte_compile(files, optimize=%r, force=%r, + prefix=%r, base_dir=%r, + verbose=%r, dry_run=0, + direct=1) +""" % (optimize, force, prefix, base_dir, verbose)) + + msg = distutils._DEPRECATION_MESSAGE + cmd = [sys.executable] + cmd.extend(subprocess._optim_args_from_interpreter_flags()) + cmd.append(f'-Wignore:{msg}:DeprecationWarning') + cmd.append(script_name) + spawn(cmd, dry_run=dry_run) + execute(os.remove, (script_name,), "removing %s" % script_name, + dry_run=dry_run) + + # "Direct" byte-compilation: use the py_compile module to compile + # right here, right now. Note that the script generated in indirect + # mode simply calls 'byte_compile()' in direct mode, a weird sort of + # cross-process recursion. Hey, it works! + else: + from py_compile import compile + + for file in py_files: + if file[-3:] != ".py": + # This lets us be lazy and not filter filenames in + # the "install_lib" command. + continue + + # Terminology from the py_compile module: + # cfile - byte-compiled file + # dfile - purported source filename (same as 'file' by default) + if optimize >= 0: + opt = '' if optimize == 0 else optimize + cfile = importlib.util.cache_from_source( + file, optimization=opt) + else: + cfile = importlib.util.cache_from_source(file) + dfile = file + if prefix: + if file[:len(prefix)] != prefix: + raise ValueError("invalid prefix: filename %r doesn't start with %r" + % (file, prefix)) + dfile = dfile[len(prefix):] + if base_dir: + dfile = os.path.join(base_dir, dfile) + + cfile_base = os.path.basename(cfile) + if direct: + if force or newer(file, cfile): + log.info("byte-compiling %s to %s", file, cfile_base) + if not dry_run: + compile(file, cfile, dfile) + else: + log.debug("skipping byte-compilation of %s to %s", + file, cfile_base) + +# byte_compile () + +def rfc822_escape (header): + """Return a version of the string escaped for inclusion in an + RFC-822 header, by ensuring there are 8 spaces space after each newline. + """ + lines = header.split('\n') + sep = '\n' + 8 * ' ' + return sep.join(lines) + +# 2to3 support + +def run_2to3(files, fixer_names=None, options=None, explicit=None): + """Invoke 2to3 on a list of Python files. + The files should all come from the build area, as the + modification is done in-place. 
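# [Editor's illustration -- not part of the diff.]  A typical direct-mode call
# into byte_compile() as defined above, assuming a hypothetical source file
# pkg/mod.py, that sys.dont_write_bytecode is false, and that the interpreter
# runs without -O (the dist-packages path is illustrative only).
from distutils.util import byte_compile

byte_compile(["pkg/mod.py"], optimize=0, force=1,
             prefix="pkg/", base_dir="/usr/lib/python3/dist-packages/pkg")
# writes pkg/__pycache__/mod.cpython-3*.pyc, embedding the base_dir path as
# the recorded source filename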
To reduce the build time, + only files modified since the last invocation of this + function should be passed in the files argument.""" + + if not files: + return + + # Make this class local, to delay import of 2to3 + from lib2to3.refactor import RefactoringTool, get_fixers_from_package + class DistutilsRefactoringTool(RefactoringTool): + def log_error(self, msg, *args, **kw): + log.error(msg, *args) + + def log_message(self, msg, *args): + log.info(msg, *args) + + def log_debug(self, msg, *args): + log.debug(msg, *args) + + if fixer_names is None: + fixer_names = get_fixers_from_package('lib2to3.fixes') + r = DistutilsRefactoringTool(fixer_names, options=options) + r.refactor(files, write=True) + +def copydir_run_2to3(src, dest, template=None, fixer_names=None, + options=None, explicit=None): + """Recursively copy a directory, only copying new and changed files, + running run_2to3 over all newly copied Python modules afterward. + + If you give a template string, it's parsed like a MANIFEST.in. + """ + from distutils.dir_util import mkpath + from distutils.file_util import copy_file + from distutils.filelist import FileList + filelist = FileList() + curdir = os.getcwd() + os.chdir(src) + try: + filelist.findall() + finally: + os.chdir(curdir) + filelist.files[:] = filelist.allfiles + if template: + for line in template.splitlines(): + line = line.strip() + if not line: continue + filelist.process_template_line(line) + copied = [] + for filename in filelist.files: + outname = os.path.join(dest, filename) + mkpath(os.path.dirname(outname)) + res = copy_file(os.path.join(src, filename), outname, update=1) + if res[1]: copied.append(outname) + run_2to3([fn for fn in copied if fn.lower().endswith('.py')], + fixer_names=fixer_names, options=options, explicit=explicit) + return copied + +class Mixin2to3: + '''Mixin class for commands that run 2to3. + To configure 2to3, setup scripts may either change + the class variables, or inherit from individual commands + to override how 2to3 is invoked.''' + + # provide list of fixers to run; + # defaults to all from lib2to3.fixers + fixer_names = None + + # options dictionary + options = None + + # list of fixers to invoke even though they are marked as explicit + explicit = None + + def run_2to3(self, files): + return run_2to3(files, self.fixer_names, self.options, self.explicit) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/version.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/version.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/version.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/version.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,347 @@ +# +# distutils/version.py +# +# Implements multiple version numbering conventions for the +# Python Module Distribution Utilities. +# +# $Id$ +# + +"""Provides classes to represent module version numbers (one class for +each style of version numbering). There are currently two such classes +implemented: StrictVersion and LooseVersion. + +Every version number class implements the following interface: + * the 'parse' method takes a string and parses it to some internal + representation; if the string is an invalid version number, + 'parse' raises a ValueError exception + * the class constructor takes an optional string argument which, + if supplied, is passed to 'parse' + * __str__ reconstructs the string that was passed to 'parse' (or + an equivalent string -- ie. 
one that will generate an equivalent + version number instance) + * __repr__ generates Python code to recreate the version number instance + * _cmp compares the current instance with either another instance + of the same class or a string (which will be parsed to an instance + of the same class, thus must follow the same rules) +""" + +import re + +class Version: + """Abstract base class for version numbering classes. Just provides + constructor (__init__) and reproducer (__repr__), because those + seem to be the same for all version numbering classes; and route + rich comparisons to _cmp. + """ + + def __init__ (self, vstring=None): + if vstring: + self.parse(vstring) + + def __repr__ (self): + return "%s ('%s')" % (self.__class__.__name__, str(self)) + + def __eq__(self, other): + c = self._cmp(other) + if c is NotImplemented: + return c + return c == 0 + + def __lt__(self, other): + c = self._cmp(other) + if c is NotImplemented: + return c + return c < 0 + + def __le__(self, other): + c = self._cmp(other) + if c is NotImplemented: + return c + return c <= 0 + + def __gt__(self, other): + c = self._cmp(other) + if c is NotImplemented: + return c + return c > 0 + + def __ge__(self, other): + c = self._cmp(other) + if c is NotImplemented: + return c + return c >= 0 + + +# Interface for version-number classes -- must be implemented +# by the following classes (the concrete ones -- Version should +# be treated as an abstract class). +# __init__ (string) - create and take same action as 'parse' +# (string parameter is optional) +# parse (string) - convert a string representation to whatever +# internal representation is appropriate for +# this style of version numbering +# __str__ (self) - convert back to a string; should be very similar +# (if not identical to) the string supplied to parse +# __repr__ (self) - generate Python code to recreate +# the instance +# _cmp (self, other) - compare two version numbers ('other' may +# be an unparsed version string, or another +# instance of your version class) + + +class StrictVersion (Version): + + """Version numbering for anal retentives and software idealists. + Implements the standard interface for version number classes as + described above. A version number consists of two or three + dot-separated numeric components, with an optional "pre-release" tag + on the end. The pre-release tag consists of the letter 'a' or 'b' + followed by a number. If the numeric components of two version + numbers are equal, then one with a pre-release tag will always + be deemed earlier (lesser) than one without. + + The following are valid version numbers (shown in the order that + would be obtained by sorting according to the supplied cmp function): + + 0.4 0.4.0 (these two are equivalent) + 0.4.1 + 0.5a1 + 0.5b3 + 0.5 + 0.9.6 + 1.0 + 1.0.4a3 + 1.0.4b1 + 1.0.4 + + The following are examples of invalid version numbers: + + 1 + 2.7.2.2 + 1.3.a4 + 1.3pl1 + 1.3c4 + + The rationale for this version numbering system will be explained + in the distutils documentation. + """ + + version_re = re.compile(r'^(\d+) \. (\d+) (\. (\d+))? 
([ab](\d+))?$', + re.VERBOSE | re.ASCII) + + + def parse (self, vstring): + match = self.version_re.match(vstring) + if not match: + raise ValueError("invalid version number '%s'" % vstring) + + (major, minor, patch, prerelease, prerelease_num) = \ + match.group(1, 2, 4, 5, 6) + + if patch: + self.version = tuple(map(int, [major, minor, patch])) + else: + self.version = tuple(map(int, [major, minor])) + (0,) + + if prerelease: + self.prerelease = (prerelease[0], int(prerelease_num)) + else: + self.prerelease = None + + + def __str__ (self): + + if self.version[2] == 0: + vstring = '.'.join(map(str, self.version[0:2])) + else: + vstring = '.'.join(map(str, self.version)) + + if self.prerelease: + vstring = vstring + self.prerelease[0] + str(self.prerelease[1]) + + return vstring + + + def _cmp (self, other): + if isinstance(other, str): + other = StrictVersion(other) + elif not isinstance(other, StrictVersion): + return NotImplemented + + if self.version != other.version: + # numeric versions don't match + # prerelease stuff doesn't matter + if self.version < other.version: + return -1 + else: + return 1 + + # have to compare prerelease + # case 1: neither has prerelease; they're equal + # case 2: self has prerelease, other doesn't; other is greater + # case 3: self doesn't have prerelease, other does: self is greater + # case 4: both have prerelease: must compare them! + + if (not self.prerelease and not other.prerelease): + return 0 + elif (self.prerelease and not other.prerelease): + return -1 + elif (not self.prerelease and other.prerelease): + return 1 + elif (self.prerelease and other.prerelease): + if self.prerelease == other.prerelease: + return 0 + elif self.prerelease < other.prerelease: + return -1 + else: + return 1 + else: + assert False, "never get here" + +# end class StrictVersion + + +# The rules according to Greg Stein: +# 1) a version number has 1 or more numbers separated by a period or by +# sequences of letters. If only periods, then these are compared +# left-to-right to determine an ordering. +# 2) sequences of letters are part of the tuple for comparison and are +# compared lexicographically +# 3) recognize the numeric components may have leading zeroes +# +# The LooseVersion class below implements these rules: a version number +# string is split up into a tuple of integer and string components, and +# comparison is a simple tuple comparison. This means that version +# numbers behave in a predictable and obvious way, but a way that might +# not necessarily be how people *want* version numbers to behave. There +# wouldn't be a problem if people could stick to purely numeric version +# numbers: just split on period and compare the numbers as tuples. +# However, people insist on putting letters into their version numbers; +# the most common purpose seems to be: +# - indicating a "pre-release" version +# ('alpha', 'beta', 'a', 'b', 'pre', 'p') +# - indicating a post-release patch ('p', 'pl', 'patch') +# but of course this can't cover all version number schemes, and there's +# no way to know what a programmer means without asking him. +# +# The problem is what to do with letters (and other non-numeric +# characters) in a version number. The current implementation does the +# obvious and predictable thing: keep them as strings and compare +# lexically within a tuple comparison. This has the desired effect if +# an appended letter sequence implies something "post-release": +# eg. "0.99" < "0.99pl14" < "1.0", and "5.001" < "5.001m" < "5.002". 
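# [Editor's illustration -- not part of the diff.]  The ordering trade-off
# discussed in this comment block, shown concretely with the two classes
# defined in this module (no external assumptions).
from distutils.version import LooseVersion, StrictVersion

print(LooseVersion("0.99") < LooseVersion("0.99pl14") < LooseVersion("1.0"))  # True: post-release suffix sorts late
print(StrictVersion("1.5.2a2") < StrictVersion("1.5.2"))                      # True: pre-release sorts first
print(LooseVersion("1.5.2a2") < LooseVersion("1.5.2"))                        # False: plain tuple comparison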
+# +# However, if letters in a version number imply a pre-release version, +# the "obvious" thing isn't correct. Eg. you would expect that +# "1.5.1" < "1.5.2a2" < "1.5.2", but under the tuple/lexical comparison +# implemented here, this just isn't so. +# +# Two possible solutions come to mind. The first is to tie the +# comparison algorithm to a particular set of semantic rules, as has +# been done in the StrictVersion class above. This works great as long +# as everyone can go along with bondage and discipline. Hopefully a +# (large) subset of Python module programmers will agree that the +# particular flavour of bondage and discipline provided by StrictVersion +# provides enough benefit to be worth using, and will submit their +# version numbering scheme to its domination. The free-thinking +# anarchists in the lot will never give in, though, and something needs +# to be done to accommodate them. +# +# Perhaps a "moderately strict" version class could be implemented that +# lets almost anything slide (syntactically), and makes some heuristic +# assumptions about non-digits in version number strings. This could +# sink into special-case-hell, though; if I was as talented and +# idiosyncratic as Larry Wall, I'd go ahead and implement a class that +# somehow knows that "1.2.1" < "1.2.2a2" < "1.2.2" < "1.2.2pl3", and is +# just as happy dealing with things like "2g6" and "1.13++". I don't +# think I'm smart enough to do it right though. +# +# In any case, I've coded the test suite for this module (see +# ../test/test_version.py) specifically to fail on things like comparing +# "1.2a2" and "1.2". That's not because the *code* is doing anything +# wrong, it's because the simple, obvious design doesn't match my +# complicated, hairy expectations for real-world version numbers. It +# would be a snap to fix the test suite to say, "Yep, LooseVersion does +# the Right Thing" (ie. the code matches the conception). But I'd rather +# have a conception that matches common notions about version numbers. + +class LooseVersion (Version): + + """Version numbering for anarchists and software realists. + Implements the standard interface for version number classes as + described above. A version number consists of a series of numbers, + separated by either periods or strings of letters. When comparing + version numbers, the numeric components will be compared + numerically, and the alphabetic components lexically. The following + are all valid version numbers, in no particular order: + + 1.5.1 + 1.5.2b2 + 161 + 3.10a + 8.02 + 3.4j + 1996.07.12 + 3.2.pl0 + 3.1.1.6 + 2g6 + 11g + 0.960923 + 2.2beta29 + 1.13++ + 5.5.kw + 2.0b1pl0 + + In fact, there is no such thing as an invalid version number under + this scheme; the rules for comparison are simple and predictable, + but may not always give the results you want (for some definition + of "want"). 
+ """ + + component_re = re.compile(r'(\d+ | [a-z]+ | \.)', re.VERBOSE) + + def __init__ (self, vstring=None): + if vstring: + self.parse(vstring) + + + def parse (self, vstring): + # I've given up on thinking I can reconstruct the version string + # from the parsed tuple -- so I just store the string here for + # use by __str__ + self.vstring = vstring + components = [x for x in self.component_re.split(vstring) + if x and x != '.'] + for i, obj in enumerate(components): + try: + components[i] = int(obj) + except ValueError: + pass + + self.version = components + + + def __str__ (self): + return self.vstring + + + def __repr__ (self): + return "LooseVersion ('%s')" % str(self) + + + def _cmp (self, other): + if isinstance(other, str): + other = LooseVersion(other) + elif not isinstance(other, LooseVersion): + return NotImplemented + + if self.version == other.version: + return 0 + if self.version < other.version: + return -1 + if self.version > other.version: + return 1 + + +# end class LooseVersion diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/versionpredicate.py python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/versionpredicate.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/distutils/versionpredicate.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/distutils/versionpredicate.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,166 @@ +"""Module for parsing and testing package version predicate strings. +""" +import re +import distutils.version +import operator + + +re_validPackage = re.compile(r"(?i)^\s*([a-z_]\w*(?:\.[a-z_]\w*)*)(.*)", + re.ASCII) +# (package) (rest) + +re_paren = re.compile(r"^\s*\((.*)\)\s*$") # (list) inside of parentheses +re_splitComparison = re.compile(r"^\s*(<=|>=|<|>|!=|==)\s*([^\s,]+)\s*$") +# (comp) (version) + + +def splitUp(pred): + """Parse a single version comparison. + + Return (comparison string, StrictVersion) + """ + res = re_splitComparison.match(pred) + if not res: + raise ValueError("bad package restriction syntax: %r" % pred) + comp, verStr = res.groups() + return (comp, distutils.version.StrictVersion(verStr)) + +compmap = {"<": operator.lt, "<=": operator.le, "==": operator.eq, + ">": operator.gt, ">=": operator.ge, "!=": operator.ne} + +class VersionPredicate: + """Parse and test package version predicates. + + >>> v = VersionPredicate('pyepat.abc (>1.0, <3333.3a1, !=1555.1b3)') + + The `name` attribute provides the full dotted name that is given:: + + >>> v.name + 'pyepat.abc' + + The str() of a `VersionPredicate` provides a normalized + human-readable version of the expression:: + + >>> print(v) + pyepat.abc (> 1.0, < 3333.3a1, != 1555.1b3) + + The `satisfied_by()` method can be used to determine with a given + version number is included in the set described by the version + restrictions:: + + >>> v.satisfied_by('1.1') + True + >>> v.satisfied_by('1.4') + True + >>> v.satisfied_by('1.0') + False + >>> v.satisfied_by('4444.4') + False + >>> v.satisfied_by('1555.1b3') + False + + `VersionPredicate` is flexible in accepting extra whitespace:: + + >>> v = VersionPredicate(' pat( == 0.1 ) ') + >>> v.name + 'pat' + >>> v.satisfied_by('0.1') + True + >>> v.satisfied_by('0.2') + False + + If any version numbers passed in do not conform to the + restrictions of `StrictVersion`, a `ValueError` is raised:: + + >>> v = VersionPredicate('p1.p2.p3.p4(>=1.0, <=1.3a1, !=1.2zb3)') + Traceback (most recent call last): + ... 
+ ValueError: invalid version number '1.2zb3' + + It the module or package name given does not conform to what's + allowed as a legal module or package name, `ValueError` is + raised:: + + >>> v = VersionPredicate('foo-bar') + Traceback (most recent call last): + ... + ValueError: expected parenthesized list: '-bar' + + >>> v = VersionPredicate('foo bar (12.21)') + Traceback (most recent call last): + ... + ValueError: expected parenthesized list: 'bar (12.21)' + + """ + + def __init__(self, versionPredicateStr): + """Parse a version predicate string. + """ + # Fields: + # name: package name + # pred: list of (comparison string, StrictVersion) + + versionPredicateStr = versionPredicateStr.strip() + if not versionPredicateStr: + raise ValueError("empty package restriction") + match = re_validPackage.match(versionPredicateStr) + if not match: + raise ValueError("bad package name in %r" % versionPredicateStr) + self.name, paren = match.groups() + paren = paren.strip() + if paren: + match = re_paren.match(paren) + if not match: + raise ValueError("expected parenthesized list: %r" % paren) + str = match.groups()[0] + self.pred = [splitUp(aPred) for aPred in str.split(",")] + if not self.pred: + raise ValueError("empty parenthesized list in %r" + % versionPredicateStr) + else: + self.pred = [] + + def __str__(self): + if self.pred: + seq = [cond + " " + str(ver) for cond, ver in self.pred] + return self.name + " (" + ", ".join(seq) + ")" + else: + return self.name + + def satisfied_by(self, version): + """True if version is compatible with all the predicates in self. + The parameter version must be acceptable to the StrictVersion + constructor. It may be either a string or StrictVersion. + """ + for cond, ver in self.pred: + if not compmap[cond](version, ver): + return False + return True + + +_provision_rx = None + +def split_provision(value): + """Return the name and optional version number of a provision. + + The version number, if given, will be returned as a `StrictVersion` + instance, otherwise it will be `None`. + + >>> split_provision('mypkg') + ('mypkg', None) + >>> split_provision(' mypkg( 1.2 ) ') + ('mypkg', StrictVersion ('1.2')) + """ + global _provision_rx + if _provision_rx is None: + _provision_rx = re.compile( + r"([a-zA-Z_]\w*(?:\.[a-zA-Z_]\w*)*)(?:\s*\(\s*([^)\s]+)\s*\))?$", + re.ASCII) + value = value.strip() + m = _provision_rx.match(value) + if not m: + raise ValueError("illegal provides specification: %r" % value) + ver = m.group(2) or None + if ver: + ver = distutils.version.StrictVersion(ver) + return m.group(1), ver diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/Grammar.txt python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/Grammar.txt --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/Grammar.txt 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/Grammar.txt 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,196 @@ +# Grammar for 2to3. This grammar supports Python 2.x and 3.x. + +# NOTE WELL: You should also follow all the steps listed at +# https://devguide.python.org/grammar/ + +# Start symbols for the grammar: +# file_input is a module or sequence of commands read from an input file; +# single_input is a single interactive statement; +# eval_input is the input for the eval() and input() functions. +# NB: compound_stmt in single_input is followed by extra NEWLINE! 
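# [Editor's illustration -- not part of the diff.]  This grammar file is what
# lib2to3's pgen2 driver loads so it can parse both Python 2 and Python 3
# source; a minimal sketch (the print-statement input is just sample data).
from lib2to3 import pygram, pytree
from lib2to3.pgen2 import driver

d = driver.Driver(pygram.python_grammar, convert=pytree.convert)
tree = d.parse_string("print 'hello'\n")   # Python 2 print statement parses fine
print(str(tree), end="")                   # round-trips to: print 'hello'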
+file_input: (NEWLINE | stmt)* ENDMARKER +single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE +eval_input: testlist NEWLINE* ENDMARKER + +decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE +decorators: decorator+ +decorated: decorators (classdef | funcdef | async_funcdef) +async_funcdef: ASYNC funcdef +funcdef: 'def' NAME parameters ['->' test] ':' suite +parameters: '(' [typedargslist] ')' + +# The following definition for typedarglist is equivalent to this set of rules: +# +# arguments = argument (',' argument)* +# argument = tfpdef ['=' test] +# kwargs = '**' tname [','] +# args = '*' [tname] +# kwonly_kwargs = (',' argument)* [',' [kwargs]] +# args_kwonly_kwargs = args kwonly_kwargs | kwargs +# poskeyword_args_kwonly_kwargs = arguments [',' [args_kwonly_kwargs]] +# typedargslist_no_posonly = poskeyword_args_kwonly_kwargs | args_kwonly_kwargs +# typedarglist = arguments ',' '/' [',' [typedargslist_no_posonly]])|(typedargslist_no_posonly)" +# +# It needs to be fully expanded to allow our LL(1) parser to work on it. + +typedargslist: tfpdef ['=' test] (',' tfpdef ['=' test])* ',' '/' [ + ',' [((tfpdef ['=' test] ',')* ('*' [tname] (',' tname ['=' test])* + [',' ['**' tname [',']]] | '**' tname [',']) + | tfpdef ['=' test] (',' tfpdef ['=' test])* [','])] + ] | ((tfpdef ['=' test] ',')* ('*' [tname] (',' tname ['=' test])* + [',' ['**' tname [',']]] | '**' tname [',']) + | tfpdef ['=' test] (',' tfpdef ['=' test])* [',']) + +tname: NAME [':' test] +tfpdef: tname | '(' tfplist ')' +tfplist: tfpdef (',' tfpdef)* [','] + +# The following definition for varargslist is equivalent to this set of rules: +# +# arguments = argument (',' argument )* +# argument = vfpdef ['=' test] +# kwargs = '**' vname [','] +# args = '*' [vname] +# kwonly_kwargs = (',' argument )* [',' [kwargs]] +# args_kwonly_kwargs = args kwonly_kwargs | kwargs +# poskeyword_args_kwonly_kwargs = arguments [',' [args_kwonly_kwargs]] +# vararglist_no_posonly = poskeyword_args_kwonly_kwargs | args_kwonly_kwargs +# varargslist = arguments ',' '/' [','[(vararglist_no_posonly)]] | (vararglist_no_posonly) +# +# It needs to be fully expanded to allow our LL(1) parser to work on it. 
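The following snippet is illustrative only and not part of Grammar.txt; the names f, g and h are made up. It shows the kinds of def and lambda parameter lists that the fully expanded typedargslist above and the varargslist rule below are meant to accept: positional-only '/', annotations and defaults via tname/tfpdef, and '*'/'**' for var-positional and var-keyword parameters.

# Signatures of the kind the expanded typedargslist must cover:
def f(a, b=0, /, c=1, *args, d, e=2, **kw): ...
def g(x: int, *, y: str = "s") -> None: ...

# varargslist is the same shape without annotations, as used by lambdas:
h = lambda a, b=0, /, *args, c, **kw: (a, b, args, c, kw)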
+ +varargslist: vfpdef ['=' test ](',' vfpdef ['=' test])* ',' '/' [',' [ + ((vfpdef ['=' test] ',')* ('*' [vname] (',' vname ['=' test])* + [',' ['**' vname [',']]] | '**' vname [',']) + | vfpdef ['=' test] (',' vfpdef ['=' test])* [',']) + ]] | ((vfpdef ['=' test] ',')* + ('*' [vname] (',' vname ['=' test])* [',' ['**' vname [',']]]| '**' vname [',']) + | vfpdef ['=' test] (',' vfpdef ['=' test])* [',']) + +vname: NAME +vfpdef: vname | '(' vfplist ')' +vfplist: vfpdef (',' vfpdef)* [','] + +stmt: simple_stmt | compound_stmt +simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE +small_stmt: (expr_stmt | print_stmt | del_stmt | pass_stmt | flow_stmt | + import_stmt | global_stmt | exec_stmt | assert_stmt) +expr_stmt: testlist_star_expr (annassign | augassign (yield_expr|testlist) | + ('=' (yield_expr|testlist_star_expr))*) +annassign: ':' test ['=' test] +testlist_star_expr: (test|star_expr) (',' (test|star_expr))* [','] +augassign: ('+=' | '-=' | '*=' | '@=' | '/=' | '%=' | '&=' | '|=' | '^=' | + '<<=' | '>>=' | '**=' | '//=') +# For normal and annotated assignments, additional restrictions enforced by the interpreter +print_stmt: 'print' ( [ test (',' test)* [','] ] | + '>>' test [ (',' test)+ [','] ] ) +del_stmt: 'del' exprlist +pass_stmt: 'pass' +flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt | yield_stmt +break_stmt: 'break' +continue_stmt: 'continue' +return_stmt: 'return' [testlist_star_expr] +yield_stmt: yield_expr +raise_stmt: 'raise' [test ['from' test | ',' test [',' test]]] +import_stmt: import_name | import_from +import_name: 'import' dotted_as_names +import_from: ('from' ('.'* dotted_name | '.'+) + 'import' ('*' | '(' import_as_names ')' | import_as_names)) +import_as_name: NAME ['as' NAME] +dotted_as_name: dotted_name ['as' NAME] +import_as_names: import_as_name (',' import_as_name)* [','] +dotted_as_names: dotted_as_name (',' dotted_as_name)* +dotted_name: NAME ('.' 
NAME)* +global_stmt: ('global' | 'nonlocal') NAME (',' NAME)* +exec_stmt: 'exec' expr ['in' test [',' test]] +assert_stmt: 'assert' test [',' test] + +compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | funcdef | classdef | decorated | async_stmt +async_stmt: ASYNC (funcdef | with_stmt | for_stmt) +if_stmt: 'if' namedexpr_test ':' suite ('elif' namedexpr_test ':' suite)* ['else' ':' suite] +while_stmt: 'while' namedexpr_test ':' suite ['else' ':' suite] +for_stmt: 'for' exprlist 'in' testlist ':' suite ['else' ':' suite] +try_stmt: ('try' ':' suite + ((except_clause ':' suite)+ + ['else' ':' suite] + ['finally' ':' suite] | + 'finally' ':' suite)) +with_stmt: 'with' with_item (',' with_item)* ':' suite +with_item: test ['as' expr] +with_var: 'as' expr +# NB compile.c makes sure that the default except clause is last +except_clause: 'except' [test [(',' | 'as') test]] +suite: simple_stmt | NEWLINE INDENT stmt+ DEDENT + +# Backward compatibility cruft to support: +# [ x for x in lambda: True, lambda: False if x() ] +# even while also allowing: +# lambda x: 5 if x else 2 +# (But not a mix of the two) +testlist_safe: old_test [(',' old_test)+ [',']] +old_test: or_test | old_lambdef +old_lambdef: 'lambda' [varargslist] ':' old_test + +namedexpr_test: test [':=' test] +test: or_test ['if' or_test 'else' test] | lambdef +or_test: and_test ('or' and_test)* +and_test: not_test ('and' not_test)* +not_test: 'not' not_test | comparison +comparison: expr (comp_op expr)* +comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not' +star_expr: '*' expr +expr: xor_expr ('|' xor_expr)* +xor_expr: and_expr ('^' and_expr)* +and_expr: shift_expr ('&' shift_expr)* +shift_expr: arith_expr (('<<'|'>>') arith_expr)* +arith_expr: term (('+'|'-') term)* +term: factor (('*'|'@'|'/'|'%'|'//') factor)* +factor: ('+'|'-'|'~') factor | power +power: [AWAIT] atom trailer* ['**' factor] +atom: ('(' [yield_expr|testlist_gexp] ')' | + '[' [listmaker] ']' | + '{' [dictsetmaker] '}' | + '`' testlist1 '`' | + NAME | NUMBER | STRING+ | '.' '.' '.') +listmaker: (namedexpr_test|star_expr) ( comp_for | (',' (namedexpr_test|star_expr))* [','] ) +testlist_gexp: (namedexpr_test|star_expr) ( comp_for | (',' (namedexpr_test|star_expr))* [','] ) +lambdef: 'lambda' [varargslist] ':' test +trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME +subscriptlist: subscript (',' subscript)* [','] +subscript: test | [test] ':' [test] [sliceop] +sliceop: ':' [test] +exprlist: (expr|star_expr) (',' (expr|star_expr))* [','] +testlist: test (',' test)* [','] +dictsetmaker: ( ((test ':' test | '**' expr) + (comp_for | (',' (test ':' test | '**' expr))* [','])) | + ((test | star_expr) + (comp_for | (',' (test | star_expr))* [','])) ) + +classdef: 'class' NAME ['(' [arglist] ')'] ':' suite + +arglist: argument (',' argument)* [','] + +# "test '=' test" is really "keyword '=' test", but we have no such token. +# These need to be in a single rule to avoid grammar that is ambiguous +# to our LL(1) parser. Even though 'test' includes '*expr' in star_expr, +# we explicitly match '*' here, too, to give it proper precedence. +# Illegal combinations and orderings are blocked in ast.c: +# multiple (test comp_for) arguments are blocked; keyword unpackings +# that precede iterable unpackings are blocked; etc. 
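As a concrete illustration of the comment above (not part of Grammar.txt; f, seq, value and n are made-up names), the call forms below correspond one-to-one to the alternatives of the argument rule that follows, with the illegal combinations mentioned above left to ast.c rather than the grammar.

def f(*args, **kwargs):
    # Accept anything so each call form below runs as plain Python (3.8+).
    return args, kwargs

seq = [1, 2, 3]
value = 42

f(x for x in seq)         # test [comp_for]   -- generator-expression argument
f(n := len(seq))          # test ':=' test    -- named-expression argument
f(key=value)              # test '=' test     -- keyword argument
f(*seq, **{"k": value})   # '*' test / '**' test -- iterable / mapping unpacking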
+argument: ( test [comp_for] | + test ':=' test | + test '=' test | + '**' test | + '*' test ) + +comp_iter: comp_for | comp_if +comp_for: [ASYNC] 'for' exprlist 'in' testlist_safe [comp_iter] +comp_if: 'if' old_test [comp_iter] + +testlist1: test (',' test)* + +# not used in grammar, but may appear in "node" passed from Parser to Compiler +encoding_decl: NAME + +yield_expr: 'yield' [yield_arg] +yield_arg: 'from' test | testlist_star_expr diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/PatternGrammar.txt python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/PatternGrammar.txt --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/PatternGrammar.txt 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/PatternGrammar.txt 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,28 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +# A grammar to describe tree matching patterns. +# Not shown here: +# - 'TOKEN' stands for any token (leaf node) +# - 'any' stands for any node (leaf or interior) +# With 'any' we can still specify the sub-structure. + +# The start symbol is 'Matcher'. + +Matcher: Alternatives ENDMARKER + +Alternatives: Alternative ('|' Alternative)* + +Alternative: (Unit | NegatedUnit)+ + +Unit: [NAME '='] ( STRING [Repeater] + | NAME [Details] [Repeater] + | '(' Alternatives ')' [Repeater] + | '[' Alternatives ']' + ) + +NegatedUnit: 'not' (STRING | NAME [Details] | '(' Alternatives ')') + +Repeater: '*' | '+' | '{' NUMBER [',' NUMBER] '}' + +Details: '<' Alternatives '>' diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/__init__.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/__init__.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/__init__.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/__init__.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,8 @@ +import warnings + + +warnings.warn( + "lib2to3 package is deprecated and may not be able to parse Python 3.10+", + DeprecationWarning, + stacklevel=2, +) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/__main__.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/__main__.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/__main__.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/__main__.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,4 @@ +import sys +from .main import main + +sys.exit(main("lib2to3.fixes")) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/btm_matcher.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/btm_matcher.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/btm_matcher.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/btm_matcher.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,163 @@ +"""A bottom-up tree matching algorithm implementation meant to speed +up 2to3's matching process. After the tree patterns are reduced to +their rarest linear path, a linear Aho-Corasick automaton is +created. The linear automaton traverses the linear paths from the +leaves to the root of the AST and returns a set of nodes for further +matching. This reduces significantly the number of candidate nodes.""" + +__author__ = "George Boutsioukis " + +import logging +import itertools +from collections import defaultdict + +from . 
import pytree +from .btm_utils import reduce_tree + +class BMNode(object): + """Class for a node of the Aho-Corasick automaton used in matching""" + count = itertools.count() + def __init__(self): + self.transition_table = {} + self.fixers = [] + self.id = next(BMNode.count) + self.content = '' + +class BottomMatcher(object): + """The main matcher class. After instantiating the patterns should + be added using the add_fixer method""" + + def __init__(self): + self.match = set() + self.root = BMNode() + self.nodes = [self.root] + self.fixers = [] + self.logger = logging.getLogger("RefactoringTool") + + def add_fixer(self, fixer): + """Reduces a fixer's pattern tree to a linear path and adds it + to the matcher(a common Aho-Corasick automaton). The fixer is + appended on the matching states and called when they are + reached""" + self.fixers.append(fixer) + tree = reduce_tree(fixer.pattern_tree) + linear = tree.get_linear_subpattern() + match_nodes = self.add(linear, start=self.root) + for match_node in match_nodes: + match_node.fixers.append(fixer) + + def add(self, pattern, start): + "Recursively adds a linear pattern to the AC automaton" + #print("adding pattern", pattern, "to", start) + if not pattern: + #print("empty pattern") + return [start] + if isinstance(pattern[0], tuple): + #alternatives + #print("alternatives") + match_nodes = [] + for alternative in pattern[0]: + #add all alternatives, and add the rest of the pattern + #to each end node + end_nodes = self.add(alternative, start=start) + for end in end_nodes: + match_nodes.extend(self.add(pattern[1:], end)) + return match_nodes + else: + #single token + #not last + if pattern[0] not in start.transition_table: + #transition did not exist, create new + next_node = BMNode() + start.transition_table[pattern[0]] = next_node + else: + #transition exists already, follow + next_node = start.transition_table[pattern[0]] + + if pattern[1:]: + end_nodes = self.add(pattern[1:], start=next_node) + else: + end_nodes = [next_node] + return end_nodes + + def run(self, leaves): + """The main interface with the bottom matcher. The tree is + traversed from the bottom using the constructed + automaton. Nodes are only checked once as the tree is + retraversed. When the automaton fails, we give it one more + shot(in case the above tree matches as a whole with the + rejected leaf), then we break for the next leaf. 
There is the + special case of multiple arguments(see code comments) where we + recheck the nodes + + Args: + The leaves of the AST tree to be matched + + Returns: + A dictionary of node matches with fixers as the keys + """ + current_ac_node = self.root + results = defaultdict(list) + for leaf in leaves: + current_ast_node = leaf + while current_ast_node: + current_ast_node.was_checked = True + for child in current_ast_node.children: + # multiple statements, recheck + if isinstance(child, pytree.Leaf) and child.value == ";": + current_ast_node.was_checked = False + break + if current_ast_node.type == 1: + #name + node_token = current_ast_node.value + else: + node_token = current_ast_node.type + + if node_token in current_ac_node.transition_table: + #token matches + current_ac_node = current_ac_node.transition_table[node_token] + for fixer in current_ac_node.fixers: + results[fixer].append(current_ast_node) + else: + #matching failed, reset automaton + current_ac_node = self.root + if (current_ast_node.parent is not None + and current_ast_node.parent.was_checked): + #the rest of the tree upwards has been checked, next leaf + break + + #recheck the rejected node once from the root + if node_token in current_ac_node.transition_table: + #token matches + current_ac_node = current_ac_node.transition_table[node_token] + for fixer in current_ac_node.fixers: + results[fixer].append(current_ast_node) + + current_ast_node = current_ast_node.parent + return results + + def print_ac(self): + "Prints a graphviz diagram of the BM automaton(for debugging)" + print("digraph g{") + def print_node(node): + for subnode_key in node.transition_table.keys(): + subnode = node.transition_table[subnode_key] + print("%d -> %d [label=%s] //%s" % + (node.id, subnode.id, type_repr(subnode_key), str(subnode.fixers))) + if subnode_key == 1: + print(subnode.content) + print_node(subnode) + print_node(self.root) + print("}") + +# taken from pytree.py for debugging; only used by print_ac +_type_reprs = {} +def type_repr(type_num): + global _type_reprs + if not _type_reprs: + from .pygram import python_symbols + # printing tokens is possible but not as useful + # from .pgen2 import token // token.__dict__.items(): + for name, val in python_symbols.__dict__.items(): + if type(val) == int: _type_reprs[val] = name + return _type_reprs.setdefault(type_num, type_num) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/btm_utils.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/btm_utils.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/btm_utils.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/btm_utils.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,280 @@ +"Utility functions used by the btm_matcher module" + +from . import pytree +from .pgen2 import grammar, token +from .pygram import pattern_symbols, python_symbols + +syms = pattern_symbols +pysyms = python_symbols +tokens = grammar.opmap +token_labels = token + +TYPE_ANY = -1 +TYPE_ALTERNATIVES = -2 +TYPE_GROUP = -3 + +class MinNode(object): + """This class serves as an intermediate representation of the + pattern tree during the conversion to sets of leaf-to-root + subpatterns""" + + def __init__(self, type=None, name=None): + self.type = type + self.name = name + self.children = [] + self.leaf = False + self.parent = None + self.alternatives = [] + self.group = [] + + def __repr__(self): + return str(self.type) + ' ' + str(self.name) + + def leaf_to_root(self): + """Internal method. 
Returns a characteristic path of the + pattern tree. This method must be run for all leaves until the + linear subpatterns are merged into a single""" + node = self + subp = [] + while node: + if node.type == TYPE_ALTERNATIVES: + node.alternatives.append(subp) + if len(node.alternatives) == len(node.children): + #last alternative + subp = [tuple(node.alternatives)] + node.alternatives = [] + node = node.parent + continue + else: + node = node.parent + subp = None + break + + if node.type == TYPE_GROUP: + node.group.append(subp) + #probably should check the number of leaves + if len(node.group) == len(node.children): + subp = get_characteristic_subpattern(node.group) + node.group = [] + node = node.parent + continue + else: + node = node.parent + subp = None + break + + if node.type == token_labels.NAME and node.name: + #in case of type=name, use the name instead + subp.append(node.name) + else: + subp.append(node.type) + + node = node.parent + return subp + + def get_linear_subpattern(self): + """Drives the leaf_to_root method. The reason that + leaf_to_root must be run multiple times is because we need to + reject 'group' matches; for example the alternative form + (a | b c) creates a group [b c] that needs to be matched. Since + matching multiple linear patterns overcomes the automaton's + capabilities, leaf_to_root merges each group into a single + choice based on 'characteristic'ity, + + i.e. (a|b c) -> (a|b) if b more characteristic than c + + Returns: The most 'characteristic'(as defined by + get_characteristic_subpattern) path for the compiled pattern + tree. + """ + + for l in self.leaves(): + subp = l.leaf_to_root() + if subp: + return subp + + def leaves(self): + "Generator that returns the leaves of the tree" + for child in self.children: + yield from child.leaves() + if not self.children: + yield self + +def reduce_tree(node, parent=None): + """ + Internal function. Reduces a compiled pattern tree to an + intermediate representation suitable for feeding the + automaton. This also trims off any optional pattern elements(like + [a], a*). 
+ """ + + new_node = None + #switch on the node type + if node.type == syms.Matcher: + #skip + node = node.children[0] + + if node.type == syms.Alternatives : + #2 cases + if len(node.children) <= 2: + #just a single 'Alternative', skip this node + new_node = reduce_tree(node.children[0], parent) + else: + #real alternatives + new_node = MinNode(type=TYPE_ALTERNATIVES) + #skip odd children('|' tokens) + for child in node.children: + if node.children.index(child)%2: + continue + reduced = reduce_tree(child, new_node) + if reduced is not None: + new_node.children.append(reduced) + elif node.type == syms.Alternative: + if len(node.children) > 1: + + new_node = MinNode(type=TYPE_GROUP) + for child in node.children: + reduced = reduce_tree(child, new_node) + if reduced: + new_node.children.append(reduced) + if not new_node.children: + # delete the group if all of the children were reduced to None + new_node = None + + else: + new_node = reduce_tree(node.children[0], parent) + + elif node.type == syms.Unit: + if (isinstance(node.children[0], pytree.Leaf) and + node.children[0].value == '('): + #skip parentheses + return reduce_tree(node.children[1], parent) + if ((isinstance(node.children[0], pytree.Leaf) and + node.children[0].value == '[') + or + (len(node.children)>1 and + hasattr(node.children[1], "value") and + node.children[1].value == '[')): + #skip whole unit if its optional + return None + + leaf = True + details_node = None + alternatives_node = None + has_repeater = False + repeater_node = None + has_variable_name = False + + for child in node.children: + if child.type == syms.Details: + leaf = False + details_node = child + elif child.type == syms.Repeater: + has_repeater = True + repeater_node = child + elif child.type == syms.Alternatives: + alternatives_node = child + if hasattr(child, 'value') and child.value == '=': # variable name + has_variable_name = True + + #skip variable name + if has_variable_name: + #skip variable name, '=' + name_leaf = node.children[2] + if hasattr(name_leaf, 'value') and name_leaf.value == '(': + # skip parenthesis + name_leaf = node.children[3] + else: + name_leaf = node.children[0] + + #set node type + if name_leaf.type == token_labels.NAME: + #(python) non-name or wildcard + if name_leaf.value == 'any': + new_node = MinNode(type=TYPE_ANY) + else: + if hasattr(token_labels, name_leaf.value): + new_node = MinNode(type=getattr(token_labels, name_leaf.value)) + else: + new_node = MinNode(type=getattr(pysyms, name_leaf.value)) + + elif name_leaf.type == token_labels.STRING: + #(python) name or character; remove the apostrophes from + #the string value + name = name_leaf.value.strip("'") + if name in tokens: + new_node = MinNode(type=tokens[name]) + else: + new_node = MinNode(type=token_labels.NAME, name=name) + elif name_leaf.type == syms.Alternatives: + new_node = reduce_tree(alternatives_node, parent) + + #handle repeaters + if has_repeater: + if repeater_node.children[0].value == '*': + #reduce to None + new_node = None + elif repeater_node.children[0].value == '+': + #reduce to a single occurrence i.e. 
do nothing + pass + else: + #TODO: handle {min, max} repeaters + raise NotImplementedError + + #add children + if details_node and new_node is not None: + for child in details_node.children[1:-1]: + #skip '<', '>' markers + reduced = reduce_tree(child, new_node) + if reduced is not None: + new_node.children.append(reduced) + if new_node: + new_node.parent = parent + return new_node + + +def get_characteristic_subpattern(subpatterns): + """Picks the most characteristic from a list of linear patterns + Current order used is: + names > common_names > common_chars + """ + if not isinstance(subpatterns, list): + return subpatterns + if len(subpatterns)==1: + return subpatterns[0] + + # first pick out the ones containing variable names + subpatterns_with_names = [] + subpatterns_with_common_names = [] + common_names = ['in', 'for', 'if' , 'not', 'None'] + subpatterns_with_common_chars = [] + common_chars = "[]().,:" + for subpattern in subpatterns: + if any(rec_test(subpattern, lambda x: type(x) is str)): + if any(rec_test(subpattern, + lambda x: isinstance(x, str) and x in common_chars)): + subpatterns_with_common_chars.append(subpattern) + elif any(rec_test(subpattern, + lambda x: isinstance(x, str) and x in common_names)): + subpatterns_with_common_names.append(subpattern) + + else: + subpatterns_with_names.append(subpattern) + + if subpatterns_with_names: + subpatterns = subpatterns_with_names + elif subpatterns_with_common_names: + subpatterns = subpatterns_with_common_names + elif subpatterns_with_common_chars: + subpatterns = subpatterns_with_common_chars + # of the remaining subpatterns pick out the longest one + return max(subpatterns, key=len) + +def rec_test(sequence, test_func): + """Tests test_func on all items of sequence and items of included + sub-iterables""" + for x in sequence: + if isinstance(x, (list, tuple)): + yield from rec_test(x, test_func) + else: + yield test_func(x) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixer_base.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixer_base.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixer_base.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixer_base.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,186 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Base class for fixers (optional, but recommended).""" + +# Python imports +import itertools + +# Local imports +from .patcomp import PatternCompiler +from . import pygram +from .fixer_util import does_tree_import + +class BaseFix(object): + + """Optional base class for fixers. + + The subclass name must be FixFooBar where FooBar is the result of + removing underscores and capitalizing the words of the fix name. + For example, the class name for a fixer named 'has_key' should be + FixHasKey. + """ + + PATTERN = None # Most subclasses should override with a string literal + pattern = None # Compiled pattern, set by compile_pattern() + pattern_tree = None # Tree representation of the pattern + options = None # Options object passed to initializer + filename = None # The filename (set by set_filename) + numbers = itertools.count(1) # For new_name() + used_names = set() # A set of all used NAMEs + order = "post" # Does the fixer prefer pre- or post-order traversal + explicit = False # Is this ignored by refactor.py -f all? + run_order = 5 # Fixers will be sorted by run order before execution + # Lower numbers will be run first. 
+ _accept_type = None # [Advanced and not public] This tells RefactoringTool + # which node type to accept when there's not a pattern. + + keep_line_order = False # For the bottom matcher: match with the + # original line order + BM_compatible = False # Compatibility with the bottom matching + # module; every fixer should set this + # manually + + # Shortcut for access to Python grammar symbols + syms = pygram.python_symbols + + def __init__(self, options, log): + """Initializer. Subclass may override. + + Args: + options: a dict containing the options passed to RefactoringTool + that could be used to customize the fixer through the command line. + log: a list to append warnings and other messages to. + """ + self.options = options + self.log = log + self.compile_pattern() + + def compile_pattern(self): + """Compiles self.PATTERN into self.pattern. + + Subclass may override if it doesn't want to use + self.{pattern,PATTERN} in .match(). + """ + if self.PATTERN is not None: + PC = PatternCompiler() + self.pattern, self.pattern_tree = PC.compile_pattern(self.PATTERN, + with_tree=True) + + def set_filename(self, filename): + """Set the filename. + + The main refactoring tool should call this. + """ + self.filename = filename + + def match(self, node): + """Returns match for a given parse tree node. + + Should return a true or false object (not necessarily a bool). + It may return a non-empty dict of matching sub-nodes as + returned by a matching pattern. + + Subclass may override. + """ + results = {"node": node} + return self.pattern.match(node, results) and results + + def transform(self, node, results): + """Returns the transformation for a given parse tree node. + + Args: + node: the root of the parse tree that matched the fixer. + results: a dict mapping symbolic names to part of the match. + + Returns: + None, or a node that is a modified copy of the + argument node. The node argument may also be modified in-place to + effect the same change. + + Subclass *must* override. + """ + raise NotImplementedError() + + def new_name(self, template="xxx_todo_changeme"): + """Return a string suitable for use as an identifier + + The new name is guaranteed not to conflict with other identifiers. + """ + name = template + while name in self.used_names: + name = template + str(next(self.numbers)) + self.used_names.add(name) + return name + + def log_message(self, message): + if self.first_log: + self.first_log = False + self.log.append("### In file %s ###" % self.filename) + self.log.append(message) + + def cannot_convert(self, node, reason=None): + """Warn the user that a given chunk of code is not valid Python 3, + but that it cannot be converted automatically. + + First argument is the top-level node for the code in question. + Optional second argument is why it can't be converted. + """ + lineno = node.get_lineno() + for_output = node.clone() + for_output.prefix = "" + msg = "Line %d: could not convert: %s" + self.log_message(msg % (lineno, for_output)) + if reason: + self.log_message(reason) + + def warning(self, node, reason): + """Used for warning the user about possible uncertainty in the + translation. + + First argument is the top-level node for the code in question. + Optional second argument is why it can't be converted. + """ + lineno = node.get_lineno() + self.log_message("Line %d: %s" % (lineno, reason)) + + def start_tree(self, tree, filename): + """Some fixers need to maintain tree-wide state. + This method is called once, at the start of tree fix-up. 
+ + tree - the root node of the tree to be processed. + filename - the name of the file the tree came from. + """ + self.used_names = tree.used_names + self.set_filename(filename) + self.numbers = itertools.count(1) + self.first_log = True + + def finish_tree(self, tree, filename): + """Some fixers need to maintain tree-wide state. + This method is called once, at the conclusion of tree fix-up. + + tree - the root node of the tree to be processed. + filename - the name of the file the tree came from. + """ + pass + + +class ConditionalFix(BaseFix): + """ Base class for fixers which not execute if an import is found. """ + + # This is the name of the import which, if found, will cause the test to be skipped + skip_on = None + + def start_tree(self, *args): + super(ConditionalFix, self).start_tree(*args) + self._should_skip = None + + def should_skip(self, node): + if self._should_skip is not None: + return self._should_skip + pkg = self.skip_on.split(".") + name = pkg[-1] + pkg = ".".join(pkg[:-1]) + self._should_skip = does_tree_import(pkg, name, node) + return self._should_skip diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixer_util.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixer_util.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixer_util.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixer_util.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,453 @@ +"""Utility functions, node construction macros, etc.""" +# Author: Collin Winter + +# Local imports +from .pgen2 import token +from .pytree import Leaf, Node +from .pygram import python_symbols as syms +from . import patcomp + + +########################################################### +### Common node-construction "macros" +########################################################### + +def KeywordArg(keyword, value): + return Node(syms.argument, + [keyword, Leaf(token.EQUAL, "="), value]) + +def LParen(): + return Leaf(token.LPAR, "(") + +def RParen(): + return Leaf(token.RPAR, ")") + +def Assign(target, source): + """Build an assignment statement""" + if not isinstance(target, list): + target = [target] + if not isinstance(source, list): + source.prefix = " " + source = [source] + + return Node(syms.atom, + target + [Leaf(token.EQUAL, "=", prefix=" ")] + source) + +def Name(name, prefix=None): + """Return a NAME leaf""" + return Leaf(token.NAME, name, prefix=prefix) + +def Attr(obj, attr): + """A node tuple for obj.attr""" + return [obj, Node(syms.trailer, [Dot(), attr])] + +def Comma(): + """A comma leaf""" + return Leaf(token.COMMA, ",") + +def Dot(): + """A period (.) 
leaf""" + return Leaf(token.DOT, ".") + +def ArgList(args, lparen=LParen(), rparen=RParen()): + """A parenthesised argument list, used by Call()""" + node = Node(syms.trailer, [lparen.clone(), rparen.clone()]) + if args: + node.insert_child(1, Node(syms.arglist, args)) + return node + +def Call(func_name, args=None, prefix=None): + """A function call""" + node = Node(syms.power, [func_name, ArgList(args)]) + if prefix is not None: + node.prefix = prefix + return node + +def Newline(): + """A newline literal""" + return Leaf(token.NEWLINE, "\n") + +def BlankLine(): + """A blank line""" + return Leaf(token.NEWLINE, "") + +def Number(n, prefix=None): + return Leaf(token.NUMBER, n, prefix=prefix) + +def Subscript(index_node): + """A numeric or string subscript""" + return Node(syms.trailer, [Leaf(token.LBRACE, "["), + index_node, + Leaf(token.RBRACE, "]")]) + +def String(string, prefix=None): + """A string leaf""" + return Leaf(token.STRING, string, prefix=prefix) + +def ListComp(xp, fp, it, test=None): + """A list comprehension of the form [xp for fp in it if test]. + + If test is None, the "if test" part is omitted. + """ + xp.prefix = "" + fp.prefix = " " + it.prefix = " " + for_leaf = Leaf(token.NAME, "for") + for_leaf.prefix = " " + in_leaf = Leaf(token.NAME, "in") + in_leaf.prefix = " " + inner_args = [for_leaf, fp, in_leaf, it] + if test: + test.prefix = " " + if_leaf = Leaf(token.NAME, "if") + if_leaf.prefix = " " + inner_args.append(Node(syms.comp_if, [if_leaf, test])) + inner = Node(syms.listmaker, [xp, Node(syms.comp_for, inner_args)]) + return Node(syms.atom, + [Leaf(token.LBRACE, "["), + inner, + Leaf(token.RBRACE, "]")]) + +def FromImport(package_name, name_leafs): + """ Return an import statement in the form: + from package import name_leafs""" + # XXX: May not handle dotted imports properly (eg, package_name='foo.bar') + #assert package_name == '.' or '.' not in package_name, "FromImport has "\ + # "not been tested with dotted package names -- use at your own "\ + # "peril!" 
+ + for leaf in name_leafs: + # Pull the leaves out of their old tree + leaf.remove() + + children = [Leaf(token.NAME, "from"), + Leaf(token.NAME, package_name, prefix=" "), + Leaf(token.NAME, "import", prefix=" "), + Node(syms.import_as_names, name_leafs)] + imp = Node(syms.import_from, children) + return imp + +def ImportAndCall(node, results, names): + """Returns an import statement and calls a method + of the module: + + import module + module.name()""" + obj = results["obj"].clone() + if obj.type == syms.arglist: + newarglist = obj.clone() + else: + newarglist = Node(syms.arglist, [obj.clone()]) + after = results["after"] + if after: + after = [n.clone() for n in after] + new = Node(syms.power, + Attr(Name(names[0]), Name(names[1])) + + [Node(syms.trailer, + [results["lpar"].clone(), + newarglist, + results["rpar"].clone()])] + after) + new.prefix = node.prefix + return new + + +########################################################### +### Determine whether a node represents a given literal +########################################################### + +def is_tuple(node): + """Does the node represent a tuple literal?""" + if isinstance(node, Node) and node.children == [LParen(), RParen()]: + return True + return (isinstance(node, Node) + and len(node.children) == 3 + and isinstance(node.children[0], Leaf) + and isinstance(node.children[1], Node) + and isinstance(node.children[2], Leaf) + and node.children[0].value == "(" + and node.children[2].value == ")") + +def is_list(node): + """Does the node represent a list literal?""" + return (isinstance(node, Node) + and len(node.children) > 1 + and isinstance(node.children[0], Leaf) + and isinstance(node.children[-1], Leaf) + and node.children[0].value == "[" + and node.children[-1].value == "]") + + +########################################################### +### Misc +########################################################### + +def parenthesize(node): + return Node(syms.atom, [LParen(), node, RParen()]) + + +consuming_calls = {"sorted", "list", "set", "any", "all", "tuple", "sum", + "min", "max", "enumerate"} + +def attr_chain(obj, attr): + """Follow an attribute chain. + + If you have a chain of objects where a.foo -> b, b.foo-> c, etc, + use this to iterate over all objects in the chain. Iteration is + terminated by getattr(x, attr) is None. + + Args: + obj: the starting object + attr: the name of the chaining attribute + + Yields: + Each successive object in the chain. + """ + next = getattr(obj, attr) + while next: + yield next + next = getattr(next, attr) + +p0 = """for_stmt< 'for' any 'in' node=any ':' any* > + | comp_for< 'for' any 'in' node=any any* > + """ +p1 = """ +power< + ( 'iter' | 'list' | 'tuple' | 'sorted' | 'set' | 'sum' | + 'any' | 'all' | 'enumerate' | (any* trailer< '.' 'join' >) ) + trailer< '(' node=any ')' > + any* +> +""" +p2 = """ +power< + ( 'sorted' | 'enumerate' ) + trailer< '(' arglist ')' > + any* +> +""" +pats_built = False +def in_special_context(node): + """ Returns true if node is in an environment where all that is required + of it is being iterable (ie, it doesn't matter if it returns a list + or an iterator). + See test_map_nochange in test_fixers.py for some examples and tests. 
+ """ + global p0, p1, p2, pats_built + if not pats_built: + p0 = patcomp.compile_pattern(p0) + p1 = patcomp.compile_pattern(p1) + p2 = patcomp.compile_pattern(p2) + pats_built = True + patterns = [p0, p1, p2] + for pattern, parent in zip(patterns, attr_chain(node, "parent")): + results = {} + if pattern.match(parent, results) and results["node"] is node: + return True + return False + +def is_probably_builtin(node): + """ + Check that something isn't an attribute or function name etc. + """ + prev = node.prev_sibling + if prev is not None and prev.type == token.DOT: + # Attribute lookup. + return False + parent = node.parent + if parent.type in (syms.funcdef, syms.classdef): + return False + if parent.type == syms.expr_stmt and parent.children[0] is node: + # Assignment. + return False + if parent.type == syms.parameters or \ + (parent.type == syms.typedargslist and ( + (prev is not None and prev.type == token.COMMA) or + parent.children[0] is node + )): + # The name of an argument. + return False + return True + +def find_indentation(node): + """Find the indentation of *node*.""" + while node is not None: + if node.type == syms.suite and len(node.children) > 2: + indent = node.children[1] + if indent.type == token.INDENT: + return indent.value + node = node.parent + return "" + +########################################################### +### The following functions are to find bindings in a suite +########################################################### + +def make_suite(node): + if node.type == syms.suite: + return node + node = node.clone() + parent, node.parent = node.parent, None + suite = Node(syms.suite, [node]) + suite.parent = parent + return suite + +def find_root(node): + """Find the top level namespace.""" + # Scamper up to the top level namespace + while node.type != syms.file_input: + node = node.parent + if not node: + raise ValueError("root found before file_input node was found.") + return node + +def does_tree_import(package, name, node): + """ Returns true if name is imported from package at the + top level of the tree which node belongs to. + To cover the case of an import like 'import foo', use + None for the package and 'foo' for the name. """ + binding = find_binding(name, find_root(node), package) + return bool(binding) + +def is_import(node): + """Returns true if the node is an import statement.""" + return node.type in (syms.import_name, syms.import_from) + +def touch_import(package, name, node): + """ Works like `does_tree_import` but adds an import statement + if it was not imported. """ + def is_import_stmt(node): + return (node.type == syms.simple_stmt and node.children and + is_import(node.children[0])) + + root = find_root(node) + + if does_tree_import(package, name, root): + return + + # figure out where to insert the new import. First try to find + # the first import and then skip to the last one. + insert_pos = offset = 0 + for idx, node in enumerate(root.children): + if not is_import_stmt(node): + continue + for offset, node2 in enumerate(root.children[idx:]): + if not is_import_stmt(node2): + break + insert_pos = idx + offset + break + + # if there are no imports where we can insert, find the docstring. 
+ # if that also fails, we stick to the beginning of the file + if insert_pos == 0: + for idx, node in enumerate(root.children): + if (node.type == syms.simple_stmt and node.children and + node.children[0].type == token.STRING): + insert_pos = idx + 1 + break + + if package is None: + import_ = Node(syms.import_name, [ + Leaf(token.NAME, "import"), + Leaf(token.NAME, name, prefix=" ") + ]) + else: + import_ = FromImport(package, [Leaf(token.NAME, name, prefix=" ")]) + + children = [import_, Newline()] + root.insert_child(insert_pos, Node(syms.simple_stmt, children)) + + +_def_syms = {syms.classdef, syms.funcdef} +def find_binding(name, node, package=None): + """ Returns the node which binds variable name, otherwise None. + If optional argument package is supplied, only imports will + be returned. + See test cases for examples.""" + for child in node.children: + ret = None + if child.type == syms.for_stmt: + if _find(name, child.children[1]): + return child + n = find_binding(name, make_suite(child.children[-1]), package) + if n: ret = n + elif child.type in (syms.if_stmt, syms.while_stmt): + n = find_binding(name, make_suite(child.children[-1]), package) + if n: ret = n + elif child.type == syms.try_stmt: + n = find_binding(name, make_suite(child.children[2]), package) + if n: + ret = n + else: + for i, kid in enumerate(child.children[3:]): + if kid.type == token.COLON and kid.value == ":": + # i+3 is the colon, i+4 is the suite + n = find_binding(name, make_suite(child.children[i+4]), package) + if n: ret = n + elif child.type in _def_syms and child.children[1].value == name: + ret = child + elif _is_import_binding(child, name, package): + ret = child + elif child.type == syms.simple_stmt: + ret = find_binding(name, child, package) + elif child.type == syms.expr_stmt: + if _find(name, child.children[0]): + ret = child + + if ret: + if not package: + return ret + if is_import(ret): + return ret + return None + +_block_syms = {syms.funcdef, syms.classdef, syms.trailer} +def _find(name, node): + nodes = [node] + while nodes: + node = nodes.pop() + if node.type > 256 and node.type not in _block_syms: + nodes.extend(node.children) + elif node.type == token.NAME and node.value == name: + return node + return None + +def _is_import_binding(node, name, package=None): + """ Will return node if node will import name, or node + will import * from package. None is returned otherwise. + See test cases for examples. """ + + if node.type == syms.import_name and not package: + imp = node.children[1] + if imp.type == syms.dotted_as_names: + for child in imp.children: + if child.type == syms.dotted_as_name: + if child.children[2].value == name: + return node + elif child.type == token.NAME and child.value == name: + return node + elif imp.type == syms.dotted_as_name: + last = imp.children[-1] + if last.type == token.NAME and last.value == name: + return node + elif imp.type == token.NAME and imp.value == name: + return node + elif node.type == syms.import_from: + # str(...) is used to make life easier here, because + # from a.b import parses to ['import', ['a', '.', 'b'], ...] 
+ if package and str(node.children[1]).strip() != package: + return None + n = node.children[3] + if package and _find("as", n): + # See test_from_import_as for explanation + return None + elif n.type == syms.import_as_names and _find(name, n): + return node + elif n.type == syms.import_as_name: + child = n.children[2] + if child.type == token.NAME and child.value == name: + return node + elif n.type == token.NAME and n.value == name: + return node + elif package and n.type == token.STAR: + return node + return None diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/__init__.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/__init__.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/__init__.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/__init__.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1 @@ +# Dummy file to make this directory a package. diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_apply.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_apply.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_apply.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_apply.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,68 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer for apply(). + +This converts apply(func, v, k) into (func)(*v, **k).""" + +# Local imports +from .. import pytree +from ..pgen2 import token +from .. import fixer_base +from ..fixer_util import Call, Comma, parenthesize + +class FixApply(fixer_base.BaseFix): + BM_compatible = True + + PATTERN = """ + power< 'apply' + trailer< + '(' + arglist< + (not argument + ')' + > + > + """ + + def transform(self, node, results): + syms = self.syms + assert results + func = results["func"] + args = results["args"] + kwds = results.get("kwds") + # I feel like we should be able to express this logic in the + # PATTERN above but I don't know how to do it so... + if args: + if (args.type == self.syms.argument and + args.children[0].value in {'**', '*'}): + return # Make no change. + if kwds and (kwds.type == self.syms.argument and + kwds.children[0].value == '**'): + return # Make no change. + prefix = node.prefix + func = func.clone() + if (func.type not in (token.NAME, syms.atom) and + (func.type != syms.power or + func.children[-2].type == token.DOUBLESTAR)): + # Need to parenthesize + func = parenthesize(func) + func.prefix = "" + args = args.clone() + args.prefix = "" + if kwds is not None: + kwds = kwds.clone() + kwds.prefix = "" + l_newargs = [pytree.Leaf(token.STAR, "*"), args] + if kwds is not None: + l_newargs.extend([Comma(), + pytree.Leaf(token.DOUBLESTAR, "**"), + kwds]) + l_newargs[-2].prefix = " " # that's the ** token + # XXX Sometimes we could be cleverer, e.g. 
apply(f, (x, y) + t) + # can be translated into f(x, y, *t) instead of f(*(x, y) + t) + #new = pytree.Node(syms.power, (func, ArgList(l_newargs))) + return Call(func, l_newargs, prefix=prefix) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_asserts.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_asserts.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_asserts.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_asserts.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,34 @@ +"""Fixer that replaces deprecated unittest method names.""" + +# Author: Ezio Melotti + +from ..fixer_base import BaseFix +from ..fixer_util import Name + +NAMES = dict( + assert_="assertTrue", + assertEquals="assertEqual", + assertNotEquals="assertNotEqual", + assertAlmostEquals="assertAlmostEqual", + assertNotAlmostEquals="assertNotAlmostEqual", + assertRegexpMatches="assertRegex", + assertRaisesRegexp="assertRaisesRegex", + failUnlessEqual="assertEqual", + failIfEqual="assertNotEqual", + failUnlessAlmostEqual="assertAlmostEqual", + failIfAlmostEqual="assertNotAlmostEqual", + failUnless="assertTrue", + failUnlessRaises="assertRaises", + failIf="assertFalse", +) + + +class FixAsserts(BaseFix): + + PATTERN = """ + power< any+ trailer< '.' meth=(%s)> any* > + """ % '|'.join(map(repr, NAMES)) + + def transform(self, node, results): + name = results["meth"][0] + name.replace(Name(NAMES[str(name)], prefix=name.prefix)) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_basestring.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_basestring.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_basestring.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_basestring.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,14 @@ +"""Fixer for basestring -> str.""" +# Author: Christian Heimes + +# Local imports +from .. import fixer_base +from ..fixer_util import Name + +class FixBasestring(fixer_base.BaseFix): + BM_compatible = True + + PATTERN = "'basestring'" + + def transform(self, node, results): + return Name("str", prefix=node.prefix) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_buffer.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_buffer.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_buffer.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_buffer.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,22 @@ +# Copyright 2007 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer that changes buffer(...) into memoryview(...).""" + +# Local imports +from .. 
import fixer_base +from ..fixer_util import Name + + +class FixBuffer(fixer_base.BaseFix): + BM_compatible = True + + explicit = True # The user must ask for this fixer + + PATTERN = """ + power< name='buffer' trailer< '(' [any] ')' > any* > + """ + + def transform(self, node, results): + name = results["name"] + name.replace(Name("memoryview", prefix=name.prefix)) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_dict.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_dict.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_dict.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_dict.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,106 @@ +# Copyright 2007 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer for dict methods. + +d.keys() -> list(d.keys()) +d.items() -> list(d.items()) +d.values() -> list(d.values()) + +d.iterkeys() -> iter(d.keys()) +d.iteritems() -> iter(d.items()) +d.itervalues() -> iter(d.values()) + +d.viewkeys() -> d.keys() +d.viewitems() -> d.items() +d.viewvalues() -> d.values() + +Except in certain very specific contexts: the iter() can be dropped +when the context is list(), sorted(), iter() or for...in; the list() +can be dropped when the context is list() or sorted() (but not iter() +or for...in!). Special contexts that apply to both: list(), sorted(), tuple() +set(), any(), all(), sum(). + +Note: iter(d.keys()) could be written as iter(d) but since the +original d.iterkeys() was also redundant we don't fix this. And there +are (rare) contexts where it makes a difference (e.g. when passing it +as an argument to a function that introspects the argument). +""" + +# Local imports +from .. import pytree +from .. import patcomp +from .. import fixer_base +from ..fixer_util import Name, Call, Dot +from .. import fixer_util + + +iter_exempt = fixer_util.consuming_calls | {"iter"} + + +class FixDict(fixer_base.BaseFix): + BM_compatible = True + + PATTERN = """ + power< head=any+ + trailer< '.' 
method=('keys'|'items'|'values'| + 'iterkeys'|'iteritems'|'itervalues'| + 'viewkeys'|'viewitems'|'viewvalues') > + parens=trailer< '(' ')' > + tail=any* + > + """ + + def transform(self, node, results): + head = results["head"] + method = results["method"][0] # Extract node for method name + tail = results["tail"] + syms = self.syms + method_name = method.value + isiter = method_name.startswith("iter") + isview = method_name.startswith("view") + if isiter or isview: + method_name = method_name[4:] + assert method_name in ("keys", "items", "values"), repr(method) + head = [n.clone() for n in head] + tail = [n.clone() for n in tail] + special = not tail and self.in_special_context(node, isiter) + args = head + [pytree.Node(syms.trailer, + [Dot(), + Name(method_name, + prefix=method.prefix)]), + results["parens"].clone()] + new = pytree.Node(syms.power, args) + if not (special or isview): + new.prefix = "" + new = Call(Name("iter" if isiter else "list"), [new]) + if tail: + new = pytree.Node(syms.power, [new] + tail) + new.prefix = node.prefix + return new + + P1 = "power< func=NAME trailer< '(' node=any ')' > any* >" + p1 = patcomp.compile_pattern(P1) + + P2 = """for_stmt< 'for' any 'in' node=any ':' any* > + | comp_for< 'for' any 'in' node=any any* > + """ + p2 = patcomp.compile_pattern(P2) + + def in_special_context(self, node, isiter): + if node.parent is None: + return False + results = {} + if (node.parent.parent is not None and + self.p1.match(node.parent.parent, results) and + results["node"] is node): + if isiter: + # iter(d.iterkeys()) -> iter(d.keys()), etc. + return results["func"].value in iter_exempt + else: + # list(d.keys()) -> list(d.keys()), etc. + return results["func"].value in fixer_util.consuming_calls + if not isiter: + return False + # for ... in d.iterkeys() -> for ... in d.keys(), etc. + return self.p2.match(node.parent, results) and results["node"] is node diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_except.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_except.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_except.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_except.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,93 @@ +"""Fixer for except statements with named exceptions. + +The following cases will be converted: + +- "except E, T:" where T is a name: + + except E as T: + +- "except E, T:" where T is not a name, tuple or list: + + except E as t: + T = t + + This is done because the target of an "except" clause must be a + name. + +- "except E, T:" where T is a tuple or list literal: + + except E as t: + T = t.args +""" +# Author: Collin Winter + +# Local imports +from .. import pytree +from ..pgen2 import token +from .. 
import fixer_base +from ..fixer_util import Assign, Attr, Name, is_tuple, is_list, syms + +def find_excepts(nodes): + for i, n in enumerate(nodes): + if n.type == syms.except_clause: + if n.children[0].value == 'except': + yield (n, nodes[i+2]) + +class FixExcept(fixer_base.BaseFix): + BM_compatible = True + + PATTERN = """ + try_stmt< 'try' ':' (simple_stmt | suite) + cleanup=(except_clause ':' (simple_stmt | suite))+ + tail=(['except' ':' (simple_stmt | suite)] + ['else' ':' (simple_stmt | suite)] + ['finally' ':' (simple_stmt | suite)]) > + """ + + def transform(self, node, results): + syms = self.syms + + tail = [n.clone() for n in results["tail"]] + + try_cleanup = [ch.clone() for ch in results["cleanup"]] + for except_clause, e_suite in find_excepts(try_cleanup): + if len(except_clause.children) == 4: + (E, comma, N) = except_clause.children[1:4] + comma.replace(Name("as", prefix=" ")) + + if N.type != token.NAME: + # Generate a new N for the except clause + new_N = Name(self.new_name(), prefix=" ") + target = N.clone() + target.prefix = "" + N.replace(new_N) + new_N = new_N.clone() + + # Insert "old_N = new_N" as the first statement in + # the except body. This loop skips leading whitespace + # and indents + #TODO(cwinter) suite-cleanup + suite_stmts = e_suite.children + for i, stmt in enumerate(suite_stmts): + if isinstance(stmt, pytree.Node): + break + + # The assignment is different if old_N is a tuple or list + # In that case, the assignment is old_N = new_N.args + if is_tuple(N) or is_list(N): + assign = Assign(target, Attr(new_N, Name('args'))) + else: + assign = Assign(target, new_N) + + #TODO(cwinter) stopgap until children becomes a smart list + for child in reversed(suite_stmts[:i]): + e_suite.insert_child(0, child) + e_suite.insert_child(i, assign) + elif N.prefix == "": + # No space after a comma is legal; no space after "as", + # not so much. + N.prefix = " " + + #TODO(cwinter) fix this when children becomes a smart list + children = [c.clone() for c in node.children[:3]] + try_cleanup + tail + return pytree.Node(node.type, children) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_exec.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_exec.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_exec.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_exec.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,39 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer for exec. + +This converts usages of the exec statement into calls to a built-in +exec() function. + +exec code in ns1, ns2 -> exec(code, ns1, ns2) +""" + +# Local imports +from .. 
import fixer_base +from ..fixer_util import Comma, Name, Call + + +class FixExec(fixer_base.BaseFix): + BM_compatible = True + + PATTERN = """ + exec_stmt< 'exec' a=any 'in' b=any [',' c=any] > + | + exec_stmt< 'exec' (not atom<'(' [any] ')'>) a=any > + """ + + def transform(self, node, results): + assert results + syms = self.syms + a = results["a"] + b = results.get("b") + c = results.get("c") + args = [a.clone()] + args[0].prefix = "" + if b is not None: + args.extend([Comma(), b.clone()]) + if c is not None: + args.extend([Comma(), c.clone()]) + + return Call(Name("exec"), args, prefix=node.prefix) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_execfile.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_execfile.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_execfile.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_execfile.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,53 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer for execfile. + +This converts usages of the execfile function into calls to the built-in +exec() function. +""" + +from .. import fixer_base +from ..fixer_util import (Comma, Name, Call, LParen, RParen, Dot, Node, + ArgList, String, syms) + + +class FixExecfile(fixer_base.BaseFix): + BM_compatible = True + + PATTERN = """ + power< 'execfile' trailer< '(' arglist< filename=any [',' globals=any [',' locals=any ] ] > ')' > > + | + power< 'execfile' trailer< '(' filename=any ')' > > + """ + + def transform(self, node, results): + assert results + filename = results["filename"] + globals = results.get("globals") + locals = results.get("locals") + + # Copy over the prefix from the right parentheses end of the execfile + # call. + execfile_paren = node.children[-1].children[-1].clone() + # Construct open().read(). + open_args = ArgList([filename.clone(), Comma(), String('"rb"', ' ')], + rparen=execfile_paren) + open_call = Node(syms.power, [Name("open"), open_args]) + read = [Node(syms.trailer, [Dot(), Name('read')]), + Node(syms.trailer, [LParen(), RParen()])] + open_expr = [open_call] + read + # Wrap the open call in a compile call. This is so the filename will be + # preserved in the execed code. + filename_arg = filename.clone() + filename_arg.prefix = " " + exec_str = String("'exec'", " ") + compile_args = open_expr + [Comma(), filename_arg, Comma(), exec_str] + compile_call = Call(Name("compile"), compile_args, "") + # Finally, replace the execfile call with an exec call. + args = [compile_call] + if globals is not None: + args.extend([Comma(), globals.clone()]) + if locals is not None: + args.extend([Comma(), locals.clone()]) + return Call(Name("exec"), args, prefix=node.prefix) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_exitfunc.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_exitfunc.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_exitfunc.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_exitfunc.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,72 @@ +""" +Convert use of sys.exitfunc to use the atexit module. 
+""" + +# Author: Benjamin Peterson + +from lib2to3 import pytree, fixer_base +from lib2to3.fixer_util import Name, Attr, Call, Comma, Newline, syms + + +class FixExitfunc(fixer_base.BaseFix): + keep_line_order = True + BM_compatible = True + + PATTERN = """ + ( + sys_import=import_name<'import' + ('sys' + | + dotted_as_names< (any ',')* 'sys' (',' any)* > + ) + > + | + expr_stmt< + power< 'sys' trailer< '.' 'exitfunc' > > + '=' func=any > + ) + """ + + def __init__(self, *args): + super(FixExitfunc, self).__init__(*args) + + def start_tree(self, tree, filename): + super(FixExitfunc, self).start_tree(tree, filename) + self.sys_import = None + + def transform(self, node, results): + # First, find the sys import. We'll just hope it's global scope. + if "sys_import" in results: + if self.sys_import is None: + self.sys_import = results["sys_import"] + return + + func = results["func"].clone() + func.prefix = "" + register = pytree.Node(syms.power, + Attr(Name("atexit"), Name("register")) + ) + call = Call(register, [func], node.prefix) + node.replace(call) + + if self.sys_import is None: + # That's interesting. + self.warning(node, "Can't find sys import; Please add an atexit " + "import at the top of your file.") + return + + # Now add an atexit import after the sys import. + names = self.sys_import.children[1] + if names.type == syms.dotted_as_names: + names.append_child(Comma()) + names.append_child(Name("atexit", " ")) + else: + containing_stmt = self.sys_import.parent + position = containing_stmt.children.index(self.sys_import) + stmt_container = containing_stmt.parent + new_import = pytree.Node(syms.import_name, + [Name("import"), Name("atexit", " ")] + ) + new = pytree.Node(syms.simple_stmt, [new_import]) + containing_stmt.insert_child(position + 1, Newline()) + containing_stmt.insert_child(position + 2, new) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_filter.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_filter.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_filter.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_filter.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,94 @@ +# Copyright 2007 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer that changes filter(F, X) into list(filter(F, X)). + +We avoid the transformation if the filter() call is directly contained +in iter(<>), list(<>), tuple(<>), sorted(<>), ...join(<>), or +for V in <>:. + +NOTE: This is still not correct if the original code was depending on +filter(F, X) to return a string if X is a string and a tuple if X is a +tuple. That would require type inference, which we don't do. Let +Python 2.6 figure it out. +""" + +# Local imports +from .. 
import fixer_base +from ..pytree import Node +from ..pygram import python_symbols as syms +from ..fixer_util import Name, ArgList, ListComp, in_special_context, parenthesize + + +class FixFilter(fixer_base.ConditionalFix): + BM_compatible = True + + PATTERN = """ + filter_lambda=power< + 'filter' + trailer< + '(' + arglist< + lambdef< 'lambda' + (fp=NAME | vfpdef< '(' fp=NAME ')'> ) ':' xp=any + > + ',' + it=any + > + ')' + > + [extra_trailers=trailer*] + > + | + power< + 'filter' + trailer< '(' arglist< none='None' ',' seq=any > ')' > + [extra_trailers=trailer*] + > + | + power< + 'filter' + args=trailer< '(' [any] ')' > + [extra_trailers=trailer*] + > + """ + + skip_on = "future_builtins.filter" + + def transform(self, node, results): + if self.should_skip(node): + return + + trailers = [] + if 'extra_trailers' in results: + for t in results['extra_trailers']: + trailers.append(t.clone()) + + if "filter_lambda" in results: + xp = results.get("xp").clone() + if xp.type == syms.test: + xp.prefix = "" + xp = parenthesize(xp) + + new = ListComp(results.get("fp").clone(), + results.get("fp").clone(), + results.get("it").clone(), xp) + new = Node(syms.power, [new] + trailers, prefix="") + + elif "none" in results: + new = ListComp(Name("_f"), + Name("_f"), + results["seq"].clone(), + Name("_f")) + new = Node(syms.power, [new] + trailers, prefix="") + + else: + if in_special_context(node): + return None + + args = results['args'].clone() + new = Node(syms.power, [Name("filter"), args], prefix="") + new = Node(syms.power, [Name("list"), ArgList([new])] + trailers) + new.prefix = "" + new.prefix = node.prefix + return new diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_funcattrs.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_funcattrs.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_funcattrs.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_funcattrs.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,21 @@ +"""Fix function attribute names (f.func_x -> f.__x__).""" +# Author: Collin Winter + +# Local imports +from .. import fixer_base +from ..fixer_util import Name + + +class FixFuncattrs(fixer_base.BaseFix): + BM_compatible = True + + PATTERN = """ + power< any+ trailer< '.' attr=('func_closure' | 'func_doc' | 'func_globals' + | 'func_name' | 'func_defaults' | 'func_code' + | 'func_dict') > any* > + """ + + def transform(self, node, results): + attr = results["attr"][0] + attr.replace(Name(("__%s__" % attr.value[5:]), + prefix=attr.prefix)) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_future.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_future.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_future.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_future.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,22 @@ +"""Remove __future__ imports + +from __future__ import foo is replaced with an empty line. +""" +# Author: Christian Heimes + +# Local imports +from .. 
import fixer_base +from ..fixer_util import BlankLine + +class FixFuture(fixer_base.BaseFix): + BM_compatible = True + + PATTERN = """import_from< 'from' module_name="__future__" 'import' any >""" + + # This should be run last -- some things check for the import + run_order = 10 + + def transform(self, node, results): + new = BlankLine() + new.prefix = node.prefix + return new diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_getcwdu.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_getcwdu.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_getcwdu.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_getcwdu.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,19 @@ +""" +Fixer that changes os.getcwdu() to os.getcwd(). +""" +# Author: Victor Stinner + +# Local imports +from .. import fixer_base +from ..fixer_util import Name + +class FixGetcwdu(fixer_base.BaseFix): + BM_compatible = True + + PATTERN = """ + power< 'os' trailer< dot='.' name='getcwdu' > any* > + """ + + def transform(self, node, results): + name = results["name"] + name.replace(Name("getcwd", prefix=name.prefix)) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_has_key.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_has_key.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_has_key.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_has_key.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,109 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer for has_key(). + +Calls to .has_key() methods are expressed in terms of the 'in' +operator: + + d.has_key(k) -> k in d + +CAVEATS: +1) While the primary target of this fixer is dict.has_key(), the + fixer will change any has_key() method call, regardless of its + class. + +2) Cases like this will not be converted: + + m = d.has_key + if m(k): + ... + + Only *calls* to has_key() are converted. While it is possible to + convert the above to something like + + m = d.__contains__ + if m(k): + ... + + this is currently not done. +""" + +# Local imports +from .. import pytree +from .. import fixer_base +from ..fixer_util import Name, parenthesize + + +class FixHasKey(fixer_base.BaseFix): + BM_compatible = True + + PATTERN = """ + anchor=power< + before=any+ + trailer< '.' 'has_key' > + trailer< + '(' + ( not(arglist | argument) arg=any ','> + ) + ')' + > + after=any* + > + | + negation=not_test< + 'not' + anchor=power< + before=any+ + trailer< '.' 
'has_key' > + trailer< + '(' + ( not(arglist | argument) arg=any ','> + ) + ')' + > + > + > + """ + + def transform(self, node, results): + assert results + syms = self.syms + if (node.parent.type == syms.not_test and + self.pattern.match(node.parent)): + # Don't transform a node matching the first alternative of the + # pattern when its parent matches the second alternative + return None + negation = results.get("negation") + anchor = results["anchor"] + prefix = node.prefix + before = [n.clone() for n in results["before"]] + arg = results["arg"].clone() + after = results.get("after") + if after: + after = [n.clone() for n in after] + if arg.type in (syms.comparison, syms.not_test, syms.and_test, + syms.or_test, syms.test, syms.lambdef, syms.argument): + arg = parenthesize(arg) + if len(before) == 1: + before = before[0] + else: + before = pytree.Node(syms.power, before) + before.prefix = " " + n_op = Name("in", prefix=" ") + if negation: + n_not = Name("not", prefix=" ") + n_op = pytree.Node(syms.comp_op, (n_not, n_op)) + new = pytree.Node(syms.comparison, (arg, n_op, before)) + if after: + new = parenthesize(new) + new = pytree.Node(syms.power, (new,) + tuple(after)) + if node.parent.type in (syms.comparison, syms.expr, syms.xor_expr, + syms.and_expr, syms.shift_expr, + syms.arith_expr, syms.term, + syms.factor, syms.power): + new = parenthesize(new) + new.prefix = prefix + return new diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_idioms.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_idioms.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_idioms.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_idioms.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,152 @@ +"""Adjust some old Python 2 idioms to their modern counterparts. + +* Change some type comparisons to isinstance() calls: + type(x) == T -> isinstance(x, T) + type(x) is T -> isinstance(x, T) + type(x) != T -> not isinstance(x, T) + type(x) is not T -> not isinstance(x, T) + +* Change "while 1:" into "while True:". + +* Change both + + v = list(EXPR) + v.sort() + foo(v) + +and the more general + + v = EXPR + v.sort() + foo(v) + +into + + v = sorted(EXPR) + foo(v) +""" +# Author: Jacques Frechet, Collin Winter + +# Local imports +from .. import fixer_base +from ..fixer_util import Call, Comma, Name, Node, BlankLine, syms + +CMP = "(n='!=' | '==' | 'is' | n=comp_op< 'is' 'not' >)" +TYPE = "power< 'type' trailer< '(' x=any ')' > >" + +class FixIdioms(fixer_base.BaseFix): + explicit = True # The user must ask for this fixer + + PATTERN = r""" + isinstance=comparison< %s %s T=any > + | + isinstance=comparison< T=any %s %s > + | + while_stmt< 'while' while='1' ':' any+ > + | + sorted=any< + any* + simple_stmt< + expr_stmt< id1=any '=' + power< list='list' trailer< '(' (not arglist) any ')' > > + > + '\n' + > + sort= + simple_stmt< + power< id2=any + trailer< '.' 'sort' > trailer< '(' ')' > + > + '\n' + > + next=any* + > + | + sorted=any< + any* + simple_stmt< expr_stmt< id1=any '=' expr=any > '\n' > + sort= + simple_stmt< + power< id2=any + trailer< '.' 'sort' > trailer< '(' ')' > + > + '\n' + > + next=any* + > + """ % (TYPE, CMP, CMP, TYPE) + + def match(self, node): + r = super(FixIdioms, self).match(node) + # If we've matched one of the sort/sorted subpatterns above, we + # want to reject matches where the initial assignment and the + # subsequent .sort() call involve different identifiers. 
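As a concrete illustration of the identifier check described in the comment above (example only, not part of the packaged sources; the names v, w and data are invented):

    v = list(data)
    v.sort()
    # id1 and id2 are both 'v', so this collapses to:  v = sorted(data)

    v = list(data)
    w.sort()
    # id1 ('v') and id2 ('w') differ, so both statements are left untouched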
+ if r and "sorted" in r: + if r["id1"] == r["id2"]: + return r + return None + return r + + def transform(self, node, results): + if "isinstance" in results: + return self.transform_isinstance(node, results) + elif "while" in results: + return self.transform_while(node, results) + elif "sorted" in results: + return self.transform_sort(node, results) + else: + raise RuntimeError("Invalid match") + + def transform_isinstance(self, node, results): + x = results["x"].clone() # The thing inside of type() + T = results["T"].clone() # The type being compared against + x.prefix = "" + T.prefix = " " + test = Call(Name("isinstance"), [x, Comma(), T]) + if "n" in results: + test.prefix = " " + test = Node(syms.not_test, [Name("not"), test]) + test.prefix = node.prefix + return test + + def transform_while(self, node, results): + one = results["while"] + one.replace(Name("True", prefix=one.prefix)) + + def transform_sort(self, node, results): + sort_stmt = results["sort"] + next_stmt = results["next"] + list_call = results.get("list") + simple_expr = results.get("expr") + + if list_call: + list_call.replace(Name("sorted", prefix=list_call.prefix)) + elif simple_expr: + new = simple_expr.clone() + new.prefix = "" + simple_expr.replace(Call(Name("sorted"), [new], + prefix=simple_expr.prefix)) + else: + raise RuntimeError("should not have reached here") + sort_stmt.remove() + + btwn = sort_stmt.prefix + # Keep any prefix lines between the sort_stmt and the list_call and + # shove them right after the sorted() call. + if "\n" in btwn: + if next_stmt: + # The new prefix should be everything from the sort_stmt's + # prefix up to the last newline, then the old prefix after a new + # line. + prefix_lines = (btwn.rpartition("\n")[0], next_stmt[0].prefix) + next_stmt[0].prefix = "\n".join(prefix_lines) + else: + assert list_call.parent + assert list_call.next_sibling is None + # Put a blank line after list_call and set its prefix. + end_line = BlankLine() + list_call.parent.append_child(end_line) + assert list_call.next_sibling is end_line + # The new prefix should be everything up to the first new line + # of sort_stmt's prefix. + end_line.prefix = btwn.rpartition("\n")[0] diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_import.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_import.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_import.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_import.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,99 @@ +"""Fixer for import statements. +If spam is being imported from the local directory, this import: + from spam import eggs +Becomes: + from .spam import eggs + +And this import: + import spam +Becomes: + from . import spam +""" + +# Local imports +from .. import fixer_base +from os.path import dirname, join, exists, sep +from ..fixer_util import FromImport, syms, token + + +def traverse_imports(names): + """ + Walks over all the names imported in a dotted_as_names node. 
+ """ + pending = [names] + while pending: + node = pending.pop() + if node.type == token.NAME: + yield node.value + elif node.type == syms.dotted_name: + yield "".join([ch.value for ch in node.children]) + elif node.type == syms.dotted_as_name: + pending.append(node.children[0]) + elif node.type == syms.dotted_as_names: + pending.extend(node.children[::-2]) + else: + raise AssertionError("unknown node type") + + +class FixImport(fixer_base.BaseFix): + BM_compatible = True + + PATTERN = """ + import_from< 'from' imp=any 'import' ['('] any [')'] > + | + import_name< 'import' imp=any > + """ + + def start_tree(self, tree, name): + super(FixImport, self).start_tree(tree, name) + self.skip = "absolute_import" in tree.future_features + + def transform(self, node, results): + if self.skip: + return + imp = results['imp'] + + if node.type == syms.import_from: + # Some imps are top-level (eg: 'import ham') + # some are first level (eg: 'import ham.eggs') + # some are third level (eg: 'import ham.eggs as spam') + # Hence, the loop + while not hasattr(imp, 'value'): + imp = imp.children[0] + if self.probably_a_local_import(imp.value): + imp.value = "." + imp.value + imp.changed() + else: + have_local = False + have_absolute = False + for mod_name in traverse_imports(imp): + if self.probably_a_local_import(mod_name): + have_local = True + else: + have_absolute = True + if have_absolute: + if have_local: + # We won't handle both sibling and absolute imports in the + # same statement at the moment. + self.warning(node, "absolute and local imports together") + return + + new = FromImport(".", [imp]) + new.prefix = node.prefix + return new + + def probably_a_local_import(self, imp_name): + if imp_name.startswith("."): + # Relative imports are certainly not local imports. + return False + imp_name = imp_name.split(".", 1)[0] + base_path = dirname(self.filename) + base_path = join(base_path, imp_name) + # If there is no __init__.py next to the file its not in a package + # so can't be a relative import. + if not exists(join(dirname(base_path), "__init__.py")): + return False + for ext in [".py", sep, ".pyc", ".so", ".sl", ".pyd"]: + if exists(base_path + ext): + return True + return False diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_imports.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_imports.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_imports.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_imports.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,145 @@ +"""Fix incompatible imports and module references.""" +# Authors: Collin Winter, Nick Edds + +# Local imports +from .. 
import fixer_base +from ..fixer_util import Name, attr_chain + +MAPPING = {'StringIO': 'io', + 'cStringIO': 'io', + 'cPickle': 'pickle', + '__builtin__' : 'builtins', + 'copy_reg': 'copyreg', + 'Queue': 'queue', + 'SocketServer': 'socketserver', + 'ConfigParser': 'configparser', + 'repr': 'reprlib', + 'FileDialog': 'tkinter.filedialog', + 'tkFileDialog': 'tkinter.filedialog', + 'SimpleDialog': 'tkinter.simpledialog', + 'tkSimpleDialog': 'tkinter.simpledialog', + 'tkColorChooser': 'tkinter.colorchooser', + 'tkCommonDialog': 'tkinter.commondialog', + 'Dialog': 'tkinter.dialog', + 'Tkdnd': 'tkinter.dnd', + 'tkFont': 'tkinter.font', + 'tkMessageBox': 'tkinter.messagebox', + 'ScrolledText': 'tkinter.scrolledtext', + 'Tkconstants': 'tkinter.constants', + 'Tix': 'tkinter.tix', + 'ttk': 'tkinter.ttk', + 'Tkinter': 'tkinter', + 'markupbase': '_markupbase', + '_winreg': 'winreg', + 'thread': '_thread', + 'dummy_thread': '_dummy_thread', + # anydbm and whichdb are handled by fix_imports2 + 'dbhash': 'dbm.bsd', + 'dumbdbm': 'dbm.dumb', + 'dbm': 'dbm.ndbm', + 'gdbm': 'dbm.gnu', + 'xmlrpclib': 'xmlrpc.client', + 'DocXMLRPCServer': 'xmlrpc.server', + 'SimpleXMLRPCServer': 'xmlrpc.server', + 'httplib': 'http.client', + 'htmlentitydefs' : 'html.entities', + 'HTMLParser' : 'html.parser', + 'Cookie': 'http.cookies', + 'cookielib': 'http.cookiejar', + 'BaseHTTPServer': 'http.server', + 'SimpleHTTPServer': 'http.server', + 'CGIHTTPServer': 'http.server', + #'test.test_support': 'test.support', + 'commands': 'subprocess', + 'UserString' : 'collections', + 'UserList' : 'collections', + 'urlparse' : 'urllib.parse', + 'robotparser' : 'urllib.robotparser', +} + + +def alternates(members): + return "(" + "|".join(map(repr, members)) + ")" + + +def build_pattern(mapping=MAPPING): + mod_list = ' | '.join(["module_name='%s'" % key for key in mapping]) + bare_names = alternates(mapping.keys()) + + yield """name_import=import_name< 'import' ((%s) | + multiple_imports=dotted_as_names< any* (%s) any* >) > + """ % (mod_list, mod_list) + yield """import_from< 'from' (%s) 'import' ['('] + ( any | import_as_name< any 'as' any > | + import_as_names< any* >) [')'] > + """ % mod_list + yield """import_name< 'import' (dotted_as_name< (%s) 'as' any > | + multiple_imports=dotted_as_names< + any* dotted_as_name< (%s) 'as' any > any* >) > + """ % (mod_list, mod_list) + + # Find usages of module members in code e.g. thread.foo(bar) + yield "power< bare_with_attr=(%s) trailer<'.' any > any* >" % bare_names + + +class FixImports(fixer_base.BaseFix): + + BM_compatible = True + keep_line_order = True + # This is overridden in fix_imports2. + mapping = MAPPING + + # We want to run this fixer late, so fix_import doesn't try to make stdlib + # renames into relative imports. + run_order = 6 + + def build_pattern(self): + return "|".join(build_pattern(self.mapping)) + + def compile_pattern(self): + # We override this, so MAPPING can be pragmatically altered and the + # changes will be reflected in PATTERN. + self.PATTERN = self.build_pattern() + super(FixImports, self).compile_pattern() + + # Don't match the node if it's within another match. + def match(self, node): + match = super(FixImports, self).match + results = match(node) + if results: + # Module usage could be in the trailer of an attribute lookup, so we + # might have nested matches when "bare_with_attr" is present. 
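To make the comment above concrete (an illustrative sketch, not part of the diff; StringIO is one of the MAPPING keys and buf is an invented name), the mapping-driven rename touches both the import and later bare uses of the module:

    import StringIO             ->  import io
    buf = StringIO.StringIO()   ->  buf = io.StringIO()

The attr_chain() walk over the node's parents rejects a match whose ancestor already matches, so nested hits are only kept for the bare_with_attr case.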
+ if "bare_with_attr" not in results and \ + any(match(obj) for obj in attr_chain(node, "parent")): + return False + return results + return False + + def start_tree(self, tree, filename): + super(FixImports, self).start_tree(tree, filename) + self.replace = {} + + def transform(self, node, results): + import_mod = results.get("module_name") + if import_mod: + mod_name = import_mod.value + new_name = self.mapping[mod_name] + import_mod.replace(Name(new_name, prefix=import_mod.prefix)) + if "name_import" in results: + # If it's not a "from x import x, y" or "import x as y" import, + # marked its usage to be replaced. + self.replace[mod_name] = new_name + if "multiple_imports" in results: + # This is a nasty hack to fix multiple imports on a line (e.g., + # "import StringIO, urlparse"). The problem is that I can't + # figure out an easy way to make a pattern recognize the keys of + # MAPPING randomly sprinkled in an import statement. + results = self.match(node) + if results: + self.transform(node, results) + else: + # Replace usage of the module. + bare_name = results["bare_with_attr"][0] + new_name = self.replace.get(bare_name.value) + if new_name: + bare_name.replace(Name(new_name, prefix=bare_name.prefix)) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_imports2.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_imports2.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_imports2.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_imports2.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,16 @@ +"""Fix incompatible imports and module references that must be fixed after +fix_imports.""" +from . import fix_imports + + +MAPPING = { + 'whichdb': 'dbm', + 'anydbm': 'dbm', + } + + +class FixImports2(fix_imports.FixImports): + + run_order = 7 + + mapping = MAPPING diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_input.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_input.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_input.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_input.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,26 @@ +"""Fixer that changes input(...) into eval(input(...)).""" +# Author: Andre Roberge + +# Local imports +from .. import fixer_base +from ..fixer_util import Call, Name +from .. import patcomp + + +context = patcomp.compile_pattern("power< 'eval' trailer< '(' any ')' > >") + + +class FixInput(fixer_base.BaseFix): + BM_compatible = True + PATTERN = """ + power< 'input' args=trailer< '(' [any] ')' > > + """ + + def transform(self, node, results): + # If we're already wrapped in an eval() call, we're done. + if context.match(node.parent.parent): + return + + new = node.clone() + new.prefix = "" + return Call(Name("eval"), [new], prefix=node.prefix) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_intern.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_intern.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_intern.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_intern.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,39 @@ +# Copyright 2006 Georg Brandl. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer for intern(). + +intern(s) -> sys.intern(s)""" + +# Local imports +from .. 
import fixer_base +from ..fixer_util import ImportAndCall, touch_import + + +class FixIntern(fixer_base.BaseFix): + BM_compatible = True + order = "pre" + + PATTERN = """ + power< 'intern' + trailer< lpar='(' + ( not(arglist | argument) any ','> ) + rpar=')' > + after=any* + > + """ + + def transform(self, node, results): + if results: + # I feel like we should be able to express this logic in the + # PATTERN above but I don't know how to do it so... + obj = results['obj'] + if obj: + if (obj.type == self.syms.argument and + obj.children[0].value in {'**', '*'}): + return # Make no change. + names = ('sys', 'intern') + new = ImportAndCall(node, results, names) + touch_import(None, 'sys', node) + return new diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_isinstance.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_isinstance.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_isinstance.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_isinstance.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,52 @@ +# Copyright 2008 Armin Ronacher. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer that cleans up a tuple argument to isinstance after the tokens +in it were fixed. This is mainly used to remove double occurrences of +tokens as a leftover of the long -> int / unicode -> str conversion. + +eg. isinstance(x, (int, long)) -> isinstance(x, (int, int)) + -> isinstance(x, int) +""" + +from .. import fixer_base +from ..fixer_util import token + + +class FixIsinstance(fixer_base.BaseFix): + BM_compatible = True + PATTERN = """ + power< + 'isinstance' + trailer< '(' arglist< any ',' atom< '(' + args=testlist_gexp< any+ > + ')' > > ')' > + > + """ + + run_order = 6 + + def transform(self, node, results): + names_inserted = set() + testlist = results["args"] + args = testlist.children + new_args = [] + iterator = enumerate(args) + for idx, arg in iterator: + if arg.type == token.NAME and arg.value in names_inserted: + if idx < len(args) - 1 and args[idx + 1].type == token.COMMA: + next(iterator) + continue + else: + new_args.append(arg) + if arg.type == token.NAME: + names_inserted.add(arg.value) + if new_args and new_args[-1].type == token.COMMA: + del new_args[-1] + if len(new_args) == 1: + atom = testlist.parent + new_args[0].prefix = atom.prefix + atom.replace(new_args[0]) + else: + args[:] = new_args + node.changed() diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_itertools.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_itertools.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_itertools.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_itertools.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,43 @@ +""" Fixer for itertools.(imap|ifilter|izip) --> (map|filter|zip) and + itertools.ifilterfalse --> itertools.filterfalse (bugs 2360-2363) + + imports from itertools are fixed in fix_itertools_import.py + + If itertools is imported as something else (ie: import itertools as it; + it.izip(spam, eggs)) method calls will not get fixed. + """ + +# Local imports +from .. import fixer_base +from ..fixer_util import Name + +class FixItertools(fixer_base.BaseFix): + BM_compatible = True + it_funcs = "('imap'|'ifilter'|'izip'|'izip_longest'|'ifilterfalse')" + PATTERN = """ + power< it='itertools' + trailer< + dot='.' 
func=%(it_funcs)s > trailer< '(' [any] ')' > > + | + power< func=%(it_funcs)s trailer< '(' [any] ')' > > + """ %(locals()) + + # Needs to be run after fix_(map|zip|filter) + run_order = 6 + + def transform(self, node, results): + prefix = None + func = results['func'][0] + if ('it' in results and + func.value not in ('ifilterfalse', 'izip_longest')): + dot, it = (results['dot'], results['it']) + # Remove the 'itertools' + prefix = it.prefix + it.remove() + # Replace the node which contains ('.', 'function') with the + # function (to be consistent with the second part of the pattern) + dot.remove() + func.parent.replace(func) + + prefix = prefix or func.prefix + func.replace(Name(func.value[1:], prefix=prefix)) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_itertools_imports.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_itertools_imports.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_itertools_imports.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_itertools_imports.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,57 @@ +""" Fixer for imports of itertools.(imap|ifilter|izip|ifilterfalse) """ + +# Local imports +from lib2to3 import fixer_base +from lib2to3.fixer_util import BlankLine, syms, token + + +class FixItertoolsImports(fixer_base.BaseFix): + BM_compatible = True + PATTERN = """ + import_from< 'from' 'itertools' 'import' imports=any > + """ %(locals()) + + def transform(self, node, results): + imports = results['imports'] + if imports.type == syms.import_as_name or not imports.children: + children = [imports] + else: + children = imports.children + for child in children[::2]: + if child.type == token.NAME: + member = child.value + name_node = child + elif child.type == token.STAR: + # Just leave the import as is. + return + else: + assert child.type == syms.import_as_name + name_node = child.children[0] + member_name = name_node.value + if member_name in ('imap', 'izip', 'ifilter'): + child.value = None + child.remove() + elif member_name in ('ifilterfalse', 'izip_longest'): + node.changed() + name_node.value = ('filterfalse' if member_name[1] == 'f' + else 'zip_longest') + + # Make sure the import statement is still sane + children = imports.children[:] or [imports] + remove_comma = True + for child in children: + if remove_comma and child.type == token.COMMA: + child.remove() + else: + remove_comma ^= True + + while children and children[-1].type == token.COMMA: + children.pop().remove() + + # If there are no imports left, just get rid of the entire statement + if (not (imports.children or getattr(imports, 'value', None)) or + imports.parent is None): + p = node.prefix + node = BlankLine() + node.prefix = p + return node diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_long.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_long.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_long.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_long.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,19 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer that turns 'long' into 'int' everywhere. 
+""" + +# Local imports +from lib2to3 import fixer_base +from lib2to3.fixer_util import is_probably_builtin + + +class FixLong(fixer_base.BaseFix): + BM_compatible = True + PATTERN = "'long'" + + def transform(self, node, results): + if is_probably_builtin(node): + node.value = "int" + node.changed() diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_map.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_map.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_map.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_map.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,110 @@ +# Copyright 2007 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer that changes map(F, ...) into list(map(F, ...)) unless there +exists a 'from future_builtins import map' statement in the top-level +namespace. + +As a special case, map(None, X) is changed into list(X). (This is +necessary because the semantics are changed in this case -- the new +map(None, X) is equivalent to [(x,) for x in X].) + +We avoid the transformation (except for the special case mentioned +above) if the map() call is directly contained in iter(<>), list(<>), +tuple(<>), sorted(<>), ...join(<>), or for V in <>:. + +NOTE: This is still not correct if the original code was depending on +map(F, X, Y, ...) to go on until the longest argument is exhausted, +substituting None for missing values -- like zip(), it now stops as +soon as the shortest argument is exhausted. +""" + +# Local imports +from ..pgen2 import token +from .. import fixer_base +from ..fixer_util import Name, ArgList, Call, ListComp, in_special_context +from ..pygram import python_symbols as syms +from ..pytree import Node + + +class FixMap(fixer_base.ConditionalFix): + BM_compatible = True + + PATTERN = """ + map_none=power< + 'map' + trailer< '(' arglist< 'None' ',' arg=any [','] > ')' > + [extra_trailers=trailer*] + > + | + map_lambda=power< + 'map' + trailer< + '(' + arglist< + lambdef< 'lambda' + (fp=NAME | vfpdef< '(' fp=NAME ')'> ) ':' xp=any + > + ',' + it=any + > + ')' + > + [extra_trailers=trailer*] + > + | + power< + 'map' args=trailer< '(' [any] ')' > + [extra_trailers=trailer*] + > + """ + + skip_on = 'future_builtins.map' + + def transform(self, node, results): + if self.should_skip(node): + return + + trailers = [] + if 'extra_trailers' in results: + for t in results['extra_trailers']: + trailers.append(t.clone()) + + if node.parent.type == syms.simple_stmt: + self.warning(node, "You should use a for loop here") + new = node.clone() + new.prefix = "" + new = Call(Name("list"), [new]) + elif "map_lambda" in results: + new = ListComp(results["xp"].clone(), + results["fp"].clone(), + results["it"].clone()) + new = Node(syms.power, [new] + trailers, prefix="") + + else: + if "map_none" in results: + new = results["arg"].clone() + new.prefix = "" + else: + if "args" in results: + args = results["args"] + if args.type == syms.trailer and \ + args.children[1].type == syms.arglist and \ + args.children[1].children[0].type == token.NAME and \ + args.children[1].children[0].value == "None": + self.warning(node, "cannot convert map(None, ...) 
" + "with multiple arguments because map() " + "now truncates to the shortest sequence") + return + + new = Node(syms.power, [Name("map"), args.clone()]) + new.prefix = "" + + if in_special_context(node): + return None + + new = Node(syms.power, [Name("list"), ArgList([new])] + trailers) + new.prefix = "" + + new.prefix = node.prefix + return new diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_metaclass.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_metaclass.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_metaclass.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_metaclass.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,228 @@ +"""Fixer for __metaclass__ = X -> (metaclass=X) methods. + + The various forms of classef (inherits nothing, inherits once, inherits + many) don't parse the same in the CST so we look at ALL classes for + a __metaclass__ and if we find one normalize the inherits to all be + an arglist. + + For one-liner classes ('class X: pass') there is no indent/dedent so + we normalize those into having a suite. + + Moving the __metaclass__ into the classdef can also cause the class + body to be empty so there is some special casing for that as well. + + This fixer also tries very hard to keep original indenting and spacing + in all those corner cases. + +""" +# Author: Jack Diederich + +# Local imports +from .. import fixer_base +from ..pygram import token +from ..fixer_util import syms, Node, Leaf + + +def has_metaclass(parent): + """ we have to check the cls_node without changing it. + There are two possibilities: + 1) clsdef => suite => simple_stmt => expr_stmt => Leaf('__meta') + 2) clsdef => simple_stmt => expr_stmt => Leaf('__meta') + """ + for node in parent.children: + if node.type == syms.suite: + return has_metaclass(node) + elif node.type == syms.simple_stmt and node.children: + expr_node = node.children[0] + if expr_node.type == syms.expr_stmt and expr_node.children: + left_side = expr_node.children[0] + if isinstance(left_side, Leaf) and \ + left_side.value == '__metaclass__': + return True + return False + + +def fixup_parse_tree(cls_node): + """ one-line classes don't get a suite in the parse tree so we add + one to normalize the tree + """ + for node in cls_node.children: + if node.type == syms.suite: + # already in the preferred format, do nothing + return + + # !%@#! one-liners have no suite node, we have to fake one up + for i, node in enumerate(cls_node.children): + if node.type == token.COLON: + break + else: + raise ValueError("No class suite and no ':'!") + + # move everything into a suite node + suite = Node(syms.suite, []) + while cls_node.children[i+1:]: + move_node = cls_node.children[i+1] + suite.append_child(move_node.clone()) + move_node.remove() + cls_node.append_child(suite) + node = suite + + +def fixup_simple_stmt(parent, i, stmt_node): + """ if there is a semi-colon all the parts count as part of the same + simple_stmt. 
We just want the __metaclass__ part so we move + everything after the semi-colon into its own simple_stmt node + """ + for semi_ind, node in enumerate(stmt_node.children): + if node.type == token.SEMI: # *sigh* + break + else: + return + + node.remove() # kill the semicolon + new_expr = Node(syms.expr_stmt, []) + new_stmt = Node(syms.simple_stmt, [new_expr]) + while stmt_node.children[semi_ind:]: + move_node = stmt_node.children[semi_ind] + new_expr.append_child(move_node.clone()) + move_node.remove() + parent.insert_child(i, new_stmt) + new_leaf1 = new_stmt.children[0].children[0] + old_leaf1 = stmt_node.children[0].children[0] + new_leaf1.prefix = old_leaf1.prefix + + +def remove_trailing_newline(node): + if node.children and node.children[-1].type == token.NEWLINE: + node.children[-1].remove() + + +def find_metas(cls_node): + # find the suite node (Mmm, sweet nodes) + for node in cls_node.children: + if node.type == syms.suite: + break + else: + raise ValueError("No class suite!") + + # look for simple_stmt[ expr_stmt[ Leaf('__metaclass__') ] ] + for i, simple_node in list(enumerate(node.children)): + if simple_node.type == syms.simple_stmt and simple_node.children: + expr_node = simple_node.children[0] + if expr_node.type == syms.expr_stmt and expr_node.children: + # Check if the expr_node is a simple assignment. + left_node = expr_node.children[0] + if isinstance(left_node, Leaf) and \ + left_node.value == '__metaclass__': + # We found an assignment to __metaclass__. + fixup_simple_stmt(node, i, simple_node) + remove_trailing_newline(simple_node) + yield (node, i, simple_node) + + +def fixup_indent(suite): + """ If an INDENT is followed by a thing with a prefix then nuke the prefix + Otherwise we get in trouble when removing __metaclass__ at suite start + """ + kids = suite.children[::-1] + # find the first indent + while kids: + node = kids.pop() + if node.type == token.INDENT: + break + + # find the first Leaf + while kids: + node = kids.pop() + if isinstance(node, Leaf) and node.type != token.DEDENT: + if node.prefix: + node.prefix = '' + return + else: + kids.extend(node.children[::-1]) + + +class FixMetaclass(fixer_base.BaseFix): + BM_compatible = True + + PATTERN = """ + classdef + """ + + def transform(self, node, results): + if not has_metaclass(node): + return + + fixup_parse_tree(node) + + # find metaclasses, keep the last one + last_metaclass = None + for suite, i, stmt in find_metas(node): + last_metaclass = stmt + stmt.remove() + + text_type = node.children[0].type # always Leaf(nnn, 'class') + + # figure out what kind of classdef we have + if len(node.children) == 7: + # Node(classdef, ['class', 'name', '(', arglist, ')', ':', suite]) + # 0 1 2 3 4 5 6 + if node.children[3].type == syms.arglist: + arglist = node.children[3] + # Node(classdef, ['class', 'name', '(', 'Parent', ')', ':', suite]) + else: + parent = node.children[3].clone() + arglist = Node(syms.arglist, [parent]) + node.set_child(3, arglist) + elif len(node.children) == 6: + # Node(classdef, ['class', 'name', '(', ')', ':', suite]) + # 0 1 2 3 4 5 + arglist = Node(syms.arglist, []) + node.insert_child(3, arglist) + elif len(node.children) == 4: + # Node(classdef, ['class', 'name', ':', suite]) + # 0 1 2 3 + arglist = Node(syms.arglist, []) + node.insert_child(2, Leaf(token.RPAR, ')')) + node.insert_child(2, arglist) + node.insert_child(2, Leaf(token.LPAR, '(')) + else: + raise ValueError("Unexpected class definition") + + # now stick the metaclass in the arglist + meta_txt = 
last_metaclass.children[0].children[0] + meta_txt.value = 'metaclass' + orig_meta_prefix = meta_txt.prefix + + if arglist.children: + arglist.append_child(Leaf(token.COMMA, ',')) + meta_txt.prefix = ' ' + else: + meta_txt.prefix = '' + + # compact the expression "metaclass = Meta" -> "metaclass=Meta" + expr_stmt = last_metaclass.children[0] + assert expr_stmt.type == syms.expr_stmt + expr_stmt.children[1].prefix = '' + expr_stmt.children[2].prefix = '' + + arglist.append_child(last_metaclass) + + fixup_indent(suite) + + # check for empty suite + if not suite.children: + # one-liner that was just __metaclass_ + suite.remove() + pass_leaf = Leaf(text_type, 'pass') + pass_leaf.prefix = orig_meta_prefix + node.append_child(pass_leaf) + node.append_child(Leaf(token.NEWLINE, '\n')) + + elif len(suite.children) > 1 and \ + (suite.children[-2].type == token.INDENT and + suite.children[-1].type == token.DEDENT): + # there was only one line in the class body and it was __metaclass__ + pass_leaf = Leaf(text_type, 'pass') + suite.insert_child(-1, pass_leaf) + suite.insert_child(-1, Leaf(token.NEWLINE, '\n')) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_methodattrs.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_methodattrs.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_methodattrs.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_methodattrs.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,24 @@ +"""Fix bound method attributes (method.im_? -> method.__?__). +""" +# Author: Christian Heimes + +# Local imports +from .. import fixer_base +from ..fixer_util import Name + +MAP = { + "im_func" : "__func__", + "im_self" : "__self__", + "im_class" : "__self__.__class__" + } + +class FixMethodattrs(fixer_base.BaseFix): + BM_compatible = True + PATTERN = """ + power< any+ trailer< '.' attr=('im_func' | 'im_self' | 'im_class') > any* > + """ + + def transform(self, node, results): + attr = results["attr"][0] + new = MAP[attr.value] + attr.replace(Name(new, prefix=attr.prefix)) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_ne.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_ne.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_ne.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_ne.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,23 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer that turns <> into !=.""" + +# Local imports +from .. import pytree +from ..pgen2 import token +from .. import fixer_base + + +class FixNe(fixer_base.BaseFix): + # This is so simple that we don't need the pattern compiler. 
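A quick way to exercise this fixer (or any of the small ones in this batch) is to drive the refactoring machinery directly; this is a minimal sketch, not part of the packaged sources, and assumes a Python version that still ships lib2to3:

    from lib2to3.refactor import RefactoringTool

    tool = RefactoringTool(["lib2to3.fixes.fix_ne"])
    tree = tool.refactor_string("if a <> b: pass\n", "<example>")
    print(tree)   # -> if a != b: pass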
+ + _accept_type = token.NOTEQUAL + + def match(self, node): + # Override + return node.value == "<>" + + def transform(self, node, results): + new = pytree.Leaf(token.NOTEQUAL, "!=", prefix=node.prefix) + return new diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_next.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_next.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_next.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_next.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,103 @@ +"""Fixer for it.next() -> next(it), per PEP 3114.""" +# Author: Collin Winter + +# Things that currently aren't covered: +# - listcomp "next" names aren't warned +# - "with" statement targets aren't checked + +# Local imports +from ..pgen2 import token +from ..pygram import python_symbols as syms +from .. import fixer_base +from ..fixer_util import Name, Call, find_binding + +bind_warning = "Calls to builtin next() possibly shadowed by global binding" + + +class FixNext(fixer_base.BaseFix): + BM_compatible = True + PATTERN = """ + power< base=any+ trailer< '.' attr='next' > trailer< '(' ')' > > + | + power< head=any+ trailer< '.' attr='next' > not trailer< '(' ')' > > + | + classdef< 'class' any+ ':' + suite< any* + funcdef< 'def' + name='next' + parameters< '(' NAME ')' > any+ > + any* > > + | + global=global_stmt< 'global' any* 'next' any* > + """ + + order = "pre" # Pre-order tree traversal + + def start_tree(self, tree, filename): + super(FixNext, self).start_tree(tree, filename) + + n = find_binding('next', tree) + if n: + self.warning(n, bind_warning) + self.shadowed_next = True + else: + self.shadowed_next = False + + def transform(self, node, results): + assert results + + base = results.get("base") + attr = results.get("attr") + name = results.get("name") + + if base: + if self.shadowed_next: + attr.replace(Name("__next__", prefix=attr.prefix)) + else: + base = [n.clone() for n in base] + base[0].prefix = "" + node.replace(Call(Name("next", prefix=node.prefix), base)) + elif name: + n = Name("__next__", prefix=name.prefix) + name.replace(n) + elif attr: + # We don't do this transformation if we're assigning to "x.next". + # Unfortunately, it doesn't seem possible to do this in PATTERN, + # so it's being done here. + if is_assign_target(node): + head = results["head"] + if "".join([str(n) for n in head]).strip() == '__builtin__': + self.warning(node, bind_warning) + return + attr.replace(Name("__next__")) + elif "global" in results: + self.warning(node, bind_warning) + self.shadowed_next = True + + +### The following functions help test if node is part of an assignment +### target. 
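To illustrate why the assignment-target helpers below are needed (example only, not taken from the diff; it and obj are invented names):

    it.next()        ->  next(it)   # an ordinary call is rewritten
    obj.next = 42    # left alone: rewriting would produce the invalid 'next(obj) = 42'

A next() method defined inside a class is renamed to __next__() instead, per PEP 3114.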
+ +def is_assign_target(node): + assign = find_assign(node) + if assign is None: + return False + + for child in assign.children: + if child.type == token.EQUAL: + return False + elif is_subtree(child, node): + return True + return False + +def find_assign(node): + if node.type == syms.expr_stmt: + return node + if node.type == syms.simple_stmt or node.parent is None: + return None + return find_assign(node.parent) + +def is_subtree(root, node): + if root == node: + return True + return any(is_subtree(c, node) for c in root.children) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_nonzero.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_nonzero.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_nonzero.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_nonzero.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,21 @@ +"""Fixer for __nonzero__ -> __bool__ methods.""" +# Author: Collin Winter + +# Local imports +from .. import fixer_base +from ..fixer_util import Name + +class FixNonzero(fixer_base.BaseFix): + BM_compatible = True + PATTERN = """ + classdef< 'class' any+ ':' + suite< any* + funcdef< 'def' name='__nonzero__' + parameters< '(' NAME ')' > any+ > + any* > > + """ + + def transform(self, node, results): + name = results["name"] + new = Name("__bool__", prefix=name.prefix) + name.replace(new) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_numliterals.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_numliterals.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_numliterals.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_numliterals.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,28 @@ +"""Fixer that turns 1L into 1, 0755 into 0o755. +""" +# Copyright 2007 Georg Brandl. +# Licensed to PSF under a Contributor Agreement. + +# Local imports +from ..pgen2 import token +from .. import fixer_base +from ..fixer_util import Number + + +class FixNumliterals(fixer_base.BaseFix): + # This is so simple that we don't need the pattern compiler. + + _accept_type = token.NUMBER + + def match(self, node): + # Override + return (node.value.startswith("0") or node.value[-1] in "Ll") + + def transform(self, node, results): + val = node.value + if val[-1] in 'Ll': + val = val[:-1] + elif val.startswith('0') and val.isdigit() and len(set(val)) > 1: + val = "0o" + val[1:] + + return Number(val, prefix=node.prefix) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_operator.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_operator.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_operator.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_operator.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,97 @@ +"""Fixer for operator functions. 
+ +operator.isCallable(obj) -> callable(obj) +operator.sequenceIncludes(obj) -> operator.contains(obj) +operator.isSequenceType(obj) -> isinstance(obj, collections.abc.Sequence) +operator.isMappingType(obj) -> isinstance(obj, collections.abc.Mapping) +operator.isNumberType(obj) -> isinstance(obj, numbers.Number) +operator.repeat(obj, n) -> operator.mul(obj, n) +operator.irepeat(obj, n) -> operator.imul(obj, n) +""" + +import collections.abc + +# Local imports +from lib2to3 import fixer_base +from lib2to3.fixer_util import Call, Name, String, touch_import + + +def invocation(s): + def dec(f): + f.invocation = s + return f + return dec + + +class FixOperator(fixer_base.BaseFix): + BM_compatible = True + order = "pre" + + methods = """ + method=('isCallable'|'sequenceIncludes' + |'isSequenceType'|'isMappingType'|'isNumberType' + |'repeat'|'irepeat') + """ + obj = "'(' obj=any ')'" + PATTERN = """ + power< module='operator' + trailer< '.' %(methods)s > trailer< %(obj)s > > + | + power< %(methods)s trailer< %(obj)s > > + """ % dict(methods=methods, obj=obj) + + def transform(self, node, results): + method = self._check_method(node, results) + if method is not None: + return method(node, results) + + @invocation("operator.contains(%s)") + def _sequenceIncludes(self, node, results): + return self._handle_rename(node, results, "contains") + + @invocation("callable(%s)") + def _isCallable(self, node, results): + obj = results["obj"] + return Call(Name("callable"), [obj.clone()], prefix=node.prefix) + + @invocation("operator.mul(%s)") + def _repeat(self, node, results): + return self._handle_rename(node, results, "mul") + + @invocation("operator.imul(%s)") + def _irepeat(self, node, results): + return self._handle_rename(node, results, "imul") + + @invocation("isinstance(%s, collections.abc.Sequence)") + def _isSequenceType(self, node, results): + return self._handle_type2abc(node, results, "collections.abc", "Sequence") + + @invocation("isinstance(%s, collections.abc.Mapping)") + def _isMappingType(self, node, results): + return self._handle_type2abc(node, results, "collections.abc", "Mapping") + + @invocation("isinstance(%s, numbers.Number)") + def _isNumberType(self, node, results): + return self._handle_type2abc(node, results, "numbers", "Number") + + def _handle_rename(self, node, results, name): + method = results["method"][0] + method.value = name + method.changed() + + def _handle_type2abc(self, node, results, module, abc): + touch_import(None, module, node) + obj = results["obj"] + args = [obj.clone(), String(", " + ".".join([module, abc]))] + return Call(Name("isinstance"), args, prefix=node.prefix) + + def _check_method(self, node, results): + method = getattr(self, "_" + results["method"][0].value) + if isinstance(method, collections.abc.Callable): + if "module" in results: + return method + else: + sub = (str(results["obj"]),) + invocation_str = method.invocation % sub + self.warning(node, "You should use '%s' here." 
% invocation_str) + return None diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_paren.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_paren.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_paren.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_paren.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,44 @@ +"""Fixer that adds parentheses where they are required + +This converts ``[x for x in 1, 2]`` to ``[x for x in (1, 2)]``.""" + +# By Taek Joo Kim and Benjamin Peterson + +# Local imports +from .. import fixer_base +from ..fixer_util import LParen, RParen + +# XXX This doesn't support nested for loops like [x for x in 1, 2 for x in 1, 2] +class FixParen(fixer_base.BaseFix): + BM_compatible = True + + PATTERN = """ + atom< ('[' | '(') + (listmaker< any + comp_for< + 'for' NAME 'in' + target=testlist_safe< any (',' any)+ [','] + > + [any] + > + > + | + testlist_gexp< any + comp_for< + 'for' NAME 'in' + target=testlist_safe< any (',' any)+ [','] + > + [any] + > + >) + (']' | ')') > + """ + + def transform(self, node, results): + target = results["target"] + + lparen = LParen() + lparen.prefix = target.prefix + target.prefix = "" # Make it hug the parentheses + target.insert_child(0, lparen) + target.append_child(RParen()) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_print.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_print.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_print.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_print.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,87 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer for print. + +Change: + 'print' into 'print()' + 'print ...' into 'print(...)' + 'print ... ,' into 'print(..., end=" ")' + 'print >>x, ...' into 'print(..., file=x)' + +No changes are applied if print_function is imported from __future__ + +""" + +# Local imports +from .. import patcomp +from .. import pytree +from ..pgen2 import token +from .. import fixer_base +from ..fixer_util import Name, Call, Comma, String + + +parend_expr = patcomp.compile_pattern( + """atom< '(' [atom|STRING|NAME] ')' >""" + ) + + +class FixPrint(fixer_base.BaseFix): + + BM_compatible = True + + PATTERN = """ + simple_stmt< any* bare='print' any* > | print_stmt + """ + + def transform(self, node, results): + assert results + + bare_print = results.get("bare") + + if bare_print: + # Special-case print all by itself + bare_print.replace(Call(Name("print"), [], + prefix=bare_print.prefix)) + return + assert node.children[0] == Name("print") + args = node.children[1:] + if len(args) == 1 and parend_expr.match(args[0]): + # We don't want to keep sticking parens around an + # already-parenthesised expression. + return + + sep = end = file = None + if args and args[-1] == Comma(): + args = args[:-1] + end = " " + if args and args[0] == pytree.Leaf(token.RIGHTSHIFT, ">>"): + assert len(args) >= 2 + file = args[1].clone() + args = args[3:] # Strip a possible comma after the file expression + # Now synthesize a print(args, sep=..., end=..., file=...) node. 
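The synthesized call mentioned in the comment above covers the cases from the module docstring; for example (illustration only, not part of the diff; fh is an invented file object):

    print                  ->  print()
    print x, y             ->  print(x, y)
    print x,               ->  print(x, end=" ")
    print >>fh, x          ->  print(x, file=fh)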
+ l_args = [arg.clone() for arg in args] + if l_args: + l_args[0].prefix = "" + if sep is not None or end is not None or file is not None: + if sep is not None: + self.add_kwarg(l_args, "sep", String(repr(sep))) + if end is not None: + self.add_kwarg(l_args, "end", String(repr(end))) + if file is not None: + self.add_kwarg(l_args, "file", file) + n_stmt = Call(Name("print"), l_args) + n_stmt.prefix = node.prefix + return n_stmt + + def add_kwarg(self, l_nodes, s_kwd, n_expr): + # XXX All this prefix-setting may lose comments (though rarely) + n_expr.prefix = "" + n_argument = pytree.Node(self.syms.argument, + (Name(s_kwd), + pytree.Leaf(token.EQUAL, "="), + n_expr)) + if l_nodes: + l_nodes.append(Comma()) + n_argument.prefix = " " + l_nodes.append(n_argument) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_raise.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_raise.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_raise.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_raise.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,90 @@ +"""Fixer for 'raise E, V, T' + +raise -> raise +raise E -> raise E +raise E, V -> raise E(V) +raise E, V, T -> raise E(V).with_traceback(T) +raise E, None, T -> raise E.with_traceback(T) + +raise (((E, E'), E''), E'''), V -> raise E(V) +raise "foo", V, T -> warns about string exceptions + + +CAVEATS: +1) "raise E, V" will be incorrectly translated if V is an exception + instance. The correct Python 3 idiom is + + raise E from V + + but since we can't detect instance-hood by syntax alone and since + any client code would have to be changed as well, we don't automate + this. +""" +# Author: Collin Winter + +# Local imports +from .. import pytree +from ..pgen2 import token +from .. import fixer_base +from ..fixer_util import Name, Call, Attr, ArgList, is_tuple + +class FixRaise(fixer_base.BaseFix): + + BM_compatible = True + PATTERN = """ + raise_stmt< 'raise' exc=any [',' val=any [',' tb=any]] > + """ + + def transform(self, node, results): + syms = self.syms + + exc = results["exc"].clone() + if exc.type == token.STRING: + msg = "Python 3 does not support string exceptions" + self.cannot_convert(node, msg) + return + + # Python 2 supports + # raise ((((E1, E2), E3), E4), E5), V + # as a synonym for + # raise E1, V + # Since Python 3 will not support this, we recurse down any tuple + # literals, always taking the first element. + if is_tuple(exc): + while is_tuple(exc): + # exc.children[1:-1] is the unparenthesized tuple + # exc.children[1].children[0] is the first element of the tuple + exc = exc.children[1].children[0].clone() + exc.prefix = " " + + if "val" not in results: + # One-argument raise + new = pytree.Node(syms.raise_stmt, [Name("raise"), exc]) + new.prefix = node.prefix + return new + + val = results["val"].clone() + if is_tuple(val): + args = [c.clone() for c in val.children[1:-1]] + else: + val.prefix = "" + args = [val] + + if "tb" in results: + tb = results["tb"].clone() + tb.prefix = "" + + e = exc + # If there's a traceback and None is passed as the value, then don't + # add a call, since the user probably just wants to add a + # traceback. See issue #9661. 
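Concretely, the special case referred to in the comment above behaves like this (illustration only, not part of the diff):

    raise E, V, T       ->  raise E(V).with_traceback(T)
    raise E, None, T    ->  raise E.with_traceback(T)   # no E(V) call is added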
+ if val.type != token.NAME or val.value != "None": + e = Call(exc, args) + with_tb = Attr(e, Name('with_traceback')) + [ArgList([tb])] + new = pytree.Node(syms.simple_stmt, [Name("raise")] + with_tb) + new.prefix = node.prefix + return new + else: + return pytree.Node(syms.raise_stmt, + [Name("raise"), Call(exc, args)], + prefix=node.prefix) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_raw_input.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_raw_input.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_raw_input.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_raw_input.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,17 @@ +"""Fixer that changes raw_input(...) into input(...).""" +# Author: Andre Roberge + +# Local imports +from .. import fixer_base +from ..fixer_util import Name + +class FixRawInput(fixer_base.BaseFix): + + BM_compatible = True + PATTERN = """ + power< name='raw_input' trailer< '(' [any] ')' > any* > + """ + + def transform(self, node, results): + name = results["name"] + name.replace(Name("input", prefix=name.prefix)) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_reduce.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_reduce.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_reduce.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_reduce.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,35 @@ +# Copyright 2008 Armin Ronacher. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer for reduce(). + +Makes sure reduce() is imported from the functools module if reduce is +used in that module. +""" + +from lib2to3 import fixer_base +from lib2to3.fixer_util import touch_import + + + +class FixReduce(fixer_base.BaseFix): + + BM_compatible = True + order = "pre" + + PATTERN = """ + power< 'reduce' + trailer< '(' + arglist< ( + (not(argument<any '=' any>) any ',' + not(argument<any '=' any>) any) | + (not(argument<any '=' any>) any ',' + not(argument<any '=' any>) any ',' + not(argument<any '=' any>) any) + ) > + ')' > + > + """ + + def transform(self, node, results): + touch_import('functools', 'reduce', node) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_reload.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_reload.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_reload.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_reload.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,36 @@ +"""Fixer for reload(). + +reload(s) -> importlib.reload(s)""" + +# Local imports +from .. import fixer_base +from ..fixer_util import ImportAndCall, touch_import + + +class FixReload(fixer_base.BaseFix): + BM_compatible = True + order = "pre" + + PATTERN = """ + power< 'reload' + trailer< lpar='(' + ( not(arglist | argument<any '=' any>) obj=any + | obj=arglist<(not argument<any '=' any>) any ','> ) + rpar=')' > + after=any* + > + """ + + def transform(self, node, results): + if results: + # I feel like we should be able to express this logic in the + # PATTERN above but I don't know how to do it so... + obj = results['obj'] + if obj: + if (obj.type == self.syms.argument and + obj.children[0].value in {'**', '*'}): + return # Make no change.
+ names = ('importlib', 'reload') + new = ImportAndCall(node, results, names) + touch_import(None, 'importlib', node) + return new diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_renames.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_renames.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_renames.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_renames.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,70 @@ +"""Fix incompatible renames + +Fixes: + * sys.maxint -> sys.maxsize +""" +# Author: Christian Heimes +# based on Collin Winter's fix_import + +# Local imports +from .. import fixer_base +from ..fixer_util import Name, attr_chain + +MAPPING = {"sys": {"maxint" : "maxsize"}, + } +LOOKUP = {} + +def alternates(members): + return "(" + "|".join(map(repr, members)) + ")" + + +def build_pattern(): + #bare = set() + for module, replace in list(MAPPING.items()): + for old_attr, new_attr in list(replace.items()): + LOOKUP[(module, old_attr)] = new_attr + #bare.add(module) + #bare.add(old_attr) + #yield """ + # import_name< 'import' (module=%r + # | dotted_as_names< any* module=%r any* >) > + # """ % (module, module) + yield """ + import_from< 'from' module_name=%r 'import' + ( attr_name=%r | import_as_name< attr_name=%r 'as' any >) > + """ % (module, old_attr, old_attr) + yield """ + power< module_name=%r trailer< '.' attr_name=%r > any* > + """ % (module, old_attr) + #yield """bare_name=%s""" % alternates(bare) + + +class FixRenames(fixer_base.BaseFix): + BM_compatible = True + PATTERN = "|".join(build_pattern()) + + order = "pre" # Pre-order tree traversal + + # Don't match the node if it's within another match + def match(self, node): + match = super(FixRenames, self).match + results = match(node) + if results: + if any(match(obj) for obj in attr_chain(node, "parent")): + return False + return results + return False + + #def start_tree(self, tree, filename): + # super(FixRenames, self).start_tree(tree, filename) + # self.replace = {} + + def transform(self, node, results): + mod_name = results.get("module_name") + attr_name = results.get("attr_name") + #bare_name = results.get("bare_name") + #import_mod = results.get("module") + + if mod_name and attr_name: + new_attr = LOOKUP[(mod_name.value, attr_name.value)] + attr_name.replace(Name(new_attr, prefix=attr_name.prefix)) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_repr.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_repr.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_repr.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_repr.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,23 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer that transforms `xyzzy` into repr(xyzzy).""" + +# Local imports +from .. 
import fixer_base +from ..fixer_util import Call, Name, parenthesize + + +class FixRepr(fixer_base.BaseFix): + + BM_compatible = True + PATTERN = """ + atom < '`' expr=any '`' > + """ + + def transform(self, node, results): + expr = results["expr"].clone() + + if expr.type == self.syms.testlist1: + expr = parenthesize(expr) + return Call(Name("repr"), [expr], prefix=node.prefix) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_set_literal.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_set_literal.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_set_literal.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_set_literal.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,53 @@ +""" +Optional fixer to transform set() calls to set literals. +""" + +# Author: Benjamin Peterson + +from lib2to3 import fixer_base, pytree +from lib2to3.fixer_util import token, syms + + + +class FixSetLiteral(fixer_base.BaseFix): + + BM_compatible = True + explicit = True + + PATTERN = """power< 'set' trailer< '(' + (atom=atom< '[' (items=listmaker< any ((',' any)* [',']) > + | + single=any) ']' > + | + atom< '(' items=testlist_gexp< any ((',' any)* [',']) > ')' > + ) + ')' > > + """ + + def transform(self, node, results): + single = results.get("single") + if single: + # Make a fake listmaker + fake = pytree.Node(syms.listmaker, [single.clone()]) + single.replace(fake) + items = fake + else: + items = results["items"] + + # Build the contents of the literal + literal = [pytree.Leaf(token.LBRACE, "{")] + literal.extend(n.clone() for n in items.children) + literal.append(pytree.Leaf(token.RBRACE, "}")) + # Set the prefix of the right brace to that of the ')' or ']' + literal[-1].prefix = items.next_sibling.prefix + maker = pytree.Node(syms.dictsetmaker, literal) + maker.prefix = node.prefix + + # If the original was a one tuple, we need to remove the extra comma. + if len(maker.children) == 4: + n = maker.children[2] + n.remove() + maker.children[-1].prefix = n.prefix + + # Finally, replace the set call with our shiny new literal. + return maker diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_standarderror.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_standarderror.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_standarderror.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_standarderror.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,18 @@ +# Copyright 2007 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer for StandardError -> Exception.""" + +# Local imports +from .. 
import fixer_base +from ..fixer_util import Name + + +class FixStandarderror(fixer_base.BaseFix): + BM_compatible = True + PATTERN = """ + 'StandardError' + """ + + def transform(self, node, results): + return Name("Exception", prefix=node.prefix) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_sys_exc.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_sys_exc.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_sys_exc.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_sys_exc.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,30 @@ +"""Fixer for sys.exc_{type, value, traceback} + +sys.exc_type -> sys.exc_info()[0] +sys.exc_value -> sys.exc_info()[1] +sys.exc_traceback -> sys.exc_info()[2] +""" + +# By Jeff Balogh and Benjamin Peterson + +# Local imports +from .. import fixer_base +from ..fixer_util import Attr, Call, Name, Number, Subscript, Node, syms + +class FixSysExc(fixer_base.BaseFix): + # This order matches the ordering of sys.exc_info(). + exc_info = ["exc_type", "exc_value", "exc_traceback"] + BM_compatible = True + PATTERN = """ + power< 'sys' trailer< dot='.' attribute=(%s) > > + """ % '|'.join("'%s'" % e for e in exc_info) + + def transform(self, node, results): + sys_attr = results["attribute"][0] + index = Number(self.exc_info.index(sys_attr.value)) + + call = Call(Name("exc_info"), prefix=sys_attr.prefix) + attr = Attr(Name("sys"), call) + attr[1].children[0].prefix = results["dot"].prefix + attr.append(Subscript(index)) + return Node(syms.power, attr, prefix=node.prefix) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_throw.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_throw.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_throw.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_throw.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,56 @@ +"""Fixer for generator.throw(E, V, T). + +g.throw(E) -> g.throw(E) +g.throw(E, V) -> g.throw(E(V)) +g.throw(E, V, T) -> g.throw(E(V).with_traceback(T)) + +g.throw("foo"[, V[, T]]) will warn about string exceptions.""" +# Author: Collin Winter + +# Local imports +from .. import pytree +from ..pgen2 import token +from .. import fixer_base +from ..fixer_util import Name, Call, ArgList, Attr, is_tuple + +class FixThrow(fixer_base.BaseFix): + BM_compatible = True + PATTERN = """ + power< any trailer< '.' 'throw' > + trailer< '(' args=arglist< exc=any ',' val=any [',' tb=any] > ')' > + > + | + power< any trailer< '.' 
'throw' > trailer< '(' exc=any ')' > > + """ + + def transform(self, node, results): + syms = self.syms + + exc = results["exc"].clone() + if exc.type is token.STRING: + self.cannot_convert(node, "Python 3 does not support string exceptions") + return + + # Leave "g.throw(E)" alone + val = results.get("val") + if val is None: + return + + val = val.clone() + if is_tuple(val): + args = [c.clone() for c in val.children[1:-1]] + else: + val.prefix = "" + args = [val] + + throw_args = results["args"] + + if "tb" in results: + tb = results["tb"].clone() + tb.prefix = "" + + e = Call(exc, args) + with_tb = Attr(e, Name('with_traceback')) + [ArgList([tb])] + throw_args.replace(pytree.Node(syms.power, with_tb)) + else: + throw_args.replace(Call(exc, args)) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_tuple_params.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_tuple_params.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_tuple_params.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_tuple_params.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,175 @@ +"""Fixer for function definitions with tuple parameters. + +def func(((a, b), c), d): + ... + + -> + +def func(x, d): + ((a, b), c) = x + ... + +It will also support lambdas: + + lambda (x, y): x + y -> lambda t: t[0] + t[1] + + # The parens are a syntax error in Python 3 + lambda (x): x + y -> lambda x: x + y +""" +# Author: Collin Winter + +# Local imports +from .. import pytree +from ..pgen2 import token +from .. import fixer_base +from ..fixer_util import Assign, Name, Newline, Number, Subscript, syms + +def is_docstring(stmt): + return isinstance(stmt, pytree.Node) and \ + stmt.children[0].type == token.STRING + +class FixTupleParams(fixer_base.BaseFix): + run_order = 4 #use a lower order since lambda is part of other + #patterns + BM_compatible = True + + PATTERN = """ + funcdef< 'def' any parameters< '(' args=any ')' > + ['->' any] ':' suite=any+ > + | + lambda= + lambdef< 'lambda' args=vfpdef< '(' inner=any ')' > + ':' body=any + > + """ + + def transform(self, node, results): + if "lambda" in results: + return self.transform_lambda(node, results) + + new_lines = [] + suite = results["suite"] + args = results["args"] + # This crap is so "def foo(...): x = 5; y = 7" is handled correctly. + # TODO(cwinter): suite-cleanup + if suite[0].children[1].type == token.INDENT: + start = 2 + indent = suite[0].children[1].value + end = Newline() + else: + start = 0 + indent = "; " + end = pytree.Leaf(token.INDENT, "") + + # We need access to self for new_name(), and making this a method + # doesn't feel right. Closing over self and new_lines makes the + # code below cleaner. + def handle_tuple(tuple_arg, add_prefix=False): + n = Name(self.new_name()) + arg = tuple_arg.clone() + arg.prefix = "" + stmt = Assign(arg, n.clone()) + if add_prefix: + n.prefix = " " + tuple_arg.replace(n) + new_lines.append(pytree.Node(syms.simple_stmt, + [stmt, end.clone()])) + + if args.type == syms.tfpdef: + handle_tuple(args) + elif args.type == syms.typedargslist: + for i, arg in enumerate(args.children): + if arg.type == syms.tfpdef: + # Without add_prefix, the emitted code is correct, + # just ugly. + handle_tuple(arg, add_prefix=(i > 0)) + + if not new_lines: + return + + # This isn't strictly necessary, but it plays nicely with other fixers. 
+ # TODO(cwinter) get rid of this when children becomes a smart list + for line in new_lines: + line.parent = suite[0] + + # TODO(cwinter) suite-cleanup + after = start + if start == 0: + new_lines[0].prefix = " " + elif is_docstring(suite[0].children[start]): + new_lines[0].prefix = indent + after = start + 1 + + for line in new_lines: + line.parent = suite[0] + suite[0].children[after:after] = new_lines + for i in range(after+1, after+len(new_lines)+1): + suite[0].children[i].prefix = indent + suite[0].changed() + + def transform_lambda(self, node, results): + args = results["args"] + body = results["body"] + inner = simplify_args(results["inner"]) + + # Replace lambda ((((x)))): x with lambda x: x + if inner.type == token.NAME: + inner = inner.clone() + inner.prefix = " " + args.replace(inner) + return + + params = find_params(args) + to_index = map_to_index(params) + tup_name = self.new_name(tuple_name(params)) + + new_param = Name(tup_name, prefix=" ") + args.replace(new_param.clone()) + for n in body.post_order(): + if n.type == token.NAME and n.value in to_index: + subscripts = [c.clone() for c in to_index[n.value]] + new = pytree.Node(syms.power, + [new_param.clone()] + subscripts) + new.prefix = n.prefix + n.replace(new) + + +### Helper functions for transform_lambda() + +def simplify_args(node): + if node.type in (syms.vfplist, token.NAME): + return node + elif node.type == syms.vfpdef: + # These look like vfpdef< '(' x ')' > where x is NAME + # or another vfpdef instance (leading to recursion). + while node.type == syms.vfpdef: + node = node.children[1] + return node + raise RuntimeError("Received unexpected node %s" % node) + +def find_params(node): + if node.type == syms.vfpdef: + return find_params(node.children[1]) + elif node.type == token.NAME: + return node.value + return [find_params(c) for c in node.children if c.type != token.COMMA] + +def map_to_index(param_list, prefix=[], d=None): + if d is None: + d = {} + for i, obj in enumerate(param_list): + trailer = [Subscript(Number(str(i)))] + if isinstance(obj, list): + map_to_index(obj, trailer, d=d) + else: + d[obj] = prefix + trailer + return d + +def tuple_name(param_list): + l = [] + for obj in param_list: + if isinstance(obj, list): + l.append(tuple_name(obj)) + else: + l.append(obj) + return "_".join(l) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_types.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_types.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_types.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_types.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,61 @@ +# Copyright 2007 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer for removing uses of the types module. + +These work for only the known names in the types module. The forms above +can include types. or not. ie, It is assumed the module is imported either as: + + import types + from types import ... # either * or specific types + +The import statements are not modified. + +There should be another fixer that handles at least the following constants: + + type([]) -> list + type(()) -> tuple + type('') -> str + +""" + +# Local imports +from .. 
import fixer_base +from ..fixer_util import Name + +_TYPE_MAPPING = { + 'BooleanType' : 'bool', + 'BufferType' : 'memoryview', + 'ClassType' : 'type', + 'ComplexType' : 'complex', + 'DictType': 'dict', + 'DictionaryType' : 'dict', + 'EllipsisType' : 'type(Ellipsis)', + #'FileType' : 'io.IOBase', + 'FloatType': 'float', + 'IntType': 'int', + 'ListType': 'list', + 'LongType': 'int', + 'ObjectType' : 'object', + 'NoneType': 'type(None)', + 'NotImplementedType' : 'type(NotImplemented)', + 'SliceType' : 'slice', + 'StringType': 'bytes', # XXX ? + 'StringTypes' : '(str,)', # XXX ? + 'TupleType': 'tuple', + 'TypeType' : 'type', + 'UnicodeType': 'str', + 'XRangeType' : 'range', + } + +_pats = ["power< 'types' trailer< '.' name='%s' > >" % t for t in _TYPE_MAPPING] + +class FixTypes(fixer_base.BaseFix): + BM_compatible = True + PATTERN = '|'.join(_pats) + + def transform(self, node, results): + new_value = _TYPE_MAPPING.get(results["name"].value) + if new_value: + return Name(new_value, prefix=node.prefix) + return None diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_unicode.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_unicode.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_unicode.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_unicode.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,42 @@ +r"""Fixer for unicode. + +* Changes unicode to str and unichr to chr. + +* If "...\u..." is not unicode literal change it into "...\\u...". + +* Change u"..." into "...". + +""" + +from ..pgen2 import token +from .. import fixer_base + +_mapping = {"unichr" : "chr", "unicode" : "str"} + +class FixUnicode(fixer_base.BaseFix): + BM_compatible = True + PATTERN = "STRING | 'unicode' | 'unichr'" + + def start_tree(self, tree, filename): + super(FixUnicode, self).start_tree(tree, filename) + self.unicode_literals = 'unicode_literals' in tree.future_features + + def transform(self, node, results): + if node.type == token.NAME: + new = node.clone() + new.value = _mapping[node.value] + return new + elif node.type == token.STRING: + val = node.value + if not self.unicode_literals and val[0] in '\'"' and '\\' in val: + val = r'\\'.join([ + v.replace('\\u', r'\\u').replace('\\U', r'\\U') + for v in val.split(r'\\') + ]) + if val[0] in 'uU': + val = val[1:] + if val == node.value: + return node + new = node.clone() + new.value = val + return new diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_urllib.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_urllib.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_urllib.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_urllib.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,196 @@ +"""Fix changes imports of urllib which are now incompatible. + This is rather similar to fix_imports, but because of the more + complex nature of the fixing for urllib, it has its own fixer. 
+""" +# Author: Nick Edds + +# Local imports +from lib2to3.fixes.fix_imports import alternates, FixImports +from lib2to3.fixer_util import (Name, Comma, FromImport, Newline, + find_indentation, Node, syms) + +MAPPING = {"urllib": [ + ("urllib.request", + ["URLopener", "FancyURLopener", "urlretrieve", + "_urlopener", "urlopen", "urlcleanup", + "pathname2url", "url2pathname", "getproxies"]), + ("urllib.parse", + ["quote", "quote_plus", "unquote", "unquote_plus", + "urlencode", "splitattr", "splithost", "splitnport", + "splitpasswd", "splitport", "splitquery", "splittag", + "splittype", "splituser", "splitvalue", ]), + ("urllib.error", + ["ContentTooShortError"])], + "urllib2" : [ + ("urllib.request", + ["urlopen", "install_opener", "build_opener", + "Request", "OpenerDirector", "BaseHandler", + "HTTPDefaultErrorHandler", "HTTPRedirectHandler", + "HTTPCookieProcessor", "ProxyHandler", + "HTTPPasswordMgr", + "HTTPPasswordMgrWithDefaultRealm", + "AbstractBasicAuthHandler", + "HTTPBasicAuthHandler", "ProxyBasicAuthHandler", + "AbstractDigestAuthHandler", + "HTTPDigestAuthHandler", "ProxyDigestAuthHandler", + "HTTPHandler", "HTTPSHandler", "FileHandler", + "FTPHandler", "CacheFTPHandler", + "UnknownHandler"]), + ("urllib.error", + ["URLError", "HTTPError"]), + ] +} + +# Duplicate the url parsing functions for urllib2. +MAPPING["urllib2"].append(MAPPING["urllib"][1]) + + +def build_pattern(): + bare = set() + for old_module, changes in MAPPING.items(): + for change in changes: + new_module, members = change + members = alternates(members) + yield """import_name< 'import' (module=%r + | dotted_as_names< any* module=%r any* >) > + """ % (old_module, old_module) + yield """import_from< 'from' mod_member=%r 'import' + ( member=%s | import_as_name< member=%s 'as' any > | + import_as_names< members=any* >) > + """ % (old_module, members, members) + yield """import_from< 'from' module_star=%r 'import' star='*' > + """ % old_module + yield """import_name< 'import' + dotted_as_name< module_as=%r 'as' any > > + """ % old_module + # bare_with_attr has a special significance for FixImports.match(). + yield """power< bare_with_attr=%r trailer< '.' member=%s > any* > + """ % (old_module, members) + + +class FixUrllib(FixImports): + + def build_pattern(self): + return "|".join(build_pattern()) + + def transform_import(self, node, results): + """Transform for the basic import case. Replaces the old + import name with a comma separated list of its + replacements. + """ + import_mod = results.get("module") + pref = import_mod.prefix + + names = [] + + # create a Node list of the replacement modules + for name in MAPPING[import_mod.value][:-1]: + names.extend([Name(name[0], prefix=pref), Comma()]) + names.append(Name(MAPPING[import_mod.value][-1][0], prefix=pref)) + import_mod.replace(names) + + def transform_member(self, node, results): + """Transform for imports of specific module elements. Replaces + the module to be imported from with the appropriate new + module. 
+ """ + mod_member = results.get("mod_member") + pref = mod_member.prefix + member = results.get("member") + + # Simple case with only a single member being imported + if member: + # this may be a list of length one, or just a node + if isinstance(member, list): + member = member[0] + new_name = None + for change in MAPPING[mod_member.value]: + if member.value in change[1]: + new_name = change[0] + break + if new_name: + mod_member.replace(Name(new_name, prefix=pref)) + else: + self.cannot_convert(node, "This is an invalid module element") + + # Multiple members being imported + else: + # a dictionary for replacements, order matters + modules = [] + mod_dict = {} + members = results["members"] + for member in members: + # we only care about the actual members + if member.type == syms.import_as_name: + as_name = member.children[2].value + member_name = member.children[0].value + else: + member_name = member.value + as_name = None + if member_name != ",": + for change in MAPPING[mod_member.value]: + if member_name in change[1]: + if change[0] not in mod_dict: + modules.append(change[0]) + mod_dict.setdefault(change[0], []).append(member) + + new_nodes = [] + indentation = find_indentation(node) + first = True + def handle_name(name, prefix): + if name.type == syms.import_as_name: + kids = [Name(name.children[0].value, prefix=prefix), + name.children[1].clone(), + name.children[2].clone()] + return [Node(syms.import_as_name, kids)] + return [Name(name.value, prefix=prefix)] + for module in modules: + elts = mod_dict[module] + names = [] + for elt in elts[:-1]: + names.extend(handle_name(elt, pref)) + names.append(Comma()) + names.extend(handle_name(elts[-1], pref)) + new = FromImport(module, names) + if not first or node.parent.prefix.endswith(indentation): + new.prefix = indentation + new_nodes.append(new) + first = False + if new_nodes: + nodes = [] + for new_node in new_nodes[:-1]: + nodes.extend([new_node, Newline()]) + nodes.append(new_nodes[-1]) + node.replace(nodes) + else: + self.cannot_convert(node, "All module elements are invalid") + + def transform_dot(self, node, results): + """Transform for calls to module members in code.""" + module_dot = results.get("bare_with_attr") + member = results.get("member") + new_name = None + if isinstance(member, list): + member = member[0] + for change in MAPPING[module_dot.value]: + if member.value in change[1]: + new_name = change[0] + break + if new_name: + module_dot.replace(Name(new_name, + prefix=module_dot.prefix)) + else: + self.cannot_convert(node, "This is an invalid module element") + + def transform(self, node, results): + if results.get("module"): + self.transform_import(node, results) + elif results.get("mod_member"): + self.transform_member(node, results) + elif results.get("bare_with_attr"): + self.transform_dot(node, results) + # Renaming and star imports are not supported for these modules. + elif results.get("module_star"): + self.cannot_convert(node, "Cannot handle star imports.") + elif results.get("module_as"): + self.cannot_convert(node, "This module is now multiple modules") diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_ws_comma.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_ws_comma.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_ws_comma.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_ws_comma.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,39 @@ +"""Fixer that changes 'a ,b' into 'a, b'. 
+ +This also changes '{a :b}' into '{a: b}', but does not touch other +uses of colons. It does not touch other uses of whitespace. + +""" + +from .. import pytree +from ..pgen2 import token +from .. import fixer_base + +class FixWsComma(fixer_base.BaseFix): + + explicit = True # The user must ask for this fixers + + PATTERN = """ + any<(not(',') any)+ ',' ((not(',') any)+ ',')* [not(',') any]> + """ + + COMMA = pytree.Leaf(token.COMMA, ",") + COLON = pytree.Leaf(token.COLON, ":") + SEPS = (COMMA, COLON) + + def transform(self, node, results): + new = node.clone() + comma = False + for child in new.children: + if child in self.SEPS: + prefix = child.prefix + if prefix.isspace() and "\n" not in prefix: + child.prefix = "" + comma = True + else: + if comma: + prefix = child.prefix + if not prefix: + child.prefix = " " + comma = False + return new diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_xrange.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_xrange.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_xrange.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_xrange.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,73 @@ +# Copyright 2007 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer that changes xrange(...) into range(...).""" + +# Local imports +from .. import fixer_base +from ..fixer_util import Name, Call, consuming_calls +from .. import patcomp + + +class FixXrange(fixer_base.BaseFix): + BM_compatible = True + PATTERN = """ + power< + (name='range'|name='xrange') trailer< '(' args=any ')' > + rest=any* > + """ + + def start_tree(self, tree, filename): + super(FixXrange, self).start_tree(tree, filename) + self.transformed_xranges = set() + + def finish_tree(self, tree, filename): + self.transformed_xranges = None + + def transform(self, node, results): + name = results["name"] + if name.value == "xrange": + return self.transform_xrange(node, results) + elif name.value == "range": + return self.transform_range(node, results) + else: + raise ValueError(repr(name)) + + def transform_xrange(self, node, results): + name = results["name"] + name.replace(Name("range", prefix=name.prefix)) + # This prevents the new range call from being wrapped in a list later. + self.transformed_xranges.add(id(node)) + + def transform_range(self, node, results): + if (id(node) not in self.transformed_xranges and + not self.in_special_context(node)): + range_call = Call(Name("range"), [results["args"].clone()]) + # Encase the range call in list(). + list_call = Call(Name("list"), [range_call], + prefix=node.prefix) + # Put things that were after the range() call after the list call. + for n in results["rest"]: + list_call.append_child(n) + return list_call + + P1 = "power< func=NAME trailer< '(' node=any ')' > any* >" + p1 = patcomp.compile_pattern(P1) + + P2 = """for_stmt< 'for' any 'in' node=any ':' any* > + | comp_for< 'for' any 'in' node=any any* > + | comparison< any 'in' node=any any*> + """ + p2 = patcomp.compile_pattern(P2) + + def in_special_context(self, node): + if node.parent is None: + return False + results = {} + if (node.parent.parent is not None and + self.p1.match(node.parent.parent, results) and + results["node"] is node): + # list(d.keys()) -> list(d.keys()), etc. + return results["func"].value in consuming_calls + # for ... in d.iterkeys() -> for ... in d.keys(), etc. 
+ return self.p2.match(node.parent, results) and results["node"] is node diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_xreadlines.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_xreadlines.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_xreadlines.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_xreadlines.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,25 @@ +"""Fix "for x in f.xreadlines()" -> "for x in f". + +This fixer will also convert g(f.xreadlines) into g(f.__iter__).""" +# Author: Collin Winter + +# Local imports +from .. import fixer_base +from ..fixer_util import Name + + +class FixXreadlines(fixer_base.BaseFix): + BM_compatible = True + PATTERN = """ + power< call=any+ trailer< '.' 'xreadlines' > trailer< '(' ')' > > + | + power< any+ trailer< '.' no_call='xreadlines' > > + """ + + def transform(self, node, results): + no_call = results.get("no_call") + + if no_call: + no_call.replace(Name("__iter__", prefix=no_call.prefix)) + else: + node.replace([x.clone() for x in results["call"]]) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_zip.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_zip.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/fixes/fix_zip.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/fixes/fix_zip.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,46 @@ +""" +Fixer that changes zip(seq0, seq1, ...) into list(zip(seq0, seq1, ...) +unless there exists a 'from future_builtins import zip' statement in the +top-level namespace. + +We avoid the transformation if the zip() call is directly contained in +iter(<>), list(<>), tuple(<>), sorted(<>), ...join(<>), or for V in <>:. +""" + +# Local imports +from .. import fixer_base +from ..pytree import Node +from ..pygram import python_symbols as syms +from ..fixer_util import Name, ArgList, in_special_context + + +class FixZip(fixer_base.ConditionalFix): + + BM_compatible = True + PATTERN = """ + power< 'zip' args=trailer< '(' [any] ')' > [trailers=trailer*] + > + """ + + skip_on = "future_builtins.zip" + + def transform(self, node, results): + if self.should_skip(node): + return + + if in_special_context(node): + return None + + args = results['args'].clone() + args.prefix = "" + + trailers = [] + if 'trailers' in results: + trailers = [n.clone() for n in results['trailers']] + for n in trailers: + n.prefix = "" + + new = Node(syms.power, [Name("zip"), args], prefix="") + new = Node(syms.power, [Name("list"), ArgList([new])] + trailers) + new.prefix = node.prefix + return new diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/main.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/main.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/main.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/main.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,273 @@ +""" +Main program for 2to3. +""" + +from __future__ import with_statement, print_function + +import sys +import os +import difflib +import logging +import shutil +import optparse + +from . 
import refactor + + +def diff_texts(a, b, filename): + """Return a unified diff of two strings.""" + a = a.splitlines() + b = b.splitlines() + return difflib.unified_diff(a, b, filename, filename, + "(original)", "(refactored)", + lineterm="") + + +class StdoutRefactoringTool(refactor.MultiprocessRefactoringTool): + """ + A refactoring tool that can avoid overwriting its input files. + Prints output to stdout. + + Output files can optionally be written to a different directory and or + have an extra file suffix appended to their name for use in situations + where you do not want to replace the input files. + """ + + def __init__(self, fixers, options, explicit, nobackups, show_diffs, + input_base_dir='', output_dir='', append_suffix=''): + """ + Args: + fixers: A list of fixers to import. + options: A dict with RefactoringTool configuration. + explicit: A list of fixers to run even if they are explicit. + nobackups: If true no backup '.bak' files will be created for those + files that are being refactored. + show_diffs: Should diffs of the refactoring be printed to stdout? + input_base_dir: The base directory for all input files. This class + will strip this path prefix off of filenames before substituting + it with output_dir. Only meaningful if output_dir is supplied. + All files processed by refactor() must start with this path. + output_dir: If supplied, all converted files will be written into + this directory tree instead of input_base_dir. + append_suffix: If supplied, all files output by this tool will have + this appended to their filename. Useful for changing .py to + .py3 for example by passing append_suffix='3'. + """ + self.nobackups = nobackups + self.show_diffs = show_diffs + if input_base_dir and not input_base_dir.endswith(os.sep): + input_base_dir += os.sep + self._input_base_dir = input_base_dir + self._output_dir = output_dir + self._append_suffix = append_suffix + super(StdoutRefactoringTool, self).__init__(fixers, options, explicit) + + def log_error(self, msg, *args, **kwargs): + self.errors.append((msg, args, kwargs)) + self.logger.error(msg, *args, **kwargs) + + def write_file(self, new_text, filename, old_text, encoding): + orig_filename = filename + if self._output_dir: + if filename.startswith(self._input_base_dir): + filename = os.path.join(self._output_dir, + filename[len(self._input_base_dir):]) + else: + raise ValueError('filename %s does not start with the ' + 'input_base_dir %s' % ( + filename, self._input_base_dir)) + if self._append_suffix: + filename += self._append_suffix + if orig_filename != filename: + output_dir = os.path.dirname(filename) + if not os.path.isdir(output_dir) and output_dir: + os.makedirs(output_dir) + self.log_message('Writing converted %s to %s.', orig_filename, + filename) + if not self.nobackups: + # Make backup + backup = filename + ".bak" + if os.path.lexists(backup): + try: + os.remove(backup) + except OSError: + self.log_message("Can't remove backup %s", backup) + try: + os.rename(filename, backup) + except OSError: + self.log_message("Can't rename %s to %s", filename, backup) + # Actually write the new file + write = super(StdoutRefactoringTool, self).write_file + write(new_text, filename, old_text, encoding) + if not self.nobackups: + shutil.copymode(backup, filename) + if orig_filename != filename: + # Preserve the file mode in the new output directory. 
+ shutil.copymode(orig_filename, filename) + + def print_output(self, old, new, filename, equal): + if equal: + self.log_message("No changes to %s", filename) + else: + self.log_message("Refactored %s", filename) + if self.show_diffs: + diff_lines = diff_texts(old, new, filename) + try: + if self.output_lock is not None: + with self.output_lock: + for line in diff_lines: + print(line) + sys.stdout.flush() + else: + for line in diff_lines: + print(line) + except UnicodeEncodeError: + warn("couldn't encode %s's diff for your terminal" % + (filename,)) + return + +def warn(msg): + print("WARNING: %s" % (msg,), file=sys.stderr) + + +def main(fixer_pkg, args=None): + """Main program. + + Args: + fixer_pkg: the name of a package where the fixers are located. + args: optional; a list of command line arguments. If omitted, + sys.argv[1:] is used. + + Returns a suggested exit status (0, 1, 2). + """ + # Set up option parser + parser = optparse.OptionParser(usage="2to3 [options] file|dir ...") + parser.add_option("-d", "--doctests_only", action="store_true", + help="Fix up doctests only") + parser.add_option("-f", "--fix", action="append", default=[], + help="Each FIX specifies a transformation; default: all") + parser.add_option("-j", "--processes", action="store", default=1, + type="int", help="Run 2to3 concurrently") + parser.add_option("-x", "--nofix", action="append", default=[], + help="Prevent a transformation from being run") + parser.add_option("-l", "--list-fixes", action="store_true", + help="List available transformations") + parser.add_option("-p", "--print-function", action="store_true", + help="Modify the grammar so that print() is a function") + parser.add_option("-e", "--exec-function", action="store_true", + help="Modify the grammar so that exec() is a function") + parser.add_option("-v", "--verbose", action="store_true", + help="More verbose logging") + parser.add_option("--no-diffs", action="store_true", + help="Don't show diffs of the refactoring") + parser.add_option("-w", "--write", action="store_true", + help="Write back modified files") + parser.add_option("-n", "--nobackups", action="store_true", default=False, + help="Don't write backups for modified files") + parser.add_option("-o", "--output-dir", action="store", type="str", + default="", help="Put output files in this directory " + "instead of overwriting the input files. Requires -n.") + parser.add_option("-W", "--write-unchanged-files", action="store_true", + help="Also write files even if no changes were required" + " (useful with --output-dir); implies -w.") + parser.add_option("--add-suffix", action="store", type="str", default="", + help="Append this string to all output filenames." + " Requires -n if non-empty. " + "ex: --add-suffix='3' will generate .py3 files.") + + # Parse command line arguments + refactor_stdin = False + flags = {} + options, args = parser.parse_args(args) + if options.write_unchanged_files: + flags["write_unchanged_files"] = True + if not options.write: + warn("--write-unchanged-files/-W implies -w.") + options.write = True + # If we allowed these, the original files would be renamed to backup names + # but not replaced. 
+ if options.output_dir and not options.nobackups: + parser.error("Can't use --output-dir/-o without -n.") + if options.add_suffix and not options.nobackups: + parser.error("Can't use --add-suffix without -n.") + + if not options.write and options.no_diffs: + warn("not writing files and not printing diffs; that's not very useful") + if not options.write and options.nobackups: + parser.error("Can't use -n without -w") + if options.list_fixes: + print("Available transformations for the -f/--fix option:") + for fixname in refactor.get_all_fix_names(fixer_pkg): + print(fixname) + if not args: + return 0 + if not args: + print("At least one file or directory argument required.", file=sys.stderr) + print("Use --help to show usage.", file=sys.stderr) + return 2 + if "-" in args: + refactor_stdin = True + if options.write: + print("Can't write to stdin.", file=sys.stderr) + return 2 + if options.print_function: + flags["print_function"] = True + + if options.exec_function: + flags["exec_function"] = True + + # Set up logging handler + level = logging.DEBUG if options.verbose else logging.INFO + logging.basicConfig(format='%(name)s: %(message)s', level=level) + logger = logging.getLogger('lib2to3.main') + + # Initialize the refactoring tool + avail_fixes = set(refactor.get_fixers_from_package(fixer_pkg)) + unwanted_fixes = set(fixer_pkg + ".fix_" + fix for fix in options.nofix) + explicit = set() + if options.fix: + all_present = False + for fix in options.fix: + if fix == "all": + all_present = True + else: + explicit.add(fixer_pkg + ".fix_" + fix) + requested = avail_fixes.union(explicit) if all_present else explicit + else: + requested = avail_fixes.union(explicit) + fixer_names = requested.difference(unwanted_fixes) + input_base_dir = os.path.commonprefix(args) + if (input_base_dir and not input_base_dir.endswith(os.sep) + and not os.path.isdir(input_base_dir)): + # One or more similar names were passed, their directory is the base. + # os.path.commonprefix() is ignorant of path elements, this corrects + # for that weird API. + input_base_dir = os.path.dirname(input_base_dir) + if options.output_dir: + input_base_dir = input_base_dir.rstrip(os.sep) + logger.info('Output in %r will mirror the input directory %r layout.', + options.output_dir, input_base_dir) + rt = StdoutRefactoringTool( + sorted(fixer_names), flags, sorted(explicit), + options.nobackups, not options.no_diffs, + input_base_dir=input_base_dir, + output_dir=options.output_dir, + append_suffix=options.add_suffix) + + # Refactor all files and directories passed as arguments + if not rt.errors: + if refactor_stdin: + rt.refactor_stdin() + else: + try: + rt.refactor(args, options.write, options.doctests_only, + options.processes) + except refactor.MultiprocessingUnsupported: + assert options.processes > 1 + print("Sorry, -j isn't supported on this platform.", + file=sys.stderr) + return 1 + rt.summarize() + + # Return error status (0 if rt.errors is zero) + return int(bool(rt.errors)) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/patcomp.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/patcomp.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/patcomp.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/patcomp.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,204 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Pattern compiler. + +The grammar is taken from PatternGrammar.txt. 
+ +The compiler compiles a pattern to a pytree.*Pattern instance. +""" + +__author__ = "Guido van Rossum " + +# Python imports +import io + +# Fairly local imports +from .pgen2 import driver, literals, token, tokenize, parse, grammar + +# Really local imports +from . import pytree +from . import pygram + + +class PatternSyntaxError(Exception): + pass + + +def tokenize_wrapper(input): + """Tokenizes a string suppressing significant whitespace.""" + skip = {token.NEWLINE, token.INDENT, token.DEDENT} + tokens = tokenize.generate_tokens(io.StringIO(input).readline) + for quintuple in tokens: + type, value, start, end, line_text = quintuple + if type not in skip: + yield quintuple + + +class PatternCompiler(object): + + def __init__(self, grammar_file=None): + """Initializer. + + Takes an optional alternative filename for the pattern grammar. + """ + if grammar_file is None: + self.grammar = pygram.pattern_grammar + self.syms = pygram.pattern_symbols + else: + self.grammar = driver.load_grammar(grammar_file) + self.syms = pygram.Symbols(self.grammar) + self.pygrammar = pygram.python_grammar + self.pysyms = pygram.python_symbols + self.driver = driver.Driver(self.grammar, convert=pattern_convert) + + def compile_pattern(self, input, debug=False, with_tree=False): + """Compiles a pattern string to a nested pytree.*Pattern object.""" + tokens = tokenize_wrapper(input) + try: + root = self.driver.parse_tokens(tokens, debug=debug) + except parse.ParseError as e: + raise PatternSyntaxError(str(e)) from None + if with_tree: + return self.compile_node(root), root + else: + return self.compile_node(root) + + def compile_node(self, node): + """Compiles a node, recursively. + + This is one big switch on the node type. + """ + # XXX Optimize certain Wildcard-containing-Wildcard patterns + # that can be merged + if node.type == self.syms.Matcher: + node = node.children[0] # Avoid unneeded recursion + + if node.type == self.syms.Alternatives: + # Skip the odd children since they are just '|' tokens + alts = [self.compile_node(ch) for ch in node.children[::2]] + if len(alts) == 1: + return alts[0] + p = pytree.WildcardPattern([[a] for a in alts], min=1, max=1) + return p.optimize() + + if node.type == self.syms.Alternative: + units = [self.compile_node(ch) for ch in node.children] + if len(units) == 1: + return units[0] + p = pytree.WildcardPattern([units], min=1, max=1) + return p.optimize() + + if node.type == self.syms.NegatedUnit: + pattern = self.compile_basic(node.children[1:]) + p = pytree.NegatedPattern(pattern) + return p.optimize() + + assert node.type == self.syms.Unit + + name = None + nodes = node.children + if len(nodes) >= 3 and nodes[1].type == token.EQUAL: + name = nodes[0].value + nodes = nodes[2:] + repeat = None + if len(nodes) >= 2 and nodes[-1].type == self.syms.Repeater: + repeat = nodes[-1] + nodes = nodes[:-1] + + # Now we've reduced it to: STRING | NAME [Details] | (...) | [...] 
+ pattern = self.compile_basic(nodes, repeat) + + if repeat is not None: + assert repeat.type == self.syms.Repeater + children = repeat.children + child = children[0] + if child.type == token.STAR: + min = 0 + max = pytree.HUGE + elif child.type == token.PLUS: + min = 1 + max = pytree.HUGE + elif child.type == token.LBRACE: + assert children[-1].type == token.RBRACE + assert len(children) in (3, 5) + min = max = self.get_int(children[1]) + if len(children) == 5: + max = self.get_int(children[3]) + else: + assert False + if min != 1 or max != 1: + pattern = pattern.optimize() + pattern = pytree.WildcardPattern([[pattern]], min=min, max=max) + + if name is not None: + pattern.name = name + return pattern.optimize() + + def compile_basic(self, nodes, repeat=None): + # Compile STRING | NAME [Details] | (...) | [...] + assert len(nodes) >= 1 + node = nodes[0] + if node.type == token.STRING: + value = str(literals.evalString(node.value)) + return pytree.LeafPattern(_type_of_literal(value), value) + elif node.type == token.NAME: + value = node.value + if value.isupper(): + if value not in TOKEN_MAP: + raise PatternSyntaxError("Invalid token: %r" % value) + if nodes[1:]: + raise PatternSyntaxError("Can't have details for token") + return pytree.LeafPattern(TOKEN_MAP[value]) + else: + if value == "any": + type = None + elif not value.startswith("_"): + type = getattr(self.pysyms, value, None) + if type is None: + raise PatternSyntaxError("Invalid symbol: %r" % value) + if nodes[1:]: # Details present + content = [self.compile_node(nodes[1].children[1])] + else: + content = None + return pytree.NodePattern(type, content) + elif node.value == "(": + return self.compile_node(nodes[1]) + elif node.value == "[": + assert repeat is None + subpattern = self.compile_node(nodes[1]) + return pytree.WildcardPattern([[subpattern]], min=0, max=1) + assert False, node + + def get_int(self, node): + assert node.type == token.NUMBER + return int(node.value) + + +# Map named tokens to the type value for a LeafPattern +TOKEN_MAP = {"NAME": token.NAME, + "STRING": token.STRING, + "NUMBER": token.NUMBER, + "TOKEN": None} + + +def _type_of_literal(value): + if value[0].isalpha(): + return token.NAME + elif value in grammar.opmap: + return grammar.opmap[value] + else: + return None + + +def pattern_convert(grammar, raw_node_info): + """Converts raw node information to a Node or Leaf instance.""" + type, value, context, children = raw_node_info + if children or type in grammar.number2symbol: + return pytree.Node(type, children, context=context) + else: + return pytree.Leaf(type, value, context=context) + + +def compile_pattern(pattern): + return PatternCompiler().compile_pattern(pattern) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/pgen2/__init__.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/pgen2/__init__.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/pgen2/__init__.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/pgen2/__init__.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,4 @@ +# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. 
+ +"""The pgen2 package.""" diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/pgen2/conv.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/pgen2/conv.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/pgen2/conv.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/pgen2/conv.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,257 @@ +# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Convert graminit.[ch] spit out by pgen to Python code. + +Pgen is the Python parser generator. It is useful to quickly create a +parser from a grammar file in Python's grammar notation. But I don't +want my parsers to be written in C (yet), so I'm translating the +parsing tables to Python data structures and writing a Python parse +engine. + +Note that the token numbers are constants determined by the standard +Python tokenizer. The standard token module defines these numbers and +their names (the names are not used much). The token numbers are +hardcoded into the Python tokenizer and into pgen. A Python +implementation of the Python tokenizer is also available, in the +standard tokenize module. + +On the other hand, symbol numbers (representing the grammar's +non-terminals) are assigned by pgen based on the actual grammar +input. + +Note: this module is pretty much obsolete; the pgen module generates +equivalent grammar tables directly from the Grammar.txt input file +without having to invoke the Python pgen C program. + +""" + +# Python imports +import re + +# Local imports +from pgen2 import grammar, token + + +class Converter(grammar.Grammar): + """Grammar subclass that reads classic pgen output files. + + The run() method reads the tables as produced by the pgen parser + generator, typically contained in two C files, graminit.h and + graminit.c. The other methods are for internal use only. + + See the base class for more documentation. + + """ + + def run(self, graminit_h, graminit_c): + """Load the grammar tables from the text files written by pgen.""" + self.parse_graminit_h(graminit_h) + self.parse_graminit_c(graminit_c) + self.finish_off() + + def parse_graminit_h(self, filename): + """Parse the .h file written by pgen. (Internal) + + This file is a sequence of #define statements defining the + nonterminals of the grammar as numbers. We build two tables + mapping the numbers to names and back. + + """ + try: + f = open(filename) + except OSError as err: + print("Can't open %s: %s" % (filename, err)) + return False + self.symbol2number = {} + self.number2symbol = {} + lineno = 0 + for line in f: + lineno += 1 + mo = re.match(r"^#define\s+(\w+)\s+(\d+)$", line) + if not mo and line.strip(): + print("%s(%s): can't parse %s" % (filename, lineno, + line.strip())) + else: + symbol, number = mo.groups() + number = int(number) + assert symbol not in self.symbol2number + assert number not in self.number2symbol + self.symbol2number[symbol] = number + self.number2symbol[number] = symbol + return True + + def parse_graminit_c(self, filename): + """Parse the .c file written by pgen. (Internal) + + The file looks as follows. 
The first two lines are always this: + + #include "pgenheaders.h" + #include "grammar.h" + + After that come four blocks: + + 1) one or more state definitions + 2) a table defining dfas + 3) a table defining labels + 4) a struct defining the grammar + + A state definition has the following form: + - one or more arc arrays, each of the form: + static arc arcs_<n>_<m>[<k>] = { + {<i>, <j>}, + ... + }; + - followed by a state array, of the form: + static state states_<s>[<t>] = { + {<k>, arcs_<n>_<m>}, + ... + }; + + """ + try: + f = open(filename) + except OSError as err: + print("Can't open %s: %s" % (filename, err)) + return False + # The code below essentially uses f's iterator-ness! + lineno = 0 + + # Expect the two #include lines + lineno, line = lineno+1, next(f) + assert line == '#include "pgenheaders.h"\n', (lineno, line) + lineno, line = lineno+1, next(f) + assert line == '#include "grammar.h"\n', (lineno, line) + + # Parse the state definitions + lineno, line = lineno+1, next(f) + allarcs = {} + states = [] + while line.startswith("static arc "): + while line.startswith("static arc "): + mo = re.match(r"static arc arcs_(\d+)_(\d+)\[(\d+)\] = {$", + line) + assert mo, (lineno, line) + n, m, k = list(map(int, mo.groups())) + arcs = [] + for _ in range(k): + lineno, line = lineno+1, next(f) + mo = re.match(r"\s+{(\d+), (\d+)},$", line) + assert mo, (lineno, line) + i, j = list(map(int, mo.groups())) + arcs.append((i, j)) + lineno, line = lineno+1, next(f) + assert line == "};\n", (lineno, line) + allarcs[(n, m)] = arcs + lineno, line = lineno+1, next(f) + mo = re.match(r"static state states_(\d+)\[(\d+)\] = {$", line) + assert mo, (lineno, line) + s, t = list(map(int, mo.groups())) + assert s == len(states), (lineno, line) + state = [] + for _ in range(t): + lineno, line = lineno+1, next(f) + mo = re.match(r"\s+{(\d+), arcs_(\d+)_(\d+)},$", line) + assert mo, (lineno, line) + k, n, m = list(map(int, mo.groups())) + arcs = allarcs[n, m] + assert k == len(arcs), (lineno, line) + state.append(arcs) + states.append(state) + lineno, line = lineno+1, next(f) + assert line == "};\n", (lineno, line) + lineno, line = lineno+1, next(f) + self.states = states + + # Parse the dfas + dfas = {} + mo = re.match(r"static dfa dfas\[(\d+)\] = {$", line) + assert mo, (lineno, line) + ndfas = int(mo.group(1)) + for i in range(ndfas): + lineno, line = lineno+1, next(f) + mo = re.match(r'\s+{(\d+), "(\w+)", (\d+), (\d+), states_(\d+),$', + line) + assert mo, (lineno, line) + symbol = mo.group(2) + number, x, y, z = list(map(int, mo.group(1, 3, 4, 5))) + assert self.symbol2number[symbol] == number, (lineno, line) + assert self.number2symbol[number] == symbol, (lineno, line) + assert x == 0, (lineno, line) + state = states[z] + assert y == len(state), (lineno, line) + lineno, line = lineno+1, next(f) + mo = re.match(r'\s+("(?:\\\d\d\d)*")},$', line) + assert mo, (lineno, line) + first = {} + rawbitset = eval(mo.group(1)) + for i, c in enumerate(rawbitset): + byte = ord(c) + for j in range(8): + if byte & (1<= os.path.getmtime(b) + + +def load_packaged_grammar(package, grammar_source): + """Normally, loads a pickled grammar by doing + pkgutil.get_data(package, pickled_grammar) + where *pickled_grammar* is computed from *grammar_source* by adding the + Python version and using a ``.pickle`` extension. + + However, if *grammar_source* is an extant file, load_grammar(grammar_source) + is called instead. This facilitates using a packaged grammar file when needed + but preserves load_grammar's automatic regeneration behavior when possible.
+ + """ + if os.path.isfile(grammar_source): + return load_grammar(grammar_source) + pickled_name = _generate_pickle_name(os.path.basename(grammar_source)) + data = pkgutil.get_data(package, pickled_name) + g = grammar.Grammar() + g.loads(data) + return g + + +def main(*args): + """Main program, when run as a script: produce grammar pickle files. + + Calls load_grammar for each argument, a path to a grammar text file. + """ + if not args: + args = sys.argv[1:] + logging.basicConfig(level=logging.INFO, stream=sys.stdout, + format='%(message)s') + for gt in args: + load_grammar(gt, save=True, force=True) + return True + +if __name__ == "__main__": + sys.exit(int(not main())) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/pgen2/grammar.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/pgen2/grammar.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/pgen2/grammar.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/pgen2/grammar.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,189 @@ +# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""This module defines the data structures used to represent a grammar. + +These are a bit arcane because they are derived from the data +structures used by Python's 'pgen' parser generator. + +There's also a table here mapping operators to their names in the +token module; the Python tokenize module reports all operators as the +fallback token code OP, but the parser needs the actual token code. + +""" + +# Python imports +import pickle + +# Local imports +from . import token + + +class Grammar(object): + """Pgen parsing tables conversion class. + + Once initialized, this class supplies the grammar tables for the + parsing engine implemented by parse.py. The parsing engine + accesses the instance variables directly. The class here does not + provide initialization of the tables; several subclasses exist to + do this (see the conv and pgen modules). + + The load() method reads the tables from a pickle file, which is + much faster than the other ways offered by subclasses. The pickle + file is written by calling dump() (after loading the grammar + tables using a subclass). The report() method prints a readable + representation of the tables to stdout, for debugging. + + The instance variables are as follows: + + symbol2number -- a dict mapping symbol names to numbers. Symbol + numbers are always 256 or higher, to distinguish + them from token numbers, which are between 0 and + 255 (inclusive). + + number2symbol -- a dict mapping numbers to symbol names; + these two are each other's inverse. + + states -- a list of DFAs, where each DFA is a list of + states, each state is a list of arcs, and each + arc is a (i, j) pair where i is a label and j is + a state number. The DFA number is the index into + this list. (This name is slightly confusing.) + Final states are represented by a special arc of + the form (0, j) where j is its own state number. + + dfas -- a dict mapping symbol numbers to (DFA, first) + pairs, where DFA is an item from the states list + above, and first is a set of tokens that can + begin this grammar rule (represented by a dict + whose values are always 1). + + labels -- a list of (x, y) pairs where x is either a token + number or a symbol number, and y is either None + or a string; the strings are keywords. 
The label + number is the index in this list; label numbers + are used to mark state transitions (arcs) in the + DFAs. + + start -- the number of the grammar's start symbol. + + keywords -- a dict mapping keyword strings to arc labels. + + tokens -- a dict mapping token numbers to arc labels. + + """ + + def __init__(self): + self.symbol2number = {} + self.number2symbol = {} + self.states = [] + self.dfas = {} + self.labels = [(0, "EMPTY")] + self.keywords = {} + self.tokens = {} + self.symbol2label = {} + self.start = 256 + + def dump(self, filename): + """Dump the grammar tables to a pickle file.""" + with open(filename, "wb") as f: + pickle.dump(self.__dict__, f, pickle.HIGHEST_PROTOCOL) + + def load(self, filename): + """Load the grammar tables from a pickle file.""" + with open(filename, "rb") as f: + d = pickle.load(f) + self.__dict__.update(d) + + def loads(self, pkl): + """Load the grammar tables from a pickle bytes object.""" + self.__dict__.update(pickle.loads(pkl)) + + def copy(self): + """ + Copy the grammar. + """ + new = self.__class__() + for dict_attr in ("symbol2number", "number2symbol", "dfas", "keywords", + "tokens", "symbol2label"): + setattr(new, dict_attr, getattr(self, dict_attr).copy()) + new.labels = self.labels[:] + new.states = self.states[:] + new.start = self.start + return new + + def report(self): + """Dump the grammar tables to standard output, for debugging.""" + from pprint import pprint + print("s2n") + pprint(self.symbol2number) + print("n2s") + pprint(self.number2symbol) + print("states") + pprint(self.states) + print("dfas") + pprint(self.dfas) + print("labels") + pprint(self.labels) + print("start", self.start) + + +# Map from operator to number (since tokenize doesn't do this) + +opmap_raw = """ +( LPAR +) RPAR +[ LSQB +] RSQB +: COLON +, COMMA +; SEMI ++ PLUS +- MINUS +* STAR +/ SLASH +| VBAR +& AMPER +< LESS +> GREATER += EQUAL +. DOT +% PERCENT +` BACKQUOTE +{ LBRACE +} RBRACE +@ AT +@= ATEQUAL +== EQEQUAL +!= NOTEQUAL +<> NOTEQUAL +<= LESSEQUAL +>= GREATEREQUAL +~ TILDE +^ CIRCUMFLEX +<< LEFTSHIFT +>> RIGHTSHIFT +** DOUBLESTAR ++= PLUSEQUAL +-= MINEQUAL +*= STAREQUAL +/= SLASHEQUAL +%= PERCENTEQUAL +&= AMPEREQUAL +|= VBAREQUAL +^= CIRCUMFLEXEQUAL +<<= LEFTSHIFTEQUAL +>>= RIGHTSHIFTEQUAL +**= DOUBLESTAREQUAL +// DOUBLESLASH +//= DOUBLESLASHEQUAL +-> RARROW +:= COLONEQUAL +""" + +opmap = {} +for line in opmap_raw.splitlines(): + if line: + op, name = line.split() + opmap[op] = getattr(token, name) +del line, op, name diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/pgen2/literals.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/pgen2/literals.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/pgen2/literals.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/pgen2/literals.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,60 @@ +# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. 
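# Editor's note: illustrative sketch, not part of this patch. It assumes the
# lib2to3 package shown in this diff is importable (it ships with CPython up
# to 3.12) and exercises the Grammar dump()/load() pickle round trip plus the
# opmap table defined above. The "file_input"/256 entry is a toy value, not a
# real grammar table.
import os
import tempfile
from lib2to3.pgen2 import grammar, token

g = grammar.Grammar()
g.symbol2number["file_input"] = 256
g.number2symbol[256] = "file_input"

path = os.path.join(tempfile.mkdtemp(), "toy-grammar.pickle")
g.dump(path)                     # serialize the tables with pickle
h = grammar.Grammar()
h.load(path)                     # ...and read them back
assert h.symbol2number == {"file_input": 256}

assert grammar.opmap["=="] == token.EQEQUAL   # operators map to token codes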
+ +"""Safely evaluate Python string literals without using eval().""" + +import re + +simple_escapes = {"a": "\a", + "b": "\b", + "f": "\f", + "n": "\n", + "r": "\r", + "t": "\t", + "v": "\v", + "'": "'", + '"': '"', + "\\": "\\"} + +def escape(m): + all, tail = m.group(0, 1) + assert all.startswith("\\") + esc = simple_escapes.get(tail) + if esc is not None: + return esc + if tail.startswith("x"): + hexes = tail[1:] + if len(hexes) < 2: + raise ValueError("invalid hex string escape ('\\%s')" % tail) + try: + i = int(hexes, 16) + except ValueError: + raise ValueError("invalid hex string escape ('\\%s')" % tail) from None + else: + try: + i = int(tail, 8) + except ValueError: + raise ValueError("invalid octal string escape ('\\%s')" % tail) from None + return chr(i) + +def evalString(s): + assert s.startswith("'") or s.startswith('"'), repr(s[:1]) + q = s[0] + if s[:3] == q*3: + q = q*3 + assert s.endswith(q), repr(s[-len(q):]) + assert len(s) >= 2*len(q) + s = s[len(q):-len(q)] + return re.sub(r"\\(\'|\"|\\|[abfnrtv]|x.{0,2}|[0-7]{1,3})", escape, s) + +def test(): + for i in range(256): + c = chr(i) + s = repr(c) + e = evalString(s) + if e != c: + print(i, c, s, e) + + +if __name__ == "__main__": + test() diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/pgen2/parse.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/pgen2/parse.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/pgen2/parse.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/pgen2/parse.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,204 @@ +# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Parser engine for the grammar tables generated by pgen. + +The grammar table must be loaded first. + +See Parser/parser.c in the Python distribution for additional info on +how this parsing engine works. + +""" + +# Local imports +from . import token + +class ParseError(Exception): + """Exception to signal the parser is stuck.""" + + def __init__(self, msg, type, value, context): + Exception.__init__(self, "%s: type=%r, value=%r, context=%r" % + (msg, type, value, context)) + self.msg = msg + self.type = type + self.value = value + self.context = context + + def __reduce__(self): + return type(self), (self.msg, self.type, self.value, self.context) + +class Parser(object): + """Parser engine. + + The proper usage sequence is: + + p = Parser(grammar, [converter]) # create instance + p.setup([start]) # prepare for parsing + : + if p.addtoken(...): # parse a token; may raise ParseError + break + root = p.rootnode # root of abstract syntax tree + + A Parser instance may be reused by calling setup() repeatedly. + + A Parser instance contains state pertaining to the current token + sequence, and should not be used concurrently by different threads + to parse separate token sequences. + + See driver.py for how to get input tokens by tokenizing a file or + string. + + Parsing is complete when addtoken() returns True; the root of the + abstract syntax tree can then be retrieved from the rootnode + instance variable. When a syntax error occurs, addtoken() raises + the ParseError exception. There is no error recovery; the parser + cannot be used after a syntax error was reported (but it can be + reinitialized by calling setup()). + + """ + + def __init__(self, grammar, convert=None): + """Constructor. + + The grammar argument is a grammar.Grammar instance; see the + grammar module for more information. 
+ + The parser is not ready yet for parsing; you must call the + setup() method to get it started. + + The optional convert argument is a function mapping concrete + syntax tree nodes to abstract syntax tree nodes. If not + given, no conversion is done and the syntax tree produced is + the concrete syntax tree. If given, it must be a function of + two arguments, the first being the grammar (a grammar.Grammar + instance), and the second being the concrete syntax tree node + to be converted. The syntax tree is converted from the bottom + up. + + A concrete syntax tree node is a (type, value, context, nodes) + tuple, where type is the node type (a token or symbol number), + value is None for symbols and a string for tokens, context is + None or an opaque value used for error reporting (typically a + (lineno, offset) pair), and nodes is a list of children for + symbols, and None for tokens. + + An abstract syntax tree node may be anything; this is entirely + up to the converter function. + + """ + self.grammar = grammar + self.convert = convert or (lambda grammar, node: node) + + def setup(self, start=None): + """Prepare for parsing. + + This *must* be called before starting to parse. + + The optional argument is an alternative start symbol; it + defaults to the grammar's start symbol. + + You can use a Parser instance to parse any number of programs; + each time you call setup() the parser is reset to an initial + state determined by the (implicit or explicit) start symbol. + + """ + if start is None: + start = self.grammar.start + # Each stack entry is a tuple: (dfa, state, node). + # A node is a tuple: (type, value, context, children), + # where children is a list of nodes or None, and context may be None. + newnode = (start, None, None, []) + stackentry = (self.grammar.dfas[start], 0, newnode) + self.stack = [stackentry] + self.rootnode = None + self.used_names = set() # Aliased to self.rootnode.used_names in pop() + + def addtoken(self, type, value, context): + """Add a token; return True iff this is the end of the program.""" + # Map from token to label + ilabel = self.classify(type, value, context) + # Loop until the token is shifted; may raise exceptions + while True: + dfa, state, node = self.stack[-1] + states, first = dfa + arcs = states[state] + # Look for a state with this label + for i, newstate in arcs: + t, v = self.grammar.labels[i] + if ilabel == i: + # Look it up in the list of labels + assert t < 256 + # Shift a token; we're done with it + self.shift(type, value, newstate, context) + # Pop while we are in an accept-only state + state = newstate + while states[state] == [(0, state)]: + self.pop() + if not self.stack: + # Done parsing! + return True + dfa, state, node = self.stack[-1] + states, first = dfa + # Done with this token + return False + elif t >= 256: + # See if it's a symbol and if we're in its first set + itsdfa = self.grammar.dfas[t] + itsstates, itsfirst = itsdfa + if ilabel in itsfirst: + # Push a symbol + self.push(t, self.grammar.dfas[t], newstate, context) + break # To continue the outer while loop + else: + if (0, state) in arcs: + # An accepting state, pop it and try something else + self.pop() + if not self.stack: + # Done parsing, but another token is input + raise ParseError("too much input", + type, value, context) + else: + # No success finding a transition + raise ParseError("bad input", type, value, context) + + def classify(self, type, value, context): + """Turn a token into a label. 
(Internal)""" + if type == token.NAME: + # Keep a listing of all used names + self.used_names.add(value) + # Check for reserved words + ilabel = self.grammar.keywords.get(value) + if ilabel is not None: + return ilabel + ilabel = self.grammar.tokens.get(type) + if ilabel is None: + raise ParseError("bad token", type, value, context) + return ilabel + + def shift(self, type, value, newstate, context): + """Shift a token. (Internal)""" + dfa, state, node = self.stack[-1] + newnode = (type, value, context, None) + newnode = self.convert(self.grammar, newnode) + if newnode is not None: + node[-1].append(newnode) + self.stack[-1] = (dfa, newstate, node) + + def push(self, type, newdfa, newstate, context): + """Push a nonterminal. (Internal)""" + dfa, state, node = self.stack[-1] + newnode = (type, None, context, []) + self.stack[-1] = (dfa, newstate, node) + self.stack.append((newdfa, 0, newnode)) + + def pop(self): + """Pop a nonterminal. (Internal)""" + popdfa, popstate, popnode = self.stack.pop() + newnode = self.convert(self.grammar, popnode) + if newnode is not None: + if self.stack: + dfa, state, node = self.stack[-1] + node[-1].append(newnode) + else: + self.rootnode = newnode + self.rootnode.used_names = self.used_names diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/pgen2/pgen.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/pgen2/pgen.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/pgen2/pgen.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/pgen2/pgen.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,386 @@ +# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +# Pgen imports +from . import grammar, token, tokenize + +class PgenGrammar(grammar.Grammar): + pass + +class ParserGenerator(object): + + def __init__(self, filename, stream=None): + close_stream = None + if stream is None: + stream = open(filename, encoding="utf-8") + close_stream = stream.close + self.filename = filename + self.stream = stream + self.generator = tokenize.generate_tokens(stream.readline) + self.gettoken() # Initialize lookahead + self.dfas, self.startsymbol = self.parse() + if close_stream is not None: + close_stream() + self.first = {} # map from symbol name to set of tokens + self.addfirstsets() + + def make_grammar(self): + c = PgenGrammar() + names = list(self.dfas.keys()) + names.sort() + names.remove(self.startsymbol) + names.insert(0, self.startsymbol) + for name in names: + i = 256 + len(c.symbol2number) + c.symbol2number[name] = i + c.number2symbol[i] = name + for name in names: + dfa = self.dfas[name] + states = [] + for state in dfa: + arcs = [] + for label, next in sorted(state.arcs.items()): + arcs.append((self.make_label(c, label), dfa.index(next))) + if state.isfinal: + arcs.append((0, dfa.index(state))) + states.append(arcs) + c.states.append(states) + c.dfas[c.symbol2number[name]] = (states, self.make_first(c, name)) + c.start = c.symbol2number[self.startsymbol] + return c + + def make_first(self, c, name): + rawfirst = self.first[name] + first = {} + for label in sorted(rawfirst): + ilabel = self.make_label(c, label) + ##assert ilabel not in first # XXX failed on <> ... != + first[ilabel] = 1 + return first + + def make_label(self, c, label): + # XXX Maybe this should be a method on a subclass of converter? 
+ ilabel = len(c.labels) + if label[0].isalpha(): + # Either a symbol name or a named token + if label in c.symbol2number: + # A symbol name (a non-terminal) + if label in c.symbol2label: + return c.symbol2label[label] + else: + c.labels.append((c.symbol2number[label], None)) + c.symbol2label[label] = ilabel + return ilabel + else: + # A named token (NAME, NUMBER, STRING) + itoken = getattr(token, label, None) + assert isinstance(itoken, int), label + assert itoken in token.tok_name, label + if itoken in c.tokens: + return c.tokens[itoken] + else: + c.labels.append((itoken, None)) + c.tokens[itoken] = ilabel + return ilabel + else: + # Either a keyword or an operator + assert label[0] in ('"', "'"), label + value = eval(label) + if value[0].isalpha(): + # A keyword + if value in c.keywords: + return c.keywords[value] + else: + c.labels.append((token.NAME, value)) + c.keywords[value] = ilabel + return ilabel + else: + # An operator (any non-numeric token) + itoken = grammar.opmap[value] # Fails if unknown token + if itoken in c.tokens: + return c.tokens[itoken] + else: + c.labels.append((itoken, None)) + c.tokens[itoken] = ilabel + return ilabel + + def addfirstsets(self): + names = list(self.dfas.keys()) + names.sort() + for name in names: + if name not in self.first: + self.calcfirst(name) + #print name, self.first[name].keys() + + def calcfirst(self, name): + dfa = self.dfas[name] + self.first[name] = None # dummy to detect left recursion + state = dfa[0] + totalset = {} + overlapcheck = {} + for label, next in state.arcs.items(): + if label in self.dfas: + if label in self.first: + fset = self.first[label] + if fset is None: + raise ValueError("recursion for rule %r" % name) + else: + self.calcfirst(label) + fset = self.first[label] + totalset.update(fset) + overlapcheck[label] = fset + else: + totalset[label] = 1 + overlapcheck[label] = {label: 1} + inverse = {} + for label, itsfirst in overlapcheck.items(): + for symbol in itsfirst: + if symbol in inverse: + raise ValueError("rule %s is ambiguous; %s is in the" + " first sets of %s as well as %s" % + (name, symbol, label, inverse[symbol])) + inverse[symbol] = label + self.first[name] = totalset + + def parse(self): + dfas = {} + startsymbol = None + # MSTART: (NEWLINE | RULE)* ENDMARKER + while self.type != token.ENDMARKER: + while self.type == token.NEWLINE: + self.gettoken() + # RULE: NAME ':' RHS NEWLINE + name = self.expect(token.NAME) + self.expect(token.OP, ":") + a, z = self.parse_rhs() + self.expect(token.NEWLINE) + #self.dump_nfa(name, a, z) + dfa = self.make_dfa(a, z) + #self.dump_dfa(name, dfa) + oldlen = len(dfa) + self.simplify_dfa(dfa) + newlen = len(dfa) + dfas[name] = dfa + #print name, oldlen, newlen + if startsymbol is None: + startsymbol = name + return dfas, startsymbol + + def make_dfa(self, start, finish): + # To turn an NFA into a DFA, we define the states of the DFA + # to correspond to *sets* of states of the NFA. Then do some + # state reduction. Let's represent sets as dicts with 1 for + # values. 
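# Editor's note: illustrative sketch, not part of this patch. A minimal,
# self-contained version of the subset construction sketched in the comment
# above, using plain dicts instead of NFAState/DFAState. The toy NFA below
# (an epsilon arc 0 -> 1 and an 'a' arc 1 -> 2) is an assumption made up for
# the example.
def _closure(state, eps):
    seen = {state: 1}                  # "sets as dicts with 1 for values"
    stack = [state]
    while stack:
        for nxt in eps.get(stack.pop(), ()):
            if nxt not in seen:
                seen[nxt] = 1
                stack.append(nxt)
    return seen

eps = {0: [1]}                         # epsilon arcs
arcs = {1: {"a": 2}}                   # labelled arcs
start_set = _closure(0, eps)           # DFA start state = {0, 1}
assert set(start_set) == {0, 1}

# One DFA state per *set* of NFA states; following 'a' from {0, 1} gives {2}.
move_a = {}
for s in start_set:
    t = arcs.get(s, {}).get("a")
    if t is not None:
        move_a.update(_closure(t, eps))
assert set(move_a) == {2}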
+ assert isinstance(start, NFAState) + assert isinstance(finish, NFAState) + def closure(state): + base = {} + addclosure(state, base) + return base + def addclosure(state, base): + assert isinstance(state, NFAState) + if state in base: + return + base[state] = 1 + for label, next in state.arcs: + if label is None: + addclosure(next, base) + states = [DFAState(closure(start), finish)] + for state in states: # NB states grows while we're iterating + arcs = {} + for nfastate in state.nfaset: + for label, next in nfastate.arcs: + if label is not None: + addclosure(next, arcs.setdefault(label, {})) + for label, nfaset in sorted(arcs.items()): + for st in states: + if st.nfaset == nfaset: + break + else: + st = DFAState(nfaset, finish) + states.append(st) + state.addarc(st, label) + return states # List of DFAState instances; first one is start + + def dump_nfa(self, name, start, finish): + print("Dump of NFA for", name) + todo = [start] + for i, state in enumerate(todo): + print(" State", i, state is finish and "(final)" or "") + for label, next in state.arcs: + if next in todo: + j = todo.index(next) + else: + j = len(todo) + todo.append(next) + if label is None: + print(" -> %d" % j) + else: + print(" %s -> %d" % (label, j)) + + def dump_dfa(self, name, dfa): + print("Dump of DFA for", name) + for i, state in enumerate(dfa): + print(" State", i, state.isfinal and "(final)" or "") + for label, next in sorted(state.arcs.items()): + print(" %s -> %d" % (label, dfa.index(next))) + + def simplify_dfa(self, dfa): + # This is not theoretically optimal, but works well enough. + # Algorithm: repeatedly look for two states that have the same + # set of arcs (same labels pointing to the same nodes) and + # unify them, until things stop changing. + + # dfa is a list of DFAState instances + changes = True + while changes: + changes = False + for i, state_i in enumerate(dfa): + for j in range(i+1, len(dfa)): + state_j = dfa[j] + if state_i == state_j: + #print " unify", i, j + del dfa[j] + for state in dfa: + state.unifystate(state_j, state_i) + changes = True + break + + def parse_rhs(self): + # RHS: ALT ('|' ALT)* + a, z = self.parse_alt() + if self.value != "|": + return a, z + else: + aa = NFAState() + zz = NFAState() + aa.addarc(a) + z.addarc(zz) + while self.value == "|": + self.gettoken() + a, z = self.parse_alt() + aa.addarc(a) + z.addarc(zz) + return aa, zz + + def parse_alt(self): + # ALT: ITEM+ + a, b = self.parse_item() + while (self.value in ("(", "[") or + self.type in (token.NAME, token.STRING)): + c, d = self.parse_item() + b.addarc(c) + b = d + return a, b + + def parse_item(self): + # ITEM: '[' RHS ']' | ATOM ['+' | '*'] + if self.value == "[": + self.gettoken() + a, z = self.parse_rhs() + self.expect(token.OP, "]") + a.addarc(z) + return a, z + else: + a, z = self.parse_atom() + value = self.value + if value not in ("+", "*"): + return a, z + self.gettoken() + z.addarc(a) + if value == "+": + return a, z + else: + return a, a + + def parse_atom(self): + # ATOM: '(' RHS ')' | NAME | STRING + if self.value == "(": + self.gettoken() + a, z = self.parse_rhs() + self.expect(token.OP, ")") + return a, z + elif self.type in (token.NAME, token.STRING): + a = NFAState() + z = NFAState() + a.addarc(z, self.value) + self.gettoken() + return a, z + else: + self.raise_error("expected (...) 
or NAME or STRING, got %s/%s", + self.type, self.value) + + def expect(self, type, value=None): + if self.type != type or (value is not None and self.value != value): + self.raise_error("expected %s/%s, got %s/%s", + type, value, self.type, self.value) + value = self.value + self.gettoken() + return value + + def gettoken(self): + tup = next(self.generator) + while tup[0] in (tokenize.COMMENT, tokenize.NL): + tup = next(self.generator) + self.type, self.value, self.begin, self.end, self.line = tup + #print token.tok_name[self.type], repr(self.value) + + def raise_error(self, msg, *args): + if args: + try: + msg = msg % args + except: + msg = " ".join([msg] + list(map(str, args))) + raise SyntaxError(msg, (self.filename, self.end[0], + self.end[1], self.line)) + +class NFAState(object): + + def __init__(self): + self.arcs = [] # list of (label, NFAState) pairs + + def addarc(self, next, label=None): + assert label is None or isinstance(label, str) + assert isinstance(next, NFAState) + self.arcs.append((label, next)) + +class DFAState(object): + + def __init__(self, nfaset, final): + assert isinstance(nfaset, dict) + assert isinstance(next(iter(nfaset)), NFAState) + assert isinstance(final, NFAState) + self.nfaset = nfaset + self.isfinal = final in nfaset + self.arcs = {} # map from label to DFAState + + def addarc(self, next, label): + assert isinstance(label, str) + assert label not in self.arcs + assert isinstance(next, DFAState) + self.arcs[label] = next + + def unifystate(self, old, new): + for label, next in self.arcs.items(): + if next is old: + self.arcs[label] = new + + def __eq__(self, other): + # Equality test -- ignore the nfaset instance variable + assert isinstance(other, DFAState) + if self.isfinal != other.isfinal: + return False + # Can't just return self.arcs == other.arcs, because that + # would invoke this method recursively, with cycles... + if len(self.arcs) != len(other.arcs): + return False + for label, next in self.arcs.items(): + if next is not other.arcs.get(label): + return False + return True + + __hash__ = None # For Py3 compatibility. + +def generate_grammar(filename="Grammar.txt"): + p = ParserGenerator(filename) + return p.make_grammar() diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/pgen2/token.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/pgen2/token.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/pgen2/token.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/pgen2/token.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,86 @@ +#! 
/usr/bin/env python3 + +"""Token constants (from "token.h").""" + +# Taken from Python (r53757) and modified to include some tokens +# originally monkeypatched in by pgen2.tokenize + +#--start constants-- +ENDMARKER = 0 +NAME = 1 +NUMBER = 2 +STRING = 3 +NEWLINE = 4 +INDENT = 5 +DEDENT = 6 +LPAR = 7 +RPAR = 8 +LSQB = 9 +RSQB = 10 +COLON = 11 +COMMA = 12 +SEMI = 13 +PLUS = 14 +MINUS = 15 +STAR = 16 +SLASH = 17 +VBAR = 18 +AMPER = 19 +LESS = 20 +GREATER = 21 +EQUAL = 22 +DOT = 23 +PERCENT = 24 +BACKQUOTE = 25 +LBRACE = 26 +RBRACE = 27 +EQEQUAL = 28 +NOTEQUAL = 29 +LESSEQUAL = 30 +GREATEREQUAL = 31 +TILDE = 32 +CIRCUMFLEX = 33 +LEFTSHIFT = 34 +RIGHTSHIFT = 35 +DOUBLESTAR = 36 +PLUSEQUAL = 37 +MINEQUAL = 38 +STAREQUAL = 39 +SLASHEQUAL = 40 +PERCENTEQUAL = 41 +AMPEREQUAL = 42 +VBAREQUAL = 43 +CIRCUMFLEXEQUAL = 44 +LEFTSHIFTEQUAL = 45 +RIGHTSHIFTEQUAL = 46 +DOUBLESTAREQUAL = 47 +DOUBLESLASH = 48 +DOUBLESLASHEQUAL = 49 +AT = 50 +ATEQUAL = 51 +OP = 52 +COMMENT = 53 +NL = 54 +RARROW = 55 +AWAIT = 56 +ASYNC = 57 +ERRORTOKEN = 58 +COLONEQUAL = 59 +N_TOKENS = 60 +NT_OFFSET = 256 +#--end constants-- + +tok_name = {} +for _name, _value in list(globals().items()): + if type(_value) is type(0): + tok_name[_value] = _name + + +def ISTERMINAL(x): + return x < NT_OFFSET + +def ISNONTERMINAL(x): + return x >= NT_OFFSET + +def ISEOF(x): + return x == ENDMARKER diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/pgen2/tokenize.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/pgen2/tokenize.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/pgen2/tokenize.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/pgen2/tokenize.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,564 @@ +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006 Python Software Foundation. +# All rights reserved. + +"""Tokenization help for Python programs. + +generate_tokens(readline) is a generator that breaks a stream of +text into Python tokens. It accepts a readline-like method which is called +repeatedly to get the next line of input (or "" for EOF). It generates +5-tuples with these members: + + the token type (see token.py) + the token (a string) + the starting (row, column) indices of the token (a 2-tuple of ints) + the ending (row, column) indices of the token (a 2-tuple of ints) + the original line (string) + +It is designed to match the working of the Python tokenizer exactly, except +that it produces COMMENT tokens for comments and gives type OP for all +operators + +Older entry points + tokenize_loop(readline, tokeneater) + tokenize(readline, tokeneater=printtoken) +are the same, except instead of generating tokens, tokeneater is a callback +function to which the 5 fields described above are passed as 5 arguments, +each time a new token is found.""" + +__author__ = 'Ka-Ping Yee ' +__credits__ = \ + 'GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, Skip Montanaro' + +import string, re +from codecs import BOM_UTF8, lookup +from lib2to3.pgen2.token import * + +from . import token +__all__ = [x for x in dir(token) if x[0] != '_'] + ["tokenize", + "generate_tokens", "untokenize"] +del token + +try: + bytes +except NameError: + # Support bytes type in Python <= 2.5, so 2to3 turns itself into + # valid Python 3 code. + bytes = str + +def group(*choices): return '(' + '|'.join(choices) + ')' +def any(*choices): return group(*choices) + '*' +def maybe(*choices): return group(*choices) + '?' 
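# Editor's note: illustrative sketch, not part of this patch. It re-creates
# the group()/any()/maybe() helpers defined above under private names (so the
# snippet is self-contained) and shows how they compose regex fragments; the
# toy Sign/ToyNumber pattern is an assumption made up for the example.
import re as _re

def _group(*choices): return '(' + '|'.join(choices) + ')'
def _any(*choices): return _group(*choices) + '*'
def _maybe(*choices): return _group(*choices) + '?'

Sign = _maybe(r'[-+]')
ToyNumber = Sign + r'\d+' + _any(r'_\d+')      # e.g. "-12_345"
assert _re.fullmatch(ToyNumber, '-12_345')
assert _re.fullmatch(ToyNumber, '12__3') is None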
+def _combinations(*l): + return set( + x + y for x in l for y in l + ("",) if x.casefold() != y.casefold() + ) + +Whitespace = r'[ \f\t]*' +Comment = r'#[^\r\n]*' +Ignore = Whitespace + any(r'\\\r?\n' + Whitespace) + maybe(Comment) +Name = r'\w+' + +Binnumber = r'0[bB]_?[01]+(?:_[01]+)*' +Hexnumber = r'0[xX]_?[\da-fA-F]+(?:_[\da-fA-F]+)*[lL]?' +Octnumber = r'0[oO]?_?[0-7]+(?:_[0-7]+)*[lL]?' +Decnumber = group(r'[1-9]\d*(?:_\d+)*[lL]?', '0[lL]?') +Intnumber = group(Binnumber, Hexnumber, Octnumber, Decnumber) +Exponent = r'[eE][-+]?\d+(?:_\d+)*' +Pointfloat = group(r'\d+(?:_\d+)*\.(?:\d+(?:_\d+)*)?', r'\.\d+(?:_\d+)*') + maybe(Exponent) +Expfloat = r'\d+(?:_\d+)*' + Exponent +Floatnumber = group(Pointfloat, Expfloat) +Imagnumber = group(r'\d+(?:_\d+)*[jJ]', Floatnumber + r'[jJ]') +Number = group(Imagnumber, Floatnumber, Intnumber) + +# Tail end of ' string. +Single = r"[^'\\]*(?:\\.[^'\\]*)*'" +# Tail end of " string. +Double = r'[^"\\]*(?:\\.[^"\\]*)*"' +# Tail end of ''' string. +Single3 = r"[^'\\]*(?:(?:\\.|'(?!''))[^'\\]*)*'''" +# Tail end of """ string. +Double3 = r'[^"\\]*(?:(?:\\.|"(?!""))[^"\\]*)*"""' +_litprefix = r"(?:[uUrRbBfF]|[rR][fFbB]|[fFbBuU][rR])?" +Triple = group(_litprefix + "'''", _litprefix + '"""') +# Single-line ' or " string. +String = group(_litprefix + r"'[^\n'\\]*(?:\\.[^\n'\\]*)*'", + _litprefix + r'"[^\n"\\]*(?:\\.[^\n"\\]*)*"') + +# Because of leftmost-then-longest match semantics, be sure to put the +# longest operators first (e.g., if = came before ==, == would get +# recognized as two instances of =). +Operator = group(r"\*\*=?", r">>=?", r"<<=?", r"<>", r"!=", + r"//=?", r"->", + r"[+\-*/%&@|^=<>]=?", + r"~") + +Bracket = '[][(){}]' +Special = group(r'\r?\n', r':=', r'[:;.,`@]') +Funny = group(Operator, Bracket, Special) + +PlainToken = group(Number, Funny, String, Name) +Token = Ignore + PlainToken + +# First (or only) line of ' or " string. +ContStr = group(_litprefix + r"'[^\n'\\]*(?:\\.[^\n'\\]*)*" + + group("'", r'\\\r?\n'), + _litprefix + r'"[^\n"\\]*(?:\\.[^\n"\\]*)*' + + group('"', r'\\\r?\n')) +PseudoExtras = group(r'\\\r?\n', Comment, Triple) +PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name) + +tokenprog, pseudoprog, single3prog, double3prog = map( + re.compile, (Token, PseudoToken, Single3, Double3)) + +_strprefixes = ( + _combinations('r', 'R', 'f', 'F') | + _combinations('r', 'R', 'b', 'B') | + {'u', 'U', 'ur', 'uR', 'Ur', 'UR'} +) + +endprogs = {"'": re.compile(Single), '"': re.compile(Double), + "'''": single3prog, '"""': double3prog, + **{f"{prefix}'''": single3prog for prefix in _strprefixes}, + **{f'{prefix}"""': double3prog for prefix in _strprefixes}, + **{prefix: None for prefix in _strprefixes}} + +triple_quoted = ( + {"'''", '"""'} | + {f"{prefix}'''" for prefix in _strprefixes} | + {f'{prefix}"""' for prefix in _strprefixes} +) +single_quoted = ( + {"'", '"'} | + {f"{prefix}'" for prefix in _strprefixes} | + {f'{prefix}"' for prefix in _strprefixes} +) + +tabsize = 8 + +class TokenError(Exception): pass + +class StopTokenizing(Exception): pass + +def printtoken(type, token, xxx_todo_changeme, xxx_todo_changeme1, line): # for testing + (srow, scol) = xxx_todo_changeme + (erow, ecol) = xxx_todo_changeme1 + print("%d,%d-%d,%d:\t%s\t%s" % \ + (srow, scol, erow, ecol, tok_name[type], repr(token))) + +def tokenize(readline, tokeneater=printtoken): + """ + The tokenize() function accepts two parameters: one representing the + input stream, and one providing an output mechanism for tokenize(). 
+ + The first parameter, readline, must be a callable object which provides + the same interface as the readline() method of built-in file objects. + Each call to the function should return one line of input as a string. + + The second parameter, tokeneater, must also be a callable object. It is + called once for each token, with five arguments, corresponding to the + tuples generated by generate_tokens(). + """ + try: + tokenize_loop(readline, tokeneater) + except StopTokenizing: + pass + +# backwards compatible interface +def tokenize_loop(readline, tokeneater): + for token_info in generate_tokens(readline): + tokeneater(*token_info) + +class Untokenizer: + + def __init__(self): + self.tokens = [] + self.prev_row = 1 + self.prev_col = 0 + + def add_whitespace(self, start): + row, col = start + assert row <= self.prev_row + col_offset = col - self.prev_col + if col_offset: + self.tokens.append(" " * col_offset) + + def untokenize(self, iterable): + for t in iterable: + if len(t) == 2: + self.compat(t, iterable) + break + tok_type, token, start, end, line = t + self.add_whitespace(start) + self.tokens.append(token) + self.prev_row, self.prev_col = end + if tok_type in (NEWLINE, NL): + self.prev_row += 1 + self.prev_col = 0 + return "".join(self.tokens) + + def compat(self, token, iterable): + startline = False + indents = [] + toks_append = self.tokens.append + toknum, tokval = token + if toknum in (NAME, NUMBER): + tokval += ' ' + if toknum in (NEWLINE, NL): + startline = True + for tok in iterable: + toknum, tokval = tok[:2] + + if toknum in (NAME, NUMBER, ASYNC, AWAIT): + tokval += ' ' + + if toknum == INDENT: + indents.append(tokval) + continue + elif toknum == DEDENT: + indents.pop() + continue + elif toknum in (NEWLINE, NL): + startline = True + elif startline and indents: + toks_append(indents[-1]) + startline = False + toks_append(tokval) + +cookie_re = re.compile(r'^[ \t\f]*#.*?coding[:=][ \t]*([-\w.]+)', re.ASCII) +blank_re = re.compile(br'^[ \t\f]*(?:[#\r\n]|$)', re.ASCII) + +def _get_normal_name(orig_enc): + """Imitates get_normal_name in tokenizer.c.""" + # Only care about the first 12 characters. + enc = orig_enc[:12].lower().replace("_", "-") + if enc == "utf-8" or enc.startswith("utf-8-"): + return "utf-8" + if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \ + enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")): + return "iso-8859-1" + return orig_enc + +def detect_encoding(readline): + """ + The detect_encoding() function is used to detect the encoding that should + be used to decode a Python source file. It requires one argument, readline, + in the same way as the tokenize() generator. + + It will call readline a maximum of twice, and return the encoding used + (as a string) and a list of any lines (left as bytes) it has read + in. + + It detects the encoding from the presence of a utf-8 bom or an encoding + cookie as specified in pep-0263. If both a bom and a cookie are present, but + disagree, a SyntaxError will be raised. If the encoding cookie is an invalid + charset, raise a SyntaxError. Note that if a utf-8 bom is found, + 'utf-8-sig' is returned. + + If no encoding is specified, then the default of 'utf-8' will be returned. 
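# Editor's note: illustrative sketch, not part of this patch; assumes lib2to3
# is importable from the stdlib. detect_encoding() reads at most two lines via
# the supplied readline callable and reports the coding cookie (PEP 263) or
# the default.
import io
from lib2to3.pgen2 import tokenize as pgen2_tokenize

src = b"# -*- coding: latin-1 -*-\nx = 1\n"
enc, lines = pgen2_tokenize.detect_encoding(io.BytesIO(src).readline)
assert enc == "iso-8859-1"                     # normalized cookie name
assert lines == [b"# -*- coding: latin-1 -*-\n"]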
+ """ + bom_found = False + encoding = None + default = 'utf-8' + def read_or_stop(): + try: + return readline() + except StopIteration: + return bytes() + + def find_cookie(line): + try: + line_string = line.decode('ascii') + except UnicodeDecodeError: + return None + match = cookie_re.match(line_string) + if not match: + return None + encoding = _get_normal_name(match.group(1)) + try: + codec = lookup(encoding) + except LookupError: + # This behaviour mimics the Python interpreter + raise SyntaxError("unknown encoding: " + encoding) + + if bom_found: + if codec.name != 'utf-8': + # This behaviour mimics the Python interpreter + raise SyntaxError('encoding problem: utf-8') + encoding += '-sig' + return encoding + + first = read_or_stop() + if first.startswith(BOM_UTF8): + bom_found = True + first = first[3:] + default = 'utf-8-sig' + if not first: + return default, [] + + encoding = find_cookie(first) + if encoding: + return encoding, [first] + if not blank_re.match(first): + return default, [first] + + second = read_or_stop() + if not second: + return default, [first] + + encoding = find_cookie(second) + if encoding: + return encoding, [first, second] + + return default, [first, second] + +def untokenize(iterable): + """Transform tokens back into Python source code. + + Each element returned by the iterable must be a token sequence + with at least two elements, a token number and token value. If + only two tokens are passed, the resulting output is poor. + + Round-trip invariant for full input: + Untokenized source will match input source exactly + + Round-trip invariant for limited input: + # Output text will tokenize the back to the input + t1 = [tok[:2] for tok in generate_tokens(f.readline)] + newcode = untokenize(t1) + readline = iter(newcode.splitlines(1)).next + t2 = [tok[:2] for tokin generate_tokens(readline)] + assert t1 == t2 + """ + ut = Untokenizer() + return ut.untokenize(iterable) + +def generate_tokens(readline): + """ + The generate_tokens() generator requires one argument, readline, which + must be a callable object which provides the same interface as the + readline() method of built-in file objects. Each call to the function + should return one line of input as a string. Alternately, readline + can be a callable function terminating with StopIteration: + readline = open(myfile).next # Example of alternate readline + + The generator produces 5-tuples with these members: the token type; the + token string; a 2-tuple (srow, scol) of ints specifying the row and + column where the token begins in the source; a 2-tuple (erow, ecol) of + ints specifying the row and column where the token ends in the source; + and the line on which the token was found. The line passed is the + physical line. 
+ """ + lnum = parenlev = continued = 0 + contstr, needcont = '', 0 + contline = None + indents = [0] + + # 'stashed' and 'async_*' are used for async/await parsing + stashed = None + async_def = False + async_def_indent = 0 + async_def_nl = False + + while 1: # loop over lines in stream + try: + line = readline() + except StopIteration: + line = '' + lnum = lnum + 1 + pos, max = 0, len(line) + + if contstr: # continued string + if not line: + raise TokenError("EOF in multi-line string", strstart) + endmatch = endprog.match(line) + if endmatch: + pos = end = endmatch.end(0) + yield (STRING, contstr + line[:end], + strstart, (lnum, end), contline + line) + contstr, needcont = '', 0 + contline = None + elif needcont and line[-2:] != '\\\n' and line[-3:] != '\\\r\n': + yield (ERRORTOKEN, contstr + line, + strstart, (lnum, len(line)), contline) + contstr = '' + contline = None + continue + else: + contstr = contstr + line + contline = contline + line + continue + + elif parenlev == 0 and not continued: # new statement + if not line: break + column = 0 + while pos < max: # measure leading whitespace + if line[pos] == ' ': column = column + 1 + elif line[pos] == '\t': column = (column//tabsize + 1)*tabsize + elif line[pos] == '\f': column = 0 + else: break + pos = pos + 1 + if pos == max: break + + if stashed: + yield stashed + stashed = None + + if line[pos] in '#\r\n': # skip comments or blank lines + if line[pos] == '#': + comment_token = line[pos:].rstrip('\r\n') + nl_pos = pos + len(comment_token) + yield (COMMENT, comment_token, + (lnum, pos), (lnum, pos + len(comment_token)), line) + yield (NL, line[nl_pos:], + (lnum, nl_pos), (lnum, len(line)), line) + else: + yield ((NL, COMMENT)[line[pos] == '#'], line[pos:], + (lnum, pos), (lnum, len(line)), line) + continue + + if column > indents[-1]: # count indents or dedents + indents.append(column) + yield (INDENT, line[:pos], (lnum, 0), (lnum, pos), line) + while column < indents[-1]: + if column not in indents: + raise IndentationError( + "unindent does not match any outer indentation level", + ("", lnum, pos, line)) + indents = indents[:-1] + + if async_def and async_def_indent >= indents[-1]: + async_def = False + async_def_nl = False + async_def_indent = 0 + + yield (DEDENT, '', (lnum, pos), (lnum, pos), line) + + if async_def and async_def_nl and async_def_indent >= indents[-1]: + async_def = False + async_def_nl = False + async_def_indent = 0 + + else: # continued statement + if not line: + raise TokenError("EOF in multi-line statement", (lnum, 0)) + continued = 0 + + while pos < max: + pseudomatch = pseudoprog.match(line, pos) + if pseudomatch: # scan for tokens + start, end = pseudomatch.span(1) + spos, epos, pos = (lnum, start), (lnum, end), end + token, initial = line[start:end], line[start] + + if initial in string.digits or \ + (initial == '.' 
and token != '.'): # ordinary number + yield (NUMBER, token, spos, epos, line) + elif initial in '\r\n': + newline = NEWLINE + if parenlev > 0: + newline = NL + elif async_def: + async_def_nl = True + if stashed: + yield stashed + stashed = None + yield (newline, token, spos, epos, line) + + elif initial == '#': + assert not token.endswith("\n") + if stashed: + yield stashed + stashed = None + yield (COMMENT, token, spos, epos, line) + elif token in triple_quoted: + endprog = endprogs[token] + endmatch = endprog.match(line, pos) + if endmatch: # all on one line + pos = endmatch.end(0) + token = line[start:pos] + if stashed: + yield stashed + stashed = None + yield (STRING, token, spos, (lnum, pos), line) + else: + strstart = (lnum, start) # multiple lines + contstr = line[start:] + contline = line + break + elif initial in single_quoted or \ + token[:2] in single_quoted or \ + token[:3] in single_quoted: + if token[-1] == '\n': # continued string + strstart = (lnum, start) + endprog = (endprogs[initial] or endprogs[token[1]] or + endprogs[token[2]]) + contstr, needcont = line[start:], 1 + contline = line + break + else: # ordinary string + if stashed: + yield stashed + stashed = None + yield (STRING, token, spos, epos, line) + elif initial.isidentifier(): # ordinary name + if token in ('async', 'await'): + if async_def: + yield (ASYNC if token == 'async' else AWAIT, + token, spos, epos, line) + continue + + tok = (NAME, token, spos, epos, line) + if token == 'async' and not stashed: + stashed = tok + continue + + if token in ('def', 'for'): + if (stashed + and stashed[0] == NAME + and stashed[1] == 'async'): + + if token == 'def': + async_def = True + async_def_indent = indents[-1] + + yield (ASYNC, stashed[1], + stashed[2], stashed[3], + stashed[4]) + stashed = None + + if stashed: + yield stashed + stashed = None + + yield tok + elif initial == '\\': # continued stmt + # This yield is new; needed for better idempotency: + if stashed: + yield stashed + stashed = None + yield (NL, token, spos, (lnum, pos), line) + continued = 1 + else: + if initial in '([{': parenlev = parenlev + 1 + elif initial in ')]}': parenlev = parenlev - 1 + if stashed: + yield stashed + stashed = None + yield (OP, token, spos, epos, line) + else: + yield (ERRORTOKEN, line[pos], + (lnum, pos), (lnum, pos+1), line) + pos = pos + 1 + + if stashed: + yield stashed + stashed = None + + for indent in indents[1:]: # pop remaining indent levels + yield (DEDENT, '', (lnum, 0), (lnum, 0), '') + yield (ENDMARKER, '', (lnum, 0), (lnum, 0), '') + +if __name__ == '__main__': # testing + import sys + if len(sys.argv) > 1: tokenize(open(sys.argv[1]).readline) + else: tokenize(sys.stdin.readline) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/pygram.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/pygram.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/pygram.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/pygram.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,43 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Export the Python grammar and symbols.""" + +# Python imports +import os + +# Local imports +from .pgen2 import token +from .pgen2 import driver +from . 
import pytree + +# The grammar file +_GRAMMAR_FILE = os.path.join(os.path.dirname(__file__), "Grammar.txt") +_PATTERN_GRAMMAR_FILE = os.path.join(os.path.dirname(__file__), + "PatternGrammar.txt") + + +class Symbols(object): + + def __init__(self, grammar): + """Initializer. + + Creates an attribute for each grammar symbol (nonterminal), + whose value is the symbol's type (an int >= 256). + """ + for name, symbol in grammar.symbol2number.items(): + setattr(self, name, symbol) + + +python_grammar = driver.load_packaged_grammar("lib2to3", _GRAMMAR_FILE) + +python_symbols = Symbols(python_grammar) + +python_grammar_no_print_statement = python_grammar.copy() +del python_grammar_no_print_statement.keywords["print"] + +python_grammar_no_print_and_exec_statement = python_grammar_no_print_statement.copy() +del python_grammar_no_print_and_exec_statement.keywords["exec"] + +pattern_grammar = driver.load_packaged_grammar("lib2to3", _PATTERN_GRAMMAR_FILE) +pattern_symbols = Symbols(pattern_grammar) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/pytree.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/pytree.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/pytree.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/pytree.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,853 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +""" +Python parse tree definitions. + +This is a very concrete parse tree; we need to keep every token and +even the comments and whitespace between tokens. + +There's also a pattern matching implementation here. +""" + +__author__ = "Guido van Rossum " + +import sys +from io import StringIO + +HUGE = 0x7FFFFFFF # maximum repeat count, default max + +_type_reprs = {} +def type_repr(type_num): + global _type_reprs + if not _type_reprs: + from .pygram import python_symbols + # printing tokens is possible but not as useful + # from .pgen2 import token // token.__dict__.items(): + for name, val in python_symbols.__dict__.items(): + if type(val) == int: _type_reprs[val] = name + return _type_reprs.setdefault(type_num, type_num) + +class Base(object): + + """ + Abstract base class for Node and Leaf. + + This provides some default functionality and boilerplate using the + template pattern. + + A node may be a subnode of at most one parent. + """ + + # Default values for instance variables + type = None # int: token number (< 256) or symbol number (>= 256) + parent = None # Parent node pointer, or None + children = () # Tuple of subnodes + was_changed = False + was_checked = False + + def __new__(cls, *args, **kwds): + """Constructor that prevents Base from being instantiated.""" + assert cls is not Base, "Cannot instantiate Base" + return object.__new__(cls) + + def __eq__(self, other): + """ + Compare two nodes for equality. + + This calls the method _eq(). + """ + if self.__class__ is not other.__class__: + return NotImplemented + return self._eq(other) + + __hash__ = None # For Py3 compatibility. + + def _eq(self, other): + """ + Compare two nodes for equality. + + This is called by __eq__ and __ne__. It is only called if the two nodes + have the same type. This must be implemented by the concrete subclass. + Nodes should be considered equal if they have the same structure, + ignoring the prefix string and other context information. + """ + raise NotImplementedError + + def clone(self): + """ + Return a cloned (deep) copy of self. 
+ + This must be implemented by the concrete subclass. + """ + raise NotImplementedError + + def post_order(self): + """ + Return a post-order iterator for the tree. + + This must be implemented by the concrete subclass. + """ + raise NotImplementedError + + def pre_order(self): + """ + Return a pre-order iterator for the tree. + + This must be implemented by the concrete subclass. + """ + raise NotImplementedError + + def replace(self, new): + """Replace this node with a new one in the parent.""" + assert self.parent is not None, str(self) + assert new is not None + if not isinstance(new, list): + new = [new] + l_children = [] + found = False + for ch in self.parent.children: + if ch is self: + assert not found, (self.parent.children, self, new) + if new is not None: + l_children.extend(new) + found = True + else: + l_children.append(ch) + assert found, (self.children, self, new) + self.parent.changed() + self.parent.children = l_children + for x in new: + x.parent = self.parent + self.parent = None + + def get_lineno(self): + """Return the line number which generated the invocant node.""" + node = self + while not isinstance(node, Leaf): + if not node.children: + return + node = node.children[0] + return node.lineno + + def changed(self): + if self.parent: + self.parent.changed() + self.was_changed = True + + def remove(self): + """ + Remove the node from the tree. Returns the position of the node in its + parent's children before it was removed. + """ + if self.parent: + for i, node in enumerate(self.parent.children): + if node is self: + self.parent.changed() + del self.parent.children[i] + self.parent = None + return i + + @property + def next_sibling(self): + """ + The node immediately following the invocant in their parent's children + list. If the invocant does not have a next sibling, it is None + """ + if self.parent is None: + return None + + # Can't use index(); we need to test by identity + for i, child in enumerate(self.parent.children): + if child is self: + try: + return self.parent.children[i+1] + except IndexError: + return None + + @property + def prev_sibling(self): + """ + The node immediately preceding the invocant in their parent's children + list. If the invocant does not have a previous sibling, it is None. + """ + if self.parent is None: + return None + + # Can't use index(); we need to test by identity + for i, child in enumerate(self.parent.children): + if child is self: + if i == 0: + return None + return self.parent.children[i-1] + + def leaves(self): + for child in self.children: + yield from child.leaves() + + def depth(self): + if self.parent is None: + return 0 + return 1 + self.parent.depth() + + def get_suffix(self): + """ + Return the string immediately following the invocant node. This is + effectively equivalent to node.next_sibling.prefix + """ + next_sib = self.next_sibling + if next_sib is None: + return "" + return next_sib.prefix + + if sys.version_info < (3, 0): + def __str__(self): + return str(self).encode("ascii") + +class Node(Base): + + """Concrete implementation for interior nodes.""" + + def __init__(self,type, children, + context=None, + prefix=None, + fixers_applied=None): + """ + Initializer. + + Takes a type constant (a symbol number >= 256), a sequence of + child nodes, and an optional context keyword argument. + + As a side effect, the parent pointers of the children are updated. 
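# Editor's note: illustrative sketch, not part of this patch; assumes lib2to3
# is importable from the stdlib. It builds a tiny tree by hand to show the
# side effect named above: Node() adopts its children and sets their .parent
# pointers. The symbol number 256 and token type 1 (NAME) are arbitrary but
# valid choices for the example.
from lib2to3.pytree import Node, Leaf

x = Leaf(1, "x")                  # token type < 256
y = Leaf(1, "y", prefix=" ")      # prefix holds the preceding whitespace
expr = Node(256, [x, y])          # symbol type >= 256
assert x.parent is expr and y.parent is expr
assert str(expr) == "x y"         # str() reproduces the source, prefixes included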
+ """ + assert type >= 256, type + self.type = type + self.children = list(children) + for ch in self.children: + assert ch.parent is None, repr(ch) + ch.parent = self + if prefix is not None: + self.prefix = prefix + if fixers_applied: + self.fixers_applied = fixers_applied[:] + else: + self.fixers_applied = None + + def __repr__(self): + """Return a canonical string representation.""" + return "%s(%s, %r)" % (self.__class__.__name__, + type_repr(self.type), + self.children) + + def __unicode__(self): + """ + Return a pretty string representation. + + This reproduces the input source exactly. + """ + return "".join(map(str, self.children)) + + if sys.version_info > (3, 0): + __str__ = __unicode__ + + def _eq(self, other): + """Compare two nodes for equality.""" + return (self.type, self.children) == (other.type, other.children) + + def clone(self): + """Return a cloned (deep) copy of self.""" + return Node(self.type, [ch.clone() for ch in self.children], + fixers_applied=self.fixers_applied) + + def post_order(self): + """Return a post-order iterator for the tree.""" + for child in self.children: + yield from child.post_order() + yield self + + def pre_order(self): + """Return a pre-order iterator for the tree.""" + yield self + for child in self.children: + yield from child.pre_order() + + @property + def prefix(self): + """ + The whitespace and comments preceding this node in the input. + """ + if not self.children: + return "" + return self.children[0].prefix + + @prefix.setter + def prefix(self, prefix): + if self.children: + self.children[0].prefix = prefix + + def set_child(self, i, child): + """ + Equivalent to 'node.children[i] = child'. This method also sets the + child's parent attribute appropriately. + """ + child.parent = self + self.children[i].parent = None + self.children[i] = child + self.changed() + + def insert_child(self, i, child): + """ + Equivalent to 'node.children.insert(i, child)'. This method also sets + the child's parent attribute appropriately. + """ + child.parent = self + self.children.insert(i, child) + self.changed() + + def append_child(self, child): + """ + Equivalent to 'node.children.append(child)'. This method also sets the + child's parent attribute appropriately. + """ + child.parent = self + self.children.append(child) + self.changed() + + +class Leaf(Base): + + """Concrete implementation for leaf nodes.""" + + # Default values for instance variables + _prefix = "" # Whitespace and comments preceding this token in the input + lineno = 0 # Line where this token starts in the input + column = 0 # Column where this token tarts in the input + + def __init__(self, type, value, + context=None, + prefix=None, + fixers_applied=[]): + """ + Initializer. + + Takes a type constant (a token number < 256), a string value, and an + optional context keyword argument. + """ + assert 0 <= type < 256, type + if context is not None: + self._prefix, (self.lineno, self.column) = context + self.type = type + self.value = value + if prefix is not None: + self._prefix = prefix + self.fixers_applied = fixers_applied[:] + + def __repr__(self): + """Return a canonical string representation.""" + return "%s(%r, %r)" % (self.__class__.__name__, + self.type, + self.value) + + def __unicode__(self): + """ + Return a pretty string representation. + + This reproduces the input source exactly. 
+ """ + return self.prefix + str(self.value) + + if sys.version_info > (3, 0): + __str__ = __unicode__ + + def _eq(self, other): + """Compare two nodes for equality.""" + return (self.type, self.value) == (other.type, other.value) + + def clone(self): + """Return a cloned (deep) copy of self.""" + return Leaf(self.type, self.value, + (self.prefix, (self.lineno, self.column)), + fixers_applied=self.fixers_applied) + + def leaves(self): + yield self + + def post_order(self): + """Return a post-order iterator for the tree.""" + yield self + + def pre_order(self): + """Return a pre-order iterator for the tree.""" + yield self + + @property + def prefix(self): + """ + The whitespace and comments preceding this token in the input. + """ + return self._prefix + + @prefix.setter + def prefix(self, prefix): + self.changed() + self._prefix = prefix + +def convert(gr, raw_node): + """ + Convert raw node information to a Node or Leaf instance. + + This is passed to the parser driver which calls it whenever a reduction of a + grammar rule produces a new complete node, so that the tree is build + strictly bottom-up. + """ + type, value, context, children = raw_node + if children or type in gr.number2symbol: + # If there's exactly one child, return that child instead of + # creating a new node. + if len(children) == 1: + return children[0] + return Node(type, children, context=context) + else: + return Leaf(type, value, context=context) + + +class BasePattern(object): + + """ + A pattern is a tree matching pattern. + + It looks for a specific node type (token or symbol), and + optionally for a specific content. + + This is an abstract base class. There are three concrete + subclasses: + + - LeafPattern matches a single leaf node; + - NodePattern matches a single node (usually non-leaf); + - WildcardPattern matches a sequence of nodes of variable length. + """ + + # Defaults for instance variables + type = None # Node type (token if < 256, symbol if >= 256) + content = None # Optional content matching pattern + name = None # Optional name used to store match in results dict + + def __new__(cls, *args, **kwds): + """Constructor that prevents BasePattern from being instantiated.""" + assert cls is not BasePattern, "Cannot instantiate BasePattern" + return object.__new__(cls) + + def __repr__(self): + args = [type_repr(self.type), self.content, self.name] + while args and args[-1] is None: + del args[-1] + return "%s(%s)" % (self.__class__.__name__, ", ".join(map(repr, args))) + + def optimize(self): + """ + A subclass can define this as a hook for optimizations. + + Returns either self or another node with the same effect. + """ + return self + + def match(self, node, results=None): + """ + Does this pattern exactly match a node? + + Returns True if it matches, False if not. + + If results is not None, it must be a dict which will be + updated with the nodes matching named subpatterns. + + Default implementation for non-wildcard patterns. + """ + if self.type is not None and node.type != self.type: + return False + if self.content is not None: + r = None + if results is not None: + r = {} + if not self._submatch(node, r): + return False + if r: + results.update(r) + if results is not None and self.name: + results[self.name] = node + return True + + def match_seq(self, nodes, results=None): + """ + Does this pattern exactly match a sequence of nodes? + + Default implementation for non-wildcard patterns. 
+ """ + if len(nodes) != 1: + return False + return self.match(nodes[0], results) + + def generate_matches(self, nodes): + """ + Generator yielding all matches for this pattern. + + Default implementation for non-wildcard patterns. + """ + r = {} + if nodes and self.match(nodes[0], r): + yield 1, r + + +class LeafPattern(BasePattern): + + def __init__(self, type=None, content=None, name=None): + """ + Initializer. Takes optional type, content, and name. + + The type, if given must be a token type (< 256). If not given, + this matches any *leaf* node; the content may still be required. + + The content, if given, must be a string. + + If a name is given, the matching node is stored in the results + dict under that key. + """ + if type is not None: + assert 0 <= type < 256, type + if content is not None: + assert isinstance(content, str), repr(content) + self.type = type + self.content = content + self.name = name + + def match(self, node, results=None): + """Override match() to insist on a leaf node.""" + if not isinstance(node, Leaf): + return False + return BasePattern.match(self, node, results) + + def _submatch(self, node, results=None): + """ + Match the pattern's content to the node's children. + + This assumes the node type matches and self.content is not None. + + Returns True if it matches, False if not. + + If results is not None, it must be a dict which will be + updated with the nodes matching named subpatterns. + + When returning False, the results dict may still be updated. + """ + return self.content == node.value + + +class NodePattern(BasePattern): + + wildcards = False + + def __init__(self, type=None, content=None, name=None): + """ + Initializer. Takes optional type, content, and name. + + The type, if given, must be a symbol type (>= 256). If the + type is None this matches *any* single node (leaf or not), + except if content is not None, in which it only matches + non-leaf nodes that also match the content pattern. + + The content, if not None, must be a sequence of Patterns that + must match the node's children exactly. If the content is + given, the type must not be None. + + If a name is given, the matching node is stored in the results + dict under that key. + """ + if type is not None: + assert type >= 256, type + if content is not None: + assert not isinstance(content, str), repr(content) + content = list(content) + for i, item in enumerate(content): + assert isinstance(item, BasePattern), (i, item) + if isinstance(item, WildcardPattern): + self.wildcards = True + self.type = type + self.content = content + self.name = name + + def _submatch(self, node, results=None): + """ + Match the pattern's content to the node's children. + + This assumes the node type matches and self.content is not None. + + Returns True if it matches, False if not. + + If results is not None, it must be a dict which will be + updated with the nodes matching named subpatterns. + + When returning False, the results dict may still be updated. + """ + if self.wildcards: + for c, r in generate_matches(self.content, node.children): + if c == len(node.children): + if results is not None: + results.update(r) + return True + return False + if len(self.content) != len(node.children): + return False + for subpattern, child in zip(self.content, node.children): + if not subpattern.match(child, results): + return False + return True + + +class WildcardPattern(BasePattern): + + """ + A wildcard pattern can match zero or more nodes. 
+
+    This has all the flexibility needed to implement patterns like:
+
+    .* .+ .? .{m,n}
+    (a b c | d e | f)
+    (...)* (...)+ (...)? (...){m,n}
+
+    except it always uses non-greedy matching.
+    """
+
+    def __init__(self, content=None, min=0, max=HUGE, name=None):
+        """
+        Initializer.
+
+        Args:
+            content: optional sequence of subsequences of patterns;
+                     if absent, matches one node;
+                     if present, each subsequence is an alternative [*]
+            min: optional minimum number of times to match, default 0
+            max: optional maximum number of times to match, default HUGE
+            name: optional name assigned to this match
+
+        [*] Thus, if content is [[a, b, c], [d, e], [f, g, h]] this is
+            equivalent to (a b c | d e | f g h); if content is None,
+            this is equivalent to '.' in regular expression terms.
+            The min and max parameters work as follows:
+                min=0, max=maxint: .*
+                min=1, max=maxint: .+
+                min=0, max=1: .?
+                min=1, max=1: .
+            If content is not None, replace the dot with the parenthesized
+            list of alternatives, e.g. (a b c | d e | f g h)*
+        """
+        assert 0 <= min <= max <= HUGE, (min, max)
+        if content is not None:
+            content = tuple(map(tuple, content))  # Protect against alterations
+            # Check sanity of alternatives
+            assert len(content), repr(content)  # Can't have zero alternatives
+            for alt in content:
+                assert len(alt), repr(alt)  # Can't have empty alternatives
+        self.content = content
+        self.min = min
+        self.max = max
+        self.name = name
+
+    def optimize(self):
+        """Optimize certain stacked wildcard patterns."""
+        subpattern = None
+        if (self.content is not None and
+            len(self.content) == 1 and len(self.content[0]) == 1):
+            subpattern = self.content[0][0]
+        if self.min == 1 and self.max == 1:
+            if self.content is None:
+                return NodePattern(name=self.name)
+            if subpattern is not None and self.name == subpattern.name:
+                return subpattern.optimize()
+        if (self.min <= 1 and isinstance(subpattern, WildcardPattern) and
+            subpattern.min <= 1 and self.name == subpattern.name):
+            return WildcardPattern(subpattern.content,
+                                   self.min*subpattern.min,
+                                   self.max*subpattern.max,
+                                   subpattern.name)
+        return self
+
+    def match(self, node, results=None):
+        """Does this pattern exactly match a node?"""
+        return self.match_seq([node], results)
+
+    def match_seq(self, nodes, results=None):
+        """Does this pattern exactly match a sequence of nodes?"""
+        for c, r in self.generate_matches(nodes):
+            if c == len(nodes):
+                if results is not None:
+                    results.update(r)
+                    if self.name:
+                        results[self.name] = list(nodes)
+                return True
+        return False
+
+    def generate_matches(self, nodes):
+        """
+        Generator yielding matches for a sequence of nodes.
+
+        Args:
+            nodes: sequence of nodes
+
+        Yields:
+            (count, results) tuples where:
+            count: the match comprises nodes[:count];
+            results: dict containing named submatches.
+        """
+        if self.content is None:
+            # Shortcut for special case (see __init__.__doc__)
+            for count in range(self.min, 1 + min(len(nodes), self.max)):
+                r = {}
+                if self.name:
+                    r[self.name] = nodes[:count]
+                yield count, r
+        elif self.name == "bare_name":
+            yield self._bare_name_matches(nodes)
+        else:
+            # The reason for this is that hitting the recursion limit usually
+            # results in some ugly messages about how RuntimeErrors are being
+            # ignored. We only have to do this on CPython, though, because other
+            # implementations don't have this nasty bug in the first place.
+ if hasattr(sys, "getrefcount"): + save_stderr = sys.stderr + sys.stderr = StringIO() + try: + for count, r in self._recursive_matches(nodes, 0): + if self.name: + r[self.name] = nodes[:count] + yield count, r + except RuntimeError: + # Fall back to the iterative pattern matching scheme if the + # recursive scheme hits the recursion limit (RecursionError). + for count, r in self._iterative_matches(nodes): + if self.name: + r[self.name] = nodes[:count] + yield count, r + finally: + if hasattr(sys, "getrefcount"): + sys.stderr = save_stderr + + def _iterative_matches(self, nodes): + """Helper to iteratively yield the matches.""" + nodelen = len(nodes) + if 0 >= self.min: + yield 0, {} + + results = [] + # generate matches that use just one alt from self.content + for alt in self.content: + for c, r in generate_matches(alt, nodes): + yield c, r + results.append((c, r)) + + # for each match, iterate down the nodes + while results: + new_results = [] + for c0, r0 in results: + # stop if the entire set of nodes has been matched + if c0 < nodelen and c0 <= self.max: + for alt in self.content: + for c1, r1 in generate_matches(alt, nodes[c0:]): + if c1 > 0: + r = {} + r.update(r0) + r.update(r1) + yield c0 + c1, r + new_results.append((c0 + c1, r)) + results = new_results + + def _bare_name_matches(self, nodes): + """Special optimized matcher for bare_name.""" + count = 0 + r = {} + done = False + max = len(nodes) + while not done and count < max: + done = True + for leaf in self.content: + if leaf[0].match(nodes[count], r): + count += 1 + done = False + break + r[self.name] = nodes[:count] + return count, r + + def _recursive_matches(self, nodes, count): + """Helper to recursively yield the matches.""" + assert self.content is not None + if count >= self.min: + yield 0, {} + if count < self.max: + for alt in self.content: + for c0, r0 in generate_matches(alt, nodes): + for c1, r1 in self._recursive_matches(nodes[c0:], count+1): + r = {} + r.update(r0) + r.update(r1) + yield c0 + c1, r + + +class NegatedPattern(BasePattern): + + def __init__(self, content=None): + """ + Initializer. + + The argument is either a pattern or None. If it is None, this + only matches an empty sequence (effectively '$' in regex + lingo). If it is not None, this matches whenever the argument + pattern doesn't have any matches. + """ + if content is not None: + assert isinstance(content, BasePattern), repr(content) + self.content = content + + def match(self, node): + # We never match a node in its entirety + return False + + def match_seq(self, nodes): + # We only match an empty sequence of nodes in its entirety + return len(nodes) == 0 + + def generate_matches(self, nodes): + if self.content is None: + # Return a match if there is an empty sequence + if len(nodes) == 0: + yield 0, {} + else: + # Return a match if the argument pattern has no matches + for c, r in self.content.generate_matches(nodes): + return + yield 0, {} + + +def generate_matches(patterns, nodes): + """ + Generator yielding matches for a sequence of patterns and nodes. + + Args: + patterns: a sequence of patterns + nodes: a sequence of nodes + + Yields: + (count, results) tuples where: + count: the entire sequence of patterns matches nodes[:count]; + results: dict containing named submatches. 
+ """ + if not patterns: + yield 0, {} + else: + p, rest = patterns[0], patterns[1:] + for c0, r0 in p.generate_matches(nodes): + if not rest: + yield c0, r0 + else: + for c1, r1 in generate_matches(rest, nodes[c0:]): + r = {} + r.update(r0) + r.update(r1) + yield c0 + c1, r diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/refactor.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/refactor.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/refactor.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/refactor.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,732 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Refactoring framework. + +Used as a main program, this can refactor any number of files and/or +recursively descend down directories. Imported as a module, this +provides infrastructure to write your own refactoring tool. +""" + +__author__ = "Guido van Rossum " + + +# Python imports +import io +import os +import pkgutil +import sys +import logging +import operator +import collections +from itertools import chain + +# Local imports +from .pgen2 import driver, tokenize, token +from .fixer_util import find_root +from . import pytree, pygram +from . import btm_matcher as bm + + +def get_all_fix_names(fixer_pkg, remove_prefix=True): + """Return a sorted list of all available fix names in the given package.""" + pkg = __import__(fixer_pkg, [], [], ["*"]) + fix_names = [] + for finder, name, ispkg in pkgutil.iter_modules(pkg.__path__): + if name.startswith("fix_"): + if remove_prefix: + name = name[4:] + fix_names.append(name) + return fix_names + + +class _EveryNode(Exception): + pass + + +def _get_head_types(pat): + """ Accepts a pytree Pattern Node and returns a set + of the pattern types which will match first. """ + + if isinstance(pat, (pytree.NodePattern, pytree.LeafPattern)): + # NodePatters must either have no type and no content + # or a type and content -- so they don't get any farther + # Always return leafs + if pat.type is None: + raise _EveryNode + return {pat.type} + + if isinstance(pat, pytree.NegatedPattern): + if pat.content: + return _get_head_types(pat.content) + raise _EveryNode # Negated Patterns don't have a type + + if isinstance(pat, pytree.WildcardPattern): + # Recurse on each node in content + r = set() + for p in pat.content: + for x in p: + r.update(_get_head_types(x)) + return r + + raise Exception("Oh no! I don't understand pattern %s" %(pat)) + + +def _get_headnode_dict(fixer_list): + """ Accepts a list of fixers and returns a dictionary + of head node type --> fixer list. """ + head_nodes = collections.defaultdict(list) + every = [] + for fixer in fixer_list: + if fixer.pattern: + try: + heads = _get_head_types(fixer.pattern) + except _EveryNode: + every.append(fixer) + else: + for node_type in heads: + head_nodes[node_type].append(fixer) + else: + if fixer._accept_type is not None: + head_nodes[fixer._accept_type].append(fixer) + else: + every.append(fixer) + for node_type in chain(pygram.python_grammar.symbol2number.values(), + pygram.python_grammar.tokens): + head_nodes[node_type].extend(every) + return dict(head_nodes) + + +def get_fixers_from_package(pkg_name): + """ + Return the fully qualified names for fixers in the package pkg_name. + """ + return [pkg_name + "." 
+ fix_name + for fix_name in get_all_fix_names(pkg_name, False)] + +def _identity(obj): + return obj + + +def _detect_future_features(source): + have_docstring = False + gen = tokenize.generate_tokens(io.StringIO(source).readline) + def advance(): + tok = next(gen) + return tok[0], tok[1] + ignore = frozenset({token.NEWLINE, tokenize.NL, token.COMMENT}) + features = set() + try: + while True: + tp, value = advance() + if tp in ignore: + continue + elif tp == token.STRING: + if have_docstring: + break + have_docstring = True + elif tp == token.NAME and value == "from": + tp, value = advance() + if tp != token.NAME or value != "__future__": + break + tp, value = advance() + if tp != token.NAME or value != "import": + break + tp, value = advance() + if tp == token.OP and value == "(": + tp, value = advance() + while tp == token.NAME: + features.add(value) + tp, value = advance() + if tp != token.OP or value != ",": + break + tp, value = advance() + else: + break + except StopIteration: + pass + return frozenset(features) + + +class FixerError(Exception): + """A fixer could not be loaded.""" + + +class RefactoringTool(object): + + _default_options = {"print_function" : False, + "exec_function": False, + "write_unchanged_files" : False} + + CLASS_PREFIX = "Fix" # The prefix for fixer classes + FILE_PREFIX = "fix_" # The prefix for modules with a fixer within + + def __init__(self, fixer_names, options=None, explicit=None): + """Initializer. + + Args: + fixer_names: a list of fixers to import + options: a dict with configuration. + explicit: a list of fixers to run even if they are explicit. + """ + self.fixers = fixer_names + self.explicit = explicit or [] + self.options = self._default_options.copy() + if options is not None: + self.options.update(options) + self.grammar = pygram.python_grammar.copy() + + if self.options['print_function']: + del self.grammar.keywords["print"] + elif self.options['exec_function']: + del self.grammar.keywords["exec"] + + # When this is True, the refactor*() methods will call write_file() for + # files processed even if they were not changed during refactoring. If + # and only if the refactor method's write parameter was True. + self.write_unchanged_files = self.options.get("write_unchanged_files") + self.errors = [] + self.logger = logging.getLogger("RefactoringTool") + self.fixer_log = [] + self.wrote = False + self.driver = driver.Driver(self.grammar, + convert=pytree.convert, + logger=self.logger) + self.pre_order, self.post_order = self.get_fixers() + + + self.files = [] # List of files that were or should be modified + + self.BM = bm.BottomMatcher() + self.bmi_pre_order = [] # Bottom Matcher incompatible fixers + self.bmi_post_order = [] + + for fixer in chain(self.post_order, self.pre_order): + if fixer.BM_compatible: + self.BM.add_fixer(fixer) + # remove fixers that will be handled by the bottom-up + # matcher + elif fixer in self.pre_order: + self.bmi_pre_order.append(fixer) + elif fixer in self.post_order: + self.bmi_post_order.append(fixer) + + self.bmi_pre_order_heads = _get_headnode_dict(self.bmi_pre_order) + self.bmi_post_order_heads = _get_headnode_dict(self.bmi_post_order) + + + + def get_fixers(self): + """Inspects the options to load the requested patterns and handlers. + + Returns: + (pre_order, post_order), where pre_order is the list of fixers that + want a pre-order AST traversal, and post_order is the list that want + post-order traversal. 
+ """ + pre_order_fixers = [] + post_order_fixers = [] + for fix_mod_path in self.fixers: + mod = __import__(fix_mod_path, {}, {}, ["*"]) + fix_name = fix_mod_path.rsplit(".", 1)[-1] + if fix_name.startswith(self.FILE_PREFIX): + fix_name = fix_name[len(self.FILE_PREFIX):] + parts = fix_name.split("_") + class_name = self.CLASS_PREFIX + "".join([p.title() for p in parts]) + try: + fix_class = getattr(mod, class_name) + except AttributeError: + raise FixerError("Can't find %s.%s" % (fix_name, class_name)) from None + fixer = fix_class(self.options, self.fixer_log) + if fixer.explicit and self.explicit is not True and \ + fix_mod_path not in self.explicit: + self.log_message("Skipping optional fixer: %s", fix_name) + continue + + self.log_debug("Adding transformation: %s", fix_name) + if fixer.order == "pre": + pre_order_fixers.append(fixer) + elif fixer.order == "post": + post_order_fixers.append(fixer) + else: + raise FixerError("Illegal fixer order: %r" % fixer.order) + + key_func = operator.attrgetter("run_order") + pre_order_fixers.sort(key=key_func) + post_order_fixers.sort(key=key_func) + return (pre_order_fixers, post_order_fixers) + + def log_error(self, msg, *args, **kwds): + """Called when an error occurs.""" + raise + + def log_message(self, msg, *args): + """Hook to log a message.""" + if args: + msg = msg % args + self.logger.info(msg) + + def log_debug(self, msg, *args): + if args: + msg = msg % args + self.logger.debug(msg) + + def print_output(self, old_text, new_text, filename, equal): + """Called with the old version, new version, and filename of a + refactored file.""" + pass + + def refactor(self, items, write=False, doctests_only=False): + """Refactor a list of files and directories.""" + + for dir_or_file in items: + if os.path.isdir(dir_or_file): + self.refactor_dir(dir_or_file, write, doctests_only) + else: + self.refactor_file(dir_or_file, write, doctests_only) + + def refactor_dir(self, dir_name, write=False, doctests_only=False): + """Descends down a directory and refactor every Python file found. + + Python files are assumed to have a .py extension. + + Files and subdirectories starting with '.' are skipped. + """ + py_ext = os.extsep + "py" + for dirpath, dirnames, filenames in os.walk(dir_name): + self.log_debug("Descending into %s", dirpath) + dirnames.sort() + filenames.sort() + for name in filenames: + if (not name.startswith(".") and + os.path.splitext(name)[1] == py_ext): + fullname = os.path.join(dirpath, name) + self.refactor_file(fullname, write, doctests_only) + # Modify dirnames in-place to remove subdirs with leading dots + dirnames[:] = [dn for dn in dirnames if not dn.startswith(".")] + + def _read_python_source(self, filename): + """ + Do our best to decode a Python source file correctly. + """ + try: + f = open(filename, "rb") + except OSError as err: + self.log_error("Can't open %s: %s", filename, err) + return None, None + try: + encoding = tokenize.detect_encoding(f.readline)[0] + finally: + f.close() + with io.open(filename, "r", encoding=encoding, newline='') as f: + return f.read(), encoding + + def refactor_file(self, filename, write=False, doctests_only=False): + """Refactors a file.""" + input, encoding = self._read_python_source(filename) + if input is None: + # Reading the file failed. 
+ return + input += "\n" # Silence certain parse errors + if doctests_only: + self.log_debug("Refactoring doctests in %s", filename) + output = self.refactor_docstring(input, filename) + if self.write_unchanged_files or output != input: + self.processed_file(output, filename, input, write, encoding) + else: + self.log_debug("No doctest changes in %s", filename) + else: + tree = self.refactor_string(input, filename) + if self.write_unchanged_files or (tree and tree.was_changed): + # The [:-1] is to take off the \n we added earlier + self.processed_file(str(tree)[:-1], filename, + write=write, encoding=encoding) + else: + self.log_debug("No changes in %s", filename) + + def refactor_string(self, data, name): + """Refactor a given input string. + + Args: + data: a string holding the code to be refactored. + name: a human-readable name for use in error/log messages. + + Returns: + An AST corresponding to the refactored input stream; None if + there were errors during the parse. + """ + features = _detect_future_features(data) + if "print_function" in features: + self.driver.grammar = pygram.python_grammar_no_print_statement + try: + tree = self.driver.parse_string(data) + except Exception as err: + self.log_error("Can't parse %s: %s: %s", + name, err.__class__.__name__, err) + return + finally: + self.driver.grammar = self.grammar + tree.future_features = features + self.log_debug("Refactoring %s", name) + self.refactor_tree(tree, name) + return tree + + def refactor_stdin(self, doctests_only=False): + input = sys.stdin.read() + if doctests_only: + self.log_debug("Refactoring doctests in stdin") + output = self.refactor_docstring(input, "") + if self.write_unchanged_files or output != input: + self.processed_file(output, "", input) + else: + self.log_debug("No doctest changes in stdin") + else: + tree = self.refactor_string(input, "") + if self.write_unchanged_files or (tree and tree.was_changed): + self.processed_file(str(tree), "", input) + else: + self.log_debug("No changes in stdin") + + def refactor_tree(self, tree, name): + """Refactors a parse tree (modifying the tree in place). + + For compatible patterns the bottom matcher module is + used. Otherwise the tree is traversed node-to-node for + matches. + + Args: + tree: a pytree.Node instance representing the root of the tree + to be refactored. + name: a human-readable name for this tree. + + Returns: + True if the tree was modified, False otherwise. 
+ """ + + for fixer in chain(self.pre_order, self.post_order): + fixer.start_tree(tree, name) + + #use traditional matching for the incompatible fixers + self.traverse_by(self.bmi_pre_order_heads, tree.pre_order()) + self.traverse_by(self.bmi_post_order_heads, tree.post_order()) + + # obtain a set of candidate nodes + match_set = self.BM.run(tree.leaves()) + + while any(match_set.values()): + for fixer in self.BM.fixers: + if fixer in match_set and match_set[fixer]: + #sort by depth; apply fixers from bottom(of the AST) to top + match_set[fixer].sort(key=pytree.Base.depth, reverse=True) + + if fixer.keep_line_order: + #some fixers(eg fix_imports) must be applied + #with the original file's line order + match_set[fixer].sort(key=pytree.Base.get_lineno) + + for node in list(match_set[fixer]): + if node in match_set[fixer]: + match_set[fixer].remove(node) + + try: + find_root(node) + except ValueError: + # this node has been cut off from a + # previous transformation ; skip + continue + + if node.fixers_applied and fixer in node.fixers_applied: + # do not apply the same fixer again + continue + + results = fixer.match(node) + + if results: + new = fixer.transform(node, results) + if new is not None: + node.replace(new) + #new.fixers_applied.append(fixer) + for node in new.post_order(): + # do not apply the fixer again to + # this or any subnode + if not node.fixers_applied: + node.fixers_applied = [] + node.fixers_applied.append(fixer) + + # update the original match set for + # the added code + new_matches = self.BM.run(new.leaves()) + for fxr in new_matches: + if not fxr in match_set: + match_set[fxr]=[] + + match_set[fxr].extend(new_matches[fxr]) + + for fixer in chain(self.pre_order, self.post_order): + fixer.finish_tree(tree, name) + return tree.was_changed + + def traverse_by(self, fixers, traversal): + """Traverse an AST, applying a set of fixers to each node. + + This is a helper method for refactor_tree(). + + Args: + fixers: a list of fixer instances. + traversal: a generator that yields AST nodes. + + Returns: + None + """ + if not fixers: + return + for node in traversal: + for fixer in fixers[node.type]: + results = fixer.match(node) + if results: + new = fixer.transform(node, results) + if new is not None: + node.replace(new) + node = new + + def processed_file(self, new_text, filename, old_text=None, write=False, + encoding=None): + """ + Called when a file has been refactored and there may be changes. + """ + self.files.append(filename) + if old_text is None: + old_text = self._read_python_source(filename)[0] + if old_text is None: + return + equal = old_text == new_text + self.print_output(old_text, new_text, filename, equal) + if equal: + self.log_debug("No changes to %s", filename) + if not self.write_unchanged_files: + return + if write: + self.write_file(new_text, filename, old_text, encoding) + else: + self.log_debug("Not writing changes to %s", filename) + + def write_file(self, new_text, filename, old_text, encoding=None): + """Writes a string to a file. + + It first shows a unified diff between the old text and the new text, and + then rewrites the file; the latter is only done if the write option is + set. 
+ """ + try: + fp = io.open(filename, "w", encoding=encoding, newline='') + except OSError as err: + self.log_error("Can't create %s: %s", filename, err) + return + + with fp: + try: + fp.write(new_text) + except OSError as err: + self.log_error("Can't write %s: %s", filename, err) + self.log_debug("Wrote changes to %s", filename) + self.wrote = True + + PS1 = ">>> " + PS2 = "... " + + def refactor_docstring(self, input, filename): + """Refactors a docstring, looking for doctests. + + This returns a modified version of the input string. It looks + for doctests, which start with a ">>>" prompt, and may be + continued with "..." prompts, as long as the "..." is indented + the same as the ">>>". + + (Unfortunately we can't use the doctest module's parser, + since, like most parsers, it is not geared towards preserving + the original source.) + """ + result = [] + block = None + block_lineno = None + indent = None + lineno = 0 + for line in input.splitlines(keepends=True): + lineno += 1 + if line.lstrip().startswith(self.PS1): + if block is not None: + result.extend(self.refactor_doctest(block, block_lineno, + indent, filename)) + block_lineno = lineno + block = [line] + i = line.find(self.PS1) + indent = line[:i] + elif (indent is not None and + (line.startswith(indent + self.PS2) or + line == indent + self.PS2.rstrip() + "\n")): + block.append(line) + else: + if block is not None: + result.extend(self.refactor_doctest(block, block_lineno, + indent, filename)) + block = None + indent = None + result.append(line) + if block is not None: + result.extend(self.refactor_doctest(block, block_lineno, + indent, filename)) + return "".join(result) + + def refactor_doctest(self, block, lineno, indent, filename): + """Refactors one doctest. + + A doctest is given as a block of lines, the first of which starts + with ">>>" (possibly indented), while the remaining lines start + with "..." (identically indented). + + """ + try: + tree = self.parse_block(block, lineno, indent) + except Exception as err: + if self.logger.isEnabledFor(logging.DEBUG): + for line in block: + self.log_debug("Source: %s", line.rstrip("\n")) + self.log_error("Can't parse docstring in %s line %s: %s: %s", + filename, lineno, err.__class__.__name__, err) + return block + if self.refactor_tree(tree, filename): + new = str(tree).splitlines(keepends=True) + # Undo the adjustment of the line numbers in wrap_toks() below. + clipped, new = new[:lineno-1], new[lineno-1:] + assert clipped == ["\n"] * (lineno-1), clipped + if not new[-1].endswith("\n"): + new[-1] += "\n" + block = [indent + self.PS1 + new.pop(0)] + if new: + block += [indent + self.PS2 + line for line in new] + return block + + def summarize(self): + if self.wrote: + were = "were" + else: + were = "need to be" + if not self.files: + self.log_message("No files %s modified.", were) + else: + self.log_message("Files that %s modified:", were) + for file in self.files: + self.log_message(file) + if self.fixer_log: + self.log_message("Warnings/messages while refactoring:") + for message in self.fixer_log: + self.log_message(message) + if self.errors: + if len(self.errors) == 1: + self.log_message("There was 1 error:") + else: + self.log_message("There were %d errors:", len(self.errors)) + for msg, args, kwds in self.errors: + self.log_message(msg, *args, **kwds) + + def parse_block(self, block, lineno, indent): + """Parses a block into a tree. + + This is necessary to get correct line number / offset information + in the parser diagnostics and embedded into the parse tree. 
+ """ + tree = self.driver.parse_tokens(self.wrap_toks(block, lineno, indent)) + tree.future_features = frozenset() + return tree + + def wrap_toks(self, block, lineno, indent): + """Wraps a tokenize stream to systematically modify start/end.""" + tokens = tokenize.generate_tokens(self.gen_lines(block, indent).__next__) + for type, value, (line0, col0), (line1, col1), line_text in tokens: + line0 += lineno - 1 + line1 += lineno - 1 + # Don't bother updating the columns; this is too complicated + # since line_text would also have to be updated and it would + # still break for tokens spanning lines. Let the user guess + # that the column numbers for doctests are relative to the + # end of the prompt string (PS1 or PS2). + yield type, value, (line0, col0), (line1, col1), line_text + + + def gen_lines(self, block, indent): + """Generates lines as expected by tokenize from a list of lines. + + This strips the first len(indent + self.PS1) characters off each line. + """ + prefix1 = indent + self.PS1 + prefix2 = indent + self.PS2 + prefix = prefix1 + for line in block: + if line.startswith(prefix): + yield line[len(prefix):] + elif line == prefix.rstrip() + "\n": + yield "\n" + else: + raise AssertionError("line=%r, prefix=%r" % (line, prefix)) + prefix = prefix2 + while True: + yield "" + + +class MultiprocessingUnsupported(Exception): + pass + + +class MultiprocessRefactoringTool(RefactoringTool): + + def __init__(self, *args, **kwargs): + super(MultiprocessRefactoringTool, self).__init__(*args, **kwargs) + self.queue = None + self.output_lock = None + + def refactor(self, items, write=False, doctests_only=False, + num_processes=1): + if num_processes == 1: + return super(MultiprocessRefactoringTool, self).refactor( + items, write, doctests_only) + try: + import multiprocessing + except ImportError: + raise MultiprocessingUnsupported + if self.queue is not None: + raise RuntimeError("already doing multiple processes") + self.queue = multiprocessing.JoinableQueue() + self.output_lock = multiprocessing.Lock() + processes = [multiprocessing.Process(target=self._child) + for i in range(num_processes)] + try: + for p in processes: + p.start() + super(MultiprocessRefactoringTool, self).refactor(items, write, + doctests_only) + finally: + self.queue.join() + for i in range(num_processes): + self.queue.put(None) + for p in processes: + if p.is_alive(): + p.join() + self.queue = None + + def _child(self): + task = self.queue.get() + while task is not None: + args, kwargs = task + try: + super(MultiprocessRefactoringTool, self).refactor_file( + *args, **kwargs) + finally: + self.queue.task_done() + task = self.queue.get() + + def refactor_file(self, *args, **kwargs): + if self.queue is not None: + self.queue.put((args, kwargs)) + else: + return super(MultiprocessRefactoringTool, self).refactor_file( + *args, **kwargs) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/__init__.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/__init__.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/__init__.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/__init__.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,11 @@ +# Author: Collin Winter + +import os +import warnings + +from test.support import load_package_tests + +def load_tests(*args): + with warnings.catch_warnings(): + warnings.filterwarnings('ignore', category=DeprecationWarning, message='lib2to3') + return load_package_tests(os.path.dirname(__file__), *args) diff -Nru 
python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/__main__.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/__main__.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/__main__.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/__main__.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,4 @@ +from . import load_tests +import unittest + +unittest.main() diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/data/README python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/data/README --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/data/README 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/data/README 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,6 @@ +In this directory: +- py2_test_grammar.py -- test file that exercises most/all of Python 2.x's grammar. +- py3_test_grammar.py -- test file that exercises most/all of Python 3.x's grammar. +- infinite_recursion.py -- test file that causes lib2to3's faster recursive pattern matching + scheme to fail, but passes when lib2to3 falls back to iterative pattern matching. +- fixes/ -- for use by test_refactor.py diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/data/bom.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/data/bom.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/data/bom.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/data/bom.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,2 @@ +# coding: utf-8 +print "BOM BOOM!" diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/data/crlf.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/data/crlf.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/data/crlf.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/data/crlf.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,3 @@ +print "hi" + +print "Like bad Windows newlines?" 
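The bom.py and crlf.py snippets above are deliberately Python 2 inputs for test_refactor.py; they get driven through the RefactoringTool and the pytree pattern classes added earlier in this diff. A minimal usage sketch of that round trip, on an interpreter that still ships lib2to3 (the input string and the "<example>" label are illustrative assumptions, not taken from this diff):

    from lib2to3 import pytree
    from lib2to3.pgen2 import token
    from lib2to3.refactor import RefactoringTool, get_fixers_from_package

    # Load the stock fixers and refactor one Python 2 style statement.
    rt = RefactoringTool(get_fixers_from_package("lib2to3.fixes"))
    tree = rt.refactor_string('print "hi"\n', "<example>")
    print(str(tree), end="")            # str() reproduces the source: print("hi")

    # The pattern classes from pytree.py can then be matched against the tree.
    pat = pytree.LeafPattern(token.NAME, name="n")
    for leaf in tree.leaves():
        r = {}
        if pat.match(leaf, r):
            print(r["n"].value)         # -> print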
diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/data/different_encoding.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/data/different_encoding.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/data/different_encoding.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/data/different_encoding.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,6 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +print u'ßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞ' + +def f(x): + print '%s\t-> α(%2i):%s β(%s)' diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/data/false_encoding.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/data/false_encoding.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/data/false_encoding.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/data/false_encoding.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,2 @@ +#!/usr/bin/env python +print '#coding=0' diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/data/fixers/bad_order.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/data/fixers/bad_order.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/data/fixers/bad_order.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/data/fixers/bad_order.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,5 @@ +from lib2to3.fixer_base import BaseFix + +class FixBadOrder(BaseFix): + + order = "crazy" diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/data/fixers/myfixes/fix_explicit.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/data/fixers/myfixes/fix_explicit.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/data/fixers/myfixes/fix_explicit.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/data/fixers/myfixes/fix_explicit.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,6 @@ +from lib2to3.fixer_base import BaseFix + +class FixExplicit(BaseFix): + explicit = True + + def match(self): return False diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/data/fixers/myfixes/fix_first.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/data/fixers/myfixes/fix_first.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/data/fixers/myfixes/fix_first.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/data/fixers/myfixes/fix_first.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,6 @@ +from lib2to3.fixer_base import BaseFix + +class FixFirst(BaseFix): + run_order = 1 + + def match(self, node): return False diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/data/fixers/myfixes/fix_last.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/data/fixers/myfixes/fix_last.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/data/fixers/myfixes/fix_last.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/data/fixers/myfixes/fix_last.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,7 @@ +from lib2to3.fixer_base import BaseFix + +class FixLast(BaseFix): + + run_order = 10 + + def match(self, node): return False diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/data/fixers/myfixes/fix_parrot.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/data/fixers/myfixes/fix_parrot.py --- 
python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/data/fixers/myfixes/fix_parrot.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/data/fixers/myfixes/fix_parrot.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,13 @@ +from lib2to3.fixer_base import BaseFix +from lib2to3.fixer_util import Name + +class FixParrot(BaseFix): + """ + Change functions named 'parrot' to 'cheese'. + """ + + PATTERN = """funcdef < 'def' name='parrot' any* >""" + + def transform(self, node, results): + name = results["name"] + name.replace(Name("cheese", name.prefix)) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/data/fixers/myfixes/fix_preorder.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/data/fixers/myfixes/fix_preorder.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/data/fixers/myfixes/fix_preorder.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/data/fixers/myfixes/fix_preorder.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,6 @@ +from lib2to3.fixer_base import BaseFix + +class FixPreorder(BaseFix): + order = "pre" + + def match(self, node): return False diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/data/fixers/no_fixer_cls.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/data/fixers/no_fixer_cls.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/data/fixers/no_fixer_cls.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/data/fixers/no_fixer_cls.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1 @@ +# This is empty so trying to fetch the fixer class gives an AttributeError diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/data/fixers/parrot_example.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/data/fixers/parrot_example.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/data/fixers/parrot_example.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/data/fixers/parrot_example.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,2 @@ +def parrot(): + pass diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/data/infinite_recursion.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/data/infinite_recursion.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/data/infinite_recursion.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/data/infinite_recursion.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,2669 @@ +# Verify that 2to3 falls back from the recursive pattern matching scheme to a +# slower, iterative scheme in the event of a RecursionError. 
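+#
+# (The fallback being exercised here is the one added in pytree.py earlier in
+# this diff: WildcardPattern.generate_matches() tries _recursive_matches()
+# first, with stderr temporarily silenced, and switches to
+# _iterative_matches() when the recursion limit is hit.)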
+from ctypes import * +STRING = c_char_p + + +OSUnknownByteOrder = 0 +UIT_PROMPT = 1 +P_PGID = 2 +P_PID = 1 +UIT_ERROR = 5 +UIT_INFO = 4 +UIT_NONE = 0 +P_ALL = 0 +UIT_VERIFY = 2 +OSBigEndian = 2 +UIT_BOOLEAN = 3 +OSLittleEndian = 1 +__darwin_nl_item = c_int +__darwin_wctrans_t = c_int +__darwin_wctype_t = c_ulong +__int8_t = c_byte +__uint8_t = c_ubyte +__int16_t = c_short +__uint16_t = c_ushort +__int32_t = c_int +__uint32_t = c_uint +__int64_t = c_longlong +__uint64_t = c_ulonglong +__darwin_intptr_t = c_long +__darwin_natural_t = c_uint +__darwin_ct_rune_t = c_int +class __mbstate_t(Union): + pass +__mbstate_t._pack_ = 4 +__mbstate_t._fields_ = [ + ('__mbstate8', c_char * 128), + ('_mbstateL', c_longlong), +] +assert sizeof(__mbstate_t) == 128, sizeof(__mbstate_t) +assert alignment(__mbstate_t) == 4, alignment(__mbstate_t) +__darwin_mbstate_t = __mbstate_t +__darwin_ptrdiff_t = c_int +__darwin_size_t = c_ulong +__darwin_va_list = STRING +__darwin_wchar_t = c_int +__darwin_rune_t = __darwin_wchar_t +__darwin_wint_t = c_int +__darwin_clock_t = c_ulong +__darwin_socklen_t = __uint32_t +__darwin_ssize_t = c_long +__darwin_time_t = c_long +sig_atomic_t = c_int +class sigcontext(Structure): + pass +sigcontext._fields_ = [ + ('sc_onstack', c_int), + ('sc_mask', c_int), + ('sc_eax', c_uint), + ('sc_ebx', c_uint), + ('sc_ecx', c_uint), + ('sc_edx', c_uint), + ('sc_edi', c_uint), + ('sc_esi', c_uint), + ('sc_ebp', c_uint), + ('sc_esp', c_uint), + ('sc_ss', c_uint), + ('sc_eflags', c_uint), + ('sc_eip', c_uint), + ('sc_cs', c_uint), + ('sc_ds', c_uint), + ('sc_es', c_uint), + ('sc_fs', c_uint), + ('sc_gs', c_uint), +] +assert sizeof(sigcontext) == 72, sizeof(sigcontext) +assert alignment(sigcontext) == 4, alignment(sigcontext) +u_int8_t = c_ubyte +u_int16_t = c_ushort +u_int32_t = c_uint +u_int64_t = c_ulonglong +int32_t = c_int +register_t = int32_t +user_addr_t = u_int64_t +user_size_t = u_int64_t +int64_t = c_longlong +user_ssize_t = int64_t +user_long_t = int64_t +user_ulong_t = u_int64_t +user_time_t = int64_t +syscall_arg_t = u_int64_t + +# values for unnamed enumeration +class aes_key_st(Structure): + pass +aes_key_st._fields_ = [ + ('rd_key', c_ulong * 60), + ('rounds', c_int), +] +assert sizeof(aes_key_st) == 244, sizeof(aes_key_st) +assert alignment(aes_key_st) == 4, alignment(aes_key_st) +AES_KEY = aes_key_st +class asn1_ctx_st(Structure): + pass +asn1_ctx_st._fields_ = [ + ('p', POINTER(c_ubyte)), + ('eos', c_int), + ('error', c_int), + ('inf', c_int), + ('tag', c_int), + ('xclass', c_int), + ('slen', c_long), + ('max', POINTER(c_ubyte)), + ('q', POINTER(c_ubyte)), + ('pp', POINTER(POINTER(c_ubyte))), + ('line', c_int), +] +assert sizeof(asn1_ctx_st) == 44, sizeof(asn1_ctx_st) +assert alignment(asn1_ctx_st) == 4, alignment(asn1_ctx_st) +ASN1_CTX = asn1_ctx_st +class asn1_object_st(Structure): + pass +asn1_object_st._fields_ = [ + ('sn', STRING), + ('ln', STRING), + ('nid', c_int), + ('length', c_int), + ('data', POINTER(c_ubyte)), + ('flags', c_int), +] +assert sizeof(asn1_object_st) == 24, sizeof(asn1_object_st) +assert alignment(asn1_object_st) == 4, alignment(asn1_object_st) +ASN1_OBJECT = asn1_object_st +class asn1_string_st(Structure): + pass +asn1_string_st._fields_ = [ + ('length', c_int), + ('type', c_int), + ('data', POINTER(c_ubyte)), + ('flags', c_long), +] +assert sizeof(asn1_string_st) == 16, sizeof(asn1_string_st) +assert alignment(asn1_string_st) == 4, alignment(asn1_string_st) +ASN1_STRING = asn1_string_st +class ASN1_ENCODING_st(Structure): + pass 
+ASN1_ENCODING_st._fields_ = [ + ('enc', POINTER(c_ubyte)), + ('len', c_long), + ('modified', c_int), +] +assert sizeof(ASN1_ENCODING_st) == 12, sizeof(ASN1_ENCODING_st) +assert alignment(ASN1_ENCODING_st) == 4, alignment(ASN1_ENCODING_st) +ASN1_ENCODING = ASN1_ENCODING_st +class asn1_string_table_st(Structure): + pass +asn1_string_table_st._fields_ = [ + ('nid', c_int), + ('minsize', c_long), + ('maxsize', c_long), + ('mask', c_ulong), + ('flags', c_ulong), +] +assert sizeof(asn1_string_table_st) == 20, sizeof(asn1_string_table_st) +assert alignment(asn1_string_table_st) == 4, alignment(asn1_string_table_st) +ASN1_STRING_TABLE = asn1_string_table_st +class ASN1_TEMPLATE_st(Structure): + pass +ASN1_TEMPLATE_st._fields_ = [ +] +ASN1_TEMPLATE = ASN1_TEMPLATE_st +class ASN1_ITEM_st(Structure): + pass +ASN1_ITEM = ASN1_ITEM_st +ASN1_ITEM_st._fields_ = [ +] +class ASN1_TLC_st(Structure): + pass +ASN1_TLC = ASN1_TLC_st +ASN1_TLC_st._fields_ = [ +] +class ASN1_VALUE_st(Structure): + pass +ASN1_VALUE_st._fields_ = [ +] +ASN1_VALUE = ASN1_VALUE_st +ASN1_ITEM_EXP = ASN1_ITEM +class asn1_type_st(Structure): + pass +class N12asn1_type_st4DOLLAR_11E(Union): + pass +ASN1_BOOLEAN = c_int +ASN1_INTEGER = asn1_string_st +ASN1_ENUMERATED = asn1_string_st +ASN1_BIT_STRING = asn1_string_st +ASN1_OCTET_STRING = asn1_string_st +ASN1_PRINTABLESTRING = asn1_string_st +ASN1_T61STRING = asn1_string_st +ASN1_IA5STRING = asn1_string_st +ASN1_GENERALSTRING = asn1_string_st +ASN1_BMPSTRING = asn1_string_st +ASN1_UNIVERSALSTRING = asn1_string_st +ASN1_UTCTIME = asn1_string_st +ASN1_GENERALIZEDTIME = asn1_string_st +ASN1_VISIBLESTRING = asn1_string_st +ASN1_UTF8STRING = asn1_string_st +N12asn1_type_st4DOLLAR_11E._fields_ = [ + ('ptr', STRING), + ('boolean', ASN1_BOOLEAN), + ('asn1_string', POINTER(ASN1_STRING)), + ('object', POINTER(ASN1_OBJECT)), + ('integer', POINTER(ASN1_INTEGER)), + ('enumerated', POINTER(ASN1_ENUMERATED)), + ('bit_string', POINTER(ASN1_BIT_STRING)), + ('octet_string', POINTER(ASN1_OCTET_STRING)), + ('printablestring', POINTER(ASN1_PRINTABLESTRING)), + ('t61string', POINTER(ASN1_T61STRING)), + ('ia5string', POINTER(ASN1_IA5STRING)), + ('generalstring', POINTER(ASN1_GENERALSTRING)), + ('bmpstring', POINTER(ASN1_BMPSTRING)), + ('universalstring', POINTER(ASN1_UNIVERSALSTRING)), + ('utctime', POINTER(ASN1_UTCTIME)), + ('generalizedtime', POINTER(ASN1_GENERALIZEDTIME)), + ('visiblestring', POINTER(ASN1_VISIBLESTRING)), + ('utf8string', POINTER(ASN1_UTF8STRING)), + ('set', POINTER(ASN1_STRING)), + ('sequence', POINTER(ASN1_STRING)), +] +assert sizeof(N12asn1_type_st4DOLLAR_11E) == 4, sizeof(N12asn1_type_st4DOLLAR_11E) +assert alignment(N12asn1_type_st4DOLLAR_11E) == 4, alignment(N12asn1_type_st4DOLLAR_11E) +asn1_type_st._fields_ = [ + ('type', c_int), + ('value', N12asn1_type_st4DOLLAR_11E), +] +assert sizeof(asn1_type_st) == 8, sizeof(asn1_type_st) +assert alignment(asn1_type_st) == 4, alignment(asn1_type_st) +ASN1_TYPE = asn1_type_st +class asn1_method_st(Structure): + pass +asn1_method_st._fields_ = [ + ('i2d', CFUNCTYPE(c_int)), + ('d2i', CFUNCTYPE(STRING)), + ('create', CFUNCTYPE(STRING)), + ('destroy', CFUNCTYPE(None)), +] +assert sizeof(asn1_method_st) == 16, sizeof(asn1_method_st) +assert alignment(asn1_method_st) == 4, alignment(asn1_method_st) +ASN1_METHOD = asn1_method_st +class asn1_header_st(Structure): + pass +asn1_header_st._fields_ = [ + ('header', POINTER(ASN1_OCTET_STRING)), + ('data', STRING), + ('meth', POINTER(ASN1_METHOD)), +] +assert sizeof(asn1_header_st) == 12, 
sizeof(asn1_header_st) +assert alignment(asn1_header_st) == 4, alignment(asn1_header_st) +ASN1_HEADER = asn1_header_st +class BIT_STRING_BITNAME_st(Structure): + pass +BIT_STRING_BITNAME_st._fields_ = [ + ('bitnum', c_int), + ('lname', STRING), + ('sname', STRING), +] +assert sizeof(BIT_STRING_BITNAME_st) == 12, sizeof(BIT_STRING_BITNAME_st) +assert alignment(BIT_STRING_BITNAME_st) == 4, alignment(BIT_STRING_BITNAME_st) +BIT_STRING_BITNAME = BIT_STRING_BITNAME_st +class bio_st(Structure): + pass +BIO = bio_st +bio_info_cb = CFUNCTYPE(None, POINTER(bio_st), c_int, STRING, c_int, c_long, c_long) +class bio_method_st(Structure): + pass +bio_method_st._fields_ = [ + ('type', c_int), + ('name', STRING), + ('bwrite', CFUNCTYPE(c_int, POINTER(BIO), STRING, c_int)), + ('bread', CFUNCTYPE(c_int, POINTER(BIO), STRING, c_int)), + ('bputs', CFUNCTYPE(c_int, POINTER(BIO), STRING)), + ('bgets', CFUNCTYPE(c_int, POINTER(BIO), STRING, c_int)), + ('ctrl', CFUNCTYPE(c_long, POINTER(BIO), c_int, c_long, c_void_p)), + ('create', CFUNCTYPE(c_int, POINTER(BIO))), + ('destroy', CFUNCTYPE(c_int, POINTER(BIO))), + ('callback_ctrl', CFUNCTYPE(c_long, POINTER(BIO), c_int, POINTER(bio_info_cb))), +] +assert sizeof(bio_method_st) == 40, sizeof(bio_method_st) +assert alignment(bio_method_st) == 4, alignment(bio_method_st) +BIO_METHOD = bio_method_st +class crypto_ex_data_st(Structure): + pass +class stack_st(Structure): + pass +STACK = stack_st +crypto_ex_data_st._fields_ = [ + ('sk', POINTER(STACK)), + ('dummy', c_int), +] +assert sizeof(crypto_ex_data_st) == 8, sizeof(crypto_ex_data_st) +assert alignment(crypto_ex_data_st) == 4, alignment(crypto_ex_data_st) +CRYPTO_EX_DATA = crypto_ex_data_st +bio_st._fields_ = [ + ('method', POINTER(BIO_METHOD)), + ('callback', CFUNCTYPE(c_long, POINTER(bio_st), c_int, STRING, c_int, c_long, c_long)), + ('cb_arg', STRING), + ('init', c_int), + ('shutdown', c_int), + ('flags', c_int), + ('retry_reason', c_int), + ('num', c_int), + ('ptr', c_void_p), + ('next_bio', POINTER(bio_st)), + ('prev_bio', POINTER(bio_st)), + ('references', c_int), + ('num_read', c_ulong), + ('num_write', c_ulong), + ('ex_data', CRYPTO_EX_DATA), +] +assert sizeof(bio_st) == 64, sizeof(bio_st) +assert alignment(bio_st) == 4, alignment(bio_st) +class bio_f_buffer_ctx_struct(Structure): + pass +bio_f_buffer_ctx_struct._fields_ = [ + ('ibuf_size', c_int), + ('obuf_size', c_int), + ('ibuf', STRING), + ('ibuf_len', c_int), + ('ibuf_off', c_int), + ('obuf', STRING), + ('obuf_len', c_int), + ('obuf_off', c_int), +] +assert sizeof(bio_f_buffer_ctx_struct) == 32, sizeof(bio_f_buffer_ctx_struct) +assert alignment(bio_f_buffer_ctx_struct) == 4, alignment(bio_f_buffer_ctx_struct) +BIO_F_BUFFER_CTX = bio_f_buffer_ctx_struct +class hostent(Structure): + pass +hostent._fields_ = [ +] +class bf_key_st(Structure): + pass +bf_key_st._fields_ = [ + ('P', c_uint * 18), + ('S', c_uint * 1024), +] +assert sizeof(bf_key_st) == 4168, sizeof(bf_key_st) +assert alignment(bf_key_st) == 4, alignment(bf_key_st) +BF_KEY = bf_key_st +class bignum_st(Structure): + pass +bignum_st._fields_ = [ + ('d', POINTER(c_ulong)), + ('top', c_int), + ('dmax', c_int), + ('neg', c_int), + ('flags', c_int), +] +assert sizeof(bignum_st) == 20, sizeof(bignum_st) +assert alignment(bignum_st) == 4, alignment(bignum_st) +BIGNUM = bignum_st +class bignum_ctx(Structure): + pass +bignum_ctx._fields_ = [ +] +BN_CTX = bignum_ctx +class bn_blinding_st(Structure): + pass +bn_blinding_st._fields_ = [ + ('init', c_int), + ('A', POINTER(BIGNUM)), + ('Ai', 
POINTER(BIGNUM)), + ('mod', POINTER(BIGNUM)), + ('thread_id', c_ulong), +] +assert sizeof(bn_blinding_st) == 20, sizeof(bn_blinding_st) +assert alignment(bn_blinding_st) == 4, alignment(bn_blinding_st) +BN_BLINDING = bn_blinding_st +class bn_mont_ctx_st(Structure): + pass +bn_mont_ctx_st._fields_ = [ + ('ri', c_int), + ('RR', BIGNUM), + ('N', BIGNUM), + ('Ni', BIGNUM), + ('n0', c_ulong), + ('flags', c_int), +] +assert sizeof(bn_mont_ctx_st) == 72, sizeof(bn_mont_ctx_st) +assert alignment(bn_mont_ctx_st) == 4, alignment(bn_mont_ctx_st) +BN_MONT_CTX = bn_mont_ctx_st +class bn_recp_ctx_st(Structure): + pass +bn_recp_ctx_st._fields_ = [ + ('N', BIGNUM), + ('Nr', BIGNUM), + ('num_bits', c_int), + ('shift', c_int), + ('flags', c_int), +] +assert sizeof(bn_recp_ctx_st) == 52, sizeof(bn_recp_ctx_st) +assert alignment(bn_recp_ctx_st) == 4, alignment(bn_recp_ctx_st) +BN_RECP_CTX = bn_recp_ctx_st +class buf_mem_st(Structure): + pass +buf_mem_st._fields_ = [ + ('length', c_int), + ('data', STRING), + ('max', c_int), +] +assert sizeof(buf_mem_st) == 12, sizeof(buf_mem_st) +assert alignment(buf_mem_st) == 4, alignment(buf_mem_st) +BUF_MEM = buf_mem_st +class cast_key_st(Structure): + pass +cast_key_st._fields_ = [ + ('data', c_ulong * 32), + ('short_key', c_int), +] +assert sizeof(cast_key_st) == 132, sizeof(cast_key_st) +assert alignment(cast_key_st) == 4, alignment(cast_key_st) +CAST_KEY = cast_key_st +class comp_method_st(Structure): + pass +comp_method_st._fields_ = [ + ('type', c_int), + ('name', STRING), + ('init', CFUNCTYPE(c_int)), + ('finish', CFUNCTYPE(None)), + ('compress', CFUNCTYPE(c_int)), + ('expand', CFUNCTYPE(c_int)), + ('ctrl', CFUNCTYPE(c_long)), + ('callback_ctrl', CFUNCTYPE(c_long)), +] +assert sizeof(comp_method_st) == 32, sizeof(comp_method_st) +assert alignment(comp_method_st) == 4, alignment(comp_method_st) +COMP_METHOD = comp_method_st +class comp_ctx_st(Structure): + pass +comp_ctx_st._fields_ = [ + ('meth', POINTER(COMP_METHOD)), + ('compress_in', c_ulong), + ('compress_out', c_ulong), + ('expand_in', c_ulong), + ('expand_out', c_ulong), + ('ex_data', CRYPTO_EX_DATA), +] +assert sizeof(comp_ctx_st) == 28, sizeof(comp_ctx_st) +assert alignment(comp_ctx_st) == 4, alignment(comp_ctx_st) +COMP_CTX = comp_ctx_st +class CRYPTO_dynlock_value(Structure): + pass +CRYPTO_dynlock_value._fields_ = [ +] +class CRYPTO_dynlock(Structure): + pass +CRYPTO_dynlock._fields_ = [ + ('references', c_int), + ('data', POINTER(CRYPTO_dynlock_value)), +] +assert sizeof(CRYPTO_dynlock) == 8, sizeof(CRYPTO_dynlock) +assert alignment(CRYPTO_dynlock) == 4, alignment(CRYPTO_dynlock) +BIO_dummy = bio_st +CRYPTO_EX_new = CFUNCTYPE(c_int, c_void_p, c_void_p, POINTER(CRYPTO_EX_DATA), c_int, c_long, c_void_p) +CRYPTO_EX_free = CFUNCTYPE(None, c_void_p, c_void_p, POINTER(CRYPTO_EX_DATA), c_int, c_long, c_void_p) +CRYPTO_EX_dup = CFUNCTYPE(c_int, POINTER(CRYPTO_EX_DATA), POINTER(CRYPTO_EX_DATA), c_void_p, c_int, c_long, c_void_p) +class crypto_ex_data_func_st(Structure): + pass +crypto_ex_data_func_st._fields_ = [ + ('argl', c_long), + ('argp', c_void_p), + ('new_func', POINTER(CRYPTO_EX_new)), + ('free_func', POINTER(CRYPTO_EX_free)), + ('dup_func', POINTER(CRYPTO_EX_dup)), +] +assert sizeof(crypto_ex_data_func_st) == 20, sizeof(crypto_ex_data_func_st) +assert alignment(crypto_ex_data_func_st) == 4, alignment(crypto_ex_data_func_st) +CRYPTO_EX_DATA_FUNCS = crypto_ex_data_func_st +class st_CRYPTO_EX_DATA_IMPL(Structure): + pass +CRYPTO_EX_DATA_IMPL = st_CRYPTO_EX_DATA_IMPL +st_CRYPTO_EX_DATA_IMPL._fields_ = [ +] 
+CRYPTO_MEM_LEAK_CB = CFUNCTYPE(c_void_p, c_ulong, STRING, c_int, c_int, c_void_p) +DES_cblock = c_ubyte * 8 +const_DES_cblock = c_ubyte * 8 +class DES_ks(Structure): + pass +class N6DES_ks3DOLLAR_9E(Union): + pass +N6DES_ks3DOLLAR_9E._fields_ = [ + ('cblock', DES_cblock), + ('deslong', c_ulong * 2), +] +assert sizeof(N6DES_ks3DOLLAR_9E) == 8, sizeof(N6DES_ks3DOLLAR_9E) +assert alignment(N6DES_ks3DOLLAR_9E) == 4, alignment(N6DES_ks3DOLLAR_9E) +DES_ks._fields_ = [ + ('ks', N6DES_ks3DOLLAR_9E * 16), +] +assert sizeof(DES_ks) == 128, sizeof(DES_ks) +assert alignment(DES_ks) == 4, alignment(DES_ks) +DES_key_schedule = DES_ks +_ossl_old_des_cblock = c_ubyte * 8 +class _ossl_old_des_ks_struct(Structure): + pass +class N23_ossl_old_des_ks_struct4DOLLAR_10E(Union): + pass +N23_ossl_old_des_ks_struct4DOLLAR_10E._fields_ = [ + ('_', _ossl_old_des_cblock), + ('pad', c_ulong * 2), +] +assert sizeof(N23_ossl_old_des_ks_struct4DOLLAR_10E) == 8, sizeof(N23_ossl_old_des_ks_struct4DOLLAR_10E) +assert alignment(N23_ossl_old_des_ks_struct4DOLLAR_10E) == 4, alignment(N23_ossl_old_des_ks_struct4DOLLAR_10E) +_ossl_old_des_ks_struct._fields_ = [ + ('ks', N23_ossl_old_des_ks_struct4DOLLAR_10E), +] +assert sizeof(_ossl_old_des_ks_struct) == 8, sizeof(_ossl_old_des_ks_struct) +assert alignment(_ossl_old_des_ks_struct) == 4, alignment(_ossl_old_des_ks_struct) +_ossl_old_des_key_schedule = _ossl_old_des_ks_struct * 16 +class dh_st(Structure): + pass +DH = dh_st +class dh_method(Structure): + pass +dh_method._fields_ = [ + ('name', STRING), + ('generate_key', CFUNCTYPE(c_int, POINTER(DH))), + ('compute_key', CFUNCTYPE(c_int, POINTER(c_ubyte), POINTER(BIGNUM), POINTER(DH))), + ('bn_mod_exp', CFUNCTYPE(c_int, POINTER(DH), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BN_CTX), POINTER(BN_MONT_CTX))), + ('init', CFUNCTYPE(c_int, POINTER(DH))), + ('finish', CFUNCTYPE(c_int, POINTER(DH))), + ('flags', c_int), + ('app_data', STRING), +] +assert sizeof(dh_method) == 32, sizeof(dh_method) +assert alignment(dh_method) == 4, alignment(dh_method) +DH_METHOD = dh_method +class engine_st(Structure): + pass +ENGINE = engine_st +dh_st._fields_ = [ + ('pad', c_int), + ('version', c_int), + ('p', POINTER(BIGNUM)), + ('g', POINTER(BIGNUM)), + ('length', c_long), + ('pub_key', POINTER(BIGNUM)), + ('priv_key', POINTER(BIGNUM)), + ('flags', c_int), + ('method_mont_p', STRING), + ('q', POINTER(BIGNUM)), + ('j', POINTER(BIGNUM)), + ('seed', POINTER(c_ubyte)), + ('seedlen', c_int), + ('counter', POINTER(BIGNUM)), + ('references', c_int), + ('ex_data', CRYPTO_EX_DATA), + ('meth', POINTER(DH_METHOD)), + ('engine', POINTER(ENGINE)), +] +assert sizeof(dh_st) == 76, sizeof(dh_st) +assert alignment(dh_st) == 4, alignment(dh_st) +class dsa_st(Structure): + pass +DSA = dsa_st +class DSA_SIG_st(Structure): + pass +DSA_SIG_st._fields_ = [ + ('r', POINTER(BIGNUM)), + ('s', POINTER(BIGNUM)), +] +assert sizeof(DSA_SIG_st) == 8, sizeof(DSA_SIG_st) +assert alignment(DSA_SIG_st) == 4, alignment(DSA_SIG_st) +DSA_SIG = DSA_SIG_st +class dsa_method(Structure): + pass +dsa_method._fields_ = [ + ('name', STRING), + ('dsa_do_sign', CFUNCTYPE(POINTER(DSA_SIG), POINTER(c_ubyte), c_int, POINTER(DSA))), + ('dsa_sign_setup', CFUNCTYPE(c_int, POINTER(DSA), POINTER(BN_CTX), POINTER(POINTER(BIGNUM)), POINTER(POINTER(BIGNUM)))), + ('dsa_do_verify', CFUNCTYPE(c_int, POINTER(c_ubyte), c_int, POINTER(DSA_SIG), POINTER(DSA))), + ('dsa_mod_exp', CFUNCTYPE(c_int, POINTER(DSA), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), 
POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BN_CTX), POINTER(BN_MONT_CTX))), + ('bn_mod_exp', CFUNCTYPE(c_int, POINTER(DSA), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BN_CTX), POINTER(BN_MONT_CTX))), + ('init', CFUNCTYPE(c_int, POINTER(DSA))), + ('finish', CFUNCTYPE(c_int, POINTER(DSA))), + ('flags', c_int), + ('app_data', STRING), +] +assert sizeof(dsa_method) == 40, sizeof(dsa_method) +assert alignment(dsa_method) == 4, alignment(dsa_method) +DSA_METHOD = dsa_method +dsa_st._fields_ = [ + ('pad', c_int), + ('version', c_long), + ('write_params', c_int), + ('p', POINTER(BIGNUM)), + ('q', POINTER(BIGNUM)), + ('g', POINTER(BIGNUM)), + ('pub_key', POINTER(BIGNUM)), + ('priv_key', POINTER(BIGNUM)), + ('kinv', POINTER(BIGNUM)), + ('r', POINTER(BIGNUM)), + ('flags', c_int), + ('method_mont_p', STRING), + ('references', c_int), + ('ex_data', CRYPTO_EX_DATA), + ('meth', POINTER(DSA_METHOD)), + ('engine', POINTER(ENGINE)), +] +assert sizeof(dsa_st) == 68, sizeof(dsa_st) +assert alignment(dsa_st) == 4, alignment(dsa_st) +class evp_pkey_st(Structure): + pass +class N11evp_pkey_st4DOLLAR_12E(Union): + pass +class rsa_st(Structure): + pass +N11evp_pkey_st4DOLLAR_12E._fields_ = [ + ('ptr', STRING), + ('rsa', POINTER(rsa_st)), + ('dsa', POINTER(dsa_st)), + ('dh', POINTER(dh_st)), +] +assert sizeof(N11evp_pkey_st4DOLLAR_12E) == 4, sizeof(N11evp_pkey_st4DOLLAR_12E) +assert alignment(N11evp_pkey_st4DOLLAR_12E) == 4, alignment(N11evp_pkey_st4DOLLAR_12E) +evp_pkey_st._fields_ = [ + ('type', c_int), + ('save_type', c_int), + ('references', c_int), + ('pkey', N11evp_pkey_st4DOLLAR_12E), + ('save_parameters', c_int), + ('attributes', POINTER(STACK)), +] +assert sizeof(evp_pkey_st) == 24, sizeof(evp_pkey_st) +assert alignment(evp_pkey_st) == 4, alignment(evp_pkey_st) +class env_md_st(Structure): + pass +class env_md_ctx_st(Structure): + pass +EVP_MD_CTX = env_md_ctx_st +env_md_st._fields_ = [ + ('type', c_int), + ('pkey_type', c_int), + ('md_size', c_int), + ('flags', c_ulong), + ('init', CFUNCTYPE(c_int, POINTER(EVP_MD_CTX))), + ('update', CFUNCTYPE(c_int, POINTER(EVP_MD_CTX), c_void_p, c_ulong)), + ('final', CFUNCTYPE(c_int, POINTER(EVP_MD_CTX), POINTER(c_ubyte))), + ('copy', CFUNCTYPE(c_int, POINTER(EVP_MD_CTX), POINTER(EVP_MD_CTX))), + ('cleanup', CFUNCTYPE(c_int, POINTER(EVP_MD_CTX))), + ('sign', CFUNCTYPE(c_int)), + ('verify', CFUNCTYPE(c_int)), + ('required_pkey_type', c_int * 5), + ('block_size', c_int), + ('ctx_size', c_int), +] +assert sizeof(env_md_st) == 72, sizeof(env_md_st) +assert alignment(env_md_st) == 4, alignment(env_md_st) +EVP_MD = env_md_st +env_md_ctx_st._fields_ = [ + ('digest', POINTER(EVP_MD)), + ('engine', POINTER(ENGINE)), + ('flags', c_ulong), + ('md_data', c_void_p), +] +assert sizeof(env_md_ctx_st) == 16, sizeof(env_md_ctx_st) +assert alignment(env_md_ctx_st) == 4, alignment(env_md_ctx_st) +class evp_cipher_st(Structure): + pass +class evp_cipher_ctx_st(Structure): + pass +EVP_CIPHER_CTX = evp_cipher_ctx_st +evp_cipher_st._fields_ = [ + ('nid', c_int), + ('block_size', c_int), + ('key_len', c_int), + ('iv_len', c_int), + ('flags', c_ulong), + ('init', CFUNCTYPE(c_int, POINTER(EVP_CIPHER_CTX), POINTER(c_ubyte), POINTER(c_ubyte), c_int)), + ('do_cipher', CFUNCTYPE(c_int, POINTER(EVP_CIPHER_CTX), POINTER(c_ubyte), POINTER(c_ubyte), c_uint)), + ('cleanup', CFUNCTYPE(c_int, POINTER(EVP_CIPHER_CTX))), + ('ctx_size', c_int), + ('set_asn1_parameters', CFUNCTYPE(c_int, POINTER(EVP_CIPHER_CTX), POINTER(ASN1_TYPE))), + ('get_asn1_parameters', CFUNCTYPE(c_int, 
POINTER(EVP_CIPHER_CTX), POINTER(ASN1_TYPE))), + ('ctrl', CFUNCTYPE(c_int, POINTER(EVP_CIPHER_CTX), c_int, c_int, c_void_p)), + ('app_data', c_void_p), +] +assert sizeof(evp_cipher_st) == 52, sizeof(evp_cipher_st) +assert alignment(evp_cipher_st) == 4, alignment(evp_cipher_st) +class evp_cipher_info_st(Structure): + pass +EVP_CIPHER = evp_cipher_st +evp_cipher_info_st._fields_ = [ + ('cipher', POINTER(EVP_CIPHER)), + ('iv', c_ubyte * 16), +] +assert sizeof(evp_cipher_info_st) == 20, sizeof(evp_cipher_info_st) +assert alignment(evp_cipher_info_st) == 4, alignment(evp_cipher_info_st) +EVP_CIPHER_INFO = evp_cipher_info_st +evp_cipher_ctx_st._fields_ = [ + ('cipher', POINTER(EVP_CIPHER)), + ('engine', POINTER(ENGINE)), + ('encrypt', c_int), + ('buf_len', c_int), + ('oiv', c_ubyte * 16), + ('iv', c_ubyte * 16), + ('buf', c_ubyte * 32), + ('num', c_int), + ('app_data', c_void_p), + ('key_len', c_int), + ('flags', c_ulong), + ('cipher_data', c_void_p), + ('final_used', c_int), + ('block_mask', c_int), + ('final', c_ubyte * 32), +] +assert sizeof(evp_cipher_ctx_st) == 140, sizeof(evp_cipher_ctx_st) +assert alignment(evp_cipher_ctx_st) == 4, alignment(evp_cipher_ctx_st) +class evp_Encode_Ctx_st(Structure): + pass +evp_Encode_Ctx_st._fields_ = [ + ('num', c_int), + ('length', c_int), + ('enc_data', c_ubyte * 80), + ('line_num', c_int), + ('expect_nl', c_int), +] +assert sizeof(evp_Encode_Ctx_st) == 96, sizeof(evp_Encode_Ctx_st) +assert alignment(evp_Encode_Ctx_st) == 4, alignment(evp_Encode_Ctx_st) +EVP_ENCODE_CTX = evp_Encode_Ctx_st +EVP_PBE_KEYGEN = CFUNCTYPE(c_int, POINTER(EVP_CIPHER_CTX), STRING, c_int, POINTER(ASN1_TYPE), POINTER(EVP_CIPHER), POINTER(EVP_MD), c_int) +class lhash_node_st(Structure): + pass +lhash_node_st._fields_ = [ + ('data', c_void_p), + ('next', POINTER(lhash_node_st)), + ('hash', c_ulong), +] +assert sizeof(lhash_node_st) == 12, sizeof(lhash_node_st) +assert alignment(lhash_node_st) == 4, alignment(lhash_node_st) +LHASH_NODE = lhash_node_st +LHASH_COMP_FN_TYPE = CFUNCTYPE(c_int, c_void_p, c_void_p) +LHASH_HASH_FN_TYPE = CFUNCTYPE(c_ulong, c_void_p) +LHASH_DOALL_FN_TYPE = CFUNCTYPE(None, c_void_p) +LHASH_DOALL_ARG_FN_TYPE = CFUNCTYPE(None, c_void_p, c_void_p) +class lhash_st(Structure): + pass +lhash_st._fields_ = [ + ('b', POINTER(POINTER(LHASH_NODE))), + ('comp', LHASH_COMP_FN_TYPE), + ('hash', LHASH_HASH_FN_TYPE), + ('num_nodes', c_uint), + ('num_alloc_nodes', c_uint), + ('p', c_uint), + ('pmax', c_uint), + ('up_load', c_ulong), + ('down_load', c_ulong), + ('num_items', c_ulong), + ('num_expands', c_ulong), + ('num_expand_reallocs', c_ulong), + ('num_contracts', c_ulong), + ('num_contract_reallocs', c_ulong), + ('num_hash_calls', c_ulong), + ('num_comp_calls', c_ulong), + ('num_insert', c_ulong), + ('num_replace', c_ulong), + ('num_delete', c_ulong), + ('num_no_delete', c_ulong), + ('num_retrieve', c_ulong), + ('num_retrieve_miss', c_ulong), + ('num_hash_comps', c_ulong), + ('error', c_int), +] +assert sizeof(lhash_st) == 96, sizeof(lhash_st) +assert alignment(lhash_st) == 4, alignment(lhash_st) +LHASH = lhash_st +class MD2state_st(Structure): + pass +MD2state_st._fields_ = [ + ('num', c_int), + ('data', c_ubyte * 16), + ('cksm', c_uint * 16), + ('state', c_uint * 16), +] +assert sizeof(MD2state_st) == 148, sizeof(MD2state_st) +assert alignment(MD2state_st) == 4, alignment(MD2state_st) +MD2_CTX = MD2state_st +class MD4state_st(Structure): + pass +MD4state_st._fields_ = [ + ('A', c_uint), + ('B', c_uint), + ('C', c_uint), + ('D', c_uint), + ('Nl', c_uint), + ('Nh', 
c_uint), + ('data', c_uint * 16), + ('num', c_int), +] +assert sizeof(MD4state_st) == 92, sizeof(MD4state_st) +assert alignment(MD4state_st) == 4, alignment(MD4state_st) +MD4_CTX = MD4state_st +class MD5state_st(Structure): + pass +MD5state_st._fields_ = [ + ('A', c_uint), + ('B', c_uint), + ('C', c_uint), + ('D', c_uint), + ('Nl', c_uint), + ('Nh', c_uint), + ('data', c_uint * 16), + ('num', c_int), +] +assert sizeof(MD5state_st) == 92, sizeof(MD5state_st) +assert alignment(MD5state_st) == 4, alignment(MD5state_st) +MD5_CTX = MD5state_st +class mdc2_ctx_st(Structure): + pass +mdc2_ctx_st._fields_ = [ + ('num', c_int), + ('data', c_ubyte * 8), + ('h', DES_cblock), + ('hh', DES_cblock), + ('pad_type', c_int), +] +assert sizeof(mdc2_ctx_st) == 32, sizeof(mdc2_ctx_st) +assert alignment(mdc2_ctx_st) == 4, alignment(mdc2_ctx_st) +MDC2_CTX = mdc2_ctx_st +class obj_name_st(Structure): + pass +obj_name_st._fields_ = [ + ('type', c_int), + ('alias', c_int), + ('name', STRING), + ('data', STRING), +] +assert sizeof(obj_name_st) == 16, sizeof(obj_name_st) +assert alignment(obj_name_st) == 4, alignment(obj_name_st) +OBJ_NAME = obj_name_st +ASN1_TIME = asn1_string_st +ASN1_NULL = c_int +EVP_PKEY = evp_pkey_st +class x509_st(Structure): + pass +X509 = x509_st +class X509_algor_st(Structure): + pass +X509_ALGOR = X509_algor_st +class X509_crl_st(Structure): + pass +X509_CRL = X509_crl_st +class X509_name_st(Structure): + pass +X509_NAME = X509_name_st +class x509_store_st(Structure): + pass +X509_STORE = x509_store_st +class x509_store_ctx_st(Structure): + pass +X509_STORE_CTX = x509_store_ctx_st +engine_st._fields_ = [ +] +class PEM_Encode_Seal_st(Structure): + pass +PEM_Encode_Seal_st._fields_ = [ + ('encode', EVP_ENCODE_CTX), + ('md', EVP_MD_CTX), + ('cipher', EVP_CIPHER_CTX), +] +assert sizeof(PEM_Encode_Seal_st) == 252, sizeof(PEM_Encode_Seal_st) +assert alignment(PEM_Encode_Seal_st) == 4, alignment(PEM_Encode_Seal_st) +PEM_ENCODE_SEAL_CTX = PEM_Encode_Seal_st +class pem_recip_st(Structure): + pass +pem_recip_st._fields_ = [ + ('name', STRING), + ('dn', POINTER(X509_NAME)), + ('cipher', c_int), + ('key_enc', c_int), +] +assert sizeof(pem_recip_st) == 16, sizeof(pem_recip_st) +assert alignment(pem_recip_st) == 4, alignment(pem_recip_st) +PEM_USER = pem_recip_st +class pem_ctx_st(Structure): + pass +class N10pem_ctx_st4DOLLAR_16E(Structure): + pass +N10pem_ctx_st4DOLLAR_16E._fields_ = [ + ('version', c_int), + ('mode', c_int), +] +assert sizeof(N10pem_ctx_st4DOLLAR_16E) == 8, sizeof(N10pem_ctx_st4DOLLAR_16E) +assert alignment(N10pem_ctx_st4DOLLAR_16E) == 4, alignment(N10pem_ctx_st4DOLLAR_16E) +class N10pem_ctx_st4DOLLAR_17E(Structure): + pass +N10pem_ctx_st4DOLLAR_17E._fields_ = [ + ('cipher', c_int), +] +assert sizeof(N10pem_ctx_st4DOLLAR_17E) == 4, sizeof(N10pem_ctx_st4DOLLAR_17E) +assert alignment(N10pem_ctx_st4DOLLAR_17E) == 4, alignment(N10pem_ctx_st4DOLLAR_17E) +pem_ctx_st._fields_ = [ + ('type', c_int), + ('proc_type', N10pem_ctx_st4DOLLAR_16E), + ('domain', STRING), + ('DEK_info', N10pem_ctx_st4DOLLAR_17E), + ('originator', POINTER(PEM_USER)), + ('num_recipient', c_int), + ('recipient', POINTER(POINTER(PEM_USER))), + ('x509_chain', POINTER(STACK)), + ('md', POINTER(EVP_MD)), + ('md_enc', c_int), + ('md_len', c_int), + ('md_data', STRING), + ('dec', POINTER(EVP_CIPHER)), + ('key_len', c_int), + ('key', POINTER(c_ubyte)), + ('data_enc', c_int), + ('data_len', c_int), + ('data', POINTER(c_ubyte)), +] +assert sizeof(pem_ctx_st) == 76, sizeof(pem_ctx_st) +assert alignment(pem_ctx_st) == 4, 
alignment(pem_ctx_st) +PEM_CTX = pem_ctx_st +pem_password_cb = CFUNCTYPE(c_int, STRING, c_int, c_int, c_void_p) +class pkcs7_issuer_and_serial_st(Structure): + pass +pkcs7_issuer_and_serial_st._fields_ = [ + ('issuer', POINTER(X509_NAME)), + ('serial', POINTER(ASN1_INTEGER)), +] +assert sizeof(pkcs7_issuer_and_serial_st) == 8, sizeof(pkcs7_issuer_and_serial_st) +assert alignment(pkcs7_issuer_and_serial_st) == 4, alignment(pkcs7_issuer_and_serial_st) +PKCS7_ISSUER_AND_SERIAL = pkcs7_issuer_and_serial_st +class pkcs7_signer_info_st(Structure): + pass +pkcs7_signer_info_st._fields_ = [ + ('version', POINTER(ASN1_INTEGER)), + ('issuer_and_serial', POINTER(PKCS7_ISSUER_AND_SERIAL)), + ('digest_alg', POINTER(X509_ALGOR)), + ('auth_attr', POINTER(STACK)), + ('digest_enc_alg', POINTER(X509_ALGOR)), + ('enc_digest', POINTER(ASN1_OCTET_STRING)), + ('unauth_attr', POINTER(STACK)), + ('pkey', POINTER(EVP_PKEY)), +] +assert sizeof(pkcs7_signer_info_st) == 32, sizeof(pkcs7_signer_info_st) +assert alignment(pkcs7_signer_info_st) == 4, alignment(pkcs7_signer_info_st) +PKCS7_SIGNER_INFO = pkcs7_signer_info_st +class pkcs7_recip_info_st(Structure): + pass +pkcs7_recip_info_st._fields_ = [ + ('version', POINTER(ASN1_INTEGER)), + ('issuer_and_serial', POINTER(PKCS7_ISSUER_AND_SERIAL)), + ('key_enc_algor', POINTER(X509_ALGOR)), + ('enc_key', POINTER(ASN1_OCTET_STRING)), + ('cert', POINTER(X509)), +] +assert sizeof(pkcs7_recip_info_st) == 20, sizeof(pkcs7_recip_info_st) +assert alignment(pkcs7_recip_info_st) == 4, alignment(pkcs7_recip_info_st) +PKCS7_RECIP_INFO = pkcs7_recip_info_st +class pkcs7_signed_st(Structure): + pass +class pkcs7_st(Structure): + pass +pkcs7_signed_st._fields_ = [ + ('version', POINTER(ASN1_INTEGER)), + ('md_algs', POINTER(STACK)), + ('cert', POINTER(STACK)), + ('crl', POINTER(STACK)), + ('signer_info', POINTER(STACK)), + ('contents', POINTER(pkcs7_st)), +] +assert sizeof(pkcs7_signed_st) == 24, sizeof(pkcs7_signed_st) +assert alignment(pkcs7_signed_st) == 4, alignment(pkcs7_signed_st) +PKCS7_SIGNED = pkcs7_signed_st +class pkcs7_enc_content_st(Structure): + pass +pkcs7_enc_content_st._fields_ = [ + ('content_type', POINTER(ASN1_OBJECT)), + ('algorithm', POINTER(X509_ALGOR)), + ('enc_data', POINTER(ASN1_OCTET_STRING)), + ('cipher', POINTER(EVP_CIPHER)), +] +assert sizeof(pkcs7_enc_content_st) == 16, sizeof(pkcs7_enc_content_st) +assert alignment(pkcs7_enc_content_st) == 4, alignment(pkcs7_enc_content_st) +PKCS7_ENC_CONTENT = pkcs7_enc_content_st +class pkcs7_enveloped_st(Structure): + pass +pkcs7_enveloped_st._fields_ = [ + ('version', POINTER(ASN1_INTEGER)), + ('recipientinfo', POINTER(STACK)), + ('enc_data', POINTER(PKCS7_ENC_CONTENT)), +] +assert sizeof(pkcs7_enveloped_st) == 12, sizeof(pkcs7_enveloped_st) +assert alignment(pkcs7_enveloped_st) == 4, alignment(pkcs7_enveloped_st) +PKCS7_ENVELOPE = pkcs7_enveloped_st +class pkcs7_signedandenveloped_st(Structure): + pass +pkcs7_signedandenveloped_st._fields_ = [ + ('version', POINTER(ASN1_INTEGER)), + ('md_algs', POINTER(STACK)), + ('cert', POINTER(STACK)), + ('crl', POINTER(STACK)), + ('signer_info', POINTER(STACK)), + ('enc_data', POINTER(PKCS7_ENC_CONTENT)), + ('recipientinfo', POINTER(STACK)), +] +assert sizeof(pkcs7_signedandenveloped_st) == 28, sizeof(pkcs7_signedandenveloped_st) +assert alignment(pkcs7_signedandenveloped_st) == 4, alignment(pkcs7_signedandenveloped_st) +PKCS7_SIGN_ENVELOPE = pkcs7_signedandenveloped_st +class pkcs7_digest_st(Structure): + pass +pkcs7_digest_st._fields_ = [ + ('version', POINTER(ASN1_INTEGER)), + 
('md', POINTER(X509_ALGOR)), + ('contents', POINTER(pkcs7_st)), + ('digest', POINTER(ASN1_OCTET_STRING)), +] +assert sizeof(pkcs7_digest_st) == 16, sizeof(pkcs7_digest_st) +assert alignment(pkcs7_digest_st) == 4, alignment(pkcs7_digest_st) +PKCS7_DIGEST = pkcs7_digest_st +class pkcs7_encrypted_st(Structure): + pass +pkcs7_encrypted_st._fields_ = [ + ('version', POINTER(ASN1_INTEGER)), + ('enc_data', POINTER(PKCS7_ENC_CONTENT)), +] +assert sizeof(pkcs7_encrypted_st) == 8, sizeof(pkcs7_encrypted_st) +assert alignment(pkcs7_encrypted_st) == 4, alignment(pkcs7_encrypted_st) +PKCS7_ENCRYPT = pkcs7_encrypted_st +class N8pkcs7_st4DOLLAR_15E(Union): + pass +N8pkcs7_st4DOLLAR_15E._fields_ = [ + ('ptr', STRING), + ('data', POINTER(ASN1_OCTET_STRING)), + ('sign', POINTER(PKCS7_SIGNED)), + ('enveloped', POINTER(PKCS7_ENVELOPE)), + ('signed_and_enveloped', POINTER(PKCS7_SIGN_ENVELOPE)), + ('digest', POINTER(PKCS7_DIGEST)), + ('encrypted', POINTER(PKCS7_ENCRYPT)), + ('other', POINTER(ASN1_TYPE)), +] +assert sizeof(N8pkcs7_st4DOLLAR_15E) == 4, sizeof(N8pkcs7_st4DOLLAR_15E) +assert alignment(N8pkcs7_st4DOLLAR_15E) == 4, alignment(N8pkcs7_st4DOLLAR_15E) +pkcs7_st._fields_ = [ + ('asn1', POINTER(c_ubyte)), + ('length', c_long), + ('state', c_int), + ('detached', c_int), + ('type', POINTER(ASN1_OBJECT)), + ('d', N8pkcs7_st4DOLLAR_15E), +] +assert sizeof(pkcs7_st) == 24, sizeof(pkcs7_st) +assert alignment(pkcs7_st) == 4, alignment(pkcs7_st) +PKCS7 = pkcs7_st +class rc2_key_st(Structure): + pass +rc2_key_st._fields_ = [ + ('data', c_uint * 64), +] +assert sizeof(rc2_key_st) == 256, sizeof(rc2_key_st) +assert alignment(rc2_key_st) == 4, alignment(rc2_key_st) +RC2_KEY = rc2_key_st +class rc4_key_st(Structure): + pass +rc4_key_st._fields_ = [ + ('x', c_ubyte), + ('y', c_ubyte), + ('data', c_ubyte * 256), +] +assert sizeof(rc4_key_st) == 258, sizeof(rc4_key_st) +assert alignment(rc4_key_st) == 1, alignment(rc4_key_st) +RC4_KEY = rc4_key_st +class rc5_key_st(Structure): + pass +rc5_key_st._fields_ = [ + ('rounds', c_int), + ('data', c_ulong * 34), +] +assert sizeof(rc5_key_st) == 140, sizeof(rc5_key_st) +assert alignment(rc5_key_st) == 4, alignment(rc5_key_st) +RC5_32_KEY = rc5_key_st +class RIPEMD160state_st(Structure): + pass +RIPEMD160state_st._fields_ = [ + ('A', c_uint), + ('B', c_uint), + ('C', c_uint), + ('D', c_uint), + ('E', c_uint), + ('Nl', c_uint), + ('Nh', c_uint), + ('data', c_uint * 16), + ('num', c_int), +] +assert sizeof(RIPEMD160state_st) == 96, sizeof(RIPEMD160state_st) +assert alignment(RIPEMD160state_st) == 4, alignment(RIPEMD160state_st) +RIPEMD160_CTX = RIPEMD160state_st +RSA = rsa_st +class rsa_meth_st(Structure): + pass +rsa_meth_st._fields_ = [ + ('name', STRING), + ('rsa_pub_enc', CFUNCTYPE(c_int, c_int, POINTER(c_ubyte), POINTER(c_ubyte), POINTER(RSA), c_int)), + ('rsa_pub_dec', CFUNCTYPE(c_int, c_int, POINTER(c_ubyte), POINTER(c_ubyte), POINTER(RSA), c_int)), + ('rsa_priv_enc', CFUNCTYPE(c_int, c_int, POINTER(c_ubyte), POINTER(c_ubyte), POINTER(RSA), c_int)), + ('rsa_priv_dec', CFUNCTYPE(c_int, c_int, POINTER(c_ubyte), POINTER(c_ubyte), POINTER(RSA), c_int)), + ('rsa_mod_exp', CFUNCTYPE(c_int, POINTER(BIGNUM), POINTER(BIGNUM), POINTER(RSA))), + ('bn_mod_exp', CFUNCTYPE(c_int, POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BN_CTX), POINTER(BN_MONT_CTX))), + ('init', CFUNCTYPE(c_int, POINTER(RSA))), + ('finish', CFUNCTYPE(c_int, POINTER(RSA))), + ('flags', c_int), + ('app_data', STRING), + ('rsa_sign', CFUNCTYPE(c_int, c_int, POINTER(c_ubyte), c_uint, 
POINTER(c_ubyte), POINTER(c_uint), POINTER(RSA))), + ('rsa_verify', CFUNCTYPE(c_int, c_int, POINTER(c_ubyte), c_uint, POINTER(c_ubyte), c_uint, POINTER(RSA))), +] +assert sizeof(rsa_meth_st) == 52, sizeof(rsa_meth_st) +assert alignment(rsa_meth_st) == 4, alignment(rsa_meth_st) +RSA_METHOD = rsa_meth_st +rsa_st._fields_ = [ + ('pad', c_int), + ('version', c_long), + ('meth', POINTER(RSA_METHOD)), + ('engine', POINTER(ENGINE)), + ('n', POINTER(BIGNUM)), + ('e', POINTER(BIGNUM)), + ('d', POINTER(BIGNUM)), + ('p', POINTER(BIGNUM)), + ('q', POINTER(BIGNUM)), + ('dmp1', POINTER(BIGNUM)), + ('dmq1', POINTER(BIGNUM)), + ('iqmp', POINTER(BIGNUM)), + ('ex_data', CRYPTO_EX_DATA), + ('references', c_int), + ('flags', c_int), + ('_method_mod_n', POINTER(BN_MONT_CTX)), + ('_method_mod_p', POINTER(BN_MONT_CTX)), + ('_method_mod_q', POINTER(BN_MONT_CTX)), + ('bignum_data', STRING), + ('blinding', POINTER(BN_BLINDING)), +] +assert sizeof(rsa_st) == 84, sizeof(rsa_st) +assert alignment(rsa_st) == 4, alignment(rsa_st) +openssl_fptr = CFUNCTYPE(None) +class SHAstate_st(Structure): + pass +SHAstate_st._fields_ = [ + ('h0', c_uint), + ('h1', c_uint), + ('h2', c_uint), + ('h3', c_uint), + ('h4', c_uint), + ('Nl', c_uint), + ('Nh', c_uint), + ('data', c_uint * 16), + ('num', c_int), +] +assert sizeof(SHAstate_st) == 96, sizeof(SHAstate_st) +assert alignment(SHAstate_st) == 4, alignment(SHAstate_st) +SHA_CTX = SHAstate_st +class ssl_st(Structure): + pass +ssl_crock_st = POINTER(ssl_st) +class ssl_cipher_st(Structure): + pass +ssl_cipher_st._fields_ = [ + ('valid', c_int), + ('name', STRING), + ('id', c_ulong), + ('algorithms', c_ulong), + ('algo_strength', c_ulong), + ('algorithm2', c_ulong), + ('strength_bits', c_int), + ('alg_bits', c_int), + ('mask', c_ulong), + ('mask_strength', c_ulong), +] +assert sizeof(ssl_cipher_st) == 40, sizeof(ssl_cipher_st) +assert alignment(ssl_cipher_st) == 4, alignment(ssl_cipher_st) +SSL_CIPHER = ssl_cipher_st +SSL = ssl_st +class ssl_ctx_st(Structure): + pass +SSL_CTX = ssl_ctx_st +class ssl_method_st(Structure): + pass +class ssl3_enc_method(Structure): + pass +ssl_method_st._fields_ = [ + ('version', c_int), + ('ssl_new', CFUNCTYPE(c_int, POINTER(SSL))), + ('ssl_clear', CFUNCTYPE(None, POINTER(SSL))), + ('ssl_free', CFUNCTYPE(None, POINTER(SSL))), + ('ssl_accept', CFUNCTYPE(c_int, POINTER(SSL))), + ('ssl_connect', CFUNCTYPE(c_int, POINTER(SSL))), + ('ssl_read', CFUNCTYPE(c_int, POINTER(SSL), c_void_p, c_int)), + ('ssl_peek', CFUNCTYPE(c_int, POINTER(SSL), c_void_p, c_int)), + ('ssl_write', CFUNCTYPE(c_int, POINTER(SSL), c_void_p, c_int)), + ('ssl_shutdown', CFUNCTYPE(c_int, POINTER(SSL))), + ('ssl_renegotiate', CFUNCTYPE(c_int, POINTER(SSL))), + ('ssl_renegotiate_check', CFUNCTYPE(c_int, POINTER(SSL))), + ('ssl_ctrl', CFUNCTYPE(c_long, POINTER(SSL), c_int, c_long, c_void_p)), + ('ssl_ctx_ctrl', CFUNCTYPE(c_long, POINTER(SSL_CTX), c_int, c_long, c_void_p)), + ('get_cipher_by_char', CFUNCTYPE(POINTER(SSL_CIPHER), POINTER(c_ubyte))), + ('put_cipher_by_char', CFUNCTYPE(c_int, POINTER(SSL_CIPHER), POINTER(c_ubyte))), + ('ssl_pending', CFUNCTYPE(c_int, POINTER(SSL))), + ('num_ciphers', CFUNCTYPE(c_int)), + ('get_cipher', CFUNCTYPE(POINTER(SSL_CIPHER), c_uint)), + ('get_ssl_method', CFUNCTYPE(POINTER(ssl_method_st), c_int)), + ('get_timeout', CFUNCTYPE(c_long)), + ('ssl3_enc', POINTER(ssl3_enc_method)), + ('ssl_version', CFUNCTYPE(c_int)), + ('ssl_callback_ctrl', CFUNCTYPE(c_long, POINTER(SSL), c_int, CFUNCTYPE(None))), + ('ssl_ctx_callback_ctrl', CFUNCTYPE(c_long, POINTER(SSL_CTX), 
c_int, CFUNCTYPE(None))), +] +assert sizeof(ssl_method_st) == 100, sizeof(ssl_method_st) +assert alignment(ssl_method_st) == 4, alignment(ssl_method_st) +ssl3_enc_method._fields_ = [ +] +SSL_METHOD = ssl_method_st +class ssl_session_st(Structure): + pass +class sess_cert_st(Structure): + pass +ssl_session_st._fields_ = [ + ('ssl_version', c_int), + ('key_arg_length', c_uint), + ('key_arg', c_ubyte * 8), + ('master_key_length', c_int), + ('master_key', c_ubyte * 48), + ('session_id_length', c_uint), + ('session_id', c_ubyte * 32), + ('sid_ctx_length', c_uint), + ('sid_ctx', c_ubyte * 32), + ('not_resumable', c_int), + ('sess_cert', POINTER(sess_cert_st)), + ('peer', POINTER(X509)), + ('verify_result', c_long), + ('references', c_int), + ('timeout', c_long), + ('time', c_long), + ('compress_meth', c_int), + ('cipher', POINTER(SSL_CIPHER)), + ('cipher_id', c_ulong), + ('ciphers', POINTER(STACK)), + ('ex_data', CRYPTO_EX_DATA), + ('prev', POINTER(ssl_session_st)), + ('next', POINTER(ssl_session_st)), +] +assert sizeof(ssl_session_st) == 200, sizeof(ssl_session_st) +assert alignment(ssl_session_st) == 4, alignment(ssl_session_st) +sess_cert_st._fields_ = [ +] +SSL_SESSION = ssl_session_st +GEN_SESSION_CB = CFUNCTYPE(c_int, POINTER(SSL), POINTER(c_ubyte), POINTER(c_uint)) +class ssl_comp_st(Structure): + pass +ssl_comp_st._fields_ = [ + ('id', c_int), + ('name', STRING), + ('method', POINTER(COMP_METHOD)), +] +assert sizeof(ssl_comp_st) == 12, sizeof(ssl_comp_st) +assert alignment(ssl_comp_st) == 4, alignment(ssl_comp_st) +SSL_COMP = ssl_comp_st +class N10ssl_ctx_st4DOLLAR_18E(Structure): + pass +N10ssl_ctx_st4DOLLAR_18E._fields_ = [ + ('sess_connect', c_int), + ('sess_connect_renegotiate', c_int), + ('sess_connect_good', c_int), + ('sess_accept', c_int), + ('sess_accept_renegotiate', c_int), + ('sess_accept_good', c_int), + ('sess_miss', c_int), + ('sess_timeout', c_int), + ('sess_cache_full', c_int), + ('sess_hit', c_int), + ('sess_cb_hit', c_int), +] +assert sizeof(N10ssl_ctx_st4DOLLAR_18E) == 44, sizeof(N10ssl_ctx_st4DOLLAR_18E) +assert alignment(N10ssl_ctx_st4DOLLAR_18E) == 4, alignment(N10ssl_ctx_st4DOLLAR_18E) +class cert_st(Structure): + pass +ssl_ctx_st._fields_ = [ + ('method', POINTER(SSL_METHOD)), + ('cipher_list', POINTER(STACK)), + ('cipher_list_by_id', POINTER(STACK)), + ('cert_store', POINTER(x509_store_st)), + ('sessions', POINTER(lhash_st)), + ('session_cache_size', c_ulong), + ('session_cache_head', POINTER(ssl_session_st)), + ('session_cache_tail', POINTER(ssl_session_st)), + ('session_cache_mode', c_int), + ('session_timeout', c_long), + ('new_session_cb', CFUNCTYPE(c_int, POINTER(ssl_st), POINTER(SSL_SESSION))), + ('remove_session_cb', CFUNCTYPE(None, POINTER(ssl_ctx_st), POINTER(SSL_SESSION))), + ('get_session_cb', CFUNCTYPE(POINTER(SSL_SESSION), POINTER(ssl_st), POINTER(c_ubyte), c_int, POINTER(c_int))), + ('stats', N10ssl_ctx_st4DOLLAR_18E), + ('references', c_int), + ('app_verify_callback', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), c_void_p)), + ('app_verify_arg', c_void_p), + ('default_passwd_callback', POINTER(pem_password_cb)), + ('default_passwd_callback_userdata', c_void_p), + ('client_cert_cb', CFUNCTYPE(c_int, POINTER(SSL), POINTER(POINTER(X509)), POINTER(POINTER(EVP_PKEY)))), + ('ex_data', CRYPTO_EX_DATA), + ('rsa_md5', POINTER(EVP_MD)), + ('md5', POINTER(EVP_MD)), + ('sha1', POINTER(EVP_MD)), + ('extra_certs', POINTER(STACK)), + ('comp_methods', POINTER(STACK)), + ('info_callback', CFUNCTYPE(None, POINTER(SSL), c_int, c_int)), + ('client_CA', POINTER(STACK)), + 
('options', c_ulong), + ('mode', c_ulong), + ('max_cert_list', c_long), + ('cert', POINTER(cert_st)), + ('read_ahead', c_int), + ('msg_callback', CFUNCTYPE(None, c_int, c_int, c_int, c_void_p, c_ulong, POINTER(SSL), c_void_p)), + ('msg_callback_arg', c_void_p), + ('verify_mode', c_int), + ('verify_depth', c_int), + ('sid_ctx_length', c_uint), + ('sid_ctx', c_ubyte * 32), + ('default_verify_callback', CFUNCTYPE(c_int, c_int, POINTER(X509_STORE_CTX))), + ('generate_session_id', GEN_SESSION_CB), + ('purpose', c_int), + ('trust', c_int), + ('quiet_shutdown', c_int), +] +assert sizeof(ssl_ctx_st) == 248, sizeof(ssl_ctx_st) +assert alignment(ssl_ctx_st) == 4, alignment(ssl_ctx_st) +cert_st._fields_ = [ +] +class ssl2_state_st(Structure): + pass +class ssl3_state_st(Structure): + pass +ssl_st._fields_ = [ + ('version', c_int), + ('type', c_int), + ('method', POINTER(SSL_METHOD)), + ('rbio', POINTER(BIO)), + ('wbio', POINTER(BIO)), + ('bbio', POINTER(BIO)), + ('rwstate', c_int), + ('in_handshake', c_int), + ('handshake_func', CFUNCTYPE(c_int)), + ('server', c_int), + ('new_session', c_int), + ('quiet_shutdown', c_int), + ('shutdown', c_int), + ('state', c_int), + ('rstate', c_int), + ('init_buf', POINTER(BUF_MEM)), + ('init_msg', c_void_p), + ('init_num', c_int), + ('init_off', c_int), + ('packet', POINTER(c_ubyte)), + ('packet_length', c_uint), + ('s2', POINTER(ssl2_state_st)), + ('s3', POINTER(ssl3_state_st)), + ('read_ahead', c_int), + ('msg_callback', CFUNCTYPE(None, c_int, c_int, c_int, c_void_p, c_ulong, POINTER(SSL), c_void_p)), + ('msg_callback_arg', c_void_p), + ('hit', c_int), + ('purpose', c_int), + ('trust', c_int), + ('cipher_list', POINTER(STACK)), + ('cipher_list_by_id', POINTER(STACK)), + ('enc_read_ctx', POINTER(EVP_CIPHER_CTX)), + ('read_hash', POINTER(EVP_MD)), + ('expand', POINTER(COMP_CTX)), + ('enc_write_ctx', POINTER(EVP_CIPHER_CTX)), + ('write_hash', POINTER(EVP_MD)), + ('compress', POINTER(COMP_CTX)), + ('cert', POINTER(cert_st)), + ('sid_ctx_length', c_uint), + ('sid_ctx', c_ubyte * 32), + ('session', POINTER(SSL_SESSION)), + ('generate_session_id', GEN_SESSION_CB), + ('verify_mode', c_int), + ('verify_depth', c_int), + ('verify_callback', CFUNCTYPE(c_int, c_int, POINTER(X509_STORE_CTX))), + ('info_callback', CFUNCTYPE(None, POINTER(SSL), c_int, c_int)), + ('error', c_int), + ('error_code', c_int), + ('ctx', POINTER(SSL_CTX)), + ('debug', c_int), + ('verify_result', c_long), + ('ex_data', CRYPTO_EX_DATA), + ('client_CA', POINTER(STACK)), + ('references', c_int), + ('options', c_ulong), + ('mode', c_ulong), + ('max_cert_list', c_long), + ('first_packet', c_int), + ('client_version', c_int), +] +assert sizeof(ssl_st) == 268, sizeof(ssl_st) +assert alignment(ssl_st) == 4, alignment(ssl_st) +class N13ssl2_state_st4DOLLAR_19E(Structure): + pass +N13ssl2_state_st4DOLLAR_19E._fields_ = [ + ('conn_id_length', c_uint), + ('cert_type', c_uint), + ('cert_length', c_uint), + ('csl', c_uint), + ('clear', c_uint), + ('enc', c_uint), + ('ccl', c_ubyte * 32), + ('cipher_spec_length', c_uint), + ('session_id_length', c_uint), + ('clen', c_uint), + ('rlen', c_uint), +] +assert sizeof(N13ssl2_state_st4DOLLAR_19E) == 72, sizeof(N13ssl2_state_st4DOLLAR_19E) +assert alignment(N13ssl2_state_st4DOLLAR_19E) == 4, alignment(N13ssl2_state_st4DOLLAR_19E) +ssl2_state_st._fields_ = [ + ('three_byte_header', c_int), + ('clear_text', c_int), + ('escape', c_int), + ('ssl2_rollback', c_int), + ('wnum', c_uint), + ('wpend_tot', c_int), + ('wpend_buf', POINTER(c_ubyte)), + ('wpend_off', c_int), + 
('wpend_len', c_int), + ('wpend_ret', c_int), + ('rbuf_left', c_int), + ('rbuf_offs', c_int), + ('rbuf', POINTER(c_ubyte)), + ('wbuf', POINTER(c_ubyte)), + ('write_ptr', POINTER(c_ubyte)), + ('padding', c_uint), + ('rlength', c_uint), + ('ract_data_length', c_int), + ('wlength', c_uint), + ('wact_data_length', c_int), + ('ract_data', POINTER(c_ubyte)), + ('wact_data', POINTER(c_ubyte)), + ('mac_data', POINTER(c_ubyte)), + ('read_key', POINTER(c_ubyte)), + ('write_key', POINTER(c_ubyte)), + ('challenge_length', c_uint), + ('challenge', c_ubyte * 32), + ('conn_id_length', c_uint), + ('conn_id', c_ubyte * 16), + ('key_material_length', c_uint), + ('key_material', c_ubyte * 48), + ('read_sequence', c_ulong), + ('write_sequence', c_ulong), + ('tmp', N13ssl2_state_st4DOLLAR_19E), +] +assert sizeof(ssl2_state_st) == 288, sizeof(ssl2_state_st) +assert alignment(ssl2_state_st) == 4, alignment(ssl2_state_st) +SSL2_STATE = ssl2_state_st +class ssl3_record_st(Structure): + pass +ssl3_record_st._fields_ = [ + ('type', c_int), + ('length', c_uint), + ('off', c_uint), + ('data', POINTER(c_ubyte)), + ('input', POINTER(c_ubyte)), + ('comp', POINTER(c_ubyte)), +] +assert sizeof(ssl3_record_st) == 24, sizeof(ssl3_record_st) +assert alignment(ssl3_record_st) == 4, alignment(ssl3_record_st) +SSL3_RECORD = ssl3_record_st +class ssl3_buffer_st(Structure): + pass +size_t = __darwin_size_t +ssl3_buffer_st._fields_ = [ + ('buf', POINTER(c_ubyte)), + ('len', size_t), + ('offset', c_int), + ('left', c_int), +] +assert sizeof(ssl3_buffer_st) == 16, sizeof(ssl3_buffer_st) +assert alignment(ssl3_buffer_st) == 4, alignment(ssl3_buffer_st) +SSL3_BUFFER = ssl3_buffer_st +class N13ssl3_state_st4DOLLAR_20E(Structure): + pass +N13ssl3_state_st4DOLLAR_20E._fields_ = [ + ('cert_verify_md', c_ubyte * 72), + ('finish_md', c_ubyte * 72), + ('finish_md_len', c_int), + ('peer_finish_md', c_ubyte * 72), + ('peer_finish_md_len', c_int), + ('message_size', c_ulong), + ('message_type', c_int), + ('new_cipher', POINTER(SSL_CIPHER)), + ('dh', POINTER(DH)), + ('next_state', c_int), + ('reuse_message', c_int), + ('cert_req', c_int), + ('ctype_num', c_int), + ('ctype', c_char * 7), + ('ca_names', POINTER(STACK)), + ('use_rsa_tmp', c_int), + ('key_block_length', c_int), + ('key_block', POINTER(c_ubyte)), + ('new_sym_enc', POINTER(EVP_CIPHER)), + ('new_hash', POINTER(EVP_MD)), + ('new_compression', POINTER(SSL_COMP)), + ('cert_request', c_int), +] +assert sizeof(N13ssl3_state_st4DOLLAR_20E) == 296, sizeof(N13ssl3_state_st4DOLLAR_20E) +assert alignment(N13ssl3_state_st4DOLLAR_20E) == 4, alignment(N13ssl3_state_st4DOLLAR_20E) +ssl3_state_st._fields_ = [ + ('flags', c_long), + ('delay_buf_pop_ret', c_int), + ('read_sequence', c_ubyte * 8), + ('read_mac_secret', c_ubyte * 36), + ('write_sequence', c_ubyte * 8), + ('write_mac_secret', c_ubyte * 36), + ('server_random', c_ubyte * 32), + ('client_random', c_ubyte * 32), + ('need_empty_fragments', c_int), + ('empty_fragment_done', c_int), + ('rbuf', SSL3_BUFFER), + ('wbuf', SSL3_BUFFER), + ('rrec', SSL3_RECORD), + ('wrec', SSL3_RECORD), + ('alert_fragment', c_ubyte * 2), + ('alert_fragment_len', c_uint), + ('handshake_fragment', c_ubyte * 4), + ('handshake_fragment_len', c_uint), + ('wnum', c_uint), + ('wpend_tot', c_int), + ('wpend_type', c_int), + ('wpend_ret', c_int), + ('wpend_buf', POINTER(c_ubyte)), + ('finish_dgst1', EVP_MD_CTX), + ('finish_dgst2', EVP_MD_CTX), + ('change_cipher_spec', c_int), + ('warn_alert', c_int), + ('fatal_alert', c_int), + ('alert_dispatch', c_int), + ('send_alert', 
c_ubyte * 2), + ('renegotiate', c_int), + ('total_renegotiations', c_int), + ('num_renegotiations', c_int), + ('in_read_app_data', c_int), + ('tmp', N13ssl3_state_st4DOLLAR_20E), +] +assert sizeof(ssl3_state_st) == 648, sizeof(ssl3_state_st) +assert alignment(ssl3_state_st) == 4, alignment(ssl3_state_st) +SSL3_STATE = ssl3_state_st +stack_st._fields_ = [ + ('num', c_int), + ('data', POINTER(STRING)), + ('sorted', c_int), + ('num_alloc', c_int), + ('comp', CFUNCTYPE(c_int, POINTER(STRING), POINTER(STRING))), +] +assert sizeof(stack_st) == 20, sizeof(stack_st) +assert alignment(stack_st) == 4, alignment(stack_st) +class ui_st(Structure): + pass +ui_st._fields_ = [ +] +UI = ui_st +class ui_method_st(Structure): + pass +ui_method_st._fields_ = [ +] +UI_METHOD = ui_method_st +class ui_string_st(Structure): + pass +ui_string_st._fields_ = [ +] +UI_STRING = ui_string_st + +# values for enumeration 'UI_string_types' +UI_string_types = c_int # enum +class X509_objects_st(Structure): + pass +X509_objects_st._fields_ = [ + ('nid', c_int), + ('a2i', CFUNCTYPE(c_int)), + ('i2a', CFUNCTYPE(c_int)), +] +assert sizeof(X509_objects_st) == 12, sizeof(X509_objects_st) +assert alignment(X509_objects_st) == 4, alignment(X509_objects_st) +X509_OBJECTS = X509_objects_st +X509_algor_st._fields_ = [ + ('algorithm', POINTER(ASN1_OBJECT)), + ('parameter', POINTER(ASN1_TYPE)), +] +assert sizeof(X509_algor_st) == 8, sizeof(X509_algor_st) +assert alignment(X509_algor_st) == 4, alignment(X509_algor_st) +class X509_val_st(Structure): + pass +X509_val_st._fields_ = [ + ('notBefore', POINTER(ASN1_TIME)), + ('notAfter', POINTER(ASN1_TIME)), +] +assert sizeof(X509_val_st) == 8, sizeof(X509_val_st) +assert alignment(X509_val_st) == 4, alignment(X509_val_st) +X509_VAL = X509_val_st +class X509_pubkey_st(Structure): + pass +X509_pubkey_st._fields_ = [ + ('algor', POINTER(X509_ALGOR)), + ('public_key', POINTER(ASN1_BIT_STRING)), + ('pkey', POINTER(EVP_PKEY)), +] +assert sizeof(X509_pubkey_st) == 12, sizeof(X509_pubkey_st) +assert alignment(X509_pubkey_st) == 4, alignment(X509_pubkey_st) +X509_PUBKEY = X509_pubkey_st +class X509_sig_st(Structure): + pass +X509_sig_st._fields_ = [ + ('algor', POINTER(X509_ALGOR)), + ('digest', POINTER(ASN1_OCTET_STRING)), +] +assert sizeof(X509_sig_st) == 8, sizeof(X509_sig_st) +assert alignment(X509_sig_st) == 4, alignment(X509_sig_st) +X509_SIG = X509_sig_st +class X509_name_entry_st(Structure): + pass +X509_name_entry_st._fields_ = [ + ('object', POINTER(ASN1_OBJECT)), + ('value', POINTER(ASN1_STRING)), + ('set', c_int), + ('size', c_int), +] +assert sizeof(X509_name_entry_st) == 16, sizeof(X509_name_entry_st) +assert alignment(X509_name_entry_st) == 4, alignment(X509_name_entry_st) +X509_NAME_ENTRY = X509_name_entry_st +X509_name_st._fields_ = [ + ('entries', POINTER(STACK)), + ('modified', c_int), + ('bytes', POINTER(BUF_MEM)), + ('hash', c_ulong), +] +assert sizeof(X509_name_st) == 16, sizeof(X509_name_st) +assert alignment(X509_name_st) == 4, alignment(X509_name_st) +class X509_extension_st(Structure): + pass +X509_extension_st._fields_ = [ + ('object', POINTER(ASN1_OBJECT)), + ('critical', ASN1_BOOLEAN), + ('value', POINTER(ASN1_OCTET_STRING)), +] +assert sizeof(X509_extension_st) == 12, sizeof(X509_extension_st) +assert alignment(X509_extension_st) == 4, alignment(X509_extension_st) +X509_EXTENSION = X509_extension_st +class x509_attributes_st(Structure): + pass +class N18x509_attributes_st4DOLLAR_13E(Union): + pass +N18x509_attributes_st4DOLLAR_13E._fields_ = [ + ('ptr', STRING), + 
('set', POINTER(STACK)), + ('single', POINTER(ASN1_TYPE)), +] +assert sizeof(N18x509_attributes_st4DOLLAR_13E) == 4, sizeof(N18x509_attributes_st4DOLLAR_13E) +assert alignment(N18x509_attributes_st4DOLLAR_13E) == 4, alignment(N18x509_attributes_st4DOLLAR_13E) +x509_attributes_st._fields_ = [ + ('object', POINTER(ASN1_OBJECT)), + ('single', c_int), + ('value', N18x509_attributes_st4DOLLAR_13E), +] +assert sizeof(x509_attributes_st) == 12, sizeof(x509_attributes_st) +assert alignment(x509_attributes_st) == 4, alignment(x509_attributes_st) +X509_ATTRIBUTE = x509_attributes_st +class X509_req_info_st(Structure): + pass +X509_req_info_st._fields_ = [ + ('enc', ASN1_ENCODING), + ('version', POINTER(ASN1_INTEGER)), + ('subject', POINTER(X509_NAME)), + ('pubkey', POINTER(X509_PUBKEY)), + ('attributes', POINTER(STACK)), +] +assert sizeof(X509_req_info_st) == 28, sizeof(X509_req_info_st) +assert alignment(X509_req_info_st) == 4, alignment(X509_req_info_st) +X509_REQ_INFO = X509_req_info_st +class X509_req_st(Structure): + pass +X509_req_st._fields_ = [ + ('req_info', POINTER(X509_REQ_INFO)), + ('sig_alg', POINTER(X509_ALGOR)), + ('signature', POINTER(ASN1_BIT_STRING)), + ('references', c_int), +] +assert sizeof(X509_req_st) == 16, sizeof(X509_req_st) +assert alignment(X509_req_st) == 4, alignment(X509_req_st) +X509_REQ = X509_req_st +class x509_cinf_st(Structure): + pass +x509_cinf_st._fields_ = [ + ('version', POINTER(ASN1_INTEGER)), + ('serialNumber', POINTER(ASN1_INTEGER)), + ('signature', POINTER(X509_ALGOR)), + ('issuer', POINTER(X509_NAME)), + ('validity', POINTER(X509_VAL)), + ('subject', POINTER(X509_NAME)), + ('key', POINTER(X509_PUBKEY)), + ('issuerUID', POINTER(ASN1_BIT_STRING)), + ('subjectUID', POINTER(ASN1_BIT_STRING)), + ('extensions', POINTER(STACK)), +] +assert sizeof(x509_cinf_st) == 40, sizeof(x509_cinf_st) +assert alignment(x509_cinf_st) == 4, alignment(x509_cinf_st) +X509_CINF = x509_cinf_st +class x509_cert_aux_st(Structure): + pass +x509_cert_aux_st._fields_ = [ + ('trust', POINTER(STACK)), + ('reject', POINTER(STACK)), + ('alias', POINTER(ASN1_UTF8STRING)), + ('keyid', POINTER(ASN1_OCTET_STRING)), + ('other', POINTER(STACK)), +] +assert sizeof(x509_cert_aux_st) == 20, sizeof(x509_cert_aux_st) +assert alignment(x509_cert_aux_st) == 4, alignment(x509_cert_aux_st) +X509_CERT_AUX = x509_cert_aux_st +class AUTHORITY_KEYID_st(Structure): + pass +x509_st._fields_ = [ + ('cert_info', POINTER(X509_CINF)), + ('sig_alg', POINTER(X509_ALGOR)), + ('signature', POINTER(ASN1_BIT_STRING)), + ('valid', c_int), + ('references', c_int), + ('name', STRING), + ('ex_data', CRYPTO_EX_DATA), + ('ex_pathlen', c_long), + ('ex_flags', c_ulong), + ('ex_kusage', c_ulong), + ('ex_xkusage', c_ulong), + ('ex_nscert', c_ulong), + ('skid', POINTER(ASN1_OCTET_STRING)), + ('akid', POINTER(AUTHORITY_KEYID_st)), + ('sha1_hash', c_ubyte * 20), + ('aux', POINTER(X509_CERT_AUX)), +] +assert sizeof(x509_st) == 84, sizeof(x509_st) +assert alignment(x509_st) == 4, alignment(x509_st) +AUTHORITY_KEYID_st._fields_ = [ +] +class x509_trust_st(Structure): + pass +x509_trust_st._fields_ = [ + ('trust', c_int), + ('flags', c_int), + ('check_trust', CFUNCTYPE(c_int, POINTER(x509_trust_st), POINTER(X509), c_int)), + ('name', STRING), + ('arg1', c_int), + ('arg2', c_void_p), +] +assert sizeof(x509_trust_st) == 24, sizeof(x509_trust_st) +assert alignment(x509_trust_st) == 4, alignment(x509_trust_st) +X509_TRUST = x509_trust_st +class X509_revoked_st(Structure): + pass +X509_revoked_st._fields_ = [ + ('serialNumber', 
POINTER(ASN1_INTEGER)), + ('revocationDate', POINTER(ASN1_TIME)), + ('extensions', POINTER(STACK)), + ('sequence', c_int), +] +assert sizeof(X509_revoked_st) == 16, sizeof(X509_revoked_st) +assert alignment(X509_revoked_st) == 4, alignment(X509_revoked_st) +X509_REVOKED = X509_revoked_st +class X509_crl_info_st(Structure): + pass +X509_crl_info_st._fields_ = [ + ('version', POINTER(ASN1_INTEGER)), + ('sig_alg', POINTER(X509_ALGOR)), + ('issuer', POINTER(X509_NAME)), + ('lastUpdate', POINTER(ASN1_TIME)), + ('nextUpdate', POINTER(ASN1_TIME)), + ('revoked', POINTER(STACK)), + ('extensions', POINTER(STACK)), + ('enc', ASN1_ENCODING), +] +assert sizeof(X509_crl_info_st) == 40, sizeof(X509_crl_info_st) +assert alignment(X509_crl_info_st) == 4, alignment(X509_crl_info_st) +X509_CRL_INFO = X509_crl_info_st +X509_crl_st._fields_ = [ + ('crl', POINTER(X509_CRL_INFO)), + ('sig_alg', POINTER(X509_ALGOR)), + ('signature', POINTER(ASN1_BIT_STRING)), + ('references', c_int), +] +assert sizeof(X509_crl_st) == 16, sizeof(X509_crl_st) +assert alignment(X509_crl_st) == 4, alignment(X509_crl_st) +class private_key_st(Structure): + pass +private_key_st._fields_ = [ + ('version', c_int), + ('enc_algor', POINTER(X509_ALGOR)), + ('enc_pkey', POINTER(ASN1_OCTET_STRING)), + ('dec_pkey', POINTER(EVP_PKEY)), + ('key_length', c_int), + ('key_data', STRING), + ('key_free', c_int), + ('cipher', EVP_CIPHER_INFO), + ('references', c_int), +] +assert sizeof(private_key_st) == 52, sizeof(private_key_st) +assert alignment(private_key_st) == 4, alignment(private_key_st) +X509_PKEY = private_key_st +class X509_info_st(Structure): + pass +X509_info_st._fields_ = [ + ('x509', POINTER(X509)), + ('crl', POINTER(X509_CRL)), + ('x_pkey', POINTER(X509_PKEY)), + ('enc_cipher', EVP_CIPHER_INFO), + ('enc_len', c_int), + ('enc_data', STRING), + ('references', c_int), +] +assert sizeof(X509_info_st) == 44, sizeof(X509_info_st) +assert alignment(X509_info_st) == 4, alignment(X509_info_st) +X509_INFO = X509_info_st +class Netscape_spkac_st(Structure): + pass +Netscape_spkac_st._fields_ = [ + ('pubkey', POINTER(X509_PUBKEY)), + ('challenge', POINTER(ASN1_IA5STRING)), +] +assert sizeof(Netscape_spkac_st) == 8, sizeof(Netscape_spkac_st) +assert alignment(Netscape_spkac_st) == 4, alignment(Netscape_spkac_st) +NETSCAPE_SPKAC = Netscape_spkac_st +class Netscape_spki_st(Structure): + pass +Netscape_spki_st._fields_ = [ + ('spkac', POINTER(NETSCAPE_SPKAC)), + ('sig_algor', POINTER(X509_ALGOR)), + ('signature', POINTER(ASN1_BIT_STRING)), +] +assert sizeof(Netscape_spki_st) == 12, sizeof(Netscape_spki_st) +assert alignment(Netscape_spki_st) == 4, alignment(Netscape_spki_st) +NETSCAPE_SPKI = Netscape_spki_st +class Netscape_certificate_sequence(Structure): + pass +Netscape_certificate_sequence._fields_ = [ + ('type', POINTER(ASN1_OBJECT)), + ('certs', POINTER(STACK)), +] +assert sizeof(Netscape_certificate_sequence) == 8, sizeof(Netscape_certificate_sequence) +assert alignment(Netscape_certificate_sequence) == 4, alignment(Netscape_certificate_sequence) +NETSCAPE_CERT_SEQUENCE = Netscape_certificate_sequence +class PBEPARAM_st(Structure): + pass +PBEPARAM_st._fields_ = [ + ('salt', POINTER(ASN1_OCTET_STRING)), + ('iter', POINTER(ASN1_INTEGER)), +] +assert sizeof(PBEPARAM_st) == 8, sizeof(PBEPARAM_st) +assert alignment(PBEPARAM_st) == 4, alignment(PBEPARAM_st) +PBEPARAM = PBEPARAM_st +class PBE2PARAM_st(Structure): + pass +PBE2PARAM_st._fields_ = [ + ('keyfunc', POINTER(X509_ALGOR)), + ('encryption', POINTER(X509_ALGOR)), +] +assert 
sizeof(PBE2PARAM_st) == 8, sizeof(PBE2PARAM_st) +assert alignment(PBE2PARAM_st) == 4, alignment(PBE2PARAM_st) +PBE2PARAM = PBE2PARAM_st +class PBKDF2PARAM_st(Structure): + pass +PBKDF2PARAM_st._fields_ = [ + ('salt', POINTER(ASN1_TYPE)), + ('iter', POINTER(ASN1_INTEGER)), + ('keylength', POINTER(ASN1_INTEGER)), + ('prf', POINTER(X509_ALGOR)), +] +assert sizeof(PBKDF2PARAM_st) == 16, sizeof(PBKDF2PARAM_st) +assert alignment(PBKDF2PARAM_st) == 4, alignment(PBKDF2PARAM_st) +PBKDF2PARAM = PBKDF2PARAM_st +class pkcs8_priv_key_info_st(Structure): + pass +pkcs8_priv_key_info_st._fields_ = [ + ('broken', c_int), + ('version', POINTER(ASN1_INTEGER)), + ('pkeyalg', POINTER(X509_ALGOR)), + ('pkey', POINTER(ASN1_TYPE)), + ('attributes', POINTER(STACK)), +] +assert sizeof(pkcs8_priv_key_info_st) == 20, sizeof(pkcs8_priv_key_info_st) +assert alignment(pkcs8_priv_key_info_st) == 4, alignment(pkcs8_priv_key_info_st) +PKCS8_PRIV_KEY_INFO = pkcs8_priv_key_info_st +class x509_hash_dir_st(Structure): + pass +x509_hash_dir_st._fields_ = [ + ('num_dirs', c_int), + ('dirs', POINTER(STRING)), + ('dirs_type', POINTER(c_int)), + ('num_dirs_alloced', c_int), +] +assert sizeof(x509_hash_dir_st) == 16, sizeof(x509_hash_dir_st) +assert alignment(x509_hash_dir_st) == 4, alignment(x509_hash_dir_st) +X509_HASH_DIR_CTX = x509_hash_dir_st +class x509_file_st(Structure): + pass +x509_file_st._fields_ = [ + ('num_paths', c_int), + ('num_alloced', c_int), + ('paths', POINTER(STRING)), + ('path_type', POINTER(c_int)), +] +assert sizeof(x509_file_st) == 16, sizeof(x509_file_st) +assert alignment(x509_file_st) == 4, alignment(x509_file_st) +X509_CERT_FILE_CTX = x509_file_st +class x509_object_st(Structure): + pass +class N14x509_object_st4DOLLAR_14E(Union): + pass +N14x509_object_st4DOLLAR_14E._fields_ = [ + ('ptr', STRING), + ('x509', POINTER(X509)), + ('crl', POINTER(X509_CRL)), + ('pkey', POINTER(EVP_PKEY)), +] +assert sizeof(N14x509_object_st4DOLLAR_14E) == 4, sizeof(N14x509_object_st4DOLLAR_14E) +assert alignment(N14x509_object_st4DOLLAR_14E) == 4, alignment(N14x509_object_st4DOLLAR_14E) +x509_object_st._fields_ = [ + ('type', c_int), + ('data', N14x509_object_st4DOLLAR_14E), +] +assert sizeof(x509_object_st) == 8, sizeof(x509_object_st) +assert alignment(x509_object_st) == 4, alignment(x509_object_st) +X509_OBJECT = x509_object_st +class x509_lookup_st(Structure): + pass +X509_LOOKUP = x509_lookup_st +class x509_lookup_method_st(Structure): + pass +x509_lookup_method_st._fields_ = [ + ('name', STRING), + ('new_item', CFUNCTYPE(c_int, POINTER(X509_LOOKUP))), + ('free', CFUNCTYPE(None, POINTER(X509_LOOKUP))), + ('init', CFUNCTYPE(c_int, POINTER(X509_LOOKUP))), + ('shutdown', CFUNCTYPE(c_int, POINTER(X509_LOOKUP))), + ('ctrl', CFUNCTYPE(c_int, POINTER(X509_LOOKUP), c_int, STRING, c_long, POINTER(STRING))), + ('get_by_subject', CFUNCTYPE(c_int, POINTER(X509_LOOKUP), c_int, POINTER(X509_NAME), POINTER(X509_OBJECT))), + ('get_by_issuer_serial', CFUNCTYPE(c_int, POINTER(X509_LOOKUP), c_int, POINTER(X509_NAME), POINTER(ASN1_INTEGER), POINTER(X509_OBJECT))), + ('get_by_fingerprint', CFUNCTYPE(c_int, POINTER(X509_LOOKUP), c_int, POINTER(c_ubyte), c_int, POINTER(X509_OBJECT))), + ('get_by_alias', CFUNCTYPE(c_int, POINTER(X509_LOOKUP), c_int, STRING, c_int, POINTER(X509_OBJECT))), +] +assert sizeof(x509_lookup_method_st) == 40, sizeof(x509_lookup_method_st) +assert alignment(x509_lookup_method_st) == 4, alignment(x509_lookup_method_st) +X509_LOOKUP_METHOD = x509_lookup_method_st +x509_store_st._fields_ = [ + ('cache', c_int), + 
('objs', POINTER(STACK)), + ('get_cert_methods', POINTER(STACK)), + ('flags', c_ulong), + ('purpose', c_int), + ('trust', c_int), + ('verify', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX))), + ('verify_cb', CFUNCTYPE(c_int, c_int, POINTER(X509_STORE_CTX))), + ('get_issuer', CFUNCTYPE(c_int, POINTER(POINTER(X509)), POINTER(X509_STORE_CTX), POINTER(X509))), + ('check_issued', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(X509), POINTER(X509))), + ('check_revocation', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX))), + ('get_crl', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(POINTER(X509_CRL)), POINTER(X509))), + ('check_crl', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(X509_CRL))), + ('cert_crl', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(X509_CRL), POINTER(X509))), + ('cleanup', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX))), + ('ex_data', CRYPTO_EX_DATA), + ('references', c_int), + ('depth', c_int), +] +assert sizeof(x509_store_st) == 76, sizeof(x509_store_st) +assert alignment(x509_store_st) == 4, alignment(x509_store_st) +x509_lookup_st._fields_ = [ + ('init', c_int), + ('skip', c_int), + ('method', POINTER(X509_LOOKUP_METHOD)), + ('method_data', STRING), + ('store_ctx', POINTER(X509_STORE)), +] +assert sizeof(x509_lookup_st) == 20, sizeof(x509_lookup_st) +assert alignment(x509_lookup_st) == 4, alignment(x509_lookup_st) +time_t = __darwin_time_t +x509_store_ctx_st._fields_ = [ + ('ctx', POINTER(X509_STORE)), + ('current_method', c_int), + ('cert', POINTER(X509)), + ('untrusted', POINTER(STACK)), + ('purpose', c_int), + ('trust', c_int), + ('check_time', time_t), + ('flags', c_ulong), + ('other_ctx', c_void_p), + ('verify', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX))), + ('verify_cb', CFUNCTYPE(c_int, c_int, POINTER(X509_STORE_CTX))), + ('get_issuer', CFUNCTYPE(c_int, POINTER(POINTER(X509)), POINTER(X509_STORE_CTX), POINTER(X509))), + ('check_issued', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(X509), POINTER(X509))), + ('check_revocation', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX))), + ('get_crl', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(POINTER(X509_CRL)), POINTER(X509))), + ('check_crl', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(X509_CRL))), + ('cert_crl', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(X509_CRL), POINTER(X509))), + ('cleanup', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX))), + ('depth', c_int), + ('valid', c_int), + ('last_untrusted', c_int), + ('chain', POINTER(STACK)), + ('error_depth', c_int), + ('error', c_int), + ('current_cert', POINTER(X509)), + ('current_issuer', POINTER(X509)), + ('current_crl', POINTER(X509_CRL)), + ('ex_data', CRYPTO_EX_DATA), +] +assert sizeof(x509_store_ctx_st) == 116, sizeof(x509_store_ctx_st) +assert alignment(x509_store_ctx_st) == 4, alignment(x509_store_ctx_st) +va_list = __darwin_va_list +__darwin_off_t = __int64_t +fpos_t = __darwin_off_t +class __sbuf(Structure): + pass +__sbuf._fields_ = [ + ('_base', POINTER(c_ubyte)), + ('_size', c_int), +] +assert sizeof(__sbuf) == 8, sizeof(__sbuf) +assert alignment(__sbuf) == 4, alignment(__sbuf) +class __sFILEX(Structure): + pass +__sFILEX._fields_ = [ +] +class __sFILE(Structure): + pass +__sFILE._pack_ = 4 +__sFILE._fields_ = [ + ('_p', POINTER(c_ubyte)), + ('_r', c_int), + ('_w', c_int), + ('_flags', c_short), + ('_file', c_short), + ('_bf', __sbuf), + ('_lbfsize', c_int), + ('_cookie', c_void_p), + ('_close', CFUNCTYPE(c_int, c_void_p)), + ('_read', CFUNCTYPE(c_int, c_void_p, STRING, c_int)), + ('_seek', CFUNCTYPE(fpos_t, c_void_p, c_longlong, c_int)), + 
('_write', CFUNCTYPE(c_int, c_void_p, STRING, c_int)), + ('_ub', __sbuf), + ('_extra', POINTER(__sFILEX)), + ('_ur', c_int), + ('_ubuf', c_ubyte * 3), + ('_nbuf', c_ubyte * 1), + ('_lb', __sbuf), + ('_blksize', c_int), + ('_offset', fpos_t), +] +assert sizeof(__sFILE) == 88, sizeof(__sFILE) +assert alignment(__sFILE) == 4, alignment(__sFILE) +FILE = __sFILE +ct_rune_t = __darwin_ct_rune_t +rune_t = __darwin_rune_t +class div_t(Structure): + pass +div_t._fields_ = [ + ('quot', c_int), + ('rem', c_int), +] +assert sizeof(div_t) == 8, sizeof(div_t) +assert alignment(div_t) == 4, alignment(div_t) +class ldiv_t(Structure): + pass +ldiv_t._fields_ = [ + ('quot', c_long), + ('rem', c_long), +] +assert sizeof(ldiv_t) == 8, sizeof(ldiv_t) +assert alignment(ldiv_t) == 4, alignment(ldiv_t) +class lldiv_t(Structure): + pass +lldiv_t._pack_ = 4 +lldiv_t._fields_ = [ + ('quot', c_longlong), + ('rem', c_longlong), +] +assert sizeof(lldiv_t) == 16, sizeof(lldiv_t) +assert alignment(lldiv_t) == 4, alignment(lldiv_t) +__darwin_dev_t = __int32_t +dev_t = __darwin_dev_t +__darwin_mode_t = __uint16_t +mode_t = __darwin_mode_t +class mcontext(Structure): + pass +mcontext._fields_ = [ +] +class mcontext64(Structure): + pass +mcontext64._fields_ = [ +] +class __darwin_pthread_handler_rec(Structure): + pass +__darwin_pthread_handler_rec._fields_ = [ + ('__routine', CFUNCTYPE(None, c_void_p)), + ('__arg', c_void_p), + ('__next', POINTER(__darwin_pthread_handler_rec)), +] +assert sizeof(__darwin_pthread_handler_rec) == 12, sizeof(__darwin_pthread_handler_rec) +assert alignment(__darwin_pthread_handler_rec) == 4, alignment(__darwin_pthread_handler_rec) +class _opaque_pthread_attr_t(Structure): + pass +_opaque_pthread_attr_t._fields_ = [ + ('__sig', c_long), + ('__opaque', c_char * 36), +] +assert sizeof(_opaque_pthread_attr_t) == 40, sizeof(_opaque_pthread_attr_t) +assert alignment(_opaque_pthread_attr_t) == 4, alignment(_opaque_pthread_attr_t) +class _opaque_pthread_cond_t(Structure): + pass +_opaque_pthread_cond_t._fields_ = [ + ('__sig', c_long), + ('__opaque', c_char * 24), +] +assert sizeof(_opaque_pthread_cond_t) == 28, sizeof(_opaque_pthread_cond_t) +assert alignment(_opaque_pthread_cond_t) == 4, alignment(_opaque_pthread_cond_t) +class _opaque_pthread_condattr_t(Structure): + pass +_opaque_pthread_condattr_t._fields_ = [ + ('__sig', c_long), + ('__opaque', c_char * 4), +] +assert sizeof(_opaque_pthread_condattr_t) == 8, sizeof(_opaque_pthread_condattr_t) +assert alignment(_opaque_pthread_condattr_t) == 4, alignment(_opaque_pthread_condattr_t) +class _opaque_pthread_mutex_t(Structure): + pass +_opaque_pthread_mutex_t._fields_ = [ + ('__sig', c_long), + ('__opaque', c_char * 40), +] +assert sizeof(_opaque_pthread_mutex_t) == 44, sizeof(_opaque_pthread_mutex_t) +assert alignment(_opaque_pthread_mutex_t) == 4, alignment(_opaque_pthread_mutex_t) +class _opaque_pthread_mutexattr_t(Structure): + pass +_opaque_pthread_mutexattr_t._fields_ = [ + ('__sig', c_long), + ('__opaque', c_char * 8), +] +assert sizeof(_opaque_pthread_mutexattr_t) == 12, sizeof(_opaque_pthread_mutexattr_t) +assert alignment(_opaque_pthread_mutexattr_t) == 4, alignment(_opaque_pthread_mutexattr_t) +class _opaque_pthread_once_t(Structure): + pass +_opaque_pthread_once_t._fields_ = [ + ('__sig', c_long), + ('__opaque', c_char * 4), +] +assert sizeof(_opaque_pthread_once_t) == 8, sizeof(_opaque_pthread_once_t) +assert alignment(_opaque_pthread_once_t) == 4, alignment(_opaque_pthread_once_t) +class _opaque_pthread_rwlock_t(Structure): + pass 
+_opaque_pthread_rwlock_t._fields_ = [ + ('__sig', c_long), + ('__opaque', c_char * 124), +] +assert sizeof(_opaque_pthread_rwlock_t) == 128, sizeof(_opaque_pthread_rwlock_t) +assert alignment(_opaque_pthread_rwlock_t) == 4, alignment(_opaque_pthread_rwlock_t) +class _opaque_pthread_rwlockattr_t(Structure): + pass +_opaque_pthread_rwlockattr_t._fields_ = [ + ('__sig', c_long), + ('__opaque', c_char * 12), +] +assert sizeof(_opaque_pthread_rwlockattr_t) == 16, sizeof(_opaque_pthread_rwlockattr_t) +assert alignment(_opaque_pthread_rwlockattr_t) == 4, alignment(_opaque_pthread_rwlockattr_t) +class _opaque_pthread_t(Structure): + pass +_opaque_pthread_t._fields_ = [ + ('__sig', c_long), + ('__cleanup_stack', POINTER(__darwin_pthread_handler_rec)), + ('__opaque', c_char * 596), +] +assert sizeof(_opaque_pthread_t) == 604, sizeof(_opaque_pthread_t) +assert alignment(_opaque_pthread_t) == 4, alignment(_opaque_pthread_t) +__darwin_blkcnt_t = __int64_t +__darwin_blksize_t = __int32_t +__darwin_fsblkcnt_t = c_uint +__darwin_fsfilcnt_t = c_uint +__darwin_gid_t = __uint32_t +__darwin_id_t = __uint32_t +__darwin_ino_t = __uint32_t +__darwin_mach_port_name_t = __darwin_natural_t +__darwin_mach_port_t = __darwin_mach_port_name_t +__darwin_mcontext_t = POINTER(mcontext) +__darwin_mcontext64_t = POINTER(mcontext64) +__darwin_pid_t = __int32_t +__darwin_pthread_attr_t = _opaque_pthread_attr_t +__darwin_pthread_cond_t = _opaque_pthread_cond_t +__darwin_pthread_condattr_t = _opaque_pthread_condattr_t +__darwin_pthread_key_t = c_ulong +__darwin_pthread_mutex_t = _opaque_pthread_mutex_t +__darwin_pthread_mutexattr_t = _opaque_pthread_mutexattr_t +__darwin_pthread_once_t = _opaque_pthread_once_t +__darwin_pthread_rwlock_t = _opaque_pthread_rwlock_t +__darwin_pthread_rwlockattr_t = _opaque_pthread_rwlockattr_t +__darwin_pthread_t = POINTER(_opaque_pthread_t) +__darwin_sigset_t = __uint32_t +__darwin_suseconds_t = __int32_t +__darwin_uid_t = __uint32_t +__darwin_useconds_t = __uint32_t +__darwin_uuid_t = c_ubyte * 16 +class sigaltstack(Structure): + pass +sigaltstack._fields_ = [ + ('ss_sp', c_void_p), + ('ss_size', __darwin_size_t), + ('ss_flags', c_int), +] +assert sizeof(sigaltstack) == 12, sizeof(sigaltstack) +assert alignment(sigaltstack) == 4, alignment(sigaltstack) +__darwin_stack_t = sigaltstack +class ucontext(Structure): + pass +ucontext._fields_ = [ + ('uc_onstack', c_int), + ('uc_sigmask', __darwin_sigset_t), + ('uc_stack', __darwin_stack_t), + ('uc_link', POINTER(ucontext)), + ('uc_mcsize', __darwin_size_t), + ('uc_mcontext', __darwin_mcontext_t), +] +assert sizeof(ucontext) == 32, sizeof(ucontext) +assert alignment(ucontext) == 4, alignment(ucontext) +__darwin_ucontext_t = ucontext +class ucontext64(Structure): + pass +ucontext64._fields_ = [ + ('uc_onstack', c_int), + ('uc_sigmask', __darwin_sigset_t), + ('uc_stack', __darwin_stack_t), + ('uc_link', POINTER(ucontext64)), + ('uc_mcsize', __darwin_size_t), + ('uc_mcontext64', __darwin_mcontext64_t), +] +assert sizeof(ucontext64) == 32, sizeof(ucontext64) +assert alignment(ucontext64) == 4, alignment(ucontext64) +__darwin_ucontext64_t = ucontext64 +class timeval(Structure): + pass +timeval._fields_ = [ + ('tv_sec', __darwin_time_t), + ('tv_usec', __darwin_suseconds_t), +] +assert sizeof(timeval) == 8, sizeof(timeval) +assert alignment(timeval) == 4, alignment(timeval) +rlim_t = __int64_t +class rusage(Structure): + pass +rusage._fields_ = [ + ('ru_utime', timeval), + ('ru_stime', timeval), + ('ru_maxrss', c_long), + ('ru_ixrss', c_long), + ('ru_idrss', 
c_long), + ('ru_isrss', c_long), + ('ru_minflt', c_long), + ('ru_majflt', c_long), + ('ru_nswap', c_long), + ('ru_inblock', c_long), + ('ru_oublock', c_long), + ('ru_msgsnd', c_long), + ('ru_msgrcv', c_long), + ('ru_nsignals', c_long), + ('ru_nvcsw', c_long), + ('ru_nivcsw', c_long), +] +assert sizeof(rusage) == 72, sizeof(rusage) +assert alignment(rusage) == 4, alignment(rusage) +class rlimit(Structure): + pass +rlimit._pack_ = 4 +rlimit._fields_ = [ + ('rlim_cur', rlim_t), + ('rlim_max', rlim_t), +] +assert sizeof(rlimit) == 16, sizeof(rlimit) +assert alignment(rlimit) == 4, alignment(rlimit) +mcontext_t = __darwin_mcontext_t +mcontext64_t = __darwin_mcontext64_t +pthread_attr_t = __darwin_pthread_attr_t +sigset_t = __darwin_sigset_t +ucontext_t = __darwin_ucontext_t +ucontext64_t = __darwin_ucontext64_t +uid_t = __darwin_uid_t +class sigval(Union): + pass +sigval._fields_ = [ + ('sival_int', c_int), + ('sival_ptr', c_void_p), +] +assert sizeof(sigval) == 4, sizeof(sigval) +assert alignment(sigval) == 4, alignment(sigval) +class sigevent(Structure): + pass +sigevent._fields_ = [ + ('sigev_notify', c_int), + ('sigev_signo', c_int), + ('sigev_value', sigval), + ('sigev_notify_function', CFUNCTYPE(None, sigval)), + ('sigev_notify_attributes', POINTER(pthread_attr_t)), +] +assert sizeof(sigevent) == 20, sizeof(sigevent) +assert alignment(sigevent) == 4, alignment(sigevent) +class __siginfo(Structure): + pass +pid_t = __darwin_pid_t +__siginfo._fields_ = [ + ('si_signo', c_int), + ('si_errno', c_int), + ('si_code', c_int), + ('si_pid', pid_t), + ('si_uid', uid_t), + ('si_status', c_int), + ('si_addr', c_void_p), + ('si_value', sigval), + ('si_band', c_long), + ('pad', c_ulong * 7), +] +assert sizeof(__siginfo) == 64, sizeof(__siginfo) +assert alignment(__siginfo) == 4, alignment(__siginfo) +siginfo_t = __siginfo +class __sigaction_u(Union): + pass +__sigaction_u._fields_ = [ + ('__sa_handler', CFUNCTYPE(None, c_int)), + ('__sa_sigaction', CFUNCTYPE(None, c_int, POINTER(__siginfo), c_void_p)), +] +assert sizeof(__sigaction_u) == 4, sizeof(__sigaction_u) +assert alignment(__sigaction_u) == 4, alignment(__sigaction_u) +class __sigaction(Structure): + pass +__sigaction._fields_ = [ + ('__sigaction_u', __sigaction_u), + ('sa_tramp', CFUNCTYPE(None, c_void_p, c_int, c_int, POINTER(siginfo_t), c_void_p)), + ('sa_mask', sigset_t), + ('sa_flags', c_int), +] +assert sizeof(__sigaction) == 16, sizeof(__sigaction) +assert alignment(__sigaction) == 4, alignment(__sigaction) +class sigaction(Structure): + pass +sigaction._fields_ = [ + ('__sigaction_u', __sigaction_u), + ('sa_mask', sigset_t), + ('sa_flags', c_int), +] +assert sizeof(sigaction) == 12, sizeof(sigaction) +assert alignment(sigaction) == 4, alignment(sigaction) +sig_t = CFUNCTYPE(None, c_int) +stack_t = __darwin_stack_t +class sigvec(Structure): + pass +sigvec._fields_ = [ + ('sv_handler', CFUNCTYPE(None, c_int)), + ('sv_mask', c_int), + ('sv_flags', c_int), +] +assert sizeof(sigvec) == 12, sizeof(sigvec) +assert alignment(sigvec) == 4, alignment(sigvec) +class sigstack(Structure): + pass +sigstack._fields_ = [ + ('ss_sp', STRING), + ('ss_onstack', c_int), +] +assert sizeof(sigstack) == 8, sizeof(sigstack) +assert alignment(sigstack) == 4, alignment(sigstack) +u_char = c_ubyte +u_short = c_ushort +u_int = c_uint +u_long = c_ulong +ushort = c_ushort +uint = c_uint +u_quad_t = u_int64_t +quad_t = int64_t +qaddr_t = POINTER(quad_t) +caddr_t = STRING +daddr_t = int32_t +fixpt_t = u_int32_t +blkcnt_t = __darwin_blkcnt_t +blksize_t = 
__darwin_blksize_t +gid_t = __darwin_gid_t +in_addr_t = __uint32_t +in_port_t = __uint16_t +ino_t = __darwin_ino_t +key_t = __int32_t +nlink_t = __uint16_t +off_t = __darwin_off_t +segsz_t = int32_t +swblk_t = int32_t +clock_t = __darwin_clock_t +ssize_t = __darwin_ssize_t +useconds_t = __darwin_useconds_t +suseconds_t = __darwin_suseconds_t +fd_mask = __int32_t +class fd_set(Structure): + pass +fd_set._fields_ = [ + ('fds_bits', __int32_t * 32), +] +assert sizeof(fd_set) == 128, sizeof(fd_set) +assert alignment(fd_set) == 4, alignment(fd_set) +pthread_cond_t = __darwin_pthread_cond_t +pthread_condattr_t = __darwin_pthread_condattr_t +pthread_mutex_t = __darwin_pthread_mutex_t +pthread_mutexattr_t = __darwin_pthread_mutexattr_t +pthread_once_t = __darwin_pthread_once_t +pthread_rwlock_t = __darwin_pthread_rwlock_t +pthread_rwlockattr_t = __darwin_pthread_rwlockattr_t +pthread_t = __darwin_pthread_t +pthread_key_t = __darwin_pthread_key_t +fsblkcnt_t = __darwin_fsblkcnt_t +fsfilcnt_t = __darwin_fsfilcnt_t + +# values for enumeration 'idtype_t' +idtype_t = c_int # enum +id_t = __darwin_id_t +class wait(Union): + pass +class N4wait3DOLLAR_3E(Structure): + pass +N4wait3DOLLAR_3E._fields_ = [ + ('w_Termsig', c_uint, 7), + ('w_Coredump', c_uint, 1), + ('w_Retcode', c_uint, 8), + ('w_Filler', c_uint, 16), +] +assert sizeof(N4wait3DOLLAR_3E) == 4, sizeof(N4wait3DOLLAR_3E) +assert alignment(N4wait3DOLLAR_3E) == 4, alignment(N4wait3DOLLAR_3E) +class N4wait3DOLLAR_4E(Structure): + pass +N4wait3DOLLAR_4E._fields_ = [ + ('w_Stopval', c_uint, 8), + ('w_Stopsig', c_uint, 8), + ('w_Filler', c_uint, 16), +] +assert sizeof(N4wait3DOLLAR_4E) == 4, sizeof(N4wait3DOLLAR_4E) +assert alignment(N4wait3DOLLAR_4E) == 4, alignment(N4wait3DOLLAR_4E) +wait._fields_ = [ + ('w_status', c_int), + ('w_T', N4wait3DOLLAR_3E), + ('w_S', N4wait3DOLLAR_4E), +] +assert sizeof(wait) == 4, sizeof(wait) +assert alignment(wait) == 4, alignment(wait) +class timespec(Structure): + pass +timespec._fields_ = [ + ('tv_sec', time_t), + ('tv_nsec', c_long), +] +assert sizeof(timespec) == 8, sizeof(timespec) +assert alignment(timespec) == 4, alignment(timespec) +class tm(Structure): + pass +tm._fields_ = [ + ('tm_sec', c_int), + ('tm_min', c_int), + ('tm_hour', c_int), + ('tm_mday', c_int), + ('tm_mon', c_int), + ('tm_year', c_int), + ('tm_wday', c_int), + ('tm_yday', c_int), + ('tm_isdst', c_int), + ('tm_gmtoff', c_long), + ('tm_zone', STRING), +] +assert sizeof(tm) == 44, sizeof(tm) +assert alignment(tm) == 4, alignment(tm) +__gnuc_va_list = STRING +ptrdiff_t = c_int +int8_t = c_byte +int16_t = c_short +uint8_t = c_ubyte +uint16_t = c_ushort +uint32_t = c_uint +uint64_t = c_ulonglong +int_least8_t = int8_t +int_least16_t = int16_t +int_least32_t = int32_t +int_least64_t = int64_t +uint_least8_t = uint8_t +uint_least16_t = uint16_t +uint_least32_t = uint32_t +uint_least64_t = uint64_t +int_fast8_t = int8_t +int_fast16_t = int16_t +int_fast32_t = int32_t +int_fast64_t = int64_t +uint_fast8_t = uint8_t +uint_fast16_t = uint16_t +uint_fast32_t = uint32_t +uint_fast64_t = uint64_t +intptr_t = c_long +uintptr_t = c_ulong +intmax_t = c_longlong +uintmax_t = c_ulonglong +__all__ = ['ENGINE', 'pkcs7_enc_content_st', '__int16_t', + 'X509_REVOKED', 'SSL_CTX', 'UIT_BOOLEAN', + '__darwin_time_t', 'ucontext64_t', 'int_fast32_t', + 'pem_ctx_st', 'uint8_t', 'fpos_t', 'X509', 'COMP_CTX', + 'tm', 'N10pem_ctx_st4DOLLAR_17E', 'swblk_t', + 'ASN1_TEMPLATE', '__darwin_pthread_t', 'fixpt_t', + 'BIO_METHOD', 'ASN1_PRINTABLESTRING', 'EVP_ENCODE_CTX', + 
'dh_method', 'bio_f_buffer_ctx_struct', 'in_port_t', + 'X509_SIG', '__darwin_ssize_t', '__darwin_sigset_t', + 'wait', 'uint_fast16_t', 'N12asn1_type_st4DOLLAR_11E', + 'uint_least8_t', 'pthread_rwlock_t', 'ASN1_IA5STRING', + 'fsfilcnt_t', 'ucontext', '__uint64_t', 'timespec', + 'x509_cinf_st', 'COMP_METHOD', 'MD5_CTX', 'buf_mem_st', + 'ASN1_ENCODING_st', 'PBEPARAM', 'X509_NAME_ENTRY', + '__darwin_va_list', 'ucontext_t', 'lhash_st', + 'N4wait3DOLLAR_4E', '__darwin_uuid_t', + '_ossl_old_des_ks_struct', 'id_t', 'ASN1_BIT_STRING', + 'va_list', '__darwin_wchar_t', 'pthread_key_t', + 'pkcs7_signer_info_st', 'ASN1_METHOD', 'DSA_SIG', 'DSA', + 'UIT_NONE', 'pthread_t', '__darwin_useconds_t', + 'uint_fast8_t', 'UI_STRING', 'DES_cblock', + '__darwin_mcontext64_t', 'rlim_t', 'PEM_Encode_Seal_st', + 'SHAstate_st', 'u_quad_t', 'openssl_fptr', + '_opaque_pthread_rwlockattr_t', + 'N18x509_attributes_st4DOLLAR_13E', + '__darwin_pthread_rwlock_t', 'daddr_t', 'ui_string_st', + 'x509_file_st', 'X509_req_info_st', 'int_least64_t', + 'evp_Encode_Ctx_st', 'X509_OBJECTS', 'CRYPTO_EX_DATA', + '__int8_t', 'AUTHORITY_KEYID_st', '_opaque_pthread_attr_t', + 'sigstack', 'EVP_CIPHER_CTX', 'X509_extension_st', 'pid_t', + 'RSA_METHOD', 'PEM_USER', 'pem_recip_st', 'env_md_ctx_st', + 'rc5_key_st', 'ui_st', 'X509_PUBKEY', 'u_int8_t', + 'ASN1_ITEM_st', 'pkcs7_recip_info_st', 'ssl2_state_st', + 'off_t', 'N10ssl_ctx_st4DOLLAR_18E', 'crypto_ex_data_st', + 'ui_method_st', '__darwin_pthread_rwlockattr_t', + 'CRYPTO_EX_dup', '__darwin_ino_t', '__sFILE', + 'OSUnknownByteOrder', 'BN_MONT_CTX', 'ASN1_NULL', 'time_t', + 'CRYPTO_EX_new', 'asn1_type_st', 'CRYPTO_EX_DATA_FUNCS', + 'user_time_t', 'BIGNUM', 'pthread_rwlockattr_t', + 'ASN1_VALUE_st', 'DH_METHOD', '__darwin_off_t', + '_opaque_pthread_t', 'bn_blinding_st', 'RSA', 'ssize_t', + 'mcontext64_t', 'user_long_t', 'fsblkcnt_t', 'cert_st', + '__darwin_pthread_condattr_t', 'X509_PKEY', + '__darwin_id_t', '__darwin_nl_item', 'SSL2_STATE', 'FILE', + 'pthread_mutexattr_t', 'size_t', + '_ossl_old_des_key_schedule', 'pkcs7_issuer_and_serial_st', + 'sigval', 'CRYPTO_MEM_LEAK_CB', 'X509_NAME', 'blkcnt_t', + 'uint_least16_t', '__darwin_dev_t', 'evp_cipher_info_st', + 'BN_BLINDING', 'ssl3_state_st', 'uint_least64_t', + 'user_addr_t', 'DES_key_schedule', 'RIPEMD160_CTX', + 'u_char', 'X509_algor_st', 'uid_t', 'sess_cert_st', + 'u_int64_t', 'u_int16_t', 'sigset_t', '__darwin_ptrdiff_t', + 'ASN1_CTX', 'STACK', '__int32_t', 'UI_METHOD', + 'NETSCAPE_SPKI', 'UIT_PROMPT', 'st_CRYPTO_EX_DATA_IMPL', + 'cast_key_st', 'X509_HASH_DIR_CTX', 'sigevent', + 'user_ssize_t', 'clock_t', 'aes_key_st', + '__darwin_socklen_t', '__darwin_intptr_t', 'int_fast64_t', + 'asn1_string_table_st', 'uint_fast32_t', + 'ASN1_VISIBLESTRING', 'DSA_SIG_st', 'obj_name_st', + 'X509_LOOKUP_METHOD', 'u_int32_t', 'EVP_CIPHER_INFO', + '__gnuc_va_list', 'AES_KEY', 'PKCS7_ISSUER_AND_SERIAL', + 'BN_CTX', '__darwin_blkcnt_t', 'key_t', 'SHA_CTX', + 'pkcs7_signed_st', 'SSL', 'N10pem_ctx_st4DOLLAR_16E', + 'pthread_attr_t', 'EVP_MD', 'uint', 'ASN1_BOOLEAN', + 'ino_t', '__darwin_clock_t', 'ASN1_OCTET_STRING', + 'asn1_ctx_st', 'BIO_F_BUFFER_CTX', 'bn_mont_ctx_st', + 'X509_REQ_INFO', 'PEM_CTX', 'sigvec', + '__darwin_pthread_mutexattr_t', 'x509_attributes_st', + 'stack_t', '__darwin_mode_t', '__mbstate_t', + 'asn1_object_st', 'ASN1_ENCODING', '__uint8_t', + 'LHASH_NODE', 'PKCS7_SIGNER_INFO', 'asn1_method_st', + 'stack_st', 'bio_info_cb', 'div_t', 'UIT_VERIFY', + 'PBEPARAM_st', 'N4wait3DOLLAR_3E', 'quad_t', '__siginfo', + '__darwin_mbstate_t', 
'rsa_st', 'ASN1_UNIVERSALSTRING', + 'uint64_t', 'ssl_comp_st', 'X509_OBJECT', 'pthread_cond_t', + 'DH', '__darwin_wctype_t', 'PKCS7_ENVELOPE', 'ASN1_TLC_st', + 'sig_atomic_t', 'BIO', 'nlink_t', 'BUF_MEM', 'SSL3_RECORD', + 'bio_method_st', 'timeval', 'UI_string_types', 'BIO_dummy', + 'ssl_ctx_st', 'NETSCAPE_CERT_SEQUENCE', + 'BIT_STRING_BITNAME_st', '__darwin_pthread_attr_t', + 'int8_t', '__darwin_wint_t', 'OBJ_NAME', + 'PKCS8_PRIV_KEY_INFO', 'PBE2PARAM_st', + 'LHASH_DOALL_FN_TYPE', 'x509_st', 'X509_VAL', 'dev_t', + 'ASN1_TEMPLATE_st', 'MD5state_st', '__uint16_t', + 'LHASH_DOALL_ARG_FN_TYPE', 'mdc2_ctx_st', 'SSL3_STATE', + 'ssl3_buffer_st', 'ASN1_ITEM_EXP', + '_opaque_pthread_condattr_t', 'mode_t', 'ASN1_VALUE', + 'qaddr_t', '__darwin_gid_t', 'EVP_PKEY', 'CRYPTO_EX_free', + '_ossl_old_des_cblock', 'X509_INFO', 'asn1_string_st', + 'intptr_t', 'UIT_INFO', 'int_fast8_t', 'sigaltstack', + 'env_md_st', 'LHASH', '__darwin_ucontext_t', + 'PKCS7_SIGN_ENVELOPE', '__darwin_mcontext_t', 'ct_rune_t', + 'MD2_CTX', 'pthread_once_t', 'SSL3_BUFFER', 'fd_mask', + 'ASN1_TYPE', 'PKCS7_SIGNED', 'ssl3_record_st', 'BF_KEY', + 'MD4state_st', 'MD4_CTX', 'int16_t', 'SSL_CIPHER', + 'rune_t', 'X509_TRUST', 'siginfo_t', 'X509_STORE', + '__sbuf', 'X509_STORE_CTX', '__darwin_blksize_t', 'ldiv_t', + 'ASN1_TIME', 'SSL_METHOD', 'X509_LOOKUP', + 'Netscape_spki_st', 'P_PID', 'sigaction', 'sig_t', + 'hostent', 'x509_cert_aux_st', '_opaque_pthread_cond_t', + 'segsz_t', 'ushort', '__darwin_ct_rune_t', 'fd_set', + 'BN_RECP_CTX', 'x509_lookup_st', 'uint16_t', 'pkcs7_st', + 'asn1_header_st', '__darwin_pthread_key_t', + 'x509_trust_st', '__darwin_pthread_handler_rec', 'int32_t', + 'X509_CRL_INFO', 'N11evp_pkey_st4DOLLAR_12E', 'MDC2_CTX', + 'N23_ossl_old_des_ks_struct4DOLLAR_10E', 'ASN1_HEADER', + 'X509_crl_info_st', 'LHASH_HASH_FN_TYPE', + '_opaque_pthread_mutexattr_t', 'ssl_st', + 'N8pkcs7_st4DOLLAR_15E', 'evp_pkey_st', + 'pkcs7_signedandenveloped_st', '__darwin_mach_port_t', + 'EVP_PBE_KEYGEN', '_opaque_pthread_mutex_t', + 'ASN1_UTCTIME', 'mcontext', 'crypto_ex_data_func_st', + 'u_long', 'PBKDF2PARAM_st', 'rc4_key_st', 'DSA_METHOD', + 'EVP_CIPHER', 'BIT_STRING_BITNAME', 'PKCS7_RECIP_INFO', + 'ssl3_enc_method', 'X509_CERT_AUX', 'uintmax_t', + 'int_fast16_t', 'RC5_32_KEY', 'ucontext64', 'ASN1_INTEGER', + 'u_short', 'N14x509_object_st4DOLLAR_14E', 'mcontext64', + 'X509_sig_st', 'ASN1_GENERALSTRING', 'PKCS7', '__sFILEX', + 'X509_name_entry_st', 'ssl_session_st', 'caddr_t', + 'bignum_st', 'X509_CINF', '__darwin_pthread_cond_t', + 'ASN1_TLC', 'PKCS7_ENCRYPT', 'NETSCAPE_SPKAC', + 'Netscape_spkac_st', 'idtype_t', 'UIT_ERROR', + 'uint_fast64_t', 'in_addr_t', 'pthread_mutex_t', + '__int64_t', 'ASN1_BMPSTRING', 'uint32_t', + 'PEM_ENCODE_SEAL_CTX', 'suseconds_t', 'ASN1_OBJECT', + 'X509_val_st', 'private_key_st', 'CRYPTO_dynlock', + 'X509_objects_st', 'CRYPTO_EX_DATA_IMPL', + 'pthread_condattr_t', 'PKCS7_DIGEST', 'uint_least32_t', + 'ASN1_STRING', '__uint32_t', 'P_PGID', 'rsa_meth_st', + 'X509_crl_st', 'RC2_KEY', '__darwin_fsfilcnt_t', + 'X509_revoked_st', 'PBE2PARAM', 'blksize_t', + 'Netscape_certificate_sequence', 'ssl_cipher_st', + 'bignum_ctx', 'register_t', 'ASN1_UTF8STRING', + 'pkcs7_encrypted_st', 'RC4_KEY', '__darwin_ucontext64_t', + 'N13ssl2_state_st4DOLLAR_19E', 'bn_recp_ctx_st', + 'CAST_KEY', 'X509_ATTRIBUTE', '__darwin_suseconds_t', + '__sigaction', 'user_ulong_t', 'syscall_arg_t', + 'evp_cipher_ctx_st', 'X509_ALGOR', 'mcontext_t', + 'const_DES_cblock', '__darwin_fsblkcnt_t', 'dsa_st', + 'int_least8_t', 'MD2state_st', 
'X509_EXTENSION', + 'GEN_SESSION_CB', 'int_least16_t', '__darwin_wctrans_t', + 'PBKDF2PARAM', 'x509_lookup_method_st', 'pem_password_cb', + 'X509_info_st', 'x509_store_st', '__darwin_natural_t', + 'X509_pubkey_st', 'pkcs7_digest_st', '__darwin_size_t', + 'ASN1_STRING_TABLE', 'OSLittleEndian', 'RIPEMD160state_st', + 'pkcs7_enveloped_st', 'UI', 'ptrdiff_t', 'X509_REQ', + 'CRYPTO_dynlock_value', 'X509_req_st', 'x509_store_ctx_st', + 'N13ssl3_state_st4DOLLAR_20E', 'lhash_node_st', + '__darwin_pthread_mutex_t', 'LHASH_COMP_FN_TYPE', + '__darwin_rune_t', 'rlimit', '__darwin_pthread_once_t', + 'OSBigEndian', 'uintptr_t', '__darwin_uid_t', 'u_int', + 'ASN1_T61STRING', 'gid_t', 'ssl_method_st', 'ASN1_ITEM', + 'ASN1_ENUMERATED', '_opaque_pthread_rwlock_t', + 'pkcs8_priv_key_info_st', 'intmax_t', 'sigcontext', + 'X509_CRL', 'rc2_key_st', 'engine_st', 'x509_object_st', + '_opaque_pthread_once_t', 'DES_ks', 'SSL_COMP', + 'dsa_method', 'int64_t', 'bio_st', 'bf_key_st', + 'ASN1_GENERALIZEDTIME', 'PKCS7_ENC_CONTENT', + '__darwin_pid_t', 'lldiv_t', 'comp_method_st', + 'EVP_MD_CTX', 'evp_cipher_st', 'X509_name_st', + 'x509_hash_dir_st', '__darwin_mach_port_name_t', + 'useconds_t', 'user_size_t', 'SSL_SESSION', 'rusage', + 'ssl_crock_st', 'int_least32_t', '__sigaction_u', 'dh_st', + 'P_ALL', '__darwin_stack_t', 'N6DES_ks3DOLLAR_9E', + 'comp_ctx_st', 'X509_CERT_FILE_CTX'] diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/data/py2_test_grammar.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/data/py2_test_grammar.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/data/py2_test_grammar.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/data/py2_test_grammar.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,971 @@ +# Python test set -- part 1, grammar. +# This just tests whether the parser accepts them all. + +# NOTE: When you run this test as a script from the command line, you +# get warnings about certain hex/oct constants. Since those are +# issued by the parser, you can't suppress them by adding a +# filterwarnings() call to this module. Therefore, to shut up the +# regression test, the filterwarnings() call has been added to +# regrtest.py. 
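
The generated Darwin declarations that end just above follow one ctypes pattern throughout: declare an empty Structure subclass, assign `_fields_` afterwards (which permits self-referential pointer members), then assert the resulting `sizeof` and `alignment` against the values the C headers produced on the original 32-bit platform. A minimal, self-contained sketch of that pattern — the `node` struct and its field names are invented for illustration, and the platform-dependent numbers are printed rather than asserted:

    from ctypes import Structure, POINTER, c_int, c_char, sizeof, alignment

    class node(Structure):
        pass

    # Assigning _fields_ after the class statement allows self-referential
    # members such as the POINTER(node) link below.
    node._fields_ = [
        ('value', c_int),
        ('tag', c_char * 4),
        ('next', POINTER(node)),
    ]

    # The generated bindings hard-code sizeof/alignment asserts for one
    # platform; pointer size and padding vary, so this sketch only reports them.
    print(sizeof(node), alignment(node))
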
+ +from test.test_support import check_syntax_error +import unittest +import sys +# testing import * +from sys import * + +class TokenTests(unittest.TestCase): + + def testBackslash(self): + # Backslash means line continuation: + x = 1 \ + + 1 + self.assertEquals(x, 2, 'backslash for line continuation') + + # Backslash does not means continuation in comments :\ + x = 0 + self.assertEquals(x, 0, 'backslash ending comment') + + def testPlainIntegers(self): + self.assertEquals(0xff, 255) + self.assertEquals(0377, 255) + self.assertEquals(2147483647, 017777777777) + # "0x" is not a valid literal + self.assertRaises(SyntaxError, eval, "0x") + from sys import maxint + if maxint == 2147483647: + self.assertEquals(-2147483647-1, -020000000000) + # XXX -2147483648 + self.assert_(037777777777 > 0) + self.assert_(0xffffffff > 0) + for s in '2147483648', '040000000000', '0x100000000': + try: + x = eval(s) + except OverflowError: + self.fail("OverflowError on huge integer literal %r" % s) + elif maxint == 9223372036854775807: + self.assertEquals(-9223372036854775807-1, -01000000000000000000000) + self.assert_(01777777777777777777777 > 0) + self.assert_(0xffffffffffffffff > 0) + for s in '9223372036854775808', '02000000000000000000000', \ + '0x10000000000000000': + try: + x = eval(s) + except OverflowError: + self.fail("OverflowError on huge integer literal %r" % s) + else: + self.fail('Weird maxint value %r' % maxint) + + def testLongIntegers(self): + x = 0L + x = 0l + x = 0xffffffffffffffffL + x = 0xffffffffffffffffl + x = 077777777777777777L + x = 077777777777777777l + x = 123456789012345678901234567890L + x = 123456789012345678901234567890l + + def testFloats(self): + x = 3.14 + x = 314. + x = 0.314 + # XXX x = 000.314 + x = .314 + x = 3e14 + x = 3E14 + x = 3e-14 + x = 3e+14 + x = 3.e14 + x = .3e14 + x = 3.1e4 + + def testStringLiterals(self): + x = ''; y = ""; self.assert_(len(x) == 0 and x == y) + x = '\''; y = "'"; self.assert_(len(x) == 1 and x == y and ord(x) == 39) + x = '"'; y = "\""; self.assert_(len(x) == 1 and x == y and ord(x) == 34) + x = "doesn't \"shrink\" does it" + y = 'doesn\'t "shrink" does it' + self.assert_(len(x) == 24 and x == y) + x = "does \"shrink\" doesn't it" + y = 'does "shrink" doesn\'t it' + self.assert_(len(x) == 24 and x == y) + x = """ +The "quick" +brown fox +jumps over +the 'lazy' dog. +""" + y = '\nThe "quick"\nbrown fox\njumps over\nthe \'lazy\' dog.\n' + self.assertEquals(x, y) + y = ''' +The "quick" +brown fox +jumps over +the 'lazy' dog. 
+''' + self.assertEquals(x, y) + y = "\n\ +The \"quick\"\n\ +brown fox\n\ +jumps over\n\ +the 'lazy' dog.\n\ +" + self.assertEquals(x, y) + y = '\n\ +The \"quick\"\n\ +brown fox\n\ +jumps over\n\ +the \'lazy\' dog.\n\ +' + self.assertEquals(x, y) + + +class GrammarTests(unittest.TestCase): + + # single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE + # XXX can't test in a script -- this rule is only used when interactive + + # file_input: (NEWLINE | stmt)* ENDMARKER + # Being tested as this very moment this very module + + # expr_input: testlist NEWLINE + # XXX Hard to test -- used only in calls to input() + + def testEvalInput(self): + # testlist ENDMARKER + x = eval('1, 0 or 1') + + def testFuncdef(self): + ### 'def' NAME parameters ':' suite + ### parameters: '(' [varargslist] ')' + ### varargslist: (fpdef ['=' test] ',')* ('*' NAME [',' ('**'|'*' '*') NAME] + ### | ('**'|'*' '*') NAME) + ### | fpdef ['=' test] (',' fpdef ['=' test])* [','] + ### fpdef: NAME | '(' fplist ')' + ### fplist: fpdef (',' fpdef)* [','] + ### arglist: (argument ',')* (argument | *' test [',' '**' test] | '**' test) + ### argument: [test '='] test # Really [keyword '='] test + def f1(): pass + f1() + f1(*()) + f1(*(), **{}) + def f2(one_argument): pass + def f3(two, arguments): pass + def f4(two, (compound, (argument, list))): pass + def f5((compound, first), two): pass + self.assertEquals(f2.func_code.co_varnames, ('one_argument',)) + self.assertEquals(f3.func_code.co_varnames, ('two', 'arguments')) + if sys.platform.startswith('java'): + self.assertEquals(f4.func_code.co_varnames, + ('two', '(compound, (argument, list))', 'compound', 'argument', + 'list',)) + self.assertEquals(f5.func_code.co_varnames, + ('(compound, first)', 'two', 'compound', 'first')) + else: + self.assertEquals(f4.func_code.co_varnames, + ('two', '.1', 'compound', 'argument', 'list')) + self.assertEquals(f5.func_code.co_varnames, + ('.0', 'two', 'compound', 'first')) + def a1(one_arg,): pass + def a2(two, args,): pass + def v0(*rest): pass + def v1(a, *rest): pass + def v2(a, b, *rest): pass + def v3(a, (b, c), *rest): return a, b, c, rest + + f1() + f2(1) + f2(1,) + f3(1, 2) + f3(1, 2,) + f4(1, (2, (3, 4))) + v0() + v0(1) + v0(1,) + v0(1,2) + v0(1,2,3,4,5,6,7,8,9,0) + v1(1) + v1(1,) + v1(1,2) + v1(1,2,3) + v1(1,2,3,4,5,6,7,8,9,0) + v2(1,2) + v2(1,2,3) + v2(1,2,3,4) + v2(1,2,3,4,5,6,7,8,9,0) + v3(1,(2,3)) + v3(1,(2,3),4) + v3(1,(2,3),4,5,6,7,8,9,0) + + # ceval unpacks the formal arguments into the first argcount names; + # thus, the names nested inside tuples must appear after these names. 
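
The `func_code.co_varnames` checks above (including the Jython branch that follows) rely on the layout of a code object's variable names: the formal parameters occupy the first `co_argcount` slots, and any remaining locals come after them. Python 3 spells the code object `__code__` and no longer allows tuple parameters, so a modern sketch of the same introspection uses a plain function; the names here are illustrative only:

    def f(a, b, *rest):
        total = a + b          # 'total' is an ordinary local
        return total, rest

    # Formal parameters fill the first co_argcount slots of co_varnames
    # (the *rest name follows them), and plain locals come last.
    assert f.__code__.co_argcount == 2
    assert f.__code__.co_varnames == ('a', 'b', 'rest', 'total')
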
+ if sys.platform.startswith('java'): + self.assertEquals(v3.func_code.co_varnames, ('a', '(b, c)', 'rest', 'b', 'c')) + else: + self.assertEquals(v3.func_code.co_varnames, ('a', '.1', 'rest', 'b', 'c')) + self.assertEquals(v3(1, (2, 3), 4), (1, 2, 3, (4,))) + def d01(a=1): pass + d01() + d01(1) + d01(*(1,)) + d01(**{'a':2}) + def d11(a, b=1): pass + d11(1) + d11(1, 2) + d11(1, **{'b':2}) + def d21(a, b, c=1): pass + d21(1, 2) + d21(1, 2, 3) + d21(*(1, 2, 3)) + d21(1, *(2, 3)) + d21(1, 2, *(3,)) + d21(1, 2, **{'c':3}) + def d02(a=1, b=2): pass + d02() + d02(1) + d02(1, 2) + d02(*(1, 2)) + d02(1, *(2,)) + d02(1, **{'b':2}) + d02(**{'a': 1, 'b': 2}) + def d12(a, b=1, c=2): pass + d12(1) + d12(1, 2) + d12(1, 2, 3) + def d22(a, b, c=1, d=2): pass + d22(1, 2) + d22(1, 2, 3) + d22(1, 2, 3, 4) + def d01v(a=1, *rest): pass + d01v() + d01v(1) + d01v(1, 2) + d01v(*(1, 2, 3, 4)) + d01v(*(1,)) + d01v(**{'a':2}) + def d11v(a, b=1, *rest): pass + d11v(1) + d11v(1, 2) + d11v(1, 2, 3) + def d21v(a, b, c=1, *rest): pass + d21v(1, 2) + d21v(1, 2, 3) + d21v(1, 2, 3, 4) + d21v(*(1, 2, 3, 4)) + d21v(1, 2, **{'c': 3}) + def d02v(a=1, b=2, *rest): pass + d02v() + d02v(1) + d02v(1, 2) + d02v(1, 2, 3) + d02v(1, *(2, 3, 4)) + d02v(**{'a': 1, 'b': 2}) + def d12v(a, b=1, c=2, *rest): pass + d12v(1) + d12v(1, 2) + d12v(1, 2, 3) + d12v(1, 2, 3, 4) + d12v(*(1, 2, 3, 4)) + d12v(1, 2, *(3, 4, 5)) + d12v(1, *(2,), **{'c': 3}) + def d22v(a, b, c=1, d=2, *rest): pass + d22v(1, 2) + d22v(1, 2, 3) + d22v(1, 2, 3, 4) + d22v(1, 2, 3, 4, 5) + d22v(*(1, 2, 3, 4)) + d22v(1, 2, *(3, 4, 5)) + d22v(1, *(2, 3), **{'d': 4}) + def d31v((x)): pass + d31v(1) + def d32v((x,)): pass + d32v((1,)) + + # keyword arguments after *arglist + def f(*args, **kwargs): + return args, kwargs + self.assertEquals(f(1, x=2, *[3, 4], y=5), ((1, 3, 4), + {'x':2, 'y':5})) + self.assertRaises(SyntaxError, eval, "f(1, *(2,3), 4)") + self.assertRaises(SyntaxError, eval, "f(1, x=2, *(3,4), x=5)") + + # Check ast errors in *args and *kwargs + check_syntax_error(self, "f(*g(1=2))") + check_syntax_error(self, "f(**g(1=2))") + + def testLambdef(self): + ### lambdef: 'lambda' [varargslist] ':' test + l1 = lambda : 0 + self.assertEquals(l1(), 0) + l2 = lambda : a[d] # XXX just testing the expression + l3 = lambda : [2 < x for x in [-1, 3, 0L]] + self.assertEquals(l3(), [0, 1, 0]) + l4 = lambda x = lambda y = lambda z=1 : z : y() : x() + self.assertEquals(l4(), 1) + l5 = lambda x, y, z=2: x + y + z + self.assertEquals(l5(1, 2), 5) + self.assertEquals(l5(1, 2, 3), 6) + check_syntax_error(self, "lambda x: x = 2") + check_syntax_error(self, "lambda (None,): None") + + ### stmt: simple_stmt | compound_stmt + # Tested below + + def testSimpleStmt(self): + ### simple_stmt: small_stmt (';' small_stmt)* [';'] + x = 1; pass; del x + def foo(): + # verify statements that end with semi-colons + x = 1; pass; del x; + foo() + + ### small_stmt: expr_stmt | print_stmt | pass_stmt | del_stmt | flow_stmt | import_stmt | global_stmt | access_stmt | exec_stmt + # Tested below + + def testExprStmt(self): + # (exprlist '=')* exprlist + 1 + 1, 2, 3 + x = 1 + x = 1, 2, 3 + x = y = z = 1, 2, 3 + x, y, z = 1, 2, 3 + abc = a, b, c = x, y, z = xyz = 1, 2, (3, 4) + + check_syntax_error(self, "x + 1 = 1") + check_syntax_error(self, "a + 1 = b + 2") + + def testPrintStmt(self): + # 'print' (test ',')* [test] + import StringIO + + # Can't test printing to real stdout without comparing output + # which is not available in unittest. 
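
The print-statement test that follows captures output by swapping `sys.stdout` for a `StringIO` instance and restoring it by hand afterwards. In Python 3 the same idea is usually written with `contextlib.redirect_stdout`, which restores the original stream automatically; a small sketch, with illustrative values:

    import io
    from contextlib import redirect_stdout

    buf = io.StringIO()
    with redirect_stdout(buf):          # replaces sys.stdout, restores it on exit
        print(1, 2, 3)
        print(0 or 1, 0 or 1)
    assert buf.getvalue() == "1 2 3\n1 1\n"
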
+ save_stdout = sys.stdout + sys.stdout = StringIO.StringIO() + + print 1, 2, 3 + print 1, 2, 3, + print + print 0 or 1, 0 or 1, + print 0 or 1 + + # 'print' '>>' test ',' + print >> sys.stdout, 1, 2, 3 + print >> sys.stdout, 1, 2, 3, + print >> sys.stdout + print >> sys.stdout, 0 or 1, 0 or 1, + print >> sys.stdout, 0 or 1 + + # test printing to an instance + class Gulp: + def write(self, msg): pass + + gulp = Gulp() + print >> gulp, 1, 2, 3 + print >> gulp, 1, 2, 3, + print >> gulp + print >> gulp, 0 or 1, 0 or 1, + print >> gulp, 0 or 1 + + # test print >> None + def driver(): + oldstdout = sys.stdout + sys.stdout = Gulp() + try: + tellme(Gulp()) + tellme() + finally: + sys.stdout = oldstdout + + # we should see this once + def tellme(file=sys.stdout): + print >> file, 'hello world' + + driver() + + # we should not see this at all + def tellme(file=None): + print >> file, 'goodbye universe' + + driver() + + self.assertEqual(sys.stdout.getvalue(), '''\ +1 2 3 +1 2 3 +1 1 1 +1 2 3 +1 2 3 +1 1 1 +hello world +''') + sys.stdout = save_stdout + + # syntax errors + check_syntax_error(self, 'print ,') + check_syntax_error(self, 'print >> x,') + + def testDelStmt(self): + # 'del' exprlist + abc = [1,2,3] + x, y, z = abc + xyz = x, y, z + + del abc + del x, y, (z, xyz) + + def testPassStmt(self): + # 'pass' + pass + + # flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt + # Tested below + + def testBreakStmt(self): + # 'break' + while 1: break + + def testContinueStmt(self): + # 'continue' + i = 1 + while i: i = 0; continue + + msg = "" + while not msg: + msg = "ok" + try: + continue + msg = "continue failed to continue inside try" + except: + msg = "continue inside try called except block" + if msg != "ok": + self.fail(msg) + + msg = "" + while not msg: + msg = "finally block not called" + try: + continue + finally: + msg = "ok" + if msg != "ok": + self.fail(msg) + + def test_break_continue_loop(self): + # This test warrants an explanation. It is a test specifically for SF bugs + # #463359 and #462937. The bug is that a 'break' statement executed or + # exception raised inside a try/except inside a loop, *after* a continue + # statement has been executed in that loop, will cause the wrong number of + # arguments to be popped off the stack and the instruction pointer reset to + # a very small number (usually 0.) Because of this, the following test + # *must* written as a function, and the tracking vars *must* be function + # arguments with default values. Otherwise, the test will loop and loop. 
+ + def test_inner(extra_burning_oil = 1, count=0): + big_hippo = 2 + while big_hippo: + count += 1 + try: + if extra_burning_oil and big_hippo == 1: + extra_burning_oil -= 1 + break + big_hippo -= 1 + continue + except: + raise + if count > 2 or big_hippo <> 1: + self.fail("continue then break in try/except in loop broken!") + test_inner() + + def testReturn(self): + # 'return' [testlist] + def g1(): return + def g2(): return 1 + g1() + x = g2() + check_syntax_error(self, "class foo:return 1") + + def testYield(self): + check_syntax_error(self, "class foo:yield 1") + + def testRaise(self): + # 'raise' test [',' test] + try: raise RuntimeError, 'just testing' + except RuntimeError: pass + try: raise KeyboardInterrupt + except KeyboardInterrupt: pass + + def testImport(self): + # 'import' dotted_as_names + import sys + import time, sys + # 'from' dotted_name 'import' ('*' | '(' import_as_names ')' | import_as_names) + from time import time + from time import (time) + # not testable inside a function, but already done at top of the module + # from sys import * + from sys import path, argv + from sys import (path, argv) + from sys import (path, argv,) + + def testGlobal(self): + # 'global' NAME (',' NAME)* + global a + global a, b + global one, two, three, four, five, six, seven, eight, nine, ten + + def testExec(self): + # 'exec' expr ['in' expr [',' expr]] + z = None + del z + exec 'z=1+1\n' + if z != 2: self.fail('exec \'z=1+1\'\\n') + del z + exec 'z=1+1' + if z != 2: self.fail('exec \'z=1+1\'') + z = None + del z + import types + if hasattr(types, "UnicodeType"): + exec r"""if 1: + exec u'z=1+1\n' + if z != 2: self.fail('exec u\'z=1+1\'\\n') + del z + exec u'z=1+1' + if z != 2: self.fail('exec u\'z=1+1\'')""" + g = {} + exec 'z = 1' in g + if g.has_key('__builtins__'): del g['__builtins__'] + if g != {'z': 1}: self.fail('exec \'z = 1\' in g') + g = {} + l = {} + + import warnings + warnings.filterwarnings("ignore", "global statement", module="") + exec 'global a; a = 1; b = 2' in g, l + if g.has_key('__builtins__'): del g['__builtins__'] + if l.has_key('__builtins__'): del l['__builtins__'] + if (g, l) != ({'a':1}, {'b':2}): + self.fail('exec ... in g (%s), l (%s)' %(g,l)) + + def testAssert(self): + # assert_stmt: 'assert' test [',' test] + assert 1 + assert 1, 1 + assert lambda x:x + assert 1, lambda x:x+1 + try: + assert 0, "msg" + except AssertionError, e: + self.assertEquals(e.args[0], "msg") + else: + if __debug__: + self.fail("AssertionError not raised by assert 0") + + ### compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | funcdef | classdef + # Tested below + + def testIf(self): + # 'if' test ':' suite ('elif' test ':' suite)* ['else' ':' suite] + if 1: pass + if 1: pass + else: pass + if 0: pass + elif 0: pass + if 0: pass + elif 0: pass + elif 0: pass + elif 0: pass + else: pass + + def testWhile(self): + # 'while' test ':' suite ['else' ':' suite] + while 0: pass + while 0: pass + else: pass + + # Issue1920: "while 0" is optimized away, + # ensure that the "else" clause is still present. 
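
The Issue1920 check below exercises `while`/`else`: the `else` suite runs only when the loop finishes without hitting `break`, so even a loop whose body is optimized away (`while 0:`) must still execute its `else` clause. A short sketch of both cases, with invented variable names:

    # The else suite runs because the loop condition simply became false.
    n = 0
    while n < 3:
        n += 1
    else:
        n = -1
    assert n == -1

    # The else suite is skipped because the loop was left via break.
    while True:
        break
    else:
        raise AssertionError("unreachable: break bypasses the else suite")
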
+ x = 0 + while 0: + x = 1 + else: + x = 2 + self.assertEquals(x, 2) + + def testFor(self): + # 'for' exprlist 'in' exprlist ':' suite ['else' ':' suite] + for i in 1, 2, 3: pass + for i, j, k in (): pass + else: pass + class Squares: + def __init__(self, max): + self.max = max + self.sofar = [] + def __len__(self): return len(self.sofar) + def __getitem__(self, i): + if not 0 <= i < self.max: raise IndexError + n = len(self.sofar) + while n <= i: + self.sofar.append(n*n) + n = n+1 + return self.sofar[i] + n = 0 + for x in Squares(10): n = n+x + if n != 285: + self.fail('for over growing sequence') + + result = [] + for x, in [(1,), (2,), (3,)]: + result.append(x) + self.assertEqual(result, [1, 2, 3]) + + def testTry(self): + ### try_stmt: 'try' ':' suite (except_clause ':' suite)+ ['else' ':' suite] + ### | 'try' ':' suite 'finally' ':' suite + ### except_clause: 'except' [expr [('as' | ',') expr]] + try: + 1/0 + except ZeroDivisionError: + pass + else: + pass + try: 1/0 + except EOFError: pass + except TypeError as msg: pass + except RuntimeError, msg: pass + except: pass + else: pass + try: 1/0 + except (EOFError, TypeError, ZeroDivisionError): pass + try: 1/0 + except (EOFError, TypeError, ZeroDivisionError), msg: pass + try: pass + finally: pass + + def testSuite(self): + # simple_stmt | NEWLINE INDENT NEWLINE* (stmt NEWLINE*)+ DEDENT + if 1: pass + if 1: + pass + if 1: + # + # + # + pass + pass + # + pass + # + + def testTest(self): + ### and_test ('or' and_test)* + ### and_test: not_test ('and' not_test)* + ### not_test: 'not' not_test | comparison + if not 1: pass + if 1 and 1: pass + if 1 or 1: pass + if not not not 1: pass + if not 1 and 1 and 1: pass + if 1 and 1 or 1 and 1 and 1 or not 1 and 1: pass + + def testComparison(self): + ### comparison: expr (comp_op expr)* + ### comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not' + if 1: pass + x = (1 == 1) + if 1 == 1: pass + if 1 != 1: pass + if 1 <> 1: pass + if 1 < 1: pass + if 1 > 1: pass + if 1 <= 1: pass + if 1 >= 1: pass + if 1 is 1: pass + if 1 is not 1: pass + if 1 in (): pass + if 1 not in (): pass + if 1 < 1 > 1 == 1 >= 1 <= 1 <> 1 != 1 in 1 not in 1 is 1 is not 1: pass + + def testBinaryMaskOps(self): + x = 1 & 1 + x = 1 ^ 1 + x = 1 | 1 + + def testShiftOps(self): + x = 1 << 1 + x = 1 >> 1 + x = 1 << 1 >> 1 + + def testAdditiveOps(self): + x = 1 + x = 1 + 1 + x = 1 - 1 - 1 + x = 1 - 1 + 1 - 1 + 1 + + def testMultiplicativeOps(self): + x = 1 * 1 + x = 1 / 1 + x = 1 % 1 + x = 1 / 1 * 1 % 1 + + def testUnaryOps(self): + x = +1 + x = -1 + x = ~1 + x = ~1 ^ 1 & 1 | 1 & 1 ^ -1 + x = -1*1/1 + 1*1 - ---1*1 + + def testSelectors(self): + ### trailer: '(' [testlist] ')' | '[' subscript ']' | '.' NAME + ### subscript: expr | [expr] ':' [expr] + + import sys, time + c = sys.path[0] + x = time.time() + x = sys.modules['time'].time() + a = '01234' + c = a[0] + c = a[-1] + s = a[0:5] + s = a[:5] + s = a[0:] + s = a[:] + s = a[-5:] + s = a[:-1] + s = a[-4:-3] + # A rough test of SF bug 1333982. https://python.org/sf/1333982 + # The testing here is fairly incomplete. 
+ # Test cases should include: commas with 1 and 2 colons + d = {} + d[1] = 1 + d[1,] = 2 + d[1,2] = 3 + d[1,2,3] = 4 + L = list(d) + L.sort() + self.assertEquals(str(L), '[1, (1,), (1, 2), (1, 2, 3)]') + + def testAtoms(self): + ### atom: '(' [testlist] ')' | '[' [testlist] ']' | '{' [dictmaker] '}' | '`' testlist '`' | NAME | NUMBER | STRING + ### dictmaker: test ':' test (',' test ':' test)* [','] + + x = (1) + x = (1 or 2 or 3) + x = (1 or 2 or 3, 2, 3) + + x = [] + x = [1] + x = [1 or 2 or 3] + x = [1 or 2 or 3, 2, 3] + x = [] + + x = {} + x = {'one': 1} + x = {'one': 1,} + x = {'one' or 'two': 1 or 2} + x = {'one': 1, 'two': 2} + x = {'one': 1, 'two': 2,} + x = {'one': 1, 'two': 2, 'three': 3, 'four': 4, 'five': 5, 'six': 6} + + x = `x` + x = `1 or 2 or 3` + self.assertEqual(`1,2`, '(1, 2)') + + x = x + x = 'x' + x = 123 + + ### exprlist: expr (',' expr)* [','] + ### testlist: test (',' test)* [','] + # These have been exercised enough above + + def testClassdef(self): + # 'class' NAME ['(' [testlist] ')'] ':' suite + class B: pass + class B2(): pass + class C1(B): pass + class C2(B): pass + class D(C1, C2, B): pass + class C: + def meth1(self): pass + def meth2(self, arg): pass + def meth3(self, a1, a2): pass + # decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE + # decorators: decorator+ + # decorated: decorators (classdef | funcdef) + def class_decorator(x): + x.decorated = True + return x + @class_decorator + class G: + pass + self.assertEqual(G.decorated, True) + + def testListcomps(self): + # list comprehension tests + nums = [1, 2, 3, 4, 5] + strs = ["Apple", "Banana", "Coconut"] + spcs = [" Apple", " Banana ", "Coco nut "] + + self.assertEqual([s.strip() for s in spcs], ['Apple', 'Banana', 'Coco nut']) + self.assertEqual([3 * x for x in nums], [3, 6, 9, 12, 15]) + self.assertEqual([x for x in nums if x > 2], [3, 4, 5]) + self.assertEqual([(i, s) for i in nums for s in strs], + [(1, 'Apple'), (1, 'Banana'), (1, 'Coconut'), + (2, 'Apple'), (2, 'Banana'), (2, 'Coconut'), + (3, 'Apple'), (3, 'Banana'), (3, 'Coconut'), + (4, 'Apple'), (4, 'Banana'), (4, 'Coconut'), + (5, 'Apple'), (5, 'Banana'), (5, 'Coconut')]) + self.assertEqual([(i, s) for i in nums for s in [f for f in strs if "n" in f]], + [(1, 'Banana'), (1, 'Coconut'), (2, 'Banana'), (2, 'Coconut'), + (3, 'Banana'), (3, 'Coconut'), (4, 'Banana'), (4, 'Coconut'), + (5, 'Banana'), (5, 'Coconut')]) + self.assertEqual([(lambda a:[a**i for i in range(a+1)])(j) for j in range(5)], + [[1], [1, 1], [1, 2, 4], [1, 3, 9, 27], [1, 4, 16, 64, 256]]) + + def test_in_func(l): + return [None < x < 3 for x in l if x > 2] + + self.assertEqual(test_in_func(nums), [False, False, False]) + + def test_nested_front(): + self.assertEqual([[y for y in [x, x + 1]] for x in [1,3,5]], + [[1, 2], [3, 4], [5, 6]]) + + test_nested_front() + + check_syntax_error(self, "[i, s for i in nums for s in strs]") + check_syntax_error(self, "[x if y]") + + suppliers = [ + (1, "Boeing"), + (2, "Ford"), + (3, "Macdonalds") + ] + + parts = [ + (10, "Airliner"), + (20, "Engine"), + (30, "Cheeseburger") + ] + + suppart = [ + (1, 10), (1, 20), (2, 20), (3, 30) + ] + + x = [ + (sname, pname) + for (sno, sname) in suppliers + for (pno, pname) in parts + for (sp_sno, sp_pno) in suppart + if sno == sp_sno and pno == sp_pno + ] + + self.assertEqual(x, [('Boeing', 'Airliner'), ('Boeing', 'Engine'), ('Ford', 'Engine'), + ('Macdonalds', 'Cheeseburger')]) + + def testGenexps(self): + # generator expression tests + g = ([x for x in range(10)] for x in range(1)) + 
self.assertEqual(g.next(), [x for x in range(10)]) + try: + g.next() + self.fail('should produce StopIteration exception') + except StopIteration: + pass + + a = 1 + try: + g = (a for d in a) + g.next() + self.fail('should produce TypeError') + except TypeError: + pass + + self.assertEqual(list((x, y) for x in 'abcd' for y in 'abcd'), [(x, y) for x in 'abcd' for y in 'abcd']) + self.assertEqual(list((x, y) for x in 'ab' for y in 'xy'), [(x, y) for x in 'ab' for y in 'xy']) + + a = [x for x in range(10)] + b = (x for x in (y for y in a)) + self.assertEqual(sum(b), sum([x for x in range(10)])) + + self.assertEqual(sum(x**2 for x in range(10)), sum([x**2 for x in range(10)])) + self.assertEqual(sum(x*x for x in range(10) if x%2), sum([x*x for x in range(10) if x%2])) + self.assertEqual(sum(x for x in (y for y in range(10))), sum([x for x in range(10)])) + self.assertEqual(sum(x for x in (y for y in (z for z in range(10)))), sum([x for x in range(10)])) + self.assertEqual(sum(x for x in [y for y in (z for z in range(10))]), sum([x for x in range(10)])) + self.assertEqual(sum(x for x in (y for y in (z for z in range(10) if True)) if True), sum([x for x in range(10)])) + self.assertEqual(sum(x for x in (y for y in (z for z in range(10) if True) if False) if True), 0) + check_syntax_error(self, "foo(x for x in range(10), 100)") + check_syntax_error(self, "foo(100, x for x in range(10))") + + def testComprehensionSpecials(self): + # test for outmost iterable precomputation + x = 10; g = (i for i in range(x)); x = 5 + self.assertEqual(len(list(g)), 10) + + # This should hold, since we're only precomputing outmost iterable. + x = 10; t = False; g = ((i,j) for i in range(x) if t for j in range(x)) + x = 5; t = True; + self.assertEqual([(i,j) for i in range(10) for j in range(5)], list(g)) + + # Grammar allows multiple adjacent 'if's in listcomps and genexps, + # even though it's silly. Make sure it works (ifelse broke this.) + self.assertEqual([ x for x in range(10) if x % 2 if x % 3 ], [1, 5, 7]) + self.assertEqual(list(x for x in range(10) if x % 2 if x % 3), [1, 5, 7]) + + # verify unpacking single element tuples in listcomp/genexp. 
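
The "outmost iterable precomputation" checks above depend on a subtlety of generator expressions: the outermost iterable is evaluated eagerly when the expression is created, while inner loops and `if` clauses are evaluated lazily during iteration (the single-element-tuple unpacking assertions continue right after this sketch). A minimal illustration, with invented values:

    x = 3
    g = ((i, j) for i in range(x) for j in range(x))   # outer range(3) is evaluated now
    x = 2                                              # only the inner range(x) sees the new value
    assert list(g) == [(i, j) for i in range(3) for j in range(2)]
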
+ self.assertEqual([x for x, in [(4,), (5,), (6,)]], [4, 5, 6]) + self.assertEqual(list(x for x, in [(7,), (8,), (9,)]), [7, 8, 9]) + + def test_with_statement(self): + class manager(object): + def __enter__(self): + return (1, 2) + def __exit__(self, *args): + pass + + with manager(): + pass + with manager() as x: + pass + with manager() as (x, y): + pass + with manager(), manager(): + pass + with manager() as x, manager() as y: + pass + with manager() as x, manager(): + pass + + def testIfElseExpr(self): + # Test ifelse expressions in various cases + def _checkeval(msg, ret): + "helper to check that evaluation of expressions is done correctly" + print x + return ret + + self.assertEqual([ x() for x in lambda: True, lambda: False if x() ], [True]) + self.assertEqual([ x() for x in (lambda: True, lambda: False) if x() ], [True]) + self.assertEqual([ x(False) for x in (lambda x: False if x else True, lambda x: True if x else False) if x(False) ], [True]) + self.assertEqual((5 if 1 else _checkeval("check 1", 0)), 5) + self.assertEqual((_checkeval("check 2", 0) if 0 else 5), 5) + self.assertEqual((5 and 6 if 0 else 1), 1) + self.assertEqual(((5 and 6) if 0 else 1), 1) + self.assertEqual((5 and (6 if 1 else 1)), 6) + self.assertEqual((0 or _checkeval("check 3", 2) if 0 else 3), 3) + self.assertEqual((1 or _checkeval("check 4", 2) if 1 else _checkeval("check 5", 3)), 1) + self.assertEqual((0 or 5 if 1 else _checkeval("check 6", 3)), 5) + self.assertEqual((not 5 if 1 else 1), False) + self.assertEqual((not 5 if 0 else 1), 1) + self.assertEqual((6 + 1 if 1 else 2), 7) + self.assertEqual((6 - 1 if 1 else 2), 5) + self.assertEqual((6 * 2 if 1 else 4), 12) + self.assertEqual((6 / 2 if 1 else 3), 3) + self.assertEqual((6 < 4 if 0 else 2), 2) + + +if __name__ == '__main__': + unittest.main() diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/data/py3_test_grammar.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/data/py3_test_grammar.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/data/py3_test_grammar.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/data/py3_test_grammar.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,956 @@ +# Python test set -- part 1, grammar. +# This just tests whether the parser accepts them all. + +# NOTE: When you run this test as a script from the command line, you +# get warnings about certain hex/oct constants. Since those are +# issued by the parser, you can't suppress them by adding a +# filterwarnings() call to this module. Therefore, to shut up the +# regression test, the filterwarnings() call has been added to +# regrtest.py. 
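
The note at the top of both grammar files makes the same point: warnings emitted by the parser are raised while the module is being compiled, so a `filterwarnings()` call inside that module runs too late, and the filter has to live in the test harness instead. A rough sketch of the idea, assuming a construct that current Python 3 warns about at compile time (`is` with a literal raises a SyntaxWarning on 3.8+); the source string and names are illustrative:

    import warnings

    SRC = "x = 1 is 1\n"    # 'is' with a literal: a compile-time SyntaxWarning on 3.8+

    with warnings.catch_warnings():
        # The filter lives in the code that calls compile(), not in the
        # module being compiled, because the warning fires during compilation.
        warnings.simplefilter("ignore", SyntaxWarning)
        code = compile(SRC, "<grammar-test>", "exec")
    exec(code)
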
+ +from test.support import check_syntax_error +import unittest +import sys +# testing import * +from sys import * + +class TokenTests(unittest.TestCase): + + def testBackslash(self): + # Backslash means line continuation: + x = 1 \ + + 1 + self.assertEquals(x, 2, 'backslash for line continuation') + + # Backslash does not means continuation in comments :\ + x = 0 + self.assertEquals(x, 0, 'backslash ending comment') + + def testPlainIntegers(self): + self.assertEquals(type(000), type(0)) + self.assertEquals(0xff, 255) + self.assertEquals(0o377, 255) + self.assertEquals(2147483647, 0o17777777777) + self.assertEquals(0b1001, 9) + # "0x" is not a valid literal + self.assertRaises(SyntaxError, eval, "0x") + from sys import maxsize + if maxsize == 2147483647: + self.assertEquals(-2147483647-1, -0o20000000000) + # XXX -2147483648 + self.assert_(0o37777777777 > 0) + self.assert_(0xffffffff > 0) + self.assert_(0b1111111111111111111111111111111 > 0) + for s in ('2147483648', '0o40000000000', '0x100000000', + '0b10000000000000000000000000000000'): + try: + x = eval(s) + except OverflowError: + self.fail("OverflowError on huge integer literal %r" % s) + elif maxsize == 9223372036854775807: + self.assertEquals(-9223372036854775807-1, -0o1000000000000000000000) + self.assert_(0o1777777777777777777777 > 0) + self.assert_(0xffffffffffffffff > 0) + self.assert_(0b11111111111111111111111111111111111111111111111111111111111111 > 0) + for s in '9223372036854775808', '0o2000000000000000000000', \ + '0x10000000000000000', \ + '0b100000000000000000000000000000000000000000000000000000000000000': + try: + x = eval(s) + except OverflowError: + self.fail("OverflowError on huge integer literal %r" % s) + else: + self.fail('Weird maxsize value %r' % maxsize) + + def testLongIntegers(self): + x = 0 + x = 0xffffffffffffffff + x = 0Xffffffffffffffff + x = 0o77777777777777777 + x = 0O77777777777777777 + x = 123456789012345678901234567890 + x = 0b100000000000000000000000000000000000000000000000000000000000000000000 + x = 0B111111111111111111111111111111111111111111111111111111111111111111111 + + def testUnderscoresInNumbers(self): + # Integers + x = 1_0 + x = 123_456_7_89 + x = 0xabc_123_4_5 + x = 0X_abc_123 + x = 0B11_01 + x = 0b_11_01 + x = 0o45_67 + x = 0O_45_67 + + # Floats + x = 3_1.4 + x = 03_1.4 + x = 3_1. + x = .3_1 + x = 3.1_4 + x = 0_3.1_4 + x = 3e1_4 + x = 3_1e+4_1 + x = 3_1E-4_1 + + def testFloats(self): + x = 3.14 + x = 314. + x = 0.314 + # XXX x = 000.314 + x = .314 + x = 3e14 + x = 3E14 + x = 3e-14 + x = 3e+14 + x = 3.e14 + x = .3e14 + x = 3.1e4 + + def testStringLiterals(self): + x = ''; y = ""; self.assert_(len(x) == 0 and x == y) + x = '\''; y = "'"; self.assert_(len(x) == 1 and x == y and ord(x) == 39) + x = '"'; y = "\""; self.assert_(len(x) == 1 and x == y and ord(x) == 34) + x = "doesn't \"shrink\" does it" + y = 'doesn\'t "shrink" does it' + self.assert_(len(x) == 24 and x == y) + x = "does \"shrink\" doesn't it" + y = 'does "shrink" doesn\'t it' + self.assert_(len(x) == 24 and x == y) + x = """ +The "quick" +brown fox +jumps over +the 'lazy' dog. +""" + y = '\nThe "quick"\nbrown fox\njumps over\nthe \'lazy\' dog.\n' + self.assertEquals(x, y) + y = ''' +The "quick" +brown fox +jumps over +the 'lazy' dog. 
+''' + self.assertEquals(x, y) + y = "\n\ +The \"quick\"\n\ +brown fox\n\ +jumps over\n\ +the 'lazy' dog.\n\ +" + self.assertEquals(x, y) + y = '\n\ +The \"quick\"\n\ +brown fox\n\ +jumps over\n\ +the \'lazy\' dog.\n\ +' + self.assertEquals(x, y) + x = rf"hello \{True}"; y = f"hello \\{True}" + self.assertEquals(x, y) + + def testEllipsis(self): + x = ... + self.assert_(x is Ellipsis) + self.assertRaises(SyntaxError, eval, ".. .") + +class GrammarTests(unittest.TestCase): + + # single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE + # XXX can't test in a script -- this rule is only used when interactive + + # file_input: (NEWLINE | stmt)* ENDMARKER + # Being tested as this very moment this very module + + # expr_input: testlist NEWLINE + # XXX Hard to test -- used only in calls to input() + + def testEvalInput(self): + # testlist ENDMARKER + x = eval('1, 0 or 1') + + def testFuncdef(self): + ### [decorators] 'def' NAME parameters ['->' test] ':' suite + ### decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE + ### decorators: decorator+ + ### parameters: '(' [typedargslist] ')' + ### typedargslist: ((tfpdef ['=' test] ',')* + ### ('*' [tfpdef] (',' tfpdef ['=' test])* [',' '**' tfpdef] | '**' tfpdef) + ### | tfpdef ['=' test] (',' tfpdef ['=' test])* [',']) + ### tfpdef: NAME [':' test] + ### varargslist: ((vfpdef ['=' test] ',')* + ### ('*' [vfpdef] (',' vfpdef ['=' test])* [',' '**' vfpdef] | '**' vfpdef) + ### | vfpdef ['=' test] (',' vfpdef ['=' test])* [',']) + ### vfpdef: NAME + def f1(): pass + f1() + f1(*()) + f1(*(), **{}) + def f2(one_argument): pass + def f3(two, arguments): pass + self.assertEquals(f2.__code__.co_varnames, ('one_argument',)) + self.assertEquals(f3.__code__.co_varnames, ('two', 'arguments')) + def a1(one_arg,): pass + def a2(two, args,): pass + def v0(*rest): pass + def v1(a, *rest): pass + def v2(a, b, *rest): pass + + f1() + f2(1) + f2(1,) + f3(1, 2) + f3(1, 2,) + v0() + v0(1) + v0(1,) + v0(1,2) + v0(1,2,3,4,5,6,7,8,9,0) + v1(1) + v1(1,) + v1(1,2) + v1(1,2,3) + v1(1,2,3,4,5,6,7,8,9,0) + v2(1,2) + v2(1,2,3) + v2(1,2,3,4) + v2(1,2,3,4,5,6,7,8,9,0) + + def d01(a=1): pass + d01() + d01(1) + d01(*(1,)) + d01(**{'a':2}) + def d11(a, b=1): pass + d11(1) + d11(1, 2) + d11(1, **{'b':2}) + def d21(a, b, c=1): pass + d21(1, 2) + d21(1, 2, 3) + d21(*(1, 2, 3)) + d21(1, *(2, 3)) + d21(1, 2, *(3,)) + d21(1, 2, **{'c':3}) + def d02(a=1, b=2): pass + d02() + d02(1) + d02(1, 2) + d02(*(1, 2)) + d02(1, *(2,)) + d02(1, **{'b':2}) + d02(**{'a': 1, 'b': 2}) + def d12(a, b=1, c=2): pass + d12(1) + d12(1, 2) + d12(1, 2, 3) + def d22(a, b, c=1, d=2): pass + d22(1, 2) + d22(1, 2, 3) + d22(1, 2, 3, 4) + def d01v(a=1, *rest): pass + d01v() + d01v(1) + d01v(1, 2) + d01v(*(1, 2, 3, 4)) + d01v(*(1,)) + d01v(**{'a':2}) + def d11v(a, b=1, *rest): pass + d11v(1) + d11v(1, 2) + d11v(1, 2, 3) + def d21v(a, b, c=1, *rest): pass + d21v(1, 2) + d21v(1, 2, 3) + d21v(1, 2, 3, 4) + d21v(*(1, 2, 3, 4)) + d21v(1, 2, **{'c': 3}) + def d02v(a=1, b=2, *rest): pass + d02v() + d02v(1) + d02v(1, 2) + d02v(1, 2, 3) + d02v(1, *(2, 3, 4)) + d02v(**{'a': 1, 'b': 2}) + def d12v(a, b=1, c=2, *rest): pass + d12v(1) + d12v(1, 2) + d12v(1, 2, 3) + d12v(1, 2, 3, 4) + d12v(*(1, 2, 3, 4)) + d12v(1, 2, *(3, 4, 5)) + d12v(1, *(2,), **{'c': 3}) + def d22v(a, b, c=1, d=2, *rest): pass + d22v(1, 2) + d22v(1, 2, 3) + d22v(1, 2, 3, 4) + d22v(1, 2, 3, 4, 5) + d22v(*(1, 2, 3, 4)) + d22v(1, 2, *(3, 4, 5)) + d22v(1, *(2, 3), **{'d': 4}) + + # keyword argument type tests + try: + str('x', **{b'foo':1 }) + except 
TypeError: + pass + else: + self.fail('Bytes should not work as keyword argument names') + # keyword only argument tests + def pos0key1(*, key): return key + pos0key1(key=100) + def pos2key2(p1, p2, *, k1, k2=100): return p1,p2,k1,k2 + pos2key2(1, 2, k1=100) + pos2key2(1, 2, k1=100, k2=200) + pos2key2(1, 2, k2=100, k1=200) + def pos2key2dict(p1, p2, *, k1=100, k2, **kwarg): return p1,p2,k1,k2,kwarg + pos2key2dict(1,2,k2=100,tokwarg1=100,tokwarg2=200) + pos2key2dict(1,2,tokwarg1=100,tokwarg2=200, k2=100) + + # keyword arguments after *arglist + def f(*args, **kwargs): + return args, kwargs + self.assertEquals(f(1, x=2, *[3, 4], y=5), ((1, 3, 4), + {'x':2, 'y':5})) + self.assertRaises(SyntaxError, eval, "f(1, *(2,3), 4)") + self.assertRaises(SyntaxError, eval, "f(1, x=2, *(3,4), x=5)") + + # argument annotation tests + def f(x) -> list: pass + self.assertEquals(f.__annotations__, {'return': list}) + def f(x:int): pass + self.assertEquals(f.__annotations__, {'x': int}) + def f(*x:str): pass + self.assertEquals(f.__annotations__, {'x': str}) + def f(**x:float): pass + self.assertEquals(f.__annotations__, {'x': float}) + def f(x, y:1+2): pass + self.assertEquals(f.__annotations__, {'y': 3}) + def f(a, b:1, c:2, d): pass + self.assertEquals(f.__annotations__, {'b': 1, 'c': 2}) + def f(a, b:1, c:2, d, e:3=4, f=5, *g:6): pass + self.assertEquals(f.__annotations__, + {'b': 1, 'c': 2, 'e': 3, 'g': 6}) + def f(a, b:1, c:2, d, e:3=4, f=5, *g:6, h:7, i=8, j:9=10, + **k:11) -> 12: pass + self.assertEquals(f.__annotations__, + {'b': 1, 'c': 2, 'e': 3, 'g': 6, 'h': 7, 'j': 9, + 'k': 11, 'return': 12}) + # Check for SF Bug #1697248 - mixing decorators and a return annotation + def null(x): return x + @null + def f(x) -> list: pass + self.assertEquals(f.__annotations__, {'return': list}) + + # test closures with a variety of oparg's + closure = 1 + def f(): return closure + def f(x=1): return closure + def f(*, k=1): return closure + def f() -> int: return closure + + # Check ast errors in *args and *kwargs + check_syntax_error(self, "f(*g(1=2))") + check_syntax_error(self, "f(**g(1=2))") + + def testLambdef(self): + ### lambdef: 'lambda' [varargslist] ':' test + l1 = lambda : 0 + self.assertEquals(l1(), 0) + l2 = lambda : a[d] # XXX just testing the expression + l3 = lambda : [2 < x for x in [-1, 3, 0]] + self.assertEquals(l3(), [0, 1, 0]) + l4 = lambda x = lambda y = lambda z=1 : z : y() : x() + self.assertEquals(l4(), 1) + l5 = lambda x, y, z=2: x + y + z + self.assertEquals(l5(1, 2), 5) + self.assertEquals(l5(1, 2, 3), 6) + check_syntax_error(self, "lambda x: x = 2") + check_syntax_error(self, "lambda (None,): None") + l6 = lambda x, y, *, k=20: x+y+k + self.assertEquals(l6(1,2), 1+2+20) + self.assertEquals(l6(1,2,k=10), 1+2+10) + + + ### stmt: simple_stmt | compound_stmt + # Tested below + + def testSimpleStmt(self): + ### simple_stmt: small_stmt (';' small_stmt)* [';'] + x = 1; pass; del x + def foo(): + # verify statements that end with semi-colons + x = 1; pass; del x; + foo() + + ### small_stmt: expr_stmt | pass_stmt | del_stmt | flow_stmt | import_stmt | global_stmt | access_stmt + # Tested below + + def testExprStmt(self): + # (exprlist '=')* exprlist + 1 + 1, 2, 3 + x = 1 + x = 1, 2, 3 + x = y = z = 1, 2, 3 + x, y, z = 1, 2, 3 + abc = a, b, c = x, y, z = xyz = 1, 2, (3, 4) + + check_syntax_error(self, "x + 1 = 1") + check_syntax_error(self, "a + 1 = b + 2") + + def testDelStmt(self): + # 'del' exprlist + abc = [1,2,3] + x, y, z = abc + xyz = x, y, z + + del abc + del x, y, (z, xyz) + + def 
testPassStmt(self): + # 'pass' + pass + + # flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt + # Tested below + + def testBreakStmt(self): + # 'break' + while 1: break + + def testContinueStmt(self): + # 'continue' + i = 1 + while i: i = 0; continue + + msg = "" + while not msg: + msg = "ok" + try: + continue + msg = "continue failed to continue inside try" + except: + msg = "continue inside try called except block" + if msg != "ok": + self.fail(msg) + + msg = "" + while not msg: + msg = "finally block not called" + try: + continue + finally: + msg = "ok" + if msg != "ok": + self.fail(msg) + + def test_break_continue_loop(self): + # This test warrants an explanation. It is a test specifically for SF bugs + # #463359 and #462937. The bug is that a 'break' statement executed or + # exception raised inside a try/except inside a loop, *after* a continue + # statement has been executed in that loop, will cause the wrong number of + # arguments to be popped off the stack and the instruction pointer reset to + # a very small number (usually 0.) Because of this, the following test + # *must* written as a function, and the tracking vars *must* be function + # arguments with default values. Otherwise, the test will loop and loop. + + def test_inner(extra_burning_oil = 1, count=0): + big_hippo = 2 + while big_hippo: + count += 1 + try: + if extra_burning_oil and big_hippo == 1: + extra_burning_oil -= 1 + break + big_hippo -= 1 + continue + except: + raise + if count > 2 or big_hippo != 1: + self.fail("continue then break in try/except in loop broken!") + test_inner() + + def testReturn(self): + # 'return' [testlist_star_expr] + def g1(): return + def g2(): return 1 + return_list = [2, 3] + def g3(): return 1, *return_list + g1() + x = g2() + x3 = g3() + check_syntax_error(self, "class foo:return 1") + + def testYield(self): + # 'yield' [yield_arg] + def g1(): yield 1 + yield_list = [2, 3] + def g2(): yield 1, *yield_list + def g3(): yield from iter(yield_list) + x1 = g1() + x2 = g2() + x3 = g3() + check_syntax_error(self, "class foo:yield 1") + check_syntax_error(self, "def g4(): yield from *a") + + def testRaise(self): + # 'raise' test [',' test] + try: raise RuntimeError('just testing') + except RuntimeError: pass + try: raise KeyboardInterrupt + except KeyboardInterrupt: pass + + def testImport(self): + # 'import' dotted_as_names + import sys + import time, sys + # 'from' dotted_name 'import' ('*' | '(' import_as_names ')' | import_as_names) + from time import time + from time import (time) + # not testable inside a function, but already done at top of the module + # from sys import * + from sys import path, argv + from sys import (path, argv) + from sys import (path, argv,) + + def testGlobal(self): + # 'global' NAME (',' NAME)* + global a + global a, b + global one, two, three, four, five, six, seven, eight, nine, ten + + def testNonlocal(self): + # 'nonlocal' NAME (',' NAME)* + x = 0 + y = 0 + def f(): + nonlocal x + nonlocal x, y + + def testAssert(self): + # assert_stmt: 'assert' test [',' test] + assert 1 + assert 1, 1 + assert lambda x:x + assert 1, lambda x:x+1 + try: + assert 0, "msg" + except AssertionError as e: + self.assertEquals(e.args[0], "msg") + else: + if __debug__: + self.fail("AssertionError not raised by assert 0") + + ### compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | funcdef | classdef + # Tested below + + def testIf(self): + # 'if' test ':' suite ('elif' test ':' suite)* ['else' ':' suite] + if 1: pass + if 1: pass + else: pass + if 0: pass + 
elif 0: pass + if 0: pass + elif 0: pass + elif 0: pass + elif 0: pass + else: pass + + def testWhile(self): + # 'while' test ':' suite ['else' ':' suite] + while 0: pass + while 0: pass + else: pass + + # Issue1920: "while 0" is optimized away, + # ensure that the "else" clause is still present. + x = 0 + while 0: + x = 1 + else: + x = 2 + self.assertEquals(x, 2) + + def testFor(self): + # 'for' exprlist 'in' exprlist ':' suite ['else' ':' suite] + for i in 1, 2, 3: pass + for i, j, k in (): pass + else: pass + class Squares: + def __init__(self, max): + self.max = max + self.sofar = [] + def __len__(self): return len(self.sofar) + def __getitem__(self, i): + if not 0 <= i < self.max: raise IndexError + n = len(self.sofar) + while n <= i: + self.sofar.append(n*n) + n = n+1 + return self.sofar[i] + n = 0 + for x in Squares(10): n = n+x + if n != 285: + self.fail('for over growing sequence') + + result = [] + for x, in [(1,), (2,), (3,)]: + result.append(x) + self.assertEqual(result, [1, 2, 3]) + + def testTry(self): + ### try_stmt: 'try' ':' suite (except_clause ':' suite)+ ['else' ':' suite] + ### | 'try' ':' suite 'finally' ':' suite + ### except_clause: 'except' [expr ['as' expr]] + try: + 1/0 + except ZeroDivisionError: + pass + else: + pass + try: 1/0 + except EOFError: pass + except TypeError as msg: pass + except RuntimeError as msg: pass + except: pass + else: pass + try: 1/0 + except (EOFError, TypeError, ZeroDivisionError): pass + try: 1/0 + except (EOFError, TypeError, ZeroDivisionError) as msg: pass + try: pass + finally: pass + + def testSuite(self): + # simple_stmt | NEWLINE INDENT NEWLINE* (stmt NEWLINE*)+ DEDENT + if 1: pass + if 1: + pass + if 1: + # + # + # + pass + pass + # + pass + # + + def testTest(self): + ### and_test ('or' and_test)* + ### and_test: not_test ('and' not_test)* + ### not_test: 'not' not_test | comparison + if not 1: pass + if 1 and 1: pass + if 1 or 1: pass + if not not not 1: pass + if not 1 and 1 and 1: pass + if 1 and 1 or 1 and 1 and 1 or not 1 and 1: pass + + def testComparison(self): + ### comparison: expr (comp_op expr)* + ### comp_op: '<'|'>'|'=='|'>='|'<='|'!='|'in'|'not' 'in'|'is'|'is' 'not' + if 1: pass + x = (1 == 1) + if 1 == 1: pass + if 1 != 1: pass + if 1 < 1: pass + if 1 > 1: pass + if 1 <= 1: pass + if 1 >= 1: pass + if 1 is 1: pass + if 1 is not 1: pass + if 1 in (): pass + if 1 not in (): pass + if 1 < 1 > 1 == 1 >= 1 <= 1 != 1 in 1 not in 1 is 1 is not 1: pass + + def testBinaryMaskOps(self): + x = 1 & 1 + x = 1 ^ 1 + x = 1 | 1 + + def testShiftOps(self): + x = 1 << 1 + x = 1 >> 1 + x = 1 << 1 >> 1 + + def testAdditiveOps(self): + x = 1 + x = 1 + 1 + x = 1 - 1 - 1 + x = 1 - 1 + 1 - 1 + 1 + + def testMultiplicativeOps(self): + x = 1 * 1 + x = 1 / 1 + x = 1 % 1 + x = 1 / 1 * 1 % 1 + + def testUnaryOps(self): + x = +1 + x = -1 + x = ~1 + x = ~1 ^ 1 & 1 | 1 & 1 ^ -1 + x = -1*1/1 + 1*1 - ---1*1 + + def testSelectors(self): + ### trailer: '(' [testlist] ')' | '[' subscript ']' | '.' NAME + ### subscript: expr | [expr] ':' [expr] + + import sys, time + c = sys.path[0] + x = time.time() + x = sys.modules['time'].time() + a = '01234' + c = a[0] + c = a[-1] + s = a[0:5] + s = a[:5] + s = a[0:] + s = a[:] + s = a[-5:] + s = a[:-1] + s = a[-4:-3] + # A rough test of SF bug 1333982. https://python.org/sf/1333982 + # The testing here is fairly incomplete. 
+ # Test cases should include: commas with 1 and 2 colons + d = {} + d[1] = 1 + d[1,] = 2 + d[1,2] = 3 + d[1,2,3] = 4 + L = list(d) + L.sort(key=lambda x: x if isinstance(x, tuple) else ()) + self.assertEquals(str(L), '[1, (1,), (1, 2), (1, 2, 3)]') + + def testAtoms(self): + ### atom: '(' [testlist] ')' | '[' [testlist] ']' | '{' [dictsetmaker] '}' | NAME | NUMBER | STRING + ### dictsetmaker: (test ':' test (',' test ':' test)* [',']) | (test (',' test)* [',']) + + x = (1) + x = (1 or 2 or 3) + x = (1 or 2 or 3, 2, 3) + + x = [] + x = [1] + x = [1 or 2 or 3] + x = [1 or 2 or 3, 2, 3] + x = [] + + x = {} + x = {'one': 1} + x = {'one': 1,} + x = {'one' or 'two': 1 or 2} + x = {'one': 1, 'two': 2} + x = {'one': 1, 'two': 2,} + x = {'one': 1, 'two': 2, 'three': 3, 'four': 4, 'five': 5, 'six': 6} + + x = {'one'} + x = {'one', 1,} + x = {'one', 'two', 'three'} + x = {2, 3, 4,} + + x = x + x = 'x' + x = 123 + + ### exprlist: expr (',' expr)* [','] + ### testlist: test (',' test)* [','] + # These have been exercised enough above + + def testClassdef(self): + # 'class' NAME ['(' [testlist] ')'] ':' suite + class B: pass + class B2(): pass + class C1(B): pass + class C2(B): pass + class D(C1, C2, B): pass + class C: + def meth1(self): pass + def meth2(self, arg): pass + def meth3(self, a1, a2): pass + + # decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE + # decorators: decorator+ + # decorated: decorators (classdef | funcdef) + def class_decorator(x): return x + @class_decorator + class G: pass + + def testDictcomps(self): + # dictorsetmaker: ( (test ':' test (comp_for | + # (',' test ':' test)* [','])) | + # (test (comp_for | (',' test)* [','])) ) + nums = [1, 2, 3] + self.assertEqual({i:i+1 for i in nums}, {1: 2, 2: 3, 3: 4}) + + def testListcomps(self): + # list comprehension tests + nums = [1, 2, 3, 4, 5] + strs = ["Apple", "Banana", "Coconut"] + spcs = [" Apple", " Banana ", "Coco nut "] + + self.assertEqual([s.strip() for s in spcs], ['Apple', 'Banana', 'Coco nut']) + self.assertEqual([3 * x for x in nums], [3, 6, 9, 12, 15]) + self.assertEqual([x for x in nums if x > 2], [3, 4, 5]) + self.assertEqual([(i, s) for i in nums for s in strs], + [(1, 'Apple'), (1, 'Banana'), (1, 'Coconut'), + (2, 'Apple'), (2, 'Banana'), (2, 'Coconut'), + (3, 'Apple'), (3, 'Banana'), (3, 'Coconut'), + (4, 'Apple'), (4, 'Banana'), (4, 'Coconut'), + (5, 'Apple'), (5, 'Banana'), (5, 'Coconut')]) + self.assertEqual([(i, s) for i in nums for s in [f for f in strs if "n" in f]], + [(1, 'Banana'), (1, 'Coconut'), (2, 'Banana'), (2, 'Coconut'), + (3, 'Banana'), (3, 'Coconut'), (4, 'Banana'), (4, 'Coconut'), + (5, 'Banana'), (5, 'Coconut')]) + self.assertEqual([(lambda a:[a**i for i in range(a+1)])(j) for j in range(5)], + [[1], [1, 1], [1, 2, 4], [1, 3, 9, 27], [1, 4, 16, 64, 256]]) + + def test_in_func(l): + return [0 < x < 3 for x in l if x > 2] + + self.assertEqual(test_in_func(nums), [False, False, False]) + + def test_nested_front(): + self.assertEqual([[y for y in [x, x + 1]] for x in [1,3,5]], + [[1, 2], [3, 4], [5, 6]]) + + test_nested_front() + + check_syntax_error(self, "[i, s for i in nums for s in strs]") + check_syntax_error(self, "[x if y]") + + suppliers = [ + (1, "Boeing"), + (2, "Ford"), + (3, "Macdonalds") + ] + + parts = [ + (10, "Airliner"), + (20, "Engine"), + (30, "Cheeseburger") + ] + + suppart = [ + (1, 10), (1, 20), (2, 20), (3, 30) + ] + + x = [ + (sname, pname) + for (sno, sname) in suppliers + for (pno, pname) in parts + for (sp_sno, sp_pno) in suppart + if sno == sp_sno and pno == 
sp_pno + ] + + self.assertEqual(x, [('Boeing', 'Airliner'), ('Boeing', 'Engine'), ('Ford', 'Engine'), + ('Macdonalds', 'Cheeseburger')]) + + def testGenexps(self): + # generator expression tests + g = ([x for x in range(10)] for x in range(1)) + self.assertEqual(next(g), [x for x in range(10)]) + try: + next(g) + self.fail('should produce StopIteration exception') + except StopIteration: + pass + + a = 1 + try: + g = (a for d in a) + next(g) + self.fail('should produce TypeError') + except TypeError: + pass + + self.assertEqual(list((x, y) for x in 'abcd' for y in 'abcd'), [(x, y) for x in 'abcd' for y in 'abcd']) + self.assertEqual(list((x, y) for x in 'ab' for y in 'xy'), [(x, y) for x in 'ab' for y in 'xy']) + + a = [x for x in range(10)] + b = (x for x in (y for y in a)) + self.assertEqual(sum(b), sum([x for x in range(10)])) + + self.assertEqual(sum(x**2 for x in range(10)), sum([x**2 for x in range(10)])) + self.assertEqual(sum(x*x for x in range(10) if x%2), sum([x*x for x in range(10) if x%2])) + self.assertEqual(sum(x for x in (y for y in range(10))), sum([x for x in range(10)])) + self.assertEqual(sum(x for x in (y for y in (z for z in range(10)))), sum([x for x in range(10)])) + self.assertEqual(sum(x for x in [y for y in (z for z in range(10))]), sum([x for x in range(10)])) + self.assertEqual(sum(x for x in (y for y in (z for z in range(10) if True)) if True), sum([x for x in range(10)])) + self.assertEqual(sum(x for x in (y for y in (z for z in range(10) if True) if False) if True), 0) + check_syntax_error(self, "foo(x for x in range(10), 100)") + check_syntax_error(self, "foo(100, x for x in range(10))") + + def testComprehensionSpecials(self): + # test for outmost iterable precomputation + x = 10; g = (i for i in range(x)); x = 5 + self.assertEqual(len(list(g)), 10) + + # This should hold, since we're only precomputing outmost iterable. + x = 10; t = False; g = ((i,j) for i in range(x) if t for j in range(x)) + x = 5; t = True; + self.assertEqual([(i,j) for i in range(10) for j in range(5)], list(g)) + + # Grammar allows multiple adjacent 'if's in listcomps and genexps, + # even though it's silly. Make sure it works (ifelse broke this.) + self.assertEqual([ x for x in range(10) if x % 2 if x % 3 ], [1, 5, 7]) + self.assertEqual(list(x for x in range(10) if x % 2 if x % 3), [1, 5, 7]) + + # verify unpacking single element tuples in listcomp/genexp. 
+ self.assertEqual([x for x, in [(4,), (5,), (6,)]], [4, 5, 6]) + self.assertEqual(list(x for x, in [(7,), (8,), (9,)]), [7, 8, 9]) + + def test_with_statement(self): + class manager(object): + def __enter__(self): + return (1, 2) + def __exit__(self, *args): + pass + + with manager(): + pass + with manager() as x: + pass + with manager() as (x, y): + pass + with manager(), manager(): + pass + with manager() as x, manager() as y: + pass + with manager() as x, manager(): + pass + + def testIfElseExpr(self): + # Test ifelse expressions in various cases + def _checkeval(msg, ret): + "helper to check that evaluation of expressions is done correctly" + print(x) + return ret + + # the next line is not allowed anymore + #self.assertEqual([ x() for x in lambda: True, lambda: False if x() ], [True]) + self.assertEqual([ x() for x in (lambda: True, lambda: False) if x() ], [True]) + self.assertEqual([ x(False) for x in (lambda x: False if x else True, lambda x: True if x else False) if x(False) ], [True]) + self.assertEqual((5 if 1 else _checkeval("check 1", 0)), 5) + self.assertEqual((_checkeval("check 2", 0) if 0 else 5), 5) + self.assertEqual((5 and 6 if 0 else 1), 1) + self.assertEqual(((5 and 6) if 0 else 1), 1) + self.assertEqual((5 and (6 if 1 else 1)), 6) + self.assertEqual((0 or _checkeval("check 3", 2) if 0 else 3), 3) + self.assertEqual((1 or _checkeval("check 4", 2) if 1 else _checkeval("check 5", 3)), 1) + self.assertEqual((0 or 5 if 1 else _checkeval("check 6", 3)), 5) + self.assertEqual((not 5 if 1 else 1), False) + self.assertEqual((not 5 if 0 else 1), 1) + self.assertEqual((6 + 1 if 1 else 2), 7) + self.assertEqual((6 - 1 if 1 else 2), 5) + self.assertEqual((6 * 2 if 1 else 4), 12) + self.assertEqual((6 / 2 if 1 else 3), 3) + self.assertEqual((6 < 4 if 0 else 2), 2) + + +if __name__ == '__main__': + unittest.main() diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/pytree_idempotency.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/pytree_idempotency.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/pytree_idempotency.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/pytree_idempotency.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,94 @@ +#!/usr/bin/env python3 +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Main program for testing the infrastructure.""" + +from __future__ import print_function + +__author__ = "Guido van Rossum " + +# Support imports (need to be imported first) +from . import support + +# Python imports +import os +import sys +import logging + +# Local imports +from .. import pytree +from .. 
import pgen2 +from ..pgen2 import driver + +logging.basicConfig() + +def main(): + gr = driver.load_grammar("Grammar.txt") + dr = driver.Driver(gr, convert=pytree.convert) + + fn = "example.py" + tree = dr.parse_file(fn, debug=True) + if not diff(fn, tree): + print("No diffs.") + if not sys.argv[1:]: + return # Pass a dummy argument to run the complete test suite below + + problems = [] + + # Process every imported module + for name in sys.modules: + mod = sys.modules[name] + if mod is None or not hasattr(mod, "__file__"): + continue + fn = mod.__file__ + if fn.endswith(".pyc"): + fn = fn[:-1] + if not fn.endswith(".py"): + continue + print("Parsing", fn, file=sys.stderr) + tree = dr.parse_file(fn, debug=True) + if diff(fn, tree): + problems.append(fn) + + # Process every single module on sys.path (but not in packages) + for dir in sys.path: + try: + names = os.listdir(dir) + except OSError: + continue + print("Scanning", dir, "...", file=sys.stderr) + for name in names: + if not name.endswith(".py"): + continue + print("Parsing", name, file=sys.stderr) + fn = os.path.join(dir, name) + try: + tree = dr.parse_file(fn, debug=True) + except pgen2.parse.ParseError as err: + print("ParseError:", err) + else: + if diff(fn, tree): + problems.append(fn) + + # Show summary of problem files + if not problems: + print("No problems. Congratulations!") + else: + print("Problems in following files:") + for fn in problems: + print("***", fn) + +def diff(fn, tree): + f = open("@", "w") + try: + f.write(str(tree)) + finally: + f.close() + try: + return os.system("diff -u %s @" % fn) + finally: + os.remove("@") + +if __name__ == "__main__": + main() diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/support.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/support.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/support.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/support.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,58 @@ +"""Support code for test_*.py files""" +# Author: Collin Winter + +# Python imports +import unittest +import os +import os.path +from textwrap import dedent + +# Local imports +from lib2to3 import pytree, refactor +from lib2to3.pgen2 import driver as pgen2_driver + +test_dir = os.path.dirname(__file__) +proj_dir = os.path.normpath(os.path.join(test_dir, "..")) +grammar_path = os.path.join(test_dir, "..", "Grammar.txt") +grammar = pgen2_driver.load_grammar(grammar_path) +grammar_no_print_statement = pgen2_driver.load_grammar(grammar_path) +del grammar_no_print_statement.keywords["print"] +driver = pgen2_driver.Driver(grammar, convert=pytree.convert) +driver_no_print_statement = pgen2_driver.Driver( + grammar_no_print_statement, + convert=pytree.convert +) + +def parse_string(string): + return driver.parse_string(reformat(string), debug=True) + +def run_all_tests(test_mod=None, tests=None): + if tests is None: + tests = unittest.TestLoader().loadTestsFromModule(test_mod) + unittest.TextTestRunner(verbosity=2).run(tests) + +def reformat(string): + return dedent(string) + "\n\n" + +def get_refactorer(fixer_pkg="lib2to3", fixers=None, options=None): + """ + A convenience function for creating a RefactoringTool for tests. + + fixers is a list of fixers for the RefactoringTool to use. By default + "lib2to3.fixes.*" is used. options is an optional dictionary of options to + be passed to the RefactoringTool. 
+ """ + if fixers is not None: + fixers = [fixer_pkg + ".fixes.fix_" + fix for fix in fixers] + else: + fixers = refactor.get_fixers_from_package(fixer_pkg + ".fixes") + options = options or {} + return refactor.RefactoringTool(fixers, options, explicit=True) + +def all_project_files(): + for dirpath, dirnames, filenames in os.walk(proj_dir): + for filename in filenames: + if filename.endswith(".py"): + yield os.path.join(dirpath, filename) + +TestCase = unittest.TestCase diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/test_all_fixers.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/test_all_fixers.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/test_all_fixers.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/test_all_fixers.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,42 @@ +"""Tests that run all fixer modules over an input stream. + +This has been broken out into its own test module because of its +running time. +""" +# Author: Collin Winter + +# Python imports +import os.path +import sys +import test.support +import unittest + +# Local imports +from . import support + + +@test.support.requires_resource('cpu') +class Test_all(support.TestCase): + + def setUp(self): + self.refactor = support.get_refactorer() + + def refactor_file(self, filepath): + if test.support.verbose: + print(f"Refactor file: {filepath}") + if os.path.basename(filepath) == 'infinite_recursion.py': + # bpo-46542: Processing infinite_recursion.py can crash Python + # if Python is built in debug mode: lower the recursion limit + # to prevent a crash. + with test.support.infinite_recursion(150): + self.refactor.refactor_file(filepath) + else: + self.refactor.refactor_file(filepath) + + def test_all_project_files(self): + for filepath in support.all_project_files(): + with self.subTest(filepath=filepath): + self.refactor_file(filepath) + +if __name__ == '__main__': + unittest.main() diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/test_fixers.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/test_fixers.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/test_fixers.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/test_fixers.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,4649 @@ +""" Test suite for the fixer modules """ + +# Python imports +import os +from itertools import chain +from operator import itemgetter + +# Local imports +from lib2to3 import pygram, fixer_util +from lib2to3.tests import support + + +class FixerTestCase(support.TestCase): + + # Other test cases can subclass this class and replace "fixer_pkg" with + # their own. 
+ def setUp(self, fix_list=None, fixer_pkg="lib2to3", options=None): + if fix_list is None: + fix_list = [self.fixer] + self.refactor = support.get_refactorer(fixer_pkg, fix_list, options) + self.fixer_log = [] + self.filename = "" + + for fixer in chain(self.refactor.pre_order, + self.refactor.post_order): + fixer.log = self.fixer_log + + def _check(self, before, after): + before = support.reformat(before) + after = support.reformat(after) + tree = self.refactor.refactor_string(before, self.filename) + self.assertEqual(after, str(tree)) + return tree + + def check(self, before, after, ignore_warnings=False): + tree = self._check(before, after) + self.assertTrue(tree.was_changed) + if not ignore_warnings: + self.assertEqual(self.fixer_log, []) + + def warns(self, before, after, message, unchanged=False): + tree = self._check(before, after) + self.assertIn(message, "".join(self.fixer_log)) + if not unchanged: + self.assertTrue(tree.was_changed) + + def warns_unchanged(self, before, message): + self.warns(before, before, message, unchanged=True) + + def unchanged(self, before, ignore_warnings=False): + self._check(before, before) + if not ignore_warnings: + self.assertEqual(self.fixer_log, []) + + def assert_runs_after(self, *names): + fixes = [self.fixer] + fixes.extend(names) + r = support.get_refactorer("lib2to3", fixes) + (pre, post) = r.get_fixers() + n = "fix_" + self.fixer + if post and post[-1].__class__.__module__.endswith(n): + # We're the last fixer to run + return + if pre and pre[-1].__class__.__module__.endswith(n) and not post: + # We're the last in pre and post is empty + return + self.fail("Fixer run order (%s) is incorrect; %s should be last."\ + %(", ".join([x.__class__.__module__ for x in (pre+post)]), n)) + +class Test_ne(FixerTestCase): + fixer = "ne" + + def test_basic(self): + b = """if x <> y: + pass""" + + a = """if x != y: + pass""" + self.check(b, a) + + def test_no_spaces(self): + b = """if x<>y: + pass""" + + a = """if x!=y: + pass""" + self.check(b, a) + + def test_chained(self): + b = """if x<>y<>z: + pass""" + + a = """if x!=y!=z: + pass""" + self.check(b, a) + +class Test_has_key(FixerTestCase): + fixer = "has_key" + + def test_1(self): + b = """x = d.has_key("x") or d.has_key("y")""" + a = """x = "x" in d or "y" in d""" + self.check(b, a) + + def test_2(self): + b = """x = a.b.c.d.has_key("x") ** 3""" + a = """x = ("x" in a.b.c.d) ** 3""" + self.check(b, a) + + def test_3(self): + b = """x = a.b.has_key(1 + 2).__repr__()""" + a = """x = (1 + 2 in a.b).__repr__()""" + self.check(b, a) + + def test_4(self): + b = """x = a.b.has_key(1 + 2).__repr__() ** -3 ** 4""" + a = """x = (1 + 2 in a.b).__repr__() ** -3 ** 4""" + self.check(b, a) + + def test_5(self): + b = """x = a.has_key(f or g)""" + a = """x = (f or g) in a""" + self.check(b, a) + + def test_6(self): + b = """x = a + b.has_key(c)""" + a = """x = a + (c in b)""" + self.check(b, a) + + def test_7(self): + b = """x = a.has_key(lambda: 12)""" + a = """x = (lambda: 12) in a""" + self.check(b, a) + + def test_8(self): + b = """x = a.has_key(a for a in b)""" + a = """x = (a for a in b) in a""" + self.check(b, a) + + def test_9(self): + b = """if not a.has_key(b): pass""" + a = """if b not in a: pass""" + self.check(b, a) + + def test_10(self): + b = """if not a.has_key(b).__repr__(): pass""" + a = """if not (b in a).__repr__(): pass""" + self.check(b, a) + + def test_11(self): + b = """if not a.has_key(b) ** 2: pass""" + a = """if not (b in a) ** 2: pass""" + self.check(b, a) + +class 
Test_apply(FixerTestCase): + fixer = "apply" + + def test_1(self): + b = """x = apply(f, g + h)""" + a = """x = f(*g + h)""" + self.check(b, a) + + def test_2(self): + b = """y = apply(f, g, h)""" + a = """y = f(*g, **h)""" + self.check(b, a) + + def test_3(self): + b = """z = apply(fs[0], g or h, h or g)""" + a = """z = fs[0](*g or h, **h or g)""" + self.check(b, a) + + def test_4(self): + b = """apply(f, (x, y) + t)""" + a = """f(*(x, y) + t)""" + self.check(b, a) + + def test_5(self): + b = """apply(f, args,)""" + a = """f(*args)""" + self.check(b, a) + + def test_6(self): + b = """apply(f, args, kwds,)""" + a = """f(*args, **kwds)""" + self.check(b, a) + + # Test that complex functions are parenthesized + + def test_complex_1(self): + b = """x = apply(f+g, args)""" + a = """x = (f+g)(*args)""" + self.check(b, a) + + def test_complex_2(self): + b = """x = apply(f*g, args)""" + a = """x = (f*g)(*args)""" + self.check(b, a) + + def test_complex_3(self): + b = """x = apply(f**g, args)""" + a = """x = (f**g)(*args)""" + self.check(b, a) + + # But dotted names etc. not + + def test_dotted_name(self): + b = """x = apply(f.g, args)""" + a = """x = f.g(*args)""" + self.check(b, a) + + def test_subscript(self): + b = """x = apply(f[x], args)""" + a = """x = f[x](*args)""" + self.check(b, a) + + def test_call(self): + b = """x = apply(f(), args)""" + a = """x = f()(*args)""" + self.check(b, a) + + # Extreme case + def test_extreme(self): + b = """x = apply(a.b.c.d.e.f, args, kwds)""" + a = """x = a.b.c.d.e.f(*args, **kwds)""" + self.check(b, a) + + # XXX Comments in weird places still get lost + def test_weird_comments(self): + b = """apply( # foo + f, # bar + args)""" + a = """f(*args)""" + self.check(b, a) + + # These should *not* be touched + + def test_unchanged_1(self): + s = """apply()""" + self.unchanged(s) + + def test_unchanged_2(self): + s = """apply(f)""" + self.unchanged(s) + + def test_unchanged_3(self): + s = """apply(f,)""" + self.unchanged(s) + + def test_unchanged_4(self): + s = """apply(f, args, kwds, extras)""" + self.unchanged(s) + + def test_unchanged_5(self): + s = """apply(f, *args, **kwds)""" + self.unchanged(s) + + def test_unchanged_6(self): + s = """apply(f, *args)""" + self.unchanged(s) + + def test_unchanged_6b(self): + s = """apply(f, **kwds)""" + self.unchanged(s) + + def test_unchanged_7(self): + s = """apply(func=f, args=args, kwds=kwds)""" + self.unchanged(s) + + def test_unchanged_8(self): + s = """apply(f, args=args, kwds=kwds)""" + self.unchanged(s) + + def test_unchanged_9(self): + s = """apply(f, args, kwds=kwds)""" + self.unchanged(s) + + def test_space_1(self): + a = """apply( f, args, kwds)""" + b = """f(*args, **kwds)""" + self.check(a, b) + + def test_space_2(self): + a = """apply( f ,args,kwds )""" + b = """f(*args, **kwds)""" + self.check(a, b) + +class Test_reload(FixerTestCase): + fixer = "reload" + + def test(self): + b = """reload(a)""" + a = """import importlib\nimportlib.reload(a)""" + self.check(b, a) + + def test_comment(self): + b = """reload( a ) # comment""" + a = """import importlib\nimportlib.reload( a ) # comment""" + self.check(b, a) + + # PEP 8 comments + b = """reload( a ) # comment""" + a = """import importlib\nimportlib.reload( a ) # comment""" + self.check(b, a) + + def test_space(self): + b = """reload( a )""" + a = """import importlib\nimportlib.reload( a )""" + self.check(b, a) + + b = """reload( a)""" + a = """import importlib\nimportlib.reload( a)""" + self.check(b, a) + + b = """reload(a )""" + a = """import 
importlib\nimportlib.reload(a )""" + self.check(b, a) + + def test_unchanged(self): + s = """reload(a=1)""" + self.unchanged(s) + + s = """reload(f, g)""" + self.unchanged(s) + + s = """reload(f, *h)""" + self.unchanged(s) + + s = """reload(f, *h, **i)""" + self.unchanged(s) + + s = """reload(f, **i)""" + self.unchanged(s) + + s = """reload(*h, **i)""" + self.unchanged(s) + + s = """reload(*h)""" + self.unchanged(s) + + s = """reload(**i)""" + self.unchanged(s) + + s = """reload()""" + self.unchanged(s) + +class Test_intern(FixerTestCase): + fixer = "intern" + + def test_prefix_preservation(self): + b = """x = intern( a )""" + a = """import sys\nx = sys.intern( a )""" + self.check(b, a) + + b = """y = intern("b" # test + )""" + a = """import sys\ny = sys.intern("b" # test + )""" + self.check(b, a) + + b = """z = intern(a+b+c.d, )""" + a = """import sys\nz = sys.intern(a+b+c.d, )""" + self.check(b, a) + + def test(self): + b = """x = intern(a)""" + a = """import sys\nx = sys.intern(a)""" + self.check(b, a) + + b = """z = intern(a+b+c.d,)""" + a = """import sys\nz = sys.intern(a+b+c.d,)""" + self.check(b, a) + + b = """intern("y%s" % 5).replace("y", "")""" + a = """import sys\nsys.intern("y%s" % 5).replace("y", "")""" + self.check(b, a) + + # These should not be refactored + + def test_unchanged(self): + s = """intern(a=1)""" + self.unchanged(s) + + s = """intern(f, g)""" + self.unchanged(s) + + s = """intern(*h)""" + self.unchanged(s) + + s = """intern(**i)""" + self.unchanged(s) + + s = """intern()""" + self.unchanged(s) + +class Test_reduce(FixerTestCase): + fixer = "reduce" + + def test_simple_call(self): + b = "reduce(a, b, c)" + a = "from functools import reduce\nreduce(a, b, c)" + self.check(b, a) + + def test_bug_7253(self): + # fix_tuple_params was being bad and orphaning nodes in the tree. 
+ b = "def x(arg): reduce(sum, [])" + a = "from functools import reduce\ndef x(arg): reduce(sum, [])" + self.check(b, a) + + def test_call_with_lambda(self): + b = "reduce(lambda x, y: x + y, seq)" + a = "from functools import reduce\nreduce(lambda x, y: x + y, seq)" + self.check(b, a) + + def test_unchanged(self): + s = "reduce(a)" + self.unchanged(s) + + s = "reduce(a, b=42)" + self.unchanged(s) + + s = "reduce(a, b, c, d)" + self.unchanged(s) + + s = "reduce(**c)" + self.unchanged(s) + + s = "reduce()" + self.unchanged(s) + +class Test_print(FixerTestCase): + fixer = "print" + + def test_prefix_preservation(self): + b = """print 1, 1+1, 1+1+1""" + a = """print(1, 1+1, 1+1+1)""" + self.check(b, a) + + def test_idempotency(self): + s = """print()""" + self.unchanged(s) + + s = """print('')""" + self.unchanged(s) + + def test_idempotency_print_as_function(self): + self.refactor.driver.grammar = pygram.python_grammar_no_print_statement + s = """print(1, 1+1, 1+1+1)""" + self.unchanged(s) + + s = """print()""" + self.unchanged(s) + + s = """print('')""" + self.unchanged(s) + + def test_1(self): + b = """print 1, 1+1, 1+1+1""" + a = """print(1, 1+1, 1+1+1)""" + self.check(b, a) + + def test_2(self): + b = """print 1, 2""" + a = """print(1, 2)""" + self.check(b, a) + + def test_3(self): + b = """print""" + a = """print()""" + self.check(b, a) + + def test_4(self): + # from bug 3000 + b = """print whatever; print""" + a = """print(whatever); print()""" + self.check(b, a) + + def test_5(self): + b = """print; print whatever;""" + a = """print(); print(whatever);""" + self.check(b, a) + + def test_tuple(self): + b = """print (a, b, c)""" + a = """print((a, b, c))""" + self.check(b, a) + + # trailing commas + + def test_trailing_comma_1(self): + b = """print 1, 2, 3,""" + a = """print(1, 2, 3, end=' ')""" + self.check(b, a) + + def test_trailing_comma_2(self): + b = """print 1, 2,""" + a = """print(1, 2, end=' ')""" + self.check(b, a) + + def test_trailing_comma_3(self): + b = """print 1,""" + a = """print(1, end=' ')""" + self.check(b, a) + + # >> stuff + + def test_vargs_without_trailing_comma(self): + b = """print >>sys.stderr, 1, 2, 3""" + a = """print(1, 2, 3, file=sys.stderr)""" + self.check(b, a) + + def test_with_trailing_comma(self): + b = """print >>sys.stderr, 1, 2,""" + a = """print(1, 2, end=' ', file=sys.stderr)""" + self.check(b, a) + + def test_no_trailing_comma(self): + b = """print >>sys.stderr, 1+1""" + a = """print(1+1, file=sys.stderr)""" + self.check(b, a) + + def test_spaces_before_file(self): + b = """print >> sys.stderr""" + a = """print(file=sys.stderr)""" + self.check(b, a) + + def test_with_future_print_function(self): + s = "from __future__ import print_function\n" \ + "print('Hai!', end=' ')" + self.unchanged(s) + + b = "print 'Hello, world!'" + a = "print('Hello, world!')" + self.check(b, a) + + +class Test_exec(FixerTestCase): + fixer = "exec" + + def test_prefix_preservation(self): + b = """ exec code in ns1, ns2""" + a = """ exec(code, ns1, ns2)""" + self.check(b, a) + + def test_basic(self): + b = """exec code""" + a = """exec(code)""" + self.check(b, a) + + def test_with_globals(self): + b = """exec code in ns""" + a = """exec(code, ns)""" + self.check(b, a) + + def test_with_globals_locals(self): + b = """exec code in ns1, ns2""" + a = """exec(code, ns1, ns2)""" + self.check(b, a) + + def test_complex_1(self): + b = """exec (a.b()) in ns""" + a = """exec((a.b()), ns)""" + self.check(b, a) + + def test_complex_2(self): + b = """exec a.b() + c in ns""" + a = 
"""exec(a.b() + c, ns)""" + self.check(b, a) + + # These should not be touched + + def test_unchanged_1(self): + s = """exec(code)""" + self.unchanged(s) + + def test_unchanged_2(self): + s = """exec (code)""" + self.unchanged(s) + + def test_unchanged_3(self): + s = """exec(code, ns)""" + self.unchanged(s) + + def test_unchanged_4(self): + s = """exec(code, ns1, ns2)""" + self.unchanged(s) + +class Test_repr(FixerTestCase): + fixer = "repr" + + def test_prefix_preservation(self): + b = """x = `1 + 2`""" + a = """x = repr(1 + 2)""" + self.check(b, a) + + def test_simple_1(self): + b = """x = `1 + 2`""" + a = """x = repr(1 + 2)""" + self.check(b, a) + + def test_simple_2(self): + b = """y = `x`""" + a = """y = repr(x)""" + self.check(b, a) + + def test_complex(self): + b = """z = `y`.__repr__()""" + a = """z = repr(y).__repr__()""" + self.check(b, a) + + def test_tuple(self): + b = """x = `1, 2, 3`""" + a = """x = repr((1, 2, 3))""" + self.check(b, a) + + def test_nested(self): + b = """x = `1 + `2``""" + a = """x = repr(1 + repr(2))""" + self.check(b, a) + + def test_nested_tuples(self): + b = """x = `1, 2 + `3, 4``""" + a = """x = repr((1, 2 + repr((3, 4))))""" + self.check(b, a) + +class Test_except(FixerTestCase): + fixer = "except" + + def test_prefix_preservation(self): + b = """ + try: + pass + except (RuntimeError, ImportError), e: + pass""" + a = """ + try: + pass + except (RuntimeError, ImportError) as e: + pass""" + self.check(b, a) + + def test_simple(self): + b = """ + try: + pass + except Foo, e: + pass""" + a = """ + try: + pass + except Foo as e: + pass""" + self.check(b, a) + + def test_simple_no_space_before_target(self): + b = """ + try: + pass + except Foo,e: + pass""" + a = """ + try: + pass + except Foo as e: + pass""" + self.check(b, a) + + def test_tuple_unpack(self): + b = """ + def foo(): + try: + pass + except Exception, (f, e): + pass + except ImportError, e: + pass""" + + a = """ + def foo(): + try: + pass + except Exception as xxx_todo_changeme: + (f, e) = xxx_todo_changeme.args + pass + except ImportError as e: + pass""" + self.check(b, a) + + def test_multi_class(self): + b = """ + try: + pass + except (RuntimeError, ImportError), e: + pass""" + + a = """ + try: + pass + except (RuntimeError, ImportError) as e: + pass""" + self.check(b, a) + + def test_list_unpack(self): + b = """ + try: + pass + except Exception, [a, b]: + pass""" + + a = """ + try: + pass + except Exception as xxx_todo_changeme: + [a, b] = xxx_todo_changeme.args + pass""" + self.check(b, a) + + def test_weird_target_1(self): + b = """ + try: + pass + except Exception, d[5]: + pass""" + + a = """ + try: + pass + except Exception as xxx_todo_changeme: + d[5] = xxx_todo_changeme + pass""" + self.check(b, a) + + def test_weird_target_2(self): + b = """ + try: + pass + except Exception, a.foo: + pass""" + + a = """ + try: + pass + except Exception as xxx_todo_changeme: + a.foo = xxx_todo_changeme + pass""" + self.check(b, a) + + def test_weird_target_3(self): + b = """ + try: + pass + except Exception, a().foo: + pass""" + + a = """ + try: + pass + except Exception as xxx_todo_changeme: + a().foo = xxx_todo_changeme + pass""" + self.check(b, a) + + def test_bare_except(self): + b = """ + try: + pass + except Exception, a: + pass + except: + pass""" + + a = """ + try: + pass + except Exception as a: + pass + except: + pass""" + self.check(b, a) + + def test_bare_except_and_else_finally(self): + b = """ + try: + pass + except Exception, a: + pass + except: + pass + else: + pass + finally: + 
pass""" + + a = """ + try: + pass + except Exception as a: + pass + except: + pass + else: + pass + finally: + pass""" + self.check(b, a) + + def test_multi_fixed_excepts_before_bare_except(self): + b = """ + try: + pass + except TypeError, b: + pass + except Exception, a: + pass + except: + pass""" + + a = """ + try: + pass + except TypeError as b: + pass + except Exception as a: + pass + except: + pass""" + self.check(b, a) + + def test_one_line_suites(self): + b = """ + try: raise TypeError + except TypeError, e: + pass + """ + a = """ + try: raise TypeError + except TypeError as e: + pass + """ + self.check(b, a) + b = """ + try: + raise TypeError + except TypeError, e: pass + """ + a = """ + try: + raise TypeError + except TypeError as e: pass + """ + self.check(b, a) + b = """ + try: raise TypeError + except TypeError, e: pass + """ + a = """ + try: raise TypeError + except TypeError as e: pass + """ + self.check(b, a) + b = """ + try: raise TypeError + except TypeError, e: pass + else: function() + finally: done() + """ + a = """ + try: raise TypeError + except TypeError as e: pass + else: function() + finally: done() + """ + self.check(b, a) + + # These should not be touched: + + def test_unchanged_1(self): + s = """ + try: + pass + except: + pass""" + self.unchanged(s) + + def test_unchanged_2(self): + s = """ + try: + pass + except Exception: + pass""" + self.unchanged(s) + + def test_unchanged_3(self): + s = """ + try: + pass + except (Exception, SystemExit): + pass""" + self.unchanged(s) + +class Test_raise(FixerTestCase): + fixer = "raise" + + def test_basic(self): + b = """raise Exception, 5""" + a = """raise Exception(5)""" + self.check(b, a) + + def test_prefix_preservation(self): + b = """raise Exception,5""" + a = """raise Exception(5)""" + self.check(b, a) + + b = """raise Exception, 5""" + a = """raise Exception(5)""" + self.check(b, a) + + def test_with_comments(self): + b = """raise Exception, 5 # foo""" + a = """raise Exception(5) # foo""" + self.check(b, a) + + b = """raise E, (5, 6) % (a, b) # foo""" + a = """raise E((5, 6) % (a, b)) # foo""" + self.check(b, a) + + b = """def foo(): + raise Exception, 5, 6 # foo""" + a = """def foo(): + raise Exception(5).with_traceback(6) # foo""" + self.check(b, a) + + def test_None_value(self): + b = """raise Exception(5), None, tb""" + a = """raise Exception(5).with_traceback(tb)""" + self.check(b, a) + + def test_tuple_value(self): + b = """raise Exception, (5, 6, 7)""" + a = """raise Exception(5, 6, 7)""" + self.check(b, a) + + def test_tuple_detection(self): + b = """raise E, (5, 6) % (a, b)""" + a = """raise E((5, 6) % (a, b))""" + self.check(b, a) + + def test_tuple_exc_1(self): + b = """raise (((E1, E2), E3), E4), V""" + a = """raise E1(V)""" + self.check(b, a) + + def test_tuple_exc_2(self): + b = """raise (E1, (E2, E3), E4), V""" + a = """raise E1(V)""" + self.check(b, a) + + # These should produce a warning + + def test_string_exc(self): + s = """raise 'foo'""" + self.warns_unchanged(s, "Python 3 does not support string exceptions") + + def test_string_exc_val(self): + s = """raise "foo", 5""" + self.warns_unchanged(s, "Python 3 does not support string exceptions") + + def test_string_exc_val_tb(self): + s = """raise "foo", 5, 6""" + self.warns_unchanged(s, "Python 3 does not support string exceptions") + + # These should result in traceback-assignment + + def test_tb_1(self): + b = """def foo(): + raise Exception, 5, 6""" + a = """def foo(): + raise Exception(5).with_traceback(6)""" + self.check(b, a) + + def 
test_tb_2(self): + b = """def foo(): + a = 5 + raise Exception, 5, 6 + b = 6""" + a = """def foo(): + a = 5 + raise Exception(5).with_traceback(6) + b = 6""" + self.check(b, a) + + def test_tb_3(self): + b = """def foo(): + raise Exception,5,6""" + a = """def foo(): + raise Exception(5).with_traceback(6)""" + self.check(b, a) + + def test_tb_4(self): + b = """def foo(): + a = 5 + raise Exception,5,6 + b = 6""" + a = """def foo(): + a = 5 + raise Exception(5).with_traceback(6) + b = 6""" + self.check(b, a) + + def test_tb_5(self): + b = """def foo(): + raise Exception, (5, 6, 7), 6""" + a = """def foo(): + raise Exception(5, 6, 7).with_traceback(6)""" + self.check(b, a) + + def test_tb_6(self): + b = """def foo(): + a = 5 + raise Exception, (5, 6, 7), 6 + b = 6""" + a = """def foo(): + a = 5 + raise Exception(5, 6, 7).with_traceback(6) + b = 6""" + self.check(b, a) + +class Test_throw(FixerTestCase): + fixer = "throw" + + def test_1(self): + b = """g.throw(Exception, 5)""" + a = """g.throw(Exception(5))""" + self.check(b, a) + + def test_2(self): + b = """g.throw(Exception,5)""" + a = """g.throw(Exception(5))""" + self.check(b, a) + + def test_3(self): + b = """g.throw(Exception, (5, 6, 7))""" + a = """g.throw(Exception(5, 6, 7))""" + self.check(b, a) + + def test_4(self): + b = """5 + g.throw(Exception, 5)""" + a = """5 + g.throw(Exception(5))""" + self.check(b, a) + + # These should produce warnings + + def test_warn_1(self): + s = """g.throw("foo")""" + self.warns_unchanged(s, "Python 3 does not support string exceptions") + + def test_warn_2(self): + s = """g.throw("foo", 5)""" + self.warns_unchanged(s, "Python 3 does not support string exceptions") + + def test_warn_3(self): + s = """g.throw("foo", 5, 6)""" + self.warns_unchanged(s, "Python 3 does not support string exceptions") + + # These should not be touched + + def test_untouched_1(self): + s = """g.throw(Exception)""" + self.unchanged(s) + + def test_untouched_2(self): + s = """g.throw(Exception(5, 6))""" + self.unchanged(s) + + def test_untouched_3(self): + s = """5 + g.throw(Exception(5, 6))""" + self.unchanged(s) + + # These should result in traceback-assignment + + def test_tb_1(self): + b = """def foo(): + g.throw(Exception, 5, 6)""" + a = """def foo(): + g.throw(Exception(5).with_traceback(6))""" + self.check(b, a) + + def test_tb_2(self): + b = """def foo(): + a = 5 + g.throw(Exception, 5, 6) + b = 6""" + a = """def foo(): + a = 5 + g.throw(Exception(5).with_traceback(6)) + b = 6""" + self.check(b, a) + + def test_tb_3(self): + b = """def foo(): + g.throw(Exception,5,6)""" + a = """def foo(): + g.throw(Exception(5).with_traceback(6))""" + self.check(b, a) + + def test_tb_4(self): + b = """def foo(): + a = 5 + g.throw(Exception,5,6) + b = 6""" + a = """def foo(): + a = 5 + g.throw(Exception(5).with_traceback(6)) + b = 6""" + self.check(b, a) + + def test_tb_5(self): + b = """def foo(): + g.throw(Exception, (5, 6, 7), 6)""" + a = """def foo(): + g.throw(Exception(5, 6, 7).with_traceback(6))""" + self.check(b, a) + + def test_tb_6(self): + b = """def foo(): + a = 5 + g.throw(Exception, (5, 6, 7), 6) + b = 6""" + a = """def foo(): + a = 5 + g.throw(Exception(5, 6, 7).with_traceback(6)) + b = 6""" + self.check(b, a) + + def test_tb_7(self): + b = """def foo(): + a + g.throw(Exception, 5, 6)""" + a = """def foo(): + a + g.throw(Exception(5).with_traceback(6))""" + self.check(b, a) + + def test_tb_8(self): + b = """def foo(): + a = 5 + a + g.throw(Exception, 5, 6) + b = 6""" + a = """def foo(): + a = 5 + a + 
g.throw(Exception(5).with_traceback(6)) + b = 6""" + self.check(b, a) + +class Test_long(FixerTestCase): + fixer = "long" + + def test_1(self): + b = """x = long(x)""" + a = """x = int(x)""" + self.check(b, a) + + def test_2(self): + b = """y = isinstance(x, long)""" + a = """y = isinstance(x, int)""" + self.check(b, a) + + def test_3(self): + b = """z = type(x) in (int, long)""" + a = """z = type(x) in (int, int)""" + self.check(b, a) + + def test_unchanged(self): + s = """long = True""" + self.unchanged(s) + + s = """s.long = True""" + self.unchanged(s) + + s = """def long(): pass""" + self.unchanged(s) + + s = """class long(): pass""" + self.unchanged(s) + + s = """def f(long): pass""" + self.unchanged(s) + + s = """def f(g, long): pass""" + self.unchanged(s) + + s = """def f(x, long=True): pass""" + self.unchanged(s) + + def test_prefix_preservation(self): + b = """x = long( x )""" + a = """x = int( x )""" + self.check(b, a) + + +class Test_execfile(FixerTestCase): + fixer = "execfile" + + def test_conversion(self): + b = """execfile("fn")""" + a = """exec(compile(open("fn", "rb").read(), "fn", 'exec'))""" + self.check(b, a) + + b = """execfile("fn", glob)""" + a = """exec(compile(open("fn", "rb").read(), "fn", 'exec'), glob)""" + self.check(b, a) + + b = """execfile("fn", glob, loc)""" + a = """exec(compile(open("fn", "rb").read(), "fn", 'exec'), glob, loc)""" + self.check(b, a) + + b = """execfile("fn", globals=glob)""" + a = """exec(compile(open("fn", "rb").read(), "fn", 'exec'), globals=glob)""" + self.check(b, a) + + b = """execfile("fn", locals=loc)""" + a = """exec(compile(open("fn", "rb").read(), "fn", 'exec'), locals=loc)""" + self.check(b, a) + + b = """execfile("fn", globals=glob, locals=loc)""" + a = """exec(compile(open("fn", "rb").read(), "fn", 'exec'), globals=glob, locals=loc)""" + self.check(b, a) + + def test_spacing(self): + b = """execfile( "fn" )""" + a = """exec(compile(open( "fn", "rb" ).read(), "fn", 'exec'))""" + self.check(b, a) + + b = """execfile("fn", globals = glob)""" + a = """exec(compile(open("fn", "rb").read(), "fn", 'exec'), globals = glob)""" + self.check(b, a) + + +class Test_isinstance(FixerTestCase): + fixer = "isinstance" + + def test_remove_multiple_items(self): + b = """isinstance(x, (int, int, int))""" + a = """isinstance(x, int)""" + self.check(b, a) + + b = """isinstance(x, (int, float, int, int, float))""" + a = """isinstance(x, (int, float))""" + self.check(b, a) + + b = """isinstance(x, (int, float, int, int, float, str))""" + a = """isinstance(x, (int, float, str))""" + self.check(b, a) + + b = """isinstance(foo() + bar(), (x(), y(), x(), int, int))""" + a = """isinstance(foo() + bar(), (x(), y(), x(), int))""" + self.check(b, a) + + def test_prefix_preservation(self): + b = """if isinstance( foo(), ( bar, bar, baz )) : pass""" + a = """if isinstance( foo(), ( bar, baz )) : pass""" + self.check(b, a) + + def test_unchanged(self): + self.unchanged("isinstance(x, (str, int))") + +class Test_dict(FixerTestCase): + fixer = "dict" + + def test_prefix_preservation(self): + b = "if d. keys ( ) : pass" + a = "if list(d. keys ( )) : pass" + self.check(b, a) + + b = "if d. items ( ) : pass" + a = "if list(d. items ( )) : pass" + self.check(b, a) + + b = "if d. iterkeys ( ) : pass" + a = "if iter(d. keys ( )) : pass" + self.check(b, a) + + b = "[i for i in d. iterkeys( ) ]" + a = "[i for i in d. keys( ) ]" + self.check(b, a) + + b = "if d. viewkeys ( ) : pass" + a = "if d. keys ( ) : pass" + self.check(b, a) + + b = "[i for i in d. 
viewkeys( ) ]" + a = "[i for i in d. keys( ) ]" + self.check(b, a) + + def test_trailing_comment(self): + b = "d.keys() # foo" + a = "list(d.keys()) # foo" + self.check(b, a) + + b = "d.items() # foo" + a = "list(d.items()) # foo" + self.check(b, a) + + b = "d.iterkeys() # foo" + a = "iter(d.keys()) # foo" + self.check(b, a) + + b = """[i for i in d.iterkeys() # foo + ]""" + a = """[i for i in d.keys() # foo + ]""" + self.check(b, a) + + b = """[i for i in d.iterkeys() # foo + ]""" + a = """[i for i in d.keys() # foo + ]""" + self.check(b, a) + + b = "d.viewitems() # foo" + a = "d.items() # foo" + self.check(b, a) + + def test_unchanged(self): + for wrapper in fixer_util.consuming_calls: + s = "s = %s(d.keys())" % wrapper + self.unchanged(s) + + s = "s = %s(d.values())" % wrapper + self.unchanged(s) + + s = "s = %s(d.items())" % wrapper + self.unchanged(s) + + def test_01(self): + b = "d.keys()" + a = "list(d.keys())" + self.check(b, a) + + b = "a[0].foo().keys()" + a = "list(a[0].foo().keys())" + self.check(b, a) + + def test_02(self): + b = "d.items()" + a = "list(d.items())" + self.check(b, a) + + def test_03(self): + b = "d.values()" + a = "list(d.values())" + self.check(b, a) + + def test_04(self): + b = "d.iterkeys()" + a = "iter(d.keys())" + self.check(b, a) + + def test_05(self): + b = "d.iteritems()" + a = "iter(d.items())" + self.check(b, a) + + def test_06(self): + b = "d.itervalues()" + a = "iter(d.values())" + self.check(b, a) + + def test_07(self): + s = "list(d.keys())" + self.unchanged(s) + + def test_08(self): + s = "sorted(d.keys())" + self.unchanged(s) + + def test_09(self): + b = "iter(d.keys())" + a = "iter(list(d.keys()))" + self.check(b, a) + + def test_10(self): + b = "foo(d.keys())" + a = "foo(list(d.keys()))" + self.check(b, a) + + def test_11(self): + b = "for i in d.keys(): print i" + a = "for i in list(d.keys()): print i" + self.check(b, a) + + def test_12(self): + b = "for i in d.iterkeys(): print i" + a = "for i in d.keys(): print i" + self.check(b, a) + + def test_13(self): + b = "[i for i in d.keys()]" + a = "[i for i in list(d.keys())]" + self.check(b, a) + + def test_14(self): + b = "[i for i in d.iterkeys()]" + a = "[i for i in d.keys()]" + self.check(b, a) + + def test_15(self): + b = "(i for i in d.keys())" + a = "(i for i in list(d.keys()))" + self.check(b, a) + + def test_16(self): + b = "(i for i in d.iterkeys())" + a = "(i for i in d.keys())" + self.check(b, a) + + def test_17(self): + b = "iter(d.iterkeys())" + a = "iter(d.keys())" + self.check(b, a) + + def test_18(self): + b = "list(d.iterkeys())" + a = "list(d.keys())" + self.check(b, a) + + def test_19(self): + b = "sorted(d.iterkeys())" + a = "sorted(d.keys())" + self.check(b, a) + + def test_20(self): + b = "foo(d.iterkeys())" + a = "foo(iter(d.keys()))" + self.check(b, a) + + def test_21(self): + b = "print h.iterkeys().next()" + a = "print iter(h.keys()).next()" + self.check(b, a) + + def test_22(self): + b = "print h.keys()[0]" + a = "print list(h.keys())[0]" + self.check(b, a) + + def test_23(self): + b = "print list(h.iterkeys().next())" + a = "print list(iter(h.keys()).next())" + self.check(b, a) + + def test_24(self): + b = "for x in h.keys()[0]: print x" + a = "for x in list(h.keys())[0]: print x" + self.check(b, a) + + def test_25(self): + b = "d.viewkeys()" + a = "d.keys()" + self.check(b, a) + + def test_26(self): + b = "d.viewitems()" + a = "d.items()" + self.check(b, a) + + def test_27(self): + b = "d.viewvalues()" + a = "d.values()" + self.check(b, a) + + def test_28(self): + b 
= "[i for i in d.viewkeys()]" + a = "[i for i in d.keys()]" + self.check(b, a) + + def test_29(self): + b = "(i for i in d.viewkeys())" + a = "(i for i in d.keys())" + self.check(b, a) + + def test_30(self): + b = "iter(d.viewkeys())" + a = "iter(d.keys())" + self.check(b, a) + + def test_31(self): + b = "list(d.viewkeys())" + a = "list(d.keys())" + self.check(b, a) + + def test_32(self): + b = "sorted(d.viewkeys())" + a = "sorted(d.keys())" + self.check(b, a) + +class Test_xrange(FixerTestCase): + fixer = "xrange" + + def test_prefix_preservation(self): + b = """x = xrange( 10 )""" + a = """x = range( 10 )""" + self.check(b, a) + + b = """x = xrange( 1 , 10 )""" + a = """x = range( 1 , 10 )""" + self.check(b, a) + + b = """x = xrange( 0 , 10 , 2 )""" + a = """x = range( 0 , 10 , 2 )""" + self.check(b, a) + + def test_single_arg(self): + b = """x = xrange(10)""" + a = """x = range(10)""" + self.check(b, a) + + def test_two_args(self): + b = """x = xrange(1, 10)""" + a = """x = range(1, 10)""" + self.check(b, a) + + def test_three_args(self): + b = """x = xrange(0, 10, 2)""" + a = """x = range(0, 10, 2)""" + self.check(b, a) + + def test_wrap_in_list(self): + b = """x = range(10, 3, 9)""" + a = """x = list(range(10, 3, 9))""" + self.check(b, a) + + b = """x = foo(range(10, 3, 9))""" + a = """x = foo(list(range(10, 3, 9)))""" + self.check(b, a) + + b = """x = range(10, 3, 9) + [4]""" + a = """x = list(range(10, 3, 9)) + [4]""" + self.check(b, a) + + b = """x = range(10)[::-1]""" + a = """x = list(range(10))[::-1]""" + self.check(b, a) + + b = """x = range(10) [3]""" + a = """x = list(range(10)) [3]""" + self.check(b, a) + + def test_xrange_in_for(self): + b = """for i in xrange(10):\n j=i""" + a = """for i in range(10):\n j=i""" + self.check(b, a) + + b = """[i for i in xrange(10)]""" + a = """[i for i in range(10)]""" + self.check(b, a) + + def test_range_in_for(self): + self.unchanged("for i in range(10): pass") + self.unchanged("[i for i in range(10)]") + + def test_in_contains_test(self): + self.unchanged("x in range(10, 3, 9)") + + def test_in_consuming_context(self): + for call in fixer_util.consuming_calls: + self.unchanged("a = %s(range(10))" % call) + +class Test_xrange_with_reduce(FixerTestCase): + + def setUp(self): + super(Test_xrange_with_reduce, self).setUp(["xrange", "reduce"]) + + def test_double_transform(self): + b = """reduce(x, xrange(5))""" + a = """from functools import reduce +reduce(x, range(5))""" + self.check(b, a) + +class Test_raw_input(FixerTestCase): + fixer = "raw_input" + + def test_prefix_preservation(self): + b = """x = raw_input( )""" + a = """x = input( )""" + self.check(b, a) + + b = """x = raw_input( '' )""" + a = """x = input( '' )""" + self.check(b, a) + + def test_1(self): + b = """x = raw_input()""" + a = """x = input()""" + self.check(b, a) + + def test_2(self): + b = """x = raw_input('')""" + a = """x = input('')""" + self.check(b, a) + + def test_3(self): + b = """x = raw_input('prompt')""" + a = """x = input('prompt')""" + self.check(b, a) + + def test_4(self): + b = """x = raw_input(foo(a) + 6)""" + a = """x = input(foo(a) + 6)""" + self.check(b, a) + + def test_5(self): + b = """x = raw_input(invite).split()""" + a = """x = input(invite).split()""" + self.check(b, a) + + def test_6(self): + b = """x = raw_input(invite) . split ()""" + a = """x = input(invite) . 
split ()""" + self.check(b, a) + + def test_8(self): + b = "x = int(raw_input())" + a = "x = int(input())" + self.check(b, a) + +class Test_funcattrs(FixerTestCase): + fixer = "funcattrs" + + attrs = ["closure", "doc", "name", "defaults", "code", "globals", "dict"] + + def test(self): + for attr in self.attrs: + b = "a.func_%s" % attr + a = "a.__%s__" % attr + self.check(b, a) + + b = "self.foo.func_%s.foo_bar" % attr + a = "self.foo.__%s__.foo_bar" % attr + self.check(b, a) + + def test_unchanged(self): + for attr in self.attrs: + s = "foo(func_%s + 5)" % attr + self.unchanged(s) + + s = "f(foo.__%s__)" % attr + self.unchanged(s) + + s = "f(foo.__%s__.foo)" % attr + self.unchanged(s) + +class Test_xreadlines(FixerTestCase): + fixer = "xreadlines" + + def test_call(self): + b = "for x in f.xreadlines(): pass" + a = "for x in f: pass" + self.check(b, a) + + b = "for x in foo().xreadlines(): pass" + a = "for x in foo(): pass" + self.check(b, a) + + b = "for x in (5 + foo()).xreadlines(): pass" + a = "for x in (5 + foo()): pass" + self.check(b, a) + + def test_attr_ref(self): + b = "foo(f.xreadlines + 5)" + a = "foo(f.__iter__ + 5)" + self.check(b, a) + + b = "foo(f().xreadlines + 5)" + a = "foo(f().__iter__ + 5)" + self.check(b, a) + + b = "foo((5 + f()).xreadlines + 5)" + a = "foo((5 + f()).__iter__ + 5)" + self.check(b, a) + + def test_unchanged(self): + s = "for x in f.xreadlines(5): pass" + self.unchanged(s) + + s = "for x in f.xreadlines(k=5): pass" + self.unchanged(s) + + s = "for x in f.xreadlines(*k, **v): pass" + self.unchanged(s) + + s = "foo(xreadlines)" + self.unchanged(s) + + +class ImportsFixerTests: + + def test_import_module(self): + for old, new in self.modules.items(): + b = "import %s" % old + a = "import %s" % new + self.check(b, a) + + b = "import foo, %s, bar" % old + a = "import foo, %s, bar" % new + self.check(b, a) + + def test_import_from(self): + for old, new in self.modules.items(): + b = "from %s import foo" % old + a = "from %s import foo" % new + self.check(b, a) + + b = "from %s import foo, bar" % old + a = "from %s import foo, bar" % new + self.check(b, a) + + b = "from %s import (yes, no)" % old + a = "from %s import (yes, no)" % new + self.check(b, a) + + def test_import_module_as(self): + for old, new in self.modules.items(): + b = "import %s as foo_bar" % old + a = "import %s as foo_bar" % new + self.check(b, a) + + b = "import %s as foo_bar" % old + a = "import %s as foo_bar" % new + self.check(b, a) + + def test_import_from_as(self): + for old, new in self.modules.items(): + b = "from %s import foo as bar" % old + a = "from %s import foo as bar" % new + self.check(b, a) + + def test_star(self): + for old, new in self.modules.items(): + b = "from %s import *" % old + a = "from %s import *" % new + self.check(b, a) + + def test_import_module_usage(self): + for old, new in self.modules.items(): + b = """ + import %s + foo(%s.bar) + """ % (old, old) + a = """ + import %s + foo(%s.bar) + """ % (new, new) + self.check(b, a) + + b = """ + from %s import x + %s = 23 + """ % (old, old) + a = """ + from %s import x + %s = 23 + """ % (new, old) + self.check(b, a) + + s = """ + def f(): + %s.method() + """ % (old,) + self.unchanged(s) + + # test nested usage + b = """ + import %s + %s.bar(%s.foo) + """ % (old, old, old) + a = """ + import %s + %s.bar(%s.foo) + """ % (new, new, new) + self.check(b, a) + + b = """ + import %s + x.%s + """ % (old, old) + a = """ + import %s + x.%s + """ % (new, old) + self.check(b, a) + + +class Test_imports(FixerTestCase, 
ImportsFixerTests): + fixer = "imports" + from ..fixes.fix_imports import MAPPING as modules + + def test_multiple_imports(self): + b = """import urlparse, cStringIO""" + a = """import urllib.parse, io""" + self.check(b, a) + + def test_multiple_imports_as(self): + b = """ + import copy_reg as bar, HTMLParser as foo, urlparse + s = urlparse.spam(bar.foo()) + """ + a = """ + import copyreg as bar, html.parser as foo, urllib.parse + s = urllib.parse.spam(bar.foo()) + """ + self.check(b, a) + + +class Test_imports2(FixerTestCase, ImportsFixerTests): + fixer = "imports2" + from ..fixes.fix_imports2 import MAPPING as modules + + +class Test_imports_fixer_order(FixerTestCase, ImportsFixerTests): + + def setUp(self): + super(Test_imports_fixer_order, self).setUp(['imports', 'imports2']) + from ..fixes.fix_imports2 import MAPPING as mapping2 + self.modules = mapping2.copy() + from ..fixes.fix_imports import MAPPING as mapping1 + for key in ('dbhash', 'dumbdbm', 'dbm', 'gdbm'): + self.modules[key] = mapping1[key] + + def test_after_local_imports_refactoring(self): + for fix in ("imports", "imports2"): + self.fixer = fix + self.assert_runs_after("import") + + +class Test_urllib(FixerTestCase): + fixer = "urllib" + from ..fixes.fix_urllib import MAPPING as modules + + def test_import_module(self): + for old, changes in self.modules.items(): + b = "import %s" % old + a = "import %s" % ", ".join(map(itemgetter(0), changes)) + self.check(b, a) + + def test_import_from(self): + for old, changes in self.modules.items(): + all_members = [] + for new, members in changes: + for member in members: + all_members.append(member) + b = "from %s import %s" % (old, member) + a = "from %s import %s" % (new, member) + self.check(b, a) + + s = "from foo import %s" % member + self.unchanged(s) + + b = "from %s import %s" % (old, ", ".join(members)) + a = "from %s import %s" % (new, ", ".join(members)) + self.check(b, a) + + s = "from foo import %s" % ", ".join(members) + self.unchanged(s) + + # test the breaking of a module into multiple replacements + b = "from %s import %s" % (old, ", ".join(all_members)) + a = "\n".join(["from %s import %s" % (new, ", ".join(members)) + for (new, members) in changes]) + self.check(b, a) + + def test_import_module_as(self): + for old in self.modules: + s = "import %s as foo" % old + self.warns_unchanged(s, "This module is now multiple modules") + + def test_import_from_as(self): + for old, changes in self.modules.items(): + for new, members in changes: + for member in members: + b = "from %s import %s as foo_bar" % (old, member) + a = "from %s import %s as foo_bar" % (new, member) + self.check(b, a) + b = "from %s import %s as blah, %s" % (old, member, member) + a = "from %s import %s as blah, %s" % (new, member, member) + self.check(b, a) + + def test_star(self): + for old in self.modules: + s = "from %s import *" % old + self.warns_unchanged(s, "Cannot handle star imports") + + def test_indented(self): + b = """ +def foo(): + from urllib import urlencode, urlopen +""" + a = """ +def foo(): + from urllib.parse import urlencode + from urllib.request import urlopen +""" + self.check(b, a) + + b = """ +def foo(): + other() + from urllib import urlencode, urlopen +""" + a = """ +def foo(): + other() + from urllib.parse import urlencode + from urllib.request import urlopen +""" + self.check(b, a) + + def test_single_import(self): + b = "from urllib import getproxies" + a = "from urllib.request import getproxies" + + self.check(b, a) + + def test_import_module_usage(self): + for old, 
changes in self.modules.items(): + for new, members in changes: + for member in members: + new_import = ", ".join([n for (n, mems) + in self.modules[old]]) + b = """ + import %s + foo(%s.%s) + """ % (old, old, member) + a = """ + import %s + foo(%s.%s) + """ % (new_import, new, member) + self.check(b, a) + b = """ + import %s + %s.%s(%s.%s) + """ % (old, old, member, old, member) + a = """ + import %s + %s.%s(%s.%s) + """ % (new_import, new, member, new, member) + self.check(b, a) + + +class Test_input(FixerTestCase): + fixer = "input" + + def test_prefix_preservation(self): + b = """x = input( )""" + a = """x = eval(input( ))""" + self.check(b, a) + + b = """x = input( '' )""" + a = """x = eval(input( '' ))""" + self.check(b, a) + + def test_trailing_comment(self): + b = """x = input() # foo""" + a = """x = eval(input()) # foo""" + self.check(b, a) + + def test_idempotency(self): + s = """x = eval(input())""" + self.unchanged(s) + + s = """x = eval(input(''))""" + self.unchanged(s) + + s = """x = eval(input(foo(5) + 9))""" + self.unchanged(s) + + def test_1(self): + b = """x = input()""" + a = """x = eval(input())""" + self.check(b, a) + + def test_2(self): + b = """x = input('')""" + a = """x = eval(input(''))""" + self.check(b, a) + + def test_3(self): + b = """x = input('prompt')""" + a = """x = eval(input('prompt'))""" + self.check(b, a) + + def test_4(self): + b = """x = input(foo(5) + 9)""" + a = """x = eval(input(foo(5) + 9))""" + self.check(b, a) + +class Test_tuple_params(FixerTestCase): + fixer = "tuple_params" + + def test_unchanged_1(self): + s = """def foo(): pass""" + self.unchanged(s) + + def test_unchanged_2(self): + s = """def foo(a, b, c): pass""" + self.unchanged(s) + + def test_unchanged_3(self): + s = """def foo(a=3, b=4, c=5): pass""" + self.unchanged(s) + + def test_1(self): + b = """ + def foo(((a, b), c)): + x = 5""" + + a = """ + def foo(xxx_todo_changeme): + ((a, b), c) = xxx_todo_changeme + x = 5""" + self.check(b, a) + + def test_2(self): + b = """ + def foo(((a, b), c), d): + x = 5""" + + a = """ + def foo(xxx_todo_changeme, d): + ((a, b), c) = xxx_todo_changeme + x = 5""" + self.check(b, a) + + def test_3(self): + b = """ + def foo(((a, b), c), d) -> e: + x = 5""" + + a = """ + def foo(xxx_todo_changeme, d) -> e: + ((a, b), c) = xxx_todo_changeme + x = 5""" + self.check(b, a) + + def test_semicolon(self): + b = """ + def foo(((a, b), c)): x = 5; y = 7""" + + a = """ + def foo(xxx_todo_changeme): ((a, b), c) = xxx_todo_changeme; x = 5; y = 7""" + self.check(b, a) + + def test_keywords(self): + b = """ + def foo(((a, b), c), d, e=5) -> z: + x = 5""" + + a = """ + def foo(xxx_todo_changeme, d, e=5) -> z: + ((a, b), c) = xxx_todo_changeme + x = 5""" + self.check(b, a) + + def test_varargs(self): + b = """ + def foo(((a, b), c), d, *vargs, **kwargs) -> z: + x = 5""" + + a = """ + def foo(xxx_todo_changeme, d, *vargs, **kwargs) -> z: + ((a, b), c) = xxx_todo_changeme + x = 5""" + self.check(b, a) + + def test_multi_1(self): + b = """ + def foo(((a, b), c), (d, e, f)) -> z: + x = 5""" + + a = """ + def foo(xxx_todo_changeme, xxx_todo_changeme1) -> z: + ((a, b), c) = xxx_todo_changeme + (d, e, f) = xxx_todo_changeme1 + x = 5""" + self.check(b, a) + + def test_multi_2(self): + b = """ + def foo(x, ((a, b), c), d, (e, f, g), y) -> z: + x = 5""" + + a = """ + def foo(x, xxx_todo_changeme, d, xxx_todo_changeme1, y) -> z: + ((a, b), c) = xxx_todo_changeme + (e, f, g) = xxx_todo_changeme1 + x = 5""" + self.check(b, a) + + def test_docstring(self): + b = """ + def foo(((a, 
b), c), (d, e, f)) -> z: + "foo foo foo foo" + x = 5""" + + a = """ + def foo(xxx_todo_changeme, xxx_todo_changeme1) -> z: + "foo foo foo foo" + ((a, b), c) = xxx_todo_changeme + (d, e, f) = xxx_todo_changeme1 + x = 5""" + self.check(b, a) + + def test_lambda_no_change(self): + s = """lambda x: x + 5""" + self.unchanged(s) + + def test_lambda_parens_single_arg(self): + b = """lambda (x): x + 5""" + a = """lambda x: x + 5""" + self.check(b, a) + + b = """lambda(x): x + 5""" + a = """lambda x: x + 5""" + self.check(b, a) + + b = """lambda ((((x)))): x + 5""" + a = """lambda x: x + 5""" + self.check(b, a) + + b = """lambda((((x)))): x + 5""" + a = """lambda x: x + 5""" + self.check(b, a) + + def test_lambda_simple(self): + b = """lambda (x, y): x + f(y)""" + a = """lambda x_y: x_y[0] + f(x_y[1])""" + self.check(b, a) + + b = """lambda(x, y): x + f(y)""" + a = """lambda x_y: x_y[0] + f(x_y[1])""" + self.check(b, a) + + b = """lambda (((x, y))): x + f(y)""" + a = """lambda x_y: x_y[0] + f(x_y[1])""" + self.check(b, a) + + b = """lambda(((x, y))): x + f(y)""" + a = """lambda x_y: x_y[0] + f(x_y[1])""" + self.check(b, a) + + def test_lambda_one_tuple(self): + b = """lambda (x,): x + f(x)""" + a = """lambda x1: x1[0] + f(x1[0])""" + self.check(b, a) + + b = """lambda (((x,))): x + f(x)""" + a = """lambda x1: x1[0] + f(x1[0])""" + self.check(b, a) + + def test_lambda_simple_multi_use(self): + b = """lambda (x, y): x + x + f(x) + x""" + a = """lambda x_y: x_y[0] + x_y[0] + f(x_y[0]) + x_y[0]""" + self.check(b, a) + + def test_lambda_simple_reverse(self): + b = """lambda (x, y): y + x""" + a = """lambda x_y: x_y[1] + x_y[0]""" + self.check(b, a) + + def test_lambda_nested(self): + b = """lambda (x, (y, z)): x + y + z""" + a = """lambda x_y_z: x_y_z[0] + x_y_z[1][0] + x_y_z[1][1]""" + self.check(b, a) + + b = """lambda (((x, (y, z)))): x + y + z""" + a = """lambda x_y_z: x_y_z[0] + x_y_z[1][0] + x_y_z[1][1]""" + self.check(b, a) + + def test_lambda_nested_multi_use(self): + b = """lambda (x, (y, z)): x + y + f(y)""" + a = """lambda x_y_z: x_y_z[0] + x_y_z[1][0] + f(x_y_z[1][0])""" + self.check(b, a) + +class Test_methodattrs(FixerTestCase): + fixer = "methodattrs" + + attrs = ["func", "self", "class"] + + def test(self): + for attr in self.attrs: + b = "a.im_%s" % attr + if attr == "class": + a = "a.__self__.__class__" + else: + a = "a.__%s__" % attr + self.check(b, a) + + b = "self.foo.im_%s.foo_bar" % attr + if attr == "class": + a = "self.foo.__self__.__class__.foo_bar" + else: + a = "self.foo.__%s__.foo_bar" % attr + self.check(b, a) + + def test_unchanged(self): + for attr in self.attrs: + s = "foo(im_%s + 5)" % attr + self.unchanged(s) + + s = "f(foo.__%s__)" % attr + self.unchanged(s) + + s = "f(foo.__%s__.foo)" % attr + self.unchanged(s) + +class Test_next(FixerTestCase): + fixer = "next" + + def test_1(self): + b = """it.next()""" + a = """next(it)""" + self.check(b, a) + + def test_2(self): + b = """a.b.c.d.next()""" + a = """next(a.b.c.d)""" + self.check(b, a) + + def test_3(self): + b = """(a + b).next()""" + a = """next((a + b))""" + self.check(b, a) + + def test_4(self): + b = """a().next()""" + a = """next(a())""" + self.check(b, a) + + def test_5(self): + b = """a().next() + b""" + a = """next(a()) + b""" + self.check(b, a) + + def test_6(self): + b = """c( a().next() + b)""" + a = """c( next(a()) + b)""" + self.check(b, a) + + def test_prefix_preservation_1(self): + b = """ + for a in b: + foo(a) + a.next() + """ + a = """ + for a in b: + foo(a) + next(a) + """ + self.check(b, a) + + 
def test_prefix_preservation_2(self): + b = """ + for a in b: + foo(a) # abc + # def + a.next() + """ + a = """ + for a in b: + foo(a) # abc + # def + next(a) + """ + self.check(b, a) + + def test_prefix_preservation_3(self): + b = """ + next = 5 + for a in b: + foo(a) + a.next() + """ + a = """ + next = 5 + for a in b: + foo(a) + a.__next__() + """ + self.check(b, a, ignore_warnings=True) + + def test_prefix_preservation_4(self): + b = """ + next = 5 + for a in b: + foo(a) # abc + # def + a.next() + """ + a = """ + next = 5 + for a in b: + foo(a) # abc + # def + a.__next__() + """ + self.check(b, a, ignore_warnings=True) + + def test_prefix_preservation_5(self): + b = """ + next = 5 + for a in b: + foo(foo(a), # abc + a.next()) + """ + a = """ + next = 5 + for a in b: + foo(foo(a), # abc + a.__next__()) + """ + self.check(b, a, ignore_warnings=True) + + def test_prefix_preservation_6(self): + b = """ + for a in b: + foo(foo(a), # abc + a.next()) + """ + a = """ + for a in b: + foo(foo(a), # abc + next(a)) + """ + self.check(b, a) + + def test_method_1(self): + b = """ + class A: + def next(self): + pass + """ + a = """ + class A: + def __next__(self): + pass + """ + self.check(b, a) + + def test_method_2(self): + b = """ + class A(object): + def next(self): + pass + """ + a = """ + class A(object): + def __next__(self): + pass + """ + self.check(b, a) + + def test_method_3(self): + b = """ + class A: + def next(x): + pass + """ + a = """ + class A: + def __next__(x): + pass + """ + self.check(b, a) + + def test_method_4(self): + b = """ + class A: + def __init__(self, foo): + self.foo = foo + + def next(self): + pass + + def __iter__(self): + return self + """ + a = """ + class A: + def __init__(self, foo): + self.foo = foo + + def __next__(self): + pass + + def __iter__(self): + return self + """ + self.check(b, a) + + def test_method_unchanged(self): + s = """ + class A: + def next(self, a, b): + pass + """ + self.unchanged(s) + + def test_shadowing_assign_simple(self): + s = """ + next = foo + + class A: + def next(self, a, b): + pass + """ + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") + + def test_shadowing_assign_tuple_1(self): + s = """ + (next, a) = foo + + class A: + def next(self, a, b): + pass + """ + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") + + def test_shadowing_assign_tuple_2(self): + s = """ + (a, (b, (next, c)), a) = foo + + class A: + def next(self, a, b): + pass + """ + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") + + def test_shadowing_assign_list_1(self): + s = """ + [next, a] = foo + + class A: + def next(self, a, b): + pass + """ + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") + + def test_shadowing_assign_list_2(self): + s = """ + [a, [b, [next, c]], a] = foo + + class A: + def next(self, a, b): + pass + """ + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") + + def test_builtin_assign(self): + s = """ + def foo(): + __builtin__.next = foo + + class A: + def next(self, a, b): + pass + """ + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") + + def test_builtin_assign_in_tuple(self): + s = """ + def foo(): + (a, __builtin__.next) = foo + + class A: + def next(self, a, b): + pass + """ + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") + + def test_builtin_assign_in_list(self): + s = """ + def foo(): + [a, __builtin__.next] = foo + + class A: + def next(self, a, b): + pass + """ + self.warns_unchanged(s, "Calls 
to builtin next() possibly shadowed") + + def test_assign_to_next(self): + s = """ + def foo(): + A.next = foo + + class A: + def next(self, a, b): + pass + """ + self.unchanged(s) + + def test_assign_to_next_in_tuple(self): + s = """ + def foo(): + (a, A.next) = foo + + class A: + def next(self, a, b): + pass + """ + self.unchanged(s) + + def test_assign_to_next_in_list(self): + s = """ + def foo(): + [a, A.next] = foo + + class A: + def next(self, a, b): + pass + """ + self.unchanged(s) + + def test_shadowing_import_1(self): + s = """ + import foo.bar as next + + class A: + def next(self, a, b): + pass + """ + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") + + def test_shadowing_import_2(self): + s = """ + import bar, bar.foo as next + + class A: + def next(self, a, b): + pass + """ + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") + + def test_shadowing_import_3(self): + s = """ + import bar, bar.foo as next, baz + + class A: + def next(self, a, b): + pass + """ + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") + + def test_shadowing_import_from_1(self): + s = """ + from x import next + + class A: + def next(self, a, b): + pass + """ + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") + + def test_shadowing_import_from_2(self): + s = """ + from x.a import next + + class A: + def next(self, a, b): + pass + """ + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") + + def test_shadowing_import_from_3(self): + s = """ + from x import a, next, b + + class A: + def next(self, a, b): + pass + """ + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") + + def test_shadowing_import_from_4(self): + s = """ + from x.a import a, next, b + + class A: + def next(self, a, b): + pass + """ + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") + + def test_shadowing_funcdef_1(self): + s = """ + def next(a): + pass + + class A: + def next(self, a, b): + pass + """ + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") + + def test_shadowing_funcdef_2(self): + b = """ + def next(a): + pass + + class A: + def next(self): + pass + + it.next() + """ + a = """ + def next(a): + pass + + class A: + def __next__(self): + pass + + it.__next__() + """ + self.warns(b, a, "Calls to builtin next() possibly shadowed") + + def test_shadowing_global_1(self): + s = """ + def f(): + global next + next = 5 + """ + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") + + def test_shadowing_global_2(self): + s = """ + def f(): + global a, next, b + next = 5 + """ + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") + + def test_shadowing_for_simple(self): + s = """ + for next in it(): + pass + + b = 5 + c = 6 + """ + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") + + def test_shadowing_for_tuple_1(self): + s = """ + for next, b in it(): + pass + + b = 5 + c = 6 + """ + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") + + def test_shadowing_for_tuple_2(self): + s = """ + for a, (next, c), b in it(): + pass + + b = 5 + c = 6 + """ + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") + + def test_noncall_access_1(self): + b = """gnext = g.next""" + a = """gnext = g.__next__""" + self.check(b, a) + + def test_noncall_access_2(self): + b = """f(g.next + 5)""" + a = """f(g.__next__ + 5)""" + self.check(b, a) + + def test_noncall_access_3(self): + b = """f(g().next + 5)""" + a = 
"""f(g().__next__ + 5)""" + self.check(b, a) + +class Test_nonzero(FixerTestCase): + fixer = "nonzero" + + def test_1(self): + b = """ + class A: + def __nonzero__(self): + pass + """ + a = """ + class A: + def __bool__(self): + pass + """ + self.check(b, a) + + def test_2(self): + b = """ + class A(object): + def __nonzero__(self): + pass + """ + a = """ + class A(object): + def __bool__(self): + pass + """ + self.check(b, a) + + def test_unchanged_1(self): + s = """ + class A(object): + def __bool__(self): + pass + """ + self.unchanged(s) + + def test_unchanged_2(self): + s = """ + class A(object): + def __nonzero__(self, a): + pass + """ + self.unchanged(s) + + def test_unchanged_func(self): + s = """ + def __nonzero__(self): + pass + """ + self.unchanged(s) + +class Test_numliterals(FixerTestCase): + fixer = "numliterals" + + def test_octal_1(self): + b = """0755""" + a = """0o755""" + self.check(b, a) + + def test_long_int_1(self): + b = """a = 12L""" + a = """a = 12""" + self.check(b, a) + + def test_long_int_2(self): + b = """a = 12l""" + a = """a = 12""" + self.check(b, a) + + def test_long_hex(self): + b = """b = 0x12l""" + a = """b = 0x12""" + self.check(b, a) + + def test_comments_and_spacing(self): + b = """b = 0x12L""" + a = """b = 0x12""" + self.check(b, a) + + b = """b = 0755 # spam""" + a = """b = 0o755 # spam""" + self.check(b, a) + + def test_unchanged_int(self): + s = """5""" + self.unchanged(s) + + def test_unchanged_float(self): + s = """5.0""" + self.unchanged(s) + + def test_unchanged_octal(self): + s = """0o755""" + self.unchanged(s) + + def test_unchanged_hex(self): + s = """0xABC""" + self.unchanged(s) + + def test_unchanged_exp(self): + s = """5.0e10""" + self.unchanged(s) + + def test_unchanged_complex_int(self): + s = """5 + 4j""" + self.unchanged(s) + + def test_unchanged_complex_float(self): + s = """5.4 + 4.9j""" + self.unchanged(s) + + def test_unchanged_complex_bare(self): + s = """4j""" + self.unchanged(s) + s = """4.4j""" + self.unchanged(s) + +class Test_renames(FixerTestCase): + fixer = "renames" + + modules = {"sys": ("maxint", "maxsize"), + } + + def test_import_from(self): + for mod, (old, new) in list(self.modules.items()): + b = "from %s import %s" % (mod, old) + a = "from %s import %s" % (mod, new) + self.check(b, a) + + s = "from foo import %s" % old + self.unchanged(s) + + def test_import_from_as(self): + for mod, (old, new) in list(self.modules.items()): + b = "from %s import %s as foo_bar" % (mod, old) + a = "from %s import %s as foo_bar" % (mod, new) + self.check(b, a) + + def test_import_module_usage(self): + for mod, (old, new) in list(self.modules.items()): + b = """ + import %s + foo(%s, %s.%s) + """ % (mod, mod, mod, old) + a = """ + import %s + foo(%s, %s.%s) + """ % (mod, mod, mod, new) + self.check(b, a) + + def XXX_test_from_import_usage(self): + # not implemented yet + for mod, (old, new) in list(self.modules.items()): + b = """ + from %s import %s + foo(%s, %s) + """ % (mod, old, mod, old) + a = """ + from %s import %s + foo(%s, %s) + """ % (mod, new, mod, new) + self.check(b, a) + +class Test_unicode(FixerTestCase): + fixer = "unicode" + + def test_whitespace(self): + b = """unicode( x)""" + a = """str( x)""" + self.check(b, a) + + b = """ unicode(x )""" + a = """ str(x )""" + self.check(b, a) + + b = """ u'h'""" + a = """ 'h'""" + self.check(b, a) + + def test_unicode_call(self): + b = """unicode(x, y, z)""" + a = """str(x, y, z)""" + self.check(b, a) + + def test_unichr(self): + b = """unichr(u'h')""" + a = """chr('h')""" + 
self.check(b, a) + + def test_unicode_literal_1(self): + b = '''u"x"''' + a = '''"x"''' + self.check(b, a) + + def test_unicode_literal_2(self): + b = """ur'x'""" + a = """r'x'""" + self.check(b, a) + + def test_unicode_literal_3(self): + b = """UR'''x''' """ + a = """R'''x''' """ + self.check(b, a) + + def test_native_literal_escape_u(self): + b = r"""'\\\u20ac\U0001d121\\u20ac'""" + a = r"""'\\\\u20ac\\U0001d121\\u20ac'""" + self.check(b, a) + + b = r"""r'\\\u20ac\U0001d121\\u20ac'""" + a = r"""r'\\\u20ac\U0001d121\\u20ac'""" + self.check(b, a) + + def test_bytes_literal_escape_u(self): + b = r"""b'\\\u20ac\U0001d121\\u20ac'""" + a = r"""b'\\\u20ac\U0001d121\\u20ac'""" + self.check(b, a) + + b = r"""br'\\\u20ac\U0001d121\\u20ac'""" + a = r"""br'\\\u20ac\U0001d121\\u20ac'""" + self.check(b, a) + + def test_unicode_literal_escape_u(self): + b = r"""u'\\\u20ac\U0001d121\\u20ac'""" + a = r"""'\\\u20ac\U0001d121\\u20ac'""" + self.check(b, a) + + b = r"""ur'\\\u20ac\U0001d121\\u20ac'""" + a = r"""r'\\\u20ac\U0001d121\\u20ac'""" + self.check(b, a) + + def test_native_unicode_literal_escape_u(self): + f = 'from __future__ import unicode_literals\n' + b = f + r"""'\\\u20ac\U0001d121\\u20ac'""" + a = f + r"""'\\\u20ac\U0001d121\\u20ac'""" + self.check(b, a) + + b = f + r"""r'\\\u20ac\U0001d121\\u20ac'""" + a = f + r"""r'\\\u20ac\U0001d121\\u20ac'""" + self.check(b, a) + + +class Test_filter(FixerTestCase): + fixer = "filter" + + def test_prefix_preservation(self): + b = """x = filter( foo, 'abc' )""" + a = """x = list(filter( foo, 'abc' ))""" + self.check(b, a) + + b = """x = filter( None , 'abc' )""" + a = """x = [_f for _f in 'abc' if _f]""" + self.check(b, a) + + def test_filter_basic(self): + b = """x = filter(None, 'abc')""" + a = """x = [_f for _f in 'abc' if _f]""" + self.check(b, a) + + b = """x = len(filter(f, 'abc'))""" + a = """x = len(list(filter(f, 'abc')))""" + self.check(b, a) + + b = """x = filter(lambda x: x%2 == 0, range(10))""" + a = """x = [x for x in range(10) if x%2 == 0]""" + self.check(b, a) + + # Note the parens around x + b = """x = filter(lambda (x): x%2 == 0, range(10))""" + a = """x = [x for x in range(10) if x%2 == 0]""" + self.check(b, a) + + # bpo-38871 + b = """filter(lambda x: True if x > 2 else False, [1, 2, 3])""" + a = """[x for x in [1, 2, 3] if (True if x > 2 else False)]""" + self.check(b, a) + + def test_filter_trailers(self): + b = """x = filter(None, 'abc')[0]""" + a = """x = [_f for _f in 'abc' if _f][0]""" + self.check(b, a) + + b = """x = len(filter(f, 'abc')[0])""" + a = """x = len(list(filter(f, 'abc'))[0])""" + self.check(b, a) + + b = """x = filter(lambda x: x%2 == 0, range(10))[0]""" + a = """x = [x for x in range(10) if x%2 == 0][0]""" + self.check(b, a) + + # Note the parens around x + b = """x = filter(lambda (x): x%2 == 0, range(10))[0]""" + a = """x = [x for x in range(10) if x%2 == 0][0]""" + self.check(b, a) + + def test_filter_nochange(self): + a = """b.join(filter(f, 'abc'))""" + self.unchanged(a) + a = """(a + foo(5)).join(filter(f, 'abc'))""" + self.unchanged(a) + a = """iter(filter(f, 'abc'))""" + self.unchanged(a) + a = """list(filter(f, 'abc'))""" + self.unchanged(a) + a = """list(filter(f, 'abc'))[0]""" + self.unchanged(a) + a = """set(filter(f, 'abc'))""" + self.unchanged(a) + a = """set(filter(f, 'abc')).pop()""" + self.unchanged(a) + a = """tuple(filter(f, 'abc'))""" + self.unchanged(a) + a = """any(filter(f, 'abc'))""" + self.unchanged(a) + a = """all(filter(f, 'abc'))""" + self.unchanged(a) + a = """sum(filter(f, 'abc'))""" + 
self.unchanged(a) + a = """sorted(filter(f, 'abc'))""" + self.unchanged(a) + a = """sorted(filter(f, 'abc'), key=blah)""" + self.unchanged(a) + a = """sorted(filter(f, 'abc'), key=blah)[0]""" + self.unchanged(a) + a = """enumerate(filter(f, 'abc'))""" + self.unchanged(a) + a = """enumerate(filter(f, 'abc'), start=1)""" + self.unchanged(a) + a = """for i in filter(f, 'abc'): pass""" + self.unchanged(a) + a = """[x for x in filter(f, 'abc')]""" + self.unchanged(a) + a = """(x for x in filter(f, 'abc'))""" + self.unchanged(a) + + def test_future_builtins(self): + a = "from future_builtins import spam, filter; filter(f, 'ham')" + self.unchanged(a) + + b = """from future_builtins import spam; x = filter(f, 'abc')""" + a = """from future_builtins import spam; x = list(filter(f, 'abc'))""" + self.check(b, a) + + a = "from future_builtins import *; filter(f, 'ham')" + self.unchanged(a) + +class Test_map(FixerTestCase): + fixer = "map" + + def check(self, b, a): + self.unchanged("from future_builtins import map; " + b, a) + super(Test_map, self).check(b, a) + + def test_prefix_preservation(self): + b = """x = map( f, 'abc' )""" + a = """x = list(map( f, 'abc' ))""" + self.check(b, a) + + def test_map_trailers(self): + b = """x = map(f, 'abc')[0]""" + a = """x = list(map(f, 'abc'))[0]""" + self.check(b, a) + + b = """x = map(None, l)[0]""" + a = """x = list(l)[0]""" + self.check(b, a) + + b = """x = map(lambda x:x, l)[0]""" + a = """x = [x for x in l][0]""" + self.check(b, a) + + b = """x = map(f, 'abc')[0][1]""" + a = """x = list(map(f, 'abc'))[0][1]""" + self.check(b, a) + + def test_trailing_comment(self): + b = """x = map(f, 'abc') # foo""" + a = """x = list(map(f, 'abc')) # foo""" + self.check(b, a) + + def test_None_with_multiple_arguments(self): + s = """x = map(None, a, b, c)""" + self.warns_unchanged(s, "cannot convert map(None, ...) 
with " + "multiple arguments") + + def test_map_basic(self): + b = """x = map(f, 'abc')""" + a = """x = list(map(f, 'abc'))""" + self.check(b, a) + + b = """x = len(map(f, 'abc', 'def'))""" + a = """x = len(list(map(f, 'abc', 'def')))""" + self.check(b, a) + + b = """x = map(None, 'abc')""" + a = """x = list('abc')""" + self.check(b, a) + + b = """x = map(lambda x: x+1, range(4))""" + a = """x = [x+1 for x in range(4)]""" + self.check(b, a) + + # Note the parens around x + b = """x = map(lambda (x): x+1, range(4))""" + a = """x = [x+1 for x in range(4)]""" + self.check(b, a) + + b = """ + foo() + # foo + map(f, x) + """ + a = """ + foo() + # foo + list(map(f, x)) + """ + self.warns(b, a, "You should use a for loop here") + + def test_map_nochange(self): + a = """b.join(map(f, 'abc'))""" + self.unchanged(a) + a = """(a + foo(5)).join(map(f, 'abc'))""" + self.unchanged(a) + a = """iter(map(f, 'abc'))""" + self.unchanged(a) + a = """list(map(f, 'abc'))""" + self.unchanged(a) + a = """list(map(f, 'abc'))[0]""" + self.unchanged(a) + a = """set(map(f, 'abc'))""" + self.unchanged(a) + a = """set(map(f, 'abc')).pop()""" + self.unchanged(a) + a = """tuple(map(f, 'abc'))""" + self.unchanged(a) + a = """any(map(f, 'abc'))""" + self.unchanged(a) + a = """all(map(f, 'abc'))""" + self.unchanged(a) + a = """sum(map(f, 'abc'))""" + self.unchanged(a) + a = """sorted(map(f, 'abc'))""" + self.unchanged(a) + a = """sorted(map(f, 'abc'), key=blah)""" + self.unchanged(a) + a = """sorted(map(f, 'abc'), key=blah)[0]""" + self.unchanged(a) + a = """enumerate(map(f, 'abc'))""" + self.unchanged(a) + a = """enumerate(map(f, 'abc'), start=1)""" + self.unchanged(a) + a = """for i in map(f, 'abc'): pass""" + self.unchanged(a) + a = """[x for x in map(f, 'abc')]""" + self.unchanged(a) + a = """(x for x in map(f, 'abc'))""" + self.unchanged(a) + + def test_future_builtins(self): + a = "from future_builtins import spam, map, eggs; map(f, 'ham')" + self.unchanged(a) + + b = """from future_builtins import spam, eggs; x = map(f, 'abc')""" + a = """from future_builtins import spam, eggs; x = list(map(f, 'abc'))""" + self.check(b, a) + + a = "from future_builtins import *; map(f, 'ham')" + self.unchanged(a) + +class Test_zip(FixerTestCase): + fixer = "zip" + + def check(self, b, a): + self.unchanged("from future_builtins import zip; " + b, a) + super(Test_zip, self).check(b, a) + + def test_zip_basic(self): + b = """x = zip()""" + a = """x = list(zip())""" + self.check(b, a) + + b = """x = zip(a, b, c)""" + a = """x = list(zip(a, b, c))""" + self.check(b, a) + + b = """x = len(zip(a, b))""" + a = """x = len(list(zip(a, b)))""" + self.check(b, a) + + def test_zip_trailers(self): + b = """x = zip(a, b, c)[0]""" + a = """x = list(zip(a, b, c))[0]""" + self.check(b, a) + + b = """x = zip(a, b, c)[0][1]""" + a = """x = list(zip(a, b, c))[0][1]""" + self.check(b, a) + + def test_zip_nochange(self): + a = """b.join(zip(a, b))""" + self.unchanged(a) + a = """(a + foo(5)).join(zip(a, b))""" + self.unchanged(a) + a = """iter(zip(a, b))""" + self.unchanged(a) + a = """list(zip(a, b))""" + self.unchanged(a) + a = """list(zip(a, b))[0]""" + self.unchanged(a) + a = """set(zip(a, b))""" + self.unchanged(a) + a = """set(zip(a, b)).pop()""" + self.unchanged(a) + a = """tuple(zip(a, b))""" + self.unchanged(a) + a = """any(zip(a, b))""" + self.unchanged(a) + a = """all(zip(a, b))""" + self.unchanged(a) + a = """sum(zip(a, b))""" + self.unchanged(a) + a = """sorted(zip(a, b))""" + self.unchanged(a) + a = """sorted(zip(a, b), key=blah)""" + 
self.unchanged(a) + a = """sorted(zip(a, b), key=blah)[0]""" + self.unchanged(a) + a = """enumerate(zip(a, b))""" + self.unchanged(a) + a = """enumerate(zip(a, b), start=1)""" + self.unchanged(a) + a = """for i in zip(a, b): pass""" + self.unchanged(a) + a = """[x for x in zip(a, b)]""" + self.unchanged(a) + a = """(x for x in zip(a, b))""" + self.unchanged(a) + + def test_future_builtins(self): + a = "from future_builtins import spam, zip, eggs; zip(a, b)" + self.unchanged(a) + + b = """from future_builtins import spam, eggs; x = zip(a, b)""" + a = """from future_builtins import spam, eggs; x = list(zip(a, b))""" + self.check(b, a) + + a = "from future_builtins import *; zip(a, b)" + self.unchanged(a) + +class Test_standarderror(FixerTestCase): + fixer = "standarderror" + + def test(self): + b = """x = StandardError()""" + a = """x = Exception()""" + self.check(b, a) + + b = """x = StandardError(a, b, c)""" + a = """x = Exception(a, b, c)""" + self.check(b, a) + + b = """f(2 + StandardError(a, b, c))""" + a = """f(2 + Exception(a, b, c))""" + self.check(b, a) + +class Test_types(FixerTestCase): + fixer = "types" + + def test_basic_types_convert(self): + b = """types.StringType""" + a = """bytes""" + self.check(b, a) + + b = """types.DictType""" + a = """dict""" + self.check(b, a) + + b = """types . IntType""" + a = """int""" + self.check(b, a) + + b = """types.ListType""" + a = """list""" + self.check(b, a) + + b = """types.LongType""" + a = """int""" + self.check(b, a) + + b = """types.NoneType""" + a = """type(None)""" + self.check(b, a) + + b = "types.StringTypes" + a = "(str,)" + self.check(b, a) + +class Test_idioms(FixerTestCase): + fixer = "idioms" + + def test_while(self): + b = """while 1: foo()""" + a = """while True: foo()""" + self.check(b, a) + + b = """while 1: foo()""" + a = """while True: foo()""" + self.check(b, a) + + b = """ + while 1: + foo() + """ + a = """ + while True: + foo() + """ + self.check(b, a) + + def test_while_unchanged(self): + s = """while 11: foo()""" + self.unchanged(s) + + s = """while 0: foo()""" + self.unchanged(s) + + s = """while foo(): foo()""" + self.unchanged(s) + + s = """while []: foo()""" + self.unchanged(s) + + def test_eq_simple(self): + b = """type(x) == T""" + a = """isinstance(x, T)""" + self.check(b, a) + + b = """if type(x) == T: pass""" + a = """if isinstance(x, T): pass""" + self.check(b, a) + + def test_eq_reverse(self): + b = """T == type(x)""" + a = """isinstance(x, T)""" + self.check(b, a) + + b = """if T == type(x): pass""" + a = """if isinstance(x, T): pass""" + self.check(b, a) + + def test_eq_expression(self): + b = """type(x+y) == d.get('T')""" + a = """isinstance(x+y, d.get('T'))""" + self.check(b, a) + + b = """type( x + y) == d.get('T')""" + a = """isinstance(x + y, d.get('T'))""" + self.check(b, a) + + def test_is_simple(self): + b = """type(x) is T""" + a = """isinstance(x, T)""" + self.check(b, a) + + b = """if type(x) is T: pass""" + a = """if isinstance(x, T): pass""" + self.check(b, a) + + def test_is_reverse(self): + b = """T is type(x)""" + a = """isinstance(x, T)""" + self.check(b, a) + + b = """if T is type(x): pass""" + a = """if isinstance(x, T): pass""" + self.check(b, a) + + def test_is_expression(self): + b = """type(x+y) is d.get('T')""" + a = """isinstance(x+y, d.get('T'))""" + self.check(b, a) + + b = """type( x + y) is d.get('T')""" + a = """isinstance(x + y, d.get('T'))""" + self.check(b, a) + + def test_is_not_simple(self): + b = """type(x) is not T""" + a = """not isinstance(x, T)""" + self.check(b, 
a) + + b = """if type(x) is not T: pass""" + a = """if not isinstance(x, T): pass""" + self.check(b, a) + + def test_is_not_reverse(self): + b = """T is not type(x)""" + a = """not isinstance(x, T)""" + self.check(b, a) + + b = """if T is not type(x): pass""" + a = """if not isinstance(x, T): pass""" + self.check(b, a) + + def test_is_not_expression(self): + b = """type(x+y) is not d.get('T')""" + a = """not isinstance(x+y, d.get('T'))""" + self.check(b, a) + + b = """type( x + y) is not d.get('T')""" + a = """not isinstance(x + y, d.get('T'))""" + self.check(b, a) + + def test_ne_simple(self): + b = """type(x) != T""" + a = """not isinstance(x, T)""" + self.check(b, a) + + b = """if type(x) != T: pass""" + a = """if not isinstance(x, T): pass""" + self.check(b, a) + + def test_ne_reverse(self): + b = """T != type(x)""" + a = """not isinstance(x, T)""" + self.check(b, a) + + b = """if T != type(x): pass""" + a = """if not isinstance(x, T): pass""" + self.check(b, a) + + def test_ne_expression(self): + b = """type(x+y) != d.get('T')""" + a = """not isinstance(x+y, d.get('T'))""" + self.check(b, a) + + b = """type( x + y) != d.get('T')""" + a = """not isinstance(x + y, d.get('T'))""" + self.check(b, a) + + def test_type_unchanged(self): + a = """type(x).__name__""" + self.unchanged(a) + + def test_sort_list_call(self): + b = """ + v = list(t) + v.sort() + foo(v) + """ + a = """ + v = sorted(t) + foo(v) + """ + self.check(b, a) + + b = """ + v = list(foo(b) + d) + v.sort() + foo(v) + """ + a = """ + v = sorted(foo(b) + d) + foo(v) + """ + self.check(b, a) + + b = """ + while x: + v = list(t) + v.sort() + foo(v) + """ + a = """ + while x: + v = sorted(t) + foo(v) + """ + self.check(b, a) + + b = """ + v = list(t) + # foo + v.sort() + foo(v) + """ + a = """ + v = sorted(t) + # foo + foo(v) + """ + self.check(b, a) + + b = r""" + v = list( t) + v.sort() + foo(v) + """ + a = r""" + v = sorted( t) + foo(v) + """ + self.check(b, a) + + b = r""" + try: + m = list(s) + m.sort() + except: pass + """ + + a = r""" + try: + m = sorted(s) + except: pass + """ + self.check(b, a) + + b = r""" + try: + m = list(s) + # foo + m.sort() + except: pass + """ + + a = r""" + try: + m = sorted(s) + # foo + except: pass + """ + self.check(b, a) + + b = r""" + m = list(s) + # more comments + m.sort()""" + + a = r""" + m = sorted(s) + # more comments""" + self.check(b, a) + + def test_sort_simple_expr(self): + b = """ + v = t + v.sort() + foo(v) + """ + a = """ + v = sorted(t) + foo(v) + """ + self.check(b, a) + + b = """ + v = foo(b) + v.sort() + foo(v) + """ + a = """ + v = sorted(foo(b)) + foo(v) + """ + self.check(b, a) + + b = """ + v = b.keys() + v.sort() + foo(v) + """ + a = """ + v = sorted(b.keys()) + foo(v) + """ + self.check(b, a) + + b = """ + v = foo(b) + d + v.sort() + foo(v) + """ + a = """ + v = sorted(foo(b) + d) + foo(v) + """ + self.check(b, a) + + b = """ + while x: + v = t + v.sort() + foo(v) + """ + a = """ + while x: + v = sorted(t) + foo(v) + """ + self.check(b, a) + + b = """ + v = t + # foo + v.sort() + foo(v) + """ + a = """ + v = sorted(t) + # foo + foo(v) + """ + self.check(b, a) + + b = r""" + v = t + v.sort() + foo(v) + """ + a = r""" + v = sorted(t) + foo(v) + """ + self.check(b, a) + + def test_sort_unchanged(self): + s = """ + v = list(t) + w.sort() + foo(w) + """ + self.unchanged(s) + + s = """ + v = list(t) + v.sort(u) + foo(v) + """ + self.unchanged(s) + +class Test_basestring(FixerTestCase): + fixer = "basestring" + + def test_basestring(self): + b = """isinstance(x, basestring)""" 
+ a = """isinstance(x, str)""" + self.check(b, a) + +class Test_buffer(FixerTestCase): + fixer = "buffer" + + def test_buffer(self): + b = """x = buffer(y)""" + a = """x = memoryview(y)""" + self.check(b, a) + + def test_slicing(self): + b = """buffer(y)[4:5]""" + a = """memoryview(y)[4:5]""" + self.check(b, a) + +class Test_future(FixerTestCase): + fixer = "future" + + def test_future(self): + b = """from __future__ import braces""" + a = """""" + self.check(b, a) + + b = """# comment\nfrom __future__ import braces""" + a = """# comment\n""" + self.check(b, a) + + b = """from __future__ import braces\n# comment""" + a = """\n# comment""" + self.check(b, a) + + def test_run_order(self): + self.assert_runs_after('print') + +class Test_itertools(FixerTestCase): + fixer = "itertools" + + def checkall(self, before, after): + # Because we need to check with and without the itertools prefix + # and on each of the three functions, these loops make it all + # much easier + for i in ('itertools.', ''): + for f in ('map', 'filter', 'zip'): + b = before %(i+'i'+f) + a = after %(f) + self.check(b, a) + + def test_0(self): + # A simple example -- test_1 covers exactly the same thing, + # but it's not quite as clear. + b = "itertools.izip(a, b)" + a = "zip(a, b)" + self.check(b, a) + + def test_1(self): + b = """%s(f, a)""" + a = """%s(f, a)""" + self.checkall(b, a) + + def test_qualified(self): + b = """itertools.ifilterfalse(a, b)""" + a = """itertools.filterfalse(a, b)""" + self.check(b, a) + + b = """itertools.izip_longest(a, b)""" + a = """itertools.zip_longest(a, b)""" + self.check(b, a) + + def test_2(self): + b = """ifilterfalse(a, b)""" + a = """filterfalse(a, b)""" + self.check(b, a) + + b = """izip_longest(a, b)""" + a = """zip_longest(a, b)""" + self.check(b, a) + + def test_space_1(self): + b = """ %s(f, a)""" + a = """ %s(f, a)""" + self.checkall(b, a) + + def test_space_2(self): + b = """ itertools.ifilterfalse(a, b)""" + a = """ itertools.filterfalse(a, b)""" + self.check(b, a) + + b = """ itertools.izip_longest(a, b)""" + a = """ itertools.zip_longest(a, b)""" + self.check(b, a) + + def test_run_order(self): + self.assert_runs_after('map', 'zip', 'filter') + + +class Test_itertools_imports(FixerTestCase): + fixer = 'itertools_imports' + + def test_reduced(self): + b = "from itertools import imap, izip, foo" + a = "from itertools import foo" + self.check(b, a) + + b = "from itertools import bar, imap, izip, foo" + a = "from itertools import bar, foo" + self.check(b, a) + + b = "from itertools import chain, imap, izip" + a = "from itertools import chain" + self.check(b, a) + + def test_comments(self): + b = "#foo\nfrom itertools import imap, izip" + a = "#foo\n" + self.check(b, a) + + def test_none(self): + b = "from itertools import imap, izip" + a = "" + self.check(b, a) + + b = "from itertools import izip" + a = "" + self.check(b, a) + + def test_import_as(self): + b = "from itertools import izip, bar as bang, imap" + a = "from itertools import bar as bang" + self.check(b, a) + + b = "from itertools import izip as _zip, imap, bar" + a = "from itertools import bar" + self.check(b, a) + + b = "from itertools import imap as _map" + a = "" + self.check(b, a) + + b = "from itertools import imap as _map, izip as _zip" + a = "" + self.check(b, a) + + s = "from itertools import bar as bang" + self.unchanged(s) + + def test_ifilter_and_zip_longest(self): + for name in "filterfalse", "zip_longest": + b = "from itertools import i%s" % (name,) + a = "from itertools import %s" % (name,) + 
self.check(b, a) + + b = "from itertools import imap, i%s, foo" % (name,) + a = "from itertools import %s, foo" % (name,) + self.check(b, a) + + b = "from itertools import bar, i%s, foo" % (name,) + a = "from itertools import bar, %s, foo" % (name,) + self.check(b, a) + + def test_import_star(self): + s = "from itertools import *" + self.unchanged(s) + + + def test_unchanged(self): + s = "from itertools import foo" + self.unchanged(s) + + +class Test_import(FixerTestCase): + fixer = "import" + + def setUp(self): + super(Test_import, self).setUp() + # Need to replace fix_import's exists method + # so we can check that it's doing the right thing + self.files_checked = [] + self.present_files = set() + self.always_exists = True + def fake_exists(name): + self.files_checked.append(name) + return self.always_exists or (name in self.present_files) + + from lib2to3.fixes import fix_import + fix_import.exists = fake_exists + + def tearDown(self): + from lib2to3.fixes import fix_import + fix_import.exists = os.path.exists + + def check_both(self, b, a): + self.always_exists = True + super(Test_import, self).check(b, a) + self.always_exists = False + super(Test_import, self).unchanged(b) + + def test_files_checked(self): + def p(path): + # Takes a unix path and returns a path with correct separators + return os.path.pathsep.join(path.split("/")) + + self.always_exists = False + self.present_files = set(['__init__.py']) + expected_extensions = ('.py', os.path.sep, '.pyc', '.so', '.sl', '.pyd') + names_to_test = (p("/spam/eggs.py"), "ni.py", p("../../shrubbery.py")) + + for name in names_to_test: + self.files_checked = [] + self.filename = name + self.unchanged("import jam") + + if os.path.dirname(name): + name = os.path.dirname(name) + '/jam' + else: + name = 'jam' + expected_checks = set(name + ext for ext in expected_extensions) + expected_checks.add("__init__.py") + + self.assertEqual(set(self.files_checked), expected_checks) + + def test_not_in_package(self): + s = "import bar" + self.always_exists = False + self.present_files = set(["bar.py"]) + self.unchanged(s) + + def test_with_absolute_import_enabled(self): + s = "from __future__ import absolute_import\nimport bar" + self.always_exists = False + self.present_files = set(["__init__.py", "bar.py"]) + self.unchanged(s) + + def test_in_package(self): + b = "import bar" + a = "from . import bar" + self.always_exists = False + self.present_files = set(["__init__.py", "bar.py"]) + self.check(b, a) + + def test_import_from_package(self): + b = "import bar" + a = "from . import bar" + self.always_exists = False + self.present_files = set(["__init__.py", "bar" + os.path.sep]) + self.check(b, a) + + def test_already_relative_import(self): + s = "from . import bar" + self.unchanged(s) + + def test_comments_and_indent(self): + b = "import bar # Foo" + a = "from . import bar # Foo" + self.check(b, a) + + def test_from(self): + b = "from foo import bar, baz" + a = "from .foo import bar, baz" + self.check_both(b, a) + + b = "from foo import bar" + a = "from .foo import bar" + self.check_both(b, a) + + b = "from foo import (bar, baz)" + a = "from .foo import (bar, baz)" + self.check_both(b, a) + + def test_dotted_from(self): + b = "from green.eggs import ham" + a = "from .green.eggs import ham" + self.check_both(b, a) + + def test_from_as(self): + b = "from green.eggs import ham as spam" + a = "from .green.eggs import ham as spam" + self.check_both(b, a) + + def test_import(self): + b = "import foo" + a = "from . 
import foo" + self.check_both(b, a) + + b = "import foo, bar" + a = "from . import foo, bar" + self.check_both(b, a) + + b = "import foo, bar, x" + a = "from . import foo, bar, x" + self.check_both(b, a) + + b = "import x, y, z" + a = "from . import x, y, z" + self.check_both(b, a) + + def test_import_as(self): + b = "import foo as x" + a = "from . import foo as x" + self.check_both(b, a) + + b = "import a as b, b as c, c as d" + a = "from . import a as b, b as c, c as d" + self.check_both(b, a) + + def test_local_and_absolute(self): + self.always_exists = False + self.present_files = set(["foo.py", "__init__.py"]) + + s = "import foo, bar" + self.warns_unchanged(s, "absolute and local imports together") + + def test_dotted_import(self): + b = "import foo.bar" + a = "from . import foo.bar" + self.check_both(b, a) + + def test_dotted_import_as(self): + b = "import foo.bar as bang" + a = "from . import foo.bar as bang" + self.check_both(b, a) + + def test_prefix(self): + b = """ + # prefix + import foo.bar + """ + a = """ + # prefix + from . import foo.bar + """ + self.check_both(b, a) + + +class Test_set_literal(FixerTestCase): + + fixer = "set_literal" + + def test_basic(self): + b = """set([1, 2, 3])""" + a = """{1, 2, 3}""" + self.check(b, a) + + b = """set((1, 2, 3))""" + a = """{1, 2, 3}""" + self.check(b, a) + + b = """set((1,))""" + a = """{1}""" + self.check(b, a) + + b = """set([1])""" + self.check(b, a) + + b = """set((a, b))""" + a = """{a, b}""" + self.check(b, a) + + b = """set([a, b])""" + self.check(b, a) + + b = """set((a*234, f(args=23)))""" + a = """{a*234, f(args=23)}""" + self.check(b, a) + + b = """set([a*23, f(23)])""" + a = """{a*23, f(23)}""" + self.check(b, a) + + b = """set([a-234**23])""" + a = """{a-234**23}""" + self.check(b, a) + + def test_listcomps(self): + b = """set([x for x in y])""" + a = """{x for x in y}""" + self.check(b, a) + + b = """set([x for x in y if x == m])""" + a = """{x for x in y if x == m}""" + self.check(b, a) + + b = """set([x for x in y for a in b])""" + a = """{x for x in y for a in b}""" + self.check(b, a) + + b = """set([f(x) - 23 for x in y])""" + a = """{f(x) - 23 for x in y}""" + self.check(b, a) + + def test_whitespace(self): + b = """set( [1, 2])""" + a = """{1, 2}""" + self.check(b, a) + + b = """set([1 , 2])""" + a = """{1 , 2}""" + self.check(b, a) + + b = """set([ 1 ])""" + a = """{ 1 }""" + self.check(b, a) + + b = """set( [1] )""" + a = """{1}""" + self.check(b, a) + + b = """set([ 1, 2 ])""" + a = """{ 1, 2 }""" + self.check(b, a) + + b = """set([x for x in y ])""" + a = """{x for x in y }""" + self.check(b, a) + + b = """set( + [1, 2] + ) + """ + a = """{1, 2}\n""" + self.check(b, a) + + def test_comments(self): + b = """set((1, 2)) # Hi""" + a = """{1, 2} # Hi""" + self.check(b, a) + + # This isn't optimal behavior, but the fixer is optional. + b = """ + # Foo + set( # Bar + (1, 2) + ) + """ + a = """ + # Foo + {1, 2} + """ + self.check(b, a) + + def test_unchanged(self): + s = """set()""" + self.unchanged(s) + + s = """set(a)""" + self.unchanged(s) + + s = """set(a, b, c)""" + self.unchanged(s) + + # Don't transform generators because they might have to be lazy. 
+ s = """set(x for x in y)""" + self.unchanged(s) + + s = """set(x for x in y if z)""" + self.unchanged(s) + + s = """set(a*823-23**2 + f(23))""" + self.unchanged(s) + + +class Test_sys_exc(FixerTestCase): + fixer = "sys_exc" + + def test_0(self): + b = "sys.exc_type" + a = "sys.exc_info()[0]" + self.check(b, a) + + def test_1(self): + b = "sys.exc_value" + a = "sys.exc_info()[1]" + self.check(b, a) + + def test_2(self): + b = "sys.exc_traceback" + a = "sys.exc_info()[2]" + self.check(b, a) + + def test_3(self): + b = "sys.exc_type # Foo" + a = "sys.exc_info()[0] # Foo" + self.check(b, a) + + def test_4(self): + b = "sys. exc_type" + a = "sys. exc_info()[0]" + self.check(b, a) + + def test_5(self): + b = "sys .exc_type" + a = "sys .exc_info()[0]" + self.check(b, a) + + +class Test_paren(FixerTestCase): + fixer = "paren" + + def test_0(self): + b = """[i for i in 1, 2 ]""" + a = """[i for i in (1, 2) ]""" + self.check(b, a) + + def test_1(self): + b = """[i for i in 1, 2, ]""" + a = """[i for i in (1, 2,) ]""" + self.check(b, a) + + def test_2(self): + b = """[i for i in 1, 2 ]""" + a = """[i for i in (1, 2) ]""" + self.check(b, a) + + def test_3(self): + b = """[i for i in 1, 2 if i]""" + a = """[i for i in (1, 2) if i]""" + self.check(b, a) + + def test_4(self): + b = """[i for i in 1, 2 ]""" + a = """[i for i in (1, 2) ]""" + self.check(b, a) + + def test_5(self): + b = """(i for i in 1, 2)""" + a = """(i for i in (1, 2))""" + self.check(b, a) + + def test_6(self): + b = """(i for i in 1 ,2 if i)""" + a = """(i for i in (1 ,2) if i)""" + self.check(b, a) + + def test_unchanged_0(self): + s = """[i for i in (1, 2)]""" + self.unchanged(s) + + def test_unchanged_1(self): + s = """[i for i in foo()]""" + self.unchanged(s) + + def test_unchanged_2(self): + s = """[i for i in (1, 2) if nothing]""" + self.unchanged(s) + + def test_unchanged_3(self): + s = """(i for i in (1, 2))""" + self.unchanged(s) + + def test_unchanged_4(self): + s = """[i for i in m]""" + self.unchanged(s) + +class Test_metaclass(FixerTestCase): + + fixer = 'metaclass' + + def test_unchanged(self): + self.unchanged("class X(): pass") + self.unchanged("class X(object): pass") + self.unchanged("class X(object1, object2): pass") + self.unchanged("class X(object1, object2, object3): pass") + self.unchanged("class X(metaclass=Meta): pass") + self.unchanged("class X(b, arg=23, metclass=Meta): pass") + self.unchanged("class X(b, arg=23, metaclass=Meta, other=42): pass") + + s = """ + class X: + def __metaclass__(self): pass + """ + self.unchanged(s) + + s = """ + class X: + a[23] = 74 + """ + self.unchanged(s) + + def test_comments(self): + b = """ + class X: + # hi + __metaclass__ = AppleMeta + """ + a = """ + class X(metaclass=AppleMeta): + # hi + pass + """ + self.check(b, a) + + b = """ + class X: + __metaclass__ = Meta + # Bedtime! + """ + a = """ + class X(metaclass=Meta): + pass + # Bedtime! 
+ """ + self.check(b, a) + + def test_meta(self): + # no-parent class, odd body + b = """ + class X(): + __metaclass__ = Q + pass + """ + a = """ + class X(metaclass=Q): + pass + """ + self.check(b, a) + + # one parent class, no body + b = """class X(object): __metaclass__ = Q""" + a = """class X(object, metaclass=Q): pass""" + self.check(b, a) + + + # one parent, simple body + b = """ + class X(object): + __metaclass__ = Meta + bar = 7 + """ + a = """ + class X(object, metaclass=Meta): + bar = 7 + """ + self.check(b, a) + + b = """ + class X: + __metaclass__ = Meta; x = 4; g = 23 + """ + a = """ + class X(metaclass=Meta): + x = 4; g = 23 + """ + self.check(b, a) + + # one parent, simple body, __metaclass__ last + b = """ + class X(object): + bar = 7 + __metaclass__ = Meta + """ + a = """ + class X(object, metaclass=Meta): + bar = 7 + """ + self.check(b, a) + + # redefining __metaclass__ + b = """ + class X(): + __metaclass__ = A + __metaclass__ = B + bar = 7 + """ + a = """ + class X(metaclass=B): + bar = 7 + """ + self.check(b, a) + + # multiple inheritance, simple body + b = """ + class X(clsA, clsB): + __metaclass__ = Meta + bar = 7 + """ + a = """ + class X(clsA, clsB, metaclass=Meta): + bar = 7 + """ + self.check(b, a) + + # keywords in the class statement + b = """class m(a, arg=23): __metaclass__ = Meta""" + a = """class m(a, arg=23, metaclass=Meta): pass""" + self.check(b, a) + + b = """ + class X(expression(2 + 4)): + __metaclass__ = Meta + """ + a = """ + class X(expression(2 + 4), metaclass=Meta): + pass + """ + self.check(b, a) + + b = """ + class X(expression(2 + 4), x**4): + __metaclass__ = Meta + """ + a = """ + class X(expression(2 + 4), x**4, metaclass=Meta): + pass + """ + self.check(b, a) + + b = """ + class X: + __metaclass__ = Meta + save.py = 23 + """ + a = """ + class X(metaclass=Meta): + save.py = 23 + """ + self.check(b, a) + + +class Test_getcwdu(FixerTestCase): + + fixer = 'getcwdu' + + def test_basic(self): + b = """os.getcwdu""" + a = """os.getcwd""" + self.check(b, a) + + b = """os.getcwdu()""" + a = """os.getcwd()""" + self.check(b, a) + + b = """meth = os.getcwdu""" + a = """meth = os.getcwd""" + self.check(b, a) + + b = """os.getcwdu(args)""" + a = """os.getcwd(args)""" + self.check(b, a) + + def test_comment(self): + b = """os.getcwdu() # Foo""" + a = """os.getcwd() # Foo""" + self.check(b, a) + + def test_unchanged(self): + s = """os.getcwd()""" + self.unchanged(s) + + s = """getcwdu()""" + self.unchanged(s) + + s = """os.getcwdb()""" + self.unchanged(s) + + def test_indentation(self): + b = """ + if 1: + os.getcwdu() + """ + a = """ + if 1: + os.getcwd() + """ + self.check(b, a) + + def test_multilation(self): + b = """os .getcwdu()""" + a = """os .getcwd()""" + self.check(b, a) + + b = """os. getcwdu""" + a = """os. getcwd""" + self.check(b, a) + + b = """os.getcwdu ( )""" + a = """os.getcwd ( )""" + self.check(b, a) + + +class Test_operator(FixerTestCase): + + fixer = "operator" + + def test_operator_isCallable(self): + b = "operator.isCallable(x)" + a = "callable(x)" + self.check(b, a) + + def test_operator_sequenceIncludes(self): + b = "operator.sequenceIncludes(x, y)" + a = "operator.contains(x, y)" + self.check(b, a) + + b = "operator .sequenceIncludes(x, y)" + a = "operator .contains(x, y)" + self.check(b, a) + + b = "operator. sequenceIncludes(x, y)" + a = "operator. 
contains(x, y)" + self.check(b, a) + + def test_operator_isSequenceType(self): + b = "operator.isSequenceType(x)" + a = "import collections.abc\nisinstance(x, collections.abc.Sequence)" + self.check(b, a) + + def test_operator_isMappingType(self): + b = "operator.isMappingType(x)" + a = "import collections.abc\nisinstance(x, collections.abc.Mapping)" + self.check(b, a) + + def test_operator_isNumberType(self): + b = "operator.isNumberType(x)" + a = "import numbers\nisinstance(x, numbers.Number)" + self.check(b, a) + + def test_operator_repeat(self): + b = "operator.repeat(x, n)" + a = "operator.mul(x, n)" + self.check(b, a) + + b = "operator .repeat(x, n)" + a = "operator .mul(x, n)" + self.check(b, a) + + b = "operator. repeat(x, n)" + a = "operator. mul(x, n)" + self.check(b, a) + + def test_operator_irepeat(self): + b = "operator.irepeat(x, n)" + a = "operator.imul(x, n)" + self.check(b, a) + + b = "operator .irepeat(x, n)" + a = "operator .imul(x, n)" + self.check(b, a) + + b = "operator. irepeat(x, n)" + a = "operator. imul(x, n)" + self.check(b, a) + + def test_bare_isCallable(self): + s = "isCallable(x)" + t = "You should use 'callable(x)' here." + self.warns_unchanged(s, t) + + def test_bare_sequenceIncludes(self): + s = "sequenceIncludes(x, y)" + t = "You should use 'operator.contains(x, y)' here." + self.warns_unchanged(s, t) + + def test_bare_operator_isSequenceType(self): + s = "isSequenceType(z)" + t = "You should use 'isinstance(z, collections.abc.Sequence)' here." + self.warns_unchanged(s, t) + + def test_bare_operator_isMappingType(self): + s = "isMappingType(x)" + t = "You should use 'isinstance(x, collections.abc.Mapping)' here." + self.warns_unchanged(s, t) + + def test_bare_operator_isNumberType(self): + s = "isNumberType(y)" + t = "You should use 'isinstance(y, numbers.Number)' here." + self.warns_unchanged(s, t) + + def test_bare_operator_repeat(self): + s = "repeat(x, n)" + t = "You should use 'operator.mul(x, n)' here." + self.warns_unchanged(s, t) + + def test_bare_operator_irepeat(self): + s = "irepeat(y, 187)" + t = "You should use 'operator.imul(y, 187)' here." 
+ self.warns_unchanged(s, t) + + +class Test_exitfunc(FixerTestCase): + + fixer = "exitfunc" + + def test_simple(self): + b = """ + import sys + sys.exitfunc = my_atexit + """ + a = """ + import sys + import atexit + atexit.register(my_atexit) + """ + self.check(b, a) + + def test_names_import(self): + b = """ + import sys, crumbs + sys.exitfunc = my_func + """ + a = """ + import sys, crumbs, atexit + atexit.register(my_func) + """ + self.check(b, a) + + def test_complex_expression(self): + b = """ + import sys + sys.exitfunc = do(d)/a()+complex(f=23, g=23)*expression + """ + a = """ + import sys + import atexit + atexit.register(do(d)/a()+complex(f=23, g=23)*expression) + """ + self.check(b, a) + + def test_comments(self): + b = """ + import sys # Foo + sys.exitfunc = f # Blah + """ + a = """ + import sys + import atexit # Foo + atexit.register(f) # Blah + """ + self.check(b, a) + + b = """ + import apples, sys, crumbs, larry # Pleasant comments + sys.exitfunc = func + """ + a = """ + import apples, sys, crumbs, larry, atexit # Pleasant comments + atexit.register(func) + """ + self.check(b, a) + + def test_in_a_function(self): + b = """ + import sys + def f(): + sys.exitfunc = func + """ + a = """ + import sys + import atexit + def f(): + atexit.register(func) + """ + self.check(b, a) + + def test_no_sys_import(self): + b = """sys.exitfunc = f""" + a = """atexit.register(f)""" + msg = ("Can't find sys import; Please add an atexit import at the " + "top of your file.") + self.warns(b, a, msg) + + + def test_unchanged(self): + s = """f(sys.exitfunc)""" + self.unchanged(s) + + +class Test_asserts(FixerTestCase): + + fixer = "asserts" + + def test_deprecated_names(self): + tests = [ + ('self.assert_(True)', 'self.assertTrue(True)'), + ('self.assertEquals(2, 2)', 'self.assertEqual(2, 2)'), + ('self.assertNotEquals(2, 3)', 'self.assertNotEqual(2, 3)'), + ('self.assertAlmostEquals(2, 3)', 'self.assertAlmostEqual(2, 3)'), + ('self.assertNotAlmostEquals(2, 8)', 'self.assertNotAlmostEqual(2, 8)'), + ('self.failUnlessEqual(2, 2)', 'self.assertEqual(2, 2)'), + ('self.failIfEqual(2, 3)', 'self.assertNotEqual(2, 3)'), + ('self.failUnlessAlmostEqual(2, 3)', 'self.assertAlmostEqual(2, 3)'), + ('self.failIfAlmostEqual(2, 8)', 'self.assertNotAlmostEqual(2, 8)'), + ('self.failUnless(True)', 'self.assertTrue(True)'), + ('self.failUnlessRaises(foo)', 'self.assertRaises(foo)'), + ('self.failIf(False)', 'self.assertFalse(False)'), + ] + for b, a in tests: + self.check(b, a) + + def test_variants(self): + b = 'eq = self.assertEquals' + a = 'eq = self.assertEqual' + self.check(b, a) + b = 'self.assertEquals(2, 3, msg="fail")' + a = 'self.assertEqual(2, 3, msg="fail")' + self.check(b, a) + b = 'self.assertEquals(2, 3, msg="fail") # foo' + a = 'self.assertEqual(2, 3, msg="fail") # foo' + self.check(b, a) + b = 'self.assertEquals (2, 3)' + a = 'self.assertEqual (2, 3)' + self.check(b, a) + b = ' self.assertEquals (2, 3)' + a = ' self.assertEqual (2, 3)' + self.check(b, a) + b = 'with self.failUnlessRaises(Explosion): explode()' + a = 'with self.assertRaises(Explosion): explode()' + self.check(b, a) + b = 'with self.failUnlessRaises(Explosion) as cm: explode()' + a = 'with self.assertRaises(Explosion) as cm: explode()' + self.check(b, a) + + def test_unchanged(self): + self.unchanged('self.assertEqualsOnSaturday') + self.unchanged('self.assertEqualsOnSaturday(3, 5)') diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/test_main.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/test_main.py --- 
python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/test_main.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/test_main.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +import codecs +import io +import logging +import os +import re +import shutil +import sys +import tempfile +import unittest + +from lib2to3 import main + + +TEST_DATA_DIR = os.path.join(os.path.dirname(__file__), "data") +PY2_TEST_MODULE = os.path.join(TEST_DATA_DIR, "py2_test_grammar.py") + + +class TestMain(unittest.TestCase): + + def setUp(self): + self.temp_dir = None # tearDown() will rmtree this directory if set. + + def tearDown(self): + # Clean up logging configuration down by main. + del logging.root.handlers[:] + if self.temp_dir: + shutil.rmtree(self.temp_dir) + + def run_2to3_capture(self, args, in_capture, out_capture, err_capture): + save_stdin = sys.stdin + save_stdout = sys.stdout + save_stderr = sys.stderr + sys.stdin = in_capture + sys.stdout = out_capture + sys.stderr = err_capture + try: + return main.main("lib2to3.fixes", args) + finally: + sys.stdin = save_stdin + sys.stdout = save_stdout + sys.stderr = save_stderr + + def test_unencodable_diff(self): + input_stream = io.StringIO("print 'nothing'\nprint u'über'\n") + out = io.BytesIO() + out_enc = codecs.getwriter("ascii")(out) + err = io.StringIO() + ret = self.run_2to3_capture(["-"], input_stream, out_enc, err) + self.assertEqual(ret, 0) + output = out.getvalue().decode("ascii") + self.assertIn("-print 'nothing'", output) + self.assertIn("WARNING: couldn't encode 's diff for " + "your terminal", err.getvalue()) + + def setup_test_source_trees(self): + """Setup a test source tree and output destination tree.""" + self.temp_dir = tempfile.mkdtemp() # tearDown() cleans this up. + self.py2_src_dir = os.path.join(self.temp_dir, "python2_project") + self.py3_dest_dir = os.path.join(self.temp_dir, "python3_project") + os.mkdir(self.py2_src_dir) + os.mkdir(self.py3_dest_dir) + # Turn it into a package with a few files. 
+ self.setup_files = [] + open(os.path.join(self.py2_src_dir, "__init__.py"), "w").close() + self.setup_files.append("__init__.py") + shutil.copy(PY2_TEST_MODULE, self.py2_src_dir) + self.setup_files.append(os.path.basename(PY2_TEST_MODULE)) + self.trivial_py2_file = os.path.join(self.py2_src_dir, "trivial.py") + self.init_py2_file = os.path.join(self.py2_src_dir, "__init__.py") + with open(self.trivial_py2_file, "w") as trivial: + trivial.write("print 'I need a simple conversion.'") + self.setup_files.append("trivial.py") + + def test_filename_changing_on_output_single_dir(self): + """2to3 a single directory with a new output dir and suffix.""" + self.setup_test_source_trees() + out = io.StringIO() + err = io.StringIO() + suffix = "TEST" + ret = self.run_2to3_capture( + ["-n", "--add-suffix", suffix, "--write-unchanged-files", + "--no-diffs", "--output-dir", + self.py3_dest_dir, self.py2_src_dir], + io.StringIO(""), out, err) + self.assertEqual(ret, 0) + stderr = err.getvalue() + self.assertIn(" implies -w.", stderr) + self.assertIn( + "Output in %r will mirror the input directory %r layout" % ( + self.py3_dest_dir, self.py2_src_dir), stderr) + self.assertEqual(set(name+suffix for name in self.setup_files), + set(os.listdir(self.py3_dest_dir))) + for name in self.setup_files: + self.assertIn("Writing converted %s to %s" % ( + os.path.join(self.py2_src_dir, name), + os.path.join(self.py3_dest_dir, name+suffix)), stderr) + sep = re.escape(os.sep) + self.assertRegex( + stderr, r"No changes to .*/__init__\.py".replace("/", sep)) + self.assertNotRegex( + stderr, r"No changes to .*/trivial\.py".replace("/", sep)) + + def test_filename_changing_on_output_two_files(self): + """2to3 two files in one directory with a new output dir.""" + self.setup_test_source_trees() + err = io.StringIO() + py2_files = [self.trivial_py2_file, self.init_py2_file] + expected_files = set(os.path.basename(name) for name in py2_files) + ret = self.run_2to3_capture( + ["-n", "-w", "--write-unchanged-files", + "--no-diffs", "--output-dir", self.py3_dest_dir] + py2_files, + io.StringIO(""), io.StringIO(), err) + self.assertEqual(ret, 0) + stderr = err.getvalue() + self.assertIn( + "Output in %r will mirror the input directory %r layout" % ( + self.py3_dest_dir, self.py2_src_dir), stderr) + self.assertEqual(expected_files, set(os.listdir(self.py3_dest_dir))) + + def test_filename_changing_on_output_single_file(self): + """2to3 a single file with a new output dir.""" + self.setup_test_source_trees() + err = io.StringIO() + ret = self.run_2to3_capture( + ["-n", "-w", "--no-diffs", "--output-dir", self.py3_dest_dir, + self.trivial_py2_file], + io.StringIO(""), io.StringIO(), err) + self.assertEqual(ret, 0) + stderr = err.getvalue() + self.assertIn( + "Output in %r will mirror the input directory %r layout" % ( + self.py3_dest_dir, self.py2_src_dir), stderr) + self.assertEqual(set([os.path.basename(self.trivial_py2_file)]), + set(os.listdir(self.py3_dest_dir))) + + +if __name__ == '__main__': + unittest.main() diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/test_parser.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/test_parser.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/test_parser.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/test_parser.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,718 @@ +"""Test suite for 2to3's parser and grammar files. 
+ +This is the place to add tests for changes to 2to3's grammar, such as those +merging the grammars for Python 2 and 3. In addition to specific tests for +parts of the grammar we've changed, we also make sure we can parse the +test_grammar.py files from both Python 2 and Python 3. +""" + +# Testing imports +from . import support +from .support import driver, driver_no_print_statement + +# Python imports +import difflib +import importlib +import operator +import os +import pickle +import shutil +import subprocess +import sys +import tempfile +import test.support +import unittest + +# Local imports +from lib2to3.pgen2 import driver as pgen2_driver +from lib2to3.pgen2 import tokenize +from ..pgen2.parse import ParseError +from lib2to3.pygram import python_symbols as syms + + +class TestDriver(support.TestCase): + + def test_formfeed(self): + s = """print 1\n\x0Cprint 2\n""" + t = driver.parse_string(s) + self.assertEqual(t.children[0].children[0].type, syms.print_stmt) + self.assertEqual(t.children[1].children[0].type, syms.print_stmt) + + +class TestPgen2Caching(support.TestCase): + def test_load_grammar_from_txt_file(self): + pgen2_driver.load_grammar(support.grammar_path, save=False, force=True) + + def test_load_grammar_from_pickle(self): + # Make a copy of the grammar file in a temp directory we are + # guaranteed to be able to write to. + tmpdir = tempfile.mkdtemp() + try: + grammar_copy = os.path.join( + tmpdir, os.path.basename(support.grammar_path)) + shutil.copy(support.grammar_path, grammar_copy) + pickle_name = pgen2_driver._generate_pickle_name(grammar_copy) + + pgen2_driver.load_grammar(grammar_copy, save=True, force=True) + self.assertTrue(os.path.exists(pickle_name)) + + os.unlink(grammar_copy) # Only the pickle remains... + pgen2_driver.load_grammar(grammar_copy, save=False, force=False) + finally: + shutil.rmtree(tmpdir) + + @unittest.skipIf(sys.executable is None, 'sys.executable required') + @unittest.skipIf( + sys.platform in {'emscripten', 'wasi'}, 'requires working subprocess' + ) + def test_load_grammar_from_subprocess(self): + tmpdir = tempfile.mkdtemp() + tmpsubdir = os.path.join(tmpdir, 'subdir') + try: + os.mkdir(tmpsubdir) + grammar_base = os.path.basename(support.grammar_path) + grammar_copy = os.path.join(tmpdir, grammar_base) + grammar_sub_copy = os.path.join(tmpsubdir, grammar_base) + shutil.copy(support.grammar_path, grammar_copy) + shutil.copy(support.grammar_path, grammar_sub_copy) + pickle_name = pgen2_driver._generate_pickle_name(grammar_copy) + pickle_sub_name = pgen2_driver._generate_pickle_name( + grammar_sub_copy) + self.assertNotEqual(pickle_name, pickle_sub_name) + + # Generate a pickle file from this process. + pgen2_driver.load_grammar(grammar_copy, save=True, force=True) + self.assertTrue(os.path.exists(pickle_name)) + + # Generate a new pickle file in a subprocess with a most likely + # different hash randomization seed. 
+ sub_env = dict(os.environ) + sub_env['PYTHONHASHSEED'] = 'random' + code = """ +from lib2to3.pgen2 import driver as pgen2_driver +pgen2_driver.load_grammar(%r, save=True, force=True) + """ % (grammar_sub_copy,) + cmd = [sys.executable, + '-Wignore:lib2to3:DeprecationWarning', + '-c', code] + subprocess.check_call( cmd, env=sub_env) + self.assertTrue(os.path.exists(pickle_sub_name)) + + with open(pickle_name, 'rb') as pickle_f_1, \ + open(pickle_sub_name, 'rb') as pickle_f_2: + self.assertEqual( + pickle_f_1.read(), pickle_f_2.read(), + msg='Grammar caches generated using different hash seeds' + ' were not identical.') + finally: + shutil.rmtree(tmpdir) + + def test_load_packaged_grammar(self): + modname = __name__ + '.load_test' + class MyLoader: + def get_data(self, where): + return pickle.dumps({'elephant': 19}) + class MyModule: + __file__ = 'parsertestmodule' + __spec__ = importlib.util.spec_from_loader(modname, MyLoader()) + sys.modules[modname] = MyModule() + self.addCleanup(operator.delitem, sys.modules, modname) + g = pgen2_driver.load_packaged_grammar(modname, 'Grammar.txt') + self.assertEqual(g.elephant, 19) + + +class GrammarTest(support.TestCase): + def validate(self, code): + support.parse_string(code) + + def invalid_syntax(self, code): + try: + self.validate(code) + except ParseError: + pass + else: + raise AssertionError("Syntax shouldn't have been valid") + + +class TestMatrixMultiplication(GrammarTest): + def test_matrix_multiplication_operator(self): + self.validate("a @ b") + self.validate("a @= b") + + +class TestYieldFrom(GrammarTest): + def test_yield_from(self): + self.validate("yield from x") + self.validate("(yield from x) + y") + self.invalid_syntax("yield from") + + +class TestAsyncAwait(GrammarTest): + def test_await_expr(self): + self.validate("""async def foo(): + await x + """) + + self.validate("""async def foo(): + [i async for i in b] + """) + + self.validate("""async def foo(): + {i for i in b + async for i in a if await i + for b in i} + """) + + self.validate("""async def foo(): + [await i for i in b if await c] + """) + + self.validate("""async def foo(): + [ i for i in b if c] + """) + + self.validate("""async def foo(): + + def foo(): pass + + def foo(): pass + + await x + """) + + self.validate("""async def foo(): return await a""") + + self.validate("""def foo(): + def foo(): pass + async def foo(): await x + """) + + self.invalid_syntax("await x") + self.invalid_syntax("""def foo(): + await x""") + + self.invalid_syntax("""def foo(): + def foo(): pass + async def foo(): pass + await x + """) + + def test_async_var(self): + self.validate("""async = 1""") + self.validate("""await = 1""") + self.validate("""def async(): pass""") + + def test_async_for(self): + self.validate("""async def foo(): + async for a in b: pass""") + + def test_async_with(self): + self.validate("""async def foo(): + async with a: pass""") + + self.invalid_syntax("""def foo(): + async with a: pass""") + + def test_async_generator(self): + self.validate( + """async def foo(): + return (i * 2 async for i in arange(42))""" + ) + self.validate( + """def foo(): + return (i * 2 async for i in arange(42))""" + ) + + +class TestRaiseChanges(GrammarTest): + def test_2x_style_1(self): + self.validate("raise") + + def test_2x_style_2(self): + self.validate("raise E, V") + + def test_2x_style_3(self): + self.validate("raise E, V, T") + + def test_2x_style_invalid_1(self): + self.invalid_syntax("raise E, V, T, Z") + + def test_3x_style(self): + self.validate("raise E1 from E2") + + def 
test_3x_style_invalid_1(self): + self.invalid_syntax("raise E, V from E1") + + def test_3x_style_invalid_2(self): + self.invalid_syntax("raise E from E1, E2") + + def test_3x_style_invalid_3(self): + self.invalid_syntax("raise from E1, E2") + + def test_3x_style_invalid_4(self): + self.invalid_syntax("raise E from") + + +# Modelled after Lib/test/test_grammar.py:TokenTests.test_funcdef issue2292 +# and Lib/test/text_parser.py test_list_displays, test_set_displays, +# test_dict_displays, test_argument_unpacking, ... changes. +class TestUnpackingGeneralizations(GrammarTest): + def test_mid_positional_star(self): + self.validate("""func(1, *(2, 3), 4)""") + + def test_double_star_dict_literal(self): + self.validate("""func(**{'eggs':'scrambled', 'spam':'fried'})""") + + def test_double_star_dict_literal_after_keywords(self): + self.validate("""func(spam='fried', **{'eggs':'scrambled'})""") + + def test_double_star_expression(self): + self.validate("""func(**{'a':2} or {})""") + self.validate("""func(**() or {})""") + + def test_star_expression(self): + self.validate("""func(*[] or [2])""") + + def test_list_display(self): + self.validate("""[*{2}, 3, *[4]]""") + + def test_set_display(self): + self.validate("""{*{2}, 3, *[4]}""") + + def test_dict_display_1(self): + self.validate("""{**{}}""") + + def test_dict_display_2(self): + self.validate("""{**{}, 3:4, **{5:6, 7:8}}""") + + def test_complex_star_expression(self): + self.validate("func(* [] or [1])") + + def test_complex_double_star_expression(self): + self.validate("func(**{1: 3} if False else {x: x for x in range(3)})") + + def test_argument_unpacking_1(self): + self.validate("""f(a, *b, *c, d)""") + + def test_argument_unpacking_2(self): + self.validate("""f(**a, **b)""") + + def test_argument_unpacking_3(self): + self.validate("""f(2, *a, *b, **b, **c, **d)""") + + def test_trailing_commas_1(self): + self.validate("def f(a, b): call(a, b)") + self.validate("def f(a, b,): call(a, b,)") + + def test_trailing_commas_2(self): + self.validate("def f(a, *b): call(a, *b)") + self.validate("def f(a, *b,): call(a, *b,)") + + def test_trailing_commas_3(self): + self.validate("def f(a, b=1): call(a, b=1)") + self.validate("def f(a, b=1,): call(a, b=1,)") + + def test_trailing_commas_4(self): + self.validate("def f(a, **b): call(a, **b)") + self.validate("def f(a, **b,): call(a, **b,)") + + def test_trailing_commas_5(self): + self.validate("def f(*a, b=1): call(*a, b=1)") + self.validate("def f(*a, b=1,): call(*a, b=1,)") + + def test_trailing_commas_6(self): + self.validate("def f(*a, **b): call(*a, **b)") + self.validate("def f(*a, **b,): call(*a, **b,)") + + def test_trailing_commas_7(self): + self.validate("def f(*, b=1): call(*b)") + self.validate("def f(*, b=1,): call(*b,)") + + def test_trailing_commas_8(self): + self.validate("def f(a=1, b=2): call(a=1, b=2)") + self.validate("def f(a=1, b=2,): call(a=1, b=2,)") + + def test_trailing_commas_9(self): + self.validate("def f(a=1, **b): call(a=1, **b)") + self.validate("def f(a=1, **b,): call(a=1, **b,)") + + def test_trailing_commas_lambda_1(self): + self.validate("f = lambda a, b: call(a, b)") + self.validate("f = lambda a, b,: call(a, b,)") + + def test_trailing_commas_lambda_2(self): + self.validate("f = lambda a, *b: call(a, *b)") + self.validate("f = lambda a, *b,: call(a, *b,)") + + def test_trailing_commas_lambda_3(self): + self.validate("f = lambda a, b=1: call(a, b=1)") + self.validate("f = lambda a, b=1,: call(a, b=1,)") + + def test_trailing_commas_lambda_4(self): + 
self.validate("f = lambda a, **b: call(a, **b)") + self.validate("f = lambda a, **b,: call(a, **b,)") + + def test_trailing_commas_lambda_5(self): + self.validate("f = lambda *a, b=1: call(*a, b=1)") + self.validate("f = lambda *a, b=1,: call(*a, b=1,)") + + def test_trailing_commas_lambda_6(self): + self.validate("f = lambda *a, **b: call(*a, **b)") + self.validate("f = lambda *a, **b,: call(*a, **b,)") + + def test_trailing_commas_lambda_7(self): + self.validate("f = lambda *, b=1: call(*b)") + self.validate("f = lambda *, b=1,: call(*b,)") + + def test_trailing_commas_lambda_8(self): + self.validate("f = lambda a=1, b=2: call(a=1, b=2)") + self.validate("f = lambda a=1, b=2,: call(a=1, b=2,)") + + def test_trailing_commas_lambda_9(self): + self.validate("f = lambda a=1, **b: call(a=1, **b)") + self.validate("f = lambda a=1, **b,: call(a=1, **b,)") + + +# Adapted from Python 3's Lib/test/test_grammar.py:GrammarTests.testFuncdef +class TestFunctionAnnotations(GrammarTest): + def test_1(self): + self.validate("""def f(x) -> list: pass""") + + def test_2(self): + self.validate("""def f(x:int): pass""") + + def test_3(self): + self.validate("""def f(*x:str): pass""") + + def test_4(self): + self.validate("""def f(**x:float): pass""") + + def test_5(self): + self.validate("""def f(x, y:1+2): pass""") + + def test_6(self): + self.validate("""def f(a, (b:1, c:2, d)): pass""") + + def test_7(self): + self.validate("""def f(a, (b:1, c:2, d), e:3=4, f=5, *g:6): pass""") + + def test_8(self): + s = """def f(a, (b:1, c:2, d), e:3=4, f=5, + *g:6, h:7, i=8, j:9=10, **k:11) -> 12: pass""" + self.validate(s) + + def test_9(self): + s = """def f( + a: str, + b: int, + *, + c: bool = False, + **kwargs, + ) -> None: + call(c=c, **kwargs,)""" + self.validate(s) + + def test_10(self): + s = """def f( + a: str, + ) -> None: + call(a,)""" + self.validate(s) + + def test_11(self): + s = """def f( + a: str = '', + ) -> None: + call(a=a,)""" + self.validate(s) + + def test_12(self): + s = """def f( + *args: str, + ) -> None: + call(*args,)""" + self.validate(s) + + def test_13(self): + self.validate("def f(a: str, b: int) -> None: call(a, b)") + self.validate("def f(a: str, b: int,) -> None: call(a, b,)") + + def test_14(self): + self.validate("def f(a: str, *b: int) -> None: call(a, *b)") + self.validate("def f(a: str, *b: int,) -> None: call(a, *b,)") + + def test_15(self): + self.validate("def f(a: str, b: int=1) -> None: call(a, b=1)") + self.validate("def f(a: str, b: int=1,) -> None: call(a, b=1,)") + + def test_16(self): + self.validate("def f(a: str, **b: int) -> None: call(a, **b)") + self.validate("def f(a: str, **b: int,) -> None: call(a, **b,)") + + def test_17(self): + self.validate("def f(*a: str, b: int=1) -> None: call(*a, b=1)") + self.validate("def f(*a: str, b: int=1,) -> None: call(*a, b=1,)") + + def test_18(self): + self.validate("def f(*a: str, **b: int) -> None: call(*a, **b)") + self.validate("def f(*a: str, **b: int,) -> None: call(*a, **b,)") + + def test_19(self): + self.validate("def f(*, b: int=1) -> None: call(*b)") + self.validate("def f(*, b: int=1,) -> None: call(*b,)") + + def test_20(self): + self.validate("def f(a: str='', b: int=2) -> None: call(a=a, b=2)") + self.validate("def f(a: str='', b: int=2,) -> None: call(a=a, b=2,)") + + def test_21(self): + self.validate("def f(a: str='', **b: int) -> None: call(a=a, **b)") + self.validate("def f(a: str='', **b: int,) -> None: call(a=a, **b,)") + + +# Adapted from Python 3's Lib/test/test_grammar.py:GrammarTests.test_var_annot 
+class TestVarAnnotations(GrammarTest): + def test_1(self): + self.validate("var1: int = 5") + + def test_2(self): + self.validate("var2: [int, str]") + + def test_3(self): + self.validate("def f():\n" + " st: str = 'Hello'\n" + " a.b: int = (1, 2)\n" + " return st\n") + + def test_4(self): + self.validate("def fbad():\n" + " x: int\n" + " print(x)\n") + + def test_5(self): + self.validate("class C:\n" + " x: int\n" + " s: str = 'attr'\n" + " z = 2\n" + " def __init__(self, x):\n" + " self.x: int = x\n") + + def test_6(self): + self.validate("lst: List[int] = []") + + +class TestExcept(GrammarTest): + def test_new(self): + s = """ + try: + x + except E as N: + y""" + self.validate(s) + + def test_old(self): + s = """ + try: + x + except E, N: + y""" + self.validate(s) + + +class TestStringLiterals(GrammarTest): + prefixes = ("'", '"', + "r'", 'r"', "R'", 'R"', + "u'", 'u"', "U'", 'U"', + "b'", 'b"', "B'", 'B"', + "f'", 'f"', "F'", 'F"', + "ur'", 'ur"', "Ur'", 'Ur"', + "uR'", 'uR"', "UR'", 'UR"', + "br'", 'br"', "Br'", 'Br"', + "bR'", 'bR"', "BR'", 'BR"', + "rb'", 'rb"', "Rb'", 'Rb"', + "rB'", 'rB"', "RB'", 'RB"',) + + def test_lit(self): + for pre in self.prefixes: + single = "{p}spamspamspam{s}".format(p=pre, s=pre[-1]) + self.validate(single) + triple = "{p}{s}{s}eggs{s}{s}{s}".format(p=pre, s=pre[-1]) + self.validate(triple) + + +# Adapted from Python 3's Lib/test/test_grammar.py:GrammarTests.testAtoms +class TestSetLiteral(GrammarTest): + def test_1(self): + self.validate("""x = {'one'}""") + + def test_2(self): + self.validate("""x = {'one', 1,}""") + + def test_3(self): + self.validate("""x = {'one', 'two', 'three'}""") + + def test_4(self): + self.validate("""x = {2, 3, 4,}""") + + +# Adapted from Python 3's Lib/test/test_unicode_identifiers.py and +# Lib/test/test_tokenize.py:TokenizeTest.test_non_ascii_identifiers +class TestIdentifier(GrammarTest): + def test_non_ascii_identifiers(self): + self.validate("Örter = 'places'\ngrün = 'green'") + self.validate("蟒 = a蟒 = 锦蛇 = 1") + self.validate("µ = aµ = µµ = 1") + self.validate("𝔘𝔫𝔦𝔠𝔬𝔡𝔢 = a_𝔘𝔫𝔦𝔠𝔬𝔡𝔢 = 1") + + +class TestNumericLiterals(GrammarTest): + def test_new_octal_notation(self): + self.validate("""0o7777777777777""") + self.invalid_syntax("""0o7324528887""") + + def test_new_binary_notation(self): + self.validate("""0b101010""") + self.invalid_syntax("""0b0101021""") + + +class TestClassDef(GrammarTest): + def test_new_syntax(self): + self.validate("class B(t=7): pass") + self.validate("class B(t, *args): pass") + self.validate("class B(t, **kwargs): pass") + self.validate("class B(t, *args, **kwargs): pass") + self.validate("class B(t, y=9, *args, **kwargs,): pass") + + +class TestParserIdempotency(support.TestCase): + + """A cut-down version of pytree_idempotency.py.""" + + def parse_file(self, filepath): + if test.support.verbose: + print(f"Parse file: {filepath}") + with open(filepath, "rb") as fp: + encoding = tokenize.detect_encoding(fp.readline)[0] + self.assertIsNotNone(encoding, + "can't detect encoding for %s" % filepath) + with open(filepath, "r", encoding=encoding) as fp: + source = fp.read() + try: + tree = driver.parse_string(source) + except ParseError: + try: + tree = driver_no_print_statement.parse_string(source) + except ParseError as err: + self.fail('ParseError on file %s (%s)' % (filepath, err)) + new = str(tree) + if new != source: + print(diff_texts(source, new, filepath)) + self.fail("Idempotency failed: %s" % filepath) + + def test_all_project_files(self): + for filepath in support.all_project_files(): + 
with self.subTest(filepath=filepath): + self.parse_file(filepath) + + def test_extended_unpacking(self): + driver.parse_string("a, *b, c = x\n") + driver.parse_string("[*a, b] = x\n") + driver.parse_string("(z, *y, w) = m\n") + driver.parse_string("for *z, m in d: pass\n") + + +class TestLiterals(GrammarTest): + + def validate(self, s): + driver.parse_string(support.dedent(s) + "\n\n") + + def test_multiline_bytes_literals(self): + s = """ + md5test(b"\xaa" * 80, + (b"Test Using Larger Than Block-Size Key " + b"and Larger Than One Block-Size Data"), + "6f630fad67cda0ee1fb1f562db3aa53e") + """ + self.validate(s) + + def test_multiline_bytes_tripquote_literals(self): + s = ''' + b""" + + + """ + ''' + self.validate(s) + + def test_multiline_str_literals(self): + s = """ + md5test("\xaa" * 80, + ("Test Using Larger Than Block-Size Key " + "and Larger Than One Block-Size Data"), + "6f630fad67cda0ee1fb1f562db3aa53e") + """ + self.validate(s) + + +class TestNamedAssignments(GrammarTest): + """Also known as the walrus operator.""" + + def test_named_assignment_if(self): + driver.parse_string("if f := x(): pass\n") + + def test_named_assignment_while(self): + driver.parse_string("while f := x(): pass\n") + + def test_named_assignment_generator(self): + driver.parse_string("any((lastNum := num) == 1 for num in [1, 2, 3])\n") + + def test_named_assignment_listcomp(self): + driver.parse_string("[(lastNum := num) == 1 for num in [1, 2, 3]]\n") + + +class TestPositionalOnlyArgs(GrammarTest): + + def test_one_pos_only_arg(self): + driver.parse_string("def one_pos_only_arg(a, /): pass\n") + + def test_all_markers(self): + driver.parse_string( + "def all_markers(a, b=2, /, c, d=4, *, e=5, f): pass\n") + + def test_all_with_args_and_kwargs(self): + driver.parse_string( + """def all_markers_with_args_and_kwargs( + aa, b, /, _cc, d, *args, e, f_f, **kwargs, + ): + pass\n""") + + def test_lambda_soup(self): + driver.parse_string( + "lambda a, b, /, c, d, *args, e, f, **kw: kw\n") + + def test_only_positional_or_keyword(self): + driver.parse_string("def func(a,b,/,*,g,e=3): pass\n") + + +class TestPickleableException(unittest.TestCase): + def test_ParseError(self): + err = ParseError('msg', 2, None, (1, 'context')) + for proto in range(pickle.HIGHEST_PROTOCOL + 1): + err2 = pickle.loads(pickle.dumps(err, protocol=proto)) + self.assertEqual(err.args, err2.args) + self.assertEqual(err.msg, err2.msg) + self.assertEqual(err.type, err2.type) + self.assertEqual(err.value, err2.value) + self.assertEqual(err.context, err2.context) + + +def diff_texts(a, b, filename): + a = a.splitlines() + b = b.splitlines() + return difflib.unified_diff(a, b, filename, filename, + "(original)", "(reserialized)", + lineterm="") + + +if __name__ == '__main__': + unittest.main() diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/test_pytree.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/test_pytree.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/test_pytree.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/test_pytree.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,472 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Unit tests for pytree.py. + +NOTE: Please *don't* add doc strings to individual test methods! +In verbose mode, printing of the module, class and method name is much +more helpful than printing of (the first line of) the docstring, +especially when debugging a test. 
+""" + +# Testing imports +from . import support + +from lib2to3 import pytree + +try: + sorted +except NameError: + def sorted(lst): + l = list(lst) + l.sort() + return l + +class TestNodes(support.TestCase): + + """Unit tests for nodes (Base, Leaf, Node).""" + + def test_instantiate_base(self): + if __debug__: + # Test that instantiating Base() raises an AssertionError + self.assertRaises(AssertionError, pytree.Base) + + def test_leaf(self): + l1 = pytree.Leaf(100, "foo") + self.assertEqual(l1.type, 100) + self.assertEqual(l1.value, "foo") + + def test_leaf_repr(self): + l1 = pytree.Leaf(100, "foo") + self.assertEqual(repr(l1), "Leaf(100, 'foo')") + + def test_leaf_str(self): + l1 = pytree.Leaf(100, "foo") + self.assertEqual(str(l1), "foo") + l2 = pytree.Leaf(100, "foo", context=(" ", (10, 1))) + self.assertEqual(str(l2), " foo") + + def test_leaf_str_numeric_value(self): + # Make sure that the Leaf's value is stringified. Failing to + # do this can cause a TypeError in certain situations. + l1 = pytree.Leaf(2, 5) + l1.prefix = "foo_" + self.assertEqual(str(l1), "foo_5") + + def test_leaf_equality(self): + l1 = pytree.Leaf(100, "foo") + l2 = pytree.Leaf(100, "foo", context=(" ", (1, 0))) + self.assertEqual(l1, l2) + l3 = pytree.Leaf(101, "foo") + l4 = pytree.Leaf(100, "bar") + self.assertNotEqual(l1, l3) + self.assertNotEqual(l1, l4) + + def test_leaf_prefix(self): + l1 = pytree.Leaf(100, "foo") + self.assertEqual(l1.prefix, "") + self.assertFalse(l1.was_changed) + l1.prefix = " ##\n\n" + self.assertEqual(l1.prefix, " ##\n\n") + self.assertTrue(l1.was_changed) + + def test_node(self): + l1 = pytree.Leaf(100, "foo") + l2 = pytree.Leaf(200, "bar") + n1 = pytree.Node(1000, [l1, l2]) + self.assertEqual(n1.type, 1000) + self.assertEqual(n1.children, [l1, l2]) + + def test_node_repr(self): + l1 = pytree.Leaf(100, "foo") + l2 = pytree.Leaf(100, "bar", context=(" ", (1, 0))) + n1 = pytree.Node(1000, [l1, l2]) + self.assertEqual(repr(n1), + "Node(1000, [%s, %s])" % (repr(l1), repr(l2))) + + def test_node_str(self): + l1 = pytree.Leaf(100, "foo") + l2 = pytree.Leaf(100, "bar", context=(" ", (1, 0))) + n1 = pytree.Node(1000, [l1, l2]) + self.assertEqual(str(n1), "foo bar") + + def test_node_prefix(self): + l1 = pytree.Leaf(100, "foo") + self.assertEqual(l1.prefix, "") + n1 = pytree.Node(1000, [l1]) + self.assertEqual(n1.prefix, "") + n1.prefix = " " + self.assertEqual(n1.prefix, " ") + self.assertEqual(l1.prefix, " ") + + def test_get_suffix(self): + l1 = pytree.Leaf(100, "foo", prefix="a") + l2 = pytree.Leaf(100, "bar", prefix="b") + n1 = pytree.Node(1000, [l1, l2]) + + self.assertEqual(l1.get_suffix(), l2.prefix) + self.assertEqual(l2.get_suffix(), "") + self.assertEqual(n1.get_suffix(), "") + + l3 = pytree.Leaf(100, "bar", prefix="c") + n2 = pytree.Node(1000, [n1, l3]) + + self.assertEqual(n1.get_suffix(), l3.prefix) + self.assertEqual(l3.get_suffix(), "") + self.assertEqual(n2.get_suffix(), "") + + def test_node_equality(self): + n1 = pytree.Node(1000, ()) + n2 = pytree.Node(1000, [], context=(" ", (1, 0))) + self.assertEqual(n1, n2) + n3 = pytree.Node(1001, ()) + self.assertNotEqual(n1, n3) + + def test_node_recursive_equality(self): + l1 = pytree.Leaf(100, "foo") + l2 = pytree.Leaf(100, "foo") + n1 = pytree.Node(1000, [l1]) + n2 = pytree.Node(1000, [l2]) + self.assertEqual(n1, n2) + l3 = pytree.Leaf(100, "bar") + n3 = pytree.Node(1000, [l3]) + self.assertNotEqual(n1, n3) + + def test_replace(self): + l1 = pytree.Leaf(100, "foo") + l2 = pytree.Leaf(100, "+") + l3 = pytree.Leaf(100, "bar") + n1 
= pytree.Node(1000, [l1, l2, l3]) + self.assertEqual(n1.children, [l1, l2, l3]) + self.assertIsInstance(n1.children, list) + self.assertFalse(n1.was_changed) + l2new = pytree.Leaf(100, "-") + l2.replace(l2new) + self.assertEqual(n1.children, [l1, l2new, l3]) + self.assertIsInstance(n1.children, list) + self.assertTrue(n1.was_changed) + + def test_replace_with_list(self): + l1 = pytree.Leaf(100, "foo") + l2 = pytree.Leaf(100, "+") + l3 = pytree.Leaf(100, "bar") + n1 = pytree.Node(1000, [l1, l2, l3]) + + l2.replace([pytree.Leaf(100, "*"), pytree.Leaf(100, "*")]) + self.assertEqual(str(n1), "foo**bar") + self.assertIsInstance(n1.children, list) + + def test_leaves(self): + l1 = pytree.Leaf(100, "foo") + l2 = pytree.Leaf(100, "bar") + l3 = pytree.Leaf(100, "fooey") + n2 = pytree.Node(1000, [l1, l2]) + n3 = pytree.Node(1000, [l3]) + n1 = pytree.Node(1000, [n2, n3]) + + self.assertEqual(list(n1.leaves()), [l1, l2, l3]) + + def test_depth(self): + l1 = pytree.Leaf(100, "foo") + l2 = pytree.Leaf(100, "bar") + n2 = pytree.Node(1000, [l1, l2]) + n3 = pytree.Node(1000, []) + n1 = pytree.Node(1000, [n2, n3]) + + self.assertEqual(l1.depth(), 2) + self.assertEqual(n3.depth(), 1) + self.assertEqual(n1.depth(), 0) + + def test_post_order(self): + l1 = pytree.Leaf(100, "foo") + l2 = pytree.Leaf(100, "bar") + l3 = pytree.Leaf(100, "fooey") + c1 = pytree.Node(1000, [l1, l2]) + n1 = pytree.Node(1000, [c1, l3]) + self.assertEqual(list(n1.post_order()), [l1, l2, c1, l3, n1]) + + def test_pre_order(self): + l1 = pytree.Leaf(100, "foo") + l2 = pytree.Leaf(100, "bar") + l3 = pytree.Leaf(100, "fooey") + c1 = pytree.Node(1000, [l1, l2]) + n1 = pytree.Node(1000, [c1, l3]) + self.assertEqual(list(n1.pre_order()), [n1, c1, l1, l2, l3]) + + def test_changed(self): + l1 = pytree.Leaf(100, "f") + self.assertFalse(l1.was_changed) + l1.changed() + self.assertTrue(l1.was_changed) + + l1 = pytree.Leaf(100, "f") + n1 = pytree.Node(1000, [l1]) + self.assertFalse(n1.was_changed) + n1.changed() + self.assertTrue(n1.was_changed) + + l1 = pytree.Leaf(100, "foo") + l2 = pytree.Leaf(100, "+") + l3 = pytree.Leaf(100, "bar") + n1 = pytree.Node(1000, [l1, l2, l3]) + n2 = pytree.Node(1000, [n1]) + self.assertFalse(l1.was_changed) + self.assertFalse(n1.was_changed) + self.assertFalse(n2.was_changed) + + n1.changed() + self.assertTrue(n1.was_changed) + self.assertTrue(n2.was_changed) + self.assertFalse(l1.was_changed) + + def test_leaf_constructor_prefix(self): + for prefix in ("xyz_", ""): + l1 = pytree.Leaf(100, "self", prefix=prefix) + self.assertTrue(str(l1), prefix + "self") + self.assertEqual(l1.prefix, prefix) + + def test_node_constructor_prefix(self): + for prefix in ("xyz_", ""): + l1 = pytree.Leaf(100, "self") + l2 = pytree.Leaf(100, "foo", prefix="_") + n1 = pytree.Node(1000, [l1, l2], prefix=prefix) + self.assertTrue(str(n1), prefix + "self_foo") + self.assertEqual(n1.prefix, prefix) + self.assertEqual(l1.prefix, prefix) + self.assertEqual(l2.prefix, "_") + + def test_remove(self): + l1 = pytree.Leaf(100, "foo") + l2 = pytree.Leaf(100, "foo") + n1 = pytree.Node(1000, [l1, l2]) + n2 = pytree.Node(1000, [n1]) + + self.assertEqual(n1.remove(), 0) + self.assertEqual(n2.children, []) + self.assertEqual(l1.parent, n1) + self.assertEqual(n1.parent, None) + self.assertEqual(n2.parent, None) + self.assertFalse(n1.was_changed) + self.assertTrue(n2.was_changed) + + self.assertEqual(l2.remove(), 1) + self.assertEqual(l1.remove(), 0) + self.assertEqual(n1.children, []) + self.assertEqual(l1.parent, None) + self.assertEqual(n1.parent, None) 
+ self.assertEqual(n2.parent, None) + self.assertTrue(n1.was_changed) + self.assertTrue(n2.was_changed) + + def test_remove_parentless(self): + n1 = pytree.Node(1000, []) + n1.remove() + self.assertEqual(n1.parent, None) + + l1 = pytree.Leaf(100, "foo") + l1.remove() + self.assertEqual(l1.parent, None) + + def test_node_set_child(self): + l1 = pytree.Leaf(100, "foo") + n1 = pytree.Node(1000, [l1]) + + l2 = pytree.Leaf(100, "bar") + n1.set_child(0, l2) + self.assertEqual(l1.parent, None) + self.assertEqual(l2.parent, n1) + self.assertEqual(n1.children, [l2]) + + n2 = pytree.Node(1000, [l1]) + n2.set_child(0, n1) + self.assertEqual(l1.parent, None) + self.assertEqual(n1.parent, n2) + self.assertEqual(n2.parent, None) + self.assertEqual(n2.children, [n1]) + + self.assertRaises(IndexError, n1.set_child, 4, l2) + # I don't care what it raises, so long as it's an exception + self.assertRaises(Exception, n1.set_child, 0, list) + + def test_node_insert_child(self): + l1 = pytree.Leaf(100, "foo") + n1 = pytree.Node(1000, [l1]) + + l2 = pytree.Leaf(100, "bar") + n1.insert_child(0, l2) + self.assertEqual(l2.parent, n1) + self.assertEqual(n1.children, [l2, l1]) + + l3 = pytree.Leaf(100, "abc") + n1.insert_child(2, l3) + self.assertEqual(n1.children, [l2, l1, l3]) + + # I don't care what it raises, so long as it's an exception + self.assertRaises(Exception, n1.insert_child, 0, list) + + def test_node_append_child(self): + n1 = pytree.Node(1000, []) + + l1 = pytree.Leaf(100, "foo") + n1.append_child(l1) + self.assertEqual(l1.parent, n1) + self.assertEqual(n1.children, [l1]) + + l2 = pytree.Leaf(100, "bar") + n1.append_child(l2) + self.assertEqual(l2.parent, n1) + self.assertEqual(n1.children, [l1, l2]) + + # I don't care what it raises, so long as it's an exception + self.assertRaises(Exception, n1.append_child, list) + + def test_node_next_sibling(self): + n1 = pytree.Node(1000, []) + n2 = pytree.Node(1000, []) + p1 = pytree.Node(1000, [n1, n2]) + + self.assertIs(n1.next_sibling, n2) + self.assertEqual(n2.next_sibling, None) + self.assertEqual(p1.next_sibling, None) + + def test_leaf_next_sibling(self): + l1 = pytree.Leaf(100, "a") + l2 = pytree.Leaf(100, "b") + p1 = pytree.Node(1000, [l1, l2]) + + self.assertIs(l1.next_sibling, l2) + self.assertEqual(l2.next_sibling, None) + self.assertEqual(p1.next_sibling, None) + + def test_node_prev_sibling(self): + n1 = pytree.Node(1000, []) + n2 = pytree.Node(1000, []) + p1 = pytree.Node(1000, [n1, n2]) + + self.assertIs(n2.prev_sibling, n1) + self.assertEqual(n1.prev_sibling, None) + self.assertEqual(p1.prev_sibling, None) + + def test_leaf_prev_sibling(self): + l1 = pytree.Leaf(100, "a") + l2 = pytree.Leaf(100, "b") + p1 = pytree.Node(1000, [l1, l2]) + + self.assertIs(l2.prev_sibling, l1) + self.assertEqual(l1.prev_sibling, None) + self.assertEqual(p1.prev_sibling, None) + + +class TestPatterns(support.TestCase): + + """Unit tests for tree matching patterns.""" + + def test_basic_patterns(self): + # Build a tree + l1 = pytree.Leaf(100, "foo") + l2 = pytree.Leaf(100, "bar") + l3 = pytree.Leaf(100, "foo") + n1 = pytree.Node(1000, [l1, l2]) + n2 = pytree.Node(1000, [l3]) + root = pytree.Node(1000, [n1, n2]) + # Build a pattern matching a leaf + pl = pytree.LeafPattern(100, "foo", name="pl") + r = {} + self.assertFalse(pl.match(root, results=r)) + self.assertEqual(r, {}) + self.assertFalse(pl.match(n1, results=r)) + self.assertEqual(r, {}) + self.assertFalse(pl.match(n2, results=r)) + self.assertEqual(r, {}) + self.assertTrue(pl.match(l1, results=r)) + 
self.assertEqual(r, {"pl": l1}) + r = {} + self.assertFalse(pl.match(l2, results=r)) + self.assertEqual(r, {}) + # Build a pattern matching a node + pn = pytree.NodePattern(1000, [pl], name="pn") + self.assertFalse(pn.match(root, results=r)) + self.assertEqual(r, {}) + self.assertFalse(pn.match(n1, results=r)) + self.assertEqual(r, {}) + self.assertTrue(pn.match(n2, results=r)) + self.assertEqual(r, {"pn": n2, "pl": l3}) + r = {} + self.assertFalse(pn.match(l1, results=r)) + self.assertEqual(r, {}) + self.assertFalse(pn.match(l2, results=r)) + self.assertEqual(r, {}) + + def test_wildcard(self): + # Build a tree for testing + l1 = pytree.Leaf(100, "foo") + l2 = pytree.Leaf(100, "bar") + l3 = pytree.Leaf(100, "foo") + n1 = pytree.Node(1000, [l1, l2]) + n2 = pytree.Node(1000, [l3]) + root = pytree.Node(1000, [n1, n2]) + # Build a pattern + pl = pytree.LeafPattern(100, "foo", name="pl") + pn = pytree.NodePattern(1000, [pl], name="pn") + pw = pytree.WildcardPattern([[pn], [pl, pl]], name="pw") + r = {} + self.assertFalse(pw.match_seq([root], r)) + self.assertEqual(r, {}) + self.assertFalse(pw.match_seq([n1], r)) + self.assertEqual(r, {}) + self.assertTrue(pw.match_seq([n2], r)) + # These are easier to debug + self.assertEqual(sorted(r.keys()), ["pl", "pn", "pw"]) + self.assertEqual(r["pl"], l1) + self.assertEqual(r["pn"], n2) + self.assertEqual(r["pw"], [n2]) + # But this is equivalent + self.assertEqual(r, {"pl": l1, "pn": n2, "pw": [n2]}) + r = {} + self.assertTrue(pw.match_seq([l1, l3], r)) + self.assertEqual(r, {"pl": l3, "pw": [l1, l3]}) + self.assertIs(r["pl"], l3) + r = {} + + def test_generate_matches(self): + la = pytree.Leaf(1, "a") + lb = pytree.Leaf(1, "b") + lc = pytree.Leaf(1, "c") + ld = pytree.Leaf(1, "d") + le = pytree.Leaf(1, "e") + lf = pytree.Leaf(1, "f") + leaves = [la, lb, lc, ld, le, lf] + root = pytree.Node(1000, leaves) + pa = pytree.LeafPattern(1, "a", "pa") + pb = pytree.LeafPattern(1, "b", "pb") + pc = pytree.LeafPattern(1, "c", "pc") + pd = pytree.LeafPattern(1, "d", "pd") + pe = pytree.LeafPattern(1, "e", "pe") + pf = pytree.LeafPattern(1, "f", "pf") + pw = pytree.WildcardPattern([[pa, pb, pc], [pd, pe], + [pa, pb], [pc, pd], [pe, pf]], + min=1, max=4, name="pw") + self.assertEqual([x[0] for x in pw.generate_matches(leaves)], + [3, 5, 2, 4, 6]) + pr = pytree.NodePattern(type=1000, content=[pw], name="pr") + matches = list(pytree.generate_matches([pr], [root])) + self.assertEqual(len(matches), 1) + c, r = matches[0] + self.assertEqual(c, 1) + self.assertEqual(str(r["pr"]), "abcdef") + self.assertEqual(r["pw"], [la, lb, lc, ld, le, lf]) + for c in "abcdef": + self.assertEqual(r["p" + c], pytree.Leaf(1, c)) + + def test_has_key_example(self): + pattern = pytree.NodePattern(331, + (pytree.LeafPattern(7), + pytree.WildcardPattern(name="args"), + pytree.LeafPattern(8))) + l1 = pytree.Leaf(7, "(") + l2 = pytree.Leaf(3, "x") + l3 = pytree.Leaf(8, ")") + node = pytree.Node(331, [l1, l2, l3]) + r = {} + self.assertTrue(pattern.match(node, r)) + self.assertEqual(r["args"], [l2]) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/test_refactor.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/test_refactor.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/test_refactor.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/test_refactor.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,337 @@ +""" +Unit tests for refactor.py. 
+""" + +import sys +import os +import codecs +import io +import re +import tempfile +import shutil +import unittest + +from lib2to3 import refactor, pygram, fixer_base +from lib2to3.pgen2 import token + + +TEST_DATA_DIR = os.path.join(os.path.dirname(__file__), "data") +FIXER_DIR = os.path.join(TEST_DATA_DIR, "fixers") + +sys.path.append(FIXER_DIR) +try: + _DEFAULT_FIXERS = refactor.get_fixers_from_package("myfixes") +finally: + sys.path.pop() + +_2TO3_FIXERS = refactor.get_fixers_from_package("lib2to3.fixes") + +class TestRefactoringTool(unittest.TestCase): + + def setUp(self): + sys.path.append(FIXER_DIR) + + def tearDown(self): + sys.path.pop() + + def check_instances(self, instances, classes): + for inst, cls in zip(instances, classes): + if not isinstance(inst, cls): + self.fail("%s are not instances of %s" % instances, classes) + + def rt(self, options=None, fixers=_DEFAULT_FIXERS, explicit=None): + return refactor.RefactoringTool(fixers, options, explicit) + + def test_print_function_option(self): + rt = self.rt({"print_function" : True}) + self.assertNotIn("print", rt.grammar.keywords) + self.assertNotIn("print", rt.driver.grammar.keywords) + + def test_exec_function_option(self): + rt = self.rt({"exec_function" : True}) + self.assertNotIn("exec", rt.grammar.keywords) + self.assertNotIn("exec", rt.driver.grammar.keywords) + + def test_write_unchanged_files_option(self): + rt = self.rt() + self.assertFalse(rt.write_unchanged_files) + rt = self.rt({"write_unchanged_files" : True}) + self.assertTrue(rt.write_unchanged_files) + + def test_fixer_loading_helpers(self): + contents = ["explicit", "first", "last", "parrot", "preorder"] + non_prefixed = refactor.get_all_fix_names("myfixes") + prefixed = refactor.get_all_fix_names("myfixes", False) + full_names = refactor.get_fixers_from_package("myfixes") + self.assertEqual(prefixed, ["fix_" + name for name in contents]) + self.assertEqual(non_prefixed, contents) + self.assertEqual(full_names, + ["myfixes.fix_" + name for name in contents]) + + def test_detect_future_features(self): + run = refactor._detect_future_features + fs = frozenset + empty = fs() + self.assertEqual(run(""), empty) + self.assertEqual(run("from __future__ import print_function"), + fs(("print_function",))) + self.assertEqual(run("from __future__ import generators"), + fs(("generators",))) + self.assertEqual(run("from __future__ import generators, feature"), + fs(("generators", "feature"))) + inp = "from __future__ import generators, print_function" + self.assertEqual(run(inp), fs(("generators", "print_function"))) + inp ="from __future__ import print_function, generators" + self.assertEqual(run(inp), fs(("print_function", "generators"))) + inp = "from __future__ import (print_function,)" + self.assertEqual(run(inp), fs(("print_function",))) + inp = "from __future__ import (generators, print_function)" + self.assertEqual(run(inp), fs(("generators", "print_function"))) + inp = "from __future__ import (generators, nested_scopes)" + self.assertEqual(run(inp), fs(("generators", "nested_scopes"))) + inp = """from __future__ import generators +from __future__ import print_function""" + self.assertEqual(run(inp), fs(("generators", "print_function"))) + invalid = ("from", + "from 4", + "from x", + "from x 5", + "from x im", + "from x import", + "from x import 4", + ) + for inp in invalid: + self.assertEqual(run(inp), empty) + inp = "'docstring'\nfrom __future__ import print_function" + self.assertEqual(run(inp), fs(("print_function",))) + inp = "'docstring'\n'somng'\nfrom 
__future__ import print_function" + self.assertEqual(run(inp), empty) + inp = "# comment\nfrom __future__ import print_function" + self.assertEqual(run(inp), fs(("print_function",))) + inp = "# comment\n'doc'\nfrom __future__ import print_function" + self.assertEqual(run(inp), fs(("print_function",))) + inp = "class x: pass\nfrom __future__ import print_function" + self.assertEqual(run(inp), empty) + + def test_get_headnode_dict(self): + class NoneFix(fixer_base.BaseFix): + pass + + class FileInputFix(fixer_base.BaseFix): + PATTERN = "file_input< any * >" + + class SimpleFix(fixer_base.BaseFix): + PATTERN = "'name'" + + no_head = NoneFix({}, []) + with_head = FileInputFix({}, []) + simple = SimpleFix({}, []) + d = refactor._get_headnode_dict([no_head, with_head, simple]) + top_fixes = d.pop(pygram.python_symbols.file_input) + self.assertEqual(top_fixes, [with_head, no_head]) + name_fixes = d.pop(token.NAME) + self.assertEqual(name_fixes, [simple, no_head]) + for fixes in d.values(): + self.assertEqual(fixes, [no_head]) + + def test_fixer_loading(self): + from myfixes.fix_first import FixFirst + from myfixes.fix_last import FixLast + from myfixes.fix_parrot import FixParrot + from myfixes.fix_preorder import FixPreorder + + rt = self.rt() + pre, post = rt.get_fixers() + + self.check_instances(pre, [FixPreorder]) + self.check_instances(post, [FixFirst, FixParrot, FixLast]) + + def test_naughty_fixers(self): + self.assertRaises(ImportError, self.rt, fixers=["not_here"]) + self.assertRaises(refactor.FixerError, self.rt, fixers=["no_fixer_cls"]) + self.assertRaises(refactor.FixerError, self.rt, fixers=["bad_order"]) + + def test_refactor_string(self): + rt = self.rt() + input = "def parrot(): pass\n\n" + tree = rt.refactor_string(input, "") + self.assertNotEqual(str(tree), input) + + input = "def f(): pass\n\n" + tree = rt.refactor_string(input, "") + self.assertEqual(str(tree), input) + + def test_refactor_stdin(self): + + class MyRT(refactor.RefactoringTool): + + def print_output(self, old_text, new_text, filename, equal): + results.extend([old_text, new_text, filename, equal]) + + results = [] + rt = MyRT(_DEFAULT_FIXERS) + save = sys.stdin + sys.stdin = io.StringIO("def parrot(): pass\n\n") + try: + rt.refactor_stdin() + finally: + sys.stdin = save + expected = ["def parrot(): pass\n\n", + "def cheese(): pass\n\n", + "", False] + self.assertEqual(results, expected) + + def check_file_refactoring(self, test_file, fixers=_2TO3_FIXERS, + options=None, mock_log_debug=None, + actually_write=True): + test_file = self.init_test_file(test_file) + old_contents = self.read_file(test_file) + rt = self.rt(fixers=fixers, options=options) + if mock_log_debug: + rt.log_debug = mock_log_debug + + rt.refactor_file(test_file) + self.assertEqual(old_contents, self.read_file(test_file)) + + if not actually_write: + return + rt.refactor_file(test_file, True) + new_contents = self.read_file(test_file) + self.assertNotEqual(old_contents, new_contents) + return new_contents + + def init_test_file(self, test_file): + tmpdir = tempfile.mkdtemp(prefix="2to3-test_refactor") + self.addCleanup(shutil.rmtree, tmpdir) + shutil.copy(test_file, tmpdir) + test_file = os.path.join(tmpdir, os.path.basename(test_file)) + os.chmod(test_file, 0o644) + return test_file + + def read_file(self, test_file): + with open(test_file, "rb") as fp: + return fp.read() + + def refactor_file(self, test_file, fixers=_2TO3_FIXERS): + test_file = self.init_test_file(test_file) + old_contents = self.read_file(test_file) + rt = 
self.rt(fixers=fixers) + rt.refactor_file(test_file, True) + new_contents = self.read_file(test_file) + return old_contents, new_contents + + def test_refactor_file(self): + test_file = os.path.join(FIXER_DIR, "parrot_example.py") + self.check_file_refactoring(test_file, _DEFAULT_FIXERS) + + def test_refactor_file_write_unchanged_file(self): + test_file = os.path.join(FIXER_DIR, "parrot_example.py") + debug_messages = [] + def recording_log_debug(msg, *args): + debug_messages.append(msg % args) + self.check_file_refactoring(test_file, fixers=(), + options={"write_unchanged_files": True}, + mock_log_debug=recording_log_debug, + actually_write=False) + # Testing that it logged this message when write=False was passed is + # sufficient to see that it did not bail early after "No changes". + message_regex = r"Not writing changes to .*%s" % \ + re.escape(os.sep + os.path.basename(test_file)) + for message in debug_messages: + if "Not writing changes" in message: + self.assertRegex(message, message_regex) + break + else: + self.fail("%r not matched in %r" % (message_regex, debug_messages)) + + def test_refactor_dir(self): + def check(structure, expected): + def mock_refactor_file(self, f, *args): + got.append(f) + save_func = refactor.RefactoringTool.refactor_file + refactor.RefactoringTool.refactor_file = mock_refactor_file + rt = self.rt() + got = [] + dir = tempfile.mkdtemp(prefix="2to3-test_refactor") + try: + os.mkdir(os.path.join(dir, "a_dir")) + for fn in structure: + open(os.path.join(dir, fn), "wb").close() + rt.refactor_dir(dir) + finally: + refactor.RefactoringTool.refactor_file = save_func + shutil.rmtree(dir) + self.assertEqual(got, + [os.path.join(dir, path) for path in expected]) + check([], []) + tree = ["nothing", + "hi.py", + ".dumb", + ".after.py", + "notpy.npy", + "sappy"] + expected = ["hi.py"] + check(tree, expected) + tree = ["hi.py", + os.path.join("a_dir", "stuff.py")] + check(tree, tree) + + def test_file_encoding(self): + fn = os.path.join(TEST_DATA_DIR, "different_encoding.py") + self.check_file_refactoring(fn) + + def test_false_file_encoding(self): + fn = os.path.join(TEST_DATA_DIR, "false_encoding.py") + data = self.check_file_refactoring(fn) + + def test_bom(self): + fn = os.path.join(TEST_DATA_DIR, "bom.py") + data = self.check_file_refactoring(fn) + self.assertTrue(data.startswith(codecs.BOM_UTF8)) + + def test_crlf_newlines(self): + old_sep = os.linesep + os.linesep = "\r\n" + try: + fn = os.path.join(TEST_DATA_DIR, "crlf.py") + fixes = refactor.get_fixers_from_package("lib2to3.fixes") + self.check_file_refactoring(fn, fixes) + finally: + os.linesep = old_sep + + def test_crlf_unchanged(self): + fn = os.path.join(TEST_DATA_DIR, "crlf.py") + old, new = self.refactor_file(fn) + self.assertIn(b"\r\n", old) + self.assertIn(b"\r\n", new) + self.assertNotIn(b"\r\r\n", new) + + def test_refactor_docstring(self): + rt = self.rt() + + doc = """ +>>> example() +42 +""" + out = rt.refactor_docstring(doc, "") + self.assertEqual(out, doc) + + doc = """ +>>> def parrot(): +... 
return 43 +""" + out = rt.refactor_docstring(doc, "") + self.assertNotEqual(out, doc) + + def test_explicit(self): + from myfixes.fix_explicit import FixExplicit + + rt = self.rt(fixers=["myfixes.fix_explicit"]) + self.assertEqual(len(rt.post_order), 0) + + rt = self.rt(explicit=["myfixes.fix_explicit"]) + for fix in rt.post_order: + if isinstance(fix, FixExplicit): + break + else: + self.fail("explicit fixer not loaded") diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/test_util.py python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/test_util.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/lib2to3/tests/test_util.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/lib2to3/tests/test_util.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,591 @@ +""" Test suite for the code in fixer_util """ + +# Testing imports +from . import support + +# Local imports +from lib2to3.pytree import Node, Leaf +from lib2to3 import fixer_util +from lib2to3.fixer_util import Attr, Name, Call, Comma +from lib2to3.pgen2 import token + +def parse(code, strip_levels=0): + # The topmost node is file_input, which we don't care about. + # The next-topmost node is a *_stmt node, which we also don't care about + tree = support.parse_string(code) + for i in range(strip_levels): + tree = tree.children[0] + tree.parent = None + return tree + +class MacroTestCase(support.TestCase): + def assertStr(self, node, string): + if isinstance(node, (tuple, list)): + node = Node(fixer_util.syms.simple_stmt, node) + self.assertEqual(str(node), string) + + +class Test_is_tuple(support.TestCase): + def is_tuple(self, string): + return fixer_util.is_tuple(parse(string, strip_levels=2)) + + def test_valid(self): + self.assertTrue(self.is_tuple("(a, b)")) + self.assertTrue(self.is_tuple("(a, (b, c))")) + self.assertTrue(self.is_tuple("((a, (b, c)),)")) + self.assertTrue(self.is_tuple("(a,)")) + self.assertTrue(self.is_tuple("()")) + + def test_invalid(self): + self.assertFalse(self.is_tuple("(a)")) + self.assertFalse(self.is_tuple("('foo') % (b, c)")) + + +class Test_is_list(support.TestCase): + def is_list(self, string): + return fixer_util.is_list(parse(string, strip_levels=2)) + + def test_valid(self): + self.assertTrue(self.is_list("[]")) + self.assertTrue(self.is_list("[a]")) + self.assertTrue(self.is_list("[a, b]")) + self.assertTrue(self.is_list("[a, [b, c]]")) + self.assertTrue(self.is_list("[[a, [b, c]],]")) + + def test_invalid(self): + self.assertFalse(self.is_list("[]+[]")) + + +class Test_Attr(MacroTestCase): + def test(self): + call = parse("foo()", strip_levels=2) + + self.assertStr(Attr(Name("a"), Name("b")), "a.b") + self.assertStr(Attr(call, Name("b")), "foo().b") + + def test_returns(self): + attr = Attr(Name("a"), Name("b")) + self.assertEqual(type(attr), list) + + +class Test_Name(MacroTestCase): + def test(self): + self.assertStr(Name("a"), "a") + self.assertStr(Name("foo.foo().bar"), "foo.foo().bar") + self.assertStr(Name("a", prefix="b"), "ba") + + +class Test_Call(MacroTestCase): + def _Call(self, name, args=None, prefix=None): + """Help the next test""" + children = [] + if isinstance(args, list): + for arg in args: + children.append(arg) + children.append(Comma()) + children.pop() + return Call(Name(name), children, prefix) + + def test(self): + kids = [None, + [Leaf(token.NUMBER, 1), Leaf(token.NUMBER, 2), + Leaf(token.NUMBER, 3)], + [Leaf(token.NUMBER, 1), Leaf(token.NUMBER, 3), + Leaf(token.NUMBER, 2), Leaf(token.NUMBER, 4)], + [Leaf(token.STRING, "b"), 
Leaf(token.STRING, "j", prefix=" ")] + ] + self.assertStr(self._Call("A"), "A()") + self.assertStr(self._Call("b", kids[1]), "b(1,2,3)") + self.assertStr(self._Call("a.b().c", kids[2]), "a.b().c(1,3,2,4)") + self.assertStr(self._Call("d", kids[3], prefix=" "), " d(b, j)") + + +class Test_does_tree_import(support.TestCase): + def _find_bind_rec(self, name, node): + # Search a tree for a binding -- used to find the starting + # point for these tests. + c = fixer_util.find_binding(name, node) + if c: return c + for child in node.children: + c = self._find_bind_rec(name, child) + if c: return c + + def does_tree_import(self, package, name, string): + node = parse(string) + # Find the binding of start -- that's what we'll go from + node = self._find_bind_rec('start', node) + return fixer_util.does_tree_import(package, name, node) + + def try_with(self, string): + failing_tests = (("a", "a", "from a import b"), + ("a.d", "a", "from a.d import b"), + ("d.a", "a", "from d.a import b"), + (None, "a", "import b"), + (None, "a", "import b, c, d")) + for package, name, import_ in failing_tests: + n = self.does_tree_import(package, name, import_ + "\n" + string) + self.assertFalse(n) + n = self.does_tree_import(package, name, string + "\n" + import_) + self.assertFalse(n) + + passing_tests = (("a", "a", "from a import a"), + ("x", "a", "from x import a"), + ("x", "a", "from x import b, c, a, d"), + ("x.b", "a", "from x.b import a"), + ("x.b", "a", "from x.b import b, c, a, d"), + (None, "a", "import a"), + (None, "a", "import b, c, a, d")) + for package, name, import_ in passing_tests: + n = self.does_tree_import(package, name, import_ + "\n" + string) + self.assertTrue(n) + n = self.does_tree_import(package, name, string + "\n" + import_) + self.assertTrue(n) + + def test_in_function(self): + self.try_with("def foo():\n\tbar.baz()\n\tstart=3") + +class Test_find_binding(support.TestCase): + def find_binding(self, name, string, package=None): + return fixer_util.find_binding(name, parse(string), package) + + def test_simple_assignment(self): + self.assertTrue(self.find_binding("a", "a = b")) + self.assertTrue(self.find_binding("a", "a = [b, c, d]")) + self.assertTrue(self.find_binding("a", "a = foo()")) + self.assertTrue(self.find_binding("a", "a = foo().foo.foo[6][foo]")) + self.assertFalse(self.find_binding("a", "foo = a")) + self.assertFalse(self.find_binding("a", "foo = (a, b, c)")) + + def test_tuple_assignment(self): + self.assertTrue(self.find_binding("a", "(a,) = b")) + self.assertTrue(self.find_binding("a", "(a, b, c) = [b, c, d]")) + self.assertTrue(self.find_binding("a", "(c, (d, a), b) = foo()")) + self.assertTrue(self.find_binding("a", "(a, b) = foo().foo[6][foo]")) + self.assertFalse(self.find_binding("a", "(foo, b) = (b, a)")) + self.assertFalse(self.find_binding("a", "(foo, (b, c)) = (a, b, c)")) + + def test_list_assignment(self): + self.assertTrue(self.find_binding("a", "[a] = b")) + self.assertTrue(self.find_binding("a", "[a, b, c] = [b, c, d]")) + self.assertTrue(self.find_binding("a", "[c, [d, a], b] = foo()")) + self.assertTrue(self.find_binding("a", "[a, b] = foo().foo[a][foo]")) + self.assertFalse(self.find_binding("a", "[foo, b] = (b, a)")) + self.assertFalse(self.find_binding("a", "[foo, [b, c]] = (a, b, c)")) + + def test_invalid_assignments(self): + self.assertFalse(self.find_binding("a", "foo.a = 5")) + self.assertFalse(self.find_binding("a", "foo[a] = 5")) + self.assertFalse(self.find_binding("a", "foo(a) = 5")) + self.assertFalse(self.find_binding("a", "foo(a, b) = 5")) + 
+ def test_simple_import(self): + self.assertTrue(self.find_binding("a", "import a")) + self.assertTrue(self.find_binding("a", "import b, c, a, d")) + self.assertFalse(self.find_binding("a", "import b")) + self.assertFalse(self.find_binding("a", "import b, c, d")) + + def test_from_import(self): + self.assertTrue(self.find_binding("a", "from x import a")) + self.assertTrue(self.find_binding("a", "from a import a")) + self.assertTrue(self.find_binding("a", "from x import b, c, a, d")) + self.assertTrue(self.find_binding("a", "from x.b import a")) + self.assertTrue(self.find_binding("a", "from x.b import b, c, a, d")) + self.assertFalse(self.find_binding("a", "from a import b")) + self.assertFalse(self.find_binding("a", "from a.d import b")) + self.assertFalse(self.find_binding("a", "from d.a import b")) + + def test_import_as(self): + self.assertTrue(self.find_binding("a", "import b as a")) + self.assertTrue(self.find_binding("a", "import b as a, c, a as f, d")) + self.assertFalse(self.find_binding("a", "import a as f")) + self.assertFalse(self.find_binding("a", "import b, c as f, d as e")) + + def test_from_import_as(self): + self.assertTrue(self.find_binding("a", "from x import b as a")) + self.assertTrue(self.find_binding("a", "from x import g as a, d as b")) + self.assertTrue(self.find_binding("a", "from x.b import t as a")) + self.assertTrue(self.find_binding("a", "from x.b import g as a, d")) + self.assertFalse(self.find_binding("a", "from a import b as t")) + self.assertFalse(self.find_binding("a", "from a.d import b as t")) + self.assertFalse(self.find_binding("a", "from d.a import b as t")) + + def test_simple_import_with_package(self): + self.assertTrue(self.find_binding("b", "import b")) + self.assertTrue(self.find_binding("b", "import b, c, d")) + self.assertFalse(self.find_binding("b", "import b", "b")) + self.assertFalse(self.find_binding("b", "import b, c, d", "c")) + + def test_from_import_with_package(self): + self.assertTrue(self.find_binding("a", "from x import a", "x")) + self.assertTrue(self.find_binding("a", "from a import a", "a")) + self.assertTrue(self.find_binding("a", "from x import *", "x")) + self.assertTrue(self.find_binding("a", "from x import b, c, a, d", "x")) + self.assertTrue(self.find_binding("a", "from x.b import a", "x.b")) + self.assertTrue(self.find_binding("a", "from x.b import *", "x.b")) + self.assertTrue(self.find_binding("a", "from x.b import b, c, a, d", "x.b")) + self.assertFalse(self.find_binding("a", "from a import b", "a")) + self.assertFalse(self.find_binding("a", "from a.d import b", "a.d")) + self.assertFalse(self.find_binding("a", "from d.a import b", "a.d")) + self.assertFalse(self.find_binding("a", "from x.y import *", "a.b")) + + def test_import_as_with_package(self): + self.assertFalse(self.find_binding("a", "import b.c as a", "b.c")) + self.assertFalse(self.find_binding("a", "import a as f", "f")) + self.assertFalse(self.find_binding("a", "import a as f", "a")) + + def test_from_import_as_with_package(self): + # Because it would take a lot of special-case code in the fixers + # to deal with from foo import bar as baz, we'll simply always + # fail if there is an "from ... import ... as ..." 
+ self.assertFalse(self.find_binding("a", "from x import b as a", "x")) + self.assertFalse(self.find_binding("a", "from x import g as a, d as b", "x")) + self.assertFalse(self.find_binding("a", "from x.b import t as a", "x.b")) + self.assertFalse(self.find_binding("a", "from x.b import g as a, d", "x.b")) + self.assertFalse(self.find_binding("a", "from a import b as t", "a")) + self.assertFalse(self.find_binding("a", "from a import b as t", "b")) + self.assertFalse(self.find_binding("a", "from a import b as t", "t")) + + def test_function_def(self): + self.assertTrue(self.find_binding("a", "def a(): pass")) + self.assertTrue(self.find_binding("a", "def a(b, c, d): pass")) + self.assertTrue(self.find_binding("a", "def a(): b = 7")) + self.assertFalse(self.find_binding("a", "def d(b, (c, a), e): pass")) + self.assertFalse(self.find_binding("a", "def d(a=7): pass")) + self.assertFalse(self.find_binding("a", "def d(a): pass")) + self.assertFalse(self.find_binding("a", "def d(): a = 7")) + + s = """ + def d(): + def a(): + pass""" + self.assertFalse(self.find_binding("a", s)) + + def test_class_def(self): + self.assertTrue(self.find_binding("a", "class a: pass")) + self.assertTrue(self.find_binding("a", "class a(): pass")) + self.assertTrue(self.find_binding("a", "class a(b): pass")) + self.assertTrue(self.find_binding("a", "class a(b, c=8): pass")) + self.assertFalse(self.find_binding("a", "class d: pass")) + self.assertFalse(self.find_binding("a", "class d(a): pass")) + self.assertFalse(self.find_binding("a", "class d(b, a=7): pass")) + self.assertFalse(self.find_binding("a", "class d(b, *a): pass")) + self.assertFalse(self.find_binding("a", "class d(b, **a): pass")) + self.assertFalse(self.find_binding("a", "class d: a = 7")) + + s = """ + class d(): + class a(): + pass""" + self.assertFalse(self.find_binding("a", s)) + + def test_for(self): + self.assertTrue(self.find_binding("a", "for a in r: pass")) + self.assertTrue(self.find_binding("a", "for a, b in r: pass")) + self.assertTrue(self.find_binding("a", "for (a, b) in r: pass")) + self.assertTrue(self.find_binding("a", "for c, (a,) in r: pass")) + self.assertTrue(self.find_binding("a", "for c, (a, b) in r: pass")) + self.assertTrue(self.find_binding("a", "for c in r: a = c")) + self.assertFalse(self.find_binding("a", "for c in a: pass")) + + def test_for_nested(self): + s = """ + for b in r: + for a in b: + pass""" + self.assertTrue(self.find_binding("a", s)) + + s = """ + for b in r: + for a, c in b: + pass""" + self.assertTrue(self.find_binding("a", s)) + + s = """ + for b in r: + for (a, c) in b: + pass""" + self.assertTrue(self.find_binding("a", s)) + + s = """ + for b in r: + for (a,) in b: + pass""" + self.assertTrue(self.find_binding("a", s)) + + s = """ + for b in r: + for c, (a, d) in b: + pass""" + self.assertTrue(self.find_binding("a", s)) + + s = """ + for b in r: + for c in b: + a = 7""" + self.assertTrue(self.find_binding("a", s)) + + s = """ + for b in r: + for c in b: + d = a""" + self.assertFalse(self.find_binding("a", s)) + + s = """ + for b in r: + for c in a: + d = 7""" + self.assertFalse(self.find_binding("a", s)) + + def test_if(self): + self.assertTrue(self.find_binding("a", "if b in r: a = c")) + self.assertFalse(self.find_binding("a", "if a in r: d = e")) + + def test_if_nested(self): + s = """ + if b in r: + if c in d: + a = c""" + self.assertTrue(self.find_binding("a", s)) + + s = """ + if b in r: + if c in d: + c = a""" + self.assertFalse(self.find_binding("a", s)) + + def test_while(self): + 
self.assertTrue(self.find_binding("a", "while b in r: a = c")) + self.assertFalse(self.find_binding("a", "while a in r: d = e")) + + def test_while_nested(self): + s = """ + while b in r: + while c in d: + a = c""" + self.assertTrue(self.find_binding("a", s)) + + s = """ + while b in r: + while c in d: + c = a""" + self.assertFalse(self.find_binding("a", s)) + + def test_try_except(self): + s = """ + try: + a = 6 + except: + b = 8""" + self.assertTrue(self.find_binding("a", s)) + + s = """ + try: + b = 8 + except: + a = 6""" + self.assertTrue(self.find_binding("a", s)) + + s = """ + try: + b = 8 + except KeyError: + pass + except: + a = 6""" + self.assertTrue(self.find_binding("a", s)) + + s = """ + try: + b = 8 + except: + b = 6""" + self.assertFalse(self.find_binding("a", s)) + + def test_try_except_nested(self): + s = """ + try: + try: + a = 6 + except: + pass + except: + b = 8""" + self.assertTrue(self.find_binding("a", s)) + + s = """ + try: + b = 8 + except: + try: + a = 6 + except: + pass""" + self.assertTrue(self.find_binding("a", s)) + + s = """ + try: + b = 8 + except: + try: + pass + except: + a = 6""" + self.assertTrue(self.find_binding("a", s)) + + s = """ + try: + try: + b = 8 + except KeyError: + pass + except: + a = 6 + except: + pass""" + self.assertTrue(self.find_binding("a", s)) + + s = """ + try: + pass + except: + try: + b = 8 + except KeyError: + pass + except: + a = 6""" + self.assertTrue(self.find_binding("a", s)) + + s = """ + try: + b = 8 + except: + b = 6""" + self.assertFalse(self.find_binding("a", s)) + + s = """ + try: + try: + b = 8 + except: + c = d + except: + try: + b = 6 + except: + t = 8 + except: + o = y""" + self.assertFalse(self.find_binding("a", s)) + + def test_try_except_finally(self): + s = """ + try: + c = 6 + except: + b = 8 + finally: + a = 9""" + self.assertTrue(self.find_binding("a", s)) + + s = """ + try: + b = 8 + finally: + a = 6""" + self.assertTrue(self.find_binding("a", s)) + + s = """ + try: + b = 8 + finally: + b = 6""" + self.assertFalse(self.find_binding("a", s)) + + s = """ + try: + b = 8 + except: + b = 9 + finally: + b = 6""" + self.assertFalse(self.find_binding("a", s)) + + def test_try_except_finally_nested(self): + s = """ + try: + c = 6 + except: + b = 8 + finally: + try: + a = 9 + except: + b = 9 + finally: + c = 9""" + self.assertTrue(self.find_binding("a", s)) + + s = """ + try: + b = 8 + finally: + try: + pass + finally: + a = 6""" + self.assertTrue(self.find_binding("a", s)) + + s = """ + try: + b = 8 + finally: + try: + b = 6 + finally: + b = 7""" + self.assertFalse(self.find_binding("a", s)) + +class Test_touch_import(support.TestCase): + + def test_after_docstring(self): + node = parse('"""foo"""\nbar()') + fixer_util.touch_import(None, "foo", node) + self.assertEqual(str(node), '"""foo"""\nimport foo\nbar()\n\n') + + def test_after_imports(self): + node = parse('"""foo"""\nimport bar\nbar()') + fixer_util.touch_import(None, "foo", node) + self.assertEqual(str(node), '"""foo"""\nimport bar\nimport foo\nbar()\n\n') + + def test_beginning(self): + node = parse('bar()') + fixer_util.touch_import(None, "foo", node) + self.assertEqual(str(node), 'import foo\nbar()\n\n') + + def test_from_import(self): + node = parse('bar()') + fixer_util.touch_import("html", "escape", node) + self.assertEqual(str(node), 'from html import escape\nbar()\n\n') + + def test_name_import(self): + node = parse('bar()') + fixer_util.touch_import(None, "cgi", node) + self.assertEqual(str(node), 'import cgi\nbar()\n\n') + +class 
Test_find_indentation(support.TestCase): + + def test_nothing(self): + fi = fixer_util.find_indentation + node = parse("node()") + self.assertEqual(fi(node), "") + node = parse("") + self.assertEqual(fi(node), "") + + def test_simple(self): + fi = fixer_util.find_indentation + node = parse("def f():\n x()") + self.assertEqual(fi(node), "") + self.assertEqual(fi(node.children[0].children[4].children[2]), " ") + node = parse("def f():\n x()\n y()") + self.assertEqual(fi(node.children[0].children[4].children[4]), " ") diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/__init__.py python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/__init__.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/__init__.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/__init__.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,4638 @@ +"""Wrapper functions for Tcl/Tk. + +Tkinter provides classes which allow the display, positioning and +control of widgets. Toplevel widgets are Tk and Toplevel. Other +widgets are Frame, Label, Entry, Text, Canvas, Button, Radiobutton, +Checkbutton, Scale, Listbox, Scrollbar, OptionMenu, Spinbox +LabelFrame and PanedWindow. + +Properties of the widgets are specified with keyword arguments. +Keyword arguments have the same name as the corresponding resource +under Tk. + +Widgets are positioned with one of the geometry managers Place, Pack +or Grid. These managers can be called with methods place, pack, grid +available in every Widget. + +Actions are bound to events by resources (e.g. keyword argument +command) or with the method bind. + +Example (Hello, World): +import tkinter +from tkinter.constants import * +tk = tkinter.Tk() +frame = tkinter.Frame(tk, relief=RIDGE, borderwidth=2) +frame.pack(fill=BOTH,expand=1) +label = tkinter.Label(frame, text="Hello, World") +label.pack(fill=X, expand=1) +button = tkinter.Button(frame,text="Exit",command=tk.destroy) +button.pack(side=BOTTOM) +tk.mainloop() +""" + +import collections +import enum +import sys +import types + +import _tkinter # If this fails your Python may not be configured for Tk +TclError = _tkinter.TclError +from tkinter.constants import * +import re + +wantobjects = 1 + +TkVersion = float(_tkinter.TK_VERSION) +TclVersion = float(_tkinter.TCL_VERSION) + +READABLE = _tkinter.READABLE +WRITABLE = _tkinter.WRITABLE +EXCEPTION = _tkinter.EXCEPTION + + +_magic_re = re.compile(r'([\\{}])') +_space_re = re.compile(r'([\s])', re.ASCII) + + +def _join(value): + """Internal function.""" + return ' '.join(map(_stringify, value)) + + +def _stringify(value): + """Internal function.""" + if isinstance(value, (list, tuple)): + if len(value) == 1: + value = _stringify(value[0]) + if _magic_re.search(value): + value = '{%s}' % value + else: + value = '{%s}' % _join(value) + else: + value = str(value) + if not value: + value = '{}' + elif _magic_re.search(value): + # add '\' before special characters and spaces + value = _magic_re.sub(r'\\\1', value) + value = value.replace('\n', r'\n') + value = _space_re.sub(r'\\\1', value) + if value[0] == '"': + value = '\\' + value + elif value[0] == '"' or _space_re.search(value): + value = '{%s}' % value + return value + + +def _flatten(seq): + """Internal function.""" + res = () + for item in seq: + if isinstance(item, (tuple, list)): + res = res + _flatten(item) + elif item is not None: + res = res + (item,) + return res + + +try: _flatten = _tkinter._flatten +except AttributeError: pass + + +def _cnfmerge(cnfs): + """Internal function.""" + if 
isinstance(cnfs, dict): + return cnfs + elif isinstance(cnfs, (type(None), str)): + return cnfs + else: + cnf = {} + for c in _flatten(cnfs): + try: + cnf.update(c) + except (AttributeError, TypeError) as msg: + print("_cnfmerge: fallback due to:", msg) + for k, v in c.items(): + cnf[k] = v + return cnf + + +try: _cnfmerge = _tkinter._cnfmerge +except AttributeError: pass + + +def _splitdict(tk, v, cut_minus=True, conv=None): + """Return a properly formatted dict built from Tcl list pairs. + + If cut_minus is True, the supposed '-' prefix will be removed from + keys. If conv is specified, it is used to convert values. + + Tcl list is expected to contain an even number of elements. + """ + t = tk.splitlist(v) + if len(t) % 2: + raise RuntimeError('Tcl list representing a dict is expected ' + 'to contain an even number of elements') + it = iter(t) + dict = {} + for key, value in zip(it, it): + key = str(key) + if cut_minus and key[0] == '-': + key = key[1:] + if conv: + value = conv(value) + dict[key] = value + return dict + +class _VersionInfoType(collections.namedtuple('_VersionInfoType', + ('major', 'minor', 'micro', 'releaselevel', 'serial'))): + def __str__(self): + if self.releaselevel == 'final': + return f'{self.major}.{self.minor}.{self.micro}' + else: + return f'{self.major}.{self.minor}{self.releaselevel[0]}{self.serial}' + +def _parse_version(version): + import re + m = re.fullmatch(r'(\d+)\.(\d+)([ab.])(\d+)', version) + major, minor, releaselevel, serial = m.groups() + major, minor, serial = int(major), int(minor), int(serial) + if releaselevel == '.': + micro = serial + serial = 0 + releaselevel = 'final' + else: + micro = 0 + releaselevel = {'a': 'alpha', 'b': 'beta'}[releaselevel] + return _VersionInfoType(major, minor, micro, releaselevel, serial) + + +@enum._simple_enum(enum.StrEnum) +class EventType: + KeyPress = '2' + Key = KeyPress + KeyRelease = '3' + ButtonPress = '4' + Button = ButtonPress + ButtonRelease = '5' + Motion = '6' + Enter = '7' + Leave = '8' + FocusIn = '9' + FocusOut = '10' + Keymap = '11' # undocumented + Expose = '12' + GraphicsExpose = '13' # undocumented + NoExpose = '14' # undocumented + Visibility = '15' + Create = '16' + Destroy = '17' + Unmap = '18' + Map = '19' + MapRequest = '20' + Reparent = '21' + Configure = '22' + ConfigureRequest = '23' + Gravity = '24' + ResizeRequest = '25' + Circulate = '26' + CirculateRequest = '27' + Property = '28' + SelectionClear = '29' # undocumented + SelectionRequest = '30' # undocumented + Selection = '31' # undocumented + Colormap = '32' + ClientMessage = '33' # undocumented + Mapping = '34' # undocumented + VirtualEvent = '35' # undocumented + Activate = '36' + Deactivate = '37' + MouseWheel = '38' + + +class Event: + """Container for the properties of an event. + + Instances of this type are generated if one of the following events occurs: + + KeyPress, KeyRelease - for keyboard events + ButtonPress, ButtonRelease, Motion, Enter, Leave, MouseWheel - for mouse events + Visibility, Unmap, Map, Expose, FocusIn, FocusOut, Circulate, + Colormap, Gravity, Reparent, Property, Destroy, Activate, + Deactivate - for window events. + + If a callback function for one of these events is registered + using bind, bind_all, bind_class, or tag_bind, the callback is + called with an Event as first argument. 
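# --- Editor's aside (illustrative sketch, not part of the patch) ---------------
# _VersionInfoType and _parse_version() above convert a Tcl patchlevel string
# into a named tuple.  They are private helpers; the sketch assumes a Python
# build with Tk support so that importing tkinter succeeds.
from tkinter import _parse_version

print(_parse_version("8.6.12"))
# _VersionInfoType(major=8, minor=6, micro=12, releaselevel='final', serial=0)
print(_parse_version("8.7b1"))        # releaselevel='beta', serial=1, micro=0
print(str(_parse_version("8.7b1")))   # '8.7b1' -- round-trips through __str__
# -------------------------------------------------------------------------------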
It will have the + following attributes (in braces are the event types for which + the attribute is valid): + + serial - serial number of event + num - mouse button pressed (ButtonPress, ButtonRelease) + focus - whether the window has the focus (Enter, Leave) + height - height of the exposed window (Configure, Expose) + width - width of the exposed window (Configure, Expose) + keycode - keycode of the pressed key (KeyPress, KeyRelease) + state - state of the event as a number (ButtonPress, ButtonRelease, + Enter, KeyPress, KeyRelease, + Leave, Motion) + state - state as a string (Visibility) + time - when the event occurred + x - x-position of the mouse + y - y-position of the mouse + x_root - x-position of the mouse on the screen + (ButtonPress, ButtonRelease, KeyPress, KeyRelease, Motion) + y_root - y-position of the mouse on the screen + (ButtonPress, ButtonRelease, KeyPress, KeyRelease, Motion) + char - pressed character (KeyPress, KeyRelease) + send_event - see X/Windows documentation + keysym - keysym of the event as a string (KeyPress, KeyRelease) + keysym_num - keysym of the event as a number (KeyPress, KeyRelease) + type - type of the event as a number + widget - widget in which the event occurred + delta - delta of wheel movement (MouseWheel) + """ + + def __repr__(self): + attrs = {k: v for k, v in self.__dict__.items() if v != '??'} + if not self.char: + del attrs['char'] + elif self.char != '??': + attrs['char'] = repr(self.char) + if not getattr(self, 'send_event', True): + del attrs['send_event'] + if self.state == 0: + del attrs['state'] + elif isinstance(self.state, int): + state = self.state + mods = ('Shift', 'Lock', 'Control', + 'Mod1', 'Mod2', 'Mod3', 'Mod4', 'Mod5', + 'Button1', 'Button2', 'Button3', 'Button4', 'Button5') + s = [] + for i, n in enumerate(mods): + if state & (1 << i): + s.append(n) + state = state & ~((1<< len(mods)) - 1) + if state or not s: + s.append(hex(state)) + attrs['state'] = '|'.join(s) + if self.delta == 0: + del attrs['delta'] + # widget usually is known + # serial and time are not very interesting + # keysym_num duplicates keysym + # x_root and y_root mostly duplicate x and y + keys = ('send_event', + 'state', 'keysym', 'keycode', 'char', + 'num', 'delta', 'focus', + 'x', 'y', 'width', 'height') + return '<%s event%s>' % ( + getattr(self.type, 'name', self.type), + ''.join(' %s=%s' % (k, attrs[k]) for k in keys if k in attrs) + ) + + +_support_default_root = True +_default_root = None + + +def NoDefaultRoot(): + """Inhibit setting of default root window. + + Call this function to inhibit that the first instance of + Tk is used for windows without an explicit parent window. + """ + global _support_default_root, _default_root + _support_default_root = False + # Delete, so any use of _default_root will immediately raise an exception. + # Rebind before deletion, so repeated calls will not fail. 
+ _default_root = None + del _default_root + + +def _get_default_root(what=None): + if not _support_default_root: + raise RuntimeError("No master specified and tkinter is " + "configured to not support default root") + if _default_root is None: + if what: + raise RuntimeError(f"Too early to {what}: no default root window") + root = Tk() + assert _default_root is root + return _default_root + + +def _get_temp_root(): + global _support_default_root + if not _support_default_root: + raise RuntimeError("No master specified and tkinter is " + "configured to not support default root") + root = _default_root + if root is None: + assert _support_default_root + _support_default_root = False + root = Tk() + _support_default_root = True + assert _default_root is None + root.withdraw() + root._temporary = True + return root + + +def _destroy_temp_root(master): + if getattr(master, '_temporary', False): + try: + master.destroy() + except TclError: + pass + + +def _tkerror(err): + """Internal function.""" + pass + + +def _exit(code=0): + """Internal function. Calling it will raise the exception SystemExit.""" + try: + code = int(code) + except ValueError: + pass + raise SystemExit(code) + + +_varnum = 0 + + +class Variable: + """Class to define value holders for e.g. buttons. + + Subclasses StringVar, IntVar, DoubleVar, BooleanVar are specializations + that constrain the type of the value returned from get().""" + _default = "" + _tk = None + _tclCommands = None + + def __init__(self, master=None, value=None, name=None): + """Construct a variable + + MASTER can be given as master widget. + VALUE is an optional value (defaults to "") + NAME is an optional Tcl name (defaults to PY_VARnum). + + If NAME matches an existing variable and VALUE is omitted + then the existing value is retained. 
+ """ + # check for type of NAME parameter to override weird error message + # raised from Modules/_tkinter.c:SetVar like: + # TypeError: setvar() takes exactly 3 arguments (2 given) + if name is not None and not isinstance(name, str): + raise TypeError("name must be a string") + global _varnum + if master is None: + master = _get_default_root('create variable') + self._root = master._root() + self._tk = master.tk + if name: + self._name = name + else: + self._name = 'PY_VAR' + repr(_varnum) + _varnum += 1 + if value is not None: + self.initialize(value) + elif not self._tk.getboolean(self._tk.call("info", "exists", self._name)): + self.initialize(self._default) + + def __del__(self): + """Unset the variable in Tcl.""" + if self._tk is None: + return + if self._tk.getboolean(self._tk.call("info", "exists", self._name)): + self._tk.globalunsetvar(self._name) + if self._tclCommands is not None: + for name in self._tclCommands: + #print '- Tkinter: deleted command', name + self._tk.deletecommand(name) + self._tclCommands = None + + def __str__(self): + """Return the name of the variable in Tcl.""" + return self._name + + def set(self, value): + """Set the variable to VALUE.""" + return self._tk.globalsetvar(self._name, value) + + initialize = set + + def get(self): + """Return value of variable.""" + return self._tk.globalgetvar(self._name) + + def _register(self, callback): + f = CallWrapper(callback, None, self._root).__call__ + cbname = repr(id(f)) + try: + callback = callback.__func__ + except AttributeError: + pass + try: + cbname = cbname + callback.__name__ + except AttributeError: + pass + self._tk.createcommand(cbname, f) + if self._tclCommands is None: + self._tclCommands = [] + self._tclCommands.append(cbname) + return cbname + + def trace_add(self, mode, callback): + """Define a trace callback for the variable. + + Mode is one of "read", "write", "unset", or a list or tuple of + such strings. + Callback must be a function which is called when the variable is + read, written or unset. + + Return the name of the callback. + """ + cbname = self._register(callback) + self._tk.call('trace', 'add', 'variable', + self._name, mode, (cbname,)) + return cbname + + def trace_remove(self, mode, cbname): + """Delete the trace callback for a variable. + + Mode is one of "read", "write", "unset" or a list or tuple of + such strings. Must be same as were specified in trace_add(). + cbname is the name of the callback returned from trace_add(). + """ + self._tk.call('trace', 'remove', 'variable', + self._name, mode, cbname) + for m, ca in self.trace_info(): + if self._tk.splitlist(ca)[0] == cbname: + break + else: + self._tk.deletecommand(cbname) + try: + self._tclCommands.remove(cbname) + except ValueError: + pass + + def trace_info(self): + """Return all trace callback information.""" + splitlist = self._tk.splitlist + return [(splitlist(k), v) for k, v in map(splitlist, + splitlist(self._tk.call('trace', 'info', 'variable', self._name)))] + + def trace_variable(self, mode, callback): + """Define a trace callback for the variable. + + MODE is one of "r", "w", "u" for read, write, undefine. + CALLBACK must be a function which is called when + the variable is read, written or undefined. + + Return the name of the callback. + + This deprecated method wraps a deprecated Tcl method that will + likely be removed in the future. Use trace_add() instead. 
+ """ + # TODO: Add deprecation warning + cbname = self._register(callback) + self._tk.call("trace", "variable", self._name, mode, cbname) + return cbname + + trace = trace_variable + + def trace_vdelete(self, mode, cbname): + """Delete the trace callback for a variable. + + MODE is one of "r", "w", "u" for read, write, undefine. + CBNAME is the name of the callback returned from trace_variable or trace. + + This deprecated method wraps a deprecated Tcl method that will + likely be removed in the future. Use trace_remove() instead. + """ + # TODO: Add deprecation warning + self._tk.call("trace", "vdelete", self._name, mode, cbname) + cbname = self._tk.splitlist(cbname)[0] + for m, ca in self.trace_info(): + if self._tk.splitlist(ca)[0] == cbname: + break + else: + self._tk.deletecommand(cbname) + try: + self._tclCommands.remove(cbname) + except ValueError: + pass + + def trace_vinfo(self): + """Return all trace callback information. + + This deprecated method wraps a deprecated Tcl method that will + likely be removed in the future. Use trace_info() instead. + """ + # TODO: Add deprecation warning + return [self._tk.splitlist(x) for x in self._tk.splitlist( + self._tk.call("trace", "vinfo", self._name))] + + def __eq__(self, other): + if not isinstance(other, Variable): + return NotImplemented + return (self._name == other._name + and self.__class__.__name__ == other.__class__.__name__ + and self._tk == other._tk) + + +class StringVar(Variable): + """Value holder for strings variables.""" + _default = "" + + def __init__(self, master=None, value=None, name=None): + """Construct a string variable. + + MASTER can be given as master widget. + VALUE is an optional value (defaults to "") + NAME is an optional Tcl name (defaults to PY_VARnum). + + If NAME matches an existing variable and VALUE is omitted + then the existing value is retained. + """ + Variable.__init__(self, master, value, name) + + def get(self): + """Return value of variable as string.""" + value = self._tk.globalgetvar(self._name) + if isinstance(value, str): + return value + return str(value) + + +class IntVar(Variable): + """Value holder for integer variables.""" + _default = 0 + + def __init__(self, master=None, value=None, name=None): + """Construct an integer variable. + + MASTER can be given as master widget. + VALUE is an optional value (defaults to 0) + NAME is an optional Tcl name (defaults to PY_VARnum). + + If NAME matches an existing variable and VALUE is omitted + then the existing value is retained. + """ + Variable.__init__(self, master, value, name) + + def get(self): + """Return the value of the variable as an integer.""" + value = self._tk.globalgetvar(self._name) + try: + return self._tk.getint(value) + except (TypeError, TclError): + return int(self._tk.getdouble(value)) + + +class DoubleVar(Variable): + """Value holder for float variables.""" + _default = 0.0 + + def __init__(self, master=None, value=None, name=None): + """Construct a float variable. + + MASTER can be given as master widget. + VALUE is an optional value (defaults to 0.0) + NAME is an optional Tcl name (defaults to PY_VARnum). + + If NAME matches an existing variable and VALUE is omitted + then the existing value is retained. 
+ """ + Variable.__init__(self, master, value, name) + + def get(self): + """Return the value of the variable as a float.""" + return self._tk.getdouble(self._tk.globalgetvar(self._name)) + + +class BooleanVar(Variable): + """Value holder for boolean variables.""" + _default = False + + def __init__(self, master=None, value=None, name=None): + """Construct a boolean variable. + + MASTER can be given as master widget. + VALUE is an optional value (defaults to False) + NAME is an optional Tcl name (defaults to PY_VARnum). + + If NAME matches an existing variable and VALUE is omitted + then the existing value is retained. + """ + Variable.__init__(self, master, value, name) + + def set(self, value): + """Set the variable to VALUE.""" + return self._tk.globalsetvar(self._name, self._tk.getboolean(value)) + + initialize = set + + def get(self): + """Return the value of the variable as a bool.""" + try: + return self._tk.getboolean(self._tk.globalgetvar(self._name)) + except TclError: + raise ValueError("invalid literal for getboolean()") + + +def mainloop(n=0): + """Run the main loop of Tcl.""" + _get_default_root('run the main loop').tk.mainloop(n) + + +getint = int + +getdouble = float + + +def getboolean(s): + """Convert Tcl object to True or False.""" + try: + return _get_default_root('use getboolean()').tk.getboolean(s) + except TclError: + raise ValueError("invalid literal for getboolean()") + + +# Methods defined on both toplevel and interior widgets + +class Misc: + """Internal class. + + Base class which defines methods common for interior widgets.""" + + # used for generating child widget names + _last_child_ids = None + + # XXX font command? + _tclCommands = None + + def destroy(self): + """Internal function. + + Delete all Tcl commands created for + this widget in the Tcl interpreter.""" + if self._tclCommands is not None: + for name in self._tclCommands: + #print '- Tkinter: deleted command', name + self.tk.deletecommand(name) + self._tclCommands = None + + def deletecommand(self, name): + """Internal function. + + Delete the Tcl command provided in NAME.""" + #print '- Tkinter: deleted command', name + self.tk.deletecommand(name) + try: + self._tclCommands.remove(name) + except ValueError: + pass + + def tk_strictMotif(self, boolean=None): + """Set Tcl internal variable, whether the look and feel + should adhere to Motif. + + A parameter of 1 means adhere to Motif (e.g. no color + change if mouse passes over slider). + Returns the set value.""" + return self.tk.getboolean(self.tk.call( + 'set', 'tk_strictMotif', boolean)) + + def tk_bisque(self): + """Change the color scheme to light brown as used in Tk 3.6 and before.""" + self.tk.call('tk_bisque') + + def tk_setPalette(self, *args, **kw): + """Set a new color scheme for all widget elements. + + A single color as argument will cause that all colors of Tk + widget elements are derived from this. + Alternatively several keyword parameters and its associated + colors can be given. The following keywords are valid: + activeBackground, foreground, selectColor, + activeForeground, highlightBackground, selectBackground, + background, highlightColor, selectForeground, + disabledForeground, insertBackground, troughColor.""" + self.tk.call(('tk_setPalette',) + + _flatten(args) + _flatten(list(kw.items()))) + + def wait_variable(self, name='PY_VAR'): + """Wait until the variable is modified. 
+ + A parameter of type IntVar, StringVar, DoubleVar or + BooleanVar must be given.""" + self.tk.call('tkwait', 'variable', name) + waitvar = wait_variable # XXX b/w compat + + def wait_window(self, window=None): + """Wait until a WIDGET is destroyed. + + If no parameter is given self is used.""" + if window is None: + window = self + self.tk.call('tkwait', 'window', window._w) + + def wait_visibility(self, window=None): + """Wait until the visibility of a WIDGET changes + (e.g. it appears). + + If no parameter is given self is used.""" + if window is None: + window = self + self.tk.call('tkwait', 'visibility', window._w) + + def setvar(self, name='PY_VAR', value='1'): + """Set Tcl variable NAME to VALUE.""" + self.tk.setvar(name, value) + + def getvar(self, name='PY_VAR'): + """Return value of Tcl variable NAME.""" + return self.tk.getvar(name) + + def getint(self, s): + try: + return self.tk.getint(s) + except TclError as exc: + raise ValueError(str(exc)) + + def getdouble(self, s): + try: + return self.tk.getdouble(s) + except TclError as exc: + raise ValueError(str(exc)) + + def getboolean(self, s): + """Return a boolean value for Tcl boolean values true and false given as parameter.""" + try: + return self.tk.getboolean(s) + except TclError: + raise ValueError("invalid literal for getboolean()") + + def focus_set(self): + """Direct input focus to this widget. + + If the application currently does not have the focus + this widget will get the focus if the application gets + the focus through the window manager.""" + self.tk.call('focus', self._w) + focus = focus_set # XXX b/w compat? + + def focus_force(self): + """Direct input focus to this widget even if the + application does not have the focus. Use with + caution!""" + self.tk.call('focus', '-force', self._w) + + def focus_get(self): + """Return the widget which has currently the focus in the + application. + + Use focus_displayof to allow working with several + displays. Return None if application does not have + the focus.""" + name = self.tk.call('focus') + if name == 'none' or not name: return None + return self._nametowidget(name) + + def focus_displayof(self): + """Return the widget which has currently the focus on the + display where this widget is located. + + Return None if the application does not have the focus.""" + name = self.tk.call('focus', '-displayof', self._w) + if name == 'none' or not name: return None + return self._nametowidget(name) + + def focus_lastfor(self): + """Return the widget which would have the focus if top level + for this widget gets the focus from the window manager.""" + name = self.tk.call('focus', '-lastfor', self._w) + if name == 'none' or not name: return None + return self._nametowidget(name) + + def tk_focusFollowsMouse(self): + """The widget under mouse will get automatically focus. Can not + be disabled easily.""" + self.tk.call('tk_focusFollowsMouse') + + def tk_focusNext(self): + """Return the next widget in the focus order which follows + widget which has currently the focus. + + The focus order first goes to the next child, then to + the children of the child recursively and then to the + next sibling which is higher in the stacking order. A + widget is omitted if it has the takefocus resource set + to 0.""" + name = self.tk.call('tk_focusNext', self._w) + if not name: return None + return self._nametowidget(name) + + def tk_focusPrev(self): + """Return previous widget in the focus order. 
See tk_focusNext for details.""" + name = self.tk.call('tk_focusPrev', self._w) + if not name: return None + return self._nametowidget(name) + + def after(self, ms, func=None, *args): + """Call function once after given time. + + MS specifies the time in milliseconds. FUNC gives the + function which shall be called. Additional parameters + are given as parameters to the function call. Return + identifier to cancel scheduling with after_cancel.""" + if func is None: + # I'd rather use time.sleep(ms*0.001) + self.tk.call('after', ms) + return None + else: + def callit(): + try: + func(*args) + finally: + try: + self.deletecommand(name) + except TclError: + pass + try: + callit.__name__ = func.__name__ + except AttributeError: + # Required for callable classes (bpo-44404) + callit.__name__ = type(func).__name__ + name = self._register(callit) + return self.tk.call('after', ms, name) + + def after_idle(self, func, *args): + """Call FUNC once if the Tcl main loop has no event to + process. + + Return an identifier to cancel the scheduling with + after_cancel.""" + return self.after('idle', func, *args) + + def after_cancel(self, id): + """Cancel scheduling of function identified with ID. + + Identifier returned by after or after_idle must be + given as first parameter. + """ + if not id: + raise ValueError('id must be a valid identifier returned from ' + 'after or after_idle') + try: + data = self.tk.call('after', 'info', id) + script = self.tk.splitlist(data)[0] + self.deletecommand(script) + except TclError: + pass + self.tk.call('after', 'cancel', id) + + def bell(self, displayof=0): + """Ring a display's bell.""" + self.tk.call(('bell',) + self._displayof(displayof)) + + # Clipboard handling: + def clipboard_get(self, **kw): + """Retrieve data from the clipboard on window's display. + + The window keyword defaults to the root window of the Tkinter + application. + + The type keyword specifies the form in which the data is + to be returned and should be an atom name such as STRING + or FILE_NAME. Type defaults to STRING, except on X11, where the default + is to try UTF8_STRING and fall back to STRING. + + This command is equivalent to: + + selection_get(CLIPBOARD) + """ + if 'type' not in kw and self._windowingsystem == 'x11': + try: + kw['type'] = 'UTF8_STRING' + return self.tk.call(('clipboard', 'get') + self._options(kw)) + except TclError: + del kw['type'] + return self.tk.call(('clipboard', 'get') + self._options(kw)) + + def clipboard_clear(self, **kw): + """Clear the data in the Tk clipboard. + + A widget specified for the optional displayof keyword + argument specifies the target display.""" + if 'displayof' not in kw: kw['displayof'] = self._w + self.tk.call(('clipboard', 'clear') + self._options(kw)) + + def clipboard_append(self, string, **kw): + """Append STRING to the Tk clipboard. + + A widget specified at the optional displayof keyword + argument specifies the target display. 
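# --- Editor's aside (illustrative sketch, not part of the patch) ---------------
# Scheduling with after()/after_idle()/after_cancel() as documented above.
# Assumes a display is available.
import tkinter

root = tkinter.Tk()

def tick():
    print("tick")
    root.after(1000, tick)          # a callback may reschedule itself

job = root.after(1000, tick)        # returns an id usable with after_cancel()
root.after_cancel(job)              # cancelled before it ever fires
root.after(10, root.destroy)        # end the demo almost immediately
root.mainloop()
# -------------------------------------------------------------------------------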
The clipboard + can be retrieved with selection_get.""" + if 'displayof' not in kw: kw['displayof'] = self._w + self.tk.call(('clipboard', 'append') + self._options(kw) + + ('--', string)) + # XXX grab current w/o window argument + + def grab_current(self): + """Return widget which has currently the grab in this application + or None.""" + name = self.tk.call('grab', 'current', self._w) + if not name: return None + return self._nametowidget(name) + + def grab_release(self): + """Release grab for this widget if currently set.""" + self.tk.call('grab', 'release', self._w) + + def grab_set(self): + """Set grab for this widget. + + A grab directs all events to this and descendant + widgets in the application.""" + self.tk.call('grab', 'set', self._w) + + def grab_set_global(self): + """Set global grab for this widget. + + A global grab directs all events to this and + descendant widgets on the display. Use with caution - + other applications do not get events anymore.""" + self.tk.call('grab', 'set', '-global', self._w) + + def grab_status(self): + """Return None, "local" or "global" if this widget has + no, a local or a global grab.""" + status = self.tk.call('grab', 'status', self._w) + if status == 'none': status = None + return status + + def option_add(self, pattern, value, priority = None): + """Set a VALUE (second parameter) for an option + PATTERN (first parameter). + + An optional third parameter gives the numeric priority + (defaults to 80).""" + self.tk.call('option', 'add', pattern, value, priority) + + def option_clear(self): + """Clear the option database. + + It will be reloaded if option_add is called.""" + self.tk.call('option', 'clear') + + def option_get(self, name, className): + """Return the value for an option NAME for this widget + with CLASSNAME. + + Values with higher priority override lower values.""" + return self.tk.call('option', 'get', self._w, name, className) + + def option_readfile(self, fileName, priority = None): + """Read file FILENAME into the option database. + + An optional second parameter gives the numeric + priority.""" + self.tk.call('option', 'readfile', fileName, priority) + + def selection_clear(self, **kw): + """Clear the current X selection.""" + if 'displayof' not in kw: kw['displayof'] = self._w + self.tk.call(('selection', 'clear') + self._options(kw)) + + def selection_get(self, **kw): + """Return the contents of the current X selection. + + A keyword parameter selection specifies the name of + the selection and defaults to PRIMARY. A keyword + parameter displayof specifies a widget on the display + to use. A keyword parameter type specifies the form of data to be + fetched, defaulting to STRING except on X11, where UTF8_STRING is tried + before STRING.""" + if 'displayof' not in kw: kw['displayof'] = self._w + if 'type' not in kw and self._windowingsystem == 'x11': + try: + kw['type'] = 'UTF8_STRING' + return self.tk.call(('selection', 'get') + self._options(kw)) + except TclError: + del kw['type'] + return self.tk.call(('selection', 'get') + self._options(kw)) + + def selection_handle(self, command, **kw): + """Specify a function COMMAND to call if the X + selection owned by this widget is queried by another + application. + + This function must return the contents of the + selection. The function will be called with the + arguments OFFSET and LENGTH which allows the chunking + of very long selections. 
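# --- Editor's aside (illustrative sketch, not part of the patch) ---------------
# The clipboard and selection calls above in combination; as the clipboard_get()
# docstring notes, it is equivalent to selection_get(CLIPBOARD).  Assumes a
# running display/session that owns a clipboard.
import tkinter

root = tkinter.Tk()
root.withdraw()                                    # no window needed for this
root.clipboard_clear()
root.clipboard_append("hello from tkinter")
print(root.selection_get(selection="CLIPBOARD"))   # 'hello from tkinter'
root.destroy()
# -------------------------------------------------------------------------------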
The following keyword + parameters can be provided: + selection - name of the selection (default PRIMARY), + type - type of the selection (e.g. STRING, FILE_NAME).""" + name = self._register(command) + self.tk.call(('selection', 'handle') + self._options(kw) + + (self._w, name)) + + def selection_own(self, **kw): + """Become owner of X selection. + + A keyword parameter selection specifies the name of + the selection (default PRIMARY).""" + self.tk.call(('selection', 'own') + + self._options(kw) + (self._w,)) + + def selection_own_get(self, **kw): + """Return owner of X selection. + + The following keyword parameter can + be provided: + selection - name of the selection (default PRIMARY), + type - type of the selection (e.g. STRING, FILE_NAME).""" + if 'displayof' not in kw: kw['displayof'] = self._w + name = self.tk.call(('selection', 'own') + self._options(kw)) + if not name: return None + return self._nametowidget(name) + + def send(self, interp, cmd, *args): + """Send Tcl command CMD to different interpreter INTERP to be executed.""" + return self.tk.call(('send', interp, cmd) + args) + + def lower(self, belowThis=None): + """Lower this widget in the stacking order.""" + self.tk.call('lower', self._w, belowThis) + + def tkraise(self, aboveThis=None): + """Raise this widget in the stacking order.""" + self.tk.call('raise', self._w, aboveThis) + + lift = tkraise + + def info_patchlevel(self): + """Returns the exact version of the Tcl library.""" + patchlevel = self.tk.call('info', 'patchlevel') + return _parse_version(patchlevel) + + def winfo_atom(self, name, displayof=0): + """Return integer which represents atom NAME.""" + args = ('winfo', 'atom') + self._displayof(displayof) + (name,) + return self.tk.getint(self.tk.call(args)) + + def winfo_atomname(self, id, displayof=0): + """Return name of atom with identifier ID.""" + args = ('winfo', 'atomname') \ + + self._displayof(displayof) + (id,) + return self.tk.call(args) + + def winfo_cells(self): + """Return number of cells in the colormap for this widget.""" + return self.tk.getint( + self.tk.call('winfo', 'cells', self._w)) + + def winfo_children(self): + """Return a list of all widgets which are children of this widget.""" + result = [] + for child in self.tk.splitlist( + self.tk.call('winfo', 'children', self._w)): + try: + # Tcl sometimes returns extra windows, e.g. for + # menus; those need to be skipped + result.append(self._nametowidget(child)) + except KeyError: + pass + return result + + def winfo_class(self): + """Return window class name of this widget.""" + return self.tk.call('winfo', 'class', self._w) + + def winfo_colormapfull(self): + """Return True if at the last color request the colormap was full.""" + return self.tk.getboolean( + self.tk.call('winfo', 'colormapfull', self._w)) + + def winfo_containing(self, rootX, rootY, displayof=0): + """Return the widget which is at the root coordinates ROOTX, ROOTY.""" + args = ('winfo', 'containing') \ + + self._displayof(displayof) + (rootX, rootY) + name = self.tk.call(args) + if not name: return None + return self._nametowidget(name) + + def winfo_depth(self): + """Return the number of bits per pixel.""" + return self.tk.getint(self.tk.call('winfo', 'depth', self._w)) + + def winfo_exists(self): + """Return true if this widget exists.""" + return self.tk.getint( + self.tk.call('winfo', 'exists', self._w)) + + def winfo_fpixels(self, number): + """Return the number of pixels for the given distance NUMBER + (e.g. 
"3c") as float.""" + return self.tk.getdouble(self.tk.call( + 'winfo', 'fpixels', self._w, number)) + + def winfo_geometry(self): + """Return geometry string for this widget in the form "widthxheight+X+Y".""" + return self.tk.call('winfo', 'geometry', self._w) + + def winfo_height(self): + """Return height of this widget.""" + return self.tk.getint( + self.tk.call('winfo', 'height', self._w)) + + def winfo_id(self): + """Return identifier ID for this widget.""" + return int(self.tk.call('winfo', 'id', self._w), 0) + + def winfo_interps(self, displayof=0): + """Return the name of all Tcl interpreters for this display.""" + args = ('winfo', 'interps') + self._displayof(displayof) + return self.tk.splitlist(self.tk.call(args)) + + def winfo_ismapped(self): + """Return true if this widget is mapped.""" + return self.tk.getint( + self.tk.call('winfo', 'ismapped', self._w)) + + def winfo_manager(self): + """Return the window manager name for this widget.""" + return self.tk.call('winfo', 'manager', self._w) + + def winfo_name(self): + """Return the name of this widget.""" + return self.tk.call('winfo', 'name', self._w) + + def winfo_parent(self): + """Return the name of the parent of this widget.""" + return self.tk.call('winfo', 'parent', self._w) + + def winfo_pathname(self, id, displayof=0): + """Return the pathname of the widget given by ID.""" + args = ('winfo', 'pathname') \ + + self._displayof(displayof) + (id,) + return self.tk.call(args) + + def winfo_pixels(self, number): + """Rounded integer value of winfo_fpixels.""" + return self.tk.getint( + self.tk.call('winfo', 'pixels', self._w, number)) + + def winfo_pointerx(self): + """Return the x coordinate of the pointer on the root window.""" + return self.tk.getint( + self.tk.call('winfo', 'pointerx', self._w)) + + def winfo_pointerxy(self): + """Return a tuple of x and y coordinates of the pointer on the root window.""" + return self._getints( + self.tk.call('winfo', 'pointerxy', self._w)) + + def winfo_pointery(self): + """Return the y coordinate of the pointer on the root window.""" + return self.tk.getint( + self.tk.call('winfo', 'pointery', self._w)) + + def winfo_reqheight(self): + """Return requested height of this widget.""" + return self.tk.getint( + self.tk.call('winfo', 'reqheight', self._w)) + + def winfo_reqwidth(self): + """Return requested width of this widget.""" + return self.tk.getint( + self.tk.call('winfo', 'reqwidth', self._w)) + + def winfo_rgb(self, color): + """Return a tuple of integer RGB values in range(65536) for color in this widget.""" + return self._getints( + self.tk.call('winfo', 'rgb', self._w, color)) + + def winfo_rootx(self): + """Return x coordinate of upper left corner of this widget on the + root window.""" + return self.tk.getint( + self.tk.call('winfo', 'rootx', self._w)) + + def winfo_rooty(self): + """Return y coordinate of upper left corner of this widget on the + root window.""" + return self.tk.getint( + self.tk.call('winfo', 'rooty', self._w)) + + def winfo_screen(self): + """Return the screen name of this widget.""" + return self.tk.call('winfo', 'screen', self._w) + + def winfo_screencells(self): + """Return the number of the cells in the colormap of the screen + of this widget.""" + return self.tk.getint( + self.tk.call('winfo', 'screencells', self._w)) + + def winfo_screendepth(self): + """Return the number of bits per pixel of the root window of the + screen of this widget.""" + return self.tk.getint( + self.tk.call('winfo', 'screendepth', self._w)) + + def winfo_screenheight(self): + 
"""Return the number of pixels of the height of the screen of this widget + in pixel.""" + return self.tk.getint( + self.tk.call('winfo', 'screenheight', self._w)) + + def winfo_screenmmheight(self): + """Return the number of pixels of the height of the screen of + this widget in mm.""" + return self.tk.getint( + self.tk.call('winfo', 'screenmmheight', self._w)) + + def winfo_screenmmwidth(self): + """Return the number of pixels of the width of the screen of + this widget in mm.""" + return self.tk.getint( + self.tk.call('winfo', 'screenmmwidth', self._w)) + + def winfo_screenvisual(self): + """Return one of the strings directcolor, grayscale, pseudocolor, + staticcolor, staticgray, or truecolor for the default + colormodel of this screen.""" + return self.tk.call('winfo', 'screenvisual', self._w) + + def winfo_screenwidth(self): + """Return the number of pixels of the width of the screen of + this widget in pixel.""" + return self.tk.getint( + self.tk.call('winfo', 'screenwidth', self._w)) + + def winfo_server(self): + """Return information of the X-Server of the screen of this widget in + the form "XmajorRminor vendor vendorVersion".""" + return self.tk.call('winfo', 'server', self._w) + + def winfo_toplevel(self): + """Return the toplevel widget of this widget.""" + return self._nametowidget(self.tk.call( + 'winfo', 'toplevel', self._w)) + + def winfo_viewable(self): + """Return true if the widget and all its higher ancestors are mapped.""" + return self.tk.getint( + self.tk.call('winfo', 'viewable', self._w)) + + def winfo_visual(self): + """Return one of the strings directcolor, grayscale, pseudocolor, + staticcolor, staticgray, or truecolor for the + colormodel of this widget.""" + return self.tk.call('winfo', 'visual', self._w) + + def winfo_visualid(self): + """Return the X identifier for the visual for this widget.""" + return self.tk.call('winfo', 'visualid', self._w) + + def winfo_visualsavailable(self, includeids=False): + """Return a list of all visuals available for the screen + of this widget. + + Each item in the list consists of a visual name (see winfo_visual), a + depth and if includeids is true is given also the X identifier.""" + data = self.tk.call('winfo', 'visualsavailable', self._w, + 'includeids' if includeids else None) + data = [self.tk.splitlist(x) for x in self.tk.splitlist(data)] + return [self.__winfo_parseitem(x) for x in data] + + def __winfo_parseitem(self, t): + """Internal function.""" + return t[:1] + tuple(map(self.__winfo_getint, t[1:])) + + def __winfo_getint(self, x): + """Internal function.""" + return int(x, 0) + + def winfo_vrootheight(self): + """Return the height of the virtual root window associated with this + widget in pixels. If there is no virtual root window return the + height of the screen.""" + return self.tk.getint( + self.tk.call('winfo', 'vrootheight', self._w)) + + def winfo_vrootwidth(self): + """Return the width of the virtual root window associated with this + widget in pixel. 
If there is no virtual root window return the + width of the screen.""" + return self.tk.getint( + self.tk.call('winfo', 'vrootwidth', self._w)) + + def winfo_vrootx(self): + """Return the x offset of the virtual root relative to the root + window of the screen of this widget.""" + return self.tk.getint( + self.tk.call('winfo', 'vrootx', self._w)) + + def winfo_vrooty(self): + """Return the y offset of the virtual root relative to the root + window of the screen of this widget.""" + return self.tk.getint( + self.tk.call('winfo', 'vrooty', self._w)) + + def winfo_width(self): + """Return the width of this widget.""" + return self.tk.getint( + self.tk.call('winfo', 'width', self._w)) + + def winfo_x(self): + """Return the x coordinate of the upper left corner of this widget + in the parent.""" + return self.tk.getint( + self.tk.call('winfo', 'x', self._w)) + + def winfo_y(self): + """Return the y coordinate of the upper left corner of this widget + in the parent.""" + return self.tk.getint( + self.tk.call('winfo', 'y', self._w)) + + def update(self): + """Enter event loop until all pending events have been processed by Tcl.""" + self.tk.call('update') + + def update_idletasks(self): + """Enter event loop until all idle callbacks have been called. This + will update the display of windows but not process events caused by + the user.""" + self.tk.call('update', 'idletasks') + + def bindtags(self, tagList=None): + """Set or get the list of bindtags for this widget. + + With no argument return the list of all bindtags associated with + this widget. With a list of strings as argument the bindtags are + set to this list. The bindtags determine in which order events are + processed (see bind).""" + if tagList is None: + return self.tk.splitlist( + self.tk.call('bindtags', self._w)) + else: + self.tk.call('bindtags', self._w, tagList) + + def _bind(self, what, sequence, func, add, needcleanup=1): + """Internal function.""" + if isinstance(func, str): + self.tk.call(what + (sequence, func)) + elif func: + funcid = self._register(func, self._substitute, + needcleanup) + cmd = ('%sif {"[%s %s]" == "break"} break\n' + % + (add and '+' or '', + funcid, self._subst_format_str)) + self.tk.call(what + (sequence, cmd)) + return funcid + elif sequence: + return self.tk.call(what + (sequence,)) + else: + return self.tk.splitlist(self.tk.call(what)) + + def bind(self, sequence=None, func=None, add=None): + """Bind to this widget at event SEQUENCE a call to function FUNC. + + SEQUENCE is a string of concatenated event + patterns. An event pattern is of the form + <MODIFIER-MODIFIER-TYPE-DETAIL> where MODIFIER is one + of Control, Mod2, M2, Shift, Mod3, M3, Lock, Mod4, M4, + Button1, B1, Mod5, M5 Button2, B2, Meta, M, Button3, + B3, Alt, Button4, B4, Double, Button5, B5 Triple, + Mod1, M1. TYPE is one of Activate, Enter, Map, + ButtonPress, Button, Expose, Motion, ButtonRelease + FocusIn, MouseWheel, Circulate, FocusOut, Property, + Colormap, Gravity Reparent, Configure, KeyPress, Key, + Unmap, Deactivate, KeyRelease Visibility, Destroy, + Leave and DETAIL is the button number for ButtonPress, + ButtonRelease and DETAIL is the Keysym for KeyPress and + KeyRelease. Examples are + <Control-Button-1> for pressing Control and mouse button 1 or + <Alt-A> for pressing A and the Alt key (KeyPress can be omitted). + An event pattern can also be a virtual event of the form + <<AString>> where AString can be arbitrary. This + event can be generated by event_generate. + If events are concatenated they must appear shortly + after each other.
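# --- Editor's aside (illustrative sketch, not part of the patch) ---------------
# Binding handlers to event patterns of the form described above.  Interactive:
# it assumes a display and ends when Escape is pressed.
import tkinter

root = tkinter.Tk()

def on_key(event):
    print("got", event.keysym, "state", event.state)
    if event.keysym == "Escape":
        root.destroy()

funcid = root.bind("<KeyPress>", on_key)                 # per-widget binding
root.bind("<Control-Button-1>", lambda e: print("Ctrl-click at", e.x, e.y))
# root.unbind("<KeyPress>", funcid) would remove the handler again
root.mainloop()
# -------------------------------------------------------------------------------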
+ + FUNC will be called if the event sequence occurs with an + instance of Event as argument. If the return value of FUNC is + "break" no further bound function is invoked. + + An additional boolean parameter ADD specifies whether FUNC will + be called additionally to the other bound function or whether + it will replace the previous function. + + Bind will return an identifier to allow deletion of the bound function with + unbind without memory leak. + + If FUNC or SEQUENCE is omitted the bound function or list + of bound events are returned.""" + + return self._bind(('bind', self._w), sequence, func, add) + + def unbind(self, sequence, funcid=None): + """Unbind for this widget for event SEQUENCE the + function identified with FUNCID.""" + self.tk.call('bind', self._w, sequence, '') + if funcid: + self.deletecommand(funcid) + + def bind_all(self, sequence=None, func=None, add=None): + """Bind to all widgets at an event SEQUENCE a call to function FUNC. + An additional boolean parameter ADD specifies whether FUNC will + be called additionally to the other bound function or whether + it will replace the previous function. See bind for the return value.""" + return self._bind(('bind', 'all'), sequence, func, add, 0) + + def unbind_all(self, sequence): + """Unbind for all widgets for event SEQUENCE all functions.""" + self.tk.call('bind', 'all' , sequence, '') + + def bind_class(self, className, sequence=None, func=None, add=None): + """Bind to widgets with bindtag CLASSNAME at event + SEQUENCE a call of function FUNC. An additional + boolean parameter ADD specifies whether FUNC will be + called additionally to the other bound function or + whether it will replace the previous function. See bind for + the return value.""" + + return self._bind(('bind', className), sequence, func, add, 0) + + def unbind_class(self, className, sequence): + """Unbind for all widgets with bindtag CLASSNAME for event SEQUENCE + all functions.""" + self.tk.call('bind', className , sequence, '') + + def mainloop(self, n=0): + """Call the mainloop of Tk.""" + self.tk.mainloop(n) + + def quit(self): + """Quit the Tcl interpreter. 
All widgets will be destroyed.""" + self.tk.quit() + + def _getints(self, string): + """Internal function.""" + if string: + return tuple(map(self.tk.getint, self.tk.splitlist(string))) + + def _getdoubles(self, string): + """Internal function.""" + if string: + return tuple(map(self.tk.getdouble, self.tk.splitlist(string))) + + def _getboolean(self, string): + """Internal function.""" + if string: + return self.tk.getboolean(string) + + def _displayof(self, displayof): + """Internal function.""" + if displayof: + return ('-displayof', displayof) + if displayof is None: + return ('-displayof', self._w) + return () + + @property + def _windowingsystem(self): + """Internal function.""" + try: + return self._root()._windowingsystem_cached + except AttributeError: + ws = self._root()._windowingsystem_cached = \ + self.tk.call('tk', 'windowingsystem') + return ws + + def _options(self, cnf, kw = None): + """Internal function.""" + if kw: + cnf = _cnfmerge((cnf, kw)) + else: + cnf = _cnfmerge(cnf) + res = () + for k, v in cnf.items(): + if v is not None: + if k[-1] == '_': k = k[:-1] + if callable(v): + v = self._register(v) + elif isinstance(v, (tuple, list)): + nv = [] + for item in v: + if isinstance(item, int): + nv.append(str(item)) + elif isinstance(item, str): + nv.append(_stringify(item)) + else: + break + else: + v = ' '.join(nv) + res = res + ('-'+k, v) + return res + + def nametowidget(self, name): + """Return the Tkinter instance of a widget identified by + its Tcl name NAME.""" + name = str(name).split('.') + w = self + + if not name[0]: + w = w._root() + name = name[1:] + + for n in name: + if not n: + break + w = w.children[n] + + return w + + _nametowidget = nametowidget + + def _register(self, func, subst=None, needcleanup=1): + """Return a newly created Tcl function. If this + function is called, the Python function FUNC will + be executed. An optional function SUBST can + be given which will be executed before FUNC.""" + f = CallWrapper(func, subst, self).__call__ + name = repr(id(f)) + try: + func = func.__func__ + except AttributeError: + pass + try: + name = name + func.__name__ + except AttributeError: + pass + self.tk.createcommand(name, f) + if needcleanup: + if self._tclCommands is None: + self._tclCommands = [] + self._tclCommands.append(name) + return name + + register = _register + + def _root(self): + """Internal function.""" + w = self + while w.master is not None: w = w.master + return w + _subst_format = ('%#', '%b', '%f', '%h', '%k', + '%s', '%t', '%w', '%x', '%y', + '%A', '%E', '%K', '%N', '%W', '%T', '%X', '%Y', '%D') + _subst_format_str = " ".join(_subst_format) + + def _substitute(self, *args): + """Internal function.""" + if len(args) != len(self._subst_format): return args + getboolean = self.tk.getboolean + + getint = self.tk.getint + def getint_event(s): + """Tk changed behavior in 8.4.2, returning "??" 
rather more often.""" + try: + return getint(s) + except (ValueError, TclError): + return s + + nsign, b, f, h, k, s, t, w, x, y, A, E, K, N, W, T, X, Y, D = args + # Missing: (a, c, d, m, o, v, B, R) + e = Event() + # serial field: valid for all events + # number of button: ButtonPress and ButtonRelease events only + # height field: Configure, ConfigureRequest, Create, + # ResizeRequest, and Expose events only + # keycode field: KeyPress and KeyRelease events only + # time field: "valid for events that contain a time field" + # width field: Configure, ConfigureRequest, Create, ResizeRequest, + # and Expose events only + # x field: "valid for events that contain an x field" + # y field: "valid for events that contain a y field" + # keysym as decimal: KeyPress and KeyRelease events only + # x_root, y_root fields: ButtonPress, ButtonRelease, KeyPress, + # KeyRelease, and Motion events + e.serial = getint(nsign) + e.num = getint_event(b) + try: e.focus = getboolean(f) + except TclError: pass + e.height = getint_event(h) + e.keycode = getint_event(k) + e.state = getint_event(s) + e.time = getint_event(t) + e.width = getint_event(w) + e.x = getint_event(x) + e.y = getint_event(y) + e.char = A + try: e.send_event = getboolean(E) + except TclError: pass + e.keysym = K + e.keysym_num = getint_event(N) + try: + e.type = EventType(T) + except ValueError: + e.type = T + try: + e.widget = self._nametowidget(W) + except KeyError: + e.widget = W + e.x_root = getint_event(X) + e.y_root = getint_event(Y) + try: + e.delta = getint(D) + except (ValueError, TclError): + e.delta = 0 + return (e,) + + def _report_exception(self): + """Internal function.""" + exc, val, tb = sys.exc_info() + root = self._root() + root.report_callback_exception(exc, val, tb) + + def _getconfigure(self, *args): + """Call Tcl configure command and return the result as a dict.""" + cnf = {} + for x in self.tk.splitlist(self.tk.call(*args)): + x = self.tk.splitlist(x) + cnf[x[0][1:]] = (x[0][1:],) + x[1:] + return cnf + + def _getconfigure1(self, *args): + x = self.tk.splitlist(self.tk.call(*args)) + return (x[0][1:],) + x[1:] + + def _configure(self, cmd, cnf, kw): + """Internal function.""" + if kw: + cnf = _cnfmerge((cnf, kw)) + elif cnf: + cnf = _cnfmerge(cnf) + if cnf is None: + return self._getconfigure(_flatten((self._w, cmd))) + if isinstance(cnf, str): + return self._getconfigure1(_flatten((self._w, cmd, '-'+cnf))) + self.tk.call(_flatten((self._w, cmd)) + self._options(cnf)) + # These used to be defined in Widget: + + def configure(self, cnf=None, **kw): + """Configure resources of a widget. + + The values for resources are specified as keyword + arguments. To get an overview about + the allowed keyword arguments call the method keys. 
+ """ + return self._configure('configure', cnf, kw) + + config = configure + + def cget(self, key): + """Return the resource value for a KEY given as string.""" + return self.tk.call(self._w, 'cget', '-' + key) + + __getitem__ = cget + + def __setitem__(self, key, value): + self.configure({key: value}) + + def keys(self): + """Return a list of all resource names of this widget.""" + splitlist = self.tk.splitlist + return [splitlist(x)[0][1:] for x in + splitlist(self.tk.call(self._w, 'configure'))] + + def __str__(self): + """Return the window path name of this widget.""" + return self._w + + def __repr__(self): + return '<%s.%s object %s>' % ( + self.__class__.__module__, self.__class__.__qualname__, self._w) + + # Pack methods that apply to the master + _noarg_ = ['_noarg_'] + + def pack_propagate(self, flag=_noarg_): + """Set or get the status for propagation of geometry information. + + A boolean argument specifies whether the geometry information + of the slaves will determine the size of this widget. If no argument + is given the current setting will be returned. + """ + if flag is Misc._noarg_: + return self._getboolean(self.tk.call( + 'pack', 'propagate', self._w)) + else: + self.tk.call('pack', 'propagate', self._w, flag) + + propagate = pack_propagate + + def pack_slaves(self): + """Return a list of all slaves of this widget + in its packing order.""" + return [self._nametowidget(x) for x in + self.tk.splitlist( + self.tk.call('pack', 'slaves', self._w))] + + slaves = pack_slaves + + # Place method that applies to the master + def place_slaves(self): + """Return a list of all slaves of this widget + in its packing order.""" + return [self._nametowidget(x) for x in + self.tk.splitlist( + self.tk.call( + 'place', 'slaves', self._w))] + + # Grid methods that apply to the master + + def grid_anchor(self, anchor=None): # new in Tk 8.5 + """The anchor value controls how to place the grid within the + master when no row/column has any weight. + + The default anchor is nw.""" + self.tk.call('grid', 'anchor', self._w, anchor) + + anchor = grid_anchor + + def grid_bbox(self, column=None, row=None, col2=None, row2=None): + """Return a tuple of integer coordinates for the bounding + box of this widget controlled by the geometry manager grid. + + If COLUMN, ROW is given the bounding box applies from + the cell with row and column 0 to the specified + cell. If COL2 and ROW2 are given the bounding box + starts at that cell. + + The returned integers specify the offset of the upper left + corner in the master widget and the width and height. + """ + args = ('grid', 'bbox', self._w) + if column is not None and row is not None: + args = args + (column, row) + if col2 is not None and row2 is not None: + args = args + (col2, row2) + return self._getints(self.tk.call(*args)) or None + + bbox = grid_bbox + + def _gridconvvalue(self, value): + if isinstance(value, (str, _tkinter.Tcl_Obj)): + try: + svalue = str(value) + if not svalue: + return None + elif '.' 
in svalue: + return self.tk.getdouble(svalue) + else: + return self.tk.getint(svalue) + except (ValueError, TclError): + pass + return value + + def _grid_configure(self, command, index, cnf, kw): + """Internal function.""" + if isinstance(cnf, str) and not kw: + if cnf[-1:] == '_': + cnf = cnf[:-1] + if cnf[:1] != '-': + cnf = '-'+cnf + options = (cnf,) + else: + options = self._options(cnf, kw) + if not options: + return _splitdict( + self.tk, + self.tk.call('grid', command, self._w, index), + conv=self._gridconvvalue) + res = self.tk.call( + ('grid', command, self._w, index) + + options) + if len(options) == 1: + return self._gridconvvalue(res) + + def grid_columnconfigure(self, index, cnf={}, **kw): + """Configure column INDEX of a grid. + + Valid resources are minsize (minimum size of the column), + weight (how much does additional space propagate to this column) + and pad (how much space to let additionally).""" + return self._grid_configure('columnconfigure', index, cnf, kw) + + columnconfigure = grid_columnconfigure + + def grid_location(self, x, y): + """Return a tuple of column and row which identify the cell + at which the pixel at position X and Y inside the master + widget is located.""" + return self._getints( + self.tk.call( + 'grid', 'location', self._w, x, y)) or None + + def grid_propagate(self, flag=_noarg_): + """Set or get the status for propagation of geometry information. + + A boolean argument specifies whether the geometry information + of the slaves will determine the size of this widget. If no argument + is given, the current setting will be returned. + """ + if flag is Misc._noarg_: + return self._getboolean(self.tk.call( + 'grid', 'propagate', self._w)) + else: + self.tk.call('grid', 'propagate', self._w, flag) + + def grid_rowconfigure(self, index, cnf={}, **kw): + """Configure row INDEX of a grid. + + Valid resources are minsize (minimum size of the row), + weight (how much does additional space propagate to this row) + and pad (how much space to let additionally).""" + return self._grid_configure('rowconfigure', index, cnf, kw) + + rowconfigure = grid_rowconfigure + + def grid_size(self): + """Return a tuple of the number of column and rows in the grid.""" + return self._getints( + self.tk.call('grid', 'size', self._w)) or None + + size = grid_size + + def grid_slaves(self, row=None, column=None): + """Return a list of all slaves of this widget + in its packing order.""" + args = () + if row is not None: + args = args + ('-row', row) + if column is not None: + args = args + ('-column', column) + return [self._nametowidget(x) for x in + self.tk.splitlist(self.tk.call( + ('grid', 'slaves', self._w) + args))] + + # Support for the "event" command, new in Tk 4.2. + # By Case Roole. + + def event_add(self, virtual, *sequences): + """Bind a virtual event VIRTUAL (of the form <>) + to an event SEQUENCE such that the virtual event is triggered + whenever SEQUENCE occurs.""" + args = ('event', 'add', virtual) + sequences + self.tk.call(args) + + def event_delete(self, virtual, *sequences): + """Unbind a virtual event VIRTUAL from SEQUENCE.""" + args = ('event', 'delete', virtual) + sequences + self.tk.call(args) + + def event_generate(self, sequence, **kw): + """Generate an event SEQUENCE. Additional + keyword arguments specify parameter of the event + (e.g. 
x, y, rootx, rooty).""" + args = ('event', 'generate', self._w, sequence) + for k, v in kw.items(): + args = args + ('-%s' % k, str(v)) + self.tk.call(args) + + def event_info(self, virtual=None): + """Return a list of all virtual events or the information + about the SEQUENCE bound to the virtual event VIRTUAL.""" + return self.tk.splitlist( + self.tk.call('event', 'info', virtual)) + + # Image related commands + + def image_names(self): + """Return a list of all existing image names.""" + return self.tk.splitlist(self.tk.call('image', 'names')) + + def image_types(self): + """Return a list of all available image types (e.g. photo bitmap).""" + return self.tk.splitlist(self.tk.call('image', 'types')) + + +class CallWrapper: + """Internal class. Stores function to call when some user + defined Tcl function is called e.g. after an event occurred.""" + + def __init__(self, func, subst, widget): + """Store FUNC, SUBST and WIDGET as members.""" + self.func = func + self.subst = subst + self.widget = widget + + def __call__(self, *args): + """Apply first function SUBST to arguments, than FUNC.""" + try: + if self.subst: + args = self.subst(*args) + return self.func(*args) + except SystemExit: + raise + except: + self.widget._report_exception() + + +class XView: + """Mix-in class for querying and changing the horizontal position + of a widget's window.""" + + def xview(self, *args): + """Query and change the horizontal position of the view.""" + res = self.tk.call(self._w, 'xview', *args) + if not args: + return self._getdoubles(res) + + def xview_moveto(self, fraction): + """Adjusts the view in the window so that FRACTION of the + total width of the canvas is off-screen to the left.""" + self.tk.call(self._w, 'xview', 'moveto', fraction) + + def xview_scroll(self, number, what): + """Shift the x-view according to NUMBER which is measured in "units" + or "pages" (WHAT).""" + self.tk.call(self._w, 'xview', 'scroll', number, what) + + +class YView: + """Mix-in class for querying and changing the vertical position + of a widget's window.""" + + def yview(self, *args): + """Query and change the vertical position of the view.""" + res = self.tk.call(self._w, 'yview', *args) + if not args: + return self._getdoubles(res) + + def yview_moveto(self, fraction): + """Adjusts the view in the window so that FRACTION of the + total height of the canvas is off-screen to the top.""" + self.tk.call(self._w, 'yview', 'moveto', fraction) + + def yview_scroll(self, number, what): + """Shift the y-view according to NUMBER which is measured in + "units" or "pages" (WHAT).""" + self.tk.call(self._w, 'yview', 'scroll', number, what) + + +class Wm: + """Provides functions for the communication with the window manager.""" + + def wm_aspect(self, + minNumer=None, minDenom=None, + maxNumer=None, maxDenom=None): + """Instruct the window manager to set the aspect ratio (width/height) + of this widget to be between MINNUMER/MINDENOM and MAXNUMER/MAXDENOM. Return a tuple + of the actual values if no argument is given.""" + return self._getints( + self.tk.call('wm', 'aspect', self._w, + minNumer, minDenom, + maxNumer, maxDenom)) + + aspect = wm_aspect + + def wm_attributes(self, *args): + """This subcommand returns or sets platform specific attributes + + The first form returns a list of the platform specific flags and + their values. The second form returns the value for the specific + option. The third form sets one or more of the values. 
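The event_add/event_generate/event_info methods documented above are easiest to see in a small usage sketch; the <<Commit>> virtual-event name and the print callback are illustrative only, not part of the patched module.

    import tkinter

    root = tkinter.Tk()
    entry = tkinter.Entry(root)
    entry.pack()
    # Define a virtual event that fires on either Return or keypad Enter.
    root.event_add('<<Commit>>', '<Return>', '<KP_Enter>')
    entry.bind('<<Commit>>', lambda event: print('committed:', entry.get()))
    # event_info lists the sequences bound to the virtual event,
    # and event_generate raises it programmatically.
    print(root.event_info('<<Commit>>'))
    entry.event_generate('<<Commit>>')
    root.mainloop()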
The values + are as follows: + + On Windows, -disabled gets or sets whether the window is in a + disabled state. -toolwindow gets or sets the style of the window + to toolwindow (as defined in the MSDN). -topmost gets or sets + whether this is a topmost window (displays above all other + windows). + + On Macintosh, XXXXX + + On Unix, there are currently no special attribute values. + """ + args = ('wm', 'attributes', self._w) + args + return self.tk.call(args) + + attributes = wm_attributes + + def wm_client(self, name=None): + """Store NAME in WM_CLIENT_MACHINE property of this widget. Return + current value.""" + return self.tk.call('wm', 'client', self._w, name) + + client = wm_client + + def wm_colormapwindows(self, *wlist): + """Store list of window names (WLIST) into WM_COLORMAPWINDOWS property + of this widget. This list contains windows whose colormaps differ from their + parents. Return current list of widgets if WLIST is empty.""" + if len(wlist) > 1: + wlist = (wlist,) # Tk needs a list of windows here + args = ('wm', 'colormapwindows', self._w) + wlist + if wlist: + self.tk.call(args) + else: + return [self._nametowidget(x) + for x in self.tk.splitlist(self.tk.call(args))] + + colormapwindows = wm_colormapwindows + + def wm_command(self, value=None): + """Store VALUE in WM_COMMAND property. It is the command + which shall be used to invoke the application. Return current + command if VALUE is None.""" + return self.tk.call('wm', 'command', self._w, value) + + command = wm_command + + def wm_deiconify(self): + """Deiconify this widget. If it was never mapped it will not be mapped. + On Windows it will raise this widget and give it the focus.""" + return self.tk.call('wm', 'deiconify', self._w) + + deiconify = wm_deiconify + + def wm_focusmodel(self, model=None): + """Set focus model to MODEL. "active" means that this widget will claim + the focus itself, "passive" means that the window manager shall give + the focus. Return current focus model if MODEL is None.""" + return self.tk.call('wm', 'focusmodel', self._w, model) + + focusmodel = wm_focusmodel + + def wm_forget(self, window): # new in Tk 8.5 + """The window will be unmapped from the screen and will no longer + be managed by wm. toplevel windows will be treated like frame + windows once they are no longer managed by wm, however, the menu + option configuration will be remembered and the menus will return + once the widget is managed again.""" + self.tk.call('wm', 'forget', window) + + forget = wm_forget + + def wm_frame(self): + """Return identifier for decorative frame of this widget if present.""" + return self.tk.call('wm', 'frame', self._w) + + frame = wm_frame + + def wm_geometry(self, newGeometry=None): + """Set geometry to NEWGEOMETRY of the form =widthxheight+x+y. Return + current value if None is given.""" + return self.tk.call('wm', 'geometry', self._w, newGeometry) + + geometry = wm_geometry + + def wm_grid(self, + baseWidth=None, baseHeight=None, + widthInc=None, heightInc=None): + """Instruct the window manager that this widget shall only be + resized on grid boundaries. WIDTHINC and HEIGHTINC are the width and + height of a grid unit in pixels. BASEWIDTH and BASEHEIGHT are the + number of grid units requested in Tk_GeometryRequest.""" + return self._getints(self.tk.call( + 'wm', 'grid', self._w, + baseWidth, baseHeight, widthInc, heightInc)) + + grid = wm_grid + + def wm_group(self, pathName=None): + """Set the group leader widgets for related widgets to PATHNAME. 
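As a rough illustration of the wm_geometry and wm_attributes wrappers, a minimal sketch; which attribute flags exist is platform specific, so treat -topmost as an assumption rather than a guarantee.

    import tkinter

    root = tkinter.Tk()
    # Request a 400x300 window placed 100 pixels from the screen's top-left corner.
    root.wm_geometry('400x300+100+100')
    # With no arguments wm_attributes lists the platform flags and their values;
    # with a flag and a value it sets one (-topmost keeps the window above
    # others on platforms that support it).
    print(root.wm_attributes())
    root.wm_attributes('-topmost', True)
    root.mainloop()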
Return + the group leader of this widget if None is given.""" + return self.tk.call('wm', 'group', self._w, pathName) + + group = wm_group + + def wm_iconbitmap(self, bitmap=None, default=None): + """Set bitmap for the iconified widget to BITMAP. Return + the bitmap if None is given. + + Under Windows, the DEFAULT parameter can be used to set the icon + for the widget and any descendants that don't have an icon set + explicitly. DEFAULT can be the relative path to a .ico file + (example: root.iconbitmap(default='myicon.ico') ). See Tk + documentation for more information.""" + if default: + return self.tk.call('wm', 'iconbitmap', self._w, '-default', default) + else: + return self.tk.call('wm', 'iconbitmap', self._w, bitmap) + + iconbitmap = wm_iconbitmap + + def wm_iconify(self): + """Display widget as icon.""" + return self.tk.call('wm', 'iconify', self._w) + + iconify = wm_iconify + + def wm_iconmask(self, bitmap=None): + """Set mask for the icon bitmap of this widget. Return the + mask if None is given.""" + return self.tk.call('wm', 'iconmask', self._w, bitmap) + + iconmask = wm_iconmask + + def wm_iconname(self, newName=None): + """Set the name of the icon for this widget. Return the name if + None is given.""" + return self.tk.call('wm', 'iconname', self._w, newName) + + iconname = wm_iconname + + def wm_iconphoto(self, default=False, *args): # new in Tk 8.5 + """Sets the titlebar icon for this window based on the named photo + images passed through args. If default is True, this is applied to + all future created toplevels as well. + + The data in the images is taken as a snapshot at the time of + invocation. If the images are later changed, this is not reflected + to the titlebar icons. Multiple images are accepted to allow + different images sizes to be provided. The window manager may scale + provided icons to an appropriate size. + + On Windows, the images are packed into a Windows icon structure. + This will override an icon specified to wm_iconbitmap, and vice + versa. + + On X, the images are arranged into the _NET_WM_ICON X property, + which most modern window managers support. An icon specified by + wm_iconbitmap may exist simultaneously. + + On Macintosh, this currently does nothing.""" + if default: + self.tk.call('wm', 'iconphoto', self._w, "-default", *args) + else: + self.tk.call('wm', 'iconphoto', self._w, *args) + + iconphoto = wm_iconphoto + + def wm_iconposition(self, x=None, y=None): + """Set the position of the icon of this widget to X and Y. Return + a tuple of the current values of X and X if None is given.""" + return self._getints(self.tk.call( + 'wm', 'iconposition', self._w, x, y)) + + iconposition = wm_iconposition + + def wm_iconwindow(self, pathName=None): + """Set widget PATHNAME to be displayed instead of icon. Return the current + value if None is given.""" + return self.tk.call('wm', 'iconwindow', self._w, pathName) + + iconwindow = wm_iconwindow + + def wm_manage(self, widget): # new in Tk 8.5 + """The widget specified will become a stand alone top-level window. + The window will be decorated with the window managers title bar, + etc.""" + self.tk.call('wm', 'manage', widget) + + manage = wm_manage + + def wm_maxsize(self, width=None, height=None): + """Set max WIDTH and HEIGHT for this widget. If the window is gridded + the values are given in grid units. 
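A sketch of the icon-related wrappers (wm_iconphoto is new in Tk 8.5, as noted above); the PNG file names are placeholders.

    import tkinter

    root = tkinter.Tk()
    # Placeholder icon files; several sizes may be passed and the window
    # manager picks or scales an appropriate one.
    small = tkinter.PhotoImage(file='icon16.png')
    large = tkinter.PhotoImage(file='icon48.png')
    # default=True also applies the icons to toplevels created later.
    root.wm_iconphoto(True, large, small)
    root.wm_iconname('MyApp')
    root.mainloop()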
Return the current values if None + is given.""" + return self._getints(self.tk.call( + 'wm', 'maxsize', self._w, width, height)) + + maxsize = wm_maxsize + + def wm_minsize(self, width=None, height=None): + """Set min WIDTH and HEIGHT for this widget. If the window is gridded + the values are given in grid units. Return the current values if None + is given.""" + return self._getints(self.tk.call( + 'wm', 'minsize', self._w, width, height)) + + minsize = wm_minsize + + def wm_overrideredirect(self, boolean=None): + """Instruct the window manager to ignore this widget + if BOOLEAN is given with 1. Return the current value if None + is given.""" + return self._getboolean(self.tk.call( + 'wm', 'overrideredirect', self._w, boolean)) + + overrideredirect = wm_overrideredirect + + def wm_positionfrom(self, who=None): + """Instruct the window manager that the position of this widget shall + be defined by the user if WHO is "user", and by its own policy if WHO is + "program".""" + return self.tk.call('wm', 'positionfrom', self._w, who) + + positionfrom = wm_positionfrom + + def wm_protocol(self, name=None, func=None): + """Bind function FUNC to command NAME for this widget. + Return the function bound to NAME if None is given. NAME could be + e.g. "WM_SAVE_YOURSELF" or "WM_DELETE_WINDOW".""" + if callable(func): + command = self._register(func) + else: + command = func + return self.tk.call( + 'wm', 'protocol', self._w, name, command) + + protocol = wm_protocol + + def wm_resizable(self, width=None, height=None): + """Instruct the window manager whether this width can be resized + in WIDTH or HEIGHT. Both values are boolean values.""" + return self.tk.call('wm', 'resizable', self._w, width, height) + + resizable = wm_resizable + + def wm_sizefrom(self, who=None): + """Instruct the window manager that the size of this widget shall + be defined by the user if WHO is "user", and by its own policy if WHO is + "program".""" + return self.tk.call('wm', 'sizefrom', self._w, who) + + sizefrom = wm_sizefrom + + def wm_state(self, newstate=None): + """Query or set the state of this widget as one of normal, icon, + iconic (see wm_iconwindow), withdrawn, or zoomed (Windows only).""" + return self.tk.call('wm', 'state', self._w, newstate) + + state = wm_state + + def wm_title(self, string=None): + """Set the title of this widget.""" + return self.tk.call('wm', 'title', self._w, string) + + title = wm_title + + def wm_transient(self, master=None): + """Instruct the window manager that this widget is transient + with regard to widget MASTER.""" + return self.tk.call('wm', 'transient', self._w, master) + + transient = wm_transient + + def wm_withdraw(self): + """Withdraw this widget from the screen such that it is unmapped + and forgotten by the window manager. Re-draw it with wm_deiconify.""" + return self.tk.call('wm', 'withdraw', self._w) + + withdraw = wm_withdraw + + +class Tk(Misc, Wm): + """Toplevel widget of Tk which represents mostly the main window + of an application. It has an associated Tcl interpreter.""" + _w = '.' + + def __init__(self, screenName=None, baseName=None, className='Tk', + useTk=True, sync=False, use=None): + """Return a new Toplevel widget on screen SCREENNAME. A new Tcl interpreter will + be created. BASENAME will be used for the identification of the profile file (see + readprofile). + It is constructed from sys.argv[0] without extensions if None is given. 
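The wm_protocol, wm_resizable and wm_minsize wrappers combine naturally; a minimal sketch in which the on_close handler is illustrative.

    import tkinter

    root = tkinter.Tk()
    root.wm_title('Example')
    root.wm_minsize(200, 120)
    root.wm_resizable(True, False)      # allow horizontal resizing only

    def on_close():
        print('cleaning up before exit')
        root.destroy()

    # Intercept the window manager's close button.
    root.wm_protocol('WM_DELETE_WINDOW', on_close)
    root.mainloop()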
CLASSNAME + is the name of the widget class.""" + self.master = None + self.children = {} + self._tkloaded = False + # to avoid recursions in the getattr code in case of failure, we + # ensure that self.tk is always _something_. + self.tk = None + if baseName is None: + import os + baseName = os.path.basename(sys.argv[0]) + baseName, ext = os.path.splitext(baseName) + if ext not in ('.py', '.pyc'): + baseName = baseName + ext + interactive = False + self.tk = _tkinter.create(screenName, baseName, className, interactive, wantobjects, useTk, sync, use) + if useTk: + self._loadtk() + if not sys.flags.ignore_environment: + # Issue #16248: Honor the -E flag to avoid code injection. + self.readprofile(baseName, className) + + def loadtk(self): + if not self._tkloaded: + self.tk.loadtk() + self._loadtk() + + def _loadtk(self): + self._tkloaded = True + global _default_root + # Version sanity checks + tk_version = self.tk.getvar('tk_version') + if tk_version != _tkinter.TK_VERSION: + raise RuntimeError("tk.h version (%s) doesn't match libtk.a version (%s)" + % (_tkinter.TK_VERSION, tk_version)) + # Under unknown circumstances, tcl_version gets coerced to float + tcl_version = str(self.tk.getvar('tcl_version')) + if tcl_version != _tkinter.TCL_VERSION: + raise RuntimeError("tcl.h version (%s) doesn't match libtcl.a version (%s)" \ + % (_tkinter.TCL_VERSION, tcl_version)) + # Create and register the tkerror and exit commands + # We need to inline parts of _register here, _ register + # would register differently-named commands. + if self._tclCommands is None: + self._tclCommands = [] + self.tk.createcommand('tkerror', _tkerror) + self.tk.createcommand('exit', _exit) + self._tclCommands.append('tkerror') + self._tclCommands.append('exit') + if _support_default_root and _default_root is None: + _default_root = self + self.protocol("WM_DELETE_WINDOW", self.destroy) + + def destroy(self): + """Destroy this and all descendants widgets. This will + end the application of this Tcl interpreter.""" + for c in list(self.children.values()): c.destroy() + self.tk.call('destroy', self._w) + Misc.destroy(self) + global _default_root + if _support_default_root and _default_root is self: + _default_root = None + + def readprofile(self, baseName, className): + """Internal function. It reads .BASENAME.tcl and .CLASSNAME.tcl into + the Tcl Interpreter and calls exec on the contents of .BASENAME.py and + .CLASSNAME.py if such a file exists in the home directory.""" + import os + if 'HOME' in os.environ: home = os.environ['HOME'] + else: home = os.curdir + class_tcl = os.path.join(home, '.%s.tcl' % className) + class_py = os.path.join(home, '.%s.py' % className) + base_tcl = os.path.join(home, '.%s.tcl' % baseName) + base_py = os.path.join(home, '.%s.py' % baseName) + dir = {'self': self} + exec('from tkinter import *', dir) + if os.path.isfile(class_tcl): + self.tk.call('source', class_tcl) + if os.path.isfile(class_py): + exec(open(class_py).read(), dir) + if os.path.isfile(base_tcl): + self.tk.call('source', base_tcl) + if os.path.isfile(base_py): + exec(open(base_py).read(), dir) + + def report_callback_exception(self, exc, val, tb): + """Report callback exception on sys.stderr. 
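Since report_callback_exception is explicitly meant to be overridden, a short sketch of doing so by subclassing Tk; the failing command is deliberately buggy.

    import tkinter

    class App(tkinter.Tk):
        def report_callback_exception(self, exc, val, tb):
            # Called instead of the default stderr traceback whenever a
            # Tk callback raises.
            print('callback failed:', val)

    app = App()
    tkinter.Button(app, text='Boom', command=lambda: 1 / 0).pack()
    app.mainloop()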
+ + Applications may want to override this internal function, and + should when sys.stderr is None.""" + import traceback + print("Exception in Tkinter callback", file=sys.stderr) + sys.last_type = exc + sys.last_value = val + sys.last_traceback = tb + traceback.print_exception(exc, val, tb) + + def __getattr__(self, attr): + "Delegate attribute access to the interpreter object" + return getattr(self.tk, attr) + +# Ideally, the classes Pack, Place and Grid disappear, the +# pack/place/grid methods are defined on the Widget class, and +# everybody uses w.pack_whatever(...) instead of Pack.whatever(w, +# ...), with pack(), place() and grid() being short for +# pack_configure(), place_configure() and grid_columnconfigure(), and +# forget() being short for pack_forget(). As a practical matter, I'm +# afraid that there is too much code out there that may be using the +# Pack, Place or Grid class, so I leave them intact -- but only as +# backwards compatibility features. Also note that those methods that +# take a master as argument (e.g. pack_propagate) have been moved to +# the Misc class (which now incorporates all methods common between +# toplevel and interior widgets). Again, for compatibility, these are +# copied into the Pack, Place or Grid class. + + +def Tcl(screenName=None, baseName=None, className='Tk', useTk=False): + return Tk(screenName, baseName, className, useTk) + + +class Pack: + """Geometry manager Pack. + + Base class to use the methods pack_* in every widget.""" + + def pack_configure(self, cnf={}, **kw): + """Pack a widget in the parent widget. Use as options: + after=widget - pack it after you have packed widget + anchor=NSEW (or subset) - position widget according to + given direction + before=widget - pack it before you will pack widget + expand=bool - expand widget if parent size grows + fill=NONE or X or Y or BOTH - fill widget if widget grows + in=master - use master to contain this widget + in_=master - see 'in' option description + ipadx=amount - add internal padding in x direction + ipady=amount - add internal padding in y direction + padx=amount - add padding in x direction + pady=amount - add padding in y direction + side=TOP or BOTTOM or LEFT or RIGHT - where to add this widget. + """ + self.tk.call( + ('pack', 'configure', self._w) + + self._options(cnf, kw)) + + pack = configure = config = pack_configure + + def pack_forget(self): + """Unmap this widget and do not use it for the packing order.""" + self.tk.call('pack', 'forget', self._w) + + forget = pack_forget + + def pack_info(self): + """Return information about the packing options + for this widget.""" + d = _splitdict(self.tk, self.tk.call('pack', 'info', self._w)) + if 'in' in d: + d['in'] = self.nametowidget(d['in']) + return d + + info = pack_info + propagate = pack_propagate = Misc.pack_propagate + slaves = pack_slaves = Misc.pack_slaves + + +class Place: + """Geometry manager Place. + + Base class to use the methods place_* in every widget.""" + + def place_configure(self, cnf={}, **kw): + """Place a widget in the parent widget. 
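A small usage sketch of pack_configure and pack_info with the side, fill and expand options listed above.

    import tkinter

    root = tkinter.Tk()
    toolbar = tkinter.Frame(root, bd=1, relief='raised')
    body = tkinter.Text(root)
    toolbar.pack(side='top', fill='x')
    body.pack(side='top', fill='both', expand=True)
    # pack_info reports the options currently in effect for a slave.
    print(body.pack_info())
    root.mainloop()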
Use as options: + in=master - master relative to which the widget is placed + in_=master - see 'in' option description + x=amount - locate anchor of this widget at position x of master + y=amount - locate anchor of this widget at position y of master + relx=amount - locate anchor of this widget between 0.0 and 1.0 + relative to width of master (1.0 is right edge) + rely=amount - locate anchor of this widget between 0.0 and 1.0 + relative to height of master (1.0 is bottom edge) + anchor=NSEW (or subset) - position anchor according to given direction + width=amount - width of this widget in pixel + height=amount - height of this widget in pixel + relwidth=amount - width of this widget between 0.0 and 1.0 + relative to width of master (1.0 is the same width + as the master) + relheight=amount - height of this widget between 0.0 and 1.0 + relative to height of master (1.0 is the same + height as the master) + bordermode="inside" or "outside" - whether to take border width of + master widget into account + """ + self.tk.call( + ('place', 'configure', self._w) + + self._options(cnf, kw)) + + place = configure = config = place_configure + + def place_forget(self): + """Unmap this widget.""" + self.tk.call('place', 'forget', self._w) + + forget = place_forget + + def place_info(self): + """Return information about the placing options + for this widget.""" + d = _splitdict(self.tk, self.tk.call('place', 'info', self._w)) + if 'in' in d: + d['in'] = self.nametowidget(d['in']) + return d + + info = place_info + slaves = place_slaves = Misc.place_slaves + + +class Grid: + """Geometry manager Grid. + + Base class to use the methods grid_* in every widget.""" + # Thanks to Masazumi Yoshikawa (yosikawa@isi.edu) + + def grid_configure(self, cnf={}, **kw): + """Position a widget in the parent widget in a grid. 
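A sketch of place_configure using the relative options above; the Label acting as a corner badge is illustrative.

    import tkinter

    root = tkinter.Tk()
    root.wm_geometry('300x200')
    badge = tkinter.Label(root, text='42', bg='red')
    # Pin the label's north-east corner to the master's top-right corner;
    # relative coordinates keep it there when the window is resized.
    badge.place(relx=1.0, rely=0.0, anchor='ne')
    print(badge.place_info())
    root.mainloop()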
Use as options: + column=number - use cell identified with given column (starting with 0) + columnspan=number - this widget will span several columns + in=master - use master to contain this widget + in_=master - see 'in' option description + ipadx=amount - add internal padding in x direction + ipady=amount - add internal padding in y direction + padx=amount - add padding in x direction + pady=amount - add padding in y direction + row=number - use cell identified with given row (starting with 0) + rowspan=number - this widget will span several rows + sticky=NSEW - if cell is larger on which sides will this + widget stick to the cell boundary + """ + self.tk.call( + ('grid', 'configure', self._w) + + self._options(cnf, kw)) + + grid = configure = config = grid_configure + bbox = grid_bbox = Misc.grid_bbox + columnconfigure = grid_columnconfigure = Misc.grid_columnconfigure + + def grid_forget(self): + """Unmap this widget.""" + self.tk.call('grid', 'forget', self._w) + + forget = grid_forget + + def grid_remove(self): + """Unmap this widget but remember the grid options.""" + self.tk.call('grid', 'remove', self._w) + + def grid_info(self): + """Return information about the options + for positioning this widget in a grid.""" + d = _splitdict(self.tk, self.tk.call('grid', 'info', self._w)) + if 'in' in d: + d['in'] = self.nametowidget(d['in']) + return d + + info = grid_info + location = grid_location = Misc.grid_location + propagate = grid_propagate = Misc.grid_propagate + rowconfigure = grid_rowconfigure = Misc.grid_rowconfigure + size = grid_size = Misc.grid_size + slaves = grid_slaves = Misc.grid_slaves + + +class BaseWidget(Misc): + """Internal class.""" + + def _setup(self, master, cnf): + """Internal function. Sets up information about children.""" + if master is None: + master = _get_default_root() + self.master = master + self.tk = master.tk + name = None + if 'name' in cnf: + name = cnf['name'] + del cnf['name'] + if not name: + name = self.__class__.__name__.lower() + if master._last_child_ids is None: + master._last_child_ids = {} + count = master._last_child_ids.get(name, 0) + 1 + master._last_child_ids[name] = count + if count == 1: + name = '!%s' % (name,) + else: + name = '!%s%d' % (name, count) + self._name = name + if master._w=='.': + self._w = '.' + name + else: + self._w = master._w + '.' + name + self.children = {} + if self._name in self.master.children: + self.master.children[self._name].destroy() + self.master.children[self._name] = self + + def __init__(self, master, widgetName, cnf={}, kw={}, extra=()): + """Construct a widget with the parent widget MASTER, a name WIDGETNAME + and appropriate options.""" + if kw: + cnf = _cnfmerge((cnf, kw)) + self.widgetName = widgetName + BaseWidget._setup(self, master, cnf) + if self._tclCommands is None: + self._tclCommands = [] + classes = [(k, v) for k, v in cnf.items() if isinstance(k, type)] + for k, v in classes: + del cnf[k] + self.tk.call( + (widgetName, self._w) + extra + self._options(cnf)) + for k, v in classes: + k.configure(self, v) + + def destroy(self): + """Destroy this and all descendants widgets.""" + for c in list(self.children.values()): c.destroy() + self.tk.call('destroy', self._w) + if self._name in self.master.children: + del self.master.children[self._name] + Misc.destroy(self) + + def _do(self, name, args=()): + # XXX Obsolete -- better use self.tk.call directly! + return self.tk.call((self._w, name) + args) + + +class Widget(BaseWidget, Pack, Place, Grid): + """Internal class. 
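A sketch of grid_configure together with the grid_columnconfigure and grid_size methods inherited from Misc; the two-widget form is illustrative.

    import tkinter

    root = tkinter.Tk()
    label = tkinter.Label(root, text='Name:')
    entry = tkinter.Entry(root)
    label.grid(row=0, column=0, sticky='w', padx=4, pady=4)
    entry.grid(row=0, column=1, sticky='ew', padx=4, pady=4)
    # Give column 1 all extra horizontal space when the window grows.
    root.grid_columnconfigure(1, weight=1)
    print(root.grid_size())             # (columns, rows), here (2, 1)
    root.mainloop()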
+ + Base class for a widget which can be positioned with the geometry managers + Pack, Place or Grid.""" + pass + + +class Toplevel(BaseWidget, Wm): + """Toplevel widget, e.g. for dialogs.""" + + def __init__(self, master=None, cnf={}, **kw): + """Construct a toplevel widget with the parent MASTER. + + Valid resource names: background, bd, bg, borderwidth, class, + colormap, container, cursor, height, highlightbackground, + highlightcolor, highlightthickness, menu, relief, screen, takefocus, + use, visual, width.""" + if kw: + cnf = _cnfmerge((cnf, kw)) + extra = () + for wmkey in ['screen', 'class_', 'class', 'visual', + 'colormap']: + if wmkey in cnf: + val = cnf[wmkey] + # TBD: a hack needed because some keys + # are not valid as keyword arguments + if wmkey[-1] == '_': opt = '-'+wmkey[:-1] + else: opt = '-'+wmkey + extra = extra + (opt, val) + del cnf[wmkey] + BaseWidget.__init__(self, master, 'toplevel', cnf, {}, extra) + root = self._root() + self.iconname(root.iconname()) + self.title(root.title()) + self.protocol("WM_DELETE_WINDOW", self.destroy) + + +class Button(Widget): + """Button widget.""" + + def __init__(self, master=None, cnf={}, **kw): + """Construct a button widget with the parent MASTER. + + STANDARD OPTIONS + + activebackground, activeforeground, anchor, + background, bitmap, borderwidth, cursor, + disabledforeground, font, foreground + highlightbackground, highlightcolor, + highlightthickness, image, justify, + padx, pady, relief, repeatdelay, + repeatinterval, takefocus, text, + textvariable, underline, wraplength + + WIDGET-SPECIFIC OPTIONS + + command, compound, default, height, + overrelief, state, width + """ + Widget.__init__(self, master, 'button', cnf, kw) + + def flash(self): + """Flash the button. + + This is accomplished by redisplaying + the button several times, alternating between active and + normal colors. At the end of the flash the button is left + in the same normal/active state as when the command was + invoked. This command is ignored if the button's state is + disabled. + """ + self.tk.call(self._w, 'flash') + + def invoke(self): + """Invoke the command associated with the button. + + The return value is the return value from the command, + or an empty string if there is no command associated with + the button. This command is ignored if the button's state + is disabled. + """ + return self.tk.call(self._w, 'invoke') + + +class Canvas(Widget, XView, YView): + """Canvas widget to display graphical elements like lines or text.""" + + def __init__(self, master=None, cnf={}, **kw): + """Construct a canvas widget with the parent MASTER. 
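The Button widget above in a minimal sketch; the callback is illustrative.

    import tkinter

    root = tkinter.Tk()
    button = tkinter.Button(root, text='Hello',
                            command=lambda: print('hello'))
    button.pack(padx=10, pady=10)
    # invoke() runs the command programmatically and returns its result
    # (it is ignored while the button is disabled).
    button.invoke()
    root.mainloop()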
+ + Valid resource names: background, bd, bg, borderwidth, closeenough, + confine, cursor, height, highlightbackground, highlightcolor, + highlightthickness, insertbackground, insertborderwidth, + insertofftime, insertontime, insertwidth, offset, relief, + scrollregion, selectbackground, selectborderwidth, selectforeground, + state, takefocus, width, xscrollcommand, xscrollincrement, + yscrollcommand, yscrollincrement.""" + Widget.__init__(self, master, 'canvas', cnf, kw) + + def addtag(self, *args): + """Internal function.""" + self.tk.call((self._w, 'addtag') + args) + + def addtag_above(self, newtag, tagOrId): + """Add tag NEWTAG to all items above TAGORID.""" + self.addtag(newtag, 'above', tagOrId) + + def addtag_all(self, newtag): + """Add tag NEWTAG to all items.""" + self.addtag(newtag, 'all') + + def addtag_below(self, newtag, tagOrId): + """Add tag NEWTAG to all items below TAGORID.""" + self.addtag(newtag, 'below', tagOrId) + + def addtag_closest(self, newtag, x, y, halo=None, start=None): + """Add tag NEWTAG to item which is closest to pixel at X, Y. + If several match take the top-most. + All items closer than HALO are considered overlapping (all are + closest). If START is specified the next below this tag is taken.""" + self.addtag(newtag, 'closest', x, y, halo, start) + + def addtag_enclosed(self, newtag, x1, y1, x2, y2): + """Add tag NEWTAG to all items in the rectangle defined + by X1,Y1,X2,Y2.""" + self.addtag(newtag, 'enclosed', x1, y1, x2, y2) + + def addtag_overlapping(self, newtag, x1, y1, x2, y2): + """Add tag NEWTAG to all items which overlap the rectangle + defined by X1,Y1,X2,Y2.""" + self.addtag(newtag, 'overlapping', x1, y1, x2, y2) + + def addtag_withtag(self, newtag, tagOrId): + """Add tag NEWTAG to all items with TAGORID.""" + self.addtag(newtag, 'withtag', tagOrId) + + def bbox(self, *args): + """Return a tuple of X1,Y1,X2,Y2 coordinates for a rectangle + which encloses all items with tags specified as arguments.""" + return self._getints( + self.tk.call((self._w, 'bbox') + args)) or None + + def tag_unbind(self, tagOrId, sequence, funcid=None): + """Unbind for all items with TAGORID for event SEQUENCE the + function identified with FUNCID.""" + self.tk.call(self._w, 'bind', tagOrId, sequence, '') + if funcid: + self.deletecommand(funcid) + + def tag_bind(self, tagOrId, sequence=None, func=None, add=None): + """Bind to all items with TAGORID at event SEQUENCE a call to function FUNC. + + An additional boolean parameter ADD specifies whether FUNC will be + called additionally to the other bound function or whether it will + replace the previous function. 
See bind for the return value.""" + return self._bind((self._w, 'bind', tagOrId), + sequence, func, add) + + def canvasx(self, screenx, gridspacing=None): + """Return the canvas x coordinate of pixel position SCREENX rounded + to nearest multiple of GRIDSPACING units.""" + return self.tk.getdouble(self.tk.call( + self._w, 'canvasx', screenx, gridspacing)) + + def canvasy(self, screeny, gridspacing=None): + """Return the canvas y coordinate of pixel position SCREENY rounded + to nearest multiple of GRIDSPACING units.""" + return self.tk.getdouble(self.tk.call( + self._w, 'canvasy', screeny, gridspacing)) + + def coords(self, *args): + """Return a list of coordinates for the item given in ARGS.""" + # XXX Should use _flatten on args + return [self.tk.getdouble(x) for x in + self.tk.splitlist( + self.tk.call((self._w, 'coords') + args))] + + def _create(self, itemType, args, kw): # Args: (val, val, ..., cnf={}) + """Internal function.""" + args = _flatten(args) + cnf = args[-1] + if isinstance(cnf, (dict, tuple)): + args = args[:-1] + else: + cnf = {} + return self.tk.getint(self.tk.call( + self._w, 'create', itemType, + *(args + self._options(cnf, kw)))) + + def create_arc(self, *args, **kw): + """Create arc shaped region with coordinates x1,y1,x2,y2.""" + return self._create('arc', args, kw) + + def create_bitmap(self, *args, **kw): + """Create bitmap with coordinates x1,y1.""" + return self._create('bitmap', args, kw) + + def create_image(self, *args, **kw): + """Create image item with coordinates x1,y1.""" + return self._create('image', args, kw) + + def create_line(self, *args, **kw): + """Create line with coordinates x1,y1,...,xn,yn.""" + return self._create('line', args, kw) + + def create_oval(self, *args, **kw): + """Create oval with coordinates x1,y1,x2,y2.""" + return self._create('oval', args, kw) + + def create_polygon(self, *args, **kw): + """Create polygon with coordinates x1,y1,...,xn,yn.""" + return self._create('polygon', args, kw) + + def create_rectangle(self, *args, **kw): + """Create rectangle with coordinates x1,y1,x2,y2.""" + return self._create('rectangle', args, kw) + + def create_text(self, *args, **kw): + """Create text with coordinates x1,y1.""" + return self._create('text', args, kw) + + def create_window(self, *args, **kw): + """Create window with coordinates x1,y1,x2,y2.""" + return self._create('window', args, kw) + + def dchars(self, *args): + """Delete characters of text items identified by tag or id in ARGS (possibly + several times) from FIRST to LAST character (including).""" + self.tk.call((self._w, 'dchars') + args) + + def delete(self, *args): + """Delete items identified by all tag or ids contained in ARGS.""" + self.tk.call((self._w, 'delete') + args) + + def dtag(self, *args): + """Delete tag or id given as last arguments in ARGS from items + identified by first argument in ARGS.""" + self.tk.call((self._w, 'dtag') + args) + + def find(self, *args): + """Internal function.""" + return self._getints( + self.tk.call((self._w, 'find') + args)) or () + + def find_above(self, tagOrId): + """Return items above TAGORID.""" + return self.find('above', tagOrId) + + def find_all(self): + """Return all items.""" + return self.find('all') + + def find_below(self, tagOrId): + """Return all items below TAGORID.""" + return self.find('below', tagOrId) + + def find_closest(self, x, y, halo=None, start=None): + """Return item which is closest to pixel at X, Y. + If several match take the top-most. 
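A sketch tying together the Canvas item constructors, tags and tag_bind documented here; the 'box' tag and the click callback are illustrative.

    import tkinter

    root = tkinter.Tk()
    canvas = tkinter.Canvas(root, width=200, height=150, background='white')
    canvas.pack()
    # create_* returns an integer item id; tags group related items.
    rect = canvas.create_rectangle(20, 20, 120, 80, fill='lightblue', tags=('box',))
    canvas.create_text(70, 50, text='demo', tags=('box',))
    # React to clicks on any item carrying the 'box' tag.
    canvas.tag_bind('box', '<Button-1>',
                    lambda e: print('clicked', canvas.find_withtag('current')))
    canvas.move('box', 10, 5)           # shift every tagged item
    print(canvas.coords(rect))
    root.mainloop()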
+ All items closer than HALO are considered overlapping (all are + closest). If START is specified the next below this tag is taken.""" + return self.find('closest', x, y, halo, start) + + def find_enclosed(self, x1, y1, x2, y2): + """Return all items in rectangle defined + by X1,Y1,X2,Y2.""" + return self.find('enclosed', x1, y1, x2, y2) + + def find_overlapping(self, x1, y1, x2, y2): + """Return all items which overlap the rectangle + defined by X1,Y1,X2,Y2.""" + return self.find('overlapping', x1, y1, x2, y2) + + def find_withtag(self, tagOrId): + """Return all items with TAGORID.""" + return self.find('withtag', tagOrId) + + def focus(self, *args): + """Set focus to the first item specified in ARGS.""" + return self.tk.call((self._w, 'focus') + args) + + def gettags(self, *args): + """Return tags associated with the first item specified in ARGS.""" + return self.tk.splitlist( + self.tk.call((self._w, 'gettags') + args)) + + def icursor(self, *args): + """Set cursor at position POS in the item identified by TAGORID. + In ARGS TAGORID must be first.""" + self.tk.call((self._w, 'icursor') + args) + + def index(self, *args): + """Return position of cursor as integer in item specified in ARGS.""" + return self.tk.getint(self.tk.call((self._w, 'index') + args)) + + def insert(self, *args): + """Insert TEXT in item TAGORID at position POS. ARGS must + be TAGORID POS TEXT.""" + self.tk.call((self._w, 'insert') + args) + + def itemcget(self, tagOrId, option): + """Return the resource value for an OPTION for item TAGORID.""" + return self.tk.call( + (self._w, 'itemcget') + (tagOrId, '-'+option)) + + def itemconfigure(self, tagOrId, cnf=None, **kw): + """Configure resources of an item TAGORID. + + The values for resources are specified as keyword + arguments. To get an overview about + the allowed keyword arguments call the method without arguments. + """ + return self._configure(('itemconfigure', tagOrId), cnf, kw) + + itemconfig = itemconfigure + + # lower, tkraise/lift hide Misc.lower, Misc.tkraise/lift, + # so the preferred name for them is tag_lower, tag_raise + # (similar to tag_bind, and similar to the Text widget); + # unfortunately can't delete the old ones yet (maybe in 1.6) + def tag_lower(self, *args): + """Lower an item TAGORID given in ARGS + (optional below another item).""" + self.tk.call((self._w, 'lower') + args) + + lower = tag_lower + + def move(self, *args): + """Move an item TAGORID given in ARGS.""" + self.tk.call((self._w, 'move') + args) + + def moveto(self, tagOrId, x='', y=''): + """Move the items given by TAGORID in the canvas coordinate + space so that the first coordinate pair of the bottommost + item with tag TAGORID is located at position (X,Y). + X and Y may be the empty string, in which case the + corresponding coordinate will be unchanged. All items matching + TAGORID remain in the same positions relative to each other.""" + self.tk.call(self._w, 'moveto', tagOrId, x, y) + + def postscript(self, cnf={}, **kw): + """Print the contents of the canvas to a postscript + file. 
Valid options: colormap, colormode, file, fontmap, + height, pageanchor, pageheight, pagewidth, pagex, pagey, + rotate, width, x, y.""" + return self.tk.call((self._w, 'postscript') + + self._options(cnf, kw)) + + def tag_raise(self, *args): + """Raise an item TAGORID given in ARGS + (optional above another item).""" + self.tk.call((self._w, 'raise') + args) + + lift = tkraise = tag_raise + + def scale(self, *args): + """Scale item TAGORID with XORIGIN, YORIGIN, XSCALE, YSCALE.""" + self.tk.call((self._w, 'scale') + args) + + def scan_mark(self, x, y): + """Remember the current X, Y coordinates.""" + self.tk.call(self._w, 'scan', 'mark', x, y) + + def scan_dragto(self, x, y, gain=10): + """Adjust the view of the canvas to GAIN times the + difference between X and Y and the coordinates given in + scan_mark.""" + self.tk.call(self._w, 'scan', 'dragto', x, y, gain) + + def select_adjust(self, tagOrId, index): + """Adjust the end of the selection near the cursor of an item TAGORID to index.""" + self.tk.call(self._w, 'select', 'adjust', tagOrId, index) + + def select_clear(self): + """Clear the selection if it is in this widget.""" + self.tk.call(self._w, 'select', 'clear') + + def select_from(self, tagOrId, index): + """Set the fixed end of a selection in item TAGORID to INDEX.""" + self.tk.call(self._w, 'select', 'from', tagOrId, index) + + def select_item(self): + """Return the item which has the selection.""" + return self.tk.call(self._w, 'select', 'item') or None + + def select_to(self, tagOrId, index): + """Set the variable end of a selection in item TAGORID to INDEX.""" + self.tk.call(self._w, 'select', 'to', tagOrId, index) + + def type(self, tagOrId): + """Return the type of the item TAGORID.""" + return self.tk.call(self._w, 'type', tagOrId) or None + + +class Checkbutton(Widget): + """Checkbutton widget which is either in on- or off-state.""" + + def __init__(self, master=None, cnf={}, **kw): + """Construct a checkbutton widget with the parent MASTER. + + Valid resource names: activebackground, activeforeground, anchor, + background, bd, bg, bitmap, borderwidth, command, cursor, + disabledforeground, fg, font, foreground, height, + highlightbackground, highlightcolor, highlightthickness, image, + indicatoron, justify, offvalue, onvalue, padx, pady, relief, + selectcolor, selectimage, state, takefocus, text, textvariable, + underline, variable, width, wraplength.""" + Widget.__init__(self, master, 'checkbutton', cnf, kw) + + def deselect(self): + """Put the button in off-state.""" + self.tk.call(self._w, 'deselect') + + def flash(self): + """Flash the button.""" + self.tk.call(self._w, 'flash') + + def invoke(self): + """Toggle the button and invoke a command if given as resource.""" + return self.tk.call(self._w, 'invoke') + + def select(self): + """Put the button in on-state.""" + self.tk.call(self._w, 'select') + + def toggle(self): + """Toggle the button.""" + self.tk.call(self._w, 'toggle') + + +class Entry(Widget, XView): + """Entry widget which allows displaying simple text.""" + + def __init__(self, master=None, cnf={}, **kw): + """Construct an entry widget with the parent MASTER. 
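A minimal Checkbutton sketch, assuming the usual BooleanVar pattern for tracking its on/off state.

    import tkinter

    root = tkinter.Tk()
    flag = tkinter.BooleanVar(master=root, value=False)
    check = tkinter.Checkbutton(root, text='Enable logging', variable=flag,
                                onvalue=True, offvalue=False)
    check.pack()
    check.select()          # put the button into the on-state
    check.toggle()          # and flip it back off
    print(flag.get())       # False again
    root.mainloop()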
+ + Valid resource names: background, bd, bg, borderwidth, cursor, + exportselection, fg, font, foreground, highlightbackground, + highlightcolor, highlightthickness, insertbackground, + insertborderwidth, insertofftime, insertontime, insertwidth, + invalidcommand, invcmd, justify, relief, selectbackground, + selectborderwidth, selectforeground, show, state, takefocus, + textvariable, validate, validatecommand, vcmd, width, + xscrollcommand.""" + Widget.__init__(self, master, 'entry', cnf, kw) + + def delete(self, first, last=None): + """Delete text from FIRST to LAST (not included).""" + self.tk.call(self._w, 'delete', first, last) + + def get(self): + """Return the text.""" + return self.tk.call(self._w, 'get') + + def icursor(self, index): + """Insert cursor at INDEX.""" + self.tk.call(self._w, 'icursor', index) + + def index(self, index): + """Return position of cursor.""" + return self.tk.getint(self.tk.call( + self._w, 'index', index)) + + def insert(self, index, string): + """Insert STRING at INDEX.""" + self.tk.call(self._w, 'insert', index, string) + + def scan_mark(self, x): + """Remember the current X, Y coordinates.""" + self.tk.call(self._w, 'scan', 'mark', x) + + def scan_dragto(self, x): + """Adjust the view of the canvas to 10 times the + difference between X and Y and the coordinates given in + scan_mark.""" + self.tk.call(self._w, 'scan', 'dragto', x) + + def selection_adjust(self, index): + """Adjust the end of the selection near the cursor to INDEX.""" + self.tk.call(self._w, 'selection', 'adjust', index) + + select_adjust = selection_adjust + + def selection_clear(self): + """Clear the selection if it is in this widget.""" + self.tk.call(self._w, 'selection', 'clear') + + select_clear = selection_clear + + def selection_from(self, index): + """Set the fixed end of a selection to INDEX.""" + self.tk.call(self._w, 'selection', 'from', index) + + select_from = selection_from + + def selection_present(self): + """Return True if there are characters selected in the entry, False + otherwise.""" + return self.tk.getboolean( + self.tk.call(self._w, 'selection', 'present')) + + select_present = selection_present + + def selection_range(self, start, end): + """Set the selection from START to END (not included).""" + self.tk.call(self._w, 'selection', 'range', start, end) + + select_range = selection_range + + def selection_to(self, index): + """Set the variable end of a selection to INDEX.""" + self.tk.call(self._w, 'selection', 'to', index) + + select_to = selection_to + + +class Frame(Widget): + """Frame widget which may contain other widgets and can have a 3D border.""" + + def __init__(self, master=None, cnf={}, **kw): + """Construct a frame widget with the parent MASTER. + + Valid resource names: background, bd, bg, borderwidth, class, + colormap, container, cursor, height, highlightbackground, + highlightcolor, highlightthickness, relief, takefocus, visual, width.""" + cnf = _cnfmerge((cnf, kw)) + extra = () + if 'class_' in cnf: + extra = ('-class', cnf['class_']) + del cnf['class_'] + elif 'class' in cnf: + extra = ('-class', cnf['class']) + del cnf['class'] + Widget.__init__(self, master, 'frame', cnf, {}, extra) + + +class Label(Widget): + """Label widget which can display text and bitmaps.""" + + def __init__(self, master=None, cnf={}, **kw): + """Construct a label widget with the parent MASTER. 
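A sketch of the Entry editing and selection methods above; exact selection behaviour can vary slightly across platforms.

    import tkinter

    root = tkinter.Tk()
    entry = tkinter.Entry(root, width=30)
    entry.pack(padx=8, pady=8)
    entry.insert(0, 'hello world')
    entry.icursor('end')                # move the insertion cursor to the end
    entry.selection_range(0, 5)         # select 'hello'
    print(entry.get())
    print(entry.selection_present())    # True while a range is selected
    entry.delete(6, 'end')              # keep only 'hello '
    root.mainloop()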
+ + STANDARD OPTIONS + + activebackground, activeforeground, anchor, + background, bitmap, borderwidth, cursor, + disabledforeground, font, foreground, + highlightbackground, highlightcolor, + highlightthickness, image, justify, + padx, pady, relief, takefocus, text, + textvariable, underline, wraplength + + WIDGET-SPECIFIC OPTIONS + + height, state, width + + """ + Widget.__init__(self, master, 'label', cnf, kw) + + +class Listbox(Widget, XView, YView): + """Listbox widget which can display a list of strings.""" + + def __init__(self, master=None, cnf={}, **kw): + """Construct a listbox widget with the parent MASTER. + + Valid resource names: background, bd, bg, borderwidth, cursor, + exportselection, fg, font, foreground, height, highlightbackground, + highlightcolor, highlightthickness, relief, selectbackground, + selectborderwidth, selectforeground, selectmode, setgrid, takefocus, + width, xscrollcommand, yscrollcommand, listvariable.""" + Widget.__init__(self, master, 'listbox', cnf, kw) + + def activate(self, index): + """Activate item identified by INDEX.""" + self.tk.call(self._w, 'activate', index) + + def bbox(self, index): + """Return a tuple of X1,Y1,X2,Y2 coordinates for a rectangle + which encloses the item identified by the given index.""" + return self._getints(self.tk.call(self._w, 'bbox', index)) or None + + def curselection(self): + """Return the indices of currently selected item.""" + return self._getints(self.tk.call(self._w, 'curselection')) or () + + def delete(self, first, last=None): + """Delete items from FIRST to LAST (included).""" + self.tk.call(self._w, 'delete', first, last) + + def get(self, first, last=None): + """Get list of items from FIRST to LAST (included).""" + if last is not None: + return self.tk.splitlist(self.tk.call( + self._w, 'get', first, last)) + else: + return self.tk.call(self._w, 'get', first) + + def index(self, index): + """Return index of item identified with INDEX.""" + i = self.tk.call(self._w, 'index', index) + if i == 'none': return None + return self.tk.getint(i) + + def insert(self, index, *elements): + """Insert ELEMENTS at INDEX.""" + self.tk.call((self._w, 'insert', index) + elements) + + def nearest(self, y): + """Get index of item which is nearest to y coordinate Y.""" + return self.tk.getint(self.tk.call( + self._w, 'nearest', y)) + + def scan_mark(self, x, y): + """Remember the current X, Y coordinates.""" + self.tk.call(self._w, 'scan', 'mark', x, y) + + def scan_dragto(self, x, y): + """Adjust the view of the listbox to 10 times the + difference between X and Y and the coordinates given in + scan_mark.""" + self.tk.call(self._w, 'scan', 'dragto', x, y) + + def see(self, index): + """Scroll such that INDEX is visible.""" + self.tk.call(self._w, 'see', index) + + def selection_anchor(self, index): + """Set the fixed end oft the selection to INDEX.""" + self.tk.call(self._w, 'selection', 'anchor', index) + + select_anchor = selection_anchor + + def selection_clear(self, first, last=None): + """Clear the selection from FIRST to LAST (included).""" + self.tk.call(self._w, + 'selection', 'clear', first, last) + + select_clear = selection_clear + + def selection_includes(self, index): + """Return True if INDEX is part of the selection.""" + return self.tk.getboolean(self.tk.call( + self._w, 'selection', 'includes', index)) + + select_includes = selection_includes + + def selection_set(self, first, last=None): + """Set the selection from FIRST to LAST (included) without + changing the currently selected elements.""" + 
self.tk.call(self._w, 'selection', 'set', first, last) + + select_set = selection_set + + def size(self): + """Return the number of elements in the listbox.""" + return self.tk.getint(self.tk.call(self._w, 'size')) + + def itemcget(self, index, option): + """Return the resource value for an ITEM and an OPTION.""" + return self.tk.call( + (self._w, 'itemcget') + (index, '-'+option)) + + def itemconfigure(self, index, cnf=None, **kw): + """Configure resources of an ITEM. + + The values for resources are specified as keyword arguments. + To get an overview about the allowed keyword arguments + call the method without arguments. + Valid resource names: background, bg, foreground, fg, + selectbackground, selectforeground.""" + return self._configure(('itemconfigure', index), cnf, kw) + + itemconfig = itemconfigure + + +class Menu(Widget): + """Menu widget which allows displaying menu bars, pull-down menus and pop-up menus.""" + + def __init__(self, master=None, cnf={}, **kw): + """Construct menu widget with the parent MASTER. + + Valid resource names: activebackground, activeborderwidth, + activeforeground, background, bd, bg, borderwidth, cursor, + disabledforeground, fg, font, foreground, postcommand, relief, + selectcolor, takefocus, tearoff, tearoffcommand, title, type.""" + Widget.__init__(self, master, 'menu', cnf, kw) + + def tk_popup(self, x, y, entry=""): + """Post the menu at position X,Y with entry ENTRY.""" + self.tk.call('tk_popup', self._w, x, y, entry) + + def activate(self, index): + """Activate entry at INDEX.""" + self.tk.call(self._w, 'activate', index) + + def add(self, itemType, cnf={}, **kw): + """Internal function.""" + self.tk.call((self._w, 'add', itemType) + + self._options(cnf, kw)) + + def add_cascade(self, cnf={}, **kw): + """Add hierarchical menu item.""" + self.add('cascade', cnf or kw) + + def add_checkbutton(self, cnf={}, **kw): + """Add checkbutton menu item.""" + self.add('checkbutton', cnf or kw) + + def add_command(self, cnf={}, **kw): + """Add command menu item.""" + self.add('command', cnf or kw) + + def add_radiobutton(self, cnf={}, **kw): + """Add radio menu item.""" + self.add('radiobutton', cnf or kw) + + def add_separator(self, cnf={}, **kw): + """Add separator.""" + self.add('separator', cnf or kw) + + def insert(self, index, itemType, cnf={}, **kw): + """Internal function.""" + self.tk.call((self._w, 'insert', index, itemType) + + self._options(cnf, kw)) + + def insert_cascade(self, index, cnf={}, **kw): + """Add hierarchical menu item at INDEX.""" + self.insert(index, 'cascade', cnf or kw) + + def insert_checkbutton(self, index, cnf={}, **kw): + """Add checkbutton menu item at INDEX.""" + self.insert(index, 'checkbutton', cnf or kw) + + def insert_command(self, index, cnf={}, **kw): + """Add command menu item at INDEX.""" + self.insert(index, 'command', cnf or kw) + + def insert_radiobutton(self, index, cnf={}, **kw): + """Add radio menu item at INDEX.""" + self.insert(index, 'radiobutton', cnf or kw) + + def insert_separator(self, index, cnf={}, **kw): + """Add separator at INDEX.""" + self.insert(index, 'separator', cnf or kw) + + def delete(self, index1, index2=None): + """Delete menu items between INDEX1 and INDEX2 (included).""" + if index2 is None: + index2 = index1 + + num_index1, num_index2 = self.index(index1), self.index(index2) + if (num_index1 is None) or (num_index2 is None): + num_index1, num_index2 = 0, -1 + + for i in range(num_index1, num_index2 + 1): + if 'command' in self.entryconfig(i): + c = str(self.entrycget(i, 'command')) + 
if c: + self.deletecommand(c) + self.tk.call(self._w, 'delete', index1, index2) + + def entrycget(self, index, option): + """Return the resource value of a menu item for OPTION at INDEX.""" + return self.tk.call(self._w, 'entrycget', index, '-' + option) + + def entryconfigure(self, index, cnf=None, **kw): + """Configure a menu item at INDEX.""" + return self._configure(('entryconfigure', index), cnf, kw) + + entryconfig = entryconfigure + + def index(self, index): + """Return the index of a menu item identified by INDEX.""" + i = self.tk.call(self._w, 'index', index) + if i == 'none': return None + return self.tk.getint(i) + + def invoke(self, index): + """Invoke a menu item identified by INDEX and execute + the associated command.""" + return self.tk.call(self._w, 'invoke', index) + + def post(self, x, y): + """Display a menu at position X,Y.""" + self.tk.call(self._w, 'post', x, y) + + def type(self, index): + """Return the type of the menu item at INDEX.""" + return self.tk.call(self._w, 'type', index) + + def unpost(self): + """Unmap a menu.""" + self.tk.call(self._w, 'unpost') + + def xposition(self, index): # new in Tk 8.5 + """Return the x-position of the leftmost pixel of the menu item + at INDEX.""" + return self.tk.getint(self.tk.call(self._w, 'xposition', index)) + + def yposition(self, index): + """Return the y-position of the topmost pixel of the menu item at INDEX.""" + return self.tk.getint(self.tk.call( + self._w, 'yposition', index)) + + +class Menubutton(Widget): + """Menubutton widget, obsolete since Tk8.0.""" + + def __init__(self, master=None, cnf={}, **kw): + Widget.__init__(self, master, 'menubutton', cnf, kw) + + +class Message(Widget): + """Message widget to display multiline text. Obsolete since Label does it too.""" + + def __init__(self, master=None, cnf={}, **kw): + Widget.__init__(self, master, 'message', cnf, kw) + + +class Radiobutton(Widget): + """Radiobutton widget which shows only one of several buttons in on-state.""" + + def __init__(self, master=None, cnf={}, **kw): + """Construct a radiobutton widget with the parent MASTER. + + Valid resource names: activebackground, activeforeground, anchor, + background, bd, bg, bitmap, borderwidth, command, cursor, + disabledforeground, fg, font, foreground, height, + highlightbackground, highlightcolor, highlightthickness, image, + indicatoron, justify, padx, pady, relief, selectcolor, selectimage, + state, takefocus, text, textvariable, underline, value, variable, + width, wraplength.""" + Widget.__init__(self, master, 'radiobutton', cnf, kw) + + def deselect(self): + """Put the button in off-state.""" + + self.tk.call(self._w, 'deselect') + + def flash(self): + """Flash the button.""" + self.tk.call(self._w, 'flash') + + def invoke(self): + """Toggle the button and invoke a command if given as resource.""" + return self.tk.call(self._w, 'invoke') + + def select(self): + """Put the button in on-state.""" + self.tk.call(self._w, 'select') + + +class Scale(Widget): + """Scale widget which can display a numerical scale.""" + + def __init__(self, master=None, cnf={}, **kw): + """Construct a scale widget with the parent MASTER. 
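A short Menu sketch using add_command/add_cascade and the entry addressing shown above; entries may be addressed by numeric index or by label.

    import tkinter

    root = tkinter.Tk()
    menubar = tkinter.Menu(root)
    filemenu = tkinter.Menu(menubar, tearoff=False)
    filemenu.add_command(label='Open', command=lambda: print('open'))
    filemenu.add_separator()
    filemenu.add_command(label='Quit', command=root.destroy)
    menubar.add_cascade(label='File', menu=filemenu)
    root.configure(menu=menubar)        # install the menu bar on the toplevel
    print(filemenu.entrycget('Open', 'label'))
    filemenu.invoke('Open')             # runs the bound command
    root.mainloop()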
+ + Valid resource names: activebackground, background, bigincrement, bd, + bg, borderwidth, command, cursor, digits, fg, font, foreground, from, + highlightbackground, highlightcolor, highlightthickness, label, + length, orient, relief, repeatdelay, repeatinterval, resolution, + showvalue, sliderlength, sliderrelief, state, takefocus, + tickinterval, to, troughcolor, variable, width.""" + Widget.__init__(self, master, 'scale', cnf, kw) + + def get(self): + """Get the current value as integer or float.""" + value = self.tk.call(self._w, 'get') + try: + return self.tk.getint(value) + except (ValueError, TypeError, TclError): + return self.tk.getdouble(value) + + def set(self, value): + """Set the value to VALUE.""" + self.tk.call(self._w, 'set', value) + + def coords(self, value=None): + """Return a tuple (X,Y) of the point along the centerline of the + trough that corresponds to VALUE or the current value if None is + given.""" + + return self._getints(self.tk.call(self._w, 'coords', value)) + + def identify(self, x, y): + """Return where the point X,Y lies. Valid return values are "slider", + "though1" and "though2".""" + return self.tk.call(self._w, 'identify', x, y) + + +class Scrollbar(Widget): + """Scrollbar widget which displays a slider at a certain position.""" + + def __init__(self, master=None, cnf={}, **kw): + """Construct a scrollbar widget with the parent MASTER. + + Valid resource names: activebackground, activerelief, + background, bd, bg, borderwidth, command, cursor, + elementborderwidth, highlightbackground, + highlightcolor, highlightthickness, jump, orient, + relief, repeatdelay, repeatinterval, takefocus, + troughcolor, width.""" + Widget.__init__(self, master, 'scrollbar', cnf, kw) + + def activate(self, index=None): + """Marks the element indicated by index as active. + The only index values understood by this method are "arrow1", + "slider", or "arrow2". If any other value is specified then no + element of the scrollbar will be active. If index is not specified, + the method returns the name of the element that is currently active, + or None if no element is active.""" + return self.tk.call(self._w, 'activate', index) or None + + def delta(self, deltax, deltay): + """Return the fractional change of the scrollbar setting if it + would be moved by DELTAX or DELTAY pixels.""" + return self.tk.getdouble( + self.tk.call(self._w, 'delta', deltax, deltay)) + + def fraction(self, x, y): + """Return the fractional value which corresponds to a slider + position of X,Y.""" + return self.tk.getdouble(self.tk.call(self._w, 'fraction', x, y)) + + def identify(self, x, y): + """Return the element under position X,Y as one of + "arrow1","slider","arrow2" or "".""" + return self.tk.call(self._w, 'identify', x, y) + + def get(self): + """Return the current fractional values (upper and lower end) + of the slider position.""" + return self._getdoubles(self.tk.call(self._w, 'get')) + + def set(self, first, last): + """Set the fractional values of the slider position (upper and + lower ends as value between 0 and 1).""" + self.tk.call(self._w, 'set', first, last) + + +class Text(Widget, XView, YView): + """Text widget which can display text in various forms.""" + + def __init__(self, master=None, cnf={}, **kw): + """Construct a text widget with the parent MASTER. 
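A sketch of a Scale holding a numeric value and a Scrollbar wired to a Text widget via the usual command/yscrollcommand pairing; the callback is illustrative.

    import tkinter

    root = tkinter.Tk()
    scale = tkinter.Scale(root, from_=0, to=100, orient='horizontal',
                          command=lambda value: print('scale is now', value))
    scale.pack(fill='x')
    scale.set(25)
    print(scale.get())                  # 25

    text = tkinter.Text(root, height=5)
    bar = tkinter.Scrollbar(root, orient='vertical', command=text.yview)
    text.configure(yscrollcommand=bar.set)
    text.pack(side='left', fill='both', expand=True)
    bar.pack(side='right', fill='y')
    root.mainloop()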
+ + STANDARD OPTIONS + + background, borderwidth, cursor, + exportselection, font, foreground, + highlightbackground, highlightcolor, + highlightthickness, insertbackground, + insertborderwidth, insertofftime, + insertontime, insertwidth, padx, pady, + relief, selectbackground, + selectborderwidth, selectforeground, + setgrid, takefocus, + xscrollcommand, yscrollcommand, + + WIDGET-SPECIFIC OPTIONS + + autoseparators, height, maxundo, + spacing1, spacing2, spacing3, + state, tabs, undo, width, wrap, + + """ + Widget.__init__(self, master, 'text', cnf, kw) + + def bbox(self, index): + """Return a tuple of (x,y,width,height) which gives the bounding + box of the visible part of the character at the given index.""" + return self._getints( + self.tk.call(self._w, 'bbox', index)) or None + + def compare(self, index1, op, index2): + """Return whether between index INDEX1 and index INDEX2 the + relation OP is satisfied. OP is one of <, <=, ==, >=, >, or !=.""" + return self.tk.getboolean(self.tk.call( + self._w, 'compare', index1, op, index2)) + + def count(self, index1, index2, *args): # new in Tk 8.5 + """Counts the number of relevant things between the two indices. + If index1 is after index2, the result will be a negative number + (and this holds for each of the possible options). + + The actual items which are counted depends on the options given by + args. The result is a list of integers, one for the result of each + counting option given. Valid counting options are "chars", + "displaychars", "displayindices", "displaylines", "indices", + "lines", "xpixels" and "ypixels". There is an additional possible + option "update", which if given then all subsequent options ensure + that any possible out of date information is recalculated.""" + args = ['-%s' % arg for arg in args if not arg.startswith('-')] + args += [index1, index2] + res = self.tk.call(self._w, 'count', *args) or None + if res is not None and len(args) <= 3: + return (res, ) + else: + return res + + def debug(self, boolean=None): + """Turn on the internal consistency checks of the B-Tree inside the text + widget according to BOOLEAN.""" + if boolean is None: + return self.tk.getboolean(self.tk.call(self._w, 'debug')) + self.tk.call(self._w, 'debug', boolean) + + def delete(self, index1, index2=None): + """Delete the characters between INDEX1 and INDEX2 (not included).""" + self.tk.call(self._w, 'delete', index1, index2) + + def dlineinfo(self, index): + """Return tuple (x,y,width,height,baseline) giving the bounding box + and baseline position of the visible part of the line containing + the character at INDEX.""" + return self._getints(self.tk.call(self._w, 'dlineinfo', index)) + + def dump(self, index1, index2=None, command=None, **kw): + """Return the contents of the widget between index1 and index2. + + The type of contents returned in filtered based on the keyword + parameters; if 'all', 'image', 'mark', 'tag', 'text', or 'window' are + given and true, then the corresponding items are returned. The result + is a list of triples of the form (key, value, index). If none of the + keywords are true then 'all' is used by default. + + If the 'command' argument is given, it is called once for each element + of the list of triples, with the values of each triple serving as the + arguments to the function. 
In this case the list is not returned.""" + args = [] + func_name = None + result = None + if not command: + # Never call the dump command without the -command flag, since the + # output could involve Tcl quoting and would be a pain to parse + # right. Instead just set the command to build a list of triples + # as if we had done the parsing. + result = [] + def append_triple(key, value, index, result=result): + result.append((key, value, index)) + command = append_triple + try: + if not isinstance(command, str): + func_name = command = self._register(command) + args += ["-command", command] + for key in kw: + if kw[key]: args.append("-" + key) + args.append(index1) + if index2: + args.append(index2) + self.tk.call(self._w, "dump", *args) + return result + finally: + if func_name: + self.deletecommand(func_name) + + ## new in tk8.4 + def edit(self, *args): + """Internal method + + This method controls the undo mechanism and + the modified flag. The exact behavior of the + command depends on the option argument that + follows the edit argument. The following forms + of the command are currently supported: + + edit_modified, edit_redo, edit_reset, edit_separator + and edit_undo + + """ + return self.tk.call(self._w, 'edit', *args) + + def edit_modified(self, arg=None): + """Get or Set the modified flag + + If arg is not specified, returns the modified + flag of the widget. The insert, delete, edit undo and + edit redo commands or the user can set or clear the + modified flag. If boolean is specified, sets the + modified flag of the widget to arg. + """ + return self.edit("modified", arg) + + def edit_redo(self): + """Redo the last undone edit + + When the undo option is true, reapplies the last + undone edits provided no other edits were done since + then. Generates an error when the redo stack is empty. + Does nothing when the undo option is false. + """ + return self.edit("redo") + + def edit_reset(self): + """Clears the undo and redo stacks + """ + return self.edit("reset") + + def edit_separator(self): + """Inserts a separator (boundary) on the undo stack. + + Does nothing when the undo option is false + """ + return self.edit("separator") + + def edit_undo(self): + """Undoes the last edit action + + If the undo option is true. An edit action is defined + as all the insert and delete commands that are recorded + on the undo stack in between two separators. Generates + an error when the undo stack is empty. 
Does nothing + when the undo option is false + """ + return self.edit("undo") + + def get(self, index1, index2=None): + """Return the text from INDEX1 to INDEX2 (not included).""" + return self.tk.call(self._w, 'get', index1, index2) + # (Image commands are new in 8.0) + + def image_cget(self, index, option): + """Return the value of OPTION of an embedded image at INDEX.""" + if option[:1] != "-": + option = "-" + option + if option[-1:] == "_": + option = option[:-1] + return self.tk.call(self._w, "image", "cget", index, option) + + def image_configure(self, index, cnf=None, **kw): + """Configure an embedded image at INDEX.""" + return self._configure(('image', 'configure', index), cnf, kw) + + def image_create(self, index, cnf={}, **kw): + """Create an embedded image at INDEX.""" + return self.tk.call( + self._w, "image", "create", index, + *self._options(cnf, kw)) + + def image_names(self): + """Return all names of embedded images in this widget.""" + return self.tk.call(self._w, "image", "names") + + def index(self, index): + """Return the index in the form line.char for INDEX.""" + return str(self.tk.call(self._w, 'index', index)) + + def insert(self, index, chars, *args): + """Insert CHARS before the characters at INDEX. An additional + tag can be given in ARGS. Additional CHARS and tags can follow in ARGS.""" + self.tk.call((self._w, 'insert', index, chars) + args) + + def mark_gravity(self, markName, direction=None): + """Change the gravity of a mark MARKNAME to DIRECTION (LEFT or RIGHT). + Return the current value if None is given for DIRECTION.""" + return self.tk.call( + (self._w, 'mark', 'gravity', markName, direction)) + + def mark_names(self): + """Return all mark names.""" + return self.tk.splitlist(self.tk.call( + self._w, 'mark', 'names')) + + def mark_set(self, markName, index): + """Set mark MARKNAME before the character at INDEX.""" + self.tk.call(self._w, 'mark', 'set', markName, index) + + def mark_unset(self, *markNames): + """Delete all marks in MARKNAMES.""" + self.tk.call((self._w, 'mark', 'unset') + markNames) + + def mark_next(self, index): + """Return the name of the next mark after INDEX.""" + return self.tk.call(self._w, 'mark', 'next', index) or None + + def mark_previous(self, index): + """Return the name of the previous mark before INDEX.""" + return self.tk.call(self._w, 'mark', 'previous', index) or None + + def peer_create(self, newPathName, cnf={}, **kw): # new in Tk 8.5 + """Creates a peer text widget with the given newPathName, and any + optional standard configuration options. By default the peer will + have the same start and end line as the parent widget, but + these can be overridden with the standard configuration options.""" + self.tk.call(self._w, 'peer', 'create', newPathName, + *self._options(cnf, kw)) + + def peer_names(self): # new in Tk 8.5 + """Returns a list of peers of this widget (this does not include + the widget itself).""" + return self.tk.splitlist(self.tk.call(self._w, 'peer', 'names')) + + def replace(self, index1, index2, chars, *args): # new in Tk 8.5 + """Replaces the range of characters between index1 and index2 with + the given characters and tags specified by args. 
+ + See the method insert for some more information about args, and the + method delete for information about the indices.""" + self.tk.call(self._w, 'replace', index1, index2, chars, *args) + + def scan_mark(self, x, y): + """Remember the current X, Y coordinates.""" + self.tk.call(self._w, 'scan', 'mark', x, y) + + def scan_dragto(self, x, y): + """Adjust the view of the text to 10 times the + difference between X and Y and the coordinates given in + scan_mark.""" + self.tk.call(self._w, 'scan', 'dragto', x, y) + + def search(self, pattern, index, stopindex=None, + forwards=None, backwards=None, exact=None, + regexp=None, nocase=None, count=None, elide=None): + """Search PATTERN beginning from INDEX until STOPINDEX. + Return the index of the first character of a match or an + empty string.""" + args = [self._w, 'search'] + if forwards: args.append('-forwards') + if backwards: args.append('-backwards') + if exact: args.append('-exact') + if regexp: args.append('-regexp') + if nocase: args.append('-nocase') + if elide: args.append('-elide') + if count: args.append('-count'); args.append(count) + if pattern and pattern[0] == '-': args.append('--') + args.append(pattern) + args.append(index) + if stopindex: args.append(stopindex) + return str(self.tk.call(tuple(args))) + + def see(self, index): + """Scroll such that the character at INDEX is visible.""" + self.tk.call(self._w, 'see', index) + + def tag_add(self, tagName, index1, *args): + """Add tag TAGNAME to all characters between INDEX1 and index2 in ARGS. + Additional pairs of indices may follow in ARGS.""" + self.tk.call( + (self._w, 'tag', 'add', tagName, index1) + args) + + def tag_unbind(self, tagName, sequence, funcid=None): + """Unbind for all characters with TAGNAME for event SEQUENCE the + function identified with FUNCID.""" + self.tk.call(self._w, 'tag', 'bind', tagName, sequence, '') + if funcid: + self.deletecommand(funcid) + + def tag_bind(self, tagName, sequence, func, add=None): + """Bind to all characters with TAGNAME at event SEQUENCE a call to function FUNC. + + An additional boolean parameter ADD specifies whether FUNC will be + called additionally to the other bound function or whether it will + replace the previous function. See bind for the return value.""" + return self._bind((self._w, 'tag', 'bind', tagName), + sequence, func, add) + + def tag_cget(self, tagName, option): + """Return the value of OPTION for tag TAGNAME.""" + if option[:1] != '-': + option = '-' + option + if option[-1:] == '_': + option = option[:-1] + return self.tk.call(self._w, 'tag', 'cget', tagName, option) + + def tag_configure(self, tagName, cnf=None, **kw): + """Configure a tag TAGNAME.""" + return self._configure(('tag', 'configure', tagName), cnf, kw) + + tag_config = tag_configure + + def tag_delete(self, *tagNames): + """Delete all tags in TAGNAMES.""" + self.tk.call((self._w, 'tag', 'delete') + tagNames) + + def tag_lower(self, tagName, belowThis=None): + """Change the priority of tag TAGNAME such that it is lower + than the priority of BELOWTHIS.""" + self.tk.call(self._w, 'tag', 'lower', tagName, belowThis) + + def tag_names(self, index=None): + """Return a list of all tag names.""" + return self.tk.splitlist( + self.tk.call(self._w, 'tag', 'names', index)) + + def tag_nextrange(self, tagName, index1, index2=None): + """Return a list of start and end index for the first sequence of + characters between INDEX1 and INDEX2 which all have tag TAGNAME. 
+ The text is searched forward from INDEX1.""" + return self.tk.splitlist(self.tk.call( + self._w, 'tag', 'nextrange', tagName, index1, index2)) + + def tag_prevrange(self, tagName, index1, index2=None): + """Return a list of start and end index for the first sequence of + characters between INDEX1 and INDEX2 which all have tag TAGNAME. + The text is searched backwards from INDEX1.""" + return self.tk.splitlist(self.tk.call( + self._w, 'tag', 'prevrange', tagName, index1, index2)) + + def tag_raise(self, tagName, aboveThis=None): + """Change the priority of tag TAGNAME such that it is higher + than the priority of ABOVETHIS.""" + self.tk.call( + self._w, 'tag', 'raise', tagName, aboveThis) + + def tag_ranges(self, tagName): + """Return a list of ranges of text which have tag TAGNAME.""" + return self.tk.splitlist(self.tk.call( + self._w, 'tag', 'ranges', tagName)) + + def tag_remove(self, tagName, index1, index2=None): + """Remove tag TAGNAME from all characters between INDEX1 and INDEX2.""" + self.tk.call( + self._w, 'tag', 'remove', tagName, index1, index2) + + def window_cget(self, index, option): + """Return the value of OPTION of an embedded window at INDEX.""" + if option[:1] != '-': + option = '-' + option + if option[-1:] == '_': + option = option[:-1] + return self.tk.call(self._w, 'window', 'cget', index, option) + + def window_configure(self, index, cnf=None, **kw): + """Configure an embedded window at INDEX.""" + return self._configure(('window', 'configure', index), cnf, kw) + + window_config = window_configure + + def window_create(self, index, cnf={}, **kw): + """Create a window at INDEX.""" + self.tk.call( + (self._w, 'window', 'create', index) + + self._options(cnf, kw)) + + def window_names(self): + """Return all names of embedded windows in this widget.""" + return self.tk.splitlist( + self.tk.call(self._w, 'window', 'names')) + + def yview_pickplace(self, *what): + """Obsolete function, use see.""" + self.tk.call((self._w, 'yview', '-pickplace') + what) + + +class _setit: + """Internal class. 
It wraps the command in the widget OptionMenu.""" + + def __init__(self, var, value, callback=None): + self.__value = value + self.__var = var + self.__callback = callback + + def __call__(self, *args): + self.__var.set(self.__value) + if self.__callback is not None: + self.__callback(self.__value, *args) + + +class OptionMenu(Menubutton): + """OptionMenu which allows the user to select a value from a menu.""" + + def __init__(self, master, variable, value, *values, **kwargs): + """Construct an optionmenu widget with the parent MASTER, with + the resource textvariable set to VARIABLE, the initially selected + value VALUE, the other menu values VALUES and an additional + keyword argument command.""" + kw = {"borderwidth": 2, "textvariable": variable, + "indicatoron": 1, "relief": RAISED, "anchor": "c", + "highlightthickness": 2} + Widget.__init__(self, master, "menubutton", kw) + self.widgetName = 'tk_optionMenu' + menu = self.__menu = Menu(self, name="menu", tearoff=0) + self.menuname = menu._w + # 'command' is the only supported keyword + callback = kwargs.get('command') + if 'command' in kwargs: + del kwargs['command'] + if kwargs: + raise TclError('unknown option -'+next(iter(kwargs))) + menu.add_command(label=value, + command=_setit(variable, value, callback)) + for v in values: + menu.add_command(label=v, + command=_setit(variable, v, callback)) + self["menu"] = menu + + def __getitem__(self, name): + if name == 'menu': + return self.__menu + return Widget.__getitem__(self, name) + + def destroy(self): + """Destroy this widget and the associated menu.""" + Menubutton.destroy(self) + self.__menu = None + + +class Image: + """Base class for images.""" + _last_id = 0 + + def __init__(self, imgtype, name=None, cnf={}, master=None, **kw): + self.name = None + if master is None: + master = _get_default_root('create image') + self.tk = getattr(master, 'tk', master) + if not name: + Image._last_id += 1 + name = "pyimage%r" % (Image._last_id,) # tk itself would use image + if kw and cnf: cnf = _cnfmerge((cnf, kw)) + elif kw: cnf = kw + options = () + for k, v in cnf.items(): + if callable(v): + v = self._register(v) + options = options + ('-'+k, v) + self.tk.call(('image', 'create', imgtype, name,) + options) + self.name = name + + def __str__(self): return self.name + + def __del__(self): + if self.name: + try: + self.tk.call('image', 'delete', self.name) + except TclError: + # May happen if the root was destroyed + pass + + def __setitem__(self, key, value): + self.tk.call(self.name, 'configure', '-'+key, value) + + def __getitem__(self, key): + return self.tk.call(self.name, 'configure', '-'+key) + + def configure(self, **kw): + """Configure the image.""" + res = () + for k, v in _cnfmerge(kw).items(): + if v is not None: + if k[-1] == '_': k = k[:-1] + if callable(v): + v = self._register(v) + res = res + ('-'+k, v) + self.tk.call((self.name, 'config') + res) + + config = configure + + def height(self): + """Return the height of the image.""" + return self.tk.getint( + self.tk.call('image', 'height', self.name)) + + def type(self): + """Return the type of the image, e.g. "photo" or "bitmap".""" + return self.tk.call('image', 'type', self.name) + + def width(self): + """Return the width of the image.""" + return self.tk.getint( + self.tk.call('image', 'width', self.name)) + + +class PhotoImage(Image): + """Widget which can display images in PGM, PPM, GIF, PNG format.""" + + def __init__(self, name=None, cnf={}, master=None, **kw): + """Create an image with NAME. 
+ + Valid resource names: data, format, file, gamma, height, palette, + width.""" + Image.__init__(self, 'photo', name, cnf, master, **kw) + + def blank(self): + """Display a transparent image.""" + self.tk.call(self.name, 'blank') + + def cget(self, option): + """Return the value of OPTION.""" + return self.tk.call(self.name, 'cget', '-' + option) + # XXX config + + def __getitem__(self, key): + return self.tk.call(self.name, 'cget', '-' + key) + # XXX copy -from, -to, ...? + + def copy(self): + """Return a new PhotoImage with the same image as this widget.""" + destImage = PhotoImage(master=self.tk) + self.tk.call(destImage, 'copy', self.name) + return destImage + + def zoom(self, x, y=''): + """Return a new PhotoImage with the same image as this widget + but zoom it with a factor of x in the X direction and y in the Y + direction. If y is not given, the default value is the same as x. + """ + destImage = PhotoImage(master=self.tk) + if y=='': y=x + self.tk.call(destImage, 'copy', self.name, '-zoom',x,y) + return destImage + + def subsample(self, x, y=''): + """Return a new PhotoImage based on the same image as this widget + but use only every Xth or Yth pixel. If y is not given, the + default value is the same as x. + """ + destImage = PhotoImage(master=self.tk) + if y=='': y=x + self.tk.call(destImage, 'copy', self.name, '-subsample',x,y) + return destImage + + def get(self, x, y): + """Return the color (red, green, blue) of the pixel at X,Y.""" + return self.tk.call(self.name, 'get', x, y) + + def put(self, data, to=None): + """Put row formatted colors to image starting from + position TO, e.g. image.put("{red green} {blue yellow}", to=(4,6))""" + args = (self.name, 'put', data) + if to: + if to[0] == '-to': + to = to[1:] + args = args + ('-to',) + tuple(to) + self.tk.call(args) + # XXX read + + def write(self, filename, format=None, from_coords=None): + """Write image to file FILENAME in FORMAT starting from + position FROM_COORDS.""" + args = (self.name, 'write', filename) + if format: + args = args + ('-format', format) + if from_coords: + args = args + ('-from',) + tuple(from_coords) + self.tk.call(args) + + def transparency_get(self, x, y): + """Return True if the pixel at x,y is transparent.""" + return self.tk.getboolean(self.tk.call( + self.name, 'transparency', 'get', x, y)) + + def transparency_set(self, x, y, boolean): + """Set the transparency of the pixel at x,y.""" + self.tk.call(self.name, 'transparency', 'set', x, y, boolean) + + +class BitmapImage(Image): + """Widget which can display images in XBM format.""" + + def __init__(self, name=None, cnf={}, master=None, **kw): + """Create a bitmap with NAME. + + Valid resource names: background, data, file, foreground, maskdata, maskfile.""" + Image.__init__(self, 'bitmap', name, cnf, master, **kw) + + +def image_names(): + tk = _get_default_root('use image_names()').tk + return tk.splitlist(tk.call('image', 'names')) + + +def image_types(): + tk = _get_default_root('use image_types()').tk + return tk.splitlist(tk.call('image', 'types')) + + +class Spinbox(Widget, XView): + """spinbox widget.""" + + def __init__(self, master=None, cnf={}, **kw): + """Construct a spinbox widget with the parent MASTER. 
+ + STANDARD OPTIONS + + activebackground, background, borderwidth, + cursor, exportselection, font, foreground, + highlightbackground, highlightcolor, + highlightthickness, insertbackground, + insertborderwidth, insertofftime, + insertontime, insertwidth, justify, relief, + repeatdelay, repeatinterval, + selectbackground, selectborderwidth + selectforeground, takefocus, textvariable + xscrollcommand. + + WIDGET-SPECIFIC OPTIONS + + buttonbackground, buttoncursor, + buttondownrelief, buttonuprelief, + command, disabledbackground, + disabledforeground, format, from, + invalidcommand, increment, + readonlybackground, state, to, + validate, validatecommand values, + width, wrap, + """ + Widget.__init__(self, master, 'spinbox', cnf, kw) + + def bbox(self, index): + """Return a tuple of X1,Y1,X2,Y2 coordinates for a + rectangle which encloses the character given by index. + + The first two elements of the list give the x and y + coordinates of the upper-left corner of the screen + area covered by the character (in pixels relative + to the widget) and the last two elements give the + width and height of the character, in pixels. The + bounding box may refer to a region outside the + visible area of the window. + """ + return self._getints(self.tk.call(self._w, 'bbox', index)) or None + + def delete(self, first, last=None): + """Delete one or more elements of the spinbox. + + First is the index of the first character to delete, + and last is the index of the character just after + the last one to delete. If last isn't specified it + defaults to first+1, i.e. a single character is + deleted. This command returns an empty string. + """ + return self.tk.call(self._w, 'delete', first, last) + + def get(self): + """Returns the spinbox's string""" + return self.tk.call(self._w, 'get') + + def icursor(self, index): + """Alter the position of the insertion cursor. + + The insertion cursor will be displayed just before + the character given by index. Returns an empty string + """ + return self.tk.call(self._w, 'icursor', index) + + def identify(self, x, y): + """Returns the name of the widget at position x, y + + Return value is one of: none, buttondown, buttonup, entry + """ + return self.tk.call(self._w, 'identify', x, y) + + def index(self, index): + """Returns the numerical index corresponding to index + """ + return self.tk.call(self._w, 'index', index) + + def insert(self, index, s): + """Insert string s at index + + Returns an empty string. + """ + return self.tk.call(self._w, 'insert', index, s) + + def invoke(self, element): + """Causes the specified element to be invoked + + The element could be buttondown or buttonup + triggering the action associated with it. + """ + return self.tk.call(self._w, 'invoke', element) + + def scan(self, *args): + """Internal function.""" + return self._getints( + self.tk.call((self._w, 'scan') + args)) or () + + def scan_mark(self, x): + """Records x and the current view in the spinbox window; + + used in conjunction with later scan dragto commands. + Typically this command is associated with a mouse button + press in the widget. It returns an empty string. + """ + return self.scan("mark", x) + + def scan_dragto(self, x): + """Compute the difference between the given x argument + and the x argument to the last scan mark command + + It then adjusts the view left or right by 10 times the + difference in x-coordinates. 
This command is typically + associated with mouse motion events in the widget, to + produce the effect of dragging the spinbox at high speed + through the window. The return value is an empty string. + """ + return self.scan("dragto", x) + + def selection(self, *args): + """Internal function.""" + return self._getints( + self.tk.call((self._w, 'selection') + args)) or () + + def selection_adjust(self, index): + """Locate the end of the selection nearest to the character + given by index, + + Then adjust that end of the selection to be at index + (i.e including but not going beyond index). The other + end of the selection is made the anchor point for future + select to commands. If the selection isn't currently in + the spinbox, then a new selection is created to include + the characters between index and the most recent selection + anchor point, inclusive. + """ + return self.selection("adjust", index) + + def selection_clear(self): + """Clear the selection + + If the selection isn't in this widget then the + command has no effect. + """ + return self.selection("clear") + + def selection_element(self, element=None): + """Sets or gets the currently selected element. + + If a spinbutton element is specified, it will be + displayed depressed. + """ + return self.tk.call(self._w, 'selection', 'element', element) + + def selection_from(self, index): + """Set the fixed end of a selection to INDEX.""" + self.selection('from', index) + + def selection_present(self): + """Return True if there are characters selected in the spinbox, False + otherwise.""" + return self.tk.getboolean( + self.tk.call(self._w, 'selection', 'present')) + + def selection_range(self, start, end): + """Set the selection from START to END (not included).""" + self.selection('range', start, end) + + def selection_to(self, index): + """Set the variable end of a selection to INDEX.""" + self.selection('to', index) + +########################################################################### + + +class LabelFrame(Widget): + """labelframe widget.""" + + def __init__(self, master=None, cnf={}, **kw): + """Construct a labelframe widget with the parent MASTER. + + STANDARD OPTIONS + + borderwidth, cursor, font, foreground, + highlightbackground, highlightcolor, + highlightthickness, padx, pady, relief, + takefocus, text + + WIDGET-SPECIFIC OPTIONS + + background, class, colormap, container, + height, labelanchor, labelwidget, + visual, width + """ + Widget.__init__(self, master, 'labelframe', cnf, kw) + +######################################################################## + + +class PanedWindow(Widget): + """panedwindow widget.""" + + def __init__(self, master=None, cnf={}, **kw): + """Construct a panedwindow widget with the parent MASTER. + + STANDARD OPTIONS + + background, borderwidth, cursor, height, + orient, relief, width + + WIDGET-SPECIFIC OPTIONS + + handlepad, handlesize, opaqueresize, + sashcursor, sashpad, sashrelief, + sashwidth, showhandle, + """ + Widget.__init__(self, master, 'panedwindow', cnf, kw) + + def add(self, child, **kw): + """Add a child widget to the panedwindow in a new pane. + + The child argument is the name of the child widget + followed by pairs of arguments that specify how to + manage the windows. The possible options and values + are the ones accepted by the paneconfigure method. 
+ """ + self.tk.call((self._w, 'add', child) + self._options(kw)) + + def remove(self, child): + """Remove the pane containing child from the panedwindow + + All geometry management options for child will be forgotten. + """ + self.tk.call(self._w, 'forget', child) + + forget = remove + + def identify(self, x, y): + """Identify the panedwindow component at point x, y + + If the point is over a sash or a sash handle, the result + is a two element list containing the index of the sash or + handle, and a word indicating whether it is over a sash + or a handle, such as {0 sash} or {2 handle}. If the point + is over any other part of the panedwindow, the result is + an empty list. + """ + return self.tk.call(self._w, 'identify', x, y) + + def proxy(self, *args): + """Internal function.""" + return self._getints( + self.tk.call((self._w, 'proxy') + args)) or () + + def proxy_coord(self): + """Return the x and y pair of the most recent proxy location + """ + return self.proxy("coord") + + def proxy_forget(self): + """Remove the proxy from the display. + """ + return self.proxy("forget") + + def proxy_place(self, x, y): + """Place the proxy at the given x and y coordinates. + """ + return self.proxy("place", x, y) + + def sash(self, *args): + """Internal function.""" + return self._getints( + self.tk.call((self._w, 'sash') + args)) or () + + def sash_coord(self, index): + """Return the current x and y pair for the sash given by index. + + Index must be an integer between 0 and 1 less than the + number of panes in the panedwindow. The coordinates given are + those of the top left corner of the region containing the sash. + pathName sash dragto index x y This command computes the + difference between the given coordinates and the coordinates + given to the last sash coord command for the given sash. It then + moves that sash the computed difference. The return value is the + empty string. + """ + return self.sash("coord", index) + + def sash_mark(self, index): + """Records x and y for the sash given by index; + + Used in conjunction with later dragto commands to move the sash. + """ + return self.sash("mark", index) + + def sash_place(self, index, x, y): + """Place the sash given by index at the given coordinates + """ + return self.sash("place", index, x, y) + + def panecget(self, child, option): + """Query a management option for window. + + Option may be any value allowed by the paneconfigure subcommand + """ + return self.tk.call( + (self._w, 'panecget') + (child, '-'+option)) + + def paneconfigure(self, tagOrId, cnf=None, **kw): + """Query or modify the management options for window. + + If no option is specified, returns a list describing all + of the available options for pathName. If option is + specified with no value, then the command returns a list + describing the one named option (this list will be identical + to the corresponding sublist of the value returned if no + option is specified). If one or more option-value pairs are + specified, then the command modifies the given widget + option(s) to have the given value(s); in this case the + command returns an empty string. The following options + are supported: + + after window + Insert the window after the window specified. window + should be the name of a window already managed by pathName. + before window + Insert the window before the window specified. window + should be the name of a window already managed by pathName. + height size + Specify a height for the window. 
The height will be the + outer dimension of the window including its border, if + any. If size is an empty string, or if -height is not + specified, then the height requested internally by the + window will be used initially; the height may later be + adjusted by the movement of sashes in the panedwindow. + Size may be any value accepted by Tk_GetPixels. + minsize n + Specifies that the size of the window cannot be made + less than n. This constraint only affects the size of + the widget in the paned dimension -- the x dimension + for horizontal panedwindows, the y dimension for + vertical panedwindows. May be any value accepted by + Tk_GetPixels. + padx n + Specifies a non-negative value indicating how much + extra space to leave on each side of the window in + the X-direction. The value may have any of the forms + accepted by Tk_GetPixels. + pady n + Specifies a non-negative value indicating how much + extra space to leave on each side of the window in + the Y-direction. The value may have any of the forms + accepted by Tk_GetPixels. + sticky style + If a window's pane is larger than the requested + dimensions of the window, this option may be used + to position (or stretch) the window within its pane. + Style is a string that contains zero or more of the + characters n, s, e or w. The string can optionally + contains spaces or commas, but they are ignored. Each + letter refers to a side (north, south, east, or west) + that the window will "stick" to. If both n and s + (or e and w) are specified, the window will be + stretched to fill the entire height (or width) of + its cavity. + width size + Specify a width for the window. The width will be + the outer dimension of the window including its + border, if any. If size is an empty string, or + if -width is not specified, then the width requested + internally by the window will be used initially; the + width may later be adjusted by the movement of sashes + in the panedwindow. Size may be any value accepted by + Tk_GetPixels. + + """ + if cnf is None and not kw: + return self._getconfigure(self._w, 'paneconfigure', tagOrId) + if isinstance(cnf, str) and not kw: + return self._getconfigure1( + self._w, 'paneconfigure', tagOrId, '-'+cnf) + self.tk.call((self._w, 'paneconfigure', tagOrId) + + self._options(cnf, kw)) + + paneconfig = paneconfigure + + def panes(self): + """Returns an ordered list of the child panes.""" + return self.tk.splitlist(self.tk.call(self._w, 'panes')) + +# Test: + + +def _test(): + root = Tk() + text = "This is Tcl/Tk version %s" % TclVersion + text += "\nThis should be a cedilla: \xe7" + label = Label(root, text=text) + label.pack() + test = Button(root, text="Click me!", + command=lambda root=root: root.test.configure( + text="[%s]" % root.test['text'])) + test.pack() + root.test = test + quit = Button(root, text="QUIT", command=root.destroy) + quit.pack() + # The following three commands are needed so the window pops + # up on top on Windows... 
+ root.iconify() + root.update() + root.deiconify() + root.mainloop() + + +__all__ = [name for name, obj in globals().items() + if not name.startswith('_') and not isinstance(obj, types.ModuleType) + and name not in {'wantobjects'}] + +if __name__ == '__main__': + _test() diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/__main__.py python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/__main__.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/__main__.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/__main__.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,7 @@ +"""Main entry point""" + +import sys +if sys.argv[0].endswith("__main__.py"): + sys.argv[0] = "python -m tkinter" +from . import _test as main +main() diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/colorchooser.py python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/colorchooser.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/colorchooser.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/colorchooser.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,86 @@ +# tk common color chooser dialogue +# +# this module provides an interface to the native color dialogue +# available in Tk 4.2 and newer. +# +# written by Fredrik Lundh, May 1997 +# +# fixed initialcolor handling in August 1998 +# + + +from tkinter.commondialog import Dialog + +__all__ = ["Chooser", "askcolor"] + + +class Chooser(Dialog): + """Create a dialog for the tk_chooseColor command. + + Args: + master: The master widget for this dialog. If not provided, + defaults to options['parent'] (if defined). + options: Dictionary of options for the tk_chooseColor call. + initialcolor: Specifies the selected color when the + dialog is first displayed. This can be a tk color + string or a 3-tuple of ints in the range (0, 255) + for an RGB triplet. + parent: The parent window of the color dialog. The + color dialog is displayed on top of this. + title: A string for the title of the dialog box. + """ + + command = "tk_chooseColor" + + def _fixoptions(self): + """Ensure initialcolor is a tk color string. + + Convert initialcolor from a RGB triplet to a color string. + """ + try: + color = self.options["initialcolor"] + if isinstance(color, tuple): + # Assume an RGB triplet. + self.options["initialcolor"] = "#%02x%02x%02x" % color + except KeyError: + pass + + def _fixresult(self, widget, result): + """Adjust result returned from call to tk_chooseColor. + + Return both an RGB tuple of ints in the range (0, 255) and the + tk color string in the form #rrggbb. + """ + # Result can be many things: an empty tuple, an empty string, or + # a _tkinter.Tcl_Obj, so this somewhat weird check handles that. + if not result or not str(result): + return None, None # canceled + + # To simplify application code, the color chooser returns + # an RGB tuple together with the Tk color string. + r, g, b = widget.winfo_rgb(result) + return (r//256, g//256, b//256), str(result) + + +# +# convenience stuff + +def askcolor(color=None, **options): + """Display dialog window for selection of a color. + + Convenience wrapper for the Chooser class. Displays the color + chooser dialog with color as the initial value. 
+ """ + + if color: + options = options.copy() + options["initialcolor"] = color + + return Chooser(**options).show() + + +# -------------------------------------------------------------------- +# test stuff + +if __name__ == "__main__": + print("color", askcolor()) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/commondialog.py python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/commondialog.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/commondialog.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/commondialog.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,53 @@ +# base class for tk common dialogues +# +# this module provides a base class for accessing the common +# dialogues available in Tk 4.2 and newer. use filedialog, +# colorchooser, and messagebox to access the individual +# dialogs. +# +# written by Fredrik Lundh, May 1997 +# + +__all__ = ["Dialog"] + +from tkinter import Frame, _get_temp_root, _destroy_temp_root + + +class Dialog: + + command = None + + def __init__(self, master=None, **options): + if master is None: + master = options.get('parent') + self.master = master + self.options = options + + def _fixoptions(self): + pass # hook + + def _fixresult(self, widget, result): + return result # hook + + def show(self, **options): + + # update instance options + for k, v in options.items(): + self.options[k] = v + + self._fixoptions() + + master = self.master + if master is None: + master = _get_temp_root() + try: + self._test_callback(master) # The function below is replaced for some tests. + s = master.tk.call(self.command, *master._options(self.options)) + s = self._fixresult(master, s) + finally: + _destroy_temp_root(master) + + return s + + def _test_callback(self, master): + pass diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/constants.py python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/constants.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/constants.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/constants.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,110 @@ +# Symbolic constants for Tk + +# Booleans +NO=FALSE=OFF=0 +YES=TRUE=ON=1 + +# -anchor and -sticky +N='n' +S='s' +W='w' +E='e' +NW='nw' +SW='sw' +NE='ne' +SE='se' +NS='ns' +EW='ew' +NSEW='nsew' +CENTER='center' + +# -fill +NONE='none' +X='x' +Y='y' +BOTH='both' + +# -side +LEFT='left' +TOP='top' +RIGHT='right' +BOTTOM='bottom' + +# -relief +RAISED='raised' +SUNKEN='sunken' +FLAT='flat' +RIDGE='ridge' +GROOVE='groove' +SOLID = 'solid' + +# -orient +HORIZONTAL='horizontal' +VERTICAL='vertical' + +# -tabs +NUMERIC='numeric' + +# -wrap +CHAR='char' +WORD='word' + +# -align +BASELINE='baseline' + +# -bordermode +INSIDE='inside' +OUTSIDE='outside' + +# Special tags, marks and insert positions +SEL='sel' +SEL_FIRST='sel.first' +SEL_LAST='sel.last' +END='end' +INSERT='insert' +CURRENT='current' +ANCHOR='anchor' +ALL='all' # e.g. 
Canvas.delete(ALL) + +# Text widget and button states +NORMAL='normal' +DISABLED='disabled' +ACTIVE='active' +# Canvas state +HIDDEN='hidden' + +# Menu item types +CASCADE='cascade' +CHECKBUTTON='checkbutton' +COMMAND='command' +RADIOBUTTON='radiobutton' +SEPARATOR='separator' + +# Selection modes for list boxes +SINGLE='single' +BROWSE='browse' +MULTIPLE='multiple' +EXTENDED='extended' + +# Activestyle for list boxes +# NONE='none' is also valid +DOTBOX='dotbox' +UNDERLINE='underline' + +# Various canvas styles +PIESLICE='pieslice' +CHORD='chord' +ARC='arc' +FIRST='first' +LAST='last' +BUTT='butt' +PROJECTING='projecting' +ROUND='round' +BEVEL='bevel' +MITER='miter' + +# Arguments to xview/yview +MOVETO='moveto' +SCROLL='scroll' +UNITS='units' +PAGES='pages' diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/dialog.py python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/dialog.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/dialog.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/dialog.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,49 @@ +# dialog.py -- Tkinter interface to the tk_dialog script. + +from tkinter import _cnfmerge, Widget, TclError, Button, Pack + +__all__ = ["Dialog"] + +DIALOG_ICON = 'questhead' + + +class Dialog(Widget): + def __init__(self, master=None, cnf={}, **kw): + cnf = _cnfmerge((cnf, kw)) + self.widgetName = '__dialog__' + Widget._setup(self, master, cnf) + self.num = self.tk.getint( + self.tk.call( + 'tk_dialog', self._w, + cnf['title'], cnf['text'], + cnf['bitmap'], cnf['default'], + *cnf['strings'])) + try: Widget.destroy(self) + except TclError: pass + + def destroy(self): pass + + +def _test(): + d = Dialog(None, {'title': 'File Modified', + 'text': + 'File "Python.h" has been modified' + ' since the last time it was saved.' + ' Do you want to save it before' + ' exiting the application.', + 'bitmap': DIALOG_ICON, + 'default': 0, + 'strings': ('Save File', + 'Discard Changes', + 'Return to Editor')}) + print(d.num) + + +if __name__ == '__main__': + t = Button(None, {'text': 'Test', + 'command': _test, + Pack: {}}) + q = Button(None, {'text': 'Quit', + 'command': t.quit, + Pack: {}}) + t.mainloop() diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/dnd.py python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/dnd.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/dnd.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/dnd.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,324 @@ +"""Drag-and-drop support for Tkinter. + +This is very preliminary. I currently only support dnd *within* one +application, between different windows (or within the same window). + +I am trying to make this as generic as possible -- not dependent on +the use of a particular widget or icon type, etc. I also hope that +this will work with Pmw. + +To enable an object to be dragged, you must create an event binding +for it that starts the drag-and-drop process. Typically, you should +bind to a callback function that you write. The function +should call Tkdnd.dnd_start(source, event), where 'source' is the +object to be dragged, and 'event' is the event that invoked the call +(the argument to your callback function). Even though this is a class +instantiation, the returned instance should not be stored -- it will +be kept alive automatically for the duration of the drag-and-drop. 
+ +When a drag-and-drop is already in process for the Tk interpreter, the +call is *ignored*; this normally averts starting multiple simultaneous +dnd processes, e.g. because different button callbacks all +dnd_start(). + +The object is *not* necessarily a widget -- it can be any +application-specific object that is meaningful to potential +drag-and-drop targets. + +Potential drag-and-drop targets are discovered as follows. Whenever +the mouse moves, and at the start and end of a drag-and-drop move, the +Tk widget directly under the mouse is inspected. This is the target +widget (not to be confused with the target object, yet to be +determined). If there is no target widget, there is no dnd target +object. If there is a target widget, and it has an attribute +dnd_accept, this should be a function (or any callable object). The +function is called as dnd_accept(source, event), where 'source' is the +object being dragged (the object passed to dnd_start() above), and +'event' is the most recent event object (generally a event; +it can also be or ). If the dnd_accept() +function returns something other than None, this is the new dnd target +object. If dnd_accept() returns None, or if the target widget has no +dnd_accept attribute, the target widget's parent is considered as the +target widget, and the search for a target object is repeated from +there. If necessary, the search is repeated all the way up to the +root widget. If none of the target widgets can produce a target +object, there is no target object (the target object is None). + +The target object thus produced, if any, is called the new target +object. It is compared with the old target object (or None, if there +was no old target widget). There are several cases ('source' is the +source object, and 'event' is the most recent event object): + +- Both the old and new target objects are None. Nothing happens. + +- The old and new target objects are the same object. Its method +dnd_motion(source, event) is called. + +- The old target object was None, and the new target object is not +None. The new target object's method dnd_enter(source, event) is +called. + +- The new target object is None, and the old target object is not +None. The old target object's method dnd_leave(source, event) is +called. + +- The old and new target objects differ and neither is None. The old +target object's method dnd_leave(source, event), and then the new +target object's method dnd_enter(source, event) is called. + +Once this is done, the new target object replaces the old one, and the +Tk mainloop proceeds. The return value of the methods mentioned above +is ignored; if they raise an exception, the normal exception handling +mechanisms take over. + +The drag-and-drop processes can end in two ways: a final target object +is selected, or no final target object is selected. When a final +target object is selected, it will always have been notified of the +potential drop by a call to its dnd_enter() method, as described +above, and possibly one or more calls to its dnd_motion() method; its +dnd_leave() method has not been called since the last call to +dnd_enter(). The target is notified of the drop by a call to its +method dnd_commit(source, event). + +If no final target object is selected, and there was an old target +object, its dnd_leave(source, event) method is called to complete the +dnd sequence. 
+ +Finally, the source object is notified that the drag-and-drop process +is over, by a call to source.dnd_end(target, event), specifying either +the selected target object, or None if no target object was selected. +The source object can use this to implement the commit action; this is +sometimes simpler than to do it in the target's dnd_commit(). The +target's dnd_commit() method could then simply be aliased to +dnd_leave(). + +At any time during a dnd sequence, the application can cancel the +sequence by calling the cancel() method on the object returned by +dnd_start(). This will call dnd_leave() if a target is currently +active; it will never call dnd_commit(). + +""" + +import tkinter + +__all__ = ["dnd_start", "DndHandler"] + + +# The factory function + +def dnd_start(source, event): + h = DndHandler(source, event) + if h.root is not None: + return h + else: + return None + + +# The class that does the work + +class DndHandler: + + root = None + + def __init__(self, source, event): + if event.num > 5: + return + root = event.widget._root() + try: + root.__dnd + return # Don't start recursive dnd + except AttributeError: + root.__dnd = self + self.root = root + self.source = source + self.target = None + self.initial_button = button = event.num + self.initial_widget = widget = event.widget + self.release_pattern = "" % (button, button) + self.save_cursor = widget['cursor'] or "" + widget.bind(self.release_pattern, self.on_release) + widget.bind("", self.on_motion) + widget['cursor'] = "hand2" + + def __del__(self): + root = self.root + self.root = None + if root is not None: + try: + del root.__dnd + except AttributeError: + pass + + def on_motion(self, event): + x, y = event.x_root, event.y_root + target_widget = self.initial_widget.winfo_containing(x, y) + source = self.source + new_target = None + while target_widget is not None: + try: + attr = target_widget.dnd_accept + except AttributeError: + pass + else: + new_target = attr(source, event) + if new_target is not None: + break + target_widget = target_widget.master + old_target = self.target + if old_target is new_target: + if old_target is not None: + old_target.dnd_motion(source, event) + else: + if old_target is not None: + self.target = None + old_target.dnd_leave(source, event) + if new_target is not None: + new_target.dnd_enter(source, event) + self.target = new_target + + def on_release(self, event): + self.finish(event, 1) + + def cancel(self, event=None): + self.finish(event, 0) + + def finish(self, event, commit=0): + target = self.target + source = self.source + widget = self.initial_widget + root = self.root + try: + del root.__dnd + self.initial_widget.unbind(self.release_pattern) + self.initial_widget.unbind("") + widget['cursor'] = self.save_cursor + self.target = self.source = self.initial_widget = self.root = None + if target is not None: + if commit: + target.dnd_commit(source, event) + else: + target.dnd_leave(source, event) + finally: + source.dnd_end(target, event) + + +# ---------------------------------------------------------------------- +# The rest is here for testing and demonstration purposes only! 
+ +class Icon: + + def __init__(self, name): + self.name = name + self.canvas = self.label = self.id = None + + def attach(self, canvas, x=10, y=10): + if canvas is self.canvas: + self.canvas.coords(self.id, x, y) + return + if self.canvas is not None: + self.detach() + if canvas is None: + return + label = tkinter.Label(canvas, text=self.name, + borderwidth=2, relief="raised") + id = canvas.create_window(x, y, window=label, anchor="nw") + self.canvas = canvas + self.label = label + self.id = id + label.bind("", self.press) + + def detach(self): + canvas = self.canvas + if canvas is None: + return + id = self.id + label = self.label + self.canvas = self.label = self.id = None + canvas.delete(id) + label.destroy() + + def press(self, event): + if dnd_start(self, event): + # where the pointer is relative to the label widget: + self.x_off = event.x + self.y_off = event.y + # where the widget is relative to the canvas: + self.x_orig, self.y_orig = self.canvas.coords(self.id) + + def move(self, event): + x, y = self.where(self.canvas, event) + self.canvas.coords(self.id, x, y) + + def putback(self): + self.canvas.coords(self.id, self.x_orig, self.y_orig) + + def where(self, canvas, event): + # where the corner of the canvas is relative to the screen: + x_org = canvas.winfo_rootx() + y_org = canvas.winfo_rooty() + # where the pointer is relative to the canvas widget: + x = event.x_root - x_org + y = event.y_root - y_org + # compensate for initial pointer offset + return x - self.x_off, y - self.y_off + + def dnd_end(self, target, event): + pass + + +class Tester: + + def __init__(self, root): + self.top = tkinter.Toplevel(root) + self.canvas = tkinter.Canvas(self.top, width=100, height=100) + self.canvas.pack(fill="both", expand=1) + self.canvas.dnd_accept = self.dnd_accept + + def dnd_accept(self, source, event): + return self + + def dnd_enter(self, source, event): + self.canvas.focus_set() # Show highlight border + x, y = source.where(self.canvas, event) + x1, y1, x2, y2 = source.canvas.bbox(source.id) + dx, dy = x2-x1, y2-y1 + self.dndid = self.canvas.create_rectangle(x, y, x+dx, y+dy) + self.dnd_motion(source, event) + + def dnd_motion(self, source, event): + x, y = source.where(self.canvas, event) + x1, y1, x2, y2 = self.canvas.bbox(self.dndid) + self.canvas.move(self.dndid, x-x1, y-y1) + + def dnd_leave(self, source, event): + self.top.focus_set() # Hide highlight border + self.canvas.delete(self.dndid) + self.dndid = None + + def dnd_commit(self, source, event): + self.dnd_leave(source, event) + x, y = source.where(self.canvas, event) + source.attach(self.canvas, x, y) + + +def test(): + root = tkinter.Tk() + root.geometry("+1+1") + tkinter.Button(command=root.quit, text="Quit").pack() + t1 = Tester(root) + t1.top.geometry("+1+60") + t2 = Tester(root) + t2.top.geometry("+120+60") + t3 = Tester(root) + t3.top.geometry("+240+60") + i1 = Icon("ICON1") + i2 = Icon("ICON2") + i3 = Icon("ICON3") + i1.attach(t1.canvas) + i2.attach(t2.canvas) + i3.attach(t3.canvas) + root.mainloop() + + +if __name__ == '__main__': + test() diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/filedialog.py python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/filedialog.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/filedialog.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/filedialog.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,493 @@ +"""File selection dialog classes. 
+ +Classes: + +- FileDialog +- LoadFileDialog +- SaveFileDialog + +This module also presents tk common file dialogues, it provides interfaces +to the native file dialogues available in Tk 4.2 and newer, and the +directory dialogue available in Tk 8.3 and newer. +These interfaces were written by Fredrik Lundh, May 1997. +""" +__all__ = ["FileDialog", "LoadFileDialog", "SaveFileDialog", + "Open", "SaveAs", "Directory", + "askopenfilename", "asksaveasfilename", "askopenfilenames", + "askopenfile", "askopenfiles", "asksaveasfile", "askdirectory"] + +import fnmatch +import os +from tkinter import ( + Frame, LEFT, YES, BOTTOM, Entry, TOP, Button, Tk, X, + Toplevel, RIGHT, Y, END, Listbox, BOTH, Scrollbar, +) +from tkinter.dialog import Dialog +from tkinter import commondialog +from tkinter.simpledialog import _setup_dialog + + +dialogstates = {} + + +class FileDialog: + + """Standard file selection dialog -- no checks on selected file. + + Usage: + + d = FileDialog(master) + fname = d.go(dir_or_file, pattern, default, key) + if fname is None: ...canceled... + else: ...open file... + + All arguments to go() are optional. + + The 'key' argument specifies a key in the global dictionary + 'dialogstates', which keeps track of the values for the directory + and pattern arguments, overriding the values passed in (it does + not keep track of the default argument!). If no key is specified, + the dialog keeps no memory of previous state. Note that memory is + kept even when the dialog is canceled. (All this emulates the + behavior of the Macintosh file selection dialogs.) + + """ + + title = "File Selection Dialog" + + def __init__(self, master, title=None): + if title is None: title = self.title + self.master = master + self.directory = None + + self.top = Toplevel(master) + self.top.title(title) + self.top.iconname(title) + _setup_dialog(self.top) + + self.botframe = Frame(self.top) + self.botframe.pack(side=BOTTOM, fill=X) + + self.selection = Entry(self.top) + self.selection.pack(side=BOTTOM, fill=X) + self.selection.bind('', self.ok_event) + + self.filter = Entry(self.top) + self.filter.pack(side=TOP, fill=X) + self.filter.bind('', self.filter_command) + + self.midframe = Frame(self.top) + self.midframe.pack(expand=YES, fill=BOTH) + + self.filesbar = Scrollbar(self.midframe) + self.filesbar.pack(side=RIGHT, fill=Y) + self.files = Listbox(self.midframe, exportselection=0, + yscrollcommand=(self.filesbar, 'set')) + self.files.pack(side=RIGHT, expand=YES, fill=BOTH) + btags = self.files.bindtags() + self.files.bindtags(btags[1:] + btags[:1]) + self.files.bind('', self.files_select_event) + self.files.bind('', self.files_double_event) + self.filesbar.config(command=(self.files, 'yview')) + + self.dirsbar = Scrollbar(self.midframe) + self.dirsbar.pack(side=LEFT, fill=Y) + self.dirs = Listbox(self.midframe, exportselection=0, + yscrollcommand=(self.dirsbar, 'set')) + self.dirs.pack(side=LEFT, expand=YES, fill=BOTH) + self.dirsbar.config(command=(self.dirs, 'yview')) + btags = self.dirs.bindtags() + self.dirs.bindtags(btags[1:] + btags[:1]) + self.dirs.bind('', self.dirs_select_event) + self.dirs.bind('', self.dirs_double_event) + + self.ok_button = Button(self.botframe, + text="OK", + command=self.ok_command) + self.ok_button.pack(side=LEFT) + self.filter_button = Button(self.botframe, + text="Filter", + command=self.filter_command) + self.filter_button.pack(side=LEFT, expand=YES) + self.cancel_button = Button(self.botframe, + text="Cancel", + command=self.cancel_command) + 
+        self.cancel_button.pack(side=RIGHT)
+
+        self.top.protocol('WM_DELETE_WINDOW', self.cancel_command)
+        # XXX Are the following okay for a general audience?
+        self.top.bind('<Alt-w>', self.cancel_command)
+        self.top.bind('<Alt-W>', self.cancel_command)
+
+    def go(self, dir_or_file=os.curdir, pattern="*", default="", key=None):
+        if key and key in dialogstates:
+            self.directory, pattern = dialogstates[key]
+        else:
+            dir_or_file = os.path.expanduser(dir_or_file)
+            if os.path.isdir(dir_or_file):
+                self.directory = dir_or_file
+            else:
+                self.directory, default = os.path.split(dir_or_file)
+        self.set_filter(self.directory, pattern)
+        self.set_selection(default)
+        self.filter_command()
+        self.selection.focus_set()
+        self.top.wait_visibility() # window needs to be visible for the grab
+        self.top.grab_set()
+        self.how = None
+        self.master.mainloop()          # Exited by self.quit(how)
+        if key:
+            directory, pattern = self.get_filter()
+            if self.how:
+                directory = os.path.dirname(self.how)
+            dialogstates[key] = directory, pattern
+        self.top.destroy()
+        return self.how
+
+    def quit(self, how=None):
+        self.how = how
+        self.master.quit()              # Exit mainloop()
+
+    def dirs_double_event(self, event):
+        self.filter_command()
+
+    def dirs_select_event(self, event):
+        dir, pat = self.get_filter()
+        subdir = self.dirs.get('active')
+        dir = os.path.normpath(os.path.join(self.directory, subdir))
+        self.set_filter(dir, pat)
+
+    def files_double_event(self, event):
+        self.ok_command()
+
+    def files_select_event(self, event):
+        file = self.files.get('active')
+        self.set_selection(file)
+
+    def ok_event(self, event):
+        self.ok_command()
+
+    def ok_command(self):
+        self.quit(self.get_selection())
+
+    def filter_command(self, event=None):
+        dir, pat = self.get_filter()
+        try:
+            names = os.listdir(dir)
+        except OSError:
+            self.master.bell()
+            return
+        self.directory = dir
+        self.set_filter(dir, pat)
+        names.sort()
+        subdirs = [os.pardir]
+        matchingfiles = []
+        for name in names:
+            fullname = os.path.join(dir, name)
+            if os.path.isdir(fullname):
+                subdirs.append(name)
+            elif fnmatch.fnmatch(name, pat):
+                matchingfiles.append(name)
+        self.dirs.delete(0, END)
+        for name in subdirs:
+            self.dirs.insert(END, name)
+        self.files.delete(0, END)
+        for name in matchingfiles:
+            self.files.insert(END, name)
+        head, tail = os.path.split(self.get_selection())
+        if tail == os.curdir: tail = ''
+        self.set_selection(tail)
+
+    def get_filter(self):
+        filter = self.filter.get()
+        filter = os.path.expanduser(filter)
+        if filter[-1:] == os.sep or os.path.isdir(filter):
+            filter = os.path.join(filter, "*")
+        return os.path.split(filter)
+
+    def get_selection(self):
+        file = self.selection.get()
+        file = os.path.expanduser(file)
+        return file
+
+    def cancel_command(self, event=None):
+        self.quit()
+
+    def set_filter(self, dir, pat):
+        if not os.path.isabs(dir):
+            try:
+                pwd = os.getcwd()
+            except OSError:
+                pwd = None
+            if pwd:
+                dir = os.path.join(pwd, dir)
+                dir = os.path.normpath(dir)
+        self.filter.delete(0, END)
+        self.filter.insert(END, os.path.join(dir or os.curdir, pat or "*"))
+
+    def set_selection(self, file):
+        self.selection.delete(0, END)
+        self.selection.insert(END, os.path.join(self.directory, file))
+
+
+class LoadFileDialog(FileDialog):
+
+    """File selection dialog which checks that the file exists."""
+
+    title = "Load File Selection Dialog"
+
+    def ok_command(self):
+        file = self.get_selection()
+        if not os.path.isfile(file):
+            self.master.bell()
+        else:
+            self.quit(file)
+
+
+class SaveFileDialog(FileDialog):
+
+    """File selection dialog which checks
that the file may be created.""" + + title = "Save File Selection Dialog" + + def ok_command(self): + file = self.get_selection() + if os.path.exists(file): + if os.path.isdir(file): + self.master.bell() + return + d = Dialog(self.top, + title="Overwrite Existing File Question", + text="Overwrite existing file %r?" % (file,), + bitmap='questhead', + default=1, + strings=("Yes", "Cancel")) + if d.num != 0: + return + else: + head, tail = os.path.split(file) + if not os.path.isdir(head): + self.master.bell() + return + self.quit(file) + + +# For the following classes and modules: +# +# options (all have default values): +# +# - defaultextension: added to filename if not explicitly given +# +# - filetypes: sequence of (label, pattern) tuples. the same pattern +# may occur with several patterns. use "*" as pattern to indicate +# all files. +# +# - initialdir: initial directory. preserved by dialog instance. +# +# - initialfile: initial file (ignored by the open dialog). preserved +# by dialog instance. +# +# - parent: which window to place the dialog on top of +# +# - title: dialog title +# +# - multiple: if true user may select more than one file +# +# options for the directory chooser: +# +# - initialdir, parent, title: see above +# +# - mustexist: if true, user must pick an existing directory +# + + +class _Dialog(commondialog.Dialog): + + def _fixoptions(self): + try: + # make sure "filetypes" is a tuple + self.options["filetypes"] = tuple(self.options["filetypes"]) + except KeyError: + pass + + def _fixresult(self, widget, result): + if result: + # keep directory and filename until next time + # convert Tcl path objects to strings + try: + result = result.string + except AttributeError: + # it already is a string + pass + path, file = os.path.split(result) + self.options["initialdir"] = path + self.options["initialfile"] = file + self.filename = result # compatibility + return result + + +# +# file dialogs + +class Open(_Dialog): + "Ask for a filename to open" + + command = "tk_getOpenFile" + + def _fixresult(self, widget, result): + if isinstance(result, tuple): + # multiple results: + result = tuple([getattr(r, "string", r) for r in result]) + if result: + path, file = os.path.split(result[0]) + self.options["initialdir"] = path + # don't set initialfile or filename, as we have multiple of these + return result + if not widget.tk.wantobjects() and "multiple" in self.options: + # Need to split result explicitly + return self._fixresult(widget, widget.tk.splitlist(result)) + return _Dialog._fixresult(self, widget, result) + + +class SaveAs(_Dialog): + "Ask for a filename to save as" + + command = "tk_getSaveFile" + + +# the directory dialog has its own _fix routines. 
+class Directory(commondialog.Dialog): + "Ask for a directory" + + command = "tk_chooseDirectory" + + def _fixresult(self, widget, result): + if result: + # convert Tcl path objects to strings + try: + result = result.string + except AttributeError: + # it already is a string + pass + # keep directory until next time + self.options["initialdir"] = result + self.directory = result # compatibility + return result + +# +# convenience stuff + + +def askopenfilename(**options): + "Ask for a filename to open" + + return Open(**options).show() + + +def asksaveasfilename(**options): + "Ask for a filename to save as" + + return SaveAs(**options).show() + + +def askopenfilenames(**options): + """Ask for multiple filenames to open + + Returns a list of filenames or empty list if + cancel button selected + """ + options["multiple"]=1 + return Open(**options).show() + +# FIXME: are the following perhaps a bit too convenient? + + +def askopenfile(mode = "r", **options): + "Ask for a filename to open, and returned the opened file" + + filename = Open(**options).show() + if filename: + return open(filename, mode) + return None + + +def askopenfiles(mode = "r", **options): + """Ask for multiple filenames and return the open file + objects + + returns a list of open file objects or an empty list if + cancel selected + """ + + files = askopenfilenames(**options) + if files: + ofiles=[] + for filename in files: + ofiles.append(open(filename, mode)) + files=ofiles + return files + + +def asksaveasfile(mode = "w", **options): + "Ask for a filename to save as, and returned the opened file" + + filename = SaveAs(**options).show() + if filename: + return open(filename, mode) + return None + + +def askdirectory (**options): + "Ask for a directory, and return the file name" + return Directory(**options).show() + + +# -------------------------------------------------------------------- +# test stuff + +def test(): + """Simple test program.""" + root = Tk() + root.withdraw() + fd = LoadFileDialog(root) + loadfile = fd.go(key="test") + fd = SaveFileDialog(root) + savefile = fd.go(key="test") + print(loadfile, savefile) + + # Since the file name may contain non-ASCII characters, we need + # to find an encoding that likely supports the file name, and + # displays correctly on the terminal. 
+ + # Start off with UTF-8 + enc = "utf-8" + import sys + + # See whether CODESET is defined + try: + import locale + locale.setlocale(locale.LC_ALL,'') + enc = locale.nl_langinfo(locale.CODESET) + except (ImportError, AttributeError): + pass + + # dialog for opening files + + openfilename=askopenfilename(filetypes=[("all files", "*")]) + try: + fp=open(openfilename,"r") + fp.close() + except: + print("Could not open File: ") + print(sys.exc_info()[1]) + + print("open", openfilename.encode(enc)) + + # dialog for saving files + + saveasfilename=asksaveasfilename() + print("saveas", saveasfilename.encode(enc)) + + +if __name__ == '__main__': + test() diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/font.py python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/font.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/font.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/font.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,239 @@ +# Tkinter font wrapper +# +# written by Fredrik Lundh, February 1998 +# + +import itertools +import tkinter + +__version__ = "0.9" +__all__ = ["NORMAL", "ROMAN", "BOLD", "ITALIC", + "nametofont", "Font", "families", "names"] + +# weight/slant +NORMAL = "normal" +ROMAN = "roman" +BOLD = "bold" +ITALIC = "italic" + + +def nametofont(name, root=None): + """Given the name of a tk named font, returns a Font representation. + """ + return Font(name=name, exists=True, root=root) + + +class Font: + """Represents a named font. + + Constructor options are: + + font -- font specifier (name, system font, or (family, size, style)-tuple) + name -- name to use for this font configuration (defaults to a unique name) + exists -- does a named font by this name already exist? + Creates a new named font if False, points to the existing font if True. + Raises _tkinter.TclError if the assertion is false. + + the following are ignored if font is specified: + + family -- font 'family', e.g. 
Courier, Times, Helvetica + size -- font size in points + weight -- font thickness: NORMAL, BOLD + slant -- font slant: ROMAN, ITALIC + underline -- font underlining: false (0), true (1) + overstrike -- font strikeout: false (0), true (1) + + """ + + counter = itertools.count(1) + + def _set(self, kw): + options = [] + for k, v in kw.items(): + options.append("-"+k) + options.append(str(v)) + return tuple(options) + + def _get(self, args): + options = [] + for k in args: + options.append("-"+k) + return tuple(options) + + def _mkdict(self, args): + options = {} + for i in range(0, len(args), 2): + options[args[i][1:]] = args[i+1] + return options + + def __init__(self, root=None, font=None, name=None, exists=False, + **options): + if root is None: + root = tkinter._get_default_root('use font') + tk = getattr(root, 'tk', root) + if font: + # get actual settings corresponding to the given font + font = tk.splitlist(tk.call("font", "actual", font)) + else: + font = self._set(options) + if not name: + name = "font" + str(next(self.counter)) + self.name = name + + if exists: + self.delete_font = False + # confirm font exists + if self.name not in tk.splitlist(tk.call("font", "names")): + raise tkinter._tkinter.TclError( + "named font %s does not already exist" % (self.name,)) + # if font config info supplied, apply it + if font: + tk.call("font", "configure", self.name, *font) + else: + # create new font (raises TclError if the font exists) + tk.call("font", "create", self.name, *font) + self.delete_font = True + self._tk = tk + self._split = tk.splitlist + self._call = tk.call + + def __str__(self): + return self.name + + def __repr__(self): + return f"<{self.__class__.__module__}.{self.__class__.__qualname__}" \ + f" object {self.name!r}>" + + def __eq__(self, other): + if not isinstance(other, Font): + return NotImplemented + return self.name == other.name and self._tk == other._tk + + def __getitem__(self, key): + return self.cget(key) + + def __setitem__(self, key, value): + self.configure(**{key: value}) + + def __del__(self): + try: + if self.delete_font: + self._call("font", "delete", self.name) + except Exception: + pass + + def copy(self): + "Return a distinct copy of the current font" + return Font(self._tk, **self.actual()) + + def actual(self, option=None, displayof=None): + "Return actual font attributes" + args = () + if displayof: + args = ('-displayof', displayof) + if option: + args = args + ('-' + option, ) + return self._call("font", "actual", self.name, *args) + else: + return self._mkdict( + self._split(self._call("font", "actual", self.name, *args))) + + def cget(self, option): + "Get font attribute" + return self._call("font", "config", self.name, "-"+option) + + def config(self, **options): + "Modify font attributes" + if options: + self._call("font", "config", self.name, + *self._set(options)) + else: + return self._mkdict( + self._split(self._call("font", "config", self.name))) + + configure = config + + def measure(self, text, displayof=None): + "Return text width" + args = (text,) + if displayof: + args = ('-displayof', displayof, text) + return self._tk.getint(self._call("font", "measure", self.name, *args)) + + def metrics(self, *options, **kw): + """Return font metrics. 
+ + For best performance, create a dummy widget + using this font before calling this method.""" + args = () + displayof = kw.pop('displayof', None) + if displayof: + args = ('-displayof', displayof) + if options: + args = args + self._get(options) + return self._tk.getint( + self._call("font", "metrics", self.name, *args)) + else: + res = self._split(self._call("font", "metrics", self.name, *args)) + options = {} + for i in range(0, len(res), 2): + options[res[i][1:]] = self._tk.getint(res[i+1]) + return options + + +def families(root=None, displayof=None): + "Get font families (as a tuple)" + if root is None: + root = tkinter._get_default_root('use font.families()') + args = () + if displayof: + args = ('-displayof', displayof) + return root.tk.splitlist(root.tk.call("font", "families", *args)) + + +def names(root=None): + "Get names of defined fonts (as a tuple)" + if root is None: + root = tkinter._get_default_root('use font.names()') + return root.tk.splitlist(root.tk.call("font", "names")) + + +# -------------------------------------------------------------------- +# test stuff + +if __name__ == "__main__": + + root = tkinter.Tk() + + # create a font + f = Font(family="times", size=30, weight=NORMAL) + + print(f.actual()) + print(f.actual("family")) + print(f.actual("weight")) + + print(f.config()) + print(f.cget("family")) + print(f.cget("weight")) + + print(names()) + + print(f.measure("hello"), f.metrics("linespace")) + + print(f.metrics(displayof=root)) + + f = Font(font=("Courier", 20, "bold")) + print(f.measure("hello"), f.metrics("linespace", displayof=root)) + + w = tkinter.Label(root, text="Hello, world", font=f) + w.pack() + + w = tkinter.Button(root, text="Quit!", command=root.destroy) + w.pack() + + fb = Font(font=w["font"]).copy() + fb.config(weight=BOLD) + + w.config(font=fb) + + tkinter.mainloop() diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/messagebox.py python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/messagebox.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/messagebox.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/messagebox.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,146 @@ +# tk common message boxes +# +# this module provides an interface to the native message boxes +# available in Tk 4.2 and newer. 
+# +# written by Fredrik Lundh, May 1997 +# + +# +# options (all have default values): +# +# - default: which button to make default (one of the reply codes) +# +# - icon: which icon to display (see below) +# +# - message: the message to display +# +# - parent: which window to place the dialog on top of +# +# - title: dialog title +# +# - type: dialog type; that is, which buttons to display (see below) +# + +from tkinter.commondialog import Dialog + +__all__ = ["showinfo", "showwarning", "showerror", + "askquestion", "askokcancel", "askyesno", + "askyesnocancel", "askretrycancel"] + +# +# constants + +# icons +ERROR = "error" +INFO = "info" +QUESTION = "question" +WARNING = "warning" + +# types +ABORTRETRYIGNORE = "abortretryignore" +OK = "ok" +OKCANCEL = "okcancel" +RETRYCANCEL = "retrycancel" +YESNO = "yesno" +YESNOCANCEL = "yesnocancel" + +# replies +ABORT = "abort" +RETRY = "retry" +IGNORE = "ignore" +OK = "ok" +CANCEL = "cancel" +YES = "yes" +NO = "no" + + +# +# message dialog class + +class Message(Dialog): + "A message box" + + command = "tk_messageBox" + + +# +# convenience stuff + +# Rename _icon and _type options to allow overriding them in options +def _show(title=None, message=None, _icon=None, _type=None, **options): + if _icon and "icon" not in options: options["icon"] = _icon + if _type and "type" not in options: options["type"] = _type + if title: options["title"] = title + if message: options["message"] = message + res = Message(**options).show() + # In some Tcl installations, yes/no is converted into a boolean. + if isinstance(res, bool): + if res: + return YES + return NO + # In others we get a Tcl_Obj. + return str(res) + + +def showinfo(title=None, message=None, **options): + "Show an info message" + return _show(title, message, INFO, OK, **options) + + +def showwarning(title=None, message=None, **options): + "Show a warning message" + return _show(title, message, WARNING, OK, **options) + + +def showerror(title=None, message=None, **options): + "Show an error message" + return _show(title, message, ERROR, OK, **options) + + +def askquestion(title=None, message=None, **options): + "Ask a question" + return _show(title, message, QUESTION, YESNO, **options) + + +def askokcancel(title=None, message=None, **options): + "Ask if operation should proceed; return true if the answer is ok" + s = _show(title, message, QUESTION, OKCANCEL, **options) + return s == OK + + +def askyesno(title=None, message=None, **options): + "Ask a question; return true if the answer is yes" + s = _show(title, message, QUESTION, YESNO, **options) + return s == YES + + +def askyesnocancel(title=None, message=None, **options): + "Ask a question; return true if the answer is yes, None if cancelled." 
+ s = _show(title, message, QUESTION, YESNOCANCEL, **options) + # s might be a Tcl index object, so convert it to a string + s = str(s) + if s == CANCEL: + return None + return s == YES + + +def askretrycancel(title=None, message=None, **options): + "Ask if operation should be retried; return true if the answer is yes" + s = _show(title, message, WARNING, RETRYCANCEL, **options) + return s == RETRY + + +# -------------------------------------------------------------------- +# test stuff + +if __name__ == "__main__": + + print("info", showinfo("Spam", "Egg Information")) + print("warning", showwarning("Spam", "Egg Warning")) + print("error", showerror("Spam", "Egg Alert")) + print("question", askquestion("Spam", "Question?")) + print("proceed", askokcancel("Spam", "Proceed?")) + print("yes/no", askyesno("Spam", "Got it?")) + print("yes/no/cancel", askyesnocancel("Spam", "Want it?")) + print("try again", askretrycancel("Spam", "Try again?")) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/scrolledtext.py python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/scrolledtext.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/scrolledtext.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/scrolledtext.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,56 @@ +"""A ScrolledText widget feels like a text widget but also has a +vertical scroll bar on its right. (Later, options may be added to +add a horizontal bar as well, to make the bars disappear +automatically when not needed, to move them to the other side of the +window, etc.) + +Configuration options are passed to the Text widget. +A Frame widget is inserted between the master and the text, to hold +the Scrollbar widget. +Most methods calls are inherited from the Text widget; Pack, Grid and +Place methods are redirected to the Frame widget however. +""" + +from tkinter import Frame, Text, Scrollbar, Pack, Grid, Place +from tkinter.constants import RIGHT, LEFT, Y, BOTH + +__all__ = ['ScrolledText'] + + +class ScrolledText(Text): + def __init__(self, master=None, **kw): + self.frame = Frame(master) + self.vbar = Scrollbar(self.frame) + self.vbar.pack(side=RIGHT, fill=Y) + + kw.update({'yscrollcommand': self.vbar.set}) + Text.__init__(self, self.frame, **kw) + self.pack(side=LEFT, fill=BOTH, expand=True) + self.vbar['command'] = self.yview + + # Copy geometry methods of self.frame without overriding Text + # methods -- hack! 
+        text_meths = vars(Text).keys()
+        methods = vars(Pack).keys() | vars(Grid).keys() | vars(Place).keys()
+        methods = methods.difference(text_meths)
+
+        for m in methods:
+            if m[0] != '_' and m != 'config' and m != 'configure':
+                setattr(self, m, getattr(self.frame, m))
+
+    def __str__(self):
+        return str(self.frame)
+
+
+def example():
+    from tkinter.constants import END
+
+    stext = ScrolledText(bg='white', height=10)
+    stext.insert(END, __doc__)
+    stext.pack(fill=BOTH, side=LEFT, expand=True)
+    stext.focus_set()
+    stext.mainloop()
+
+
+if __name__ == "__main__":
+    example()
diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/simpledialog.py python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/simpledialog.py
--- python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/simpledialog.py 1970-01-01 00:00:00.000000000 +0000
+++ python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/simpledialog.py 2022-10-24 17:35:39.000000000 +0000
@@ -0,0 +1,441 @@
+#
+# An Introduction to Tkinter
+#
+# Copyright (c) 1997 by Fredrik Lundh
+#
+# This copyright applies to Dialog, askinteger, askfloat and askstring
+#
+# fredrik@pythonware.com
+# http://www.pythonware.com
+#
+"""This module handles dialog boxes.
+
+It contains the following public symbols:
+
+SimpleDialog -- A simple but flexible modal dialog box
+
+Dialog -- a base class for dialogs
+
+askinteger -- get an integer from the user
+
+askfloat -- get a float from the user
+
+askstring -- get a string from the user
+"""
+
+from tkinter import *
+from tkinter import _get_temp_root, _destroy_temp_root
+from tkinter import messagebox
+
+
+class SimpleDialog:
+
+    def __init__(self, master,
+                 text='', buttons=[], default=None, cancel=None,
+                 title=None, class_=None):
+        if class_:
+            self.root = Toplevel(master, class_=class_)
+        else:
+            self.root = Toplevel(master)
+        if title:
+            self.root.title(title)
+            self.root.iconname(title)
+
+        _setup_dialog(self.root)
+
+        self.message = Message(self.root, text=text, aspect=400)
+        self.message.pack(expand=1, fill=BOTH)
+        self.frame = Frame(self.root)
+        self.frame.pack()
+        self.num = default
+        self.cancel = cancel
+        self.default = default
+        self.root.bind('<Return>', self.return_event)
+        for num in range(len(buttons)):
+            s = buttons[num]
+            b = Button(self.frame, text=s,
+                       command=(lambda self=self, num=num: self.done(num)))
+            if num == default:
+                b.config(relief=RIDGE, borderwidth=8)
+            b.pack(side=LEFT, fill=BOTH, expand=1)
+        self.root.protocol('WM_DELETE_WINDOW', self.wm_delete_window)
+        self.root.transient(master)
+        _place_window(self.root, master)
+
+    def go(self):
+        self.root.wait_visibility()
+        self.root.grab_set()
+        self.root.mainloop()
+        self.root.destroy()
+        return self.num
+
+    def return_event(self, event):
+        if self.default is None:
+            self.root.bell()
+        else:
+            self.done(self.default)
+
+    def wm_delete_window(self):
+        if self.cancel is None:
+            self.root.bell()
+        else:
+            self.done(self.cancel)
+
+    def done(self, num):
+        self.num = num
+        self.root.quit()
+
+
+class Dialog(Toplevel):
+
+    '''Class to open dialogs.
+
+    This class is intended as a base class for custom dialogs
+    '''
+
+    def __init__(self, parent, title = None):
+        '''Initialize a dialog.
+
+        Arguments:
+
+            parent -- a parent window (the application window)
+
+            title -- the dialog title
+        '''
+        master = parent
+        if master is None:
+            master = _get_temp_root()
+
+        Toplevel.__init__(self, master)
+
+        self.withdraw() # remain invisible for now
+        # If the parent is not viewable, don't
+        # make the child transient, or else it
+        # would be opened withdrawn
+        if parent is not None and parent.winfo_viewable():
+            self.transient(parent)
+
+        if title:
+            self.title(title)
+
+        _setup_dialog(self)
+
+        self.parent = parent
+
+        self.result = None
+
+        body = Frame(self)
+        self.initial_focus = self.body(body)
+        body.pack(padx=5, pady=5)
+
+        self.buttonbox()
+
+        if self.initial_focus is None:
+            self.initial_focus = self
+
+        self.protocol("WM_DELETE_WINDOW", self.cancel)
+
+        _place_window(self, parent)
+
+        self.initial_focus.focus_set()
+
+        # wait for window to appear on screen before calling grab_set
+        self.wait_visibility()
+        self.grab_set()
+        self.wait_window(self)
+
+    def destroy(self):
+        '''Destroy the window'''
+        self.initial_focus = None
+        Toplevel.destroy(self)
+        _destroy_temp_root(self.master)
+
+    #
+    # construction hooks
+
+    def body(self, master):
+        '''create dialog body.
+
+        return widget that should have initial focus.
+        This method should be overridden, and is called
+        by the __init__ method.
+        '''
+        pass
+
+    def buttonbox(self):
+        '''add standard button box.
+
+        override if you do not want the standard buttons
+        '''
+
+        box = Frame(self)
+
+        w = Button(box, text="OK", width=10, command=self.ok, default=ACTIVE)
+        w.pack(side=LEFT, padx=5, pady=5)
+        w = Button(box, text="Cancel", width=10, command=self.cancel)
+        w.pack(side=LEFT, padx=5, pady=5)
+
+        self.bind("<Return>", self.ok)
+        self.bind("<Escape>", self.cancel)
+
+        box.pack()
+
+    #
+    # standard button semantics
+
+    def ok(self, event=None):
+
+        if not self.validate():
+            self.initial_focus.focus_set() # put focus back
+            return
+
+        self.withdraw()
+        self.update_idletasks()
+
+        try:
+            self.apply()
+        finally:
+            self.cancel()
+
+    def cancel(self, event=None):
+
+        # put focus back to the parent window
+        if self.parent is not None:
+            self.parent.focus_set()
+        self.destroy()
+
+    #
+    # command hooks
+
+    def validate(self):
+        '''validate the data
+
+        This method is called automatically to validate the data before the
+        dialog is destroyed. By default, it always validates OK.
+        '''
+
+        return 1 # override
+
+    def apply(self):
+        '''process the data
+
+        This method is called automatically to process the data, *after*
+        the dialog is destroyed. By default, it does nothing.
+        '''
+
+        pass # override
+
+
+# Place a toplevel window at the center of parent or screen
+# It is a Python implementation of ::tk::PlaceWindow.
+def _place_window(w, parent=None):
+    w.wm_withdraw() # Remain invisible while we figure out the geometry
+    w.update_idletasks() # Actualize geometry information
+
+    minwidth = w.winfo_reqwidth()
+    minheight = w.winfo_reqheight()
+    maxwidth = w.winfo_vrootwidth()
+    maxheight = w.winfo_vrootheight()
+    if parent is not None and parent.winfo_ismapped():
+        x = parent.winfo_rootx() + (parent.winfo_width() - minwidth) // 2
+        y = parent.winfo_rooty() + (parent.winfo_height() - minheight) // 2
+        vrootx = w.winfo_vrootx()
+        vrooty = w.winfo_vrooty()
+        x = min(x, vrootx + maxwidth - minwidth)
+        x = max(x, vrootx)
+        y = min(y, vrooty + maxheight - minheight)
+        y = max(y, vrooty)
+        if w._windowingsystem == 'aqua':
+            # Avoid the native menu bar which sits on top of everything.
+ y = max(y, 22) + else: + x = (w.winfo_screenwidth() - minwidth) // 2 + y = (w.winfo_screenheight() - minheight) // 2 + + w.wm_maxsize(maxwidth, maxheight) + w.wm_geometry('+%d+%d' % (x, y)) + w.wm_deiconify() # Become visible at the desired location + + +def _setup_dialog(w): + if w._windowingsystem == "aqua": + w.tk.call("::tk::unsupported::MacWindowStyle", "style", + w, "moveableModal", "") + elif w._windowingsystem == "x11": + w.wm_attributes("-type", "dialog") + +# -------------------------------------------------------------------- +# convenience dialogues + +class _QueryDialog(Dialog): + + def __init__(self, title, prompt, + initialvalue=None, + minvalue = None, maxvalue = None, + parent = None): + + self.prompt = prompt + self.minvalue = minvalue + self.maxvalue = maxvalue + + self.initialvalue = initialvalue + + Dialog.__init__(self, parent, title) + + def destroy(self): + self.entry = None + Dialog.destroy(self) + + def body(self, master): + + w = Label(master, text=self.prompt, justify=LEFT) + w.grid(row=0, padx=5, sticky=W) + + self.entry = Entry(master, name="entry") + self.entry.grid(row=1, padx=5, sticky=W+E) + + if self.initialvalue is not None: + self.entry.insert(0, self.initialvalue) + self.entry.select_range(0, END) + + return self.entry + + def validate(self): + try: + result = self.getresult() + except ValueError: + messagebox.showwarning( + "Illegal value", + self.errormessage + "\nPlease try again", + parent = self + ) + return 0 + + if self.minvalue is not None and result < self.minvalue: + messagebox.showwarning( + "Too small", + "The allowed minimum value is %s. " + "Please try again." % self.minvalue, + parent = self + ) + return 0 + + if self.maxvalue is not None and result > self.maxvalue: + messagebox.showwarning( + "Too large", + "The allowed maximum value is %s. " + "Please try again." % self.maxvalue, + parent = self + ) + return 0 + + self.result = result + + return 1 + + +class _QueryInteger(_QueryDialog): + errormessage = "Not an integer." + + def getresult(self): + return self.getint(self.entry.get()) + + +def askinteger(title, prompt, **kw): + '''get an integer from the user + + Arguments: + + title -- the dialog title + prompt -- the label text + **kw -- see SimpleDialog class + + Return value is an integer + ''' + d = _QueryInteger(title, prompt, **kw) + return d.result + + +class _QueryFloat(_QueryDialog): + errormessage = "Not a floating point value." 
+ + def getresult(self): + return self.getdouble(self.entry.get()) + + +def askfloat(title, prompt, **kw): + '''get a float from the user + + Arguments: + + title -- the dialog title + prompt -- the label text + **kw -- see SimpleDialog class + + Return value is a float + ''' + d = _QueryFloat(title, prompt, **kw) + return d.result + + +class _QueryString(_QueryDialog): + def __init__(self, *args, **kw): + if "show" in kw: + self.__show = kw["show"] + del kw["show"] + else: + self.__show = None + _QueryDialog.__init__(self, *args, **kw) + + def body(self, master): + entry = _QueryDialog.body(self, master) + if self.__show is not None: + entry.configure(show=self.__show) + return entry + + def getresult(self): + return self.entry.get() + + +def askstring(title, prompt, **kw): + '''get a string from the user + + Arguments: + + title -- the dialog title + prompt -- the label text + **kw -- see SimpleDialog class + + Return value is a string + ''' + d = _QueryString(title, prompt, **kw) + return d.result + + +if __name__ == '__main__': + + def test(): + root = Tk() + def doit(root=root): + d = SimpleDialog(root, + text="This is a test dialog. " + "Would this have been an actual dialog, " + "the buttons below would have been glowing " + "in soft pink light.\n" + "Do you believe this?", + buttons=["Yes", "No", "Cancel"], + default=0, + cancel=2, + title="Test Dialog") + print(d.go()) + print(askinteger("Spam", "Egg count", initialvalue=12*12)) + print(askfloat("Spam", "Egg weight\n(in tons)", minvalue=1, + maxvalue=100)) + print(askstring("Spam", "Egg label")) + t = Button(root, text='Test', command=doit) + t.pack() + q = Button(root, text='Quit', command=t.quit) + q.pack() + t.mainloop() + + test() diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/test/README python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/test/README --- python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/test/README 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/test/README 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,14 @@ +Writing new tests +================= + +Precaution +---------- + + New tests should always use only one Tk window at once, like all the + current tests do. This means that you have to destroy the current window + before creating another one, and clean up after the test. The motivation + behind this is that some tests may depend on having its window focused + while it is running to work properly, and it may be hard to force focus + on your window across platforms (right now only test_traversal at + test_ttk.test_widgets.NotebookTest depends on this). + diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/test/support.py python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/test/support.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/test/support.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/test/support.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,136 @@ +import functools +import re +import tkinter +import unittest + +class AbstractTkTest: + + @classmethod + def setUpClass(cls): + cls._old_support_default_root = tkinter._support_default_root + destroy_default_root() + tkinter.NoDefaultRoot() + cls.root = tkinter.Tk() + cls.wantobjects = cls.root.wantobjects() + # De-maximize main window. + # Some window managers can maximize new windows. 
+        cls.root.wm_state('normal')
+        try:
+            cls.root.wm_attributes('-zoomed', False)
+        except tkinter.TclError:
+            pass
+
+    @classmethod
+    def tearDownClass(cls):
+        cls.root.update_idletasks()
+        cls.root.destroy()
+        del cls.root
+        tkinter._default_root = None
+        tkinter._support_default_root = cls._old_support_default_root
+
+    def setUp(self):
+        self.root.deiconify()
+
+    def tearDown(self):
+        for w in self.root.winfo_children():
+            w.destroy()
+        self.root.withdraw()
+
+
+class AbstractDefaultRootTest:
+
+    def setUp(self):
+        self._old_support_default_root = tkinter._support_default_root
+        destroy_default_root()
+        tkinter._support_default_root = True
+        self.wantobjects = tkinter.wantobjects
+
+    def tearDown(self):
+        destroy_default_root()
+        tkinter._default_root = None
+        tkinter._support_default_root = self._old_support_default_root
+
+    def _test_widget(self, constructor):
+        # no master passing
+        x = constructor()
+        self.assertIsNotNone(tkinter._default_root)
+        self.assertIs(x.master, tkinter._default_root)
+        self.assertIs(x.tk, tkinter._default_root.tk)
+        x.destroy()
+        destroy_default_root()
+        tkinter.NoDefaultRoot()
+        self.assertRaises(RuntimeError, constructor)
+        self.assertFalse(hasattr(tkinter, '_default_root'))
+
+
+def destroy_default_root():
+    if getattr(tkinter, '_default_root', None):
+        tkinter._default_root.update_idletasks()
+        tkinter._default_root.destroy()
+        tkinter._default_root = None
+
+def simulate_mouse_click(widget, x, y):
+    """Generate proper events to click at the x, y position (tries to act
+    like an X server)."""
+    widget.event_generate('<Enter>', x=0, y=0)
+    widget.event_generate('<Motion>', x=x, y=y)
+    widget.event_generate('<ButtonPress-1>', x=x, y=y)
+    widget.event_generate('<ButtonRelease-1>', x=x, y=y)
+
+
+import _tkinter
+tcl_version = tuple(map(int, _tkinter.TCL_VERSION.split('.')))
+
+def requires_tcl(*version):
+    if len(version) <= 2:
+        return unittest.skipUnless(tcl_version >= version,
+            'requires Tcl version >= ' + '.'.join(map(str, version)))
+
+    def deco(test):
+        @functools.wraps(test)
+        def newtest(self):
+            if get_tk_patchlevel() < version:
+                self.skipTest('requires Tcl version >= ' +
+                                '.'.join(map(str, version)))
+            test(self)
+        return newtest
+    return deco
+
+_tk_patchlevel = None
+def get_tk_patchlevel():
+    global _tk_patchlevel
+    if _tk_patchlevel is None:
+        tcl = tkinter.Tcl()
+        _tk_patchlevel = tcl.info_patchlevel()
+    return _tk_patchlevel
+
+units = {
+    'c': 72 / 2.54,     # centimeters
+    'i': 72,            # inches
+    'm': 72 / 25.4,     # millimeters
+    'p': 1,             # points
+}
+
+def pixels_conv(value):
+    return float(value[:-1]) * units[value[-1:]]
+
+def tcl_obj_eq(actual, expected):
+    if actual == expected:
+        return True
+    if isinstance(actual, _tkinter.Tcl_Obj):
+        if isinstance(expected, str):
+            return str(actual) == expected
+    if isinstance(actual, tuple):
+        if isinstance(expected, tuple):
+            return (len(actual) == len(expected) and
+                    all(tcl_obj_eq(act, exp)
+                        for act, exp in zip(actual, expected)))
+    return False
+
+def widget_eq(actual, expected):
+    if actual == expected:
+        return True
+    if isinstance(actual, (str, tkinter.Widget)):
+        if isinstance(expected, (str, tkinter.Widget)):
+            return str(actual) == str(expected)
+    return False
diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/test/test_tkinter/test_colorchooser.py python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/test/test_tkinter/test_colorchooser.py
--- python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/test/test_tkinter/test_colorchooser.py 1970-01-01 00:00:00.000000000 +0000
+++
python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/test/test_tkinter/test_colorchooser.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,68 @@ +import unittest +import tkinter +from test.support import requires, swap_attr +from tkinter.test.support import AbstractDefaultRootTest, AbstractTkTest +from tkinter import colorchooser +from tkinter.colorchooser import askcolor +from tkinter.commondialog import Dialog + +requires('gui') + + +class ChooserTest(AbstractTkTest, unittest.TestCase): + + @classmethod + def setUpClass(cls): + AbstractTkTest.setUpClass.__func__(cls) + cls.cc = colorchooser.Chooser(initialcolor='dark blue slate') + + def test_fixoptions(self): + cc = self.cc + cc._fixoptions() + self.assertEqual(cc.options['initialcolor'], 'dark blue slate') + + cc.options['initialcolor'] = '#D2D269691E1E' + cc._fixoptions() + self.assertEqual(cc.options['initialcolor'], '#D2D269691E1E') + + cc.options['initialcolor'] = (210, 105, 30) + cc._fixoptions() + self.assertEqual(cc.options['initialcolor'], '#d2691e') + + def test_fixresult(self): + cc = self.cc + self.assertEqual(cc._fixresult(self.root, ()), (None, None)) + self.assertEqual(cc._fixresult(self.root, ''), (None, None)) + self.assertEqual(cc._fixresult(self.root, 'chocolate'), + ((210, 105, 30), 'chocolate')) + self.assertEqual(cc._fixresult(self.root, '#4a3c8c'), + ((74, 60, 140), '#4a3c8c')) + + +class DefaultRootTest(AbstractDefaultRootTest, unittest.TestCase): + + def test_askcolor(self): + def test_callback(dialog, master): + nonlocal ismapped + master.update() + ismapped = master.winfo_ismapped() + raise ZeroDivisionError + + with swap_attr(Dialog, '_test_callback', test_callback): + ismapped = None + self.assertRaises(ZeroDivisionError, askcolor) + #askcolor() + self.assertEqual(ismapped, False) + + root = tkinter.Tk() + ismapped = None + self.assertRaises(ZeroDivisionError, askcolor) + self.assertEqual(ismapped, True) + root.destroy() + + tkinter.NoDefaultRoot() + self.assertRaises(RuntimeError, askcolor) + + +if __name__ == "__main__": + unittest.main() diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/test/test_tkinter/test_font.py python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/test/test_tkinter/test_font.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/test/test_tkinter/test_font.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/test/test_tkinter/test_font.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,163 @@ +import unittest +import tkinter +from tkinter import font +from test.support import requires, gc_collect, ALWAYS_EQ +from tkinter.test.support import AbstractTkTest, AbstractDefaultRootTest + +requires('gui') + +fontname = "TkDefaultFont" + +class FontTest(AbstractTkTest, unittest.TestCase): + + @classmethod + def setUpClass(cls): + AbstractTkTest.setUpClass.__func__(cls) + try: + cls.font = font.Font(root=cls.root, name=fontname, exists=True) + except tkinter.TclError: + cls.font = font.Font(root=cls.root, name=fontname, exists=False) + + def test_configure(self): + options = self.font.configure() + self.assertGreaterEqual(set(options), + {'family', 'size', 'weight', 'slant', 'underline', 'overstrike'}) + for key in options: + self.assertEqual(self.font.cget(key), options[key]) + self.assertEqual(self.font[key], options[key]) + for key in 'family', 'weight', 'slant': + self.assertIsInstance(options[key], str) + self.assertIsInstance(self.font.cget(key), str) + self.assertIsInstance(self.font[key], str) + sizetype = int if self.wantobjects else str + 
for key in 'size', 'underline', 'overstrike': + self.assertIsInstance(options[key], sizetype) + self.assertIsInstance(self.font.cget(key), sizetype) + self.assertIsInstance(self.font[key], sizetype) + + def test_unicode_family(self): + family = 'MS \u30b4\u30b7\u30c3\u30af' + try: + f = font.Font(root=self.root, family=family, exists=True) + except tkinter.TclError: + f = font.Font(root=self.root, family=family, exists=False) + self.assertEqual(f.cget('family'), family) + del f + gc_collect() + + def test_actual(self): + options = self.font.actual() + self.assertGreaterEqual(set(options), + {'family', 'size', 'weight', 'slant', 'underline', 'overstrike'}) + for key in options: + self.assertEqual(self.font.actual(key), options[key]) + for key in 'family', 'weight', 'slant': + self.assertIsInstance(options[key], str) + self.assertIsInstance(self.font.actual(key), str) + sizetype = int if self.wantobjects else str + for key in 'size', 'underline', 'overstrike': + self.assertIsInstance(options[key], sizetype) + self.assertIsInstance(self.font.actual(key), sizetype) + + def test_name(self): + self.assertEqual(self.font.name, fontname) + self.assertEqual(str(self.font), fontname) + + def test_equality(self): + font1 = font.Font(root=self.root, name=fontname, exists=True) + font2 = font.Font(root=self.root, name=fontname, exists=True) + self.assertIsNot(font1, font2) + self.assertEqual(font1, font2) + self.assertNotEqual(font1, font1.copy()) + + self.assertNotEqual(font1, 0) + self.assertEqual(font1, ALWAYS_EQ) + + root2 = tkinter.Tk() + self.addCleanup(root2.destroy) + font3 = font.Font(root=root2, name=fontname, exists=True) + self.assertEqual(str(font1), str(font3)) + self.assertNotEqual(font1, font3) + + def test_measure(self): + self.assertIsInstance(self.font.measure('abc'), int) + + def test_metrics(self): + metrics = self.font.metrics() + self.assertGreaterEqual(set(metrics), + {'ascent', 'descent', 'linespace', 'fixed'}) + for key in metrics: + self.assertEqual(self.font.metrics(key), metrics[key]) + self.assertIsInstance(metrics[key], int) + self.assertIsInstance(self.font.metrics(key), int) + + def test_families(self): + families = font.families(self.root) + self.assertIsInstance(families, tuple) + self.assertTrue(families) + for family in families: + self.assertIsInstance(family, str) + self.assertTrue(family) + + def test_names(self): + names = font.names(self.root) + self.assertIsInstance(names, tuple) + self.assertTrue(names) + for name in names: + self.assertIsInstance(name, str) + self.assertTrue(name) + self.assertIn(fontname, names) + + def test_nametofont(self): + testfont = font.nametofont(fontname, root=self.root) + self.assertIsInstance(testfont, font.Font) + self.assertEqual(testfont.name, fontname) + + def test_repr(self): + self.assertEqual( + repr(self.font), f'' + ) + + +class DefaultRootTest(AbstractDefaultRootTest, unittest.TestCase): + + def test_families(self): + self.assertRaises(RuntimeError, font.families) + root = tkinter.Tk() + families = font.families() + self.assertIsInstance(families, tuple) + self.assertTrue(families) + for family in families: + self.assertIsInstance(family, str) + self.assertTrue(family) + root.destroy() + tkinter.NoDefaultRoot() + self.assertRaises(RuntimeError, font.families) + + def test_names(self): + self.assertRaises(RuntimeError, font.names) + root = tkinter.Tk() + names = font.names() + self.assertIsInstance(names, tuple) + self.assertTrue(names) + for name in names: + self.assertIsInstance(name, str) + self.assertTrue(name) + 
self.assertIn(fontname, names) + root.destroy() + tkinter.NoDefaultRoot() + self.assertRaises(RuntimeError, font.names) + + def test_nametofont(self): + self.assertRaises(RuntimeError, font.nametofont, fontname) + root = tkinter.Tk() + testfont = font.nametofont(fontname) + self.assertIsInstance(testfont, font.Font) + self.assertEqual(testfont.name, fontname) + root.destroy() + tkinter.NoDefaultRoot() + self.assertRaises(RuntimeError, font.nametofont, fontname) + + +if __name__ == "__main__": + unittest.main() diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/test/test_tkinter/test_geometry_managers.py python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/test/test_tkinter/test_geometry_managers.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/test/test_tkinter/test_geometry_managers.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/test/test_tkinter/test_geometry_managers.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,899 @@ +import unittest +import re +import tkinter +from tkinter import TclError +from test.support import requires + +from tkinter.test.support import pixels_conv +from tkinter.test.widget_tests import AbstractWidgetTest + +requires('gui') + + +class PackTest(AbstractWidgetTest, unittest.TestCase): + + test_keys = None + + def create2(self): + pack = tkinter.Toplevel(self.root, name='pack') + pack.wm_geometry('300x200+0+0') + pack.wm_minsize(1, 1) + a = tkinter.Frame(pack, name='a', width=20, height=40, bg='red') + b = tkinter.Frame(pack, name='b', width=50, height=30, bg='blue') + c = tkinter.Frame(pack, name='c', width=80, height=80, bg='green') + d = tkinter.Frame(pack, name='d', width=40, height=30, bg='yellow') + return pack, a, b, c, d + + def test_pack_configure_after(self): + pack, a, b, c, d = self.create2() + with self.assertRaisesRegex(TclError, 'window "%s" isn\'t packed' % b): + a.pack_configure(after=b) + with self.assertRaisesRegex(TclError, 'bad window path name ".foo"'): + a.pack_configure(after='.foo') + a.pack_configure(side='top') + b.pack_configure(side='top') + c.pack_configure(side='top') + d.pack_configure(side='top') + self.assertEqual(pack.pack_slaves(), [a, b, c, d]) + a.pack_configure(after=b) + self.assertEqual(pack.pack_slaves(), [b, a, c, d]) + a.pack_configure(after=a) + self.assertEqual(pack.pack_slaves(), [b, a, c, d]) + + def test_pack_configure_anchor(self): + pack, a, b, c, d = self.create2() + def check(anchor, geom): + a.pack_configure(side='top', ipadx=5, padx=10, ipady=15, pady=20, + expand=True, anchor=anchor) + self.root.update() + self.assertEqual(a.winfo_geometry(), geom) + check('n', '30x70+135+20') + check('ne', '30x70+260+20') + check('e', '30x70+260+65') + check('se', '30x70+260+110') + check('s', '30x70+135+110') + check('sw', '30x70+10+110') + check('w', '30x70+10+65') + check('nw', '30x70+10+20') + check('center', '30x70+135+65') + + def test_pack_configure_before(self): + pack, a, b, c, d = self.create2() + with self.assertRaisesRegex(TclError, 'window "%s" isn\'t packed' % b): + a.pack_configure(before=b) + with self.assertRaisesRegex(TclError, 'bad window path name ".foo"'): + a.pack_configure(before='.foo') + a.pack_configure(side='top') + b.pack_configure(side='top') + c.pack_configure(side='top') + d.pack_configure(side='top') + self.assertEqual(pack.pack_slaves(), [a, b, c, d]) + a.pack_configure(before=d) + self.assertEqual(pack.pack_slaves(), [b, c, a, d]) + a.pack_configure(before=a) + self.assertEqual(pack.pack_slaves(), [b, c, a, d]) + + def 
test_pack_configure_expand(self): + pack, a, b, c, d = self.create2() + def check(*geoms): + self.root.update() + self.assertEqual(a.winfo_geometry(), geoms[0]) + self.assertEqual(b.winfo_geometry(), geoms[1]) + self.assertEqual(c.winfo_geometry(), geoms[2]) + self.assertEqual(d.winfo_geometry(), geoms[3]) + a.pack_configure(side='left') + b.pack_configure(side='top') + c.pack_configure(side='right') + d.pack_configure(side='bottom') + check('20x40+0+80', '50x30+135+0', '80x80+220+75', '40x30+100+170') + a.pack_configure(side='left', expand='yes') + b.pack_configure(side='top', expand='on') + c.pack_configure(side='right', expand=True) + d.pack_configure(side='bottom', expand=1) + check('20x40+40+80', '50x30+175+35', '80x80+180+110', '40x30+100+135') + a.pack_configure(side='left', expand='yes', fill='both') + b.pack_configure(side='top', expand='on', fill='both') + c.pack_configure(side='right', expand=True, fill='both') + d.pack_configure(side='bottom', expand=1, fill='both') + check('100x200+0+0', '200x100+100+0', '160x100+140+100', '40x100+100+100') + + def test_pack_configure_in(self): + pack, a, b, c, d = self.create2() + a.pack_configure(side='top') + b.pack_configure(side='top') + c.pack_configure(side='top') + d.pack_configure(side='top') + a.pack_configure(in_=pack) + self.assertEqual(pack.pack_slaves(), [b, c, d, a]) + a.pack_configure(in_=c) + self.assertEqual(pack.pack_slaves(), [b, c, d]) + self.assertEqual(c.pack_slaves(), [a]) + with self.assertRaisesRegex(TclError, + 'can\'t pack %s inside itself' % (a,)): + a.pack_configure(in_=a) + with self.assertRaisesRegex(TclError, 'bad window path name ".foo"'): + a.pack_configure(in_='.foo') + + def test_pack_configure_padx_ipadx_fill(self): + pack, a, b, c, d = self.create2() + def check(geom1, geom2, **kwargs): + a.pack_forget() + b.pack_forget() + a.pack_configure(**kwargs) + b.pack_configure(expand=True, fill='both') + self.root.update() + self.assertEqual(a.winfo_geometry(), geom1) + self.assertEqual(b.winfo_geometry(), geom2) + check('20x40+260+80', '240x200+0+0', side='right', padx=20) + check('20x40+250+80', '240x200+0+0', side='right', padx=(10, 30)) + check('60x40+240+80', '240x200+0+0', side='right', ipadx=20) + check('30x40+260+80', '250x200+0+0', side='right', ipadx=5, padx=10) + check('20x40+260+80', '240x200+0+0', side='right', padx=20, fill='x') + check('20x40+249+80', '240x200+0+0', + side='right', padx=(9, 31), fill='x') + check('60x40+240+80', '240x200+0+0', side='right', ipadx=20, fill='x') + check('30x40+260+80', '250x200+0+0', + side='right', ipadx=5, padx=10, fill='x') + check('30x40+255+80', '250x200+0+0', + side='right', ipadx=5, padx=(5, 15), fill='x') + check('20x40+140+0', '300x160+0+40', side='top', padx=20) + check('20x40+120+0', '300x160+0+40', side='top', padx=(0, 40)) + check('60x40+120+0', '300x160+0+40', side='top', ipadx=20) + check('30x40+135+0', '300x160+0+40', side='top', ipadx=5, padx=10) + check('30x40+130+0', '300x160+0+40', side='top', ipadx=5, padx=(5, 15)) + check('260x40+20+0', '300x160+0+40', side='top', padx=20, fill='x') + check('260x40+25+0', '300x160+0+40', + side='top', padx=(25, 15), fill='x') + check('300x40+0+0', '300x160+0+40', side='top', ipadx=20, fill='x') + check('280x40+10+0', '300x160+0+40', + side='top', ipadx=5, padx=10, fill='x') + check('280x40+5+0', '300x160+0+40', + side='top', ipadx=5, padx=(5, 15), fill='x') + a.pack_configure(padx='1c') + self.assertEqual(a.pack_info()['padx'], + self._str(pack.winfo_pixels('1c'))) + a.pack_configure(ipadx='1c') + 
self.assertEqual(a.pack_info()['ipadx'], + self._str(pack.winfo_pixels('1c'))) + + def test_pack_configure_pady_ipady_fill(self): + pack, a, b, c, d = self.create2() + def check(geom1, geom2, **kwargs): + a.pack_forget() + b.pack_forget() + a.pack_configure(**kwargs) + b.pack_configure(expand=True, fill='both') + self.root.update() + self.assertEqual(a.winfo_geometry(), geom1) + self.assertEqual(b.winfo_geometry(), geom2) + check('20x40+280+80', '280x200+0+0', side='right', pady=20) + check('20x40+280+70', '280x200+0+0', side='right', pady=(10, 30)) + check('20x80+280+60', '280x200+0+0', side='right', ipady=20) + check('20x50+280+75', '280x200+0+0', side='right', ipady=5, pady=10) + check('20x40+280+80', '280x200+0+0', side='right', pady=20, fill='x') + check('20x40+280+69', '280x200+0+0', + side='right', pady=(9, 31), fill='x') + check('20x80+280+60', '280x200+0+0', side='right', ipady=20, fill='x') + check('20x50+280+75', '280x200+0+0', + side='right', ipady=5, pady=10, fill='x') + check('20x50+280+70', '280x200+0+0', + side='right', ipady=5, pady=(5, 15), fill='x') + check('20x40+140+20', '300x120+0+80', side='top', pady=20) + check('20x40+140+0', '300x120+0+80', side='top', pady=(0, 40)) + check('20x80+140+0', '300x120+0+80', side='top', ipady=20) + check('20x50+140+10', '300x130+0+70', side='top', ipady=5, pady=10) + check('20x50+140+5', '300x130+0+70', side='top', ipady=5, pady=(5, 15)) + check('300x40+0+20', '300x120+0+80', side='top', pady=20, fill='x') + check('300x40+0+25', '300x120+0+80', + side='top', pady=(25, 15), fill='x') + check('300x80+0+0', '300x120+0+80', side='top', ipady=20, fill='x') + check('300x50+0+10', '300x130+0+70', + side='top', ipady=5, pady=10, fill='x') + check('300x50+0+5', '300x130+0+70', + side='top', ipady=5, pady=(5, 15), fill='x') + a.pack_configure(pady='1c') + self.assertEqual(a.pack_info()['pady'], + self._str(pack.winfo_pixels('1c'))) + a.pack_configure(ipady='1c') + self.assertEqual(a.pack_info()['ipady'], + self._str(pack.winfo_pixels('1c'))) + + def test_pack_configure_side(self): + pack, a, b, c, d = self.create2() + def check(side, geom1, geom2): + a.pack_configure(side=side) + self.assertEqual(a.pack_info()['side'], side) + b.pack_configure(expand=True, fill='both') + self.root.update() + self.assertEqual(a.winfo_geometry(), geom1) + self.assertEqual(b.winfo_geometry(), geom2) + check('top', '20x40+140+0', '300x160+0+40') + check('bottom', '20x40+140+160', '300x160+0+0') + check('left', '20x40+0+80', '280x200+20+0') + check('right', '20x40+280+80', '280x200+0+0') + + def test_pack_forget(self): + pack, a, b, c, d = self.create2() + a.pack_configure() + b.pack_configure() + c.pack_configure() + self.assertEqual(pack.pack_slaves(), [a, b, c]) + b.pack_forget() + self.assertEqual(pack.pack_slaves(), [a, c]) + b.pack_forget() + self.assertEqual(pack.pack_slaves(), [a, c]) + d.pack_forget() + + def test_pack_info(self): + pack, a, b, c, d = self.create2() + with self.assertRaisesRegex(TclError, 'window "%s" isn\'t packed' % a): + a.pack_info() + a.pack_configure() + b.pack_configure(side='right', in_=a, anchor='s', expand=True, fill='x', + ipadx=5, padx=10, ipady=2, pady=(5, 15)) + info = a.pack_info() + self.assertIsInstance(info, dict) + self.assertEqual(info['anchor'], 'center') + self.assertEqual(info['expand'], self._str(0)) + self.assertEqual(info['fill'], 'none') + self.assertEqual(info['in'], pack) + self.assertEqual(info['ipadx'], self._str(0)) + self.assertEqual(info['ipady'], self._str(0)) + self.assertEqual(info['padx'], self._str(0)) 
+ self.assertEqual(info['pady'], self._str(0)) + self.assertEqual(info['side'], 'top') + info = b.pack_info() + self.assertIsInstance(info, dict) + self.assertEqual(info['anchor'], 's') + self.assertEqual(info['expand'], self._str(1)) + self.assertEqual(info['fill'], 'x') + self.assertEqual(info['in'], a) + self.assertEqual(info['ipadx'], self._str(5)) + self.assertEqual(info['ipady'], self._str(2)) + self.assertEqual(info['padx'], self._str(10)) + self.assertEqual(info['pady'], self._str((5, 15))) + self.assertEqual(info['side'], 'right') + + def test_pack_propagate(self): + pack, a, b, c, d = self.create2() + pack.configure(width=300, height=200) + a.pack_configure() + pack.pack_propagate(False) + self.root.update() + self.assertEqual(pack.winfo_reqwidth(), 300) + self.assertEqual(pack.winfo_reqheight(), 200) + pack.pack_propagate(True) + self.root.update() + self.assertEqual(pack.winfo_reqwidth(), 20) + self.assertEqual(pack.winfo_reqheight(), 40) + + def test_pack_slaves(self): + pack, a, b, c, d = self.create2() + self.assertEqual(pack.pack_slaves(), []) + a.pack_configure() + self.assertEqual(pack.pack_slaves(), [a]) + b.pack_configure() + self.assertEqual(pack.pack_slaves(), [a, b]) + + +class PlaceTest(AbstractWidgetTest, unittest.TestCase): + + test_keys = None + + def create2(self): + t = tkinter.Toplevel(self.root, width=300, height=200, bd=0) + t.wm_geometry('300x200+0+0') + f = tkinter.Frame(t, width=154, height=84, bd=2, relief='raised') + f.place_configure(x=48, y=38) + f2 = tkinter.Frame(t, width=30, height=60, bd=2, relief='raised') + self.root.update() + return t, f, f2 + + def test_place_configure_in(self): + t, f, f2 = self.create2() + self.assertEqual(f2.winfo_manager(), '') + with self.assertRaisesRegex(TclError, "can't place %s relative to " + "itself" % re.escape(str(f2))): + f2.place_configure(in_=f2) + self.assertEqual(f2.winfo_manager(), '') + with self.assertRaisesRegex(TclError, 'bad window path name'): + f2.place_configure(in_='spam') + f2.place_configure(in_=f) + self.assertEqual(f2.winfo_manager(), 'place') + + def test_place_configure_x(self): + t, f, f2 = self.create2() + f2.place_configure(in_=f) + self.assertEqual(f2.place_info()['x'], '0') + self.root.update() + self.assertEqual(f2.winfo_x(), 50) + f2.place_configure(x=100) + self.assertEqual(f2.place_info()['x'], '100') + self.root.update() + self.assertEqual(f2.winfo_x(), 150) + f2.place_configure(x=-10, relx=1) + self.assertEqual(f2.place_info()['x'], '-10') + self.root.update() + self.assertEqual(f2.winfo_x(), 190) + with self.assertRaisesRegex(TclError, 'bad screen distance "spam"'): + f2.place_configure(in_=f, x='spam') + + def test_place_configure_y(self): + t, f, f2 = self.create2() + f2.place_configure(in_=f) + self.assertEqual(f2.place_info()['y'], '0') + self.root.update() + self.assertEqual(f2.winfo_y(), 40) + f2.place_configure(y=50) + self.assertEqual(f2.place_info()['y'], '50') + self.root.update() + self.assertEqual(f2.winfo_y(), 90) + f2.place_configure(y=-10, rely=1) + self.assertEqual(f2.place_info()['y'], '-10') + self.root.update() + self.assertEqual(f2.winfo_y(), 110) + with self.assertRaisesRegex(TclError, 'bad screen distance "spam"'): + f2.place_configure(in_=f, y='spam') + + def test_place_configure_relx(self): + t, f, f2 = self.create2() + f2.place_configure(in_=f) + self.assertEqual(f2.place_info()['relx'], '0') + self.root.update() + self.assertEqual(f2.winfo_x(), 50) + f2.place_configure(relx=0.5) + self.assertEqual(f2.place_info()['relx'], '0.5') + self.root.update() + 
self.assertEqual(f2.winfo_x(), 125) + f2.place_configure(relx=1) + self.assertEqual(f2.place_info()['relx'], '1') + self.root.update() + self.assertEqual(f2.winfo_x(), 200) + with self.assertRaisesRegex(TclError, 'expected floating-point number ' + 'but got "spam"'): + f2.place_configure(in_=f, relx='spam') + + def test_place_configure_rely(self): + t, f, f2 = self.create2() + f2.place_configure(in_=f) + self.assertEqual(f2.place_info()['rely'], '0') + self.root.update() + self.assertEqual(f2.winfo_y(), 40) + f2.place_configure(rely=0.5) + self.assertEqual(f2.place_info()['rely'], '0.5') + self.root.update() + self.assertEqual(f2.winfo_y(), 80) + f2.place_configure(rely=1) + self.assertEqual(f2.place_info()['rely'], '1') + self.root.update() + self.assertEqual(f2.winfo_y(), 120) + with self.assertRaisesRegex(TclError, 'expected floating-point number ' + 'but got "spam"'): + f2.place_configure(in_=f, rely='spam') + + def test_place_configure_anchor(self): + f = tkinter.Frame(self.root) + with self.assertRaisesRegex(TclError, 'bad anchor "j"'): + f.place_configure(anchor='j') + with self.assertRaisesRegex(TclError, 'ambiguous anchor ""'): + f.place_configure(anchor='') + for value in 'n', 'ne', 'e', 'se', 's', 'sw', 'w', 'nw', 'center': + f.place_configure(anchor=value) + self.assertEqual(f.place_info()['anchor'], value) + + def test_place_configure_width(self): + t, f, f2 = self.create2() + f2.place_configure(in_=f, width=120) + self.root.update() + self.assertEqual(f2.winfo_width(), 120) + f2.place_configure(width='') + self.root.update() + self.assertEqual(f2.winfo_width(), 30) + with self.assertRaisesRegex(TclError, 'bad screen distance "abcd"'): + f2.place_configure(width='abcd') + + def test_place_configure_height(self): + t, f, f2 = self.create2() + f2.place_configure(in_=f, height=120) + self.root.update() + self.assertEqual(f2.winfo_height(), 120) + f2.place_configure(height='') + self.root.update() + self.assertEqual(f2.winfo_height(), 60) + with self.assertRaisesRegex(TclError, 'bad screen distance "abcd"'): + f2.place_configure(height='abcd') + + def test_place_configure_relwidth(self): + t, f, f2 = self.create2() + f2.place_configure(in_=f, relwidth=0.5) + self.root.update() + self.assertEqual(f2.winfo_width(), 75) + f2.place_configure(relwidth='') + self.root.update() + self.assertEqual(f2.winfo_width(), 30) + with self.assertRaisesRegex(TclError, 'expected floating-point number ' + 'but got "abcd"'): + f2.place_configure(relwidth='abcd') + + def test_place_configure_relheight(self): + t, f, f2 = self.create2() + f2.place_configure(in_=f, relheight=0.5) + self.root.update() + self.assertEqual(f2.winfo_height(), 40) + f2.place_configure(relheight='') + self.root.update() + self.assertEqual(f2.winfo_height(), 60) + with self.assertRaisesRegex(TclError, 'expected floating-point number ' + 'but got "abcd"'): + f2.place_configure(relheight='abcd') + + def test_place_configure_bordermode(self): + f = tkinter.Frame(self.root) + with self.assertRaisesRegex(TclError, 'bad bordermode "j"'): + f.place_configure(bordermode='j') + with self.assertRaisesRegex(TclError, 'ambiguous bordermode ""'): + f.place_configure(bordermode='') + for value in 'inside', 'outside', 'ignore': + f.place_configure(bordermode=value) + self.assertEqual(f.place_info()['bordermode'], value) + + def test_place_forget(self): + foo = tkinter.Frame(self.root) + foo.place_configure(width=50, height=50) + self.root.update() + foo.place_forget() + self.root.update() + self.assertFalse(foo.winfo_ismapped()) + with 
self.assertRaises(TypeError): + foo.place_forget(0) + + def test_place_info(self): + t, f, f2 = self.create2() + f2.place_configure(in_=f, x=1, y=2, width=3, height=4, + relx=0.1, rely=0.2, relwidth=0.3, relheight=0.4, + anchor='se', bordermode='outside') + info = f2.place_info() + self.assertIsInstance(info, dict) + self.assertEqual(info['x'], '1') + self.assertEqual(info['y'], '2') + self.assertEqual(info['width'], '3') + self.assertEqual(info['height'], '4') + self.assertEqual(info['relx'], '0.1') + self.assertEqual(info['rely'], '0.2') + self.assertEqual(info['relwidth'], '0.3') + self.assertEqual(info['relheight'], '0.4') + self.assertEqual(info['anchor'], 'se') + self.assertEqual(info['bordermode'], 'outside') + self.assertEqual(info['x'], '1') + self.assertEqual(info['x'], '1') + with self.assertRaises(TypeError): + f2.place_info(0) + + def test_place_slaves(self): + foo = tkinter.Frame(self.root) + bar = tkinter.Frame(self.root) + self.assertEqual(foo.place_slaves(), []) + bar.place_configure(in_=foo) + self.assertEqual(foo.place_slaves(), [bar]) + with self.assertRaises(TypeError): + foo.place_slaves(0) + + +class GridTest(AbstractWidgetTest, unittest.TestCase): + + test_keys = None + + def tearDown(self): + cols, rows = self.root.grid_size() + for i in range(cols + 1): + self.root.grid_columnconfigure(i, weight=0, minsize=0, pad=0, uniform='') + for i in range(rows + 1): + self.root.grid_rowconfigure(i, weight=0, minsize=0, pad=0, uniform='') + self.root.grid_propagate(1) + self.root.grid_anchor('nw') + super().tearDown() + + def test_grid_configure(self): + b = tkinter.Button(self.root) + self.assertEqual(b.grid_info(), {}) + b.grid_configure() + self.assertEqual(b.grid_info()['in'], self.root) + self.assertEqual(b.grid_info()['column'], self._str(0)) + self.assertEqual(b.grid_info()['row'], self._str(0)) + b.grid_configure({'column': 1}, row=2) + self.assertEqual(b.grid_info()['column'], self._str(1)) + self.assertEqual(b.grid_info()['row'], self._str(2)) + + def test_grid_configure_column(self): + b = tkinter.Button(self.root) + with self.assertRaisesRegex(TclError, 'bad column value "-1": ' + 'must be a non-negative integer'): + b.grid_configure(column=-1) + b.grid_configure(column=2) + self.assertEqual(b.grid_info()['column'], self._str(2)) + + def test_grid_configure_columnspan(self): + b = tkinter.Button(self.root) + with self.assertRaisesRegex(TclError, 'bad columnspan value "0": ' + 'must be a positive integer'): + b.grid_configure(columnspan=0) + b.grid_configure(columnspan=2) + self.assertEqual(b.grid_info()['columnspan'], self._str(2)) + + def test_grid_configure_in(self): + f = tkinter.Frame(self.root) + b = tkinter.Button(self.root) + self.assertEqual(b.grid_info(), {}) + b.grid_configure() + self.assertEqual(b.grid_info()['in'], self.root) + b.grid_configure(in_=f) + self.assertEqual(b.grid_info()['in'], f) + b.grid_configure({'in': self.root}) + self.assertEqual(b.grid_info()['in'], self.root) + + def test_grid_configure_ipadx(self): + b = tkinter.Button(self.root) + with self.assertRaisesRegex(TclError, 'bad ipadx value "-1": ' + 'must be positive screen distance'): + b.grid_configure(ipadx=-1) + b.grid_configure(ipadx=1) + self.assertEqual(b.grid_info()['ipadx'], self._str(1)) + b.grid_configure(ipadx='.5c') + self.assertEqual(b.grid_info()['ipadx'], + self._str(round(pixels_conv('.5c') * self.scaling))) + + def test_grid_configure_ipady(self): + b = tkinter.Button(self.root) + with self.assertRaisesRegex(TclError, 'bad ipady value "-1": ' + 'must be positive 
screen distance'): + b.grid_configure(ipady=-1) + b.grid_configure(ipady=1) + self.assertEqual(b.grid_info()['ipady'], self._str(1)) + b.grid_configure(ipady='.5c') + self.assertEqual(b.grid_info()['ipady'], + self._str(round(pixels_conv('.5c') * self.scaling))) + + def test_grid_configure_padx(self): + b = tkinter.Button(self.root) + with self.assertRaisesRegex(TclError, 'bad pad value "-1": ' + 'must be positive screen distance'): + b.grid_configure(padx=-1) + b.grid_configure(padx=1) + self.assertEqual(b.grid_info()['padx'], self._str(1)) + b.grid_configure(padx=(10, 5)) + self.assertEqual(b.grid_info()['padx'], self._str((10, 5))) + b.grid_configure(padx='.5c') + self.assertEqual(b.grid_info()['padx'], + self._str(round(pixels_conv('.5c') * self.scaling))) + + def test_grid_configure_pady(self): + b = tkinter.Button(self.root) + with self.assertRaisesRegex(TclError, 'bad pad value "-1": ' + 'must be positive screen distance'): + b.grid_configure(pady=-1) + b.grid_configure(pady=1) + self.assertEqual(b.grid_info()['pady'], self._str(1)) + b.grid_configure(pady=(10, 5)) + self.assertEqual(b.grid_info()['pady'], self._str((10, 5))) + b.grid_configure(pady='.5c') + self.assertEqual(b.grid_info()['pady'], + self._str(round(pixels_conv('.5c') * self.scaling))) + + def test_grid_configure_row(self): + b = tkinter.Button(self.root) + with self.assertRaisesRegex(TclError, 'bad (row|grid) value "-1": ' + 'must be a non-negative integer'): + b.grid_configure(row=-1) + b.grid_configure(row=2) + self.assertEqual(b.grid_info()['row'], self._str(2)) + + def test_grid_configure_rownspan(self): + b = tkinter.Button(self.root) + with self.assertRaisesRegex(TclError, 'bad rowspan value "0": ' + 'must be a positive integer'): + b.grid_configure(rowspan=0) + b.grid_configure(rowspan=2) + self.assertEqual(b.grid_info()['rowspan'], self._str(2)) + + def test_grid_configure_sticky(self): + f = tkinter.Frame(self.root, bg='red') + with self.assertRaisesRegex(TclError, 'bad stickyness value "glue"'): + f.grid_configure(sticky='glue') + f.grid_configure(sticky='ne') + self.assertEqual(f.grid_info()['sticky'], 'ne') + f.grid_configure(sticky='n,s,e,w') + self.assertEqual(f.grid_info()['sticky'], 'nesw') + + def test_grid_columnconfigure(self): + with self.assertRaises(TypeError): + self.root.grid_columnconfigure() + self.assertEqual(self.root.grid_columnconfigure(0), + {'minsize': 0, 'pad': 0, 'uniform': None, 'weight': 0}) + with self.assertRaisesRegex(TclError, 'bad option "-foo"'): + self.root.grid_columnconfigure(0, 'foo') + self.root.grid_columnconfigure((0, 3), weight=2) + with self.assertRaisesRegex(TclError, + 'must specify a single element on retrieval'): + self.root.grid_columnconfigure((0, 3)) + b = tkinter.Button(self.root) + b.grid_configure(column=0, row=0) + self.root.grid_columnconfigure('all', weight=3) + with self.assertRaisesRegex(TclError, 'expected integer but got "all"'): + self.root.grid_columnconfigure('all') + self.assertEqual(self.root.grid_columnconfigure(0, 'weight'), 3) + self.assertEqual(self.root.grid_columnconfigure(3, 'weight'), 2) + self.assertEqual(self.root.grid_columnconfigure(265, 'weight'), 0) + self.root.grid_columnconfigure(b, weight=4) + self.assertEqual(self.root.grid_columnconfigure(0, 'weight'), 4) + + def test_grid_columnconfigure_minsize(self): + with self.assertRaisesRegex(TclError, 'bad screen distance "foo"'): + self.root.grid_columnconfigure(0, minsize='foo') + self.root.grid_columnconfigure(0, minsize=10) + self.assertEqual(self.root.grid_columnconfigure(0, 
'minsize'), 10) + self.assertEqual(self.root.grid_columnconfigure(0)['minsize'], 10) + + def test_grid_columnconfigure_weight(self): + with self.assertRaisesRegex(TclError, 'expected integer but got "bad"'): + self.root.grid_columnconfigure(0, weight='bad') + with self.assertRaisesRegex(TclError, 'invalid arg "-weight": ' + 'should be non-negative'): + self.root.grid_columnconfigure(0, weight=-3) + self.root.grid_columnconfigure(0, weight=3) + self.assertEqual(self.root.grid_columnconfigure(0, 'weight'), 3) + self.assertEqual(self.root.grid_columnconfigure(0)['weight'], 3) + + def test_grid_columnconfigure_pad(self): + with self.assertRaisesRegex(TclError, 'bad screen distance "foo"'): + self.root.grid_columnconfigure(0, pad='foo') + with self.assertRaisesRegex(TclError, 'invalid arg "-pad": ' + 'should be non-negative'): + self.root.grid_columnconfigure(0, pad=-3) + self.root.grid_columnconfigure(0, pad=3) + self.assertEqual(self.root.grid_columnconfigure(0, 'pad'), 3) + self.assertEqual(self.root.grid_columnconfigure(0)['pad'], 3) + + def test_grid_columnconfigure_uniform(self): + self.root.grid_columnconfigure(0, uniform='foo') + self.assertEqual(self.root.grid_columnconfigure(0, 'uniform'), 'foo') + self.assertEqual(self.root.grid_columnconfigure(0)['uniform'], 'foo') + + def test_grid_rowconfigure(self): + with self.assertRaises(TypeError): + self.root.grid_rowconfigure() + self.assertEqual(self.root.grid_rowconfigure(0), + {'minsize': 0, 'pad': 0, 'uniform': None, 'weight': 0}) + with self.assertRaisesRegex(TclError, 'bad option "-foo"'): + self.root.grid_rowconfigure(0, 'foo') + self.root.grid_rowconfigure((0, 3), weight=2) + with self.assertRaisesRegex(TclError, + 'must specify a single element on retrieval'): + self.root.grid_rowconfigure((0, 3)) + b = tkinter.Button(self.root) + b.grid_configure(column=0, row=0) + self.root.grid_rowconfigure('all', weight=3) + with self.assertRaisesRegex(TclError, 'expected integer but got "all"'): + self.root.grid_rowconfigure('all') + self.assertEqual(self.root.grid_rowconfigure(0, 'weight'), 3) + self.assertEqual(self.root.grid_rowconfigure(3, 'weight'), 2) + self.assertEqual(self.root.grid_rowconfigure(265, 'weight'), 0) + self.root.grid_rowconfigure(b, weight=4) + self.assertEqual(self.root.grid_rowconfigure(0, 'weight'), 4) + + def test_grid_rowconfigure_minsize(self): + with self.assertRaisesRegex(TclError, 'bad screen distance "foo"'): + self.root.grid_rowconfigure(0, minsize='foo') + self.root.grid_rowconfigure(0, minsize=10) + self.assertEqual(self.root.grid_rowconfigure(0, 'minsize'), 10) + self.assertEqual(self.root.grid_rowconfigure(0)['minsize'], 10) + + def test_grid_rowconfigure_weight(self): + with self.assertRaisesRegex(TclError, 'expected integer but got "bad"'): + self.root.grid_rowconfigure(0, weight='bad') + with self.assertRaisesRegex(TclError, 'invalid arg "-weight": ' + 'should be non-negative'): + self.root.grid_rowconfigure(0, weight=-3) + self.root.grid_rowconfigure(0, weight=3) + self.assertEqual(self.root.grid_rowconfigure(0, 'weight'), 3) + self.assertEqual(self.root.grid_rowconfigure(0)['weight'], 3) + + def test_grid_rowconfigure_pad(self): + with self.assertRaisesRegex(TclError, 'bad screen distance "foo"'): + self.root.grid_rowconfigure(0, pad='foo') + with self.assertRaisesRegex(TclError, 'invalid arg "-pad": ' + 'should be non-negative'): + self.root.grid_rowconfigure(0, pad=-3) + self.root.grid_rowconfigure(0, pad=3) + self.assertEqual(self.root.grid_rowconfigure(0, 'pad'), 3) + 
self.assertEqual(self.root.grid_rowconfigure(0)['pad'], 3) + + def test_grid_rowconfigure_uniform(self): + self.root.grid_rowconfigure(0, uniform='foo') + self.assertEqual(self.root.grid_rowconfigure(0, 'uniform'), 'foo') + self.assertEqual(self.root.grid_rowconfigure(0)['uniform'], 'foo') + + def test_grid_forget(self): + b = tkinter.Button(self.root) + c = tkinter.Button(self.root) + b.grid_configure(row=2, column=2, rowspan=2, columnspan=2, + padx=3, pady=4, sticky='ns') + self.assertEqual(self.root.grid_slaves(), [b]) + b.grid_forget() + c.grid_forget() + self.assertEqual(self.root.grid_slaves(), []) + self.assertEqual(b.grid_info(), {}) + b.grid_configure(row=0, column=0) + info = b.grid_info() + self.assertEqual(info['row'], self._str(0)) + self.assertEqual(info['column'], self._str(0)) + self.assertEqual(info['rowspan'], self._str(1)) + self.assertEqual(info['columnspan'], self._str(1)) + self.assertEqual(info['padx'], self._str(0)) + self.assertEqual(info['pady'], self._str(0)) + self.assertEqual(info['sticky'], '') + + def test_grid_remove(self): + b = tkinter.Button(self.root) + c = tkinter.Button(self.root) + b.grid_configure(row=2, column=2, rowspan=2, columnspan=2, + padx=3, pady=4, sticky='ns') + self.assertEqual(self.root.grid_slaves(), [b]) + b.grid_remove() + c.grid_remove() + self.assertEqual(self.root.grid_slaves(), []) + self.assertEqual(b.grid_info(), {}) + b.grid_configure(row=0, column=0) + info = b.grid_info() + self.assertEqual(info['row'], self._str(0)) + self.assertEqual(info['column'], self._str(0)) + self.assertEqual(info['rowspan'], self._str(2)) + self.assertEqual(info['columnspan'], self._str(2)) + self.assertEqual(info['padx'], self._str(3)) + self.assertEqual(info['pady'], self._str(4)) + self.assertEqual(info['sticky'], 'ns') + + def test_grid_info(self): + b = tkinter.Button(self.root) + self.assertEqual(b.grid_info(), {}) + b.grid_configure(row=2, column=2, rowspan=2, columnspan=2, + padx=3, pady=4, sticky='ns') + info = b.grid_info() + self.assertIsInstance(info, dict) + self.assertEqual(info['in'], self.root) + self.assertEqual(info['row'], self._str(2)) + self.assertEqual(info['column'], self._str(2)) + self.assertEqual(info['rowspan'], self._str(2)) + self.assertEqual(info['columnspan'], self._str(2)) + self.assertEqual(info['padx'], self._str(3)) + self.assertEqual(info['pady'], self._str(4)) + self.assertEqual(info['sticky'], 'ns') + + def test_grid_anchor(self): + with self.assertRaisesRegex(TclError, 'bad anchor "x"'): + self.root.grid_anchor('x') + with self.assertRaisesRegex(TclError, 'ambiguous anchor ""'): + self.root.grid_anchor('') + with self.assertRaises(TypeError): + self.root.grid_anchor('se', 'nw') + self.root.grid_anchor('se') + self.assertEqual(self.root.tk.call('grid', 'anchor', self.root), 'se') + + def test_grid_bbox(self): + self.assertEqual(self.root.grid_bbox(), (0, 0, 0, 0)) + self.assertEqual(self.root.grid_bbox(0, 0), (0, 0, 0, 0)) + self.assertEqual(self.root.grid_bbox(0, 0, 1, 1), (0, 0, 0, 0)) + with self.assertRaisesRegex(TclError, 'expected integer but got "x"'): + self.root.grid_bbox('x', 0) + with self.assertRaisesRegex(TclError, 'expected integer but got "x"'): + self.root.grid_bbox(0, 'x') + with self.assertRaisesRegex(TclError, 'expected integer but got "x"'): + self.root.grid_bbox(0, 0, 'x', 0) + with self.assertRaisesRegex(TclError, 'expected integer but got "x"'): + self.root.grid_bbox(0, 0, 0, 'x') + with self.assertRaises(TypeError): + self.root.grid_bbox(0, 0, 0, 0, 0) + t = self.root + # de-maximize + 
t.wm_geometry('1x1+0+0') + t.wm_geometry('') + f1 = tkinter.Frame(t, width=75, height=75, bg='red') + f2 = tkinter.Frame(t, width=90, height=90, bg='blue') + f1.grid_configure(row=0, column=0) + f2.grid_configure(row=1, column=1) + self.root.update() + self.assertEqual(t.grid_bbox(), (0, 0, 165, 165)) + self.assertEqual(t.grid_bbox(0, 0), (0, 0, 75, 75)) + self.assertEqual(t.grid_bbox(0, 0, 1, 1), (0, 0, 165, 165)) + self.assertEqual(t.grid_bbox(1, 1), (75, 75, 90, 90)) + self.assertEqual(t.grid_bbox(10, 10, 0, 0), (0, 0, 165, 165)) + self.assertEqual(t.grid_bbox(-2, -2, -1, -1), (0, 0, 0, 0)) + self.assertEqual(t.grid_bbox(10, 10, 12, 12), (165, 165, 0, 0)) + + def test_grid_location(self): + with self.assertRaises(TypeError): + self.root.grid_location() + with self.assertRaises(TypeError): + self.root.grid_location(0) + with self.assertRaises(TypeError): + self.root.grid_location(0, 0, 0) + with self.assertRaisesRegex(TclError, 'bad screen distance "x"'): + self.root.grid_location('x', 'y') + with self.assertRaisesRegex(TclError, 'bad screen distance "y"'): + self.root.grid_location('1c', 'y') + t = self.root + # de-maximize + t.wm_geometry('1x1+0+0') + t.wm_geometry('') + f = tkinter.Frame(t, width=200, height=100, + highlightthickness=0, bg='red') + self.assertEqual(f.grid_location(10, 10), (-1, -1)) + f.grid_configure() + self.root.update() + self.assertEqual(t.grid_location(-10, -10), (-1, -1)) + self.assertEqual(t.grid_location(-10, 0), (-1, 0)) + self.assertEqual(t.grid_location(-1, 0), (-1, 0)) + self.assertEqual(t.grid_location(0, -10), (0, -1)) + self.assertEqual(t.grid_location(0, -1), (0, -1)) + self.assertEqual(t.grid_location(0, 0), (0, 0)) + self.assertEqual(t.grid_location(200, 0), (0, 0)) + self.assertEqual(t.grid_location(201, 0), (1, 0)) + self.assertEqual(t.grid_location(0, 100), (0, 0)) + self.assertEqual(t.grid_location(0, 101), (0, 1)) + self.assertEqual(t.grid_location(201, 101), (1, 1)) + + def test_grid_propagate(self): + self.assertEqual(self.root.grid_propagate(), True) + with self.assertRaises(TypeError): + self.root.grid_propagate(False, False) + self.root.grid_propagate(False) + self.assertFalse(self.root.grid_propagate()) + f = tkinter.Frame(self.root, width=100, height=100, bg='red') + f.grid_configure(row=0, column=0) + self.root.update() + self.assertEqual(f.winfo_width(), 100) + self.assertEqual(f.winfo_height(), 100) + f.grid_propagate(False) + g = tkinter.Frame(self.root, width=75, height=85, bg='green') + g.grid_configure(in_=f, row=0, column=0) + self.root.update() + self.assertEqual(f.winfo_width(), 100) + self.assertEqual(f.winfo_height(), 100) + f.grid_propagate(True) + self.root.update() + self.assertEqual(f.winfo_width(), 75) + self.assertEqual(f.winfo_height(), 85) + + def test_grid_size(self): + with self.assertRaises(TypeError): + self.root.grid_size(0) + self.assertEqual(self.root.grid_size(), (0, 0)) + f = tkinter.Scale(self.root) + f.grid_configure(row=0, column=0) + self.assertEqual(self.root.grid_size(), (1, 1)) + f.grid_configure(row=4, column=5) + self.assertEqual(self.root.grid_size(), (6, 5)) + + def test_grid_slaves(self): + self.assertEqual(self.root.grid_slaves(), []) + a = tkinter.Label(self.root) + a.grid_configure(row=0, column=1) + b = tkinter.Label(self.root) + b.grid_configure(row=1, column=0) + c = tkinter.Label(self.root) + c.grid_configure(row=1, column=1) + d = tkinter.Label(self.root) + d.grid_configure(row=1, column=1) + self.assertEqual(self.root.grid_slaves(), [d, c, b, a]) + 
self.assertEqual(self.root.grid_slaves(row=0), [a]) + self.assertEqual(self.root.grid_slaves(row=1), [d, c, b]) + self.assertEqual(self.root.grid_slaves(column=0), [b]) + self.assertEqual(self.root.grid_slaves(column=1), [d, c, a]) + self.assertEqual(self.root.grid_slaves(row=1, column=1), [d, c]) + + +tests_gui = ( + PackTest, PlaceTest, GridTest, +) + +if __name__ == '__main__': + unittest.main() diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/test/test_tkinter/test_images.py python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/test/test_tkinter/test_images.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/test/test_tkinter/test_images.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/test/test_tkinter/test_images.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,380 @@ +import unittest +import tkinter +from test import support +from test.support import os_helper +from tkinter.test.support import AbstractTkTest, AbstractDefaultRootTest, requires_tcl + +support.requires('gui') + + +class MiscTest(AbstractTkTest, unittest.TestCase): + + def test_image_types(self): + image_types = self.root.image_types() + self.assertIsInstance(image_types, tuple) + self.assertIn('photo', image_types) + self.assertIn('bitmap', image_types) + + def test_image_names(self): + image_names = self.root.image_names() + self.assertIsInstance(image_names, tuple) + + +class DefaultRootTest(AbstractDefaultRootTest, unittest.TestCase): + + def test_image_types(self): + self.assertRaises(RuntimeError, tkinter.image_types) + root = tkinter.Tk() + image_types = tkinter.image_types() + self.assertIsInstance(image_types, tuple) + self.assertIn('photo', image_types) + self.assertIn('bitmap', image_types) + root.destroy() + tkinter.NoDefaultRoot() + self.assertRaises(RuntimeError, tkinter.image_types) + + def test_image_names(self): + self.assertRaises(RuntimeError, tkinter.image_names) + root = tkinter.Tk() + image_names = tkinter.image_names() + self.assertIsInstance(image_names, tuple) + root.destroy() + tkinter.NoDefaultRoot() + self.assertRaises(RuntimeError, tkinter.image_names) + + def test_image_create_bitmap(self): + self.assertRaises(RuntimeError, tkinter.BitmapImage) + root = tkinter.Tk() + image = tkinter.BitmapImage() + self.assertIn(image.name, tkinter.image_names()) + root.destroy() + tkinter.NoDefaultRoot() + self.assertRaises(RuntimeError, tkinter.BitmapImage) + + def test_image_create_photo(self): + self.assertRaises(RuntimeError, tkinter.PhotoImage) + root = tkinter.Tk() + image = tkinter.PhotoImage() + self.assertIn(image.name, tkinter.image_names()) + root.destroy() + tkinter.NoDefaultRoot() + self.assertRaises(RuntimeError, tkinter.PhotoImage) + + +class BitmapImageTest(AbstractTkTest, unittest.TestCase): + + @classmethod + def setUpClass(cls): + AbstractTkTest.setUpClass.__func__(cls) + cls.testfile = support.findfile('python.xbm', subdir='imghdrdata') + + def test_create_from_file(self): + image = tkinter.BitmapImage('::img::test', master=self.root, + foreground='yellow', background='blue', + file=self.testfile) + self.assertEqual(str(image), '::img::test') + self.assertEqual(image.type(), 'bitmap') + self.assertEqual(image.width(), 16) + self.assertEqual(image.height(), 16) + self.assertIn('::img::test', self.root.image_names()) + del image + support.gc_collect() # For PyPy or other GCs. 
+ self.assertNotIn('::img::test', self.root.image_names()) + + def test_create_from_data(self): + with open(self.testfile, 'rb') as f: + data = f.read() + image = tkinter.BitmapImage('::img::test', master=self.root, + foreground='yellow', background='blue', + data=data) + self.assertEqual(str(image), '::img::test') + self.assertEqual(image.type(), 'bitmap') + self.assertEqual(image.width(), 16) + self.assertEqual(image.height(), 16) + self.assertIn('::img::test', self.root.image_names()) + del image + support.gc_collect() # For PyPy or other GCs. + self.assertNotIn('::img::test', self.root.image_names()) + + def assertEqualStrList(self, actual, expected): + self.assertIsInstance(actual, str) + self.assertEqual(self.root.splitlist(actual), expected) + + def test_configure_data(self): + image = tkinter.BitmapImage('::img::test', master=self.root) + self.assertEqual(image['data'], '-data {} {} {} {}') + with open(self.testfile, 'rb') as f: + data = f.read() + image.configure(data=data) + self.assertEqualStrList(image['data'], + ('-data', '', '', '', data.decode('ascii'))) + self.assertEqual(image.width(), 16) + self.assertEqual(image.height(), 16) + + self.assertEqual(image['maskdata'], '-maskdata {} {} {} {}') + image.configure(maskdata=data) + self.assertEqualStrList(image['maskdata'], + ('-maskdata', '', '', '', data.decode('ascii'))) + + def test_configure_file(self): + image = tkinter.BitmapImage('::img::test', master=self.root) + self.assertEqual(image['file'], '-file {} {} {} {}') + image.configure(file=self.testfile) + self.assertEqualStrList(image['file'], + ('-file', '', '', '',self.testfile)) + self.assertEqual(image.width(), 16) + self.assertEqual(image.height(), 16) + + self.assertEqual(image['maskfile'], '-maskfile {} {} {} {}') + image.configure(maskfile=self.testfile) + self.assertEqualStrList(image['maskfile'], + ('-maskfile', '', '', '', self.testfile)) + + def test_configure_background(self): + image = tkinter.BitmapImage('::img::test', master=self.root) + self.assertEqual(image['background'], '-background {} {} {} {}') + image.configure(background='blue') + self.assertEqual(image['background'], '-background {} {} {} blue') + + def test_configure_foreground(self): + image = tkinter.BitmapImage('::img::test', master=self.root) + self.assertEqual(image['foreground'], + '-foreground {} {} #000000 #000000') + image.configure(foreground='yellow') + self.assertEqual(image['foreground'], + '-foreground {} {} #000000 yellow') + + +class PhotoImageTest(AbstractTkTest, unittest.TestCase): + + @classmethod + def setUpClass(cls): + AbstractTkTest.setUpClass.__func__(cls) + cls.testfile = support.findfile('python.gif', subdir='imghdrdata') + + def create(self): + return tkinter.PhotoImage('::img::test', master=self.root, + file=self.testfile) + + def colorlist(self, *args): + if tkinter.TkVersion >= 8.6 and self.wantobjects: + return args + else: + return tkinter._join(args) + + def check_create_from_file(self, ext): + testfile = support.findfile('python.' + ext, subdir='imghdrdata') + image = tkinter.PhotoImage('::img::test', master=self.root, + file=testfile) + self.assertEqual(str(image), '::img::test') + self.assertEqual(image.type(), 'photo') + self.assertEqual(image.width(), 16) + self.assertEqual(image.height(), 16) + self.assertEqual(image['data'], '') + self.assertEqual(image['file'], testfile) + self.assertIn('::img::test', self.root.image_names()) + del image + support.gc_collect() # For PyPy or other GCs. 
+ self.assertNotIn('::img::test', self.root.image_names()) + + def check_create_from_data(self, ext): + testfile = support.findfile('python.' + ext, subdir='imghdrdata') + with open(testfile, 'rb') as f: + data = f.read() + image = tkinter.PhotoImage('::img::test', master=self.root, + data=data) + self.assertEqual(str(image), '::img::test') + self.assertEqual(image.type(), 'photo') + self.assertEqual(image.width(), 16) + self.assertEqual(image.height(), 16) + self.assertEqual(image['data'], data if self.wantobjects + else data.decode('latin1')) + self.assertEqual(image['file'], '') + self.assertIn('::img::test', self.root.image_names()) + del image + support.gc_collect() # For PyPy or other GCs. + self.assertNotIn('::img::test', self.root.image_names()) + + def test_create_from_ppm_file(self): + self.check_create_from_file('ppm') + + def test_create_from_ppm_data(self): + self.check_create_from_data('ppm') + + def test_create_from_pgm_file(self): + self.check_create_from_file('pgm') + + def test_create_from_pgm_data(self): + self.check_create_from_data('pgm') + + def test_create_from_gif_file(self): + self.check_create_from_file('gif') + + def test_create_from_gif_data(self): + self.check_create_from_data('gif') + + @requires_tcl(8, 6) + def test_create_from_png_file(self): + self.check_create_from_file('png') + + @requires_tcl(8, 6) + def test_create_from_png_data(self): + self.check_create_from_data('png') + + def test_configure_data(self): + image = tkinter.PhotoImage('::img::test', master=self.root) + self.assertEqual(image['data'], '') + with open(self.testfile, 'rb') as f: + data = f.read() + image.configure(data=data) + self.assertEqual(image['data'], data if self.wantobjects + else data.decode('latin1')) + self.assertEqual(image.width(), 16) + self.assertEqual(image.height(), 16) + + def test_configure_format(self): + image = tkinter.PhotoImage('::img::test', master=self.root) + self.assertEqual(image['format'], '') + image.configure(file=self.testfile, format='gif') + self.assertEqual(image['format'], ('gif',) if self.wantobjects + else 'gif') + self.assertEqual(image.width(), 16) + self.assertEqual(image.height(), 16) + + def test_configure_file(self): + image = tkinter.PhotoImage('::img::test', master=self.root) + self.assertEqual(image['file'], '') + image.configure(file=self.testfile) + self.assertEqual(image['file'], self.testfile) + self.assertEqual(image.width(), 16) + self.assertEqual(image.height(), 16) + + def test_configure_gamma(self): + image = tkinter.PhotoImage('::img::test', master=self.root) + self.assertEqual(image['gamma'], '1.0') + image.configure(gamma=2.0) + self.assertEqual(image['gamma'], '2.0') + + def test_configure_width_height(self): + image = tkinter.PhotoImage('::img::test', master=self.root) + self.assertEqual(image['width'], '0') + self.assertEqual(image['height'], '0') + image.configure(width=20) + image.configure(height=10) + self.assertEqual(image['width'], '20') + self.assertEqual(image['height'], '10') + self.assertEqual(image.width(), 20) + self.assertEqual(image.height(), 10) + + def test_configure_palette(self): + image = tkinter.PhotoImage('::img::test', master=self.root) + self.assertEqual(image['palette'], '') + image.configure(palette=256) + self.assertEqual(image['palette'], '256') + image.configure(palette='3/4/2') + self.assertEqual(image['palette'], '3/4/2') + + def test_blank(self): + image = self.create() + image.blank() + self.assertEqual(image.width(), 16) + self.assertEqual(image.height(), 16) + self.assertEqual(image.get(4, 6), 
self.colorlist(0, 0, 0)) + + def test_copy(self): + image = self.create() + image2 = image.copy() + self.assertEqual(image2.width(), 16) + self.assertEqual(image2.height(), 16) + self.assertEqual(image.get(4, 6), image.get(4, 6)) + + def test_subsample(self): + image = self.create() + image2 = image.subsample(2, 3) + self.assertEqual(image2.width(), 8) + self.assertEqual(image2.height(), 6) + self.assertEqual(image2.get(2, 2), image.get(4, 6)) + + image2 = image.subsample(2) + self.assertEqual(image2.width(), 8) + self.assertEqual(image2.height(), 8) + self.assertEqual(image2.get(2, 3), image.get(4, 6)) + + def test_zoom(self): + image = self.create() + image2 = image.zoom(2, 3) + self.assertEqual(image2.width(), 32) + self.assertEqual(image2.height(), 48) + self.assertEqual(image2.get(8, 18), image.get(4, 6)) + self.assertEqual(image2.get(9, 20), image.get(4, 6)) + + image2 = image.zoom(2) + self.assertEqual(image2.width(), 32) + self.assertEqual(image2.height(), 32) + self.assertEqual(image2.get(8, 12), image.get(4, 6)) + self.assertEqual(image2.get(9, 13), image.get(4, 6)) + + def test_put(self): + image = self.create() + image.put('{red green} {blue yellow}', to=(4, 6)) + self.assertEqual(image.get(4, 6), self.colorlist(255, 0, 0)) + self.assertEqual(image.get(5, 6), + self.colorlist(0, 128 if tkinter.TkVersion >= 8.6 + else 255, 0)) + self.assertEqual(image.get(4, 7), self.colorlist(0, 0, 255)) + self.assertEqual(image.get(5, 7), self.colorlist(255, 255, 0)) + + image.put((('#f00', '#00ff00'), ('#000000fff', '#ffffffff0000'))) + self.assertEqual(image.get(0, 0), self.colorlist(255, 0, 0)) + self.assertEqual(image.get(1, 0), self.colorlist(0, 255, 0)) + self.assertEqual(image.get(0, 1), self.colorlist(0, 0, 255)) + self.assertEqual(image.get(1, 1), self.colorlist(255, 255, 0)) + + def test_get(self): + image = self.create() + self.assertEqual(image.get(4, 6), self.colorlist(62, 116, 162)) + self.assertEqual(image.get(0, 0), self.colorlist(0, 0, 0)) + self.assertEqual(image.get(15, 15), self.colorlist(0, 0, 0)) + self.assertRaises(tkinter.TclError, image.get, -1, 0) + self.assertRaises(tkinter.TclError, image.get, 0, -1) + self.assertRaises(tkinter.TclError, image.get, 16, 15) + self.assertRaises(tkinter.TclError, image.get, 15, 16) + + def test_write(self): + image = self.create() + self.addCleanup(os_helper.unlink, os_helper.TESTFN) + + image.write(os_helper.TESTFN) + image2 = tkinter.PhotoImage('::img::test2', master=self.root, + format='ppm', + file=os_helper.TESTFN) + self.assertEqual(str(image2), '::img::test2') + self.assertEqual(image2.type(), 'photo') + self.assertEqual(image2.width(), 16) + self.assertEqual(image2.height(), 16) + self.assertEqual(image2.get(0, 0), image.get(0, 0)) + self.assertEqual(image2.get(15, 8), image.get(15, 8)) + + image.write(os_helper.TESTFN, format='gif', from_coords=(4, 6, 6, 9)) + image3 = tkinter.PhotoImage('::img::test3', master=self.root, + format='gif', + file=os_helper.TESTFN) + self.assertEqual(str(image3), '::img::test3') + self.assertEqual(image3.type(), 'photo') + self.assertEqual(image3.width(), 2) + self.assertEqual(image3.height(), 3) + self.assertEqual(image3.get(0, 0), image.get(4, 6)) + self.assertEqual(image3.get(1, 2), image.get(5, 8)) + + def test_transparency(self): + image = self.create() + self.assertEqual(image.transparency_get(0, 0), True) + self.assertEqual(image.transparency_get(4, 6), False) + image.transparency_set(4, 6, True) + self.assertEqual(image.transparency_get(4, 6), True) + image.transparency_set(4, 6, False) + 
+        self.assertEqual(image.transparency_get(4, 6), False)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/test/test_tkinter/test_loadtk.py python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/test/test_tkinter/test_loadtk.py
--- python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/test/test_tkinter/test_loadtk.py 1970-01-01 00:00:00.000000000 +0000
+++ python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/test/test_tkinter/test_loadtk.py 2022-10-24 17:35:39.000000000 +0000
@@ -0,0 +1,46 @@
+import os
+import sys
+import unittest
+import test.support as test_support
+from test.support import os_helper
+from tkinter import Tcl, TclError
+
+test_support.requires('gui')
+
+class TkLoadTest(unittest.TestCase):
+
+    @unittest.skipIf('DISPLAY' not in os.environ, 'No $DISPLAY set.')
+    def testLoadTk(self):
+        tcl = Tcl()
+        self.assertRaises(TclError,tcl.winfo_geometry)
+        tcl.loadtk()
+        self.assertEqual('1x1+0+0', tcl.winfo_geometry())
+        tcl.destroy()
+
+    def testLoadTkFailure(self):
+        old_display = None
+        if sys.platform.startswith(('win', 'darwin', 'cygwin')):
+            # no failure possible on windows?
+
+            # XXX Maybe on tk older than 8.4.13 it would be possible,
+            # see tkinter.h.
+            return
+        with os_helper.EnvironmentVarGuard() as env:
+            if 'DISPLAY' in os.environ:
+                del env['DISPLAY']
+                # on some platforms, deleting environment variables
+                # doesn't actually carry through to the process level
+                # because they don't support unsetenv
+                # If that's the case, abort.
+                with os.popen('echo $DISPLAY') as pipe:
+                    display = pipe.read().strip()
+                if display:
+                    return
+
+            tcl = Tcl()
+            self.assertRaises(TclError, tcl.winfo_geometry)
+            self.assertRaises(TclError, tcl.loadtk)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/test/test_tkinter/test_messagebox.py python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/test/test_tkinter/test_messagebox.py
--- python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/test/test_tkinter/test_messagebox.py 1970-01-01 00:00:00.000000000 +0000
+++ python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/test/test_tkinter/test_messagebox.py 2022-10-24 17:35:39.000000000 +0000
@@ -0,0 +1,36 @@
+import unittest
+import tkinter
+from test.support import requires, swap_attr
+from tkinter.test.support import AbstractDefaultRootTest
+from tkinter.commondialog import Dialog
+from tkinter.messagebox import showinfo
+
+requires('gui')
+
+
+class DefaultRootTest(AbstractDefaultRootTest, unittest.TestCase):
+
+    def test_showinfo(self):
+        def test_callback(dialog, master):
+            nonlocal ismapped
+            master.update()
+            ismapped = master.winfo_ismapped()
+            raise ZeroDivisionError
+
+        with swap_attr(Dialog, '_test_callback', test_callback):
+            ismapped = None
+            self.assertRaises(ZeroDivisionError, showinfo, "Spam", "Egg Information")
+            self.assertEqual(ismapped, False)
+
+            root = tkinter.Tk()
+            ismapped = None
+            self.assertRaises(ZeroDivisionError, showinfo, "Spam", "Egg Information")
+            self.assertEqual(ismapped, True)
+            root.destroy()
+
+            tkinter.NoDefaultRoot()
+            self.assertRaises(RuntimeError, showinfo, "Spam", "Egg Information")
+
+
+if __name__ == "__main__":
+    unittest.main()
diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/test/test_tkinter/test_misc.py python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/test/test_tkinter/test_misc.py
--- python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/test/test_tkinter/test_misc.py 1970-01-01 00:00:00.000000000 +0000
+++ python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/test/test_tkinter/test_misc.py 2022-10-24 17:35:39.000000000 +0000
@@ -0,0 +1,430 @@
+import functools
+import unittest
+import tkinter
+import enum
+from test import support
+from tkinter.test.support import AbstractTkTest, AbstractDefaultRootTest
+
+support.requires('gui')
+
+class MiscTest(AbstractTkTest, unittest.TestCase):
+
+    def test_all(self):
+        self.assertIn("Widget", tkinter.__all__)
+        # Check that variables from tkinter.constants are also in tkinter.__all__
+        self.assertIn("CASCADE", tkinter.__all__)
+        self.assertIsNotNone(tkinter.CASCADE)
+        # Check that sys, re, and constants are not in tkinter.__all__
+        self.assertNotIn("re", tkinter.__all__)
+        self.assertNotIn("sys", tkinter.__all__)
+        self.assertNotIn("constants", tkinter.__all__)
+        # Check that an underscored function is not in tkinter.__all__
+        self.assertNotIn("_tkerror", tkinter.__all__)
+        # Check that wantobjects is not in tkinter.__all__
+        self.assertNotIn("wantobjects", tkinter.__all__)
+
+    def test_repr(self):
+        t = tkinter.Toplevel(self.root, name='top')
+        f = tkinter.Frame(t, name='child')
+        self.assertEqual(repr(f), '<tkinter.Frame object .top.child>')
+
+    def test_generated_names(self):
+        t = tkinter.Toplevel(self.root)
+        f = tkinter.Frame(t)
+        f2 = tkinter.Frame(t)
+        b = tkinter.Button(f2)
+        for name in str(b).split('.'):
+            self.assertFalse(name.isidentifier(), msg=repr(name))
+
+    def test_tk_setPalette(self):
+        root = self.root
+        root.tk_setPalette('black')
+        self.assertEqual(root['background'], 'black')
+        root.tk_setPalette('white')
+        self.assertEqual(root['background'], 'white')
+        self.assertRaisesRegex(tkinter.TclError,
+                               '^unknown color name "spam"$',
+                               root.tk_setPalette, 'spam')
+
+        root.tk_setPalette(background='black')
+        self.assertEqual(root['background'], 'black')
+        root.tk_setPalette(background='blue', highlightColor='yellow')
+        self.assertEqual(root['background'], 'blue')
+        self.assertEqual(root['highlightcolor'], 'yellow')
+        root.tk_setPalette(background='yellow', highlightColor='blue')
+        self.assertEqual(root['background'], 'yellow')
+        self.assertEqual(root['highlightcolor'], 'blue')
+        self.assertRaisesRegex(tkinter.TclError,
+                               '^unknown color name "spam"$',
+                               root.tk_setPalette, background='spam')
+        self.assertRaisesRegex(tkinter.TclError,
+                               '^must specify a background color$',
+                               root.tk_setPalette, spam='white')
+        self.assertRaisesRegex(tkinter.TclError,
+                               '^must specify a background color$',
+                               root.tk_setPalette, highlightColor='blue')
+
+    def test_after(self):
+        root = self.root
+
+        def callback(start=0, step=1):
+            nonlocal count
+            count = start + step
+
+        # Without function, sleeps for ms.
+        self.assertIsNone(root.after(1))
+
+        # Set up with callback with no args.
+        count = 0
+        timer1 = root.after(0, callback)
+        self.assertIn(timer1, root.tk.call('after', 'info'))
+        (script, _) = root.tk.splitlist(root.tk.call('after', 'info', timer1))
+        root.update()  # Process all pending events.
+        self.assertEqual(count, 1)
+        with self.assertRaises(tkinter.TclError):
+            root.tk.call(script)
+
+        # Set up with callback with args.
+        count = 0
+        timer1 = root.after(0, callback, 42, 11)
+        root.update()  # Process all pending events.
+        self.assertEqual(count, 53)
+
+        # Cancel before called.
+        timer1 = root.after(1000, callback)
+        self.assertIn(timer1, root.tk.call('after', 'info'))
+        (script, _) = root.tk.splitlist(root.tk.call('after', 'info', timer1))
+        root.after_cancel(timer1)  # Cancel this event.
+ self.assertEqual(count, 53) + with self.assertRaises(tkinter.TclError): + root.tk.call(script) + + # Call with a callable class + count = 0 + timer1 = root.after(0, functools.partial(callback, 42, 11)) + root.update() # Process all pending events. + self.assertEqual(count, 53) + + def test_after_idle(self): + root = self.root + + def callback(start=0, step=1): + nonlocal count + count = start + step + + # Set up with callback with no args. + count = 0 + idle1 = root.after_idle(callback) + self.assertIn(idle1, root.tk.call('after', 'info')) + (script, _) = root.tk.splitlist(root.tk.call('after', 'info', idle1)) + root.update_idletasks() # Process all pending events. + self.assertEqual(count, 1) + with self.assertRaises(tkinter.TclError): + root.tk.call(script) + + # Set up with callback with args. + count = 0 + idle1 = root.after_idle(callback, 42, 11) + root.update_idletasks() # Process all pending events. + self.assertEqual(count, 53) + + # Cancel before called. + idle1 = root.after_idle(callback) + self.assertIn(idle1, root.tk.call('after', 'info')) + (script, _) = root.tk.splitlist(root.tk.call('after', 'info', idle1)) + root.after_cancel(idle1) # Cancel this event. + self.assertEqual(count, 53) + with self.assertRaises(tkinter.TclError): + root.tk.call(script) + + def test_after_cancel(self): + root = self.root + + def callback(): + nonlocal count + count += 1 + + timer1 = root.after(5000, callback) + idle1 = root.after_idle(callback) + + # No value for id raises a ValueError. + with self.assertRaises(ValueError): + root.after_cancel(None) + + # Cancel timer event. + count = 0 + (script, _) = root.tk.splitlist(root.tk.call('after', 'info', timer1)) + root.tk.call(script) + self.assertEqual(count, 1) + root.after_cancel(timer1) + with self.assertRaises(tkinter.TclError): + root.tk.call(script) + self.assertEqual(count, 1) + with self.assertRaises(tkinter.TclError): + root.tk.call('after', 'info', timer1) + + # Cancel same event - nothing happens. + root.after_cancel(timer1) + + # Cancel idle event. + count = 0 + (script, _) = root.tk.splitlist(root.tk.call('after', 'info', idle1)) + root.tk.call(script) + self.assertEqual(count, 1) + root.after_cancel(idle1) + with self.assertRaises(tkinter.TclError): + root.tk.call(script) + self.assertEqual(count, 1) + with self.assertRaises(tkinter.TclError): + root.tk.call('after', 'info', idle1) + + def test_clipboard(self): + root = self.root + root.clipboard_clear() + root.clipboard_append('Ùñî') + self.assertEqual(root.clipboard_get(), 'Ùñî') + root.clipboard_append('çōđě') + self.assertEqual(root.clipboard_get(), 'Ùñîçōđě') + root.clipboard_clear() + with self.assertRaises(tkinter.TclError): + root.clipboard_get() + + def test_clipboard_astral(self): + root = self.root + root.clipboard_clear() + root.clipboard_append('𝔘𝔫𝔦') + self.assertEqual(root.clipboard_get(), '𝔘𝔫𝔦') + root.clipboard_append('𝔠𝔬𝔡𝔢') + self.assertEqual(root.clipboard_get(), '𝔘𝔫𝔦𝔠𝔬𝔡𝔢') + root.clipboard_clear() + with self.assertRaises(tkinter.TclError): + root.clipboard_get() + + def test_winfo_rgb(self): + + def assertApprox(col1, col2): + # A small amount of flexibility is required (bpo-45496) + # 33 is ~0.05% of 65535, which is a reasonable margin + for col1_channel, col2_channel in zip(col1, col2): + self.assertAlmostEqual(col1_channel, col2_channel, delta=33) + + root = self.root + rgb = root.winfo_rgb + + # Color name. 
+ self.assertEqual(rgb('red'), (65535, 0, 0)) + self.assertEqual(rgb('dark slate blue'), (18504, 15677, 35723)) + # #RGB - extends each 4-bit hex value to be 16-bit. + self.assertEqual(rgb('#F0F'), (0xFFFF, 0x0000, 0xFFFF)) + # #RRGGBB - extends each 8-bit hex value to be 16-bit. + assertApprox(rgb('#4a3c8c'), (0x4a4a, 0x3c3c, 0x8c8c)) + # #RRRRGGGGBBBB + assertApprox(rgb('#dede14143939'), (0xdede, 0x1414, 0x3939)) + # Invalid string. + with self.assertRaises(tkinter.TclError): + rgb('#123456789a') + # RGB triplet is invalid input. + with self.assertRaises(tkinter.TclError): + rgb((111, 78, 55)) + + def test_event_repr_defaults(self): + e = tkinter.Event() + e.serial = 12345 + e.num = '??' + e.height = '??' + e.keycode = '??' + e.state = 0 + e.time = 123456789 + e.width = '??' + e.x = '??' + e.y = '??' + e.char = '' + e.keysym = '??' + e.keysym_num = '??' + e.type = '100' + e.widget = '??' + e.x_root = '??' + e.y_root = '??' + e.delta = 0 + self.assertEqual(repr(e), '<100 event>') + + def test_event_repr(self): + e = tkinter.Event() + e.serial = 12345 + e.num = 3 + e.focus = True + e.height = 200 + e.keycode = 65 + e.state = 0x30405 + e.time = 123456789 + e.width = 300 + e.x = 10 + e.y = 20 + e.char = 'A' + e.send_event = True + e.keysym = 'Key-A' + e.keysym_num = ord('A') + e.type = tkinter.EventType.Configure + e.widget = '.text' + e.x_root = 1010 + e.y_root = 1020 + e.delta = -1 + self.assertEqual(repr(e), + "") + + def test_eventtype_enum(self): + class CheckedEventType(enum.StrEnum): + KeyPress = '2' + Key = KeyPress + KeyRelease = '3' + ButtonPress = '4' + Button = ButtonPress + ButtonRelease = '5' + Motion = '6' + Enter = '7' + Leave = '8' + FocusIn = '9' + FocusOut = '10' + Keymap = '11' # undocumented + Expose = '12' + GraphicsExpose = '13' # undocumented + NoExpose = '14' # undocumented + Visibility = '15' + Create = '16' + Destroy = '17' + Unmap = '18' + Map = '19' + MapRequest = '20' + Reparent = '21' + Configure = '22' + ConfigureRequest = '23' + Gravity = '24' + ResizeRequest = '25' + Circulate = '26' + CirculateRequest = '27' + Property = '28' + SelectionClear = '29' # undocumented + SelectionRequest = '30' # undocumented + Selection = '31' # undocumented + Colormap = '32' + ClientMessage = '33' # undocumented + Mapping = '34' # undocumented + VirtualEvent = '35' # undocumented + Activate = '36' + Deactivate = '37' + MouseWheel = '38' + enum._test_simple_enum(CheckedEventType, tkinter.EventType) + + def test_getboolean(self): + for v in 'true', 'yes', 'on', '1', 't', 'y', 1, True: + self.assertIs(self.root.getboolean(v), True) + for v in 'false', 'no', 'off', '0', 'f', 'n', 0, False: + self.assertIs(self.root.getboolean(v), False) + self.assertRaises(ValueError, self.root.getboolean, 'yea') + self.assertRaises(ValueError, self.root.getboolean, '') + self.assertRaises(TypeError, self.root.getboolean, None) + self.assertRaises(TypeError, self.root.getboolean, ()) + + def test_mainloop(self): + log = [] + def callback(): + log.append(1) + self.root.after(100, self.root.quit) + self.root.after(100, callback) + self.root.mainloop(1) + self.assertEqual(log, []) + self.root.mainloop(0) + self.assertEqual(log, [1]) + self.assertTrue(self.root.winfo_exists()) + + def test_info_patchlevel(self): + vi = self.root.info_patchlevel() + f = tkinter.Frame(self.root) + self.assertEqual(f.info_patchlevel(), vi) + # The following is almost a copy of tests for sys.version_info. 
+ self.assertIsInstance(vi[:], tuple) + self.assertEqual(len(vi), 5) + self.assertIsInstance(vi[0], int) + self.assertIsInstance(vi[1], int) + self.assertIsInstance(vi[2], int) + self.assertIn(vi[3], ("alpha", "beta", "candidate", "final")) + self.assertIsInstance(vi[4], int) + self.assertIsInstance(vi.major, int) + self.assertIsInstance(vi.minor, int) + self.assertIsInstance(vi.micro, int) + self.assertIn(vi.releaselevel, ("alpha", "beta", "final")) + self.assertIsInstance(vi.serial, int) + self.assertEqual(vi[0], vi.major) + self.assertEqual(vi[1], vi.minor) + self.assertEqual(vi[2], vi.micro) + self.assertEqual(vi[3], vi.releaselevel) + self.assertEqual(vi[4], vi.serial) + self.assertTrue(vi > (1,0,0)) + if vi.releaselevel == 'final': + self.assertEqual(vi.serial, 0) + else: + self.assertEqual(vi.micro, 0) + self.assertTrue(str(vi).startswith(f'{vi.major}.{vi.minor}')) + + +class DefaultRootTest(AbstractDefaultRootTest, unittest.TestCase): + + def test_default_root(self): + self.assertIs(tkinter._support_default_root, True) + self.assertIsNone(tkinter._default_root) + root = tkinter.Tk() + root2 = tkinter.Tk() + root3 = tkinter.Tk() + self.assertIs(tkinter._default_root, root) + root2.destroy() + self.assertIs(tkinter._default_root, root) + root.destroy() + self.assertIsNone(tkinter._default_root) + root3.destroy() + self.assertIsNone(tkinter._default_root) + + def test_no_default_root(self): + self.assertIs(tkinter._support_default_root, True) + self.assertIsNone(tkinter._default_root) + root = tkinter.Tk() + self.assertIs(tkinter._default_root, root) + tkinter.NoDefaultRoot() + self.assertIs(tkinter._support_default_root, False) + self.assertFalse(hasattr(tkinter, '_default_root')) + # repeated call is no-op + tkinter.NoDefaultRoot() + self.assertIs(tkinter._support_default_root, False) + self.assertFalse(hasattr(tkinter, '_default_root')) + root.destroy() + self.assertIs(tkinter._support_default_root, False) + self.assertFalse(hasattr(tkinter, '_default_root')) + root = tkinter.Tk() + self.assertIs(tkinter._support_default_root, False) + self.assertFalse(hasattr(tkinter, '_default_root')) + root.destroy() + + def test_getboolean(self): + self.assertRaises(RuntimeError, tkinter.getboolean, '1') + root = tkinter.Tk() + self.assertIs(tkinter.getboolean('1'), True) + self.assertRaises(ValueError, tkinter.getboolean, 'yea') + root.destroy() + tkinter.NoDefaultRoot() + self.assertRaises(RuntimeError, tkinter.getboolean, '1') + + def test_mainloop(self): + self.assertRaises(RuntimeError, tkinter.mainloop) + root = tkinter.Tk() + root.after_idle(root.quit) + tkinter.mainloop() + root.destroy() + tkinter.NoDefaultRoot() + self.assertRaises(RuntimeError, tkinter.mainloop) + + +if __name__ == "__main__": + unittest.main() diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/test/test_tkinter/test_simpledialog.py python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/test/test_tkinter/test_simpledialog.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/test/test_tkinter/test_simpledialog.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/test/test_tkinter/test_simpledialog.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,35 @@ +import unittest +import tkinter +from test.support import requires, swap_attr +from tkinter.test.support import AbstractDefaultRootTest +from tkinter.simpledialog import Dialog, askinteger + +requires('gui') + + +class DefaultRootTest(AbstractDefaultRootTest, unittest.TestCase): + + def test_askinteger(self): + 
@staticmethod + def mock_wait_window(w): + nonlocal ismapped + ismapped = w.master.winfo_ismapped() + w.destroy() + + with swap_attr(Dialog, 'wait_window', mock_wait_window): + ismapped = None + askinteger("Go To Line", "Line number") + self.assertEqual(ismapped, False) + + root = tkinter.Tk() + ismapped = None + askinteger("Go To Line", "Line number") + self.assertEqual(ismapped, True) + root.destroy() + + tkinter.NoDefaultRoot() + self.assertRaises(RuntimeError, askinteger, "Go To Line", "Line number") + + +if __name__ == "__main__": + unittest.main() diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/test/test_tkinter/test_text.py python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/test/test_tkinter/test_text.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/test/test_tkinter/test_text.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/test/test_tkinter/test_text.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,45 @@ +import unittest +import tkinter +from test.support import requires +from tkinter.test.support import AbstractTkTest + +requires('gui') + +class TextTest(AbstractTkTest, unittest.TestCase): + + def setUp(self): + super().setUp() + self.text = tkinter.Text(self.root) + + def test_debug(self): + text = self.text + olddebug = text.debug() + try: + text.debug(0) + self.assertEqual(text.debug(), 0) + text.debug(1) + self.assertEqual(text.debug(), 1) + finally: + text.debug(olddebug) + self.assertEqual(text.debug(), olddebug) + + def test_search(self): + text = self.text + + # pattern and index are obligatory arguments. + self.assertRaises(tkinter.TclError, text.search, None, '1.0') + self.assertRaises(tkinter.TclError, text.search, 'a', None) + self.assertRaises(tkinter.TclError, text.search, None, None) + + # Invalid text index. + self.assertRaises(tkinter.TclError, text.search, '', 0) + + # Check if we are getting the indices as strings -- you are likely + # to get Tcl_Obj under Tk 8.5 if Tkinter doesn't convert it. 
+ text.insert('1.0', 'hi-test') + self.assertEqual(text.search('-test', '1.0', 'end'), '1.2') + self.assertEqual(text.search('test', '1.0', 'end'), '1.3') + + +if __name__ == "__main__": + unittest.main() diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/test/test_tkinter/test_variables.py python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/test/test_tkinter/test_variables.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/test/test_tkinter/test_variables.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/test/test_tkinter/test_variables.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,342 @@ +import unittest +from test import support + +import gc +import tkinter +from tkinter import (Variable, StringVar, IntVar, DoubleVar, BooleanVar, Tcl, + TclError) +from test.support import ALWAYS_EQ +from tkinter.test.support import AbstractDefaultRootTest + + +class Var(Variable): + + _default = "default" + side_effect = False + + def set(self, value): + self.side_effect = True + super().set(value) + + +class TestBase(unittest.TestCase): + + def setUp(self): + self.root = Tcl() + + def tearDown(self): + del self.root + + +class TestVariable(TestBase): + + def info_exists(self, *args): + return self.root.getboolean(self.root.call("info", "exists", *args)) + + def test_default(self): + v = Variable(self.root) + self.assertEqual("", v.get()) + self.assertRegex(str(v), r"^PY_VAR(\d+)$") + + def test_name_and_value(self): + v = Variable(self.root, "sample string", "varname") + self.assertEqual("sample string", v.get()) + self.assertEqual("varname", str(v)) + + def test___del__(self): + self.assertFalse(self.info_exists("varname")) + v = Variable(self.root, "sample string", "varname") + self.assertTrue(self.info_exists("varname")) + del v + support.gc_collect() # For PyPy or other GCs. + self.assertFalse(self.info_exists("varname")) + + def test_dont_unset_not_existing(self): + self.assertFalse(self.info_exists("varname")) + v1 = Variable(self.root, name="name") + v2 = Variable(self.root, name="name") + del v1 + support.gc_collect() # For PyPy or other GCs. + self.assertFalse(self.info_exists("name")) + # shouldn't raise exception + del v2 + support.gc_collect() # For PyPy or other GCs. 
+ self.assertFalse(self.info_exists("name")) + + def test_equality(self): + # values doesn't matter, only class and name are checked + v1 = Variable(self.root, name="abc") + v2 = Variable(self.root, name="abc") + self.assertIsNot(v1, v2) + self.assertEqual(v1, v2) + + v3 = Variable(self.root, name="cba") + self.assertNotEqual(v1, v3) + + v4 = StringVar(self.root, name="abc") + self.assertEqual(str(v1), str(v4)) + self.assertNotEqual(v1, v4) + + V = type('Variable', (), {}) + self.assertNotEqual(v1, V()) + + self.assertNotEqual(v1, object()) + self.assertEqual(v1, ALWAYS_EQ) + + root2 = tkinter.Tk() + self.addCleanup(root2.destroy) + v5 = Variable(root2, name="abc") + self.assertEqual(str(v1), str(v5)) + self.assertNotEqual(v1, v5) + + def test_invalid_name(self): + with self.assertRaises(TypeError): + Variable(self.root, name=123) + + def test_null_in_name(self): + with self.assertRaises(ValueError): + Variable(self.root, name='var\x00name') + with self.assertRaises(ValueError): + self.root.globalsetvar('var\x00name', "value") + with self.assertRaises(ValueError): + self.root.globalsetvar(b'var\x00name', "value") + with self.assertRaises(ValueError): + self.root.setvar('var\x00name', "value") + with self.assertRaises(ValueError): + self.root.setvar(b'var\x00name', "value") + + def test_initialize(self): + v = Var(self.root) + self.assertFalse(v.side_effect) + v.set("value") + self.assertTrue(v.side_effect) + + def test_trace_old(self): + # Old interface + v = Variable(self.root) + vname = str(v) + trace = [] + def read_tracer(*args): + trace.append(('read',) + args) + def write_tracer(*args): + trace.append(('write',) + args) + cb1 = v.trace_variable('r', read_tracer) + cb2 = v.trace_variable('wu', write_tracer) + self.assertEqual(sorted(v.trace_vinfo()), [('r', cb1), ('wu', cb2)]) + self.assertEqual(trace, []) + + v.set('spam') + self.assertEqual(trace, [('write', vname, '', 'w')]) + + trace = [] + v.get() + self.assertEqual(trace, [('read', vname, '', 'r')]) + + trace = [] + info = sorted(v.trace_vinfo()) + v.trace_vdelete('w', cb1) # Wrong mode + self.assertEqual(sorted(v.trace_vinfo()), info) + with self.assertRaises(TclError): + v.trace_vdelete('r', 'spam') # Wrong command name + self.assertEqual(sorted(v.trace_vinfo()), info) + v.trace_vdelete('r', (cb1, 43)) # Wrong arguments + self.assertEqual(sorted(v.trace_vinfo()), info) + v.get() + self.assertEqual(trace, [('read', vname, '', 'r')]) + + trace = [] + v.trace_vdelete('r', cb1) + self.assertEqual(v.trace_vinfo(), [('wu', cb2)]) + v.get() + self.assertEqual(trace, []) + + trace = [] + del write_tracer + gc.collect() + v.set('eggs') + self.assertEqual(trace, [('write', vname, '', 'w')]) + + trace = [] + del v + gc.collect() + self.assertEqual(trace, [('write', vname, '', 'u')]) + + def test_trace(self): + v = Variable(self.root) + vname = str(v) + trace = [] + def read_tracer(*args): + trace.append(('read',) + args) + def write_tracer(*args): + trace.append(('write',) + args) + tr1 = v.trace_add('read', read_tracer) + tr2 = v.trace_add(['write', 'unset'], write_tracer) + self.assertEqual(sorted(v.trace_info()), [ + (('read',), tr1), + (('write', 'unset'), tr2)]) + self.assertEqual(trace, []) + + v.set('spam') + self.assertEqual(trace, [('write', vname, '', 'write')]) + + trace = [] + v.get() + self.assertEqual(trace, [('read', vname, '', 'read')]) + + trace = [] + info = sorted(v.trace_info()) + v.trace_remove('write', tr1) # Wrong mode + self.assertEqual(sorted(v.trace_info()), info) + with self.assertRaises(TclError): + 
v.trace_remove('read', 'spam') # Wrong command name + self.assertEqual(sorted(v.trace_info()), info) + v.get() + self.assertEqual(trace, [('read', vname, '', 'read')]) + + trace = [] + v.trace_remove('read', tr1) + self.assertEqual(v.trace_info(), [(('write', 'unset'), tr2)]) + v.get() + self.assertEqual(trace, []) + + trace = [] + del write_tracer + gc.collect() + v.set('eggs') + self.assertEqual(trace, [('write', vname, '', 'write')]) + + trace = [] + del v + gc.collect() + self.assertEqual(trace, [('write', vname, '', 'unset')]) + + +class TestStringVar(TestBase): + + def test_default(self): + v = StringVar(self.root) + self.assertEqual("", v.get()) + + def test_get(self): + v = StringVar(self.root, "abc", "name") + self.assertEqual("abc", v.get()) + self.root.globalsetvar("name", "value") + self.assertEqual("value", v.get()) + + def test_get_null(self): + v = StringVar(self.root, "abc\x00def", "name") + self.assertEqual("abc\x00def", v.get()) + self.root.globalsetvar("name", "val\x00ue") + self.assertEqual("val\x00ue", v.get()) + + +class TestIntVar(TestBase): + + def test_default(self): + v = IntVar(self.root) + self.assertEqual(0, v.get()) + + def test_get(self): + v = IntVar(self.root, 123, "name") + self.assertEqual(123, v.get()) + self.root.globalsetvar("name", "345") + self.assertEqual(345, v.get()) + self.root.globalsetvar("name", "876.5") + self.assertEqual(876, v.get()) + + def test_invalid_value(self): + v = IntVar(self.root, name="name") + self.root.globalsetvar("name", "value") + with self.assertRaises((ValueError, TclError)): + v.get() + + +class TestDoubleVar(TestBase): + + def test_default(self): + v = DoubleVar(self.root) + self.assertEqual(0.0, v.get()) + + def test_get(self): + v = DoubleVar(self.root, 1.23, "name") + self.assertAlmostEqual(1.23, v.get()) + self.root.globalsetvar("name", "3.45") + self.assertAlmostEqual(3.45, v.get()) + + def test_get_from_int(self): + v = DoubleVar(self.root, 1.23, "name") + self.assertAlmostEqual(1.23, v.get()) + self.root.globalsetvar("name", "3.45") + self.assertAlmostEqual(3.45, v.get()) + self.root.globalsetvar("name", "456") + self.assertAlmostEqual(456, v.get()) + + def test_invalid_value(self): + v = DoubleVar(self.root, name="name") + self.root.globalsetvar("name", "value") + with self.assertRaises((ValueError, TclError)): + v.get() + + +class TestBooleanVar(TestBase): + + def test_default(self): + v = BooleanVar(self.root) + self.assertIs(v.get(), False) + + def test_get(self): + v = BooleanVar(self.root, True, "name") + self.assertIs(v.get(), True) + self.root.globalsetvar("name", "0") + self.assertIs(v.get(), False) + self.root.globalsetvar("name", 42 if self.root.wantobjects() else 1) + self.assertIs(v.get(), True) + self.root.globalsetvar("name", 0) + self.assertIs(v.get(), False) + self.root.globalsetvar("name", "on") + self.assertIs(v.get(), True) + + def test_set(self): + true = 1 if self.root.wantobjects() else "1" + false = 0 if self.root.wantobjects() else "0" + v = BooleanVar(self.root, name="name") + v.set(True) + self.assertEqual(self.root.globalgetvar("name"), true) + v.set("0") + self.assertEqual(self.root.globalgetvar("name"), false) + v.set(42) + self.assertEqual(self.root.globalgetvar("name"), true) + v.set(0) + self.assertEqual(self.root.globalgetvar("name"), false) + v.set("on") + self.assertEqual(self.root.globalgetvar("name"), true) + + def test_invalid_value_domain(self): + false = 0 if self.root.wantobjects() else "0" + v = BooleanVar(self.root, name="name") + with self.assertRaises(TclError): + 
v.set("value") + self.assertEqual(self.root.globalgetvar("name"), false) + self.root.globalsetvar("name", "value") + with self.assertRaises(ValueError): + v.get() + self.root.globalsetvar("name", "1.0") + with self.assertRaises(ValueError): + v.get() + + +class DefaultRootTest(AbstractDefaultRootTest, unittest.TestCase): + + def test_variable(self): + self.assertRaises(RuntimeError, Variable) + root = tkinter.Tk() + v = Variable() + v.set("value") + self.assertEqual(v.get(), "value") + root.destroy() + tkinter.NoDefaultRoot() + self.assertRaises(RuntimeError, Variable) + + +if __name__ == "__main__": + unittest.main() diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/test/test_tkinter/test_widgets.py python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/test/test_tkinter/test_widgets.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/test/test_tkinter/test_widgets.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/test/test_tkinter/test_widgets.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,1292 @@ +import unittest +import tkinter +from tkinter import TclError +import os +from test.support import requires + +from tkinter.test.support import (requires_tcl, + get_tk_patchlevel, widget_eq, + AbstractDefaultRootTest) +from tkinter.test.widget_tests import ( + add_standard_options, + AbstractWidgetTest, StandardOptionsTests, IntegerSizeTests, PixelSizeTests, + setUpModule) + +requires('gui') + + +def float_round(x): + return float(round(x)) + + +class AbstractToplevelTest(AbstractWidgetTest, PixelSizeTests): + _conv_pad_pixels = False + + def test_configure_class(self): + widget = self.create() + self.assertEqual(widget['class'], + widget.__class__.__name__.title()) + self.checkInvalidParam(widget, 'class', 'Foo', + errmsg="can't modify -class option after widget is created") + widget2 = self.create(class_='Foo') + self.assertEqual(widget2['class'], 'Foo') + + def test_configure_colormap(self): + widget = self.create() + self.assertEqual(widget['colormap'], '') + self.checkInvalidParam(widget, 'colormap', 'new', + errmsg="can't modify -colormap option after widget is created") + widget2 = self.create(colormap='new') + self.assertEqual(widget2['colormap'], 'new') + + def test_configure_container(self): + widget = self.create() + self.assertEqual(widget['container'], 0 if self.wantobjects else '0') + self.checkInvalidParam(widget, 'container', 1, + errmsg="can't modify -container option after widget is created") + widget2 = self.create(container=True) + self.assertEqual(widget2['container'], 1 if self.wantobjects else '1') + + def test_configure_visual(self): + widget = self.create() + self.assertEqual(widget['visual'], '') + self.checkInvalidParam(widget, 'visual', 'default', + errmsg="can't modify -visual option after widget is created") + widget2 = self.create(visual='default') + self.assertEqual(widget2['visual'], 'default') + + +@add_standard_options(StandardOptionsTests) +class ToplevelTest(AbstractToplevelTest, unittest.TestCase): + OPTIONS = ( + 'background', 'borderwidth', + 'class', 'colormap', 'container', 'cursor', 'height', + 'highlightbackground', 'highlightcolor', 'highlightthickness', + 'menu', 'padx', 'pady', 'relief', 'screen', + 'takefocus', 'use', 'visual', 'width', + ) + + def create(self, **kwargs): + return tkinter.Toplevel(self.root, **kwargs) + + def test_configure_menu(self): + widget = self.create() + menu = tkinter.Menu(self.root) + self.checkParam(widget, 'menu', menu, eq=widget_eq) + self.checkParam(widget, 
'menu', '') + + def test_configure_screen(self): + widget = self.create() + self.assertEqual(widget['screen'], '') + try: + display = os.environ['DISPLAY'] + except KeyError: + self.skipTest('No $DISPLAY set.') + self.checkInvalidParam(widget, 'screen', display, + errmsg="can't modify -screen option after widget is created") + widget2 = self.create(screen=display) + self.assertEqual(widget2['screen'], display) + + def test_configure_use(self): + widget = self.create() + self.assertEqual(widget['use'], '') + parent = self.create(container=True) + wid = hex(parent.winfo_id()) + with self.subTest(wid=wid): + widget2 = self.create(use=wid) + self.assertEqual(widget2['use'], wid) + + +@add_standard_options(StandardOptionsTests) +class FrameTest(AbstractToplevelTest, unittest.TestCase): + OPTIONS = ( + 'background', 'borderwidth', + 'class', 'colormap', 'container', 'cursor', 'height', + 'highlightbackground', 'highlightcolor', 'highlightthickness', + 'padx', 'pady', 'relief', 'takefocus', 'visual', 'width', + ) + + def create(self, **kwargs): + return tkinter.Frame(self.root, **kwargs) + + +@add_standard_options(StandardOptionsTests) +class LabelFrameTest(AbstractToplevelTest, unittest.TestCase): + OPTIONS = ( + 'background', 'borderwidth', + 'class', 'colormap', 'container', 'cursor', + 'font', 'foreground', 'height', + 'highlightbackground', 'highlightcolor', 'highlightthickness', + 'labelanchor', 'labelwidget', 'padx', 'pady', 'relief', + 'takefocus', 'text', 'visual', 'width', + ) + + def create(self, **kwargs): + return tkinter.LabelFrame(self.root, **kwargs) + + def test_configure_labelanchor(self): + widget = self.create() + self.checkEnumParam(widget, 'labelanchor', + 'e', 'en', 'es', 'n', 'ne', 'nw', + 's', 'se', 'sw', 'w', 'wn', 'ws') + self.checkInvalidParam(widget, 'labelanchor', 'center') + + def test_configure_labelwidget(self): + widget = self.create() + label = tkinter.Label(self.root, text='Mupp', name='foo') + self.checkParam(widget, 'labelwidget', label, expected='.foo') + label.destroy() + + +class AbstractLabelTest(AbstractWidgetTest, IntegerSizeTests): + _conv_pixels = False + + def test_configure_highlightthickness(self): + widget = self.create() + self.checkPixelsParam(widget, 'highlightthickness', + 0, 1.3, 2.6, 6, -2, '10p') + + +@add_standard_options(StandardOptionsTests) +class LabelTest(AbstractLabelTest, unittest.TestCase): + OPTIONS = ( + 'activebackground', 'activeforeground', 'anchor', + 'background', 'bitmap', 'borderwidth', 'compound', 'cursor', + 'disabledforeground', 'font', 'foreground', 'height', + 'highlightbackground', 'highlightcolor', 'highlightthickness', + 'image', 'justify', 'padx', 'pady', 'relief', 'state', + 'takefocus', 'text', 'textvariable', + 'underline', 'width', 'wraplength', + ) + + def create(self, **kwargs): + return tkinter.Label(self.root, **kwargs) + + +@add_standard_options(StandardOptionsTests) +class ButtonTest(AbstractLabelTest, unittest.TestCase): + OPTIONS = ( + 'activebackground', 'activeforeground', 'anchor', + 'background', 'bitmap', 'borderwidth', + 'command', 'compound', 'cursor', 'default', + 'disabledforeground', 'font', 'foreground', 'height', + 'highlightbackground', 'highlightcolor', 'highlightthickness', + 'image', 'justify', 'overrelief', 'padx', 'pady', 'relief', + 'repeatdelay', 'repeatinterval', + 'state', 'takefocus', 'text', 'textvariable', + 'underline', 'width', 'wraplength') + + def create(self, **kwargs): + return tkinter.Button(self.root, **kwargs) + + def test_configure_default(self): + widget = 
self.create() + self.checkEnumParam(widget, 'default', 'active', 'disabled', 'normal') + + +@add_standard_options(StandardOptionsTests) +class CheckbuttonTest(AbstractLabelTest, unittest.TestCase): + OPTIONS = ( + 'activebackground', 'activeforeground', 'anchor', + 'background', 'bitmap', 'borderwidth', + 'command', 'compound', 'cursor', + 'disabledforeground', 'font', 'foreground', 'height', + 'highlightbackground', 'highlightcolor', 'highlightthickness', + 'image', 'indicatoron', 'justify', + 'offrelief', 'offvalue', 'onvalue', 'overrelief', + 'padx', 'pady', 'relief', 'selectcolor', 'selectimage', 'state', + 'takefocus', 'text', 'textvariable', + 'tristateimage', 'tristatevalue', + 'underline', 'variable', 'width', 'wraplength', + ) + + def create(self, **kwargs): + return tkinter.Checkbutton(self.root, **kwargs) + + + def test_configure_offvalue(self): + widget = self.create() + self.checkParams(widget, 'offvalue', 1, 2.3, '', 'any string') + + def test_configure_onvalue(self): + widget = self.create() + self.checkParams(widget, 'onvalue', 1, 2.3, '', 'any string') + + +@add_standard_options(StandardOptionsTests) +class RadiobuttonTest(AbstractLabelTest, unittest.TestCase): + OPTIONS = ( + 'activebackground', 'activeforeground', 'anchor', + 'background', 'bitmap', 'borderwidth', + 'command', 'compound', 'cursor', + 'disabledforeground', 'font', 'foreground', 'height', + 'highlightbackground', 'highlightcolor', 'highlightthickness', + 'image', 'indicatoron', 'justify', 'offrelief', 'overrelief', + 'padx', 'pady', 'relief', 'selectcolor', 'selectimage', 'state', + 'takefocus', 'text', 'textvariable', + 'tristateimage', 'tristatevalue', + 'underline', 'value', 'variable', 'width', 'wraplength', + ) + + def create(self, **kwargs): + return tkinter.Radiobutton(self.root, **kwargs) + + def test_configure_value(self): + widget = self.create() + self.checkParams(widget, 'value', 1, 2.3, '', 'any string') + + +@add_standard_options(StandardOptionsTests) +class MenubuttonTest(AbstractLabelTest, unittest.TestCase): + OPTIONS = ( + 'activebackground', 'activeforeground', 'anchor', + 'background', 'bitmap', 'borderwidth', + 'compound', 'cursor', 'direction', + 'disabledforeground', 'font', 'foreground', 'height', + 'highlightbackground', 'highlightcolor', 'highlightthickness', + 'image', 'indicatoron', 'justify', 'menu', + 'padx', 'pady', 'relief', 'state', + 'takefocus', 'text', 'textvariable', + 'underline', 'width', 'wraplength', + ) + _conv_pixels = round + + def create(self, **kwargs): + return tkinter.Menubutton(self.root, **kwargs) + + def test_configure_direction(self): + widget = self.create() + self.checkEnumParam(widget, 'direction', + 'above', 'below', 'flush', 'left', 'right') + + def test_configure_height(self): + widget = self.create() + self.checkIntegerParam(widget, 'height', 100, -100, 0, conv=str) + + test_configure_highlightthickness = \ + StandardOptionsTests.test_configure_highlightthickness + + def test_configure_image(self): + widget = self.create() + image = tkinter.PhotoImage(master=self.root, name='image1') + self.checkParam(widget, 'image', image, conv=str) + errmsg = 'image "spam" doesn\'t exist' + with self.assertRaises(tkinter.TclError) as cm: + widget['image'] = 'spam' + if errmsg is not None: + self.assertEqual(str(cm.exception), errmsg) + with self.assertRaises(tkinter.TclError) as cm: + widget.configure({'image': 'spam'}) + if errmsg is not None: + self.assertEqual(str(cm.exception), errmsg) + + def test_configure_menu(self): + widget = self.create() + menu = 
tkinter.Menu(widget, name='menu') + self.checkParam(widget, 'menu', menu, eq=widget_eq) + menu.destroy() + + def test_configure_padx(self): + widget = self.create() + self.checkPixelsParam(widget, 'padx', 3, 4.4, 5.6, '12m') + self.checkParam(widget, 'padx', -2, expected=0) + + def test_configure_pady(self): + widget = self.create() + self.checkPixelsParam(widget, 'pady', 3, 4.4, 5.6, '12m') + self.checkParam(widget, 'pady', -2, expected=0) + + def test_configure_width(self): + widget = self.create() + self.checkIntegerParam(widget, 'width', 402, -402, 0, conv=str) + + +class OptionMenuTest(MenubuttonTest, unittest.TestCase): + + def create(self, default='b', values=('a', 'b', 'c'), **kwargs): + return tkinter.OptionMenu(self.root, None, default, *values, **kwargs) + + def test_bad_kwarg(self): + with self.assertRaisesRegex(TclError, r"^unknown option -image$"): + tkinter.OptionMenu(self.root, None, 'b', image='') + + +@add_standard_options(IntegerSizeTests, StandardOptionsTests) +class EntryTest(AbstractWidgetTest, unittest.TestCase): + OPTIONS = ( + 'background', 'borderwidth', 'cursor', + 'disabledbackground', 'disabledforeground', + 'exportselection', 'font', 'foreground', + 'highlightbackground', 'highlightcolor', 'highlightthickness', + 'insertbackground', 'insertborderwidth', + 'insertofftime', 'insertontime', 'insertwidth', + 'invalidcommand', 'justify', 'readonlybackground', 'relief', + 'selectbackground', 'selectborderwidth', 'selectforeground', + 'show', 'state', 'takefocus', 'textvariable', + 'validate', 'validatecommand', 'width', 'xscrollcommand', + ) + + def create(self, **kwargs): + return tkinter.Entry(self.root, **kwargs) + + def test_configure_disabledbackground(self): + widget = self.create() + self.checkColorParam(widget, 'disabledbackground') + + def test_configure_insertborderwidth(self): + widget = self.create(insertwidth=100) + self.checkPixelsParam(widget, 'insertborderwidth', + 0, 1.3, 2.6, 6, -2, '10p') + # insertborderwidth is bounded above by a half of insertwidth. 
+ self.checkParam(widget, 'insertborderwidth', 60, expected=100//2) + + def test_configure_insertwidth(self): + widget = self.create() + self.checkPixelsParam(widget, 'insertwidth', 1.3, 3.6, '10p') + self.checkParam(widget, 'insertwidth', 0.1, expected=2) + self.checkParam(widget, 'insertwidth', -2, expected=2) + self.checkParam(widget, 'insertwidth', 0.9, expected=1) + + def test_configure_invalidcommand(self): + widget = self.create() + self.checkCommandParam(widget, 'invalidcommand') + self.checkCommandParam(widget, 'invcmd') + + def test_configure_readonlybackground(self): + widget = self.create() + self.checkColorParam(widget, 'readonlybackground') + + def test_configure_show(self): + widget = self.create() + self.checkParam(widget, 'show', '*') + self.checkParam(widget, 'show', '') + self.checkParam(widget, 'show', ' ') + + def test_configure_state(self): + widget = self.create() + self.checkEnumParam(widget, 'state', + 'disabled', 'normal', 'readonly') + + def test_configure_validate(self): + widget = self.create() + self.checkEnumParam(widget, 'validate', + 'all', 'key', 'focus', 'focusin', 'focusout', 'none') + + def test_configure_validatecommand(self): + widget = self.create() + self.checkCommandParam(widget, 'validatecommand') + self.checkCommandParam(widget, 'vcmd') + + def test_selection_methods(self): + widget = self.create() + widget.insert(0, '12345') + self.assertFalse(widget.selection_present()) + widget.selection_range(0, 'end') + self.assertEqual(widget.selection_get(), '12345') + self.assertTrue(widget.selection_present()) + widget.selection_from(1) + widget.selection_to(2) + self.assertEqual(widget.selection_get(), '2') + widget.selection_range(3, 4) + self.assertEqual(widget.selection_get(), '4') + widget.selection_clear() + self.assertFalse(widget.selection_present()) + widget.selection_range(0, 'end') + widget.selection_adjust(4) + self.assertEqual(widget.selection_get(), '1234') + widget.selection_adjust(1) + self.assertEqual(widget.selection_get(), '234') + widget.selection_adjust(5) + self.assertEqual(widget.selection_get(), '2345') + widget.selection_adjust(0) + self.assertEqual(widget.selection_get(), '12345') + widget.selection_adjust(0) + + +@add_standard_options(StandardOptionsTests) +class SpinboxTest(EntryTest, unittest.TestCase): + OPTIONS = ( + 'activebackground', 'background', 'borderwidth', + 'buttonbackground', 'buttoncursor', 'buttondownrelief', 'buttonuprelief', + 'command', 'cursor', 'disabledbackground', 'disabledforeground', + 'exportselection', 'font', 'foreground', 'format', 'from', + 'highlightbackground', 'highlightcolor', 'highlightthickness', + 'increment', + 'insertbackground', 'insertborderwidth', + 'insertofftime', 'insertontime', 'insertwidth', + 'invalidcommand', 'justify', 'relief', 'readonlybackground', + 'repeatdelay', 'repeatinterval', + 'selectbackground', 'selectborderwidth', 'selectforeground', + 'state', 'takefocus', 'textvariable', 'to', + 'validate', 'validatecommand', 'values', + 'width', 'wrap', 'xscrollcommand', + ) + + def create(self, **kwargs): + return tkinter.Spinbox(self.root, **kwargs) + + test_configure_show = None + + def test_configure_buttonbackground(self): + widget = self.create() + self.checkColorParam(widget, 'buttonbackground') + + def test_configure_buttoncursor(self): + widget = self.create() + self.checkCursorParam(widget, 'buttoncursor') + + def test_configure_buttondownrelief(self): + widget = self.create() + self.checkReliefParam(widget, 'buttondownrelief') + + def 
test_configure_buttonuprelief(self): + widget = self.create() + self.checkReliefParam(widget, 'buttonuprelief') + + def test_configure_format(self): + widget = self.create() + self.checkParam(widget, 'format', '%2f') + self.checkParam(widget, 'format', '%2.2f') + self.checkParam(widget, 'format', '%.2f') + self.checkParam(widget, 'format', '%2.f') + self.checkInvalidParam(widget, 'format', '%2e-1f') + self.checkInvalidParam(widget, 'format', '2.2') + self.checkInvalidParam(widget, 'format', '%2.-2f') + self.checkParam(widget, 'format', '%-2.02f') + self.checkParam(widget, 'format', '% 2.02f') + self.checkParam(widget, 'format', '% -2.200f') + self.checkParam(widget, 'format', '%09.200f') + self.checkInvalidParam(widget, 'format', '%d') + + def test_configure_from(self): + widget = self.create() + self.checkParam(widget, 'to', 100.0) + self.checkFloatParam(widget, 'from', -10, 10.2, 11.7) + self.checkInvalidParam(widget, 'from', 200, + errmsg='-to value must be greater than -from value') + + def test_configure_increment(self): + widget = self.create() + self.checkFloatParam(widget, 'increment', -1, 1, 10.2, 12.8, 0) + + def test_configure_to(self): + widget = self.create() + self.checkParam(widget, 'from', -100.0) + self.checkFloatParam(widget, 'to', -10, 10.2, 11.7) + self.checkInvalidParam(widget, 'to', -200, + errmsg='-to value must be greater than -from value') + + def test_configure_values(self): + # XXX + widget = self.create() + self.assertEqual(widget['values'], '') + self.checkParam(widget, 'values', 'mon tue wed thur') + self.checkParam(widget, 'values', ('mon', 'tue', 'wed', 'thur'), + expected='mon tue wed thur') + self.checkParam(widget, 'values', (42, 3.14, '', 'any string'), + expected='42 3.14 {} {any string}') + self.checkParam(widget, 'values', '') + + def test_configure_wrap(self): + widget = self.create() + self.checkBooleanParam(widget, 'wrap') + + def test_bbox(self): + widget = self.create() + self.assertIsBoundingBox(widget.bbox(0)) + self.assertRaises(tkinter.TclError, widget.bbox, 'noindex') + self.assertRaises(tkinter.TclError, widget.bbox, None) + self.assertRaises(TypeError, widget.bbox) + self.assertRaises(TypeError, widget.bbox, 0, 1) + + def test_selection_methods(self): + widget = self.create() + widget.insert(0, '12345') + self.assertFalse(widget.selection_present()) + widget.selection_range(0, 'end') + self.assertEqual(widget.selection_get(), '12345') + self.assertTrue(widget.selection_present()) + widget.selection_from(1) + widget.selection_to(2) + self.assertEqual(widget.selection_get(), '2') + widget.selection_range(3, 4) + self.assertEqual(widget.selection_get(), '4') + widget.selection_clear() + self.assertFalse(widget.selection_present()) + widget.selection_range(0, 'end') + widget.selection_adjust(4) + self.assertEqual(widget.selection_get(), '1234') + widget.selection_adjust(1) + self.assertEqual(widget.selection_get(), '234') + widget.selection_adjust(5) + self.assertEqual(widget.selection_get(), '2345') + widget.selection_adjust(0) + self.assertEqual(widget.selection_get(), '12345') + + def test_selection_element(self): + widget = self.create() + self.assertEqual(widget.selection_element(), "none") + widget.selection_element("buttonup") + self.assertEqual(widget.selection_element(), "buttonup") + widget.selection_element("buttondown") + self.assertEqual(widget.selection_element(), "buttondown") + + +@add_standard_options(StandardOptionsTests) +class TextTest(AbstractWidgetTest, unittest.TestCase): + OPTIONS = ( + 'autoseparators', 'background', 
'blockcursor', 'borderwidth', + 'cursor', 'endline', 'exportselection', + 'font', 'foreground', 'height', + 'highlightbackground', 'highlightcolor', 'highlightthickness', + 'inactiveselectbackground', 'insertbackground', 'insertborderwidth', + 'insertofftime', 'insertontime', 'insertunfocussed', 'insertwidth', + 'maxundo', 'padx', 'pady', 'relief', + 'selectbackground', 'selectborderwidth', 'selectforeground', + 'setgrid', 'spacing1', 'spacing2', 'spacing3', 'startline', 'state', + 'tabs', 'tabstyle', 'takefocus', 'undo', 'width', 'wrap', + 'xscrollcommand', 'yscrollcommand', + ) + + def create(self, **kwargs): + return tkinter.Text(self.root, **kwargs) + + def test_configure_autoseparators(self): + widget = self.create() + self.checkBooleanParam(widget, 'autoseparators') + + def test_configure_blockcursor(self): + widget = self.create() + self.checkBooleanParam(widget, 'blockcursor') + + def test_configure_endline(self): + widget = self.create() + text = '\n'.join('Line %d' for i in range(100)) + widget.insert('end', text) + self.checkParam(widget, 'endline', 200, expected='') + self.checkParam(widget, 'endline', -10, expected='') + self.checkInvalidParam(widget, 'endline', 'spam', + errmsg='expected integer but got "spam"') + self.checkParam(widget, 'endline', 50) + self.checkParam(widget, 'startline', 15) + self.checkInvalidParam(widget, 'endline', 10, + errmsg='-startline must be less than or equal to -endline') + + def test_configure_height(self): + widget = self.create() + self.checkPixelsParam(widget, 'height', 100, 101.2, 102.6, '3c') + self.checkParam(widget, 'height', -100, expected=1) + self.checkParam(widget, 'height', 0, expected=1) + + def test_configure_maxundo(self): + widget = self.create() + self.checkIntegerParam(widget, 'maxundo', 0, 5, -1) + + def test_configure_inactiveselectbackground(self): + widget = self.create() + self.checkColorParam(widget, 'inactiveselectbackground') + + @requires_tcl(8, 6) + def test_configure_insertunfocussed(self): + widget = self.create() + self.checkEnumParam(widget, 'insertunfocussed', + 'hollow', 'none', 'solid') + + def test_configure_selectborderwidth(self): + widget = self.create() + self.checkPixelsParam(widget, 'selectborderwidth', + 1.3, 2.6, -2, '10p', conv=False) + + def test_configure_spacing1(self): + widget = self.create() + self.checkPixelsParam(widget, 'spacing1', 20, 21.4, 22.6, '0.5c') + self.checkParam(widget, 'spacing1', -5, expected=0) + + def test_configure_spacing2(self): + widget = self.create() + self.checkPixelsParam(widget, 'spacing2', 5, 6.4, 7.6, '0.1c') + self.checkParam(widget, 'spacing2', -1, expected=0) + + def test_configure_spacing3(self): + widget = self.create() + self.checkPixelsParam(widget, 'spacing3', 20, 21.4, 22.6, '0.5c') + self.checkParam(widget, 'spacing3', -10, expected=0) + + def test_configure_startline(self): + widget = self.create() + text = '\n'.join('Line %d' for i in range(100)) + widget.insert('end', text) + self.checkParam(widget, 'startline', 200, expected='') + self.checkParam(widget, 'startline', -10, expected='') + self.checkInvalidParam(widget, 'startline', 'spam', + errmsg='expected integer but got "spam"') + self.checkParam(widget, 'startline', 10) + self.checkParam(widget, 'endline', 50) + self.checkInvalidParam(widget, 'startline', 70, + errmsg='-startline must be less than or equal to -endline') + + def test_configure_state(self): + widget = self.create() + self.checkEnumParam(widget, 'state', 'disabled', 'normal') + + def test_configure_tabs(self): + widget = self.create() 
+ self.checkParam(widget, 'tabs', (10.2, 20.7, '1i', '2i')) + self.checkParam(widget, 'tabs', '10.2 20.7 1i 2i', + expected=('10.2', '20.7', '1i', '2i')) + self.checkParam(widget, 'tabs', '2c left 4c 6c center', + expected=('2c', 'left', '4c', '6c', 'center')) + self.checkInvalidParam(widget, 'tabs', 'spam', + errmsg='bad screen distance "spam"') + + def test_configure_tabstyle(self): + widget = self.create() + self.checkEnumParam(widget, 'tabstyle', 'tabular', 'wordprocessor') + + def test_configure_undo(self): + widget = self.create() + self.checkBooleanParam(widget, 'undo') + + def test_configure_width(self): + widget = self.create() + self.checkIntegerParam(widget, 'width', 402) + self.checkParam(widget, 'width', -402, expected=1) + self.checkParam(widget, 'width', 0, expected=1) + + def test_configure_wrap(self): + widget = self.create() + self.checkEnumParam(widget, 'wrap', 'char', 'none', 'word') + + def test_bbox(self): + widget = self.create() + self.assertIsBoundingBox(widget.bbox('1.1')) + self.assertIsNone(widget.bbox('end')) + self.assertRaises(tkinter.TclError, widget.bbox, 'noindex') + self.assertRaises(tkinter.TclError, widget.bbox, None) + self.assertRaises(TypeError, widget.bbox) + self.assertRaises(TypeError, widget.bbox, '1.1', 'end') + + +@add_standard_options(PixelSizeTests, StandardOptionsTests) +class CanvasTest(AbstractWidgetTest, unittest.TestCase): + OPTIONS = ( + 'background', 'borderwidth', + 'closeenough', 'confine', 'cursor', 'height', + 'highlightbackground', 'highlightcolor', 'highlightthickness', + 'insertbackground', 'insertborderwidth', + 'insertofftime', 'insertontime', 'insertwidth', + 'offset', 'relief', 'scrollregion', + 'selectbackground', 'selectborderwidth', 'selectforeground', + 'state', 'takefocus', + 'xscrollcommand', 'xscrollincrement', + 'yscrollcommand', 'yscrollincrement', 'width', + ) + + _conv_pixels = round + _stringify = True + + def create(self, **kwargs): + return tkinter.Canvas(self.root, **kwargs) + + def test_configure_closeenough(self): + widget = self.create() + self.checkFloatParam(widget, 'closeenough', 24, 2.4, 3.6, -3, + conv=float) + + def test_configure_confine(self): + widget = self.create() + self.checkBooleanParam(widget, 'confine') + + def test_configure_offset(self): + widget = self.create() + self.assertEqual(widget['offset'], '0,0') + self.checkParams(widget, 'offset', + 'n', 'ne', 'e', 'se', 's', 'sw', 'w', 'nw', 'center') + self.checkParam(widget, 'offset', '10,20') + self.checkParam(widget, 'offset', '#5,6') + self.checkInvalidParam(widget, 'offset', 'spam') + + def test_configure_scrollregion(self): + widget = self.create() + self.checkParam(widget, 'scrollregion', '0 0 200 150') + self.checkParam(widget, 'scrollregion', (0, 0, 200, 150), + expected='0 0 200 150') + self.checkParam(widget, 'scrollregion', '') + self.checkInvalidParam(widget, 'scrollregion', 'spam', + errmsg='bad scrollRegion "spam"') + self.checkInvalidParam(widget, 'scrollregion', (0, 0, 200, 'spam')) + self.checkInvalidParam(widget, 'scrollregion', (0, 0, 200)) + self.checkInvalidParam(widget, 'scrollregion', (0, 0, 200, 150, 0)) + + def test_configure_state(self): + widget = self.create() + self.checkEnumParam(widget, 'state', 'disabled', 'normal', + errmsg='bad state value "{}": must be normal or disabled') + + def test_configure_xscrollincrement(self): + widget = self.create() + self.checkPixelsParam(widget, 'xscrollincrement', + 40, 0, 41.2, 43.6, -40, '0.5i') + + def test_configure_yscrollincrement(self): + widget = self.create() + 
self.checkPixelsParam(widget, 'yscrollincrement', + 10, 0, 11.2, 13.6, -10, '0.1i') + + @requires_tcl(8, 6) + def test_moveto(self): + widget = self.create() + i1 = widget.create_rectangle(1, 1, 20, 20, tags='group') + i2 = widget.create_rectangle(30, 30, 50, 70, tags='group') + x1, y1, _, _ = widget.bbox(i1) + x2, y2, _, _ = widget.bbox(i2) + widget.moveto('group', 200, 100) + x1_2, y1_2, _, _ = widget.bbox(i1) + x2_2, y2_2, _, _ = widget.bbox(i2) + self.assertEqual(x1_2, 200) + self.assertEqual(y1_2, 100) + self.assertEqual(x2 - x1, x2_2 - x1_2) + self.assertEqual(y2 - y1, y2_2 - y1_2) + widget.tag_lower(i2, i1) + widget.moveto('group', y=50) + x1_3, y1_3, _, _ = widget.bbox(i1) + x2_3, y2_3, _, _ = widget.bbox(i2) + self.assertEqual(y2_3, 50) + self.assertEqual(x2_3, x2_2) + self.assertEqual(x2_2 - x1_2, x2_3 - x1_3) + self.assertEqual(y2_2 - y1_2, y2_3 - y1_3) + + +@add_standard_options(IntegerSizeTests, StandardOptionsTests) +class ListboxTest(AbstractWidgetTest, unittest.TestCase): + OPTIONS = ( + 'activestyle', 'background', 'borderwidth', 'cursor', + 'disabledforeground', 'exportselection', + 'font', 'foreground', 'height', + 'highlightbackground', 'highlightcolor', 'highlightthickness', + 'justify', 'listvariable', 'relief', + 'selectbackground', 'selectborderwidth', 'selectforeground', + 'selectmode', 'setgrid', 'state', + 'takefocus', 'width', 'xscrollcommand', 'yscrollcommand', + ) + + def create(self, **kwargs): + return tkinter.Listbox(self.root, **kwargs) + + def test_configure_activestyle(self): + widget = self.create() + self.checkEnumParam(widget, 'activestyle', + 'dotbox', 'none', 'underline') + + test_configure_justify = requires_tcl(8, 6, 5)(StandardOptionsTests.test_configure_justify) + + def test_configure_listvariable(self): + widget = self.create() + var = tkinter.DoubleVar(self.root) + self.checkVariableParam(widget, 'listvariable', var) + + def test_configure_selectmode(self): + widget = self.create() + self.checkParam(widget, 'selectmode', 'single') + self.checkParam(widget, 'selectmode', 'browse') + self.checkParam(widget, 'selectmode', 'multiple') + self.checkParam(widget, 'selectmode', 'extended') + + def test_configure_state(self): + widget = self.create() + self.checkEnumParam(widget, 'state', 'disabled', 'normal') + + def test_itemconfigure(self): + widget = self.create() + with self.assertRaisesRegex(TclError, 'item number "0" out of range'): + widget.itemconfigure(0) + colors = 'red orange yellow green blue white violet'.split() + widget.insert('end', *colors) + for i, color in enumerate(colors): + widget.itemconfigure(i, background=color) + with self.assertRaises(TypeError): + widget.itemconfigure() + with self.assertRaisesRegex(TclError, 'bad listbox index "red"'): + widget.itemconfigure('red') + self.assertEqual(widget.itemconfigure(0, 'background'), + ('background', 'background', 'Background', '', 'red')) + self.assertEqual(widget.itemconfigure('end', 'background'), + ('background', 'background', 'Background', '', 'violet')) + self.assertEqual(widget.itemconfigure('@0,0', 'background'), + ('background', 'background', 'Background', '', 'red')) + + d = widget.itemconfigure(0) + self.assertIsInstance(d, dict) + for k, v in d.items(): + self.assertIn(len(v), (2, 5)) + if len(v) == 5: + self.assertEqual(v, widget.itemconfigure(0, k)) + self.assertEqual(v[4], widget.itemcget(0, k)) + + def check_itemconfigure(self, name, value): + widget = self.create() + widget.insert('end', 'a', 'b', 'c', 'd') + widget.itemconfigure(0, **{name: value}) + 
self.assertEqual(widget.itemconfigure(0, name)[4], value) + self.assertEqual(widget.itemcget(0, name), value) + with self.assertRaisesRegex(TclError, 'unknown color name "spam"'): + widget.itemconfigure(0, **{name: 'spam'}) + + def test_itemconfigure_background(self): + self.check_itemconfigure('background', '#ff0000') + + def test_itemconfigure_bg(self): + self.check_itemconfigure('bg', '#ff0000') + + def test_itemconfigure_fg(self): + self.check_itemconfigure('fg', '#110022') + + def test_itemconfigure_foreground(self): + self.check_itemconfigure('foreground', '#110022') + + def test_itemconfigure_selectbackground(self): + self.check_itemconfigure('selectbackground', '#110022') + + def test_itemconfigure_selectforeground(self): + self.check_itemconfigure('selectforeground', '#654321') + + def test_box(self): + lb = self.create() + lb.insert(0, *('el%d' % i for i in range(8))) + lb.pack() + self.assertIsBoundingBox(lb.bbox(0)) + self.assertIsNone(lb.bbox(-1)) + self.assertIsNone(lb.bbox(10)) + self.assertRaises(TclError, lb.bbox, 'noindex') + self.assertRaises(TclError, lb.bbox, None) + self.assertRaises(TypeError, lb.bbox) + self.assertRaises(TypeError, lb.bbox, 0, 1) + + def test_curselection(self): + lb = self.create() + lb.insert(0, *('el%d' % i for i in range(8))) + lb.selection_clear(0, tkinter.END) + lb.selection_set(2, 4) + lb.selection_set(6) + self.assertEqual(lb.curselection(), (2, 3, 4, 6)) + self.assertRaises(TypeError, lb.curselection, 0) + + def test_get(self): + lb = self.create() + lb.insert(0, *('el%d' % i for i in range(8))) + self.assertEqual(lb.get(0), 'el0') + self.assertEqual(lb.get(3), 'el3') + self.assertEqual(lb.get('end'), 'el7') + self.assertEqual(lb.get(8), '') + self.assertEqual(lb.get(-1), '') + self.assertEqual(lb.get(3, 5), ('el3', 'el4', 'el5')) + self.assertEqual(lb.get(5, 'end'), ('el5', 'el6', 'el7')) + self.assertEqual(lb.get(5, 0), ()) + self.assertEqual(lb.get(0, 0), ('el0',)) + self.assertRaises(TclError, lb.get, 'noindex') + self.assertRaises(TclError, lb.get, None) + self.assertRaises(TypeError, lb.get) + self.assertRaises(TclError, lb.get, 'end', 'noindex') + self.assertRaises(TypeError, lb.get, 1, 2, 3) + self.assertRaises(TclError, lb.get, 2.4) + + +@add_standard_options(PixelSizeTests, StandardOptionsTests) +class ScaleTest(AbstractWidgetTest, unittest.TestCase): + OPTIONS = ( + 'activebackground', 'background', 'bigincrement', 'borderwidth', + 'command', 'cursor', 'digits', 'font', 'foreground', 'from', + 'highlightbackground', 'highlightcolor', 'highlightthickness', + 'label', 'length', 'orient', 'relief', + 'repeatdelay', 'repeatinterval', + 'resolution', 'showvalue', 'sliderlength', 'sliderrelief', 'state', + 'takefocus', 'tickinterval', 'to', 'troughcolor', 'variable', 'width', + ) + default_orient = 'vertical' + + def create(self, **kwargs): + return tkinter.Scale(self.root, **kwargs) + + def test_configure_bigincrement(self): + widget = self.create() + self.checkFloatParam(widget, 'bigincrement', 12.4, 23.6, -5) + + def test_configure_digits(self): + widget = self.create() + self.checkIntegerParam(widget, 'digits', 5, 0) + + def test_configure_from(self): + widget = self.create() + conv = float if get_tk_patchlevel() >= (8, 6, 10) else float_round + self.checkFloatParam(widget, 'from', 100, 14.9, 15.1, conv=conv) + + def test_configure_label(self): + widget = self.create() + self.checkParam(widget, 'label', 'any string') + self.checkParam(widget, 'label', '') + + def test_configure_length(self): + widget = self.create() + 
self.checkPixelsParam(widget, 'length', 130, 131.2, 135.6, '5i') + + def test_configure_resolution(self): + widget = self.create() + self.checkFloatParam(widget, 'resolution', 4.2, 0, 6.7, -2) + + def test_configure_showvalue(self): + widget = self.create() + self.checkBooleanParam(widget, 'showvalue') + + def test_configure_sliderlength(self): + widget = self.create() + self.checkPixelsParam(widget, 'sliderlength', + 10, 11.2, 15.6, -3, '3m') + + def test_configure_sliderrelief(self): + widget = self.create() + self.checkReliefParam(widget, 'sliderrelief') + + def test_configure_tickinterval(self): + widget = self.create() + self.checkFloatParam(widget, 'tickinterval', 1, 4.3, 7.6, 0, + conv=float_round) + self.checkParam(widget, 'tickinterval', -2, expected=2, + conv=float_round) + + def test_configure_to(self): + widget = self.create() + self.checkFloatParam(widget, 'to', 300, 14.9, 15.1, -10, + conv=float_round) + + +@add_standard_options(PixelSizeTests, StandardOptionsTests) +class ScrollbarTest(AbstractWidgetTest, unittest.TestCase): + OPTIONS = ( + 'activebackground', 'activerelief', + 'background', 'borderwidth', + 'command', 'cursor', 'elementborderwidth', + 'highlightbackground', 'highlightcolor', 'highlightthickness', + 'jump', 'orient', 'relief', + 'repeatdelay', 'repeatinterval', + 'takefocus', 'troughcolor', 'width', + ) + _conv_pixels = round + _stringify = True + default_orient = 'vertical' + + def create(self, **kwargs): + return tkinter.Scrollbar(self.root, **kwargs) + + def test_configure_activerelief(self): + widget = self.create() + self.checkReliefParam(widget, 'activerelief') + + def test_configure_elementborderwidth(self): + widget = self.create() + self.checkPixelsParam(widget, 'elementborderwidth', 4.3, 5.6, -2, '1m') + + def test_configure_orient(self): + widget = self.create() + self.checkEnumParam(widget, 'orient', 'vertical', 'horizontal', + errmsg='bad orientation "{}": must be vertical or horizontal') + + def test_activate(self): + sb = self.create() + for e in ('arrow1', 'slider', 'arrow2'): + sb.activate(e) + self.assertEqual(sb.activate(), e) + sb.activate('') + self.assertIsNone(sb.activate()) + self.assertRaises(TypeError, sb.activate, 'arrow1', 'arrow2') + + def test_set(self): + sb = self.create() + sb.set(0.2, 0.4) + self.assertEqual(sb.get(), (0.2, 0.4)) + self.assertRaises(TclError, sb.set, 'abc', 'def') + self.assertRaises(TclError, sb.set, 0.6, 'def') + self.assertRaises(TclError, sb.set, 0.6, None) + self.assertRaises(TypeError, sb.set, 0.6) + self.assertRaises(TypeError, sb.set, 0.6, 0.7, 0.8) + + +@add_standard_options(StandardOptionsTests) +class PanedWindowTest(AbstractWidgetTest, unittest.TestCase): + OPTIONS = ( + 'background', 'borderwidth', 'cursor', + 'handlepad', 'handlesize', 'height', + 'opaqueresize', 'orient', + 'proxybackground', 'proxyborderwidth', 'proxyrelief', + 'relief', + 'sashcursor', 'sashpad', 'sashrelief', 'sashwidth', + 'showhandle', 'width', + ) + default_orient = 'horizontal' + + def create(self, **kwargs): + return tkinter.PanedWindow(self.root, **kwargs) + + def test_configure_handlepad(self): + widget = self.create() + self.checkPixelsParam(widget, 'handlepad', 5, 6.4, 7.6, -3, '1m') + + def test_configure_handlesize(self): + widget = self.create() + self.checkPixelsParam(widget, 'handlesize', 8, 9.4, 10.6, -3, '2m', + conv=False) + + def test_configure_height(self): + widget = self.create() + self.checkPixelsParam(widget, 'height', 100, 101.2, 102.6, -100, 0, '1i', + conv=False) + + def 
test_configure_opaqueresize(self): + widget = self.create() + self.checkBooleanParam(widget, 'opaqueresize') + + @requires_tcl(8, 6, 5) + def test_configure_proxybackground(self): + widget = self.create() + self.checkColorParam(widget, 'proxybackground') + + @requires_tcl(8, 6, 5) + def test_configure_proxyborderwidth(self): + widget = self.create() + self.checkPixelsParam(widget, 'proxyborderwidth', + 0, 1.3, 2.9, 6, -2, '10p', + conv=False) + + @requires_tcl(8, 6, 5) + def test_configure_proxyrelief(self): + widget = self.create() + self.checkReliefParam(widget, 'proxyrelief') + + def test_configure_sashcursor(self): + widget = self.create() + self.checkCursorParam(widget, 'sashcursor') + + def test_configure_sashpad(self): + widget = self.create() + self.checkPixelsParam(widget, 'sashpad', 8, 1.3, 2.6, -2, '2m') + + def test_configure_sashrelief(self): + widget = self.create() + self.checkReliefParam(widget, 'sashrelief') + + def test_configure_sashwidth(self): + widget = self.create() + self.checkPixelsParam(widget, 'sashwidth', 10, 11.1, 15.6, -3, '1m', + conv=False) + + def test_configure_showhandle(self): + widget = self.create() + self.checkBooleanParam(widget, 'showhandle') + + def test_configure_width(self): + widget = self.create() + self.checkPixelsParam(widget, 'width', 402, 403.4, 404.6, -402, 0, '5i', + conv=False) + + def create2(self): + p = self.create() + b = tkinter.Button(p) + c = tkinter.Button(p) + p.add(b) + p.add(c) + return p, b, c + + def test_paneconfigure(self): + p, b, c = self.create2() + self.assertRaises(TypeError, p.paneconfigure) + d = p.paneconfigure(b) + self.assertIsInstance(d, dict) + for k, v in d.items(): + self.assertEqual(len(v), 5) + self.assertEqual(v, p.paneconfigure(b, k)) + self.assertEqual(v[4], p.panecget(b, k)) + + def check_paneconfigure(self, p, b, name, value, expected): + if not self.wantobjects: + expected = str(expected) + p.paneconfigure(b, **{name: value}) + self.assertEqual(p.paneconfigure(b, name)[4], expected) + self.assertEqual(p.panecget(b, name), expected) + + def check_paneconfigure_bad(self, p, b, name, msg): + with self.assertRaisesRegex(TclError, msg): + p.paneconfigure(b, **{name: 'badValue'}) + + def test_paneconfigure_after(self): + p, b, c = self.create2() + self.check_paneconfigure(p, b, 'after', c, str(c)) + self.check_paneconfigure_bad(p, b, 'after', + 'bad window path name "badValue"') + + def test_paneconfigure_before(self): + p, b, c = self.create2() + self.check_paneconfigure(p, b, 'before', c, str(c)) + self.check_paneconfigure_bad(p, b, 'before', + 'bad window path name "badValue"') + + def test_paneconfigure_height(self): + p, b, c = self.create2() + self.check_paneconfigure(p, b, 'height', 10, 10) + self.check_paneconfigure_bad(p, b, 'height', + 'bad screen distance "badValue"') + + def test_paneconfigure_hide(self): + p, b, c = self.create2() + self.check_paneconfigure(p, b, 'hide', False, 0) + self.check_paneconfigure_bad(p, b, 'hide', + 'expected boolean value but got "badValue"') + + def test_paneconfigure_minsize(self): + p, b, c = self.create2() + self.check_paneconfigure(p, b, 'minsize', 10, 10) + self.check_paneconfigure_bad(p, b, 'minsize', + 'bad screen distance "badValue"') + + def test_paneconfigure_padx(self): + p, b, c = self.create2() + self.check_paneconfigure(p, b, 'padx', 1.3, 1) + self.check_paneconfigure_bad(p, b, 'padx', + 'bad screen distance "badValue"') + + def test_paneconfigure_pady(self): + p, b, c = self.create2() + self.check_paneconfigure(p, b, 'pady', 1.3, 1) + 
self.check_paneconfigure_bad(p, b, 'pady', + 'bad screen distance "badValue"') + + def test_paneconfigure_sticky(self): + p, b, c = self.create2() + self.check_paneconfigure(p, b, 'sticky', 'nsew', 'nesw') + self.check_paneconfigure_bad(p, b, 'sticky', + 'bad stickyness value "badValue": must ' + 'be a string containing zero or more of ' + 'n, e, s, and w') + + def test_paneconfigure_stretch(self): + p, b, c = self.create2() + self.check_paneconfigure(p, b, 'stretch', 'alw', 'always') + self.check_paneconfigure_bad(p, b, 'stretch', + 'bad stretch "badValue": must be ' + 'always, first, last, middle, or never') + + def test_paneconfigure_width(self): + p, b, c = self.create2() + self.check_paneconfigure(p, b, 'width', 10, 10) + self.check_paneconfigure_bad(p, b, 'width', + 'bad screen distance "badValue"') + + +@add_standard_options(StandardOptionsTests) +class MenuTest(AbstractWidgetTest, unittest.TestCase): + OPTIONS = ( + 'activebackground', 'activeborderwidth', 'activeforeground', + 'background', 'borderwidth', 'cursor', + 'disabledforeground', 'font', 'foreground', + 'postcommand', 'relief', 'selectcolor', 'takefocus', + 'tearoff', 'tearoffcommand', 'title', 'type', + ) + _conv_pixels = False + + def create(self, **kwargs): + return tkinter.Menu(self.root, **kwargs) + + def test_configure_postcommand(self): + widget = self.create() + self.checkCommandParam(widget, 'postcommand') + + def test_configure_tearoff(self): + widget = self.create() + self.checkBooleanParam(widget, 'tearoff') + + def test_configure_tearoffcommand(self): + widget = self.create() + self.checkCommandParam(widget, 'tearoffcommand') + + def test_configure_title(self): + widget = self.create() + self.checkParam(widget, 'title', 'any string') + + def test_configure_type(self): + widget = self.create() + self.checkEnumParam( + widget, 'type', + 'normal', 'tearoff', 'menubar', + errmsg='bad type "{}": must be normal, tearoff, or menubar', + ) + + def test_entryconfigure(self): + m1 = self.create() + m1.add_command(label='test') + self.assertRaises(TypeError, m1.entryconfigure) + with self.assertRaisesRegex(TclError, 'bad menu entry index "foo"'): + m1.entryconfigure('foo') + d = m1.entryconfigure(1) + self.assertIsInstance(d, dict) + for k, v in d.items(): + self.assertIsInstance(k, str) + self.assertIsInstance(v, tuple) + self.assertEqual(len(v), 5) + self.assertEqual(v[0], k) + self.assertEqual(m1.entrycget(1, k), v[4]) + m1.destroy() + + def test_entryconfigure_label(self): + m1 = self.create() + m1.add_command(label='test') + self.assertEqual(m1.entrycget(1, 'label'), 'test') + m1.entryconfigure(1, label='changed') + self.assertEqual(m1.entrycget(1, 'label'), 'changed') + + def test_entryconfigure_variable(self): + m1 = self.create() + v1 = tkinter.BooleanVar(self.root) + v2 = tkinter.BooleanVar(self.root) + m1.add_checkbutton(variable=v1, onvalue=True, offvalue=False, + label='Nonsense') + self.assertEqual(str(m1.entrycget(1, 'variable')), str(v1)) + m1.entryconfigure(1, variable=v2) + self.assertEqual(str(m1.entrycget(1, 'variable')), str(v2)) + + +@add_standard_options(PixelSizeTests, StandardOptionsTests) +class MessageTest(AbstractWidgetTest, unittest.TestCase): + OPTIONS = ( + 'anchor', 'aspect', 'background', 'borderwidth', + 'cursor', 'font', 'foreground', + 'highlightbackground', 'highlightcolor', 'highlightthickness', + 'justify', 'padx', 'pady', 'relief', + 'takefocus', 'text', 'textvariable', 'width', + ) + _conv_pad_pixels = False + + def create(self, **kwargs): + return tkinter.Message(self.root, 
**kwargs) + + def test_configure_aspect(self): + widget = self.create() + self.checkIntegerParam(widget, 'aspect', 250, 0, -300) + + +class DefaultRootTest(AbstractDefaultRootTest, unittest.TestCase): + + def test_frame(self): + self._test_widget(tkinter.Frame) + + def test_label(self): + self._test_widget(tkinter.Label) + + +tests_gui = ( + ButtonTest, CanvasTest, CheckbuttonTest, EntryTest, + FrameTest, LabelFrameTest,LabelTest, ListboxTest, + MenubuttonTest, MenuTest, MessageTest, OptionMenuTest, + PanedWindowTest, RadiobuttonTest, ScaleTest, ScrollbarTest, + SpinboxTest, TextTest, ToplevelTest, DefaultRootTest, +) + +if __name__ == '__main__': + unittest.main() diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/test/test_ttk/test_extensions.py python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/test/test_ttk/test_extensions.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/test/test_ttk/test_extensions.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/test/test_ttk/test_extensions.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,328 @@ +import sys +import unittest +import tkinter +from tkinter import ttk +from test.support import requires, gc_collect +from tkinter.test.support import AbstractTkTest, AbstractDefaultRootTest + +requires('gui') + +class LabeledScaleTest(AbstractTkTest, unittest.TestCase): + + def tearDown(self): + self.root.update_idletasks() + super().tearDown() + + def test_widget_destroy(self): + # automatically created variable + x = ttk.LabeledScale(self.root) + var = x._variable._name + x.destroy() + gc_collect() # For PyPy or other GCs. + self.assertRaises(tkinter.TclError, x.tk.globalgetvar, var) + + # manually created variable + myvar = tkinter.DoubleVar(self.root) + name = myvar._name + x = ttk.LabeledScale(self.root, variable=myvar) + x.destroy() + if self.wantobjects: + self.assertEqual(x.tk.globalgetvar(name), myvar.get()) + else: + self.assertEqual(float(x.tk.globalgetvar(name)), myvar.get()) + del myvar + gc_collect() # For PyPy or other GCs. + self.assertRaises(tkinter.TclError, x.tk.globalgetvar, name) + + # checking that the tracing callback is properly removed + myvar = tkinter.IntVar(self.root) + # LabeledScale will start tracing myvar + x = ttk.LabeledScale(self.root, variable=myvar) + x.destroy() + # Unless the tracing callback was removed, creating a new + # LabeledScale with the same var will cause an error now. This + # happens because the variable will be set to (possibly) a new + # value which causes the tracing callback to be called and then + # it tries calling instance attributes not yet defined. 
+ ttk.LabeledScale(self.root, variable=myvar) + if hasattr(sys, 'last_type'): + self.assertNotEqual(sys.last_type, tkinter.TclError) + + def test_initialization(self): + # master passing + master = tkinter.Frame(self.root) + x = ttk.LabeledScale(master) + self.assertEqual(x.master, master) + x.destroy() + + # variable initialization/passing + passed_expected = (('0', 0), (0, 0), (10, 10), + (-1, -1), (sys.maxsize + 1, sys.maxsize + 1), + (2.5, 2), ('2.5', 2)) + for pair in passed_expected: + x = ttk.LabeledScale(self.root, from_=pair[0]) + self.assertEqual(x.value, pair[1]) + x.destroy() + x = ttk.LabeledScale(self.root, from_=None) + self.assertRaises((ValueError, tkinter.TclError), x._variable.get) + x.destroy() + # variable should have its default value set to the from_ value + myvar = tkinter.DoubleVar(self.root, value=20) + x = ttk.LabeledScale(self.root, variable=myvar) + self.assertEqual(x.value, 0) + x.destroy() + # check that it is really using a DoubleVar + x = ttk.LabeledScale(self.root, variable=myvar, from_=0.5) + self.assertEqual(x.value, 0.5) + self.assertEqual(x._variable._name, myvar._name) + x.destroy() + + # widget positionment + def check_positions(scale, scale_pos, label, label_pos): + self.assertEqual(scale.pack_info()['side'], scale_pos) + self.assertEqual(label.place_info()['anchor'], label_pos) + x = ttk.LabeledScale(self.root, compound='top') + check_positions(x.scale, 'bottom', x.label, 'n') + x.destroy() + x = ttk.LabeledScale(self.root, compound='bottom') + check_positions(x.scale, 'top', x.label, 's') + x.destroy() + # invert default positions + x = ttk.LabeledScale(self.root, compound='unknown') + check_positions(x.scale, 'top', x.label, 's') + x.destroy() + x = ttk.LabeledScale(self.root) # take default positions + check_positions(x.scale, 'bottom', x.label, 'n') + x.destroy() + + # extra, and invalid, kwargs + self.assertRaises(tkinter.TclError, ttk.LabeledScale, master, a='b') + + + def test_horizontal_range(self): + lscale = ttk.LabeledScale(self.root, from_=0, to=10) + lscale.pack() + lscale.update() + + linfo_1 = lscale.label.place_info() + prev_xcoord = lscale.scale.coords()[0] + self.assertEqual(prev_xcoord, int(linfo_1['x'])) + # change range to: from -5 to 5. This should change the x coord of + # the scale widget, since 0 is at the middle of the new + # range. 
+ lscale.scale.configure(from_=-5, to=5) + # The following update is needed since the test doesn't use mainloop, + # at the same time this shouldn't affect test outcome + lscale.update() + curr_xcoord = lscale.scale.coords()[0] + self.assertNotEqual(prev_xcoord, curr_xcoord) + # the label widget should have been repositioned too + linfo_2 = lscale.label.place_info() + self.assertEqual(lscale.label['text'], 0 if self.wantobjects else '0') + self.assertEqual(curr_xcoord, int(linfo_2['x'])) + # change the range back + lscale.scale.configure(from_=0, to=10) + self.assertNotEqual(prev_xcoord, curr_xcoord) + self.assertEqual(prev_xcoord, int(linfo_1['x'])) + + lscale.destroy() + + + def test_variable_change(self): + x = ttk.LabeledScale(self.root) + x.pack() + x.update() + + curr_xcoord = x.scale.coords()[0] + newval = x.value + 1 + x.value = newval + # The following update is needed since the test doesn't use mainloop, + # at the same time this shouldn't affect test outcome + x.update() + self.assertEqual(x.value, newval) + self.assertEqual(x.label['text'], + newval if self.wantobjects else str(newval)) + self.assertEqual(float(x.scale.get()), newval) + self.assertGreater(x.scale.coords()[0], curr_xcoord) + self.assertEqual(x.scale.coords()[0], + int(x.label.place_info()['x'])) + + # value outside range + if self.wantobjects: + conv = lambda x: x + else: + conv = int + x.value = conv(x.scale['to']) + 1 # no changes shouldn't happen + x.update() + self.assertEqual(x.value, newval) + self.assertEqual(conv(x.label['text']), newval) + self.assertEqual(float(x.scale.get()), newval) + self.assertEqual(x.scale.coords()[0], + int(x.label.place_info()['x'])) + + # non-integer value + x.value = newval = newval + 1.5 + x.update() + self.assertEqual(x.value, int(newval)) + self.assertEqual(conv(x.label['text']), int(newval)) + self.assertEqual(float(x.scale.get()), newval) + + x.destroy() + + + def test_resize(self): + x = ttk.LabeledScale(self.root) + x.pack(expand=True, fill='both') + gc_collect() # For PyPy or other GCs. + x.update() + + width, height = x.master.winfo_width(), x.master.winfo_height() + width_new, height_new = width * 2, height * 2 + + x.value = 3 + x.update() + x.master.wm_geometry("%dx%d" % (width_new, height_new)) + self.assertEqual(int(x.label.place_info()['x']), + x.scale.coords()[0]) + + # Reset geometry + x.master.wm_geometry("%dx%d" % (width, height)) + x.destroy() + + +class OptionMenuTest(AbstractTkTest, unittest.TestCase): + + def setUp(self): + super().setUp() + self.textvar = tkinter.StringVar(self.root) + + def tearDown(self): + del self.textvar + super().tearDown() + + + def test_widget_destroy(self): + var = tkinter.StringVar(self.root) + optmenu = ttk.OptionMenu(self.root, var) + name = var._name + optmenu.update_idletasks() + optmenu.destroy() + self.assertEqual(optmenu.tk.globalgetvar(name), var.get()) + del var + gc_collect() # For PyPy or other GCs. 
+ self.assertRaises(tkinter.TclError, optmenu.tk.globalgetvar, name) + + + def test_initialization(self): + self.assertRaises(tkinter.TclError, + ttk.OptionMenu, self.root, self.textvar, invalid='thing') + + optmenu = ttk.OptionMenu(self.root, self.textvar, 'b', 'a', 'b') + self.assertEqual(optmenu._variable.get(), 'b') + + self.assertTrue(optmenu['menu']) + self.assertTrue(optmenu['textvariable']) + + optmenu.destroy() + + + def test_menu(self): + items = ('a', 'b', 'c') + default = 'a' + optmenu = ttk.OptionMenu(self.root, self.textvar, default, *items) + found_default = False + for i in range(len(items)): + value = optmenu['menu'].entrycget(i, 'value') + self.assertEqual(value, items[i]) + if value == default: + found_default = True + self.assertTrue(found_default) + optmenu.destroy() + + # default shouldn't be in menu if it is not part of values + default = 'd' + optmenu = ttk.OptionMenu(self.root, self.textvar, default, *items) + curr = None + i = 0 + while True: + last, curr = curr, optmenu['menu'].entryconfigure(i, 'value') + if last == curr: + # no more menu entries + break + self.assertNotEqual(curr, default) + i += 1 + self.assertEqual(i, len(items)) + + # check that variable is updated correctly + optmenu.pack() + gc_collect() # For PyPy or other GCs. + optmenu['menu'].invoke(0) + self.assertEqual(optmenu._variable.get(), items[0]) + + # changing to an invalid index shouldn't change the variable + self.assertRaises(tkinter.TclError, optmenu['menu'].invoke, -1) + self.assertEqual(optmenu._variable.get(), items[0]) + + optmenu.destroy() + + # specifying a callback + success = [] + def cb_test(item): + self.assertEqual(item, items[1]) + success.append(True) + optmenu = ttk.OptionMenu(self.root, self.textvar, 'a', command=cb_test, + *items) + optmenu['menu'].invoke(1) + if not success: + self.fail("Menu callback not invoked") + + optmenu.destroy() + + def test_unique_radiobuttons(self): + # check that radiobuttons are unique across instances (bpo25684) + items = ('a', 'b', 'c') + default = 'a' + optmenu = ttk.OptionMenu(self.root, self.textvar, default, *items) + textvar2 = tkinter.StringVar(self.root) + optmenu2 = ttk.OptionMenu(self.root, textvar2, default, *items) + optmenu.pack() + optmenu2.pack() + optmenu['menu'].invoke(1) + optmenu2['menu'].invoke(2) + optmenu_stringvar_name = optmenu['menu'].entrycget(0, 'variable') + optmenu2_stringvar_name = optmenu2['menu'].entrycget(0, 'variable') + self.assertNotEqual(optmenu_stringvar_name, + optmenu2_stringvar_name) + self.assertEqual(self.root.tk.globalgetvar(optmenu_stringvar_name), + items[1]) + self.assertEqual(self.root.tk.globalgetvar(optmenu2_stringvar_name), + items[2]) + + optmenu.destroy() + optmenu2.destroy() + + def test_trace_variable(self): + # prior to bpo45160, tracing a variable would cause the callback to be made twice + success = [] + items = ('a', 'b', 'c') + textvar = tkinter.StringVar(self.root) + def cb_test(*args): + success.append(textvar.get()) + optmenu = ttk.OptionMenu(self.root, textvar, "a", *items) + optmenu.pack() + cb_name = textvar.trace_add("write", cb_test) + optmenu['menu'].invoke(1) + self.assertEqual(success, ['b']) + self.assertEqual(textvar.get(), 'b') + textvar.trace_remove("write", cb_name) + optmenu.destroy() + + +class DefaultRootTest(AbstractDefaultRootTest, unittest.TestCase): + + def test_labeledscale(self): + self._test_widget(ttk.LabeledScale) + + +if __name__ == "__main__": + unittest.main() diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/test/test_ttk/test_style.py 
python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/test/test_ttk/test_style.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/test/test_ttk/test_style.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/test/test_ttk/test_style.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,181 @@ +import unittest +import sys +import tkinter +from tkinter import ttk +from test import support +from test.support import requires +from tkinter.test.support import AbstractTkTest, get_tk_patchlevel + +requires('gui') + +CLASS_NAMES = [ + '.', 'ComboboxPopdownFrame', 'Heading', + 'Horizontal.TProgressbar', 'Horizontal.TScale', 'Item', 'Sash', + 'TButton', 'TCheckbutton', 'TCombobox', 'TEntry', + 'TLabelframe', 'TLabelframe.Label', 'TMenubutton', + 'TNotebook', 'TNotebook.Tab', 'Toolbutton', 'TProgressbar', + 'TRadiobutton', 'Treeview', 'TScale', 'TScrollbar', 'TSpinbox', + 'Vertical.TProgressbar', 'Vertical.TScale' +] + +class StyleTest(AbstractTkTest, unittest.TestCase): + + def setUp(self): + super().setUp() + self.style = ttk.Style(self.root) + + + def test_configure(self): + style = self.style + style.configure('TButton', background='yellow') + self.assertEqual(style.configure('TButton', 'background'), + 'yellow') + self.assertIsInstance(style.configure('TButton'), dict) + + + def test_map(self): + style = self.style + + # Single state + for states in ['active'], [('active',)]: + with self.subTest(states=states): + style.map('TButton', background=[(*states, 'white')]) + expected = [('active', 'white')] + self.assertEqual(style.map('TButton', 'background'), expected) + m = style.map('TButton') + self.assertIsInstance(m, dict) + self.assertEqual(m['background'], expected) + + # Multiple states + for states in ['pressed', '!disabled'], ['pressed !disabled'], [('pressed', '!disabled')]: + with self.subTest(states=states): + style.map('TButton', background=[(*states, 'black')]) + expected = [('pressed', '!disabled', 'black')] + self.assertEqual(style.map('TButton', 'background'), expected) + m = style.map('TButton') + self.assertIsInstance(m, dict) + self.assertEqual(m['background'], expected) + + # Default state + for states in [], [''], [()]: + with self.subTest(states=states): + style.map('TButton', background=[(*states, 'grey')]) + expected = [('grey',)] + self.assertEqual(style.map('TButton', 'background'), expected) + m = style.map('TButton') + self.assertIsInstance(m, dict) + self.assertEqual(m['background'], expected) + + + def test_lookup(self): + style = self.style + style.configure('TButton', background='yellow') + style.map('TButton', background=[('active', 'background', 'blue')]) + + self.assertEqual(style.lookup('TButton', 'background'), 'yellow') + self.assertEqual(style.lookup('TButton', 'background', + ['active', 'background']), 'blue') + self.assertEqual(style.lookup('TButton', 'optionnotdefined', + default='iknewit'), 'iknewit') + + + def test_layout(self): + style = self.style + self.assertRaises(tkinter.TclError, style.layout, 'NotALayout') + tv_style = style.layout('Treeview') + + # "erase" Treeview layout + style.layout('Treeview', '') + self.assertEqual(style.layout('Treeview'), + [('null', {'sticky': 'nswe'})] + ) + + # restore layout + style.layout('Treeview', tv_style) + self.assertEqual(style.layout('Treeview'), tv_style) + + # should return a list + self.assertIsInstance(style.layout('TButton'), list) + + # correct layout, but "option" doesn't exist as option + self.assertRaises(tkinter.TclError, style.layout, 'Treeview', + [('name', 
{'option': 'inexistent'})]) + + + def test_theme_use(self): + self.assertRaises(tkinter.TclError, self.style.theme_use, + 'nonexistingname') + + curr_theme = self.style.theme_use() + new_theme = None + for theme in self.style.theme_names(): + if theme != curr_theme: + new_theme = theme + self.style.theme_use(theme) + break + else: + # just one theme available, can't go on with tests + return + + self.assertFalse(curr_theme == new_theme) + self.assertFalse(new_theme != self.style.theme_use()) + + self.style.theme_use(curr_theme) + + + def test_configure_custom_copy(self): + style = self.style + + curr_theme = self.style.theme_use() + self.addCleanup(self.style.theme_use, curr_theme) + for theme in self.style.theme_names(): + self.style.theme_use(theme) + for name in CLASS_NAMES: + default = style.configure(name) + if not default: + continue + with self.subTest(theme=theme, name=name): + if support.verbose >= 2: + print('configure', theme, name, default) + if (theme in ('vista', 'xpnative') + and sys.getwindowsversion()[:2] == (6, 1)): + # Fails on the Windows 7 buildbot + continue + newname = f'C.{name}' + self.assertEqual(style.configure(newname), None) + style.configure(newname, **default) + self.assertEqual(style.configure(newname), default) + for key, value in default.items(): + self.assertEqual(style.configure(newname, key), value) + + + def test_map_custom_copy(self): + style = self.style + + curr_theme = self.style.theme_use() + self.addCleanup(self.style.theme_use, curr_theme) + for theme in self.style.theme_names(): + self.style.theme_use(theme) + for name in CLASS_NAMES: + default = style.map(name) + if not default: + continue + with self.subTest(theme=theme, name=name): + if support.verbose >= 2: + print('map', theme, name, default) + if (theme in ('vista', 'xpnative') + and sys.getwindowsversion()[:2] == (6, 1)): + # Fails on the Windows 7 buildbot + continue + newname = f'C.{name}' + self.assertEqual(style.map(newname), {}) + style.map(newname, **default) + if theme == 'alt' and name == '.' 
and get_tk_patchlevel() < (8, 6, 1): + default['embossed'] = [('disabled', '1')] + self.assertEqual(style.map(newname), default) + for key, value in default.items(): + self.assertEqual(style.map(newname, key), value) + + +if __name__ == "__main__": + unittest.main() diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/test/test_ttk/test_widgets.py python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/test/test_ttk/test_widgets.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/test/test_ttk/test_widgets.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/test/test_ttk/test_widgets.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,1858 @@ +import unittest +import tkinter +from tkinter import ttk, TclError +from test.support import requires, gc_collect +import sys + +from test.test_ttk_textonly import MockTclObj +from tkinter.test.support import (AbstractTkTest, tcl_version, get_tk_patchlevel, + simulate_mouse_click, AbstractDefaultRootTest) +from tkinter.test.widget_tests import (add_standard_options, + AbstractWidgetTest, StandardOptionsTests, IntegerSizeTests, PixelSizeTests, + setUpModule) + +requires('gui') + + +class StandardTtkOptionsTests(StandardOptionsTests): + + def test_configure_class(self): + widget = self.create() + self.assertEqual(widget['class'], '') + errmsg='attempt to change read-only option' + if get_tk_patchlevel() < (8, 6, 0, 'beta', 3): + errmsg='Attempt to change read-only option' + self.checkInvalidParam(widget, 'class', 'Foo', errmsg=errmsg) + widget2 = self.create(class_='Foo') + self.assertEqual(widget2['class'], 'Foo') + + def test_configure_padding(self): + widget = self.create() + self.checkParam(widget, 'padding', 0, expected=('0',)) + self.checkParam(widget, 'padding', 5, expected=('5',)) + self.checkParam(widget, 'padding', (5, 6), expected=('5', '6')) + self.checkParam(widget, 'padding', (5, 6, 7), + expected=('5', '6', '7')) + self.checkParam(widget, 'padding', (5, 6, 7, 8), + expected=('5', '6', '7', '8')) + self.checkParam(widget, 'padding', ('5p', '6p', '7p', '8p')) + self.checkParam(widget, 'padding', (), expected='') + + def test_configure_style(self): + widget = self.create() + self.assertEqual(widget['style'], '') + errmsg = 'Layout Foo not found' + if hasattr(self, 'default_orient'): + errmsg = ('Layout %s.Foo not found' % + getattr(self, 'default_orient').title()) + self.checkInvalidParam(widget, 'style', 'Foo', + errmsg=errmsg) + widget2 = self.create(class_='Foo') + self.assertEqual(widget2['class'], 'Foo') + # XXX + pass + + +class WidgetTest(AbstractTkTest, unittest.TestCase): + """Tests methods available in every ttk widget.""" + + def setUp(self): + super().setUp() + self.widget = ttk.Button(self.root, width=0, text="Text") + self.widget.pack() + + def test_identify(self): + self.widget.update() + self.assertEqual(self.widget.identify( + int(self.widget.winfo_width() / 2), + int(self.widget.winfo_height() / 2) + ), "label") + self.assertEqual(self.widget.identify(-1, -1), "") + + self.assertRaises(tkinter.TclError, self.widget.identify, None, 5) + self.assertRaises(tkinter.TclError, self.widget.identify, 5, None) + self.assertRaises(tkinter.TclError, self.widget.identify, 5, '') + + def test_widget_state(self): + # XXX not sure about the portability of all these tests + self.assertEqual(self.widget.state(), ()) + self.assertEqual(self.widget.instate(['!disabled']), True) + + # changing from !disabled to disabled + self.assertEqual(self.widget.state(['disabled']), ('!disabled', )) + # no state 
change + self.assertEqual(self.widget.state(['disabled']), ()) + # change back to !disable but also active + self.assertEqual(self.widget.state(['!disabled', 'active']), + ('!active', 'disabled')) + # no state changes, again + self.assertEqual(self.widget.state(['!disabled', 'active']), ()) + self.assertEqual(self.widget.state(['active', '!disabled']), ()) + + def test_cb(arg1, **kw): + return arg1, kw + self.assertEqual(self.widget.instate(['!disabled'], + test_cb, "hi", **{"msg": "there"}), + ('hi', {'msg': 'there'})) + + # attempt to set invalid statespec + currstate = self.widget.state() + self.assertRaises(tkinter.TclError, self.widget.instate, + ['badstate']) + self.assertRaises(tkinter.TclError, self.widget.instate, + ['disabled', 'badstate']) + # verify that widget didn't change its state + self.assertEqual(currstate, self.widget.state()) + + # ensuring that passing None as state doesn't modify current state + self.widget.state(['active', '!disabled']) + self.assertEqual(self.widget.state(), ('active', )) + + +class AbstractToplevelTest(AbstractWidgetTest, PixelSizeTests): + _conv_pixels = False + + +@add_standard_options(StandardTtkOptionsTests) +class FrameTest(AbstractToplevelTest, unittest.TestCase): + OPTIONS = ( + 'borderwidth', 'class', 'cursor', 'height', + 'padding', 'relief', 'style', 'takefocus', + 'width', + ) + + def create(self, **kwargs): + return ttk.Frame(self.root, **kwargs) + + +@add_standard_options(StandardTtkOptionsTests) +class LabelFrameTest(AbstractToplevelTest, unittest.TestCase): + OPTIONS = ( + 'borderwidth', 'class', 'cursor', 'height', + 'labelanchor', 'labelwidget', + 'padding', 'relief', 'style', 'takefocus', + 'text', 'underline', 'width', + ) + + def create(self, **kwargs): + return ttk.LabelFrame(self.root, **kwargs) + + def test_configure_labelanchor(self): + widget = self.create() + self.checkEnumParam(widget, 'labelanchor', + 'e', 'en', 'es', 'n', 'ne', 'nw', 's', 'se', 'sw', 'w', 'wn', 'ws', + errmsg='Bad label anchor specification {}') + self.checkInvalidParam(widget, 'labelanchor', 'center') + + def test_configure_labelwidget(self): + widget = self.create() + label = ttk.Label(self.root, text='Mupp', name='foo') + self.checkParam(widget, 'labelwidget', label, expected='.foo') + label.destroy() + + +class AbstractLabelTest(AbstractWidgetTest): + + def checkImageParam(self, widget, name): + image = tkinter.PhotoImage(master=self.root, name='image1') + image2 = tkinter.PhotoImage(master=self.root, name='image2') + self.checkParam(widget, name, image, expected=('image1',)) + self.checkParam(widget, name, 'image1', expected=('image1',)) + self.checkParam(widget, name, (image,), expected=('image1',)) + self.checkParam(widget, name, (image, 'active', image2), + expected=('image1', 'active', 'image2')) + self.checkParam(widget, name, 'image1 active image2', + expected=('image1', 'active', 'image2')) + self.checkInvalidParam(widget, name, 'spam', + errmsg='image "spam" doesn\'t exist') + + def test_configure_compound(self): + options = 'none text image center top bottom left right'.split() + errmsg = ( + 'bad compound "{}": must be' + f' {", ".join(options[:-1])}, or {options[-1]}' + ) + widget = self.create() + self.checkEnumParam(widget, 'compound', *options, errmsg=errmsg) + + def test_configure_state(self): + widget = self.create() + self.checkParams(widget, 'state', 'active', 'disabled', 'normal') + + def test_configure_width(self): + widget = self.create() + self.checkParams(widget, 'width', 402, -402, 0) + + 
+@add_standard_options(StandardTtkOptionsTests) +class LabelTest(AbstractLabelTest, unittest.TestCase): + OPTIONS = ( + 'anchor', 'background', 'borderwidth', + 'class', 'compound', 'cursor', 'font', 'foreground', + 'image', 'justify', 'padding', 'relief', 'state', 'style', + 'takefocus', 'text', 'textvariable', + 'underline', 'width', 'wraplength', + ) + _conv_pixels = False + + def create(self, **kwargs): + return ttk.Label(self.root, **kwargs) + + def test_configure_font(self): + widget = self.create() + self.checkParam(widget, 'font', + '-Adobe-Helvetica-Medium-R-Normal--*-120-*-*-*-*-*-*') + + +@add_standard_options(StandardTtkOptionsTests) +class ButtonTest(AbstractLabelTest, unittest.TestCase): + OPTIONS = ( + 'class', 'command', 'compound', 'cursor', 'default', + 'image', 'padding', 'state', 'style', + 'takefocus', 'text', 'textvariable', + 'underline', 'width', + ) + + def create(self, **kwargs): + return ttk.Button(self.root, **kwargs) + + def test_configure_default(self): + widget = self.create() + self.checkEnumParam(widget, 'default', 'normal', 'active', 'disabled') + + def test_invoke(self): + success = [] + btn = ttk.Button(self.root, command=lambda: success.append(1)) + btn.invoke() + self.assertTrue(success) + + +@add_standard_options(StandardTtkOptionsTests) +class CheckbuttonTest(AbstractLabelTest, unittest.TestCase): + OPTIONS = ( + 'class', 'command', 'compound', 'cursor', + 'image', + 'offvalue', 'onvalue', + 'padding', 'state', 'style', + 'takefocus', 'text', 'textvariable', + 'underline', 'variable', 'width', + ) + + def create(self, **kwargs): + return ttk.Checkbutton(self.root, **kwargs) + + def test_configure_offvalue(self): + widget = self.create() + self.checkParams(widget, 'offvalue', 1, 2.3, '', 'any string') + + def test_configure_onvalue(self): + widget = self.create() + self.checkParams(widget, 'onvalue', 1, 2.3, '', 'any string') + + def test_invoke(self): + success = [] + def cb_test(): + success.append(1) + return "cb test called" + + cbtn = ttk.Checkbutton(self.root, command=cb_test) + # the variable automatically created by ttk.Checkbutton is actually + # undefined till we invoke the Checkbutton + self.assertEqual(cbtn.state(), ('alternate', )) + self.assertRaises(tkinter.TclError, cbtn.tk.globalgetvar, + cbtn['variable']) + + res = cbtn.invoke() + self.assertEqual(res, "cb test called") + self.assertEqual(cbtn['onvalue'], + cbtn.tk.globalgetvar(cbtn['variable'])) + self.assertTrue(success) + + cbtn['command'] = '' + res = cbtn.invoke() + self.assertFalse(str(res)) + self.assertLessEqual(len(success), 1) + self.assertEqual(cbtn['offvalue'], + cbtn.tk.globalgetvar(cbtn['variable'])) + + +@add_standard_options(IntegerSizeTests, StandardTtkOptionsTests) +class EntryTest(AbstractWidgetTest, unittest.TestCase): + OPTIONS = ( + 'background', 'class', 'cursor', + 'exportselection', 'font', 'foreground', + 'invalidcommand', 'justify', + 'show', 'state', 'style', 'takefocus', 'textvariable', + 'validate', 'validatecommand', 'width', 'xscrollcommand', + ) + IDENTIFY_AS = 'Entry.field' if sys.platform == 'darwin' else 'textarea' + + def setUp(self): + super().setUp() + self.entry = self.create() + + def create(self, **kwargs): + return ttk.Entry(self.root, **kwargs) + + def test_configure_invalidcommand(self): + widget = self.create() + self.checkCommandParam(widget, 'invalidcommand') + + def test_configure_show(self): + widget = self.create() + self.checkParam(widget, 'show', '*') + self.checkParam(widget, 'show', '') + self.checkParam(widget, 'show', ' ') + + 
def test_configure_state(self): + widget = self.create() + self.checkParams(widget, 'state', + 'disabled', 'normal', 'readonly') + + def test_configure_validate(self): + widget = self.create() + self.checkEnumParam(widget, 'validate', + 'all', 'key', 'focus', 'focusin', 'focusout', 'none') + + def test_configure_validatecommand(self): + widget = self.create() + self.checkCommandParam(widget, 'validatecommand') + + def test_bbox(self): + self.assertIsBoundingBox(self.entry.bbox(0)) + self.assertRaises(tkinter.TclError, self.entry.bbox, 'noindex') + self.assertRaises(tkinter.TclError, self.entry.bbox, None) + + def test_identify(self): + self.entry.pack() + self.entry.update() + + # bpo-27313: macOS Cocoa widget differs from X, allow either + self.assertEqual(self.entry.identify(5, 5), self.IDENTIFY_AS) + self.assertEqual(self.entry.identify(-1, -1), "") + + self.assertRaises(tkinter.TclError, self.entry.identify, None, 5) + self.assertRaises(tkinter.TclError, self.entry.identify, 5, None) + self.assertRaises(tkinter.TclError, self.entry.identify, 5, '') + + def test_validation_options(self): + success = [] + test_invalid = lambda: success.append(True) + + self.entry['validate'] = 'none' + self.entry['validatecommand'] = lambda: False + + self.entry['invalidcommand'] = test_invalid + self.entry.validate() + self.assertTrue(success) + + self.entry['invalidcommand'] = '' + self.entry.validate() + self.assertEqual(len(success), 1) + + self.entry['invalidcommand'] = test_invalid + self.entry['validatecommand'] = lambda: True + self.entry.validate() + self.assertEqual(len(success), 1) + + self.entry['validatecommand'] = '' + self.entry.validate() + self.assertEqual(len(success), 1) + + self.entry['validatecommand'] = True + self.assertRaises(tkinter.TclError, self.entry.validate) + + def test_validation(self): + validation = [] + def validate(to_insert): + if not 'a' <= to_insert.lower() <= 'z': + validation.append(False) + return False + validation.append(True) + return True + + self.entry['validate'] = 'key' + self.entry['validatecommand'] = self.entry.register(validate), '%S' + + self.entry.insert('end', 1) + self.entry.insert('end', 'a') + self.assertEqual(validation, [False, True]) + self.assertEqual(self.entry.get(), 'a') + + def test_revalidation(self): + def validate(content): + for letter in content: + if not 'a' <= letter.lower() <= 'z': + return False + return True + + self.entry['validatecommand'] = self.entry.register(validate), '%P' + + self.entry.insert('end', 'avocado') + self.assertEqual(self.entry.validate(), True) + self.assertEqual(self.entry.state(), ()) + + self.entry.delete(0, 'end') + self.assertEqual(self.entry.get(), '') + + self.entry.insert('end', 'a1b') + self.assertEqual(self.entry.validate(), False) + self.assertEqual(self.entry.state(), ('invalid', )) + + self.entry.delete(1) + self.assertEqual(self.entry.validate(), True) + self.assertEqual(self.entry.state(), ()) + + +@add_standard_options(IntegerSizeTests, StandardTtkOptionsTests) +class ComboboxTest(EntryTest, unittest.TestCase): + OPTIONS = ( + 'background', 'class', 'cursor', 'exportselection', + 'font', 'foreground', 'height', 'invalidcommand', + 'justify', 'postcommand', 'show', 'state', 'style', + 'takefocus', 'textvariable', + 'validate', 'validatecommand', 'values', + 'width', 'xscrollcommand', + ) + IDENTIFY_AS = 'Combobox.button' if sys.platform == 'darwin' else 'textarea' + + def setUp(self): + super().setUp() + self.combo = self.create() + + def create(self, **kwargs): + return 
ttk.Combobox(self.root, **kwargs)
+
+    def test_configure_height(self):
+        widget = self.create()
+        self.checkParams(widget, 'height', 100, 101.2, 102.6, -100, 0, '1i')
+
+    def _show_drop_down_listbox(self):
+        width = self.combo.winfo_width()
+        x, y = width - 5, 5
+        if sys.platform != 'darwin':  # there's no down arrow on macOS
+            self.assertRegex(self.combo.identify(x, y), r'.*downarrow\Z')
+        self.combo.event_generate('<ButtonPress-1>', x=x, y=y)
+        self.combo.event_generate('<ButtonRelease-1>', x=x, y=y)
+        self.combo.update_idletasks()
+
+    def test_virtual_event(self):
+        success = []
+
+        self.combo['values'] = [1]
+        self.combo.bind('<<ComboboxSelected>>',
+                        lambda evt: success.append(True))
+        self.combo.pack()
+        self.combo.update()
+
+        height = self.combo.winfo_height()
+        self._show_drop_down_listbox()
+        self.combo.update()
+        self.combo.event_generate('<Return>')
+        self.combo.update()
+
+        self.assertTrue(success)
+
+    def test_configure_postcommand(self):
+        success = []
+
+        self.combo['postcommand'] = lambda: success.append(True)
+        self.combo.pack()
+        self.combo.update()
+
+        self._show_drop_down_listbox()
+        self.assertTrue(success)
+
+        # testing postcommand removal
+        self.combo['postcommand'] = ''
+        self._show_drop_down_listbox()
+        self.assertEqual(len(success), 1)
+
+    def test_configure_values(self):
+        def check_get_current(getval, currval):
+            self.assertEqual(self.combo.get(), getval)
+            self.assertEqual(self.combo.current(), currval)
+
+        self.assertEqual(self.combo['values'], '')
+        check_get_current('', -1)
+
+        self.checkParam(self.combo, 'values', 'mon tue wed thur',
+                        expected=('mon', 'tue', 'wed', 'thur'))
+        self.checkParam(self.combo, 'values', ('mon', 'tue', 'wed', 'thur'))
+        self.checkParam(self.combo, 'values', (42, 3.14, '', 'any string'))
+        self.checkParam(self.combo, 'values', '')
+
+        self.combo['values'] = ['a', 1, 'c']
+
+        self.combo.set('c')
+        check_get_current('c', 2)
+
+        self.combo.current(0)
+        check_get_current('a', 0)
+
+        self.combo.set('d')
+        check_get_current('d', -1)
+
+        # testing values with empty string
+        self.combo.set('')
+        self.combo['values'] = (1, 2, '', 3)
+        check_get_current('', 2)
+
+        # testing values with empty string set through configure
+        self.combo.configure(values=[1, '', 2])
+        self.assertEqual(self.combo['values'],
+                         ('1', '', '2') if self.wantobjects else
+                         '1 {} 2')
+
+        # testing values with spaces
+        self.combo['values'] = ['a b', 'a\tb', 'a\nb']
+        self.assertEqual(self.combo['values'],
+                         ('a b', 'a\tb', 'a\nb') if self.wantobjects else
+                         '{a b} {a\tb} {a\nb}')
+
+        # testing values with special characters
+        self.combo['values'] = [r'a\tb', '"a"', '} {']
+        self.assertEqual(self.combo['values'],
+                         (r'a\tb', '"a"', '} {') if self.wantobjects else
+                         r'a\\tb {"a"} \}\ \{')
+
+        # out of range
+        self.assertRaises(tkinter.TclError, self.combo.current,
+                          len(self.combo['values']))
+        # it expects an integer (or something that can be converted to int)
+        self.assertRaises(tkinter.TclError, self.combo.current, '')
+
+        # testing creating combobox with empty string in values
+        combo2 = ttk.Combobox(self.root, values=[1, 2, ''])
+        self.assertEqual(combo2['values'],
+                         ('1', '2', '') if self.wantobjects else '1 2 {}')
+        combo2.destroy()
+
+
+@add_standard_options(IntegerSizeTests, StandardTtkOptionsTests)
+class PanedWindowTest(AbstractWidgetTest, unittest.TestCase):
+    OPTIONS = (
+        'class', 'cursor', 'height',
+        'orient', 'style', 'takefocus', 'width',
+    )
+
+    def setUp(self):
+        super().setUp()
+        self.paned = self.create()
+
+    def create(self, **kwargs):
+        return ttk.PanedWindow(self.root, **kwargs)
+
+    def
test_configure_orient(self): + widget = self.create() + self.assertEqual(str(widget['orient']), 'vertical') + errmsg='attempt to change read-only option' + if get_tk_patchlevel() < (8, 6, 0, 'beta', 3): + errmsg='Attempt to change read-only option' + self.checkInvalidParam(widget, 'orient', 'horizontal', + errmsg=errmsg) + widget2 = self.create(orient='horizontal') + self.assertEqual(str(widget2['orient']), 'horizontal') + + def test_add(self): + # attempt to add a child that is not a direct child of the paned window + label = ttk.Label(self.paned) + child = ttk.Label(label) + self.assertRaises(tkinter.TclError, self.paned.add, child) + label.destroy() + child.destroy() + # another attempt + label = ttk.Label(self.root) + child = ttk.Label(label) + self.assertRaises(tkinter.TclError, self.paned.add, child) + child.destroy() + label.destroy() + + good_child = ttk.Label(self.root) + self.paned.add(good_child) + # re-adding a child is not accepted + self.assertRaises(tkinter.TclError, self.paned.add, good_child) + + other_child = ttk.Label(self.paned) + self.paned.add(other_child) + self.assertEqual(self.paned.pane(0), self.paned.pane(1)) + self.assertRaises(tkinter.TclError, self.paned.pane, 2) + good_child.destroy() + other_child.destroy() + self.assertRaises(tkinter.TclError, self.paned.pane, 0) + + def test_forget(self): + self.assertRaises(tkinter.TclError, self.paned.forget, None) + self.assertRaises(tkinter.TclError, self.paned.forget, 0) + + self.paned.add(ttk.Label(self.root)) + self.paned.forget(0) + self.assertRaises(tkinter.TclError, self.paned.forget, 0) + + def test_insert(self): + self.assertRaises(tkinter.TclError, self.paned.insert, None, 0) + self.assertRaises(tkinter.TclError, self.paned.insert, 0, None) + self.assertRaises(tkinter.TclError, self.paned.insert, 0, 0) + + child = ttk.Label(self.root) + child2 = ttk.Label(self.root) + child3 = ttk.Label(self.root) + + self.assertRaises(tkinter.TclError, self.paned.insert, 0, child) + + self.paned.insert('end', child2) + self.paned.insert(0, child) + self.assertEqual(self.paned.panes(), (str(child), str(child2))) + + self.paned.insert(0, child2) + self.assertEqual(self.paned.panes(), (str(child2), str(child))) + + self.paned.insert('end', child3) + self.assertEqual(self.paned.panes(), + (str(child2), str(child), str(child3))) + + # reinserting a child should move it to its current position + panes = self.paned.panes() + self.paned.insert('end', child3) + self.assertEqual(panes, self.paned.panes()) + + # moving child3 to child2 position should result in child2 ending up + # in previous child position and child ending up in previous child3 + # position + self.paned.insert(child2, child3) + self.assertEqual(self.paned.panes(), + (str(child3), str(child2), str(child))) + + def test_pane(self): + self.assertRaises(tkinter.TclError, self.paned.pane, 0) + + child = ttk.Label(self.root) + self.paned.add(child) + self.assertIsInstance(self.paned.pane(0), dict) + self.assertEqual(self.paned.pane(0, weight=None), + 0 if self.wantobjects else '0') + # newer form for querying a single option + self.assertEqual(self.paned.pane(0, 'weight'), + 0 if self.wantobjects else '0') + self.assertEqual(self.paned.pane(0), self.paned.pane(str(child))) + + self.assertRaises(tkinter.TclError, self.paned.pane, 0, + badoption='somevalue') + + def test_sashpos(self): + self.assertRaises(tkinter.TclError, self.paned.sashpos, None) + self.assertRaises(tkinter.TclError, self.paned.sashpos, '') + self.assertRaises(tkinter.TclError, self.paned.sashpos, 0) + + 
child = ttk.Label(self.paned, text='a') + self.paned.add(child, weight=1) + self.assertRaises(tkinter.TclError, self.paned.sashpos, 0) + child2 = ttk.Label(self.paned, text='b') + self.paned.add(child2) + self.assertRaises(tkinter.TclError, self.paned.sashpos, 1) + + self.paned.pack(expand=True, fill='both') + + curr_pos = self.paned.sashpos(0) + self.paned.sashpos(0, 1000) + self.assertNotEqual(curr_pos, self.paned.sashpos(0)) + self.assertIsInstance(self.paned.sashpos(0), int) + + +@add_standard_options(StandardTtkOptionsTests) +class RadiobuttonTest(AbstractLabelTest, unittest.TestCase): + OPTIONS = ( + 'class', 'command', 'compound', 'cursor', + 'image', + 'padding', 'state', 'style', + 'takefocus', 'text', 'textvariable', + 'underline', 'value', 'variable', 'width', + ) + + def create(self, **kwargs): + return ttk.Radiobutton(self.root, **kwargs) + + def test_configure_value(self): + widget = self.create() + self.checkParams(widget, 'value', 1, 2.3, '', 'any string') + + def test_configure_invoke(self): + success = [] + def cb_test(): + success.append(1) + return "cb test called" + + myvar = tkinter.IntVar(self.root) + cbtn = ttk.Radiobutton(self.root, command=cb_test, + variable=myvar, value=0) + cbtn2 = ttk.Radiobutton(self.root, command=cb_test, + variable=myvar, value=1) + + if self.wantobjects: + conv = lambda x: x + else: + conv = int + + res = cbtn.invoke() + self.assertEqual(res, "cb test called") + self.assertEqual(conv(cbtn['value']), myvar.get()) + self.assertEqual(myvar.get(), + conv(cbtn.tk.globalgetvar(cbtn['variable']))) + self.assertTrue(success) + + cbtn2['command'] = '' + res = cbtn2.invoke() + self.assertEqual(str(res), '') + self.assertLessEqual(len(success), 1) + self.assertEqual(conv(cbtn2['value']), myvar.get()) + self.assertEqual(myvar.get(), + conv(cbtn.tk.globalgetvar(cbtn['variable']))) + + self.assertEqual(str(cbtn['variable']), str(cbtn2['variable'])) + + +class MenubuttonTest(AbstractLabelTest, unittest.TestCase): + OPTIONS = ( + 'class', 'compound', 'cursor', 'direction', + 'image', 'menu', 'padding', 'state', 'style', + 'takefocus', 'text', 'textvariable', + 'underline', 'width', + ) + + def create(self, **kwargs): + return ttk.Menubutton(self.root, **kwargs) + + def test_direction(self): + widget = self.create() + self.checkEnumParam(widget, 'direction', + 'above', 'below', 'left', 'right', 'flush') + + def test_configure_menu(self): + widget = self.create() + menu = tkinter.Menu(widget, name='menu') + self.checkParam(widget, 'menu', menu, conv=str) + menu.destroy() + + +@add_standard_options(StandardTtkOptionsTests) +class ScaleTest(AbstractWidgetTest, unittest.TestCase): + OPTIONS = ( + 'class', 'command', 'cursor', 'from', 'length', + 'orient', 'style', 'takefocus', 'to', 'value', 'variable', + ) + _conv_pixels = False + default_orient = 'horizontal' + + def setUp(self): + super().setUp() + self.scale = self.create() + self.scale.pack() + self.scale.update() + + def create(self, **kwargs): + return ttk.Scale(self.root, **kwargs) + + def test_configure_from(self): + widget = self.create() + self.checkFloatParam(widget, 'from', 100, 14.9, 15.1, conv=False) + + def test_configure_length(self): + widget = self.create() + self.checkPixelsParam(widget, 'length', 130, 131.2, 135.6, '5i') + + def test_configure_to(self): + widget = self.create() + self.checkFloatParam(widget, 'to', 300, 14.9, 15.1, -10, conv=False) + + def test_configure_value(self): + widget = self.create() + self.checkFloatParam(widget, 'value', 300, 14.9, 15.1, -10, conv=False) + + def 
test_custom_event(self): + failure = [1, 1, 1] # will need to be empty + + funcid = self.scale.bind('<>', lambda evt: failure.pop()) + + self.scale['from'] = 10 + self.scale['from_'] = 10 + self.scale['to'] = 3 + + self.assertFalse(failure) + + failure = [1, 1, 1] + self.scale.configure(from_=2, to=5) + self.scale.configure(from_=0, to=-2) + self.scale.configure(to=10) + + self.assertFalse(failure) + + def test_get(self): + if self.wantobjects: + conv = lambda x: x + else: + conv = float + + scale_width = self.scale.winfo_width() + self.assertEqual(self.scale.get(scale_width, 0), self.scale['to']) + + self.assertEqual(conv(self.scale.get(0, 0)), conv(self.scale['from'])) + self.assertEqual(self.scale.get(), self.scale['value']) + self.scale['value'] = 30 + self.assertEqual(self.scale.get(), self.scale['value']) + + self.assertRaises(tkinter.TclError, self.scale.get, '', 0) + self.assertRaises(tkinter.TclError, self.scale.get, 0, '') + + def test_set(self): + if self.wantobjects: + conv = lambda x: x + else: + conv = float + + # set restricts the max/min values according to the current range + max = conv(self.scale['to']) + new_max = max + 10 + self.scale.set(new_max) + self.assertEqual(conv(self.scale.get()), max) + min = conv(self.scale['from']) + self.scale.set(min - 1) + self.assertEqual(conv(self.scale.get()), min) + + # changing directly the variable doesn't impose this limitation tho + var = tkinter.DoubleVar(self.root) + self.scale['variable'] = var + var.set(max + 5) + self.assertEqual(conv(self.scale.get()), var.get()) + self.assertEqual(conv(self.scale.get()), max + 5) + del var + gc_collect() # For PyPy or other GCs. + + # the same happens with the value option + self.scale['value'] = max + 10 + self.assertEqual(conv(self.scale.get()), max + 10) + self.assertEqual(conv(self.scale.get()), conv(self.scale['value'])) + + # nevertheless, note that the max/min values we can get specifying + # x, y coords are the ones according to the current range + self.assertEqual(conv(self.scale.get(0, 0)), min) + self.assertEqual(conv(self.scale.get(self.scale.winfo_width(), 0)), max) + + self.assertRaises(tkinter.TclError, self.scale.set, None) + + +@add_standard_options(StandardTtkOptionsTests) +class ProgressbarTest(AbstractWidgetTest, unittest.TestCase): + OPTIONS = ( + 'class', 'cursor', 'orient', 'length', + 'mode', 'maximum', 'phase', + 'style', 'takefocus', 'value', 'variable', + ) + _conv_pixels = False + default_orient = 'horizontal' + + def create(self, **kwargs): + return ttk.Progressbar(self.root, **kwargs) + + def test_configure_length(self): + widget = self.create() + self.checkPixelsParam(widget, 'length', 100.1, 56.7, '2i') + + def test_configure_maximum(self): + widget = self.create() + self.checkFloatParam(widget, 'maximum', 150.2, 77.7, 0, -10, conv=False) + + def test_configure_mode(self): + widget = self.create() + self.checkEnumParam(widget, 'mode', 'determinate', 'indeterminate') + + def test_configure_phase(self): + # XXX + pass + + def test_configure_value(self): + widget = self.create() + self.checkFloatParam(widget, 'value', 150.2, 77.7, 0, -10, + conv=False) + + +@unittest.skipIf(sys.platform == 'darwin', + 'ttk.Scrollbar is special on MacOSX') +@add_standard_options(StandardTtkOptionsTests) +class ScrollbarTest(AbstractWidgetTest, unittest.TestCase): + OPTIONS = ( + 'class', 'command', 'cursor', 'orient', 'style', 'takefocus', + ) + default_orient = 'vertical' + + def create(self, **kwargs): + return ttk.Scrollbar(self.root, **kwargs) + + 
+@add_standard_options(IntegerSizeTests, StandardTtkOptionsTests) +class NotebookTest(AbstractWidgetTest, unittest.TestCase): + OPTIONS = ( + 'class', 'cursor', 'height', 'padding', 'style', 'takefocus', 'width', + ) + + def setUp(self): + super().setUp() + self.nb = self.create(padding=0) + self.child1 = ttk.Label(self.root) + self.child2 = ttk.Label(self.root) + self.nb.add(self.child1, text='a') + self.nb.add(self.child2, text='b') + + def create(self, **kwargs): + return ttk.Notebook(self.root, **kwargs) + + def test_tab_identifiers(self): + self.nb.forget(0) + self.nb.hide(self.child2) + self.assertRaises(tkinter.TclError, self.nb.tab, self.child1) + self.assertEqual(self.nb.index('end'), 1) + self.nb.add(self.child2) + self.assertEqual(self.nb.index('end'), 1) + self.nb.select(self.child2) + + self.assertTrue(self.nb.tab('current')) + self.nb.add(self.child1, text='a') + + self.nb.pack() + self.nb.update() + if sys.platform == 'darwin': + tb_idx = "@20,5" + else: + tb_idx = "@5,5" + self.assertEqual(self.nb.tab(tb_idx), self.nb.tab('current')) + + for i in range(5, 100, 5): + try: + if self.nb.tab('@%d, 5' % i, text=None) == 'a': + break + except tkinter.TclError: + pass + + else: + self.fail("Tab with text 'a' not found") + + def test_add_and_hidden(self): + self.assertRaises(tkinter.TclError, self.nb.hide, -1) + self.assertRaises(tkinter.TclError, self.nb.hide, 'hi') + self.assertRaises(tkinter.TclError, self.nb.hide, None) + self.assertRaises(tkinter.TclError, self.nb.add, None) + self.assertRaises(tkinter.TclError, self.nb.add, ttk.Label(self.root), + unknown='option') + + tabs = self.nb.tabs() + self.nb.hide(self.child1) + self.nb.add(self.child1) + self.assertEqual(self.nb.tabs(), tabs) + + child = ttk.Label(self.root) + self.nb.add(child, text='c') + tabs = self.nb.tabs() + + curr = self.nb.index('current') + # verify that the tab gets re-added at its previous position + child2_index = self.nb.index(self.child2) + self.nb.hide(self.child2) + self.nb.add(self.child2) + self.assertEqual(self.nb.tabs(), tabs) + self.assertEqual(self.nb.index(self.child2), child2_index) + self.assertEqual(str(self.child2), self.nb.tabs()[child2_index]) + # but the tab next to it (not hidden) is the one selected now + self.assertEqual(self.nb.index('current'), curr + 1) + + def test_forget(self): + self.assertRaises(tkinter.TclError, self.nb.forget, -1) + self.assertRaises(tkinter.TclError, self.nb.forget, 'hi') + self.assertRaises(tkinter.TclError, self.nb.forget, None) + + tabs = self.nb.tabs() + child1_index = self.nb.index(self.child1) + self.nb.forget(self.child1) + self.assertNotIn(str(self.child1), self.nb.tabs()) + self.assertEqual(len(tabs) - 1, len(self.nb.tabs())) + + self.nb.add(self.child1) + self.assertEqual(self.nb.index(self.child1), 1) + self.assertNotEqual(child1_index, self.nb.index(self.child1)) + + def test_index(self): + self.assertRaises(tkinter.TclError, self.nb.index, -1) + self.assertRaises(tkinter.TclError, self.nb.index, None) + + self.assertIsInstance(self.nb.index('end'), int) + self.assertEqual(self.nb.index(self.child1), 0) + self.assertEqual(self.nb.index(self.child2), 1) + self.assertEqual(self.nb.index('end'), 2) + + def test_insert(self): + # moving tabs + tabs = self.nb.tabs() + self.nb.insert(1, tabs[0]) + self.assertEqual(self.nb.tabs(), (tabs[1], tabs[0])) + self.nb.insert(self.child1, self.child2) + self.assertEqual(self.nb.tabs(), tabs) + self.nb.insert('end', self.child1) + self.assertEqual(self.nb.tabs(), (tabs[1], tabs[0])) + self.nb.insert('end', 0) + 
self.assertEqual(self.nb.tabs(), tabs) + # bad moves + self.assertRaises(tkinter.TclError, self.nb.insert, 2, tabs[0]) + self.assertRaises(tkinter.TclError, self.nb.insert, -1, tabs[0]) + + # new tab + child3 = ttk.Label(self.root) + self.nb.insert(1, child3) + self.assertEqual(self.nb.tabs(), (tabs[0], str(child3), tabs[1])) + self.nb.forget(child3) + self.assertEqual(self.nb.tabs(), tabs) + self.nb.insert(self.child1, child3) + self.assertEqual(self.nb.tabs(), (str(child3), ) + tabs) + self.nb.forget(child3) + self.assertRaises(tkinter.TclError, self.nb.insert, 2, child3) + self.assertRaises(tkinter.TclError, self.nb.insert, -1, child3) + + # bad inserts + self.assertRaises(tkinter.TclError, self.nb.insert, 'end', None) + self.assertRaises(tkinter.TclError, self.nb.insert, None, 0) + self.assertRaises(tkinter.TclError, self.nb.insert, None, None) + + def test_select(self): + self.nb.pack() + self.nb.update() + + success = [] + tab_changed = [] + + self.child1.bind('', lambda evt: success.append(True)) + self.nb.bind('<>', + lambda evt: tab_changed.append(True)) + + self.assertEqual(self.nb.select(), str(self.child1)) + self.nb.select(self.child2) + self.assertTrue(success) + self.assertEqual(self.nb.select(), str(self.child2)) + + self.nb.update() + self.assertTrue(tab_changed) + + def test_tab(self): + self.assertRaises(tkinter.TclError, self.nb.tab, -1) + self.assertRaises(tkinter.TclError, self.nb.tab, 'notab') + self.assertRaises(tkinter.TclError, self.nb.tab, None) + + self.assertIsInstance(self.nb.tab(self.child1), dict) + self.assertEqual(self.nb.tab(self.child1, text=None), 'a') + # newer form for querying a single option + self.assertEqual(self.nb.tab(self.child1, 'text'), 'a') + self.nb.tab(self.child1, text='abc') + self.assertEqual(self.nb.tab(self.child1, text=None), 'abc') + self.assertEqual(self.nb.tab(self.child1, 'text'), 'abc') + + def test_configure_tabs(self): + self.assertEqual(len(self.nb.tabs()), 2) + + self.nb.forget(self.child1) + self.nb.forget(self.child2) + + self.assertEqual(self.nb.tabs(), ()) + + def test_traversal(self): + self.nb.pack() + self.nb.update() + + self.nb.select(0) + + focus_identify_as = 'focus' if sys.platform != 'darwin' else '' + self.assertEqual(self.nb.identify(5, 5), focus_identify_as) + simulate_mouse_click(self.nb, 5, 5) + self.nb.focus_force() + self.nb.event_generate('') + self.assertEqual(self.nb.select(), str(self.child2)) + self.nb.focus_force() + self.nb.event_generate('') + self.assertEqual(self.nb.select(), str(self.child1)) + self.nb.focus_force() + self.nb.event_generate('') + self.assertEqual(self.nb.select(), str(self.child2)) + + self.nb.tab(self.child1, text='a', underline=0) + self.nb.tab(self.child2, text='e', underline=0) + self.nb.enable_traversal() + self.nb.focus_force() + self.assertEqual(self.nb.identify(5, 5), focus_identify_as) + simulate_mouse_click(self.nb, 5, 5) + # on macOS Emacs-style keyboard shortcuts are region-dependent; + # let's use the regular arrow keys instead + if sys.platform == 'darwin': + begin = '' + end = '' + else: + begin = '' + end = '' + self.nb.event_generate(begin) + self.assertEqual(self.nb.select(), str(self.child1)) + self.nb.event_generate(end) + self.assertEqual(self.nb.select(), str(self.child2)) + + +@add_standard_options(IntegerSizeTests, StandardTtkOptionsTests) +class SpinboxTest(EntryTest, unittest.TestCase): + OPTIONS = ( + 'background', 'class', 'command', 'cursor', 'exportselection', + 'font', 'foreground', 'format', 'from', 'increment', + 'invalidcommand', 'justify', 
'show', 'state', 'style',
+        'takefocus', 'textvariable', 'to', 'validate', 'validatecommand',
+        'values', 'width', 'wrap', 'xscrollcommand',
+    )
+    IDENTIFY_AS = 'Spinbox.field' if sys.platform == 'darwin' else 'textarea'
+
+    def setUp(self):
+        super().setUp()
+        self.spin = self.create()
+        self.spin.pack()
+
+    def create(self, **kwargs):
+        return ttk.Spinbox(self.root, **kwargs)
+
+    def _click_increment_arrow(self):
+        width = self.spin.winfo_width()
+        height = self.spin.winfo_height()
+        x = width - 5
+        y = height//2 - 5
+        self.assertRegex(self.spin.identify(x, y), r'.*uparrow\Z')
+        self.spin.event_generate('<ButtonPress-1>', x=x, y=y)
+        self.spin.event_generate('<ButtonRelease-1>', x=x, y=y)
+        self.spin.update_idletasks()
+
+    def _click_decrement_arrow(self):
+        width = self.spin.winfo_width()
+        height = self.spin.winfo_height()
+        x = width - 5
+        y = height//2 + 4
+        self.assertRegex(self.spin.identify(x, y), r'.*downarrow\Z')
+        self.spin.event_generate('<ButtonPress-1>', x=x, y=y)
+        self.spin.event_generate('<ButtonRelease-1>', x=x, y=y)
+        self.spin.update_idletasks()
+
+    def test_configure_command(self):
+        success = []
+
+        self.spin['command'] = lambda: success.append(True)
+        self.spin.update()
+        self._click_increment_arrow()
+        self.spin.update()
+        self.assertTrue(success)
+
+        self._click_decrement_arrow()
+        self.assertEqual(len(success), 2)
+
+        # testing postcommand removal
+        self.spin['command'] = ''
+        self.spin.update_idletasks()
+        self._click_increment_arrow()
+        self._click_decrement_arrow()
+        self.spin.update()
+        self.assertEqual(len(success), 2)
+
+    def test_configure_to(self):
+        self.spin['from'] = 0
+        self.spin['to'] = 5
+        self.spin.set(4)
+        self.spin.update()
+        self._click_increment_arrow()  # 5
+
+        self.assertEqual(self.spin.get(), '5')
+
+        self._click_increment_arrow()  # 5
+        self.assertEqual(self.spin.get(), '5')
+
+    def test_configure_from(self):
+        self.spin['from'] = 1
+        self.spin['to'] = 10
+        self.spin.set(2)
+        self.spin.update()
+        self._click_decrement_arrow()  # 1
+        self.assertEqual(self.spin.get(), '1')
+        self._click_decrement_arrow()  # 1
+        self.assertEqual(self.spin.get(), '1')
+
+    def test_configure_increment(self):
+        self.spin['from'] = 0
+        self.spin['to'] = 10
+        self.spin['increment'] = 4
+        self.spin.set(1)
+        self.spin.update()
+
+        self._click_increment_arrow()  # 5
+        self.assertEqual(self.spin.get(), '5')
+        self.spin['increment'] = 2
+        self.spin.update()
+        self._click_decrement_arrow()  # 3
+        self.assertEqual(self.spin.get(), '3')
+
+    def test_configure_format(self):
+        self.spin.set(1)
+        self.spin['format'] = '%10.3f'
+        self.spin.update()
+        self._click_increment_arrow()
+        value = self.spin.get()
+
+        self.assertEqual(len(value), 10)
+        self.assertEqual(value.index('.'), 6)
+
+        self.spin['format'] = ''
+        self.spin.update()
+        self._click_increment_arrow()
+        value = self.spin.get()
+        self.assertTrue('.'
not in value) + self.assertEqual(len(value), 1) + + def test_configure_wrap(self): + self.spin['to'] = 10 + self.spin['from'] = 1 + self.spin.set(1) + self.spin['wrap'] = True + self.spin.update() + + self._click_decrement_arrow() + self.assertEqual(self.spin.get(), '10') + + self._click_increment_arrow() + self.assertEqual(self.spin.get(), '1') + + self.spin['wrap'] = False + self.spin.update() + + self._click_decrement_arrow() + self.assertEqual(self.spin.get(), '1') + + def test_configure_values(self): + self.assertEqual(self.spin['values'], '') + self.checkParam(self.spin, 'values', 'mon tue wed thur', + expected=('mon', 'tue', 'wed', 'thur')) + self.checkParam(self.spin, 'values', ('mon', 'tue', 'wed', 'thur')) + self.checkParam(self.spin, 'values', (42, 3.14, '', 'any string')) + self.checkParam(self.spin, 'values', '') + + self.spin['values'] = ['a', 1, 'c'] + + # test incrementing / decrementing values + self.spin.set('a') + self.spin.update() + self._click_increment_arrow() + self.assertEqual(self.spin.get(), '1') + + self._click_decrement_arrow() + self.assertEqual(self.spin.get(), 'a') + + # testing values with empty string set through configure + self.spin.configure(values=[1, '', 2]) + self.assertEqual(self.spin['values'], + ('1', '', '2') if self.wantobjects else + '1 {} 2') + + # testing values with spaces + self.spin['values'] = ['a b', 'a\tb', 'a\nb'] + self.assertEqual(self.spin['values'], + ('a b', 'a\tb', 'a\nb') if self.wantobjects else + '{a b} {a\tb} {a\nb}') + + # testing values with special characters + self.spin['values'] = [r'a\tb', '"a"', '} {'] + self.assertEqual(self.spin['values'], + (r'a\tb', '"a"', '} {') if self.wantobjects else + r'a\\tb {"a"} \}\ \{') + + # testing creating spinbox with empty string in values + spin2 = ttk.Spinbox(self.root, values=[1, 2, '']) + self.assertEqual(spin2['values'], + ('1', '2', '') if self.wantobjects else '1 2 {}') + spin2.destroy() + + +@add_standard_options(StandardTtkOptionsTests) +class TreeviewTest(AbstractWidgetTest, unittest.TestCase): + OPTIONS = ( + 'class', 'columns', 'cursor', 'displaycolumns', + 'height', 'padding', 'selectmode', 'show', + 'style', 'takefocus', 'xscrollcommand', 'yscrollcommand', + ) + + def setUp(self): + super().setUp() + self.tv = self.create(padding=0) + + def create(self, **kwargs): + return ttk.Treeview(self.root, **kwargs) + + def test_configure_columns(self): + widget = self.create() + self.checkParam(widget, 'columns', 'a b c', + expected=('a', 'b', 'c')) + self.checkParam(widget, 'columns', ('a', 'b', 'c')) + self.checkParam(widget, 'columns', '') + + def test_configure_displaycolumns(self): + widget = self.create() + widget['columns'] = ('a', 'b', 'c') + self.checkParam(widget, 'displaycolumns', 'b a c', + expected=('b', 'a', 'c')) + self.checkParam(widget, 'displaycolumns', ('b', 'a', 'c')) + self.checkParam(widget, 'displaycolumns', '#all', + expected=('#all',)) + self.checkParam(widget, 'displaycolumns', (2, 1, 0)) + self.checkInvalidParam(widget, 'displaycolumns', ('a', 'b', 'd'), + errmsg='Invalid column index d') + self.checkInvalidParam(widget, 'displaycolumns', (1, 2, 3), + errmsg='Column index 3 out of bounds') + self.checkInvalidParam(widget, 'displaycolumns', (1, -2), + errmsg='Column index -2 out of bounds') + + def test_configure_height(self): + widget = self.create() + self.checkPixelsParam(widget, 'height', 100, -100, 0, '3c', conv=False) + self.checkPixelsParam(widget, 'height', 101.2, 102.6, conv=False) + + def test_configure_selectmode(self): + widget = 
self.create() + self.checkEnumParam(widget, 'selectmode', + 'none', 'browse', 'extended') + + def test_configure_show(self): + widget = self.create() + self.checkParam(widget, 'show', 'tree headings', + expected=('tree', 'headings')) + self.checkParam(widget, 'show', ('tree', 'headings')) + self.checkParam(widget, 'show', ('headings', 'tree')) + self.checkParam(widget, 'show', 'tree', expected=('tree',)) + self.checkParam(widget, 'show', 'headings', expected=('headings',)) + + def test_bbox(self): + self.tv.pack() + self.assertEqual(self.tv.bbox(''), '') + self.tv.update() + + item_id = self.tv.insert('', 'end') + children = self.tv.get_children() + self.assertTrue(children) + + bbox = self.tv.bbox(children[0]) + self.assertIsBoundingBox(bbox) + + # compare width in bboxes + self.tv['columns'] = ['test'] + self.tv.column('test', width=50) + bbox_column0 = self.tv.bbox(children[0], 0) + root_width = self.tv.column('#0', width=None) + if not self.wantobjects: + root_width = int(root_width) + self.assertEqual(bbox_column0[0], bbox[0] + root_width) + + # verify that bbox of a closed item is the empty string + child1 = self.tv.insert(item_id, 'end') + self.assertEqual(self.tv.bbox(child1), '') + + def test_children(self): + # no children yet, should get an empty tuple + self.assertEqual(self.tv.get_children(), ()) + + item_id = self.tv.insert('', 'end') + self.assertIsInstance(self.tv.get_children(), tuple) + self.assertEqual(self.tv.get_children()[0], item_id) + + # add item_id and child3 as children of child2 + child2 = self.tv.insert('', 'end') + child3 = self.tv.insert('', 'end') + self.tv.set_children(child2, item_id, child3) + self.assertEqual(self.tv.get_children(child2), (item_id, child3)) + + # child3 has child2 as parent, thus trying to set child2 as a children + # of child3 should result in an error + self.assertRaises(tkinter.TclError, + self.tv.set_children, child3, child2) + + # remove child2 children + self.tv.set_children(child2) + self.assertEqual(self.tv.get_children(child2), ()) + + # remove root's children + self.tv.set_children('') + self.assertEqual(self.tv.get_children(), ()) + + def test_column(self): + # return a dict with all options/values + self.assertIsInstance(self.tv.column('#0'), dict) + # return a single value of the given option + if self.wantobjects: + self.assertIsInstance(self.tv.column('#0', width=None), int) + # set a new value for an option + self.tv.column('#0', width=10) + # testing new way to get option value + self.assertEqual(self.tv.column('#0', 'width'), + 10 if self.wantobjects else '10') + self.assertEqual(self.tv.column('#0', width=None), + 10 if self.wantobjects else '10') + # check read-only option + self.assertRaises(tkinter.TclError, self.tv.column, '#0', id='X') + + self.assertRaises(tkinter.TclError, self.tv.column, 'invalid') + invalid_kws = [ + {'unknown_option': 'some value'}, {'stretch': 'wrong'}, + {'anchor': 'wrong'}, {'width': 'wrong'}, {'minwidth': 'wrong'} + ] + for kw in invalid_kws: + self.assertRaises(tkinter.TclError, self.tv.column, '#0', + **kw) + + def test_delete(self): + self.assertRaises(tkinter.TclError, self.tv.delete, '#0') + + item_id = self.tv.insert('', 'end') + item2 = self.tv.insert(item_id, 'end') + self.assertEqual(self.tv.get_children(), (item_id, )) + self.assertEqual(self.tv.get_children(item_id), (item2, )) + + self.tv.delete(item_id) + self.assertFalse(self.tv.get_children()) + + # reattach should fail + self.assertRaises(tkinter.TclError, + self.tv.reattach, item_id, '', 'end') + + # test multiple item 
delete + item1 = self.tv.insert('', 'end') + item2 = self.tv.insert('', 'end') + self.assertEqual(self.tv.get_children(), (item1, item2)) + + self.tv.delete(item1, item2) + self.assertFalse(self.tv.get_children()) + + def test_detach_reattach(self): + item_id = self.tv.insert('', 'end') + item2 = self.tv.insert(item_id, 'end') + + # calling detach without items is valid, although it does nothing + prev = self.tv.get_children() + self.tv.detach() # this should do nothing + self.assertEqual(prev, self.tv.get_children()) + + self.assertEqual(self.tv.get_children(), (item_id, )) + self.assertEqual(self.tv.get_children(item_id), (item2, )) + + # detach item with children + self.tv.detach(item_id) + self.assertFalse(self.tv.get_children()) + + # reattach item with children + self.tv.reattach(item_id, '', 'end') + self.assertEqual(self.tv.get_children(), (item_id, )) + self.assertEqual(self.tv.get_children(item_id), (item2, )) + + # move a children to the root + self.tv.move(item2, '', 'end') + self.assertEqual(self.tv.get_children(), (item_id, item2)) + self.assertEqual(self.tv.get_children(item_id), ()) + + # bad values + self.assertRaises(tkinter.TclError, + self.tv.reattach, 'nonexistent', '', 'end') + self.assertRaises(tkinter.TclError, + self.tv.detach, 'nonexistent') + self.assertRaises(tkinter.TclError, + self.tv.reattach, item2, 'otherparent', 'end') + self.assertRaises(tkinter.TclError, + self.tv.reattach, item2, '', 'invalid') + + # multiple detach + self.tv.detach(item_id, item2) + self.assertEqual(self.tv.get_children(), ()) + self.assertEqual(self.tv.get_children(item_id), ()) + + def test_exists(self): + self.assertEqual(self.tv.exists('something'), False) + self.assertEqual(self.tv.exists(''), True) + self.assertEqual(self.tv.exists({}), False) + + # the following will make a tk.call equivalent to + # tk.call(treeview, "exists") which should result in an error + # in the tcl interpreter since tk requires an item. 
+ self.assertRaises(tkinter.TclError, self.tv.exists, None) + + def test_focus(self): + # nothing is focused right now + self.assertEqual(self.tv.focus(), '') + + item1 = self.tv.insert('', 'end') + self.tv.focus(item1) + self.assertEqual(self.tv.focus(), item1) + + self.tv.delete(item1) + self.assertEqual(self.tv.focus(), '') + + # try focusing inexistent item + self.assertRaises(tkinter.TclError, self.tv.focus, 'hi') + + def test_heading(self): + # check a dict is returned + self.assertIsInstance(self.tv.heading('#0'), dict) + + # check a value is returned + self.tv.heading('#0', text='hi') + self.assertEqual(self.tv.heading('#0', 'text'), 'hi') + self.assertEqual(self.tv.heading('#0', text=None), 'hi') + + # invalid option + self.assertRaises(tkinter.TclError, self.tv.heading, '#0', + background=None) + # invalid value + self.assertRaises(tkinter.TclError, self.tv.heading, '#0', + anchor=1) + + def test_heading_callback(self): + def simulate_heading_click(x, y): + if tcl_version >= (8, 6): + self.assertEqual(self.tv.identify_column(x), '#0') + self.assertEqual(self.tv.identify_region(x, y), 'heading') + simulate_mouse_click(self.tv, x, y) + self.tv.update() + + success = [] # no success for now + + self.tv.pack() + self.tv.heading('#0', command=lambda: success.append(True)) + self.tv.column('#0', width=100) + self.tv.update() + + # assuming that the coords (5, 5) fall into heading #0 + simulate_heading_click(5, 5) + if not success: + self.fail("The command associated to the treeview heading wasn't " + "invoked.") + + success = [] + commands = self.tv.master._tclCommands + self.tv.heading('#0', command=str(self.tv.heading('#0', command=None))) + self.assertEqual(commands, self.tv.master._tclCommands) + simulate_heading_click(5, 5) + if not success: + self.fail("The command associated to the treeview heading wasn't " + "invoked.") + + # XXX The following raises an error in a tcl interpreter, but not in + # Python + #self.tv.heading('#0', command='I dont exist') + #simulate_heading_click(5, 5) + + def test_index(self): + # item 'what' doesn't exist + self.assertRaises(tkinter.TclError, self.tv.index, 'what') + + self.assertEqual(self.tv.index(''), 0) + + item1 = self.tv.insert('', 'end') + item2 = self.tv.insert('', 'end') + c1 = self.tv.insert(item1, 'end') + c2 = self.tv.insert(item1, 'end') + self.assertEqual(self.tv.index(item1), 0) + self.assertEqual(self.tv.index(c1), 0) + self.assertEqual(self.tv.index(c2), 1) + self.assertEqual(self.tv.index(item2), 1) + + self.tv.move(item2, '', 0) + self.assertEqual(self.tv.index(item2), 0) + self.assertEqual(self.tv.index(item1), 1) + + # check that index still works even after its parent and siblings + # have been detached + self.tv.detach(item1) + self.assertEqual(self.tv.index(c2), 1) + self.tv.detach(c1) + self.assertEqual(self.tv.index(c2), 0) + + # but it fails after item has been deleted + self.tv.delete(item1) + self.assertRaises(tkinter.TclError, self.tv.index, c2) + + def test_insert_item(self): + # parent 'none' doesn't exist + self.assertRaises(tkinter.TclError, self.tv.insert, 'none', 'end') + + # open values + self.assertRaises(tkinter.TclError, self.tv.insert, '', 'end', + open='') + self.assertRaises(tkinter.TclError, self.tv.insert, '', 'end', + open='please') + self.assertFalse(self.tv.delete(self.tv.insert('', 'end', open=True))) + self.assertFalse(self.tv.delete(self.tv.insert('', 'end', open=False))) + + # invalid index + self.assertRaises(tkinter.TclError, self.tv.insert, '', 'middle') + + # trying to duplicate item id is 
invalid + itemid = self.tv.insert('', 'end', 'first-item') + self.assertEqual(itemid, 'first-item') + self.assertRaises(tkinter.TclError, self.tv.insert, '', 'end', + 'first-item') + self.assertRaises(tkinter.TclError, self.tv.insert, '', 'end', + MockTclObj('first-item')) + + # unicode values + value = '\xe1ba' + item = self.tv.insert('', 'end', values=(value, )) + self.assertEqual(self.tv.item(item, 'values'), + (value,) if self.wantobjects else value) + self.assertEqual(self.tv.item(item, values=None), + (value,) if self.wantobjects else value) + + self.tv.item(item, values=self.root.splitlist(self.tv.item(item, values=None))) + self.assertEqual(self.tv.item(item, values=None), + (value,) if self.wantobjects else value) + + self.assertIsInstance(self.tv.item(item), dict) + + # erase item values + self.tv.item(item, values='') + self.assertFalse(self.tv.item(item, values=None)) + + # item tags + item = self.tv.insert('', 'end', tags=[1, 2, value]) + self.assertEqual(self.tv.item(item, tags=None), + ('1', '2', value) if self.wantobjects else + '1 2 %s' % value) + self.tv.item(item, tags=[]) + self.assertFalse(self.tv.item(item, tags=None)) + self.tv.item(item, tags=(1, 2)) + self.assertEqual(self.tv.item(item, tags=None), + ('1', '2') if self.wantobjects else '1 2') + + # values with spaces + item = self.tv.insert('', 'end', values=('a b c', + '%s %s' % (value, value))) + self.assertEqual(self.tv.item(item, values=None), + ('a b c', '%s %s' % (value, value)) if self.wantobjects else + '{a b c} {%s %s}' % (value, value)) + + # text + self.assertEqual(self.tv.item( + self.tv.insert('', 'end', text="Label here"), text=None), + "Label here") + self.assertEqual(self.tv.item( + self.tv.insert('', 'end', text=value), text=None), + value) + + # test for values which are not None + itemid = self.tv.insert('', 'end', 0) + self.assertEqual(itemid, '0') + itemid = self.tv.insert('', 'end', 0.0) + self.assertEqual(itemid, '0.0') + # this is because False resolves to 0 and element with 0 iid is already present + self.assertRaises(tkinter.TclError, self.tv.insert, '', 'end', False) + self.assertRaises(tkinter.TclError, self.tv.insert, '', 'end', '') + + def test_selection(self): + self.assertRaises(TypeError, self.tv.selection, 'spam') + # item 'none' doesn't exist + self.assertRaises(tkinter.TclError, self.tv.selection_set, 'none') + self.assertRaises(tkinter.TclError, self.tv.selection_add, 'none') + self.assertRaises(tkinter.TclError, self.tv.selection_remove, 'none') + self.assertRaises(tkinter.TclError, self.tv.selection_toggle, 'none') + + item1 = self.tv.insert('', 'end') + item2 = self.tv.insert('', 'end') + c1 = self.tv.insert(item1, 'end') + c2 = self.tv.insert(item1, 'end') + c3 = self.tv.insert(item1, 'end') + self.assertEqual(self.tv.selection(), ()) + + self.tv.selection_set(c1, item2) + self.assertEqual(self.tv.selection(), (c1, item2)) + self.tv.selection_set(c2) + self.assertEqual(self.tv.selection(), (c2,)) + + self.tv.selection_add(c1, item2) + self.assertEqual(self.tv.selection(), (c1, c2, item2)) + self.tv.selection_add(item1) + self.assertEqual(self.tv.selection(), (item1, c1, c2, item2)) + self.tv.selection_add() + self.assertEqual(self.tv.selection(), (item1, c1, c2, item2)) + + self.tv.selection_remove(item1, c3) + self.assertEqual(self.tv.selection(), (c1, c2, item2)) + self.tv.selection_remove(c2) + self.assertEqual(self.tv.selection(), (c1, item2)) + self.tv.selection_remove() + self.assertEqual(self.tv.selection(), (c1, item2)) + + self.tv.selection_toggle(c1, c3) + 
self.assertEqual(self.tv.selection(), (c3, item2)) + self.tv.selection_toggle(item2) + self.assertEqual(self.tv.selection(), (c3,)) + self.tv.selection_toggle() + self.assertEqual(self.tv.selection(), (c3,)) + + self.tv.insert('', 'end', id='with spaces') + self.tv.selection_set('with spaces') + self.assertEqual(self.tv.selection(), ('with spaces',)) + + self.tv.insert('', 'end', id='{brace') + self.tv.selection_set('{brace') + self.assertEqual(self.tv.selection(), ('{brace',)) + + self.tv.insert('', 'end', id='unicode\u20ac') + self.tv.selection_set('unicode\u20ac') + self.assertEqual(self.tv.selection(), ('unicode\u20ac',)) + + self.tv.insert('', 'end', id=b'bytes\xe2\x82\xac') + self.tv.selection_set(b'bytes\xe2\x82\xac') + self.assertEqual(self.tv.selection(), ('bytes\xe2\x82\xac',)) + + self.tv.selection_set() + self.assertEqual(self.tv.selection(), ()) + + # Old interface + self.tv.selection_set((c1, item2)) + self.assertEqual(self.tv.selection(), (c1, item2)) + self.tv.selection_add((c1, item1)) + self.assertEqual(self.tv.selection(), (item1, c1, item2)) + self.tv.selection_remove((item1, c3)) + self.assertEqual(self.tv.selection(), (c1, item2)) + self.tv.selection_toggle((c1, c3)) + self.assertEqual(self.tv.selection(), (c3, item2)) + + def test_set(self): + self.tv['columns'] = ['A', 'B'] + item = self.tv.insert('', 'end', values=['a', 'b']) + self.assertEqual(self.tv.set(item), {'A': 'a', 'B': 'b'}) + + self.tv.set(item, 'B', 'a') + self.assertEqual(self.tv.item(item, values=None), + ('a', 'a') if self.wantobjects else 'a a') + + self.tv['columns'] = ['B'] + self.assertEqual(self.tv.set(item), {'B': 'a'}) + + self.tv.set(item, 'B', 'b') + self.assertEqual(self.tv.set(item, column='B'), 'b') + self.assertEqual(self.tv.item(item, values=None), + ('b', 'a') if self.wantobjects else 'b a') + + self.tv.set(item, 'B', 123) + self.assertEqual(self.tv.set(item, 'B'), + 123 if self.wantobjects else '123') + self.assertEqual(self.tv.item(item, values=None), + (123, 'a') if self.wantobjects else '123 a') + self.assertEqual(self.tv.set(item), + {'B': 123} if self.wantobjects else {'B': '123'}) + + # inexistent column + self.assertRaises(tkinter.TclError, self.tv.set, item, 'A') + self.assertRaises(tkinter.TclError, self.tv.set, item, 'A', 'b') + + # inexistent item + self.assertRaises(tkinter.TclError, self.tv.set, 'notme') + + def test_tag_bind(self): + events = [] + item1 = self.tv.insert('', 'end', tags=['call']) + item2 = self.tv.insert('', 'end', tags=['call']) + self.tv.tag_bind('call', '', + lambda evt: events.append(1)) + self.tv.tag_bind('call', '', + lambda evt: events.append(2)) + + self.tv.pack() + self.tv.update() + + pos_y = set() + found = set() + for i in range(0, 100, 10): + if len(found) == 2: # item1 and item2 already found + break + item_id = self.tv.identify_row(i) + if item_id and item_id not in found: + pos_y.add(i) + found.add(item_id) + + self.assertEqual(len(pos_y), 2) # item1 and item2 y pos + for y in pos_y: + simulate_mouse_click(self.tv, 0, y) + + # by now there should be 4 things in the events list, since each + # item had a bind for two events that were simulated above + self.assertEqual(len(events), 4) + for evt in zip(events[::2], events[1::2]): + self.assertEqual(evt, (1, 2)) + + def test_tag_configure(self): + # Just testing parameter passing for now + self.assertRaises(TypeError, self.tv.tag_configure) + self.assertRaises(tkinter.TclError, self.tv.tag_configure, + 'test', sky='blue') + self.tv.tag_configure('test', foreground='blue') + 
self.assertEqual(str(self.tv.tag_configure('test', 'foreground')), + 'blue') + self.assertEqual(str(self.tv.tag_configure('test', foreground=None)), + 'blue') + self.assertIsInstance(self.tv.tag_configure('test'), dict) + + def test_tag_has(self): + item1 = self.tv.insert('', 'end', text='Item 1', tags=['tag1']) + item2 = self.tv.insert('', 'end', text='Item 2', tags=['tag2']) + self.assertRaises(TypeError, self.tv.tag_has) + self.assertRaises(TclError, self.tv.tag_has, 'tag1', 'non-existing') + self.assertTrue(self.tv.tag_has('tag1', item1)) + self.assertFalse(self.tv.tag_has('tag1', item2)) + self.assertFalse(self.tv.tag_has('tag2', item1)) + self.assertTrue(self.tv.tag_has('tag2', item2)) + self.assertFalse(self.tv.tag_has('tag3', item1)) + self.assertFalse(self.tv.tag_has('tag3', item2)) + self.assertEqual(self.tv.tag_has('tag1'), (item1,)) + self.assertEqual(self.tv.tag_has('tag2'), (item2,)) + self.assertEqual(self.tv.tag_has('tag3'), ()) + + +@add_standard_options(StandardTtkOptionsTests) +class SeparatorTest(AbstractWidgetTest, unittest.TestCase): + OPTIONS = ( + 'class', 'cursor', 'orient', 'style', 'takefocus', + # 'state'? + ) + default_orient = 'horizontal' + + def create(self, **kwargs): + return ttk.Separator(self.root, **kwargs) + + +@add_standard_options(StandardTtkOptionsTests) +class SizegripTest(AbstractWidgetTest, unittest.TestCase): + OPTIONS = ( + 'class', 'cursor', 'style', 'takefocus', + # 'state'? + ) + + def create(self, **kwargs): + return ttk.Sizegrip(self.root, **kwargs) + + +class DefaultRootTest(AbstractDefaultRootTest, unittest.TestCase): + + def test_frame(self): + self._test_widget(ttk.Frame) + + def test_label(self): + self._test_widget(ttk.Label) + + +tests_gui = ( + ButtonTest, CheckbuttonTest, ComboboxTest, EntryTest, + FrameTest, LabelFrameTest, LabelTest, MenubuttonTest, + NotebookTest, PanedWindowTest, ProgressbarTest, + RadiobuttonTest, ScaleTest, ScrollbarTest, SeparatorTest, + SizegripTest, SpinboxTest, TreeviewTest, WidgetTest, DefaultRootTest, + ) + +if __name__ == "__main__": + unittest.main() diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/test/widget_tests.py python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/test/widget_tests.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/test/widget_tests.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/test/widget_tests.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,520 @@ +# Common tests for test_tkinter/test_widgets.py and test_ttk/test_widgets.py + +import unittest +import tkinter +from tkinter.test.support import (AbstractTkTest, tcl_version, + pixels_conv, tcl_obj_eq) +import test.support + + +_sentinel = object() + +class AbstractWidgetTest(AbstractTkTest): + _conv_pixels = round + _conv_pad_pixels = None + _stringify = False + + @property + def scaling(self): + try: + return self._scaling + except AttributeError: + self._scaling = float(self.root.call('tk', 'scaling')) + return self._scaling + + def _str(self, value): + if not self._stringify and self.wantobjects and tcl_version >= (8, 6): + return value + if isinstance(value, tuple): + return ' '.join(map(self._str, value)) + return str(value) + + def assertEqual2(self, actual, expected, msg=None, eq=object.__eq__): + if eq(actual, expected): + return + self.assertEqual(actual, expected, msg) + + def checkParam(self, widget, name, value, *, expected=_sentinel, + conv=False, eq=None): + widget[name] = value + if expected is _sentinel: + expected = value + if conv: + expected = 
conv(expected) + if self._stringify or not self.wantobjects: + if isinstance(expected, tuple): + expected = tkinter._join(expected) + else: + expected = str(expected) + if eq is None: + eq = tcl_obj_eq + self.assertEqual2(widget[name], expected, eq=eq) + self.assertEqual2(widget.cget(name), expected, eq=eq) + t = widget.configure(name) + self.assertEqual(len(t), 5) + self.assertEqual2(t[4], expected, eq=eq) + + def checkInvalidParam(self, widget, name, value, errmsg=None): + orig = widget[name] + if errmsg is not None: + errmsg = errmsg.format(value) + with self.assertRaises(tkinter.TclError) as cm: + widget[name] = value + if errmsg is not None: + self.assertEqual(str(cm.exception), errmsg) + self.assertEqual(widget[name], orig) + with self.assertRaises(tkinter.TclError) as cm: + widget.configure({name: value}) + if errmsg is not None: + self.assertEqual(str(cm.exception), errmsg) + self.assertEqual(widget[name], orig) + + def checkParams(self, widget, name, *values, **kwargs): + for value in values: + self.checkParam(widget, name, value, **kwargs) + + def checkIntegerParam(self, widget, name, *values, **kwargs): + self.checkParams(widget, name, *values, **kwargs) + self.checkInvalidParam(widget, name, '', + errmsg='expected integer but got ""') + self.checkInvalidParam(widget, name, '10p', + errmsg='expected integer but got "10p"') + self.checkInvalidParam(widget, name, 3.2, + errmsg='expected integer but got "3.2"') + + def checkFloatParam(self, widget, name, *values, conv=float, **kwargs): + for value in values: + self.checkParam(widget, name, value, conv=conv, **kwargs) + self.checkInvalidParam(widget, name, '', + errmsg='expected floating-point number but got ""') + self.checkInvalidParam(widget, name, 'spam', + errmsg='expected floating-point number but got "spam"') + + def checkBooleanParam(self, widget, name): + for value in (False, 0, 'false', 'no', 'off'): + self.checkParam(widget, name, value, expected=0) + for value in (True, 1, 'true', 'yes', 'on'): + self.checkParam(widget, name, value, expected=1) + self.checkInvalidParam(widget, name, '', + errmsg='expected boolean value but got ""') + self.checkInvalidParam(widget, name, 'spam', + errmsg='expected boolean value but got "spam"') + + def checkColorParam(self, widget, name, *, allow_empty=None, **kwargs): + self.checkParams(widget, name, + '#ff0000', '#00ff00', '#0000ff', '#123456', + 'red', 'green', 'blue', 'white', 'black', 'grey', + **kwargs) + self.checkInvalidParam(widget, name, 'spam', + errmsg='unknown color name "spam"') + + def checkCursorParam(self, widget, name, **kwargs): + self.checkParams(widget, name, 'arrow', 'watch', 'cross', '',**kwargs) + self.checkParam(widget, name, 'none') + self.checkInvalidParam(widget, name, 'spam', + errmsg='bad cursor spec "spam"') + + def checkCommandParam(self, widget, name): + def command(*args): + pass + widget[name] = command + self.assertTrue(widget[name]) + self.checkParams(widget, name, '') + + def checkEnumParam(self, widget, name, *values, errmsg=None, **kwargs): + self.checkParams(widget, name, *values, **kwargs) + if errmsg is None: + errmsg2 = ' %s "{}": must be %s%s or %s' % ( + name, + ', '.join(values[:-1]), + ',' if len(values) > 2 else '', + values[-1]) + self.checkInvalidParam(widget, name, '', + errmsg='ambiguous' + errmsg2) + errmsg = 'bad' + errmsg2 + self.checkInvalidParam(widget, name, 'spam', errmsg=errmsg) + + def checkPixelsParam(self, widget, name, *values, + conv=None, **kwargs): + if conv is None: + conv = self._conv_pixels + for value in values: + 
expected = _sentinel + conv1 = conv + if isinstance(value, str): + if conv1 and conv1 is not str: + expected = pixels_conv(value) * self.scaling + conv1 = round + self.checkParam(widget, name, value, expected=expected, + conv=conv1, **kwargs) + self.checkInvalidParam(widget, name, '6x', + errmsg='bad screen distance "6x"') + self.checkInvalidParam(widget, name, 'spam', + errmsg='bad screen distance "spam"') + + def checkReliefParam(self, widget, name): + self.checkParams(widget, name, + 'flat', 'groove', 'raised', 'ridge', 'solid', 'sunken') + errmsg='bad relief "spam": must be '\ + 'flat, groove, raised, ridge, solid, or sunken' + if tcl_version < (8, 6): + errmsg = None + self.checkInvalidParam(widget, name, 'spam', + errmsg=errmsg) + + def checkImageParam(self, widget, name): + image = tkinter.PhotoImage(master=self.root, name='image1') + self.checkParam(widget, name, image, conv=str) + self.checkInvalidParam(widget, name, 'spam', + errmsg='image "spam" doesn\'t exist') + widget[name] = '' + + def checkVariableParam(self, widget, name, var): + self.checkParam(widget, name, var, conv=str) + + def assertIsBoundingBox(self, bbox): + self.assertIsNotNone(bbox) + self.assertIsInstance(bbox, tuple) + if len(bbox) != 4: + self.fail('Invalid bounding box: %r' % (bbox,)) + for item in bbox: + if not isinstance(item, int): + self.fail('Invalid bounding box: %r' % (bbox,)) + break + + + def test_keys(self): + widget = self.create() + keys = widget.keys() + self.assertEqual(sorted(keys), sorted(widget.configure())) + for k in keys: + widget[k] + # Test if OPTIONS contains all keys + if test.support.verbose: + aliases = { + 'bd': 'borderwidth', + 'bg': 'background', + 'fg': 'foreground', + 'invcmd': 'invalidcommand', + 'vcmd': 'validatecommand', + } + keys = set(keys) + expected = set(self.OPTIONS) + for k in sorted(keys - expected): + if not (k in aliases and + aliases[k] in keys and + aliases[k] in expected): + print('%s.OPTIONS doesn\'t contain "%s"' % + (self.__class__.__name__, k)) + + +class StandardOptionsTests: + STANDARD_OPTIONS = ( + 'activebackground', 'activeborderwidth', 'activeforeground', 'anchor', + 'background', 'bitmap', 'borderwidth', 'compound', 'cursor', + 'disabledforeground', 'exportselection', 'font', 'foreground', + 'highlightbackground', 'highlightcolor', 'highlightthickness', + 'image', 'insertbackground', 'insertborderwidth', + 'insertofftime', 'insertontime', 'insertwidth', + 'jump', 'justify', 'orient', 'padx', 'pady', 'relief', + 'repeatdelay', 'repeatinterval', + 'selectbackground', 'selectborderwidth', 'selectforeground', + 'setgrid', 'takefocus', 'text', 'textvariable', 'troughcolor', + 'underline', 'wraplength', 'xscrollcommand', 'yscrollcommand', + ) + + def test_configure_activebackground(self): + widget = self.create() + self.checkColorParam(widget, 'activebackground') + + def test_configure_activeborderwidth(self): + widget = self.create() + self.checkPixelsParam(widget, 'activeborderwidth', + 0, 1.3, 2.9, 6, -2, '10p') + + def test_configure_activeforeground(self): + widget = self.create() + self.checkColorParam(widget, 'activeforeground') + + def test_configure_anchor(self): + widget = self.create() + self.checkEnumParam(widget, 'anchor', + 'n', 'ne', 'e', 'se', 's', 'sw', 'w', 'nw', 'center') + + def test_configure_background(self): + widget = self.create() + self.checkColorParam(widget, 'background') + if 'bg' in self.OPTIONS: + self.checkColorParam(widget, 'bg') + + def test_configure_bitmap(self): + widget = self.create() + self.checkParam(widget, 
'bitmap', 'questhead') + self.checkParam(widget, 'bitmap', 'gray50') + filename = test.support.findfile('python.xbm', subdir='imghdrdata') + self.checkParam(widget, 'bitmap', '@' + filename) + # Cocoa Tk widgets don't detect invalid -bitmap values + # See https://core.tcl.tk/tk/info/31cd33dbf0 + if not ('aqua' in self.root.tk.call('tk', 'windowingsystem') and + 'AppKit' in self.root.winfo_server()): + self.checkInvalidParam(widget, 'bitmap', 'spam', + errmsg='bitmap "spam" not defined') + + def test_configure_borderwidth(self): + widget = self.create() + self.checkPixelsParam(widget, 'borderwidth', + 0, 1.3, 2.6, 6, -2, '10p') + if 'bd' in self.OPTIONS: + self.checkPixelsParam(widget, 'bd', 0, 1.3, 2.6, 6, -2, '10p') + + def test_configure_compound(self): + widget = self.create() + self.checkEnumParam(widget, 'compound', + 'bottom', 'center', 'left', 'none', 'right', 'top') + + def test_configure_cursor(self): + widget = self.create() + self.checkCursorParam(widget, 'cursor') + + def test_configure_disabledforeground(self): + widget = self.create() + self.checkColorParam(widget, 'disabledforeground') + + def test_configure_exportselection(self): + widget = self.create() + self.checkBooleanParam(widget, 'exportselection') + + def test_configure_font(self): + widget = self.create() + self.checkParam(widget, 'font', + '-Adobe-Helvetica-Medium-R-Normal--*-120-*-*-*-*-*-*') + self.checkInvalidParam(widget, 'font', '', + errmsg='font "" doesn\'t exist') + + def test_configure_foreground(self): + widget = self.create() + self.checkColorParam(widget, 'foreground') + if 'fg' in self.OPTIONS: + self.checkColorParam(widget, 'fg') + + def test_configure_highlightbackground(self): + widget = self.create() + self.checkColorParam(widget, 'highlightbackground') + + def test_configure_highlightcolor(self): + widget = self.create() + self.checkColorParam(widget, 'highlightcolor') + + def test_configure_highlightthickness(self): + widget = self.create() + self.checkPixelsParam(widget, 'highlightthickness', + 0, 1.3, 2.6, 6, '10p') + self.checkParam(widget, 'highlightthickness', -2, expected=0, + conv=self._conv_pixels) + + def test_configure_image(self): + widget = self.create() + self.checkImageParam(widget, 'image') + + def test_configure_insertbackground(self): + widget = self.create() + self.checkColorParam(widget, 'insertbackground') + + def test_configure_insertborderwidth(self): + widget = self.create() + self.checkPixelsParam(widget, 'insertborderwidth', + 0, 1.3, 2.6, 6, -2, '10p') + + def test_configure_insertofftime(self): + widget = self.create() + self.checkIntegerParam(widget, 'insertofftime', 100) + + def test_configure_insertontime(self): + widget = self.create() + self.checkIntegerParam(widget, 'insertontime', 100) + + def test_configure_insertwidth(self): + widget = self.create() + self.checkPixelsParam(widget, 'insertwidth', 1.3, 2.6, -2, '10p') + + def test_configure_jump(self): + widget = self.create() + self.checkBooleanParam(widget, 'jump') + + def test_configure_justify(self): + widget = self.create() + self.checkEnumParam(widget, 'justify', 'left', 'right', 'center', + errmsg='bad justification "{}": must be ' + 'left, right, or center') + self.checkInvalidParam(widget, 'justify', '', + errmsg='ambiguous justification "": must be ' + 'left, right, or center') + + def test_configure_orient(self): + widget = self.create() + self.assertEqual(str(widget['orient']), self.default_orient) + self.checkEnumParam(widget, 'orient', 'horizontal', 'vertical') + + def test_configure_padx(self): + 
widget = self.create() + self.checkPixelsParam(widget, 'padx', 3, 4.4, 5.6, -2, '12m', + conv=self._conv_pad_pixels) + + def test_configure_pady(self): + widget = self.create() + self.checkPixelsParam(widget, 'pady', 3, 4.4, 5.6, -2, '12m', + conv=self._conv_pad_pixels) + + def test_configure_relief(self): + widget = self.create() + self.checkReliefParam(widget, 'relief') + + def test_configure_repeatdelay(self): + widget = self.create() + self.checkIntegerParam(widget, 'repeatdelay', -500, 500) + + def test_configure_repeatinterval(self): + widget = self.create() + self.checkIntegerParam(widget, 'repeatinterval', -500, 500) + + def test_configure_selectbackground(self): + widget = self.create() + self.checkColorParam(widget, 'selectbackground') + + def test_configure_selectborderwidth(self): + widget = self.create() + self.checkPixelsParam(widget, 'selectborderwidth', 1.3, 2.6, -2, '10p') + + def test_configure_selectforeground(self): + widget = self.create() + self.checkColorParam(widget, 'selectforeground') + + def test_configure_setgrid(self): + widget = self.create() + self.checkBooleanParam(widget, 'setgrid') + + def test_configure_state(self): + widget = self.create() + self.checkEnumParam(widget, 'state', 'active', 'disabled', 'normal') + + def test_configure_takefocus(self): + widget = self.create() + self.checkParams(widget, 'takefocus', '0', '1', '') + + def test_configure_text(self): + widget = self.create() + self.checkParams(widget, 'text', '', 'any string') + + def test_configure_textvariable(self): + widget = self.create() + var = tkinter.StringVar(self.root) + self.checkVariableParam(widget, 'textvariable', var) + + def test_configure_troughcolor(self): + widget = self.create() + self.checkColorParam(widget, 'troughcolor') + + def test_configure_underline(self): + widget = self.create() + self.checkIntegerParam(widget, 'underline', 0, 1, 10) + + def test_configure_wraplength(self): + widget = self.create() + self.checkPixelsParam(widget, 'wraplength', 100) + + def test_configure_xscrollcommand(self): + widget = self.create() + self.checkCommandParam(widget, 'xscrollcommand') + + def test_configure_yscrollcommand(self): + widget = self.create() + self.checkCommandParam(widget, 'yscrollcommand') + + # non-standard but common options + + def test_configure_command(self): + widget = self.create() + self.checkCommandParam(widget, 'command') + + def test_configure_indicatoron(self): + widget = self.create() + self.checkBooleanParam(widget, 'indicatoron') + + def test_configure_offrelief(self): + widget = self.create() + self.checkReliefParam(widget, 'offrelief') + + def test_configure_overrelief(self): + widget = self.create() + self.checkReliefParam(widget, 'overrelief') + + def test_configure_selectcolor(self): + widget = self.create() + self.checkColorParam(widget, 'selectcolor') + + def test_configure_selectimage(self): + widget = self.create() + self.checkImageParam(widget, 'selectimage') + + def test_configure_tristateimage(self): + widget = self.create() + self.checkImageParam(widget, 'tristateimage') + + def test_configure_tristatevalue(self): + widget = self.create() + self.checkParam(widget, 'tristatevalue', 'unknowable') + + def test_configure_variable(self): + widget = self.create() + var = tkinter.DoubleVar(self.root) + self.checkVariableParam(widget, 'variable', var) + + +class IntegerSizeTests: + def test_configure_height(self): + widget = self.create() + self.checkIntegerParam(widget, 'height', 100, -100, 0) + + def test_configure_width(self): + widget = 
self.create() + self.checkIntegerParam(widget, 'width', 402, -402, 0) + + +class PixelSizeTests: + def test_configure_height(self): + widget = self.create() + self.checkPixelsParam(widget, 'height', 100, 101.2, 102.6, -100, 0, '3c') + + def test_configure_width(self): + widget = self.create() + self.checkPixelsParam(widget, 'width', 402, 403.4, 404.6, -402, 0, '5i') + + +def add_standard_options(*source_classes): + # This decorator adds test_configure_xxx methods from source classes for + # every xxx option in the OPTIONS class attribute if they are not defined + # explicitly. + def decorator(cls): + for option in cls.OPTIONS: + methodname = 'test_configure_' + option + if not hasattr(cls, methodname): + for source_class in source_classes: + if hasattr(source_class, methodname): + setattr(cls, methodname, + getattr(source_class, methodname)) + break + else: + def test(self, option=option): + widget = self.create() + widget[option] + raise AssertionError('Option "%s" is not tested in %s' % + (option, cls.__name__)) + test.__name__ = methodname + setattr(cls, methodname, test) + return cls + return decorator + +def setUpModule(): + if test.support.verbose: + tcl = tkinter.Tcl() + print('patchlevel =', tcl.call('info', 'patchlevel'), flush=True) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/tix.py python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/tix.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/tix.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/tix.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,1948 @@ +# Tix.py -- Tix widget wrappers. +# +# For Tix, see http://tix.sourceforge.net +# +# - Sudhir Shenoy (sshenoy@gol.com), Dec. 1995. +# based on an idea of Jean-Marc Lugrin (lugrin@ms.com) +# +# NOTE: In order to minimize changes to Tkinter.py, some of the code here +# (TixWidget.__init__) has been taken from Tkinter (Widget.__init__) +# and will break if there are major changes in Tkinter. +# +# The Tix widgets are represented by a class hierarchy in python with proper +# inheritance of base classes. +# +# As a result after creating a 'w = StdButtonBox', I can write +# w.ok['text'] = 'Who Cares' +# or w.ok['bg'] = w['bg'] +# or even w.ok.invoke() +# etc. +# +# Compare the demo tixwidgets.py to the original Tcl program and you will +# appreciate the advantages. +# +# NOTE: This module is deprecated since Python 3.6. 
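As a quick illustration of the attribute-style subwidget access described in the header above, here is a minimal sketch; it assumes a Tix-capable Tk build and an available display, and StdButtonBox is one of the widget classes defined further down in this module:

    from tkinter import tix        # importing tkinter.tix emits a DeprecationWarning (see below)

    root = tix.Tk()
    box = tix.StdButtonBox(root)
    box.pack()
    box.ok['text'] = 'Who Cares'   # subwidget options read and write like ordinary widget options
    box.ok.invoke()                # subwidget methods are available as well
    root.mainloop()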
+ +import os +import warnings +import tkinter +from tkinter import * +from tkinter import _cnfmerge + +warnings.warn( + 'The Tix Tk extension is unmaintained, and the tkinter.tix wrapper module' + ' is deprecated in favor of tkinter.ttk', + DeprecationWarning, + stacklevel=2, + ) + +# Some more constants (for consistency with Tkinter) +WINDOW = 'window' +TEXT = 'text' +STATUS = 'status' +IMMEDIATE = 'immediate' +IMAGE = 'image' +IMAGETEXT = 'imagetext' +BALLOON = 'balloon' +AUTO = 'auto' +ACROSSTOP = 'acrosstop' + +# A few useful constants for the Grid widget +ASCII = 'ascii' +CELL = 'cell' +COLUMN = 'column' +DECREASING = 'decreasing' +INCREASING = 'increasing' +INTEGER = 'integer' +MAIN = 'main' +MAX = 'max' +REAL = 'real' +ROW = 'row' +S_REGION = 's-region' +X_REGION = 'x-region' +Y_REGION = 'y-region' + +# Some constants used by Tkinter dooneevent() +TCL_DONT_WAIT = 1 << 1 +TCL_WINDOW_EVENTS = 1 << 2 +TCL_FILE_EVENTS = 1 << 3 +TCL_TIMER_EVENTS = 1 << 4 +TCL_IDLE_EVENTS = 1 << 5 +TCL_ALL_EVENTS = 0 + +# BEWARE - this is implemented by copying some code from the Widget class +# in Tkinter (to override Widget initialization) and is therefore +# liable to break. + +# Could probably add this to Tkinter.Misc +class tixCommand: + """The tix commands provide access to miscellaneous elements + of Tix's internal state and the Tix application context. + Most of the information manipulated by these commands pertains + to the application as a whole, or to a screen or + display, rather than to a particular window. + + This is a mixin class, assumed to be mixed to Tkinter.Tk + that supports the self.tk.call method. + """ + + def tix_addbitmapdir(self, directory): + """Tix maintains a list of directories under which + the tix_getimage and tix_getbitmap commands will + search for image files. The standard bitmap directory + is $TIX_LIBRARY/bitmaps. The addbitmapdir command + adds directory into this list. By using this + command, the image files of an applications can + also be located using the tix_getimage or tix_getbitmap + command. + """ + return self.tk.call('tix', 'addbitmapdir', directory) + + def tix_cget(self, option): + """Returns the current value of the configuration + option given by option. Option may be any of the + options described in the CONFIGURATION OPTIONS section. + """ + return self.tk.call('tix', 'cget', option) + + def tix_configure(self, cnf=None, **kw): + """Query or modify the configuration options of the Tix application + context. If no option is specified, returns a dictionary all of the + available options. If option is specified with no value, then the + command returns a list describing the one named option (this list + will be identical to the corresponding sublist of the value + returned if no option is specified). If one or more option-value + pairs are specified, then the command modifies the given option(s) + to have the given value(s); in this case the command returns an + empty string. Option may be any of the configuration options. + """ + # Copied from Tkinter.py + if kw: + cnf = _cnfmerge((cnf, kw)) + elif cnf: + cnf = _cnfmerge(cnf) + if cnf is None: + return self._getconfigure('tix', 'configure') + if isinstance(cnf, str): + return self._getconfigure1('tix', 'configure', '-'+cnf) + return self.tk.call(('tix', 'configure') + self._options(cnf)) + + def tix_filedialog(self, dlgclass=None): + """Returns the file selection dialog that may be shared among + different calls from this application. 
This command will create a + file selection dialog widget when it is called the first time. This + dialog will be returned by all subsequent calls to tix_filedialog. + An optional dlgclass parameter can be passed to specified what type + of file selection dialog widget is desired. Possible options are + tix FileSelectDialog or tixExFileSelectDialog. + """ + if dlgclass is not None: + return self.tk.call('tix', 'filedialog', dlgclass) + else: + return self.tk.call('tix', 'filedialog') + + def tix_getbitmap(self, name): + """Locates a bitmap file of the name name.xpm or name in one of the + bitmap directories (see the tix_addbitmapdir command above). By + using tix_getbitmap, you can avoid hard coding the pathnames of the + bitmap files in your application. When successful, it returns the + complete pathname of the bitmap file, prefixed with the character + '@'. The returned value can be used to configure the -bitmap + option of the TK and Tix widgets. + """ + return self.tk.call('tix', 'getbitmap', name) + + def tix_getimage(self, name): + """Locates an image file of the name name.xpm, name.xbm or name.ppm + in one of the bitmap directories (see the addbitmapdir command + above). If more than one file with the same name (but different + extensions) exist, then the image type is chosen according to the + depth of the X display: xbm images are chosen on monochrome + displays and color images are chosen on color displays. By using + tix_ getimage, you can avoid hard coding the pathnames of the + image files in your application. When successful, this command + returns the name of the newly created image, which can be used to + configure the -image option of the Tk and Tix widgets. + """ + return self.tk.call('tix', 'getimage', name) + + def tix_option_get(self, name): + """Gets the options maintained by the Tix + scheme mechanism. Available options include: + + active_bg active_fg bg + bold_font dark1_bg dark1_fg + dark2_bg dark2_fg disabled_fg + fg fixed_font font + inactive_bg inactive_fg input1_bg + input2_bg italic_font light1_bg + light1_fg light2_bg light2_fg + menu_font output1_bg output2_bg + select_bg select_fg selector + """ + # could use self.tk.globalgetvar('tixOption', name) + return self.tk.call('tix', 'option', 'get', name) + + def tix_resetoptions(self, newScheme, newFontSet, newScmPrio=None): + """Resets the scheme and fontset of the Tix application to + newScheme and newFontSet, respectively. This affects only those + widgets created after this call. Therefore, it is best to call the + resetoptions command before the creation of any widgets in a Tix + application. + + The optional parameter newScmPrio can be given to reset the + priority level of the Tk options set by the Tix schemes. + + Because of the way Tk handles the X option database, after Tix has + been has imported and inited, it is not possible to reset the color + schemes and font sets using the tix config command. Instead, the + tix_resetoptions command must be used. + """ + if newScmPrio is not None: + return self.tk.call('tix', 'resetoptions', newScheme, newFontSet, newScmPrio) + else: + return self.tk.call('tix', 'resetoptions', newScheme, newFontSet) + +class Tk(tkinter.Tk, tixCommand): + """Toplevel widget of Tix which represents mostly the main window + of an application. 
It has an associated Tcl interpreter.""" + def __init__(self, screenName=None, baseName=None, className='Tix'): + tkinter.Tk.__init__(self, screenName, baseName, className) + tixlib = os.environ.get('TIX_LIBRARY') + self.tk.eval('global auto_path; lappend auto_path [file dir [info nameof]]') + if tixlib is not None: + self.tk.eval('global auto_path; lappend auto_path {%s}' % tixlib) + self.tk.eval('global tcl_pkgPath; lappend tcl_pkgPath {%s}' % tixlib) + # Load Tix - this should work dynamically or statically + # If it's static, tcl/tix8.1/pkgIndex.tcl should have + # 'load {} Tix' + # If it's dynamic under Unix, tcl/tix8.1/pkgIndex.tcl should have + # 'load libtix8.1.8.3.so Tix' + self.tk.eval('package require Tix') + + def destroy(self): + # For safety, remove the delete_window binding before destroy + self.protocol("WM_DELETE_WINDOW", "") + tkinter.Tk.destroy(self) + +# The Tix 'tixForm' geometry manager +class Form: + """The Tix Form geometry manager + + Widgets can be arranged by specifying attachments to other widgets. + See Tix documentation for complete details""" + + def config(self, cnf={}, **kw): + self.tk.call('tixForm', self._w, *self._options(cnf, kw)) + + form = config + + def __setitem__(self, key, value): + Form.form(self, {key: value}) + + def check(self): + return self.tk.call('tixForm', 'check', self._w) + + def forget(self): + self.tk.call('tixForm', 'forget', self._w) + + def grid(self, xsize=0, ysize=0): + if (not xsize) and (not ysize): + x = self.tk.call('tixForm', 'grid', self._w) + y = self.tk.splitlist(x) + z = () + for x in y: + z = z + (self.tk.getint(x),) + return z + return self.tk.call('tixForm', 'grid', self._w, xsize, ysize) + + def info(self, option=None): + if not option: + return self.tk.call('tixForm', 'info', self._w) + if option[0] != '-': + option = '-' + option + return self.tk.call('tixForm', 'info', self._w, option) + + def slaves(self): + return [self._nametowidget(x) for x in + self.tk.splitlist( + self.tk.call( + 'tixForm', 'slaves', self._w))] + + + +tkinter.Widget.__bases__ = tkinter.Widget.__bases__ + (Form,) + +class TixWidget(tkinter.Widget): + """A TixWidget class is used to package all (or most) Tix widgets. + + Widget initialization is extended in two ways: + 1) It is possible to give a list of options which must be part of + the creation command (so called Tix 'static' options). These cannot be + given as a 'config' command later. + 2) It is possible to give the name of an existing TK widget. These are + child widgets created automatically by a Tix mega-widget. The Tk call + to create these widgets is therefore bypassed in TixWidget.__init__ + + Both options are for use by subclasses only. + """ + def __init__ (self, master=None, widgetName=None, + static_options=None, cnf={}, kw={}): + # Merge keywords and dictionary arguments + if kw: + cnf = _cnfmerge((cnf, kw)) + else: + cnf = _cnfmerge(cnf) + + # Move static options into extra. static_options must be + # a list of keywords (or None). 
+ extra=() + + # 'options' is always a static option + if static_options: + static_options.append('options') + else: + static_options = ['options'] + + for k,v in list(cnf.items()): + if k in static_options: + extra = extra + ('-' + k, v) + del cnf[k] + + self.widgetName = widgetName + Widget._setup(self, master, cnf) + + # If widgetName is None, this is a dummy creation call where the + # corresponding Tk widget has already been created by Tix + if widgetName: + self.tk.call(widgetName, self._w, *extra) + + # Non-static options - to be done via a 'config' command + if cnf: + Widget.config(self, cnf) + + # Dictionary to hold subwidget names for easier access. We can't + # use the children list because the public Tix names may not be the + # same as the pathname component + self.subwidget_list = {} + + # We set up an attribute access function so that it is possible to + # do w.ok['text'] = 'Hello' rather than w.subwidget('ok')['text'] = 'Hello' + # when w is a StdButtonBox. + # We can even do w.ok.invoke() because w.ok is subclassed from the + # Button class if you go through the proper constructors + def __getattr__(self, name): + if name in self.subwidget_list: + return self.subwidget_list[name] + raise AttributeError(name) + + def set_silent(self, value): + """Set a variable without calling its action routine""" + self.tk.call('tixSetSilent', self._w, value) + + def subwidget(self, name): + """Return the named subwidget (which must have been created by + the sub-class).""" + n = self._subwidget_name(name) + if not n: + raise TclError("Subwidget " + name + " not child of " + self._name) + # Remove header of name and leading dot + n = n[len(self._w)+1:] + return self._nametowidget(n) + + def subwidgets_all(self): + """Return all subwidgets.""" + names = self._subwidget_names() + if not names: + return [] + retlist = [] + for name in names: + name = name[len(self._w)+1:] + try: + retlist.append(self._nametowidget(name)) + except: + # some of the widgets are unknown e.g. border in LabelFrame + pass + return retlist + + def _subwidget_name(self,name): + """Get a subwidget name (returns a String, not a Widget !)""" + try: + return self.tk.call(self._w, 'subwidget', name) + except TclError: + return None + + def _subwidget_names(self): + """Return the name of all subwidgets.""" + try: + x = self.tk.call(self._w, 'subwidgets', '-all') + return self.tk.splitlist(x) + except TclError: + return None + + def config_all(self, option, value): + """Set configuration options for all subwidgets (and self).""" + if option == '': + return + elif not isinstance(option, str): + option = repr(option) + if not isinstance(value, str): + value = repr(value) + names = self._subwidget_names() + for name in names: + self.tk.call(name, 'configure', '-' + option, value) + # These are missing from Tkinter + def image_create(self, imgtype, cnf={}, master=None, **kw): + if master is None: + master = self + if kw and cnf: cnf = _cnfmerge((cnf, kw)) + elif kw: cnf = kw + options = () + for k, v in cnf.items(): + if callable(v): + v = self._register(v) + options = options + ('-'+k, v) + return master.tk.call(('image', 'create', imgtype,) + options) + def image_delete(self, imgname): + try: + self.tk.call('image', 'delete', imgname) + except TclError: + # May happen if the root was destroyed + pass + +# Subwidgets are child widgets created automatically by mega-widgets. +# In python, we have to create these subwidgets manually to mirror their +# existence in Tk/Tix. +class TixSubWidget(TixWidget): + """Subwidget class. 
+ + This is used to mirror child widgets automatically created + by Tix/Tk as part of a mega-widget in Python (which is not informed + of this)""" + + def __init__(self, master, name, + destroy_physically=1, check_intermediate=1): + if check_intermediate: + path = master._subwidget_name(name) + try: + path = path[len(master._w)+1:] + plist = path.split('.') + except: + plist = [] + + if not check_intermediate: + # immediate descendant + TixWidget.__init__(self, master, None, None, {'name' : name}) + else: + # Ensure that the intermediate widgets exist + parent = master + for i in range(len(plist) - 1): + n = '.'.join(plist[:i+1]) + try: + w = master._nametowidget(n) + parent = w + except KeyError: + # Create the intermediate widget + parent = TixSubWidget(parent, plist[i], + destroy_physically=0, + check_intermediate=0) + # The Tk widget name is in plist, not in name + if plist: + name = plist[-1] + TixWidget.__init__(self, parent, None, None, {'name' : name}) + self.destroy_physically = destroy_physically + + def destroy(self): + # For some widgets e.g., a NoteBook, when we call destructors, + # we must be careful not to destroy the frame widget since this + # also destroys the parent NoteBook thus leading to an exception + # in Tkinter when it finally calls Tcl to destroy the NoteBook + for c in list(self.children.values()): c.destroy() + if self._name in self.master.children: + del self.master.children[self._name] + if self._name in self.master.subwidget_list: + del self.master.subwidget_list[self._name] + if self.destroy_physically: + # This is bypassed only for a few widgets + self.tk.call('destroy', self._w) + + +# Useful class to create a display style - later shared by many items. +# Contributed by Steffen Kremser +class DisplayStyle: + """DisplayStyle - handle configuration options shared by + (multiple) Display Items""" + + def __init__(self, itemtype, cnf={}, *, master=None, **kw): + if master is None: + if 'refwindow' in kw: + master = kw['refwindow'] + elif 'refwindow' in cnf: + master = cnf['refwindow'] + else: + master = tkinter._get_default_root('create display style') + self.tk = master.tk + self.stylename = self.tk.call('tixDisplayStyle', itemtype, + *self._options(cnf,kw) ) + + def __str__(self): + return self.stylename + + def _options(self, cnf, kw): + if kw and cnf: + cnf = _cnfmerge((cnf, kw)) + elif kw: + cnf = kw + opts = () + for k, v in cnf.items(): + opts = opts + ('-'+k, v) + return opts + + def delete(self): + self.tk.call(self.stylename, 'delete') + + def __setitem__(self,key,value): + self.tk.call(self.stylename, 'configure', '-%s'%key, value) + + def config(self, cnf={}, **kw): + return self._getconfigure( + self.stylename, 'configure', *self._options(cnf,kw)) + + def __getitem__(self,key): + return self.tk.call(self.stylename, 'cget', '-%s'%key) + + +###################################################### +### The Tix Widget classes - in alphabetical order ### +###################################################### + +class Balloon(TixWidget): + """Balloon help widget. 
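A typical use of the Balloon widget defined here is to attach pop-up help to an existing widget via bind_widget(), which appears just below. This is only a sketch; the balloonmsg keyword passed through to the bind call is an option of the underlying Tix tixBalloon command, not a name defined in this wrapper, so treat it as an assumption:

    import tkinter
    from tkinter import tix

    root = tix.Tk()
    save = tkinter.Button(root, text='Save')
    save.pack()
    tip = tix.Balloon(root)
    tip.bind_widget(save, balloonmsg='Write the file to disk')   # assumed Tix option
    root.mainloop()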
+ + Subwidget Class + --------- ----- + label Label + message Message""" + + # FIXME: It should inherit -superclass tixShell + def __init__(self, master=None, cnf={}, **kw): + # static seem to be -installcolormap -initwait -statusbar -cursor + static = ['options', 'installcolormap', 'initwait', 'statusbar', + 'cursor'] + TixWidget.__init__(self, master, 'tixBalloon', static, cnf, kw) + self.subwidget_list['label'] = _dummyLabel(self, 'label', + destroy_physically=0) + self.subwidget_list['message'] = _dummyLabel(self, 'message', + destroy_physically=0) + + def bind_widget(self, widget, cnf={}, **kw): + """Bind balloon widget to another. + One balloon widget may be bound to several widgets at the same time""" + self.tk.call(self._w, 'bind', widget._w, *self._options(cnf, kw)) + + def unbind_widget(self, widget): + self.tk.call(self._w, 'unbind', widget._w) + +class ButtonBox(TixWidget): + """ButtonBox - A container for pushbuttons. + Subwidgets are the buttons added with the add method. + """ + def __init__(self, master=None, cnf={}, **kw): + TixWidget.__init__(self, master, 'tixButtonBox', + ['orientation', 'options'], cnf, kw) + + def add(self, name, cnf={}, **kw): + """Add a button with given name to box.""" + + btn = self.tk.call(self._w, 'add', name, *self._options(cnf, kw)) + self.subwidget_list[name] = _dummyButton(self, name) + return btn + + def invoke(self, name): + if name in self.subwidget_list: + self.tk.call(self._w, 'invoke', name) + +class ComboBox(TixWidget): + """ComboBox - an Entry field with a dropdown menu. The user can select a + choice by either typing in the entry subwidget or selecting from the + listbox subwidget. + + Subwidget Class + --------- ----- + entry Entry + arrow Button + slistbox ScrolledListBox + tick Button + cross Button : present if created with the fancy option""" + + # FIXME: It should inherit -superclass tixLabelWidget + def __init__ (self, master=None, cnf={}, **kw): + TixWidget.__init__(self, master, 'tixComboBox', + ['editable', 'dropdown', 'fancy', 'options'], + cnf, kw) + self.subwidget_list['label'] = _dummyLabel(self, 'label') + self.subwidget_list['entry'] = _dummyEntry(self, 'entry') + self.subwidget_list['arrow'] = _dummyButton(self, 'arrow') + self.subwidget_list['slistbox'] = _dummyScrolledListBox(self, + 'slistbox') + try: + self.subwidget_list['tick'] = _dummyButton(self, 'tick') + self.subwidget_list['cross'] = _dummyButton(self, 'cross') + except TypeError: + # unavailable when -fancy not specified + pass + + # align + + def add_history(self, str): + self.tk.call(self._w, 'addhistory', str) + + def append_history(self, str): + self.tk.call(self._w, 'appendhistory', str) + + def insert(self, index, str): + self.tk.call(self._w, 'insert', index, str) + + def pick(self, index): + self.tk.call(self._w, 'pick', index) + +class Control(TixWidget): + """Control - An entry field with value change arrows. The user can + adjust the value by pressing the two arrow buttons or by entering + the value directly into the entry. The new value will be checked + against the user-defined upper and lower limits. 
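A sketch of how a Control is typically driven, using the increment() method defined below and the widget's value option; the label, min, max and step option names come from the Tix tixControl documentation rather than from this wrapper, so they are assumptions:

    from tkinter import tix

    root = tix.Tk()
    count = tix.Control(root, label='Count:', min=0, max=10, step=1)   # assumed Tix options
    count.pack()
    count.increment()          # same effect as pressing the up arrow
    print(count['value'])      # current value, checked against the min/max limits
    root.mainloop()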
+ + Subwidget Class + --------- ----- + incr Button + decr Button + entry Entry + label Label""" + + # FIXME: It should inherit -superclass tixLabelWidget + def __init__ (self, master=None, cnf={}, **kw): + TixWidget.__init__(self, master, 'tixControl', ['options'], cnf, kw) + self.subwidget_list['incr'] = _dummyButton(self, 'incr') + self.subwidget_list['decr'] = _dummyButton(self, 'decr') + self.subwidget_list['label'] = _dummyLabel(self, 'label') + self.subwidget_list['entry'] = _dummyEntry(self, 'entry') + + def decrement(self): + self.tk.call(self._w, 'decr') + + def increment(self): + self.tk.call(self._w, 'incr') + + def invoke(self): + self.tk.call(self._w, 'invoke') + + def update(self): + self.tk.call(self._w, 'update') + +class DirList(TixWidget): + """DirList - displays a list view of a directory, its previous + directories and its sub-directories. The user can choose one of + the directories displayed in the list or change to another directory. + + Subwidget Class + --------- ----- + hlist HList + hsb Scrollbar + vsb Scrollbar""" + + # FIXME: It should inherit -superclass tixScrolledHList + def __init__(self, master, cnf={}, **kw): + TixWidget.__init__(self, master, 'tixDirList', ['options'], cnf, kw) + self.subwidget_list['hlist'] = _dummyHList(self, 'hlist') + self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb') + self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb') + + def chdir(self, dir): + self.tk.call(self._w, 'chdir', dir) + +class DirTree(TixWidget): + """DirTree - Directory Listing in a hierarchical view. + Displays a tree view of a directory, its previous directories and its + sub-directories. The user can choose one of the directories displayed + in the list or change to another directory. + + Subwidget Class + --------- ----- + hlist HList + hsb Scrollbar + vsb Scrollbar""" + + # FIXME: It should inherit -superclass tixScrolledHList + def __init__(self, master, cnf={}, **kw): + TixWidget.__init__(self, master, 'tixDirTree', ['options'], cnf, kw) + self.subwidget_list['hlist'] = _dummyHList(self, 'hlist') + self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb') + self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb') + + def chdir(self, dir): + self.tk.call(self._w, 'chdir', dir) + +class DirSelectBox(TixWidget): + """DirSelectBox - Motif style file select box. + It is generally used for + the user to choose a file. FileSelectBox stores the files mostly + recently selected into a ComboBox widget so that they can be quickly + selected again. + + Subwidget Class + --------- ----- + selection ComboBox + filter ComboBox + dirlist ScrolledListBox + filelist ScrolledListBox""" + + def __init__(self, master, cnf={}, **kw): + TixWidget.__init__(self, master, 'tixDirSelectBox', ['options'], cnf, kw) + self.subwidget_list['dirlist'] = _dummyDirList(self, 'dirlist') + self.subwidget_list['dircbx'] = _dummyFileComboBox(self, 'dircbx') + +class ExFileSelectBox(TixWidget): + """ExFileSelectBox - MS Windows style file select box. + It provides a convenient method for the user to select files. 
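A sketch of embedding an ExFileSelectBox directly in a window; the command keyword is assumed to be the Tix-level -command option documented for tixExFileSelectBox (invoked with the chosen path) and is not defined in this module itself:

    from tkinter import tix

    root = tix.Tk()
    box = tix.ExFileSelectBox(root, command=lambda path: print('selected:', path))
    box.pack(fill='both', expand=1)
    # The subwidgets listed below are plain attributes, e.g. box.ok.invoke()
    # presses the OK button programmatically.
    root.mainloop()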
+ + Subwidget Class + --------- ----- + cancel Button + ok Button + hidden Checkbutton + types ComboBox + dir ComboBox + file ComboBox + dirlist ScrolledListBox + filelist ScrolledListBox""" + + def __init__(self, master, cnf={}, **kw): + TixWidget.__init__(self, master, 'tixExFileSelectBox', ['options'], cnf, kw) + self.subwidget_list['cancel'] = _dummyButton(self, 'cancel') + self.subwidget_list['ok'] = _dummyButton(self, 'ok') + self.subwidget_list['hidden'] = _dummyCheckbutton(self, 'hidden') + self.subwidget_list['types'] = _dummyComboBox(self, 'types') + self.subwidget_list['dir'] = _dummyComboBox(self, 'dir') + self.subwidget_list['dirlist'] = _dummyDirList(self, 'dirlist') + self.subwidget_list['file'] = _dummyComboBox(self, 'file') + self.subwidget_list['filelist'] = _dummyScrolledListBox(self, 'filelist') + + def filter(self): + self.tk.call(self._w, 'filter') + + def invoke(self): + self.tk.call(self._w, 'invoke') + + +# Should inherit from a Dialog class +class DirSelectDialog(TixWidget): + """The DirSelectDialog widget presents the directories in the file + system in a dialog window. The user can use this dialog window to + navigate through the file system to select the desired directory. + + Subwidgets Class + ---------- ----- + dirbox DirSelectDialog""" + + # FIXME: It should inherit -superclass tixDialogShell + def __init__(self, master, cnf={}, **kw): + TixWidget.__init__(self, master, 'tixDirSelectDialog', + ['options'], cnf, kw) + self.subwidget_list['dirbox'] = _dummyDirSelectBox(self, 'dirbox') + # cancel and ok buttons are missing + + def popup(self): + self.tk.call(self._w, 'popup') + + def popdown(self): + self.tk.call(self._w, 'popdown') + + +# Should inherit from a Dialog class +class ExFileSelectDialog(TixWidget): + """ExFileSelectDialog - MS Windows style file select dialog. + It provides a convenient method for the user to select files. + + Subwidgets Class + ---------- ----- + fsbox ExFileSelectBox""" + + # FIXME: It should inherit -superclass tixDialogShell + def __init__(self, master, cnf={}, **kw): + TixWidget.__init__(self, master, 'tixExFileSelectDialog', + ['options'], cnf, kw) + self.subwidget_list['fsbox'] = _dummyExFileSelectBox(self, 'fsbox') + + def popup(self): + self.tk.call(self._w, 'popup') + + def popdown(self): + self.tk.call(self._w, 'popdown') + +class FileSelectBox(TixWidget): + """ExFileSelectBox - Motif style file select box. + It is generally used for + the user to choose a file. FileSelectBox stores the files mostly + recently selected into a ComboBox widget so that they can be quickly + selected again. + + Subwidget Class + --------- ----- + selection ComboBox + filter ComboBox + dirlist ScrolledListBox + filelist ScrolledListBox""" + + def __init__(self, master, cnf={}, **kw): + TixWidget.__init__(self, master, 'tixFileSelectBox', ['options'], cnf, kw) + self.subwidget_list['dirlist'] = _dummyScrolledListBox(self, 'dirlist') + self.subwidget_list['filelist'] = _dummyScrolledListBox(self, 'filelist') + self.subwidget_list['filter'] = _dummyComboBox(self, 'filter') + self.subwidget_list['selection'] = _dummyComboBox(self, 'selection') + + def apply_filter(self): # name of subwidget is same as command + self.tk.call(self._w, 'filter') + + def invoke(self): + self.tk.call(self._w, 'invoke') + +# Should inherit from a Dialog class +class FileSelectDialog(TixWidget): + """FileSelectDialog - Motif style file select dialog. 
+ + Subwidgets Class + ---------- ----- + btns StdButtonBox + fsbox FileSelectBox""" + + # FIXME: It should inherit -superclass tixStdDialogShell + def __init__(self, master, cnf={}, **kw): + TixWidget.__init__(self, master, 'tixFileSelectDialog', + ['options'], cnf, kw) + self.subwidget_list['btns'] = _dummyStdButtonBox(self, 'btns') + self.subwidget_list['fsbox'] = _dummyFileSelectBox(self, 'fsbox') + + def popup(self): + self.tk.call(self._w, 'popup') + + def popdown(self): + self.tk.call(self._w, 'popdown') + +class FileEntry(TixWidget): + """FileEntry - Entry field with button that invokes a FileSelectDialog. + The user can type in the filename manually. Alternatively, the user can + press the button widget that sits next to the entry, which will bring + up a file selection dialog. + + Subwidgets Class + ---------- ----- + button Button + entry Entry""" + + # FIXME: It should inherit -superclass tixLabelWidget + def __init__(self, master, cnf={}, **kw): + TixWidget.__init__(self, master, 'tixFileEntry', + ['dialogtype', 'options'], cnf, kw) + self.subwidget_list['button'] = _dummyButton(self, 'button') + self.subwidget_list['entry'] = _dummyEntry(self, 'entry') + + def invoke(self): + self.tk.call(self._w, 'invoke') + + def file_dialog(self): + # FIXME: return python object + pass + +class HList(TixWidget, XView, YView): + """HList - Hierarchy display widget can be used to display any data + that have a hierarchical structure, for example, file system directory + trees. The list entries are indented and connected by branch lines + according to their places in the hierarchy. + + Subwidgets - None""" + + def __init__ (self,master=None,cnf={}, **kw): + TixWidget.__init__(self, master, 'tixHList', + ['columns', 'options'], cnf, kw) + + def add(self, entry, cnf={}, **kw): + return self.tk.call(self._w, 'add', entry, *self._options(cnf, kw)) + + def add_child(self, parent=None, cnf={}, **kw): + if parent is None: + parent = '' + return self.tk.call( + self._w, 'addchild', parent, *self._options(cnf, kw)) + + def anchor_set(self, entry): + self.tk.call(self._w, 'anchor', 'set', entry) + + def anchor_clear(self): + self.tk.call(self._w, 'anchor', 'clear') + + def column_width(self, col=0, width=None, chars=None): + if not chars: + return self.tk.call(self._w, 'column', 'width', col, width) + else: + return self.tk.call(self._w, 'column', 'width', col, + '-char', chars) + + def delete_all(self): + self.tk.call(self._w, 'delete', 'all') + + def delete_entry(self, entry): + self.tk.call(self._w, 'delete', 'entry', entry) + + def delete_offsprings(self, entry): + self.tk.call(self._w, 'delete', 'offsprings', entry) + + def delete_siblings(self, entry): + self.tk.call(self._w, 'delete', 'siblings', entry) + + def dragsite_set(self, index): + self.tk.call(self._w, 'dragsite', 'set', index) + + def dragsite_clear(self): + self.tk.call(self._w, 'dragsite', 'clear') + + def dropsite_set(self, index): + self.tk.call(self._w, 'dropsite', 'set', index) + + def dropsite_clear(self): + self.tk.call(self._w, 'dropsite', 'clear') + + def header_create(self, col, cnf={}, **kw): + self.tk.call(self._w, 'header', 'create', col, *self._options(cnf, kw)) + + def header_configure(self, col, cnf={}, **kw): + if cnf is None: + return self._getconfigure(self._w, 'header', 'configure', col) + self.tk.call(self._w, 'header', 'configure', col, + *self._options(cnf, kw)) + + def header_cget(self, col, opt): + return self.tk.call(self._w, 'header', 'cget', col, opt) + + def header_exists(self, col): + # A workaround to 
Tix library bug (issue #25464). + # The documented command is "exists", but only erroneous "exist" is + # accepted. + return self.tk.getboolean(self.tk.call(self._w, 'header', 'exist', col)) + header_exist = header_exists + + def header_delete(self, col): + self.tk.call(self._w, 'header', 'delete', col) + + def header_size(self, col): + return self.tk.call(self._w, 'header', 'size', col) + + def hide_entry(self, entry): + self.tk.call(self._w, 'hide', 'entry', entry) + + def indicator_create(self, entry, cnf={}, **kw): + self.tk.call( + self._w, 'indicator', 'create', entry, *self._options(cnf, kw)) + + def indicator_configure(self, entry, cnf={}, **kw): + if cnf is None: + return self._getconfigure( + self._w, 'indicator', 'configure', entry) + self.tk.call( + self._w, 'indicator', 'configure', entry, *self._options(cnf, kw)) + + def indicator_cget(self, entry, opt): + return self.tk.call(self._w, 'indicator', 'cget', entry, opt) + + def indicator_exists(self, entry): + return self.tk.call (self._w, 'indicator', 'exists', entry) + + def indicator_delete(self, entry): + self.tk.call(self._w, 'indicator', 'delete', entry) + + def indicator_size(self, entry): + return self.tk.call(self._w, 'indicator', 'size', entry) + + def info_anchor(self): + return self.tk.call(self._w, 'info', 'anchor') + + def info_bbox(self, entry): + return self._getints( + self.tk.call(self._w, 'info', 'bbox', entry)) or None + + def info_children(self, entry=None): + c = self.tk.call(self._w, 'info', 'children', entry) + return self.tk.splitlist(c) + + def info_data(self, entry): + return self.tk.call(self._w, 'info', 'data', entry) + + def info_dragsite(self): + return self.tk.call(self._w, 'info', 'dragsite') + + def info_dropsite(self): + return self.tk.call(self._w, 'info', 'dropsite') + + def info_exists(self, entry): + return self.tk.call(self._w, 'info', 'exists', entry) + + def info_hidden(self, entry): + return self.tk.call(self._w, 'info', 'hidden', entry) + + def info_next(self, entry): + return self.tk.call(self._w, 'info', 'next', entry) + + def info_parent(self, entry): + return self.tk.call(self._w, 'info', 'parent', entry) + + def info_prev(self, entry): + return self.tk.call(self._w, 'info', 'prev', entry) + + def info_selection(self): + c = self.tk.call(self._w, 'info', 'selection') + return self.tk.splitlist(c) + + def item_cget(self, entry, col, opt): + return self.tk.call(self._w, 'item', 'cget', entry, col, opt) + + def item_configure(self, entry, col, cnf={}, **kw): + if cnf is None: + return self._getconfigure(self._w, 'item', 'configure', entry, col) + self.tk.call(self._w, 'item', 'configure', entry, col, + *self._options(cnf, kw)) + + def item_create(self, entry, col, cnf={}, **kw): + self.tk.call( + self._w, 'item', 'create', entry, col, *self._options(cnf, kw)) + + def item_exists(self, entry, col): + return self.tk.call(self._w, 'item', 'exists', entry, col) + + def item_delete(self, entry, col): + self.tk.call(self._w, 'item', 'delete', entry, col) + + def entrycget(self, entry, opt): + return self.tk.call(self._w, 'entrycget', entry, opt) + + def entryconfigure(self, entry, cnf={}, **kw): + if cnf is None: + return self._getconfigure(self._w, 'entryconfigure', entry) + self.tk.call(self._w, 'entryconfigure', entry, + *self._options(cnf, kw)) + + def nearest(self, y): + return self.tk.call(self._w, 'nearest', y) + + def see(self, entry): + self.tk.call(self._w, 'see', entry) + + def selection_clear(self, cnf={}, **kw): + self.tk.call(self._w, 'selection', 'clear', *self._options(cnf, 
kw)) + + def selection_includes(self, entry): + return self.tk.call(self._w, 'selection', 'includes', entry) + + def selection_set(self, first, last=None): + self.tk.call(self._w, 'selection', 'set', first, last) + + def show_entry(self, entry): + return self.tk.call(self._w, 'show', 'entry', entry) + +class InputOnly(TixWidget): + """InputOnly - Invisible widget. Unix only. + + Subwidgets - None""" + + def __init__ (self,master=None,cnf={}, **kw): + TixWidget.__init__(self, master, 'tixInputOnly', None, cnf, kw) + +class LabelEntry(TixWidget): + """LabelEntry - Entry field with label. Packages an entry widget + and a label into one mega widget. It can be used to simplify the creation + of ``entry-form'' type of interface. + + Subwidgets Class + ---------- ----- + label Label + entry Entry""" + + def __init__ (self,master=None,cnf={}, **kw): + TixWidget.__init__(self, master, 'tixLabelEntry', + ['labelside','options'], cnf, kw) + self.subwidget_list['label'] = _dummyLabel(self, 'label') + self.subwidget_list['entry'] = _dummyEntry(self, 'entry') + +class LabelFrame(TixWidget): + """LabelFrame - Labelled Frame container. Packages a frame widget + and a label into one mega widget. To create widgets inside a + LabelFrame widget, one creates the new widgets relative to the + frame subwidget and manage them inside the frame subwidget. + + Subwidgets Class + ---------- ----- + label Label + frame Frame""" + + def __init__ (self,master=None,cnf={}, **kw): + TixWidget.__init__(self, master, 'tixLabelFrame', + ['labelside','options'], cnf, kw) + self.subwidget_list['label'] = _dummyLabel(self, 'label') + self.subwidget_list['frame'] = _dummyFrame(self, 'frame') + + +class ListNoteBook(TixWidget): + """A ListNoteBook widget is very similar to the TixNoteBook widget: + it can be used to display many windows in a limited space using a + notebook metaphor. The notebook is divided into a stack of pages + (windows). At one time only one of these pages can be shown. + The user can navigate through these pages by + choosing the name of the desired page in the hlist subwidget.""" + + def __init__(self, master, cnf={}, **kw): + TixWidget.__init__(self, master, 'tixListNoteBook', ['options'], cnf, kw) + # Is this necessary? It's not an exposed subwidget in Tix. + self.subwidget_list['pane'] = _dummyPanedWindow(self, 'pane', + destroy_physically=0) + self.subwidget_list['hlist'] = _dummyHList(self, 'hlist') + self.subwidget_list['shlist'] = _dummyScrolledHList(self, 'shlist') + + def add(self, name, cnf={}, **kw): + self.tk.call(self._w, 'add', name, *self._options(cnf, kw)) + self.subwidget_list[name] = TixSubWidget(self, name) + return self.subwidget_list[name] + + def page(self, name): + return self.subwidget(name) + + def pages(self): + # Can't call subwidgets_all directly because we don't want .nbframe + names = self.tk.splitlist(self.tk.call(self._w, 'pages')) + ret = [] + for x in names: + ret.append(self.subwidget(x)) + return ret + + def raise_page(self, name): # raise is a python keyword + self.tk.call(self._w, 'raise', name) + +class Meter(TixWidget): + """The Meter widget can be used to show the progress of a background + job which may take a long time to execute. + """ + + def __init__(self, master=None, cnf={}, **kw): + TixWidget.__init__(self, master, 'tixMeter', + ['options'], cnf, kw) + +class NoteBook(TixWidget): + """NoteBook - Multi-page container widget (tabbed notebook metaphor). 
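A sketch of the usual NoteBook workflow with the add() and raise_page() methods defined below; the label keyword is assumed to be the Tix -label page option, and LabelEntry's label option likewise comes from Tix rather than from this wrapper:

    from tkinter import tix

    root = tix.Tk()
    nb = tix.NoteBook(root)
    nb.pack(fill='both', expand=1)
    general = nb.add('general', label='General')     # add() returns the new page as a subwidget
    tix.LabelEntry(general, label='Name:').pack()    # assumed -label option
    nb.raise_page('general')
    root.mainloop()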
+ + Subwidgets Class + ---------- ----- + nbframe NoteBookFrame + page widgets added dynamically with the add method""" + + def __init__ (self,master=None,cnf={}, **kw): + TixWidget.__init__(self,master,'tixNoteBook', ['options'], cnf, kw) + self.subwidget_list['nbframe'] = TixSubWidget(self, 'nbframe', + destroy_physically=0) + + def add(self, name, cnf={}, **kw): + self.tk.call(self._w, 'add', name, *self._options(cnf, kw)) + self.subwidget_list[name] = TixSubWidget(self, name) + return self.subwidget_list[name] + + def delete(self, name): + self.tk.call(self._w, 'delete', name) + self.subwidget_list[name].destroy() + del self.subwidget_list[name] + + def page(self, name): + return self.subwidget(name) + + def pages(self): + # Can't call subwidgets_all directly because we don't want .nbframe + names = self.tk.splitlist(self.tk.call(self._w, 'pages')) + ret = [] + for x in names: + ret.append(self.subwidget(x)) + return ret + + def raise_page(self, name): # raise is a python keyword + self.tk.call(self._w, 'raise', name) + + def raised(self): + return self.tk.call(self._w, 'raised') + +class NoteBookFrame(TixWidget): + # FIXME: This is dangerous to expose to be called on its own. + pass + +class OptionMenu(TixWidget): + """OptionMenu - creates a menu button of options. + + Subwidget Class + --------- ----- + menubutton Menubutton + menu Menu""" + + def __init__(self, master, cnf={}, **kw): + TixWidget.__init__(self, master, 'tixOptionMenu', ['options'], cnf, kw) + self.subwidget_list['menubutton'] = _dummyMenubutton(self, 'menubutton') + self.subwidget_list['menu'] = _dummyMenu(self, 'menu') + + def add_command(self, name, cnf={}, **kw): + self.tk.call(self._w, 'add', 'command', name, *self._options(cnf, kw)) + + def add_separator(self, name, cnf={}, **kw): + self.tk.call(self._w, 'add', 'separator', name, *self._options(cnf, kw)) + + def delete(self, name): + self.tk.call(self._w, 'delete', name) + + def disable(self, name): + self.tk.call(self._w, 'disable', name) + + def enable(self, name): + self.tk.call(self._w, 'enable', name) + +class PanedWindow(TixWidget): + """PanedWindow - Multi-pane container widget + allows the user to interactively manipulate the sizes of several + panes. The panes can be arranged either vertically or horizontally.The + user changes the sizes of the panes by dragging the resize handle + between two panes. 
+ + Subwidgets Class + ---------- ----- + g/p widgets added dynamically with the add method.""" + + def __init__(self, master, cnf={}, **kw): + TixWidget.__init__(self, master, 'tixPanedWindow', ['orientation', 'options'], cnf, kw) + + # add delete forget panecget paneconfigure panes setsize + def add(self, name, cnf={}, **kw): + self.tk.call(self._w, 'add', name, *self._options(cnf, kw)) + self.subwidget_list[name] = TixSubWidget(self, name, + check_intermediate=0) + return self.subwidget_list[name] + + def delete(self, name): + self.tk.call(self._w, 'delete', name) + self.subwidget_list[name].destroy() + del self.subwidget_list[name] + + def forget(self, name): + self.tk.call(self._w, 'forget', name) + + def panecget(self, entry, opt): + return self.tk.call(self._w, 'panecget', entry, opt) + + def paneconfigure(self, entry, cnf={}, **kw): + if cnf is None: + return self._getconfigure(self._w, 'paneconfigure', entry) + self.tk.call(self._w, 'paneconfigure', entry, *self._options(cnf, kw)) + + def panes(self): + names = self.tk.splitlist(self.tk.call(self._w, 'panes')) + return [self.subwidget(x) for x in names] + +class PopupMenu(TixWidget): + """PopupMenu widget can be used as a replacement of the tk_popup command. + The advantage of the Tix PopupMenu widget is it requires less application + code to manipulate. + + + Subwidgets Class + ---------- ----- + menubutton Menubutton + menu Menu""" + + # FIXME: It should inherit -superclass tixShell + def __init__(self, master, cnf={}, **kw): + TixWidget.__init__(self, master, 'tixPopupMenu', ['options'], cnf, kw) + self.subwidget_list['menubutton'] = _dummyMenubutton(self, 'menubutton') + self.subwidget_list['menu'] = _dummyMenu(self, 'menu') + + def bind_widget(self, widget): + self.tk.call(self._w, 'bind', widget._w) + + def unbind_widget(self, widget): + self.tk.call(self._w, 'unbind', widget._w) + + def post_widget(self, widget, x, y): + self.tk.call(self._w, 'post', widget._w, x, y) + +class ResizeHandle(TixWidget): + """Internal widget to draw resize handles on Scrolled widgets.""" + def __init__(self, master, cnf={}, **kw): + # There seems to be a Tix bug rejecting the configure method + # Let's try making the flags -static + flags = ['options', 'command', 'cursorfg', 'cursorbg', + 'handlesize', 'hintcolor', 'hintwidth', + 'x', 'y'] + # In fact, x y height width are configurable + TixWidget.__init__(self, master, 'tixResizeHandle', + flags, cnf, kw) + + def attach_widget(self, widget): + self.tk.call(self._w, 'attachwidget', widget._w) + + def detach_widget(self, widget): + self.tk.call(self._w, 'detachwidget', widget._w) + + def hide(self, widget): + self.tk.call(self._w, 'hide', widget._w) + + def show(self, widget): + self.tk.call(self._w, 'show', widget._w) + +class ScrolledHList(TixWidget): + """ScrolledHList - HList with automatic scrollbars.""" + + # FIXME: It should inherit -superclass tixScrolledWidget + def __init__(self, master, cnf={}, **kw): + TixWidget.__init__(self, master, 'tixScrolledHList', ['options'], + cnf, kw) + self.subwidget_list['hlist'] = _dummyHList(self, 'hlist') + self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb') + self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb') + +class ScrolledListBox(TixWidget): + """ScrolledListBox - Listbox with automatic scrollbars.""" + + # FIXME: It should inherit -superclass tixScrolledWidget + def __init__(self, master, cnf={}, **kw): + TixWidget.__init__(self, master, 'tixScrolledListBox', ['options'], cnf, kw) + self.subwidget_list['listbox'] = 
_dummyListbox(self, 'listbox') + self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb') + self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb') + +class ScrolledText(TixWidget): + """ScrolledText - Text with automatic scrollbars.""" + + # FIXME: It should inherit -superclass tixScrolledWidget + def __init__(self, master, cnf={}, **kw): + TixWidget.__init__(self, master, 'tixScrolledText', ['options'], cnf, kw) + self.subwidget_list['text'] = _dummyText(self, 'text') + self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb') + self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb') + +class ScrolledTList(TixWidget): + """ScrolledTList - TList with automatic scrollbars.""" + + # FIXME: It should inherit -superclass tixScrolledWidget + def __init__(self, master, cnf={}, **kw): + TixWidget.__init__(self, master, 'tixScrolledTList', ['options'], + cnf, kw) + self.subwidget_list['tlist'] = _dummyTList(self, 'tlist') + self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb') + self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb') + +class ScrolledWindow(TixWidget): + """ScrolledWindow - Window with automatic scrollbars.""" + + # FIXME: It should inherit -superclass tixScrolledWidget + def __init__(self, master, cnf={}, **kw): + TixWidget.__init__(self, master, 'tixScrolledWindow', ['options'], cnf, kw) + self.subwidget_list['window'] = _dummyFrame(self, 'window') + self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb') + self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb') + +class Select(TixWidget): + """Select - Container of button subwidgets. It can be used to provide + radio-box or check-box style of selection options for the user. + + Subwidgets are buttons added dynamically using the add method.""" + + # FIXME: It should inherit -superclass tixLabelWidget + def __init__(self, master, cnf={}, **kw): + TixWidget.__init__(self, master, 'tixSelect', + ['allowzero', 'radio', 'orientation', 'labelside', + 'options'], + cnf, kw) + self.subwidget_list['label'] = _dummyLabel(self, 'label') + + def add(self, name, cnf={}, **kw): + self.tk.call(self._w, 'add', name, *self._options(cnf, kw)) + self.subwidget_list[name] = _dummyButton(self, name) + return self.subwidget_list[name] + + def invoke(self, name): + self.tk.call(self._w, 'invoke', name) + +class Shell(TixWidget): + """Toplevel window. + + Subwidgets - None""" + + def __init__ (self,master=None,cnf={}, **kw): + TixWidget.__init__(self, master, 'tixShell', ['options', 'title'], cnf, kw) + +class DialogShell(TixWidget): + """Toplevel window, with popup popdown and center methods. + It tells the window manager that it is a dialog window and should be + treated specially. The exact treatment depends on the treatment of + the window manager. 
+ + Subwidgets - None""" + + # FIXME: It should inherit from Shell + def __init__ (self,master=None,cnf={}, **kw): + TixWidget.__init__(self, master, + 'tixDialogShell', + ['options', 'title', 'mapped', + 'minheight', 'minwidth', + 'parent', 'transient'], cnf, kw) + + def popdown(self): + self.tk.call(self._w, 'popdown') + + def popup(self): + self.tk.call(self._w, 'popup') + + def center(self): + self.tk.call(self._w, 'center') + +class StdButtonBox(TixWidget): + """StdButtonBox - Standard Button Box (OK, Apply, Cancel and Help) """ + + def __init__(self, master=None, cnf={}, **kw): + TixWidget.__init__(self, master, 'tixStdButtonBox', + ['orientation', 'options'], cnf, kw) + self.subwidget_list['ok'] = _dummyButton(self, 'ok') + self.subwidget_list['apply'] = _dummyButton(self, 'apply') + self.subwidget_list['cancel'] = _dummyButton(self, 'cancel') + self.subwidget_list['help'] = _dummyButton(self, 'help') + + def invoke(self, name): + if name in self.subwidget_list: + self.tk.call(self._w, 'invoke', name) + +class TList(TixWidget, XView, YView): + """TList - Hierarchy display widget which can be + used to display data in a tabular format. The list entries of a TList + widget are similar to the entries in the Tk listbox widget. The main + differences are (1) the TList widget can display the list entries in a + two dimensional format and (2) you can use graphical images as well as + multiple colors and fonts for the list entries. + + Subwidgets - None""" + + def __init__ (self,master=None,cnf={}, **kw): + TixWidget.__init__(self, master, 'tixTList', ['options'], cnf, kw) + + def active_set(self, index): + self.tk.call(self._w, 'active', 'set', index) + + def active_clear(self): + self.tk.call(self._w, 'active', 'clear') + + def anchor_set(self, index): + self.tk.call(self._w, 'anchor', 'set', index) + + def anchor_clear(self): + self.tk.call(self._w, 'anchor', 'clear') + + def delete(self, from_, to=None): + self.tk.call(self._w, 'delete', from_, to) + + def dragsite_set(self, index): + self.tk.call(self._w, 'dragsite', 'set', index) + + def dragsite_clear(self): + self.tk.call(self._w, 'dragsite', 'clear') + + def dropsite_set(self, index): + self.tk.call(self._w, 'dropsite', 'set', index) + + def dropsite_clear(self): + self.tk.call(self._w, 'dropsite', 'clear') + + def insert(self, index, cnf={}, **kw): + self.tk.call(self._w, 'insert', index, *self._options(cnf, kw)) + + def info_active(self): + return self.tk.call(self._w, 'info', 'active') + + def info_anchor(self): + return self.tk.call(self._w, 'info', 'anchor') + + def info_down(self, index): + return self.tk.call(self._w, 'info', 'down', index) + + def info_left(self, index): + return self.tk.call(self._w, 'info', 'left', index) + + def info_right(self, index): + return self.tk.call(self._w, 'info', 'right', index) + + def info_selection(self): + c = self.tk.call(self._w, 'info', 'selection') + return self.tk.splitlist(c) + + def info_size(self): + return self.tk.call(self._w, 'info', 'size') + + def info_up(self, index): + return self.tk.call(self._w, 'info', 'up', index) + + def nearest(self, x, y): + return self.tk.call(self._w, 'nearest', x, y) + + def see(self, index): + self.tk.call(self._w, 'see', index) + + def selection_clear(self, cnf={}, **kw): + self.tk.call(self._w, 'selection', 'clear', *self._options(cnf, kw)) + + def selection_includes(self, index): + return self.tk.call(self._w, 'selection', 'includes', index) + + def selection_set(self, first, last=None): + self.tk.call(self._w, 'selection', 'set', first, last) 
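+
+# A minimal sketch of the subwidget access pattern shared by the megawidgets
+# above, kept as a comment so it has no import-time effect; it assumes a Tk
+# interpreter built with Tix support:
+#
+#     import tkinter.tix as tix
+#
+#     root = tix.Tk()
+#     box = tix.StdButtonBox(root)
+#     box.subwidget_list['ok'].config(text='Save')  # subwidgets are plain Tk Buttons
+#     box.pack()
+#     root.mainloop()
+#
+# box.invoke('ok') would then trigger whatever command the 'ok' button has,
+# since invoke() only acts on names present in subwidget_list.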
+ +class Tree(TixWidget): + """Tree - The tixTree widget can be used to display hierarchical + data in a tree form. The user can adjust + the view of the tree by opening or closing parts of the tree.""" + + # FIXME: It should inherit -superclass tixScrolledWidget + def __init__(self, master=None, cnf={}, **kw): + TixWidget.__init__(self, master, 'tixTree', + ['options'], cnf, kw) + self.subwidget_list['hlist'] = _dummyHList(self, 'hlist') + self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb') + self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb') + + def autosetmode(self): + '''This command calls the setmode method for all the entries in this + Tree widget: if an entry has no child entries, its mode is set to + none. Otherwise, if the entry has any hidden child entries, its mode is + set to open; otherwise its mode is set to close.''' + self.tk.call(self._w, 'autosetmode') + + def close(self, entrypath): + '''Close the entry given by entryPath if its mode is close.''' + self.tk.call(self._w, 'close', entrypath) + + def getmode(self, entrypath): + '''Returns the current mode of the entry given by entryPath.''' + return self.tk.call(self._w, 'getmode', entrypath) + + def open(self, entrypath): + '''Open the entry given by entryPath if its mode is open.''' + self.tk.call(self._w, 'open', entrypath) + + def setmode(self, entrypath, mode='none'): + '''This command is used to indicate whether the entry given by + entryPath has children entries and whether the children are visible. mode + must be one of open, close or none. If mode is set to open, a (+) + indicator is drawn next the entry. If mode is set to close, a (-) + indicator is drawn next the entry. If mode is set to none, no + indicators will be drawn for this entry. The default mode is none. The + open mode indicates the entry has hidden children and this entry can be + opened by the user. The close mode indicates that all the children of the + entry are now visible and the entry can be closed by the user.''' + self.tk.call(self._w, 'setmode', entrypath, mode) + + +# Could try subclassing Tree for CheckList - would need another arg to init +class CheckList(TixWidget): + """The CheckList widget + displays a list of items to be selected by the user. CheckList acts + similarly to the Tk checkbutton or radiobutton widgets, except it is + capable of handling many more items than checkbuttons or radiobuttons. + """ + # FIXME: It should inherit -superclass tixTree + def __init__(self, master=None, cnf={}, **kw): + TixWidget.__init__(self, master, 'tixCheckList', + ['options', 'radio'], cnf, kw) + self.subwidget_list['hlist'] = _dummyHList(self, 'hlist') + self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb') + self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb') + + def autosetmode(self): + '''This command calls the setmode method for all the entries in this + Tree widget: if an entry has no child entries, its mode is set to + none. 
Otherwise, if the entry has any hidden child entries, its mode is + set to open; otherwise its mode is set to close.''' + self.tk.call(self._w, 'autosetmode') + + def close(self, entrypath): + '''Close the entry given by entryPath if its mode is close.''' + self.tk.call(self._w, 'close', entrypath) + + def getmode(self, entrypath): + '''Returns the current mode of the entry given by entryPath.''' + return self.tk.call(self._w, 'getmode', entrypath) + + def open(self, entrypath): + '''Open the entry given by entryPath if its mode is open.''' + self.tk.call(self._w, 'open', entrypath) + + def getselection(self, mode='on'): + '''Returns a list of items whose status matches status. If status is + not specified, the list of items in the "on" status will be returned. + Mode can be on, off, default''' + return self.tk.splitlist(self.tk.call(self._w, 'getselection', mode)) + + def getstatus(self, entrypath): + '''Returns the current status of entryPath.''' + return self.tk.call(self._w, 'getstatus', entrypath) + + def setstatus(self, entrypath, mode='on'): + '''Sets the status of entryPath to be status. A bitmap will be + displayed next to the entry its status is on, off or default.''' + self.tk.call(self._w, 'setstatus', entrypath, mode) + + +########################################################################### +### The subclassing below is used to instantiate the subwidgets in each ### +### mega widget. This allows us to access their methods directly. ### +########################################################################### + +class _dummyButton(Button, TixSubWidget): + def __init__(self, master, name, destroy_physically=1): + TixSubWidget.__init__(self, master, name, destroy_physically) + +class _dummyCheckbutton(Checkbutton, TixSubWidget): + def __init__(self, master, name, destroy_physically=1): + TixSubWidget.__init__(self, master, name, destroy_physically) + +class _dummyEntry(Entry, TixSubWidget): + def __init__(self, master, name, destroy_physically=1): + TixSubWidget.__init__(self, master, name, destroy_physically) + +class _dummyFrame(Frame, TixSubWidget): + def __init__(self, master, name, destroy_physically=1): + TixSubWidget.__init__(self, master, name, destroy_physically) + +class _dummyLabel(Label, TixSubWidget): + def __init__(self, master, name, destroy_physically=1): + TixSubWidget.__init__(self, master, name, destroy_physically) + +class _dummyListbox(Listbox, TixSubWidget): + def __init__(self, master, name, destroy_physically=1): + TixSubWidget.__init__(self, master, name, destroy_physically) + +class _dummyMenu(Menu, TixSubWidget): + def __init__(self, master, name, destroy_physically=1): + TixSubWidget.__init__(self, master, name, destroy_physically) + +class _dummyMenubutton(Menubutton, TixSubWidget): + def __init__(self, master, name, destroy_physically=1): + TixSubWidget.__init__(self, master, name, destroy_physically) + +class _dummyScrollbar(Scrollbar, TixSubWidget): + def __init__(self, master, name, destroy_physically=1): + TixSubWidget.__init__(self, master, name, destroy_physically) + +class _dummyText(Text, TixSubWidget): + def __init__(self, master, name, destroy_physically=1): + TixSubWidget.__init__(self, master, name, destroy_physically) + +class _dummyScrolledListBox(ScrolledListBox, TixSubWidget): + def __init__(self, master, name, destroy_physically=1): + TixSubWidget.__init__(self, master, name, destroy_physically) + self.subwidget_list['listbox'] = _dummyListbox(self, 'listbox') + self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb') + 
self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb') + +class _dummyHList(HList, TixSubWidget): + def __init__(self, master, name, destroy_physically=1): + TixSubWidget.__init__(self, master, name, destroy_physically) + +class _dummyScrolledHList(ScrolledHList, TixSubWidget): + def __init__(self, master, name, destroy_physically=1): + TixSubWidget.__init__(self, master, name, destroy_physically) + self.subwidget_list['hlist'] = _dummyHList(self, 'hlist') + self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb') + self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb') + +class _dummyTList(TList, TixSubWidget): + def __init__(self, master, name, destroy_physically=1): + TixSubWidget.__init__(self, master, name, destroy_physically) + +class _dummyComboBox(ComboBox, TixSubWidget): + def __init__(self, master, name, destroy_physically=1): + TixSubWidget.__init__(self, master, name, ['fancy',destroy_physically]) + self.subwidget_list['label'] = _dummyLabel(self, 'label') + self.subwidget_list['entry'] = _dummyEntry(self, 'entry') + self.subwidget_list['arrow'] = _dummyButton(self, 'arrow') + + self.subwidget_list['slistbox'] = _dummyScrolledListBox(self, + 'slistbox') + try: + self.subwidget_list['tick'] = _dummyButton(self, 'tick') + #cross Button : present if created with the fancy option + self.subwidget_list['cross'] = _dummyButton(self, 'cross') + except TypeError: + # unavailable when -fancy not specified + pass + +class _dummyDirList(DirList, TixSubWidget): + def __init__(self, master, name, destroy_physically=1): + TixSubWidget.__init__(self, master, name, destroy_physically) + self.subwidget_list['hlist'] = _dummyHList(self, 'hlist') + self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb') + self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb') + +class _dummyDirSelectBox(DirSelectBox, TixSubWidget): + def __init__(self, master, name, destroy_physically=1): + TixSubWidget.__init__(self, master, name, destroy_physically) + self.subwidget_list['dirlist'] = _dummyDirList(self, 'dirlist') + self.subwidget_list['dircbx'] = _dummyFileComboBox(self, 'dircbx') + +class _dummyExFileSelectBox(ExFileSelectBox, TixSubWidget): + def __init__(self, master, name, destroy_physically=1): + TixSubWidget.__init__(self, master, name, destroy_physically) + self.subwidget_list['cancel'] = _dummyButton(self, 'cancel') + self.subwidget_list['ok'] = _dummyButton(self, 'ok') + self.subwidget_list['hidden'] = _dummyCheckbutton(self, 'hidden') + self.subwidget_list['types'] = _dummyComboBox(self, 'types') + self.subwidget_list['dir'] = _dummyComboBox(self, 'dir') + self.subwidget_list['dirlist'] = _dummyScrolledListBox(self, 'dirlist') + self.subwidget_list['file'] = _dummyComboBox(self, 'file') + self.subwidget_list['filelist'] = _dummyScrolledListBox(self, 'filelist') + +class _dummyFileSelectBox(FileSelectBox, TixSubWidget): + def __init__(self, master, name, destroy_physically=1): + TixSubWidget.__init__(self, master, name, destroy_physically) + self.subwidget_list['dirlist'] = _dummyScrolledListBox(self, 'dirlist') + self.subwidget_list['filelist'] = _dummyScrolledListBox(self, 'filelist') + self.subwidget_list['filter'] = _dummyComboBox(self, 'filter') + self.subwidget_list['selection'] = _dummyComboBox(self, 'selection') + +class _dummyFileComboBox(ComboBox, TixSubWidget): + def __init__(self, master, name, destroy_physically=1): + TixSubWidget.__init__(self, master, name, destroy_physically) + self.subwidget_list['dircbx'] = _dummyComboBox(self, 'dircbx') + +class 
_dummyStdButtonBox(StdButtonBox, TixSubWidget): + def __init__(self, master, name, destroy_physically=1): + TixSubWidget.__init__(self, master, name, destroy_physically) + self.subwidget_list['ok'] = _dummyButton(self, 'ok') + self.subwidget_list['apply'] = _dummyButton(self, 'apply') + self.subwidget_list['cancel'] = _dummyButton(self, 'cancel') + self.subwidget_list['help'] = _dummyButton(self, 'help') + +class _dummyNoteBookFrame(NoteBookFrame, TixSubWidget): + def __init__(self, master, name, destroy_physically=0): + TixSubWidget.__init__(self, master, name, destroy_physically) + +class _dummyPanedWindow(PanedWindow, TixSubWidget): + def __init__(self, master, name, destroy_physically=1): + TixSubWidget.__init__(self, master, name, destroy_physically) + +######################## +### Utility Routines ### +######################## + +#mike Should tixDestroy be exposed as a wrapper? - but not for widgets. + +def OptionName(widget): + '''Returns the qualified path name for the widget. Normally used to set + default options for subwidgets. See tixwidgets.py''' + return widget.tk.call('tixOptionName', widget._w) + +# Called with a dictionary argument of the form +# {'*.c':'C source files', '*.txt':'Text Files', '*':'All files'} +# returns a string which can be used to configure the fsbox file types +# in an ExFileSelectBox. i.e., +# '{{*} {* - All files}} {{*.c} {*.c - C source files}} {{*.txt} {*.txt - Text Files}}' +def FileTypeList(dict): + s = '' + for type in dict.keys(): + s = s + '{{' + type + '} {' + type + ' - ' + dict[type] + '}} ' + return s + +# Still to be done: +# tixIconView +class CObjView(TixWidget): + """This file implements the Canvas Object View widget. This is a base + class of IconView. It implements automatic placement/adjustment of the + scrollbars according to the canvas objects inside the canvas subwidget. + The scrollbars are adjusted so that the canvas is just large enough + to see all the objects. + """ + # FIXME: It should inherit -superclass tixScrolledWidget + pass + + +class Grid(TixWidget, XView, YView): + '''The Tix Grid command creates a new window and makes it into a + tixGrid widget. Additional options, may be specified on the command + line or in the option database to configure aspects such as its cursor + and relief. + + A Grid widget displays its contents in a two dimensional grid of cells. + Each cell may contain one Tix display item, which may be in text, + graphics or other formats. See the DisplayStyle class for more information + about Tix display items. Individual cells, or groups of cells, can be + formatted with a wide range of attributes, such as its color, relief and + border. 
+ + Subwidgets - None''' + # valid specific resources as of Tk 8.4 + # editdonecmd, editnotifycmd, floatingcols, floatingrows, formatcmd, + # highlightbackground, highlightcolor, leftmargin, itemtype, selectmode, + # selectunit, topmargin, + def __init__(self, master=None, cnf={}, **kw): + static= [] + self.cnf= cnf + TixWidget.__init__(self, master, 'tixGrid', static, cnf, kw) + + # valid options as of Tk 8.4 + # anchor, bdtype, cget, configure, delete, dragsite, dropsite, entrycget, + # edit, entryconfigure, format, geometryinfo, info, index, move, nearest, + # selection, set, size, unset, xview, yview + def anchor_clear(self): + """Removes the selection anchor.""" + self.tk.call(self, 'anchor', 'clear') + + def anchor_get(self): + "Get the (x,y) coordinate of the current anchor cell" + return self._getints(self.tk.call(self, 'anchor', 'get')) + + def anchor_set(self, x, y): + """Set the selection anchor to the cell at (x, y).""" + self.tk.call(self, 'anchor', 'set', x, y) + + def delete_row(self, from_, to=None): + """Delete rows between from_ and to inclusive. + If to is not provided, delete only row at from_""" + if to is None: + self.tk.call(self, 'delete', 'row', from_) + else: + self.tk.call(self, 'delete', 'row', from_, to) + + def delete_column(self, from_, to=None): + """Delete columns between from_ and to inclusive. + If to is not provided, delete only column at from_""" + if to is None: + self.tk.call(self, 'delete', 'column', from_) + else: + self.tk.call(self, 'delete', 'column', from_, to) + + def edit_apply(self): + """If any cell is being edited, de-highlight the cell and applies + the changes.""" + self.tk.call(self, 'edit', 'apply') + + def edit_set(self, x, y): + """Highlights the cell at (x, y) for editing, if the -editnotify + command returns True for this cell.""" + self.tk.call(self, 'edit', 'set', x, y) + + def entrycget(self, x, y, option): + "Get the option value for cell at (x,y)" + if option and option[0] != '-': + option = '-' + option + return self.tk.call(self, 'entrycget', x, y, option) + + def entryconfigure(self, x, y, cnf=None, **kw): + return self._configure(('entryconfigure', x, y), cnf, kw) + + # def format + # def index + + def info_exists(self, x, y): + "Return True if display item exists at (x,y)" + return self._getboolean(self.tk.call(self, 'info', 'exists', x, y)) + + def info_bbox(self, x, y): + # This seems to always return '', at least for 'text' displayitems + return self.tk.call(self, 'info', 'bbox', x, y) + + def move_column(self, from_, to, offset): + """Moves the range of columns from position FROM through TO by + the distance indicated by OFFSET. For example, move_column(2, 4, 1) + moves the columns 2,3,4 to columns 3,4,5.""" + self.tk.call(self, 'move', 'column', from_, to, offset) + + def move_row(self, from_, to, offset): + """Moves the range of rows from position FROM through TO by + the distance indicated by OFFSET. 
+ For example, move_row(2, 4, 1) moves the rows 2,3,4 to rows 3,4,5.""" + self.tk.call(self, 'move', 'row', from_, to, offset) + + def nearest(self, x, y): + "Return coordinate of cell nearest pixel coordinate (x,y)" + return self._getints(self.tk.call(self, 'nearest', x, y)) + + # def selection adjust + # def selection clear + # def selection includes + # def selection set + # def selection toggle + + def set(self, x, y, itemtype=None, **kw): + args= self._options(self.cnf, kw) + if itemtype is not None: + args= ('-itemtype', itemtype) + args + self.tk.call(self, 'set', x, y, *args) + + def size_column(self, index, **kw): + """Queries or sets the size of the column given by + INDEX. INDEX may be any non-negative + integer that gives the position of a given column. + INDEX can also be the string "default"; in this case, this command + queries or sets the default size of all columns. + When no option-value pair is given, this command returns a tuple + containing the current size setting of the given column. When + option-value pairs are given, the corresponding options of the + size setting of the given column are changed. Options may be one + of the following: + pad0 pixels + Specifies the paddings to the left of a column. + pad1 pixels + Specifies the paddings to the right of a column. + size val + Specifies the width of a column. Val may be: + "auto" -- the width of the column is set to the + width of the widest cell in the column; + a valid Tk screen distance unit; + or a real number following by the word chars + (e.g. 3.4chars) that sets the width of the column to the + given number of characters.""" + return self.tk.splitlist(self.tk.call(self._w, 'size', 'column', index, + *self._options({}, kw))) + + def size_row(self, index, **kw): + """Queries or sets the size of the row given by + INDEX. INDEX may be any non-negative + integer that gives the position of a given row . + INDEX can also be the string "default"; in this case, this command + queries or sets the default size of all rows. + When no option-value pair is given, this command returns a list con- + taining the current size setting of the given row . When option-value + pairs are given, the corresponding options of the size setting of the + given row are changed. Options may be one of the following: + pad0 pixels + Specifies the paddings to the top of a row. + pad1 pixels + Specifies the paddings to the bottom of a row. + size val + Specifies the height of a row. Val may be: + "auto" -- the height of the row is set to the + height of the highest cell in the row; + a valid Tk screen distance unit; + or a real number following by the word chars + (e.g. 
3.4chars) that sets the height of the row to the + given number of characters.""" + return self.tk.splitlist(self.tk.call( + self, 'size', 'row', index, *self._options({}, kw))) + + def unset(self, x, y): + """Clears the cell at (x, y) by removing its display item.""" + self.tk.call(self._w, 'unset', x, y) + + +class ScrolledGrid(Grid): + '''Scrolled Grid widgets''' + + # FIXME: It should inherit -superclass tixScrolledWidget + def __init__(self, master=None, cnf={}, **kw): + static= [] + self.cnf= cnf + TixWidget.__init__(self, master, 'tixScrolledGrid', static, cnf, kw) diff -Nru python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/ttk.py python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/ttk.py --- python3-stdlib-extensions-3.9.7/3.11/Lib/tkinter/ttk.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Lib/tkinter/ttk.py 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,1637 @@ +"""Ttk wrapper. + +This module provides classes to allow using Tk themed widget set. + +Ttk is based on a revised and enhanced version of +TIP #48 (http://tip.tcl.tk/48) specified style engine. + +Its basic idea is to separate, to the extent possible, the code +implementing a widget's behavior from the code implementing its +appearance. Widget class bindings are primarily responsible for +maintaining the widget state and invoking callbacks, all aspects +of the widgets appearance lies at Themes. +""" + +__version__ = "0.3.1" + +__author__ = "Guilherme Polo " + +__all__ = ["Button", "Checkbutton", "Combobox", "Entry", "Frame", "Label", + "Labelframe", "LabelFrame", "Menubutton", "Notebook", "Panedwindow", + "PanedWindow", "Progressbar", "Radiobutton", "Scale", "Scrollbar", + "Separator", "Sizegrip", "Spinbox", "Style", "Treeview", + # Extensions + "LabeledScale", "OptionMenu", + # functions + "tclobjs_to_py", "setup_master"] + +import tkinter +from tkinter import _flatten, _join, _stringify, _splitdict + + +def _format_optvalue(value, script=False): + """Internal function.""" + if script: + # if caller passes a Tcl script to tk.call, all the values need to + # be grouped into words (arguments to a command in Tcl dialect) + value = _stringify(value) + elif isinstance(value, (list, tuple)): + value = _join(value) + return value + +def _format_optdict(optdict, script=False, ignore=None): + """Formats optdict to a tuple to pass it to tk.call. + + E.g. (script=False): + {'foreground': 'blue', 'padding': [1, 2, 3, 4]} returns: + ('-foreground', 'blue', '-padding', '1 2 3 4')""" + + opts = [] + for opt, value in optdict.items(): + if not ignore or opt not in ignore: + opts.append("-%s" % opt) + if value is not None: + opts.append(_format_optvalue(value, script)) + + return _flatten(opts) + +def _mapdict_values(items): + # each value in mapdict is expected to be a sequence, where each item + # is another sequence containing a state (or several) and a value + # E.g. (script=False): + # [('active', 'selected', 'grey'), ('focus', [1, 2, 3, 4])] + # returns: + # ['active selected', 'grey', 'focus', [1, 2, 3, 4]] + opt_val = [] + for *state, val in items: + if len(state) == 1: + # if it is empty (something that evaluates to False), then + # format it to Tcl code to denote the "normal" state + state = state[0] or '' + else: + # group multiple states + state = ' '.join(state) # raise TypeError if not str + opt_val.append(state) + if val is not None: + opt_val.append(val) + return opt_val + +def _format_mapdict(mapdict, script=False): + """Formats mapdict to pass it to tk.call. + + E.g. 
(script=False): + {'expand': [('active', 'selected', 'grey'), ('focus', [1, 2, 3, 4])]} + + returns: + + ('-expand', '{active selected} grey focus {1, 2, 3, 4}')""" + + opts = [] + for opt, value in mapdict.items(): + opts.extend(("-%s" % opt, + _format_optvalue(_mapdict_values(value), script))) + + return _flatten(opts) + +def _format_elemcreate(etype, script=False, *args, **kw): + """Formats args and kw according to the given element factory etype.""" + spec = None + opts = () + if etype in ("image", "vsapi"): + if etype == "image": # define an element based on an image + # first arg should be the default image name + iname = args[0] + # next args, if any, are statespec/value pairs which is almost + # a mapdict, but we just need the value + imagespec = _join(_mapdict_values(args[1:])) + spec = "%s %s" % (iname, imagespec) + + else: + # define an element whose visual appearance is drawn using the + # Microsoft Visual Styles API which is responsible for the + # themed styles on Windows XP and Vista. + # Availability: Tk 8.6, Windows XP and Vista. + class_name, part_id = args[:2] + statemap = _join(_mapdict_values(args[2:])) + spec = "%s %s %s" % (class_name, part_id, statemap) + + opts = _format_optdict(kw, script) + + elif etype == "from": # clone an element + # it expects a themename and optionally an element to clone from, + # otherwise it will clone {} (empty element) + spec = args[0] # theme name + if len(args) > 1: # elementfrom specified + opts = (_format_optvalue(args[1], script),) + + if script: + spec = '{%s}' % spec + opts = ' '.join(opts) + + return spec, opts + +def _format_layoutlist(layout, indent=0, indent_size=2): + """Formats a layout list so we can pass the result to ttk::style + layout and ttk::style settings. Note that the layout doesn't have to + be a list necessarily. 
+ + E.g.: + [("Menubutton.background", None), + ("Menubutton.button", {"children": + [("Menubutton.focus", {"children": + [("Menubutton.padding", {"children": + [("Menubutton.label", {"side": "left", "expand": 1})] + })] + })] + }), + ("Menubutton.indicator", {"side": "right"}) + ] + + returns: + + Menubutton.background + Menubutton.button -children { + Menubutton.focus -children { + Menubutton.padding -children { + Menubutton.label -side left -expand 1 + } + } + } + Menubutton.indicator -side right""" + script = [] + + for layout_elem in layout: + elem, opts = layout_elem + opts = opts or {} + fopts = ' '.join(_format_optdict(opts, True, ("children",))) + head = "%s%s%s" % (' ' * indent, elem, (" %s" % fopts) if fopts else '') + + if "children" in opts: + script.append(head + " -children {") + indent += indent_size + newscript, indent = _format_layoutlist(opts['children'], indent, + indent_size) + script.append(newscript) + indent -= indent_size + script.append('%s}' % (' ' * indent)) + else: + script.append(head) + + return '\n'.join(script), indent + +def _script_from_settings(settings): + """Returns an appropriate script, based on settings, according to + theme_settings definition to be used by theme_settings and + theme_create.""" + script = [] + # a script will be generated according to settings passed, which + # will then be evaluated by Tcl + for name, opts in settings.items(): + # will format specific keys according to Tcl code + if opts.get('configure'): # format 'configure' + s = ' '.join(_format_optdict(opts['configure'], True)) + script.append("ttk::style configure %s %s;" % (name, s)) + + if opts.get('map'): # format 'map' + s = ' '.join(_format_mapdict(opts['map'], True)) + script.append("ttk::style map %s %s;" % (name, s)) + + if 'layout' in opts: # format 'layout' which may be empty + if not opts['layout']: + s = 'null' # could be any other word, but this one makes sense + else: + s, _ = _format_layoutlist(opts['layout']) + script.append("ttk::style layout %s {\n%s\n}" % (name, s)) + + if opts.get('element create'): # format 'element create' + eopts = opts['element create'] + etype = eopts[0] + + # find where args end, and where kwargs start + argc = 1 # etype was the first one + while argc < len(eopts) and not hasattr(eopts[argc], 'items'): + argc += 1 + + elemargs = eopts[1:argc] + elemkw = eopts[argc] if argc < len(eopts) and eopts[argc] else {} + spec, opts = _format_elemcreate(etype, True, *elemargs, **elemkw) + + script.append("ttk::style element create %s %s %s %s" % ( + name, etype, spec, opts)) + + return '\n'.join(script) + +def _list_from_statespec(stuple): + """Construct a list from the given statespec tuple according to the + accepted statespec accepted by _format_mapdict.""" + if isinstance(stuple, str): + return stuple + result = [] + it = iter(stuple) + for state, val in zip(it, it): + if hasattr(state, 'typename'): # this is a Tcl object + state = str(state).split() + elif isinstance(state, str): + state = state.split() + elif not isinstance(state, (tuple, list)): + state = (state,) + if hasattr(val, 'typename'): + val = str(val) + result.append((*state, val)) + + return result + +def _list_from_layouttuple(tk, ltuple): + """Construct a list from the tuple returned by ttk::layout, this is + somewhat the reverse of _format_layoutlist.""" + ltuple = tk.splitlist(ltuple) + res = [] + + indx = 0 + while indx < len(ltuple): + name = ltuple[indx] + opts = {} + res.append((name, opts)) + indx += 1 + + while indx < len(ltuple): # grab name's options + opt, val = 
ltuple[indx:indx + 2] + if not opt.startswith('-'): # found next name + break + + opt = opt[1:] # remove the '-' from the option + indx += 2 + + if opt == 'children': + val = _list_from_layouttuple(tk, val) + + opts[opt] = val + + return res + +def _val_or_dict(tk, options, *args): + """Format options then call Tk command with args and options and return + the appropriate result. + + If no option is specified, a dict is returned. If an option is + specified with the None value, the value for that option is returned. + Otherwise, the function just sets the passed options and the caller + shouldn't be expecting a return value anyway.""" + options = _format_optdict(options) + res = tk.call(*(args + options)) + + if len(options) % 2: # option specified without a value, return its value + return res + + return _splitdict(tk, res, conv=_tclobj_to_py) + +def _convert_stringval(value): + """Converts a value to, hopefully, a more appropriate Python object.""" + value = str(value) + try: + value = int(value) + except (ValueError, TypeError): + pass + + return value + +def _to_number(x): + if isinstance(x, str): + if '.' in x: + x = float(x) + else: + x = int(x) + return x + +def _tclobj_to_py(val): + """Return value converted from Tcl object to Python object.""" + if val and hasattr(val, '__len__') and not isinstance(val, str): + if getattr(val[0], 'typename', None) == 'StateSpec': + val = _list_from_statespec(val) + else: + val = list(map(_convert_stringval, val)) + + elif hasattr(val, 'typename'): # some other (single) Tcl object + val = _convert_stringval(val) + + return val + +def tclobjs_to_py(adict): + """Returns adict with its values converted from Tcl objects to Python + objects.""" + for opt, val in adict.items(): + adict[opt] = _tclobj_to_py(val) + + return adict + +def setup_master(master=None): + """If master is not None, itself is returned. If master is None, + the default master is returned if there is one, otherwise a new + master is created and returned. + + If it is not allowed to use the default root and master is None, + RuntimeError is raised.""" + if master is None: + master = tkinter._get_default_root() + return master + + +class Style(object): + """Manipulate style database.""" + + _name = "ttk::style" + + def __init__(self, master=None): + master = setup_master(master) + self.master = master + self.tk = self.master.tk + + + def configure(self, style, query_opt=None, **kw): + """Query or sets the default value of the specified option(s) in + style. + + Each key in kw is an option and each value is either a string or + a sequence identifying the value for that option.""" + if query_opt is not None: + kw[query_opt] = None + result = _val_or_dict(self.tk, kw, self._name, "configure", style) + if result or query_opt: + return result + + + def map(self, style, query_opt=None, **kw): + """Query or sets dynamic values of the specified option(s) in + style. + + Each key in kw is an option and each value should be a list or a + tuple (usually) containing statespecs grouped in tuples, or list, + or something else of your preference. 
A statespec is compound of + one or more states and then a value.""" + if query_opt is not None: + result = self.tk.call(self._name, "map", style, '-%s' % query_opt) + return _list_from_statespec(self.tk.splitlist(result)) + + result = self.tk.call(self._name, "map", style, *_format_mapdict(kw)) + return {k: _list_from_statespec(self.tk.splitlist(v)) + for k, v in _splitdict(self.tk, result).items()} + + + def lookup(self, style, option, state=None, default=None): + """Returns the value specified for option in style. + + If state is specified it is expected to be a sequence of one + or more states. If the default argument is set, it is used as + a fallback value in case no specification for option is found.""" + state = ' '.join(state) if state else '' + + return self.tk.call(self._name, "lookup", style, '-%s' % option, + state, default) + + + def layout(self, style, layoutspec=None): + """Define the widget layout for given style. If layoutspec is + omitted, return the layout specification for given style. + + layoutspec is expected to be a list or an object different than + None that evaluates to False if you want to "turn off" that style. + If it is a list (or tuple, or something else), each item should be + a tuple where the first item is the layout name and the second item + should have the format described below: + + LAYOUTS + + A layout can contain the value None, if takes no options, or + a dict of options specifying how to arrange the element. + The layout mechanism uses a simplified version of the pack + geometry manager: given an initial cavity, each element is + allocated a parcel. Valid options/values are: + + side: whichside + Specifies which side of the cavity to place the + element; one of top, right, bottom or left. If + omitted, the element occupies the entire cavity. + + sticky: nswe + Specifies where the element is placed inside its + allocated parcel. + + children: [sublayout... ] + Specifies a list of elements to place inside the + element. Each element is a tuple (or other sequence) + where the first item is the layout name, and the other + is a LAYOUT.""" + lspec = None + if layoutspec: + lspec = _format_layoutlist(layoutspec)[0] + elif layoutspec is not None: # will disable the layout ({}, '', etc) + lspec = "null" # could be any other word, but this may make sense + # when calling layout(style) later + + return _list_from_layouttuple(self.tk, + self.tk.call(self._name, "layout", style, lspec)) + + + def element_create(self, elementname, etype, *args, **kw): + """Create a new element in the current theme of given etype.""" + spec, opts = _format_elemcreate(etype, False, *args, **kw) + self.tk.call(self._name, "element", "create", elementname, etype, + spec, *opts) + + + def element_names(self): + """Returns the list of elements defined in the current theme.""" + return tuple(n.lstrip('-') for n in self.tk.splitlist( + self.tk.call(self._name, "element", "names"))) + + + def element_options(self, elementname): + """Return the list of elementname's options.""" + return tuple(o.lstrip('-') for o in self.tk.splitlist( + self.tk.call(self._name, "element", "options", elementname))) + + + def theme_create(self, themename, parent=None, settings=None): + """Creates a new theme. + + It is an error if themename already exists. If parent is + specified, the new theme will inherit styles, elements and + layouts from the specified parent theme. 
If settings are present, + they are expected to have the same syntax used for theme_settings.""" + script = _script_from_settings(settings) if settings else '' + + if parent: + self.tk.call(self._name, "theme", "create", themename, + "-parent", parent, "-settings", script) + else: + self.tk.call(self._name, "theme", "create", themename, + "-settings", script) + + + def theme_settings(self, themename, settings): + """Temporarily sets the current theme to themename, apply specified + settings and then restore the previous theme. + + Each key in settings is a style and each value may contain the + keys 'configure', 'map', 'layout' and 'element create' and they + are expected to have the same format as specified by the methods + configure, map, layout and element_create respectively.""" + script = _script_from_settings(settings) + self.tk.call(self._name, "theme", "settings", themename, script) + + + def theme_names(self): + """Returns a list of all known themes.""" + return self.tk.splitlist(self.tk.call(self._name, "theme", "names")) + + + def theme_use(self, themename=None): + """If themename is None, returns the theme in use, otherwise, set + the current theme to themename, refreshes all widgets and emits + a <> event.""" + if themename is None: + # Starting on Tk 8.6, checking this global is no longer needed + # since it allows doing self.tk.call(self._name, "theme", "use") + return self.tk.eval("return $ttk::currentTheme") + + # using "ttk::setTheme" instead of "ttk::style theme use" causes + # the variable currentTheme to be updated, also, ttk::setTheme calls + # "ttk::style theme use" in order to change theme. + self.tk.call("ttk::setTheme", themename) + + +class Widget(tkinter.Widget): + """Base class for Tk themed widgets.""" + + def __init__(self, master, widgetname, kw=None): + """Constructs a Ttk Widget with the parent master. + + STANDARD OPTIONS + + class, cursor, takefocus, style + + SCROLLABLE WIDGET OPTIONS + + xscrollcommand, yscrollcommand + + LABEL WIDGET OPTIONS + + text, textvariable, underline, image, compound, width + + WIDGET STATES + + active, disabled, focus, pressed, selected, background, + readonly, alternate, invalid + """ + master = setup_master(master) + tkinter.Widget.__init__(self, master, widgetname, kw=kw) + + + def identify(self, x, y): + """Returns the name of the element at position x, y, or the empty + string if the point does not lie within any element. + + x and y are pixel coordinates relative to the widget.""" + return self.tk.call(self._w, "identify", x, y) + + + def instate(self, statespec, callback=None, *args, **kw): + """Test the widget's state. + + If callback is not specified, returns True if the widget state + matches statespec and False otherwise. If callback is specified, + then it will be invoked with *args, **kw if the widget state + matches statespec. statespec is expected to be a sequence.""" + ret = self.tk.getboolean( + self.tk.call(self._w, "instate", ' '.join(statespec))) + if ret and callback is not None: + return callback(*args, **kw) + + return ret + + + def state(self, statespec=None): + """Modify or inquire widget state. + + Widget state is returned if statespec is None, otherwise it is + set according to the statespec flags and then a new state spec + is returned indicating which flags were changed. 
statespec is + expected to be a sequence.""" + if statespec is not None: + statespec = ' '.join(statespec) + + return self.tk.splitlist(str(self.tk.call(self._w, "state", statespec))) + + +class Button(Widget): + """Ttk Button widget, displays a textual label and/or image, and + evaluates a command when pressed.""" + + def __init__(self, master=None, **kw): + """Construct a Ttk Button widget with the parent master. + + STANDARD OPTIONS + + class, compound, cursor, image, state, style, takefocus, + text, textvariable, underline, width + + WIDGET-SPECIFIC OPTIONS + + command, default, width + """ + Widget.__init__(self, master, "ttk::button", kw) + + + def invoke(self): + """Invokes the command associated with the button.""" + return self.tk.call(self._w, "invoke") + + +class Checkbutton(Widget): + """Ttk Checkbutton widget which is either in on- or off-state.""" + + def __init__(self, master=None, **kw): + """Construct a Ttk Checkbutton widget with the parent master. + + STANDARD OPTIONS + + class, compound, cursor, image, state, style, takefocus, + text, textvariable, underline, width + + WIDGET-SPECIFIC OPTIONS + + command, offvalue, onvalue, variable + """ + Widget.__init__(self, master, "ttk::checkbutton", kw) + + + def invoke(self): + """Toggles between the selected and deselected states and + invokes the associated command. If the widget is currently + selected, sets the option variable to the offvalue option + and deselects the widget; otherwise, sets the option variable + to the option onvalue. + + Returns the result of the associated command.""" + return self.tk.call(self._w, "invoke") + + +class Entry(Widget, tkinter.Entry): + """Ttk Entry widget displays a one-line text string and allows that + string to be edited by the user.""" + + def __init__(self, master=None, widget=None, **kw): + """Constructs a Ttk Entry widget with the parent master. + + STANDARD OPTIONS + + class, cursor, style, takefocus, xscrollcommand + + WIDGET-SPECIFIC OPTIONS + + exportselection, invalidcommand, justify, show, state, + textvariable, validate, validatecommand, width + + VALIDATION MODES + + none, key, focus, focusin, focusout, all + """ + Widget.__init__(self, master, widget or "ttk::entry", kw) + + + def bbox(self, index): + """Return a tuple of (x, y, width, height) which describes the + bounding box of the character given by index.""" + return self._getints(self.tk.call(self._w, "bbox", index)) + + + def identify(self, x, y): + """Returns the name of the element at position x, y, or the + empty string if the coordinates are outside the window.""" + return self.tk.call(self._w, "identify", x, y) + + + def validate(self): + """Force revalidation, independent of the conditions specified + by the validate option. Returns False if validation fails, True + if it succeeds. Sets or clears the invalid state accordingly.""" + return self.tk.getboolean(self.tk.call(self._w, "validate")) + + +class Combobox(Entry): + """Ttk Combobox widget combines a text field with a pop-down list of + values.""" + + def __init__(self, master=None, **kw): + """Construct a Ttk Combobox widget with the parent master. + + STANDARD OPTIONS + + class, cursor, style, takefocus + + WIDGET-SPECIFIC OPTIONS + + exportselection, justify, height, postcommand, state, + textvariable, values, width + """ + Entry.__init__(self, master, "ttk::combobox", **kw) + + + def current(self, newindex=None): + """If newindex is supplied, sets the combobox value to the + element at position newindex in the list of values. 
Otherwise, + returns the index of the current value in the list of values + or -1 if the current value does not appear in the list.""" + if newindex is None: + return self.tk.getint(self.tk.call(self._w, "current")) + return self.tk.call(self._w, "current", newindex) + + + def set(self, value): + """Sets the value of the combobox to value.""" + self.tk.call(self._w, "set", value) + + +class Frame(Widget): + """Ttk Frame widget is a container, used to group other widgets + together.""" + + def __init__(self, master=None, **kw): + """Construct a Ttk Frame with parent master. + + STANDARD OPTIONS + + class, cursor, style, takefocus + + WIDGET-SPECIFIC OPTIONS + + borderwidth, relief, padding, width, height + """ + Widget.__init__(self, master, "ttk::frame", kw) + + +class Label(Widget): + """Ttk Label widget displays a textual label and/or image.""" + + def __init__(self, master=None, **kw): + """Construct a Ttk Label with parent master. + + STANDARD OPTIONS + + class, compound, cursor, image, style, takefocus, text, + textvariable, underline, width + + WIDGET-SPECIFIC OPTIONS + + anchor, background, font, foreground, justify, padding, + relief, text, wraplength + """ + Widget.__init__(self, master, "ttk::label", kw) + + +class Labelframe(Widget): + """Ttk Labelframe widget is a container used to group other widgets + together. It has an optional label, which may be a plain text string + or another widget.""" + + def __init__(self, master=None, **kw): + """Construct a Ttk Labelframe with parent master. + + STANDARD OPTIONS + + class, cursor, style, takefocus + + WIDGET-SPECIFIC OPTIONS + labelanchor, text, underline, padding, labelwidget, width, + height + """ + Widget.__init__(self, master, "ttk::labelframe", kw) + +LabelFrame = Labelframe # tkinter name compatibility + + +class Menubutton(Widget): + """Ttk Menubutton widget displays a textual label and/or image, and + displays a menu when pressed.""" + + def __init__(self, master=None, **kw): + """Construct a Ttk Menubutton with parent master. + + STANDARD OPTIONS + + class, compound, cursor, image, state, style, takefocus, + text, textvariable, underline, width + + WIDGET-SPECIFIC OPTIONS + + direction, menu + """ + Widget.__init__(self, master, "ttk::menubutton", kw) + + +class Notebook(Widget): + """Ttk Notebook widget manages a collection of windows and displays + a single one at a time. Each child window is associated with a tab, + which the user may select to change the currently-displayed window.""" + + def __init__(self, master=None, **kw): + """Construct a Ttk Notebook with parent master. + + STANDARD OPTIONS + + class, cursor, style, takefocus + + WIDGET-SPECIFIC OPTIONS + + height, padding, width + + TAB OPTIONS + + state, sticky, padding, text, image, compound, underline + + TAB IDENTIFIERS (tab_id) + + The tab_id argument found in several methods may take any of + the following forms: + + * An integer between zero and the number of tabs + * The name of a child window + * A positional specification of the form "@x,y", which + defines the tab + * The string "current", which identifies the + currently-selected tab + * The string "end", which returns the number of tabs (only + valid for method index) + """ + Widget.__init__(self, master, "ttk::notebook", kw) + + + def add(self, child, **kw): + """Adds a new tab to the notebook. 
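+
+        For example (nb and frame being illustrative names for a Notebook
+        and a child widget), a tab can be added with any of the TAB OPTIONS
+        listed for this class:
+
+            nb.add(frame, text='Settings', underline=0)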
+ + If window is currently managed by the notebook but hidden, it is + restored to its previous position.""" + self.tk.call(self._w, "add", child, *(_format_optdict(kw))) + + + def forget(self, tab_id): + """Removes the tab specified by tab_id, unmaps and unmanages the + associated window.""" + self.tk.call(self._w, "forget", tab_id) + + + def hide(self, tab_id): + """Hides the tab specified by tab_id. + + The tab will not be displayed, but the associated window remains + managed by the notebook and its configuration remembered. Hidden + tabs may be restored with the add command.""" + self.tk.call(self._w, "hide", tab_id) + + + def identify(self, x, y): + """Returns the name of the tab element at position x, y, or the + empty string if none.""" + return self.tk.call(self._w, "identify", x, y) + + + def index(self, tab_id): + """Returns the numeric index of the tab specified by tab_id, or + the total number of tabs if tab_id is the string "end".""" + return self.tk.getint(self.tk.call(self._w, "index", tab_id)) + + + def insert(self, pos, child, **kw): + """Inserts a pane at the specified position. + + pos is either the string end, an integer index, or the name of + a managed child. If child is already managed by the notebook, + moves it to the specified position.""" + self.tk.call(self._w, "insert", pos, child, *(_format_optdict(kw))) + + + def select(self, tab_id=None): + """Selects the specified tab. + + The associated child window will be displayed, and the + previously-selected window (if different) is unmapped. If tab_id + is omitted, returns the widget name of the currently selected + pane.""" + return self.tk.call(self._w, "select", tab_id) + + + def tab(self, tab_id, option=None, **kw): + """Query or modify the options of the specific tab_id. + + If kw is not given, returns a dict of the tab option values. If option + is specified, returns the value of that option. Otherwise, sets the + options to the corresponding values.""" + if option is not None: + kw[option] = None + return _val_or_dict(self.tk, kw, self._w, "tab", tab_id) + + + def tabs(self): + """Returns a list of windows managed by the notebook.""" + return self.tk.splitlist(self.tk.call(self._w, "tabs") or ()) + + + def enable_traversal(self): + """Enable keyboard traversal for a toplevel window containing + this notebook. + + This will extend the bindings for the toplevel window containing + this notebook as follows: + + Control-Tab: selects the tab following the currently selected + one + + Shift-Control-Tab: selects the tab preceding the currently + selected one + + Alt-K: where K is the mnemonic (underlined) character of any + tab, will select that tab. + + Multiple notebooks in a single toplevel may be enabled for + traversal, including nested notebooks. However, notebook traversal + only works properly if all panes are direct children of the + notebook.""" + # The only, and good, difference I see is about mnemonics, which works + # after calling this method. Control-Tab and Shift-Control-Tab always + # works (here at least). + self.tk.call("ttk::notebook::enableTraversal", self._w) + + +class Panedwindow(Widget, tkinter.PanedWindow): + """Ttk Panedwindow widget displays a number of subwindows, stacked + either vertically or horizontally.""" + + def __init__(self, master=None, **kw): + """Construct a Ttk Panedwindow with parent master. 
+ + STANDARD OPTIONS + + class, cursor, style, takefocus + + WIDGET-SPECIFIC OPTIONS + + orient, width, height + + PANE OPTIONS + + weight + """ + Widget.__init__(self, master, "ttk::panedwindow", kw) + + + forget = tkinter.PanedWindow.forget # overrides Pack.forget + + + def insert(self, pos, child, **kw): + """Inserts a pane at the specified positions. + + pos is either the string end, and integer index, or the name + of a child. If child is already managed by the paned window, + moves it to the specified position.""" + self.tk.call(self._w, "insert", pos, child, *(_format_optdict(kw))) + + + def pane(self, pane, option=None, **kw): + """Query or modify the options of the specified pane. + + pane is either an integer index or the name of a managed subwindow. + If kw is not given, returns a dict of the pane option values. If + option is specified then the value for that option is returned. + Otherwise, sets the options to the corresponding values.""" + if option is not None: + kw[option] = None + return _val_or_dict(self.tk, kw, self._w, "pane", pane) + + + def sashpos(self, index, newpos=None): + """If newpos is specified, sets the position of sash number index. + + May adjust the positions of adjacent sashes to ensure that + positions are monotonically increasing. Sash positions are further + constrained to be between 0 and the total size of the widget. + + Returns the new position of sash number index.""" + return self.tk.getint(self.tk.call(self._w, "sashpos", index, newpos)) + +PanedWindow = Panedwindow # tkinter name compatibility + + +class Progressbar(Widget): + """Ttk Progressbar widget shows the status of a long-running + operation. They can operate in two modes: determinate mode shows the + amount completed relative to the total amount of work to be done, and + indeterminate mode provides an animated display to let the user know + that something is happening.""" + + def __init__(self, master=None, **kw): + """Construct a Ttk Progressbar with parent master. + + STANDARD OPTIONS + + class, cursor, style, takefocus + + WIDGET-SPECIFIC OPTIONS + + orient, length, mode, maximum, value, variable, phase + """ + Widget.__init__(self, master, "ttk::progressbar", kw) + + + def start(self, interval=None): + """Begin autoincrement mode: schedules a recurring timer event + that calls method step every interval milliseconds. + + interval defaults to 50 milliseconds (20 steps/second) if omitted.""" + self.tk.call(self._w, "start", interval) + + + def step(self, amount=None): + """Increments the value option by amount. + + amount defaults to 1.0 if omitted.""" + self.tk.call(self._w, "step", amount) + + + def stop(self): + """Stop autoincrement mode: cancels any recurring timer event + initiated by start.""" + self.tk.call(self._w, "stop") + + +class Radiobutton(Widget): + """Ttk Radiobutton widgets are used in groups to show or change a + set of mutually-exclusive options.""" + + def __init__(self, master=None, **kw): + """Construct a Ttk Radiobutton with parent master. + + STANDARD OPTIONS + + class, compound, cursor, image, state, style, takefocus, + text, textvariable, underline, width + + WIDGET-SPECIFIC OPTIONS + + command, value, variable + """ + Widget.__init__(self, master, "ttk::radiobutton", kw) + + + def invoke(self): + """Sets the option variable to the option value, selects the + widget, and invokes the associated command. 
+ + Returns the result of the command, or an empty string if + no command is specified.""" + return self.tk.call(self._w, "invoke") + + +class Scale(Widget, tkinter.Scale): + """Ttk Scale widget is typically used to control the numeric value of + a linked variable that varies uniformly over some range.""" + + def __init__(self, master=None, **kw): + """Construct a Ttk Scale with parent master. + + STANDARD OPTIONS + + class, cursor, style, takefocus + + WIDGET-SPECIFIC OPTIONS + + command, from, length, orient, to, value, variable + """ + Widget.__init__(self, master, "ttk::scale", kw) + + + def configure(self, cnf=None, **kw): + """Modify or query scale options. + + Setting a value for any of the "from", "from_" or "to" options + generates a <<RangeChanged>> event.""" + retval = Widget.configure(self, cnf, **kw) + if not isinstance(cnf, (type(None), str)): + kw.update(cnf) + if any(['from' in kw, 'from_' in kw, 'to' in kw]): + self.event_generate('<<RangeChanged>>') + return retval + + + def get(self, x=None, y=None): + """Get the current value of the value option, or the value + corresponding to the coordinates x, y if they are specified. + + x and y are pixel coordinates relative to the scale widget + origin.""" + return self.tk.call(self._w, 'get', x, y) + + +class Scrollbar(Widget, tkinter.Scrollbar): + """Ttk Scrollbar controls the viewport of a scrollable widget.""" + + def __init__(self, master=None, **kw): + """Construct a Ttk Scrollbar with parent master. + + STANDARD OPTIONS + + class, cursor, style, takefocus + + WIDGET-SPECIFIC OPTIONS + + command, orient + """ + Widget.__init__(self, master, "ttk::scrollbar", kw) + + +class Separator(Widget): + """Ttk Separator widget displays a horizontal or vertical separator + bar.""" + + def __init__(self, master=None, **kw): + """Construct a Ttk Separator with parent master. + + STANDARD OPTIONS + + class, cursor, style, takefocus + + WIDGET-SPECIFIC OPTIONS + + orient + """ + Widget.__init__(self, master, "ttk::separator", kw) + + +class Sizegrip(Widget): + """Ttk Sizegrip allows the user to resize the containing toplevel + window by pressing and dragging the grip.""" + + def __init__(self, master=None, **kw): + """Construct a Ttk Sizegrip with parent master. + + STANDARD OPTIONS + + class, cursor, state, style, takefocus + """ + Widget.__init__(self, master, "ttk::sizegrip", kw) + + +class Spinbox(Entry): + """Ttk Spinbox is an Entry with increment and decrement arrows + + It is commonly used for number entry or to select from a list of + string values. + """ + + def __init__(self, master=None, **kw): + """Construct a Ttk Spinbox widget with the parent master. + + STANDARD OPTIONS + + class, cursor, style, takefocus, validate, + validatecommand, xscrollcommand, invalidcommand + + WIDGET-SPECIFIC OPTIONS + + to, from_, increment, values, wrap, format, command + """ + Entry.__init__(self, master, "ttk::spinbox", **kw) + + + def set(self, value): + """Sets the value of the Spinbox to value.""" + self.tk.call(self._w, "set", value) + + +class Treeview(Widget, tkinter.XView, tkinter.YView): + """Ttk Treeview widget displays a hierarchical collection of items. + + Each item has a textual label, an optional image, and an optional list + of data values. The data values are displayed in successive columns + after the tree label.""" + + def __init__(self, master=None, **kw): + """Construct a Ttk Treeview with parent master.
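Editor's note: a small sketch of the <<RangeChanged>> behaviour documented in Scale.configure() above; it is not part of the patch, and the widget names are arbitrary.

    import tkinter
    from tkinter import ttk

    root = tkinter.Tk()
    scale = ttk.Scale(root, from_=0, to=10)
    scale.pack(fill='x')
    scale.bind('<<RangeChanged>>',
               lambda e: print('range is now', scale['from'], '..', scale['to']))
    scale.configure(to=100)    # generates a <<RangeChanged>> event
    root.mainloop()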
+ + STANDARD OPTIONS + + class, cursor, style, takefocus, xscrollcommand, + yscrollcommand + + WIDGET-SPECIFIC OPTIONS + + columns, displaycolumns, height, padding, selectmode, show + + ITEM OPTIONS + + text, image, values, open, tags + + TAG OPTIONS + + foreground, background, font, image + """ + Widget.__init__(self, master, "ttk::treeview", kw) + + + def bbox(self, item, column=None): + """Returns the bounding box (relative to the treeview widget's + window) of the specified item in the form x y width height. + + If column is specified, returns the bounding box of that cell. + If the item is not visible (i.e., if it is a descendant of a + closed item or is scrolled offscreen), returns an empty string.""" + return self._getints(self.tk.call(self._w, "bbox", item, column)) or '' + + + def get_children(self, item=None): + """Returns a tuple of children belonging to item. + + If item is not specified, returns root children.""" + return self.tk.splitlist( + self.tk.call(self._w, "children", item or '') or ()) + + + def set_children(self, item, *newchildren): + """Replaces item's child with newchildren. + + Children present in item that are not present in newchildren + are detached from tree. No items in newchildren may be an + ancestor of item.""" + self.tk.call(self._w, "children", item, newchildren) + + + def column(self, column, option=None, **kw): + """Query or modify the options for the specified column. + + If kw is not given, returns a dict of the column option values. If + option is specified then the value for that option is returned. + Otherwise, sets the options to the corresponding values.""" + if option is not None: + kw[option] = None + return _val_or_dict(self.tk, kw, self._w, "column", column) + + + def delete(self, *items): + """Delete all specified items and all their descendants. The root + item may not be deleted.""" + self.tk.call(self._w, "delete", items) + + + def detach(self, *items): + """Unlinks all of the specified items from the tree. + + The items and all of their descendants are still present, and may + be reinserted at another point in the tree, but will not be + displayed. The root item may not be detached.""" + self.tk.call(self._w, "detach", items) + + + def exists(self, item): + """Returns True if the specified item is present in the tree, + False otherwise.""" + return self.tk.getboolean(self.tk.call(self._w, "exists", item)) + + + def focus(self, item=None): + """If item is specified, sets the focus item to item. Otherwise, + returns the current focus item, or '' if there is none.""" + return self.tk.call(self._w, "focus", item) + + + def heading(self, column, option=None, **kw): + """Query or modify the heading options for the specified column. + + If kw is not given, returns a dict of the heading option values. If + option is specified then the value for that option is returned. + Otherwise, sets the options to the corresponding values. + + Valid options/values are: + text: text + The text to display in the column heading + image: image_name + Specifies an image to display to the right of the column + heading + anchor: anchor + Specifies how the heading text should be aligned. One of + the standard Tk anchor values + command: callback + A callback to be invoked when the heading label is + pressed. 
+ + To configure the tree column heading, call this with column = "#0" """ + cmd = kw.get('command') + if cmd and not isinstance(cmd, str): + # callback not registered yet, do it now + kw['command'] = self.master.register(cmd, self._substitute) + + if option is not None: + kw[option] = None + + return _val_or_dict(self.tk, kw, self._w, 'heading', column) + + + def identify(self, component, x, y): + """Returns a description of the specified component under the + point given by x and y, or the empty string if no such component + is present at that position.""" + return self.tk.call(self._w, "identify", component, x, y) + + + def identify_row(self, y): + """Returns the item ID of the item at position y.""" + return self.identify("row", 0, y) + + + def identify_column(self, x): + """Returns the data column identifier of the cell at position x. + + The tree column has ID #0.""" + return self.identify("column", x, 0) + + + def identify_region(self, x, y): + """Returns one of: + + heading: Tree heading area. + separator: Space between two columns headings; + tree: The tree area. + cell: A data cell. + + * Availability: Tk 8.6""" + return self.identify("region", x, y) + + + def identify_element(self, x, y): + """Returns the element at position x, y. + + * Availability: Tk 8.6""" + return self.identify("element", x, y) + + + def index(self, item): + """Returns the integer index of item within its parent's list + of children.""" + return self.tk.getint(self.tk.call(self._w, "index", item)) + + + def insert(self, parent, index, iid=None, **kw): + """Creates a new item and return the item identifier of the newly + created item. + + parent is the item ID of the parent item, or the empty string + to create a new top-level item. index is an integer, or the value + end, specifying where in the list of parent's children to insert + the new item. If index is less than or equal to zero, the new node + is inserted at the beginning, if index is greater than or equal to + the current number of children, it is inserted at the end. If iid + is specified, it is used as the item identifier, iid must not + already exist in the tree. Otherwise, a new unique identifier + is generated.""" + opts = _format_optdict(kw) + if iid is not None: + res = self.tk.call(self._w, "insert", parent, index, + "-id", iid, *opts) + else: + res = self.tk.call(self._w, "insert", parent, index, *opts) + + return res + + + def item(self, item, option=None, **kw): + """Query or modify the options for the specified item. + + If no options are given, a dict with options/values for the item + is returned. If option is specified then the value for that option + is returned. Otherwise, sets the options to the corresponding + values as given by kw.""" + if option is not None: + kw[option] = None + return _val_or_dict(self.tk, kw, self._w, "item", item) + + + def move(self, item, parent, index): + """Moves item to position index in parent's list of children. + + It is illegal to move an item under one of its descendants. If + index is less than or equal to zero, item is moved to the + beginning, if greater than or equal to the number of children, + it is moved to the end. 
If item was detached it is reattached.""" + self.tk.call(self._w, "move", item, parent, index) + + reattach = move # A sensible method name for reattaching detached items + + + def next(self, item): + """Returns the identifier of item's next sibling, or '' if item + is the last child of its parent.""" + return self.tk.call(self._w, "next", item) + + + def parent(self, item): + """Returns the ID of the parent of item, or '' if item is at the + top level of the hierarchy.""" + return self.tk.call(self._w, "parent", item) + + + def prev(self, item): + """Returns the identifier of item's previous sibling, or '' if + item is the first child of its parent.""" + return self.tk.call(self._w, "prev", item) + + + def see(self, item): + """Ensure that item is visible. + + Sets all of item's ancestors open option to True, and scrolls + the widget if necessary so that item is within the visible + portion of the tree.""" + self.tk.call(self._w, "see", item) + + + def selection(self): + """Returns the tuple of selected items.""" + return self.tk.splitlist(self.tk.call(self._w, "selection")) + + + def _selection(self, selop, items): + if len(items) == 1 and isinstance(items[0], (tuple, list)): + items = items[0] + + self.tk.call(self._w, "selection", selop, items) + + + def selection_set(self, *items): + """The specified items becomes the new selection.""" + self._selection("set", items) + + + def selection_add(self, *items): + """Add all of the specified items to the selection.""" + self._selection("add", items) + + + def selection_remove(self, *items): + """Remove all of the specified items from the selection.""" + self._selection("remove", items) + + + def selection_toggle(self, *items): + """Toggle the selection state of each specified item.""" + self._selection("toggle", items) + + + def set(self, item, column=None, value=None): + """Query or set the value of given item. + + With one argument, return a dictionary of column/value pairs + for the specified item. With two arguments, return the current + value of the specified column. With three arguments, set the + value of given column in given item to the specified value.""" + res = self.tk.call(self._w, "set", item, column, value) + if column is None and value is None: + return _splitdict(self.tk, res, + cut_minus=False, conv=_tclobj_to_py) + else: + return res + + + def tag_bind(self, tagname, sequence=None, callback=None): + """Bind a callback for the given event sequence to the tag tagname. + When an event is delivered to an item, the callbacks for each + of the item's tags option are called.""" + self._bind((self._w, "tag", "bind", tagname), sequence, callback, add=0) + + + def tag_configure(self, tagname, option=None, **kw): + """Query or modify the options for the specified tagname. + + If kw is not given, returns a dict of the option settings for tagname. + If option is specified, returns the value for that option for the + specified tagname. Otherwise, sets the options to the corresponding + values for the given tagname.""" + if option is not None: + kw[option] = None + return _val_or_dict(self.tk, kw, self._w, "tag", "configure", + tagname) + + + def tag_has(self, tagname, item=None): + """If item is specified, returns 1 or 0 depending on whether the + specified item has the given tagname. Otherwise, returns a list of + all items which have the specified tag. 
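Editor's note: the Treeview methods above (insert, heading, item/set, selection, tags) combine as in the following sketch. It is an illustration only; the item texts, column and tag names are made up.

    import tkinter
    from tkinter import ttk

    root = tkinter.Tk()
    tree = ttk.Treeview(root, columns=('size',))
    tree.heading('#0', text='Name')       # "#0" is the tree column
    tree.heading('size', text='Size')
    parent = tree.insert('', 'end', text='folder', open=True)
    child = tree.insert(parent, 'end', text='file.txt',
                        values=('4 KB',), tags=('file',))
    tree.tag_configure('file', foreground='blue')
    tree.selection_set(child)
    print(tree.item(child, 'text'), tree.set(child, 'size'))
    tree.pack(fill='both', expand=True)
    root.mainloop()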
+ + * Availability: Tk 8.6""" + if item is None: + return self.tk.splitlist( + self.tk.call(self._w, "tag", "has", tagname)) + else: + return self.tk.getboolean( + self.tk.call(self._w, "tag", "has", tagname, item)) + + +# Extensions + +class LabeledScale(Frame): + """A Ttk Scale widget with a Ttk Label widget indicating its + current value. + + The Ttk Scale can be accessed through instance.scale, and Ttk Label + can be accessed through instance.label""" + + def __init__(self, master=None, variable=None, from_=0, to=10, **kw): + """Construct a horizontal LabeledScale with parent master, a + variable to be associated with the Ttk Scale widget and its range. + If variable is not specified, a tkinter.IntVar is created. + + WIDGET-SPECIFIC OPTIONS + + compound: 'top' or 'bottom' + Specifies how to display the label relative to the scale. + Defaults to 'top'. + """ + self._label_top = kw.pop('compound', 'top') == 'top' + + Frame.__init__(self, master, **kw) + self._variable = variable or tkinter.IntVar(master) + self._variable.set(from_) + self._last_valid = from_ + + self.label = Label(self) + self.scale = Scale(self, variable=self._variable, from_=from_, to=to) + self.scale.bind('<<RangeChanged>>', self._adjust) + + # position scale and label according to the compound option + scale_side = 'bottom' if self._label_top else 'top' + label_side = 'top' if scale_side == 'bottom' else 'bottom' + self.scale.pack(side=scale_side, fill='x') + # Dummy required to make frame correct height + dummy = Label(self) + dummy.pack(side=label_side) + dummy.lower() + self.label.place(anchor='n' if label_side == 'top' else 's') + + # update the label as scale or variable changes + self.__tracecb = self._variable.trace_variable('w', self._adjust) + self.bind('<Configure>', self._adjust) + self.bind('<Map>', self._adjust) + + + def destroy(self): + """Destroy this widget and possibly its associated variable.""" + try: + self._variable.trace_vdelete('w', self.__tracecb) + except AttributeError: + pass + else: + del self._variable + super().destroy() + self.label = None + self.scale = None + + + def _adjust(self, *args): + """Adjust the label position according to the scale.""" + def adjust_label(): + self.update_idletasks() # "force" scale redraw + + x, y = self.scale.coords() + if self._label_top: + y = self.scale.winfo_y() - self.label.winfo_reqheight() + else: + y = self.scale.winfo_reqheight() + self.label.winfo_reqheight() + + self.label.place_configure(x=x, y=y) + + from_ = _to_number(self.scale['from']) + to = _to_number(self.scale['to']) + if to < from_: + from_, to = to, from_ + newval = self._variable.get() + if not from_ <= newval <= to: + # value outside range, set value back to the last valid one + self.value = self._last_valid + return + + self._last_valid = newval + self.label['text'] = newval + self.after_idle(adjust_label) + + @property + def value(self): + """Return current scale value.""" + return self._variable.get() + + @value.setter + def value(self, val): + """Set new scale value.""" + self._variable.set(val) + + +class OptionMenu(Menubutton): + """Themed OptionMenu, based after tkinter's OptionMenu, which allows + the user to select a value from a menu.""" + + def __init__(self, master, variable, default=None, *values, **kwargs): + """Construct a themed OptionMenu widget with master as the parent, + the resource textvariable set to variable, the initially selected + value specified by the default parameter, the menu values given by + *values and additional keywords.
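Editor's note: a minimal sketch of the LabeledScale extension defined above; illustration only, the variable and widget names are arbitrary.

    import tkinter
    from tkinter import ttk

    root = tkinter.Tk()
    var = tkinter.IntVar(root)
    ls = ttk.LabeledScale(root, variable=var, from_=0, to=5, compound='bottom')
    ls.pack(fill='x')
    ls.value = 3           # same as var.set(3); the label follows the slider
    print(ls.value)
    root.mainloop()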
+ + WIDGET-SPECIFIC OPTIONS + + style: stylename + Menubutton style. + direction: 'above', 'below', 'left', 'right', or 'flush' + Menubutton direction. + command: callback + A callback that will be invoked after selecting an item. + """ + kw = {'textvariable': variable, 'style': kwargs.pop('style', None), + 'direction': kwargs.pop('direction', None)} + Menubutton.__init__(self, master, **kw) + self['menu'] = tkinter.Menu(self, tearoff=False) + + self._variable = variable + self._callback = kwargs.pop('command', None) + if kwargs: + raise tkinter.TclError('unknown option -%s' % ( + next(iter(kwargs.keys())))) + + self.set_menu(default, *values) + + + def __getitem__(self, item): + if item == 'menu': + return self.nametowidget(Menubutton.__getitem__(self, item)) + + return Menubutton.__getitem__(self, item) + + + def set_menu(self, default=None, *values): + """Build a new menu of radiobuttons with *values and optionally + a default value.""" + menu = self['menu'] + menu.delete(0, 'end') + for val in values: + menu.add_radiobutton(label=val, + command=( + None if self._callback is None + else lambda val=val: self._callback(val) + ), + variable=self._variable) + + if default: + self._variable.set(default) + + + def destroy(self): + """Destroy this widget and its associated variable.""" + try: + del self._variable + except AttributeError: + pass + super().destroy() diff -Nru python3-stdlib-extensions-3.9.7/3.11/Modules/Setup python3-stdlib-extensions-3.10.8/3.11/Modules/Setup --- python3-stdlib-extensions-3.9.7/3.11/Modules/Setup 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Modules/Setup 2019-03-26 09:35:50.000000000 +0000 @@ -0,0 +1,386 @@ +# -*- makefile -*- +# The file Setup is used by the makesetup script to construct the files +# Makefile and config.c, from Makefile.pre and config.c.in, +# respectively. The file Setup itself is initially copied from +# Setup.dist; once it exists it will not be overwritten, so you can edit +# Setup to your heart's content. Note that Makefile.pre is created +# from Makefile.pre.in by the toplevel configure script. + +# (VPATH notes: Setup and Makefile.pre are in the build directory, as +# are Makefile and config.c; the *.in and *.dist files are in the source +# directory.) + +# Each line in this file describes one or more optional modules. +# Modules enabled here will not be compiled by the setup.py script, +# so the file can be used to override setup.py's behavior. + +# Lines have the following structure: +# +# <module> ... [<sourcefile> ...] [<cpparg> ...] [<library> ...] +# +# <sourcefile> is anything ending in .c (.C, .cc, .c++ are C++ files) +# <cpparg> is anything starting with -I, -D, -U or -C +# <library> is anything ending in .a or beginning with -l or -L +# <module> is anything else but should be a valid Python +# identifier (letters, digits, underscores, beginning with non-digit) +# +# (As the makesetup script changes, it may recognize some other +# arguments as well, e.g. *.so and *.sl as libraries. See the big +# case statement in the makesetup script.) +# +# Lines can also have the form +# +# <name> = <value> +# +# which defines a Make variable definition inserted into Makefile.in +# +# Finally, if a line contains just the word "*shared*" (without the +# quotes but with the stars), then the following modules will not be +# built statically. The build process works like this: +# +# 1. Build all modules that are declared as static in Modules/Setup, +# combine them into libpythonxy.a, combine that into python. +# 2. Build all modules that are listed as shared in Modules/Setup. +# 3. Invoke setup.py.
That builds all modules that +# a) are not builtin, and +# b) are not listed in Modules/Setup, and +# c) can be build on the target +# +# Therefore, modules declared to be shared will not be +# included in the config.c file, nor in the list of objects to be +# added to the library archive, and their linker options won't be +# added to the linker options. Rules to create their .o files and +# their shared libraries will still be added to the Makefile, and +# their names will be collected in the Make variable SHAREDMODS. This +# is used to build modules as shared libraries. (They can be +# installed using "make sharedinstall", which is implied by the +# toplevel "make install" target.) (For compatibility, +# *noconfig* has the same effect as *shared*.) +# +# In addition, *static* explicitly declares the following modules to +# be static. Lines containing "*static*" and "*shared*" may thus +# alternate throughout this file. + +# NOTE: As a standard policy, as many modules as can be supported by a +# platform should be present. The distribution comes with all modules +# enabled that are supported by most platforms and don't require you +# to ftp sources from elsewhere. + + +# Some special rules to define PYTHONPATH. +# Edit the definitions below to indicate which options you are using. +# Don't add any whitespace or comments! + +# Directories where library files get installed. +# DESTLIB is for Python modules; MACHDESTLIB for shared libraries. +DESTLIB=$(LIBDEST) +MACHDESTLIB=$(BINLIBDEST) + +# NOTE: all the paths are now relative to the prefix that is computed +# at run time! + +# Standard path -- don't edit. +# No leading colon since this is the first entry. +# Empty since this is now just the runtime prefix. +DESTPATH= + +# Site specific path components -- should begin with : if non-empty +SITEPATH= + +# Standard path components for test modules +TESTPATH= + +# Path components for machine- or system-dependent modules and shared libraries +MACHDEPPATH=:plat-$(MACHDEP) +EXTRAMACHDEPPATH= + +COREPYTHONPATH=$(DESTPATH)$(SITEPATH)$(TESTPATH)$(MACHDEPPATH)$(EXTRAMACHDEPPATH) +PYTHONPATH=$(COREPYTHONPATH) + + +# The modules listed here can't be built as shared libraries for +# various reasons; therefore they are listed here instead of in the +# normal order. + +# This only contains the minimal set of modules required to run the +# setup.py script in the root of the Python source tree. + +posix posixmodule.c # posix (UNIX) system calls +errno errnomodule.c # posix (UNIX) errno values +pwd pwdmodule.c # this is needed to find out the user's home dir + # if $HOME is not set +_sre _sre.c # Fredrik Lundh's new regular expressions +_codecs _codecsmodule.c # access to the builtin codecs and codec registry +_weakref _weakref.c # weak references +_functools _functoolsmodule.c # Tools for working with functions and callable objects + +# access to ISO C locale support +_locale _localemodule.c # -lintl + +# Standard I/O baseline +_io -I$(srcdir)/Modules/_io _io/_iomodule.c _io/iobase.c _io/fileio.c _io/bytesio.c _io/bufferedio.c _io/textio.c _io/stringio.c + +# The zipimport module is always imported at startup. Having it as a +# builtin module avoids some bootstrapping problems and reduces overhead. +zipimport zipimport.c + +# The rest of the modules listed in this file are all commented out by +# default. Usually they can be detected and built as dynamically +# loaded modules by the new setup.py script added in Python 2.1. 
If +# you're on a platform that doesn't support dynamic loading, want to +# compile modules statically into the Python binary, or need to +# specify some odd set of compiler switches, you can uncomment the +# appropriate lines below. + +# ====================================================================== + +# The Python symtable module depends on .h files that setup.py doesn't track +_symtable symtablemodule.c + +# Uncommenting the following line tells makesetup that all following +# modules are to be built as shared libraries (see above for more +# detail; also note that *static* reverses this effect): + +#*shared* + +# GNU readline. Unlike previous Python incarnations, GNU readline is +# now incorporated in an optional module, configured in the Setup file +# instead of by a configure script switch. You may have to insert a +# -L option pointing to the directory where libreadline.* lives, +# and you may have to change -ltermcap to -ltermlib or perhaps remove +# it, depending on your system -- see the GNU readline instructions. +# It's okay for this to be a shared library, too. + +#readline readline.c -lreadline -ltermcap + + +# Modules that should always be present (non UNIX dependent): + +#array arraymodule.c # array objects +#cmath cmathmodule.c _math.c # -lm # complex math library functions +#math mathmodule.c _math.c # -lm # math library functions, e.g. sin() +#_struct _struct.c # binary structure packing/unpacking +#time timemodule.c # -lm # time operations and variables +#operator operator.c # operator.add() and similar goodies +#_weakref _weakref.c # basic weak reference support +#_testcapi _testcapimodule.c # Python C API test module +#_random _randommodule.c # Random number generator +#_collections _collectionsmodule.c # Container types +#itertools itertoolsmodule.c # Functions creating iterators for efficient looping +#atexit atexitmodule.c # Register functions to be run at interpreter-shutdown +#_elementtree -I$(srcdir)/Modules/expat -DHAVE_EXPAT_CONFIG_H -DUSE_PYEXPAT_CAPI _elementtree.c # elementtree accelerator +#_pickle _pickle.c # pickle accelerator +#datetime datetimemodule.c # date/time type +#_bisect _bisectmodule.c # Bisection algorithms +#_heapq _heapqmodule.c # Heap queue algorithm + +#unicodedata unicodedata.c # static Unicode character database + + +# Modules with some UNIX dependencies -- on by default: +# (If you have a really backward UNIX, select and socket may not be +# supported...) + +#fcntl fcntlmodule.c # fcntl(2) and ioctl(2) +#spwd spwdmodule.c # spwd(3) +#grp grpmodule.c # grp(3) +#select selectmodule.c # select(2); not on ancient System V + +# Memory-mapped files (also works on Win32). +#mmap mmapmodule.c + +# CSV file helper +#_csv _csv.c + +# Socket module helper for socket(2) +#_socket socketmodule.c + +# Socket module helper for SSL support; you must comment out the other +# socket line above, and possibly edit the SSL variable: +#SSL=/usr/local/ssl +#_ssl _ssl.c \ +# -DUSE_SSL -I$(SSL)/include -I$(SSL)/include/openssl \ +# -L$(SSL)/lib -lssl -lcrypto + +# The crypt module is now disabled by default because it breaks builds +# on many systems (where -lcrypt is needed), e.g. Linux (I believe). +# +# First, look at Setup.config; configure may have set this for you. 
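Editor's note: whether a module ends up linked in statically (as the Setup entries above arrange) or built as a shared extension by setup.py can be checked from Python itself. A small sketch, with an arbitrary list of module names:

    import sys, importlib

    for name in ("posix", "_sre", "zlib", "readline"):
        if name in sys.builtin_module_names:
            print(name, "-> compiled in statically (builtin / listed in Modules/Setup)")
        else:
            try:
                mod = importlib.import_module(name)
                print(name, "-> shared extension:", getattr(mod, "__file__", "?"))
            except ImportError:
                print(name, "-> not built at all")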
+ +#crypt cryptmodule.c # -lcrypt # crypt(3); needs -lcrypt on some systems + + +# Some more UNIX dependent modules -- off by default, since these +# are not supported by all UNIX systems: + +#nis nismodule.c -lnsl # Sun yellow pages -- not everywhere +#termios termios.c # Steen Lumholt's termios module +#resource resource.c # Jeremy Hylton's rlimit interface + +#_posixsubprocess _posixsubprocess.c # POSIX subprocess module helper + +# Multimedia modules -- off by default. +# These don't work for 64-bit platforms!!! +# #993173 says audioop works on 64-bit platforms, though. +# These represent audio samples or images as strings: + +#audioop audioop.c # Operations on audio samples + + +# Note that the _md5 and _sha modules are normally only built if the +# system does not have the OpenSSL libs containing an optimized version. + +# The _md5 module implements the RSA Data Security, Inc. MD5 +# Message-Digest Algorithm, described in RFC 1321. The necessary files +# md5.c and md5.h are included here. + +#_md5 md5module.c md5.c + + +# The _sha module implements the SHA checksum algorithms. +# (NIST's Secure Hash Algorithms.) +#_sha shamodule.c +#_sha256 sha256module.c +#_sha512 sha512module.c + + +# The _tkinter module. +# +# The command for _tkinter is long and site specific. Please +# uncomment and/or edit those parts as indicated. If you don't have a +# specific extension (e.g. Tix or BLT), leave the corresponding line +# commented out. (Leave the trailing backslashes in! If you +# experience strange errors, you may want to join all uncommented +# lines and remove the backslashes -- the backslash interpretation is +# done by the shell's "read" command and it may not be implemented on +# every system. + +# *** Always uncomment this (leave the leading underscore in!): +# _tkinter _tkinter.c tkappinit.c -DWITH_APPINIT \ +# *** Uncomment and edit to reflect where your Tcl/Tk libraries are: +# -L/usr/local/lib \ +# *** Uncomment and edit to reflect where your Tcl/Tk headers are: +# -I/usr/local/include \ +# *** Uncomment and edit to reflect where your X11 header files are: +# -I/usr/X11R6/include \ +# *** Or uncomment this for Solaris: +# -I/usr/openwin/include \ +# *** Uncomment and edit for Tix extension only: +# -DWITH_TIX -ltix8.1.8.2 \ +# *** Uncomment and edit for BLT extension only: +# -DWITH_BLT -I/usr/local/blt/blt8.0-unoff/include -lBLT8.0 \ +# *** Uncomment and edit for PIL (TkImaging) extension only: +# (See http://www.pythonware.com/products/pil/ for more info) +# -DWITH_PIL -I../Extensions/Imaging/libImaging tkImaging.c \ +# *** Uncomment and edit for TOGL extension only: +# -DWITH_TOGL togl.c \ +# *** Uncomment and edit to reflect your Tcl/Tk versions: +# -ltk8.2 -ltcl8.2 \ +# *** Uncomment and edit to reflect where your X11 libraries are: +# -L/usr/X11R6/lib \ +# *** Or uncomment this for Solaris: +# -L/usr/openwin/lib \ +# *** Uncomment these for TOGL extension only: +# -lGL -lGLU -lXext -lXmu \ +# *** Uncomment for AIX: +# -lld \ +# *** Always uncomment this; X11 libraries to link with: +# -lX11 + +# Lance Ellinghaus's syslog module +#syslog syslogmodule.c # syslog daemon interface + + +# Curses support, requring the System V version of curses, often +# provided by the ncurses library. e.g. on Linux, link with -lncurses +# instead of -lcurses). +# +# First, look at Setup.config; configure may have set this for you. + +#_curses _cursesmodule.c -lcurses -ltermcap +# Wrapper for the panel library that's part of ncurses and SYSV curses. 
+#_curses_panel _curses_panel.c -lpanel -lncurses + + +# Modules that provide persistent dictionary-like semantics. You will +# probably want to arrange for at least one of them to be available on +# your machine, though none are defined by default because of library +# dependencies. The Python module dbm/__init__.py provides an +# implementation independent wrapper for these; dbm/dumb.py provides +# similar functionality (but slower of course) implemented in Python. + +# The standard Unix dbm module has been moved to Setup.config so that +# it will be compiled as a shared library by default. Compiling it as +# a built-in module causes conflicts with the pybsddb3 module since it +# creates a static dependency on an out-of-date version of db.so. +# +# First, look at Setup.config; configure may have set this for you. + +#_dbm _dbmmodule.c # dbm(3) may require -lndbm or similar + +# Anthony Baxter's gdbm module. GNU dbm(3) will require -lgdbm: +# +# First, look at Setup.config; configure may have set this for you. + +#_gdbm _gdbmmodule.c -I/usr/local/include -L/usr/local/lib -lgdbm + + +# Helper module for various ascii-encoders +#binascii binascii.c + +# Fred Drake's interface to the Python parser +#parser parsermodule.c + + +# Lee Busby's SIGFPE modules. +# The library to link fpectl with is platform specific. +# Choose *one* of the options below for fpectl: + +# For SGI IRIX (tested on 5.3): +#fpectl fpectlmodule.c -lfpe + +# For Solaris with SunPro compiler (tested on Solaris 2.5 with SunPro C 4.2): +# (Without the compiler you don't have -lsunmath.) +#fpectl fpectlmodule.c -R/opt/SUNWspro/lib -lsunmath -lm + +# For other systems: see instructions in fpectlmodule.c. +#fpectl fpectlmodule.c ... + +# Test module for fpectl. No extra libraries needed. +#fpetest fpetestmodule.c + +# Andrew Kuchling's zlib module. +# This require zlib 1.1.3 (or later). +# See http://www.gzip.org/zlib/ +#zlib zlibmodule.c -I$(prefix)/include -L$(exec_prefix)/lib -lz + +# Interface to the Expat XML parser +# +# Expat was written by James Clark and is now maintained by a group of +# developers on SourceForge; see www.libexpat.org for more +# information. The pyexpat module was written by Paul Prescod after a +# prototype by Jack Jansen. Source of Expat 1.95.2 is included in +# Modules/expat/. Usage of a system shared libexpat.so/expat.dll is +# not advised. +# +# More information on Expat can be found at www.libexpat.org. 
+# +#pyexpat expat/xmlparse.c expat/xmlrole.c expat/xmltok.c pyexpat.c -I$(srcdir)/Modules/expat -DHAVE_EXPAT_CONFIG_H -DUSE_PYEXPAT_CAPI + +# Hye-Shik Chang's CJKCodecs + +# multibytecodec is required for all the other CJK codec modules +#_multibytecodec cjkcodecs/multibytecodec.c + +#_codecs_cn cjkcodecs/_codecs_cn.c +#_codecs_hk cjkcodecs/_codecs_hk.c +#_codecs_iso2022 cjkcodecs/_codecs_iso2022.c +#_codecs_jp cjkcodecs/_codecs_jp.c +#_codecs_kr cjkcodecs/_codecs_kr.c +#_codecs_tw cjkcodecs/_codecs_tw.c + +# Example -- included for reference only: +# xx xxmodule.c + +# Another example -- the 'xxsubtype' module shows C-level subtyping in action +xxsubtype xxsubtype.c diff -Nru python3-stdlib-extensions-3.9.7/3.11/Modules/_gdbmmodule.c python3-stdlib-extensions-3.10.8/3.11/Modules/_gdbmmodule.c --- python3-stdlib-extensions-3.9.7/3.11/Modules/_gdbmmodule.c 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Modules/_gdbmmodule.c 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,789 @@ + +/* GDBM module using dictionary interface */ +/* Author: Anthony Baxter, after dbmmodule.c */ +/* Doc strings: Mitch Chapman */ + +#define PY_SSIZE_T_CLEAN +#include "Python.h" +#include "gdbm.h" + +#include <fcntl.h> +#include <stdlib.h> // free() +#include <sys/stat.h> +#include <sys/types.h> + +#if defined(WIN32) && !defined(__CYGWIN__) +#include "gdbmerrno.h" +extern const char * gdbm_strerror(gdbm_error); +#endif + +typedef struct { + PyTypeObject *gdbm_type; + PyObject *gdbm_error; +} _gdbm_state; + +static inline _gdbm_state* +get_gdbm_state(PyObject *module) +{ + void *state = PyModule_GetState(module); + assert(state != NULL); + return (_gdbm_state *)state; +} + +/*[clinic input] +module _gdbm +class _gdbm.gdbm "gdbmobject *" "&Gdbmtype" +[clinic start generated code]*/ +/*[clinic end generated code: output=da39a3ee5e6b4b0d input=38ae71cedfc7172b]*/ + +PyDoc_STRVAR(gdbmmodule__doc__, +"This module provides an interface to the GNU DBM (GDBM) library.\n\ +\n\ +This module is quite similar to the dbm module, but uses GDBM instead to\n\ +provide some additional functionality. Please note that the file formats\n\ +created by GDBM and dbm are incompatible.\n\ +\n\ +GDBM objects behave like mappings (dictionaries), except that keys and\n\ +values are always immutable bytes-like objects or strings. Printing\n\ +a GDBM object doesn't print the keys and values, and the items() and\n\ +values() methods are not supported."); + +typedef struct { + PyObject_HEAD + Py_ssize_t di_size; /* -1 means recompute */ + GDBM_FILE di_dbm; +} gdbmobject; + +#include "clinic/_gdbmmodule.c.h" + +#define check_gdbmobject_open(v, err) \ + if ((v)->di_dbm == NULL) { \ + PyErr_SetString(err, "GDBM object has already been closed"); \ + return NULL; \ + } + +PyDoc_STRVAR(gdbm_object__doc__, +"This object represents a GDBM database.\n\ +GDBM objects behave like mappings (dictionaries), except that keys and\n\ +values are always immutable bytes-like objects or strings.
Printing\n\ +a GDBM object doesn't print the keys and values, and the items() and\n\ +values() methods are not supported.\n\ +\n\ +GDBM objects also support additional operations such as firstkey,\n\ +nextkey, reorganize, and sync."); + +static PyObject * +newgdbmobject(_gdbm_state *state, const char *file, int flags, int mode) +{ + gdbmobject *dp = PyObject_GC_New(gdbmobject, state->gdbm_type); + if (dp == NULL) { + return NULL; + } + dp->di_size = -1; + errno = 0; + PyObject_GC_Track(dp); + + if ((dp->di_dbm = gdbm_open((char *)file, 0, flags, mode, NULL)) == 0) { + if (errno != 0) { + PyErr_SetFromErrnoWithFilename(state->gdbm_error, file); + } + else { + PyErr_SetString(state->gdbm_error, gdbm_strerror(gdbm_errno)); + } + Py_DECREF(dp); + return NULL; + } + return (PyObject *)dp; +} + +/* Methods */ +static int +gdbm_traverse(gdbmobject *dp, visitproc visit, void *arg) +{ + Py_VISIT(Py_TYPE(dp)); + return 0; +} + +static void +gdbm_dealloc(gdbmobject *dp) +{ + PyObject_GC_UnTrack(dp); + if (dp->di_dbm) { + gdbm_close(dp->di_dbm); + } + PyTypeObject *tp = Py_TYPE(dp); + tp->tp_free(dp); + Py_DECREF(tp); +} + +static Py_ssize_t +gdbm_length(gdbmobject *dp) +{ + _gdbm_state *state = PyType_GetModuleState(Py_TYPE(dp)); + if (dp->di_dbm == NULL) { + PyErr_SetString(state->gdbm_error, "GDBM object has already been closed"); + return -1; + } + if (dp->di_size < 0) { +#if GDBM_VERSION_MAJOR >= 1 && GDBM_VERSION_MINOR >= 11 + errno = 0; + gdbm_count_t count; + if (gdbm_count(dp->di_dbm, &count) == -1) { + if (errno != 0) { + PyErr_SetFromErrno(state->gdbm_error); + } + else { + PyErr_SetString(state->gdbm_error, gdbm_strerror(gdbm_errno)); + } + return -1; + } + if (count > PY_SSIZE_T_MAX) { + PyErr_SetString(PyExc_OverflowError, "count exceeds PY_SSIZE_T_MAX"); + return -1; + } + dp->di_size = count; +#else + datum key,okey; + okey.dsize=0; + okey.dptr=NULL; + + Py_ssize_t size = 0; + for (key = gdbm_firstkey(dp->di_dbm); key.dptr; + key = gdbm_nextkey(dp->di_dbm,okey)) { + size++; + if (okey.dsize) { + free(okey.dptr); + } + okey=key; + } + dp->di_size = size; +#endif + } + return dp->di_size; +} + +// Wrapper function for PyArg_Parse(o, "s#", &d.dptr, &d.size). +// This function is needed to support PY_SSIZE_T_CLEAN. +// Return 1 on success, same to PyArg_Parse(). +static int +parse_datum(PyObject *o, datum *d, const char *failmsg) +{ + Py_ssize_t size; + if (!PyArg_Parse(o, "s#", &d->dptr, &size)) { + if (failmsg != NULL) { + PyErr_SetString(PyExc_TypeError, failmsg); + } + return 0; + } + if (INT_MAX < size) { + PyErr_SetString(PyExc_OverflowError, "size does not fit in an int"); + return 0; + } + d->dsize = size; + return 1; +} + +static PyObject * +gdbm_subscript(gdbmobject *dp, PyObject *key) +{ + PyObject *v; + datum drec, krec; + _gdbm_state *state = PyType_GetModuleState(Py_TYPE(dp)); + + if (!parse_datum(key, &krec, NULL)) { + return NULL; + } + if (dp->di_dbm == NULL) { + PyErr_SetString(state->gdbm_error, + "GDBM object has already been closed"); + return NULL; + } + drec = gdbm_fetch(dp->di_dbm, krec); + if (drec.dptr == 0) { + PyErr_SetObject(PyExc_KeyError, key); + return NULL; + } + v = PyBytes_FromStringAndSize(drec.dptr, drec.dsize); + free(drec.dptr); + return v; +} + +/*[clinic input] +_gdbm.gdbm.get + + key: object + default: object = None + / + +Get the value for key, or default if not present. 
+[clinic start generated code]*/ + +static PyObject * +_gdbm_gdbm_get_impl(gdbmobject *self, PyObject *key, PyObject *default_value) +/*[clinic end generated code: output=92421838f3a852f4 input=a9c20423f34c17b6]*/ +{ + PyObject *res; + + res = gdbm_subscript(self, key); + if (res == NULL && PyErr_ExceptionMatches(PyExc_KeyError)) { + PyErr_Clear(); + Py_INCREF(default_value); + return default_value; + } + return res; +} + +static int +gdbm_ass_sub(gdbmobject *dp, PyObject *v, PyObject *w) +{ + datum krec, drec; + const char *failmsg = "gdbm mappings have bytes or string indices only"; + _gdbm_state *state = PyType_GetModuleState(Py_TYPE(dp)); + + if (!parse_datum(v, &krec, failmsg)) { + return -1; + } + if (dp->di_dbm == NULL) { + PyErr_SetString(state->gdbm_error, + "GDBM object has already been closed"); + return -1; + } + dp->di_size = -1; + if (w == NULL) { + if (gdbm_delete(dp->di_dbm, krec) < 0) { + if (gdbm_errno == GDBM_ITEM_NOT_FOUND) { + PyErr_SetObject(PyExc_KeyError, v); + } + else { + PyErr_SetString(state->gdbm_error, gdbm_strerror(gdbm_errno)); + } + return -1; + } + } + else { + if (!parse_datum(w, &drec, failmsg)) { + return -1; + } + errno = 0; + if (gdbm_store(dp->di_dbm, krec, drec, GDBM_REPLACE) < 0) { + if (errno != 0) + PyErr_SetFromErrno(state->gdbm_error); + else + PyErr_SetString(state->gdbm_error, + gdbm_strerror(gdbm_errno)); + return -1; + } + } + return 0; +} + +/*[clinic input] +_gdbm.gdbm.setdefault + + key: object + default: object = None + / + +Get value for key, or set it to default and return default if not present. +[clinic start generated code]*/ + +static PyObject * +_gdbm_gdbm_setdefault_impl(gdbmobject *self, PyObject *key, + PyObject *default_value) +/*[clinic end generated code: output=f3246e880509f142 input=0db46b69e9680171]*/ +{ + PyObject *res; + + res = gdbm_subscript(self, key); + if (res == NULL && PyErr_ExceptionMatches(PyExc_KeyError)) { + PyErr_Clear(); + if (gdbm_ass_sub(self, key, default_value) < 0) + return NULL; + return gdbm_subscript(self, key); + } + return res; +} + +/*[clinic input] +_gdbm.gdbm.close + +Close the database. +[clinic start generated code]*/ + +static PyObject * +_gdbm_gdbm_close_impl(gdbmobject *self) +/*[clinic end generated code: output=f5abb4d6bb9e52d5 input=0a203447379b45fd]*/ +{ + if (self->di_dbm) { + gdbm_close(self->di_dbm); + } + self->di_dbm = NULL; + Py_RETURN_NONE; +} + +/* XXX Should return a set or a set view */ +/*[clinic input] +_gdbm.gdbm.keys + + cls: defining_class + +Get a list of all keys in the database. 
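Editor's note: from Python code this module is normally reached through the dbm.gnu wrapper. A small sketch of the mapping-style methods documented above (get, setdefault, keys, close); the database file name is arbitrary.

    import dbm.gnu

    db = dbm.gnu.open('example.db', 'c')    # create the file if it does not exist
    db[b'key'] = b'value'                   # keys/values are bytes-like or str
    print(db.get(b'missing', b'default'))   # -> b'default'
    db.setdefault(b'counter', b'0')
    print(db.keys())                        # list of all keys, as bytes
    db.close()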
+[clinic start generated code]*/ + +static PyObject * +_gdbm_gdbm_keys_impl(gdbmobject *self, PyTypeObject *cls) +/*[clinic end generated code: output=c24b824e81404755 input=1428b7c79703d7d5]*/ +{ + PyObject *v, *item; + datum key, nextkey; + int err; + + _gdbm_state *state = PyType_GetModuleState(cls); + assert(state != NULL); + + if (self == NULL || !Py_IS_TYPE(self, state->gdbm_type)) { + PyErr_BadInternalCall(); + return NULL; + } + check_gdbmobject_open(self, state->gdbm_error); + + v = PyList_New(0); + if (v == NULL) + return NULL; + + key = gdbm_firstkey(self->di_dbm); + while (key.dptr) { + item = PyBytes_FromStringAndSize(key.dptr, key.dsize); + if (item == NULL) { + free(key.dptr); + Py_DECREF(v); + return NULL; + } + err = PyList_Append(v, item); + Py_DECREF(item); + if (err != 0) { + free(key.dptr); + Py_DECREF(v); + return NULL; + } + nextkey = gdbm_nextkey(self->di_dbm, key); + free(key.dptr); + key = nextkey; + } + return v; +} + +static int +gdbm_contains(PyObject *self, PyObject *arg) +{ + gdbmobject *dp = (gdbmobject *)self; + datum key; + Py_ssize_t size; + _gdbm_state *state = PyType_GetModuleState(Py_TYPE(dp)); + + if ((dp)->di_dbm == NULL) { + PyErr_SetString(state->gdbm_error, + "GDBM object has already been closed"); + return -1; + } + if (PyUnicode_Check(arg)) { + key.dptr = (char *)PyUnicode_AsUTF8AndSize(arg, &size); + key.dsize = size; + if (key.dptr == NULL) + return -1; + } + else if (!PyBytes_Check(arg)) { + PyErr_Format(PyExc_TypeError, + "gdbm key must be bytes or string, not %.100s", + Py_TYPE(arg)->tp_name); + return -1; + } + else { + key.dptr = PyBytes_AS_STRING(arg); + key.dsize = PyBytes_GET_SIZE(arg); + } + return gdbm_exists(dp->di_dbm, key); +} + +/*[clinic input] +_gdbm.gdbm.firstkey + + cls: defining_class + +Return the starting key for the traversal. + +It's possible to loop over every key in the database using this method +and the nextkey() method. The traversal is ordered by GDBM's internal +hash values, and won't be sorted by the key values. +[clinic start generated code]*/ + +static PyObject * +_gdbm_gdbm_firstkey_impl(gdbmobject *self, PyTypeObject *cls) +/*[clinic end generated code: output=139275e9c8b60827 input=ed8782a029a5d299]*/ +{ + PyObject *v; + datum key; + _gdbm_state *state = PyType_GetModuleState(cls); + assert(state != NULL); + + check_gdbmobject_open(self, state->gdbm_error); + key = gdbm_firstkey(self->di_dbm); + if (key.dptr) { + v = PyBytes_FromStringAndSize(key.dptr, key.dsize); + free(key.dptr); + return v; + } + else { + Py_RETURN_NONE; + } +} + +/*[clinic input] +_gdbm.gdbm.nextkey + + cls: defining_class + key: str(accept={str, robuffer}, zeroes=True) + / + +Returns the key that follows key in the traversal. 
+ +The following code prints every key in the database db, without having +to create a list in memory that contains them all: + + k = db.firstkey() + while k is not None: + print(k) + k = db.nextkey(k) +[clinic start generated code]*/ + +static PyObject * +_gdbm_gdbm_nextkey_impl(gdbmobject *self, PyTypeObject *cls, const char *key, + Py_ssize_t key_length) +/*[clinic end generated code: output=c81a69300ef41766 input=365e297bc0b3db48]*/ +{ + PyObject *v; + datum dbm_key, nextkey; + _gdbm_state *state = PyType_GetModuleState(cls); + assert(state != NULL); + + dbm_key.dptr = (char *)key; + dbm_key.dsize = key_length; + check_gdbmobject_open(self, state->gdbm_error); + nextkey = gdbm_nextkey(self->di_dbm, dbm_key); + if (nextkey.dptr) { + v = PyBytes_FromStringAndSize(nextkey.dptr, nextkey.dsize); + free(nextkey.dptr); + return v; + } + else { + Py_RETURN_NONE; + } +} + +/*[clinic input] +_gdbm.gdbm.reorganize + + cls: defining_class + +Reorganize the database. + +If you have carried out a lot of deletions and would like to shrink +the space used by the GDBM file, this routine will reorganize the +database. GDBM will not shorten the length of a database file except +by using this reorganization; otherwise, deleted file space will be +kept and reused as new (key,value) pairs are added. +[clinic start generated code]*/ + +static PyObject * +_gdbm_gdbm_reorganize_impl(gdbmobject *self, PyTypeObject *cls) +/*[clinic end generated code: output=d77c69e8e3dd644a input=e1359faeef844e46]*/ +{ + _gdbm_state *state = PyType_GetModuleState(cls); + assert(state != NULL); + check_gdbmobject_open(self, state->gdbm_error); + errno = 0; + if (gdbm_reorganize(self->di_dbm) < 0) { + if (errno != 0) + PyErr_SetFromErrno(state->gdbm_error); + else + PyErr_SetString(state->gdbm_error, gdbm_strerror(gdbm_errno)); + return NULL; + } + Py_RETURN_NONE; +} + +/*[clinic input] +_gdbm.gdbm.sync + + cls: defining_class + +Flush the database to the disk file. + +When the database has been opened in fast mode, this method forces +any unwritten data to be written to the disk. 
+[clinic start generated code]*/ + +static PyObject * +_gdbm_gdbm_sync_impl(gdbmobject *self, PyTypeObject *cls) +/*[clinic end generated code: output=bb680a2035c3f592 input=3d749235f79b6f2a]*/ +{ + _gdbm_state *state = PyType_GetModuleState(cls); + assert(state != NULL); + check_gdbmobject_open(self, state->gdbm_error); + gdbm_sync(self->di_dbm); + Py_RETURN_NONE; +} + +static PyObject * +gdbm__enter__(PyObject *self, PyObject *args) +{ + Py_INCREF(self); + return self; +} + +static PyObject * +gdbm__exit__(PyObject *self, PyObject *args) +{ + return _gdbm_gdbm_close_impl((gdbmobject *)self); +} + +static PyMethodDef gdbm_methods[] = { + _GDBM_GDBM_CLOSE_METHODDEF + _GDBM_GDBM_KEYS_METHODDEF + _GDBM_GDBM_FIRSTKEY_METHODDEF + _GDBM_GDBM_NEXTKEY_METHODDEF + _GDBM_GDBM_REORGANIZE_METHODDEF + _GDBM_GDBM_SYNC_METHODDEF + _GDBM_GDBM_GET_METHODDEF + _GDBM_GDBM_SETDEFAULT_METHODDEF + {"__enter__", gdbm__enter__, METH_NOARGS, NULL}, + {"__exit__", gdbm__exit__, METH_VARARGS, NULL}, + {NULL, NULL} /* sentinel */ +}; + +static PyType_Slot gdbmtype_spec_slots[] = { + {Py_tp_dealloc, gdbm_dealloc}, + {Py_tp_traverse, gdbm_traverse}, + {Py_tp_methods, gdbm_methods}, + {Py_sq_contains, gdbm_contains}, + {Py_mp_length, gdbm_length}, + {Py_mp_subscript, gdbm_subscript}, + {Py_mp_ass_subscript, gdbm_ass_sub}, + {Py_tp_doc, (char*)gdbm_object__doc__}, + {0, 0} +}; + +static PyType_Spec gdbmtype_spec = { + .name = "_gdbm.gdbm", + .basicsize = sizeof(gdbmobject), + // Calling PyType_GetModuleState() on a subclass is not safe. + // dbmtype_spec does not have Py_TPFLAGS_BASETYPE flag + // which prevents to create a subclass. + // So calling PyType_GetModuleState() in this file is always safe. + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_DISALLOW_INSTANTIATION | + Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_IMMUTABLETYPE), + .slots = gdbmtype_spec_slots, +}; + +/* ----------------------------------------------------------------- */ + +/*[clinic input] +_gdbm.open as dbmopen + + filename: object + flags: str="r" + mode: int(py_default="0o666") = 0o666 + / + +Open a dbm database and return a dbm object. + +The filename argument is the name of the database file. + +The optional flags argument can be 'r' (to open an existing database +for reading only -- default), 'w' (to open an existing database for +reading and writing), 'c' (which creates the database if it doesn't +exist), or 'n' (which always creates a new empty database). + +Some versions of gdbm support additional flags which must be +appended to one of the flags described above. The module constant +'open_flags' is a string of valid additional flags. The 'f' flag +opens the database in fast mode; altered data will not automatically +be written to the disk after every change. This results in faster +writes to the database, but may result in an inconsistent database +if the program crashes while the database is still open. Use the +sync() method to force any unwritten data to be written to the disk. +The 's' flag causes all database operations to be synchronized to +disk. The 'u' flag disables locking of the database file. + +The optional mode argument is the Unix mode of the file, used only +when the database has to be created. It defaults to octal 0o666. 
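Editor's note: a short sketch tying together the open() flags described above with the traversal and sync methods; illustration only, the file name is arbitrary and the 'f' (fast mode) flag is only accepted when the underlying gdbm build supports it.

    import dbm.gnu

    with dbm.gnu.open('cache.db', 'cf') as db:   # 'c' = create, 'f' = fast mode
        db[b'a'] = b'1'
        db[b'b'] = b'2'
        k = db.firstkey()
        while k is not None:                     # hash order, not sorted
            print(k, db[k])
            k = db.nextkey(k)
        db.sync()                                # flush fast-mode writes to disk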
+[clinic start generated code]*/ + +static PyObject * +dbmopen_impl(PyObject *module, PyObject *filename, const char *flags, + int mode) +/*[clinic end generated code: output=9527750f5df90764 input=bca6ec81dc49292c]*/ +{ + int iflags; + _gdbm_state *state = get_gdbm_state(module); + assert(state != NULL); + + switch (flags[0]) { + case 'r': + iflags = GDBM_READER; + break; + case 'w': + iflags = GDBM_WRITER; + break; + case 'c': + iflags = GDBM_WRCREAT; + break; + case 'n': + iflags = GDBM_NEWDB; + break; + default: + PyErr_SetString(state->gdbm_error, + "First flag must be one of 'r', 'w', 'c' or 'n'"); + return NULL; + } + for (flags++; *flags != '\0'; flags++) { + char buf[40]; + switch (*flags) { +#ifdef GDBM_FAST + case 'f': + iflags |= GDBM_FAST; + break; +#endif +#ifdef GDBM_SYNC + case 's': + iflags |= GDBM_SYNC; + break; +#endif +#ifdef GDBM_NOLOCK + case 'u': + iflags |= GDBM_NOLOCK; + break; +#endif + default: + PyOS_snprintf(buf, sizeof(buf), "Flag '%c' is not supported.", + *flags); + PyErr_SetString(state->gdbm_error, buf); + return NULL; + } + } + + PyObject *filenamebytes; + if (!PyUnicode_FSConverter(filename, &filenamebytes)) { + return NULL; + } + + const char *name = PyBytes_AS_STRING(filenamebytes); + if (strlen(name) != (size_t)PyBytes_GET_SIZE(filenamebytes)) { + Py_DECREF(filenamebytes); + PyErr_SetString(PyExc_ValueError, "embedded null character"); + return NULL; + } + PyObject *self = newgdbmobject(state, name, iflags, mode); + Py_DECREF(filenamebytes); + return self; +} + +static const char gdbmmodule_open_flags[] = "rwcn" +#ifdef GDBM_FAST + "f" +#endif +#ifdef GDBM_SYNC + "s" +#endif +#ifdef GDBM_NOLOCK + "u" +#endif + ; + +static PyMethodDef _gdbm_module_methods[] = { + DBMOPEN_METHODDEF + { 0, 0 }, +}; + +static int +_gdbm_exec(PyObject *module) +{ + _gdbm_state *state = get_gdbm_state(module); + state->gdbm_type = (PyTypeObject *)PyType_FromModuleAndSpec(module, + &gdbmtype_spec, NULL); + if (state->gdbm_type == NULL) { + return -1; + } + state->gdbm_error = PyErr_NewException("_gdbm.error", PyExc_OSError, NULL); + if (state->gdbm_error == NULL) { + return -1; + } + if (PyModule_AddType(module, (PyTypeObject *)state->gdbm_error) < 0) { + return -1; + } + if (PyModule_AddStringConstant(module, "open_flags", + gdbmmodule_open_flags) < 0) { + return -1; + } + +#if defined(GDBM_VERSION_MAJOR) && defined(GDBM_VERSION_MINOR) && \ + defined(GDBM_VERSION_PATCH) + PyObject *obj = Py_BuildValue("iii", GDBM_VERSION_MAJOR, + GDBM_VERSION_MINOR, GDBM_VERSION_PATCH); + if (obj == NULL) { + return -1; + } + if (PyModule_AddObject(module, "_GDBM_VERSION", obj) < 0) { + Py_DECREF(obj); + return -1; + } +#endif + return 0; +} + +static int +_gdbm_module_traverse(PyObject *module, visitproc visit, void *arg) +{ + _gdbm_state *state = get_gdbm_state(module); + Py_VISIT(state->gdbm_error); + Py_VISIT(state->gdbm_type); + return 0; +} + +static int +_gdbm_module_clear(PyObject *module) +{ + _gdbm_state *state = get_gdbm_state(module); + Py_CLEAR(state->gdbm_error); + Py_CLEAR(state->gdbm_type); + return 0; +} + +static void +_gdbm_module_free(void *module) +{ + _gdbm_module_clear((PyObject *)module); +} + +static PyModuleDef_Slot _gdbm_module_slots[] = { + {Py_mod_exec, _gdbm_exec}, + {0, NULL} +}; + +static struct PyModuleDef _gdbmmodule = { + PyModuleDef_HEAD_INIT, + .m_name = "_gdbm", + .m_doc = gdbmmodule__doc__, + .m_size = sizeof(_gdbm_state), + .m_methods = _gdbm_module_methods, + .m_slots = _gdbm_module_slots, + .m_traverse = _gdbm_module_traverse, + .m_clear = 
_gdbm_module_clear, + .m_free = _gdbm_module_free, +}; + +PyMODINIT_FUNC +PyInit__gdbm(void) +{ + return PyModuleDef_Init(&_gdbmmodule); +} diff -Nru python3-stdlib-extensions-3.9.7/3.11/Modules/_tkinter.c python3-stdlib-extensions-3.10.8/3.11/Modules/_tkinter.c --- python3-stdlib-extensions-3.9.7/3.11/Modules/_tkinter.c 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Modules/_tkinter.c 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,3424 @@ +/*********************************************************** +Copyright (C) 1994 Steen Lumholt. + + All Rights Reserved + +******************************************************************/ + +/* _tkinter.c -- Interface to libtk.a and libtcl.a. */ + +/* TCL/TK VERSION INFO: + + Only Tcl/Tk 8.5.12 and later are supported. Older versions are not + supported. Use Python 3.10 or older if you cannot upgrade your + Tcl/Tk libraries. +*/ + +/* XXX Further speed-up ideas, involving Tcl 8.0 features: + + - Register a new Tcl type, "Python callable", which can be called more + efficiently and passed to Tcl_EvalObj() directly (if this is possible). + +*/ + +#define PY_SSIZE_T_CLEAN +#ifndef Py_BUILD_CORE_BUILTIN +# define Py_BUILD_CORE_MODULE 1 +#endif + +#include "Python.h" +#include +#ifdef MS_WINDOWS +# include "pycore_fileutils.h" // _Py_stat() +#endif + +#ifdef MS_WINDOWS +#include +#endif + +#define CHECK_SIZE(size, elemsize) \ + ((size_t)(size) <= Py_MIN((size_t)INT_MAX, UINT_MAX / (size_t)(elemsize))) + +/* If Tcl is compiled for threads, we must also define TCL_THREAD. We define + it always; if Tcl is not threaded, the thread functions in + Tcl are empty. */ +#define TCL_THREADS + +#ifdef TK_FRAMEWORK +#include +#include +#else +#include +#include +#endif + +#include "tkinter.h" + +#if TK_HEX_VERSION < 0x0805020c +#error "Tk older than 8.5.12 not supported" +#endif + +#include + +#if !(defined(MS_WINDOWS) || defined(__CYGWIN__)) +#define HAVE_CREATEFILEHANDLER +#endif + +#ifdef HAVE_CREATEFILEHANDLER + +/* This bit is to ensure that TCL_UNIX_FD is defined and doesn't interfere + with the proper calculation of FHANDLETYPE == TCL_UNIX_FD below. */ +#ifndef TCL_UNIX_FD +# ifdef TCL_WIN_SOCKET +# define TCL_UNIX_FD (! TCL_WIN_SOCKET) +# else +# define TCL_UNIX_FD 1 +# endif +#endif + +/* Tcl_CreateFileHandler() changed several times; these macros deal with the + messiness. In Tcl 8.0 and later, it is not available on Windows (and on + Unix, only because Jack added it back); when available on Windows, it only + applies to sockets. */ + +#ifdef MS_WINDOWS +#define FHANDLETYPE TCL_WIN_SOCKET +#else +#define FHANDLETYPE TCL_UNIX_FD +#endif + +/* If Tcl can wait for a Unix file descriptor, define the EventHook() routine + which uses this to handle Tcl events while the user is typing commands. */ + +#if FHANDLETYPE == TCL_UNIX_FD +#define WAIT_FOR_STDIN +#endif + +#endif /* HAVE_CREATEFILEHANDLER */ + +/* Use OS native encoding for converting between Python strings and + Tcl objects. + On Windows use UTF-16 (or UTF-32 for 32-bit Tcl_UniChar) with the + "surrogatepass" error handler for converting to/from Tcl Unicode objects. + On Linux use UTF-8 with the "surrogateescape" error handler for converting + to/from Tcl String objects. 
+#ifdef MS_WINDOWS
+#define USE_TCL_UNICODE 1
+#else
+#define USE_TCL_UNICODE 0
+#endif
+
+#if PY_LITTLE_ENDIAN
+#define NATIVE_BYTEORDER -1
+#else
+#define NATIVE_BYTEORDER 1
+#endif
+
+#ifdef MS_WINDOWS
+#include <conio.h>
+#define WAIT_FOR_STDIN
+
+static PyObject *
+_get_tcl_lib_path()
+{
+    static PyObject *tcl_library_path = NULL;
+    static int already_checked = 0;
+
+    if (already_checked == 0) {
+        PyObject *prefix;
+        struct stat stat_buf;
+        int stat_return_value;
+
+        prefix = PyUnicode_FromWideChar(Py_GetPrefix(), -1);
+        if (prefix == NULL) {
+            return NULL;
+        }
+
+        /* Check expected location for an installed Python first */
+        tcl_library_path = PyUnicode_FromString("\\tcl\\tcl" TCL_VERSION);
+        if (tcl_library_path == NULL) {
+            return NULL;
+        }
+        tcl_library_path = PyUnicode_Concat(prefix, tcl_library_path);
+        if (tcl_library_path == NULL) {
+            return NULL;
+        }
+        stat_return_value = _Py_stat(tcl_library_path, &stat_buf);
+        if (stat_return_value == -2) {
+            return NULL;
+        }
+        if (stat_return_value == -1) {
+            /* install location doesn't exist, reset errno and see if
+               we're a repository build */
+            errno = 0;
+#ifdef Py_TCLTK_DIR
+            tcl_library_path = PyUnicode_FromString(
+                                    Py_TCLTK_DIR "\\lib\\tcl" TCL_VERSION);
+            if (tcl_library_path == NULL) {
+                return NULL;
+            }
+            stat_return_value = _Py_stat(tcl_library_path, &stat_buf);
+            if (stat_return_value == -2) {
+                return NULL;
+            }
+            if (stat_return_value == -1) {
+                /* tcltkDir for a repository build doesn't exist either,
+                   reset errno and leave Tcl to its own devices */
+                errno = 0;
+                tcl_library_path = NULL;
+            }
+#else
+            tcl_library_path = NULL;
+#endif
+        }
+        already_checked = 1;
+    }
+    return tcl_library_path;
+}
+#endif /* MS_WINDOWS */
+
+/* The threading situation is complicated. Tcl is not thread-safe, except
+   when configured with --enable-threads.
+
+   So we need to use a lock around all uses of Tcl. Previously, the
+   Python interpreter lock was used for this. However, this causes
+   problems when other Python threads need to run while Tcl is blocked
+   waiting for events.
+
+   To solve this problem, a separate lock for Tcl is introduced.
+   Holding it is incompatible with holding Python's interpreter lock.
+   The following four macros manipulate both locks together.
+
+   ENTER_TCL and LEAVE_TCL are brackets, just like
+   Py_BEGIN_ALLOW_THREADS and Py_END_ALLOW_THREADS. They should be
+   used whenever a call into Tcl is made that could call an event
+   handler, or otherwise affect the state of a Tcl interpreter. These
+   assume that the surrounding code has the Python interpreter lock;
+   inside the brackets, the Python interpreter lock has been released
+   and the lock for Tcl has been acquired.
+
+   Sometimes, it is necessary to have both the Python lock and the Tcl
+   lock. (For example, when transferring data from the Tcl
+   interpreter result to a Python string object.) This can be done by
+   using different macros to close the ENTER_TCL block: ENTER_OVERLAP
+   reacquires the Python lock (and restores the thread state) but
+   doesn't release the Tcl lock; LEAVE_OVERLAP_TCL releases the Tcl
+   lock.
+
+   By contrast, ENTER_PYTHON and LEAVE_PYTHON are used in Tcl event
+   handlers when the handler needs to use Python. Such event handlers
+   are entered while the lock for Tcl is held; the event handler
+   presumably needs to use Python. ENTER_PYTHON releases the lock for
+   Tcl and acquires the Python interpreter lock, restoring the
+   appropriate thread state, and LEAVE_PYTHON releases the Python
+   interpreter lock and re-acquires the lock for Tcl.
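   A typical call site therefore combines the brackets like this (an
   illustrative sketch only; the real call sites appear further down in
   this file, for example in the eval and getvar implementations):

       ENTER_TCL
       err = Tcl_Eval(interp, script)      -- Tcl lock held, GIL released
       ENTER_OVERLAP                       -- GIL re-acquired, Tcl lock kept
       res = convert the interpreter result to a Python object
       LEAVE_OVERLAP_TCL                   -- Tcl lock released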
It is okay for + ENTER_TCL/LEAVE_TCL pairs to be contained inside the code between + ENTER_PYTHON and LEAVE_PYTHON. + + These locks expand to several statements and brackets; they should + not be used in branches of if statements and the like. + + If Tcl is threaded, this approach won't work anymore. The Tcl + interpreter is only valid in the thread that created it, and all Tk + activity must happen in this thread, also. That means that the + mainloop must be invoked in the thread that created the + interpreter. Invoking commands from other threads is possible; + _tkinter will queue an event for the interpreter thread, which will + then execute the command and pass back the result. If the main + thread is not in the mainloop, and invoking commands causes an + exception; if the main loop is running but not processing events, + the command invocation will block. + + In addition, for a threaded Tcl, a single global tcl_tstate won't + be sufficient anymore, since multiple Tcl interpreters may + simultaneously dispatch in different threads. So we use the Tcl TLS + API. + +*/ + +static PyThread_type_lock tcl_lock = 0; + +#ifdef TCL_THREADS +static Tcl_ThreadDataKey state_key; +typedef PyThreadState *ThreadSpecificData; +#define tcl_tstate \ + (*(PyThreadState**)Tcl_GetThreadData(&state_key, sizeof(PyThreadState*))) +#else +static PyThreadState *tcl_tstate = NULL; +#endif + +#define ENTER_TCL \ + { PyThreadState *tstate = PyThreadState_Get(); \ + Py_BEGIN_ALLOW_THREADS \ + if(tcl_lock)PyThread_acquire_lock(tcl_lock, 1); \ + tcl_tstate = tstate; + +#define LEAVE_TCL \ + tcl_tstate = NULL; \ + if(tcl_lock)PyThread_release_lock(tcl_lock); \ + Py_END_ALLOW_THREADS} + +#define ENTER_OVERLAP \ + Py_END_ALLOW_THREADS + +#define LEAVE_OVERLAP_TCL \ + tcl_tstate = NULL; if(tcl_lock)PyThread_release_lock(tcl_lock); } + +#define ENTER_PYTHON \ + { PyThreadState *tstate = tcl_tstate; tcl_tstate = NULL; \ + if(tcl_lock) \ + PyThread_release_lock(tcl_lock); \ + PyEval_RestoreThread((tstate)); } + +#define LEAVE_PYTHON \ + { PyThreadState *tstate = PyEval_SaveThread(); \ + if(tcl_lock)PyThread_acquire_lock(tcl_lock, 1); \ + tcl_tstate = tstate; } + +#define CHECK_TCL_APPARTMENT \ + if (((TkappObject *)self)->threaded && \ + ((TkappObject *)self)->thread_id != Tcl_GetCurrentThread()) { \ + PyErr_SetString(PyExc_RuntimeError, \ + "Calling Tcl from different apartment"); \ + return 0; \ + } + +#ifndef FREECAST +#define FREECAST (char *) +#endif + +/**** Tkapp Object Declaration ****/ + +static PyObject *Tkapp_Type; + +typedef struct { + PyObject_HEAD + Tcl_Interp *interp; + int wantobjects; + int threaded; /* True if tcl_platform[threaded] */ + Tcl_ThreadId thread_id; + int dispatching; + /* We cannot include tclInt.h, as this is internal. + So we cache interesting types here. 
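       The cached pointers are obtained with Tcl_GetObjType() in Tkapp_New()
       and compared against value->typePtr in FromObj() further down.
       "booleanString" and "bignum" are not registered by Tcl, so
       Tcl_GetObjType() returns NULL for them and FromObj() fills those two
       slots in lazily the first time it sees such a value.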
*/ + const Tcl_ObjType *OldBooleanType; + const Tcl_ObjType *BooleanType; + const Tcl_ObjType *ByteArrayType; + const Tcl_ObjType *DoubleType; + const Tcl_ObjType *IntType; + const Tcl_ObjType *WideIntType; + const Tcl_ObjType *BignumType; + const Tcl_ObjType *ListType; + const Tcl_ObjType *ProcBodyType; + const Tcl_ObjType *StringType; +} TkappObject; + +#define Tkapp_Interp(v) (((TkappObject *) (v))->interp) + + +/**** Error Handling ****/ + +static PyObject *Tkinter_TclError; +static int quitMainLoop = 0; +static int errorInCmd = 0; +static PyObject *excInCmd; +static PyObject *valInCmd; +static PyObject *trbInCmd; + +#ifdef TKINTER_PROTECT_LOADTK +static int tk_load_failed = 0; +#endif + + +static PyObject *Tkapp_UnicodeResult(TkappObject *); + +static PyObject * +Tkinter_Error(TkappObject *self) +{ + PyObject *res = Tkapp_UnicodeResult(self); + if (res != NULL) { + PyErr_SetObject(Tkinter_TclError, res); + Py_DECREF(res); + } + return NULL; +} + + + +/**** Utils ****/ + +static int Tkinter_busywaitinterval = 20; + +#ifndef MS_WINDOWS + +/* Millisecond sleep() for Unix platforms. */ + +static void +Sleep(int milli) +{ + /* XXX Too bad if you don't have select(). */ + struct timeval t; + t.tv_sec = milli/1000; + t.tv_usec = (milli%1000) * 1000; + select(0, (fd_set *)0, (fd_set *)0, (fd_set *)0, &t); +} +#endif /* MS_WINDOWS */ + +/* Wait up to 1s for the mainloop to come up. */ + +static int +WaitForMainloop(TkappObject* self) +{ + int i; + for (i = 0; i < 10; i++) { + if (self->dispatching) + return 1; + Py_BEGIN_ALLOW_THREADS + Sleep(100); + Py_END_ALLOW_THREADS + } + if (self->dispatching) + return 1; + PyErr_SetString(PyExc_RuntimeError, "main thread is not in main loop"); + return 0; +} + + + +#define ARGSZ 64 + + + +static PyObject * +unicodeFromTclStringAndSize(const char *s, Py_ssize_t size) +{ + PyObject *r = PyUnicode_DecodeUTF8(s, size, NULL); + if (r != NULL || !PyErr_ExceptionMatches(PyExc_UnicodeDecodeError)) { + return r; + } + + char *buf = NULL; + PyErr_Clear(); + /* Tcl encodes null character as \xc0\x80. + https://en.wikipedia.org/wiki/UTF-8#Modified_UTF-8 */ + if (memchr(s, '\xc0', size)) { + char *q; + const char *e = s + size; + q = buf = (char *)PyMem_Malloc(size); + if (buf == NULL) { + PyErr_NoMemory(); + return NULL; + } + while (s != e) { + if (s + 1 != e && s[0] == '\xc0' && s[1] == '\x80') { + *q++ = '\0'; + s += 2; + } + else + *q++ = *s++; + } + s = buf; + size = q - s; + } + r = PyUnicode_DecodeUTF8(s, size, "surrogateescape"); + if (buf != NULL) { + PyMem_Free(buf); + } + if (r == NULL || PyUnicode_KIND(r) == PyUnicode_1BYTE_KIND) { + return r; + } + + /* In CESU-8 non-BMP characters are represented as a surrogate pair, + like in UTF-16, and then each surrogate code point is encoded in UTF-8. + https://en.wikipedia.org/wiki/CESU-8 */ + Py_ssize_t len = PyUnicode_GET_LENGTH(r); + Py_ssize_t i, j; + /* All encoded surrogate characters start with \xED. */ + i = PyUnicode_FindChar(r, 0xdcED, 0, len, 1); + if (i == -2) { + Py_DECREF(r); + return NULL; + } + if (i == -1) { + return r; + } + Py_UCS4 *u = PyUnicode_AsUCS4Copy(r); + Py_DECREF(r); + if (u == NULL) { + return NULL; + } + Py_UCS4 ch; + for (j = i; i < len; i++, u[j++] = ch) { + Py_UCS4 ch1, ch2, ch3, high, low; + /* Low surrogates U+D800 - U+DBFF are encoded as + \xED\xA0\x80 - \xED\xAF\xBF. 
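           For example, Tcl stores U+10437 as the six bytes ED A0 81 ED B0 B7
           (one three-byte sequence per half of the surrogate pair); after the
           "surrogateescape" decode above those bytes appear as lone U+DCxx
           code points, and the loop below folds each such pair back into a
           single non-BMP character.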
*/ + ch1 = ch = u[i]; + if (ch1 != 0xdcED) continue; + ch2 = u[i + 1]; + if (!(0xdcA0 <= ch2 && ch2 <= 0xdcAF)) continue; + ch3 = u[i + 2]; + if (!(0xdc80 <= ch3 && ch3 <= 0xdcBF)) continue; + high = 0xD000 | ((ch2 & 0x3F) << 6) | (ch3 & 0x3F); + assert(Py_UNICODE_IS_HIGH_SURROGATE(high)); + /* High surrogates U+DC00 - U+DFFF are encoded as + \xED\xB0\x80 - \xED\xBF\xBF. */ + ch1 = u[i + 3]; + if (ch1 != 0xdcED) continue; + ch2 = u[i + 4]; + if (!(0xdcB0 <= ch2 && ch2 <= 0xdcBF)) continue; + ch3 = u[i + 5]; + if (!(0xdc80 <= ch3 && ch3 <= 0xdcBF)) continue; + low = 0xD000 | ((ch2 & 0x3F) << 6) | (ch3 & 0x3F); + assert(Py_UNICODE_IS_HIGH_SURROGATE(high)); + ch = Py_UNICODE_JOIN_SURROGATES(high, low); + i += 5; + } + r = PyUnicode_FromKindAndData(PyUnicode_4BYTE_KIND, u, j); + PyMem_Free(u); + return r; +} + +static PyObject * +unicodeFromTclString(const char *s) +{ + return unicodeFromTclStringAndSize(s, strlen(s)); +} + +static PyObject * +unicodeFromTclObj(Tcl_Obj *value) +{ + int len; +#if USE_TCL_UNICODE + int byteorder = NATIVE_BYTEORDER; + const Tcl_UniChar *u = Tcl_GetUnicodeFromObj(value, &len); + if (sizeof(Tcl_UniChar) == 2) + return PyUnicode_DecodeUTF16((const char *)u, len * 2, + "surrogatepass", &byteorder); + else if (sizeof(Tcl_UniChar) == 4) + return PyUnicode_DecodeUTF32((const char *)u, len * 4, + "surrogatepass", &byteorder); + else + Py_UNREACHABLE(); +#else + const char *s = Tcl_GetStringFromObj(value, &len); + return unicodeFromTclStringAndSize(s, len); +#endif +} + +/*[clinic input] +module _tkinter +class _tkinter.tkapp "TkappObject *" "&Tkapp_Type_spec" +class _tkinter.Tcl_Obj "PyTclObject *" "&PyTclObject_Type_spec" +class _tkinter.tktimertoken "TkttObject *" "&Tktt_Type_spec" +[clinic start generated code]*/ +/*[clinic end generated code: output=da39a3ee5e6b4b0d input=b1ebf15c162ee229]*/ + +/**** Tkapp Object ****/ + +#ifndef WITH_APPINIT +int +Tcl_AppInit(Tcl_Interp *interp) +{ + const char * _tkinter_skip_tk_init; + + if (Tcl_Init(interp) == TCL_ERROR) { + PySys_WriteStderr("Tcl_Init error: %s\n", Tcl_GetStringResult(interp)); + return TCL_ERROR; + } + + _tkinter_skip_tk_init = Tcl_GetVar(interp, + "_tkinter_skip_tk_init", TCL_GLOBAL_ONLY); + if (_tkinter_skip_tk_init != NULL && + strcmp(_tkinter_skip_tk_init, "1") == 0) { + return TCL_OK; + } + +#ifdef TKINTER_PROTECT_LOADTK + if (tk_load_failed) { + PySys_WriteStderr("Tk_Init error: %s\n", TKINTER_LOADTK_ERRMSG); + return TCL_ERROR; + } +#endif + + if (Tk_Init(interp) == TCL_ERROR) { +#ifdef TKINTER_PROTECT_LOADTK + tk_load_failed = 1; +#endif + PySys_WriteStderr("Tk_Init error: %s\n", Tcl_GetStringResult(interp)); + return TCL_ERROR; + } + + return TCL_OK; +} +#endif /* !WITH_APPINIT */ + + + + +/* Initialize the Tk application; see the `main' function in + * `tkMain.c'. 
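 *
 * Tkapp_New() below creates the Tcl interpreter, caches the Tcl_ObjType
 * pointers used by FromObj(), deletes Tcl's "exit" command, sets
 * tcl_interactive, argv0 and (for -sync/-use) argv, plus tcl_library on
 * Windows when TCL_LIBRARY is not set, and finally runs Tcl_AppInit();
 * the default Tcl_AppInit() above skips Tk when _tkinter_skip_tk_init is set.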
+ */ + +static void EnableEventHook(void); /* Forward */ +static void DisableEventHook(void); /* Forward */ + +static TkappObject * +Tkapp_New(const char *screenName, const char *className, + int interactive, int wantobjects, int wantTk, int sync, + const char *use) +{ + TkappObject *v; + char *argv0; + + v = PyObject_New(TkappObject, (PyTypeObject *) Tkapp_Type); + if (v == NULL) + return NULL; + + v->interp = Tcl_CreateInterp(); + v->wantobjects = wantobjects; + v->threaded = Tcl_GetVar2Ex(v->interp, "tcl_platform", "threaded", + TCL_GLOBAL_ONLY) != NULL; + v->thread_id = Tcl_GetCurrentThread(); + v->dispatching = 0; + +#ifndef TCL_THREADS + if (v->threaded) { + PyErr_SetString(PyExc_RuntimeError, + "Tcl is threaded but _tkinter is not"); + Py_DECREF(v); + return 0; + } +#endif + if (v->threaded && tcl_lock) { + /* If Tcl is threaded, we don't need the lock. */ + PyThread_free_lock(tcl_lock); + tcl_lock = NULL; + } + + v->OldBooleanType = Tcl_GetObjType("boolean"); + v->BooleanType = Tcl_GetObjType("booleanString"); + v->ByteArrayType = Tcl_GetObjType("bytearray"); + v->DoubleType = Tcl_GetObjType("double"); + v->IntType = Tcl_GetObjType("int"); + v->WideIntType = Tcl_GetObjType("wideInt"); + v->BignumType = Tcl_GetObjType("bignum"); + v->ListType = Tcl_GetObjType("list"); + v->ProcBodyType = Tcl_GetObjType("procbody"); + v->StringType = Tcl_GetObjType("string"); + + /* Delete the 'exit' command, which can screw things up */ + Tcl_DeleteCommand(v->interp, "exit"); + + if (screenName != NULL) + Tcl_SetVar2(v->interp, "env", "DISPLAY", + screenName, TCL_GLOBAL_ONLY); + + if (interactive) + Tcl_SetVar(v->interp, "tcl_interactive", "1", TCL_GLOBAL_ONLY); + else + Tcl_SetVar(v->interp, "tcl_interactive", "0", TCL_GLOBAL_ONLY); + + /* This is used to get the application class for Tk 4.1 and up */ + argv0 = (char*)PyMem_Malloc(strlen(className) + 1); + if (!argv0) { + PyErr_NoMemory(); + Py_DECREF(v); + return NULL; + } + + strcpy(argv0, className); + if (Py_ISUPPER(argv0[0])) + argv0[0] = Py_TOLOWER(argv0[0]); + Tcl_SetVar(v->interp, "argv0", argv0, TCL_GLOBAL_ONLY); + PyMem_Free(argv0); + + if (! 
wantTk) { + Tcl_SetVar(v->interp, + "_tkinter_skip_tk_init", "1", TCL_GLOBAL_ONLY); + } +#ifdef TKINTER_PROTECT_LOADTK + else if (tk_load_failed) { + Tcl_SetVar(v->interp, + "_tkinter_tk_failed", "1", TCL_GLOBAL_ONLY); + } +#endif + + /* some initial arguments need to be in argv */ + if (sync || use) { + char *args; + Py_ssize_t len = 0; + + if (sync) + len += sizeof "-sync"; + if (use) + len += strlen(use) + sizeof "-use "; /* never overflows */ + + args = (char*)PyMem_Malloc(len); + if (!args) { + PyErr_NoMemory(); + Py_DECREF(v); + return NULL; + } + + args[0] = '\0'; + if (sync) + strcat(args, "-sync"); + if (use) { + if (sync) + strcat(args, " "); + strcat(args, "-use "); + strcat(args, use); + } + + Tcl_SetVar(v->interp, "argv", args, TCL_GLOBAL_ONLY); + PyMem_Free(args); + } + +#ifdef MS_WINDOWS + { + PyObject *str_path; + PyObject *utf8_path; + DWORD ret; + + ret = GetEnvironmentVariableW(L"TCL_LIBRARY", NULL, 0); + if (!ret && GetLastError() == ERROR_ENVVAR_NOT_FOUND) { + str_path = _get_tcl_lib_path(); + if (str_path == NULL && PyErr_Occurred()) { + return NULL; + } + if (str_path != NULL) { + utf8_path = PyUnicode_AsUTF8String(str_path); + if (utf8_path == NULL) { + return NULL; + } + Tcl_SetVar(v->interp, + "tcl_library", + PyBytes_AS_STRING(utf8_path), + TCL_GLOBAL_ONLY); + Py_DECREF(utf8_path); + } + } + } +#endif + + if (Tcl_AppInit(v->interp) != TCL_OK) { + PyObject *result = Tkinter_Error(v); +#ifdef TKINTER_PROTECT_LOADTK + if (wantTk) { + const char *_tkinter_tk_failed; + _tkinter_tk_failed = Tcl_GetVar(v->interp, + "_tkinter_tk_failed", TCL_GLOBAL_ONLY); + + if ( _tkinter_tk_failed != NULL && + strcmp(_tkinter_tk_failed, "1") == 0) { + tk_load_failed = 1; + } + } +#endif + Py_DECREF((PyObject *)v); + return (TkappObject *)result; + } + + EnableEventHook(); + + return v; +} + + +static void +Tkapp_ThreadSend(TkappObject *self, Tcl_Event *ev, + Tcl_Condition *cond, Tcl_Mutex *mutex) +{ + Py_BEGIN_ALLOW_THREADS; + Tcl_MutexLock(mutex); + Tcl_ThreadQueueEvent(self->thread_id, ev, TCL_QUEUE_TAIL); + Tcl_ThreadAlert(self->thread_id); + Tcl_ConditionWait(cond, mutex, NULL); + Tcl_MutexUnlock(mutex); + Py_END_ALLOW_THREADS +} + + +/** Tcl Eval **/ + +typedef struct { + PyObject_HEAD + Tcl_Obj *value; + PyObject *string; /* This cannot cause cycles. */ +} PyTclObject; + +static PyObject *PyTclObject_Type; +#define PyTclObject_Check(v) Py_IS_TYPE(v, (PyTypeObject *) PyTclObject_Type) + +static PyObject * +newPyTclObject(Tcl_Obj *arg) +{ + PyTclObject *self; + self = PyObject_New(PyTclObject, (PyTypeObject *) PyTclObject_Type); + if (self == NULL) + return NULL; + Tcl_IncrRefCount(arg); + self->value = arg; + self->string = NULL; + return (PyObject*)self; +} + +static void +PyTclObject_dealloc(PyTclObject *self) +{ + PyObject *tp = (PyObject *) Py_TYPE(self); + Tcl_DecrRefCount(self->value); + Py_XDECREF(self->string); + PyObject_Free(self); + Py_DECREF(tp); +} + +/* Like _str, but create Unicode if necessary. */ +PyDoc_STRVAR(PyTclObject_string__doc__, +"the string representation of this object, either as str or bytes"); + +static PyObject * +PyTclObject_string(PyTclObject *self, void *ignored) +{ + if (!self->string) { + self->string = unicodeFromTclObj(self->value); + if (!self->string) + return NULL; + } + Py_INCREF(self->string); + return self->string; +} + +static PyObject * +PyTclObject_str(PyTclObject *self) +{ + if (self->string) { + Py_INCREF(self->string); + return self->string; + } + /* XXX Could cache result if it is non-ASCII. 
*/ + return unicodeFromTclObj(self->value); +} + +static PyObject * +PyTclObject_repr(PyTclObject *self) +{ + PyObject *repr, *str = PyTclObject_str(self); + if (str == NULL) + return NULL; + repr = PyUnicode_FromFormat("<%s object: %R>", + self->value->typePtr->name, str); + Py_DECREF(str); + return repr; +} + +static PyObject * +PyTclObject_richcompare(PyObject *self, PyObject *other, int op) +{ + int result; + + /* neither argument should be NULL, unless something's gone wrong */ + if (self == NULL || other == NULL) { + PyErr_BadInternalCall(); + return NULL; + } + + /* both arguments should be instances of PyTclObject */ + if (!PyTclObject_Check(self) || !PyTclObject_Check(other)) { + Py_RETURN_NOTIMPLEMENTED; + } + + if (self == other) + /* fast path when self and other are identical */ + result = 0; + else + result = strcmp(Tcl_GetString(((PyTclObject *)self)->value), + Tcl_GetString(((PyTclObject *)other)->value)); + Py_RETURN_RICHCOMPARE(result, 0, op); +} + +PyDoc_STRVAR(get_typename__doc__, "name of the Tcl type"); + +static PyObject* +get_typename(PyTclObject* obj, void* ignored) +{ + return unicodeFromTclString(obj->value->typePtr->name); +} + + +static PyGetSetDef PyTclObject_getsetlist[] = { + {"typename", (getter)get_typename, NULL, get_typename__doc__}, + {"string", (getter)PyTclObject_string, NULL, + PyTclObject_string__doc__}, + {0}, +}; + +static PyType_Slot PyTclObject_Type_slots[] = { + {Py_tp_dealloc, (destructor)PyTclObject_dealloc}, + {Py_tp_repr, (reprfunc)PyTclObject_repr}, + {Py_tp_str, (reprfunc)PyTclObject_str}, + {Py_tp_getattro, PyObject_GenericGetAttr}, + {Py_tp_richcompare, PyTclObject_richcompare}, + {Py_tp_getset, PyTclObject_getsetlist}, + {0, 0} +}; + +static PyType_Spec PyTclObject_Type_spec = { + "_tkinter.Tcl_Obj", + sizeof(PyTclObject), + 0, + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_DISALLOW_INSTANTIATION, + PyTclObject_Type_slots, +}; + + +#if SIZE_MAX > INT_MAX +#define CHECK_STRING_LENGTH(s) do { \ + if (s != NULL && strlen(s) >= INT_MAX) { \ + PyErr_SetString(PyExc_OverflowError, "string is too long"); \ + return NULL; \ + } } while(0) +#else +#define CHECK_STRING_LENGTH(s) +#endif + +static Tcl_Obj* +asBignumObj(PyObject *value) +{ + Tcl_Obj *result; + int neg; + PyObject *hexstr; + const char *hexchars; + mp_int bigValue; + + neg = Py_SIZE(value) < 0; + hexstr = _PyLong_Format(value, 16); + if (hexstr == NULL) + return NULL; + hexchars = PyUnicode_AsUTF8(hexstr); + if (hexchars == NULL) { + Py_DECREF(hexstr); + return NULL; + } + hexchars += neg + 2; /* skip sign and "0x" */ + mp_init(&bigValue); + if (mp_read_radix(&bigValue, hexchars, 16) != MP_OKAY) { + mp_clear(&bigValue); + Py_DECREF(hexstr); + PyErr_NoMemory(); + return NULL; + } + Py_DECREF(hexstr); + bigValue.sign = neg ? 
MP_NEG : MP_ZPOS; + result = Tcl_NewBignumObj(&bigValue); + mp_clear(&bigValue); + if (result == NULL) { + PyErr_NoMemory(); + return NULL; + } + return result; +} + +static Tcl_Obj* +AsObj(PyObject *value) +{ + Tcl_Obj *result; + + if (PyBytes_Check(value)) { + if (PyBytes_GET_SIZE(value) >= INT_MAX) { + PyErr_SetString(PyExc_OverflowError, "bytes object is too long"); + return NULL; + } + return Tcl_NewByteArrayObj((unsigned char *)PyBytes_AS_STRING(value), + (int)PyBytes_GET_SIZE(value)); + } + + if (PyBool_Check(value)) + return Tcl_NewBooleanObj(PyObject_IsTrue(value)); + + if (PyLong_CheckExact(value)) { + int overflow; + long longValue; +#ifdef TCL_WIDE_INT_TYPE + Tcl_WideInt wideValue; +#endif + longValue = PyLong_AsLongAndOverflow(value, &overflow); + if (!overflow) { + return Tcl_NewLongObj(longValue); + } + /* If there is an overflow in the long conversion, + fall through to wideInt handling. */ +#ifdef TCL_WIDE_INT_TYPE + if (_PyLong_AsByteArray((PyLongObject *)value, + (unsigned char *)(void *)&wideValue, + sizeof(wideValue), + PY_LITTLE_ENDIAN, + /* signed */ 1) == 0) { + return Tcl_NewWideIntObj(wideValue); + } + PyErr_Clear(); +#endif + /* If there is an overflow in the wideInt conversion, + fall through to bignum handling. */ + return asBignumObj(value); + /* If there is no wideInt or bignum support, + fall through to default object handling. */ + } + + if (PyFloat_Check(value)) + return Tcl_NewDoubleObj(PyFloat_AS_DOUBLE(value)); + + if (PyTuple_Check(value) || PyList_Check(value)) { + Tcl_Obj **argv; + Py_ssize_t size, i; + + size = PySequence_Fast_GET_SIZE(value); + if (size == 0) + return Tcl_NewListObj(0, NULL); + if (!CHECK_SIZE(size, sizeof(Tcl_Obj *))) { + PyErr_SetString(PyExc_OverflowError, + PyTuple_Check(value) ? "tuple is too long" : + "list is too long"); + return NULL; + } + argv = (Tcl_Obj **) PyMem_Malloc(((size_t)size) * sizeof(Tcl_Obj *)); + if (!argv) { + PyErr_NoMemory(); + return NULL; + } + for (i = 0; i < size; i++) + argv[i] = AsObj(PySequence_Fast_GET_ITEM(value,i)); + result = Tcl_NewListObj((int)size, argv); + PyMem_Free(argv); + return result; + } + + if (PyUnicode_Check(value)) { + if (PyUnicode_READY(value) == -1) + return NULL; + + Py_ssize_t size = PyUnicode_GET_LENGTH(value); + if (size == 0) { + return Tcl_NewStringObj("", 0); + } + if (!CHECK_SIZE(size, sizeof(Tcl_UniChar))) { + PyErr_SetString(PyExc_OverflowError, "string is too long"); + return NULL; + } + if (PyUnicode_IS_ASCII(value)) { + return Tcl_NewStringObj((const char *)PyUnicode_DATA(value), + (int)size); + } + + PyObject *encoded; +#if USE_TCL_UNICODE + if (sizeof(Tcl_UniChar) == 2) + encoded = _PyUnicode_EncodeUTF16(value, + "surrogatepass", NATIVE_BYTEORDER); + else if (sizeof(Tcl_UniChar) == 4) + encoded = _PyUnicode_EncodeUTF32(value, + "surrogatepass", NATIVE_BYTEORDER); + else + Py_UNREACHABLE(); +#else + encoded = _PyUnicode_AsUTF8String(value, "surrogateescape"); +#endif + if (!encoded) { + return NULL; + } + size = PyBytes_GET_SIZE(encoded); + if (size > INT_MAX) { + Py_DECREF(encoded); + PyErr_SetString(PyExc_OverflowError, "string is too long"); + return NULL; + } +#if USE_TCL_UNICODE + result = Tcl_NewUnicodeObj((const Tcl_UniChar *)PyBytes_AS_STRING(encoded), + (int)(size / sizeof(Tcl_UniChar))); +#else + result = Tcl_NewStringObj(PyBytes_AS_STRING(encoded), (int)size); +#endif + Py_DECREF(encoded); + return result; + } + + if (PyTclObject_Check(value)) { + return ((PyTclObject*)value)->value; + } + + { + PyObject *v = PyObject_Str(value); + if (!v) + return 0; + 
result = AsObj(v); + Py_DECREF(v); + return result; + } +} + +static PyObject * +fromBoolean(TkappObject *tkapp, Tcl_Obj *value) +{ + int boolValue; + if (Tcl_GetBooleanFromObj(Tkapp_Interp(tkapp), value, &boolValue) == TCL_ERROR) + return Tkinter_Error(tkapp); + return PyBool_FromLong(boolValue); +} + +static PyObject* +fromWideIntObj(TkappObject *tkapp, Tcl_Obj *value) +{ + Tcl_WideInt wideValue; + if (Tcl_GetWideIntFromObj(Tkapp_Interp(tkapp), value, &wideValue) == TCL_OK) { + if (sizeof(wideValue) <= SIZEOF_LONG_LONG) + return PyLong_FromLongLong(wideValue); + return _PyLong_FromByteArray((unsigned char *)(void *)&wideValue, + sizeof(wideValue), + PY_LITTLE_ENDIAN, + /* signed */ 1); + } + return NULL; +} + +static PyObject* +fromBignumObj(TkappObject *tkapp, Tcl_Obj *value) +{ + mp_int bigValue; + unsigned long numBytes; + unsigned char *bytes; + PyObject *res; + + if (Tcl_GetBignumFromObj(Tkapp_Interp(tkapp), value, &bigValue) != TCL_OK) + return Tkinter_Error(tkapp); + numBytes = mp_unsigned_bin_size(&bigValue); + bytes = PyMem_Malloc(numBytes); + if (bytes == NULL) { + mp_clear(&bigValue); + return PyErr_NoMemory(); + } + if (mp_to_unsigned_bin_n(&bigValue, bytes, + &numBytes) != MP_OKAY) { + mp_clear(&bigValue); + PyMem_Free(bytes); + return PyErr_NoMemory(); + } + res = _PyLong_FromByteArray(bytes, numBytes, + /* big-endian */ 0, + /* unsigned */ 0); + PyMem_Free(bytes); + if (res != NULL && bigValue.sign == MP_NEG) { + PyObject *res2 = PyNumber_Negative(res); + Py_DECREF(res); + res = res2; + } + mp_clear(&bigValue); + return res; +} + +static PyObject* +FromObj(TkappObject *tkapp, Tcl_Obj *value) +{ + PyObject *result = NULL; + Tcl_Interp *interp = Tkapp_Interp(tkapp); + + if (value->typePtr == NULL) { + return unicodeFromTclObj(value); + } + + if (value->typePtr == tkapp->BooleanType || + value->typePtr == tkapp->OldBooleanType) { + return fromBoolean(tkapp, value); + } + + if (value->typePtr == tkapp->ByteArrayType) { + int size; + char *data = (char*)Tcl_GetByteArrayFromObj(value, &size); + return PyBytes_FromStringAndSize(data, size); + } + + if (value->typePtr == tkapp->DoubleType) { + return PyFloat_FromDouble(value->internalRep.doubleValue); + } + + if (value->typePtr == tkapp->IntType) { + long longValue; + if (Tcl_GetLongFromObj(interp, value, &longValue) == TCL_OK) + return PyLong_FromLong(longValue); + /* If there is an error in the long conversion, + fall through to wideInt handling. */ + } + + if (value->typePtr == tkapp->IntType || + value->typePtr == tkapp->WideIntType) { + result = fromWideIntObj(tkapp, value); + if (result != NULL || PyErr_Occurred()) + return result; + Tcl_ResetResult(interp); + /* If there is an error in the wideInt conversion, + fall through to bignum handling. 
*/ + } + + if (value->typePtr == tkapp->IntType || + value->typePtr == tkapp->WideIntType || + value->typePtr == tkapp->BignumType) { + return fromBignumObj(tkapp, value); + } + + if (value->typePtr == tkapp->ListType) { + int size; + int i, status; + PyObject *elem; + Tcl_Obj *tcl_elem; + + status = Tcl_ListObjLength(interp, value, &size); + if (status == TCL_ERROR) + return Tkinter_Error(tkapp); + result = PyTuple_New(size); + if (!result) + return NULL; + for (i = 0; i < size; i++) { + status = Tcl_ListObjIndex(interp, value, i, &tcl_elem); + if (status == TCL_ERROR) { + Py_DECREF(result); + return Tkinter_Error(tkapp); + } + elem = FromObj(tkapp, tcl_elem); + if (!elem) { + Py_DECREF(result); + return NULL; + } + PyTuple_SET_ITEM(result, i, elem); + } + return result; + } + + if (value->typePtr == tkapp->ProcBodyType) { + /* fall through: return tcl object. */ + } + + if (value->typePtr == tkapp->StringType) { + return unicodeFromTclObj(value); + } + + if (tkapp->BooleanType == NULL && + strcmp(value->typePtr->name, "booleanString") == 0) { + /* booleanString type is not registered in Tcl */ + tkapp->BooleanType = value->typePtr; + return fromBoolean(tkapp, value); + } + + if (tkapp->BignumType == NULL && + strcmp(value->typePtr->name, "bignum") == 0) { + /* bignum type is not registered in Tcl */ + tkapp->BignumType = value->typePtr; + return fromBignumObj(tkapp, value); + } + + return newPyTclObject(value); +} + +/* This mutex synchronizes inter-thread command calls. */ +TCL_DECLARE_MUTEX(call_mutex) + +typedef struct Tkapp_CallEvent { + Tcl_Event ev; /* Must be first */ + TkappObject *self; + PyObject *args; + int flags; + PyObject **res; + PyObject **exc_type, **exc_value, **exc_tb; + Tcl_Condition *done; +} Tkapp_CallEvent; + +void +Tkapp_CallDeallocArgs(Tcl_Obj** objv, Tcl_Obj** objStore, int objc) +{ + int i; + for (i = 0; i < objc; i++) + Tcl_DecrRefCount(objv[i]); + if (objv != objStore) + PyMem_Free(objv); +} + +/* Convert Python objects to Tcl objects. This must happen in the + interpreter thread, which may or may not be the calling thread. */ + +static Tcl_Obj** +Tkapp_CallArgs(PyObject *args, Tcl_Obj** objStore, int *pobjc) +{ + Tcl_Obj **objv = objStore; + Py_ssize_t objc = 0, i; + if (args == NULL) + /* do nothing */; + + else if (!(PyTuple_Check(args) || PyList_Check(args))) { + objv[0] = AsObj(args); + if (objv[0] == NULL) + goto finally; + objc = 1; + Tcl_IncrRefCount(objv[0]); + } + else { + objc = PySequence_Fast_GET_SIZE(args); + + if (objc > ARGSZ) { + if (!CHECK_SIZE(objc, sizeof(Tcl_Obj *))) { + PyErr_SetString(PyExc_OverflowError, + PyTuple_Check(args) ? "tuple is too long" : + "list is too long"); + return NULL; + } + objv = (Tcl_Obj **)PyMem_Malloc(((size_t)objc) * sizeof(Tcl_Obj *)); + if (objv == NULL) { + PyErr_NoMemory(); + objc = 0; + goto finally; + } + } + + for (i = 0; i < objc; i++) { + PyObject *v = PySequence_Fast_GET_ITEM(args, i); + if (v == Py_None) { + objc = i; + break; + } + objv[i] = AsObj(v); + if (!objv[i]) { + /* Reset objc, so it attempts to clear + objects only up to i. */ + objc = i; + goto finally; + } + Tcl_IncrRefCount(objv[i]); + } + } + *pobjc = (int)objc; + return objv; +finally: + Tkapp_CallDeallocArgs(objv, objStore, (int)objc); + return NULL; +} + +/* Convert the results of a command call into a Python string. */ + +static PyObject * +Tkapp_UnicodeResult(TkappObject *self) +{ + return unicodeFromTclObj(Tcl_GetObjResult(self->interp)); +} + + +/* Convert the results of a command call into a Python objects. 
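   When wantobjects is set this means running the interpreter result through
   FromObj() above, which maps Tcl booleans, integers (int, wideInt, bignum),
   doubles, byte arrays, lists and strings to bool, int, float, bytes, tuple
   and str respectively and wraps any other Tcl type in a Tcl_Obj proxy;
   otherwise the result is simply decoded to str.  AsObj() is the inverse
   conversion used for outgoing arguments.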
*/ + +static PyObject * +Tkapp_ObjectResult(TkappObject *self) +{ + PyObject *res = NULL; + Tcl_Obj *value = Tcl_GetObjResult(self->interp); + if (self->wantobjects) { + /* Not sure whether the IncrRef is necessary, but something + may overwrite the interpreter result while we are + converting it. */ + Tcl_IncrRefCount(value); + res = FromObj(self, value); + Tcl_DecrRefCount(value); + } else { + res = unicodeFromTclObj(value); + } + return res; +} + + +/* Tkapp_CallProc is the event procedure that is executed in the context of + the Tcl interpreter thread. Initially, it holds the Tcl lock, and doesn't + hold the Python lock. */ + +static int +Tkapp_CallProc(Tkapp_CallEvent *e, int flags) +{ + Tcl_Obj *objStore[ARGSZ]; + Tcl_Obj **objv; + int objc; + int i; + ENTER_PYTHON + objv = Tkapp_CallArgs(e->args, objStore, &objc); + if (!objv) { + PyErr_Fetch(e->exc_type, e->exc_value, e->exc_tb); + *(e->res) = NULL; + } + LEAVE_PYTHON + if (!objv) + goto done; + i = Tcl_EvalObjv(e->self->interp, objc, objv, e->flags); + ENTER_PYTHON + if (i == TCL_ERROR) { + *(e->res) = Tkinter_Error(e->self); + } + else { + *(e->res) = Tkapp_ObjectResult(e->self); + } + if (*(e->res) == NULL) { + PyErr_Fetch(e->exc_type, e->exc_value, e->exc_tb); + } + LEAVE_PYTHON + + Tkapp_CallDeallocArgs(objv, objStore, objc); +done: + /* Wake up calling thread. */ + Tcl_MutexLock(&call_mutex); + Tcl_ConditionNotify(e->done); + Tcl_MutexUnlock(&call_mutex); + return 1; +} + + +/* This is the main entry point for calling a Tcl command. + It supports three cases, with regard to threading: + 1. Tcl is not threaded: Must have the Tcl lock, then can invoke command in + the context of the calling thread. + 2. Tcl is threaded, caller of the command is in the interpreter thread: + Execute the command in the calling thread. Since the Tcl lock will + not be used, we can merge that with case 1. + 3. Tcl is threaded, caller is in a different thread: Must queue an event to + the interpreter thread. Allocation of Tcl objects needs to occur in the + interpreter thread, so we ship the PyObject* args to the target thread, + and perform processing there. */ + +static PyObject * +Tkapp_Call(PyObject *selfptr, PyObject *args) +{ + Tcl_Obj *objStore[ARGSZ]; + Tcl_Obj **objv = NULL; + int objc, i; + PyObject *res = NULL; + TkappObject *self = (TkappObject*)selfptr; + int flags = TCL_EVAL_DIRECT | TCL_EVAL_GLOBAL; + + /* If args is a single tuple, replace with contents of tuple */ + if (PyTuple_GET_SIZE(args) == 1) { + PyObject *item = PyTuple_GET_ITEM(args, 0); + if (PyTuple_Check(item)) + args = item; + } + if (self->threaded && self->thread_id != Tcl_GetCurrentThread()) { + /* We cannot call the command directly. Instead, we must + marshal the parameters to the interpreter thread. 
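           A Tkapp_CallEvent carrying the argument tuple and pointers for the
           result and any exception is queued to the interpreter thread with
           Tkapp_ThreadSend(), and the calling thread then blocks on a
           Tcl_Condition.  Tkapp_CallProc() runs in the interpreter thread:
           it converts the arguments there (Tcl_Obj allocation has to happen
           in that thread), invokes Tcl_EvalObjv(), stores either the
           converted result or the fetched exception, and finally notifies
           the condition so this call can return.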
*/ + Tkapp_CallEvent *ev; + Tcl_Condition cond = NULL; + PyObject *exc_type, *exc_value, *exc_tb; + if (!WaitForMainloop(self)) + return NULL; + ev = (Tkapp_CallEvent*)attemptckalloc(sizeof(Tkapp_CallEvent)); + if (ev == NULL) { + PyErr_NoMemory(); + return NULL; + } + ev->ev.proc = (Tcl_EventProc*)Tkapp_CallProc; + ev->self = self; + ev->args = args; + ev->res = &res; + ev->exc_type = &exc_type; + ev->exc_value = &exc_value; + ev->exc_tb = &exc_tb; + ev->done = &cond; + + Tkapp_ThreadSend(self, (Tcl_Event*)ev, &cond, &call_mutex); + + if (res == NULL) { + if (exc_type) + PyErr_Restore(exc_type, exc_value, exc_tb); + else + PyErr_SetObject(Tkinter_TclError, exc_value); + } + Tcl_ConditionFinalize(&cond); + } + else + { + + objv = Tkapp_CallArgs(args, objStore, &objc); + if (!objv) + return NULL; + + ENTER_TCL + + i = Tcl_EvalObjv(self->interp, objc, objv, flags); + + ENTER_OVERLAP + + if (i == TCL_ERROR) + Tkinter_Error(self); + else + res = Tkapp_ObjectResult(self); + + LEAVE_OVERLAP_TCL + + Tkapp_CallDeallocArgs(objv, objStore, objc); + } + return res; +} + + +/*[clinic input] +_tkinter.tkapp.eval + + script: str + / + +[clinic start generated code]*/ + +static PyObject * +_tkinter_tkapp_eval_impl(TkappObject *self, const char *script) +/*[clinic end generated code: output=24b79831f700dea0 input=481484123a455f22]*/ +{ + PyObject *res = NULL; + int err; + + CHECK_STRING_LENGTH(script); + CHECK_TCL_APPARTMENT; + + ENTER_TCL + err = Tcl_Eval(Tkapp_Interp(self), script); + ENTER_OVERLAP + if (err == TCL_ERROR) + res = Tkinter_Error(self); + else + res = Tkapp_UnicodeResult(self); + LEAVE_OVERLAP_TCL + return res; +} + +/*[clinic input] +_tkinter.tkapp.evalfile + + fileName: str + / + +[clinic start generated code]*/ + +static PyObject * +_tkinter_tkapp_evalfile_impl(TkappObject *self, const char *fileName) +/*[clinic end generated code: output=63be88dcee4f11d3 input=873ab707e5e947e1]*/ +{ + PyObject *res = NULL; + int err; + + CHECK_STRING_LENGTH(fileName); + CHECK_TCL_APPARTMENT; + + ENTER_TCL + err = Tcl_EvalFile(Tkapp_Interp(self), fileName); + ENTER_OVERLAP + if (err == TCL_ERROR) + res = Tkinter_Error(self); + else + res = Tkapp_UnicodeResult(self); + LEAVE_OVERLAP_TCL + return res; +} + +/*[clinic input] +_tkinter.tkapp.record + + script: str + / + +[clinic start generated code]*/ + +static PyObject * +_tkinter_tkapp_record_impl(TkappObject *self, const char *script) +/*[clinic end generated code: output=0ffe08a0061730df input=c0b0db5a21412cac]*/ +{ + PyObject *res = NULL; + int err; + + CHECK_STRING_LENGTH(script); + CHECK_TCL_APPARTMENT; + + ENTER_TCL + err = Tcl_RecordAndEval(Tkapp_Interp(self), script, TCL_NO_EVAL); + ENTER_OVERLAP + if (err == TCL_ERROR) + res = Tkinter_Error(self); + else + res = Tkapp_UnicodeResult(self); + LEAVE_OVERLAP_TCL + return res; +} + +/*[clinic input] +_tkinter.tkapp.adderrorinfo + + msg: str + / + +[clinic start generated code]*/ + +static PyObject * +_tkinter_tkapp_adderrorinfo_impl(TkappObject *self, const char *msg) +/*[clinic end generated code: output=52162eaca2ee53cb input=f4b37aec7c7e8c77]*/ +{ + CHECK_STRING_LENGTH(msg); + CHECK_TCL_APPARTMENT; + + ENTER_TCL + Tcl_AddErrorInfo(Tkapp_Interp(self), msg); + LEAVE_TCL + + Py_RETURN_NONE; +} + + + +/** Tcl Variable **/ + +typedef PyObject* (*EventFunc)(TkappObject *, PyObject *, int); + +TCL_DECLARE_MUTEX(var_mutex) + +typedef struct VarEvent { + Tcl_Event ev; /* must be first */ + TkappObject *self; + PyObject *args; + int flags; + EventFunc func; + PyObject **res; + PyObject **exc_type; + 
PyObject **exc_val; + Tcl_Condition *cond; +} VarEvent; + +/*[python] + +class varname_converter(CConverter): + type = 'const char *' + converter = 'varname_converter' + +[python]*/ +/*[python checksum: da39a3ee5e6b4b0d3255bfef95601890afd80709]*/ + +static int +varname_converter(PyObject *in, void *_out) +{ + const char *s; + const char **out = (const char**)_out; + if (PyBytes_Check(in)) { + if (PyBytes_GET_SIZE(in) > INT_MAX) { + PyErr_SetString(PyExc_OverflowError, "bytes object is too long"); + return 0; + } + s = PyBytes_AS_STRING(in); + if (strlen(s) != (size_t)PyBytes_GET_SIZE(in)) { + PyErr_SetString(PyExc_ValueError, "embedded null byte"); + return 0; + } + *out = s; + return 1; + } + if (PyUnicode_Check(in)) { + Py_ssize_t size; + s = PyUnicode_AsUTF8AndSize(in, &size); + if (s == NULL) { + return 0; + } + if (size > INT_MAX) { + PyErr_SetString(PyExc_OverflowError, "string is too long"); + return 0; + } + if (strlen(s) != (size_t)size) { + PyErr_SetString(PyExc_ValueError, "embedded null character"); + return 0; + } + *out = s; + return 1; + } + if (PyTclObject_Check(in)) { + *out = Tcl_GetString(((PyTclObject *)in)->value); + return 1; + } + PyErr_Format(PyExc_TypeError, + "must be str, bytes or Tcl_Obj, not %.50s", + Py_TYPE(in)->tp_name); + return 0; +} + + +static void +var_perform(VarEvent *ev) +{ + *(ev->res) = ev->func(ev->self, ev->args, ev->flags); + if (!*(ev->res)) { + PyObject *exc, *val, *tb; + PyErr_Fetch(&exc, &val, &tb); + PyErr_NormalizeException(&exc, &val, &tb); + *(ev->exc_type) = exc; + *(ev->exc_val) = val; + Py_XDECREF(tb); + } + +} + +static int +var_proc(VarEvent* ev, int flags) +{ + ENTER_PYTHON + var_perform(ev); + Tcl_MutexLock(&var_mutex); + Tcl_ConditionNotify(ev->cond); + Tcl_MutexUnlock(&var_mutex); + LEAVE_PYTHON + return 1; +} + + +static PyObject* +var_invoke(EventFunc func, PyObject *selfptr, PyObject *args, int flags) +{ + TkappObject *self = (TkappObject*)selfptr; + if (self->threaded && self->thread_id != Tcl_GetCurrentThread()) { + VarEvent *ev; + PyObject *res, *exc_type, *exc_val; + Tcl_Condition cond = NULL; + + /* The current thread is not the interpreter thread. Marshal + the call to the interpreter thread, then wait for + completion. */ + if (!WaitForMainloop(self)) + return NULL; + + ev = (VarEvent*)attemptckalloc(sizeof(VarEvent)); + if (ev == NULL) { + PyErr_NoMemory(); + return NULL; + } + ev->self = self; + ev->args = args; + ev->flags = flags; + ev->func = func; + ev->res = &res; + ev->exc_type = &exc_type; + ev->exc_val = &exc_val; + ev->cond = &cond; + ev->ev.proc = (Tcl_EventProc*)var_proc; + Tkapp_ThreadSend(self, (Tcl_Event*)ev, &cond, &var_mutex); + Tcl_ConditionFinalize(&cond); + if (!res) { + PyErr_SetObject(exc_type, exc_val); + Py_DECREF(exc_type); + Py_DECREF(exc_val); + return NULL; + } + return res; + } + /* Tcl is not threaded, or this is the interpreter thread. */ + return func(self, args, flags); +} + +static PyObject * +SetVar(TkappObject *self, PyObject *args, int flags) +{ + const char *name1, *name2; + PyObject *newValue; + PyObject *res = NULL; + Tcl_Obj *newval, *ok; + + switch (PyTuple_GET_SIZE(args)) { + case 2: + if (!PyArg_ParseTuple(args, "O&O:setvar", + varname_converter, &name1, &newValue)) + return NULL; + /* XXX Acquire tcl lock??? 
*/ + newval = AsObj(newValue); + if (newval == NULL) + return NULL; + ENTER_TCL + ok = Tcl_SetVar2Ex(Tkapp_Interp(self), name1, NULL, + newval, flags); + ENTER_OVERLAP + if (!ok) + Tkinter_Error(self); + else { + res = Py_None; + Py_INCREF(res); + } + LEAVE_OVERLAP_TCL + break; + case 3: + if (!PyArg_ParseTuple(args, "ssO:setvar", + &name1, &name2, &newValue)) + return NULL; + CHECK_STRING_LENGTH(name1); + CHECK_STRING_LENGTH(name2); + /* XXX must hold tcl lock already??? */ + newval = AsObj(newValue); + ENTER_TCL + ok = Tcl_SetVar2Ex(Tkapp_Interp(self), name1, name2, newval, flags); + ENTER_OVERLAP + if (!ok) + Tkinter_Error(self); + else { + res = Py_None; + Py_INCREF(res); + } + LEAVE_OVERLAP_TCL + break; + default: + PyErr_SetString(PyExc_TypeError, "setvar requires 2 to 3 arguments"); + return NULL; + } + return res; +} + +static PyObject * +Tkapp_SetVar(PyObject *self, PyObject *args) +{ + return var_invoke(SetVar, self, args, TCL_LEAVE_ERR_MSG); +} + +static PyObject * +Tkapp_GlobalSetVar(PyObject *self, PyObject *args) +{ + return var_invoke(SetVar, self, args, TCL_LEAVE_ERR_MSG | TCL_GLOBAL_ONLY); +} + + + +static PyObject * +GetVar(TkappObject *self, PyObject *args, int flags) +{ + const char *name1, *name2=NULL; + PyObject *res = NULL; + Tcl_Obj *tres; + + if (!PyArg_ParseTuple(args, "O&|s:getvar", + varname_converter, &name1, &name2)) + return NULL; + + CHECK_STRING_LENGTH(name2); + ENTER_TCL + tres = Tcl_GetVar2Ex(Tkapp_Interp(self), name1, name2, flags); + ENTER_OVERLAP + if (tres == NULL) { + Tkinter_Error(self); + } else { + if (self->wantobjects) { + res = FromObj(self, tres); + } + else { + res = unicodeFromTclObj(tres); + } + } + LEAVE_OVERLAP_TCL + return res; +} + +static PyObject * +Tkapp_GetVar(PyObject *self, PyObject *args) +{ + return var_invoke(GetVar, self, args, TCL_LEAVE_ERR_MSG); +} + +static PyObject * +Tkapp_GlobalGetVar(PyObject *self, PyObject *args) +{ + return var_invoke(GetVar, self, args, TCL_LEAVE_ERR_MSG | TCL_GLOBAL_ONLY); +} + + + +static PyObject * +UnsetVar(TkappObject *self, PyObject *args, int flags) +{ + char *name1, *name2=NULL; + int code; + PyObject *res = NULL; + + if (!PyArg_ParseTuple(args, "s|s:unsetvar", &name1, &name2)) + return NULL; + + CHECK_STRING_LENGTH(name1); + CHECK_STRING_LENGTH(name2); + ENTER_TCL + code = Tcl_UnsetVar2(Tkapp_Interp(self), name1, name2, flags); + ENTER_OVERLAP + if (code == TCL_ERROR) + res = Tkinter_Error(self); + else { + Py_INCREF(Py_None); + res = Py_None; + } + LEAVE_OVERLAP_TCL + return res; +} + +static PyObject * +Tkapp_UnsetVar(PyObject *self, PyObject *args) +{ + return var_invoke(UnsetVar, self, args, TCL_LEAVE_ERR_MSG); +} + +static PyObject * +Tkapp_GlobalUnsetVar(PyObject *self, PyObject *args) +{ + return var_invoke(UnsetVar, self, args, + TCL_LEAVE_ERR_MSG | TCL_GLOBAL_ONLY); +} + + + +/** Tcl to Python **/ + +/*[clinic input] +_tkinter.tkapp.getint + + arg: object + / + +[clinic start generated code]*/ + +static PyObject * +_tkinter_tkapp_getint(TkappObject *self, PyObject *arg) +/*[clinic end generated code: output=88cf293fae307cfe input=034026997c5b91f8]*/ +{ + char *s; + Tcl_Obj *value; + PyObject *result; + + if (PyLong_Check(arg)) { + Py_INCREF(arg); + return arg; + } + + if (PyTclObject_Check(arg)) { + value = ((PyTclObject*)arg)->value; + Tcl_IncrRefCount(value); + } + else { + if (!PyArg_Parse(arg, "s:getint", &s)) + return NULL; + CHECK_STRING_LENGTH(s); + value = Tcl_NewStringObj(s, -1); + if (value == NULL) + return Tkinter_Error(self); + } + /* Don't use Tcl_GetInt() because it 
returns ambiguous result for value + in ranges -2**32..-2**31-1 and 2**31..2**32-1 (on 32-bit platform). + + Prefer bignum because Tcl_GetWideIntFromObj returns ambiguous result for + value in ranges -2**64..-2**63-1 and 2**63..2**64-1 (on 32-bit platform). + */ + result = fromBignumObj(self, value); + Tcl_DecrRefCount(value); + if (result != NULL || PyErr_Occurred()) + return result; + return Tkinter_Error(self); +} + +/*[clinic input] +_tkinter.tkapp.getdouble + + arg: object + / + +[clinic start generated code]*/ + +static PyObject * +_tkinter_tkapp_getdouble(TkappObject *self, PyObject *arg) +/*[clinic end generated code: output=c52b138bd8b956b9 input=22015729ce9ef7f8]*/ +{ + char *s; + double v; + + if (PyFloat_Check(arg)) { + Py_INCREF(arg); + return arg; + } + + if (PyNumber_Check(arg)) { + return PyNumber_Float(arg); + } + + if (PyTclObject_Check(arg)) { + if (Tcl_GetDoubleFromObj(Tkapp_Interp(self), + ((PyTclObject*)arg)->value, + &v) == TCL_ERROR) + return Tkinter_Error(self); + return PyFloat_FromDouble(v); + } + + if (!PyArg_Parse(arg, "s:getdouble", &s)) + return NULL; + CHECK_STRING_LENGTH(s); + if (Tcl_GetDouble(Tkapp_Interp(self), s, &v) == TCL_ERROR) + return Tkinter_Error(self); + return PyFloat_FromDouble(v); +} + +/*[clinic input] +_tkinter.tkapp.getboolean + + arg: object + / + +[clinic start generated code]*/ + +static PyObject * +_tkinter_tkapp_getboolean(TkappObject *self, PyObject *arg) +/*[clinic end generated code: output=726a9ae445821d91 input=7f11248ef8f8776e]*/ +{ + char *s; + int v; + + if (PyLong_Check(arg)) { /* int or bool */ + return PyBool_FromLong(Py_SIZE(arg) != 0); + } + + if (PyTclObject_Check(arg)) { + if (Tcl_GetBooleanFromObj(Tkapp_Interp(self), + ((PyTclObject*)arg)->value, + &v) == TCL_ERROR) + return Tkinter_Error(self); + return PyBool_FromLong(v); + } + + if (!PyArg_Parse(arg, "s:getboolean", &s)) + return NULL; + CHECK_STRING_LENGTH(s); + if (Tcl_GetBoolean(Tkapp_Interp(self), s, &v) == TCL_ERROR) + return Tkinter_Error(self); + return PyBool_FromLong(v); +} + +/*[clinic input] +_tkinter.tkapp.exprstring + + s: str + / + +[clinic start generated code]*/ + +static PyObject * +_tkinter_tkapp_exprstring_impl(TkappObject *self, const char *s) +/*[clinic end generated code: output=beda323d3ed0abb1 input=fa78f751afb2f21b]*/ +{ + PyObject *res = NULL; + int retval; + + CHECK_STRING_LENGTH(s); + CHECK_TCL_APPARTMENT; + + ENTER_TCL + retval = Tcl_ExprString(Tkapp_Interp(self), s); + ENTER_OVERLAP + if (retval == TCL_ERROR) + res = Tkinter_Error(self); + else + res = Tkapp_UnicodeResult(self); + LEAVE_OVERLAP_TCL + return res; +} + +/*[clinic input] +_tkinter.tkapp.exprlong + + s: str + / + +[clinic start generated code]*/ + +static PyObject * +_tkinter_tkapp_exprlong_impl(TkappObject *self, const char *s) +/*[clinic end generated code: output=5d6a46b63c6ebcf9 input=11bd7eee0c57b4dc]*/ +{ + PyObject *res = NULL; + int retval; + long v; + + CHECK_STRING_LENGTH(s); + CHECK_TCL_APPARTMENT; + + ENTER_TCL + retval = Tcl_ExprLong(Tkapp_Interp(self), s, &v); + ENTER_OVERLAP + if (retval == TCL_ERROR) + res = Tkinter_Error(self); + else + res = PyLong_FromLong(v); + LEAVE_OVERLAP_TCL + return res; +} + +/*[clinic input] +_tkinter.tkapp.exprdouble + + s: str + / + +[clinic start generated code]*/ + +static PyObject * +_tkinter_tkapp_exprdouble_impl(TkappObject *self, const char *s) +/*[clinic end generated code: output=ff78df1081ea4158 input=ff02bc11798832d5]*/ +{ + PyObject *res = NULL; + double v; + int retval; + + CHECK_STRING_LENGTH(s); + 
CHECK_TCL_APPARTMENT; + ENTER_TCL + retval = Tcl_ExprDouble(Tkapp_Interp(self), s, &v); + ENTER_OVERLAP + if (retval == TCL_ERROR) + res = Tkinter_Error(self); + else + res = PyFloat_FromDouble(v); + LEAVE_OVERLAP_TCL + return res; +} + +/*[clinic input] +_tkinter.tkapp.exprboolean + + s: str + / + +[clinic start generated code]*/ + +static PyObject * +_tkinter_tkapp_exprboolean_impl(TkappObject *self, const char *s) +/*[clinic end generated code: output=8b28038c22887311 input=c8c66022bdb8d5d3]*/ +{ + PyObject *res = NULL; + int retval; + int v; + + CHECK_STRING_LENGTH(s); + CHECK_TCL_APPARTMENT; + ENTER_TCL + retval = Tcl_ExprBoolean(Tkapp_Interp(self), s, &v); + ENTER_OVERLAP + if (retval == TCL_ERROR) + res = Tkinter_Error(self); + else + res = PyLong_FromLong(v); + LEAVE_OVERLAP_TCL + return res; +} + + + +/*[clinic input] +_tkinter.tkapp.splitlist + + arg: object + / + +[clinic start generated code]*/ + +static PyObject * +_tkinter_tkapp_splitlist(TkappObject *self, PyObject *arg) +/*[clinic end generated code: output=13b51d34386d36fb input=2b2e13351e3c0b53]*/ +{ + char *list; + int argc; + const char **argv; + PyObject *v; + int i; + + if (PyTclObject_Check(arg)) { + int objc; + Tcl_Obj **objv; + if (Tcl_ListObjGetElements(Tkapp_Interp(self), + ((PyTclObject*)arg)->value, + &objc, &objv) == TCL_ERROR) { + return Tkinter_Error(self); + } + if (!(v = PyTuple_New(objc))) + return NULL; + for (i = 0; i < objc; i++) { + PyObject *s = FromObj(self, objv[i]); + if (!s) { + Py_DECREF(v); + return NULL; + } + PyTuple_SET_ITEM(v, i, s); + } + return v; + } + if (PyTuple_Check(arg)) { + Py_INCREF(arg); + return arg; + } + if (PyList_Check(arg)) { + return PySequence_Tuple(arg); + } + + if (!PyArg_Parse(arg, "et:splitlist", "utf-8", &list)) + return NULL; + + if (strlen(list) >= INT_MAX) { + PyErr_SetString(PyExc_OverflowError, "string is too long"); + PyMem_Free(list); + return NULL; + } + if (Tcl_SplitList(Tkapp_Interp(self), list, + &argc, &argv) == TCL_ERROR) { + PyMem_Free(list); + return Tkinter_Error(self); + } + + if (!(v = PyTuple_New(argc))) + goto finally; + + for (i = 0; i < argc; i++) { + PyObject *s = unicodeFromTclString(argv[i]); + if (!s) { + Py_DECREF(v); + v = NULL; + goto finally; + } + PyTuple_SET_ITEM(v, i, s); + } + + finally: + ckfree(FREECAST argv); + PyMem_Free(list); + return v; +} + + +/** Tcl Command **/ + +/* Client data struct */ +typedef struct { + PyObject *self; + PyObject *func; +} PythonCmd_ClientData; + +static int +PythonCmd_Error(Tcl_Interp *interp) +{ + errorInCmd = 1; + PyErr_Fetch(&excInCmd, &valInCmd, &trbInCmd); + LEAVE_PYTHON + return TCL_ERROR; +} + +/* This is the Tcl command that acts as a wrapper for Python + * function or method. 
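 *
 * It converts each Tcl argument objv[1]..objv[objc-1] to a Python str,
 * calls the wrapped callable with that tuple, converts the return value
 * back to a Tcl object with AsObj() and sets it as the interpreter result.
 * If the callable raises, PythonCmd_Error() stashes the exception in
 * errorInCmd/excInCmd/valInCmd/trbInCmd so the main loop can re-raise it.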
+ */ +static int +PythonCmd(ClientData clientData, Tcl_Interp *interp, + int objc, Tcl_Obj *const objv[]) +{ + PythonCmd_ClientData *data = (PythonCmd_ClientData *)clientData; + PyObject *args, *res; + int i; + Tcl_Obj *obj_res; + + ENTER_PYTHON + + /* Create argument tuple (objv1, ..., objvN) */ + if (!(args = PyTuple_New(objc - 1))) + return PythonCmd_Error(interp); + + for (i = 0; i < (objc - 1); i++) { + PyObject *s = unicodeFromTclObj(objv[i + 1]); + if (!s) { + Py_DECREF(args); + return PythonCmd_Error(interp); + } + PyTuple_SET_ITEM(args, i, s); + } + + res = PyObject_Call(data->func, args, NULL); + Py_DECREF(args); + + if (res == NULL) + return PythonCmd_Error(interp); + + obj_res = AsObj(res); + if (obj_res == NULL) { + Py_DECREF(res); + return PythonCmd_Error(interp); + } + Tcl_SetObjResult(interp, obj_res); + Py_DECREF(res); + + LEAVE_PYTHON + + return TCL_OK; +} + + +static void +PythonCmdDelete(ClientData clientData) +{ + PythonCmd_ClientData *data = (PythonCmd_ClientData *)clientData; + + ENTER_PYTHON + Py_XDECREF(data->self); + Py_XDECREF(data->func); + PyMem_Free(data); + LEAVE_PYTHON +} + + + + +TCL_DECLARE_MUTEX(command_mutex) + +typedef struct CommandEvent{ + Tcl_Event ev; + Tcl_Interp* interp; + const char *name; + int create; + int *status; + ClientData *data; + Tcl_Condition *done; +} CommandEvent; + +static int +Tkapp_CommandProc(CommandEvent *ev, int flags) +{ + if (ev->create) + *ev->status = Tcl_CreateObjCommand( + ev->interp, ev->name, PythonCmd, + ev->data, PythonCmdDelete) == NULL; + else + *ev->status = Tcl_DeleteCommand(ev->interp, ev->name); + Tcl_MutexLock(&command_mutex); + Tcl_ConditionNotify(ev->done); + Tcl_MutexUnlock(&command_mutex); + return 1; +} + +/*[clinic input] +_tkinter.tkapp.createcommand + + name: str + func: object + / + +[clinic start generated code]*/ + +static PyObject * +_tkinter_tkapp_createcommand_impl(TkappObject *self, const char *name, + PyObject *func) +/*[clinic end generated code: output=2a1c79a4ee2af410 input=255785cb70edc6a0]*/ +{ + PythonCmd_ClientData *data; + int err; + + CHECK_STRING_LENGTH(name); + if (!PyCallable_Check(func)) { + PyErr_SetString(PyExc_TypeError, "command not callable"); + return NULL; + } + + if (self->threaded && self->thread_id != Tcl_GetCurrentThread() && + !WaitForMainloop(self)) + return NULL; + + data = PyMem_NEW(PythonCmd_ClientData, 1); + if (!data) + return PyErr_NoMemory(); + Py_INCREF(self); + Py_INCREF(func); + data->self = (PyObject *) self; + data->func = func; + if (self->threaded && self->thread_id != Tcl_GetCurrentThread()) { + Tcl_Condition cond = NULL; + CommandEvent *ev = (CommandEvent*)attemptckalloc(sizeof(CommandEvent)); + if (ev == NULL) { + PyErr_NoMemory(); + PyMem_Free(data); + return NULL; + } + ev->ev.proc = (Tcl_EventProc*)Tkapp_CommandProc; + ev->interp = self->interp; + ev->create = 1; + ev->name = name; + ev->data = (ClientData)data; + ev->status = &err; + ev->done = &cond; + Tkapp_ThreadSend(self, (Tcl_Event*)ev, &cond, &command_mutex); + Tcl_ConditionFinalize(&cond); + } + else + { + ENTER_TCL + err = Tcl_CreateObjCommand( + Tkapp_Interp(self), name, PythonCmd, + (ClientData)data, PythonCmdDelete) == NULL; + LEAVE_TCL + } + if (err) { + PyErr_SetString(Tkinter_TclError, "can't create Tcl command"); + PyMem_Free(data); + return NULL; + } + + Py_RETURN_NONE; +} + + + +/*[clinic input] +_tkinter.tkapp.deletecommand + + name: str + / + +[clinic start generated code]*/ + +static PyObject * +_tkinter_tkapp_deletecommand_impl(TkappObject *self, const char *name) +/*[clinic end 
generated code: output=a67e8cb5845e0d2d input=53e9952eae1f85f5]*/ +{ + int err; + + CHECK_STRING_LENGTH(name); + + if (self->threaded && self->thread_id != Tcl_GetCurrentThread()) { + Tcl_Condition cond = NULL; + CommandEvent *ev; + ev = (CommandEvent*)attemptckalloc(sizeof(CommandEvent)); + if (ev == NULL) { + PyErr_NoMemory(); + return NULL; + } + ev->ev.proc = (Tcl_EventProc*)Tkapp_CommandProc; + ev->interp = self->interp; + ev->create = 0; + ev->name = name; + ev->status = &err; + ev->done = &cond; + Tkapp_ThreadSend(self, (Tcl_Event*)ev, &cond, + &command_mutex); + Tcl_ConditionFinalize(&cond); + } + else + { + ENTER_TCL + err = Tcl_DeleteCommand(self->interp, name); + LEAVE_TCL + } + if (err == -1) { + PyErr_SetString(Tkinter_TclError, "can't delete Tcl command"); + return NULL; + } + Py_RETURN_NONE; +} + + + +#ifdef HAVE_CREATEFILEHANDLER +/** File Handler **/ + +typedef struct _fhcdata { + PyObject *func; + PyObject *file; + int id; + struct _fhcdata *next; +} FileHandler_ClientData; + +static FileHandler_ClientData *HeadFHCD; + +static FileHandler_ClientData * +NewFHCD(PyObject *func, PyObject *file, int id) +{ + FileHandler_ClientData *p; + p = PyMem_NEW(FileHandler_ClientData, 1); + if (p != NULL) { + Py_XINCREF(func); + Py_XINCREF(file); + p->func = func; + p->file = file; + p->id = id; + p->next = HeadFHCD; + HeadFHCD = p; + } + return p; +} + +static void +DeleteFHCD(int id) +{ + FileHandler_ClientData *p, **pp; + + pp = &HeadFHCD; + while ((p = *pp) != NULL) { + if (p->id == id) { + *pp = p->next; + Py_XDECREF(p->func); + Py_XDECREF(p->file); + PyMem_Free(p); + } + else + pp = &p->next; + } +} + +static void +FileHandler(ClientData clientData, int mask) +{ + FileHandler_ClientData *data = (FileHandler_ClientData *)clientData; + PyObject *func, *file, *res; + + ENTER_PYTHON + func = data->func; + file = data->file; + + res = PyObject_CallFunction(func, "Oi", file, mask); + if (res == NULL) { + errorInCmd = 1; + PyErr_Fetch(&excInCmd, &valInCmd, &trbInCmd); + } + Py_XDECREF(res); + LEAVE_PYTHON +} + +/*[clinic input] +_tkinter.tkapp.createfilehandler + + file: object + mask: int + func: object + / + +[clinic start generated code]*/ + +static PyObject * +_tkinter_tkapp_createfilehandler_impl(TkappObject *self, PyObject *file, + int mask, PyObject *func) +/*[clinic end generated code: output=f73ce82de801c353 input=84943a5286e47947]*/ +{ + FileHandler_ClientData *data; + int tfile; + + CHECK_TCL_APPARTMENT; + + tfile = PyObject_AsFileDescriptor(file); + if (tfile < 0) + return NULL; + if (!PyCallable_Check(func)) { + PyErr_SetString(PyExc_TypeError, "bad argument list"); + return NULL; + } + + data = NewFHCD(func, file, tfile); + if (data == NULL) + return NULL; + + /* Ought to check for null Tcl_File object... */ + ENTER_TCL + Tcl_CreateFileHandler(tfile, mask, FileHandler, (ClientData) data); + LEAVE_TCL + Py_RETURN_NONE; +} + +/*[clinic input] +_tkinter.tkapp.deletefilehandler + + file: object + / + +[clinic start generated code]*/ + +static PyObject * +_tkinter_tkapp_deletefilehandler(TkappObject *self, PyObject *file) +/*[clinic end generated code: output=b53cc96ebf9476fd input=abbec19d66312e2a]*/ +{ + int tfile; + + CHECK_TCL_APPARTMENT; + + tfile = PyObject_AsFileDescriptor(file); + if (tfile < 0) + return NULL; + + DeleteFHCD(tfile); + + /* Ought to check for null Tcl_File object... 
*/ + ENTER_TCL + Tcl_DeleteFileHandler(tfile); + LEAVE_TCL + Py_RETURN_NONE; +} +#endif /* HAVE_CREATEFILEHANDLER */ + + +/**** Tktt Object (timer token) ****/ + +static PyObject *Tktt_Type; + +typedef struct { + PyObject_HEAD + Tcl_TimerToken token; + PyObject *func; +} TkttObject; + +/*[clinic input] +_tkinter.tktimertoken.deletetimerhandler + +[clinic start generated code]*/ + +static PyObject * +_tkinter_tktimertoken_deletetimerhandler_impl(TkttObject *self) +/*[clinic end generated code: output=bd7fe17f328cfa55 input=40bd070ff85f5cf3]*/ +{ + TkttObject *v = self; + PyObject *func = v->func; + + if (v->token != NULL) { + Tcl_DeleteTimerHandler(v->token); + v->token = NULL; + } + if (func != NULL) { + v->func = NULL; + Py_DECREF(func); + Py_DECREF(v); /* See Tktt_New() */ + } + Py_RETURN_NONE; +} + +static TkttObject * +Tktt_New(PyObject *func) +{ + TkttObject *v; + + v = PyObject_New(TkttObject, (PyTypeObject *) Tktt_Type); + if (v == NULL) + return NULL; + + Py_INCREF(func); + v->token = NULL; + v->func = func; + + /* Extra reference, deleted when called or when handler is deleted */ + Py_INCREF(v); + return v; +} + +static void +Tktt_Dealloc(PyObject *self) +{ + TkttObject *v = (TkttObject *)self; + PyObject *func = v->func; + PyObject *tp = (PyObject *) Py_TYPE(self); + + Py_XDECREF(func); + + PyObject_Free(self); + Py_DECREF(tp); +} + +static PyObject * +Tktt_Repr(PyObject *self) +{ + TkttObject *v = (TkttObject *)self; + return PyUnicode_FromFormat("<tktimertoken at %p%s>", + v, + v->func == NULL ? ", handler deleted" : ""); +} + +/** Timer Handler **/ + +static void +TimerHandler(ClientData clientData) +{ + TkttObject *v = (TkttObject *)clientData; + PyObject *func = v->func; + PyObject *res; + + if (func == NULL) + return; + + v->func = NULL; + + ENTER_PYTHON + + res = PyObject_CallNoArgs(func); + Py_DECREF(func); + Py_DECREF(v); /* See Tktt_New() */ + + if (res == NULL) { + errorInCmd = 1; + PyErr_Fetch(&excInCmd, &valInCmd, &trbInCmd); + } + else + Py_DECREF(res); + + LEAVE_PYTHON +} + +/*[clinic input] +_tkinter.tkapp.createtimerhandler + + milliseconds: int + func: object + / + +[clinic start generated code]*/ + +static PyObject * +_tkinter_tkapp_createtimerhandler_impl(TkappObject *self, int milliseconds, + PyObject *func) +/*[clinic end generated code: output=2da5959b9d031911 input=ba6729f32f0277a5]*/ +{ + TkttObject *v; + + if (!PyCallable_Check(func)) { + PyErr_SetString(PyExc_TypeError, "bad argument list"); + return NULL; + } + + CHECK_TCL_APPARTMENT; + + v = Tktt_New(func); + if (v) { + v->token = Tcl_CreateTimerHandler(milliseconds, TimerHandler, + (ClientData)v); + } + + return (PyObject *) v; +} + + +/** Event Loop **/ + +/*[clinic input] +_tkinter.tkapp.mainloop + + threshold: int = 0 + / + +[clinic start generated code]*/ + +static PyObject * +_tkinter_tkapp_mainloop_impl(TkappObject *self, int threshold) +/*[clinic end generated code: output=0ba8eabbe57841b0 input=036bcdcf03d5eca0]*/ +{ + PyThreadState *tstate = PyThreadState_Get(); + + CHECK_TCL_APPARTMENT; + self->dispatching = 1; + + quitMainLoop = 0; + while (Tk_GetNumMainWindows() > threshold && + !quitMainLoop && + !errorInCmd) + { + int result; + + if (self->threaded) { + /* Allow other Python threads to run. 
*/ + ENTER_TCL + result = Tcl_DoOneEvent(0); + LEAVE_TCL + } + else { + Py_BEGIN_ALLOW_THREADS + if(tcl_lock)PyThread_acquire_lock(tcl_lock, 1); + tcl_tstate = tstate; + result = Tcl_DoOneEvent(TCL_DONT_WAIT); + tcl_tstate = NULL; + if(tcl_lock)PyThread_release_lock(tcl_lock); + if (result == 0) + Sleep(Tkinter_busywaitinterval); + Py_END_ALLOW_THREADS + } + + if (PyErr_CheckSignals() != 0) { + self->dispatching = 0; + return NULL; + } + if (result < 0) + break; + } + self->dispatching = 0; + quitMainLoop = 0; + + if (errorInCmd) { + errorInCmd = 0; + PyErr_Restore(excInCmd, valInCmd, trbInCmd); + excInCmd = valInCmd = trbInCmd = NULL; + return NULL; + } + Py_RETURN_NONE; +} + +/*[clinic input] +_tkinter.tkapp.dooneevent + + flags: int = 0 + / + +[clinic start generated code]*/ + +static PyObject * +_tkinter_tkapp_dooneevent_impl(TkappObject *self, int flags) +/*[clinic end generated code: output=27c6b2aa464cac29 input=6542b928e364b793]*/ +{ + int rv; + + ENTER_TCL + rv = Tcl_DoOneEvent(flags); + LEAVE_TCL + return PyLong_FromLong(rv); +} + +/*[clinic input] +_tkinter.tkapp.quit +[clinic start generated code]*/ + +static PyObject * +_tkinter_tkapp_quit_impl(TkappObject *self) +/*[clinic end generated code: output=7f21eeff481f754f input=e03020dc38aff23c]*/ +{ + quitMainLoop = 1; + Py_RETURN_NONE; +} + +/*[clinic input] +_tkinter.tkapp.interpaddr +[clinic start generated code]*/ + +static PyObject * +_tkinter_tkapp_interpaddr_impl(TkappObject *self) +/*[clinic end generated code: output=6caaae3273b3c95a input=2dd32cbddb55a111]*/ +{ + return PyLong_FromVoidPtr(Tkapp_Interp(self)); +} + +/*[clinic input] +_tkinter.tkapp.loadtk +[clinic start generated code]*/ + +static PyObject * +_tkinter_tkapp_loadtk_impl(TkappObject *self) +/*[clinic end generated code: output=e9e10a954ce46d2a input=b5e82afedd6354f0]*/ +{ + Tcl_Interp *interp = Tkapp_Interp(self); + const char * _tk_exists = NULL; + int err; + +#ifdef TKINTER_PROTECT_LOADTK + /* Up to Tk 8.4.13, Tk_Init deadlocks on the second call when the + * first call failed. + * To avoid the deadlock, we just refuse the second call through + * a static variable. + */ + if (tk_load_failed) { + PyErr_SetString(Tkinter_TclError, TKINTER_LOADTK_ERRMSG); + return NULL; + } +#endif + + /* We want to guard against calling Tk_Init() multiple times */ + CHECK_TCL_APPARTMENT; + ENTER_TCL + err = Tcl_Eval(Tkapp_Interp(self), "info exists tk_version"); + ENTER_OVERLAP + if (err == TCL_ERROR) { + /* This sets an exception, but we cannot return right + away because we need to exit the overlap first. 
*/ + Tkinter_Error(self); + } else { + _tk_exists = Tcl_GetStringResult(Tkapp_Interp(self)); + } + LEAVE_OVERLAP_TCL + if (err == TCL_ERROR) { + return NULL; + } + if (_tk_exists == NULL || strcmp(_tk_exists, "1") != 0) { + if (Tk_Init(interp) == TCL_ERROR) { + Tkinter_Error(self); +#ifdef TKINTER_PROTECT_LOADTK + tk_load_failed = 1; +#endif + return NULL; + } + } + Py_RETURN_NONE; +} + +static PyObject * +Tkapp_WantObjects(PyObject *self, PyObject *args) +{ + + int wantobjects = -1; + if (!PyArg_ParseTuple(args, "|i:wantobjects", &wantobjects)) + return NULL; + if (wantobjects == -1) + return PyBool_FromLong(((TkappObject*)self)->wantobjects); + ((TkappObject*)self)->wantobjects = wantobjects; + + Py_RETURN_NONE; +} + +/*[clinic input] +_tkinter.tkapp.willdispatch + +[clinic start generated code]*/ + +static PyObject * +_tkinter_tkapp_willdispatch_impl(TkappObject *self) +/*[clinic end generated code: output=0e3f46d244642155 input=d88f5970843d6dab]*/ +{ + self->dispatching = 1; + + Py_RETURN_NONE; +} + + +/**** Tkapp Type Methods ****/ + +static void +Tkapp_Dealloc(PyObject *self) +{ + PyObject *tp = (PyObject *) Py_TYPE(self); + /*CHECK_TCL_APPARTMENT;*/ + ENTER_TCL + Tcl_DeleteInterp(Tkapp_Interp(self)); + LEAVE_TCL + PyObject_Free(self); + Py_DECREF(tp); + DisableEventHook(); +} + + + +/**** Tkinter Module ****/ + +typedef struct { + PyObject* tuple; + Py_ssize_t size; /* current size */ + Py_ssize_t maxsize; /* allocated size */ +} FlattenContext; + +static int +_bump(FlattenContext* context, Py_ssize_t size) +{ + /* expand tuple to hold (at least) size new items. + return true if successful, false if an exception was raised */ + + Py_ssize_t maxsize = context->maxsize * 2; /* never overflows */ + + if (maxsize < context->size + size) + maxsize = context->size + size; /* never overflows */ + + context->maxsize = maxsize; + + return _PyTuple_Resize(&context->tuple, maxsize) >= 0; +} + +static int +_flatten1(FlattenContext* context, PyObject* item, int depth) +{ + /* add tuple or list to argument tuple (recursively) */ + + Py_ssize_t i, size; + + if (depth > 1000) { + PyErr_SetString(PyExc_ValueError, + "nesting too deep in _flatten"); + return 0; + } else if (PyTuple_Check(item) || PyList_Check(item)) { + size = PySequence_Fast_GET_SIZE(item); + /* preallocate (assume no nesting) */ + if (context->size + size > context->maxsize && + !_bump(context, size)) + return 0; + /* copy items to output tuple */ + for (i = 0; i < size; i++) { + PyObject *o = PySequence_Fast_GET_ITEM(item, i); + if (PyList_Check(o) || PyTuple_Check(o)) { + if (!_flatten1(context, o, depth + 1)) + return 0; + } else if (o != Py_None) { + if (context->size + 1 > context->maxsize && + !_bump(context, 1)) + return 0; + Py_INCREF(o); + PyTuple_SET_ITEM(context->tuple, + context->size++, o); + } + } + } else { + PyErr_SetString(PyExc_TypeError, "argument must be sequence"); + return 0; + } + return 1; +} + +/*[clinic input] +_tkinter._flatten + + item: object + / + +[clinic start generated code]*/ + +static PyObject * +_tkinter__flatten(PyObject *module, PyObject *item) +/*[clinic end generated code: output=cad02a3f97f29862 input=6b9c12260aa1157f]*/ +{ + FlattenContext context; + + context.maxsize = PySequence_Size(item); + if (context.maxsize < 0) + return NULL; + if (context.maxsize == 0) + return PyTuple_New(0); + + context.tuple = PyTuple_New(context.maxsize); + if (!context.tuple) + return NULL; + + context.size = 0; + + if (!_flatten1(&context, item, 0)) { + Py_XDECREF(context.tuple); + return NULL; + } + + if 
(_PyTuple_Resize(&context.tuple, context.size)) + return NULL; + + return context.tuple; +} + +/*[clinic input] +_tkinter.create + + screenName: str(accept={str, NoneType}) = None + baseName: str = "" + className: str = "Tk" + interactive: bool(accept={int}) = False + wantobjects: bool(accept={int}) = False + wantTk: bool(accept={int}) = True + if false, then Tk_Init() doesn't get called + sync: bool(accept={int}) = False + if true, then pass -sync to wish + use: str(accept={str, NoneType}) = None + if not None, then pass -use to wish + / + +[clinic start generated code]*/ + +static PyObject * +_tkinter_create_impl(PyObject *module, const char *screenName, + const char *baseName, const char *className, + int interactive, int wantobjects, int wantTk, int sync, + const char *use) +/*[clinic end generated code: output=e3315607648e6bb4 input=da9b17ee7358d862]*/ +{ + /* XXX baseName is not used anymore; + * try getting rid of it. */ + CHECK_STRING_LENGTH(screenName); + CHECK_STRING_LENGTH(baseName); + CHECK_STRING_LENGTH(className); + CHECK_STRING_LENGTH(use); + + return (PyObject *) Tkapp_New(screenName, className, + interactive, wantobjects, wantTk, + sync, use); +} + +/*[clinic input] +_tkinter.setbusywaitinterval + + new_val: int + / + +Set the busy-wait interval in milliseconds between successive calls to Tcl_DoOneEvent in a threaded Python interpreter. + +It should be set to a divisor of the maximum time between frames in an animation. +[clinic start generated code]*/ + +static PyObject * +_tkinter_setbusywaitinterval_impl(PyObject *module, int new_val) +/*[clinic end generated code: output=42bf7757dc2d0ab6 input=deca1d6f9e6dae47]*/ +{ + if (new_val < 0) { + PyErr_SetString(PyExc_ValueError, + "busywaitinterval must be >= 0"); + return NULL; + } + Tkinter_busywaitinterval = new_val; + Py_RETURN_NONE; +} + +/*[clinic input] +_tkinter.getbusywaitinterval -> int + +Return the current busy-wait interval between successive calls to Tcl_DoOneEvent in a threaded Python interpreter. 
+[clinic start generated code]*/ + +static int +_tkinter_getbusywaitinterval_impl(PyObject *module) +/*[clinic end generated code: output=23b72d552001f5c7 input=a695878d2d576a84]*/ +{ + return Tkinter_busywaitinterval; +} + +#include "clinic/_tkinter.c.h" + +static PyMethodDef Tktt_methods[] = +{ + _TKINTER_TKTIMERTOKEN_DELETETIMERHANDLER_METHODDEF + {NULL, NULL} +}; + +static PyType_Slot Tktt_Type_slots[] = { + {Py_tp_dealloc, Tktt_Dealloc}, + {Py_tp_repr, Tktt_Repr}, + {Py_tp_methods, Tktt_methods}, + {0, 0} +}; + +static PyType_Spec Tktt_Type_spec = { + "_tkinter.tktimertoken", + sizeof(TkttObject), + 0, + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_DISALLOW_INSTANTIATION, + Tktt_Type_slots, +}; + + +/**** Tkapp Method List ****/ + +static PyMethodDef Tkapp_methods[] = +{ + _TKINTER_TKAPP_WILLDISPATCH_METHODDEF + {"wantobjects", Tkapp_WantObjects, METH_VARARGS}, + {"call", Tkapp_Call, METH_VARARGS}, + _TKINTER_TKAPP_EVAL_METHODDEF + _TKINTER_TKAPP_EVALFILE_METHODDEF + _TKINTER_TKAPP_RECORD_METHODDEF + _TKINTER_TKAPP_ADDERRORINFO_METHODDEF + {"setvar", Tkapp_SetVar, METH_VARARGS}, + {"globalsetvar", Tkapp_GlobalSetVar, METH_VARARGS}, + {"getvar", Tkapp_GetVar, METH_VARARGS}, + {"globalgetvar", Tkapp_GlobalGetVar, METH_VARARGS}, + {"unsetvar", Tkapp_UnsetVar, METH_VARARGS}, + {"globalunsetvar", Tkapp_GlobalUnsetVar, METH_VARARGS}, + _TKINTER_TKAPP_GETINT_METHODDEF + _TKINTER_TKAPP_GETDOUBLE_METHODDEF + _TKINTER_TKAPP_GETBOOLEAN_METHODDEF + _TKINTER_TKAPP_EXPRSTRING_METHODDEF + _TKINTER_TKAPP_EXPRLONG_METHODDEF + _TKINTER_TKAPP_EXPRDOUBLE_METHODDEF + _TKINTER_TKAPP_EXPRBOOLEAN_METHODDEF + _TKINTER_TKAPP_SPLITLIST_METHODDEF + _TKINTER_TKAPP_CREATECOMMAND_METHODDEF + _TKINTER_TKAPP_DELETECOMMAND_METHODDEF + _TKINTER_TKAPP_CREATEFILEHANDLER_METHODDEF + _TKINTER_TKAPP_DELETEFILEHANDLER_METHODDEF + _TKINTER_TKAPP_CREATETIMERHANDLER_METHODDEF + _TKINTER_TKAPP_MAINLOOP_METHODDEF + _TKINTER_TKAPP_DOONEEVENT_METHODDEF + _TKINTER_TKAPP_QUIT_METHODDEF + _TKINTER_TKAPP_INTERPADDR_METHODDEF + _TKINTER_TKAPP_LOADTK_METHODDEF + {NULL, NULL} +}; + +static PyType_Slot Tkapp_Type_slots[] = { + {Py_tp_dealloc, Tkapp_Dealloc}, + {Py_tp_methods, Tkapp_methods}, + {0, 0} +}; + + +static PyType_Spec Tkapp_Type_spec = { + "_tkinter.tkapp", + sizeof(TkappObject), + 0, + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_DISALLOW_INSTANTIATION, + Tkapp_Type_slots, +}; + +static PyMethodDef moduleMethods[] = +{ + _TKINTER__FLATTEN_METHODDEF + _TKINTER_CREATE_METHODDEF + _TKINTER_SETBUSYWAITINTERVAL_METHODDEF + _TKINTER_GETBUSYWAITINTERVAL_METHODDEF + {NULL, NULL} +}; + +#ifdef WAIT_FOR_STDIN + +static int stdin_ready = 0; + +#ifndef MS_WINDOWS +static void +MyFileProc(void *clientData, int mask) +{ + stdin_ready = 1; +} +#endif + +static PyThreadState *event_tstate = NULL; + +static int +EventHook(void) +{ +#ifndef MS_WINDOWS + int tfile; +#endif + PyEval_RestoreThread(event_tstate); + stdin_ready = 0; + errorInCmd = 0; +#ifndef MS_WINDOWS + tfile = fileno(stdin); + Tcl_CreateFileHandler(tfile, TCL_READABLE, MyFileProc, NULL); +#endif + while (!errorInCmd && !stdin_ready) { + int result; +#ifdef MS_WINDOWS + if (_kbhit()) { + stdin_ready = 1; + break; + } +#endif + Py_BEGIN_ALLOW_THREADS + if(tcl_lock)PyThread_acquire_lock(tcl_lock, 1); + tcl_tstate = event_tstate; + + result = Tcl_DoOneEvent(TCL_DONT_WAIT); + + tcl_tstate = NULL; + if(tcl_lock)PyThread_release_lock(tcl_lock); + if (result == 0) + Sleep(Tkinter_busywaitinterval); + Py_END_ALLOW_THREADS + + if (result < 0) + break; + } +#ifndef MS_WINDOWS + Tcl_DeleteFileHandler(tfile); +#endif + 
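The busy-wait loops above, in mainloop() and in EventHook(), sleep for Tkinter_busywaitinterval milliseconds between Tcl_DoOneEvent() polls whenever _tkinter cannot simply block inside Tcl; that interval is what the module-level setbusywaitinterval()/getbusywaitinterval() functions defined earlier in this diff control. A minimal usage sketch from the Python side, using only that public API (the chosen value is arbitrary):

import _tkinter

previous = _tkinter.getbusywaitinterval()   # save the current interval, in milliseconds
_tkinter.setbusywaitinterval(5)             # poll pending Tcl events more often
assert _tkinter.getbusywaitinterval() == 5
_tkinter.setbusywaitinterval(previous)      # restore the previous polling interval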
if (errorInCmd) { + errorInCmd = 0; + PyErr_Restore(excInCmd, valInCmd, trbInCmd); + excInCmd = valInCmd = trbInCmd = NULL; + PyErr_Print(); + } + PyEval_SaveThread(); + return 0; +} + +#endif + +static void +EnableEventHook(void) +{ +#ifdef WAIT_FOR_STDIN + if (PyOS_InputHook == NULL) { + event_tstate = PyThreadState_Get(); + PyOS_InputHook = EventHook; + } +#endif +} + +static void +DisableEventHook(void) +{ +#ifdef WAIT_FOR_STDIN + if (Tk_GetNumMainWindows() == 0 && PyOS_InputHook == EventHook) { + PyOS_InputHook = NULL; + } +#endif +} + + +static struct PyModuleDef _tkintermodule = { + PyModuleDef_HEAD_INIT, + "_tkinter", + NULL, + -1, + moduleMethods, + NULL, + NULL, + NULL, + NULL +}; + +PyMODINIT_FUNC +PyInit__tkinter(void) +{ + PyObject *m, *uexe, *cexe, *o; + + tcl_lock = PyThread_allocate_lock(); + if (tcl_lock == NULL) + return NULL; + + m = PyModule_Create(&_tkintermodule); + if (m == NULL) + return NULL; + + o = PyErr_NewException("_tkinter.TclError", NULL, NULL); + if (o == NULL) { + Py_DECREF(m); + return NULL; + } + Py_INCREF(o); + if (PyModule_AddObject(m, "TclError", o)) { + Py_DECREF(o); + Py_DECREF(m); + return NULL; + } + Tkinter_TclError = o; + + if (PyModule_AddIntConstant(m, "READABLE", TCL_READABLE)) { + Py_DECREF(m); + return NULL; + } + if (PyModule_AddIntConstant(m, "WRITABLE", TCL_WRITABLE)) { + Py_DECREF(m); + return NULL; + } + if (PyModule_AddIntConstant(m, "EXCEPTION", TCL_EXCEPTION)) { + Py_DECREF(m); + return NULL; + } + if (PyModule_AddIntConstant(m, "WINDOW_EVENTS", TCL_WINDOW_EVENTS)) { + Py_DECREF(m); + return NULL; + } + if (PyModule_AddIntConstant(m, "FILE_EVENTS", TCL_FILE_EVENTS)) { + Py_DECREF(m); + return NULL; + } + if (PyModule_AddIntConstant(m, "TIMER_EVENTS", TCL_TIMER_EVENTS)) { + Py_DECREF(m); + return NULL; + } + if (PyModule_AddIntConstant(m, "IDLE_EVENTS", TCL_IDLE_EVENTS)) { + Py_DECREF(m); + return NULL; + } + if (PyModule_AddIntConstant(m, "ALL_EVENTS", TCL_ALL_EVENTS)) { + Py_DECREF(m); + return NULL; + } + if (PyModule_AddIntConstant(m, "DONT_WAIT", TCL_DONT_WAIT)) { + Py_DECREF(m); + return NULL; + } + if (PyModule_AddStringConstant(m, "TK_VERSION", TK_VERSION)) { + Py_DECREF(m); + return NULL; + } + if (PyModule_AddStringConstant(m, "TCL_VERSION", TCL_VERSION)) { + Py_DECREF(m); + return NULL; + } + + o = PyType_FromSpec(&Tkapp_Type_spec); + if (o == NULL) { + Py_DECREF(m); + return NULL; + } + if (PyModule_AddObject(m, "TkappType", o)) { + Py_DECREF(o); + Py_DECREF(m); + return NULL; + } + Tkapp_Type = o; + + o = PyType_FromSpec(&Tktt_Type_spec); + if (o == NULL) { + Py_DECREF(m); + return NULL; + } + if (PyModule_AddObject(m, "TkttType", o)) { + Py_DECREF(o); + Py_DECREF(m); + return NULL; + } + Tktt_Type = o; + + o = PyType_FromSpec(&PyTclObject_Type_spec); + if (o == NULL) { + Py_DECREF(m); + return NULL; + } + if (PyModule_AddObject(m, "Tcl_Obj", o)) { + Py_DECREF(o); + Py_DECREF(m); + return NULL; + } + PyTclObject_Type = o; + +#ifdef TK_AQUA + /* Tk_MacOSXSetupTkNotifier must be called before Tcl's subsystems + * start waking up. Note that Tcl_FindExecutable will do this, this + * code must be above it! The original warning from + * tkMacOSXAppInit.c is copied below. + * + * NB - You have to swap in the Tk Notifier BEFORE you start up the + * Tcl interpreter for now. It probably should work to do this + * in the other order, but for now it doesn't seem to. + * + */ + Tk_MacOSXSetupTkNotifier(); +#endif + + + /* This helps the dynamic loader; in Unicode aware Tcl versions + it also helps Tcl find its encodings. 
*/ + uexe = PyUnicode_FromWideChar(Py_GetProgramName(), -1); + if (uexe) { + cexe = PyUnicode_EncodeFSDefault(uexe); + if (cexe) { +#ifdef MS_WINDOWS + int set_var = 0; + PyObject *str_path; + wchar_t *wcs_path; + DWORD ret; + + ret = GetEnvironmentVariableW(L"TCL_LIBRARY", NULL, 0); + + if (!ret && GetLastError() == ERROR_ENVVAR_NOT_FOUND) { + str_path = _get_tcl_lib_path(); + if (str_path == NULL && PyErr_Occurred()) { + Py_DECREF(m); + return NULL; + } + if (str_path != NULL) { + wcs_path = PyUnicode_AsWideCharString(str_path, NULL); + if (wcs_path == NULL) { + Py_DECREF(m); + return NULL; + } + SetEnvironmentVariableW(L"TCL_LIBRARY", wcs_path); + set_var = 1; + } + } + + Tcl_FindExecutable(PyBytes_AS_STRING(cexe)); + + if (set_var) { + SetEnvironmentVariableW(L"TCL_LIBRARY", NULL); + PyMem_Free(wcs_path); + } +#else + Tcl_FindExecutable(PyBytes_AS_STRING(cexe)); +#endif /* MS_WINDOWS */ + } + Py_XDECREF(cexe); + Py_DECREF(uexe); + } + + if (PyErr_Occurred()) { + Py_DECREF(m); + return NULL; + } + + return m; +} diff -Nru python3-stdlib-extensions-3.9.7/3.11/Modules/clinic/_gdbmmodule.c.h python3-stdlib-extensions-3.10.8/3.11/Modules/clinic/_gdbmmodule.c.h --- python3-stdlib-extensions-3.9.7/3.11/Modules/clinic/_gdbmmodule.c.h 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Modules/clinic/_gdbmmodule.c.h 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,308 @@ +/*[clinic input] +preserve +[clinic start generated code]*/ + +PyDoc_STRVAR(_gdbm_gdbm_get__doc__, +"get($self, key, default=None, /)\n" +"--\n" +"\n" +"Get the value for key, or default if not present."); + +#define _GDBM_GDBM_GET_METHODDEF \ + {"get", _PyCFunction_CAST(_gdbm_gdbm_get), METH_FASTCALL, _gdbm_gdbm_get__doc__}, + +static PyObject * +_gdbm_gdbm_get_impl(gdbmobject *self, PyObject *key, PyObject *default_value); + +static PyObject * +_gdbm_gdbm_get(gdbmobject *self, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + PyObject *key; + PyObject *default_value = Py_None; + + if (!_PyArg_CheckPositional("get", nargs, 1, 2)) { + goto exit; + } + key = args[0]; + if (nargs < 2) { + goto skip_optional; + } + default_value = args[1]; +skip_optional: + return_value = _gdbm_gdbm_get_impl(self, key, default_value); + +exit: + return return_value; +} + +PyDoc_STRVAR(_gdbm_gdbm_setdefault__doc__, +"setdefault($self, key, default=None, /)\n" +"--\n" +"\n" +"Get value for key, or set it to default and return default if not present."); + +#define _GDBM_GDBM_SETDEFAULT_METHODDEF \ + {"setdefault", _PyCFunction_CAST(_gdbm_gdbm_setdefault), METH_FASTCALL, _gdbm_gdbm_setdefault__doc__}, + +static PyObject * +_gdbm_gdbm_setdefault_impl(gdbmobject *self, PyObject *key, + PyObject *default_value); + +static PyObject * +_gdbm_gdbm_setdefault(gdbmobject *self, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + PyObject *key; + PyObject *default_value = Py_None; + + if (!_PyArg_CheckPositional("setdefault", nargs, 1, 2)) { + goto exit; + } + key = args[0]; + if (nargs < 2) { + goto skip_optional; + } + default_value = args[1]; +skip_optional: + return_value = _gdbm_gdbm_setdefault_impl(self, key, default_value); + +exit: + return return_value; +} + +PyDoc_STRVAR(_gdbm_gdbm_close__doc__, +"close($self, /)\n" +"--\n" +"\n" +"Close the database."); + +#define _GDBM_GDBM_CLOSE_METHODDEF \ + {"close", (PyCFunction)_gdbm_gdbm_close, METH_NOARGS, _gdbm_gdbm_close__doc__}, + +static PyObject * +_gdbm_gdbm_close_impl(gdbmobject *self); + +static PyObject * 
+_gdbm_gdbm_close(gdbmobject *self, PyObject *Py_UNUSED(ignored)) +{ + return _gdbm_gdbm_close_impl(self); +} + +PyDoc_STRVAR(_gdbm_gdbm_keys__doc__, +"keys($self, /)\n" +"--\n" +"\n" +"Get a list of all keys in the database."); + +#define _GDBM_GDBM_KEYS_METHODDEF \ + {"keys", _PyCFunction_CAST(_gdbm_gdbm_keys), METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _gdbm_gdbm_keys__doc__}, + +static PyObject * +_gdbm_gdbm_keys_impl(gdbmobject *self, PyTypeObject *cls); + +static PyObject * +_gdbm_gdbm_keys(gdbmobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + if (nargs) { + PyErr_SetString(PyExc_TypeError, "keys() takes no arguments"); + return NULL; + } + return _gdbm_gdbm_keys_impl(self, cls); +} + +PyDoc_STRVAR(_gdbm_gdbm_firstkey__doc__, +"firstkey($self, /)\n" +"--\n" +"\n" +"Return the starting key for the traversal.\n" +"\n" +"It\'s possible to loop over every key in the database using this method\n" +"and the nextkey() method. The traversal is ordered by GDBM\'s internal\n" +"hash values, and won\'t be sorted by the key values."); + +#define _GDBM_GDBM_FIRSTKEY_METHODDEF \ + {"firstkey", _PyCFunction_CAST(_gdbm_gdbm_firstkey), METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _gdbm_gdbm_firstkey__doc__}, + +static PyObject * +_gdbm_gdbm_firstkey_impl(gdbmobject *self, PyTypeObject *cls); + +static PyObject * +_gdbm_gdbm_firstkey(gdbmobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + if (nargs) { + PyErr_SetString(PyExc_TypeError, "firstkey() takes no arguments"); + return NULL; + } + return _gdbm_gdbm_firstkey_impl(self, cls); +} + +PyDoc_STRVAR(_gdbm_gdbm_nextkey__doc__, +"nextkey($self, key, /)\n" +"--\n" +"\n" +"Returns the key that follows key in the traversal.\n" +"\n" +"The following code prints every key in the database db, without having\n" +"to create a list in memory that contains them all:\n" +"\n" +" k = db.firstkey()\n" +" while k is not None:\n" +" print(k)\n" +" k = db.nextkey(k)"); + +#define _GDBM_GDBM_NEXTKEY_METHODDEF \ + {"nextkey", _PyCFunction_CAST(_gdbm_gdbm_nextkey), METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _gdbm_gdbm_nextkey__doc__}, + +static PyObject * +_gdbm_gdbm_nextkey_impl(gdbmobject *self, PyTypeObject *cls, const char *key, + Py_ssize_t key_length); + +static PyObject * +_gdbm_gdbm_nextkey(gdbmobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + static const char * const _keywords[] = {"", NULL}; + static _PyArg_Parser _parser = {"s#:nextkey", _keywords, 0}; + const char *key; + Py_ssize_t key_length; + + if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser, + &key, &key_length)) { + goto exit; + } + return_value = _gdbm_gdbm_nextkey_impl(self, cls, key, key_length); + +exit: + return return_value; +} + +PyDoc_STRVAR(_gdbm_gdbm_reorganize__doc__, +"reorganize($self, /)\n" +"--\n" +"\n" +"Reorganize the database.\n" +"\n" +"If you have carried out a lot of deletions and would like to shrink\n" +"the space used by the GDBM file, this routine will reorganize the\n" +"database. 
GDBM will not shorten the length of a database file except\n" +"by using this reorganization; otherwise, deleted file space will be\n" +"kept and reused as new (key,value) pairs are added."); + +#define _GDBM_GDBM_REORGANIZE_METHODDEF \ + {"reorganize", _PyCFunction_CAST(_gdbm_gdbm_reorganize), METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _gdbm_gdbm_reorganize__doc__}, + +static PyObject * +_gdbm_gdbm_reorganize_impl(gdbmobject *self, PyTypeObject *cls); + +static PyObject * +_gdbm_gdbm_reorganize(gdbmobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + if (nargs) { + PyErr_SetString(PyExc_TypeError, "reorganize() takes no arguments"); + return NULL; + } + return _gdbm_gdbm_reorganize_impl(self, cls); +} + +PyDoc_STRVAR(_gdbm_gdbm_sync__doc__, +"sync($self, /)\n" +"--\n" +"\n" +"Flush the database to the disk file.\n" +"\n" +"When the database has been opened in fast mode, this method forces\n" +"any unwritten data to be written to the disk."); + +#define _GDBM_GDBM_SYNC_METHODDEF \ + {"sync", _PyCFunction_CAST(_gdbm_gdbm_sync), METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _gdbm_gdbm_sync__doc__}, + +static PyObject * +_gdbm_gdbm_sync_impl(gdbmobject *self, PyTypeObject *cls); + +static PyObject * +_gdbm_gdbm_sync(gdbmobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + if (nargs) { + PyErr_SetString(PyExc_TypeError, "sync() takes no arguments"); + return NULL; + } + return _gdbm_gdbm_sync_impl(self, cls); +} + +PyDoc_STRVAR(dbmopen__doc__, +"open($module, filename, flags=\'r\', mode=0o666, /)\n" +"--\n" +"\n" +"Open a dbm database and return a dbm object.\n" +"\n" +"The filename argument is the name of the database file.\n" +"\n" +"The optional flags argument can be \'r\' (to open an existing database\n" +"for reading only -- default), \'w\' (to open an existing database for\n" +"reading and writing), \'c\' (which creates the database if it doesn\'t\n" +"exist), or \'n\' (which always creates a new empty database).\n" +"\n" +"Some versions of gdbm support additional flags which must be\n" +"appended to one of the flags described above. The module constant\n" +"\'open_flags\' is a string of valid additional flags. The \'f\' flag\n" +"opens the database in fast mode; altered data will not automatically\n" +"be written to the disk after every change. This results in faster\n" +"writes to the database, but may result in an inconsistent database\n" +"if the program crashes while the database is still open. Use the\n" +"sync() method to force any unwritten data to be written to the disk.\n" +"The \'s\' flag causes all database operations to be synchronized to\n" +"disk. The \'u\' flag disables locking of the database file.\n" +"\n" +"The optional mode argument is the Unix mode of the file, used only\n" +"when the database has to be created. 
It defaults to octal 0o666."); + +#define DBMOPEN_METHODDEF \ + {"open", _PyCFunction_CAST(dbmopen), METH_FASTCALL, dbmopen__doc__}, + +static PyObject * +dbmopen_impl(PyObject *module, PyObject *filename, const char *flags, + int mode); + +static PyObject * +dbmopen(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + PyObject *filename; + const char *flags = "r"; + int mode = 438; + + if (!_PyArg_CheckPositional("open", nargs, 1, 3)) { + goto exit; + } + filename = args[0]; + if (nargs < 2) { + goto skip_optional; + } + if (!PyUnicode_Check(args[1])) { + _PyArg_BadArgument("open", "argument 2", "str", args[1]); + goto exit; + } + Py_ssize_t flags_length; + flags = PyUnicode_AsUTF8AndSize(args[1], &flags_length); + if (flags == NULL) { + goto exit; + } + if (strlen(flags) != (size_t)flags_length) { + PyErr_SetString(PyExc_ValueError, "embedded null character"); + goto exit; + } + if (nargs < 3) { + goto skip_optional; + } + mode = _PyLong_AsInt(args[2]); + if (mode == -1 && PyErr_Occurred()) { + goto exit; + } +skip_optional: + return_value = dbmopen_impl(module, filename, flags, mode); + +exit: + return return_value; +} +/*[clinic end generated code: output=617117d16956ac4d input=a9049054013a1b77]*/ diff -Nru python3-stdlib-extensions-3.9.7/3.11/Modules/clinic/_tkinter.c.h python3-stdlib-extensions-3.10.8/3.11/Modules/clinic/_tkinter.c.h --- python3-stdlib-extensions-3.9.7/3.11/Modules/clinic/_tkinter.c.h 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Modules/clinic/_tkinter.c.h 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,862 @@ +/*[clinic input] +preserve +[clinic start generated code]*/ + +PyDoc_STRVAR(_tkinter_tkapp_eval__doc__, +"eval($self, script, /)\n" +"--\n" +"\n"); + +#define _TKINTER_TKAPP_EVAL_METHODDEF \ + {"eval", (PyCFunction)_tkinter_tkapp_eval, METH_O, _tkinter_tkapp_eval__doc__}, + +static PyObject * +_tkinter_tkapp_eval_impl(TkappObject *self, const char *script); + +static PyObject * +_tkinter_tkapp_eval(TkappObject *self, PyObject *arg) +{ + PyObject *return_value = NULL; + const char *script; + + if (!PyUnicode_Check(arg)) { + _PyArg_BadArgument("eval", "argument", "str", arg); + goto exit; + } + Py_ssize_t script_length; + script = PyUnicode_AsUTF8AndSize(arg, &script_length); + if (script == NULL) { + goto exit; + } + if (strlen(script) != (size_t)script_length) { + PyErr_SetString(PyExc_ValueError, "embedded null character"); + goto exit; + } + return_value = _tkinter_tkapp_eval_impl(self, script); + +exit: + return return_value; +} + +PyDoc_STRVAR(_tkinter_tkapp_evalfile__doc__, +"evalfile($self, fileName, /)\n" +"--\n" +"\n"); + +#define _TKINTER_TKAPP_EVALFILE_METHODDEF \ + {"evalfile", (PyCFunction)_tkinter_tkapp_evalfile, METH_O, _tkinter_tkapp_evalfile__doc__}, + +static PyObject * +_tkinter_tkapp_evalfile_impl(TkappObject *self, const char *fileName); + +static PyObject * +_tkinter_tkapp_evalfile(TkappObject *self, PyObject *arg) +{ + PyObject *return_value = NULL; + const char *fileName; + + if (!PyUnicode_Check(arg)) { + _PyArg_BadArgument("evalfile", "argument", "str", arg); + goto exit; + } + Py_ssize_t fileName_length; + fileName = PyUnicode_AsUTF8AndSize(arg, &fileName_length); + if (fileName == NULL) { + goto exit; + } + if (strlen(fileName) != (size_t)fileName_length) { + PyErr_SetString(PyExc_ValueError, "embedded null character"); + goto exit; + } + return_value = _tkinter_tkapp_evalfile_impl(self, fileName); + +exit: + return return_value; +} + 
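The wrappers above show how the str arguments of eval() and evalfile() are validated (UTF-8 conversion plus a check that rejects embedded NUL bytes) before the _impl functions run, and the createcommand()/deletecommand() implementations earlier in this diff bridge Python callables into Tcl commands. A minimal usage sketch from the Python side; the command name py_add is made up for the example, everything else is the public tkinter/_tkinter API:

import tkinter

tcl = tkinter.Tcl().tk                      # the underlying _tkinter.tkapp object, no Tk needed
print(tcl.eval("expr {6 * 7}"))             # eval() returns the Tcl result as a string: '42'

# PythonCmd() hands the Tcl arguments to the callable as strings.
tcl.createcommand("py_add", lambda a, b: int(a) + int(b))
print(tcl.eval("py_add 2 3"))               # -> '5'
tcl.deletecommand("py_add")

try:
    tcl.eval("set x y\0z")                  # embedded NUL is rejected by the clinic wrapper
except ValueError as exc:
    print(exc)                              # embedded null character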
+PyDoc_STRVAR(_tkinter_tkapp_record__doc__, +"record($self, script, /)\n" +"--\n" +"\n"); + +#define _TKINTER_TKAPP_RECORD_METHODDEF \ + {"record", (PyCFunction)_tkinter_tkapp_record, METH_O, _tkinter_tkapp_record__doc__}, + +static PyObject * +_tkinter_tkapp_record_impl(TkappObject *self, const char *script); + +static PyObject * +_tkinter_tkapp_record(TkappObject *self, PyObject *arg) +{ + PyObject *return_value = NULL; + const char *script; + + if (!PyUnicode_Check(arg)) { + _PyArg_BadArgument("record", "argument", "str", arg); + goto exit; + } + Py_ssize_t script_length; + script = PyUnicode_AsUTF8AndSize(arg, &script_length); + if (script == NULL) { + goto exit; + } + if (strlen(script) != (size_t)script_length) { + PyErr_SetString(PyExc_ValueError, "embedded null character"); + goto exit; + } + return_value = _tkinter_tkapp_record_impl(self, script); + +exit: + return return_value; +} + +PyDoc_STRVAR(_tkinter_tkapp_adderrorinfo__doc__, +"adderrorinfo($self, msg, /)\n" +"--\n" +"\n"); + +#define _TKINTER_TKAPP_ADDERRORINFO_METHODDEF \ + {"adderrorinfo", (PyCFunction)_tkinter_tkapp_adderrorinfo, METH_O, _tkinter_tkapp_adderrorinfo__doc__}, + +static PyObject * +_tkinter_tkapp_adderrorinfo_impl(TkappObject *self, const char *msg); + +static PyObject * +_tkinter_tkapp_adderrorinfo(TkappObject *self, PyObject *arg) +{ + PyObject *return_value = NULL; + const char *msg; + + if (!PyUnicode_Check(arg)) { + _PyArg_BadArgument("adderrorinfo", "argument", "str", arg); + goto exit; + } + Py_ssize_t msg_length; + msg = PyUnicode_AsUTF8AndSize(arg, &msg_length); + if (msg == NULL) { + goto exit; + } + if (strlen(msg) != (size_t)msg_length) { + PyErr_SetString(PyExc_ValueError, "embedded null character"); + goto exit; + } + return_value = _tkinter_tkapp_adderrorinfo_impl(self, msg); + +exit: + return return_value; +} + +PyDoc_STRVAR(_tkinter_tkapp_getint__doc__, +"getint($self, arg, /)\n" +"--\n" +"\n"); + +#define _TKINTER_TKAPP_GETINT_METHODDEF \ + {"getint", (PyCFunction)_tkinter_tkapp_getint, METH_O, _tkinter_tkapp_getint__doc__}, + +PyDoc_STRVAR(_tkinter_tkapp_getdouble__doc__, +"getdouble($self, arg, /)\n" +"--\n" +"\n"); + +#define _TKINTER_TKAPP_GETDOUBLE_METHODDEF \ + {"getdouble", (PyCFunction)_tkinter_tkapp_getdouble, METH_O, _tkinter_tkapp_getdouble__doc__}, + +PyDoc_STRVAR(_tkinter_tkapp_getboolean__doc__, +"getboolean($self, arg, /)\n" +"--\n" +"\n"); + +#define _TKINTER_TKAPP_GETBOOLEAN_METHODDEF \ + {"getboolean", (PyCFunction)_tkinter_tkapp_getboolean, METH_O, _tkinter_tkapp_getboolean__doc__}, + +PyDoc_STRVAR(_tkinter_tkapp_exprstring__doc__, +"exprstring($self, s, /)\n" +"--\n" +"\n"); + +#define _TKINTER_TKAPP_EXPRSTRING_METHODDEF \ + {"exprstring", (PyCFunction)_tkinter_tkapp_exprstring, METH_O, _tkinter_tkapp_exprstring__doc__}, + +static PyObject * +_tkinter_tkapp_exprstring_impl(TkappObject *self, const char *s); + +static PyObject * +_tkinter_tkapp_exprstring(TkappObject *self, PyObject *arg) +{ + PyObject *return_value = NULL; + const char *s; + + if (!PyUnicode_Check(arg)) { + _PyArg_BadArgument("exprstring", "argument", "str", arg); + goto exit; + } + Py_ssize_t s_length; + s = PyUnicode_AsUTF8AndSize(arg, &s_length); + if (s == NULL) { + goto exit; + } + if (strlen(s) != (size_t)s_length) { + PyErr_SetString(PyExc_ValueError, "embedded null character"); + goto exit; + } + return_value = _tkinter_tkapp_exprstring_impl(self, s); + +exit: + return return_value; +} + +PyDoc_STRVAR(_tkinter_tkapp_exprlong__doc__, +"exprlong($self, s, /)\n" +"--\n" +"\n"); + +#define 
_TKINTER_TKAPP_EXPRLONG_METHODDEF \ + {"exprlong", (PyCFunction)_tkinter_tkapp_exprlong, METH_O, _tkinter_tkapp_exprlong__doc__}, + +static PyObject * +_tkinter_tkapp_exprlong_impl(TkappObject *self, const char *s); + +static PyObject * +_tkinter_tkapp_exprlong(TkappObject *self, PyObject *arg) +{ + PyObject *return_value = NULL; + const char *s; + + if (!PyUnicode_Check(arg)) { + _PyArg_BadArgument("exprlong", "argument", "str", arg); + goto exit; + } + Py_ssize_t s_length; + s = PyUnicode_AsUTF8AndSize(arg, &s_length); + if (s == NULL) { + goto exit; + } + if (strlen(s) != (size_t)s_length) { + PyErr_SetString(PyExc_ValueError, "embedded null character"); + goto exit; + } + return_value = _tkinter_tkapp_exprlong_impl(self, s); + +exit: + return return_value; +} + +PyDoc_STRVAR(_tkinter_tkapp_exprdouble__doc__, +"exprdouble($self, s, /)\n" +"--\n" +"\n"); + +#define _TKINTER_TKAPP_EXPRDOUBLE_METHODDEF \ + {"exprdouble", (PyCFunction)_tkinter_tkapp_exprdouble, METH_O, _tkinter_tkapp_exprdouble__doc__}, + +static PyObject * +_tkinter_tkapp_exprdouble_impl(TkappObject *self, const char *s); + +static PyObject * +_tkinter_tkapp_exprdouble(TkappObject *self, PyObject *arg) +{ + PyObject *return_value = NULL; + const char *s; + + if (!PyUnicode_Check(arg)) { + _PyArg_BadArgument("exprdouble", "argument", "str", arg); + goto exit; + } + Py_ssize_t s_length; + s = PyUnicode_AsUTF8AndSize(arg, &s_length); + if (s == NULL) { + goto exit; + } + if (strlen(s) != (size_t)s_length) { + PyErr_SetString(PyExc_ValueError, "embedded null character"); + goto exit; + } + return_value = _tkinter_tkapp_exprdouble_impl(self, s); + +exit: + return return_value; +} + +PyDoc_STRVAR(_tkinter_tkapp_exprboolean__doc__, +"exprboolean($self, s, /)\n" +"--\n" +"\n"); + +#define _TKINTER_TKAPP_EXPRBOOLEAN_METHODDEF \ + {"exprboolean", (PyCFunction)_tkinter_tkapp_exprboolean, METH_O, _tkinter_tkapp_exprboolean__doc__}, + +static PyObject * +_tkinter_tkapp_exprboolean_impl(TkappObject *self, const char *s); + +static PyObject * +_tkinter_tkapp_exprboolean(TkappObject *self, PyObject *arg) +{ + PyObject *return_value = NULL; + const char *s; + + if (!PyUnicode_Check(arg)) { + _PyArg_BadArgument("exprboolean", "argument", "str", arg); + goto exit; + } + Py_ssize_t s_length; + s = PyUnicode_AsUTF8AndSize(arg, &s_length); + if (s == NULL) { + goto exit; + } + if (strlen(s) != (size_t)s_length) { + PyErr_SetString(PyExc_ValueError, "embedded null character"); + goto exit; + } + return_value = _tkinter_tkapp_exprboolean_impl(self, s); + +exit: + return return_value; +} + +PyDoc_STRVAR(_tkinter_tkapp_splitlist__doc__, +"splitlist($self, arg, /)\n" +"--\n" +"\n"); + +#define _TKINTER_TKAPP_SPLITLIST_METHODDEF \ + {"splitlist", (PyCFunction)_tkinter_tkapp_splitlist, METH_O, _tkinter_tkapp_splitlist__doc__}, + +PyDoc_STRVAR(_tkinter_tkapp_createcommand__doc__, +"createcommand($self, name, func, /)\n" +"--\n" +"\n"); + +#define _TKINTER_TKAPP_CREATECOMMAND_METHODDEF \ + {"createcommand", _PyCFunction_CAST(_tkinter_tkapp_createcommand), METH_FASTCALL, _tkinter_tkapp_createcommand__doc__}, + +static PyObject * +_tkinter_tkapp_createcommand_impl(TkappObject *self, const char *name, + PyObject *func); + +static PyObject * +_tkinter_tkapp_createcommand(TkappObject *self, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + const char *name; + PyObject *func; + + if (!_PyArg_CheckPositional("createcommand", nargs, 2, 2)) { + goto exit; + } + if (!PyUnicode_Check(args[0])) { + _PyArg_BadArgument("createcommand", 
"argument 1", "str", args[0]); + goto exit; + } + Py_ssize_t name_length; + name = PyUnicode_AsUTF8AndSize(args[0], &name_length); + if (name == NULL) { + goto exit; + } + if (strlen(name) != (size_t)name_length) { + PyErr_SetString(PyExc_ValueError, "embedded null character"); + goto exit; + } + func = args[1]; + return_value = _tkinter_tkapp_createcommand_impl(self, name, func); + +exit: + return return_value; +} + +PyDoc_STRVAR(_tkinter_tkapp_deletecommand__doc__, +"deletecommand($self, name, /)\n" +"--\n" +"\n"); + +#define _TKINTER_TKAPP_DELETECOMMAND_METHODDEF \ + {"deletecommand", (PyCFunction)_tkinter_tkapp_deletecommand, METH_O, _tkinter_tkapp_deletecommand__doc__}, + +static PyObject * +_tkinter_tkapp_deletecommand_impl(TkappObject *self, const char *name); + +static PyObject * +_tkinter_tkapp_deletecommand(TkappObject *self, PyObject *arg) +{ + PyObject *return_value = NULL; + const char *name; + + if (!PyUnicode_Check(arg)) { + _PyArg_BadArgument("deletecommand", "argument", "str", arg); + goto exit; + } + Py_ssize_t name_length; + name = PyUnicode_AsUTF8AndSize(arg, &name_length); + if (name == NULL) { + goto exit; + } + if (strlen(name) != (size_t)name_length) { + PyErr_SetString(PyExc_ValueError, "embedded null character"); + goto exit; + } + return_value = _tkinter_tkapp_deletecommand_impl(self, name); + +exit: + return return_value; +} + +#if defined(HAVE_CREATEFILEHANDLER) + +PyDoc_STRVAR(_tkinter_tkapp_createfilehandler__doc__, +"createfilehandler($self, file, mask, func, /)\n" +"--\n" +"\n"); + +#define _TKINTER_TKAPP_CREATEFILEHANDLER_METHODDEF \ + {"createfilehandler", _PyCFunction_CAST(_tkinter_tkapp_createfilehandler), METH_FASTCALL, _tkinter_tkapp_createfilehandler__doc__}, + +static PyObject * +_tkinter_tkapp_createfilehandler_impl(TkappObject *self, PyObject *file, + int mask, PyObject *func); + +static PyObject * +_tkinter_tkapp_createfilehandler(TkappObject *self, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + PyObject *file; + int mask; + PyObject *func; + + if (!_PyArg_CheckPositional("createfilehandler", nargs, 3, 3)) { + goto exit; + } + file = args[0]; + mask = _PyLong_AsInt(args[1]); + if (mask == -1 && PyErr_Occurred()) { + goto exit; + } + func = args[2]; + return_value = _tkinter_tkapp_createfilehandler_impl(self, file, mask, func); + +exit: + return return_value; +} + +#endif /* defined(HAVE_CREATEFILEHANDLER) */ + +#if defined(HAVE_CREATEFILEHANDLER) + +PyDoc_STRVAR(_tkinter_tkapp_deletefilehandler__doc__, +"deletefilehandler($self, file, /)\n" +"--\n" +"\n"); + +#define _TKINTER_TKAPP_DELETEFILEHANDLER_METHODDEF \ + {"deletefilehandler", (PyCFunction)_tkinter_tkapp_deletefilehandler, METH_O, _tkinter_tkapp_deletefilehandler__doc__}, + +#endif /* defined(HAVE_CREATEFILEHANDLER) */ + +PyDoc_STRVAR(_tkinter_tktimertoken_deletetimerhandler__doc__, +"deletetimerhandler($self, /)\n" +"--\n" +"\n"); + +#define _TKINTER_TKTIMERTOKEN_DELETETIMERHANDLER_METHODDEF \ + {"deletetimerhandler", (PyCFunction)_tkinter_tktimertoken_deletetimerhandler, METH_NOARGS, _tkinter_tktimertoken_deletetimerhandler__doc__}, + +static PyObject * +_tkinter_tktimertoken_deletetimerhandler_impl(TkttObject *self); + +static PyObject * +_tkinter_tktimertoken_deletetimerhandler(TkttObject *self, PyObject *Py_UNUSED(ignored)) +{ + return _tkinter_tktimertoken_deletetimerhandler_impl(self); +} + +PyDoc_STRVAR(_tkinter_tkapp_createtimerhandler__doc__, +"createtimerhandler($self, milliseconds, func, /)\n" +"--\n" +"\n"); + +#define 
_TKINTER_TKAPP_CREATETIMERHANDLER_METHODDEF \ + {"createtimerhandler", _PyCFunction_CAST(_tkinter_tkapp_createtimerhandler), METH_FASTCALL, _tkinter_tkapp_createtimerhandler__doc__}, + +static PyObject * +_tkinter_tkapp_createtimerhandler_impl(TkappObject *self, int milliseconds, + PyObject *func); + +static PyObject * +_tkinter_tkapp_createtimerhandler(TkappObject *self, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + int milliseconds; + PyObject *func; + + if (!_PyArg_CheckPositional("createtimerhandler", nargs, 2, 2)) { + goto exit; + } + milliseconds = _PyLong_AsInt(args[0]); + if (milliseconds == -1 && PyErr_Occurred()) { + goto exit; + } + func = args[1]; + return_value = _tkinter_tkapp_createtimerhandler_impl(self, milliseconds, func); + +exit: + return return_value; +} + +PyDoc_STRVAR(_tkinter_tkapp_mainloop__doc__, +"mainloop($self, threshold=0, /)\n" +"--\n" +"\n"); + +#define _TKINTER_TKAPP_MAINLOOP_METHODDEF \ + {"mainloop", _PyCFunction_CAST(_tkinter_tkapp_mainloop), METH_FASTCALL, _tkinter_tkapp_mainloop__doc__}, + +static PyObject * +_tkinter_tkapp_mainloop_impl(TkappObject *self, int threshold); + +static PyObject * +_tkinter_tkapp_mainloop(TkappObject *self, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + int threshold = 0; + + if (!_PyArg_CheckPositional("mainloop", nargs, 0, 1)) { + goto exit; + } + if (nargs < 1) { + goto skip_optional; + } + threshold = _PyLong_AsInt(args[0]); + if (threshold == -1 && PyErr_Occurred()) { + goto exit; + } +skip_optional: + return_value = _tkinter_tkapp_mainloop_impl(self, threshold); + +exit: + return return_value; +} + +PyDoc_STRVAR(_tkinter_tkapp_dooneevent__doc__, +"dooneevent($self, flags=0, /)\n" +"--\n" +"\n"); + +#define _TKINTER_TKAPP_DOONEEVENT_METHODDEF \ + {"dooneevent", _PyCFunction_CAST(_tkinter_tkapp_dooneevent), METH_FASTCALL, _tkinter_tkapp_dooneevent__doc__}, + +static PyObject * +_tkinter_tkapp_dooneevent_impl(TkappObject *self, int flags); + +static PyObject * +_tkinter_tkapp_dooneevent(TkappObject *self, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + int flags = 0; + + if (!_PyArg_CheckPositional("dooneevent", nargs, 0, 1)) { + goto exit; + } + if (nargs < 1) { + goto skip_optional; + } + flags = _PyLong_AsInt(args[0]); + if (flags == -1 && PyErr_Occurred()) { + goto exit; + } +skip_optional: + return_value = _tkinter_tkapp_dooneevent_impl(self, flags); + +exit: + return return_value; +} + +PyDoc_STRVAR(_tkinter_tkapp_quit__doc__, +"quit($self, /)\n" +"--\n" +"\n"); + +#define _TKINTER_TKAPP_QUIT_METHODDEF \ + {"quit", (PyCFunction)_tkinter_tkapp_quit, METH_NOARGS, _tkinter_tkapp_quit__doc__}, + +static PyObject * +_tkinter_tkapp_quit_impl(TkappObject *self); + +static PyObject * +_tkinter_tkapp_quit(TkappObject *self, PyObject *Py_UNUSED(ignored)) +{ + return _tkinter_tkapp_quit_impl(self); +} + +PyDoc_STRVAR(_tkinter_tkapp_interpaddr__doc__, +"interpaddr($self, /)\n" +"--\n" +"\n"); + +#define _TKINTER_TKAPP_INTERPADDR_METHODDEF \ + {"interpaddr", (PyCFunction)_tkinter_tkapp_interpaddr, METH_NOARGS, _tkinter_tkapp_interpaddr__doc__}, + +static PyObject * +_tkinter_tkapp_interpaddr_impl(TkappObject *self); + +static PyObject * +_tkinter_tkapp_interpaddr(TkappObject *self, PyObject *Py_UNUSED(ignored)) +{ + return _tkinter_tkapp_interpaddr_impl(self); +} + +PyDoc_STRVAR(_tkinter_tkapp_loadtk__doc__, +"loadtk($self, /)\n" +"--\n" +"\n"); + +#define _TKINTER_TKAPP_LOADTK_METHODDEF \ + {"loadtk", 
(PyCFunction)_tkinter_tkapp_loadtk, METH_NOARGS, _tkinter_tkapp_loadtk__doc__}, + +static PyObject * +_tkinter_tkapp_loadtk_impl(TkappObject *self); + +static PyObject * +_tkinter_tkapp_loadtk(TkappObject *self, PyObject *Py_UNUSED(ignored)) +{ + return _tkinter_tkapp_loadtk_impl(self); +} + +PyDoc_STRVAR(_tkinter_tkapp_willdispatch__doc__, +"willdispatch($self, /)\n" +"--\n" +"\n"); + +#define _TKINTER_TKAPP_WILLDISPATCH_METHODDEF \ + {"willdispatch", (PyCFunction)_tkinter_tkapp_willdispatch, METH_NOARGS, _tkinter_tkapp_willdispatch__doc__}, + +static PyObject * +_tkinter_tkapp_willdispatch_impl(TkappObject *self); + +static PyObject * +_tkinter_tkapp_willdispatch(TkappObject *self, PyObject *Py_UNUSED(ignored)) +{ + return _tkinter_tkapp_willdispatch_impl(self); +} + +PyDoc_STRVAR(_tkinter__flatten__doc__, +"_flatten($module, item, /)\n" +"--\n" +"\n"); + +#define _TKINTER__FLATTEN_METHODDEF \ + {"_flatten", (PyCFunction)_tkinter__flatten, METH_O, _tkinter__flatten__doc__}, + +PyDoc_STRVAR(_tkinter_create__doc__, +"create($module, screenName=None, baseName=\'\', className=\'Tk\',\n" +" interactive=False, wantobjects=False, wantTk=True, sync=False,\n" +" use=None, /)\n" +"--\n" +"\n" +"\n" +"\n" +" wantTk\n" +" if false, then Tk_Init() doesn\'t get called\n" +" sync\n" +" if true, then pass -sync to wish\n" +" use\n" +" if not None, then pass -use to wish"); + +#define _TKINTER_CREATE_METHODDEF \ + {"create", _PyCFunction_CAST(_tkinter_create), METH_FASTCALL, _tkinter_create__doc__}, + +static PyObject * +_tkinter_create_impl(PyObject *module, const char *screenName, + const char *baseName, const char *className, + int interactive, int wantobjects, int wantTk, int sync, + const char *use); + +static PyObject * +_tkinter_create(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + const char *screenName = NULL; + const char *baseName = ""; + const char *className = "Tk"; + int interactive = 0; + int wantobjects = 0; + int wantTk = 1; + int sync = 0; + const char *use = NULL; + + if (!_PyArg_CheckPositional("create", nargs, 0, 8)) { + goto exit; + } + if (nargs < 1) { + goto skip_optional; + } + if (args[0] == Py_None) { + screenName = NULL; + } + else if (PyUnicode_Check(args[0])) { + Py_ssize_t screenName_length; + screenName = PyUnicode_AsUTF8AndSize(args[0], &screenName_length); + if (screenName == NULL) { + goto exit; + } + if (strlen(screenName) != (size_t)screenName_length) { + PyErr_SetString(PyExc_ValueError, "embedded null character"); + goto exit; + } + } + else { + _PyArg_BadArgument("create", "argument 1", "str or None", args[0]); + goto exit; + } + if (nargs < 2) { + goto skip_optional; + } + if (!PyUnicode_Check(args[1])) { + _PyArg_BadArgument("create", "argument 2", "str", args[1]); + goto exit; + } + Py_ssize_t baseName_length; + baseName = PyUnicode_AsUTF8AndSize(args[1], &baseName_length); + if (baseName == NULL) { + goto exit; + } + if (strlen(baseName) != (size_t)baseName_length) { + PyErr_SetString(PyExc_ValueError, "embedded null character"); + goto exit; + } + if (nargs < 3) { + goto skip_optional; + } + if (!PyUnicode_Check(args[2])) { + _PyArg_BadArgument("create", "argument 3", "str", args[2]); + goto exit; + } + Py_ssize_t className_length; + className = PyUnicode_AsUTF8AndSize(args[2], &className_length); + if (className == NULL) { + goto exit; + } + if (strlen(className) != (size_t)className_length) { + PyErr_SetString(PyExc_ValueError, "embedded null character"); + goto exit; + } + if (nargs < 4) { + goto 
skip_optional; + } + interactive = _PyLong_AsInt(args[3]); + if (interactive == -1 && PyErr_Occurred()) { + goto exit; + } + if (nargs < 5) { + goto skip_optional; + } + wantobjects = _PyLong_AsInt(args[4]); + if (wantobjects == -1 && PyErr_Occurred()) { + goto exit; + } + if (nargs < 6) { + goto skip_optional; + } + wantTk = _PyLong_AsInt(args[5]); + if (wantTk == -1 && PyErr_Occurred()) { + goto exit; + } + if (nargs < 7) { + goto skip_optional; + } + sync = _PyLong_AsInt(args[6]); + if (sync == -1 && PyErr_Occurred()) { + goto exit; + } + if (nargs < 8) { + goto skip_optional; + } + if (args[7] == Py_None) { + use = NULL; + } + else if (PyUnicode_Check(args[7])) { + Py_ssize_t use_length; + use = PyUnicode_AsUTF8AndSize(args[7], &use_length); + if (use == NULL) { + goto exit; + } + if (strlen(use) != (size_t)use_length) { + PyErr_SetString(PyExc_ValueError, "embedded null character"); + goto exit; + } + } + else { + _PyArg_BadArgument("create", "argument 8", "str or None", args[7]); + goto exit; + } +skip_optional: + return_value = _tkinter_create_impl(module, screenName, baseName, className, interactive, wantobjects, wantTk, sync, use); + +exit: + return return_value; +} + +PyDoc_STRVAR(_tkinter_setbusywaitinterval__doc__, +"setbusywaitinterval($module, new_val, /)\n" +"--\n" +"\n" +"Set the busy-wait interval in milliseconds between successive calls to Tcl_DoOneEvent in a threaded Python interpreter.\n" +"\n" +"It should be set to a divisor of the maximum time between frames in an animation."); + +#define _TKINTER_SETBUSYWAITINTERVAL_METHODDEF \ + {"setbusywaitinterval", (PyCFunction)_tkinter_setbusywaitinterval, METH_O, _tkinter_setbusywaitinterval__doc__}, + +static PyObject * +_tkinter_setbusywaitinterval_impl(PyObject *module, int new_val); + +static PyObject * +_tkinter_setbusywaitinterval(PyObject *module, PyObject *arg) +{ + PyObject *return_value = NULL; + int new_val; + + new_val = _PyLong_AsInt(arg); + if (new_val == -1 && PyErr_Occurred()) { + goto exit; + } + return_value = _tkinter_setbusywaitinterval_impl(module, new_val); + +exit: + return return_value; +} + +PyDoc_STRVAR(_tkinter_getbusywaitinterval__doc__, +"getbusywaitinterval($module, /)\n" +"--\n" +"\n" +"Return the current busy-wait interval between successive calls to Tcl_DoOneEvent in a threaded Python interpreter."); + +#define _TKINTER_GETBUSYWAITINTERVAL_METHODDEF \ + {"getbusywaitinterval", (PyCFunction)_tkinter_getbusywaitinterval, METH_NOARGS, _tkinter_getbusywaitinterval__doc__}, + +static int +_tkinter_getbusywaitinterval_impl(PyObject *module); + +static PyObject * +_tkinter_getbusywaitinterval(PyObject *module, PyObject *Py_UNUSED(ignored)) +{ + PyObject *return_value = NULL; + int _return_value; + + _return_value = _tkinter_getbusywaitinterval_impl(module); + if ((_return_value == -1) && PyErr_Occurred()) { + goto exit; + } + return_value = PyLong_FromLong((long)_return_value); + +exit: + return return_value; +} + +#ifndef _TKINTER_TKAPP_CREATEFILEHANDLER_METHODDEF + #define _TKINTER_TKAPP_CREATEFILEHANDLER_METHODDEF +#endif /* !defined(_TKINTER_TKAPP_CREATEFILEHANDLER_METHODDEF) */ + +#ifndef _TKINTER_TKAPP_DELETEFILEHANDLER_METHODDEF + #define _TKINTER_TKAPP_DELETEFILEHANDLER_METHODDEF +#endif /* !defined(_TKINTER_TKAPP_DELETEFILEHANDLER_METHODDEF) */ +/*[clinic end generated code: output=b0667ac928eb0c28 input=a9049054013a1b77]*/ diff -Nru python3-stdlib-extensions-3.9.7/3.11/Modules/tkappinit.c python3-stdlib-extensions-3.10.8/3.11/Modules/tkappinit.c --- 
python3-stdlib-extensions-3.9.7/3.11/Modules/tkappinit.c 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Modules/tkappinit.c 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,166 @@ +/* appinit.c -- Tcl and Tk application initialization. + + The function Tcl_AppInit() below initializes various Tcl packages. + It is called for each Tcl interpreter created by _tkinter.create(). + It needs to be compiled with -DWITH_<package> flags for each package + that you are statically linking with. You may have to add sections + for packages not yet listed below. + + Note that those packages for which Tcl_StaticPackage() is called with + a NULL first argument are known as "static loadable" packages to + Tcl but not actually initialized. To use these, you have to load + it explicitly, e.g. tkapp.eval("load {} Blt"). + */ + +#include <string.h> +#include <tcl.h> +#include <tk.h> + +#include "tkinter.h" + +#ifdef TKINTER_PROTECT_LOADTK +/* See Tkapp_TkInit in _tkinter.c for the usage of tk_load_failed */ +static int tk_load_failed; +#endif + +int +Tcl_AppInit(Tcl_Interp *interp) +{ + const char *_tkinter_skip_tk_init; +#ifdef TKINTER_PROTECT_LOADTK + const char *_tkinter_tk_failed; +#endif + +#ifdef TK_AQUA +#ifndef MAX_PATH_LEN +#define MAX_PATH_LEN 1024 +#endif + char tclLibPath[MAX_PATH_LEN], tkLibPath[MAX_PATH_LEN]; + Tcl_Obj* pathPtr; + + /* pre- Tcl_Init code copied from tkMacOSXAppInit.c */ + Tk_MacOSXOpenBundleResources (interp, "com.tcltk.tcllibrary", + tclLibPath, MAX_PATH_LEN, 0); + + if (tclLibPath[0] != '\0') { + Tcl_SetVar(interp, "tcl_library", tclLibPath, TCL_GLOBAL_ONLY); + Tcl_SetVar(interp, "tclDefaultLibrary", tclLibPath, TCL_GLOBAL_ONLY); + Tcl_SetVar(interp, "tcl_pkgPath", tclLibPath, TCL_GLOBAL_ONLY); + } + + if (tclLibPath[0] != '\0') { + Tcl_SetVar(interp, "tcl_library", tclLibPath, TCL_GLOBAL_ONLY); + Tcl_SetVar(interp, "tclDefaultLibrary", tclLibPath, TCL_GLOBAL_ONLY); + Tcl_SetVar(interp, "tcl_pkgPath", tclLibPath, TCL_GLOBAL_ONLY); + } +#endif + if (Tcl_Init (interp) == TCL_ERROR) + return TCL_ERROR; + +#ifdef TK_AQUA + /* pre- Tk_Init code copied from tkMacOSXAppInit.c */ + Tk_MacOSXOpenBundleResources (interp, "com.tcltk.tklibrary", + tkLibPath, MAX_PATH_LEN, 1); + + if (tclLibPath[0] != '\0') { + pathPtr = Tcl_NewStringObj(tclLibPath, -1); + } else { + Tcl_Obj *pathPtr = TclGetLibraryPath(); + } + + if (tkLibPath[0] != '\0') { + Tcl_Obj *objPtr; + + Tcl_SetVar(interp, "tk_library", tkLibPath, TCL_GLOBAL_ONLY); + objPtr = Tcl_NewStringObj(tkLibPath, -1); + Tcl_ListObjAppendElement(NULL, pathPtr, objPtr); + } + + TclSetLibraryPath(pathPtr); +#endif + +#ifdef WITH_XXX + /* Initialize modules that don't require Tk */ +#endif + + _tkinter_skip_tk_init = Tcl_GetVar(interp, + "_tkinter_skip_tk_init", TCL_GLOBAL_ONLY); + if (_tkinter_skip_tk_init != NULL && + strcmp(_tkinter_skip_tk_init, "1") == 0) { + return TCL_OK; + } + +#ifdef TKINTER_PROTECT_LOADTK + _tkinter_tk_failed = Tcl_GetVar(interp, + "_tkinter_tk_failed", TCL_GLOBAL_ONLY); + + if (tk_load_failed || ( + _tkinter_tk_failed != NULL && + strcmp(_tkinter_tk_failed, "1") == 0)) { + Tcl_SetResult(interp, TKINTER_LOADTK_ERRMSG, TCL_STATIC); + return TCL_ERROR; + } +#endif + + if (Tk_Init(interp) == TCL_ERROR) { +#ifdef TKINTER_PROTECT_LOADTK + tk_load_failed = 1; + Tcl_SetVar(interp, "_tkinter_tk_failed", "1", TCL_GLOBAL_ONLY); +#endif + return TCL_ERROR; + } + + Tk_MainWindow(interp); + +#ifdef TK_AQUA + TkMacOSXInitAppleEvents(interp); + TkMacOSXInitMenus(interp); +#endif + +#ifdef WITH_PIL /* 0.2b5 and later -- not yet released as 
of May 14 */ + { + extern void TkImaging_Init(Tcl_Interp *); + TkImaging_Init(interp); + /* XXX TkImaging_Init() doesn't have the right return type */ + /*Tcl_StaticPackage(interp, "Imaging", TkImaging_Init, NULL);*/ + } +#endif + +#ifdef WITH_PIL_OLD /* 0.2b4 and earlier */ + { + extern void TkImaging_Init(void); + /* XXX TkImaging_Init() doesn't have the right prototype */ + /*Tcl_StaticPackage(interp, "Imaging", TkImaging_Init, NULL);*/ + } +#endif + +#ifdef WITH_TIX + { + extern int Tix_Init(Tcl_Interp *interp); + extern int Tix_SafeInit(Tcl_Interp *interp); + Tcl_StaticPackage(NULL, "Tix", Tix_Init, Tix_SafeInit); + } +#endif + +#ifdef WITH_BLT + { + extern int Blt_Init(Tcl_Interp *); + extern int Blt_SafeInit(Tcl_Interp *); + Tcl_StaticPackage(NULL, "Blt", Blt_Init, Blt_SafeInit); + } +#endif + +#ifdef WITH_TOGL + { + /* XXX I've heard rumors that this doesn't work */ + extern int Togl_Init(Tcl_Interp *); + /* XXX Is there no Togl_SafeInit? */ + Tcl_StaticPackage(NULL, "Togl", Togl_Init, NULL); + } +#endif + +#ifdef WITH_XXX + +#endif + return TCL_OK; +} diff -Nru python3-stdlib-extensions-3.9.7/3.11/Modules/tkinter.h python3-stdlib-extensions-3.10.8/3.11/Modules/tkinter.h --- python3-stdlib-extensions-3.9.7/3.11/Modules/tkinter.h 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/Modules/tkinter.h 2022-10-24 17:35:39.000000000 +0000 @@ -0,0 +1,27 @@ +#ifndef TKINTER_H +#define TKINTER_H + +/* This header is used to share some macros between _tkinter.c and + * tkappinit.c. + * Be sure to include tk.h before including this header so + * TK_HEX_VERSION is properly defined. */ + +/* TK_RELEASE_LEVEL is always one of the following: + * TCL_ALPHA_RELEASE 0 + * TCL_BETA_RELEASE 1 + * TCL_FINAL_RELEASE 2 + */ +#define TK_HEX_VERSION ((TK_MAJOR_VERSION << 24) | \ + (TK_MINOR_VERSION << 16) | \ + (TK_RELEASE_LEVEL << 8) | \ + (TK_RELEASE_SERIAL << 0)) + +/* Protect Tk 8.4.13 and older from a deadlock that happens when trying + * to load tk after a failed attempt. */ +#if TK_HEX_VERSION < 0x0804020e +#define TKINTER_PROTECT_LOADTK +#define TKINTER_LOADTK_ERRMSG \ + "Calling Tk_Init again after a previous call failed might deadlock" +#endif + +#endif /* !TKINTER_H */ diff -Nru python3-stdlib-extensions-3.9.7/3.11/setup.py python3-stdlib-extensions-3.10.8/3.11/setup.py --- python3-stdlib-extensions-3.9.7/3.11/setup.py 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.11/setup.py 2019-07-06 10:48:25.000000000 +0000 @@ -0,0 +1,877 @@ +# Autodetecting setup.py script for building the Python extensions +# + +import sys, os, imp, re, optparse +from glob import glob +import sysconfig + +from distutils import log +from distutils import text_file +from distutils.errors import * +from distutils.core import Extension, setup +from distutils.command.build_ext import build_ext +from distutils.command.install import install +from distutils.command.install_lib import install_lib +from distutils.command.build_scripts import build_scripts +from distutils.spawn import find_executable + +cross_compiling = "_PYTHON_HOST_PLATFORM" in os.environ + +def get_platform(): + # cross build + if "_PYTHON_HOST_PLATFORM" in os.environ: + return os.environ["_PYTHON_HOST_PLATFORM"] + # Get value of sys.platform + if sys.platform.startswith('osf1'): + return 'osf1' + return sys.platform +host_platform = get_platform() + +# Were we compiled --with-pydebug or with #define Py_DEBUG? 
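The TK_HEX_VERSION macro in tkinter.h above packs the Tk major, minor, release-level and serial numbers into one comparable integer, which is what makes the "Tk 8.4.13 and older" guard a simple numeric test. A small Python mirror of that packing, purely illustrative and not part of the package (the value 2 for a final release comes from the comment in tkinter.h):

def tk_hex_version(major, minor, release_level, serial):
    # Same packing as the TK_HEX_VERSION macro: 0xMMmmLLSS.
    return (major << 24) | (minor << 16) | (release_level << 8) | serial

TCL_FINAL_RELEASE = 2
assert tk_hex_version(8, 4, TCL_FINAL_RELEASE, 14) == 0x0804020e  # first release outside the guard
assert tk_hex_version(8, 4, TCL_FINAL_RELEASE, 13) < 0x0804020e   # 8.4.13 still gets TKINTER_PROTECT_LOADTK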
+COMPILED_WITH_PYDEBUG = ('--with-pydebug' in sysconfig.get_config_var("CONFIG_ARGS")) + +# This global variable is used to hold the list of modules to be disabled. +disabled_module_list = [] + +def add_dir_to_list(dirlist, dir): + """Add the directory 'dir' to the list 'dirlist' (after any relative + directories) if: + + 1) 'dir' is not already in 'dirlist' + 2) 'dir' actually exists, and is a directory. + """ + if dir is None or not os.path.isdir(dir) or dir in dirlist: + return + for i, path in enumerate(dirlist): + if not os.path.isabs(path): + dirlist.insert(i + 1, dir) + return + dirlist.insert(0, dir) + +def macosx_sdk_root(): + """ + Return the directory of the current OSX SDK, + or '/' if no SDK was specified. + """ + cflags = sysconfig.get_config_var('CFLAGS') + m = re.search(r'-isysroot\s+(\S+)', cflags) + if m is None: + sysroot = '/' + else: + sysroot = m.group(1) + return sysroot + +def is_macosx_sdk_path(path): + """ + Returns True if 'path' can be located in an OSX SDK + """ + return ( (path.startswith('/usr/') and not path.startswith('/usr/local')) + or path.startswith('/System/') + or path.startswith('/Library/') ) + +def find_file(filename, std_dirs, paths): + """Searches for the directory where a given file is located, + and returns a possibly-empty list of additional directories, or None + if the file couldn't be found at all. + + 'filename' is the name of a file, such as readline.h or libcrypto.a. + 'std_dirs' is the list of standard system directories; if the + file is found in one of them, no additional directives are needed. + 'paths' is a list of additional locations to check; if the file is + found in one of them, the resulting list will contain the directory. + """ + if host_platform == 'darwin': + # Honor the MacOSX SDK setting when one was specified. + # An SDK is a directory with the same structure as a real + # system, but with only header files and libraries. + sysroot = macosx_sdk_root() + + # Check the standard locations + for dir in std_dirs: + f = os.path.join(dir, filename) + + if host_platform == 'darwin' and is_macosx_sdk_path(dir): + f = os.path.join(sysroot, dir[1:], filename) + + if os.path.exists(f): return [] + + # Check the additional directories + for dir in paths: + f = os.path.join(dir, filename) + + if host_platform == 'darwin' and is_macosx_sdk_path(dir): + f = os.path.join(sysroot, dir[1:], filename) + + if os.path.exists(f): + return [dir] + + # Not found anywhere + return None + +def find_library_file(compiler, libname, std_dirs, paths): + result = compiler.find_library_file(std_dirs + paths, libname) + if result is None: + return None + + if host_platform == 'darwin': + sysroot = macosx_sdk_root() + + # Check whether the found file is in one of the standard directories + dirname = os.path.dirname(result) + for p in std_dirs: + # Ensure path doesn't end with path separator + p = p.rstrip(os.sep) + + if host_platform == 'darwin' and is_macosx_sdk_path(p): + if os.path.join(sysroot, p[1:]) == dirname: + return [ ] + + if p == dirname: + return [ ] + + # Otherwise, it must have been in one of the additional directories, + # so we have to figure out which one. 
+ for p in paths: + # Ensure path doesn't end with path separator + p = p.rstrip(os.sep) + + if host_platform == 'darwin' and is_macosx_sdk_path(p): + if os.path.join(sysroot, p[1:]) == dirname: + return [ p ] + + if p == dirname: + return [p] + else: + assert False, "Internal error: Path not found in std_dirs or paths" + +def module_enabled(extlist, modname): + """Returns whether the module 'modname' is present in the list + of extensions 'extlist'.""" + extlist = [ext for ext in extlist if ext.name == modname] + return len(extlist) + +def find_module_file(module, dirlist): + """Find a module in a set of possible folders. If it is not found + return the unadorned filename""" + list = find_file(module, [], dirlist) + if not list: + return module + if len(list) > 1: + log.info("WARNING: multiple copies of %s found"%module) + return os.path.join(list[0], module) + +class PyBuildExt(build_ext): + + def __init__(self, dist): + build_ext.__init__(self, dist) + self.failed = [] + + def build_extensions(self): + + # Detect which modules should be compiled + old_so = self.compiler.shared_lib_extension + # Workaround PEP 3149 stuff + self.compiler.shared_lib_extension = os.environ.get("SO", ".so") + try: + missing = self.detect_modules() + finally: + self.compiler.shared_lib_extension = old_so + + # Remove modules that are present on the disabled list + extensions = [ext for ext in self.extensions + if ext.name not in disabled_module_list] + # move ctypes to the end, it depends on other modules + ext_map = dict((ext.name, i) for i, ext in enumerate(extensions)) + if "_ctypes" in ext_map: + ctypes = extensions.pop(ext_map["_ctypes"]) + extensions.append(ctypes) + self.extensions = extensions + + # Fix up the autodetected modules, prefixing all the source files + # with Modules/. + srcdir = sysconfig.get_config_var('srcdir') + if not srcdir: + # Maybe running on Windows but not using CYGWIN? + raise ValueError("No source directory; cannot proceed.") + srcdir = os.path.abspath(srcdir) + moddirlist = [os.path.join(srcdir, 'Modules')] + + # Fix up the paths for scripts, too + self.distribution.scripts = [os.path.join(srcdir, filename) + for filename in self.distribution.scripts] + + # Python header files + headers = [sysconfig.get_config_h_filename()] + headers += glob(os.path.join(sysconfig.get_path('include'), "*.h")) + + for ext in self.extensions[:]: + ext.sources = [ find_module_file(filename, moddirlist) + for filename in ext.sources ] + if ext.depends is not None: + ext.depends = [find_module_file(filename, moddirlist) + for filename in ext.depends] + else: + ext.depends = [] + # re-compile extensions if a header file has been changed + ext.depends.extend(headers) + + # If a module has already been built statically, + # don't build it here + if ext.name in sys.builtin_module_names: + self.extensions.remove(ext) + + # Parse Modules/Setup to figure out which + # modules are turned on in the file. + remove_modules = [] + for filename in ('Modules/Setup', ): + input = text_file.TextFile(filename, join_lines=1) + while 1: + line = input.readline() + if not line: break + line = line.split() + remove_modules.append(line[0]) + input.close() + + for ext in self.extensions[:]: + if ext.name in remove_modules: + self.extensions.remove(ext) + + # When you run "make CC=altcc" or something similar, you really want + # those environment variables passed into the setup.py phase. Here's + # a small set of useful ones. 
+ compiler = os.environ.get('CC') + args = {} + # unfortunately, distutils doesn't let us provide separate C and C++ + # compilers + if compiler is not None: + (ccshared, cppflags, cflags) = \ + sysconfig.get_config_vars('CCSHARED', 'CPPFLAGS', 'CFLAGS') + cppflags = ' '.join([f for f in cppflags.split() if not f.startswith('-I')]) + args['compiler_so'] = compiler + ' ' + ccshared + ' ' + cppflags + ' ' + cflags + self.compiler.set_executables(**args) + + build_ext.build_extensions(self) + + longest = max([len(e.name) for e in self.extensions]) + if self.failed: + longest = max(longest, max([len(name) for name in self.failed])) + + def print_three_column(lst): + lst.sort(key=str.lower) + # guarantee zip() doesn't drop anything + while len(lst) % 3: + lst.append("") + for e, f, g in zip(lst[::3], lst[1::3], lst[2::3]): + print("%-*s %-*s %-*s" % (longest, e, longest, f, + longest, g)) + + if missing: + print() + print("Python build finished, but the necessary bits to build " + "these modules were not found:") + print_three_column(missing) + print("To find the necessary bits, look in setup.py in" + " detect_modules() for the module's name.") + print() + + if self.failed: + failed = self.failed[:] + print() + print("Failed to build these modules:") + print_three_column(failed) + print() + + def build_extension(self, ext): + + if ext.name == '_ctypes': + if not self.configure_ctypes(ext): + return + + try: + build_ext.build_extension(self, ext) + except (CCompilerError, DistutilsError) as why: + self.announce('WARNING: building of extension "%s" failed: %s' % + (ext.name, sys.exc_info()[1])) + self.failed.append(ext.name) + return + # Workaround for Mac OS X: The Carbon-based modules cannot be + # reliably imported into a command-line Python + if 'Carbon' in ext.extra_link_args: + self.announce( + 'WARNING: skipping import check for Carbon-based "%s"' % + ext.name) + return + + if host_platform == 'darwin' and ( + sys.maxsize > 2**32 and '-arch' in ext.extra_link_args): + # Don't bother doing an import check when an extension was + # build with an explicit '-arch' flag on OSX. That's currently + # only used to build 32-bit only extensions in a 4-way + # universal build and loading 32-bit code into a 64-bit + # process will fail. + self.announce( + 'WARNING: skipping import check for "%s"' % + ext.name) + return + + # Workaround for Cygwin: Cygwin currently has fork issues when many + # modules have been imported + if host_platform == 'cygwin': + self.announce('WARNING: skipping import check for Cygwin-based "%s"' + % ext.name) + return + ext_filename = os.path.join( + self.build_lib, + self.get_ext_filename(self.get_ext_fullname(ext.name))) + + # If the build directory didn't exist when setup.py was + # started, sys.path_importer_cache has a negative result + # cached. Clear that cache before trying to import. + sys.path_importer_cache.clear() + + # Don't try to load extensions for cross builds + if cross_compiling: + return + + try: + imp.load_dynamic(ext.name, ext_filename) + except ImportError as why: + self.failed.append(ext.name) + self.announce('*** WARNING: renaming "%s" since importing it' + ' failed: %s' % (ext.name, why), level=3) + assert not self.inplace + basename, tail = os.path.splitext(ext_filename) + newname = basename + "_failed" + tail + if os.path.exists(newname): + os.remove(newname) + os.rename(ext_filename, newname) + + # XXX -- This relies on a Vile HACK in + # distutils.command.build_ext.build_extension(). 
The + # _built_objects attribute is stored there strictly for + # use here. + # If there is a failure, _built_objects may not be there, + # so catch the AttributeError and move on. + try: + for filename in self._built_objects: + os.remove(filename) + except AttributeError: + self.announce('unable to remove files (ignored)') + except: + exc_type, why, tb = sys.exc_info() + self.announce('*** WARNING: importing extension "%s" ' + 'failed with %s: %s' % (ext.name, exc_type, why), + level=3) + self.failed.append(ext.name) + + def add_multiarch_paths(self): + # Debian/Ubuntu multiarch support. + # https://wiki.ubuntu.com/MultiarchSpec + cc = sysconfig.get_config_var('CC') + tmpfile = os.path.join(self.build_temp, 'multiarch') + if not os.path.exists(self.build_temp): + os.makedirs(self.build_temp) + ret = os.system( + '%s -print-multiarch > %s 2> /dev/null' % (cc, tmpfile)) + multiarch_path_component = '' + try: + if ret >> 8 == 0: + with open(tmpfile) as fp: + multiarch_path_component = fp.readline().strip() + finally: + os.unlink(tmpfile) + + if multiarch_path_component != '': + add_dir_to_list(self.compiler.library_dirs, + '/usr/lib/' + multiarch_path_component) + add_dir_to_list(self.compiler.include_dirs, + '/usr/include/' + multiarch_path_component) + return + + if not find_executable('dpkg-architecture'): + return + opt = '' + if cross_compiling: + opt = '-t' + sysconfig.get_config_var('HOST_GNU_TYPE') + tmpfile = os.path.join(self.build_temp, 'multiarch') + if not os.path.exists(self.build_temp): + os.makedirs(self.build_temp) + ret = os.system( + 'dpkg-architecture %s -qDEB_HOST_MULTIARCH > %s 2> /dev/null' % + (opt, tmpfile)) + try: + if ret >> 8 == 0: + with open(tmpfile) as fp: + multiarch_path_component = fp.readline().strip() + add_dir_to_list(self.compiler.library_dirs, + '/usr/lib/' + multiarch_path_component) + add_dir_to_list(self.compiler.include_dirs, + '/usr/include/' + multiarch_path_component) + finally: + os.unlink(tmpfile) + + def add_gcc_paths(self): + gcc = sysconfig.get_config_var('CC') + tmpfile = os.path.join(self.build_temp, 'gccpaths') + if not os.path.exists(self.build_temp): + os.makedirs(self.build_temp) + ret = os.system('%s -E -v - </dev/null 2>%s 1>/dev/null' % (gcc, tmpfile)) + is_gcc = False + in_incdirs = False + inc_dirs = [] + lib_dirs = [] + try: + if ret >> 8 == 0: + with open(tmpfile) as fp: + for line in fp.readlines(): + if line.startswith("gcc version"): + is_gcc = True + elif line.startswith("#include <...>"): + in_incdirs = True + elif line.startswith("End of search list"): + in_incdirs = False + elif is_gcc and line.startswith("LIBRARY_PATH"): + for d in line.strip().split("=")[1].split(":"): + d = os.path.normpath(d) + if '/gcc/' not in d: + add_dir_to_list(self.compiler.library_dirs, + d) + elif is_gcc and in_incdirs and '/gcc/' not in line: + add_dir_to_list(self.compiler.include_dirs, + line.strip()) + finally: + os.unlink(tmpfile) + + def detect_modules(self): + # On Debian /usr/local is always used, so we don't include it twice + # only change this for cross builds for 3.3, issues on Mageia + if cross_compiling: + self.add_gcc_paths() + self.add_multiarch_paths() + + # Add paths specified in the environment variables LDFLAGS and + # CPPFLAGS for header and library files. 
+ # We must get the values from the Makefile and not the environment + # directly since an inconsistently reproducible issue comes up where + # the environment variable is not set even though the value were passed + # into configure and stored in the Makefile (issue found on OS X 10.3). + for env_var, arg_name, dir_list in ( + ('LDFLAGS', '-R', self.compiler.runtime_library_dirs), + ('LDFLAGS', '-L', self.compiler.library_dirs), + ('CPPFLAGS', '-I', self.compiler.include_dirs)): + env_val = sysconfig.get_config_var(env_var) + if env_val: + # To prevent optparse from raising an exception about any + # options in env_val that it doesn't know about we strip out + # all double dashes and any dashes followed by a character + # that is not for the option we are dealing with. + # + # Please note that order of the regex is important! We must + # strip out double-dashes first so that we don't end up with + # substituting "--Long" to "-Long" and thus lead to "ong" being + # used for a library directory. + env_val = re.sub(r'(^|\s+)-(-|(?!%s))' % arg_name[1], + ' ', env_val) + parser = optparse.OptionParser() + # Make sure that allowing args interspersed with options is + # allowed + parser.allow_interspersed_args = True + parser.error = lambda msg: None + parser.add_option(arg_name, dest="dirs", action="append") + options = parser.parse_args(env_val.split())[0] + if options.dirs: + for directory in reversed(options.dirs): + add_dir_to_list(dir_list, directory) + + if os.path.normpath(sys.base_prefix) != '/usr' \ + and not sysconfig.get_config_var('PYTHONFRAMEWORK'): + # OSX note: Don't add LIBDIR and INCLUDEDIR to building a framework + # (PYTHONFRAMEWORK is set) to avoid # linking problems when + # building a framework with different architectures than + # the one that is currently installed (issue #7473) + add_dir_to_list(self.compiler.library_dirs, + sysconfig.get_config_var("LIBDIR")) + add_dir_to_list(self.compiler.include_dirs, + sysconfig.get_config_var("INCLUDEDIR")) + + # lib_dirs and inc_dirs are used to search for files; + # if a file is found in one of those directories, it can + # be assumed that no additional -I,-L directives are needed. + if not cross_compiling: + lib_dirs = self.compiler.library_dirs + [ + '/lib64', '/usr/lib64', + '/lib', '/usr/lib', + ] + inc_dirs = self.compiler.include_dirs + ['/usr/include'] + else: + lib_dirs = self.compiler.library_dirs[:] + inc_dirs = self.compiler.include_dirs[:] + exts = [] + missing = [] + + config_h = sysconfig.get_config_h_filename() + with open(config_h) as file: + config_h_vars = sysconfig.parse_config_h(file) + + srcdir = sysconfig.get_config_var('srcdir') + + # OSF/1 and Unixware have some stuff in /usr/ccs/lib (like -ldb) + if host_platform in ['osf1', 'unixware7', 'openunix8']: + lib_dirs += ['/usr/ccs/lib'] + + # HP-UX11iv3 keeps files in lib/hpux folders. + if host_platform == 'hp-ux11': + lib_dirs += ['/usr/lib/hpux64', '/usr/lib/hpux32'] + + if host_platform == 'darwin': + # This should work on any unixy platform ;-) + # If the user has bothered specifying additional -I and -L flags + # in OPT and LDFLAGS we might as well use them here. + # + # NOTE: using shlex.split would technically be more correct, but + # also gives a bootstrap problem. Let's hope nobody uses + # directories with whitespace in the name to store libraries. 
+ cflags, ldflags = sysconfig.get_config_vars( + 'CFLAGS', 'LDFLAGS') + for item in cflags.split(): + if item.startswith('-I'): + inc_dirs.append(item[2:]) + + for item in ldflags.split(): + if item.startswith('-L'): + lib_dirs.append(item[2:]) + + # Check for MacOS X, which doesn't need libm.a at all + math_libs = ['m'] + if host_platform == 'darwin': + math_libs = [] + + # XXX Omitted modules: gl, pure, dl, SGI-specific modules + + # Anthony Baxter's gdbm module. GNU dbm(3) will require -lgdbm: + if (True or 'gdbm' in dbm_order and + self.compiler.find_library_file(lib_dirs, 'gdbm')): + exts.append( Extension('_gdbm', ['Modules/_gdbmmodule.c'], + libraries = ['gdbm'] ) ) + else: + missing.append('_gdbm') + + self.extensions.extend(exts) + + # Call the method for detecting whether _tkinter can be compiled + self.detect_tkinter(inc_dirs, lib_dirs) + + if '_tkinter' not in [e.name for e in self.extensions]: + missing.append('_tkinter') + + return missing + + def detect_tkinter_darwin(self, inc_dirs, lib_dirs): + # The _tkinter module, using frameworks. Since frameworks are quite + # different the UNIX search logic is not sharable. + from os.path import join, exists + framework_dirs = [ + '/Library/Frameworks', + '/System/Library/Frameworks/', + join(os.getenv('HOME'), '/Library/Frameworks') + ] + + sysroot = macosx_sdk_root() + + # Find the directory that contains the Tcl.framework and Tk.framework + # bundles. + # XXX distutils should support -F! + for F in framework_dirs: + # both Tcl.framework and Tk.framework should be present + + + for fw in 'Tcl', 'Tk': + if is_macosx_sdk_path(F): + if not exists(join(sysroot, F[1:], fw + '.framework')): + break + else: + if not exists(join(F, fw + '.framework')): + break + else: + # ok, F is now directory with both frameworks. Continure + # building + break + else: + # Tk and Tcl frameworks not found. Normal "unix" tkinter search + # will now resume. + return 0 + + # For 8.4a2, we must add -I options that point inside the Tcl and Tk + # frameworks. In later release we should hopefully be able to pass + # the -F option to gcc, which specifies a framework lookup path. + # + include_dirs = [ + join(F, fw + '.framework', H) + for fw in ('Tcl', 'Tk') + for H in ('Headers', 'Versions/Current/PrivateHeaders') + ] + + # For 8.4a2, the X11 headers are not included. Rather than include a + # complicated search, this is a hard-coded path. It could bail out + # if X11 libs are not found... + include_dirs.append('/usr/X11R6/include') + frameworks = ['-framework', 'Tcl', '-framework', 'Tk'] + + # All existing framework builds of Tcl/Tk don't support 64-bit + # architectures. + cflags = sysconfig.get_config_vars('CFLAGS')[0] + archs = re.findall('-arch\s+(\w+)', cflags) + + tmpfile = os.path.join(self.build_temp, 'tk.arch') + if not os.path.exists(self.build_temp): + os.makedirs(self.build_temp) + + # Note: cannot use os.popen or subprocess here, that + # requires extensions that are not available here. 
+ if is_macosx_sdk_path(F): + os.system("file %s/Tk.framework/Tk | grep 'for architecture' > %s"%(os.path.join(sysroot, F[1:]), tmpfile)) + else: + os.system("file %s/Tk.framework/Tk | grep 'for architecture' > %s"%(F, tmpfile)) + + with open(tmpfile) as fp: + detected_archs = [] + for ln in fp: + a = ln.split()[-1] + if a in archs: + detected_archs.append(ln.split()[-1]) + os.unlink(tmpfile) + + for a in detected_archs: + frameworks.append('-arch') + frameworks.append(a) + + ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'], + define_macros=[('WITH_APPINIT', 1)], + include_dirs = include_dirs, + libraries = [], + extra_compile_args = frameworks[2:], + extra_link_args = frameworks, + ) + self.extensions.append(ext) + return 1 + + + def detect_tkinter(self, inc_dirs, lib_dirs): + # The _tkinter module. + + # Rather than complicate the code below, detecting and building + # AquaTk is a separate method. Only one Tkinter will be built on + # Darwin - either AquaTk, if it is found, or X11 based Tk. + if (host_platform == 'darwin' and + self.detect_tkinter_darwin(inc_dirs, lib_dirs)): + return + + # Assume we haven't found any of the libraries or include files + # The versions with dots are used on Unix, and the versions without + # dots on Windows, for detection by cygwin. + tcllib = tklib = tcl_includes = tk_includes = None + for version in ['8.6', '86', '8.5', '85', '8.4', '84', '8.3', '83', + '8.2', '82', '8.1', '81', '8.0', '80']: + tklib = self.compiler.find_library_file(lib_dirs, + 'tk' + version) + tcllib = self.compiler.find_library_file(lib_dirs, + 'tcl' + version) + if tklib and tcllib: + # Exit the loop when we've found the Tcl/Tk libraries + break + + # Now check for the header files + if tklib and tcllib: + # Check for the include files on Debian and {Free,Open}BSD, where + # they're put in /usr/include/{tcl,tk}X.Y + dotversion = version + if '.' not in dotversion and "bsd" in host_platform.lower(): + # OpenBSD and FreeBSD use Tcl/Tk library names like libtcl83.a, + # but the include subdirs are named like .../include/tcl8.3. + dotversion = dotversion[:-1] + '.' + dotversion[-1] + tcl_include_sub = [] + tk_include_sub = [] + for dir in inc_dirs: + tcl_include_sub += [dir + os.sep + "tcl" + dotversion] + tk_include_sub += [dir + os.sep + "tk" + dotversion] + tk_include_sub += tcl_include_sub + tcl_includes = find_file('tcl.h', inc_dirs, tcl_include_sub) + tk_includes = find_file('tk.h', inc_dirs, tk_include_sub) + + if (tcllib is None or tklib is None or + tcl_includes is None or tk_includes is None): + self.announce("INFO: Can't locate Tcl/Tk libs and/or headers", 2) + return + + # OK... everything seems to be present for Tcl/Tk. 
+ + include_dirs = [] ; libs = [] ; defs = [] ; added_lib_dirs = [] + for dir in tcl_includes + tk_includes: + if dir not in include_dirs: + include_dirs.append(dir) + + # Check for various platform-specific directories + if host_platform == 'sunos5': + include_dirs.append('/usr/openwin/include') + added_lib_dirs.append('/usr/openwin/lib') + elif os.path.exists('/usr/X11R6/include'): + include_dirs.append('/usr/X11R6/include') + added_lib_dirs.append('/usr/X11R6/lib64') + added_lib_dirs.append('/usr/X11R6/lib') + elif os.path.exists('/usr/X11R5/include'): + include_dirs.append('/usr/X11R5/include') + added_lib_dirs.append('/usr/X11R5/lib') + else: + # Assume default location for X11 + include_dirs.append('/usr/X11/include') + added_lib_dirs.append('/usr/X11/lib') + + # If Cygwin, then verify that X is installed before proceeding + if host_platform == 'cygwin': + x11_inc = find_file('X11/Xlib.h', [], include_dirs) + if x11_inc is None: + return + + # Check for BLT extension + if self.compiler.find_library_file(lib_dirs + added_lib_dirs, + 'BLT8.0'): + defs.append( ('WITH_BLT', 1) ) + libs.append('BLT8.0') + elif self.compiler.find_library_file(lib_dirs + added_lib_dirs, + 'BLT'): + defs.append( ('WITH_BLT', 1) ) + libs.append('BLT') + + # Add the Tcl/Tk libraries + libs.append('tk'+ version) + libs.append('tcl'+ version) + + if host_platform in ['aix3', 'aix4']: + libs.append('ld') + + # Finally, link with the X11 libraries (not appropriate on cygwin) + if host_platform != "cygwin": + libs.append('X11') + + ext = Extension('_tkinter', ['Modules/_tkinter.c', 'Modules/tkappinit.c'], + define_macros=[('WITH_APPINIT', 1)] + defs, + include_dirs = include_dirs, + libraries = libs, + library_dirs = added_lib_dirs, + ) + self.extensions.append(ext) + + # XXX handle these, but how to detect? + # *** Uncomment and edit for PIL (TkImaging) extension only: + # -DWITH_PIL -I../Extensions/Imaging/libImaging tkImaging.c \ + # *** Uncomment and edit for TOGL extension only: + # -DWITH_TOGL togl.c \ + # *** Uncomment these for TOGL extension only: + # -lGL -lGLU -lXext -lXmu \ + +class PyBuildInstall(install): + # Suppress the warning about installation into the lib_dynload + # directory, which is not in sys.path when running Python during + # installation: + def initialize_options (self): + install.initialize_options(self) + self.warn_dir=0 + + +class PyBuildInstallLib(install_lib): + # Do exactly what install_lib does but make sure correct access modes get + # set on installed directories and files. All installed files with get + # mode 644 unless they are a shared library in which case they will get + # mode 755. All installed directories will get mode 755. 
+ + so_ext = sysconfig.get_config_var("SO") + + def install(self): + outfiles = install_lib.install(self) + self.set_file_modes(outfiles, 0o644, 0o755) + self.set_dir_modes(self.install_dir, 0o755) + return outfiles + + def set_file_modes(self, files, defaultMode, sharedLibMode): + if not self.is_chmod_supported(): return + if not files: return + + for filename in files: + if os.path.islink(filename): continue + mode = defaultMode + if filename.endswith(self.so_ext): mode = sharedLibMode + log.info("changing mode of %s to %o", filename, mode) + if not self.dry_run: os.chmod(filename, mode) + + def set_dir_modes(self, dirname, mode): + if not self.is_chmod_supported(): return + for dirpath, dirnames, fnames in os.walk(dirname): + if os.path.islink(dirpath): + continue + log.info("changing mode of %s to %o", dirpath, mode) + if not self.dry_run: os.chmod(dirpath, mode) + + def is_chmod_supported(self): + return hasattr(os, 'chmod') + +SUMMARY = """ +Python is an interpreted, interactive, object-oriented programming +language. It is often compared to Tcl, Perl, Scheme or Java. + +Python combines remarkable power with very clear syntax. It has +modules, classes, exceptions, very high level dynamic data types, and +dynamic typing. There are interfaces to many system calls and +libraries, as well as to various windowing systems (X11, Motif, Tk, +Mac, MFC). New built-in modules are easily written in C or C++. Python +is also usable as an extension language for applications that need a +programmable interface. + +The Python implementation is portable: it runs on many brands of UNIX, +on Windows, DOS, OS/2, Mac, Amiga... If your favorite system isn't +listed here, it may still be supported, if there's a C compiler for +it. Ask around on comp.lang.python -- or just try compiling Python +yourself. +""" + +CLASSIFIERS = """ +Development Status :: 6 - Mature +License :: OSI Approved :: Python Software Foundation License +Natural Language :: English +Programming Language :: C +Programming Language :: Python +Topic :: Software Development +""" + +def main(): + # turn off warnings when deprecated modules are imported + import warnings + warnings.filterwarnings("ignore",category=DeprecationWarning) + setup(# PyPI Metadata (PEP 301) + name = "Python", + version = sys.version.split()[0], + url = "http://www.python.org/%s" % sys.version[:3], + maintainer = "Guido van Rossum and the Python community", + maintainer_email = "python-dev@python.org", + description = "A high-level object-oriented programming language", + long_description = SUMMARY.strip(), + license = "PSF license", + classifiers = [x for x in CLASSIFIERS.split("\n") if x], + platforms = ["Many"], + + # Build info + cmdclass = {'build_ext': PyBuildExt, + 'install': PyBuildInstall, + 'install_lib': PyBuildInstallLib}, + # The struct module is defined here, because build_ext won't be + # called unless there's at least one extension module defined. 
+ ext_modules=[Extension('_struct', ['_struct.c'])], + + # If you change the scripts installed here, you also need to + # check the PyBuildScripts command above, and change the links + # created by the bininstall target in Makefile.pre.in + scripts = [] + ) + +# --install-platlib +if __name__ == '__main__': + main() diff -Nru python3-stdlib-extensions-3.9.7/3.7/Lib/tkinter/test/test_tkinter/test_widgets.py python3-stdlib-extensions-3.10.8/3.7/Lib/tkinter/test/test_tkinter/test_widgets.py --- python3-stdlib-extensions-3.9.7/3.7/Lib/tkinter/test/test_tkinter/test_widgets.py 2021-06-28 16:51:36.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.7/Lib/tkinter/test/test_tkinter/test_widgets.py 2022-10-10 12:34:53.000000000 +0000 @@ -864,7 +864,8 @@ def test_from(self): widget = self.create() - self.checkFloatParam(widget, 'from', 100, 14.9, 15.1, conv=float_round) + conv = False if get_tk_patchlevel() >= (8, 6, 10) else float_round + self.checkFloatParam(widget, 'from', 100, 14.9, 15.1, conv=conv) def test_label(self): widget = self.create() diff -Nru python3-stdlib-extensions-3.9.7/3.8/Lib/tkinter/test/test_tkinter/test_widgets.py python3-stdlib-extensions-3.10.8/3.8/Lib/tkinter/test/test_tkinter/test_widgets.py --- python3-stdlib-extensions-3.9.7/3.8/Lib/tkinter/test/test_tkinter/test_widgets.py 2021-08-30 14:26:41.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.8/Lib/tkinter/test/test_tkinter/test_widgets.py 2022-10-11 15:42:49.000000000 +0000 @@ -1244,8 +1244,11 @@ def test_configure_type(self): widget = self.create() - self.checkEnumParam(widget, 'type', - 'normal', 'tearoff', 'menubar') + self.checkEnumParam( + widget, 'type', + 'normal', 'tearoff', 'menubar', + errmsg='bad type "{}": must be normal, tearoff, or menubar', + ) def test_entryconfigure(self): m1 = self.create() diff -Nru python3-stdlib-extensions-3.9.7/3.8/Lib/tkinter/test/test_ttk/test_widgets.py python3-stdlib-extensions-3.10.8/3.8/Lib/tkinter/test/test_ttk/test_widgets.py --- python3-stdlib-extensions-3.9.7/3.8/Lib/tkinter/test/test_ttk/test_widgets.py 2021-08-30 14:26:41.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.8/Lib/tkinter/test/test_ttk/test_widgets.py 2022-10-11 15:42:49.000000000 +0000 @@ -169,10 +169,13 @@ errmsg='image "spam" doesn\'t exist') def test_configure_compound(self): + options = 'none text image center top bottom left right'.split() + errmsg = ( + 'bad compound "{}": must be' + f' {", ".join(options[:-1])}, or {options[-1]}' + ) widget = self.create() - self.checkEnumParam(widget, 'compound', - 'none', 'text', 'image', 'center', - 'top', 'bottom', 'left', 'right') + self.checkEnumParam(widget, 'compound', *options, errmsg=errmsg) def test_configure_state(self): widget = self.create() diff -Nru python3-stdlib-extensions-3.9.7/3.9/Lib/distutils/ccompiler.py python3-stdlib-extensions-3.10.8/3.9/Lib/distutils/ccompiler.py --- python3-stdlib-extensions-3.9.7/3.9/Lib/distutils/ccompiler.py 2021-08-30 19:02:15.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.9/Lib/distutils/ccompiler.py 2022-10-11 14:48:37.000000000 +0000 @@ -392,7 +392,7 @@ return output_dir, macros, include_dirs def _prep_compile(self, sources, output_dir, depends=None): - """Decide which souce files must be recompiled. + """Decide which source files must be recompiled. Determine the list of object files corresponding to 'sources', and figure out which ones really need to be recompiled. 
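A note on the tkappinit.c file added above: packages registered there with Tcl_StaticPackage() and a NULL interpreter are only made known to Tcl, not initialized, so they must be loaded explicitly from the Tcl side. A minimal sketch of what that looks like from Python, assuming an interpreter that was actually linked with such a package (BLT is just the example the source comment uses; ordinary builds will raise TclError):

import tkinter

tcl = tkinter.Tcl()  # a plain Tcl interpreter, no Tk window required
try:
    # An empty library name tells Tcl to search its statically linked packages.
    tcl.eval("load {} Blt")
except tkinter.TclError as exc:
    print("static Blt package not available in this build:", exc)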
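The tkinter.h hunk above packs the Tk version into a single integer so that Tk 8.4.13 and older can be detected (anything below 0x0804020e, that is, 8.4.14 final). A quick Python rendering of the same packing, useful only for sanity-checking the threshold; the function name is mine:

TCL_FINAL_RELEASE = 2  # release level used by final releases, per the comment in tkinter.h

def tk_hex_version(major, minor, release_level, serial):
    # Mirrors the TK_HEX_VERSION macro: one byte per version component.
    return (major << 24) | (minor << 16) | (release_level << 8) | serial

assert tk_hex_version(8, 4, TCL_FINAL_RELEASE, 13) < 0x0804020e   # protected by TKINTER_PROTECT_LOADTK
assert tk_hex_version(8, 4, TCL_FINAL_RELEASE, 14) >= 0x0804020e  # not protected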
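add_multiarch_paths() in the setup.py above discovers the Debian multiarch triplet by shelling out to the compiler (or to dpkg-architecture) and redirecting into a temporary file, presumably because the build cannot rely on extension modules being importable yet, a constraint the same file mentions for its Tk architecture probe. Outside that constraint the same probe can be written more directly; this is a sketch of my own using subprocess, not what the build actually runs:

import subprocess
import sysconfig

def multiarch_triplet():
    """Return e.g. 'x86_64-linux-gnu', or '' if the compiler does not report one."""
    cc = (sysconfig.get_config_var('CC') or 'cc').split()[0]
    try:
        proc = subprocess.run([cc, '-print-multiarch'],
                              capture_output=True, text=True)
    except OSError:
        return ''
    return proc.stdout.strip() if proc.returncode == 0 else ''

print(multiarch_triplet() or 'no multiarch triplet reported')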
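detect_modules() above recovers -I/-L/-R directories from CPPFLAGS and LDFLAGS with a regex plus optparse so that unrelated options do not trip the parser. The same extraction can be sketched with shlex instead; this is my own simplification of the idea, not the code the build uses:

import shlex

def dirs_from_flags(flags, prefix):
    # Collect directories from options like '-I/usr/include/tcl8.6'
    # or the two-token form '-I /usr/include/tcl8.6'.
    dirs = []
    expect_value = False
    for token in shlex.split(flags):
        if expect_value:
            dirs.append(token)
            expect_value = False
        elif token == prefix:
            expect_value = True
        elif token.startswith(prefix):
            dirs.append(token[len(prefix):])
    return dirs

print(dirs_from_flags('-g -I/opt/tcl/include -L /opt/tcl/lib', '-I'))  # ['/opt/tcl/include']
print(dirs_from_flags('-g -I/opt/tcl/include -L /opt/tcl/lib', '-L'))  # ['/opt/tcl/lib']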
diff -Nru python3-stdlib-extensions-3.9.7/3.9/Lib/distutils/command/check.py python3-stdlib-extensions-3.10.8/3.9/Lib/distutils/command/check.py --- python3-stdlib-extensions-3.9.7/3.9/Lib/distutils/command/check.py 2021-08-30 19:02:15.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.9/Lib/distutils/command/check.py 2022-10-11 14:48:37.000000000 +0000 @@ -83,7 +83,7 @@ name, version, URL Recommended fields: - (author and author_email) or (maintainer and maintainer_email)) + (author and author_email) or (maintainer and maintainer_email) Warns if any are missing. """ diff -Nru python3-stdlib-extensions-3.9.7/3.9/Lib/distutils/msvc9compiler.py python3-stdlib-extensions-3.10.8/3.9/Lib/distutils/msvc9compiler.py --- python3-stdlib-extensions-3.9.7/3.9/Lib/distutils/msvc9compiler.py 2021-08-30 19:02:15.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.9/Lib/distutils/msvc9compiler.py 2022-10-11 14:48:37.000000000 +0000 @@ -673,7 +673,7 @@ # If a manifest should be embedded, return a tuple of # (manifest_filename, resource_id). Returns None if no manifest # should be embedded. See http://bugs.python.org/issue7833 for why - # we want to avoid any manifest for extension modules if we can) + # we want to avoid any manifest for extension modules if we can. for arg in ld_args: if arg.startswith("/MANIFESTFILE:"): temp_manifest = arg.split(":", 1)[1] diff -Nru python3-stdlib-extensions-3.9.7/3.9/Lib/distutils/tests/test_bdist_rpm.py python3-stdlib-extensions-3.10.8/3.9/Lib/distutils/tests/test_bdist_rpm.py --- python3-stdlib-extensions-3.9.7/3.9/Lib/distutils/tests/test_bdist_rpm.py 2021-08-30 19:02:15.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.9/Lib/distutils/tests/test_bdist_rpm.py 2022-10-11 14:48:37.000000000 +0000 @@ -44,7 +44,7 @@ # spurious sdtout/stderr output under Mac OS X @unittest.skipUnless(sys.platform.startswith('linux'), 'spurious sdtout/stderr output under Mac OS X') - @requires_zlib + @requires_zlib() @unittest.skipIf(find_executable('rpm') is None, 'the rpm command is not found') @unittest.skipIf(find_executable('rpmbuild') is None, @@ -87,7 +87,7 @@ # spurious sdtout/stderr output under Mac OS X @unittest.skipUnless(sys.platform.startswith('linux'), 'spurious sdtout/stderr output under Mac OS X') - @requires_zlib + @requires_zlib() # http://bugs.python.org/issue1533164 @unittest.skipIf(find_executable('rpm') is None, 'the rpm command is not found') diff -Nru python3-stdlib-extensions-3.9.7/3.9/Lib/distutils/unixccompiler.py python3-stdlib-extensions-3.10.8/3.9/Lib/distutils/unixccompiler.py --- python3-stdlib-extensions-3.9.7/3.9/Lib/distutils/unixccompiler.py 2021-08-30 19:02:15.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.9/Lib/distutils/unixccompiler.py 2022-10-11 14:48:37.000000000 +0000 @@ -215,7 +215,8 @@ return "-L" + dir def _is_gcc(self, compiler_name): - return "gcc" in compiler_name or "g++" in compiler_name + # clang uses same syntax for rpath as gcc + return any(name in compiler_name for name in ("gcc", "g++", "clang")) def runtime_library_dir_option(self, dir): # XXX Hackish, at the very least. 
See Python bug #445902: diff -Nru python3-stdlib-extensions-3.9.7/3.9/Lib/lib2to3/fixes/fix_metaclass.py python3-stdlib-extensions-3.10.8/3.9/Lib/lib2to3/fixes/fix_metaclass.py --- python3-stdlib-extensions-3.9.7/3.9/Lib/lib2to3/fixes/fix_metaclass.py 2021-08-30 19:02:15.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.9/Lib/lib2to3/fixes/fix_metaclass.py 2022-10-11 14:48:37.000000000 +0000 @@ -51,7 +51,7 @@ # already in the preferred format, do nothing return - # !%@#! oneliners have no suite node, we have to fake one up + # !%@#! one-liners have no suite node, we have to fake one up for i, node in enumerate(cls_node.children): if node.type == token.COLON: break diff -Nru python3-stdlib-extensions-3.9.7/3.9/Lib/lib2to3/fixes/fix_paren.py python3-stdlib-extensions-3.10.8/3.9/Lib/lib2to3/fixes/fix_paren.py --- python3-stdlib-extensions-3.9.7/3.9/Lib/lib2to3/fixes/fix_paren.py 2021-08-30 19:02:15.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.9/Lib/lib2to3/fixes/fix_paren.py 2022-10-11 14:48:37.000000000 +0000 @@ -1,4 +1,4 @@ -"""Fixer that addes parentheses where they are required +"""Fixer that adds parentheses where they are required This converts ``[x for x in 1, 2]`` to ``[x for x in (1, 2)]``.""" diff -Nru python3-stdlib-extensions-3.9.7/3.9/Lib/lib2to3/pytree.py python3-stdlib-extensions-3.10.8/3.9/Lib/lib2to3/pytree.py --- python3-stdlib-extensions-3.9.7/3.9/Lib/lib2to3/pytree.py 2021-08-30 19:02:15.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.9/Lib/lib2to3/pytree.py 2022-10-11 14:48:37.000000000 +0000 @@ -720,8 +720,8 @@ r[self.name] = nodes[:count] yield count, r except RuntimeError: - # We fall back to the iterative pattern matching scheme if the recursive - # scheme hits the recursion limit. + # Fall back to the iterative pattern matching scheme if the + # recursive scheme hits the recursion limit (RecursionError). for count, r in self._iterative_matches(nodes): if self.name: r[self.name] = nodes[:count] diff -Nru python3-stdlib-extensions-3.9.7/3.9/Lib/lib2to3/tests/data/infinite_recursion.py python3-stdlib-extensions-3.10.8/3.9/Lib/lib2to3/tests/data/infinite_recursion.py --- python3-stdlib-extensions-3.9.7/3.9/Lib/lib2to3/tests/data/infinite_recursion.py 2021-08-30 19:02:15.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.9/Lib/lib2to3/tests/data/infinite_recursion.py 2022-10-11 14:48:37.000000000 +0000 @@ -1,5 +1,5 @@ -# This file is used to verify that 2to3 falls back to a slower, iterative pattern matching -# scheme in the event that the faster recursive system fails due to infinite recursion. +# Verify that 2to3 falls back from the recursive pattern matching scheme to a +# slower, iterative scheme in the event of a RecursionError. from ctypes import * STRING = c_char_p diff -Nru python3-stdlib-extensions-3.9.7/3.9/Lib/lib2to3/tests/data/py2_test_grammar.py python3-stdlib-extensions-3.10.8/3.9/Lib/lib2to3/tests/data/py2_test_grammar.py --- python3-stdlib-extensions-3.9.7/3.9/Lib/lib2to3/tests/data/py2_test_grammar.py 2021-08-30 19:02:15.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.9/Lib/lib2to3/tests/data/py2_test_grammar.py 2022-10-11 14:48:37.000000000 +0000 @@ -8,7 +8,7 @@ # regression test, the filterwarnings() call has been added to # regrtest.py. 
-from test.test_support import run_unittest, check_syntax_error +from test.test_support import check_syntax_error import unittest import sys # testing import * @@ -967,8 +967,5 @@ self.assertEqual((6 < 4 if 0 else 2), 2) -def test_main(): - run_unittest(TokenTests, GrammarTests) - if __name__ == '__main__': - test_main() + unittest.main() diff -Nru python3-stdlib-extensions-3.9.7/3.9/Lib/lib2to3/tests/data/py3_test_grammar.py python3-stdlib-extensions-3.10.8/3.9/Lib/lib2to3/tests/data/py3_test_grammar.py --- python3-stdlib-extensions-3.9.7/3.9/Lib/lib2to3/tests/data/py3_test_grammar.py 2021-08-30 19:02:15.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.9/Lib/lib2to3/tests/data/py3_test_grammar.py 2022-10-11 14:48:37.000000000 +0000 @@ -8,7 +8,7 @@ # regression test, the filterwarnings() call has been added to # regrtest.py. -from test.support import run_unittest, check_syntax_error +from test.support import check_syntax_error import unittest import sys # testing import * @@ -952,8 +952,5 @@ self.assertEqual((6 < 4 if 0 else 2), 2) -def test_main(): - run_unittest(TokenTests, GrammarTests) - if __name__ == '__main__': - test_main() + unittest.main() diff -Nru python3-stdlib-extensions-3.9.7/3.9/Lib/lib2to3/tests/test_all_fixers.py python3-stdlib-extensions-3.10.8/3.9/Lib/lib2to3/tests/test_all_fixers.py --- python3-stdlib-extensions-3.9.7/3.9/Lib/lib2to3/tests/test_all_fixers.py 2021-08-30 19:02:15.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.9/Lib/lib2to3/tests/test_all_fixers.py 2022-10-11 14:48:37.000000000 +0000 @@ -6,8 +6,10 @@ # Author: Collin Winter # Python imports -import unittest +import os.path +import sys import test.support +import unittest # Local imports from . import support @@ -19,9 +21,22 @@ def setUp(self): self.refactor = support.get_refactorer() + def refactor_file(self, filepath): + if test.support.verbose: + print(f"Refactor file: {filepath}") + if os.path.basename(filepath) == 'infinite_recursion.py': + # bpo-46542: Processing infinite_recursion.py can crash Python + # if Python is built in debug mode: lower the recursion limit + # to prevent a crash. 
+ with test.support.infinite_recursion(150): + self.refactor.refactor_file(filepath) + else: + self.refactor.refactor_file(filepath) + def test_all_project_files(self): for filepath in support.all_project_files(): - self.refactor.refactor_file(filepath) + with self.subTest(filepath=filepath): + self.refactor_file(filepath) if __name__ == '__main__': unittest.main() diff -Nru python3-stdlib-extensions-3.9.7/3.9/Lib/lib2to3/tests/test_parser.py python3-stdlib-extensions-3.10.8/3.9/Lib/lib2to3/tests/test_parser.py --- python3-stdlib-extensions-3.9.7/3.9/Lib/lib2to3/tests/test_parser.py 2021-08-30 19:02:15.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.9/Lib/lib2to3/tests/test_parser.py 2022-10-11 14:48:37.000000000 +0000 @@ -20,6 +20,7 @@ import subprocess import sys import tempfile +import test.support import unittest # Local imports @@ -582,25 +583,31 @@ """A cut-down version of pytree_idempotency.py.""" + def parse_file(self, filepath): + if test.support.verbose: + print(f"Parse file: {filepath}") + with open(filepath, "rb") as fp: + encoding = tokenize.detect_encoding(fp.readline)[0] + self.assertIsNotNone(encoding, + "can't detect encoding for %s" % filepath) + with open(filepath, "r", encoding=encoding) as fp: + source = fp.read() + try: + tree = driver.parse_string(source) + except ParseError: + try: + tree = driver_no_print_statement.parse_string(source) + except ParseError as err: + self.fail('ParseError on file %s (%s)' % (filepath, err)) + new = str(tree) + if new != source: + print(diff_texts(source, new, filepath)) + self.fail("Idempotency failed: %s" % filepath) + def test_all_project_files(self): for filepath in support.all_project_files(): - with open(filepath, "rb") as fp: - encoding = tokenize.detect_encoding(fp.readline)[0] - self.assertIsNotNone(encoding, - "can't detect encoding for %s" % filepath) - with open(filepath, "r", encoding=encoding) as fp: - source = fp.read() - try: - tree = driver.parse_string(source) - except ParseError: - try: - tree = driver_no_print_statement.parse_string(source) - except ParseError as err: - self.fail('ParseError on file %s (%s)' % (filepath, err)) - new = str(tree) - if new != source: - print(diff_texts(source, new, filepath)) - self.fail("Idempotency failed: %s" % filepath) + with self.subTest(filepath=filepath): + self.parse_file(filepath) def test_extended_unpacking(self): driver.parse_string("a, *b, c = x\n") diff -Nru python3-stdlib-extensions-3.9.7/3.9/Lib/tkinter/__init__.py python3-stdlib-extensions-3.10.8/3.9/Lib/tkinter/__init__.py --- python3-stdlib-extensions-3.9.7/3.9/Lib/tkinter/__init__.py 2021-08-30 19:02:15.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.9/Lib/tkinter/__init__.py 2022-10-11 14:48:37.000000000 +0000 @@ -2070,7 +2070,7 @@ the bitmap if None is given. Under Windows, the DEFAULT parameter can be used to set the icon - for the widget and any descendents that don't have an icon set + for the widget and any descendants that don't have an icon set explicitly. DEFAULT can be the relative path to a .ico file (example: root.iconbitmap(default='myicon.ico') ). See Tk documentation for more information.""" @@ -2316,9 +2316,9 @@ _default_root = None def readprofile(self, baseName, className): - """Internal function. It reads BASENAME.tcl and CLASSNAME.tcl into - the Tcl Interpreter and calls exec on the contents of BASENAME.py and - CLASSNAME.py if such a file exists in the home directory.""" + """Internal function. 
It reads .BASENAME.tcl and .CLASSNAME.tcl into + the Tcl Interpreter and calls exec on the contents of .BASENAME.py and + .CLASSNAME.py if such a file exists in the home directory.""" import os if 'HOME' in os.environ: home = os.environ['HOME'] else: home = os.curdir @@ -2707,7 +2707,7 @@ """Add tag NEWTAG to item which is closest to pixel at X, Y. If several match take the top-most. All items closer than HALO are considered overlapping (all are - closests). If START is specified the next below this tag is taken.""" + closest). If START is specified the next below this tag is taken.""" self.addtag(newtag, 'closest', x, y, halo, start) def addtag_enclosed(self, newtag, x1, y1, x2, y2): @@ -3302,7 +3302,7 @@ self.add('command', cnf or kw) def add_radiobutton(self, cnf={}, **kw): - """Addd radio menu item.""" + """Add radio menu item.""" self.add('radiobutton', cnf or kw) def add_separator(self, cnf={}, **kw): @@ -3327,7 +3327,7 @@ self.insert(index, 'command', cnf or kw) def insert_radiobutton(self, index, cnf={}, **kw): - """Addd radio menu item at INDEX.""" + """Add radio menu item at INDEX.""" self.insert(index, 'radiobutton', cnf or kw) def insert_separator(self, index, cnf={}, **kw): diff -Nru python3-stdlib-extensions-3.9.7/3.9/Lib/tkinter/test/runtktests.py python3-stdlib-extensions-3.10.8/3.9/Lib/tkinter/test/runtktests.py --- python3-stdlib-extensions-3.9.7/3.9/Lib/tkinter/test/runtktests.py 2021-08-30 19:02:15.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.9/Lib/tkinter/test/runtktests.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,69 +0,0 @@ -""" -Use this module to get and run all tk tests. - -tkinter tests should live in a package inside the directory where this file -lives, like test_tkinter. -Extensions also should live in packages following the same rule as above. -""" - -import os -import importlib -import test.support - -this_dir_path = os.path.abspath(os.path.dirname(__file__)) - -def is_package(path): - for name in os.listdir(path): - if name in ('__init__.py', '__init__.pyc'): - return True - return False - -def get_tests_modules(basepath=this_dir_path, gui=True, packages=None): - """This will import and yield modules whose names start with test_ - and are inside packages found in the path starting at basepath. - - If packages is specified it should contain package names that - want their tests collected. - """ - py_ext = '.py' - - for dirpath, dirnames, filenames in os.walk(basepath): - for dirname in list(dirnames): - if dirname[0] == '.': - dirnames.remove(dirname) - - if is_package(dirpath) and filenames: - pkg_name = dirpath[len(basepath) + len(os.sep):].replace('/', '.') - if packages and pkg_name not in packages: - continue - - filenames = filter( - lambda x: x.startswith('test_') and x.endswith(py_ext), - filenames) - - for name in filenames: - try: - yield importlib.import_module( - ".%s.%s" % (pkg_name, name[:-len(py_ext)]), - "tkinter.test") - except test.support.ResourceDenied: - if gui: - raise - -def get_tests(text=True, gui=True, packages=None): - """Yield all the tests in the modules found by get_tests_modules. 
- - If nogui is True, only tests that do not require a GUI will be - returned.""" - attrs = [] - if text: - attrs.append('tests_nogui') - if gui: - attrs.append('tests_gui') - for module in get_tests_modules(gui=gui, packages=packages): - for attr in attrs: - for test in getattr(module, attr, ()): - yield test - -if __name__ == "__main__": - test.support.run_unittest(*get_tests()) diff -Nru python3-stdlib-extensions-3.9.7/3.9/Lib/tkinter/test/test_tkinter/test_colorchooser.py python3-stdlib-extensions-3.10.8/3.9/Lib/tkinter/test/test_tkinter/test_colorchooser.py --- python3-stdlib-extensions-3.9.7/3.9/Lib/tkinter/test/test_tkinter/test_colorchooser.py 2021-08-30 19:02:15.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.9/Lib/tkinter/test/test_tkinter/test_colorchooser.py 2022-10-11 14:48:37.000000000 +0000 @@ -1,6 +1,6 @@ import unittest import tkinter -from test.support import requires, run_unittest, swap_attr +from test.support import requires, swap_attr from tkinter.test.support import AbstractTkTest from tkinter import colorchooser @@ -37,7 +37,5 @@ ((74, 60, 140), '#4a3c8c')) -tests_gui = (ChooserTest,) - if __name__ == "__main__": - run_unittest(*tests_gui) + unittest.main() diff -Nru python3-stdlib-extensions-3.9.7/3.9/Lib/tkinter/test/test_tkinter/test_font.py python3-stdlib-extensions-3.10.8/3.9/Lib/tkinter/test/test_tkinter/test_font.py --- python3-stdlib-extensions-3.9.7/3.9/Lib/tkinter/test/test_tkinter/test_font.py 2021-08-30 19:02:15.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.9/Lib/tkinter/test/test_tkinter/test_font.py 2022-10-11 14:48:37.000000000 +0000 @@ -1,7 +1,7 @@ import unittest import tkinter from tkinter import font -from test.support import requires, run_unittest, gc_collect, ALWAYS_EQ +from test.support import requires, gc_collect, ALWAYS_EQ from tkinter.test.support import AbstractTkTest, AbstractDefaultRootTest requires('gui') @@ -139,7 +139,5 @@ self.assertRaises(RuntimeError, font.names) -tests_gui = (FontTest, DefaultRootTest) - if __name__ == "__main__": - run_unittest(*tests_gui) + unittest.main() diff -Nru python3-stdlib-extensions-3.9.7/3.9/Lib/tkinter/test/test_tkinter/test_images.py python3-stdlib-extensions-3.10.8/3.9/Lib/tkinter/test/test_tkinter/test_images.py --- python3-stdlib-extensions-3.9.7/3.9/Lib/tkinter/test/test_tkinter/test_images.py 2021-08-30 19:02:15.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.9/Lib/tkinter/test/test_tkinter/test_images.py 2022-10-11 14:48:37.000000000 +0000 @@ -375,7 +375,5 @@ self.assertEqual(image.transparency_get(4, 6), False) -tests_gui = (MiscTest, DefaultRootTest, BitmapImageTest, PhotoImageTest,) - if __name__ == "__main__": - support.run_unittest(*tests_gui) + unittest.main() diff -Nru python3-stdlib-extensions-3.9.7/3.9/Lib/tkinter/test/test_tkinter/test_loadtk.py python3-stdlib-extensions-3.10.8/3.9/Lib/tkinter/test/test_tkinter/test_loadtk.py --- python3-stdlib-extensions-3.9.7/3.9/Lib/tkinter/test/test_tkinter/test_loadtk.py 2021-08-30 19:02:15.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.9/Lib/tkinter/test/test_tkinter/test_loadtk.py 2022-10-11 14:48:37.000000000 +0000 @@ -40,7 +40,6 @@ self.assertRaises(TclError, tcl.winfo_geometry) self.assertRaises(TclError, tcl.loadtk) -tests_gui = (TkLoadTest, ) if __name__ == "__main__": - test_support.run_unittest(*tests_gui) + unittest.main() diff -Nru python3-stdlib-extensions-3.9.7/3.9/Lib/tkinter/test/test_tkinter/test_misc.py python3-stdlib-extensions-3.10.8/3.9/Lib/tkinter/test/test_tkinter/test_misc.py --- 
python3-stdlib-extensions-3.9.7/3.9/Lib/tkinter/test/test_tkinter/test_misc.py 2021-08-30 19:02:15.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.9/Lib/tkinter/test/test_tkinter/test_misc.py 2022-10-11 14:48:37.000000000 +0000 @@ -193,6 +193,13 @@ root.clipboard_get() def test_winfo_rgb(self): + + def assertApprox(col1, col2): + # A small amount of flexibility is required (bpo-45496) + # 33 is ~0.05% of 65535, which is a reasonable margin + for col1_channel, col2_channel in zip(col1, col2): + self.assertAlmostEqual(col1_channel, col2_channel, delta=33) + root = self.root rgb = root.winfo_rgb @@ -202,9 +209,9 @@ # #RGB - extends each 4-bit hex value to be 16-bit. self.assertEqual(rgb('#F0F'), (0xFFFF, 0x0000, 0xFFFF)) # #RRGGBB - extends each 8-bit hex value to be 16-bit. - self.assertEqual(rgb('#4a3c8c'), (0x4a4a, 0x3c3c, 0x8c8c)) + assertApprox(rgb('#4a3c8c'), (0x4a4a, 0x3c3c, 0x8c8c)) # #RRRRGGGGBBBB - self.assertEqual(rgb('#dede14143939'), (0xdede, 0x1414, 0x3939)) + assertApprox(rgb('#dede14143939'), (0xdede, 0x1414, 0x3939)) # Invalid string. with self.assertRaises(tkinter.TclError): rgb('#123456789a') @@ -339,7 +346,5 @@ self.assertRaises(RuntimeError, tkinter.mainloop) -tests_gui = (MiscTest, DefaultRootTest) - if __name__ == "__main__": - support.run_unittest(*tests_gui) + unittest.main() diff -Nru python3-stdlib-extensions-3.9.7/3.9/Lib/tkinter/test/test_tkinter/test_simpledialog.py python3-stdlib-extensions-3.10.8/3.9/Lib/tkinter/test/test_tkinter/test_simpledialog.py --- python3-stdlib-extensions-3.9.7/3.9/Lib/tkinter/test/test_tkinter/test_simpledialog.py 2021-08-30 19:02:15.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.9/Lib/tkinter/test/test_tkinter/test_simpledialog.py 2022-10-11 14:48:37.000000000 +0000 @@ -1,6 +1,6 @@ import unittest import tkinter -from test.support import requires, run_unittest, swap_attr +from test.support import requires, swap_attr from tkinter.test.support import AbstractDefaultRootTest from tkinter.simpledialog import Dialog, askinteger @@ -19,7 +19,5 @@ self.assertRaises(RuntimeError, askinteger, "Go To Line", "Line number") -tests_gui = (DefaultRootTest,) - if __name__ == "__main__": - run_unittest(*tests_gui) + unittest.main() diff -Nru python3-stdlib-extensions-3.9.7/3.9/Lib/tkinter/test/test_tkinter/test_text.py python3-stdlib-extensions-3.10.8/3.9/Lib/tkinter/test/test_tkinter/test_text.py --- python3-stdlib-extensions-3.9.7/3.9/Lib/tkinter/test/test_tkinter/test_text.py 2021-08-30 19:02:15.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.9/Lib/tkinter/test/test_tkinter/test_text.py 2022-10-11 14:48:37.000000000 +0000 @@ -1,6 +1,6 @@ import unittest import tkinter -from test.support import requires, run_unittest +from test.support import requires from tkinter.test.support import AbstractTkTest requires('gui') @@ -41,7 +41,5 @@ self.assertEqual(text.search('test', '1.0', 'end'), '1.3') -tests_gui = (TextTest, ) - if __name__ == "__main__": - run_unittest(*tests_gui) + unittest.main() diff -Nru python3-stdlib-extensions-3.9.7/3.9/Lib/tkinter/test/test_tkinter/test_variables.py python3-stdlib-extensions-3.10.8/3.9/Lib/tkinter/test/test_tkinter/test_variables.py --- python3-stdlib-extensions-3.9.7/3.9/Lib/tkinter/test/test_tkinter/test_variables.py 2021-08-30 19:02:15.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.9/Lib/tkinter/test/test_tkinter/test_variables.py 2022-10-11 14:48:37.000000000 +0000 @@ -338,10 +338,5 @@ self.assertRaises(RuntimeError, Variable) -tests_gui = (TestVariable, TestStringVar, TestIntVar, - 
TestDoubleVar, TestBooleanVar, DefaultRootTest) - - if __name__ == "__main__": - from test.support import run_unittest - run_unittest(*tests_gui) + unittest.main() diff -Nru python3-stdlib-extensions-3.9.7/3.9/Lib/tkinter/test/test_tkinter/test_widgets.py python3-stdlib-extensions-3.10.8/3.9/Lib/tkinter/test/test_tkinter/test_widgets.py --- python3-stdlib-extensions-3.9.7/3.9/Lib/tkinter/test/test_tkinter/test_widgets.py 2021-08-30 19:02:15.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.9/Lib/tkinter/test/test_tkinter/test_widgets.py 2022-10-11 14:48:37.000000000 +0000 @@ -800,7 +800,7 @@ self.checkEnumParam(widget, 'activestyle', 'dotbox', 'none', 'underline') - test_justify = requires_tcl(8, 6, 5)(StandardOptionsTests.test_configure_justify) + test_configure_justify = requires_tcl(8, 6, 5)(StandardOptionsTests.test_configure_justify) def test_configure_listvariable(self): widget = self.create() @@ -939,7 +939,7 @@ def test_configure_from(self): widget = self.create() - conv = False if get_tk_patchlevel() >= (8, 6, 10) else float_round + conv = float if get_tk_patchlevel() >= (8, 6, 10) else float_round self.checkFloatParam(widget, 'from', 100, 14.9, 15.1, conv=conv) def test_configure_label(self): @@ -1241,8 +1241,11 @@ def test_configure_type(self): widget = self.create() - self.checkEnumParam(widget, 'type', - 'normal', 'tearoff', 'menubar') + self.checkEnumParam( + widget, 'type', + 'normal', 'tearoff', 'menubar', + errmsg='bad type "{}": must be normal, tearoff, or menubar', + ) def test_entryconfigure(self): m1 = self.create() diff -Nru python3-stdlib-extensions-3.9.7/3.9/Lib/tkinter/test/test_ttk/test_extensions.py python3-stdlib-extensions-3.10.8/3.9/Lib/tkinter/test/test_ttk/test_extensions.py --- python3-stdlib-extensions-3.9.7/3.9/Lib/tkinter/test/test_ttk/test_extensions.py 2021-08-30 19:02:15.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.9/Lib/tkinter/test/test_ttk/test_extensions.py 2022-10-11 14:48:37.000000000 +0000 @@ -2,7 +2,7 @@ import unittest import tkinter from tkinter import ttk -from test.support import requires, run_unittest, gc_collect +from test.support import requires, gc_collect from tkinter.test.support import AbstractTkTest, AbstractDefaultRootTest requires('gui') @@ -301,6 +301,22 @@ optmenu.destroy() optmenu2.destroy() + def test_trace_variable(self): + # prior to bpo45160, tracing a variable would cause the callback to be made twice + success = [] + items = ('a', 'b', 'c') + textvar = tkinter.StringVar(self.root) + def cb_test(*args): + success.append(textvar.get()) + optmenu = ttk.OptionMenu(self.root, textvar, "a", *items) + optmenu.pack() + cb_name = textvar.trace_add("write", cb_test) + optmenu['menu'].invoke(1) + self.assertEqual(success, ['b']) + self.assertEqual(textvar.get(), 'b') + textvar.trace_remove("write", cb_name) + optmenu.destroy() + class DefaultRootTest(AbstractDefaultRootTest, unittest.TestCase): @@ -308,7 +324,5 @@ self._test_widget(ttk.LabeledScale) -tests_gui = (LabeledScaleTest, OptionMenuTest, DefaultRootTest) - if __name__ == "__main__": - run_unittest(*tests_gui) + unittest.main() diff -Nru python3-stdlib-extensions-3.9.7/3.9/Lib/tkinter/test/test_ttk/test_functions.py python3-stdlib-extensions-3.10.8/3.9/Lib/tkinter/test/test_ttk/test_functions.py --- python3-stdlib-extensions-3.9.7/3.9/Lib/tkinter/test/test_ttk/test_functions.py 2021-08-30 19:02:15.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.9/Lib/tkinter/test/test_ttk/test_functions.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,460 +0,0 @@ -# -*- 
encoding: utf-8 -*- -import unittest -from tkinter import ttk - -class MockTkApp: - - def splitlist(self, arg): - if isinstance(arg, tuple): - return arg - return arg.split(':') - - def wantobjects(self): - return True - - -class MockTclObj(object): - typename = 'test' - - def __init__(self, val): - self.val = val - - def __str__(self): - return str(self.val) - - -class MockStateSpec(object): - typename = 'StateSpec' - - def __init__(self, *args): - self.val = args - - def __str__(self): - return ' '.join(self.val) - - -class InternalFunctionsTest(unittest.TestCase): - - def test_format_optdict(self): - def check_against(fmt_opts, result): - for i in range(0, len(fmt_opts), 2): - self.assertEqual(result.pop(fmt_opts[i]), fmt_opts[i + 1]) - if result: - self.fail("result still got elements: %s" % result) - - # passing an empty dict should return an empty object (tuple here) - self.assertFalse(ttk._format_optdict({})) - - # check list formatting - check_against( - ttk._format_optdict({'fg': 'blue', 'padding': [1, 2, 3, 4]}), - {'-fg': 'blue', '-padding': '1 2 3 4'}) - - # check tuple formatting (same as list) - check_against( - ttk._format_optdict({'test': (1, 2, '', 0)}), - {'-test': '1 2 {} 0'}) - - # check untouched values - check_against( - ttk._format_optdict({'test': {'left': 'as is'}}), - {'-test': {'left': 'as is'}}) - - # check script formatting - check_against( - ttk._format_optdict( - {'test': [1, -1, '', '2m', 0], 'test2': 3, - 'test3': '', 'test4': 'abc def', - 'test5': '"abc"', 'test6': '{}', - 'test7': '} -spam {'}, script=True), - {'-test': '{1 -1 {} 2m 0}', '-test2': '3', - '-test3': '{}', '-test4': '{abc def}', - '-test5': '{"abc"}', '-test6': r'\{\}', - '-test7': r'\}\ -spam\ \{'}) - - opts = {'αβγ': True, 'á': False} - orig_opts = opts.copy() - # check if giving unicode keys is fine - check_against(ttk._format_optdict(opts), {'-αβγ': True, '-á': False}) - # opts should remain unchanged - self.assertEqual(opts, orig_opts) - - # passing values with spaces inside a tuple/list - check_against( - ttk._format_optdict( - {'option': ('one two', 'three')}), - {'-option': '{one two} three'}) - check_against( - ttk._format_optdict( - {'option': ('one\ttwo', 'three')}), - {'-option': '{one\ttwo} three'}) - - # passing empty strings inside a tuple/list - check_against( - ttk._format_optdict( - {'option': ('', 'one')}), - {'-option': '{} one'}) - - # passing values with braces inside a tuple/list - check_against( - ttk._format_optdict( - {'option': ('one} {two', 'three')}), - {'-option': r'one\}\ \{two three'}) - - # passing quoted strings inside a tuple/list - check_against( - ttk._format_optdict( - {'option': ('"one"', 'two')}), - {'-option': '{"one"} two'}) - check_against( - ttk._format_optdict( - {'option': ('{one}', 'two')}), - {'-option': r'\{one\} two'}) - - # ignore an option - amount_opts = len(ttk._format_optdict(opts, ignore=('á'))) / 2 - self.assertEqual(amount_opts, len(opts) - 1) - - # ignore non-existing options - amount_opts = len(ttk._format_optdict(opts, ignore=('á', 'b'))) / 2 - self.assertEqual(amount_opts, len(opts) - 1) - - # ignore every option - self.assertFalse(ttk._format_optdict(opts, ignore=list(opts.keys()))) - - - def test_format_mapdict(self): - opts = {'a': [('b', 'c', 'val'), ('d', 'otherval'), ('', 'single')]} - result = ttk._format_mapdict(opts) - self.assertEqual(len(result), len(list(opts.keys())) * 2) - self.assertEqual(result, ('-a', '{b c} val d otherval {} single')) - self.assertEqual(ttk._format_mapdict(opts, script=True), - ('-a', '{{b c} val 
d otherval {} single}')) - - self.assertEqual(ttk._format_mapdict({2: []}), ('-2', '')) - - opts = {'üñíćódè': [('á', 'vãl')]} - result = ttk._format_mapdict(opts) - self.assertEqual(result, ('-üñíćódè', 'á vãl')) - - self.assertEqual(ttk._format_mapdict({'opt': [('value',)]}), - ('-opt', '{} value')) - - # empty states - valid = {'opt': [('', '', 'hi')]} - self.assertEqual(ttk._format_mapdict(valid), ('-opt', '{ } hi')) - - # when passing multiple states, they all must be strings - invalid = {'opt': [(1, 2, 'valid val')]} - self.assertRaises(TypeError, ttk._format_mapdict, invalid) - invalid = {'opt': [([1], '2', 'valid val')]} - self.assertRaises(TypeError, ttk._format_mapdict, invalid) - # but when passing a single state, it can be anything - valid = {'opt': [[1, 'value']]} - self.assertEqual(ttk._format_mapdict(valid), ('-opt', '1 value')) - # special attention to single states which evaluate to False - for stateval in (None, 0, False, '', set()): # just some samples - valid = {'opt': [(stateval, 'value')]} - self.assertEqual(ttk._format_mapdict(valid), - ('-opt', '{} value')) - - # values must be iterable - opts = {'a': None} - self.assertRaises(TypeError, ttk._format_mapdict, opts) - - - def test_format_elemcreate(self): - self.assertTrue(ttk._format_elemcreate(None), (None, ())) - - ## Testing type = image - # image type expects at least an image name, so this should raise - # IndexError since it tries to access the index 0 of an empty tuple - self.assertRaises(IndexError, ttk._format_elemcreate, 'image') - - # don't format returned values as a tcl script - # minimum acceptable for image type - self.assertEqual(ttk._format_elemcreate('image', False, 'test'), - ("test ", ())) - # specifying a state spec - self.assertEqual(ttk._format_elemcreate('image', False, 'test', - ('', 'a')), ("test {} a", ())) - # state spec with multiple states - self.assertEqual(ttk._format_elemcreate('image', False, 'test', - ('a', 'b', 'c')), ("test {a b} c", ())) - # state spec and options - self.assertEqual(ttk._format_elemcreate('image', False, 'test', - ('a', 'b'), a='x'), ("test a b", ("-a", "x"))) - # format returned values as a tcl script - # state spec with multiple states and an option with a multivalue - self.assertEqual(ttk._format_elemcreate('image', True, 'test', - ('a', 'b', 'c', 'd'), x=[2, 3]), ("{test {a b c} d}", "-x {2 3}")) - - ## Testing type = vsapi - # vsapi type expects at least a class name and a part_id, so this - # should raise a ValueError since it tries to get two elements from - # an empty tuple - self.assertRaises(ValueError, ttk._format_elemcreate, 'vsapi') - - # don't format returned values as a tcl script - # minimum acceptable for vsapi - self.assertEqual(ttk._format_elemcreate('vsapi', False, 'a', 'b'), - ("a b ", ())) - # now with a state spec with multiple states - self.assertEqual(ttk._format_elemcreate('vsapi', False, 'a', 'b', - ('a', 'b', 'c')), ("a b {a b} c", ())) - # state spec and option - self.assertEqual(ttk._format_elemcreate('vsapi', False, 'a', 'b', - ('a', 'b'), opt='x'), ("a b a b", ("-opt", "x"))) - # format returned values as a tcl script - # state spec with a multivalue and an option - self.assertEqual(ttk._format_elemcreate('vsapi', True, 'a', 'b', - ('a', 'b', [1, 2]), opt='x'), ("{a b {a b} {1 2}}", "-opt x")) - - # Testing type = from - # from type expects at least a type name - self.assertRaises(IndexError, ttk._format_elemcreate, 'from') - - self.assertEqual(ttk._format_elemcreate('from', False, 'a'), - ('a', ())) - 
self.assertEqual(ttk._format_elemcreate('from', False, 'a', 'b'), - ('a', ('b', ))) - self.assertEqual(ttk._format_elemcreate('from', True, 'a', 'b'), - ('{a}', 'b')) - - - def test_format_layoutlist(self): - def sample(indent=0, indent_size=2): - return ttk._format_layoutlist( - [('a', {'other': [1, 2, 3], 'children': - [('b', {'children': - [('c', {'children': - [('d', {'nice': 'opt'})], 'something': (1, 2) - })] - })] - })], indent=indent, indent_size=indent_size)[0] - - def sample_expected(indent=0, indent_size=2): - spaces = lambda amount=0: ' ' * (amount + indent) - return ( - "%sa -other {1 2 3} -children {\n" - "%sb -children {\n" - "%sc -something {1 2} -children {\n" - "%sd -nice opt\n" - "%s}\n" - "%s}\n" - "%s}" % (spaces(), spaces(indent_size), - spaces(2 * indent_size), spaces(3 * indent_size), - spaces(2 * indent_size), spaces(indent_size), spaces())) - - # empty layout - self.assertEqual(ttk._format_layoutlist([])[0], '') - - # _format_layoutlist always expects the second item (in every item) - # to act like a dict (except when the value evaluates to False). - self.assertRaises(AttributeError, - ttk._format_layoutlist, [('a', 'b')]) - - smallest = ttk._format_layoutlist([('a', None)], indent=0) - self.assertEqual(smallest, - ttk._format_layoutlist([('a', '')], indent=0)) - self.assertEqual(smallest[0], 'a') - - # testing indentation levels - self.assertEqual(sample(), sample_expected()) - for i in range(4): - self.assertEqual(sample(i), sample_expected(i)) - self.assertEqual(sample(i, i), sample_expected(i, i)) - - # invalid layout format, different kind of exceptions will be - # raised by internal functions - - # plain wrong format - self.assertRaises(ValueError, ttk._format_layoutlist, - ['bad', 'format']) - # will try to use iteritems in the 'bad' string - self.assertRaises(AttributeError, ttk._format_layoutlist, - [('name', 'bad')]) - # bad children formatting - self.assertRaises(ValueError, ttk._format_layoutlist, - [('name', {'children': {'a': None}})]) - - - def test_script_from_settings(self): - # empty options - self.assertFalse(ttk._script_from_settings({'name': - {'configure': None, 'map': None, 'element create': None}})) - - # empty layout - self.assertEqual( - ttk._script_from_settings({'name': {'layout': None}}), - "ttk::style layout name {\nnull\n}") - - configdict = {'αβγ': True, 'á': False} - self.assertTrue( - ttk._script_from_settings({'name': {'configure': configdict}})) - - mapdict = {'üñíćódè': [('á', 'vãl')]} - self.assertTrue( - ttk._script_from_settings({'name': {'map': mapdict}})) - - # invalid image element - self.assertRaises(IndexError, - ttk._script_from_settings, {'name': {'element create': ['image']}}) - - # minimal valid image - self.assertTrue(ttk._script_from_settings({'name': - {'element create': ['image', 'name']}})) - - image = {'thing': {'element create': - ['image', 'name', ('state1', 'state2', 'val')]}} - self.assertEqual(ttk._script_from_settings(image), - "ttk::style element create thing image {name {state1 state2} val} ") - - image['thing']['element create'].append({'opt': 30}) - self.assertEqual(ttk._script_from_settings(image), - "ttk::style element create thing image {name {state1 state2} val} " - "-opt 30") - - image['thing']['element create'][-1]['opt'] = [MockTclObj(3), - MockTclObj('2m')] - self.assertEqual(ttk._script_from_settings(image), - "ttk::style element create thing image {name {state1 state2} val} " - "-opt {3 2m}") - - - def test_tclobj_to_py(self): - self.assertEqual( - ttk._tclobj_to_py((MockStateSpec('a', 
'b'), 'val')), - [('a', 'b', 'val')]) - self.assertEqual( - ttk._tclobj_to_py([MockTclObj('1'), 2, MockTclObj('3m')]), - [1, 2, '3m']) - - - def test_list_from_statespec(self): - def test_it(sspec, value, res_value, states): - self.assertEqual(ttk._list_from_statespec( - (sspec, value)), [states + (res_value, )]) - - states_even = tuple('state%d' % i for i in range(6)) - statespec = MockStateSpec(*states_even) - test_it(statespec, 'val', 'val', states_even) - test_it(statespec, MockTclObj('val'), 'val', states_even) - - states_odd = tuple('state%d' % i for i in range(5)) - statespec = MockStateSpec(*states_odd) - test_it(statespec, 'val', 'val', states_odd) - - test_it(('a', 'b', 'c'), MockTclObj('val'), 'val', ('a', 'b', 'c')) - - - def test_list_from_layouttuple(self): - tk = MockTkApp() - - # empty layout tuple - self.assertFalse(ttk._list_from_layouttuple(tk, ())) - - # shortest layout tuple - self.assertEqual(ttk._list_from_layouttuple(tk, ('name', )), - [('name', {})]) - - # not so interesting ltuple - sample_ltuple = ('name', '-option', 'value') - self.assertEqual(ttk._list_from_layouttuple(tk, sample_ltuple), - [('name', {'option': 'value'})]) - - # empty children - self.assertEqual(ttk._list_from_layouttuple(tk, - ('something', '-children', ())), - [('something', {'children': []})] - ) - - # more interesting ltuple - ltuple = ( - 'name', '-option', 'niceone', '-children', ( - ('otherone', '-children', ( - ('child', )), '-otheropt', 'othervalue' - ) - ) - ) - self.assertEqual(ttk._list_from_layouttuple(tk, ltuple), - [('name', {'option': 'niceone', 'children': - [('otherone', {'otheropt': 'othervalue', 'children': - [('child', {})] - })] - })] - ) - - # bad tuples - self.assertRaises(ValueError, ttk._list_from_layouttuple, tk, - ('name', 'no_minus')) - self.assertRaises(ValueError, ttk._list_from_layouttuple, tk, - ('name', 'no_minus', 'value')) - self.assertRaises(ValueError, ttk._list_from_layouttuple, tk, - ('something', '-children')) # no children - - - def test_val_or_dict(self): - def func(res, opt=None, val=None): - if opt is None: - return res - if val is None: - return "test val" - return (opt, val) - - tk = MockTkApp() - tk.call = func - - self.assertEqual(ttk._val_or_dict(tk, {}, '-test:3'), - {'test': '3'}) - self.assertEqual(ttk._val_or_dict(tk, {}, ('-test', 3)), - {'test': 3}) - - self.assertEqual(ttk._val_or_dict(tk, {'test': None}, 'x:y'), - 'test val') - - self.assertEqual(ttk._val_or_dict(tk, {'test': 3}, 'x:y'), - {'test': 3}) - - - def test_convert_stringval(self): - tests = ( - (0, 0), ('09', 9), ('a', 'a'), ('áÚ', 'áÚ'), ([], '[]'), - (None, 'None') - ) - for orig, expected in tests: - self.assertEqual(ttk._convert_stringval(orig), expected) - - -class TclObjsToPyTest(unittest.TestCase): - - def test_unicode(self): - adict = {'opt': 'välúè'} - self.assertEqual(ttk.tclobjs_to_py(adict), {'opt': 'välúè'}) - - adict['opt'] = MockTclObj(adict['opt']) - self.assertEqual(ttk.tclobjs_to_py(adict), {'opt': 'välúè'}) - - def test_multivalues(self): - adict = {'opt': [1, 2, 3, 4]} - self.assertEqual(ttk.tclobjs_to_py(adict), {'opt': [1, 2, 3, 4]}) - - adict['opt'] = [1, 'xm', 3] - self.assertEqual(ttk.tclobjs_to_py(adict), {'opt': [1, 'xm', 3]}) - - adict['opt'] = (MockStateSpec('a', 'b'), 'válũè') - self.assertEqual(ttk.tclobjs_to_py(adict), - {'opt': [('a', 'b', 'válũè')]}) - - self.assertEqual(ttk.tclobjs_to_py({'x': ['y z']}), - {'x': ['y z']}) - - def test_nosplit(self): - self.assertEqual(ttk.tclobjs_to_py({'text': 'some text'}), - {'text': 'some text'}) - 
-tests_nogui = (InternalFunctionsTest, TclObjsToPyTest) - -if __name__ == "__main__": - from test.support import run_unittest - run_unittest(*tests_nogui) diff -Nru python3-stdlib-extensions-3.9.7/3.9/Lib/tkinter/test/test_ttk/test_style.py python3-stdlib-extensions-3.10.8/3.9/Lib/tkinter/test/test_ttk/test_style.py --- python3-stdlib-extensions-3.9.7/3.9/Lib/tkinter/test/test_ttk/test_style.py 2021-08-30 19:02:15.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.9/Lib/tkinter/test/test_ttk/test_style.py 2022-10-11 14:48:37.000000000 +0000 @@ -3,8 +3,8 @@ import tkinter from tkinter import ttk from test import support -from test.support import requires, run_unittest -from tkinter.test.support import AbstractTkTest +from test.support import requires +from tkinter.test.support import AbstractTkTest, get_tk_patchlevel requires('gui') @@ -170,12 +170,12 @@ newname = f'C.{name}' self.assertEqual(style.map(newname), {}) style.map(newname, **default) + if theme == 'alt' and name == '.' and get_tk_patchlevel() < (8, 6, 1): + default['embossed'] = [('disabled', '1')] self.assertEqual(style.map(newname), default) for key, value in default.items(): self.assertEqual(style.map(newname, key), value) -tests_gui = (StyleTest, ) - if __name__ == "__main__": - run_unittest(*tests_gui) + unittest.main() diff -Nru python3-stdlib-extensions-3.9.7/3.9/Lib/tkinter/test/test_ttk/test_widgets.py python3-stdlib-extensions-3.10.8/3.9/Lib/tkinter/test/test_ttk/test_widgets.py --- python3-stdlib-extensions-3.9.7/3.9/Lib/tkinter/test/test_ttk/test_widgets.py 2021-08-30 19:02:15.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.9/Lib/tkinter/test/test_ttk/test_widgets.py 2022-10-11 14:48:37.000000000 +0000 @@ -4,7 +4,7 @@ from test.support import requires, gc_collect import sys -from tkinter.test.test_ttk.test_functions import MockTclObj +from test.test_ttk_textonly import MockTclObj from tkinter.test.support import (AbstractTkTest, tcl_version, get_tk_patchlevel, simulate_mouse_click, AbstractDefaultRootTest) from tkinter.test.widget_tests import (add_standard_options, noconv, @@ -61,7 +61,6 @@ self.widget = ttk.Button(self.root, width=0, text="Text") self.widget.pack() - def test_identify(self): self.widget.update() self.assertEqual(self.widget.identify( @@ -74,7 +73,6 @@ self.assertRaises(tkinter.TclError, self.widget.identify, 5, None) self.assertRaises(tkinter.TclError, self.widget.identify, 5, '') - def test_widget_state(self): # XXX not sure about the portability of all these tests self.assertEqual(self.widget.state(), ()) @@ -169,10 +167,13 @@ errmsg='image "spam" doesn\'t exist') def test_configure_compound(self): + options = 'none text image center top bottom left right'.split() + errmsg = ( + 'bad compound "{}": must be' + f' {", ".join(options[:-1])}, or {options[-1]}' + ) widget = self.create() - self.checkEnumParam(widget, 'compound', - 'none', 'text', 'image', 'center', - 'top', 'bottom', 'left', 'right') + self.checkEnumParam(widget, 'compound', *options, errmsg=errmsg) def test_configure_state(self): widget = self.create() @@ -284,6 +285,7 @@ 'show', 'state', 'style', 'takefocus', 'textvariable', 'validate', 'validatecommand', 'width', 'xscrollcommand', ) + IDENTIFY_AS = 'Entry.field' if sys.platform == 'darwin' else 'textarea' def setUp(self): super().setUp() @@ -316,30 +318,23 @@ widget = self.create() self.checkCommandParam(widget, 'validatecommand') - def test_bbox(self): self.assertIsBoundingBox(self.entry.bbox(0)) self.assertRaises(tkinter.TclError, self.entry.bbox, 'noindex') 
self.assertRaises(tkinter.TclError, self.entry.bbox, None) - def test_identify(self): self.entry.pack() self.entry.update() # bpo-27313: macOS Cocoa widget differs from X, allow either - if sys.platform == 'darwin': - self.assertIn(self.entry.identify(5, 5), - ("textarea", "Combobox.button") ) - else: - self.assertEqual(self.entry.identify(5, 5), "textarea") + self.assertEqual(self.entry.identify(5, 5), self.IDENTIFY_AS) self.assertEqual(self.entry.identify(-1, -1), "") self.assertRaises(tkinter.TclError, self.entry.identify, None, 5) self.assertRaises(tkinter.TclError, self.entry.identify, 5, None) self.assertRaises(tkinter.TclError, self.entry.identify, 5, '') - def test_validation_options(self): success = [] test_invalid = lambda: success.append(True) @@ -367,7 +362,6 @@ self.entry['validatecommand'] = True self.assertRaises(tkinter.TclError, self.entry.validate) - def test_validation(self): validation = [] def validate(to_insert): @@ -385,7 +379,6 @@ self.assertEqual(validation, [False, True]) self.assertEqual(self.entry.get(), 'a') - def test_revalidation(self): def validate(content): for letter in content: @@ -421,6 +414,7 @@ 'validate', 'validatecommand', 'values', 'width', 'xscrollcommand', ) + IDENTIFY_AS = 'Combobox.button' if sys.platform == 'darwin' else 'textarea' def setUp(self): super().setUp() @@ -436,7 +430,8 @@ def _show_drop_down_listbox(self): width = self.combo.winfo_width() x, y = width - 5, 5 - self.assertRegex(self.combo.identify(x, y), r'.*downarrow\Z') + if sys.platform != 'darwin': # there's no down arrow on macOS + self.assertRegex(self.combo.identify(x, y), r'.*downarrow\Z') self.combo.event_generate('', x=x, y=y) self.combo.event_generate('', x=x, y=y) self.combo.update_idletasks() @@ -458,7 +453,6 @@ self.assertTrue(success) - def test_configure_postcommand(self): success = [] @@ -474,7 +468,6 @@ self._show_drop_down_listbox() self.assertEqual(len(success), 1) - def test_configure_values(self): def check_get_current(getval, currval): self.assertEqual(self.combo.get(), getval) @@ -589,7 +582,6 @@ other_child.destroy() self.assertRaises(tkinter.TclError, self.paned.pane, 0) - def test_forget(self): self.assertRaises(tkinter.TclError, self.paned.forget, None) self.assertRaises(tkinter.TclError, self.paned.forget, 0) @@ -598,7 +590,6 @@ self.paned.forget(0) self.assertRaises(tkinter.TclError, self.paned.forget, 0) - def test_insert(self): self.assertRaises(tkinter.TclError, self.paned.insert, None, 0) self.assertRaises(tkinter.TclError, self.paned.insert, 0, None) @@ -633,7 +624,6 @@ self.assertEqual(self.paned.panes(), (str(child3), str(child2), str(child))) - def test_pane(self): self.assertRaises(tkinter.TclError, self.paned.pane, 0) @@ -650,7 +640,6 @@ self.assertRaises(tkinter.TclError, self.paned.pane, 0, badoption='somevalue') - def test_sashpos(self): self.assertRaises(tkinter.TclError, self.paned.sashpos, None) self.assertRaises(tkinter.TclError, self.paned.sashpos, '') @@ -798,7 +787,6 @@ self.assertFalse(failure) - def test_get(self): if self.wantobjects: conv = lambda x: x @@ -816,7 +804,6 @@ self.assertRaises(tkinter.TclError, self.scale.get, '', 0) self.assertRaises(tkinter.TclError, self.scale.get, 0, '') - def test_set(self): if self.wantobjects: conv = lambda x: x @@ -949,7 +936,6 @@ else: self.fail("Tab with text 'a' not found") - def test_add_and_hidden(self): self.assertRaises(tkinter.TclError, self.nb.hide, -1) self.assertRaises(tkinter.TclError, self.nb.hide, 'hi') @@ -968,7 +954,7 @@ tabs = self.nb.tabs() curr = self.nb.index('current') - # 
verify that the tab gets readded at its previous position + # verify that the tab gets re-added at its previous position child2_index = self.nb.index(self.child2) self.nb.hide(self.child2) self.nb.add(self.child2) @@ -978,7 +964,6 @@ # but the tab next to it (not hidden) is the one selected now self.assertEqual(self.nb.index('current'), curr + 1) - def test_forget(self): self.assertRaises(tkinter.TclError, self.nb.forget, -1) self.assertRaises(tkinter.TclError, self.nb.forget, 'hi') @@ -994,7 +979,6 @@ self.assertEqual(self.nb.index(self.child1), 1) self.assertNotEqual(child1_index, self.nb.index(self.child1)) - def test_index(self): self.assertRaises(tkinter.TclError, self.nb.index, -1) self.assertRaises(tkinter.TclError, self.nb.index, None) @@ -1004,7 +988,6 @@ self.assertEqual(self.nb.index(self.child2), 1) self.assertEqual(self.nb.index('end'), 2) - def test_insert(self): # moving tabs tabs = self.nb.tabs() @@ -1037,7 +1020,6 @@ self.assertRaises(tkinter.TclError, self.nb.insert, None, 0) self.assertRaises(tkinter.TclError, self.nb.insert, None, None) - def test_select(self): self.nb.pack() self.nb.update() @@ -1057,7 +1039,6 @@ self.nb.update() self.assertTrue(tab_changed) - def test_tab(self): self.assertRaises(tkinter.TclError, self.nb.tab, -1) self.assertRaises(tkinter.TclError, self.nb.tab, 'notab') @@ -1071,7 +1052,6 @@ self.assertEqual(self.nb.tab(self.child1, text=None), 'abc') self.assertEqual(self.nb.tab(self.child1, 'text'), 'abc') - def test_configure_tabs(self): self.assertEqual(len(self.nb.tabs()), 2) @@ -1080,14 +1060,14 @@ self.assertEqual(self.nb.tabs(), ()) - def test_traversal(self): self.nb.pack() self.nb.update() self.nb.select(0) - self.assertEqual(self.nb.identify(5, 5), 'focus') + focus_identify_as = 'focus' if sys.platform != 'darwin' else '' + self.assertEqual(self.nb.identify(5, 5), focus_identify_as) simulate_mouse_click(self.nb, 5, 5) self.nb.focus_force() self.nb.event_generate('') @@ -1100,15 +1080,24 @@ self.assertEqual(self.nb.select(), str(self.child2)) self.nb.tab(self.child1, text='a', underline=0) + self.nb.tab(self.child2, text='e', underline=0) self.nb.enable_traversal() self.nb.focus_force() - self.assertEqual(self.nb.identify(5, 5), 'focus') + self.assertEqual(self.nb.identify(5, 5), focus_identify_as) simulate_mouse_click(self.nb, 5, 5) + # on macOS Emacs-style keyboard shortcuts are region-dependent; + # let's use the regular arrow keys instead if sys.platform == 'darwin': - self.nb.event_generate('') + begin = '' + end = '' else: - self.nb.event_generate('') + begin = '' + end = '' + self.nb.event_generate(begin) self.assertEqual(self.nb.select(), str(self.child1)) + self.nb.event_generate(end) + self.assertEqual(self.nb.select(), str(self.child2)) + @add_standard_options(IntegerSizeTests, StandardTtkOptionsTests) class SpinboxTest(EntryTest, unittest.TestCase): @@ -1119,6 +1108,7 @@ 'takefocus', 'textvariable', 'to', 'validate', 'validatecommand', 'values', 'width', 'wrap', 'xscrollcommand', ) + IDENTIFY_AS = 'Spinbox.field' if sys.platform == 'darwin' else 'textarea' def setUp(self): super().setUp() @@ -1367,7 +1357,6 @@ child1 = self.tv.insert(item_id, 'end') self.assertEqual(self.tv.bbox(child1), '') - def test_children(self): # no children yet, should get an empty tuple self.assertEqual(self.tv.get_children(), ()) @@ -1395,7 +1384,6 @@ self.tv.set_children('') self.assertEqual(self.tv.get_children(), ()) - def test_column(self): # return a dict with all options/values self.assertIsInstance(self.tv.column('#0'), dict) @@ -1421,7 +1409,6 
@@ self.assertRaises(tkinter.TclError, self.tv.column, '#0', **kw) - def test_delete(self): self.assertRaises(tkinter.TclError, self.tv.delete, '#0') @@ -1445,7 +1432,6 @@ self.tv.delete(item1, item2) self.assertFalse(self.tv.get_children()) - def test_detach_reattach(self): item_id = self.tv.insert('', 'end') item2 = self.tv.insert(item_id, 'end') @@ -1487,7 +1473,6 @@ self.assertEqual(self.tv.get_children(), ()) self.assertEqual(self.tv.get_children(item_id), ()) - def test_exists(self): self.assertEqual(self.tv.exists('something'), False) self.assertEqual(self.tv.exists(''), True) @@ -1498,7 +1483,6 @@ # in the tcl interpreter since tk requires an item. self.assertRaises(tkinter.TclError, self.tv.exists, None) - def test_focus(self): # nothing is focused right now self.assertEqual(self.tv.focus(), '') @@ -1513,7 +1497,6 @@ # try focusing inexistent item self.assertRaises(tkinter.TclError, self.tv.focus, 'hi') - def test_heading(self): # check a dict is returned self.assertIsInstance(self.tv.heading('#0'), dict) @@ -1565,7 +1548,6 @@ #self.tv.heading('#0', command='I dont exist') #simulate_heading_click(5, 5) - def test_index(self): # item 'what' doesn't exist self.assertRaises(tkinter.TclError, self.tv.index, 'what') @@ -1596,7 +1578,6 @@ self.tv.delete(item1) self.assertRaises(tkinter.TclError, self.tv.index, c2) - def test_insert_item(self): # parent 'none' doesn't exist self.assertRaises(tkinter.TclError, self.tv.insert, 'none', 'end') @@ -1673,7 +1654,6 @@ self.assertRaises(tkinter.TclError, self.tv.insert, '', 'end', False) self.assertRaises(tkinter.TclError, self.tv.insert, '', 'end', '') - def test_selection(self): self.assertRaises(TypeError, self.tv.selection, 'spam') # item 'none' doesn't exist @@ -1744,7 +1724,6 @@ self.tv.selection_toggle((c1, c3)) self.assertEqual(self.tv.selection(), (c3, item2)) - def test_set(self): self.tv['columns'] = ['A', 'B'] item = self.tv.insert('', 'end', values=['a', 'b']) @@ -1777,7 +1756,6 @@ # inexistent item self.assertRaises(tkinter.TclError, self.tv.set, 'notme') - def test_tag_bind(self): events = [] item1 = self.tv.insert('', 'end', tags=['call']) @@ -1810,7 +1788,6 @@ for evt in zip(events[::2], events[1::2]): self.assertEqual(evt, (1, 2)) - def test_tag_configure(self): # Just testing parameter passing for now self.assertRaises(TypeError, self.tv.tag_configure) diff -Nru python3-stdlib-extensions-3.9.7/3.9/Lib/tkinter/ttk.py python3-stdlib-extensions-3.10.8/3.9/Lib/tkinter/ttk.py --- python3-stdlib-extensions-3.9.7/3.9/Lib/tkinter/ttk.py 2021-08-30 19:02:15.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.9/Lib/tkinter/ttk.py 2022-10-11 14:48:37.000000000 +0000 @@ -1643,7 +1643,10 @@ menu.delete(0, 'end') for val in values: menu.add_radiobutton(label=val, - command=tkinter._setit(self._variable, val, self._callback), + command=( + None if self._callback is None + else lambda val=val: self._callback(val) + ), variable=self._variable) if default: diff -Nru python3-stdlib-extensions-3.9.7/3.9/Modules/_gdbmmodule.c python3-stdlib-extensions-3.10.8/3.9/Modules/_gdbmmodule.c --- python3-stdlib-extensions-3.9.7/3.9/Modules/_gdbmmodule.c 2021-08-30 19:02:15.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.9/Modules/_gdbmmodule.c 2022-10-11 14:48:37.000000000 +0000 @@ -433,7 +433,7 @@ to create a list in memory that contains them all: k = db.firstkey() - while k != None: + while k is not None: print(k) k = db.nextkey(k) [clinic start generated code]*/ @@ -441,7 +441,7 @@ static PyObject * _gdbm_gdbm_nextkey_impl(dbmobject *self, 
const char *key, Py_ssize_clean_t key_length) -/*[clinic end generated code: output=192ab892de6eb2f6 input=1f1606943614e36f]*/ +/*[clinic end generated code: output=192ab892de6eb2f6 input=b7b0949c520d730c]*/ { PyObject *v; datum dbm_key, nextkey; diff -Nru python3-stdlib-extensions-3.9.7/3.9/Modules/_tkinter.c python3-stdlib-extensions-3.10.8/3.9/Modules/_tkinter.c --- python3-stdlib-extensions-3.9.7/3.9/Modules/_tkinter.c 2021-08-30 19:02:15.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.9/Modules/_tkinter.c 2022-10-11 14:48:37.000000000 +0000 @@ -936,7 +936,7 @@ Py_INCREF(self->string); return self->string; } - /* XXX Could chache result if it is non-ASCII. */ + /* XXX Could cache result if it is non-ASCII. */ return unicodeFromTclObj(self->value); } diff -Nru python3-stdlib-extensions-3.9.7/3.9/Modules/clinic/_gdbmmodule.c.h python3-stdlib-extensions-3.10.8/3.9/Modules/clinic/_gdbmmodule.c.h --- python3-stdlib-extensions-3.9.7/3.9/Modules/clinic/_gdbmmodule.c.h 2021-08-30 19:02:15.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/3.9/Modules/clinic/_gdbmmodule.c.h 2022-10-11 14:48:37.000000000 +0000 @@ -139,7 +139,7 @@ "to create a list in memory that contains them all:\n" "\n" " k = db.firstkey()\n" -" while k != None:\n" +" while k is not None:\n" " print(k)\n" " k = db.nextkey(k)"); @@ -298,4 +298,4 @@ exit: return return_value; } -/*[clinic end generated code: output=2766471b2fa1a816 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=f48d6e8d4c8a3465 input=a9049054013a1b77]*/ diff -Nru python3-stdlib-extensions-3.9.7/debian/changelog python3-stdlib-extensions-3.10.8/debian/changelog --- python3-stdlib-extensions-3.9.7/debian/changelog 2021-09-02 13:22:20.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/debian/changelog 2022-11-02 15:40:58.000000000 +0000 @@ -1,27 +1,142 @@ +python3-stdlib-extensions (3.10.8-1~22.04) jammy-proposed; urgency=medium + + * SRU: LP: #1995504: Backport the 3.10.8 release to 22.04 LTS. + + -- Matthias Klose Wed, 02 Nov 2022 16:40:58 +0100 + +python3-stdlib-extensions (3.10.8-1) unstable; urgency=medium + + * Update 3.10 extensions and modules to the 3.10.8 release. + * Update 3.11 extensions and modules to 3.11.0 release. + * Remove bytecode files for old 3.9 version. Closes: #1017445. + * Bump standards version. + + -- Matthias Klose Sun, 30 Oct 2022 09:24:07 +0100 + +python3-stdlib-extensions (3.10.7-1) unstable; urgency=medium + + * Update 3.10 extensions and modules to the 3.10.7 release. + * Update 3.11 extensions and modules to 3.11.0 rc2. + * Update 3.9 extensions and modules to the 3.9.14 release. + + -- Matthias Klose Mon, 12 Sep 2022 18:49:43 +0200 + +python3-stdlib-extensions (3.10.6-1) unstable; urgency=medium + + * Update 3.10 extensions and modules to the 3.10.6 release. + * Update 3.11 extensions and modules to 3.11.0 rc1. + * Remove python3.9 from build dependencies and don't build for 3.9. + + -- Matthias Klose Wed, 10 Aug 2022 13:25:31 +0200 + +python3-stdlib-extensions (3.10.5-2) unstable; urgency=medium + + * Update 3.11 extensions and modules to the 3.11.0 beta4. Closes: #1015082. + + -- Matthias Klose Sun, 17 Jul 2022 12:00:07 +0200 + +python3-stdlib-extensions (3.10.5-1) unstable; urgency=medium + + * Update 3.9 extensions and modules to the 3.9.13 release. + * Update 3.10 extensions and modules to the 3.10.5 release. + * Add extensions for python3.11 3.11.0 beta3. 
+ + -- Matthias Klose Thu, 09 Jun 2022 13:32:17 +0200 + +python3-stdlib-extensions (3.9.12-1) unstable; urgency=medium + + * Update 3.9 extensions and modules to the 3.9.11 release. + * Update 3.10 extensions and modules to the 3.10.3 release. + + -- Matthias Klose Thu, 24 Mar 2022 14:44:02 +0100 + +python3-stdlib-extensions (3.9.11-2) unstable; urgency=medium + + [ Stefano Rivera ] + * Change the "include" and "platinclude" paths in the "posix_local" + scheme to refer to the location of Python's headers, not the install + location for modules' C headers. Closes: #1007966 + + -- Matthias Klose Thu, 24 Mar 2022 13:20:28 +0100 + +python3-stdlib-extensions (3.9.11-1) unstable; urgency=medium + + * Update 3.9 extensions and modules to the 3.9.11 release. + * Update 3.10 extensions and modules to the 3.10.3 release. + + -- Matthias Klose Wed, 16 Mar 2022 18:49:40 +0100 + +python3-stdlib-extensions (3.9.10-2) unstable; urgency=medium + + [ Stefano Rivera ] + * 3.10: Re-add accidentally dropped /local/ to paths in distutils unix_local + scheme. + * Use the same is_virtual_environment() proposed in PEP 668 across the site, + sysconfig, and distutils modules. Stop checking for PYTHONUSERBASE or + VIRTUAL_ENV environment variables. + + [ Matthias Klose ] + * Update python3-lib2to3 package description. Closes: #983431. + + -- Matthias Klose Mon, 14 Mar 2022 06:23:49 +0100 + +python3-stdlib-extensions (3.9.10-1) unstable; urgency=medium + + * Update 3.9 extensions and modules to the 3.9.10 release. + * Update 3.10 extensions and modules to the 3.10.2 release. + + -- Matthias Klose Mon, 17 Jan 2022 17:23:50 +0100 + +python3-stdlib-extensions (3.9.9-3) unstable; urgency=medium + + * Update 3.10 extensions and modules to the 3.10.1 release. + + -- Matthias Klose Tue, 07 Dec 2021 14:43:46 +0100 + +python3-stdlib-extensions (3.9.9-2) unstable; urgency=medium + + * Fix the 3.10 build. + + -- Matthias Klose Wed, 17 Nov 2021 14:53:22 +0100 + +python3-stdlib-extensions (3.9.9-1) unstable; urgency=medium + + * Update 3.9 extensions and modules to the 3.9.9 release. + * Build again the _dbm extension using libdb-dev. + * Bump standards version. + + -- Matthias Klose Wed, 17 Nov 2021 07:55:21 +0100 + +python3-stdlib-extensions (3.9.8-1) unstable; urgency=medium + + * Update 3.10 extensions and modules to the 3.10.0 release. + * Update 3.9 extensions and modules to the 3.9.8 release. + + -- Matthias Klose Thu, 11 Nov 2021 14:40:07 +0100 + python3-stdlib-extensions (3.9.7-1) unstable; urgency=medium * Update 3.10 extensions and modules to the 3.9.10 release candidate 1. * Update 3.9 extensions and modules to the 3.9.7 release. - * Update 3.8 extensions and modules to the 3.8.12 release. * Refresh patches. - -- Matthias Klose Thu, 02 Sep 2021 15:22:20 +0200 + -- Matthias Klose Thu, 02 Sep 2021 17:21:26 +0200 -python3-stdlib-extensions (3.9.5-0ubuntu3) impish; urgency=medium +python3-stdlib-extensions (3.10.0~b1-3) experimental; urgency=medium * When building 3.10 for releases newer than Debian 11 (bullseye) or Ubuntu 21.04 (hirsute), build the _dbm extension using gdbm, and include it in the python3-gdbm package. - -- Matthias Klose Thu, 06 May 2021 15:16:46 +0200 + -- Matthias Klose Thu, 06 May 2021 15:07:21 +0200 -python3-stdlib-extensions (3.9.5-0ubuntu2) impish; urgency=medium +python3-stdlib-extensions (3.10.0~b1-2) experimental; urgency=medium * Relax dependency on python3-defaults. 
- -- Matthias Klose Tue, 04 May 2021 15:47:57 +0200 + -- Matthias Klose Tue, 04 May 2021 15:53:44 +0200 -python3-stdlib-extensions (3.9.5-0ubuntu1) impish; urgency=medium +python3-stdlib-extensions (3.10.0~b1-1) experimental; urgency=medium * Update 3.10 extensions and modules to the 3.9.10 beta1 snapshot. * Update 3.9 extensions and modules to the 3.9.5 release (no changes). @@ -29,15 +144,15 @@ * python3-distutils, python3-lib2to3: Breaks the stdlib (<< 3.10.0~b1). * Refresh patches. - -- Matthias Klose Tue, 04 May 2021 07:33:35 +0200 + -- Matthias Klose Tue, 04 May 2021 12:35:41 +0200 -python3-stdlib-extensions (3.9.4-0ubuntu1) hirsute; urgency=medium +python3-stdlib-extensions (3.10.0~a7-1) experimental; urgency=medium * Update 3.10 extensions and modules to the 3.9.10 alpha7 snapshot. - * Update 3.9 extensions and modules to the 3.9.4 release (no changes). + * Update 3.9 extensions and modules to the 3.9.1 release (no changes). * Update 3.8 extensions and modules to the 3.8.7 release (no changes). - -- Matthias Klose Tue, 06 Apr 2021 15:13:31 +0200 + -- Matthias Klose Tue, 06 Apr 2021 15:13:31 +0200 python3-stdlib-extensions (3.9.2-1) unstable; urgency=medium diff -Nru python3-stdlib-extensions-3.9.7/debian/control python3-stdlib-extensions-3.10.8/debian/control --- python3-stdlib-extensions-3.9.7/debian/control 2021-09-02 13:22:20.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/debian/control 2022-10-30 08:24:07.000000000 +0000 @@ -12,18 +12,23 @@ # libpython3.8-dbg, # python3.8-dev:any, # python3.8-dbg:any, - libpython3.9-dev, - libpython3.9-dbg, - python3.9-dev:any, - python3.9-dbg:any, +# libpython3.9-dev, +# libpython3.9-dbg, +# python3.9-dev:any, +# python3.9-dbg:any, libpython3.10-dev, libpython3.10-dbg, - python3.10-dev:any (>= 3.10.0~rc1), + python3.10-dev:any (>= 3.10.1), python3.10-dbg:any, + libpython3.11-dev, + libpython3.11-dbg, + python3.11-dev:any, + python3.11-dbg:any, tk-dev, blt-dev (>= 2.4z-9), - libgdbm-dev, libgdbm-compat-dev, +# libgdbm-dev, libgdbm-compat-dev, + libgdbm-dev, libdb-dev, Build-Conflicts: tcl8.4-dev, tk8.4-dev, tcl8.5-dev, tk8.5-dev -Standards-Version: 4.5.1 +Standards-Version: 4.6.1 Vcs-Git: https://salsa.debian.org/cpython-team/python3-stdlib.git Vcs-Browser: https://salsa.debian.org/cpython-team/python3-stdlib @@ -84,7 +89,7 @@ Provides: ${python3:Provides} Breaks: libpython3.6-stdlib (<< 3.6.5~rc1-3), libpython3.7-stdlib (<< 3.7.0~b2-2), libpython3.8-stdlib (<< 3.8.0~b2-5), - libpython3.10-stdlib (<< 3.10.0~rc1), + libpython3.10-stdlib (<< 3.10.0~b1), Replaces: libpython3.6-stdlib (<< 3.6.4~rc1-2), libpython3.7-stdlib (<< 3.7.0~a3-2) Description: distutils package for Python 3.x Distutils package for Python 3.x. This package contains the distutils module @@ -100,7 +105,7 @@ libpython3.7-stdlib (<< 3.7.0~a3-3), python3.6-2to3 (<< 3.6.4-2), python3.7-2to3 (<< 3.7.0~a3-3), - libpython3.10-stdlib (<< 3.10.0~rc1), + libpython3.10-stdlib (<< 3.10.0~b1), Replaces: libpython3.6-stdlib (<< 3.6.4~rc1-2), libpython3.7-stdlib (<< 3.7.0~a3-3), python3.6-2to3 (<< 3.6.4-2), @@ -110,4 +115,4 @@ includes an extensive class library with lots of goodies for network programming, system administration, sounds and graphics. . - This package contains the lib2to3 library. + This package contains the lib2to3 library, a Python2 to Python3 converter. 
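For context on the python3-lib2to3 package description above, the short sketch below shows what the shipped converter does. It is an illustration only, not part of the packaging diff, and it uses only the standard lib2to3 refactoring API; the 2to3 command-line tool is essentially a thin wrapper around this machinery.

    # Illustration only: run the standard 2to3 fixers over a Python 2 snippet.
    from lib2to3.refactor import RefactoringTool, get_fixers_from_package

    fixers = get_fixers_from_package("lib2to3.fixes")   # all stock fixers
    tool = RefactoringTool(fixers)
    # refactor_string() expects the source text to end with a newline.
    tree = tool.refactor_string("print 'hello'\n", "<example>")
    print(tree)  # prints the converted source: print('hello')
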
diff -Nru python3-stdlib-extensions-3.9.7/debian/patches/3.10/distutils-install-layout.diff python3-stdlib-extensions-3.10.8/debian/patches/3.10/distutils-install-layout.diff --- python3-stdlib-extensions-3.9.7/debian/patches/3.10/distutils-install-layout.diff 2021-09-02 13:22:20.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/debian/patches/3.10/distutils-install-layout.diff 2022-06-09 07:49:40.000000000 +0000 @@ -1,6 +1,16 @@ -# DP: distutils: Add an option --install-layout=deb, which -# DP: - installs into $prefix/dist-packages instead of $prefix/site-packages. -# DP: - doesn't encode the python version into the egg name. +Description: Debian: Add a distutils option --install-layout=deb + This option: + - installs into $prefix/dist-packages instead of $prefix/site-packages. + - doesn't encode the python version into the egg name. + . + We install modules into dist-packages so that a local admin can build their + own cpython from source, and they won't see each others' installed modules. + This keeps Debian packaged applications working correctly, isolated from the + local cpython. + . + Customize site.py to import from Debian's dist-packages layout. + +Forwarded: not-needed --- a/3.10/Lib/distutils/command/install_egg_info.py +++ b/3.10/Lib/distutils/command/install_egg_info.py @@ -50,7 +60,16 @@ --- a/3.10/Lib/distutils/command/install.py +++ b/3.10/Lib/distutils/command/install.py -@@ -35,6 +35,30 @@ SCHEME_KEYS = ('purelib', 'platlib', 'he +@@ -10,7 +10,7 @@ + from distutils import log + from distutils.core import Command + from distutils.debug import DEBUG +-from distutils.sysconfig import get_config_vars ++from distutils.sysconfig import get_config_vars, is_virtual_environment + from distutils.errors import DistutilsPlatformError + from distutils.file_util import write_file + from distutils.util import convert_path, subst_vars, change_root +@@ -35,6 +35,30 @@ # of this information should switch to using sysconfig directly. 
INSTALL_SCHEMES = {"unix_prefix": {}, "unix_home": {}, "nt": {}} @@ -58,13 +77,13 @@ +INSTALL_SCHEMES['unix_local'] = { + 'stdlib': '{installed_base}/{platlibdir}/python{py_version_short}', + 'platstdlib': '{platbase}/{platlibdir}/python{py_version_short}', -+ 'purelib': '{base}/lib/python{py_version_short}/dist-packages', -+ 'platlib': '{platbase}/{platlibdir}/python{py_version_short}/dist-packages', ++ 'purelib': '{base}/local/lib/python{py_version_short}/dist-packages', ++ 'platlib': '{platbase}/local/{platlibdir}/python{py_version_short}/dist-packages', + 'include': '{installed_base}/include/python{py_version_short}{abiflags}', -+ 'headers': '{installed_base}/include/python{py_version_short}{abiflags}', ++ 'headers': '{base}/local/include/python{py_version_short}{abiflags}', + 'platinclude': '{installed_platbase}/include/python{py_version_short}{abiflags}', -+ 'scripts': '{base}/bin', -+ 'data': '{base}', ++ 'scripts': '{base}/local/bin', ++ 'data': '{base}/local', +} +INSTALL_SCHEMES['deb_system'] = { + 'stdlib': '{installed_base}/{platlibdir}/python{py_version_short}', @@ -105,7 +124,7 @@ + # enable custom installation, known values: deb + self.install_layout = None -+ ++ self.compile = None self.optimize = None @@ -117,7 +136,7 @@ if self.prefix is None: if self.exec_prefix is not None: raise DistutilsOptionError( -@@ -450,7 +482,26 @@ class install(Command): +@@ -450,7 +482,23 @@ self.install_base = self.prefix self.install_platbase = self.exec_prefix @@ -132,10 +151,7 @@ + "unknown value for --install-layout") + elif ((self.prefix_option and + os.path.normpath(self.prefix) != '/usr/local') -+ or sys.base_prefix != sys.prefix -+ or 'PYTHONUSERBASE' in os.environ -+ or 'VIRTUAL_ENV' in os.environ -+ or 'real_prefix' in sys.__dict__): ++ or is_virtual_environment()): + self.select_scheme("unix_prefix") + else: + if os.path.normpath(self.prefix) == '/usr/local': @@ -147,7 +163,18 @@ """Finalizes options for non-posix platforms""" --- a/3.10/Lib/distutils/sysconfig.py +++ b/3.10/Lib/distutils/sysconfig.py -@@ -320,6 +320,7 @@ def get_python_lib(plat_specific=0, stan +@@ -266,6 +266,10 @@ + compiler.shared_lib_extension = shlib_suffix + + ++def is_virtual_environment(): ++ return sys.base_prefix != sys.prefix or hasattr(sys, "real_prefix") ++ ++ + def get_python_inc(plat_specific=0, prefix=None): + """Return the directory containing installed Python header files. + +@@ -320,6 +324,7 @@ If 'prefix' is supplied, use it instead of sys.base_prefix or sys.base_exec_prefix -- i.e., ignore 'plat_specific'. 
""" @@ -155,47 +182,12 @@ if prefix is None: if standard_lib: prefix = plat_specific and BASE_EXEC_PREFIX or BASE_PREFIX -@@ -338,6 +339,12 @@ def get_python_lib(plat_specific=0, stan +@@ -338,6 +343,8 @@ "python" + get_python_version()) if standard_lib: return libpython -+ elif (is_default_prefix and -+ 'PYTHONUSERBASE' not in os.environ and -+ 'VIRTUAL_ENV' not in os.environ and -+ 'real_prefix' not in sys.__dict__ and -+ sys.prefix == sys.base_prefix): ++ elif is_default_prefix and not is_virtual_environment(): + return os.path.join(prefix, "lib", "python3", "dist-packages") else: return os.path.join(libpython, "site-packages") elif os.name == "nt": ---- a/3.10/Lib/distutils/tests/test_bdist_dumb.py -+++ b/3.10/Lib/distutils/tests/test_bdist_dumb.py -@@ -85,7 +85,7 @@ class BuildDumbTestCase(support.TempdirM - fp.close() - - contents = sorted(filter(None, map(os.path.basename, contents))) -- wanted = ['foo-0.1-py%s.%s.egg-info' % sys.version_info[:2], 'foo.py'] -+ wanted = ['foo-0.1.egg-info', 'foo.py'] - if not sys.dont_write_bytecode: - wanted.append('foo.%s.pyc' % sys.implementation.cache_tag) - self.assertEqual(contents, sorted(wanted)) ---- a/3.10/Lib/distutils/tests/test_install.py -+++ b/3.10/Lib/distutils/tests/test_install.py -@@ -205,7 +205,7 @@ class InstallTestCase(support.TempdirMan - found = [os.path.basename(line) for line in content.splitlines()] - expected = ['hello.py', 'hello.%s.pyc' % sys.implementation.cache_tag, - 'sayhi', -- 'UNKNOWN-0.0.0-py%s.%s.egg-info' % sys.version_info[:2]] -+ 'UNKNOWN-0.0.0.egg-info'] - self.assertEqual(found, expected) - - def test_record_extensions(self): -@@ -238,7 +238,7 @@ class InstallTestCase(support.TempdirMan - - found = [os.path.basename(line) for line in content.splitlines()] - expected = [_make_ext_name('xx'), -- 'UNKNOWN-0.0.0-py%s.%s.egg-info' % sys.version_info[:2]] -+ 'UNKNOWN-0.0.0.egg-info'] - self.assertEqual(found, expected) - - def test_debug_mode(self): diff -Nru python3-stdlib-extensions-3.9.7/debian/patches/3.10/distutils-sysconfig.diff python3-stdlib-extensions-3.10.8/debian/patches/3.10/distutils-sysconfig.diff --- python3-stdlib-extensions-3.9.7/debian/patches/3.10/distutils-sysconfig.diff 2021-09-02 13:22:20.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/debian/patches/3.10/distutils-sysconfig.diff 2021-05-04 05:59:26.000000000 +0000 @@ -4,7 +4,7 @@ --- a/3.10/Lib/distutils/sysconfig.py +++ b/3.10/Lib/distutils/sysconfig.py -@@ -216,9 +216,11 @@ def customize_compiler(compiler): +@@ -113,9 +113,11 @@ def customize_compiler(compiler): _osx_support.customize_compiler(_config_vars) _config_vars['CUSTOMIZED_OSX_COMPILER'] = 'True' @@ -18,7 +18,7 @@ if 'CC' in os.environ: newcc = os.environ['CC'] -@@ -239,13 +241,22 @@ def customize_compiler(compiler): +@@ -136,13 +138,22 @@ def customize_compiler(compiler): cpp = cc + " -E" # not always if 'LDFLAGS' in os.environ: ldshared = ldshared + ' ' + os.environ['LDFLAGS'] diff -Nru python3-stdlib-extensions-3.9.7/debian/patches/3.10/mangle-fstack-protector.diff python3-stdlib-extensions-3.10.8/debian/patches/3.10/mangle-fstack-protector.diff --- python3-stdlib-extensions-3.9.7/debian/patches/3.10/mangle-fstack-protector.diff 2021-09-02 13:22:20.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/debian/patches/3.10/mangle-fstack-protector.diff 2021-05-04 05:44:05.000000000 +0000 @@ -11,7 +11,7 @@ from functools import partial -@@ -233,6 +234,10 @@ def customize_compiler(compiler): +@@ -130,6 +131,10 @@ def customize_compiler(compiler): cc = newcc if 'CXX' in 
os.environ: cxx = os.environ['CXX'] diff -Nru python3-stdlib-extensions-3.9.7/debian/patches/3.10/multiarch-extname.diff python3-stdlib-extensions-3.10.8/debian/patches/3.10/multiarch-extname.diff --- python3-stdlib-extensions-3.9.7/debian/patches/3.10/multiarch-extname.diff 2021-09-02 13:22:20.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/debian/patches/3.10/multiarch-extname.diff 2022-06-09 07:49:40.000000000 +0000 @@ -67,15 +67,15 @@ self.warn("'%s' does not exist -- no Python modules to install" % --- a/3.10/Lib/distutils/command/install.py +++ b/3.10/Lib/distutils/command/install.py -@@ -220,6 +220,7 @@ class install(Command): +@@ -213,6 +213,7 @@ class install(Command): # enable custom installation, known values: deb self.install_layout = None + self.multiarch = None - + self.compile = None self.optimize = None -@@ -484,6 +485,8 @@ class install(Command): +@@ -474,6 +475,8 @@ class install(Command): self.install_platbase = self.exec_prefix if self.install_layout: if self.install_layout.lower() in ['deb']: diff -Nru python3-stdlib-extensions-3.9.7/debian/patches/3.10/multiarch.diff python3-stdlib-extensions-3.10.8/debian/patches/3.10/multiarch.diff --- python3-stdlib-extensions-3.9.7/debian/patches/3.10/multiarch.diff 2021-09-02 13:22:20.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/debian/patches/3.10/multiarch.diff 2021-05-04 05:59:26.000000000 +0000 @@ -1,6 +1,6 @@ --- a/3.10/Lib/distutils/sysconfig.py +++ b/3.10/Lib/distutils/sysconfig.py -@@ -303,6 +303,9 @@ def get_python_inc(plat_specific=0, pref +@@ -200,6 +200,9 @@ def get_python_inc(plat_specific=0, pref incdir = os.path.join(get_config_var('srcdir'), 'Include') return os.path.normpath(incdir) python_dir = 'python' + get_python_version() + build_flags diff -Nru python3-stdlib-extensions-3.9.7/debian/patches/3.11/disable-some-tests.diff python3-stdlib-extensions-3.10.8/debian/patches/3.11/disable-some-tests.diff --- python3-stdlib-extensions-3.9.7/debian/patches/3.11/disable-some-tests.diff 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/debian/patches/3.11/disable-some-tests.diff 2022-06-09 07:58:35.000000000 +0000 @@ -0,0 +1,12 @@ +# DP: Disable some failing tests we are not interested in + +--- a/3.11/Lib/distutils/tests/test_build_ext.py ++++ b/3.11/Lib/distutils/tests/test_build_ext.py +@@ -115,6 +115,7 @@ class BuildExtTestCase(TempdirManager, + """) + assert_python_ok('-c', code) + ++ @unittest.skip('Skipping failing Solaris test') + def test_solaris_enable_shared(self): + dist = Distribution({'name': 'xx'}) + cmd = self.build_ext(dist) diff -Nru python3-stdlib-extensions-3.9.7/debian/patches/3.11/distutils-install-layout.diff python3-stdlib-extensions-3.10.8/debian/patches/3.11/distutils-install-layout.diff --- python3-stdlib-extensions-3.9.7/debian/patches/3.11/distutils-install-layout.diff 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/debian/patches/3.11/distutils-install-layout.diff 2022-06-09 07:58:35.000000000 +0000 @@ -0,0 +1,193 @@ +Description: Debian: Add a distutils option --install-layout=deb + This option: + - installs into $prefix/dist-packages instead of $prefix/site-packages. + - doesn't encode the python version into the egg name. + . + We install modules into dist-packages so that a local admin can build their + own cpython from source, and they won't see each others' installed modules. + This keeps Debian packaged applications working correctly, isolated from the + local cpython. + . 
+ Customize site.py to import from Debian's dist-packages layout. + +Forwarded: not-needed + +--- a/3.11/Lib/distutils/command/install_egg_info.py ++++ b/3.11/Lib/distutils/command/install_egg_info.py +@@ -14,18 +14,38 @@ class install_egg_info(Command): + description = "Install package's PKG-INFO metadata as an .egg-info file" + user_options = [ + ('install-dir=', 'd', "directory to install to"), ++ ('install-layout', None, "custom installation layout"), + ] + + def initialize_options(self): + self.install_dir = None ++ self.install_layout = None ++ self.prefix_option = None + + def finalize_options(self): + self.set_undefined_options('install_lib',('install_dir','install_dir')) +- basename = "%s-%s-py%d.%d.egg-info" % ( +- to_filename(safe_name(self.distribution.get_name())), +- to_filename(safe_version(self.distribution.get_version())), +- *sys.version_info[:2] +- ) ++ self.set_undefined_options('install',('install_layout','install_layout')) ++ self.set_undefined_options('install',('prefix_option','prefix_option')) ++ if self.install_layout: ++ if not self.install_layout.lower() in ['deb', 'unix']: ++ raise DistutilsOptionError( ++ "unknown value for --install-layout") ++ no_pyver = (self.install_layout.lower() == 'deb') ++ elif self.prefix_option: ++ no_pyver = False ++ else: ++ no_pyver = True ++ if no_pyver: ++ basename = "%s-%s.egg-info" % ( ++ to_filename(safe_name(self.distribution.get_name())), ++ to_filename(safe_version(self.distribution.get_version())) ++ ) ++ else: ++ basename = "%s-%s-py%d.%d.egg-info" % ( ++ to_filename(safe_name(self.distribution.get_name())), ++ to_filename(safe_version(self.distribution.get_version())), ++ *sys.version_info[:2] ++ ) + self.target = os.path.join(self.install_dir, basename) + self.outputs = [self.target] + +--- a/3.11/Lib/distutils/command/install.py ++++ b/3.11/Lib/distutils/command/install.py +@@ -10,7 +10,7 @@ + from distutils import log + from distutils.core import Command + from distutils.debug import DEBUG +-from distutils.sysconfig import get_config_vars ++from distutils.sysconfig import get_config_vars, is_virtual_environment + from distutils.errors import DistutilsPlatformError + from distutils.file_util import write_file + from distutils.util import convert_path, subst_vars, change_root +@@ -35,6 +35,30 @@ + # of this information should switch to using sysconfig directly. 
+ INSTALL_SCHEMES = {"unix_prefix": {}, "unix_home": {}, "nt": {}} + ++# add the Debian install schemes here until they are added to sysconfig ++INSTALL_SCHEMES['unix_local'] = { ++ 'stdlib': '{installed_base}/{platlibdir}/python{py_version_short}', ++ 'platstdlib': '{platbase}/{platlibdir}/python{py_version_short}', ++ 'purelib': '{base}/local/lib/python{py_version_short}/dist-packages', ++ 'platlib': '{platbase}/local/{platlibdir}/python{py_version_short}/dist-packages', ++ 'include': '{installed_base}/include/python{py_version_short}{abiflags}', ++ 'headers': '{base}/local/include/python{py_version_short}{abiflags}', ++ 'platinclude': '{installed_platbase}/include/python{py_version_short}{abiflags}', ++ 'scripts': '{base}/local/bin', ++ 'data': '{base}/local', ++} ++INSTALL_SCHEMES['deb_system'] = { ++ 'stdlib': '{installed_base}/{platlibdir}/python{py_version_short}', ++ 'platstdlib': '{platbase}/{platlibdir}/python{py_version_short}', ++ 'purelib': '{base}/lib/python3/dist-packages', ++ 'platlib': '{platbase}/{platlibdir}/python3/dist-packages', ++ 'include': '{installed_base}/include/python{py_version_short}{abiflags}', ++ 'headers': '{installed_base}/include/python{py_version_short}{abiflags}', ++ 'platinclude': '{installed_platbase}/include/python{py_version_short}{abiflags}', ++ 'scripts': '{base}/bin', ++ 'data': '{base}', ++} ++ + # Copy from sysconfig._INSTALL_SCHEMES + for key in SCHEME_KEYS: + for distutils_scheme_name, sys_scheme_name in ( +@@ -148,6 +172,9 @@ class install(Command): + + ('record=', None, + "filename in which to record list of installed files"), ++ ++ ('install-layout=', None, ++ "installation layout to choose (known values: deb, unix)"), + ] + + boolean_options = ['compile', 'force', 'skip-build'] +@@ -168,6 +195,7 @@ class install(Command): + self.exec_prefix = None + self.home = None + self.user = 0 ++ self.prefix_option = None + + # These select only the installation base; it's up to the user to + # specify the installation scheme (currently, that means supplying +@@ -190,6 +218,9 @@ class install(Command): + self.install_userbase = USER_BASE + self.install_usersite = USER_SITE + ++ # enable custom installation, known values: deb ++ self.install_layout = None ++ + self.compile = None + self.optimize = None + +@@ -436,6 +467,7 @@ class install(Command): + self.install_base = self.install_platbase = self.home + self.select_scheme("unix_home") + else: ++ self.prefix_option = self.prefix + if self.prefix is None: + if self.exec_prefix is not None: + raise DistutilsOptionError( +@@ -450,7 +482,23 @@ + + self.install_base = self.prefix + self.install_platbase = self.exec_prefix +- self.select_scheme("unix_prefix") ++ if self.install_layout: ++ if self.install_layout.lower() in ['deb']: ++ self.select_scheme("deb_system") ++ elif self.install_layout.lower() in ['unix']: ++ self.select_scheme("unix_prefix") ++ else: ++ raise DistutilsOptionError( ++ "unknown value for --install-layout") ++ elif ((self.prefix_option and ++ os.path.normpath(self.prefix) != '/usr/local') ++ or is_virtual_environment()): ++ self.select_scheme("unix_prefix") ++ else: ++ if os.path.normpath(self.prefix) == '/usr/local': ++ self.prefix = self.exec_prefix = '/usr' ++ self.install_base = self.install_platbase = '/usr' ++ self.select_scheme("unix_local") + + def finalize_other(self): + """Finalizes options for non-posix platforms""" +--- a/3.11/Lib/distutils/sysconfig.py ++++ b/3.11/Lib/distutils/sysconfig.py +@@ -266,6 +266,10 @@ + compiler.shared_lib_extension = shlib_suffix + + ++def 
is_virtual_environment(): ++ return sys.base_prefix != sys.prefix or hasattr(sys, "real_prefix") ++ ++ + def get_python_inc(plat_specific=0, prefix=None): + """Return the directory containing installed Python header files. + +@@ -320,6 +324,7 @@ + If 'prefix' is supplied, use it instead of sys.base_prefix or + sys.base_exec_prefix -- i.e., ignore 'plat_specific'. + """ ++ is_default_prefix = not prefix or os.path.normpath(prefix) in ('/usr', '/usr/local') + if prefix is None: + if standard_lib: + prefix = plat_specific and BASE_EXEC_PREFIX or BASE_PREFIX +@@ -338,6 +343,8 @@ + "python" + get_python_version()) + if standard_lib: + return libpython ++ elif is_default_prefix and not is_virtual_environment(): ++ return os.path.join(prefix, "lib", "python3", "dist-packages") + else: + return os.path.join(libpython, "site-packages") + elif os.name == "nt": diff -Nru python3-stdlib-extensions-3.9.7/debian/patches/3.11/distutils-link.diff python3-stdlib-extensions-3.10.8/debian/patches/3.11/distutils-link.diff --- python3-stdlib-extensions-3.9.7/debian/patches/3.11/distutils-link.diff 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/debian/patches/3.11/distutils-link.diff 2022-06-09 07:58:35.000000000 +0000 @@ -0,0 +1,22 @@ +# DP: Don't add standard library dirs to library_dirs and runtime_library_dirs. + +--- a/3.11/Lib/distutils/unixccompiler.py ++++ b/3.11/Lib/distutils/unixccompiler.py +@@ -155,6 +155,17 @@ class UnixCCompiler(CCompiler): + runtime_library_dirs) + libraries, library_dirs, runtime_library_dirs = fixed_args + ++ # filter out standard library paths, which are not explicitely needed ++ # for linking ++ system_libdirs = ['/lib', '/lib64', '/usr/lib', '/usr/lib64'] ++ multiarch = sysconfig.get_config_var("MULTIARCH") ++ if multiarch: ++ system_libdirs.extend(['/lib/%s' % multiarch, '/usr/lib/%s' % multiarch]) ++ library_dirs = [dir for dir in library_dirs ++ if not dir in system_libdirs] ++ runtime_library_dirs = [dir for dir in runtime_library_dirs ++ if not dir in system_libdirs] ++ + lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs, + libraries) + if not isinstance(output_dir, (str, type(None))): diff -Nru python3-stdlib-extensions-3.9.7/debian/patches/3.11/distutils-sysconfig.diff python3-stdlib-extensions-3.10.8/debian/patches/3.11/distutils-sysconfig.diff --- python3-stdlib-extensions-3.9.7/debian/patches/3.11/distutils-sysconfig.diff 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/debian/patches/3.11/distutils-sysconfig.diff 2022-06-09 07:58:35.000000000 +0000 @@ -0,0 +1,43 @@ +# DP: Get CONFIGURE_CFLAGS, CONFIGURE_CPPFLAGS, CONFIGURE_LDFLAGS from +# DP: the python build, when CFLAGS, CPPFLAGS, LDSHARED) are not set +# DP: in the environment. 
+ +--- a/3.11/Lib/distutils/sysconfig.py ++++ b/3.11/Lib/distutils/sysconfig.py +@@ -113,9 +113,11 @@ def customize_compiler(compiler): + _osx_support.customize_compiler(_config_vars) + _config_vars['CUSTOMIZED_OSX_COMPILER'] = 'True' + +- (cc, cxx, cflags, ccshared, ldshared, shlib_suffix, ar, ar_flags) = \ ++ (cc, cxx, cflags, ccshared, ldshared, shlib_suffix, ar, ar_flags, ++ configure_cppflags, configure_cflags, configure_ldflags) = \ + get_config_vars('CC', 'CXX', 'CFLAGS', +- 'CCSHARED', 'LDSHARED', 'SHLIB_SUFFIX', 'AR', 'ARFLAGS') ++ 'CCSHARED', 'LDSHARED', 'SHLIB_SUFFIX', 'AR', 'ARFLAGS', ++ 'CONFIGURE_CPPFLAGS', 'CONFIGURE_CFLAGS', 'CONFIGURE_LDFLAGS') + + if 'CC' in os.environ: + newcc = os.environ['CC'] +@@ -136,13 +138,22 @@ def customize_compiler(compiler): + cpp = cc + " -E" # not always + if 'LDFLAGS' in os.environ: + ldshared = ldshared + ' ' + os.environ['LDFLAGS'] ++ elif configure_ldflags: ++ ldshared = ldshared + ' ' + configure_ldflags + if 'CFLAGS' in os.environ: + cflags = cflags + ' ' + os.environ['CFLAGS'] + ldshared = ldshared + ' ' + os.environ['CFLAGS'] ++ elif configure_cflags: ++ cflags = cflags + ' ' + configure_cflags ++ ldshared = ldshared + ' ' + configure_cflags + if 'CPPFLAGS' in os.environ: + cpp = cpp + ' ' + os.environ['CPPFLAGS'] + cflags = cflags + ' ' + os.environ['CPPFLAGS'] + ldshared = ldshared + ' ' + os.environ['CPPFLAGS'] ++ elif configure_cppflags: ++ cpp = cpp + ' ' + configure_cppflags ++ cflags = cflags + ' ' + configure_cppflags ++ ldshared = ldshared + ' ' + configure_cppflags + if 'AR' in os.environ: + ar = os.environ['AR'] + if 'ARFLAGS' in os.environ: diff -Nru python3-stdlib-extensions-3.9.7/debian/patches/3.11/ext-no-libpython-link.diff python3-stdlib-extensions-3.10.8/debian/patches/3.11/ext-no-libpython-link.diff --- python3-stdlib-extensions-3.9.7/debian/patches/3.11/ext-no-libpython-link.diff 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/debian/patches/3.11/ext-no-libpython-link.diff 2022-06-09 07:58:35.000000000 +0000 @@ -0,0 +1,22 @@ +# DP: Don't link extensions with the shared libpython library. + +--- a/3.11/Lib/distutils/command/build_ext.py ++++ b/3.11/Lib/distutils/command/build_ext.py +@@ -231,7 +231,7 @@ class build_ext(Command): + # For building extensions with a shared Python library, + # Python's library directory must be appended to library_dirs + # See Issues: #1600860, #4366 +- if (sysconfig.get_config_var('Py_ENABLE_SHARED')): ++ if False and (sysconfig.get_config_var('Py_ENABLE_SHARED')): + if not sysconfig.python_build: + # building third party extensions + self.library_dirs.append(sysconfig.get_config_var('LIBDIR')) +@@ -734,7 +734,7 @@ class build_ext(Command): + # shared libraries are resolved at link time. 
+             from distutils.sysconfig import get_config_var
+             link_libpython = False
+-            if get_config_var('Py_ENABLE_SHARED'):
++            if False and get_config_var('Py_ENABLE_SHARED'):
+                 # A native build on an Android device or on Cygwin
+                 if hasattr(sys, 'getandroidapilevel'):
+                     link_libpython = True
diff -Nru python3-stdlib-extensions-3.9.7/debian/patches/3.11/lib2to3-no-pickled-grammar.diff python3-stdlib-extensions-3.10.8/debian/patches/3.11/lib2to3-no-pickled-grammar.diff
--- python3-stdlib-extensions-3.9.7/debian/patches/3.11/lib2to3-no-pickled-grammar.diff 1970-01-01 00:00:00.000000000 +0000
+++ python3-stdlib-extensions-3.10.8/debian/patches/3.11/lib2to3-no-pickled-grammar.diff 2022-06-09 07:58:35.000000000 +0000
@@ -0,0 +1,14 @@
+--- a/3.11/Lib/lib2to3/pgen2/driver.py
++++ b/3.11/Lib/lib2to3/pgen2/driver.py
+@@ -119,7 +119,10 @@ def load_grammar(gt="Grammar.txt", gp=No
+     if force or not _newer(gp, gt):
+         logger.info("Generating grammar tables from %s", gt)
+         g = pgen.generate_grammar(gt)
+-        if save:
++        # the pickle files mismatch, when built on different architectures.
++        # don't save these for now. An alternative solution might be to
++        # include the multiarch triplet into the file name
++        if False:
+             logger.info("Writing grammar tables to %s", gp)
+             try:
+                 g.dump(gp)
diff -Nru python3-stdlib-extensions-3.9.7/debian/patches/3.11/mangle-fstack-protector.diff python3-stdlib-extensions-3.10.8/debian/patches/3.11/mangle-fstack-protector.diff
--- python3-stdlib-extensions-3.9.7/debian/patches/3.11/mangle-fstack-protector.diff 1970-01-01 00:00:00.000000000 +0000
+++ python3-stdlib-extensions-3.10.8/debian/patches/3.11/mangle-fstack-protector.diff 2022-06-09 07:58:35.000000000 +0000
@@ -0,0 +1,24 @@
+# DP: When using GCC versions older than 4.9, automagically mangle
+# DP: -fstack-protector-strong to -fstack-protector
+
+--- a/3.11/Lib/distutils/sysconfig.py
++++ b/3.11/Lib/distutils/sysconfig.py
+@@ -14,6 +14,7 @@ import os
+ import re
+ import sys
+ import warnings
++import fnmatch
+ 
+ from functools import partial
+ 
+@@ -130,6 +131,10 @@ def customize_compiler(compiler):
+             cc = newcc
+         if 'CXX' in os.environ:
+             cxx = os.environ['CXX']
++        if fnmatch.filter([cc, cxx], '*-4.[0-8]'):
++            configure_cflags = configure_cflags.replace('-fstack-protector-strong', '-fstack-protector')
++            ldshared = ldshared.replace('-fstack-protector-strong', '-fstack-protector')
++            cflags = cflags.replace('-fstack-protector-strong', '-fstack-protector')
+         if 'LDSHARED' in os.environ:
+             ldshared = os.environ['LDSHARED']
+         if 'CPP' in os.environ:
diff -Nru python3-stdlib-extensions-3.9.7/debian/patches/3.11/multiarch-extname.diff python3-stdlib-extensions-3.10.8/debian/patches/3.11/multiarch-extname.diff
--- python3-stdlib-extensions-3.9.7/debian/patches/3.11/multiarch-extname.diff 1970-01-01 00:00:00.000000000 +0000
+++ python3-stdlib-extensions-3.10.8/debian/patches/3.11/multiarch-extname.diff 2022-06-09 07:58:35.000000000 +0000
@@ -0,0 +1,86 @@
+# DP: Make sure to rename extensions to a tag including the MULTIARCH name
+
+this patch can be dropped for python3.5 final, if the upstream change is kept.
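The renaming described above is done in dir_util.copy_tree() (first hunk below): an extension whose EXT_SUFFIX does not already carry the multiarch tuple gets it spliced in just before the trailing '.so'. A rough, standalone sketch of that rule (the module name 'spam' and the values in the comments are examples, and the explicit MULTIARCH guard is added here for portability):

    import sysconfig

    ext_suffix = sysconfig.get_config_var('EXT_SUFFIX')    # e.g. '.cpython-311.so'
    multiarch = sysconfig.get_config_var('MULTIARCH')      # e.g. 'x86_64-linux-gnu'

    if multiarch and not ext_suffix.endswith(multiarch + ext_suffix[-3:]):
        # old-style suffix: insert the multiarch tuple in front of '.so'
        new_suffix = "%s-%s%s" % (ext_suffix[:-3], multiarch, ext_suffix[-3:])
    else:
        new_suffix = None                                   # already tagged, nothing to rename

    name = 'spam' + ext_suffix
    if new_suffix and name.endswith(ext_suffix):
        name = name[:-len(ext_suffix)] + new_suffix
    print(name)                                             # e.g. spam.cpython-311-x86_64-linux-gnu.so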
+ +--- a/3.11/Lib/distutils/dir_util.py ++++ b/3.11/Lib/distutils/dir_util.py +@@ -96,6 +96,9 @@ def create_tree(base_dir, files, mode=0o + for dir in sorted(need_dir): + mkpath(dir, mode, verbose=verbose, dry_run=dry_run) + ++import sysconfig ++_multiarch = None ++ + def copy_tree(src, dst, preserve_mode=1, preserve_times=1, + preserve_symlinks=0, update=0, verbose=1, dry_run=0): + """Copy an entire directory tree 'src' to a new location 'dst'. +@@ -131,6 +134,13 @@ def copy_tree(src, dst, preserve_mode=1, + raise DistutilsFileError( + "error listing files in '%s': %s" % (src, e.strerror)) + ++ ext_suffix = sysconfig.get_config_var ('EXT_SUFFIX') ++ _multiarch = sysconfig.get_config_var ('MULTIARCH') ++ if ext_suffix.endswith(_multiarch + ext_suffix[-3:]): ++ new_suffix = None ++ else: ++ new_suffix = "%s-%s%s" % (ext_suffix[:-3], _multiarch, ext_suffix[-3:]) ++ + if not dry_run: + mkpath(dst, verbose=verbose) + +@@ -139,6 +149,9 @@ def copy_tree(src, dst, preserve_mode=1, + for n in names: + src_name = os.path.join(src, n) + dst_name = os.path.join(dst, n) ++ if new_suffix and _multiarch and n.endswith(ext_suffix) and not n.endswith(new_suffix): ++ dst_name = os.path.join(dst, n.replace(ext_suffix, new_suffix)) ++ log.info("renaming extension %s -> %s", n, n.replace(ext_suffix, new_suffix)) + + if n.startswith('.nfs'): + # skip NFS rename files +--- a/3.11/Lib/distutils/command/install_lib.py ++++ b/3.11/Lib/distutils/command/install_lib.py +@@ -56,6 +56,7 @@ class install_lib(Command): + self.compile = None + self.optimize = None + self.skip_build = None ++ self.multiarch = None # if we should rename the extensions + + def finalize_options(self): + # Get all the information we need to install pure Python modules +@@ -68,6 +69,7 @@ class install_lib(Command): + ('compile', 'compile'), + ('optimize', 'optimize'), + ('skip_build', 'skip_build'), ++ ('multiarch', 'multiarch'), + ) + + if self.compile is None: +@@ -108,6 +110,8 @@ class install_lib(Command): + + def install(self): + if os.path.isdir(self.build_dir): ++ import distutils.dir_util ++ distutils.dir_util._multiarch = self.multiarch + outfiles = self.copy_tree(self.build_dir, self.install_dir) + else: + self.warn("'%s' does not exist -- no Python modules to install" % +--- a/3.11/Lib/distutils/command/install.py ++++ b/3.11/Lib/distutils/command/install.py +@@ -213,6 +213,7 @@ class install(Command): + + # enable custom installation, known values: deb + self.install_layout = None ++ self.multiarch = None + + self.compile = None + self.optimize = None +@@ -474,6 +475,8 @@ class install(Command): + self.install_platbase = self.exec_prefix + if self.install_layout: + if self.install_layout.lower() in ['deb']: ++ import sysconfig ++ self.multiarch = sysconfig.get_config_var('MULTIARCH') + self.select_scheme("deb_system") + elif self.install_layout.lower() in ['unix']: + self.select_scheme("unix_prefix") diff -Nru python3-stdlib-extensions-3.9.7/debian/patches/3.11/multiarch.diff python3-stdlib-extensions-3.10.8/debian/patches/3.11/multiarch.diff --- python3-stdlib-extensions-3.9.7/debian/patches/3.11/multiarch.diff 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/debian/patches/3.11/multiarch.diff 2022-06-09 07:58:35.000000000 +0000 @@ -0,0 +1,12 @@ +--- a/3.11/Lib/distutils/sysconfig.py ++++ b/3.11/Lib/distutils/sysconfig.py +@@ -200,6 +200,9 @@ def get_python_inc(plat_specific=0, pref + incdir = os.path.join(get_config_var('srcdir'), 'Include') + return os.path.normpath(incdir) + python_dir = 'python' + 
get_python_version() + build_flags ++ if not python_build and plat_specific: ++ import sysconfig ++ return sysconfig.get_path('platinclude') + return os.path.join(prefix, "include", python_dir) + elif os.name == "nt": + if python_build: diff -Nru python3-stdlib-extensions-3.9.7/debian/patches/add-dbm-module.diff python3-stdlib-extensions-3.10.8/debian/patches/add-dbm-module.diff --- python3-stdlib-extensions-3.9.7/debian/patches/add-dbm-module.diff 2021-05-06 13:17:48.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/debian/patches/add-dbm-module.diff 2022-06-09 08:24:34.000000000 +0000 @@ -825,3 +825,830 @@ # Anthony Baxter's gdbm module. GNU dbm(3) will require -lgdbm: if (True or 'gdbm' in dbm_order and self.compiler.find_library_file(lib_dirs, 'gdbm')): +--- /dev/null ++++ b/3.11/Modules/_dbmmodule.c +@@ -0,0 +1,559 @@ ++ ++/* DBM module using dictionary interface */ ++ ++ ++#define PY_SSIZE_T_CLEAN ++#include "Python.h" ++ ++#include ++#include ++#include ++ ++/* Some Linux systems install gdbm/ndbm.h, but not ndbm.h. This supports ++ * whichever configure was able to locate. ++ */ ++#if defined(HAVE_NDBM_H) ++#include ++static const char which_dbm[] = "GNU gdbm"; /* EMX port of GDBM */ ++#elif defined(HAVE_GDBM_NDBM_H) ++#include ++static const char which_dbm[] = "GNU gdbm"; ++#elif defined(HAVE_GDBM_DASH_NDBM_H) ++#include ++static const char which_dbm[] = "GNU gdbm"; ++#elif defined(HAVE_BERKDB_H) ++#include ++static const char which_dbm[] = "Berkeley DB"; ++#else ++#error "No ndbm.h available!" ++#endif ++ ++typedef struct { ++ PyTypeObject *dbm_type; ++ PyObject *dbm_error; ++} _dbm_state; ++ ++static inline _dbm_state* ++get_dbm_state(PyObject *module) ++{ ++ void *state = PyModule_GetState(module); ++ assert(state != NULL); ++ return (_dbm_state *)state; ++} ++ ++/*[clinic input] ++module _dbm ++class _dbm.dbm "dbmobject *" "&Dbmtype" ++[clinic start generated code]*/ ++/*[clinic end generated code: output=da39a3ee5e6b4b0d input=9b1aa8756d16150e]*/ ++ ++typedef struct { ++ PyObject_HEAD ++ int flags; ++ int di_size; /* -1 means recompute */ ++ DBM *di_dbm; ++} dbmobject; ++ ++#include "clinic/_dbmmodule.c.h" ++ ++#define check_dbmobject_open(v, err) \ ++ if ((v)->di_dbm == NULL) { \ ++ PyErr_SetString(err, "DBM object has already been closed"); \ ++ return NULL; \ ++ } ++ ++static PyObject * ++newdbmobject(_dbm_state *state, const char *file, int flags, int mode) ++{ ++ dbmobject *dp; ++ ++ dp = PyObject_New(dbmobject, state->dbm_type); ++ if (dp == NULL) ++ return NULL; ++ dp->di_size = -1; ++ dp->flags = flags; ++ /* See issue #19296 */ ++ if ( (dp->di_dbm = dbm_open((char *)file, flags, mode)) == 0 ) { ++ PyErr_SetFromErrnoWithFilename(state->dbm_error, file); ++ Py_DECREF(dp); ++ return NULL; ++ } ++ return (PyObject *)dp; ++} ++ ++/* Methods */ ++ ++static void ++dbm_dealloc(dbmobject *dp) ++{ ++ if (dp->di_dbm) { ++ dbm_close(dp->di_dbm); ++ } ++ PyTypeObject *tp = Py_TYPE(dp); ++ tp->tp_free(dp); ++ Py_DECREF(tp); ++} ++ ++static Py_ssize_t ++dbm_length(dbmobject *dp) ++{ ++ _dbm_state *state = PyType_GetModuleState(Py_TYPE(dp)); ++ assert(state != NULL); ++ if (dp->di_dbm == NULL) { ++ PyErr_SetString(state->dbm_error, "DBM object has already been closed"); ++ return -1; ++ } ++ if ( dp->di_size < 0 ) { ++ datum key; ++ int size; ++ ++ size = 0; ++ for ( key=dbm_firstkey(dp->di_dbm); key.dptr; ++ key = dbm_nextkey(dp->di_dbm)) ++ size++; ++ dp->di_size = size; ++ } ++ return dp->di_size; ++} ++ ++static PyObject * ++dbm_subscript(dbmobject *dp, PyObject *key) ++{ ++ 
datum drec, krec; ++ Py_ssize_t tmp_size; ++ _dbm_state *state = PyType_GetModuleState(Py_TYPE(dp)); ++ assert(state != NULL); ++ if (!PyArg_Parse(key, "s#", &krec.dptr, &tmp_size)) { ++ return NULL; ++ } ++ ++ krec.dsize = tmp_size; ++ check_dbmobject_open(dp, state->dbm_error); ++ drec = dbm_fetch(dp->di_dbm, krec); ++ if ( drec.dptr == 0 ) { ++ PyErr_SetObject(PyExc_KeyError, key); ++ return NULL; ++ } ++ if ( dbm_error(dp->di_dbm) ) { ++ dbm_clearerr(dp->di_dbm); ++ PyErr_SetString(state->dbm_error, ""); ++ return NULL; ++ } ++ return PyBytes_FromStringAndSize(drec.dptr, drec.dsize); ++} ++ ++static int ++dbm_ass_sub(dbmobject *dp, PyObject *v, PyObject *w) ++{ ++ datum krec, drec; ++ Py_ssize_t tmp_size; ++ ++ if ( !PyArg_Parse(v, "s#", &krec.dptr, &tmp_size) ) { ++ PyErr_SetString(PyExc_TypeError, ++ "dbm mappings have bytes or string keys only"); ++ return -1; ++ } ++ _dbm_state *state = PyType_GetModuleState(Py_TYPE(dp)); ++ assert(state != NULL); ++ krec.dsize = tmp_size; ++ if (dp->di_dbm == NULL) { ++ PyErr_SetString(state->dbm_error, "DBM object has already been closed"); ++ return -1; ++ } ++ dp->di_size = -1; ++ if (w == NULL) { ++ if ( dbm_delete(dp->di_dbm, krec) < 0 ) { ++ dbm_clearerr(dp->di_dbm); ++ /* we might get a failure for reasons like file corrupted, ++ but we are not able to distinguish it */ ++ if (dp->flags & O_RDWR) { ++ PyErr_SetObject(PyExc_KeyError, v); ++ } ++ else { ++ PyErr_SetString(state->dbm_error, "cannot delete item from database"); ++ } ++ return -1; ++ } ++ } else { ++ if ( !PyArg_Parse(w, "s#", &drec.dptr, &tmp_size) ) { ++ PyErr_SetString(PyExc_TypeError, ++ "dbm mappings have bytes or string elements only"); ++ return -1; ++ } ++ drec.dsize = tmp_size; ++ if ( dbm_store(dp->di_dbm, krec, drec, DBM_REPLACE) < 0 ) { ++ dbm_clearerr(dp->di_dbm); ++ PyErr_SetString(state->dbm_error, ++ "cannot add item to database"); ++ return -1; ++ } ++ } ++ if ( dbm_error(dp->di_dbm) ) { ++ dbm_clearerr(dp->di_dbm); ++ PyErr_SetString(state->dbm_error, ""); ++ return -1; ++ } ++ return 0; ++} ++ ++/*[clinic input] ++_dbm.dbm.close ++ ++Close the database. ++[clinic start generated code]*/ ++ ++static PyObject * ++_dbm_dbm_close_impl(dbmobject *self) ++/*[clinic end generated code: output=c8dc5b6709600b86 input=046db72377d51be8]*/ ++{ ++ if (self->di_dbm) { ++ dbm_close(self->di_dbm); ++ } ++ self->di_dbm = NULL; ++ Py_RETURN_NONE; ++} ++ ++/*[clinic input] ++_dbm.dbm.keys ++ ++ cls: defining_class ++ ++Return a list of all keys in the database. 
++[clinic start generated code]*/ ++ ++static PyObject * ++_dbm_dbm_keys_impl(dbmobject *self, PyTypeObject *cls) ++/*[clinic end generated code: output=f2a593b3038e5996 input=d3706a28fc051097]*/ ++{ ++ PyObject *v, *item; ++ datum key; ++ int err; ++ ++ _dbm_state *state = PyType_GetModuleState(cls); ++ assert(state != NULL); ++ check_dbmobject_open(self, state->dbm_error); ++ v = PyList_New(0); ++ if (v == NULL) { ++ return NULL; ++ } ++ for (key = dbm_firstkey(self->di_dbm); key.dptr; ++ key = dbm_nextkey(self->di_dbm)) { ++ item = PyBytes_FromStringAndSize(key.dptr, key.dsize); ++ if (item == NULL) { ++ Py_DECREF(v); ++ return NULL; ++ } ++ err = PyList_Append(v, item); ++ Py_DECREF(item); ++ if (err != 0) { ++ Py_DECREF(v); ++ return NULL; ++ } ++ } ++ return v; ++} ++ ++static int ++dbm_contains(PyObject *self, PyObject *arg) ++{ ++ dbmobject *dp = (dbmobject *)self; ++ datum key, val; ++ Py_ssize_t size; ++ ++ _dbm_state *state = PyType_GetModuleState(Py_TYPE(dp)); ++ assert(state != NULL); ++ if ((dp)->di_dbm == NULL) { ++ PyErr_SetString(state->dbm_error, ++ "DBM object has already been closed"); ++ return -1; ++ } ++ if (PyUnicode_Check(arg)) { ++ key.dptr = (char *)PyUnicode_AsUTF8AndSize(arg, &size); ++ key.dsize = size; ++ if (key.dptr == NULL) ++ return -1; ++ } ++ else if (!PyBytes_Check(arg)) { ++ PyErr_Format(PyExc_TypeError, ++ "dbm key must be bytes or string, not %.100s", ++ Py_TYPE(arg)->tp_name); ++ return -1; ++ } ++ else { ++ key.dptr = PyBytes_AS_STRING(arg); ++ key.dsize = PyBytes_GET_SIZE(arg); ++ } ++ val = dbm_fetch(dp->di_dbm, key); ++ return val.dptr != NULL; ++} ++ ++/*[clinic input] ++_dbm.dbm.get ++ cls: defining_class ++ key: str(accept={str, robuffer}, zeroes=True) ++ default: object = None ++ / ++ ++Return the value for key if present, otherwise default. ++[clinic start generated code]*/ ++ ++static PyObject * ++_dbm_dbm_get_impl(dbmobject *self, PyTypeObject *cls, const char *key, ++ Py_ssize_clean_t key_length, PyObject *default_value) ++/*[clinic end generated code: output=34851b5dc1c664dc input=66b993b8349fa8c1]*/ ++{ ++ datum dbm_key, val; ++ _dbm_state *state = PyType_GetModuleState(cls); ++ assert(state != NULL); ++ dbm_key.dptr = (char *)key; ++ dbm_key.dsize = key_length; ++ check_dbmobject_open(self, state->dbm_error); ++ val = dbm_fetch(self->di_dbm, dbm_key); ++ if (val.dptr != NULL) { ++ return PyBytes_FromStringAndSize(val.dptr, val.dsize); ++ } ++ ++ Py_INCREF(default_value); ++ return default_value; ++} ++ ++/*[clinic input] ++_dbm.dbm.setdefault ++ cls: defining_class ++ key: str(accept={str, robuffer}, zeroes=True) ++ default: object(c_default="NULL") = b'' ++ / ++ ++Return the value for key if present, otherwise default. ++ ++If key is not in the database, it is inserted with default as the value. 
++[clinic start generated code]*/ ++ ++static PyObject * ++_dbm_dbm_setdefault_impl(dbmobject *self, PyTypeObject *cls, const char *key, ++ Py_ssize_clean_t key_length, ++ PyObject *default_value) ++/*[clinic end generated code: output=d5c68fe673886767 input=126a3ff15c5f8232]*/ ++{ ++ datum dbm_key, val; ++ Py_ssize_t tmp_size; ++ _dbm_state *state = PyType_GetModuleState(cls); ++ assert(state != NULL); ++ dbm_key.dptr = (char *)key; ++ dbm_key.dsize = key_length; ++ check_dbmobject_open(self, state->dbm_error); ++ val = dbm_fetch(self->di_dbm, dbm_key); ++ if (val.dptr != NULL) { ++ return PyBytes_FromStringAndSize(val.dptr, val.dsize); ++ } ++ if (default_value == NULL) { ++ default_value = PyBytes_FromStringAndSize(NULL, 0); ++ if (default_value == NULL) { ++ return NULL; ++ } ++ val.dptr = NULL; ++ val.dsize = 0; ++ } ++ else { ++ if ( !PyArg_Parse(default_value, "s#", &val.dptr, &tmp_size) ) { ++ PyErr_SetString(PyExc_TypeError, ++ "dbm mappings have bytes or string elements only"); ++ return NULL; ++ } ++ val.dsize = tmp_size; ++ Py_INCREF(default_value); ++ } ++ if (dbm_store(self->di_dbm, dbm_key, val, DBM_INSERT) < 0) { ++ dbm_clearerr(self->di_dbm); ++ PyErr_SetString(state->dbm_error, "cannot add item to database"); ++ Py_DECREF(default_value); ++ return NULL; ++ } ++ return default_value; ++} ++ ++static PyObject * ++dbm__enter__(PyObject *self, PyObject *args) ++{ ++ Py_INCREF(self); ++ return self; ++} ++ ++static PyObject * ++dbm__exit__(PyObject *self, PyObject *args) ++{ ++ _Py_IDENTIFIER(close); ++ return _PyObject_CallMethodIdNoArgs(self, &PyId_close); ++} ++ ++static PyMethodDef dbm_methods[] = { ++ _DBM_DBM_CLOSE_METHODDEF ++ _DBM_DBM_KEYS_METHODDEF ++ _DBM_DBM_GET_METHODDEF ++ _DBM_DBM_SETDEFAULT_METHODDEF ++ {"__enter__", dbm__enter__, METH_NOARGS, NULL}, ++ {"__exit__", dbm__exit__, METH_VARARGS, NULL}, ++ {NULL, NULL} /* sentinel */ ++}; ++ ++static PyType_Slot dbmtype_spec_slots[] = { ++ {Py_tp_dealloc, dbm_dealloc}, ++ {Py_tp_methods, dbm_methods}, ++ {Py_sq_contains, dbm_contains}, ++ {Py_mp_length, dbm_length}, ++ {Py_mp_subscript, dbm_subscript}, ++ {Py_mp_ass_subscript, dbm_ass_sub}, ++ {0, 0} ++}; ++ ++ ++static PyType_Spec dbmtype_spec = { ++ .name = "_dbm.dbm", ++ .basicsize = sizeof(dbmobject), ++ // Calling PyType_GetModuleState() on a subclass is not safe. ++ // dbmtype_spec does not have Py_TPFLAGS_BASETYPE flag ++ // which prevents to create a subclass. ++ // So calling PyType_GetModuleState() in this file is always safe. ++ .flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_DISALLOW_INSTANTIATION, ++ .slots = dbmtype_spec_slots, ++}; ++ ++/* ----------------------------------------------------------------- */ ++ ++/*[clinic input] ++ ++_dbm.open as dbmopen ++ ++ filename: unicode ++ The filename to open. ++ ++ flags: str="r" ++ How to open the file. "r" for reading, "w" for writing, etc. ++ ++ mode: int(py_default="0o666") = 0o666 ++ If creating a new file, the mode bits for the new file ++ (e.g. os.O_RDWR). ++ ++ / ++ ++Return a database object. 
++ ++[clinic start generated code]*/ ++ ++static PyObject * ++dbmopen_impl(PyObject *module, PyObject *filename, const char *flags, ++ int mode) ++/*[clinic end generated code: output=9527750f5df90764 input=376a9d903a50df59]*/ ++{ ++ int iflags; ++ _dbm_state *state = get_dbm_state(module); ++ assert(state != NULL); ++ if (strcmp(flags, "r") == 0) { ++ iflags = O_RDONLY; ++ } ++ else if (strcmp(flags, "w") == 0) { ++ iflags = O_RDWR; ++ } ++ else if (strcmp(flags, "rw") == 0) { ++ /* Backward compatibility */ ++ iflags = O_RDWR|O_CREAT; ++ } ++ else if (strcmp(flags, "c") == 0) { ++ iflags = O_RDWR|O_CREAT; ++ } ++ else if (strcmp(flags, "n") == 0) { ++ iflags = O_RDWR|O_CREAT|O_TRUNC; ++ } ++ else { ++ PyErr_SetString(state->dbm_error, ++ "arg 2 to open should be 'r', 'w', 'c', or 'n'"); ++ return NULL; ++ } ++ ++ PyObject *filenamebytes = PyUnicode_EncodeFSDefault(filename); ++ if (filenamebytes == NULL) { ++ return NULL; ++ } ++ const char *name = PyBytes_AS_STRING(filenamebytes); ++ if (strlen(name) != (size_t)PyBytes_GET_SIZE(filenamebytes)) { ++ Py_DECREF(filenamebytes); ++ PyErr_SetString(PyExc_ValueError, "embedded null character"); ++ return NULL; ++ } ++ PyObject *self = newdbmobject(state, name, iflags, mode); ++ Py_DECREF(filenamebytes); ++ return self; ++} ++ ++static PyMethodDef dbmmodule_methods[] = { ++ DBMOPEN_METHODDEF ++ { 0, 0 }, ++}; ++ ++static int ++_dbm_exec(PyObject *module) ++{ ++ _dbm_state *state = get_dbm_state(module); ++ state->dbm_type = (PyTypeObject *)PyType_FromModuleAndSpec(module, ++ &dbmtype_spec, NULL); ++ if (state->dbm_type == NULL) { ++ return -1; ++ } ++ state->dbm_error = PyErr_NewException("_dbm.error", PyExc_OSError, NULL); ++ if (state->dbm_error == NULL) { ++ return -1; ++ } ++ if (PyModule_AddStringConstant(module, "library", which_dbm) < 0) { ++ return -1; ++ } ++ if (PyModule_AddType(module, (PyTypeObject *)state->dbm_error) < 0) { ++ return -1; ++ } ++ return 0; ++} ++ ++static int ++_dbm_module_traverse(PyObject *module, visitproc visit, void *arg) ++{ ++ _dbm_state *state = get_dbm_state(module); ++ Py_VISIT(state->dbm_error); ++ Py_VISIT(state->dbm_type); ++ return 0; ++} ++ ++static int ++_dbm_module_clear(PyObject *module) ++{ ++ _dbm_state *state = get_dbm_state(module); ++ Py_CLEAR(state->dbm_error); ++ Py_CLEAR(state->dbm_type); ++ return 0; ++} ++ ++static void ++_dbm_module_free(void *module) ++{ ++ _dbm_module_clear((PyObject *)module); ++} ++ ++static PyModuleDef_Slot _dbmmodule_slots[] = { ++ {Py_mod_exec, _dbm_exec}, ++ {0, NULL} ++}; ++ ++static struct PyModuleDef _dbmmodule = { ++ PyModuleDef_HEAD_INIT, ++ .m_name = "_dbm", ++ .m_size = sizeof(_dbm_state), ++ .m_methods = dbmmodule_methods, ++ .m_slots = _dbmmodule_slots, ++ .m_traverse = _dbm_module_traverse, ++ .m_clear = _dbm_module_clear, ++ .m_free = _dbm_module_free, ++}; ++ ++PyMODINIT_FUNC ++PyInit__dbm(void) ++{ ++ return PyModuleDef_Init(&_dbmmodule); ++} +--- /dev/null ++++ b/3.11/Modules/clinic/_dbmmodule.c.h +@@ -0,0 +1,190 @@ ++/*[clinic input] ++preserve ++[clinic start generated code]*/ ++ ++PyDoc_STRVAR(_dbm_dbm_close__doc__, ++"close($self, /)\n" ++"--\n" ++"\n" ++"Close the database."); ++ ++#define _DBM_DBM_CLOSE_METHODDEF \ ++ {"close", (PyCFunction)_dbm_dbm_close, METH_NOARGS, _dbm_dbm_close__doc__}, ++ ++static PyObject * ++_dbm_dbm_close_impl(dbmobject *self); ++ ++static PyObject * ++_dbm_dbm_close(dbmobject *self, PyObject *Py_UNUSED(ignored)) ++{ ++ return _dbm_dbm_close_impl(self); ++} ++ ++PyDoc_STRVAR(_dbm_dbm_keys__doc__, ++"keys($self, /)\n" 
++"--\n" ++"\n" ++"Return a list of all keys in the database."); ++ ++#define _DBM_DBM_KEYS_METHODDEF \ ++ {"keys", (PyCFunction)(void(*)(void))_dbm_dbm_keys, METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _dbm_dbm_keys__doc__}, ++ ++static PyObject * ++_dbm_dbm_keys_impl(dbmobject *self, PyTypeObject *cls); ++ ++static PyObject * ++_dbm_dbm_keys(dbmobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) ++{ ++ PyObject *return_value = NULL; ++ static const char * const _keywords[] = { NULL}; ++ static _PyArg_Parser _parser = {":keys", _keywords, 0}; ++ ++ if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser ++ )) { ++ goto exit; ++ } ++ return_value = _dbm_dbm_keys_impl(self, cls); ++ ++exit: ++ return return_value; ++} ++ ++PyDoc_STRVAR(_dbm_dbm_get__doc__, ++"get($self, key, default=None, /)\n" ++"--\n" ++"\n" ++"Return the value for key if present, otherwise default."); ++ ++#define _DBM_DBM_GET_METHODDEF \ ++ {"get", (PyCFunction)(void(*)(void))_dbm_dbm_get, METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _dbm_dbm_get__doc__}, ++ ++static PyObject * ++_dbm_dbm_get_impl(dbmobject *self, PyTypeObject *cls, const char *key, ++ Py_ssize_clean_t key_length, PyObject *default_value); ++ ++static PyObject * ++_dbm_dbm_get(dbmobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) ++{ ++ PyObject *return_value = NULL; ++ static const char * const _keywords[] = {"", "", NULL}; ++ static _PyArg_Parser _parser = {"s#|O:get", _keywords, 0}; ++ const char *key; ++ Py_ssize_clean_t key_length; ++ PyObject *default_value = Py_None; ++ ++ if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser, ++ &key, &key_length, &default_value)) { ++ goto exit; ++ } ++ return_value = _dbm_dbm_get_impl(self, cls, key, key_length, default_value); ++ ++exit: ++ return return_value; ++} ++ ++PyDoc_STRVAR(_dbm_dbm_setdefault__doc__, ++"setdefault($self, key, default=b\'\', /)\n" ++"--\n" ++"\n" ++"Return the value for key if present, otherwise default.\n" ++"\n" ++"If key is not in the database, it is inserted with default as the value."); ++ ++#define _DBM_DBM_SETDEFAULT_METHODDEF \ ++ {"setdefault", (PyCFunction)(void(*)(void))_dbm_dbm_setdefault, METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _dbm_dbm_setdefault__doc__}, ++ ++static PyObject * ++_dbm_dbm_setdefault_impl(dbmobject *self, PyTypeObject *cls, const char *key, ++ Py_ssize_clean_t key_length, ++ PyObject *default_value); ++ ++static PyObject * ++_dbm_dbm_setdefault(dbmobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) ++{ ++ PyObject *return_value = NULL; ++ static const char * const _keywords[] = {"", "", NULL}; ++ static _PyArg_Parser _parser = {"s#|O:setdefault", _keywords, 0}; ++ const char *key; ++ Py_ssize_clean_t key_length; ++ PyObject *default_value = NULL; ++ ++ if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser, ++ &key, &key_length, &default_value)) { ++ goto exit; ++ } ++ return_value = _dbm_dbm_setdefault_impl(self, cls, key, key_length, default_value); ++ ++exit: ++ return return_value; ++} ++ ++PyDoc_STRVAR(dbmopen__doc__, ++"open($module, filename, flags=\'r\', mode=0o666, /)\n" ++"--\n" ++"\n" ++"Return a database object.\n" ++"\n" ++" filename\n" ++" The filename to open.\n" ++" flags\n" ++" How to open the file. \"r\" for reading, \"w\" for writing, etc.\n" ++" mode\n" ++" If creating a new file, the mode bits for the new file\n" ++" (e.g. 
os.O_RDWR)."); ++ ++#define DBMOPEN_METHODDEF \ ++ {"open", (PyCFunction)(void(*)(void))dbmopen, METH_FASTCALL, dbmopen__doc__}, ++ ++static PyObject * ++dbmopen_impl(PyObject *module, PyObject *filename, const char *flags, ++ int mode); ++ ++static PyObject * ++dbmopen(PyObject *module, PyObject *const *args, Py_ssize_t nargs) ++{ ++ PyObject *return_value = NULL; ++ PyObject *filename; ++ const char *flags = "r"; ++ int mode = 438; ++ ++ if (!_PyArg_CheckPositional("open", nargs, 1, 3)) { ++ goto exit; ++ } ++ if (!PyUnicode_Check(args[0])) { ++ _PyArg_BadArgument("open", "argument 1", "str", args[0]); ++ goto exit; ++ } ++ if (PyUnicode_READY(args[0]) == -1) { ++ goto exit; ++ } ++ filename = args[0]; ++ if (nargs < 2) { ++ goto skip_optional; ++ } ++ if (!PyUnicode_Check(args[1])) { ++ _PyArg_BadArgument("open", "argument 2", "str", args[1]); ++ goto exit; ++ } ++ Py_ssize_t flags_length; ++ flags = PyUnicode_AsUTF8AndSize(args[1], &flags_length); ++ if (flags == NULL) { ++ goto exit; ++ } ++ if (strlen(flags) != (size_t)flags_length) { ++ PyErr_SetString(PyExc_ValueError, "embedded null character"); ++ goto exit; ++ } ++ if (nargs < 3) { ++ goto skip_optional; ++ } ++ mode = _PyLong_AsInt(args[2]); ++ if (mode == -1 && PyErr_Occurred()) { ++ goto exit; ++ } ++skip_optional: ++ return_value = dbmopen_impl(module, filename, flags, mode); ++ ++exit: ++ return return_value; ++} ++/*[clinic end generated code: output=6947b1115df66f7c input=a9049054013a1b77]*/ +--- a/3.11/setup.py ++++ b/3.11/setup.py +@@ -548,6 +548,69 @@ class PyBuildExt(build_ext): + + # XXX Omitted modules: gl, pure, dl, SGI-specific modules + ++ # The standard Unix dbm module: ++ if True: ++ dbm_order = "gdbm:ndbm:bdb".split(":") ++ dbmext = None ++ for cand in dbm_order: ++ if cand == "ndbm": ++ if find_file("ndbm.h", inc_dirs, []) is not None: ++ # Some systems have -lndbm, others have -lgdbm_compat, ++ # others don't have either ++ if self.compiler.find_library_file(lib_dirs, ++ 'ndbm'): ++ ndbm_libs = ['ndbm'] ++ elif self.compiler.find_library_file(lib_dirs, ++ 'gdbm_compat'): ++ ndbm_libs = ['gdbm_compat'] ++ else: ++ ndbm_libs = [] ++ dbmext = Extension('_dbm', ['Modules/_dbmmodule.c'], ++ define_macros=[ ++ ('HAVE_NDBM_H',None), ++ ], ++ libraries=ndbm_libs) ++ break ++ ++ elif cand == "gdbm": ++ if self.compiler.find_library_file(lib_dirs, 'gdbm'): ++ gdbm_libs = ['gdbm'] ++ if self.compiler.find_library_file(lib_dirs, ++ 'gdbm_compat'): ++ gdbm_libs.append('gdbm_compat') ++ if find_file("gdbm/ndbm.h", inc_dirs, []) is not None: ++ dbmext = Extension( ++ '_dbm', ['Modules/_dbmmodule.c'], ++ define_macros=[ ++ ('HAVE_GDBM_NDBM_H', None), ++ ], ++ libraries = gdbm_libs) ++ break ++ if find_file("gdbm-ndbm.h", inc_dirs, []) is not None: ++ dbmext = Extension( ++ '_dbm', ['Modules/_dbmmodule.c'], ++ define_macros=[ ++ ('HAVE_GDBM_DASH_NDBM_H', None), ++ ], ++ libraries = gdbm_libs) ++ break ++ elif cand == "bdb": ++ if db_incs is not None: ++ dbmext = Extension('_dbm', ['Modules/_dbmmodule.c'], ++ library_dirs=dblib_dir, ++ runtime_library_dirs=dblib_dir, ++ include_dirs=db_incs, ++ define_macros=[ ++ ('HAVE_BERKDB_H', None), ++ ('DB_DBM_HSEARCH', None), ++ ], ++ libraries=dblibs) ++ break ++ if dbmext is not None: ++ exts.append(dbmext) ++ else: ++ missing.append('_dbm') ++ + # Anthony Baxter's gdbm module. 
GNU dbm(3) will require -lgdbm: + if (True or 'gdbm' in dbm_order and + self.compiler.find_library_file(lib_dirs, 'gdbm')): diff -Nru python3-stdlib-extensions-3.9.7/debian/patches/no-dbm.diff python3-stdlib-extensions-3.10.8/debian/patches/no-dbm.diff --- python3-stdlib-extensions-3.9.7/debian/patches/no-dbm.diff 1970-01-01 00:00:00.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/debian/patches/no-dbm.diff 2022-06-09 08:05:21.000000000 +0000 @@ -0,0 +1,22 @@ +--- a/3.10/setup.py ++++ b/3.10/setup.py +@@ -549,7 +549,7 @@ class PyBuildExt(build_ext): + # XXX Omitted modules: gl, pure, dl, SGI-specific modules + + # The standard Unix dbm module: +- if True: ++ if False: + dbm_order = "gdbm:ndbm:bdb".split(":") + dbmext = None + for cand in dbm_order: +--- a/3.11/setup.py ++++ b/3.11/setup.py +@@ -549,7 +549,7 @@ class PyBuildExt(build_ext): + # XXX Omitted modules: gl, pure, dl, SGI-specific modules + + # The standard Unix dbm module: +- if True: ++ if False: + dbm_order = "gdbm:ndbm:bdb".split(":") + dbmext = None + for cand in dbm_order: diff -Nru python3-stdlib-extensions-3.9.7/debian/patches/series python3-stdlib-extensions-3.10.8/debian/patches/series --- python3-stdlib-extensions-3.9.7/debian/patches/series 2021-09-02 13:22:20.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/debian/patches/series 2022-06-09 08:01:36.000000000 +0000 @@ -58,4 +58,15 @@ 3.10/disable-some-tests.diff 3.10/mangle-fstack-protector.diff 3.10/lib2to3-no-pickled-grammar.diff +# 3.11 +3.11/distutils-install-layout.diff +3.11/distutils-link.diff +3.11/distutils-sysconfig.diff +3.11/multiarch.diff +3.11/ext-no-libpython-link.diff +3.11/multiarch-extname.diff +3.11/disable-some-tests.diff +3.11/mangle-fstack-protector.diff +3.11/lib2to3-no-pickled-grammar.diff add-dbm-module.diff +no-dbm.diff diff -Nru python3-stdlib-extensions-3.9.7/debian/python3-distutils.postinst.in python3-stdlib-extensions-3.10.8/debian/python3-distutils.postinst.in --- python3-stdlib-extensions-3.9.7/debian/python3-distutils.postinst.in 2021-01-03 11:07:09.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/debian/python3-distutils.postinst.in 2022-10-30 08:24:07.000000000 +0000 @@ -19,21 +19,21 @@ echo >&2 "$python: can't get files for byte-compilation" fi done - if [ -n "$2" ] && dpkg --compare-versions $2 lt 3.7.2-3~; then - find /usr/lib/python3.6/distutils -name __pycache__ | xargs -r rm -rf - find /usr/lib/python3.6/distutils -empty -type d -delete || true - find /usr/lib/python3.6 -maxdepth 0 -empty -type d -delete || true - fi if [ -n "$2" ] && dpkg --compare-versions $2 lt 3.8.3-2~; then find /usr/lib/python3.7/distutils -name __pycache__ | xargs -r rm -rf find /usr/lib/python3.7/distutils -empty -type d -delete || true find /usr/lib/python3.7 -maxdepth 0 -empty -type d -delete || true fi if [ -n "$2" ] && dpkg --compare-versions $2 lt 3.9.1-2~; then - find /usr/lib/python3.8/tkinter -name __pycache__ 2>/dev/null | xargs -r rm -rf - find /usr/lib/python3.8/tkinter -empty -type d -delete 2>/dev/null || true + find /usr/lib/python3.8/distutils -name __pycache__ 2>/dev/null | xargs -r rm -rf + find /usr/lib/python3.8/distutils -empty -type d -delete 2>/dev/null || true find /usr/lib/python3.8 -maxdepth 0 -empty -type d -delete 2>/dev/null || true fi + if [ -n "$2" ] && dpkg --compare-versions $2 lt 3.10.8-1~; then + find /usr/lib/python3.9/distutils -name __pycache__ 2>/dev/null | xargs -r rm -rf + find /usr/lib/python3.9/distutils -empty -type d -delete 2>/dev/null || true + find /usr/lib/python3.9 -maxdepth 0 -empty -type d 
-delete 2>/dev/null || true + fi esac #DEBHELPER# diff -Nru python3-stdlib-extensions-3.9.7/debian/python3-lib2to3.postinst.in python3-stdlib-extensions-3.10.8/debian/python3-lib2to3.postinst.in --- python3-stdlib-extensions-3.9.7/debian/python3-lib2to3.postinst.in 2021-01-03 11:06:59.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/debian/python3-lib2to3.postinst.in 2022-10-30 08:24:07.000000000 +0000 @@ -30,10 +30,15 @@ find /usr/lib/python3.7 -maxdepth 0 -empty -type d -delete || true fi if [ -n "$2" ] && dpkg --compare-versions $2 lt 3.9.1-2~; then - find /usr/lib/python3.8/tkinter -name __pycache__ 2>/dev/null | xargs -r rm -rf - find /usr/lib/python3.8/tkinter -empty -type d -delete 2>/dev/null || true + find /usr/lib/python3.8/lib2to3 -name __pycache__ 2>/dev/null | xargs -r rm -rf + find /usr/lib/python3.8/lib2to3 -empty -type d -delete 2>/dev/null || true find /usr/lib/python3.8 -maxdepth 0 -empty -type d -delete 2>/dev/null || true fi + if [ -n "$2" ] && dpkg --compare-versions $2 lt 3.10.8-1~; then + find /usr/lib/python3.9/lib2to3 -name __pycache__ 2>/dev/null | xargs -r rm -rf + find /usr/lib/python3.9/lib2to3 -empty -type d -delete 2>/dev/null || true + find /usr/lib/python3.9 -maxdepth 0 -empty -type d -delete 2>/dev/null || true + fi esac #DEBHELPER# diff -Nru python3-stdlib-extensions-3.9.7/debian/python3-tk.postinst.in python3-stdlib-extensions-3.10.8/debian/python3-tk.postinst.in --- python3-stdlib-extensions-3.9.7/debian/python3-tk.postinst.in 2021-01-03 11:06:32.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/debian/python3-tk.postinst.in 2022-10-30 08:24:07.000000000 +0000 @@ -29,6 +29,11 @@ find /usr/lib/python3.8/tkinter -empty -type d -delete 2>/dev/null || true find /usr/lib/python3.8 -maxdepth 0 -empty -type d -delete 2>/dev/null || true fi + if [ -n "$2" ] && dpkg --compare-versions $2 lt 3.10.8-1~; then + find /usr/lib/python3.9/tkinter -name __pycache__ 2>/dev/null | xargs -r rm -rf + find /usr/lib/python3.9/tkinter -empty -type d -delete 2>/dev/null || true + find /usr/lib/python3.9 -maxdepth 0 -empty -type d -delete 2>/dev/null || true + fi esac #DEBHELPER# diff -Nru python3-stdlib-extensions-3.9.7/debian/rules python3-stdlib-extensions-3.10.8/debian/rules --- python3-stdlib-extensions-3.9.7/debian/rules 2021-09-02 13:22:20.000000000 +0000 +++ python3-stdlib-extensions-3.10.8/debian/rules 2022-08-10 11:27:11.000000000 +0000 @@ -16,19 +16,19 @@ #rel_ext = $(strip $(shell dpkg-parsechangelog | awk -F- '/^Version:/ {print $$NF}')) PYVERS = $(shell py3versions -vs) -PYVERS = 3.9 3.10 +PYVERS = 3.10 3.11 -MIN_VER = 3.9.2-3~ -NEXT_VER = 3.11 +MIN_VER = 3.10.5-0~ +NEXT_VER = 3.12 -# FIXME: add bullseye when uploading to unstable, after bullseye is released -ifeq (,$(filter $(distrelase), jessie stretch buster precise trusty xenial bionic focal groovy hirsute)) - ifneq (,$(filter 3.1% 3.2%, $(PYVERS))) - with_dbmmodule = yes - dbm_breaks = '-Vdbm:Breaks=libpython3.10-stdlib (<< 3.10.0~rc1-1~)' - dbmdbg_breaks = '-Vdbmdbg:Breaks=libpython3.10-dbg (<< 3.10.0~rc1-1~)' - endif -endif +# FIXME: remove sid when uploading to unstable, after bullseye is released +#ifeq (,$(filter $(distrelase), jessie stretch buster bullseye bookworm sid precise trusty xenial bionic focal groovy hirsute)) +# ifneq (,$(filter 3.1% 3.2%, $(PYVERS))) +# with_dbmmodule = yes +# dbm_breaks = '-Vdbm:Breaks=libpython3.10-stdlib (<< 3.10.0~b1-2~)' +# dbmdbg_breaks = '-Vdbmdbg:Breaks=libpython3.10-dbg (<< 3.10.0~b1-2~)' +# endif +#endif include /usr/share/dpkg/architecture.mk @@ -96,8 
+96,18 @@ dh_prep install-stamp-py%: build-stamp-py% - cd $* && $(SET_ENV) python$* setup.py install \ - --root=$(CURDIR)/$(d_tk) --install-layout=deb + : # FIXME: 3.11 install errors out ... + if [ "$*" = 3.11 ]; then \ + cd $* && $(SET_ENV) python$* setup.py install \ + --root=$(CURDIR)/$(d_tk) --install-layout=deb; \ + mkdir -p $(d_tk)/usr/lib/python$*/lib-dynload; \ + mv $(d_tk)/usr/lib/python3/dist-packages/*.so \ + $(d_tk)/usr/lib/python$*/lib-dynload; \ + rm -rf $(d_tk)/usr/lib/python3/dist-packages; \ + else \ + cd $* && $(SET_ENV) python$* setup.py install \ + --root=$(CURDIR)/$(d_tk) --install-layout=deb; \ + fi mkdir -p $(d_tk)/usr/lib/python$*/lib-dynload mv $(d_tk)/usr/lib/python3/*-packages/*.so \ $(d_tk)/usr/lib/python$*/lib-dynload/ @@ -127,8 +137,18 @@ find $(d_2to3) -name __pycache__ | xargs -r rm -rf dbg-install-py%: dbg-stamp-py% - cd $* && $(SET_ENV) python$*-dbg setup.py install \ - --root=$(CURDIR)/$(d_tk)-dbg --install-layout=deb + : # FIXME: 3.11 install errors out ... + if [ "$*" = 3.11 ]; then \ + cd $* && $(SET_ENV) python$*-dbg setup.py install \ + --root=$(CURDIR)/$(d_tk)-dbg --install-layout=deb; \ + mkdir -p $(d_tk)/usr/lib/python$*/lib-dynload; \ + mv $(d_tk)/usr/lib/python3/dist-packages/*.so \ + $(d_tk)/usr/lib/python$*/lib-dynload; \ + rm -rf $(d_tk)/usr/lib/python3/dist-packages; \ + else \ + cd $* && $(SET_ENV) python$*-dbg setup.py install \ + --root=$(CURDIR)/$(d_tk)-dbg --install-layout=deb; \ + fi mkdir -p $(d_tk)-dbg/usr/lib/python$*/lib-dynload mv $(d_tk)-dbg/usr/lib/python3/*-packages/*.so \ $(d_tk)-dbg/usr/lib/python$*/lib-dynload/
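If the _dbm extension is actually built (note that no-dbm.diff above switches the setup.py block back off), the module added by add-dbm-module.diff can be exercised with a minimal smoke test like the following; the database name 'example' is arbitrary, and the backend reported by _dbm.library depends on which of gdbm/ndbm/Berkeley DB was found at build time:

    import _dbm

    with _dbm.open('example', 'c') as db:           # 'c' creates the database if missing
        db[b'key'] = b'value'
        print(db.get(b'key'))                       # b'value'
        print(db.setdefault(b'other', b'fallback')) # stored and returned
        print(db.keys())                            # keys come back as bytes
    print(_dbm.library)                             # e.g. 'GNU gdbm'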