From 60ea218d2d8f997fa533db37134b6d700a3c1ebb Mon Sep 17 00:00:00 2001 From: Ben Weinstein-Raun Date: Thu, 15 Jun 2023 23:38:47 -0700 Subject: [PATCH] minify stdlib --- Lib/PSF-LICENSE | 254 - Lib/README.md | 13 - Lib/__future__.py | 175 +- Lib/__hello__.py | 22 +- Lib/__phello__/__init__.py | 10 +- Lib/__phello__/spam.py | 10 +- Lib/_collections_abc.py | 1615 +- Lib/_compat_pickle.py | 344 +- Lib/_compression.py | 224 +- Lib/_dummy_os.py | 89 +- Lib/_dummy_thread.py | 262 +- Lib/_markupbase.py | 579 +- Lib/_osx_support.py | 735 +- Lib/_py_abc.py | 197 +- Lib/_pycodecs.py | 1675 +- Lib/_pydecimal.py | 8090 ++-------- Lib/_pyio.py | 2719 ---- Lib/_sitebuiltins.py | 145 +- Lib/_threading_local.py | 277 +- Lib/_weakrefset.py | 303 +- Lib/abc.py | 219 +- Lib/aifc.py | 1329 +- Lib/antigravity.py | 17 - Lib/argparse.py | 2621 ---- Lib/ast.py | 2362 +-- Lib/asynchat.py | 307 - Lib/asyncio/__init__.py | 48 - Lib/asyncio/base_events.py | 1468 -- Lib/asyncio/base_futures.py | 71 - Lib/asyncio/base_subprocess.py | 293 - Lib/asyncio/base_tasks.py | 76 - Lib/asyncio/compat.py | 18 - Lib/asyncio/constants.py | 7 - Lib/asyncio/coroutines.py | 344 - Lib/asyncio/events.py | 705 - Lib/asyncio/futures.py | 446 - Lib/asyncio/locks.py | 478 - Lib/asyncio/log.py | 7 - Lib/asyncio/proactor_events.py | 550 - Lib/asyncio/protocols.py | 134 - Lib/asyncio/queues.py | 256 - Lib/asyncio/runners.py | 72 - Lib/asyncio/selector_events.py | 1142 -- Lib/asyncio/sslproto.py | 692 - Lib/asyncio/streams.py | 695 - Lib/asyncio/subprocess.py | 213 - Lib/asyncio/tasks.py | 808 - Lib/asyncio/test_utils.py | 503 - Lib/asyncio/transports.py | 306 - Lib/asyncio/unix_events.py | 1074 -- Lib/asyncio/windows_events.py | 780 - Lib/asyncio/windows_utils.py | 224 - Lib/asyncore.py | 642 - Lib/base64.py | 801 +- Lib/bdb.py | 1178 +- Lib/binhex.py | 215 + Lib/bisect.py | 157 +- Lib/bz2.py | 344 - Lib/calendar.py | 980 +- Lib/cgi.py | 1012 -- Lib/cgitb.py | 464 +- Lib/chunk.py | 230 +- Lib/cmd.py | 548 +- Lib/code.py 
| 389 +- Lib/codecs.py | 1331 +- Lib/codeop.py | 190 +- Lib/collections/__init__.py | 2095 +-- Lib/collections/_defaultdict.py | 86 +- Lib/collections/abc.py | 5 +- Lib/colorsys.py | 221 +- Lib/compileall.py | 401 +- Lib/concurrent/__init__.py | 1 - Lib/concurrent/futures/__init__.py | 53 - Lib/concurrent/futures/_base.py | 644 - Lib/concurrent/futures/process.py | 786 - Lib/concurrent/futures/thread.py | 237 - Lib/configparser.py | 1382 -- Lib/contextlib.py | 990 +- Lib/contextvars.py | 6 +- Lib/copy.py | 433 +- Lib/copyreg.py | 285 +- Lib/csv.py | 572 +- Lib/ctypes/__init__.py | 577 - Lib/ctypes/_aix.py | 327 - Lib/ctypes/_endian.py | 78 - Lib/ctypes/macholib/README.ctypes | 7 - Lib/ctypes/macholib/__init__.py | 9 - Lib/ctypes/macholib/dyld.py | 165 - Lib/ctypes/macholib/dylib.py | 42 - Lib/ctypes/macholib/fetch_macholib | 2 - Lib/ctypes/macholib/fetch_macholib.bat | 1 - Lib/ctypes/macholib/framework.py | 42 - Lib/ctypes/test/__init__.py | 16 - Lib/ctypes/test/__main__.py | 4 - Lib/ctypes/test/test_anon.py | 73 - Lib/ctypes/test/test_array_in_pointer.py | 64 - Lib/ctypes/test/test_arrays.py | 238 - Lib/ctypes/test/test_as_parameter.py | 231 - Lib/ctypes/test/test_bitfields.py | 297 - Lib/ctypes/test/test_buffers.py | 73 - Lib/ctypes/test/test_bytes.py | 66 - Lib/ctypes/test/test_byteswap.py | 364 - Lib/ctypes/test/test_callbacks.py | 333 - Lib/ctypes/test/test_cast.py | 99 - Lib/ctypes/test/test_cfuncs.py | 218 - Lib/ctypes/test/test_checkretval.py | 36 - Lib/ctypes/test/test_delattr.py | 21 - Lib/ctypes/test/test_errno.py | 76 - Lib/ctypes/test/test_find.py | 127 - Lib/ctypes/test/test_frombuffer.py | 141 - Lib/ctypes/test/test_funcptr.py | 132 - Lib/ctypes/test/test_functions.py | 384 - Lib/ctypes/test/test_incomplete.py | 42 - Lib/ctypes/test/test_init.py | 40 - Lib/ctypes/test/test_internals.py | 100 - Lib/ctypes/test/test_keeprefs.py | 153 - Lib/ctypes/test/test_libc.py | 33 - Lib/ctypes/test/test_loading.py | 182 - Lib/ctypes/test/test_macholib.py | 110 - 
Lib/ctypes/test/test_memfunctions.py | 79 - Lib/ctypes/test/test_numbers.py | 295 - Lib/ctypes/test/test_objects.py | 67 - Lib/ctypes/test/test_parameters.py | 250 - Lib/ctypes/test/test_pep3118.py | 235 - Lib/ctypes/test/test_pickling.py | 81 - Lib/ctypes/test/test_pointers.py | 223 - Lib/ctypes/test/test_prototypes.py | 222 - Lib/ctypes/test/test_python_api.py | 85 - Lib/ctypes/test/test_random_things.py | 77 - Lib/ctypes/test/test_refcounts.py | 116 - Lib/ctypes/test/test_repr.py | 29 - Lib/ctypes/test/test_returnfuncptrs.py | 66 - Lib/ctypes/test/test_simplesubclasses.py | 55 - Lib/ctypes/test/test_sizes.py | 33 - Lib/ctypes/test/test_slicing.py | 167 - Lib/ctypes/test/test_stringptr.py | 77 - Lib/ctypes/test/test_strings.py | 145 - Lib/ctypes/test/test_struct_fields.py | 97 - Lib/ctypes/test/test_structures.py | 812 - Lib/ctypes/test/test_unaligned_structures.py | 43 - Lib/ctypes/test/test_unicode.py | 64 - Lib/ctypes/test/test_values.py | 103 - Lib/ctypes/test/test_varsize_struct.py | 50 - Lib/ctypes/test/test_win32.py | 136 - Lib/ctypes/test/test_wintypes.py | 43 - Lib/ctypes/util.py | 376 - Lib/ctypes/wintypes.py | 202 - Lib/dataclasses.py | 1727 +- Lib/datetime.py | 3312 +--- Lib/dbm/__init__.py | 189 - Lib/dbm/dumb.py | 316 - Lib/decimal.py | 13 +- Lib/difflib.py | 2476 +-- Lib/dis.py | 22 +- Lib/distutils/README | 13 - Lib/distutils/__init__.py | 13 - Lib/distutils/_msvccompiler.py | 574 - Lib/distutils/archive_util.py | 256 - Lib/distutils/bcppcompiler.py | 393 - Lib/distutils/ccompiler.py | 1115 -- Lib/distutils/cmd.py | 434 - Lib/distutils/command/__init__.py | 31 - Lib/distutils/command/bdist.py | 143 - Lib/distutils/command/bdist_dumb.py | 123 - Lib/distutils/command/bdist_msi.py | 741 - Lib/distutils/command/bdist_rpm.py | 582 - Lib/distutils/command/bdist_wininst.py | 367 - Lib/distutils/command/build.py | 157 - Lib/distutils/command/build_clib.py | 209 - Lib/distutils/command/build_ext.py | 755 - Lib/distutils/command/build_py.py | 416 - 
Lib/distutils/command/build_scripts.py | 160 - Lib/distutils/command/check.py | 145 - Lib/distutils/command/clean.py | 76 - Lib/distutils/command/command_template | 33 - Lib/distutils/command/config.py | 347 - Lib/distutils/command/install.py | 705 - Lib/distutils/command/install_data.py | 79 - Lib/distutils/command/install_egg_info.py | 97 - Lib/distutils/command/install_headers.py | 47 - Lib/distutils/command/install_lib.py | 221 - Lib/distutils/command/install_scripts.py | 60 - Lib/distutils/command/register.py | 304 - Lib/distutils/command/sdist.py | 456 - Lib/distutils/command/upload.py | 200 - Lib/distutils/config.py | 131 - Lib/distutils/core.py | 234 - Lib/distutils/cygwinccompiler.py | 405 - Lib/distutils/debug.py | 5 - Lib/distutils/dep_util.py | 92 - Lib/distutils/dir_util.py | 223 - Lib/distutils/dist.py | 1236 -- Lib/distutils/errors.py | 97 - Lib/distutils/extension.py | 240 - Lib/distutils/fancy_getopt.py | 457 - Lib/distutils/file_util.py | 238 - Lib/distutils/filelist.py | 327 - Lib/distutils/log.py | 77 - Lib/distutils/msvc9compiler.py | 791 - Lib/distutils/msvccompiler.py | 643 - Lib/distutils/spawn.py | 192 - Lib/distutils/sysconfig.py | 556 - Lib/distutils/text_file.py | 286 - Lib/distutils/unixccompiler.py | 333 - Lib/distutils/util.py | 557 - Lib/distutils/version.py | 343 - Lib/distutils/versionpredicate.py | 166 - Lib/doctest.py | 2813 ---- Lib/dummy_threading.py | 94 +- Lib/email/__init__.py | 62 - Lib/email/_encoded_words.py | 221 - Lib/email/_header_value_parser.py | 2970 ---- Lib/email/_parseaddr.py | 540 - Lib/email/_policybase.py | 370 - Lib/email/architecture.rst | 216 - Lib/email/base64mime.py | 119 - Lib/email/charset.py | 406 - Lib/email/contentmanager.py | 250 - Lib/email/encoders.py | 69 - Lib/email/errors.py | 107 - Lib/email/feedparser.py | 536 - Lib/email/generator.py | 508 - Lib/email/header.py | 578 - Lib/email/headerregistry.py | 586 - Lib/email/iterators.py | 71 - Lib/email/message.py | 1164 -- Lib/email/mime/__init__.py 
| 0 Lib/email/mime/application.py | 37 - Lib/email/mime/audio.py | 74 - Lib/email/mime/base.py | 30 - Lib/email/mime/image.py | 47 - Lib/email/mime/message.py | 34 - Lib/email/mime/multipart.py | 48 - Lib/email/mime/nonmultipart.py | 22 - Lib/email/mime/text.py | 42 - Lib/email/parser.py | 132 - Lib/email/policy.py | 223 - Lib/email/quoprimime.py | 299 - Lib/email/utils.py | 382 - Lib/encodings/__init__.py | 170 - Lib/encodings/aliases.py | 551 - Lib/encodings/ascii.py | 50 - Lib/encodings/base64_codec.py | 55 - Lib/encodings/big5.py | 39 - Lib/encodings/big5hkscs.py | 39 - Lib/encodings/bz2_codec.py | 78 - Lib/encodings/charmap.py | 69 - Lib/encodings/cp037.py | 307 - Lib/encodings/cp1006.py | 307 - Lib/encodings/cp1026.py | 307 - Lib/encodings/cp1125.py | 698 - Lib/encodings/cp1140.py | 307 - Lib/encodings/cp1250.py | 307 - Lib/encodings/cp1251.py | 307 - Lib/encodings/cp1252.py | 307 - Lib/encodings/cp1253.py | 307 - Lib/encodings/cp1254.py | 307 - Lib/encodings/cp1255.py | 307 - Lib/encodings/cp1256.py | 307 - Lib/encodings/cp1257.py | 307 - Lib/encodings/cp1258.py | 307 - Lib/encodings/cp273.py | 307 - Lib/encodings/cp424.py | 307 - Lib/encodings/cp437.py | 698 - Lib/encodings/cp500.py | 307 - Lib/encodings/cp65001.py | 43 - Lib/encodings/cp720.py | 309 - Lib/encodings/cp737.py | 698 - Lib/encodings/cp775.py | 697 - Lib/encodings/cp850.py | 698 - Lib/encodings/cp852.py | 698 - Lib/encodings/cp855.py | 698 - Lib/encodings/cp856.py | 307 - Lib/encodings/cp857.py | 694 - Lib/encodings/cp858.py | 698 - Lib/encodings/cp860.py | 698 - Lib/encodings/cp861.py | 698 - Lib/encodings/cp862.py | 698 - Lib/encodings/cp863.py | 698 - Lib/encodings/cp864.py | 690 - Lib/encodings/cp865.py | 698 - Lib/encodings/cp866.py | 698 - Lib/encodings/cp869.py | 689 - Lib/encodings/cp874.py | 307 - Lib/encodings/cp875.py | 307 - Lib/encodings/cp932.py | 39 - Lib/encodings/cp949.py | 39 - Lib/encodings/cp950.py | 39 - Lib/encodings/euc_jis_2004.py | 39 - Lib/encodings/euc_jisx0213.py | 
39 - Lib/encodings/euc_jp.py | 39 - Lib/encodings/euc_kr.py | 39 - Lib/encodings/gb18030.py | 39 - Lib/encodings/gb2312.py | 39 - Lib/encodings/gbk.py | 39 - Lib/encodings/hex_codec.py | 55 - Lib/encodings/hp_roman8.py | 314 - Lib/encodings/hz.py | 39 - Lib/encodings/idna.py | 309 - Lib/encodings/iso2022_jp.py | 39 - Lib/encodings/iso2022_jp_1.py | 39 - Lib/encodings/iso2022_jp_2.py | 39 - Lib/encodings/iso2022_jp_2004.py | 39 - Lib/encodings/iso2022_jp_3.py | 39 - Lib/encodings/iso2022_jp_ext.py | 39 - Lib/encodings/iso2022_kr.py | 39 - Lib/encodings/iso8859_1.py | 307 - Lib/encodings/iso8859_10.py | 307 - Lib/encodings/iso8859_11.py | 307 - Lib/encodings/iso8859_13.py | 307 - Lib/encodings/iso8859_14.py | 307 - Lib/encodings/iso8859_15.py | 307 - Lib/encodings/iso8859_16.py | 307 - Lib/encodings/iso8859_2.py | 307 - Lib/encodings/iso8859_3.py | 307 - Lib/encodings/iso8859_4.py | 307 - Lib/encodings/iso8859_5.py | 307 - Lib/encodings/iso8859_6.py | 307 - Lib/encodings/iso8859_7.py | 307 - Lib/encodings/iso8859_8.py | 307 - Lib/encodings/iso8859_9.py | 307 - Lib/encodings/johab.py | 39 - Lib/encodings/koi8_r.py | 307 - Lib/encodings/koi8_t.py | 308 - Lib/encodings/koi8_u.py | 307 - Lib/encodings/kz1048.py | 307 - Lib/encodings/latin_1.py | 50 - Lib/encodings/mac_arabic.py | 698 - Lib/encodings/mac_centeuro.py | 307 - Lib/encodings/mac_croatian.py | 307 - Lib/encodings/mac_cyrillic.py | 307 - Lib/encodings/mac_farsi.py | 307 - Lib/encodings/mac_greek.py | 307 - Lib/encodings/mac_iceland.py | 307 - Lib/encodings/mac_latin2.py | 312 - Lib/encodings/mac_roman.py | 307 - Lib/encodings/mac_romanian.py | 307 - Lib/encodings/mac_turkish.py | 307 - Lib/encodings/mbcs.py | 47 - Lib/encodings/oem.py | 41 - Lib/encodings/palmos.py | 308 - Lib/encodings/ptcp154.py | 312 - Lib/encodings/punycode.py | 237 - Lib/encodings/quopri_codec.py | 56 - Lib/encodings/raw_unicode_escape.py | 46 - Lib/encodings/rot_13.py | 113 - Lib/encodings/shift_jis.py | 39 - 
Lib/encodings/shift_jis_2004.py | 39 - Lib/encodings/shift_jisx0213.py | 39 - Lib/encodings/tis_620.py | 307 - Lib/encodings/undefined.py | 49 - Lib/encodings/unicode_escape.py | 46 - Lib/encodings/unicode_internal.py | 45 - Lib/encodings/utf_16.py | 155 - Lib/encodings/utf_16_be.py | 42 - Lib/encodings/utf_16_le.py | 42 - Lib/encodings/utf_32.py | 150 - Lib/encodings/utf_32_be.py | 37 - Lib/encodings/utf_32_le.py | 37 - Lib/encodings/utf_7.py | 38 - Lib/encodings/utf_8.py | 48 +- Lib/encodings/utf_8_sig.py | 130 - Lib/encodings/uu_codec.py | 103 - Lib/encodings/zlib_codec.py | 77 - Lib/ensurepip/__init__.py | 293 - Lib/ensurepip/__main__.py | 5 - .../_bundled/pip-22.3.1-py3-none-any.whl | Bin 2051534 -> 0 bytes .../setuptools-65.5.0-py3-none-any.whl | Bin 1232695 -> 0 bytes Lib/ensurepip/_uninstall.py | 31 - Lib/enum.py | 1345 +- Lib/filecmp.py | 407 +- Lib/fileinput.py | 611 +- Lib/fnmatch.py | 261 +- Lib/formatter.py | 197 + Lib/fractions.py | 835 +- Lib/ftplib.py | 972 -- Lib/functools.py | 1287 +- Lib/genericpath.py | 201 +- Lib/getopt.py | 215 - Lib/getpass.py | 185 - Lib/gettext.py | 899 +- Lib/glob.py | 358 +- Lib/graphlib.py | 317 +- Lib/gzip.py | 668 - Lib/hashlib.py | 397 +- Lib/heapq.py | 724 +- Lib/hmac.py | 277 +- Lib/html/__init__.py | 132 - Lib/html/entities.py | 2510 --- Lib/html/parser.py | 455 - Lib/http/__init__.py | 150 - Lib/http/client.py | 1525 -- Lib/http/cookiejar.py | 2128 --- Lib/http/cookies.py | 612 - Lib/http/server.py | 1295 -- Lib/imghdr.py | 229 +- Lib/imp.py | 450 +- Lib/importlib/__init__.py | 242 +- Lib/importlib/_abc.py | 63 +- Lib/importlib/_bootstrap.py | 1862 +-- Lib/importlib/_bootstrap_external.py | 2235 +-- Lib/importlib/abc.py | 397 +- Lib/importlib/machinery.py | 31 +- Lib/importlib/metadata/__init__.py | 1309 +- Lib/importlib/metadata/_adapters.py | 89 +- Lib/importlib/metadata/_collections.py | 34 +- Lib/importlib/metadata/_functools.py | 114 +- Lib/importlib/metadata/_itertools.py | 87 +- 
Lib/importlib/metadata/_meta.py | 59 +- Lib/importlib/metadata/_text.py | 110 +- Lib/importlib/readers.py | 15 +- Lib/importlib/resources/__init__.py | 41 +- Lib/importlib/resources/_adapters.py | 223 +- Lib/importlib/resources/_common.py | 126 +- Lib/importlib/resources/_itertools.py | 45 +- Lib/importlib/resources/_legacy.py | 134 +- Lib/importlib/resources/abc.py | 187 +- Lib/importlib/resources/readers.py | 157 +- Lib/importlib/resources/simple.py | 161 +- Lib/importlib/simple.py | 17 +- Lib/importlib/util.py | 409 +- Lib/inspect.py | 3158 ---- Lib/io.py | 145 +- Lib/ipaddress.py | 2894 +--- Lib/json/__init__.py | 407 +- Lib/json/decoder.py | 497 +- Lib/json/encoder.py | 592 +- Lib/json/scanner.py | 103 +- Lib/json/tool.py | 99 +- Lib/keyword.py | 69 +- Lib/linecache.py | 255 +- Lib/locale.py | 1789 --- Lib/logging/__init__.py | 2261 --- Lib/logging/config.py | 946 -- Lib/logging/handlers.py | 1573 -- Lib/mailbox.py | 2151 --- Lib/mimetypes.py | 752 +- Lib/multiprocessing/__init__.py | 37 - Lib/multiprocessing/connection.py | 978 -- Lib/multiprocessing/context.py | 362 - Lib/multiprocessing/dummy/__init__.py | 126 - Lib/multiprocessing/dummy/connection.py | 75 - Lib/multiprocessing/forkserver.py | 348 - Lib/multiprocessing/heap.py | 337 - Lib/multiprocessing/managers.py | 1378 -- Lib/multiprocessing/pool.py | 954 -- Lib/multiprocessing/popen_fork.py | 83 - Lib/multiprocessing/popen_forkserver.py | 74 - Lib/multiprocessing/popen_spawn_posix.py | 72 - Lib/multiprocessing/popen_spawn_win32.py | 131 - Lib/multiprocessing/process.py | 432 - Lib/multiprocessing/queues.py | 379 - Lib/multiprocessing/reduction.py | 281 - Lib/multiprocessing/resource_sharer.py | 154 - Lib/multiprocessing/resource_tracker.py | 239 - Lib/multiprocessing/shared_memory.py | 532 - Lib/multiprocessing/sharedctypes.py | 240 - Lib/multiprocessing/spawn.py | 297 - Lib/multiprocessing/synchronize.py | 394 - Lib/multiprocessing/util.py | 489 - Lib/netrc.py | 192 - Lib/nntplib.py | 1093 -- 
Lib/ntpath.py | 1121 +- Lib/nturl2path.py | 102 +- Lib/numbers.py | 502 +- Lib/opcode.py | 611 +- Lib/operator.py | 634 +- Lib/optparse.py | 1681 -- Lib/os.py | 1122 -- Lib/pathlib.py | 1464 -- Lib/pdb.py | 1730 -- Lib/pickle.py | 1820 --- Lib/pickletools.py | 2890 ---- Lib/pkgutil.py | 715 - Lib/platform.py | 1654 +- Lib/plistlib.py | 1188 +- Lib/posixpath.py | 730 +- Lib/pprint.py | 909 +- Lib/pty.py | 187 - Lib/py_compile.py | 258 +- Lib/pydoc.py | 2673 ---- Lib/pydoc_data/__init__.py | 0 Lib/pydoc_data/_pydoc.css | 6 - Lib/pydoc_data/topics.py | 13062 ---------------- Lib/queue.py | 417 +- Lib/quopri.py | 342 +- Lib/random.py | 1153 +- Lib/re.py | 461 +- Lib/reprlib.py | 225 +- Lib/rlcompleter.py | 272 +- Lib/runpy.py | 428 +- Lib/sched.py | 203 +- Lib/secrets.py | 86 +- Lib/selectors.py | 832 +- Lib/shelve.py | 303 +- Lib/shlex.py | 516 +- Lib/shutil.py | 1430 -- Lib/signal.py | 92 - Lib/site.py | 867 +- Lib/smtpd.py | 979 -- Lib/sndhdr.py | 337 +- Lib/socket.py | 960 -- Lib/socketserver.py | 847 - Lib/sqlite3/__init__.py | 70 - Lib/sqlite3/__main__.py | 132 - Lib/sqlite3/dbapi2.py | 108 - Lib/sqlite3/dump.py | 82 - Lib/sre_compile.py | 1096 +- Lib/sre_constants.py | 351 +- Lib/sre_parse.py | 1591 +- Lib/ssl.py | 1489 -- Lib/stat.py | 285 +- Lib/statistics.py | 1521 +- Lib/string.py | 362 +- Lib/stringprep.py | 338 +- Lib/struct.py | 18 +- Lib/subprocess.py | 2160 --- Lib/sunau.py | 736 +- Lib/sysconfig.py | 859 +- Lib/tabnanny.py | 424 +- Lib/tarfile.py | 2614 ---- Lib/telnetlib.py | 677 - Lib/tempfile.py | 1117 +- Lib/test/__init__.py | 1 - Lib/test/__main__.py | 2 - Lib/test/_test_atexit.py | 136 - Lib/test/_typed_dict_helper.py | 18 - Lib/test/ann_module.py | 62 - Lib/test/ann_module2.py | 36 - Lib/test/ann_module3.py | 18 - Lib/test/ann_module4.py | 5 - Lib/test/ann_module5.py | 10 - Lib/test/ann_module6.py | 7 - Lib/test/ann_module7.py | 11 - Lib/test/badsyntax_3131.py | 2 - Lib/test/badsyntax_future10.py | 3 - Lib/test/badsyntax_future3.py | 10 - 
Lib/test/badsyntax_future4.py | 10 - Lib/test/badsyntax_future5.py | 12 - Lib/test/badsyntax_future6.py | 10 - Lib/test/badsyntax_future7.py | 11 - Lib/test/badsyntax_future8.py | 10 - Lib/test/badsyntax_future9.py | 10 - Lib/test/badsyntax_pep3120.py | 1 - Lib/test/cfgparser.1 | 3 - Lib/test/cfgparser.2 | 537 - Lib/test/cfgparser.3 | 69 - Lib/test/cmath_testcases.txt | 2511 --- Lib/test/dataclass_module_1.py | 32 - Lib/test/dataclass_module_1_str.py | 32 - Lib/test/dataclass_module_2.py | 32 - Lib/test/dataclass_module_2_str.py | 32 - Lib/test/dataclass_textanno.py | 12 - Lib/test/decimaltestdata/abs.decTest | 161 - Lib/test/decimaltestdata/add.decTest | 2716 ---- Lib/test/decimaltestdata/and.decTest | 338 - Lib/test/decimaltestdata/base.decTest | 1411 -- Lib/test/decimaltestdata/clamp.decTest | 211 - Lib/test/decimaltestdata/class.decTest | 131 - Lib/test/decimaltestdata/compare.decTest | 758 - Lib/test/decimaltestdata/comparetotal.decTest | 798 - .../decimaltestdata/comparetotmag.decTest | 790 - Lib/test/decimaltestdata/copy.decTest | 86 - Lib/test/decimaltestdata/copyabs.decTest | 86 - Lib/test/decimaltestdata/copynegate.decTest | 86 - Lib/test/decimaltestdata/copysign.decTest | 177 - Lib/test/decimaltestdata/ddAbs.decTest | 126 - Lib/test/decimaltestdata/ddAdd.decTest | 1328 -- Lib/test/decimaltestdata/ddAnd.decTest | 347 - Lib/test/decimaltestdata/ddBase.decTest | 1104 -- Lib/test/decimaltestdata/ddCanonical.decTest | 357 - Lib/test/decimaltestdata/ddClass.decTest | 76 - Lib/test/decimaltestdata/ddCompare.decTest | 744 - Lib/test/decimaltestdata/ddCompareSig.decTest | 647 - .../decimaltestdata/ddCompareTotal.decTest | 706 - .../decimaltestdata/ddCompareTotalMag.decTest | 706 - Lib/test/decimaltestdata/ddCopy.decTest | 88 - Lib/test/decimaltestdata/ddCopyAbs.decTest | 88 - Lib/test/decimaltestdata/ddCopyNegate.decTest | 88 - Lib/test/decimaltestdata/ddCopySign.decTest | 175 - Lib/test/decimaltestdata/ddDivide.decTest | 863 - 
Lib/test/decimaltestdata/ddDivideInt.decTest | 449 - Lib/test/decimaltestdata/ddEncode.decTest | 495 - Lib/test/decimaltestdata/ddFMA.decTest | 1698 -- Lib/test/decimaltestdata/ddInvert.decTest | 202 - Lib/test/decimaltestdata/ddLogB.decTest | 159 - Lib/test/decimaltestdata/ddMax.decTest | 322 - Lib/test/decimaltestdata/ddMaxMag.decTest | 304 - Lib/test/decimaltestdata/ddMin.decTest | 309 - Lib/test/decimaltestdata/ddMinMag.decTest | 293 - Lib/test/decimaltestdata/ddMinus.decTest | 88 - Lib/test/decimaltestdata/ddMultiply.decTest | 553 - Lib/test/decimaltestdata/ddNextMinus.decTest | 126 - Lib/test/decimaltestdata/ddNextPlus.decTest | 124 - Lib/test/decimaltestdata/ddNextToward.decTest | 374 - Lib/test/decimaltestdata/ddOr.decTest | 292 - Lib/test/decimaltestdata/ddPlus.decTest | 88 - Lib/test/decimaltestdata/ddQuantize.decTest | 833 - Lib/test/decimaltestdata/ddReduce.decTest | 182 - Lib/test/decimaltestdata/ddRemainder.decTest | 600 - .../decimaltestdata/ddRemainderNear.decTest | 629 - Lib/test/decimaltestdata/ddRotate.decTest | 262 - .../decimaltestdata/ddSameQuantum.decTest | 389 - Lib/test/decimaltestdata/ddScaleB.decTest | 243 - Lib/test/decimaltestdata/ddShift.decTest | 262 - Lib/test/decimaltestdata/ddSubtract.decTest | 629 - Lib/test/decimaltestdata/ddToIntegral.decTest | 257 - Lib/test/decimaltestdata/ddXor.decTest | 337 - Lib/test/decimaltestdata/decDouble.decTest | 65 - Lib/test/decimaltestdata/decQuad.decTest | 65 - Lib/test/decimaltestdata/decSingle.decTest | 25 - Lib/test/decimaltestdata/divide.decTest | 854 - Lib/test/decimaltestdata/divideint.decTest | 486 - Lib/test/decimaltestdata/dqAbs.decTest | 126 - Lib/test/decimaltestdata/dqAdd.decTest | 1215 -- Lib/test/decimaltestdata/dqAnd.decTest | 420 - Lib/test/decimaltestdata/dqBase.decTest | 1081 -- Lib/test/decimaltestdata/dqCanonical.decTest | 372 - Lib/test/decimaltestdata/dqClass.decTest | 77 - Lib/test/decimaltestdata/dqCompare.decTest | 753 - Lib/test/decimaltestdata/dqCompareSig.decTest | 647 
- .../decimaltestdata/dqCompareTotal.decTest | 706 - .../decimaltestdata/dqCompareTotalMag.decTest | 706 - Lib/test/decimaltestdata/dqCopy.decTest | 88 - Lib/test/decimaltestdata/dqCopyAbs.decTest | 88 - Lib/test/decimaltestdata/dqCopyNegate.decTest | 88 - Lib/test/decimaltestdata/dqCopySign.decTest | 175 - Lib/test/decimaltestdata/dqDivide.decTest | 808 - Lib/test/decimaltestdata/dqDivideInt.decTest | 453 - Lib/test/decimaltestdata/dqEncode.decTest | 477 - Lib/test/decimaltestdata/dqFMA.decTest | 1786 --- Lib/test/decimaltestdata/dqInvert.decTest | 245 - Lib/test/decimaltestdata/dqLogB.decTest | 160 - Lib/test/decimaltestdata/dqMax.decTest | 322 - Lib/test/decimaltestdata/dqMaxMag.decTest | 304 - Lib/test/decimaltestdata/dqMin.decTest | 309 - Lib/test/decimaltestdata/dqMinMag.decTest | 293 - Lib/test/decimaltestdata/dqMinus.decTest | 88 - Lib/test/decimaltestdata/dqMultiply.decTest | 589 - Lib/test/decimaltestdata/dqNextMinus.decTest | 126 - Lib/test/decimaltestdata/dqNextPlus.decTest | 124 - Lib/test/decimaltestdata/dqNextToward.decTest | 375 - Lib/test/decimaltestdata/dqOr.decTest | 401 - Lib/test/decimaltestdata/dqPlus.decTest | 88 - Lib/test/decimaltestdata/dqQuantize.decTest | 836 - Lib/test/decimaltestdata/dqReduce.decTest | 183 - Lib/test/decimaltestdata/dqRemainder.decTest | 597 - .../decimaltestdata/dqRemainderNear.decTest | 631 - Lib/test/decimaltestdata/dqRotate.decTest | 298 - .../decimaltestdata/dqSameQuantum.decTest | 389 - Lib/test/decimaltestdata/dqScaleB.decTest | 260 - Lib/test/decimaltestdata/dqShift.decTest | 298 - Lib/test/decimaltestdata/dqSubtract.decTest | 635 - Lib/test/decimaltestdata/dqToIntegral.decTest | 257 - Lib/test/decimaltestdata/dqXor.decTest | 410 - Lib/test/decimaltestdata/dsBase.decTest | 1062 -- Lib/test/decimaltestdata/dsEncode.decTest | 372 - Lib/test/decimaltestdata/exp.decTest | 674 - Lib/test/decimaltestdata/extra.decTest | 2830 ---- Lib/test/decimaltestdata/fma.decTest | 3426 ---- 
Lib/test/decimaltestdata/inexact.decTest | 215 - Lib/test/decimaltestdata/invert.decTest | 176 - Lib/test/decimaltestdata/ln.decTest | 611 - Lib/test/decimaltestdata/log10.decTest | 551 - Lib/test/decimaltestdata/logb.decTest | 188 - Lib/test/decimaltestdata/max.decTest | 424 - Lib/test/decimaltestdata/maxmag.decTest | 404 - Lib/test/decimaltestdata/min.decTest | 407 - Lib/test/decimaltestdata/minmag.decTest | 390 - Lib/test/decimaltestdata/minus.decTest | 182 - Lib/test/decimaltestdata/multiply.decTest | 731 - Lib/test/decimaltestdata/nextminus.decTest | 148 - Lib/test/decimaltestdata/nextplus.decTest | 150 - Lib/test/decimaltestdata/nexttoward.decTest | 426 - Lib/test/decimaltestdata/or.decTest | 334 - Lib/test/decimaltestdata/plus.decTest | 195 - Lib/test/decimaltestdata/power.decTest | 1624 -- Lib/test/decimaltestdata/powersqrt.decTest | 2970 ---- Lib/test/decimaltestdata/quantize.decTest | 948 -- .../decimaltestdata/randomBound32.decTest | 2443 --- Lib/test/decimaltestdata/randoms.decTest | 4030 ----- Lib/test/decimaltestdata/reduce.decTest | 234 - Lib/test/decimaltestdata/remainder.decTest | 640 - .../decimaltestdata/remainderNear.decTest | 572 - Lib/test/decimaltestdata/rescale.decTest | 764 - Lib/test/decimaltestdata/rotate.decTest | 247 - Lib/test/decimaltestdata/rounding.decTest | 1303 -- Lib/test/decimaltestdata/samequantum.decTest | 389 - Lib/test/decimaltestdata/scaleb.decTest | 209 - Lib/test/decimaltestdata/shift.decTest | 250 - Lib/test/decimaltestdata/squareroot.decTest | 3834 ----- Lib/test/decimaltestdata/subtract.decTest | 873 -- Lib/test/decimaltestdata/testall.decTest | 87 - Lib/test/decimaltestdata/tointegral.decTest | 241 - Lib/test/decimaltestdata/tointegralx.decTest | 255 - Lib/test/decimaltestdata/xor.decTest | 335 - Lib/test/double_const.py | 30 - Lib/test/encoded_modules/__init__.py | 23 - Lib/test/encoded_modules/module_iso_8859_1.py | 5 - Lib/test/encoded_modules/module_koi8_r.py | 3 - Lib/test/exception_hierarchy.txt | 67 - 
Lib/test/floating_points.txt | 1028 -- Lib/test/formatfloat_testcases.txt | 355 - Lib/test/future_test1.py | 11 - Lib/test/future_test2.py | 10 - Lib/test/keycert.passwd.pem | 50 - Lib/test/keycert.pem | 48 - Lib/test/keycert2.pem | 49 - Lib/test/keycert3.pem | 132 - Lib/test/keycert4.pem | 132 - Lib/test/keycertecc.pem | 96 - Lib/test/libregrtest/__init__.py | 5 - Lib/test/libregrtest/cmdline.py | 394 - Lib/test/libregrtest/main.py | 650 - Lib/test/libregrtest/refleak.py | 288 - Lib/test/libregrtest/runtest.py | 328 - Lib/test/libregrtest/runtest_mp.py | 288 - Lib/test/libregrtest/save_env.py | 291 - Lib/test/libregrtest/setup.py | 134 - Lib/test/libregrtest/utils.py | 61 - Lib/test/libregrtest/win_utils.py | 105 - Lib/test/list_tests.py | 566 - Lib/test/lock_tests.py | 952 -- Lib/test/mapping_tests.py | 666 - Lib/test/math_testcases.txt | 633 - Lib/test/mime.types | 1445 -- Lib/test/mock_socket.py | 166 - Lib/test/mod_generics_cache.py | 53 - Lib/test/pickletester.py | 3880 ----- Lib/test/randv2_32.pck | 633 - Lib/test/randv2_64.pck | 633 - Lib/test/randv3.pck | 633 - Lib/test/re_tests.py | 564 - Lib/test/recursion.tar | Bin 516 -> 0 bytes Lib/test/regrtest.py | 50 - Lib/test/relimport.py | 1 - Lib/test/seq_tests.py | 430 - Lib/test/signalinterproctester.py | 83 - Lib/test/ssl_servers.py | 209 - Lib/test/string_tests.py | 1501 -- Lib/test/subprocessdata/fd_status.py | 34 - Lib/test/subprocessdata/input_reader.py | 7 - Lib/test/subprocessdata/qcat.py | 7 - Lib/test/subprocessdata/qgrep.py | 10 - Lib/test/subprocessdata/sigchild_ignore.py | 15 - Lib/test/support/__init__.py | 2241 --- Lib/test/support/bytecode_helper.py | 42 - Lib/test/support/hashlib_helper.py | 51 - Lib/test/support/import_helper.py | 248 - Lib/test/support/interpreters.py | 197 - Lib/test/support/logging_helper.py | 29 - Lib/test/support/os_helper.py | 717 - Lib/test/support/script_helper.py | 302 - Lib/test/support/socket_helper.py | 272 - Lib/test/support/testresult.py | 185 - 
Lib/test/support/threading_helper.py | 244 - Lib/test/support/warnings_helper.py | 207 - Lib/test/test___future__.py | 61 - Lib/test/test__osx_support.py | 326 - Lib/test/test_abc.py | 691 - Lib/test/test_abstract_numbers.py | 44 - Lib/test/test_argparse.py | 5575 ------- Lib/test/test_array.py | 1621 -- Lib/test/test_ast.py | 2507 --- Lib/test/test_asyncgen.py | 1706 -- Lib/test/test_asynchat.py | 290 - Lib/test/test_asyncore.py | 838 - Lib/test/test_atexit.py | 106 - Lib/test/test_augassign.py | 326 - Lib/test/test_base64.py | 802 - Lib/test/test_baseexception.py | 219 - Lib/test/test_bdb.py | 1228 -- Lib/test/test_bigmem.py | 1281 -- Lib/test/test_binascii.py | 463 - Lib/test/test_binop.py | 440 - Lib/test/test_bisect.py | 392 - Lib/test/test_bool.py | 383 - Lib/test/test_buffer.py | 4434 ------ Lib/test/test_bufio.py | 77 - Lib/test/test_builtin.py | 2541 --- Lib/test/test_bytes.py | 2089 --- Lib/test/test_calendar.py | 1011 -- Lib/test/test_call.py | 623 - Lib/test/test_cgi.py | 645 - Lib/test/test_cgitb.py | 71 - Lib/test/test_charmapcodec.py | 58 - Lib/test/test_class.py | 743 - Lib/test/test_cmath.py | 652 - Lib/test/test_cmd.py | 266 - Lib/test/test_cmd_line.py | 1038 -- Lib/test/test_cmd_line_script.py | 799 - Lib/test/test_code.py | 820 - Lib/test/test_code_module.py | 157 - Lib/test/test_codeccallbacks.py | 1284 -- Lib/test/test_codecs.py | 3826 ----- Lib/test/test_codeop.py | 332 - Lib/test/test_collections.py | 2399 --- Lib/test/test_colorsys.py | 100 - Lib/test/test_compare.py | 118 - Lib/test/test_compile.py | 1766 --- Lib/test/test_complex.py | 797 - Lib/test/test_configparser.py | 2162 --- Lib/test/test_contains.py | 110 - Lib/test/test_context.py | 1141 -- Lib/test/test_contextlib.py | 1274 -- Lib/test/test_copy.py | 913 -- Lib/test/test_ctypes.py | 10 - Lib/test/test_dataclasses.py | 4038 ----- Lib/test/test_dbm.py | 213 - Lib/test/test_dbm_dumb.py | 304 - Lib/test/test_decimal.py | 5783 ------- Lib/test/test_decorators.py | 467 - 
Lib/test/test_defaultdict.py | 192 - Lib/test/test_deque.py | 1040 -- Lib/test/test_descr.py | 5944 ------- Lib/test/test_devpoll.py | 142 - Lib/test/test_dict.py | 1618 -- Lib/test/test_dictcomps.py | 134 - Lib/test/test_dictviews.py | 338 - Lib/test/test_difflib.py | 514 - Lib/test/test_difflib_expect.html | 526 - Lib/test/test_dis.py | 60 - Lib/test/test_doctest2.py | 126 - Lib/test/test_docxmlrpc.py | 232 - Lib/test/test_dtrace.py | 178 - Lib/test/test_dummy_thread.py | 278 - Lib/test/test_dummy_threading.py | 60 - Lib/test/test_dynamic.py | 196 - Lib/test/test_dynamicclassattribute.py | 300 - Lib/test/test_eintr.py | 20 - Lib/test/test_ensurepip.py | 361 - Lib/test/test_enum.py | 3216 ---- Lib/test/test_enumerate.py | 269 - Lib/test/test_eof.py | 77 - Lib/test/test_epoll.py | 262 - Lib/test/test_errno.py | 35 - Lib/test/test_exception_group.py | 1000 -- Lib/test/test_exception_hierarchy.py | 219 - Lib/test/test_exception_variations.py | 176 - Lib/test/test_exceptions.py | 2750 ---- Lib/test/test_faulthandler.py | 979 -- Lib/test/test_fcntl.py | 218 - Lib/test/test_file.py | 357 - Lib/test/test_filecmp.py | 250 - Lib/test/test_fileinput.py | 1049 -- Lib/test/test_fileio.py | 672 - Lib/test/test_float.py | 1589 -- Lib/test/test_fnmatch.py | 290 - Lib/test/test_format.py | 645 - Lib/test/test_fractions.py | 733 - Lib/test/test_fstring.py | 1390 -- Lib/test/test_ftplib.py | 1107 -- Lib/test/test_funcattrs.py | 464 - Lib/test/test_functools.py | 2619 ---- Lib/test/test_future.py | 467 - Lib/test/test_future3.py | 26 - Lib/test/test_future4.py | 11 - Lib/test/test_future5.py | 21 - Lib/test/test_generators.py | 2408 --- Lib/test/test_genericalias.py | 346 - Lib/test/test_genericclass.py | 296 - Lib/test/test_genericpath.py | 599 - Lib/test/test_getopt.py | 181 - Lib/test/test_getpass.py | 163 - Lib/test/test_glob.py | 402 - Lib/test/test_global.py | 57 - Lib/test/test_grammar.py | 1818 --- Lib/test/test_graphlib.py | 252 - Lib/test/test_grp.py | 111 - 
Lib/test/test_gzip.py | 868 - Lib/test/test_hash.py | 371 - Lib/test/test_hashlib.py | 1211 -- Lib/test/test_heapq.py | 477 - Lib/test/test_hmac.py | 698 - Lib/test/test_html.py | 103 - Lib/test/test_htmlparser.py | 791 - Lib/test/test_http_cookiejar.py | 1926 --- Lib/test/test_http_cookies.py | 487 - Lib/test/test_httplib.py | 2118 --- Lib/test/test_httpservers.py | 1336 -- Lib/test/test_imp.py | 465 - Lib/test/test_import/__init__.py | 1423 -- Lib/test/test_import/__main__.py | 3 - .../data/circular_imports/basic.py | 2 - .../data/circular_imports/basic2.py | 1 - .../data/circular_imports/binding.py | 1 - .../data/circular_imports/binding2.py | 1 - .../data/circular_imports/from_cycle1.py | 2 - .../data/circular_imports/from_cycle2.py | 2 - .../data/circular_imports/indirect.py | 1 - .../data/circular_imports/rebinding.py | 3 - .../data/circular_imports/rebinding2.py | 3 - .../data/circular_imports/source.py | 2 - .../data/circular_imports/subpackage.py | 2 - .../circular_imports/subpkg/subpackage2.py | 2 - .../data/circular_imports/subpkg/util.py | 2 - .../data/circular_imports/subpkg2/__init__.py | 0 .../subpkg2/parent/__init__.py | 1 - .../circular_imports/subpkg2/parent/child.py | 3 - .../test_import/data/circular_imports/use.py | 2 - .../test_import/data/circular_imports/util.py | 2 - Lib/test/test_import/data/package/__init__.py | 2 - .../test_import/data/package/submodule.py | 0 .../test_import/data/package2/submodule1.py | 3 - .../test_import/data/package2/submodule2.py | 0 .../test_import/data/unwritable/__init__.py | 12 - Lib/test/test_import/data/unwritable/x.py | 0 Lib/test/test_importlib/__init__.py | 5 - Lib/test/test_importlib/__main__.py | 4 - Lib/test/test_importlib/abc.py | 93 - Lib/test/test_importlib/builtin/__init__.py | 5 - Lib/test/test_importlib/builtin/__main__.py | 4 - .../test_importlib/builtin/test_finder.py | 97 - .../test_importlib/builtin/test_loader.py | 110 - Lib/test/test_importlib/data/__init__.py | 0 
.../data/example-21.12-py3-none-any.whl | Bin 1455 -> 0 bytes .../data/example-21.12-py3.6.egg | Bin 1497 -> 0 bytes .../data/example2-1.0.0-py3-none-any.whl | Bin 1167 -> 0 bytes Lib/test/test_importlib/data01/__init__.py | 0 Lib/test/test_importlib/data01/binary.file | Bin 4 -> 0 bytes .../data01/subdirectory/__init__.py | 0 .../data01/subdirectory/binary.file | Bin 4 -> 0 bytes Lib/test/test_importlib/data01/utf-16.file | Bin 44 -> 0 bytes Lib/test/test_importlib/data01/utf-8.file | 1 - Lib/test/test_importlib/data02/__init__.py | 0 .../test_importlib/data02/one/__init__.py | 0 .../test_importlib/data02/one/resource1.txt | 1 - .../test_importlib/data02/two/__init__.py | 0 .../test_importlib/data02/two/resource2.txt | 1 - Lib/test/test_importlib/data03/__init__.py | 0 .../data03/namespace/portion1/__init__.py | 0 .../data03/namespace/portion2/__init__.py | 0 .../data03/namespace/resource1.txt | 0 Lib/test/test_importlib/extension/__init__.py | 5 - Lib/test/test_importlib/extension/__main__.py | 4 - .../extension/test_case_sensitivity.py | 48 - .../test_importlib/extension/test_finder.py | 52 - .../test_importlib/extension/test_loader.py | 305 - .../extension/test_path_hook.py | 31 - Lib/test/test_importlib/fixtures.py | 314 - Lib/test/test_importlib/frozen/__init__.py | 5 - Lib/test/test_importlib/frozen/__main__.py | 4 - Lib/test/test_importlib/frozen/test_finder.py | 240 - Lib/test/test_importlib/frozen/test_loader.py | 277 - Lib/test/test_importlib/import_/__init__.py | 5 - Lib/test/test_importlib/import_/__main__.py | 4 - .../test_importlib/import_/test___loader__.py | 80 - .../import_/test___package__.py | 192 - Lib/test/test_importlib/import_/test_api.py | 150 - .../test_importlib/import_/test_caching.py | 100 - .../test_importlib/import_/test_fromlist.py | 175 - .../test_importlib/import_/test_meta_path.py | 139 - .../test_importlib/import_/test_packages.py | 111 - Lib/test/test_importlib/import_/test_path.py | 307 - .../import_/test_relative_imports.py | 
233 - .../namespace_pkgs/both_portions/foo/one.py | 1 - .../namespace_pkgs/both_portions/foo/two.py | 1 - .../namespace_pkgs/missing_directory.zip | Bin 515 -> 0 bytes .../module_and_namespace_package/a_test.py | 1 - .../module_and_namespace_package/a_test/empty | 0 .../namespace_pkgs/nested_portion1.zip | Bin 556 -> 0 bytes .../not_a_namespace_pkg/foo/__init__.py | 0 .../not_a_namespace_pkg/foo/one.py | 1 - .../namespace_pkgs/portion1/foo/one.py | 1 - .../namespace_pkgs/portion2/foo/two.py | 1 - .../project1/parent/child/one.py | 1 - .../project2/parent/child/two.py | 1 - .../project3/parent/child/three.py | 1 - .../namespace_pkgs/top_level_portion1.zip | Bin 332 -> 0 bytes .../namespacedata01/binary.file | Bin 4 -> 0 bytes .../namespacedata01/utf-16.file | Bin 44 -> 0 bytes .../test_importlib/namespacedata01/utf-8.file | 1 - Lib/test/test_importlib/partial/cfimport.py | 38 - .../test_importlib/partial/pool_in_threads.py | 27 - Lib/test/test_importlib/resources/__init__.py | 0 Lib/test/test_importlib/resources/util.py | 178 - Lib/test/test_importlib/source/__init__.py | 5 - Lib/test/test_importlib/source/__main__.py | 4 - .../source/test_case_sensitivity.py | 92 - .../test_importlib/source/test_file_loader.py | 802 - Lib/test/test_importlib/source/test_finder.py | 227 - .../test_importlib/source/test_path_hook.py | 41 - .../source/test_source_encoding.py | 181 - Lib/test/test_importlib/stubs.py | 10 - Lib/test/test_importlib/test_abc.py | 1037 -- Lib/test/test_importlib/test_api.py | 465 - .../test_importlib/test_compatibilty_files.py | 102 - Lib/test/test_importlib/test_contents.py | 43 - Lib/test/test_importlib/test_files.py | 46 - Lib/test/test_importlib/test_lazy.py | 163 - Lib/test/test_importlib/test_locks.py | 155 - Lib/test/test_importlib/test_main.py | 336 - Lib/test/test_importlib/test_metadata_api.py | 340 - .../test_importlib/test_namespace_pkgs.py | 384 - Lib/test/test_importlib/test_open.py | 87 - Lib/test/test_importlib/test_path.py | 62 - 
Lib/test/test_importlib/test_pkg_import.py | 80 - Lib/test/test_importlib/test_read.py | 75 - Lib/test/test_importlib/test_reader.py | 128 - Lib/test/test_importlib/test_resource.py | 252 - Lib/test/test_importlib/test_spec.py | 839 - .../test_importlib/test_threaded_import.py | 285 - Lib/test/test_importlib/test_util.py | 899 -- Lib/test/test_importlib/test_windows.py | 195 - Lib/test/test_importlib/test_zip.py | 62 - .../test_importlib/threaded_import_hangers.py | 45 - Lib/test/test_importlib/update-zips.py | 53 - Lib/test/test_importlib/util.py | 410 - Lib/test/test_importlib/zipdata01/__init__.py | 0 .../test_importlib/zipdata01/ziptestdata.zip | Bin 876 -> 0 bytes Lib/test/test_importlib/zipdata02/__init__.py | 0 .../test_importlib/zipdata02/ziptestdata.zip | Bin 698 -> 0 bytes Lib/test/test_index.py | 275 - Lib/test/test_int.py | 572 - Lib/test/test_int_literal.py | 143 - Lib/test/test_io.py | 4872 ------ Lib/test/test_ioctl.py | 92 - Lib/test/test_ipaddress.py | 2640 ---- Lib/test/test_isinstance.py | 362 - Lib/test/test_iter.py | 1045 -- Lib/test/test_iterlen.py | 228 - Lib/test/test_itertools.py | 2402 --- Lib/test/test_json/__init__.py | 62 - Lib/test/test_json/__main__.py | 4 - Lib/test/test_json/test_decode.py | 106 - Lib/test/test_json/test_default.py | 12 - Lib/test/test_json/test_dump.py | 78 - .../test_json/test_encode_basestring_ascii.py | 48 - Lib/test/test_json/test_enum.py | 120 - Lib/test/test_json/test_fail.py | 226 - Lib/test/test_json/test_float.py | 33 - Lib/test/test_json/test_indent.py | 67 - Lib/test/test_json/test_pass1.py | 75 - Lib/test/test_json/test_pass2.py | 18 - Lib/test/test_json/test_pass3.py | 24 - Lib/test/test_json/test_recursion.py | 107 - Lib/test/test_json/test_scanstring.py | 152 - Lib/test/test_json/test_separators.py | 50 - Lib/test/test_json/test_speedups.py | 89 - Lib/test/test_json/test_tool.py | 231 - Lib/test/test_json/test_unicode.py | 101 - Lib/test/test_keyword.py | 37 - Lib/test/test_keywordonlyarg.py | 184 - 
Lib/test/test_kqueue.py | 261 - Lib/test/test_largefile.py | 292 - Lib/test/test_linecache.py | 291 - Lib/test/test_list.py | 277 - Lib/test/test_listcomps.py | 157 - Lib/test/test_locale.py | 685 - Lib/test/test_long.py | 1612 -- Lib/test/test_longexp.py | 10 - Lib/test/test_marshal.py | 636 - Lib/test/test_math.py | 2259 --- Lib/test/test_memoryio.py | 1062 -- Lib/test/test_memoryview.py | 670 - Lib/test/test_mimetypes.py | 294 - Lib/test/test_mmap.py | 953 -- Lib/test/test_module.py | 387 - Lib/test/test_named_expressions.py | 617 - Lib/test/test_netrc.py | 315 - Lib/test/test_ntpath.py | 1015 -- Lib/test/test_numeric_tower.py | 230 - Lib/test/test_opcache.py | 444 - Lib/test/test_opcode.py | 356 - Lib/test/test_openpty.py | 21 - Lib/test/test_operator.py | 665 - Lib/test/test_optparse.py | 1660 -- Lib/test/test_ordered_dict.py | 1034 -- Lib/test/test_os.py | 4704 ------ Lib/test/test_pathlib.py | 2742 ---- Lib/test/test_pickle.py | 669 - Lib/test/test_pickletools.py | 153 - Lib/test/test_pkg.py | 296 - Lib/test/test_pkgutil.py | 593 - Lib/test/test_platform.py | 527 - Lib/test/test_plistlib.py | 1014 -- Lib/test/test_poll.py | 240 - Lib/test/test_popen.py | 72 - Lib/test/test_positional_only_arg.py | 455 - Lib/test/test_posix.py | 2224 --- Lib/test/test_posixpath.py | 803 - Lib/test/test_pow.py | 171 - Lib/test/test_pprint.py | 1240 -- Lib/test/test_print.py | 240 - Lib/test/test_property.py | 398 - Lib/test/test_pty.py | 462 - Lib/test/test_pulldom.py | 356 - Lib/test/test_pwd.py | 114 - Lib/test/test_py_compile.py | 287 - Lib/test/test_pyexpat.py | 808 - Lib/test/test_queue.py | 649 - Lib/test/test_quopri.py | 210 - Lib/test/test_raise.py | 510 - Lib/test/test_random.py | 1366 -- Lib/test/test_range.py | 700 - Lib/test/test_re.py | 2473 --- Lib/test/test_regrtest.py | 1308 -- Lib/test/test_repl.py | 114 - Lib/test/test_reprlib.py | 412 - Lib/test/test_resource.py | 184 - Lib/test/test_richcmp.py | 355 - Lib/test/test_rlcompleter.py | 141 - 
Lib/test/test_robotparser.py | 387 - Lib/test/test_runpy.py | 846 - Lib/test/test_sched.py | 211 - Lib/test/test_scope.py | 768 - Lib/test/test_script_helper.py | 129 - Lib/test/test_secrets.py | 124 - Lib/test/test_selectors.py | 579 - Lib/test/test_set.py | 2074 --- Lib/test/test_setcomps.py | 167 - Lib/test/test_shelve.py | 224 - Lib/test/test_shlex.py | 384 - Lib/test/test_shutil.py | 2566 --- Lib/test/test_signal.py | 1417 -- Lib/test/test_site.py | 721 - Lib/test/test_slice.py | 257 - Lib/test/test_smtpd.py | 1018 -- Lib/test/test_socket.py | 6671 -------- Lib/test/test_socketserver.py | 527 - Lib/test/test_sort.py | 394 - Lib/test/test_sqlite3/__init__.py | 15 - Lib/test/test_sqlite3/__main__.py | 4 - Lib/test/test_sqlite3/test_backup.py | 166 - Lib/test/test_sqlite3/test_cli.py | 157 - Lib/test/test_sqlite3/test_dbapi.py | 1965 --- Lib/test/test_sqlite3/test_dump.py | 124 - Lib/test/test_sqlite3/test_factory.py | 330 - Lib/test/test_sqlite3/test_hooks.py | 354 - Lib/test/test_sqlite3/test_regression.py | 535 - Lib/test/test_sqlite3/test_transactions.py | 372 - Lib/test/test_sqlite3/test_types.py | 558 - Lib/test/test_sqlite3/test_userfunctions.py | 861 - Lib/test/test_stat.py | 289 - Lib/test/test_statistics.py | 2935 ---- Lib/test/test_strftime.py | 208 - Lib/test/test_string.py | 535 - Lib/test/test_string_literals.py | 254 - Lib/test/test_stringprep.py | 94 - Lib/test/test_strtod.py | 444 - Lib/test/test_struct.py | 898 -- Lib/test/test_structseq.py | 142 - Lib/test/test_subclassinit.py | 284 - Lib/test/test_subprocess.py | 3865 ----- Lib/test/test_sundry.py | 58 - Lib/test/test_super.py | 331 - Lib/test/test_support.py | 732 - Lib/test/test_symtable.py | 269 - Lib/test/test_syntax.py | 2215 --- Lib/test/test_sys.py | 1723 -- Lib/test/test_sys_setprofile.py | 463 - Lib/test/test_sys_settrace.py | 2264 --- Lib/test/test_sysconfig.py | 552 - Lib/test/test_syslog.py | 89 - Lib/test/test_tabnanny.py | 356 - Lib/test/test_tarfile.py | 2944 ---- 
Lib/test/test_telnetlib.py | 402 - Lib/test/test_tempfile.py | 1677 -- Lib/test/test_textwrap.py | 1080 -- Lib/test/test_thread.py | 270 - Lib/test/test_threadedtempfile.py | 68 - Lib/test/test_threading.py | 1788 --- Lib/test/test_threading_local.py | 231 - Lib/test/test_time.py | 1122 -- Lib/test/test_timeit.py | 403 - Lib/test/test_timeout.py | 299 - Lib/test/test_tokenize.py | 1664 -- Lib/test/test_tomllib/__init__.py | 15 - Lib/test/test_tomllib/__main__.py | 6 - Lib/test/test_tomllib/burntsushi.py | 120 - .../data/invalid/array-missing-comma.toml | 1 - .../overwrite-array-in-parent.toml | 4 - .../overwrite-bool-with-aot.toml | 2 - .../invalid/array/file-end-after-val.toml | 1 - .../invalid/array/unclosed-after-item.toml | 1 - .../data/invalid/array/unclosed-empty.toml | 1 - .../invalid/basic-str-ends-in-escape.toml | 1 - .../invalid/boolean/invalid-false-casing.toml | 1 - .../invalid/boolean/invalid-true-casing.toml | 1 - .../invalid/dates-and-times/invalid-day.toml | 1 - .../invalid/dotted-keys/access-non-table.toml | 2 - .../dotted-keys/extend-defined-aot.toml | 3 - .../extend-defined-table-with-subtable.toml | 4 - .../dotted-keys/extend-defined-table.toml | 4 - .../invalid/inline-table-missing-comma.toml | 1 - .../define-twice-in-subtable.toml | 1 - .../invalid/inline-table/define-twice.toml | 1 - .../inline-table/file-end-after-key-val.toml | 1 - .../data/invalid/inline-table/mutate.toml | 2 - .../inline-table/override-val-in-table.toml | 5 - .../inline-table/override-val-with-array.toml | 3 - .../inline-table/override-val-with-table.toml | 3 - .../inline-table/overwrite-implicitly.toml | 1 - .../overwrite-value-in-inner-array.toml | 1 - .../overwrite-value-in-inner-table.toml | 1 - .../invalid/inline-table/unclosed-empty.toml | 1 - .../data/invalid/invalid-comment-char.toml | 1 - .../data/invalid/invalid-escaped-unicode.toml | 1 - .../data/invalid/invalid-hex.toml | 1 - .../keys-and-vals/ends-early-table-def.toml | 1 - 
.../invalid/keys-and-vals/ends-early.toml | 1 - .../data/invalid/keys-and-vals/no-value.toml | 1 - .../keys-and-vals/only-ws-after-dot.toml | 1 - .../overwrite-with-deep-table.toml | 2 - .../data/invalid/literal-str/unclosed.toml | 1 - ...missing-closing-double-square-bracket.toml | 2 - .../missing-closing-square-bracket.toml | 2 - .../multiline-basic-str/carriage-return.toml | 2 - .../multiline-basic-str/escape-only.toml | 1 - .../file-ends-after-opening.toml | 1 - .../multiline-basic-str/last-line-escape.toml | 4 - .../unclosed-ends-in-whitespace-escape.toml | 3 - .../file-ends-after-opening.toml | 1 - .../multiline-literal-str/unclosed.toml | 3 - .../data/invalid/non-scalar-escaped.toml | 1 - .../data/invalid/table/eof-after-opening.toml | 1 - .../data/invalid/table/redefine-1.toml | 3 - .../data/invalid/table/redefine-2.toml | 3 - .../invalid/unclosed-multiline-string.toml | 4 - .../data/invalid/unclosed-string.toml | 1 - .../valid/apostrophes-in-literal-string.json | 1 - .../valid/apostrophes-in-literal-string.toml | 3 - .../data/valid/array/array-subtables.json | 11 - .../data/valid/array/array-subtables.toml | 7 - .../data/valid/array/open-parent-table.json | 6 - .../data/valid/array/open-parent-table.toml | 4 - Lib/test/test_tomllib/data/valid/boolean.json | 4 - Lib/test/test_tomllib/data/valid/boolean.toml | 2 - .../data/valid/dates-and-times/datetimes.json | 4 - .../data/valid/dates-and-times/datetimes.toml | 2 - .../data/valid/dates-and-times/localtime.json | 2 - .../data/valid/dates-and-times/localtime.toml | 1 - .../data/valid/empty-inline-table.json | 1 - .../data/valid/empty-inline-table.toml | 1 - .../test_tomllib/data/valid/five-quotes.json | 4 - .../test_tomllib/data/valid/five-quotes.toml | 6 - .../test_tomllib/data/valid/hex-char.json | 5 - .../test_tomllib/data/valid/hex-char.toml | 3 - .../ends-in-whitespace-escape.json | 1 - .../ends-in-whitespace-escape.toml | 6 - .../test_tomllib/data/valid/no-newlines.json | 1 - 
.../test_tomllib/data/valid/no-newlines.toml | 1 - .../data/valid/trailing-comma.json | 7 - .../data/valid/trailing-comma.toml | 1 - Lib/test/test_tomllib/test_data.py | 64 - Lib/test/test_tomllib/test_error.py | 57 - Lib/test/test_tomllib/test_misc.py | 104 - Lib/test/test_trace.py | 596 - Lib/test/test_traceback.py | 1506 -- Lib/test/test_tuple.py | 488 - Lib/test/test_type_comments.py | 453 - Lib/test/test_typechecks.py | 71 - Lib/test/test_types.py | 2146 --- Lib/test/test_typing.py | 5433 ------- Lib/test/test_ucn.py | 253 - Lib/test/test_unary.py | 53 - Lib/test/test_unicode.py | 3111 ---- Lib/test/test_unicode_file.py | 140 - Lib/test/test_unicode_file_functions.py | 199 - Lib/test/test_unicode_identifiers.py | 38 - Lib/test/test_unicodedata.py | 462 - Lib/test/test_unittest.py | 16 - Lib/test/test_univnewlines.py | 125 - Lib/test/test_unpack.py | 171 - Lib/test/test_urllib.py | 1702 -- Lib/test/test_urllib2.py | 2007 --- Lib/test/test_urllib2_localnet.py | 724 - Lib/test/test_urllib2net.py | 384 - Lib/test/test_urllib_response.py | 63 - Lib/test/test_urllibnet.py | 231 - Lib/test/test_urlparse.py | 1328 -- Lib/test/test_userdict.py | 219 - Lib/test/test_userlist.py | 69 - Lib/test/test_userstring.py | 76 - Lib/test/test_utf8_mode.py | 293 - Lib/test/test_utf8source.py | 47 - Lib/test/test_uu.py | 272 - Lib/test/test_uuid.py | 891 -- Lib/test/test_venv.py | 551 - Lib/test/test_weakref.py | 2300 --- Lib/test/test_weakset.py | 493 - Lib/test/test_webbrowser.py | 334 - Lib/test/test_with.py | 753 - Lib/test/test_wsgiref.py | 873 -- Lib/test/test_xdrlib.py | 81 - Lib/test/test_xml_dom_minicompat.py | 140 - Lib/test/test_xml_etree.py | 4530 ------ Lib/test/test_xmlrpc.py | 1616 -- Lib/test/test_yield_from.py | 1062 -- Lib/test/test_zipapp.py | 405 - Lib/test/test_zipfile.py | 3172 ---- Lib/test/test_zipfile64.py | 147 - Lib/test/test_zipimport.py | 864 - Lib/test/test_zlib.py | 975 -- Lib/test/testcodec.py | 48 - Lib/test/testtar.tar | Bin 435200 -> 0 bytes 
Lib/test/tf_inherit_check.py | 27 - ...-latin1-coding-cookie-and-utf8-bom-sig.txt | 13 - ...no-coding-cookie-and-utf8-bom-sig-only.txt | 11 - ...utf8-coding-cookie-and-no-utf8-bom-sig.txt | 13 - ...ts-utf8-coding-cookie-and-utf8-bom-sig.txt | 12 - Lib/test/tokenize_tests.txt | 189 - Lib/test/tracedmodules/__init__.py | 4 - Lib/test/tracedmodules/testmod.py | 9 - Lib/test/xmltestdata/c14n-20/README | 40 - Lib/test/xmltestdata/c14n-20/c14nComment.xml | 4 - Lib/test/xmltestdata/c14n-20/c14nDefault.xml | 3 - Lib/test/xmltestdata/c14n-20/c14nPrefix.xml | 4 - .../xmltestdata/c14n-20/c14nPrefixQname.xml | 7 - .../c14n-20/c14nPrefixQnameXpathElem.xml | 8 - Lib/test/xmltestdata/c14n-20/c14nQname.xml | 6 - .../xmltestdata/c14n-20/c14nQnameElem.xml | 6 - .../c14n-20/c14nQnameXpathElem.xml | 7 - Lib/test/xmltestdata/c14n-20/c14nTrim.xml | 4 - Lib/test/xmltestdata/c14n-20/doc.dtd | 6 - Lib/test/xmltestdata/c14n-20/doc.xsl | 5 - Lib/test/xmltestdata/c14n-20/inC14N1.xml | 14 - Lib/test/xmltestdata/c14n-20/inC14N2.xml | 11 - Lib/test/xmltestdata/c14n-20/inC14N3.xml | 18 - Lib/test/xmltestdata/c14n-20/inC14N4.xml | 13 - Lib/test/xmltestdata/c14n-20/inC14N5.xml | 12 - Lib/test/xmltestdata/c14n-20/inC14N6.xml | 2 - Lib/test/xmltestdata/c14n-20/inNsContent.xml | 4 - Lib/test/xmltestdata/c14n-20/inNsDefault.xml | 3 - Lib/test/xmltestdata/c14n-20/inNsPushdown.xml | 6 - Lib/test/xmltestdata/c14n-20/inNsRedecl.xml | 3 - Lib/test/xmltestdata/c14n-20/inNsSort.xml | 4 - .../xmltestdata/c14n-20/inNsSuperfluous.xml | 4 - Lib/test/xmltestdata/c14n-20/inNsXml.xml | 3 - .../c14n-20/out_inC14N1_c14nComment.xml | 6 - .../c14n-20/out_inC14N1_c14nDefault.xml | 4 - .../c14n-20/out_inC14N2_c14nDefault.xml | 11 - .../c14n-20/out_inC14N2_c14nTrim.xml | 1 - .../c14n-20/out_inC14N3_c14nDefault.xml | 14 - .../c14n-20/out_inC14N3_c14nPrefix.xml | 14 - .../c14n-20/out_inC14N3_c14nTrim.xml | 1 - .../c14n-20/out_inC14N4_c14nDefault.xml | 10 - .../c14n-20/out_inC14N4_c14nTrim.xml | 2 - 
.../c14n-20/out_inC14N5_c14nDefault.xml | 3 - .../c14n-20/out_inC14N5_c14nTrim.xml | 1 - .../c14n-20/out_inC14N6_c14nDefault.xml | 1 - .../c14n-20/out_inNsContent_c14nDefault.xml | 4 - ...t_inNsContent_c14nPrefixQnameXpathElem.xml | 4 - .../c14n-20/out_inNsContent_c14nQnameElem.xml | 4 - .../out_inNsContent_c14nQnameXpathElem.xml | 4 - .../c14n-20/out_inNsDefault_c14nDefault.xml | 3 - .../c14n-20/out_inNsDefault_c14nPrefix.xml | 3 - .../c14n-20/out_inNsPushdown_c14nDefault.xml | 6 - .../c14n-20/out_inNsPushdown_c14nPrefix.xml | 6 - .../c14n-20/out_inNsRedecl_c14nDefault.xml | 3 - .../c14n-20/out_inNsRedecl_c14nPrefix.xml | 3 - .../c14n-20/out_inNsSort_c14nDefault.xml | 4 - .../c14n-20/out_inNsSort_c14nPrefix.xml | 4 - .../out_inNsSuperfluous_c14nDefault.xml | 4 - .../out_inNsSuperfluous_c14nPrefix.xml | 4 - .../c14n-20/out_inNsXml_c14nDefault.xml | 3 - .../c14n-20/out_inNsXml_c14nPrefix.xml | 3 - .../c14n-20/out_inNsXml_c14nPrefixQname.xml | 3 - .../c14n-20/out_inNsXml_c14nQname.xml | 3 - Lib/test/xmltestdata/c14n-20/world.txt | 1 - Lib/test/xmltestdata/expat224_utf8_bug.xml | 2 - Lib/test/xmltestdata/simple-ns.xml | 7 - Lib/test/xmltestdata/simple.xml | 6 - Lib/test/xmltestdata/test.xml | 115 - Lib/test/xmltestdata/test.xml.out | 115 - Lib/test/xmltests.py | 21 - Lib/test/zip_cp437_header.zip | Bin 270 -> 0 bytes Lib/test/zipdir.zip | Bin 374 -> 0 bytes Lib/test/ziptestdata/README.md | 35 - Lib/test/ziptestdata/exe_with_z64 | Bin 978 -> 0 bytes Lib/test/ziptestdata/exe_with_zip | Bin 990 -> 0 bytes Lib/test/ziptestdata/header.sh | 24 - .../ziptestdata/testdata_module_inside_zip.py | 2 - Lib/textwrap.py | 590 +- Lib/this.py | 33 +- Lib/threading.py | 1645 -- Lib/timeit.py | 463 +- Lib/token.py | 207 +- Lib/tokenize.py | 928 +- Lib/tomllib/__init__.py | 10 - Lib/tomllib/_parser.py | 691 - Lib/tomllib/_re.py | 107 - Lib/tomllib/_types.py | 10 - Lib/trace.py | 953 +- Lib/traceback.py | 886 +- Lib/tty.py | 48 +- Lib/types.py | 425 +- Lib/typing.py | 3470 +--- 
Lib/unittest/__init__.py | 114 - Lib/unittest/__main__.py | 18 - Lib/unittest/_log.py | 86 - Lib/unittest/async_case.py | 142 - Lib/unittest/case.py | 1479 -- Lib/unittest/loader.py | 508 - Lib/unittest/main.py | 278 - Lib/unittest/mock.py | 2976 ---- Lib/unittest/result.py | 244 - Lib/unittest/runner.py | 263 - Lib/unittest/signals.py | 71 - Lib/unittest/suite.py | 379 - Lib/unittest/test/__init__.py | 25 - Lib/unittest/test/__main__.py | 18 - Lib/unittest/test/_test_warnings.py | 73 - Lib/unittest/test/dummy.py | 1 - Lib/unittest/test/support.py | 138 - Lib/unittest/test/test_assertions.py | 416 - Lib/unittest/test/test_async_case.py | 490 - Lib/unittest/test/test_break.py | 306 - Lib/unittest/test/test_case.py | 1990 --- Lib/unittest/test/test_discovery.py | 849 - Lib/unittest/test/test_functiontestcase.py | 148 - Lib/unittest/test/test_loader.py | 1642 -- Lib/unittest/test/test_program.py | 477 - Lib/unittest/test/test_result.py | 1387 -- Lib/unittest/test/test_runner.py | 1373 -- Lib/unittest/test/test_setups.py | 507 - Lib/unittest/test/test_skipping.py | 530 - Lib/unittest/test/test_suite.py | 447 - Lib/unittest/test/testmock/__init__.py | 19 - Lib/unittest/test/testmock/__main__.py | 18 - Lib/unittest/test/testmock/support.py | 16 - Lib/unittest/test/testmock/testasync.py | 1094 -- Lib/unittest/test/testmock/testcallable.py | 150 - Lib/unittest/test/testmock/testhelpers.py | 1127 -- .../test/testmock/testmagicmethods.py | 509 - Lib/unittest/test/testmock/testmock.py | 2278 --- Lib/unittest/test/testmock/testpatch.py | 1953 --- Lib/unittest/test/testmock/testsealable.py | 237 - Lib/unittest/test/testmock/testsentinel.py | 41 - Lib/unittest/test/testmock/testwith.py | 347 - Lib/unittest/util.py | 170 - Lib/urllib/__init__.py | 0 Lib/urllib/error.py | 77 - Lib/urllib/parse.py | 1196 -- Lib/urllib/request.py | 2780 ---- Lib/urllib/response.py | 84 - Lib/urllib/robotparser.py | 273 - Lib/uu.py | 272 +- Lib/uuid.py | 951 +- Lib/venv/__init__.py | 510 - 
Lib/venv/__main__.py | 10 - Lib/venv/scripts/common/Activate.ps1 | 247 - Lib/venv/scripts/common/activate | 69 - Lib/venv/scripts/nt/activate.bat | 34 - Lib/venv/scripts/nt/deactivate.bat | 22 - Lib/venv/scripts/posix/activate.csh | 26 - Lib/venv/scripts/posix/activate.fish | 66 - Lib/warnings.py | 768 +- Lib/weakref.py | 964 +- Lib/webbrowser.py | 717 - Lib/wsgiref/__init__.py | 23 - Lib/wsgiref/handlers.py | 557 - Lib/wsgiref/headers.py | 184 - Lib/wsgiref/simple_server.py | 164 - Lib/wsgiref/util.py | 165 - Lib/wsgiref/validate.py | 443 - Lib/xdrlib.py | 336 +- Lib/xml/__init__.py | 20 - Lib/xml/dom/NodeFilter.py | 27 - Lib/xml/dom/__init__.py | 140 - Lib/xml/dom/domreg.py | 99 - Lib/xml/dom/expatbuilder.py | 965 -- Lib/xml/dom/minicompat.py | 109 - Lib/xml/dom/minidom.py | 2011 --- Lib/xml/dom/pulldom.py | 336 - Lib/xml/dom/xmlbuilder.py | 387 - Lib/xml/etree/ElementInclude.py | 185 - Lib/xml/etree/ElementPath.py | 421 - Lib/xml/etree/ElementTree.py | 2083 --- Lib/xml/etree/__init__.py | 33 - Lib/xml/etree/cElementTree.py | 3 - Lib/xml/parsers/__init__.py | 8 - Lib/xml/parsers/expat.py | 8 - Lib/xml/sax/__init__.py | 107 - Lib/xml/sax/_exceptions.py | 131 - Lib/xml/sax/expatreader.py | 446 - Lib/xml/sax/handler.py | 387 - Lib/xml/sax/saxutils.py | 369 - Lib/xml/sax/xmlreader.py | 380 - Lib/xmlrpc/__init__.py | 1 - Lib/xmlrpc/client.py | 1533 -- Lib/xmlrpc/server.py | 993 -- Lib/zipapp.py | 206 - Lib/zipfile.py | 2483 --- Lib/zipimport.py | 772 - 1485 files changed, 22733 insertions(+), 678878 deletions(-) delete mode 100644 Lib/PSF-LICENSE delete mode 100644 Lib/README.md delete mode 100644 Lib/_pyio.py delete mode 100644 Lib/antigravity.py delete mode 100644 Lib/argparse.py delete mode 100644 Lib/asynchat.py delete mode 100644 Lib/asyncio/__init__.py delete mode 100644 Lib/asyncio/base_events.py delete mode 100644 Lib/asyncio/base_futures.py delete mode 100644 Lib/asyncio/base_subprocess.py delete mode 100644 Lib/asyncio/base_tasks.py delete mode 100644 
Lib/asyncio/compat.py delete mode 100644 Lib/asyncio/constants.py delete mode 100644 Lib/asyncio/coroutines.py delete mode 100644 Lib/asyncio/events.py delete mode 100644 Lib/asyncio/futures.py delete mode 100644 Lib/asyncio/locks.py delete mode 100644 Lib/asyncio/log.py delete mode 100644 Lib/asyncio/proactor_events.py delete mode 100644 Lib/asyncio/protocols.py delete mode 100644 Lib/asyncio/queues.py delete mode 100644 Lib/asyncio/runners.py delete mode 100644 Lib/asyncio/selector_events.py delete mode 100644 Lib/asyncio/sslproto.py delete mode 100644 Lib/asyncio/streams.py delete mode 100644 Lib/asyncio/subprocess.py delete mode 100644 Lib/asyncio/tasks.py delete mode 100644 Lib/asyncio/test_utils.py delete mode 100644 Lib/asyncio/transports.py delete mode 100644 Lib/asyncio/unix_events.py delete mode 100644 Lib/asyncio/windows_events.py delete mode 100644 Lib/asyncio/windows_utils.py delete mode 100644 Lib/asyncore.py create mode 100644 Lib/binhex.py delete mode 100644 Lib/bz2.py delete mode 100755 Lib/cgi.py delete mode 100644 Lib/concurrent/__init__.py delete mode 100644 Lib/concurrent/futures/__init__.py delete mode 100644 Lib/concurrent/futures/_base.py delete mode 100644 Lib/concurrent/futures/process.py delete mode 100644 Lib/concurrent/futures/thread.py delete mode 100644 Lib/configparser.py delete mode 100644 Lib/ctypes/__init__.py delete mode 100644 Lib/ctypes/_aix.py delete mode 100644 Lib/ctypes/_endian.py delete mode 100644 Lib/ctypes/macholib/README.ctypes delete mode 100644 Lib/ctypes/macholib/__init__.py delete mode 100644 Lib/ctypes/macholib/dyld.py delete mode 100644 Lib/ctypes/macholib/dylib.py delete mode 100755 Lib/ctypes/macholib/fetch_macholib delete mode 100644 Lib/ctypes/macholib/fetch_macholib.bat delete mode 100644 Lib/ctypes/macholib/framework.py delete mode 100644 Lib/ctypes/test/__init__.py delete mode 100644 Lib/ctypes/test/__main__.py delete mode 100644 Lib/ctypes/test/test_anon.py delete mode 100644 
Lib/ctypes/test/test_array_in_pointer.py delete mode 100644 Lib/ctypes/test/test_arrays.py delete mode 100644 Lib/ctypes/test/test_as_parameter.py delete mode 100644 Lib/ctypes/test/test_bitfields.py delete mode 100644 Lib/ctypes/test/test_buffers.py delete mode 100644 Lib/ctypes/test/test_bytes.py delete mode 100644 Lib/ctypes/test/test_byteswap.py delete mode 100644 Lib/ctypes/test/test_callbacks.py delete mode 100644 Lib/ctypes/test/test_cast.py delete mode 100644 Lib/ctypes/test/test_cfuncs.py delete mode 100644 Lib/ctypes/test/test_checkretval.py delete mode 100644 Lib/ctypes/test/test_delattr.py delete mode 100644 Lib/ctypes/test/test_errno.py delete mode 100644 Lib/ctypes/test/test_find.py delete mode 100644 Lib/ctypes/test/test_frombuffer.py delete mode 100644 Lib/ctypes/test/test_funcptr.py delete mode 100644 Lib/ctypes/test/test_functions.py delete mode 100644 Lib/ctypes/test/test_incomplete.py delete mode 100644 Lib/ctypes/test/test_init.py delete mode 100644 Lib/ctypes/test/test_internals.py delete mode 100644 Lib/ctypes/test/test_keeprefs.py delete mode 100644 Lib/ctypes/test/test_libc.py delete mode 100644 Lib/ctypes/test/test_loading.py delete mode 100644 Lib/ctypes/test/test_macholib.py delete mode 100644 Lib/ctypes/test/test_memfunctions.py delete mode 100644 Lib/ctypes/test/test_numbers.py delete mode 100644 Lib/ctypes/test/test_objects.py delete mode 100644 Lib/ctypes/test/test_parameters.py delete mode 100644 Lib/ctypes/test/test_pep3118.py delete mode 100644 Lib/ctypes/test/test_pickling.py delete mode 100644 Lib/ctypes/test/test_pointers.py delete mode 100644 Lib/ctypes/test/test_prototypes.py delete mode 100644 Lib/ctypes/test/test_python_api.py delete mode 100644 Lib/ctypes/test/test_random_things.py delete mode 100644 Lib/ctypes/test/test_refcounts.py delete mode 100644 Lib/ctypes/test/test_repr.py delete mode 100644 Lib/ctypes/test/test_returnfuncptrs.py delete mode 100644 Lib/ctypes/test/test_simplesubclasses.py delete mode 100644 
Lib/ctypes/test/test_sizes.py delete mode 100644 Lib/ctypes/test/test_slicing.py delete mode 100644 Lib/ctypes/test/test_stringptr.py delete mode 100644 Lib/ctypes/test/test_strings.py delete mode 100644 Lib/ctypes/test/test_struct_fields.py delete mode 100644 Lib/ctypes/test/test_structures.py delete mode 100644 Lib/ctypes/test/test_unaligned_structures.py delete mode 100644 Lib/ctypes/test/test_unicode.py delete mode 100644 Lib/ctypes/test/test_values.py delete mode 100644 Lib/ctypes/test/test_varsize_struct.py delete mode 100644 Lib/ctypes/test/test_win32.py delete mode 100644 Lib/ctypes/test/test_wintypes.py delete mode 100644 Lib/ctypes/util.py delete mode 100644 Lib/ctypes/wintypes.py delete mode 100644 Lib/dbm/__init__.py delete mode 100644 Lib/dbm/dumb.py delete mode 100644 Lib/distutils/README delete mode 100644 Lib/distutils/__init__.py delete mode 100644 Lib/distutils/_msvccompiler.py delete mode 100644 Lib/distutils/archive_util.py delete mode 100644 Lib/distutils/bcppcompiler.py delete mode 100644 Lib/distutils/ccompiler.py delete mode 100644 Lib/distutils/cmd.py delete mode 100644 Lib/distutils/command/__init__.py delete mode 100644 Lib/distutils/command/bdist.py delete mode 100644 Lib/distutils/command/bdist_dumb.py delete mode 100644 Lib/distutils/command/bdist_msi.py delete mode 100644 Lib/distutils/command/bdist_rpm.py delete mode 100644 Lib/distutils/command/bdist_wininst.py delete mode 100644 Lib/distutils/command/build.py delete mode 100644 Lib/distutils/command/build_clib.py delete mode 100644 Lib/distutils/command/build_ext.py delete mode 100644 Lib/distutils/command/build_py.py delete mode 100644 Lib/distutils/command/build_scripts.py delete mode 100644 Lib/distutils/command/check.py delete mode 100644 Lib/distutils/command/clean.py delete mode 100644 Lib/distutils/command/command_template delete mode 100644 Lib/distutils/command/config.py delete mode 100644 Lib/distutils/command/install.py delete mode 100644 
Lib/distutils/command/install_data.py delete mode 100644 Lib/distutils/command/install_egg_info.py delete mode 100644 Lib/distutils/command/install_headers.py delete mode 100644 Lib/distutils/command/install_lib.py delete mode 100644 Lib/distutils/command/install_scripts.py delete mode 100644 Lib/distutils/command/register.py delete mode 100644 Lib/distutils/command/sdist.py delete mode 100644 Lib/distutils/command/upload.py delete mode 100644 Lib/distutils/config.py delete mode 100644 Lib/distutils/core.py delete mode 100644 Lib/distutils/cygwinccompiler.py delete mode 100644 Lib/distutils/debug.py delete mode 100644 Lib/distutils/dep_util.py delete mode 100644 Lib/distutils/dir_util.py delete mode 100644 Lib/distutils/dist.py delete mode 100644 Lib/distutils/errors.py delete mode 100644 Lib/distutils/extension.py delete mode 100644 Lib/distutils/fancy_getopt.py delete mode 100644 Lib/distutils/file_util.py delete mode 100644 Lib/distutils/filelist.py delete mode 100644 Lib/distutils/log.py delete mode 100644 Lib/distutils/msvc9compiler.py delete mode 100644 Lib/distutils/msvccompiler.py delete mode 100644 Lib/distutils/spawn.py delete mode 100644 Lib/distutils/sysconfig.py delete mode 100644 Lib/distutils/text_file.py delete mode 100644 Lib/distutils/unixccompiler.py delete mode 100644 Lib/distutils/util.py delete mode 100644 Lib/distutils/version.py delete mode 100644 Lib/distutils/versionpredicate.py delete mode 100644 Lib/doctest.py delete mode 100644 Lib/email/__init__.py delete mode 100644 Lib/email/_encoded_words.py delete mode 100644 Lib/email/_header_value_parser.py delete mode 100644 Lib/email/_parseaddr.py delete mode 100644 Lib/email/_policybase.py delete mode 100644 Lib/email/architecture.rst delete mode 100644 Lib/email/base64mime.py delete mode 100644 Lib/email/charset.py delete mode 100644 Lib/email/contentmanager.py delete mode 100644 Lib/email/encoders.py delete mode 100644 Lib/email/errors.py delete mode 100644 Lib/email/feedparser.py delete 
mode 100644 Lib/email/generator.py delete mode 100644 Lib/email/header.py delete mode 100644 Lib/email/headerregistry.py delete mode 100644 Lib/email/iterators.py delete mode 100644 Lib/email/message.py delete mode 100644 Lib/email/mime/__init__.py delete mode 100644 Lib/email/mime/application.py delete mode 100644 Lib/email/mime/audio.py delete mode 100644 Lib/email/mime/base.py delete mode 100644 Lib/email/mime/image.py delete mode 100644 Lib/email/mime/message.py delete mode 100644 Lib/email/mime/multipart.py delete mode 100644 Lib/email/mime/nonmultipart.py delete mode 100644 Lib/email/mime/text.py delete mode 100644 Lib/email/parser.py delete mode 100644 Lib/email/policy.py delete mode 100644 Lib/email/quoprimime.py delete mode 100644 Lib/email/utils.py delete mode 100644 Lib/encodings/__init__.py delete mode 100644 Lib/encodings/aliases.py delete mode 100644 Lib/encodings/ascii.py delete mode 100644 Lib/encodings/base64_codec.py delete mode 100644 Lib/encodings/big5.py delete mode 100644 Lib/encodings/big5hkscs.py delete mode 100644 Lib/encodings/bz2_codec.py delete mode 100644 Lib/encodings/charmap.py delete mode 100644 Lib/encodings/cp037.py delete mode 100644 Lib/encodings/cp1006.py delete mode 100644 Lib/encodings/cp1026.py delete mode 100644 Lib/encodings/cp1125.py delete mode 100644 Lib/encodings/cp1140.py delete mode 100644 Lib/encodings/cp1250.py delete mode 100644 Lib/encodings/cp1251.py delete mode 100644 Lib/encodings/cp1252.py delete mode 100644 Lib/encodings/cp1253.py delete mode 100644 Lib/encodings/cp1254.py delete mode 100644 Lib/encodings/cp1255.py delete mode 100644 Lib/encodings/cp1256.py delete mode 100644 Lib/encodings/cp1257.py delete mode 100644 Lib/encodings/cp1258.py delete mode 100644 Lib/encodings/cp273.py delete mode 100644 Lib/encodings/cp424.py delete mode 100644 Lib/encodings/cp437.py delete mode 100644 Lib/encodings/cp500.py delete mode 100644 Lib/encodings/cp65001.py delete mode 100644 Lib/encodings/cp720.py delete mode 100644 
Lib/encodings/cp737.py delete mode 100644 Lib/encodings/cp775.py delete mode 100644 Lib/encodings/cp850.py delete mode 100644 Lib/encodings/cp852.py delete mode 100644 Lib/encodings/cp855.py delete mode 100644 Lib/encodings/cp856.py delete mode 100644 Lib/encodings/cp857.py delete mode 100644 Lib/encodings/cp858.py delete mode 100644 Lib/encodings/cp860.py delete mode 100644 Lib/encodings/cp861.py delete mode 100644 Lib/encodings/cp862.py delete mode 100644 Lib/encodings/cp863.py delete mode 100644 Lib/encodings/cp864.py delete mode 100644 Lib/encodings/cp865.py delete mode 100644 Lib/encodings/cp866.py delete mode 100644 Lib/encodings/cp869.py delete mode 100644 Lib/encodings/cp874.py delete mode 100644 Lib/encodings/cp875.py delete mode 100644 Lib/encodings/cp932.py delete mode 100644 Lib/encodings/cp949.py delete mode 100644 Lib/encodings/cp950.py delete mode 100644 Lib/encodings/euc_jis_2004.py delete mode 100644 Lib/encodings/euc_jisx0213.py delete mode 100644 Lib/encodings/euc_jp.py delete mode 100644 Lib/encodings/euc_kr.py delete mode 100644 Lib/encodings/gb18030.py delete mode 100644 Lib/encodings/gb2312.py delete mode 100644 Lib/encodings/gbk.py delete mode 100644 Lib/encodings/hex_codec.py delete mode 100644 Lib/encodings/hp_roman8.py delete mode 100644 Lib/encodings/hz.py delete mode 100644 Lib/encodings/idna.py delete mode 100644 Lib/encodings/iso2022_jp.py delete mode 100644 Lib/encodings/iso2022_jp_1.py delete mode 100644 Lib/encodings/iso2022_jp_2.py delete mode 100644 Lib/encodings/iso2022_jp_2004.py delete mode 100644 Lib/encodings/iso2022_jp_3.py delete mode 100644 Lib/encodings/iso2022_jp_ext.py delete mode 100644 Lib/encodings/iso2022_kr.py delete mode 100644 Lib/encodings/iso8859_1.py delete mode 100644 Lib/encodings/iso8859_10.py delete mode 100644 Lib/encodings/iso8859_11.py delete mode 100644 Lib/encodings/iso8859_13.py delete mode 100644 Lib/encodings/iso8859_14.py delete mode 100644 Lib/encodings/iso8859_15.py delete mode 100644 
Lib/encodings/iso8859_16.py delete mode 100644 Lib/encodings/iso8859_2.py delete mode 100644 Lib/encodings/iso8859_3.py delete mode 100644 Lib/encodings/iso8859_4.py delete mode 100644 Lib/encodings/iso8859_5.py delete mode 100644 Lib/encodings/iso8859_6.py delete mode 100644 Lib/encodings/iso8859_7.py delete mode 100644 Lib/encodings/iso8859_8.py delete mode 100644 Lib/encodings/iso8859_9.py delete mode 100644 Lib/encodings/johab.py delete mode 100644 Lib/encodings/koi8_r.py delete mode 100644 Lib/encodings/koi8_t.py delete mode 100644 Lib/encodings/koi8_u.py delete mode 100644 Lib/encodings/kz1048.py delete mode 100644 Lib/encodings/latin_1.py delete mode 100644 Lib/encodings/mac_arabic.py delete mode 100644 Lib/encodings/mac_centeuro.py delete mode 100644 Lib/encodings/mac_croatian.py delete mode 100644 Lib/encodings/mac_cyrillic.py delete mode 100644 Lib/encodings/mac_farsi.py delete mode 100644 Lib/encodings/mac_greek.py delete mode 100644 Lib/encodings/mac_iceland.py delete mode 100644 Lib/encodings/mac_latin2.py delete mode 100644 Lib/encodings/mac_roman.py delete mode 100644 Lib/encodings/mac_romanian.py delete mode 100644 Lib/encodings/mac_turkish.py delete mode 100644 Lib/encodings/mbcs.py delete mode 100644 Lib/encodings/oem.py delete mode 100644 Lib/encodings/palmos.py delete mode 100644 Lib/encodings/ptcp154.py delete mode 100644 Lib/encodings/punycode.py delete mode 100644 Lib/encodings/quopri_codec.py delete mode 100644 Lib/encodings/raw_unicode_escape.py delete mode 100755 Lib/encodings/rot_13.py delete mode 100644 Lib/encodings/shift_jis.py delete mode 100644 Lib/encodings/shift_jis_2004.py delete mode 100644 Lib/encodings/shift_jisx0213.py delete mode 100644 Lib/encodings/tis_620.py delete mode 100644 Lib/encodings/undefined.py delete mode 100644 Lib/encodings/unicode_escape.py delete mode 100644 Lib/encodings/unicode_internal.py delete mode 100644 Lib/encodings/utf_16.py delete mode 100644 Lib/encodings/utf_16_be.py delete mode 100644 
Lib/encodings/utf_16_le.py delete mode 100644 Lib/encodings/utf_32.py delete mode 100644 Lib/encodings/utf_32_be.py delete mode 100644 Lib/encodings/utf_32_le.py delete mode 100644 Lib/encodings/utf_7.py delete mode 100644 Lib/encodings/utf_8_sig.py delete mode 100644 Lib/encodings/uu_codec.py delete mode 100644 Lib/encodings/zlib_codec.py delete mode 100644 Lib/ensurepip/__init__.py delete mode 100644 Lib/ensurepip/__main__.py delete mode 100644 Lib/ensurepip/_bundled/pip-22.3.1-py3-none-any.whl delete mode 100644 Lib/ensurepip/_bundled/setuptools-65.5.0-py3-none-any.whl delete mode 100644 Lib/ensurepip/_uninstall.py create mode 100644 Lib/formatter.py delete mode 100644 Lib/ftplib.py delete mode 100644 Lib/getopt.py delete mode 100644 Lib/getpass.py delete mode 100644 Lib/gzip.py delete mode 100644 Lib/html/__init__.py delete mode 100644 Lib/html/entities.py delete mode 100644 Lib/html/parser.py delete mode 100644 Lib/http/__init__.py delete mode 100644 Lib/http/client.py delete mode 100644 Lib/http/cookiejar.py delete mode 100644 Lib/http/cookies.py delete mode 100644 Lib/http/server.py delete mode 100644 Lib/inspect.py delete mode 100644 Lib/locale.py delete mode 100644 Lib/logging/__init__.py delete mode 100644 Lib/logging/config.py delete mode 100644 Lib/logging/handlers.py delete mode 100644 Lib/mailbox.py delete mode 100644 Lib/multiprocessing/__init__.py delete mode 100644 Lib/multiprocessing/connection.py delete mode 100644 Lib/multiprocessing/context.py delete mode 100644 Lib/multiprocessing/dummy/__init__.py delete mode 100644 Lib/multiprocessing/dummy/connection.py delete mode 100644 Lib/multiprocessing/forkserver.py delete mode 100644 Lib/multiprocessing/heap.py delete mode 100644 Lib/multiprocessing/managers.py delete mode 100644 Lib/multiprocessing/pool.py delete mode 100644 Lib/multiprocessing/popen_fork.py delete mode 100644 Lib/multiprocessing/popen_forkserver.py delete mode 100644 Lib/multiprocessing/popen_spawn_posix.py delete mode 100644 
Lib/multiprocessing/popen_spawn_win32.py delete mode 100644 Lib/multiprocessing/process.py delete mode 100644 Lib/multiprocessing/queues.py delete mode 100644 Lib/multiprocessing/reduction.py delete mode 100644 Lib/multiprocessing/resource_sharer.py delete mode 100644 Lib/multiprocessing/resource_tracker.py delete mode 100644 Lib/multiprocessing/shared_memory.py delete mode 100644 Lib/multiprocessing/sharedctypes.py delete mode 100644 Lib/multiprocessing/spawn.py delete mode 100644 Lib/multiprocessing/synchronize.py delete mode 100644 Lib/multiprocessing/util.py delete mode 100644 Lib/netrc.py delete mode 100644 Lib/nntplib.py delete mode 100644 Lib/optparse.py delete mode 100644 Lib/os.py delete mode 100644 Lib/pathlib.py delete mode 100755 Lib/pdb.py delete mode 100644 Lib/pickle.py delete mode 100644 Lib/pickletools.py delete mode 100644 Lib/pkgutil.py delete mode 100644 Lib/pty.py delete mode 100644 Lib/pydoc.py delete mode 100644 Lib/pydoc_data/__init__.py delete mode 100644 Lib/pydoc_data/_pydoc.css delete mode 100644 Lib/pydoc_data/topics.py delete mode 100644 Lib/shutil.py delete mode 100644 Lib/signal.py delete mode 100755 Lib/smtpd.py delete mode 100644 Lib/socket.py delete mode 100644 Lib/socketserver.py delete mode 100644 Lib/sqlite3/__init__.py delete mode 100644 Lib/sqlite3/__main__.py delete mode 100644 Lib/sqlite3/dbapi2.py delete mode 100644 Lib/sqlite3/dump.py delete mode 100644 Lib/ssl.py delete mode 100644 Lib/subprocess.py delete mode 100755 Lib/tarfile.py delete mode 100644 Lib/telnetlib.py delete mode 100644 Lib/test/__init__.py delete mode 100644 Lib/test/__main__.py delete mode 100644 Lib/test/_test_atexit.py delete mode 100644 Lib/test/_typed_dict_helper.py delete mode 100644 Lib/test/ann_module.py delete mode 100644 Lib/test/ann_module2.py delete mode 100644 Lib/test/ann_module3.py delete mode 100644 Lib/test/ann_module4.py delete mode 100644 Lib/test/ann_module5.py delete mode 100644 Lib/test/ann_module6.py delete mode 100644 
Lib/test/ann_module7.py delete mode 100644 Lib/test/badsyntax_3131.py delete mode 100644 Lib/test/badsyntax_future10.py delete mode 100644 Lib/test/badsyntax_future3.py delete mode 100644 Lib/test/badsyntax_future4.py delete mode 100644 Lib/test/badsyntax_future5.py delete mode 100644 Lib/test/badsyntax_future6.py delete mode 100644 Lib/test/badsyntax_future7.py delete mode 100644 Lib/test/badsyntax_future8.py delete mode 100644 Lib/test/badsyntax_future9.py delete mode 100644 Lib/test/badsyntax_pep3120.py delete mode 100644 Lib/test/cfgparser.1 delete mode 100644 Lib/test/cfgparser.2 delete mode 100644 Lib/test/cfgparser.3 delete mode 100644 Lib/test/cmath_testcases.txt delete mode 100644 Lib/test/dataclass_module_1.py delete mode 100644 Lib/test/dataclass_module_1_str.py delete mode 100644 Lib/test/dataclass_module_2.py delete mode 100644 Lib/test/dataclass_module_2_str.py delete mode 100644 Lib/test/dataclass_textanno.py delete mode 100644 Lib/test/decimaltestdata/abs.decTest delete mode 100644 Lib/test/decimaltestdata/add.decTest delete mode 100644 Lib/test/decimaltestdata/and.decTest delete mode 100644 Lib/test/decimaltestdata/base.decTest delete mode 100644 Lib/test/decimaltestdata/clamp.decTest delete mode 100644 Lib/test/decimaltestdata/class.decTest delete mode 100644 Lib/test/decimaltestdata/compare.decTest delete mode 100644 Lib/test/decimaltestdata/comparetotal.decTest delete mode 100644 Lib/test/decimaltestdata/comparetotmag.decTest delete mode 100644 Lib/test/decimaltestdata/copy.decTest delete mode 100644 Lib/test/decimaltestdata/copyabs.decTest delete mode 100644 Lib/test/decimaltestdata/copynegate.decTest delete mode 100644 Lib/test/decimaltestdata/copysign.decTest delete mode 100644 Lib/test/decimaltestdata/ddAbs.decTest delete mode 100644 Lib/test/decimaltestdata/ddAdd.decTest delete mode 100644 Lib/test/decimaltestdata/ddAnd.decTest delete mode 100644 Lib/test/decimaltestdata/ddBase.decTest delete mode 100644 
Lib/test/decimaltestdata/ddCanonical.decTest delete mode 100644 Lib/test/decimaltestdata/ddClass.decTest delete mode 100644 Lib/test/decimaltestdata/ddCompare.decTest delete mode 100644 Lib/test/decimaltestdata/ddCompareSig.decTest delete mode 100644 Lib/test/decimaltestdata/ddCompareTotal.decTest delete mode 100644 Lib/test/decimaltestdata/ddCompareTotalMag.decTest delete mode 100644 Lib/test/decimaltestdata/ddCopy.decTest delete mode 100644 Lib/test/decimaltestdata/ddCopyAbs.decTest delete mode 100644 Lib/test/decimaltestdata/ddCopyNegate.decTest delete mode 100644 Lib/test/decimaltestdata/ddCopySign.decTest delete mode 100644 Lib/test/decimaltestdata/ddDivide.decTest delete mode 100644 Lib/test/decimaltestdata/ddDivideInt.decTest delete mode 100644 Lib/test/decimaltestdata/ddEncode.decTest delete mode 100644 Lib/test/decimaltestdata/ddFMA.decTest delete mode 100644 Lib/test/decimaltestdata/ddInvert.decTest delete mode 100644 Lib/test/decimaltestdata/ddLogB.decTest delete mode 100644 Lib/test/decimaltestdata/ddMax.decTest delete mode 100644 Lib/test/decimaltestdata/ddMaxMag.decTest delete mode 100644 Lib/test/decimaltestdata/ddMin.decTest delete mode 100644 Lib/test/decimaltestdata/ddMinMag.decTest delete mode 100644 Lib/test/decimaltestdata/ddMinus.decTest delete mode 100644 Lib/test/decimaltestdata/ddMultiply.decTest delete mode 100644 Lib/test/decimaltestdata/ddNextMinus.decTest delete mode 100644 Lib/test/decimaltestdata/ddNextPlus.decTest delete mode 100644 Lib/test/decimaltestdata/ddNextToward.decTest delete mode 100644 Lib/test/decimaltestdata/ddOr.decTest delete mode 100644 Lib/test/decimaltestdata/ddPlus.decTest delete mode 100644 Lib/test/decimaltestdata/ddQuantize.decTest delete mode 100644 Lib/test/decimaltestdata/ddReduce.decTest delete mode 100644 Lib/test/decimaltestdata/ddRemainder.decTest delete mode 100644 Lib/test/decimaltestdata/ddRemainderNear.decTest delete mode 100644 Lib/test/decimaltestdata/ddRotate.decTest delete mode 100644 
Lib/test/decimaltestdata/ddSameQuantum.decTest delete mode 100644 Lib/test/decimaltestdata/ddScaleB.decTest delete mode 100644 Lib/test/decimaltestdata/ddShift.decTest delete mode 100644 Lib/test/decimaltestdata/ddSubtract.decTest delete mode 100644 Lib/test/decimaltestdata/ddToIntegral.decTest delete mode 100644 Lib/test/decimaltestdata/ddXor.decTest delete mode 100644 Lib/test/decimaltestdata/decDouble.decTest delete mode 100644 Lib/test/decimaltestdata/decQuad.decTest delete mode 100644 Lib/test/decimaltestdata/decSingle.decTest delete mode 100644 Lib/test/decimaltestdata/divide.decTest delete mode 100644 Lib/test/decimaltestdata/divideint.decTest delete mode 100644 Lib/test/decimaltestdata/dqAbs.decTest delete mode 100644 Lib/test/decimaltestdata/dqAdd.decTest delete mode 100644 Lib/test/decimaltestdata/dqAnd.decTest delete mode 100644 Lib/test/decimaltestdata/dqBase.decTest delete mode 100644 Lib/test/decimaltestdata/dqCanonical.decTest delete mode 100644 Lib/test/decimaltestdata/dqClass.decTest delete mode 100644 Lib/test/decimaltestdata/dqCompare.decTest delete mode 100644 Lib/test/decimaltestdata/dqCompareSig.decTest delete mode 100644 Lib/test/decimaltestdata/dqCompareTotal.decTest delete mode 100644 Lib/test/decimaltestdata/dqCompareTotalMag.decTest delete mode 100644 Lib/test/decimaltestdata/dqCopy.decTest delete mode 100644 Lib/test/decimaltestdata/dqCopyAbs.decTest delete mode 100644 Lib/test/decimaltestdata/dqCopyNegate.decTest delete mode 100644 Lib/test/decimaltestdata/dqCopySign.decTest delete mode 100644 Lib/test/decimaltestdata/dqDivide.decTest delete mode 100644 Lib/test/decimaltestdata/dqDivideInt.decTest delete mode 100644 Lib/test/decimaltestdata/dqEncode.decTest delete mode 100644 Lib/test/decimaltestdata/dqFMA.decTest delete mode 100644 Lib/test/decimaltestdata/dqInvert.decTest delete mode 100644 Lib/test/decimaltestdata/dqLogB.decTest delete mode 100644 Lib/test/decimaltestdata/dqMax.decTest delete mode 100644 
Lib/test/decimaltestdata/dqMaxMag.decTest delete mode 100644 Lib/test/decimaltestdata/dqMin.decTest delete mode 100644 Lib/test/decimaltestdata/dqMinMag.decTest delete mode 100644 Lib/test/decimaltestdata/dqMinus.decTest delete mode 100644 Lib/test/decimaltestdata/dqMultiply.decTest delete mode 100644 Lib/test/decimaltestdata/dqNextMinus.decTest delete mode 100644 Lib/test/decimaltestdata/dqNextPlus.decTest delete mode 100644 Lib/test/decimaltestdata/dqNextToward.decTest delete mode 100644 Lib/test/decimaltestdata/dqOr.decTest delete mode 100644 Lib/test/decimaltestdata/dqPlus.decTest delete mode 100644 Lib/test/decimaltestdata/dqQuantize.decTest delete mode 100644 Lib/test/decimaltestdata/dqReduce.decTest delete mode 100644 Lib/test/decimaltestdata/dqRemainder.decTest delete mode 100644 Lib/test/decimaltestdata/dqRemainderNear.decTest delete mode 100644 Lib/test/decimaltestdata/dqRotate.decTest delete mode 100644 Lib/test/decimaltestdata/dqSameQuantum.decTest delete mode 100644 Lib/test/decimaltestdata/dqScaleB.decTest delete mode 100644 Lib/test/decimaltestdata/dqShift.decTest delete mode 100644 Lib/test/decimaltestdata/dqSubtract.decTest delete mode 100644 Lib/test/decimaltestdata/dqToIntegral.decTest delete mode 100644 Lib/test/decimaltestdata/dqXor.decTest delete mode 100644 Lib/test/decimaltestdata/dsBase.decTest delete mode 100644 Lib/test/decimaltestdata/dsEncode.decTest delete mode 100644 Lib/test/decimaltestdata/exp.decTest delete mode 100644 Lib/test/decimaltestdata/extra.decTest delete mode 100644 Lib/test/decimaltestdata/fma.decTest delete mode 100644 Lib/test/decimaltestdata/inexact.decTest delete mode 100644 Lib/test/decimaltestdata/invert.decTest delete mode 100644 Lib/test/decimaltestdata/ln.decTest delete mode 100644 Lib/test/decimaltestdata/log10.decTest delete mode 100644 Lib/test/decimaltestdata/logb.decTest delete mode 100644 Lib/test/decimaltestdata/max.decTest delete mode 100644 Lib/test/decimaltestdata/maxmag.decTest delete mode 100644 
Lib/test/decimaltestdata/min.decTest delete mode 100644 Lib/test/decimaltestdata/minmag.decTest delete mode 100644 Lib/test/decimaltestdata/minus.decTest delete mode 100644 Lib/test/decimaltestdata/multiply.decTest delete mode 100644 Lib/test/decimaltestdata/nextminus.decTest delete mode 100644 Lib/test/decimaltestdata/nextplus.decTest delete mode 100644 Lib/test/decimaltestdata/nexttoward.decTest delete mode 100644 Lib/test/decimaltestdata/or.decTest delete mode 100644 Lib/test/decimaltestdata/plus.decTest delete mode 100644 Lib/test/decimaltestdata/power.decTest delete mode 100644 Lib/test/decimaltestdata/powersqrt.decTest delete mode 100644 Lib/test/decimaltestdata/quantize.decTest delete mode 100644 Lib/test/decimaltestdata/randomBound32.decTest delete mode 100644 Lib/test/decimaltestdata/randoms.decTest delete mode 100644 Lib/test/decimaltestdata/reduce.decTest delete mode 100644 Lib/test/decimaltestdata/remainder.decTest delete mode 100644 Lib/test/decimaltestdata/remainderNear.decTest delete mode 100644 Lib/test/decimaltestdata/rescale.decTest delete mode 100644 Lib/test/decimaltestdata/rotate.decTest delete mode 100644 Lib/test/decimaltestdata/rounding.decTest delete mode 100644 Lib/test/decimaltestdata/samequantum.decTest delete mode 100644 Lib/test/decimaltestdata/scaleb.decTest delete mode 100644 Lib/test/decimaltestdata/shift.decTest delete mode 100644 Lib/test/decimaltestdata/squareroot.decTest delete mode 100644 Lib/test/decimaltestdata/subtract.decTest delete mode 100644 Lib/test/decimaltestdata/testall.decTest delete mode 100644 Lib/test/decimaltestdata/tointegral.decTest delete mode 100644 Lib/test/decimaltestdata/tointegralx.decTest delete mode 100644 Lib/test/decimaltestdata/xor.decTest delete mode 100644 Lib/test/double_const.py delete mode 100644 Lib/test/encoded_modules/__init__.py delete mode 100644 Lib/test/encoded_modules/module_iso_8859_1.py delete mode 100644 Lib/test/encoded_modules/module_koi8_r.py delete mode 100644 
Lib/test/exception_hierarchy.txt delete mode 100644 Lib/test/floating_points.txt delete mode 100644 Lib/test/formatfloat_testcases.txt delete mode 100644 Lib/test/future_test1.py delete mode 100644 Lib/test/future_test2.py delete mode 100644 Lib/test/keycert.passwd.pem delete mode 100644 Lib/test/keycert.pem delete mode 100644 Lib/test/keycert2.pem delete mode 100644 Lib/test/keycert3.pem delete mode 100644 Lib/test/keycert4.pem delete mode 100644 Lib/test/keycertecc.pem delete mode 100644 Lib/test/libregrtest/__init__.py delete mode 100644 Lib/test/libregrtest/cmdline.py delete mode 100644 Lib/test/libregrtest/main.py delete mode 100644 Lib/test/libregrtest/refleak.py delete mode 100644 Lib/test/libregrtest/runtest.py delete mode 100644 Lib/test/libregrtest/runtest_mp.py delete mode 100644 Lib/test/libregrtest/save_env.py delete mode 100644 Lib/test/libregrtest/setup.py delete mode 100644 Lib/test/libregrtest/utils.py delete mode 100644 Lib/test/libregrtest/win_utils.py delete mode 100644 Lib/test/list_tests.py delete mode 100644 Lib/test/lock_tests.py delete mode 100644 Lib/test/mapping_tests.py delete mode 100644 Lib/test/math_testcases.txt delete mode 100644 Lib/test/mime.types delete mode 100644 Lib/test/mock_socket.py delete mode 100644 Lib/test/mod_generics_cache.py delete mode 100644 Lib/test/pickletester.py delete mode 100644 Lib/test/randv2_32.pck delete mode 100644 Lib/test/randv2_64.pck delete mode 100644 Lib/test/randv3.pck delete mode 100755 Lib/test/re_tests.py delete mode 100644 Lib/test/recursion.tar delete mode 100755 Lib/test/regrtest.py delete mode 100644 Lib/test/relimport.py delete mode 100644 Lib/test/seq_tests.py delete mode 100644 Lib/test/signalinterproctester.py delete mode 100644 Lib/test/ssl_servers.py delete mode 100644 Lib/test/string_tests.py delete mode 100644 Lib/test/subprocessdata/fd_status.py delete mode 100644 Lib/test/subprocessdata/input_reader.py delete mode 100644 Lib/test/subprocessdata/qcat.py delete mode 100644 
Lib/test/subprocessdata/qgrep.py delete mode 100644 Lib/test/subprocessdata/sigchild_ignore.py delete mode 100644 Lib/test/support/__init__.py delete mode 100644 Lib/test/support/bytecode_helper.py delete mode 100644 Lib/test/support/hashlib_helper.py delete mode 100644 Lib/test/support/import_helper.py delete mode 100644 Lib/test/support/interpreters.py delete mode 100644 Lib/test/support/logging_helper.py delete mode 100644 Lib/test/support/os_helper.py delete mode 100644 Lib/test/support/script_helper.py delete mode 100644 Lib/test/support/socket_helper.py delete mode 100644 Lib/test/support/testresult.py delete mode 100644 Lib/test/support/threading_helper.py delete mode 100644 Lib/test/support/warnings_helper.py delete mode 100644 Lib/test/test___future__.py delete mode 100644 Lib/test/test__osx_support.py delete mode 100644 Lib/test/test_abc.py delete mode 100644 Lib/test/test_abstract_numbers.py delete mode 100644 Lib/test/test_argparse.py delete mode 100644 Lib/test/test_array.py delete mode 100644 Lib/test/test_ast.py delete mode 100644 Lib/test/test_asyncgen.py delete mode 100644 Lib/test/test_asynchat.py delete mode 100644 Lib/test/test_asyncore.py delete mode 100644 Lib/test/test_atexit.py delete mode 100644 Lib/test/test_augassign.py delete mode 100644 Lib/test/test_base64.py delete mode 100644 Lib/test/test_baseexception.py delete mode 100644 Lib/test/test_bdb.py delete mode 100644 Lib/test/test_bigmem.py delete mode 100644 Lib/test/test_binascii.py delete mode 100644 Lib/test/test_binop.py delete mode 100644 Lib/test/test_bisect.py delete mode 100644 Lib/test/test_bool.py delete mode 100644 Lib/test/test_buffer.py delete mode 100644 Lib/test/test_bufio.py delete mode 100644 Lib/test/test_builtin.py delete mode 100644 Lib/test/test_bytes.py delete mode 100644 Lib/test/test_calendar.py delete mode 100644 Lib/test/test_call.py delete mode 100644 Lib/test/test_cgi.py delete mode 100644 Lib/test/test_cgitb.py delete mode 100644 
Lib/test/test_charmapcodec.py delete mode 100644 Lib/test/test_class.py delete mode 100644 Lib/test/test_cmath.py delete mode 100644 Lib/test/test_cmd.py delete mode 100644 Lib/test/test_cmd_line.py delete mode 100644 Lib/test/test_cmd_line_script.py delete mode 100644 Lib/test/test_code.py delete mode 100644 Lib/test/test_code_module.py delete mode 100644 Lib/test/test_codeccallbacks.py delete mode 100644 Lib/test/test_codecs.py delete mode 100644 Lib/test/test_codeop.py delete mode 100644 Lib/test/test_collections.py delete mode 100644 Lib/test/test_colorsys.py delete mode 100644 Lib/test/test_compare.py delete mode 100644 Lib/test/test_compile.py delete mode 100644 Lib/test/test_complex.py delete mode 100644 Lib/test/test_configparser.py delete mode 100644 Lib/test/test_contains.py delete mode 100644 Lib/test/test_context.py delete mode 100644 Lib/test/test_contextlib.py delete mode 100644 Lib/test/test_copy.py delete mode 100644 Lib/test/test_ctypes.py delete mode 100644 Lib/test/test_dataclasses.py delete mode 100644 Lib/test/test_dbm.py delete mode 100644 Lib/test/test_dbm_dumb.py delete mode 100644 Lib/test/test_decimal.py delete mode 100644 Lib/test/test_decorators.py delete mode 100644 Lib/test/test_defaultdict.py delete mode 100644 Lib/test/test_deque.py delete mode 100644 Lib/test/test_descr.py delete mode 100644 Lib/test/test_devpoll.py delete mode 100644 Lib/test/test_dict.py delete mode 100644 Lib/test/test_dictcomps.py delete mode 100644 Lib/test/test_dictviews.py delete mode 100644 Lib/test/test_difflib.py delete mode 100644 Lib/test/test_difflib_expect.html delete mode 100644 Lib/test/test_dis.py delete mode 100644 Lib/test/test_doctest2.py delete mode 100644 Lib/test/test_docxmlrpc.py delete mode 100644 Lib/test/test_dtrace.py delete mode 100644 Lib/test/test_dummy_thread.py delete mode 100644 Lib/test/test_dummy_threading.py delete mode 100644 Lib/test/test_dynamic.py delete mode 100644 Lib/test/test_dynamicclassattribute.py delete mode 100644 
Lib/test/test_eintr.py delete mode 100644 Lib/test/test_ensurepip.py delete mode 100644 Lib/test/test_enum.py delete mode 100644 Lib/test/test_enumerate.py delete mode 100644 Lib/test/test_eof.py delete mode 100644 Lib/test/test_epoll.py delete mode 100644 Lib/test/test_errno.py delete mode 100644 Lib/test/test_exception_group.py delete mode 100644 Lib/test/test_exception_hierarchy.py delete mode 100644 Lib/test/test_exception_variations.py delete mode 100644 Lib/test/test_exceptions.py delete mode 100644 Lib/test/test_faulthandler.py delete mode 100644 Lib/test/test_fcntl.py delete mode 100644 Lib/test/test_file.py delete mode 100644 Lib/test/test_filecmp.py delete mode 100644 Lib/test/test_fileinput.py delete mode 100644 Lib/test/test_fileio.py delete mode 100644 Lib/test/test_float.py delete mode 100644 Lib/test/test_fnmatch.py delete mode 100644 Lib/test/test_format.py delete mode 100644 Lib/test/test_fractions.py delete mode 100644 Lib/test/test_fstring.py delete mode 100644 Lib/test/test_ftplib.py delete mode 100644 Lib/test/test_funcattrs.py delete mode 100644 Lib/test/test_functools.py delete mode 100644 Lib/test/test_future.py delete mode 100644 Lib/test/test_future3.py delete mode 100644 Lib/test/test_future4.py delete mode 100644 Lib/test/test_future5.py delete mode 100644 Lib/test/test_generators.py delete mode 100644 Lib/test/test_genericalias.py delete mode 100644 Lib/test/test_genericclass.py delete mode 100644 Lib/test/test_genericpath.py delete mode 100644 Lib/test/test_getopt.py delete mode 100644 Lib/test/test_getpass.py delete mode 100644 Lib/test/test_glob.py delete mode 100644 Lib/test/test_global.py delete mode 100644 Lib/test/test_grammar.py delete mode 100644 Lib/test/test_graphlib.py delete mode 100644 Lib/test/test_grp.py delete mode 100644 Lib/test/test_gzip.py delete mode 100644 Lib/test/test_hash.py delete mode 100644 Lib/test/test_hashlib.py delete mode 100644 Lib/test/test_heapq.py delete mode 100644 Lib/test/test_hmac.py delete mode 
100644 Lib/test/test_html.py delete mode 100644 Lib/test/test_htmlparser.py delete mode 100644 Lib/test/test_http_cookiejar.py delete mode 100644 Lib/test/test_http_cookies.py delete mode 100644 Lib/test/test_httplib.py delete mode 100644 Lib/test/test_httpservers.py delete mode 100644 Lib/test/test_imp.py delete mode 100644 Lib/test/test_import/__init__.py delete mode 100644 Lib/test/test_import/__main__.py delete mode 100644 Lib/test/test_import/data/circular_imports/basic.py delete mode 100644 Lib/test/test_import/data/circular_imports/basic2.py delete mode 100644 Lib/test/test_import/data/circular_imports/binding.py delete mode 100644 Lib/test/test_import/data/circular_imports/binding2.py delete mode 100644 Lib/test/test_import/data/circular_imports/from_cycle1.py delete mode 100644 Lib/test/test_import/data/circular_imports/from_cycle2.py delete mode 100644 Lib/test/test_import/data/circular_imports/indirect.py delete mode 100644 Lib/test/test_import/data/circular_imports/rebinding.py delete mode 100644 Lib/test/test_import/data/circular_imports/rebinding2.py delete mode 100644 Lib/test/test_import/data/circular_imports/source.py delete mode 100644 Lib/test/test_import/data/circular_imports/subpackage.py delete mode 100644 Lib/test/test_import/data/circular_imports/subpkg/subpackage2.py delete mode 100644 Lib/test/test_import/data/circular_imports/subpkg/util.py delete mode 100644 Lib/test/test_import/data/circular_imports/subpkg2/__init__.py delete mode 100644 Lib/test/test_import/data/circular_imports/subpkg2/parent/__init__.py delete mode 100644 Lib/test/test_import/data/circular_imports/subpkg2/parent/child.py delete mode 100644 Lib/test/test_import/data/circular_imports/use.py delete mode 100644 Lib/test/test_import/data/circular_imports/util.py delete mode 100644 Lib/test/test_import/data/package/__init__.py delete mode 100644 Lib/test/test_import/data/package/submodule.py delete mode 100644 Lib/test/test_import/data/package2/submodule1.py delete mode 
100644 Lib/test/test_import/data/package2/submodule2.py delete mode 100644 Lib/test/test_import/data/unwritable/__init__.py delete mode 100644 Lib/test/test_import/data/unwritable/x.py delete mode 100644 Lib/test/test_importlib/__init__.py delete mode 100644 Lib/test/test_importlib/__main__.py delete mode 100644 Lib/test/test_importlib/abc.py delete mode 100644 Lib/test/test_importlib/builtin/__init__.py delete mode 100644 Lib/test/test_importlib/builtin/__main__.py delete mode 100644 Lib/test/test_importlib/builtin/test_finder.py delete mode 100644 Lib/test/test_importlib/builtin/test_loader.py delete mode 100644 Lib/test/test_importlib/data/__init__.py delete mode 100644 Lib/test/test_importlib/data/example-21.12-py3-none-any.whl delete mode 100644 Lib/test/test_importlib/data/example-21.12-py3.6.egg delete mode 100644 Lib/test/test_importlib/data/example2-1.0.0-py3-none-any.whl delete mode 100644 Lib/test/test_importlib/data01/__init__.py delete mode 100644 Lib/test/test_importlib/data01/binary.file delete mode 100644 Lib/test/test_importlib/data01/subdirectory/__init__.py delete mode 100644 Lib/test/test_importlib/data01/subdirectory/binary.file delete mode 100644 Lib/test/test_importlib/data01/utf-16.file delete mode 100644 Lib/test/test_importlib/data01/utf-8.file delete mode 100644 Lib/test/test_importlib/data02/__init__.py delete mode 100644 Lib/test/test_importlib/data02/one/__init__.py delete mode 100644 Lib/test/test_importlib/data02/one/resource1.txt delete mode 100644 Lib/test/test_importlib/data02/two/__init__.py delete mode 100644 Lib/test/test_importlib/data02/two/resource2.txt delete mode 100644 Lib/test/test_importlib/data03/__init__.py delete mode 100644 Lib/test/test_importlib/data03/namespace/portion1/__init__.py delete mode 100644 Lib/test/test_importlib/data03/namespace/portion2/__init__.py delete mode 100644 Lib/test/test_importlib/data03/namespace/resource1.txt delete mode 100644 Lib/test/test_importlib/extension/__init__.py delete mode 
100644 Lib/test/test_importlib/extension/__main__.py delete mode 100644 Lib/test/test_importlib/extension/test_case_sensitivity.py delete mode 100644 Lib/test/test_importlib/extension/test_finder.py delete mode 100644 Lib/test/test_importlib/extension/test_loader.py delete mode 100644 Lib/test/test_importlib/extension/test_path_hook.py delete mode 100644 Lib/test/test_importlib/fixtures.py delete mode 100644 Lib/test/test_importlib/frozen/__init__.py delete mode 100644 Lib/test/test_importlib/frozen/__main__.py delete mode 100644 Lib/test/test_importlib/frozen/test_finder.py delete mode 100644 Lib/test/test_importlib/frozen/test_loader.py delete mode 100644 Lib/test/test_importlib/import_/__init__.py delete mode 100644 Lib/test/test_importlib/import_/__main__.py delete mode 100644 Lib/test/test_importlib/import_/test___loader__.py delete mode 100644 Lib/test/test_importlib/import_/test___package__.py delete mode 100644 Lib/test/test_importlib/import_/test_api.py delete mode 100644 Lib/test/test_importlib/import_/test_caching.py delete mode 100644 Lib/test/test_importlib/import_/test_fromlist.py delete mode 100644 Lib/test/test_importlib/import_/test_meta_path.py delete mode 100644 Lib/test/test_importlib/import_/test_packages.py delete mode 100644 Lib/test/test_importlib/import_/test_path.py delete mode 100644 Lib/test/test_importlib/import_/test_relative_imports.py delete mode 100644 Lib/test/test_importlib/namespace_pkgs/both_portions/foo/one.py delete mode 100644 Lib/test/test_importlib/namespace_pkgs/both_portions/foo/two.py delete mode 100644 Lib/test/test_importlib/namespace_pkgs/missing_directory.zip delete mode 100644 Lib/test/test_importlib/namespace_pkgs/module_and_namespace_package/a_test.py delete mode 100644 Lib/test/test_importlib/namespace_pkgs/module_and_namespace_package/a_test/empty delete mode 100644 Lib/test/test_importlib/namespace_pkgs/nested_portion1.zip delete mode 100644 
Lib/test/test_importlib/namespace_pkgs/not_a_namespace_pkg/foo/__init__.py delete mode 100644 Lib/test/test_importlib/namespace_pkgs/not_a_namespace_pkg/foo/one.py delete mode 100644 Lib/test/test_importlib/namespace_pkgs/portion1/foo/one.py delete mode 100644 Lib/test/test_importlib/namespace_pkgs/portion2/foo/two.py delete mode 100644 Lib/test/test_importlib/namespace_pkgs/project1/parent/child/one.py delete mode 100644 Lib/test/test_importlib/namespace_pkgs/project2/parent/child/two.py delete mode 100644 Lib/test/test_importlib/namespace_pkgs/project3/parent/child/three.py delete mode 100644 Lib/test/test_importlib/namespace_pkgs/top_level_portion1.zip delete mode 100644 Lib/test/test_importlib/namespacedata01/binary.file delete mode 100644 Lib/test/test_importlib/namespacedata01/utf-16.file delete mode 100644 Lib/test/test_importlib/namespacedata01/utf-8.file delete mode 100644 Lib/test/test_importlib/partial/cfimport.py delete mode 100644 Lib/test/test_importlib/partial/pool_in_threads.py delete mode 100644 Lib/test/test_importlib/resources/__init__.py delete mode 100644 Lib/test/test_importlib/resources/util.py delete mode 100644 Lib/test/test_importlib/source/__init__.py delete mode 100644 Lib/test/test_importlib/source/__main__.py delete mode 100644 Lib/test/test_importlib/source/test_case_sensitivity.py delete mode 100644 Lib/test/test_importlib/source/test_file_loader.py delete mode 100644 Lib/test/test_importlib/source/test_finder.py delete mode 100644 Lib/test/test_importlib/source/test_path_hook.py delete mode 100644 Lib/test/test_importlib/source/test_source_encoding.py delete mode 100644 Lib/test/test_importlib/stubs.py delete mode 100644 Lib/test/test_importlib/test_abc.py delete mode 100644 Lib/test/test_importlib/test_api.py delete mode 100644 Lib/test/test_importlib/test_compatibilty_files.py delete mode 100644 Lib/test/test_importlib/test_contents.py delete mode 100644 Lib/test/test_importlib/test_files.py delete mode 100644 
Lib/test/test_importlib/test_lazy.py delete mode 100644 Lib/test/test_importlib/test_locks.py delete mode 100644 Lib/test/test_importlib/test_main.py delete mode 100644 Lib/test/test_importlib/test_metadata_api.py delete mode 100644 Lib/test/test_importlib/test_namespace_pkgs.py delete mode 100644 Lib/test/test_importlib/test_open.py delete mode 100644 Lib/test/test_importlib/test_path.py delete mode 100644 Lib/test/test_importlib/test_pkg_import.py delete mode 100644 Lib/test/test_importlib/test_read.py delete mode 100644 Lib/test/test_importlib/test_reader.py delete mode 100644 Lib/test/test_importlib/test_resource.py delete mode 100644 Lib/test/test_importlib/test_spec.py delete mode 100644 Lib/test/test_importlib/test_threaded_import.py delete mode 100644 Lib/test/test_importlib/test_util.py delete mode 100644 Lib/test/test_importlib/test_windows.py delete mode 100644 Lib/test/test_importlib/test_zip.py delete mode 100644 Lib/test/test_importlib/threaded_import_hangers.py delete mode 100755 Lib/test/test_importlib/update-zips.py delete mode 100644 Lib/test/test_importlib/util.py delete mode 100644 Lib/test/test_importlib/zipdata01/__init__.py delete mode 100644 Lib/test/test_importlib/zipdata01/ziptestdata.zip delete mode 100644 Lib/test/test_importlib/zipdata02/__init__.py delete mode 100644 Lib/test/test_importlib/zipdata02/ziptestdata.zip delete mode 100644 Lib/test/test_index.py delete mode 100644 Lib/test/test_int.py delete mode 100644 Lib/test/test_int_literal.py delete mode 100644 Lib/test/test_io.py delete mode 100644 Lib/test/test_ioctl.py delete mode 100644 Lib/test/test_ipaddress.py delete mode 100644 Lib/test/test_isinstance.py delete mode 100644 Lib/test/test_iter.py delete mode 100644 Lib/test/test_iterlen.py delete mode 100644 Lib/test/test_itertools.py delete mode 100644 Lib/test/test_json/__init__.py delete mode 100644 Lib/test/test_json/__main__.py delete mode 100644 Lib/test/test_json/test_decode.py delete mode 100644 
Lib/test/test_json/test_default.py delete mode 100644 Lib/test/test_json/test_dump.py delete mode 100644 Lib/test/test_json/test_encode_basestring_ascii.py delete mode 100644 Lib/test/test_json/test_enum.py delete mode 100644 Lib/test/test_json/test_fail.py delete mode 100644 Lib/test/test_json/test_float.py delete mode 100644 Lib/test/test_json/test_indent.py delete mode 100644 Lib/test/test_json/test_pass1.py delete mode 100644 Lib/test/test_json/test_pass2.py delete mode 100644 Lib/test/test_json/test_pass3.py delete mode 100644 Lib/test/test_json/test_recursion.py delete mode 100644 Lib/test/test_json/test_scanstring.py delete mode 100644 Lib/test/test_json/test_separators.py delete mode 100644 Lib/test/test_json/test_speedups.py delete mode 100644 Lib/test/test_json/test_tool.py delete mode 100644 Lib/test/test_json/test_unicode.py delete mode 100644 Lib/test/test_keyword.py delete mode 100644 Lib/test/test_keywordonlyarg.py delete mode 100644 Lib/test/test_kqueue.py delete mode 100644 Lib/test/test_largefile.py delete mode 100644 Lib/test/test_linecache.py delete mode 100644 Lib/test/test_list.py delete mode 100644 Lib/test/test_listcomps.py delete mode 100644 Lib/test/test_locale.py delete mode 100644 Lib/test/test_long.py delete mode 100644 Lib/test/test_longexp.py delete mode 100644 Lib/test/test_marshal.py delete mode 100644 Lib/test/test_math.py delete mode 100644 Lib/test/test_memoryio.py delete mode 100644 Lib/test/test_memoryview.py delete mode 100644 Lib/test/test_mimetypes.py delete mode 100644 Lib/test/test_mmap.py delete mode 100644 Lib/test/test_module.py delete mode 100644 Lib/test/test_named_expressions.py delete mode 100644 Lib/test/test_netrc.py delete mode 100644 Lib/test/test_ntpath.py delete mode 100644 Lib/test/test_numeric_tower.py delete mode 100644 Lib/test/test_opcache.py delete mode 100644 Lib/test/test_opcode.py delete mode 100644 Lib/test/test_openpty.py delete mode 100644 Lib/test/test_operator.py delete mode 100644 
Lib/test/test_optparse.py delete mode 100644 Lib/test/test_ordered_dict.py delete mode 100644 Lib/test/test_os.py delete mode 100644 Lib/test/test_pathlib.py delete mode 100644 Lib/test/test_pickle.py delete mode 100644 Lib/test/test_pickletools.py delete mode 100644 Lib/test/test_pkg.py delete mode 100644 Lib/test/test_pkgutil.py delete mode 100644 Lib/test/test_platform.py delete mode 100644 Lib/test/test_plistlib.py delete mode 100644 Lib/test/test_poll.py delete mode 100644 Lib/test/test_popen.py delete mode 100644 Lib/test/test_positional_only_arg.py delete mode 100644 Lib/test/test_posix.py delete mode 100644 Lib/test/test_posixpath.py delete mode 100644 Lib/test/test_pow.py delete mode 100644 Lib/test/test_pprint.py delete mode 100644 Lib/test/test_print.py delete mode 100644 Lib/test/test_property.py delete mode 100644 Lib/test/test_pty.py delete mode 100644 Lib/test/test_pulldom.py delete mode 100644 Lib/test/test_pwd.py delete mode 100644 Lib/test/test_py_compile.py delete mode 100644 Lib/test/test_pyexpat.py delete mode 100644 Lib/test/test_queue.py delete mode 100644 Lib/test/test_quopri.py delete mode 100644 Lib/test/test_raise.py delete mode 100644 Lib/test/test_random.py delete mode 100644 Lib/test/test_range.py delete mode 100644 Lib/test/test_re.py delete mode 100644 Lib/test/test_regrtest.py delete mode 100644 Lib/test/test_repl.py delete mode 100644 Lib/test/test_reprlib.py delete mode 100644 Lib/test/test_resource.py delete mode 100644 Lib/test/test_richcmp.py delete mode 100644 Lib/test/test_rlcompleter.py delete mode 100644 Lib/test/test_robotparser.py delete mode 100644 Lib/test/test_runpy.py delete mode 100644 Lib/test/test_sched.py delete mode 100644 Lib/test/test_scope.py delete mode 100644 Lib/test/test_script_helper.py delete mode 100644 Lib/test/test_secrets.py delete mode 100644 Lib/test/test_selectors.py delete mode 100644 Lib/test/test_set.py delete mode 100644 Lib/test/test_setcomps.py delete mode 100644 Lib/test/test_shelve.py 
delete mode 100644 Lib/test/test_shlex.py delete mode 100644 Lib/test/test_shutil.py delete mode 100644 Lib/test/test_signal.py delete mode 100644 Lib/test/test_site.py delete mode 100644 Lib/test/test_slice.py delete mode 100644 Lib/test/test_smtpd.py delete mode 100644 Lib/test/test_socket.py delete mode 100644 Lib/test/test_socketserver.py delete mode 100644 Lib/test/test_sort.py delete mode 100644 Lib/test/test_sqlite3/__init__.py delete mode 100644 Lib/test/test_sqlite3/__main__.py delete mode 100644 Lib/test/test_sqlite3/test_backup.py delete mode 100644 Lib/test/test_sqlite3/test_cli.py delete mode 100644 Lib/test/test_sqlite3/test_dbapi.py delete mode 100644 Lib/test/test_sqlite3/test_dump.py delete mode 100644 Lib/test/test_sqlite3/test_factory.py delete mode 100644 Lib/test/test_sqlite3/test_hooks.py delete mode 100644 Lib/test/test_sqlite3/test_regression.py delete mode 100644 Lib/test/test_sqlite3/test_transactions.py delete mode 100644 Lib/test/test_sqlite3/test_types.py delete mode 100644 Lib/test/test_sqlite3/test_userfunctions.py delete mode 100644 Lib/test/test_stat.py delete mode 100644 Lib/test/test_statistics.py delete mode 100644 Lib/test/test_strftime.py delete mode 100644 Lib/test/test_string.py delete mode 100644 Lib/test/test_string_literals.py delete mode 100644 Lib/test/test_stringprep.py delete mode 100644 Lib/test/test_strtod.py delete mode 100644 Lib/test/test_struct.py delete mode 100644 Lib/test/test_structseq.py delete mode 100644 Lib/test/test_subclassinit.py delete mode 100644 Lib/test/test_subprocess.py delete mode 100644 Lib/test/test_sundry.py delete mode 100644 Lib/test/test_super.py delete mode 100644 Lib/test/test_support.py delete mode 100644 Lib/test/test_symtable.py delete mode 100644 Lib/test/test_syntax.py delete mode 100644 Lib/test/test_sys.py delete mode 100644 Lib/test/test_sys_setprofile.py delete mode 100644 Lib/test/test_sys_settrace.py delete mode 100644 Lib/test/test_sysconfig.py delete mode 100644 
Lib/test/test_syslog.py delete mode 100644 Lib/test/test_tabnanny.py delete mode 100644 Lib/test/test_tarfile.py delete mode 100644 Lib/test/test_telnetlib.py delete mode 100644 Lib/test/test_tempfile.py delete mode 100644 Lib/test/test_textwrap.py delete mode 100644 Lib/test/test_thread.py delete mode 100644 Lib/test/test_threadedtempfile.py delete mode 100644 Lib/test/test_threading.py delete mode 100644 Lib/test/test_threading_local.py delete mode 100644 Lib/test/test_time.py delete mode 100644 Lib/test/test_timeit.py delete mode 100644 Lib/test/test_timeout.py delete mode 100644 Lib/test/test_tokenize.py delete mode 100644 Lib/test/test_tomllib/__init__.py delete mode 100644 Lib/test/test_tomllib/__main__.py delete mode 100644 Lib/test/test_tomllib/burntsushi.py delete mode 100644 Lib/test/test_tomllib/data/invalid/array-missing-comma.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/array-of-tables/overwrite-array-in-parent.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/array-of-tables/overwrite-bool-with-aot.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/array/file-end-after-val.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/array/unclosed-after-item.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/array/unclosed-empty.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/basic-str-ends-in-escape.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/boolean/invalid-false-casing.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/boolean/invalid-true-casing.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/dates-and-times/invalid-day.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/dotted-keys/access-non-table.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/dotted-keys/extend-defined-aot.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/dotted-keys/extend-defined-table-with-subtable.toml delete mode 100644 
Lib/test/test_tomllib/data/invalid/dotted-keys/extend-defined-table.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/inline-table-missing-comma.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/inline-table/define-twice-in-subtable.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/inline-table/define-twice.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/inline-table/file-end-after-key-val.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/inline-table/mutate.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/inline-table/override-val-in-table.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/inline-table/override-val-with-array.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/inline-table/override-val-with-table.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/inline-table/overwrite-implicitly.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/inline-table/overwrite-value-in-inner-array.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/inline-table/overwrite-value-in-inner-table.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/inline-table/unclosed-empty.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/invalid-comment-char.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/invalid-escaped-unicode.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/invalid-hex.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/keys-and-vals/ends-early-table-def.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/keys-and-vals/ends-early.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/keys-and-vals/no-value.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/keys-and-vals/only-ws-after-dot.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/keys-and-vals/overwrite-with-deep-table.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/literal-str/unclosed.toml delete mode 100644 
Lib/test/test_tomllib/data/invalid/missing-closing-double-square-bracket.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/missing-closing-square-bracket.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/multiline-basic-str/carriage-return.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/multiline-basic-str/escape-only.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/multiline-basic-str/file-ends-after-opening.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/multiline-basic-str/last-line-escape.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/multiline-basic-str/unclosed-ends-in-whitespace-escape.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/multiline-literal-str/file-ends-after-opening.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/multiline-literal-str/unclosed.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/non-scalar-escaped.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/table/eof-after-opening.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/table/redefine-1.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/table/redefine-2.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/unclosed-multiline-string.toml delete mode 100644 Lib/test/test_tomllib/data/invalid/unclosed-string.toml delete mode 100644 Lib/test/test_tomllib/data/valid/apostrophes-in-literal-string.json delete mode 100644 Lib/test/test_tomllib/data/valid/apostrophes-in-literal-string.toml delete mode 100644 Lib/test/test_tomllib/data/valid/array/array-subtables.json delete mode 100644 Lib/test/test_tomllib/data/valid/array/array-subtables.toml delete mode 100644 Lib/test/test_tomllib/data/valid/array/open-parent-table.json delete mode 100644 Lib/test/test_tomllib/data/valid/array/open-parent-table.toml delete mode 100644 Lib/test/test_tomllib/data/valid/boolean.json delete mode 100644 Lib/test/test_tomllib/data/valid/boolean.toml delete mode 100644 
Lib/test/test_tomllib/data/valid/dates-and-times/datetimes.json delete mode 100644 Lib/test/test_tomllib/data/valid/dates-and-times/datetimes.toml delete mode 100644 Lib/test/test_tomllib/data/valid/dates-and-times/localtime.json delete mode 100644 Lib/test/test_tomllib/data/valid/dates-and-times/localtime.toml delete mode 100644 Lib/test/test_tomllib/data/valid/empty-inline-table.json delete mode 100644 Lib/test/test_tomllib/data/valid/empty-inline-table.toml delete mode 100644 Lib/test/test_tomllib/data/valid/five-quotes.json delete mode 100644 Lib/test/test_tomllib/data/valid/five-quotes.toml delete mode 100644 Lib/test/test_tomllib/data/valid/hex-char.json delete mode 100644 Lib/test/test_tomllib/data/valid/hex-char.toml delete mode 100644 Lib/test/test_tomllib/data/valid/multiline-basic-str/ends-in-whitespace-escape.json delete mode 100644 Lib/test/test_tomllib/data/valid/multiline-basic-str/ends-in-whitespace-escape.toml delete mode 100644 Lib/test/test_tomllib/data/valid/no-newlines.json delete mode 100644 Lib/test/test_tomllib/data/valid/no-newlines.toml delete mode 100644 Lib/test/test_tomllib/data/valid/trailing-comma.json delete mode 100644 Lib/test/test_tomllib/data/valid/trailing-comma.toml delete mode 100644 Lib/test/test_tomllib/test_data.py delete mode 100644 Lib/test/test_tomllib/test_error.py delete mode 100644 Lib/test/test_tomllib/test_misc.py delete mode 100644 Lib/test/test_trace.py delete mode 100644 Lib/test/test_traceback.py delete mode 100644 Lib/test/test_tuple.py delete mode 100644 Lib/test/test_type_comments.py delete mode 100644 Lib/test/test_typechecks.py delete mode 100644 Lib/test/test_types.py delete mode 100644 Lib/test/test_typing.py delete mode 100644 Lib/test/test_ucn.py delete mode 100644 Lib/test/test_unary.py delete mode 100644 Lib/test/test_unicode.py delete mode 100644 Lib/test/test_unicode_file.py delete mode 100644 Lib/test/test_unicode_file_functions.py delete mode 100644 Lib/test/test_unicode_identifiers.py delete mode 
100644 Lib/test/test_unicodedata.py delete mode 100644 Lib/test/test_unittest.py delete mode 100644 Lib/test/test_univnewlines.py delete mode 100644 Lib/test/test_unpack.py delete mode 100644 Lib/test/test_urllib.py delete mode 100644 Lib/test/test_urllib2.py delete mode 100644 Lib/test/test_urllib2_localnet.py delete mode 100644 Lib/test/test_urllib2net.py delete mode 100644 Lib/test/test_urllib_response.py delete mode 100644 Lib/test/test_urllibnet.py delete mode 100644 Lib/test/test_urlparse.py delete mode 100644 Lib/test/test_userdict.py delete mode 100644 Lib/test/test_userlist.py delete mode 100644 Lib/test/test_userstring.py delete mode 100644 Lib/test/test_utf8_mode.py delete mode 100644 Lib/test/test_utf8source.py delete mode 100644 Lib/test/test_uu.py delete mode 100644 Lib/test/test_uuid.py delete mode 100644 Lib/test/test_venv.py delete mode 100644 Lib/test/test_weakref.py delete mode 100644 Lib/test/test_weakset.py delete mode 100644 Lib/test/test_webbrowser.py delete mode 100644 Lib/test/test_with.py delete mode 100644 Lib/test/test_wsgiref.py delete mode 100644 Lib/test/test_xdrlib.py delete mode 100644 Lib/test/test_xml_dom_minicompat.py delete mode 100644 Lib/test/test_xml_etree.py delete mode 100644 Lib/test/test_xmlrpc.py delete mode 100644 Lib/test/test_yield_from.py delete mode 100644 Lib/test/test_zipapp.py delete mode 100644 Lib/test/test_zipfile.py delete mode 100644 Lib/test/test_zipfile64.py delete mode 100644 Lib/test/test_zipimport.py delete mode 100644 Lib/test/test_zlib.py delete mode 100644 Lib/test/testcodec.py delete mode 100644 Lib/test/testtar.tar delete mode 100644 Lib/test/tf_inherit_check.py delete mode 100644 Lib/test/tokenize_tests-latin1-coding-cookie-and-utf8-bom-sig.txt delete mode 100644 Lib/test/tokenize_tests-no-coding-cookie-and-utf8-bom-sig-only.txt delete mode 100644 Lib/test/tokenize_tests-utf8-coding-cookie-and-no-utf8-bom-sig.txt delete mode 100644 Lib/test/tokenize_tests-utf8-coding-cookie-and-utf8-bom-sig.txt 
delete mode 100644 Lib/test/tokenize_tests.txt delete mode 100644 Lib/test/tracedmodules/__init__.py delete mode 100644 Lib/test/tracedmodules/testmod.py delete mode 100644 Lib/test/xmltestdata/c14n-20/README delete mode 100644 Lib/test/xmltestdata/c14n-20/c14nComment.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/c14nDefault.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/c14nPrefix.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/c14nPrefixQname.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/c14nPrefixQnameXpathElem.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/c14nQname.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/c14nQnameElem.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/c14nQnameXpathElem.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/c14nTrim.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/doc.dtd delete mode 100644 Lib/test/xmltestdata/c14n-20/doc.xsl delete mode 100644 Lib/test/xmltestdata/c14n-20/inC14N1.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/inC14N2.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/inC14N3.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/inC14N4.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/inC14N5.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/inC14N6.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/inNsContent.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/inNsDefault.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/inNsPushdown.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/inNsRedecl.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/inNsSort.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/inNsSuperfluous.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/inNsXml.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/out_inC14N1_c14nComment.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/out_inC14N1_c14nDefault.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/out_inC14N2_c14nDefault.xml delete mode 100644 
Lib/test/xmltestdata/c14n-20/out_inC14N2_c14nTrim.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/out_inC14N3_c14nDefault.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/out_inC14N3_c14nPrefix.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/out_inC14N3_c14nTrim.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/out_inC14N4_c14nDefault.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/out_inC14N4_c14nTrim.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/out_inC14N5_c14nDefault.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/out_inC14N5_c14nTrim.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/out_inC14N6_c14nDefault.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/out_inNsContent_c14nDefault.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/out_inNsContent_c14nPrefixQnameXpathElem.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/out_inNsContent_c14nQnameElem.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/out_inNsContent_c14nQnameXpathElem.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/out_inNsDefault_c14nDefault.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/out_inNsDefault_c14nPrefix.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/out_inNsPushdown_c14nDefault.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/out_inNsPushdown_c14nPrefix.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/out_inNsRedecl_c14nDefault.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/out_inNsRedecl_c14nPrefix.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/out_inNsSort_c14nDefault.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/out_inNsSort_c14nPrefix.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/out_inNsSuperfluous_c14nDefault.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/out_inNsSuperfluous_c14nPrefix.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/out_inNsXml_c14nDefault.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/out_inNsXml_c14nPrefix.xml delete mode 100644 
Lib/test/xmltestdata/c14n-20/out_inNsXml_c14nPrefixQname.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/out_inNsXml_c14nQname.xml delete mode 100644 Lib/test/xmltestdata/c14n-20/world.txt delete mode 100644 Lib/test/xmltestdata/expat224_utf8_bug.xml delete mode 100644 Lib/test/xmltestdata/simple-ns.xml delete mode 100644 Lib/test/xmltestdata/simple.xml delete mode 100644 Lib/test/xmltestdata/test.xml delete mode 100644 Lib/test/xmltestdata/test.xml.out delete mode 100644 Lib/test/xmltests.py delete mode 100644 Lib/test/zip_cp437_header.zip delete mode 100644 Lib/test/zipdir.zip delete mode 100644 Lib/test/ziptestdata/README.md delete mode 100755 Lib/test/ziptestdata/exe_with_z64 delete mode 100755 Lib/test/ziptestdata/exe_with_zip delete mode 100755 Lib/test/ziptestdata/header.sh delete mode 100644 Lib/test/ziptestdata/testdata_module_inside_zip.py delete mode 100644 Lib/threading.py delete mode 100644 Lib/tomllib/__init__.py delete mode 100644 Lib/tomllib/_parser.py delete mode 100644 Lib/tomllib/_re.py delete mode 100644 Lib/tomllib/_types.py delete mode 100644 Lib/unittest/__init__.py delete mode 100644 Lib/unittest/__main__.py delete mode 100644 Lib/unittest/_log.py delete mode 100644 Lib/unittest/async_case.py delete mode 100644 Lib/unittest/case.py delete mode 100644 Lib/unittest/loader.py delete mode 100644 Lib/unittest/main.py delete mode 100644 Lib/unittest/mock.py delete mode 100644 Lib/unittest/result.py delete mode 100644 Lib/unittest/runner.py delete mode 100644 Lib/unittest/signals.py delete mode 100644 Lib/unittest/suite.py delete mode 100644 Lib/unittest/test/__init__.py delete mode 100644 Lib/unittest/test/__main__.py delete mode 100644 Lib/unittest/test/_test_warnings.py delete mode 100644 Lib/unittest/test/dummy.py delete mode 100644 Lib/unittest/test/support.py delete mode 100644 Lib/unittest/test/test_assertions.py delete mode 100644 Lib/unittest/test/test_async_case.py delete mode 100644 Lib/unittest/test/test_break.py delete mode 100644 
Lib/unittest/test/test_case.py delete mode 100644 Lib/unittest/test/test_discovery.py delete mode 100644 Lib/unittest/test/test_functiontestcase.py delete mode 100644 Lib/unittest/test/test_loader.py delete mode 100644 Lib/unittest/test/test_program.py delete mode 100644 Lib/unittest/test/test_result.py delete mode 100644 Lib/unittest/test/test_runner.py delete mode 100644 Lib/unittest/test/test_setups.py delete mode 100644 Lib/unittest/test/test_skipping.py delete mode 100644 Lib/unittest/test/test_suite.py delete mode 100644 Lib/unittest/test/testmock/__init__.py delete mode 100644 Lib/unittest/test/testmock/__main__.py delete mode 100644 Lib/unittest/test/testmock/support.py delete mode 100644 Lib/unittest/test/testmock/testasync.py delete mode 100644 Lib/unittest/test/testmock/testcallable.py delete mode 100644 Lib/unittest/test/testmock/testhelpers.py delete mode 100644 Lib/unittest/test/testmock/testmagicmethods.py delete mode 100644 Lib/unittest/test/testmock/testmock.py delete mode 100644 Lib/unittest/test/testmock/testpatch.py delete mode 100644 Lib/unittest/test/testmock/testsealable.py delete mode 100644 Lib/unittest/test/testmock/testsentinel.py delete mode 100644 Lib/unittest/test/testmock/testwith.py delete mode 100644 Lib/unittest/util.py delete mode 100644 Lib/urllib/__init__.py delete mode 100644 Lib/urllib/error.py delete mode 100644 Lib/urllib/parse.py delete mode 100644 Lib/urllib/request.py delete mode 100644 Lib/urllib/response.py delete mode 100644 Lib/urllib/robotparser.py delete mode 100644 Lib/venv/__init__.py delete mode 100644 Lib/venv/__main__.py delete mode 100644 Lib/venv/scripts/common/Activate.ps1 delete mode 100644 Lib/venv/scripts/common/activate delete mode 100644 Lib/venv/scripts/nt/activate.bat delete mode 100644 Lib/venv/scripts/nt/deactivate.bat delete mode 100644 Lib/venv/scripts/posix/activate.csh delete mode 100644 Lib/venv/scripts/posix/activate.fish delete mode 100755 Lib/webbrowser.py delete mode 100644 
Lib/wsgiref/__init__.py delete mode 100644 Lib/wsgiref/handlers.py delete mode 100644 Lib/wsgiref/headers.py delete mode 100644 Lib/wsgiref/simple_server.py delete mode 100644 Lib/wsgiref/util.py delete mode 100644 Lib/wsgiref/validate.py delete mode 100644 Lib/xml/__init__.py delete mode 100644 Lib/xml/dom/NodeFilter.py delete mode 100644 Lib/xml/dom/__init__.py delete mode 100644 Lib/xml/dom/domreg.py delete mode 100644 Lib/xml/dom/expatbuilder.py delete mode 100644 Lib/xml/dom/minicompat.py delete mode 100644 Lib/xml/dom/minidom.py delete mode 100644 Lib/xml/dom/pulldom.py delete mode 100644 Lib/xml/dom/xmlbuilder.py delete mode 100644 Lib/xml/etree/ElementInclude.py delete mode 100644 Lib/xml/etree/ElementPath.py delete mode 100644 Lib/xml/etree/ElementTree.py delete mode 100644 Lib/xml/etree/__init__.py delete mode 100644 Lib/xml/etree/cElementTree.py delete mode 100644 Lib/xml/parsers/__init__.py delete mode 100644 Lib/xml/parsers/expat.py delete mode 100644 Lib/xml/sax/__init__.py delete mode 100644 Lib/xml/sax/_exceptions.py delete mode 100644 Lib/xml/sax/expatreader.py delete mode 100644 Lib/xml/sax/handler.py delete mode 100644 Lib/xml/sax/saxutils.py delete mode 100644 Lib/xml/sax/xmlreader.py delete mode 100644 Lib/xmlrpc/__init__.py delete mode 100644 Lib/xmlrpc/client.py delete mode 100644 Lib/xmlrpc/server.py delete mode 100644 Lib/zipapp.py delete mode 100644 Lib/zipfile.py delete mode 100644 Lib/zipimport.py diff --git a/Lib/PSF-LICENSE b/Lib/PSF-LICENSE deleted file mode 100644 index 1afbedba92b..00000000000 --- a/Lib/PSF-LICENSE +++ /dev/null @@ -1,254 +0,0 @@ -A. HISTORY OF THE SOFTWARE -========================== - -Python was created in the early 1990s by Guido van Rossum at Stichting -Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands -as a successor of a language called ABC. Guido remains Python's -principal author, although it includes many contributions from others. 
- -In 1995, Guido continued his work on Python at the Corporation for -National Research Initiatives (CNRI, see http://www.cnri.reston.va.us) -in Reston, Virginia where he released several versions of the -software. - -In May 2000, Guido and the Python core development team moved to -BeOpen.com to form the BeOpen PythonLabs team. In October of the same -year, the PythonLabs team moved to Digital Creations, which became -Zope Corporation. In 2001, the Python Software Foundation (PSF, see -https://www.python.org/psf/) was formed, a non-profit organization -created specifically to own Python-related Intellectual Property. -Zope Corporation was a sponsoring member of the PSF. - -All Python releases are Open Source (see http://www.opensource.org for -the Open Source Definition). Historically, most, but not all, Python -releases have also been GPL-compatible; the table below summarizes -the various releases. - - Release Derived Year Owner GPL- - from compatible? (1) - - 0.9.0 thru 1.2 1991-1995 CWI yes - 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes - 1.6 1.5.2 2000 CNRI no - 2.0 1.6 2000 BeOpen.com no - 1.6.1 1.6 2001 CNRI yes (2) - 2.1 2.0+1.6.1 2001 PSF no - 2.0.1 2.0+1.6.1 2001 PSF yes - 2.1.1 2.1+2.0.1 2001 PSF yes - 2.1.2 2.1.1 2002 PSF yes - 2.1.3 2.1.2 2002 PSF yes - 2.2 and above 2.1.1 2001-now PSF yes - -Footnotes: - -(1) GPL-compatible doesn't mean that we're distributing Python under - the GPL. All Python licenses, unlike the GPL, let you distribute - a modified version without making your changes open source. The - GPL-compatible licenses make it possible to combine Python with - other software that is released under the GPL; the others don't. - -(2) According to Richard Stallman, 1.6.1 is not GPL-compatible, - because its license has a choice of law clause. According to - CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1 - is "not incompatible" with the GPL. 
- -Thanks to the many outside volunteers who have worked under Guido's -direction to make these releases possible. - - -B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON -=============================================================== - -PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 --------------------------------------------- - -1. This LICENSE AGREEMENT is between the Python Software Foundation -("PSF"), and the Individual or Organization ("Licensee") accessing and -otherwise using this software ("Python") in source or binary form and -its associated documentation. - -2. Subject to the terms and conditions of this License Agreement, PSF hereby -grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, -analyze, test, perform and/or display publicly, prepare derivative works, -distribute, and otherwise use Python alone or in any derivative version, -provided, however, that PSF's License Agreement and PSF's notice of copyright, -i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, -2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018 Python Software Foundation; All -Rights Reserved" are retained in Python alone or in any derivative version -prepared by Licensee. - -3. In the event Licensee prepares a derivative work that is based on -or incorporates Python or any part thereof, and wants to make -the derivative work available to others as provided herein, then -Licensee hereby agrees to include in any such work a brief summary of -the changes made to Python. - -4. PSF is making Python available to Licensee on an "AS IS" -basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND -DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT -INFRINGE ANY THIRD PARTY RIGHTS. - -5. 
PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON -FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS -A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, -OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -6. This License Agreement will automatically terminate upon a material -breach of its terms and conditions. - -7. Nothing in this License Agreement shall be deemed to create any -relationship of agency, partnership, or joint venture between PSF and -Licensee. This License Agreement does not grant permission to use PSF -trademarks or trade name in a trademark sense to endorse or promote -products or services of Licensee, or any third party. - -8. By copying, installing or otherwise using Python, Licensee -agrees to be bound by the terms and conditions of this License -Agreement. - - -BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 -------------------------------------------- - -BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 - -1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an -office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the -Individual or Organization ("Licensee") accessing and otherwise using -this software in source or binary form and its associated -documentation ("the Software"). - -2. Subject to the terms and conditions of this BeOpen Python License -Agreement, BeOpen hereby grants Licensee a non-exclusive, -royalty-free, world-wide license to reproduce, analyze, test, perform -and/or display publicly, prepare derivative works, distribute, and -otherwise use the Software alone or in any derivative version, -provided, however, that the BeOpen Python License is retained in the -Software, alone or in any derivative version prepared by Licensee. - -3. BeOpen is making the Software available to Licensee on an "AS IS" -basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -IMPLIED. 
BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND -DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT -INFRINGE ANY THIRD PARTY RIGHTS. - -4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE -SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS -AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY -DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -5. This License Agreement will automatically terminate upon a material -breach of its terms and conditions. - -6. This License Agreement shall be governed by and interpreted in all -respects by the law of the State of California, excluding conflict of -law provisions. Nothing in this License Agreement shall be deemed to -create any relationship of agency, partnership, or joint venture -between BeOpen and Licensee. This License Agreement does not grant -permission to use BeOpen trademarks or trade names in a trademark -sense to endorse or promote products or services of Licensee, or any -third party. As an exception, the "BeOpen Python" logos available at -http://www.pythonlabs.com/logos.html may be used according to the -permissions granted on that web page. - -7. By copying, installing or otherwise using the software, Licensee -agrees to be bound by the terms and conditions of this License -Agreement. - - -CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 ---------------------------------------- - -1. This LICENSE AGREEMENT is between the Corporation for National -Research Initiatives, having an office at 1895 Preston White Drive, -Reston, VA 20191 ("CNRI"), and the Individual or Organization -("Licensee") accessing and otherwise using Python 1.6.1 software in -source or binary form and its associated documentation. - -2. 
Subject to the terms and conditions of this License Agreement, CNRI -hereby grants Licensee a nonexclusive, royalty-free, world-wide -license to reproduce, analyze, test, perform and/or display publicly, -prepare derivative works, distribute, and otherwise use Python 1.6.1 -alone or in any derivative version, provided, however, that CNRI's -License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) -1995-2001 Corporation for National Research Initiatives; All Rights -Reserved" are retained in Python 1.6.1 alone or in any derivative -version prepared by Licensee. Alternately, in lieu of CNRI's License -Agreement, Licensee may substitute the following text (omitting the -quotes): "Python 1.6.1 is made available subject to the terms and -conditions in CNRI's License Agreement. This Agreement together with -Python 1.6.1 may be located on the Internet using the following -unique, persistent identifier (known as a handle): 1895.22/1013. This -Agreement may also be obtained from a proxy server on the Internet -using the following URL: http://hdl.handle.net/1895.22/1013". - -3. In the event Licensee prepares a derivative work that is based on -or incorporates Python 1.6.1 or any part thereof, and wants to make -the derivative work available to others as provided herein, then -Licensee hereby agrees to include in any such work a brief summary of -the changes made to Python 1.6.1. - -4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS" -basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND -DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT -INFRINGE ANY THIRD PARTY RIGHTS. - -5. 
CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON -1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS -A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, -OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -6. This License Agreement will automatically terminate upon a material -breach of its terms and conditions. - -7. This License Agreement shall be governed by the federal -intellectual property law of the United States, including without -limitation the federal copyright law, and, to the extent such -U.S. federal law does not apply, by the law of the Commonwealth of -Virginia, excluding Virginia's conflict of law provisions. -Notwithstanding the foregoing, with regard to derivative works based -on Python 1.6.1 that incorporate non-separable material that was -previously distributed under the GNU General Public License (GPL), the -law of the Commonwealth of Virginia shall govern this License -Agreement only as to issues arising under or with respect to -Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this -License Agreement shall be deemed to create any relationship of -agency, partnership, or joint venture between CNRI and Licensee. This -License Agreement does not grant permission to use CNRI trademarks or -trade name in a trademark sense to endorse or promote products or -services of Licensee, or any third party. - -8. By clicking on the "ACCEPT" button where indicated, or by copying, -installing or otherwise using Python 1.6.1, Licensee agrees to be -bound by the terms and conditions of this License Agreement. - - ACCEPT - - -CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 --------------------------------------------------- - -Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, -The Netherlands. All rights reserved. 
- -Permission to use, copy, modify, and distribute this software and its -documentation for any purpose and without fee is hereby granted, -provided that the above copyright notice appear in all copies and that -both that copyright notice and this permission notice appear in -supporting documentation, and that the name of Stichting Mathematisch -Centrum or CWI not be used in advertising or publicity pertaining to -distribution of the software without specific, written prior -permission. - -STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO -THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND -FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE -FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/Lib/README.md b/Lib/README.md deleted file mode 100644 index 535d8939e80..00000000000 --- a/Lib/README.md +++ /dev/null @@ -1,13 +0,0 @@ -# Standard Library for RustPython - -This directory contains all of the Python files that make up the standard -library for RustPython. - -Most of these files are copied over from the CPython repository (the 3.7 -branch), with slight modifications to allow them to work under RustPython. The -current goal is to complete the standard library with as few modifications as -possible. Current modifications are just temporary workarounds for bugs/missing -feature within the RustPython implementation. - -The first big module we are targeting is `unittest`, so we can leverage the -CPython test suite. diff --git a/Lib/__future__.py b/Lib/__future__.py index 97dc90c6e46..82f70a570ab 100644 --- a/Lib/__future__.py +++ b/Lib/__future__.py @@ -1,147 +1,30 @@ -"""Record of phased-in incompatible language changes. 
- -Each line is of the form: - - FeatureName = "_Feature(" OptionalRelease "," MandatoryRelease "," - CompilerFlag ")" - -where, normally, OptionalRelease < MandatoryRelease, and both are 5-tuples -of the same form as sys.version_info: - - (PY_MAJOR_VERSION, # the 2 in 2.1.0a3; an int - PY_MINOR_VERSION, # the 1; an int - PY_MICRO_VERSION, # the 0; an int - PY_RELEASE_LEVEL, # "alpha", "beta", "candidate" or "final"; string - PY_RELEASE_SERIAL # the 3; an int - ) - -OptionalRelease records the first release in which - - from __future__ import FeatureName - -was accepted. - -In the case of MandatoryReleases that have not yet occurred, -MandatoryRelease predicts the release in which the feature will become part -of the language. - -Else MandatoryRelease records when the feature became part of the language; -in releases at or after that, modules no longer need - - from __future__ import FeatureName - -to use the feature in question, but may continue to use such imports. - -MandatoryRelease may also be None, meaning that a planned feature got -dropped. - -Instances of class _Feature have two corresponding methods, -.getOptionalRelease() and .getMandatoryRelease(). - -CompilerFlag is the (bitfield) flag that should be passed in the fourth -argument to the builtin function compile() to enable the feature in -dynamically compiled code. This flag is stored in the .compiler_flag -attribute on _Future instances. These values must match the appropriate -#defines of CO_xxx flags in Include/cpython/compile.h. - -No feature line is ever to be deleted from this file. 
-""" - -all_feature_names = [ - "nested_scopes", - "generators", - "division", - "absolute_import", - "with_statement", - "print_function", - "unicode_literals", - "barry_as_FLUFL", - "generator_stop", - "annotations", -] - -__all__ = ["all_feature_names"] + all_feature_names - -# The CO_xxx symbols are defined here under the same names defined in -# code.h and used by compile.h, so that an editor search will find them here. -# However, they're not exported in __all__, because they don't really belong to -# this module. -CO_NESTED = 0x0010 # nested_scopes -CO_GENERATOR_ALLOWED = 0 # generators (obsolete, was 0x1000) -CO_FUTURE_DIVISION = 0x20000 # division -CO_FUTURE_ABSOLUTE_IMPORT = 0x40000 # perform absolute imports by default -CO_FUTURE_WITH_STATEMENT = 0x80000 # with statement -CO_FUTURE_PRINT_FUNCTION = 0x100000 # print function -CO_FUTURE_UNICODE_LITERALS = 0x200000 # unicode string literals -CO_FUTURE_BARRY_AS_BDFL = 0x400000 -CO_FUTURE_GENERATOR_STOP = 0x800000 # StopIteration becomes RuntimeError in generators -CO_FUTURE_ANNOTATIONS = 0x1000000 # annotations become strings at runtime - - +'Record of phased-in incompatible language changes.\n\nEach line is of the form:\n\n FeatureName = "_Feature(" OptionalRelease "," MandatoryRelease ","\n CompilerFlag ")"\n\nwhere, normally, OptionalRelease < MandatoryRelease, and both are 5-tuples\nof the same form as sys.version_info:\n\n (PY_MAJOR_VERSION, # the 2 in 2.1.0a3; an int\n PY_MINOR_VERSION, # the 1; an int\n PY_MICRO_VERSION, # the 0; an int\n PY_RELEASE_LEVEL, # "alpha", "beta", "candidate" or "final"; string\n PY_RELEASE_SERIAL # the 3; an int\n )\n\nOptionalRelease records the first release in which\n\n from __future__ import FeatureName\n\nwas accepted.\n\nIn the case of MandatoryReleases that have not yet occurred,\nMandatoryRelease predicts the release in which the feature will become part\nof the language.\n\nElse MandatoryRelease records when the feature became part of the language;\nin releases at 
or after that, modules no longer need\n\n from __future__ import FeatureName\n\nto use the feature in question, but may continue to use such imports.\n\nMandatoryRelease may also be None, meaning that a planned feature got\ndropped.\n\nInstances of class _Feature have two corresponding methods,\n.getOptionalRelease() and .getMandatoryRelease().\n\nCompilerFlag is the (bitfield) flag that should be passed in the fourth\nargument to the builtin function compile() to enable the feature in\ndynamically compiled code. This flag is stored in the .compiler_flag\nattribute on _Future instances. These values must match the appropriate\n#defines of CO_xxx flags in Include/cpython/compile.h.\n\nNo feature line is ever to be deleted from this file.\n' +_B='beta' +_A='alpha' +all_feature_names=['nested_scopes','generators','division','absolute_import','with_statement','print_function','unicode_literals','barry_as_FLUFL','generator_stop','annotations'] +__all__=['all_feature_names']+all_feature_names +CO_NESTED=16 +CO_GENERATOR_ALLOWED=0 +CO_FUTURE_DIVISION=131072 +CO_FUTURE_ABSOLUTE_IMPORT=262144 +CO_FUTURE_WITH_STATEMENT=524288 +CO_FUTURE_PRINT_FUNCTION=1048576 +CO_FUTURE_UNICODE_LITERALS=2097152 +CO_FUTURE_BARRY_AS_BDFL=4194304 +CO_FUTURE_GENERATOR_STOP=8388608 +CO_FUTURE_ANNOTATIONS=16777216 class _Feature: - - def __init__(self, optionalRelease, mandatoryRelease, compiler_flag): - self.optional = optionalRelease - self.mandatory = mandatoryRelease - self.compiler_flag = compiler_flag - - def getOptionalRelease(self): - """Return first release in which this feature was recognized. - - This is a 5-tuple, of the same form as sys.version_info. - """ - return self.optional - - def getMandatoryRelease(self): - """Return release in which this feature will become mandatory. - - This is a 5-tuple, of the same form as sys.version_info, or, if - the feature was dropped, is None. 
- """ - return self.mandatory - - def __repr__(self): - return "_Feature" + repr((self.optional, - self.mandatory, - self.compiler_flag)) - - -nested_scopes = _Feature((2, 1, 0, "beta", 1), - (2, 2, 0, "alpha", 0), - CO_NESTED) - -generators = _Feature((2, 2, 0, "alpha", 1), - (2, 3, 0, "final", 0), - CO_GENERATOR_ALLOWED) - -division = _Feature((2, 2, 0, "alpha", 2), - (3, 0, 0, "alpha", 0), - CO_FUTURE_DIVISION) - -absolute_import = _Feature((2, 5, 0, "alpha", 1), - (3, 0, 0, "alpha", 0), - CO_FUTURE_ABSOLUTE_IMPORT) - -with_statement = _Feature((2, 5, 0, "alpha", 1), - (2, 6, 0, "alpha", 0), - CO_FUTURE_WITH_STATEMENT) - -print_function = _Feature((2, 6, 0, "alpha", 2), - (3, 0, 0, "alpha", 0), - CO_FUTURE_PRINT_FUNCTION) - -unicode_literals = _Feature((2, 6, 0, "alpha", 2), - (3, 0, 0, "alpha", 0), - CO_FUTURE_UNICODE_LITERALS) - -barry_as_FLUFL = _Feature((3, 1, 0, "alpha", 2), - (4, 0, 0, "alpha", 0), - CO_FUTURE_BARRY_AS_BDFL) - -generator_stop = _Feature((3, 5, 0, "beta", 1), - (3, 7, 0, "alpha", 0), - CO_FUTURE_GENERATOR_STOP) - -annotations = _Feature((3, 7, 0, "beta", 1), - (3, 11, 0, "alpha", 0), - CO_FUTURE_ANNOTATIONS) + def __init__(A,optionalRelease,mandatoryRelease,compiler_flag):A.optional=optionalRelease;A.mandatory=mandatoryRelease;A.compiler_flag=compiler_flag + def getOptionalRelease(A):'Return first release in which this feature was recognized.\n\n This is a 5-tuple, of the same form as sys.version_info.\n ';return A.optional + def getMandatoryRelease(A):'Return release in which this feature will become mandatory.\n\n This is a 5-tuple, of the same form as sys.version_info, or, if\n the feature was dropped, is None.\n ';return A.mandatory + def __repr__(A):return'_Feature'+repr((A.optional,A.mandatory,A.compiler_flag)) +nested_scopes=_Feature((2,1,0,_B,1),(2,2,0,_A,0),CO_NESTED) +generators=_Feature((2,2,0,_A,1),(2,3,0,'final',0),CO_GENERATOR_ALLOWED) +division=_Feature((2,2,0,_A,2),(3,0,0,_A,0),CO_FUTURE_DIVISION) 
+absolute_import=_Feature((2,5,0,_A,1),(3,0,0,_A,0),CO_FUTURE_ABSOLUTE_IMPORT) +with_statement=_Feature((2,5,0,_A,1),(2,6,0,_A,0),CO_FUTURE_WITH_STATEMENT) +print_function=_Feature((2,6,0,_A,2),(3,0,0,_A,0),CO_FUTURE_PRINT_FUNCTION) +unicode_literals=_Feature((2,6,0,_A,2),(3,0,0,_A,0),CO_FUTURE_UNICODE_LITERALS) +barry_as_FLUFL=_Feature((3,1,0,_A,2),(4,0,0,_A,0),CO_FUTURE_BARRY_AS_BDFL) +generator_stop=_Feature((3,5,0,_B,1),(3,7,0,_A,0),CO_FUTURE_GENERATOR_STOP) +annotations=_Feature((3,7,0,_B,1),(3,11,0,_A,0),CO_FUTURE_ANNOTATIONS) \ No newline at end of file diff --git a/Lib/__hello__.py b/Lib/__hello__.py index c09d6a4f523..cdbafcfce4f 100644 --- a/Lib/__hello__.py +++ b/Lib/__hello__.py @@ -1,16 +1,6 @@ -initialized = True - -class TestFrozenUtf8_1: - """\u00b6""" - -class TestFrozenUtf8_2: - """\u03c0""" - -class TestFrozenUtf8_4: - """\U0001f600""" - -def main(): - print("Hello world!") - -if __name__ == '__main__': - main() +initialized=True +class TestFrozenUtf8_1:'¶' +class TestFrozenUtf8_2:'π' +class TestFrozenUtf8_4:'😀' +def main():print('Hello world!') +if __name__=='__main__':main() \ No newline at end of file diff --git a/Lib/__phello__/__init__.py b/Lib/__phello__/__init__.py index d37bd2766ac..5521a750ce4 100644 --- a/Lib/__phello__/__init__.py +++ b/Lib/__phello__/__init__.py @@ -1,7 +1,3 @@ -initialized = True - -def main(): - print("Hello world!") - -if __name__ == '__main__': - main() +initialized=True +def main():print('Hello world!') +if __name__=='__main__':main() \ No newline at end of file diff --git a/Lib/__phello__/spam.py b/Lib/__phello__/spam.py index d37bd2766ac..5521a750ce4 100644 --- a/Lib/__phello__/spam.py +++ b/Lib/__phello__/spam.py @@ -1,7 +1,3 @@ -initialized = True - -def main(): - print("Hello world!") - -if __name__ == '__main__': - main() +initialized=True +def main():print('Hello world!') +if __name__=='__main__':main() \ No newline at end of file diff --git a/Lib/_collections_abc.py b/Lib/_collections_abc.py index 
87a9cd2d46d..91aad3ca963 100644 --- a/Lib/_collections_abc.py +++ b/Lib/_collections_abc.py @@ -1,295 +1,163 @@ -# Copyright 2007 Google, Inc. All Rights Reserved. -# Licensed to PSF under a Contributor Agreement. - -"""Abstract Base Classes (ABCs) for collections, according to PEP 3119. - -Unit tests are in test_collections. -""" - -from abc import ABCMeta, abstractmethod +'Abstract Base Classes (ABCs) for collections, according to PEP 3119.\n\nUnit tests are in test_collections.\n' +_M='ParamSpec' +_L='typing' +_K='__parameters__' +_J='__contains__' +_I='__len__' +_H='__next__' +_G='__anext__' +_F='__await__' +_E='__aiter__' +_D='__iter__' +_C=False +_B=True +_A=None +from abc import ABCMeta,abstractmethod import sys - -GenericAlias = type(list[int]) -EllipsisType = type(...) -def _f(): pass -FunctionType = type(_f) +GenericAlias=type(list[int]) +EllipsisType=type(...) +def _f():0 +FunctionType=type(_f) del _f - -__all__ = ["Awaitable", "Coroutine", - "AsyncIterable", "AsyncIterator", "AsyncGenerator", - "Hashable", "Iterable", "Iterator", "Generator", "Reversible", - "Sized", "Container", "Callable", "Collection", - "Set", "MutableSet", - "Mapping", "MutableMapping", - "MappingView", "KeysView", "ItemsView", "ValuesView", - "Sequence", "MutableSequence", - "ByteString", - ] - -# This module has been renamed from collections.abc to _collections_abc to -# speed up interpreter startup. Some of the types such as MutableMapping are -# required early but collections module imports a lot of other modules. -# See issue #19218 -__name__ = "collections.abc" - -# Private list of types that we want to register with the various ABCs -# so that they will pass tests like: -# it = iter(somebytearray) -# assert isinstance(it, Iterable) -# Note: in other implementations, these types might not be distinct -# and they may have their own implementation specific types that -# are not included on this list. 
-bytes_iterator = type(iter(b'')) -bytearray_iterator = type(iter(bytearray())) -#callable_iterator = ??? -dict_keyiterator = type(iter({}.keys())) -dict_valueiterator = type(iter({}.values())) -dict_itemiterator = type(iter({}.items())) -list_iterator = type(iter([])) -list_reverseiterator = type(iter(reversed([]))) -range_iterator = type(iter(range(0))) -longrange_iterator = type(iter(range(1 << 1000))) -set_iterator = type(iter(set())) -str_iterator = type(iter("")) -tuple_iterator = type(iter(())) -zip_iterator = type(iter(zip())) -## views ## -dict_keys = type({}.keys()) -dict_values = type({}.values()) -dict_items = type({}.items()) -## misc ## -mappingproxy = type(type.__dict__) -generator = type((lambda: (yield))()) -## coroutine ## -async def _coro(): pass -_coro = _coro() -coroutine = type(_coro) -_coro.close() # Prevent ResourceWarning +__all__=['Awaitable','Coroutine','AsyncIterable','AsyncIterator','AsyncGenerator','Hashable','Iterable','Iterator','Generator','Reversible','Sized','Container','Callable','Collection','Set','MutableSet','Mapping','MutableMapping','MappingView','KeysView','ItemsView','ValuesView','Sequence','MutableSequence','ByteString'] +__name__='collections.abc' +bytes_iterator=type(iter(b'')) +bytearray_iterator=type(iter(bytearray())) +dict_keyiterator=type(iter({}.keys())) +dict_valueiterator=type(iter({}.values())) +dict_itemiterator=type(iter({}.items())) +list_iterator=type(iter([])) +list_reverseiterator=type(iter(reversed([]))) +range_iterator=type(iter(range(0))) +longrange_iterator=type(iter(range(1<<1000))) +set_iterator=type(iter(set())) +str_iterator=type(iter('')) +tuple_iterator=type(iter(())) +zip_iterator=type(iter(zip())) +dict_keys=type({}.keys()) +dict_values=type({}.values()) +dict_items=type({}.items()) +mappingproxy=type(type.__dict__) +generator=type((lambda:(yield))()) +async def _coro():0 +_coro=_coro() +coroutine=type(_coro) +_coro.close() del _coro -## asynchronous generator ## -async def _ag(): yield -_ag = 
_ag() -async_generator = type(_ag) +async def _ag():yield +_ag=_ag() +async_generator=type(_ag) del _ag - - -### ONE-TRICK PONIES ### - -def _check_methods(C, *methods): - mro = C.__mro__ - for method in methods: - for B in mro: - if method in B.__dict__: - if B.__dict__[method] is None: - return NotImplemented - break - else: - return NotImplemented - return True - +def _check_methods(C,*D): + E=C.__mro__ + for A in D: + for B in E: + if A in B.__dict__: + if B.__dict__[A]is _A:return NotImplemented + break + else:return NotImplemented + return _B class Hashable(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __hash__(self): - return 0 - - @classmethod - def __subclasshook__(cls, C): - if cls is Hashable: - return _check_methods(C, "__hash__") - return NotImplemented - - + __slots__=() + @abstractmethod + def __hash__(self):return 0 + @classmethod + def __subclasshook__(A,C): + if A is Hashable:return _check_methods(C,'__hash__') + return NotImplemented class Awaitable(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __await__(self): - yield - - @classmethod - def __subclasshook__(cls, C): - if cls is Awaitable: - return _check_methods(C, "__await__") - return NotImplemented - - __class_getitem__ = classmethod(GenericAlias) - - + __slots__=() + @abstractmethod + def __await__(self):yield + @classmethod + def __subclasshook__(A,C): + if A is Awaitable:return _check_methods(C,_F) + return NotImplemented + __class_getitem__=classmethod(GenericAlias) class Coroutine(Awaitable): - - __slots__ = () - - @abstractmethod - def send(self, value): - """Send a value into the coroutine. - Return next yielded value or raise StopIteration. - """ - raise StopIteration - - @abstractmethod - def throw(self, typ, val=None, tb=None): - """Raise an exception in the coroutine. - Return next yielded value or raise StopIteration. 
- """ - if val is None: - if tb is None: - raise typ - val = typ() - if tb is not None: - val = val.with_traceback(tb) - raise val - - def close(self): - """Raise GeneratorExit inside coroutine. - """ - try: - self.throw(GeneratorExit) - except (GeneratorExit, StopIteration): - pass - else: - raise RuntimeError("coroutine ignored GeneratorExit") - - @classmethod - def __subclasshook__(cls, C): - if cls is Coroutine: - return _check_methods(C, '__await__', 'send', 'throw', 'close') - return NotImplemented - - + __slots__=() + @abstractmethod + def send(self,value):'Send a value into the coroutine.\n Return next yielded value or raise StopIteration.\n ';raise StopIteration + @abstractmethod + def throw(self,typ,val=_A,tb=_A): + 'Raise an exception in the coroutine.\n Return next yielded value or raise StopIteration.\n ';A=val + if A is _A: + if tb is _A:raise typ + A=typ() + if tb is not _A:A=A.with_traceback(tb) + raise A + def close(A): + 'Raise GeneratorExit inside coroutine.\n ' + try:A.throw(GeneratorExit) + except(GeneratorExit,StopIteration):pass + else:raise RuntimeError('coroutine ignored GeneratorExit') + @classmethod + def __subclasshook__(A,C): + if A is Coroutine:return _check_methods(C,_F,'send','throw','close') + return NotImplemented Coroutine.register(coroutine) - - class AsyncIterable(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __aiter__(self): - return AsyncIterator() - - @classmethod - def __subclasshook__(cls, C): - if cls is AsyncIterable: - return _check_methods(C, "__aiter__") - return NotImplemented - - __class_getitem__ = classmethod(GenericAlias) - - + __slots__=() + @abstractmethod + def __aiter__(self):return AsyncIterator() + @classmethod + def __subclasshook__(A,C): + if A is AsyncIterable:return _check_methods(C,_E) + return NotImplemented + __class_getitem__=classmethod(GenericAlias) class AsyncIterator(AsyncIterable): - - __slots__ = () - - @abstractmethod - async def __anext__(self): - """Return the next item or 
raise StopAsyncIteration when exhausted.""" - raise StopAsyncIteration - - def __aiter__(self): - return self - - @classmethod - def __subclasshook__(cls, C): - if cls is AsyncIterator: - return _check_methods(C, "__anext__", "__aiter__") - return NotImplemented - - + __slots__=() + @abstractmethod + async def __anext__(self):'Return the next item or raise StopAsyncIteration when exhausted.';raise StopAsyncIteration + def __aiter__(A):return A + @classmethod + def __subclasshook__(A,C): + if A is AsyncIterator:return _check_methods(C,_G,_E) + return NotImplemented class AsyncGenerator(AsyncIterator): - - __slots__ = () - - async def __anext__(self): - """Return the next item from the asynchronous generator. - When exhausted, raise StopAsyncIteration. - """ - return await self.asend(None) - - @abstractmethod - async def asend(self, value): - """Send a value into the asynchronous generator. - Return next yielded value or raise StopAsyncIteration. - """ - raise StopAsyncIteration - - @abstractmethod - async def athrow(self, typ, val=None, tb=None): - """Raise an exception in the asynchronous generator. - Return next yielded value or raise StopAsyncIteration. - """ - if val is None: - if tb is None: - raise typ - val = typ() - if tb is not None: - val = val.with_traceback(tb) - raise val - - async def aclose(self): - """Raise GeneratorExit inside coroutine. 
- """ - try: - await self.athrow(GeneratorExit) - except (GeneratorExit, StopAsyncIteration): - pass - else: - raise RuntimeError("asynchronous generator ignored GeneratorExit") - - @classmethod - def __subclasshook__(cls, C): - if cls is AsyncGenerator: - return _check_methods(C, '__aiter__', '__anext__', - 'asend', 'athrow', 'aclose') - return NotImplemented - - + __slots__=() + async def __anext__(A):'Return the next item from the asynchronous generator.\n When exhausted, raise StopAsyncIteration.\n ';return await A.asend(_A) + @abstractmethod + async def asend(self,value):'Send a value into the asynchronous generator.\n Return next yielded value or raise StopAsyncIteration.\n ';raise StopAsyncIteration + @abstractmethod + async def athrow(self,typ,val=_A,tb=_A): + 'Raise an exception in the asynchronous generator.\n Return next yielded value or raise StopAsyncIteration.\n ';A=val + if A is _A: + if tb is _A:raise typ + A=typ() + if tb is not _A:A=A.with_traceback(tb) + raise A + async def aclose(A): + 'Raise GeneratorExit inside coroutine.\n ' + try:await A.athrow(GeneratorExit) + except(GeneratorExit,StopAsyncIteration):pass + else:raise RuntimeError('asynchronous generator ignored GeneratorExit') + @classmethod + def __subclasshook__(A,C): + if A is AsyncGenerator:return _check_methods(C,_E,_G,'asend','athrow','aclose') + return NotImplemented AsyncGenerator.register(async_generator) - - class Iterable(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __iter__(self): - while False: - yield None - - @classmethod - def __subclasshook__(cls, C): - if cls is Iterable: - return _check_methods(C, "__iter__") - return NotImplemented - - __class_getitem__ = classmethod(GenericAlias) - - + __slots__=() + @abstractmethod + def __iter__(self): + while _C:yield _A + @classmethod + def __subclasshook__(A,C): + if A is Iterable:return _check_methods(C,_D) + return NotImplemented + __class_getitem__=classmethod(GenericAlias) class Iterator(Iterable): - - 
__slots__ = () - - @abstractmethod - def __next__(self): - 'Return the next item from the iterator. When exhausted, raise StopIteration' - raise StopIteration - - def __iter__(self): - return self - - @classmethod - def __subclasshook__(cls, C): - if cls is Iterator: - return _check_methods(C, '__iter__', '__next__') - return NotImplemented - - + __slots__=() + @abstractmethod + def __next__(self):'Return the next item from the iterator. When exhausted, raise StopIteration';raise StopIteration + def __iter__(A):return A + @classmethod + def __subclasshook__(A,C): + if A is Iterator:return _check_methods(C,_D,_H) + return NotImplemented Iterator.register(bytes_iterator) Iterator.register(bytearray_iterator) -#Iterator.register(callable_iterator) Iterator.register(dict_keyiterator) Iterator.register(dict_valueiterator) Iterator.register(dict_itemiterator) @@ -301,863 +169,396 @@ def __subclasshook__(cls, C): Iterator.register(str_iterator) Iterator.register(tuple_iterator) Iterator.register(zip_iterator) - - class Reversible(Iterable): - - __slots__ = () - - @abstractmethod - def __reversed__(self): - while False: - yield None - - @classmethod - def __subclasshook__(cls, C): - if cls is Reversible: - return _check_methods(C, "__reversed__", "__iter__") - return NotImplemented - - + __slots__=() + @abstractmethod + def __reversed__(self): + while _C:yield _A + @classmethod + def __subclasshook__(A,C): + if A is Reversible:return _check_methods(C,'__reversed__',_D) + return NotImplemented class Generator(Iterator): - - __slots__ = () - - def __next__(self): - """Return the next item from the generator. - When exhausted, raise StopIteration. - """ - return self.send(None) - - @abstractmethod - def send(self, value): - """Send a value into the generator. - Return next yielded value or raise StopIteration. - """ - raise StopIteration - - @abstractmethod - def throw(self, typ, val=None, tb=None): - """Raise an exception in the generator. 
- Return next yielded value or raise StopIteration. - """ - if val is None: - if tb is None: - raise typ - val = typ() - if tb is not None: - val = val.with_traceback(tb) - raise val - - def close(self): - """Raise GeneratorExit inside generator. - """ - try: - self.throw(GeneratorExit) - except (GeneratorExit, StopIteration): - pass - else: - raise RuntimeError("generator ignored GeneratorExit") - - @classmethod - def __subclasshook__(cls, C): - if cls is Generator: - return _check_methods(C, '__iter__', '__next__', - 'send', 'throw', 'close') - return NotImplemented - - + __slots__=() + def __next__(A):'Return the next item from the generator.\n When exhausted, raise StopIteration.\n ';return A.send(_A) + @abstractmethod + def send(self,value):'Send a value into the generator.\n Return next yielded value or raise StopIteration.\n ';raise StopIteration + @abstractmethod + def throw(self,typ,val=_A,tb=_A): + 'Raise an exception in the generator.\n Return next yielded value or raise StopIteration.\n ';A=val + if A is _A: + if tb is _A:raise typ + A=typ() + if tb is not _A:A=A.with_traceback(tb) + raise A + def close(A): + 'Raise GeneratorExit inside generator.\n ' + try:A.throw(GeneratorExit) + except(GeneratorExit,StopIteration):pass + else:raise RuntimeError('generator ignored GeneratorExit') + @classmethod + def __subclasshook__(A,C): + if A is Generator:return _check_methods(C,_D,_H,'send','throw','close') + return NotImplemented Generator.register(generator) - - class Sized(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __len__(self): - return 0 - - @classmethod - def __subclasshook__(cls, C): - if cls is Sized: - return _check_methods(C, "__len__") - return NotImplemented - - + __slots__=() + @abstractmethod + def __len__(self):return 0 + @classmethod + def __subclasshook__(A,C): + if A is Sized:return _check_methods(C,_I) + return NotImplemented class Container(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def 
__contains__(self, x): - return False - - @classmethod - def __subclasshook__(cls, C): - if cls is Container: - return _check_methods(C, "__contains__") - return NotImplemented - - __class_getitem__ = classmethod(GenericAlias) - - -class Collection(Sized, Iterable, Container): - - __slots__ = () - - @classmethod - def __subclasshook__(cls, C): - if cls is Collection: - return _check_methods(C, "__len__", "__iter__", "__contains__") - return NotImplemented - - + __slots__=() + @abstractmethod + def __contains__(self,x):return _C + @classmethod + def __subclasshook__(A,C): + if A is Container:return _check_methods(C,_J) + return NotImplemented + __class_getitem__=classmethod(GenericAlias) +class Collection(Sized,Iterable,Container): + __slots__=() + @classmethod + def __subclasshook__(A,C): + if A is Collection:return _check_methods(C,_I,_D,_J) + return NotImplemented class _CallableGenericAlias(GenericAlias): - """ Represent `Callable[argtypes, resulttype]`. - - This sets ``__args__`` to a tuple containing the flattened ``argtypes`` - followed by ``resulttype``. - - Example: ``Callable[[int, str], float]`` sets ``__args__`` to - ``(int, str, float)``. - """ - - __slots__ = () - - def __new__(cls, origin, args): - if not (isinstance(args, tuple) and len(args) == 2): - raise TypeError( - "Callable must be used as Callable[[arg, ...], result].") - t_args, t_result = args - if isinstance(t_args, list): - args = (*t_args, t_result) - elif not _is_param_expr(t_args): - raise TypeError(f"Expected a list of types, an ellipsis, " - f"ParamSpec, or Concatenate. 
Got {t_args}") - return super().__new__(cls, origin, args) - - @property - def __parameters__(self): - params = [] - for arg in self.__args__: - # Looks like a genericalias - if hasattr(arg, "__parameters__") and isinstance(arg.__parameters__, tuple): - params.extend(arg.__parameters__) - else: - if _is_typevarlike(arg): - params.append(arg) - return tuple(dict.fromkeys(params)) - - def __repr__(self): - if len(self.__args__) == 2 and _is_param_expr(self.__args__[0]): - return super().__repr__() - return (f'collections.abc.Callable' - f'[[{", ".join([_type_repr(a) for a in self.__args__[:-1]])}], ' - f'{_type_repr(self.__args__[-1])}]') - - def __reduce__(self): - args = self.__args__ - if not (len(args) == 2 and _is_param_expr(args[0])): - args = list(args[:-1]), args[-1] - return _CallableGenericAlias, (Callable, args) - - def __getitem__(self, item): - # Called during TypeVar substitution, returns the custom subclass - # rather than the default types.GenericAlias object. Most of the - # code is copied from typing's _GenericAlias and the builtin - # types.GenericAlias. - - # A special case in PEP 612 where if X = Callable[P, int], - # then X[int, str] == X[[int, str]]. - param_len = len(self.__parameters__) - if param_len == 0: - raise TypeError(f'{self} is not a generic class') - if not isinstance(item, tuple): - item = (item,) - if (param_len == 1 and _is_param_expr(self.__parameters__[0]) - and item and not _is_param_expr(item[0])): - item = (list(item),) - item_len = len(item) - if item_len != param_len: - raise TypeError(f'Too {"many" if item_len > param_len else "few"}' - f' arguments for {self};' - f' actual {item_len}, expected {param_len}') - subst = dict(zip(self.__parameters__, item)) - new_args = [] - for arg in self.__args__: - if _is_typevarlike(arg): - if _is_param_expr(arg): - arg = subst[arg] - if not _is_param_expr(arg): - raise TypeError(f"Expected a list of types, an ellipsis, " - f"ParamSpec, or Concatenate. 
Got {arg}") - else: - arg = subst[arg] - # Looks like a GenericAlias - elif hasattr(arg, '__parameters__') and isinstance(arg.__parameters__, tuple): - subparams = arg.__parameters__ - if subparams: - subargs = tuple(subst[x] for x in subparams) - arg = arg[subargs] - new_args.append(arg) - - # args[0] occurs due to things like Z[[int, str, bool]] from PEP 612 - if not isinstance(new_args[0], list): - t_result = new_args[-1] - t_args = new_args[:-1] - new_args = (t_args, t_result) - return _CallableGenericAlias(Callable, tuple(new_args)) - - -def _is_typevarlike(arg): - obj = type(arg) - # looks like a TypeVar/ParamSpec - return (obj.__module__ == 'typing' - and obj.__name__ in {'ParamSpec', 'TypeVar'}) - + ' Represent `Callable[argtypes, resulttype]`.\n\n This sets ``__args__`` to a tuple containing the flattened ``argtypes``\n followed by ``resulttype``.\n\n Example: ``Callable[[int, str], float]`` sets ``__args__`` to\n ``(int, str, float)``.\n ';__slots__=() + def __new__(C,origin,args): + A=args + if not(isinstance(A,tuple)and len(A)==2):raise TypeError('Callable must be used as Callable[[arg, ...], result].') + B,D=A + if isinstance(B,list):A=*B,D + elif not _is_param_expr(B):raise TypeError(f"Expected a list of types, an ellipsis, ParamSpec, or Concatenate. 
Got {B}") + return super().__new__(C,origin,A) + @property + def __parameters__(self): + B=[] + for A in self.__args__: + if hasattr(A,_K)and isinstance(A.__parameters__,tuple):B.extend(A.__parameters__) + elif _is_typevarlike(A):B.append(A) + return tuple(dict.fromkeys(B)) + def __repr__(A): + if len(A.__args__)==2 and _is_param_expr(A.__args__[0]):return super().__repr__() + return f"collections.abc.Callable[[{', '.join([_type_repr(A)for A in A.__args__[:-1]])}], {_type_repr(A.__args__[-1])}]" + def __reduce__(B): + A=B.__args__ + if not(len(A)==2 and _is_param_expr(A[0])):A=list(A[:-1]),A[-1] + return _CallableGenericAlias,(Callable,A) + def __getitem__(C,item): + B=item;E=len(C.__parameters__) + if E==0:raise TypeError(f"{C} is not a generic class") + if not isinstance(B,tuple):B=B, + if E==1 and _is_param_expr(C.__parameters__[0])and B and not _is_param_expr(B[0]):B=list(B), + F=len(B) + if F!=E:raise TypeError(f"Too {'many'if F>E else'few'} arguments for {C}; actual {F}, expected {E}") + G=dict(zip(C.__parameters__,B));D=[] + for A in C.__args__: + if _is_typevarlike(A): + if _is_param_expr(A): + A=G[A] + if not _is_param_expr(A):raise TypeError(f"Expected a list of types, an ellipsis, ParamSpec, or Concatenate. 
Got {A}") + else:A=G[A] + elif hasattr(A,_K)and isinstance(A.__parameters__,tuple): + H=A.__parameters__ + if H:I=tuple(G[A]for A in H);A=A[I] + D.append(A) + if not isinstance(D[0],list):J=D[-1];K=D[:-1];D=K,J + return _CallableGenericAlias(Callable,tuple(D)) +def _is_typevarlike(arg):A=type(arg);return A.__module__==_L and A.__name__ in{_M,'TypeVar'} def _is_param_expr(obj): - """Checks if obj matches either a list of types, ``...``, ``ParamSpec`` or - ``_ConcatenateGenericAlias`` from typing.py - """ - if obj is Ellipsis: - return True - if isinstance(obj, list): - return True - obj = type(obj) - names = ('ParamSpec', '_ConcatenateGenericAlias') - return obj.__module__ == 'typing' and any(obj.__name__ == name for name in names) - + 'Checks if obj matches either a list of types, ``...``, ``ParamSpec`` or\n ``_ConcatenateGenericAlias`` from typing.py\n ';A=obj + if A is Ellipsis:return _B + if isinstance(A,list):return _B + A=type(A);B=_M,'_ConcatenateGenericAlias';return A.__module__==_L and any(A.__name__==B for B in B) def _type_repr(obj): - """Return the repr() of an object, special-casing types (internal helper). - - Copied from :mod:`typing` since collections.abc - shouldn't depend on that module. - """ - if isinstance(obj, GenericAlias): - return repr(obj) - if isinstance(obj, type): - if obj.__module__ == 'builtins': - return obj.__qualname__ - return f'{obj.__module__}.{obj.__qualname__}' - if obj is Ellipsis: - return '...' - if isinstance(obj, FunctionType): - return obj.__name__ - return repr(obj) - - + "Return the repr() of an object, special-casing types (internal helper).\n\n Copied from :mod:`typing` since collections.abc\n shouldn't depend on that module.\n ";A=obj + if isinstance(A,GenericAlias):return repr(A) + if isinstance(A,type): + if A.__module__=='builtins':return A.__qualname__ + return f"{A.__module__}.{A.__qualname__}" + if A is Ellipsis:return'...' 
+ if isinstance(A,FunctionType):return A.__name__ + return repr(A) class Callable(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __call__(self, *args, **kwds): - return False - - @classmethod - def __subclasshook__(cls, C): - if cls is Callable: - return _check_methods(C, "__call__") - return NotImplemented - - __class_getitem__ = classmethod(_CallableGenericAlias) - - -### SETS ### - - + __slots__=() + @abstractmethod + def __call__(self,*A,**B):return _C + @classmethod + def __subclasshook__(A,C): + if A is Callable:return _check_methods(C,'__call__') + return NotImplemented + __class_getitem__=classmethod(_CallableGenericAlias) class Set(Collection): - """A set is a finite, iterable container. - - This class provides concrete generic implementations of all - methods except for __contains__, __iter__ and __len__. - - To override the comparisons (presumably for speed, as the - semantics are fixed), redefine __le__ and __ge__, - then the other operations will automatically follow suit. - """ - - __slots__ = () - - def __le__(self, other): - if not isinstance(other, Set): - return NotImplemented - if len(self) > len(other): - return False - for elem in self: - if elem not in other: - return False - return True - - def __lt__(self, other): - if not isinstance(other, Set): - return NotImplemented - return len(self) < len(other) and self.__le__(other) - - def __gt__(self, other): - if not isinstance(other, Set): - return NotImplemented - return len(self) > len(other) and self.__ge__(other) - - def __ge__(self, other): - if not isinstance(other, Set): - return NotImplemented - if len(self) < len(other): - return False - for elem in other: - if elem not in self: - return False - return True - - def __eq__(self, other): - if not isinstance(other, Set): - return NotImplemented - return len(self) == len(other) and self.__le__(other) - - @classmethod - def _from_iterable(cls, it): - '''Construct an instance of the class from any iterable input. 
- - Must override this method if the class constructor signature - does not accept an iterable for an input. - ''' - return cls(it) - - def __and__(self, other): - if not isinstance(other, Iterable): - return NotImplemented - return self._from_iterable(value for value in other if value in self) - - __rand__ = __and__ - - def isdisjoint(self, other): - 'Return True if two sets have a null intersection.' - for value in other: - if value in self: - return False - return True - - def __or__(self, other): - if not isinstance(other, Iterable): - return NotImplemented - chain = (e for s in (self, other) for e in s) - return self._from_iterable(chain) - - __ror__ = __or__ - - def __sub__(self, other): - if not isinstance(other, Set): - if not isinstance(other, Iterable): - return NotImplemented - other = self._from_iterable(other) - return self._from_iterable(value for value in self - if value not in other) - - def __rsub__(self, other): - if not isinstance(other, Set): - if not isinstance(other, Iterable): - return NotImplemented - other = self._from_iterable(other) - return self._from_iterable(value for value in other - if value not in self) - - def __xor__(self, other): - if not isinstance(other, Set): - if not isinstance(other, Iterable): - return NotImplemented - other = self._from_iterable(other) - return (self - other) | (other - self) - - __rxor__ = __xor__ - - def _hash(self): - """Compute the hash value of a set. - - Note that we don't define __hash__: not all sets are hashable. - But if you define a hashable set type, its __hash__ should - call this function. - - This must be compatible __eq__. - - All sets ought to compare equal if they contain the same - elements, regardless of how they are implemented, and - regardless of the order of the elements; so there's not much - freedom for __eq__ or __hash__. We match the algorithm used - by the built-in frozenset type. 
- """ - MAX = sys.maxsize - MASK = 2 * MAX + 1 - n = len(self) - h = 1927868237 * (n + 1) - h &= MASK - for x in self: - hx = hash(x) - h ^= (hx ^ (hx << 16) ^ 89869747) * 3644798167 - h &= MASK - h ^= (h >> 11) ^ (h >> 25) - h = h * 69069 + 907133923 - h &= MASK - if h > MAX: - h -= MASK + 1 - if h == -1: - h = 590923713 - return h - - + 'A set is a finite, iterable container.\n\n This class provides concrete generic implementations of all\n methods except for __contains__, __iter__ and __len__.\n\n To override the comparisons (presumably for speed, as the\n semantics are fixed), redefine __le__ and __ge__,\n then the other operations will automatically follow suit.\n ';__slots__=() + def __le__(B,other): + A=other + if not isinstance(A,Set):return NotImplemented + if len(B)>len(A):return _C + for C in B: + if C not in A:return _C + return _B + def __lt__(B,other): + A=other + if not isinstance(A,Set):return NotImplemented + return len(B)len(A)and B.__ge__(A) + def __ge__(B,other): + A=other + if not isinstance(A,Set):return NotImplemented + if len(B)>11^A>>25;A=A*69069+907133923;A&=B + if A>D:A-=B+1 + if A==-1:A=590923713 + return A Set.register(frozenset) - - class MutableSet(Set): - """A mutable set is a finite, iterable container. - - This class provides concrete generic implementations of all - methods except for __contains__, __iter__, __len__, - add(), and discard(). - - To override the comparisons (presumably for speed, as the - semantics are fixed), all you have to do is redefine __le__ and - then the other operations will automatically follow suit. - """ - - __slots__ = () - - @abstractmethod - def add(self, value): - """Add an element.""" - raise NotImplementedError - - @abstractmethod - def discard(self, value): - """Remove an element. Do not raise an exception if absent.""" - raise NotImplementedError - - def remove(self, value): - """Remove an element. 
If not a member, raise a KeyError.""" - if value not in self: - raise KeyError(value) - self.discard(value) - - def pop(self): - """Return the popped value. Raise KeyError if empty.""" - it = iter(self) - try: - value = next(it) - except StopIteration: - raise KeyError from None - self.discard(value) - return value - - def clear(self): - """This is slow (creates N new iterators!) but effective.""" - try: - while True: - self.pop() - except KeyError: - pass - - def __ior__(self, it): - for value in it: - self.add(value) - return self - - def __iand__(self, it): - for value in (self - it): - self.discard(value) - return self - - def __ixor__(self, it): - if it is self: - self.clear() - else: - if not isinstance(it, Set): - it = self._from_iterable(it) - for value in it: - if value in self: - self.discard(value) - else: - self.add(value) - return self - - def __isub__(self, it): - if it is self: - self.clear() - else: - for value in it: - self.discard(value) - return self - - + 'A mutable set is a finite, iterable container.\n\n This class provides concrete generic implementations of all\n methods except for __contains__, __iter__, __len__,\n add(), and discard().\n\n To override the comparisons (presumably for speed, as the\n semantics are fixed), all you have to do is redefine __le__ and\n then the other operations will automatically follow suit.\n ';__slots__=() + @abstractmethod + def add(self,value):'Add an element.';raise NotImplementedError + @abstractmethod + def discard(self,value):'Remove an element. Do not raise an exception if absent.';raise NotImplementedError + def remove(B,value): + 'Remove an element. If not a member, raise a KeyError.';A=value + if A not in B:raise KeyError(A) + B.discard(A) + def pop(A): + 'Return the popped value. Raise KeyError if empty.';C=iter(A) + try:B=next(C) + except StopIteration:raise KeyError from _A + A.discard(B);return B + def clear(A): + 'This is slow (creates N new iterators!) but effective.' 
+ try: + while _B:A.pop() + except KeyError:pass + def __ior__(A,it): + for B in it:A.add(B) + return A + def __iand__(A,it): + for B in A-it:A.discard(B) + return A + def __ixor__(A,it): + B=it + if B is A:A.clear() + else: + if not isinstance(B,Set):B=A._from_iterable(B) + for C in B: + if C in A:A.discard(C) + else:A.add(C) + return A + def __isub__(A,it): + if it is A:A.clear() + else: + for B in it:A.discard(B) + return A MutableSet.register(set) - - -### MAPPINGS ### - class Mapping(Collection): - """A Mapping is a generic container for associating key/value - pairs. - - This class provides concrete generic implementations of all - methods except for __getitem__, __iter__, and __len__. - """ - - __slots__ = () - - # Tell ABCMeta.__new__ that this class should have TPFLAGS_MAPPING set. - __abc_tpflags__ = 1 << 6 # Py_TPFLAGS_MAPPING - - @abstractmethod - def __getitem__(self, key): - raise KeyError - - def get(self, key, default=None): - 'D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None.' 
- try: - return self[key] - except KeyError: - return default - - def __contains__(self, key): - try: - self[key] - except KeyError: - return False - else: - return True - - def keys(self): - "D.keys() -> a set-like object providing a view on D's keys" - return KeysView(self) - - def items(self): - "D.items() -> a set-like object providing a view on D's items" - return ItemsView(self) - - def values(self): - "D.values() -> an object providing a view on D's values" - return ValuesView(self) - - def __eq__(self, other): - if not isinstance(other, Mapping): - return NotImplemented - return dict(self.items()) == dict(other.items()) - - __reversed__ = None - + 'A Mapping is a generic container for associating key/value\n pairs.\n\n This class provides concrete generic implementations of all\n methods except for __getitem__, __iter__, and __len__.\n ';__slots__=();__abc_tpflags__=1<<6 + @abstractmethod + def __getitem__(self,key):raise KeyError + def get(A,key,default=_A): + 'D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None.' 
+ try:return A[key] + except KeyError:return default + def __contains__(A,key): + try:A[key] + except KeyError:return _C + else:return _B + def keys(A):"D.keys() -> a set-like object providing a view on D's keys";return KeysView(A) + def items(A):"D.items() -> a set-like object providing a view on D's items";return ItemsView(A) + def values(A):"D.values() -> an object providing a view on D's values";return ValuesView(A) + def __eq__(B,other): + A=other + if not isinstance(A,Mapping):return NotImplemented + return dict(B.items())==dict(A.items()) + __reversed__=_A Mapping.register(mappingproxy) - - class MappingView(Sized): - - __slots__ = '_mapping', - - def __init__(self, mapping): - self._mapping = mapping - - def __len__(self): - return len(self._mapping) - - def __repr__(self): - return '{0.__class__.__name__}({0._mapping!r})'.format(self) - - __class_getitem__ = classmethod(GenericAlias) - - -class KeysView(MappingView, Set): - - __slots__ = () - - @classmethod - def _from_iterable(self, it): - return set(it) - - def __contains__(self, key): - return key in self._mapping - - def __iter__(self): - yield from self._mapping - - + __slots__='_mapping', + def __init__(A,mapping):A._mapping=mapping + def __len__(A):return len(A._mapping) + def __repr__(A):return'{0.__class__.__name__}({0._mapping!r})'.format(A) + __class_getitem__=classmethod(GenericAlias) +class KeysView(MappingView,Set): + __slots__=() + @classmethod + def _from_iterable(A,it):return set(it) + def __contains__(A,key):return key in A._mapping + def __iter__(A):yield from A._mapping KeysView.register(dict_keys) - - -class ItemsView(MappingView, Set): - - __slots__ = () - - @classmethod - def _from_iterable(self, it): - return set(it) - - def __contains__(self, item): - key, value = item - try: - v = self._mapping[key] - except KeyError: - return False - else: - return v is value or v == value - - def __iter__(self): - for key in self._mapping: - yield (key, self._mapping[key]) - - +class 
ItemsView(MappingView,Set): + __slots__=() + @classmethod + def _from_iterable(A,it):return set(it) + def __contains__(C,item): + D,A=item + try:B=C._mapping[D] + except KeyError:return _C + else:return B is A or B==A + def __iter__(A): + for B in A._mapping:yield(B,A._mapping[B]) ItemsView.register(dict_items) - - -class ValuesView(MappingView, Collection): - - __slots__ = () - - def __contains__(self, value): - for key in self._mapping: - v = self._mapping[key] - if v is value or v == value: - return True - return False - - def __iter__(self): - for key in self._mapping: - yield self._mapping[key] - - +class ValuesView(MappingView,Collection): + __slots__=() + def __contains__(A,value): + B=value + for D in A._mapping: + C=A._mapping[D] + if C is B or C==B:return _B + return _C + def __iter__(A): + for B in A._mapping:yield A._mapping[B] ValuesView.register(dict_values) - - class MutableMapping(Mapping): - """A MutableMapping is a generic container for associating - key/value pairs. - - This class provides concrete generic implementations of all - methods except for __getitem__, __setitem__, __delitem__, - __iter__, and __len__. - """ - - __slots__ = () - - @abstractmethod - def __setitem__(self, key, value): - raise KeyError - - @abstractmethod - def __delitem__(self, key): - raise KeyError - - __marker = object() - - def pop(self, key, default=__marker): - '''D.pop(k[,d]) -> v, remove specified key and return the corresponding value. - If key is not found, d is returned if given, otherwise KeyError is raised. - ''' - try: - value = self[key] - except KeyError: - if default is self.__marker: - raise - return default - else: - del self[key] - return value - - def popitem(self): - '''D.popitem() -> (k, v), remove and return some (key, value) pair - as a 2-tuple; but raise KeyError if D is empty. 
- ''' - try: - key = next(iter(self)) - except StopIteration: - raise KeyError from None - value = self[key] - del self[key] - return key, value - - def clear(self): - 'D.clear() -> None. Remove all items from D.' - try: - while True: - self.popitem() - except KeyError: - pass - - def update(self, other=(), /, **kwds): - ''' D.update([E, ]**F) -> None. Update D from mapping/iterable E and F. - If E present and has a .keys() method, does: for k in E: D[k] = E[k] - If E present and lacks .keys() method, does: for (k, v) in E: D[k] = v - In either case, this is followed by: for k, v in F.items(): D[k] = v - ''' - if isinstance(other, Mapping): - for key in other: - self[key] = other[key] - elif hasattr(other, "keys"): - for key in other.keys(): - self[key] = other[key] - else: - for key, value in other: - self[key] = value - for key, value in kwds.items(): - self[key] = value - - def setdefault(self, key, default=None): - 'D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D' - try: - return self[key] - except KeyError: - self[key] = default - return default - - + 'A MutableMapping is a generic container for associating\n key/value pairs.\n\n This class provides concrete generic implementations of all\n methods except for __getitem__, __setitem__, __delitem__,\n __iter__, and __len__.\n ';__slots__=() + @abstractmethod + def __setitem__(self,key,value):raise KeyError + @abstractmethod + def __delitem__(self,key):raise KeyError + __marker=object() + def pop(A,key,default=__marker): + 'D.pop(k[,d]) -> v, remove specified key and return the corresponding value.\n If key is not found, d is returned if given, otherwise KeyError is raised.\n ';B=default + try:C=A[key] + except KeyError: + if B is A.__marker:raise + return B + else:del A[key];return C + def popitem(A): + 'D.popitem() -> (k, v), remove and return some (key, value) pair\n as a 2-tuple; but raise KeyError if D is empty.\n ' + try:B=next(iter(A)) + except StopIteration:raise KeyError from _A + 
C=A[B];del A[B];return B,C + def clear(A): + 'D.clear() -> None. Remove all items from D.' + try: + while _B:A.popitem() + except KeyError:pass + def update(C,B=(),**E): + ' D.update([E, ]**F) -> None. Update D from mapping/iterable E and F.\n If E present and has a .keys() method, does: for k in E: D[k] = E[k]\n If E present and lacks .keys() method, does: for (k, v) in E: D[k] = v\n In either case, this is followed by: for k, v in F.items(): D[k] = v\n ' + if isinstance(B,Mapping): + for A in B:C[A]=B[A] + elif hasattr(B,'keys'): + for A in B.keys():C[A]=B[A] + else: + for(A,D)in B:C[A]=D + for(A,D)in E.items():C[A]=D + def setdefault(A,key,default=_A): + 'D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D';B=default + try:return A[key] + except KeyError:A[key]=B + return B MutableMapping.register(dict) - - -### SEQUENCES ### - -class Sequence(Reversible, Collection): - """All the operations on a read-only sequence. - - Concrete subclasses must override __new__ or __init__, - __getitem__, and __len__. - """ - - __slots__ = () - - # Tell ABCMeta.__new__ that this class should have TPFLAGS_SEQUENCE set. - __abc_tpflags__ = 1 << 5 # Py_TPFLAGS_SEQUENCE - - @abstractmethod - def __getitem__(self, index): - raise IndexError - - def __iter__(self): - i = 0 - try: - while True: - v = self[i] - yield v - i += 1 - except IndexError: - return - - def __contains__(self, value): - for v in self: - if v is value or v == value: - return True - return False - - def __reversed__(self): - for i in reversed(range(len(self))): - yield self[i] - - def index(self, value, start=0, stop=None): - '''S.index(value, [start, [stop]]) -> integer -- return first index of value. - Raises ValueError if the value is not present. - - Supporting start and stop arguments is optional, but - recommended. 
- ''' - if start is not None and start < 0: - start = max(len(self) + start, 0) - if stop is not None and stop < 0: - stop += len(self) - - i = start - while stop is None or i < stop: - try: - v = self[i] - if v is value or v == value: - return i - except IndexError: - break - i += 1 - raise ValueError - - def count(self, value): - 'S.count(value) -> integer -- return number of occurrences of value' - return sum(1 for v in self if v is value or v == value) - +class Sequence(Reversible,Collection): + 'All the operations on a read-only sequence.\n\n Concrete subclasses must override __new__ or __init__,\n __getitem__, and __len__.\n ';__slots__=();__abc_tpflags__=1<<5 + @abstractmethod + def __getitem__(self,index):raise IndexError + def __iter__(B): + A=0 + try: + while _B:C=B[A];yield C;A+=1 + except IndexError:return + def __contains__(C,value): + A=value + for B in C: + if B is A or B==A:return _B + return _C + def __reversed__(A): + for B in reversed(range(len(A))):yield A[B] + def index(D,value,start=0,stop=_A): + 'S.index(value, [start, [stop]]) -> integer -- return first index of value.\n Raises ValueError if the value is not present.\n\n Supporting start and stop arguments is optional, but\n recommended.\n ';E=value;A=stop;B=start + if B is not _A and B<0:B=max(len(D)+B,0) + if A is not _A and A<0:A+=len(D) + C=B + while A is _A or C integer -- return number of occurrences of value';A=value;return sum(1 for B in B if B is A or B==A) Sequence.register(tuple) Sequence.register(str) Sequence.register(range) Sequence.register(memoryview) - - -class ByteString(Sequence): - """This unifies bytes and bytearray. - - XXX Should add all their methods. - """ - - __slots__ = () - +class ByteString(Sequence):'This unifies bytes and bytearray.\n\n XXX Should add all their methods.\n ';__slots__=() ByteString.register(bytes) ByteString.register(bytearray) - - class MutableSequence(Sequence): - """All the operations on a read-write sequence. 
- - Concrete subclasses must provide __new__ or __init__, - __getitem__, __setitem__, __delitem__, __len__, and insert(). - """ - - __slots__ = () - - @abstractmethod - def __setitem__(self, index, value): - raise IndexError - - @abstractmethod - def __delitem__(self, index): - raise IndexError - - @abstractmethod - def insert(self, index, value): - 'S.insert(index, value) -- insert value before index' - raise IndexError - - def append(self, value): - 'S.append(value) -- append value to the end of the sequence' - self.insert(len(self), value) - - def clear(self): - 'S.clear() -> None -- remove all items from S' - try: - while True: - self.pop() - except IndexError: - pass - - def reverse(self): - 'S.reverse() -- reverse *IN PLACE*' - n = len(self) - for i in range(n//2): - self[i], self[n-i-1] = self[n-i-1], self[i] - - def extend(self, values): - 'S.extend(iterable) -- extend sequence by appending elements from the iterable' - if values is self: - values = list(values) - for v in values: - self.append(v) - - def pop(self, index=-1): - '''S.pop([index]) -> item -- remove and return item at index (default last). - Raise IndexError if list is empty or index is out of range. - ''' - v = self[index] - del self[index] - return v - - def remove(self, value): - '''S.remove(value) -- remove first occurrence of value. - Raise ValueError if the value is not present. 
- ''' - del self[self.index(value)] - - def __iadd__(self, values): - self.extend(values) - return self - - + 'All the operations on a read-write sequence.\n\n Concrete subclasses must provide __new__ or __init__,\n __getitem__, __setitem__, __delitem__, __len__, and insert().\n ';__slots__=() + @abstractmethod + def __setitem__(self,index,value):raise IndexError + @abstractmethod + def __delitem__(self,index):raise IndexError + @abstractmethod + def insert(self,index,value):'S.insert(index, value) -- insert value before index';raise IndexError + def append(A,value):'S.append(value) -- append value to the end of the sequence';A.insert(len(A),value) + def clear(A): + 'S.clear() -> None -- remove all items from S' + try: + while _B:A.pop() + except IndexError:pass + def reverse(A): + 'S.reverse() -- reverse *IN PLACE*';C=len(A) + for B in range(C//2):A[B],A[C-B-1]=A[C-B-1],A[B] + def extend(B,values): + 'S.extend(iterable) -- extend sequence by appending elements from the iterable';A=values + if A is B:A=list(A) + for C in A:B.append(C) + def pop(A,index=-1):'S.pop([index]) -> item -- remove and return item at index (default last).\n Raise IndexError if list is empty or index is out of range.\n ';B=index;C=A[B];del A[B];return C + def remove(A,value):'S.remove(value) -- remove first occurrence of value.\n Raise ValueError if the value is not present.\n ';del A[A.index(value)] + def __iadd__(A,values):A.extend(values);return A MutableSequence.register(list) -MutableSequence.register(bytearray) # Multiply inheriting, see ByteString +MutableSequence.register(bytearray) \ No newline at end of file diff --git a/Lib/_compat_pickle.py b/Lib/_compat_pickle.py index 17b9010278f..1fceb5f3fec 100644 --- a/Lib/_compat_pickle.py +++ b/Lib/_compat_pickle.py @@ -1,259 +1,85 @@ -# This module is used to map the old Python 2 names to the new names used in -# Python 3 for the pickle module. This needed to make pickle streams -# generated with Python 2 loadable by Python 3. 
- -# This is a copy of lib2to3.fixes.fix_imports.MAPPING. We cannot import -# lib2to3 and use the mapping defined there, because lib2to3 uses pickle. -# Thus, this could cause the module to be imported recursively. -IMPORT_MAPPING = { - '__builtin__' : 'builtins', - 'copy_reg': 'copyreg', - 'Queue': 'queue', - 'SocketServer': 'socketserver', - 'ConfigParser': 'configparser', - 'repr': 'reprlib', - 'tkFileDialog': 'tkinter.filedialog', - 'tkSimpleDialog': 'tkinter.simpledialog', - 'tkColorChooser': 'tkinter.colorchooser', - 'tkCommonDialog': 'tkinter.commondialog', - 'Dialog': 'tkinter.dialog', - 'Tkdnd': 'tkinter.dnd', - 'tkFont': 'tkinter.font', - 'tkMessageBox': 'tkinter.messagebox', - 'ScrolledText': 'tkinter.scrolledtext', - 'Tkconstants': 'tkinter.constants', - 'Tix': 'tkinter.tix', - 'ttk': 'tkinter.ttk', - 'Tkinter': 'tkinter', - 'markupbase': '_markupbase', - '_winreg': 'winreg', - 'thread': '_thread', - 'dummy_thread': '_dummy_thread', - 'dbhash': 'dbm.bsd', - 'dumbdbm': 'dbm.dumb', - 'dbm': 'dbm.ndbm', - 'gdbm': 'dbm.gnu', - 'xmlrpclib': 'xmlrpc.client', - 'SimpleXMLRPCServer': 'xmlrpc.server', - 'httplib': 'http.client', - 'htmlentitydefs' : 'html.entities', - 'HTMLParser' : 'html.parser', - 'Cookie': 'http.cookies', - 'cookielib': 'http.cookiejar', - 'BaseHTTPServer': 'http.server', - 'test.test_support': 'test.support', - 'commands': 'subprocess', - 'urlparse' : 'urllib.parse', - 'robotparser' : 'urllib.robotparser', - 'urllib2': 'urllib.request', - 'anydbm': 'dbm', - '_abcoll' : 'collections.abc', -} - - -# This contains rename rules that are easy to handle. We ignore the more -# complex stuff (e.g. mapping the names in the urllib and types modules). -# These rules should be run before import names are fixed. 
-NAME_MAPPING = { - ('__builtin__', 'xrange'): ('builtins', 'range'), - ('__builtin__', 'reduce'): ('functools', 'reduce'), - ('__builtin__', 'intern'): ('sys', 'intern'), - ('__builtin__', 'unichr'): ('builtins', 'chr'), - ('__builtin__', 'unicode'): ('builtins', 'str'), - ('__builtin__', 'long'): ('builtins', 'int'), - ('itertools', 'izip'): ('builtins', 'zip'), - ('itertools', 'imap'): ('builtins', 'map'), - ('itertools', 'ifilter'): ('builtins', 'filter'), - ('itertools', 'ifilterfalse'): ('itertools', 'filterfalse'), - ('itertools', 'izip_longest'): ('itertools', 'zip_longest'), - ('UserDict', 'IterableUserDict'): ('collections', 'UserDict'), - ('UserList', 'UserList'): ('collections', 'UserList'), - ('UserString', 'UserString'): ('collections', 'UserString'), - ('whichdb', 'whichdb'): ('dbm', 'whichdb'), - ('_socket', 'fromfd'): ('socket', 'fromfd'), - ('_multiprocessing', 'Connection'): ('multiprocessing.connection', 'Connection'), - ('multiprocessing.process', 'Process'): ('multiprocessing.context', 'Process'), - ('multiprocessing.forking', 'Popen'): ('multiprocessing.popen_fork', 'Popen'), - ('urllib', 'ContentTooShortError'): ('urllib.error', 'ContentTooShortError'), - ('urllib', 'getproxies'): ('urllib.request', 'getproxies'), - ('urllib', 'pathname2url'): ('urllib.request', 'pathname2url'), - ('urllib', 'quote_plus'): ('urllib.parse', 'quote_plus'), - ('urllib', 'quote'): ('urllib.parse', 'quote'), - ('urllib', 'unquote_plus'): ('urllib.parse', 'unquote_plus'), - ('urllib', 'unquote'): ('urllib.parse', 'unquote'), - ('urllib', 'url2pathname'): ('urllib.request', 'url2pathname'), - ('urllib', 'urlcleanup'): ('urllib.request', 'urlcleanup'), - ('urllib', 'urlencode'): ('urllib.parse', 'urlencode'), - ('urllib', 'urlopen'): ('urllib.request', 'urlopen'), - ('urllib', 'urlretrieve'): ('urllib.request', 'urlretrieve'), - ('urllib2', 'HTTPError'): ('urllib.error', 'HTTPError'), - ('urllib2', 'URLError'): ('urllib.error', 'URLError'), -} - -PYTHON2_EXCEPTIONS 
= ( - "ArithmeticError", - "AssertionError", - "AttributeError", - "BaseException", - "BufferError", - "BytesWarning", - "DeprecationWarning", - "EOFError", - "EnvironmentError", - "Exception", - "FloatingPointError", - "FutureWarning", - "GeneratorExit", - "IOError", - "ImportError", - "ImportWarning", - "IndentationError", - "IndexError", - "KeyError", - "KeyboardInterrupt", - "LookupError", - "MemoryError", - "NameError", - "NotImplementedError", - "OSError", - "OverflowError", - "PendingDeprecationWarning", - "ReferenceError", - "RuntimeError", - "RuntimeWarning", - # StandardError is gone in Python 3, so we map it to Exception - "StopIteration", - "SyntaxError", - "SyntaxWarning", - "SystemError", - "SystemExit", - "TabError", - "TypeError", - "UnboundLocalError", - "UnicodeDecodeError", - "UnicodeEncodeError", - "UnicodeError", - "UnicodeTranslateError", - "UnicodeWarning", - "UserWarning", - "ValueError", - "Warning", - "ZeroDivisionError", -) - -try: - WindowsError -except NameError: - pass -else: - PYTHON2_EXCEPTIONS += ("WindowsError",) - -# NOTE: RUSTPYTHON exceptions -try: - JitError -except NameError: - pass -else: - PYTHON2_EXCEPTIONS += ("JitError",) - -for excname in PYTHON2_EXCEPTIONS: - NAME_MAPPING[("exceptions", excname)] = ("builtins", excname) - -MULTIPROCESSING_EXCEPTIONS = ( - 'AuthenticationError', - 'BufferTooShort', - 'ProcessError', - 'TimeoutError', -) - -for excname in MULTIPROCESSING_EXCEPTIONS: - NAME_MAPPING[("multiprocessing", excname)] = ("multiprocessing.context", excname) - -# Same, but for 3.x to 2.x -REVERSE_IMPORT_MAPPING = dict((v, k) for (k, v) in IMPORT_MAPPING.items()) -assert len(REVERSE_IMPORT_MAPPING) == len(IMPORT_MAPPING) -REVERSE_NAME_MAPPING = dict((v, k) for (k, v) in NAME_MAPPING.items()) -assert len(REVERSE_NAME_MAPPING) == len(NAME_MAPPING) - -# Non-mutual mappings. 
- -IMPORT_MAPPING.update({ - 'cPickle': 'pickle', - '_elementtree': 'xml.etree.ElementTree', - 'FileDialog': 'tkinter.filedialog', - 'SimpleDialog': 'tkinter.simpledialog', - 'DocXMLRPCServer': 'xmlrpc.server', - 'SimpleHTTPServer': 'http.server', - 'CGIHTTPServer': 'http.server', - # For compatibility with broken pickles saved in old Python 3 versions - 'UserDict': 'collections', - 'UserList': 'collections', - 'UserString': 'collections', - 'whichdb': 'dbm', - 'StringIO': 'io', - 'cStringIO': 'io', -}) - -REVERSE_IMPORT_MAPPING.update({ - '_bz2': 'bz2', - '_dbm': 'dbm', - '_functools': 'functools', - '_gdbm': 'gdbm', - '_pickle': 'pickle', -}) - -NAME_MAPPING.update({ - ('__builtin__', 'basestring'): ('builtins', 'str'), - ('exceptions', 'StandardError'): ('builtins', 'Exception'), - ('UserDict', 'UserDict'): ('collections', 'UserDict'), - ('socket', '_socketobject'): ('socket', 'SocketType'), -}) - -REVERSE_NAME_MAPPING.update({ - ('_functools', 'reduce'): ('__builtin__', 'reduce'), - ('tkinter.filedialog', 'FileDialog'): ('FileDialog', 'FileDialog'), - ('tkinter.filedialog', 'LoadFileDialog'): ('FileDialog', 'LoadFileDialog'), - ('tkinter.filedialog', 'SaveFileDialog'): ('FileDialog', 'SaveFileDialog'), - ('tkinter.simpledialog', 'SimpleDialog'): ('SimpleDialog', 'SimpleDialog'), - ('xmlrpc.server', 'ServerHTMLDoc'): ('DocXMLRPCServer', 'ServerHTMLDoc'), - ('xmlrpc.server', 'XMLRPCDocGenerator'): - ('DocXMLRPCServer', 'XMLRPCDocGenerator'), - ('xmlrpc.server', 'DocXMLRPCRequestHandler'): - ('DocXMLRPCServer', 'DocXMLRPCRequestHandler'), - ('xmlrpc.server', 'DocXMLRPCServer'): - ('DocXMLRPCServer', 'DocXMLRPCServer'), - ('xmlrpc.server', 'DocCGIXMLRPCRequestHandler'): - ('DocXMLRPCServer', 'DocCGIXMLRPCRequestHandler'), - ('http.server', 'SimpleHTTPRequestHandler'): - ('SimpleHTTPServer', 'SimpleHTTPRequestHandler'), - ('http.server', 'CGIHTTPRequestHandler'): - ('CGIHTTPServer', 'CGIHTTPRequestHandler'), - ('_socket', 'socket'): ('socket', '_socketobject'), -}) 
- -PYTHON3_OSERROR_EXCEPTIONS = ( - 'BrokenPipeError', - 'ChildProcessError', - 'ConnectionAbortedError', - 'ConnectionError', - 'ConnectionRefusedError', - 'ConnectionResetError', - 'FileExistsError', - 'FileNotFoundError', - 'InterruptedError', - 'IsADirectoryError', - 'NotADirectoryError', - 'PermissionError', - 'ProcessLookupError', - 'TimeoutError', -) - -for excname in PYTHON3_OSERROR_EXCEPTIONS: - REVERSE_NAME_MAPPING[('builtins', excname)] = ('exceptions', 'OSError') - -PYTHON3_IMPORTERROR_EXCEPTIONS = ( - 'ModuleNotFoundError', -) - -for excname in PYTHON3_IMPORTERROR_EXCEPTIONS: - REVERSE_NAME_MAPPING[('builtins', excname)] = ('exceptions', 'ImportError') +_A8='CGIHTTPRequestHandler' +_A7='SimpleHTTPRequestHandler' +_A6='DocCGIXMLRPCRequestHandler' +_A5='DocXMLRPCRequestHandler' +_A4='XMLRPCDocGenerator' +_A3='ServerHTMLDoc' +_A2='SaveFileDialog' +_A1='LoadFileDialog' +_A0='_socketobject' +_z='_functools' +_y='pickle' +_x='CGIHTTPServer' +_w='SimpleHTTPServer' +_v='TimeoutError' +_u='OSError' +_t='ImportError' +_s='Exception' +_r='multiprocessing.context' +_q='functools' +_p='URLError' +_o='HTTPError' +_n='urlretrieve' +_m='urlopen' +_l='urlencode' +_k='urlcleanup' +_j='url2pathname' +_i='unquote' +_h='unquote_plus' +_g='quote_plus' +_f='pathname2url' +_e='getproxies' +_d='ContentTooShortError' +_c='Process' +_b='Connection' +_a='fromfd' +_Z='_socket' +_Y='intern' +_X='urllib.error' +_W='tkinter.simpledialog' +_V='urllib2' +_U='SimpleDialog' +_T='exceptions' +_S='whichdb' +_R='UserString' +_Q='UserList' +_P='reduce' +_O='socket' +_N='http.server' +_M='tkinter.filedialog' +_L='dbm' +_K='FileDialog' +_J='UserDict' +_I='urllib.parse' +_H='collections' +_G='itertools' +_F='urllib.request' +_E='xmlrpc.server' +_D='DocXMLRPCServer' +_C='__builtin__' +_B='urllib' +_A='builtins' 
+IMPORT_MAPPING={_C:_A,'copy_reg':'copyreg','Queue':'queue','SocketServer':'socketserver','ConfigParser':'configparser','repr':'reprlib','tkFileDialog':_M,'tkSimpleDialog':_W,'tkColorChooser':'tkinter.colorchooser','tkCommonDialog':'tkinter.commondialog','Dialog':'tkinter.dialog','Tkdnd':'tkinter.dnd','tkFont':'tkinter.font','tkMessageBox':'tkinter.messagebox','ScrolledText':'tkinter.scrolledtext','Tkconstants':'tkinter.constants','Tix':'tkinter.tix','ttk':'tkinter.ttk','Tkinter':'tkinter','markupbase':'_markupbase','_winreg':'winreg','thread':'_thread','dummy_thread':'_dummy_thread','dbhash':'dbm.bsd','dumbdbm':'dbm.dumb',_L:'dbm.ndbm','gdbm':'dbm.gnu','xmlrpclib':'xmlrpc.client','SimpleXMLRPCServer':_E,'httplib':'http.client','htmlentitydefs':'html.entities','HTMLParser':'html.parser','Cookie':'http.cookies','cookielib':'http.cookiejar','BaseHTTPServer':_N,'test.test_support':'test.support','commands':'subprocess','urlparse':_I,'robotparser':'urllib.robotparser',_V:_F,'anydbm':_L,'_abcoll':'collections.abc'} +NAME_MAPPING={(_C,'xrange'):(_A,'range'),(_C,_P):(_q,_P),(_C,_Y):('sys',_Y),(_C,'unichr'):(_A,'chr'),(_C,'unicode'):(_A,'str'),(_C,'long'):(_A,'int'),(_G,'izip'):(_A,'zip'),(_G,'imap'):(_A,'map'),(_G,'ifilter'):(_A,'filter'),(_G,'ifilterfalse'):(_G,'filterfalse'),(_G,'izip_longest'):(_G,'zip_longest'),(_J,'IterableUserDict'):(_H,_J),(_Q,_Q):(_H,_Q),(_R,_R):(_H,_R),(_S,_S):(_L,_S),(_Z,_a):(_O,_a),('_multiprocessing',_b):('multiprocessing.connection',_b),('multiprocessing.process',_c):(_r,_c),('multiprocessing.forking','Popen'):('multiprocessing.popen_fork','Popen'),(_B,_d):(_X,_d),(_B,_e):(_F,_e),(_B,_f):(_F,_f),(_B,_g):(_I,_g),(_B,'quote'):(_I,'quote'),(_B,_h):(_I,_h),(_B,_i):(_I,_i),(_B,_j):(_F,_j),(_B,_k):(_F,_k),(_B,_l):(_I,_l),(_B,_m):(_F,_m),(_B,_n):(_F,_n),(_V,_o):(_X,_o),(_V,_p):(_X,_p)} 
+PYTHON2_EXCEPTIONS='ArithmeticError','AssertionError','AttributeError','BaseException','BufferError','BytesWarning','DeprecationWarning','EOFError','EnvironmentError',_s,'FloatingPointError','FutureWarning','GeneratorExit','IOError',_t,'ImportWarning','IndentationError','IndexError','KeyError','KeyboardInterrupt','LookupError','MemoryError','NameError','NotImplementedError',_u,'OverflowError','PendingDeprecationWarning','ReferenceError','RuntimeError','RuntimeWarning','StopIteration','SyntaxError','SyntaxWarning','SystemError','SystemExit','TabError','TypeError','UnboundLocalError','UnicodeDecodeError','UnicodeEncodeError','UnicodeError','UnicodeTranslateError','UnicodeWarning','UserWarning','ValueError','Warning','ZeroDivisionError' +try:WindowsError +except NameError:pass +else:PYTHON2_EXCEPTIONS+='WindowsError', +try:JitError +except NameError:pass +else:PYTHON2_EXCEPTIONS+='JitError', +for excname in PYTHON2_EXCEPTIONS:NAME_MAPPING[_T,excname]=_A,excname +MULTIPROCESSING_EXCEPTIONS='AuthenticationError','BufferTooShort','ProcessError',_v +for excname in MULTIPROCESSING_EXCEPTIONS:NAME_MAPPING['multiprocessing',excname]=_r,excname +REVERSE_IMPORT_MAPPING=dict((B,A)for(A,B)in IMPORT_MAPPING.items()) +assert len(REVERSE_IMPORT_MAPPING)==len(IMPORT_MAPPING) +REVERSE_NAME_MAPPING=dict((B,A)for(A,B)in NAME_MAPPING.items()) +assert len(REVERSE_NAME_MAPPING)==len(NAME_MAPPING) +IMPORT_MAPPING.update({'cPickle':_y,'_elementtree':'xml.etree.ElementTree',_K:_M,_U:_W,_D:_E,_w:_N,_x:_N,_J:_H,_Q:_H,_R:_H,_S:_L,'StringIO':'io','cStringIO':'io'}) +REVERSE_IMPORT_MAPPING.update({'_bz2':'bz2','_dbm':_L,_z:_q,'_gdbm':'gdbm','_pickle':_y}) +NAME_MAPPING.update({(_C,'basestring'):(_A,'str'),(_T,'StandardError'):(_A,_s),(_J,_J):(_H,_J),(_O,_A0):(_O,'SocketType')}) 
+REVERSE_NAME_MAPPING.update({(_z,_P):(_C,_P),(_M,_K):(_K,_K),(_M,_A1):(_K,_A1),(_M,_A2):(_K,_A2),(_W,_U):(_U,_U),(_E,_A3):(_D,_A3),(_E,_A4):(_D,_A4),(_E,_A5):(_D,_A5),(_E,_D):(_D,_D),(_E,_A6):(_D,_A6),(_N,_A7):(_w,_A7),(_N,_A8):(_x,_A8),(_Z,_O):(_O,_A0)}) +PYTHON3_OSERROR_EXCEPTIONS='BrokenPipeError','ChildProcessError','ConnectionAbortedError','ConnectionError','ConnectionRefusedError','ConnectionResetError','FileExistsError','FileNotFoundError','InterruptedError','IsADirectoryError','NotADirectoryError','PermissionError','ProcessLookupError',_v +for excname in PYTHON3_OSERROR_EXCEPTIONS:REVERSE_NAME_MAPPING[_A,excname]=_T,_u +PYTHON3_IMPORTERROR_EXCEPTIONS='ModuleNotFoundError', +for excname in PYTHON3_IMPORTERROR_EXCEPTIONS:REVERSE_NAME_MAPPING[_A,excname]=_T,_t \ No newline at end of file diff --git a/Lib/_compression.py b/Lib/_compression.py index e8b70aa0a3e..25b719df636 100644 --- a/Lib/_compression.py +++ b/Lib/_compression.py @@ -1,162 +1,66 @@ -"""Internal classes used by the gzip, lzma and bz2 modules""" - -import io -import sys - -BUFFER_SIZE = io.DEFAULT_BUFFER_SIZE # Compressed data read chunk size - - +'Internal classes used by the gzip, lzma and bz2 modules' +import io,sys +BUFFER_SIZE=io.DEFAULT_BUFFER_SIZE class BaseStream(io.BufferedIOBase): - """Mode-checking helper functions.""" - - def _check_not_closed(self): - if self.closed: - raise ValueError("I/O operation on closed file") - - def _check_can_read(self): - if not self.readable(): - raise io.UnsupportedOperation("File not open for reading") - - def _check_can_write(self): - if not self.writable(): - raise io.UnsupportedOperation("File not open for writing") - - def _check_can_seek(self): - if not self.readable(): - raise io.UnsupportedOperation("Seeking is only supported " - "on files open for reading") - if not self.seekable(): - raise io.UnsupportedOperation("The underlying file object " - "does not support seeking") - - + 'Mode-checking helper functions.' 
+ def _check_not_closed(A): + if A.closed:raise ValueError('I/O operation on closed file') + def _check_can_read(A): + if not A.readable():raise io.UnsupportedOperation('File not open for reading') + def _check_can_write(A): + if not A.writable():raise io.UnsupportedOperation('File not open for writing') + def _check_can_seek(A): + if not A.readable():raise io.UnsupportedOperation('Seeking is only supported on files open for reading') + if not A.seekable():raise io.UnsupportedOperation('The underlying file object does not support seeking') class DecompressReader(io.RawIOBase): - """Adapts the decompressor API to a RawIOBase reader API""" - - def readable(self): - return True - - def __init__(self, fp, decomp_factory, trailing_error=(), **decomp_args): - self._fp = fp - self._eof = False - self._pos = 0 # Current offset in decompressed stream - - # Set to size of decompressed stream once it is known, for SEEK_END - self._size = -1 - - # Save the decompressor factory and arguments. - # If the file contains multiple compressed streams, each - # stream will need a separate decompressor object. A new decompressor - # object is also needed when implementing a backwards seek(). 
- self._decomp_factory = decomp_factory - self._decomp_args = decomp_args - self._decompressor = self._decomp_factory(**self._decomp_args) - - # Exception class to catch from decompressor signifying invalid - # trailing data to ignore - self._trailing_error = trailing_error - - def close(self): - self._decompressor = None - return super().close() - - def seekable(self): - return self._fp.seekable() - - def readinto(self, b): - with memoryview(b) as view, view.cast("B") as byte_view: - data = self.read(len(byte_view)) - byte_view[:len(data)] = data - return len(data) - - def read(self, size=-1): - if size < 0: - return self.readall() - - if not size or self._eof: - return b"" - data = None # Default if EOF is encountered - # Depending on the input data, our call to the decompressor may not - # return any data. In this case, try again after reading another block. - while True: - if self._decompressor.eof: - rawblock = (self._decompressor.unused_data or - self._fp.read(BUFFER_SIZE)) - if not rawblock: - break - # Continue to next stream. - self._decompressor = self._decomp_factory( - **self._decomp_args) - try: - data = self._decompressor.decompress(rawblock, size) - except self._trailing_error: - # Trailing data isn't a valid compressed stream; ignore it. - break - else: - if self._decompressor.needs_input: - rawblock = self._fp.read(BUFFER_SIZE) - if not rawblock: - raise EOFError("Compressed file ended before the " - "end-of-stream marker was reached") - else: - rawblock = b"" - data = self._decompressor.decompress(rawblock, size) - if data: - break - if not data: - self._eof = True - self._size = self._pos - return b"" - self._pos += len(data) - return data - - def readall(self): - chunks = [] - # sys.maxsize means the max length of output buffer is unlimited, - # so that the whole input buffer can be decompressed within one - # .decompress() call. 
- while data := self.read(sys.maxsize): - chunks.append(data) - - return b"".join(chunks) - - # Rewind the file to the beginning of the data stream. - def _rewind(self): - self._fp.seek(0) - self._eof = False - self._pos = 0 - self._decompressor = self._decomp_factory(**self._decomp_args) - - def seek(self, offset, whence=io.SEEK_SET): - # Recalculate offset as an absolute file position. - if whence == io.SEEK_SET: - pass - elif whence == io.SEEK_CUR: - offset = self._pos + offset - elif whence == io.SEEK_END: - # Seeking relative to EOF - we need to know the file's size. - if self._size < 0: - while self.read(io.DEFAULT_BUFFER_SIZE): - pass - offset = self._size + offset - else: - raise ValueError("Invalid value for whence: {}".format(whence)) - - # Make it so that offset is the number of bytes to skip forward. - if offset < self._pos: - self._rewind() - else: - offset -= self._pos - - # Read and discard data until we reach the desired position. - while offset > 0: - data = self.read(min(io.DEFAULT_BUFFER_SIZE, offset)) - if not data: - break - offset -= len(data) - - return self._pos - - def tell(self): - """Return the current file position.""" - return self._pos + 'Adapts the decompressor API to a RawIOBase reader API' + def readable(A):return True + def __init__(A,fp,decomp_factory,trailing_error=(),**B):A._fp=fp;A._eof=False;A._pos=0;A._size=-1;A._decomp_factory=decomp_factory;A._decomp_args=B;A._decompressor=A._decomp_factory(**A._decomp_args);A._trailing_error=trailing_error + def close(A):A._decompressor=None;return super().close() + def seekable(A):return A._fp.seekable() + def readinto(C,b): + with memoryview(b)as D,D.cast('B')as B:A=C.read(len(B));B[:len(A)]=A + return len(A) + def read(A,size=-1): + D=size + if D<0:return A.readall() + if not D or A._eof:return b'' + B=None + while True: + if A._decompressor.eof: + C=A._decompressor.unused_data or A._fp.read(BUFFER_SIZE) + if not C:break + A._decompressor=A._decomp_factory(**A._decomp_args) + 
try:B=A._decompressor.decompress(C,D) + except A._trailing_error:break + else: + if A._decompressor.needs_input: + C=A._fp.read(BUFFER_SIZE) + if not C:raise EOFError('Compressed file ended before the end-of-stream marker was reached') + else:C=b'' + B=A._decompressor.decompress(C,D) + if B:break + if not B:A._eof=True;A._size=A._pos;return b'' + A._pos+=len(B);return B + def readall(B): + A=[] + while(C:=B.read(sys.maxsize)):A.append(C) + return b''.join(A) + def _rewind(A):A._fp.seek(0);A._eof=False;A._pos=0;A._decompressor=A._decomp_factory(**A._decomp_args) + def seek(A,offset,whence=io.SEEK_SET): + C=whence;B=offset + if C==io.SEEK_SET:0 + elif C==io.SEEK_CUR:B=A._pos+B + elif C==io.SEEK_END: + if A._size<0: + while A.read(io.DEFAULT_BUFFER_SIZE):0 + B=A._size+B + else:raise ValueError('Invalid value for whence: {}'.format(C)) + if B0: + D=A.read(min(io.DEFAULT_BUFFER_SIZE,B)) + if not D:break + B-=len(D) + return A._pos + def tell(A):'Return the current file position.';return A._pos \ No newline at end of file diff --git a/Lib/_dummy_os.py b/Lib/_dummy_os.py index 5bd5ec0a13a..5ae55c87eab 100644 --- a/Lib/_dummy_os.py +++ b/Lib/_dummy_os.py @@ -1,66 +1,25 @@ -""" -A shim of the os module containing only simple path-related utilities -""" - -try: - from os import * +'\nA shim of the os module containing only simple path-related utilities\n' +_B='__fspath__' +_A='os.path' +try:from os import* except ImportError: - import abc - - def __getattr__(name): - raise OSError("no os specific module found") - - def _shim(): - import _dummy_os, sys - sys.modules['os'] = _dummy_os - sys.modules['os.path'] = _dummy_os.path - - import posixpath as path - import sys - sys.modules['os.path'] = path - del sys - - sep = path.sep - - - def fspath(path): - """Return the path representation of a path-like object. - - If str or bytes is passed in, it is returned unchanged. Otherwise the - os.PathLike interface is used to get the path representation. 
If the - path representation is not str or bytes, TypeError is raised. If the - provided path is not str, bytes, or os.PathLike, TypeError is raised. - """ - if isinstance(path, (str, bytes)): - return path - - # Work from the object's type to match method resolution of other magic - # methods. - path_type = type(path) - try: - path_repr = path_type.__fspath__(path) - except AttributeError: - if hasattr(path_type, '__fspath__'): - raise - else: - raise TypeError("expected str, bytes or os.PathLike object, " - "not " + path_type.__name__) - if isinstance(path_repr, (str, bytes)): - return path_repr - else: - raise TypeError("expected {}.__fspath__() to return str or bytes, " - "not {}".format(path_type.__name__, - type(path_repr).__name__)) - - class PathLike(abc.ABC): - - """Abstract base class for implementing the file system path protocol.""" - - @abc.abstractmethod - def __fspath__(self): - """Return the file system path representation of the object.""" - raise NotImplementedError - - @classmethod - def __subclasshook__(cls, subclass): - return hasattr(subclass, '__fspath__') + import abc + def __getattr__(name):raise OSError('no os specific module found') + def _shim():import _dummy_os,sys;sys.modules['os']=_dummy_os;sys.modules[_A]=_dummy_os.path + import posixpath as path;import sys;sys.modules[_A]=path;del sys;sep=path.sep + def fspath(path): + 'Return the path representation of a path-like object.\n\n If str or bytes is passed in, it is returned unchanged. Otherwise the\n os.PathLike interface is used to get the path representation. If the\n path representation is not str or bytes, TypeError is raised. 
If the\n provided path is not str, bytes, or os.PathLike, TypeError is raised.\n ' + if isinstance(path,(str,bytes)):return path + path_type=type(path) + try:path_repr=path_type.__fspath__(path) + except AttributeError: + if hasattr(path_type,_B):raise + else:raise TypeError('expected str, bytes or os.PathLike object, not '+path_type.__name__) + if isinstance(path_repr,(str,bytes)):return path_repr + else:raise TypeError('expected {}.__fspath__() to return str or bytes, not {}'.format(path_type.__name__,type(path_repr).__name__)) + class PathLike(abc.ABC): + 'Abstract base class for implementing the file system path protocol.' + @abc.abstractmethod + def __fspath__(self):'Return the file system path representation of the object.';raise NotImplementedError + @classmethod + def __subclasshook__(cls,subclass):return hasattr(subclass,_B) \ No newline at end of file diff --git a/Lib/_dummy_thread.py b/Lib/_dummy_thread.py index 424b0b3be5e..a120dbcd9f9 100644 --- a/Lib/_dummy_thread.py +++ b/Lib/_dummy_thread.py @@ -1,203 +1,63 @@ -"""Drop-in replacement for the thread module. - -Meant to be used as a brain-dead substitute so that threaded code does -not need to be rewritten for when the thread module is not present. - -Suggested usage is:: - - try: - import _thread - except ImportError: - import _dummy_thread as _thread - -""" -# Exports only things specified by thread documentation; -# skipping obsolete synonyms allocate(), start_new(), exit_thread(). -__all__ = ['error', 'start_new_thread', 'exit', 'get_ident', 'allocate_lock', - 'interrupt_main', 'LockType', 'RLock', - '_count'] - -# A dummy value -TIMEOUT_MAX = 2**31 - -# NOTE: this module can be imported early in the extension building process, -# and so top level imports of other modules should be avoided. Instead, all -# imports are done when needed on a function-by-function basis. Since threads -# are disabled, the import lock should not be an issue anyway (??). 
- -error = RuntimeError - -def start_new_thread(function, args, kwargs={}): - """Dummy implementation of _thread.start_new_thread(). - - Compatibility is maintained by making sure that ``args`` is a - tuple and ``kwargs`` is a dictionary. If an exception is raised - and it is SystemExit (which can be done by _thread.exit()) it is - caught and nothing is done; all other exceptions are printed out - by using traceback.print_exc(). - - If the executed function calls interrupt_main the KeyboardInterrupt will be - raised when the function returns. - - """ - if type(args) != type(tuple()): - raise TypeError("2nd arg must be a tuple") - if type(kwargs) != type(dict()): - raise TypeError("3rd arg must be a dict") - global _main - _main = False - try: - function(*args, **kwargs) - except SystemExit: - pass - except: - import traceback - traceback.print_exc() - _main = True - global _interrupt - if _interrupt: - _interrupt = False - raise KeyboardInterrupt - -def exit(): - """Dummy implementation of _thread.exit().""" - raise SystemExit - -def get_ident(): - """Dummy implementation of _thread.get_ident(). - - Since this module should only be used when _threadmodule is not - available, it is safe to assume that the current process is the - only thread. Thus a constant can be safely returned. - """ - return -1 - -def allocate_lock(): - """Dummy implementation of _thread.allocate_lock().""" - return LockType() - -def stack_size(size=None): - """Dummy implementation of _thread.stack_size().""" - if size is not None: - raise error("setting thread stack size not supported") - return 0 - -def _set_sentinel(): - """Dummy implementation of _thread._set_sentinel().""" - return LockType() - -def _count(): - """Dummy implementation of _thread._count().""" - return 0 - -class LockType(object): - """Class implementing dummy implementation of _thread.LockType. - - Compatibility is maintained by maintaining self.locked_status - which is a boolean that stores the state of the lock. 
Pickling of - the lock, though, should not be done since if the _thread module is - then used with an unpickled ``lock()`` from here problems could - occur from this class not having atomic methods. - - """ - - def __init__(self): - self.locked_status = False - - def acquire(self, waitflag=None, timeout=-1): - """Dummy implementation of acquire(). - - For blocking calls, self.locked_status is automatically set to - True and returned appropriately based on value of - ``waitflag``. If it is non-blocking, then the value is - actually checked and not set if it is already acquired. This - is all done so that threading.Condition's assert statements - aren't triggered and throw a little fit. - - """ - if waitflag is None or waitflag: - self.locked_status = True - return True - else: - if not self.locked_status: - self.locked_status = True - return True - else: - if timeout > 0: - import time - time.sleep(timeout) - return False - - __enter__ = acquire - - def __exit__(self, typ, val, tb): - self.release() - - def release(self): - """Release the dummy lock.""" - # XXX Perhaps shouldn't actually bother to test? Could lead - # to problems for complex, threaded code. 
- if not self.locked_status: - raise error - self.locked_status = False - return True - - def locked(self): - return self.locked_status - - def _at_fork_reinit(self): - self.locked_status = False - - def __repr__(self): - return "<%s %s.%s object at %s>" % ( - "locked" if self.locked_status else "unlocked", - self.__class__.__module__, - self.__class__.__qualname__, - hex(id(self)) - ) - -# Used to signal that interrupt_main was called in a "thread" -_interrupt = False -# True when not executing in a "thread" -_main = True - +'Drop-in replacement for the thread module.\n\nMeant to be used as a brain-dead substitute so that threaded code does\nnot need to be rewritten for when the thread module is not present.\n\nSuggested usage is::\n\n try:\n import _thread\n except ImportError:\n import _dummy_thread as _thread\n\n' +_E='unlocked' +_D='locked' +_C=None +_B=False +_A=True +__all__=['error','start_new_thread','exit','get_ident','allocate_lock','interrupt_main','LockType','RLock','_count'] +TIMEOUT_MAX=2**31 +error=RuntimeError +def start_new_thread(function,args,kwargs={}): + 'Dummy implementation of _thread.start_new_thread().\n\n Compatibility is maintained by making sure that ``args`` is a\n tuple and ``kwargs`` is a dictionary. 
If an exception is raised\n and it is SystemExit (which can be done by _thread.exit()) it is\n caught and nothing is done; all other exceptions are printed out\n by using traceback.print_exc().\n\n If the executed function calls interrupt_main the KeyboardInterrupt will be\n raised when the function returns.\n\n ';A=kwargs + if type(args)!=type(tuple()):raise TypeError('2nd arg must be a tuple') + if type(A)!=type(dict()):raise TypeError('3rd arg must be a dict') + global _main;_main=_B + try:function(*args,**A) + except SystemExit:pass + except:import traceback as B;B.print_exc() + _main=_A;global _interrupt + if _interrupt:_interrupt=_B;raise KeyboardInterrupt +def exit():'Dummy implementation of _thread.exit().';raise SystemExit +def get_ident():'Dummy implementation of _thread.get_ident().\n\n Since this module should only be used when _threadmodule is not\n available, it is safe to assume that the current process is the\n only thread. Thus a constant can be safely returned.\n ';return-1 +def allocate_lock():'Dummy implementation of _thread.allocate_lock().';return LockType() +def stack_size(size=_C): + 'Dummy implementation of _thread.stack_size().' + if size is not _C:raise error('setting thread stack size not supported') + return 0 +def _set_sentinel():'Dummy implementation of _thread._set_sentinel().';return LockType() +def _count():'Dummy implementation of _thread._count().';return 0 +class LockType: + 'Class implementing dummy implementation of _thread.LockType.\n\n Compatibility is maintained by maintaining self.locked_status\n which is a boolean that stores the state of the lock. 
Pickling of\n the lock, though, should not be done since if the _thread module is\n then used with an unpickled ``lock()`` from here problems could\n occur from this class not having atomic methods.\n\n ' + def __init__(A):A.locked_status=_B + def acquire(A,waitflag=_C,timeout=-1): + "Dummy implementation of acquire().\n\n For blocking calls, self.locked_status is automatically set to\n True and returned appropriately based on value of\n ``waitflag``. If it is non-blocking, then the value is\n actually checked and not set if it is already acquired. This\n is all done so that threading.Condition's assert statements\n aren't triggered and throw a little fit.\n\n ";B=timeout;C=waitflag + if C is _C or C:A.locked_status=_A;return _A + elif not A.locked_status:A.locked_status=_A;return _A + else: + if B>0:import time;time.sleep(B) + return _B + __enter__=acquire + def __exit__(A,typ,val,tb):A.release() + def release(A): + 'Release the dummy lock.' + if not A.locked_status:raise error + A.locked_status=_B;return _A + def locked(A):return A.locked_status + def _at_fork_reinit(A):A.locked_status=_B + def __repr__(A):return'<%s %s.%s object at %s>'%(_D if A.locked_status else _E,A.__class__.__module__,A.__class__.__qualname__,hex(id(A))) +_interrupt=_B +_main=_A def interrupt_main(): - """Set _interrupt flag to True to have start_new_thread raise - KeyboardInterrupt upon exiting.""" - if _main: - raise KeyboardInterrupt - else: - global _interrupt - _interrupt = True - + 'Set _interrupt flag to True to have start_new_thread raise\n KeyboardInterrupt upon exiting.' 
+ if _main:raise KeyboardInterrupt + else:global _interrupt;_interrupt=_A class RLock: - def __init__(self): - self.locked_count = 0 - - def acquire(self, waitflag=None, timeout=-1): - self.locked_count += 1 - return True - - __enter__ = acquire - - def __exit__(self, typ, val, tb): - self.release() - - def release(self): - if not self.locked_count: - raise error - self.locked_count -= 1 - return True - - def locked(self): - return self.locked_status != 0 - - def __repr__(self): - return "<%s %s.%s object owner=%s count=%s at %s>" % ( - "locked" if self.locked_count else "unlocked", - self.__class__.__module__, - self.__class__.__qualname__, - get_ident() if self.locked_count else 0, - self.locked_count, - hex(id(self)) - ) + def __init__(A):A.locked_count=0 + def acquire(A,waitflag=_C,timeout=-1):A.locked_count+=1;return _A + __enter__=acquire + def __exit__(A,typ,val,tb):A.release() + def release(A): + if not A.locked_count:raise error + A.locked_count-=1;return _A + def locked(A):return A.locked_status!=0 + def __repr__(A):return'<%s %s.%s object owner=%s count=%s at %s>'%(_D if A.locked_count else _E,A.__class__.__module__,A.__class__.__qualname__,get_ident()if A.locked_count else 0,A.locked_count,hex(id(A))) \ No newline at end of file diff --git a/Lib/_markupbase.py b/Lib/_markupbase.py index 3ad7e279960..434ac9b3f1b 100644 --- a/Lib/_markupbase.py +++ b/Lib/_markupbase.py @@ -1,396 +1,189 @@ -"""Shared support for scanning document type declarations in HTML and XHTML. - -This module is used as a foundation for the html.parser module. It has no -documented public API and should not be used directly. - -""" - +'Shared support for scanning document type declarations in HTML and XHTML.\n\nThis module is used as a foundation for the html.parser module. 
It has no\ndocumented public API and should not be used directly.\n\n' +_D='element' +_C='attlist' +_B='\'"' +_A='>' import re - -_declname_match = re.compile(r'[a-zA-Z][-_.a-zA-Z0-9]*\s*').match -_declstringlit_match = re.compile(r'(\'[^\']*\'|"[^"]*")\s*').match -_commentclose = re.compile(r'--\s*>') -_markedsectionclose = re.compile(r']\s*]\s*>') - -# An analysis of the MS-Word extensions is available at -# http://www.planetpublish.com/xmlarena/xap/Thursday/WordtoXML.pdf - -_msmarkedsectionclose = re.compile(r']\s*>') - +_declname_match=re.compile('[a-zA-Z][-_.a-zA-Z0-9]*\\s*').match +_declstringlit_match=re.compile('(\\\'[^\\\']*\\\'|"[^"]*")\\s*').match +_commentclose=re.compile('--\\s*>') +_markedsectionclose=re.compile(']\\s*]\\s*>') +_msmarkedsectionclose=re.compile(']\\s*>') del re - - class ParserBase: - """Parser base class which provides some common support methods used - by the SGML/HTML and XHTML parsers.""" - - def __init__(self): - if self.__class__ is ParserBase: - raise RuntimeError( - "_markupbase.ParserBase must be subclassed") - - def reset(self): - self.lineno = 1 - self.offset = 0 - - def getpos(self): - """Return current line number and offset.""" - return self.lineno, self.offset - - # Internal -- update line number and offset. This should be - # called for each piece of data exactly once, in order -- in other - # words the concatenation of all the input strings to this - # function should be exactly the entire input. - def updatepos(self, i, j): - if i >= j: - return j - rawdata = self.rawdata - nlines = rawdata.count("\n", i, j) - if nlines: - self.lineno = self.lineno + nlines - pos = rawdata.rindex("\n", i, j) # Should not fail - self.offset = j-(pos+1) - else: - self.offset = self.offset + j-i - return j - - _decl_otherchars = '' - - # Internal -- parse declaration (for use by subclasses). 
- def parse_declaration(self, i): - # This is some sort of declaration; in "HTML as - # deployed," this should only be the document type - # declaration (""). - # ISO 8879:1986, however, has more complex - # declaration syntax for elements in , including: - # --comment-- - # [marked section] - # name in the following list: ENTITY, DOCTYPE, ELEMENT, - # ATTLIST, NOTATION, SHORTREF, USEMAP, - # LINKTYPE, LINK, IDLINK, USELINK, SYSTEM - rawdata = self.rawdata - j = i + 2 - assert rawdata[i:j] == "": - # the empty comment - return j + 1 - if rawdata[j:j+1] in ("-", ""): - # Start of comment followed by buffer boundary, - # or just a buffer boundary. - return -1 - # A simple, practical version could look like: ((name|stringlit) S*) + '>' - n = len(rawdata) - if rawdata[j:j+2] == '--': #comment - # Locate --.*-- as the body of the comment - return self.parse_comment(i) - elif rawdata[j] == '[': #marked section - # Locate [statusWord [...arbitrary SGML...]] as the body of the marked section - # Where statusWord is one of TEMP, CDATA, IGNORE, INCLUDE, RCDATA - # Note that this is extended by Microsoft Office "Save as Web" function - # to include [if...] and [endif]. - return self.parse_marked_section(i) - else: #all other declaration elements - decltype, j = self._scan_name(j, i) - if j < 0: - return j - if decltype == "doctype": - self._decl_otherchars = '' - while j < n: - c = rawdata[j] - if c == ">": - # end of declaration syntax - data = rawdata[i+2:j] - if decltype == "doctype": - self.handle_decl(data) - else: - # According to the HTML5 specs sections "8.2.4.44 Bogus - # comment state" and "8.2.4.45 Markup declaration open - # state", a comment token should be emitted. - # Calling unknown_decl provides more flexibility though. 
- self.unknown_decl(data) - return j + 1 - if c in "\"'": - m = _declstringlit_match(rawdata, j) - if not m: - return -1 # incomplete - j = m.end() - elif c in "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ": - name, j = self._scan_name(j, i) - elif c in self._decl_otherchars: - j = j + 1 - elif c == "[": - # this could be handled in a separate doctype parser - if decltype == "doctype": - j = self._parse_doctype_subset(j + 1, i) - elif decltype in {"attlist", "linktype", "link", "element"}: - # must tolerate []'d groups in a content model in an element declaration - # also in data attribute specifications of attlist declaration - # also link type declaration subsets in linktype declarations - # also link attribute specification lists in link declarations - raise AssertionError("unsupported '[' char in %s declaration" % decltype) - else: - raise AssertionError("unexpected '[' char in declaration") - else: - raise AssertionError("unexpected %r char in declaration" % rawdata[j]) - if j < 0: - return j - return -1 # incomplete - - # Internal -- parse a marked section - # Override this to handle MS-word extension syntax content - def parse_marked_section(self, i, report=1): - rawdata= self.rawdata - assert rawdata[i:i+3] == ' ending - match= _markedsectionclose.search(rawdata, i+3) - elif sectName in {"if", "else", "endif"}: - # look for MS Office ]> ending - match= _msmarkedsectionclose.search(rawdata, i+3) - else: - raise AssertionError( - 'unknown status keyword %r in marked section' % rawdata[i+3:j] - ) - if not match: - return -1 - if report: - j = match.start(0) - self.unknown_decl(rawdata[i+3: j]) - return match.end(0) - - # Internal -- parse comment, return length or -1 if not terminated - def parse_comment(self, i, report=1): - rawdata = self.rawdata - if rawdata[i:i+4] != ' - --> --> - - ''' - -__UNDEF__ = [] # a special sentinel object +"More comprehensive traceback formatting for Python scripts.\n\nTo enable this module, do:\n\n import cgitb; 
cgitb.enable()\n\nat the top of your script. The optional arguments to enable() are:\n\n display - if true, tracebacks are displayed in the web browser\n logdir - if set, tracebacks are written to files in this directory\n context - number of lines of source code to show for each stack frame\n format - 'text' or 'html' controls the output format\n\nBy default, tracebacks are displayed but not saved, the context is 5 lines\nand the output format is 'html' (for backwards compatibility with the\noriginal use of this module)\n\nAlternatively, if you have caught an exception and want cgitb to display it\nfor you, call cgitb.handler(). The optional argument to handler() is a\n3-item tuple (etype, evalue, etb) just like the value of sys.exc_info().\nThe default handler displays output as HTML.\n\n" +_H='' +_G='Python ' +_F='builtin' +_E='global' +_D='local' +_C='\n' +_B='html' +_A=None +import inspect,keyword,linecache,os,pydoc,sys,tempfile,time,tokenize,traceback +def reset():'Return a string that resets the CGI and browser to a known state.';return'\n --> -->\n \n ' +__UNDEF__=[] def small(text): - if text: - return '' + text + '' - else: - return '' - + if text:return''+text+'' + else:return'' def strong(text): - if text: - return '' + text + '' - else: - return '' - + if text:return''+text+'' + else:return'' def grey(text): - if text: - return '' + text + '' - else: - return '' - -def lookup(name, frame, locals): - """Find the value for a given name in the given environment.""" - if name in locals: - return 'local', locals[name] - if name in frame.f_globals: - return 'global', frame.f_globals[name] - if '__builtins__' in frame.f_globals: - builtins = frame.f_globals['__builtins__'] - if type(builtins) is type({}): - if name in builtins: - return 'builtin', builtins[name] - else: - if hasattr(builtins, name): - return 'builtin', getattr(builtins, name) - return None, __UNDEF__ - -def scanvars(reader, frame, locals): - """Scan one logical line of Python and look up 
values of variables used.""" - vars, lasttoken, parent, prefix, value = [], None, None, '', __UNDEF__ - for ttype, token, start, end, line in tokenize.generate_tokens(reader): - if ttype == tokenize.NEWLINE: break - if ttype == tokenize.NAME and token not in keyword.kwlist: - if lasttoken == '.': - if parent is not __UNDEF__: - value = getattr(parent, token, __UNDEF__) - vars.append((prefix + token, prefix, value)) - else: - where, value = lookup(token, frame, locals) - vars.append((token, where, value)) - elif token == '.': - prefix += lasttoken + '.' - parent = value - else: - parent, prefix = None, '' - lasttoken = token - return vars - -def html(einfo, context=5): - """Return a nice HTML document describing a given traceback.""" - etype, evalue, etb = einfo - if isinstance(etype, type): - etype = etype.__name__ - pyver = 'Python ' + sys.version.split()[0] + ': ' + sys.executable - date = time.ctime(time.time()) - head = f''' - - - - - -
 
- 
-{html_escape(str(etype))}
-{pyver}
{date}
-

A problem occurred in a Python script. Here is the sequence of -function calls leading up to the error, in the order they occurred.

''' - - indent = '' + small(' ' * 5) + ' ' - frames = [] - records = inspect.getinnerframes(etb, context) - for frame, file, lnum, func, lines, index in records: - if file: - file = os.path.abspath(file) - link = '%s' % (file, pydoc.html.escape(file)) - else: - file = link = '?' - args, varargs, varkw, locals = inspect.getargvalues(frame) - call = '' - if func != '?': - call = 'in ' + strong(pydoc.html.escape(func)) - if func != "": - call += inspect.formatargvalues(args, varargs, varkw, locals, - formatvalue=lambda value: '=' + pydoc.html.repr(value)) - - highlight = {} - def reader(lnum=[lnum]): - highlight[lnum[0]] = 1 - try: return linecache.getline(file, lnum[0]) - finally: lnum[0] += 1 - vars = scanvars(reader, frame, locals) - - rows = ['%s%s %s' % - (' ', link, call)] - if index is not None: - i = lnum - index - for line in lines: - num = small(' ' * (5-len(str(i))) + str(i)) + ' ' - if i in highlight: - line = '=>%s%s' % (num, pydoc.html.preformat(line)) - rows.append('%s' % line) - else: - line = '  %s%s' % (num, pydoc.html.preformat(line)) - rows.append('%s' % grey(line)) - i += 1 - - done, dump = {}, [] - for name, where, value in vars: - if name in done: continue - done[name] = 1 - if value is not __UNDEF__: - if where in ('global', 'builtin'): - name = ('%s ' % where) + strong(name) - elif where == 'local': - name = strong(name) - else: - name = where + strong(name.split('.')[-1]) - dump.append('%s = %s' % (name, pydoc.html.repr(value))) - else: - dump.append(name + ' undefined') - - rows.append('%s' % small(grey(', '.join(dump)))) - frames.append(''' - -%s
''' % '\n'.join(rows)) - - exception = ['

%s: %s' % (strong(pydoc.html.escape(str(etype))), - pydoc.html.escape(str(evalue)))] - for name in dir(evalue): - if name[:1] == '_': continue - value = pydoc.html.repr(getattr(evalue, name)) - exception.append('\n
%s%s =\n%s' % (indent, name, value)) - - return head + ''.join(frames) + ''.join(exception) + ''' - - - -''' % pydoc.html.escape( - ''.join(traceback.format_exception(etype, evalue, etb))) - -def text(einfo, context=5): - """Return a plain text document describing a given traceback.""" - etype, evalue, etb = einfo - if isinstance(etype, type): - etype = etype.__name__ - pyver = 'Python ' + sys.version.split()[0] + ': ' + sys.executable - date = time.ctime(time.time()) - head = "%s\n%s\n%s\n" % (str(etype), pyver, date) + ''' -A problem occurred in a Python script. Here is the sequence of -function calls leading up to the error, in the order they occurred. -''' - - frames = [] - records = inspect.getinnerframes(etb, context) - for frame, file, lnum, func, lines, index in records: - file = file and os.path.abspath(file) or '?' - args, varargs, varkw, locals = inspect.getargvalues(frame) - call = '' - if func != '?': - call = 'in ' + func - if func != "": - call += inspect.formatargvalues(args, varargs, varkw, locals, - formatvalue=lambda value: '=' + pydoc.text.repr(value)) - - highlight = {} - def reader(lnum=[lnum]): - highlight[lnum[0]] = 1 - try: return linecache.getline(file, lnum[0]) - finally: lnum[0] += 1 - vars = scanvars(reader, frame, locals) - - rows = [' %s %s' % (file, call)] - if index is not None: - i = lnum - index - for line in lines: - num = '%5d ' % i - rows.append(num+line.rstrip()) - i += 1 - - done, dump = {}, [] - for name, where, value in vars: - if name in done: continue - done[name] = 1 - if value is not __UNDEF__: - if where == 'global': name = 'global ' + name - elif where != 'local': name = where + name.split('.')[-1] - dump.append('%s = %s' % (name, pydoc.text.repr(value))) - else: - dump.append(name + ' undefined') - - rows.append('\n'.join(dump)) - frames.append('\n%s\n' % '\n'.join(rows)) - - exception = ['%s: %s' % (str(etype), str(evalue))] - for name in dir(evalue): - value = pydoc.text.repr(getattr(evalue, name)) - 
exception.append('\n%s%s = %s' % (" "*4, name, value)) - - return head + ''.join(frames) + ''.join(exception) + ''' - -The above is a description of an error in a Python program. Here is -the original traceback: - -%s -''' % ''.join(traceback.format_exception(etype, evalue, etb)) - + if text:return''+text+'' + else:return'' +def lookup(name,frame,locals): + 'Find the value for a given name in the given environment.';D='__builtins__';C=frame;A=name + if A in locals:return _D,locals[A] + if A in C.f_globals:return _E,C.f_globals[A] + if D in C.f_globals: + B=C.f_globals[D] + if type(B)is type({}): + if A in B:return _F,B[A] + elif hasattr(B,A):return _F,getattr(B,A) + return _A,__UNDEF__ +def scanvars(reader,frame,locals): + 'Scan one logical line of Python and look up values of variables used.';vars,E,C,D,B=[],_A,_A,'',__UNDEF__ + for(F,A,H,I,J)in tokenize.generate_tokens(reader): + if F==tokenize.NEWLINE:break + if F==tokenize.NAME and A not in keyword.kwlist: + if E=='.': + if C is not __UNDEF__:B=getattr(C,A,__UNDEF__);vars.append((D+A,D,B)) + else:G,B=lookup(A,frame,locals);vars.append((A,G,B)) + elif A=='.':D+=E+'.';C=B + else:C,D=_A,'' + E=A + return vars +def html(einfo,context=5): + 'Return a nice HTML document describing a given traceback.';N='%s';J=' ';C,E,O=einfo + if isinstance(C,type):C=C.__name__ + Y=_G+sys.version.split()[0]+': '+sys.executable;Z=time.ctime(time.time());a=''+pydoc.html.heading('%s'%strong(pydoc.html.escape(str(C))),'#ffffff','#6622aa',Y+'
'+Z)+'\n

A problem occurred in a Python script. Here is the sequence of\nfunction calls leading up to the error, in the order they occurred.

';b=''+small(J*5)+' ';P=[];c=inspect.getinnerframes(O,context) + for(Q,B,R,K,d,S)in c: + if B:B=os.path.abspath(B);T='%s'%(B,pydoc.html.escape(B)) + else:B=T='?' + e,f,g,locals=inspect.getargvalues(Q);L='' + if K!='?': + L='in '+strong(pydoc.html.escape(K)) + if K!=_H:L+=inspect.formatargvalues(e,f,g,locals,formatvalue=lambda value:'='+pydoc.html.repr(value)) + U={} + def h(lnum=[R]): + A=lnum;U[A[0]]=1 + try:return linecache.getline(B,A[0]) + finally:A[0]+=1 + vars=scanvars(h,Q,locals);F=['%s%s %s'%(' ',T,L)] + if S is not _A: + G=R-S + for D in d: + V=small(J*(5-len(str(G)))+str(G))+J + if G in U:D='=>%s%s'%(V,pydoc.html.preformat(D));F.append('%s'%D) + else:D='  %s%s'%(V,pydoc.html.preformat(D));F.append(N%grey(D)) + G+=1 + W,M={},[] + for(A,H,I)in vars: + if A in W:continue + W[A]=1 + if I is not __UNDEF__: + if H in(_E,_F):A='%s '%H+strong(A) + elif H==_D:A=strong(A) + else:A=H+strong(A.split('.')[-1]) + M.append('%s = %s'%(A,pydoc.html.repr(I))) + else:M.append(A+' undefined') + F.append(N%small(grey(', '.join(M))));P.append('\n\n%s
'%_C.join(F)) + X=['

%s: %s'%(strong(pydoc.html.escape(str(C))),pydoc.html.escape(str(E)))] + for A in dir(E): + if A[:1]=='_':continue + I=pydoc.html.repr(getattr(E,A));X.append('\n
%s%s =\n%s'%(b,A,I)) + return a+''.join(P)+''.join(X)+"\n\n\n\n"%pydoc.html.escape(''.join(traceback.format_exception(C,E,O))) +def text(einfo,context=5): + 'Return a plain text document describing a given traceback.';B,D,K=einfo + if isinstance(B,type):B=B.__name__ + S=_G+sys.version.split()[0]+': '+sys.executable;T=time.ctime(time.time());U='%s\n%s\n%s\n'%(str(B),S,T)+'\nA problem occurred in a Python script. Here is the sequence of\nfunction calls leading up to the error, in the order they occurred.\n';L=[];V=inspect.getinnerframes(K,context) + for(M,C,N,F,W,O)in V: + C=C and os.path.abspath(C)or'?';X,Y,Z,locals=inspect.getargvalues(M);G='' + if F!='?': + G='in '+F + if F!=_H:G+=inspect.formatargvalues(X,Y,Z,locals,formatvalue=lambda value:'='+pydoc.text.repr(value)) + a={} + def b(lnum=[N]): + A=lnum;a[A[0]]=1 + try:return linecache.getline(C,A[0]) + finally:A[0]+=1 + vars=scanvars(b,M,locals);H=[' %s %s'%(C,G)] + if O is not _A: + P=N-O + for c in W:d='%5d '%P;H.append(d+c.rstrip());P+=1 + Q,I={},[] + for(A,J,E)in vars: + if A in Q:continue + Q[A]=1 + if E is not __UNDEF__: + if J==_E:A='global '+A + elif J!=_D:A=J+A.split('.')[-1] + I.append('%s = %s'%(A,pydoc.text.repr(E))) + else:I.append(A+' undefined') + H.append(_C.join(I));L.append('\n%s\n'%_C.join(H)) + R=['%s: %s'%(str(B),str(D))] + for A in dir(D):E=pydoc.text.repr(getattr(D,A));R.append('\n%s%s = %s'%(' '*4,A,E)) + return U+''.join(L)+''.join(R)+'\n\nThe above is a description of an error in a Python program. 
Here is\nthe original traceback:\n\n%s\n'%''.join(traceback.format_exception(B,D,K)) class Hook: - """A hook to replace sys.excepthook that shows tracebacks in HTML.""" - - def __init__(self, display=1, logdir=None, context=5, file=None, - format="html"): - self.display = display # send tracebacks to browser if true - self.logdir = logdir # log tracebacks to files if not None - self.context = context # number of source code lines per frame - self.file = file or sys.stdout # place to send the output - self.format = format - - def __call__(self, etype, evalue, etb): - self.handle((etype, evalue, etb)) - - def handle(self, info=None): - info = info or sys.exc_info() - if self.format == "html": - self.file.write(reset()) - - formatter = (self.format=="html") and html or text - plain = False - try: - doc = formatter(info, self.context) - except: # just in case something goes wrong - doc = ''.join(traceback.format_exception(*info)) - plain = True - - if self.display: - if plain: - doc = pydoc.html.escape(doc) - self.file.write('

' + doc + '
\n') - else: - self.file.write(doc + '\n') - else: - self.file.write('

A problem occurred in a Python script.\n') - - if self.logdir is not None: - suffix = ['.txt', '.html'][self.format=="html"] - (fd, path) = tempfile.mkstemp(suffix=suffix, dir=self.logdir) - - try: - with os.fdopen(fd, 'w') as file: - file.write(doc) - msg = '%s contains the description of this error.' % path - except: - msg = 'Tried to save traceback to %s, but failed.' % path - - if self.format == 'html': - self.file.write('

%s

\n' % msg) - else: - self.file.write(msg + '\n') - try: - self.file.flush() - except: pass - -handler = Hook().handle -def enable(display=1, logdir=None, context=5, format="html"): - """Install an exception handler that formats tracebacks as HTML. - - The optional argument 'display' can be set to 0 to suppress sending the - traceback to the browser, and 'logdir' can be set to a directory to cause - tracebacks to be written to files there.""" - sys.excepthook = Hook(display=display, logdir=logdir, - context=context, format=format) + 'A hook to replace sys.excepthook that shows tracebacks in HTML.' + def __init__(A,display=1,logdir=_A,context=5,file=_A,format=_B):A.display=display;A.logdir=logdir;A.context=context;A.file=file or sys.stdout;A.format=format + def __call__(A,etype,evalue,etb):A.handle((etype,evalue,etb)) + def handle(A,info=_A): + C=info;C=C or sys.exc_info() + if A.format==_B:A.file.write(reset()) + G=A.format==_B and html or text;E=False + try:B=G(C,A.context) + except:B=''.join(traceback.format_exception(*C));E=True + if A.display: + if E:B=pydoc.html.escape(B);A.file.write('
'+B+'
\n') + else:A.file.write(B+_C) + else:A.file.write('

A problem occurred in a Python script.\n') + if A.logdir is not _A: + H=['.txt','.html'][A.format==_B];I,F=tempfile.mkstemp(suffix=H,dir=A.logdir) + try: + with os.fdopen(I,'w')as J:J.write(B) + D='%s contains the description of this error.'%F + except:D='Tried to save traceback to %s, but failed.'%F + if A.format==_B:A.file.write('

%s

\n'%D) + else:A.file.write(D+_C) + try:A.file.flush() + except:pass +handler=Hook().handle +def enable(display=1,logdir=_A,context=5,format=_B):"Install an exception handler that formats tracebacks as HTML.\n\n The optional argument 'display' can be set to 0 to suppress sending the\n traceback to the browser, and 'logdir' can be set to a directory to cause\n tracebacks to be written to files there.";sys.excepthook=Hook(display=display,logdir=logdir,context=context,format=format) \ No newline at end of file diff --git a/Lib/chunk.py b/Lib/chunk.py index 618781efd11..72233630447 100644 --- a/Lib/chunk.py +++ b/Lib/chunk.py @@ -1,173 +1,59 @@ -"""Simple class to read IFF chunks. - -An IFF chunk (used in formats such as AIFF, TIFF, RMFF (RealMedia File -Format)) has the following structure: - -+----------------+ -| ID (4 bytes) | -+----------------+ -| size (4 bytes) | -+----------------+ -| data | -| ... | -+----------------+ - -The ID is a 4-byte string which identifies the type of chunk. - -The size field (a 32-bit value, encoded using big-endian byte order) -gives the size of the whole chunk, including the 8-byte header. - -Usually an IFF-type file consists of one or more chunks. The proposed -usage of the Chunk class defined here is to instantiate an instance at -the start of each chunk and read from the instance until it reaches -the end, after which a new instance can be instantiated. At the end -of the file, creating a new instance will fail with an EOFError -exception. - -Usage: -while True: - try: - chunk = Chunk(file) - except EOFError: - break - chunktype = chunk.getname() - while True: - data = chunk.read(nbytes) - if not data: - pass - # do something with data - -The interface is file-like. The implemented methods are: -read, close, seek, tell, isatty. 
-Extra methods are: skip() (called by close, skips to the end of the chunk), -getname() (returns the name (ID) of the chunk) - -The __init__ method has one required argument, a file-like object -(including a chunk instance), and one optional argument, a flag which -specifies whether or not chunks are aligned on 2-byte boundaries. The -default is 1, i.e. aligned. -""" - -import warnings - -warnings._deprecated(__name__, remove=(3, 13)) - +'Simple class to read IFF chunks.\n\nAn IFF chunk (used in formats such as AIFF, TIFF, RMFF (RealMedia File\nFormat)) has the following structure:\n\n+----------------+\n| ID (4 bytes) |\n+----------------+\n| size (4 bytes) |\n+----------------+\n| data |\n| ... |\n+----------------+\n\nThe ID is a 4-byte string which identifies the type of chunk.\n\nThe size field (a 32-bit value, encoded using big-endian byte order)\ngives the size of the whole chunk, including the 8-byte header.\n\nUsually an IFF-type file consists of one or more chunks. The proposed\nusage of the Chunk class defined here is to instantiate an instance at\nthe start of each chunk and read from the instance until it reaches\nthe end, after which a new instance can be instantiated. At the end\nof the file, creating a new instance will fail with an EOFError\nexception.\n\nUsage:\nwhile True:\n try:\n chunk = Chunk(file)\n except EOFError:\n break\n chunktype = chunk.getname()\n while True:\n data = chunk.read(nbytes)\n if not data:\n pass\n # do something with data\n\nThe interface is file-like. The implemented methods are:\nread, close, seek, tell, isatty.\nExtra methods are: skip() (called by close, skips to the end of the chunk),\ngetname() (returns the name (ID) of the chunk)\n\nThe __init__ method has one required argument, a file-like object\n(including a chunk instance), and one optional argument, a flag which\nspecifies whether or not chunks are aligned on 2-byte boundaries. The\ndefault is 1, i.e. 
aligned.\n' +_C=False +_B=True +_A='I/O operation on closed file' class Chunk: - def __init__(self, file, align=True, bigendian=True, inclheader=False): - import struct - self.closed = False - self.align = align # whether to align to word (2-byte) boundaries - if bigendian: - strflag = '>' - else: - strflag = '<' - self.file = file - self.chunkname = file.read(4) - if len(self.chunkname) < 4: - raise EOFError - try: - self.chunksize = struct.unpack_from(strflag+'L', file.read(4))[0] - except struct.error: - raise EOFError from None - if inclheader: - self.chunksize = self.chunksize - 8 # subtract header - self.size_read = 0 - try: - self.offset = self.file.tell() - except (AttributeError, OSError): - self.seekable = False - else: - self.seekable = True - - def getname(self): - """Return the name (ID) of the current chunk.""" - return self.chunkname - - def getsize(self): - """Return the size of the current chunk.""" - return self.chunksize - - def close(self): - if not self.closed: - try: - self.skip() - finally: - self.closed = True - - def isatty(self): - if self.closed: - raise ValueError("I/O operation on closed file") - return False - - def seek(self, pos, whence=0): - """Seek to specified position into the chunk. - Default position is 0 (start of chunk). - If the file is not seekable, this will result in an error. - """ - - if self.closed: - raise ValueError("I/O operation on closed file") - if not self.seekable: - raise OSError("cannot seek") - if whence == 1: - pos = pos + self.size_read - elif whence == 2: - pos = pos + self.chunksize - if pos < 0 or pos > self.chunksize: - raise RuntimeError - self.file.seek(self.offset + pos, 0) - self.size_read = pos - - def tell(self): - if self.closed: - raise ValueError("I/O operation on closed file") - return self.size_read - - def read(self, size=-1): - """Read at most size bytes from the chunk. - If size is omitted or negative, read until the end - of the chunk. 
- """ - - if self.closed: - raise ValueError("I/O operation on closed file") - if self.size_read >= self.chunksize: - return b'' - if size < 0: - size = self.chunksize - self.size_read - if size > self.chunksize - self.size_read: - size = self.chunksize - self.size_read - data = self.file.read(size) - self.size_read = self.size_read + len(data) - if self.size_read == self.chunksize and \ - self.align and \ - (self.chunksize & 1): - dummy = self.file.read(1) - self.size_read = self.size_read + len(dummy) - return data - - def skip(self): - """Skip the rest of the chunk. - If you are not interested in the contents of the chunk, - this method should be called so that the file points to - the start of the next chunk. - """ - - if self.closed: - raise ValueError("I/O operation on closed file") - if self.seekable: - try: - n = self.chunksize - self.size_read - # maybe fix alignment - if self.align and (self.chunksize & 1): - n = n + 1 - self.file.seek(n, 1) - self.size_read = self.size_read + n - return - except OSError: - pass - while self.size_read < self.chunksize: - n = min(8192, self.chunksize - self.size_read) - dummy = self.read(n) - if not dummy: - raise EOFError + def __init__(A,file,align=_B,bigendian=_B,inclheader=_C): + B=file;import struct as C;A.closed=_C;A.align=align + if bigendian:D='>' + else:D='<' + A.file=B;A.chunkname=B.read(4) + if len(A.chunkname)<4:raise EOFError + try:A.chunksize=C.unpack_from(D+'L',B.read(4))[0] + except C.error:raise EOFError + if inclheader:A.chunksize=A.chunksize-8 + A.size_read=0 + try:A.offset=A.file.tell() + except(AttributeError,OSError):A.seekable=_C + else:A.seekable=_B + def getname(A):'Return the name (ID) of the current chunk.';return A.chunkname + def getsize(A):'Return the size of the current chunk.';return A.chunksize + def close(A): + if not A.closed: + try:A.skip() + finally:A.closed=_B + def isatty(A): + if A.closed:raise ValueError(_A) + return _C + def seek(A,pos,whence=0): + 'Seek to specified position into 
the chunk.\n Default position is 0 (start of chunk).\n If the file is not seekable, this will result in an error.\n ';C=whence;B=pos + if A.closed:raise ValueError(_A) + if not A.seekable:raise OSError('cannot seek') + if C==1:B=B+A.size_read + elif C==2:B=B+A.chunksize + if B<0 or B>A.chunksize:raise RuntimeError + A.file.seek(A.offset+B,0);A.size_read=B + def tell(A): + if A.closed:raise ValueError(_A) + return A.size_read + def read(A,size=-1): + 'Read at most size bytes from the chunk.\n If size is omitted or negative, read until the end\n of the chunk.\n ';B=size + if A.closed:raise ValueError(_A) + if A.size_read>=A.chunksize:return b'' + if B<0:B=A.chunksize-A.size_read + if B>A.chunksize-A.size_read:B=A.chunksize-A.size_read + C=A.file.read(B);A.size_read=A.size_read+len(C) + if A.size_read==A.chunksize and A.align and A.chunksize&1:D=A.file.read(1);A.size_read=A.size_read+len(D) + return C + def skip(A): + 'Skip the rest of the chunk.\n If you are not interested in the contents of the chunk,\n this method should be called so that the file points to\n the start of the next chunk.\n ' + if A.closed:raise ValueError(_A) + if A.seekable: + try: + B=A.chunksize-A.size_read + if A.align and A.chunksize&1:B=B+1 + A.file.seek(B,1);A.size_read=A.size_read+B;return + except OSError:pass + while A.size_read to get list of completions. 
- """ - if state == 0: - import readline - origline = readline.get_line_buffer() - line = origline.lstrip() - stripped = len(origline) - len(line) - begidx = readline.get_begidx() - stripped - endidx = readline.get_endidx() - stripped - if begidx>0: - cmd, args, foo = self.parseline(line) - if cmd == '': - compfunc = self.completedefault - else: - try: - compfunc = getattr(self, 'complete_' + cmd) - except AttributeError: - compfunc = self.completedefault - else: - compfunc = self.completenames - self.completion_matches = compfunc(text, line, begidx, endidx) - try: - return self.completion_matches[state] - except IndexError: - return None - - def get_names(self): - # This method used to pull in base class attributes - # at a time dir() didn't do it yet. - return dir(self.__class__) - - def complete_help(self, *args): - commands = set(self.completenames(*args)) - topics = set(a[5:] for a in self.get_names() - if a.startswith('help_' + args[0])) - return list(commands | topics) - - def do_help(self, arg): - 'List available commands with "help" or detailed help with "help cmd".' 
- if arg: - # XXX check arg syntax - try: - func = getattr(self, 'help_' + arg) - except AttributeError: - try: - doc=getattr(self, 'do_' + arg).__doc__ - if doc: - self.stdout.write("%s\n"%str(doc)) - return - except AttributeError: - pass - self.stdout.write("%s\n"%str(self.nohelp % (arg,))) - return - func() - else: - names = self.get_names() - cmds_doc = [] - cmds_undoc = [] - topics = set() - for name in names: - if name[:5] == 'help_': - topics.add(name[5:]) - names.sort() - # There can be duplicates if routines overridden - prevname = '' - for name in names: - if name[:3] == 'do_': - if name == prevname: - continue - prevname = name - cmd=name[3:] - if cmd in topics: - cmds_doc.append(cmd) - topics.remove(cmd) - elif getattr(self, name).__doc__: - cmds_doc.append(cmd) - else: - cmds_undoc.append(cmd) - self.stdout.write("%s\n"%str(self.doc_leader)) - self.print_topics(self.doc_header, cmds_doc, 15,80) - self.print_topics(self.misc_header, sorted(topics),15,80) - self.print_topics(self.undoc_header, cmds_undoc, 15,80) - - def print_topics(self, header, cmds, cmdlen, maxcol): - if cmds: - self.stdout.write("%s\n"%str(header)) - if self.ruler: - self.stdout.write("%s\n"%str(self.ruler * len(header))) - self.columnize(cmds, maxcol-1) - self.stdout.write("\n") - - def columnize(self, list, displaywidth=80): - """Display a list of strings as a compact set of columns. - - Each column is only as wide as necessary. - Columns are separated by two spaces (one was not legible enough). 
- """ - if not list: - self.stdout.write("\n") - return - - nonstrings = [i for i in range(len(list)) - if not isinstance(list[i], str)] - if nonstrings: - raise TypeError("list[i] not a string for i in %s" - % ", ".join(map(str, nonstrings))) - size = len(list) - if size == 1: - self.stdout.write('%s\n'%str(list[0])) - return - # Try every row count from 1 upwards - for nrows in range(1, len(list)): - ncols = (size+nrows-1) // nrows - colwidths = [] - totwidth = -2 - for col in range(ncols): - colwidth = 0 - for row in range(nrows): - i = row + nrows*col - if i >= size: - break - x = list[i] - colwidth = max(colwidth, len(x)) - colwidths.append(colwidth) - totwidth += colwidth + 2 - if totwidth > displaywidth: - break - if totwidth <= displaywidth: - break - else: - nrows = len(list) - ncols = 1 - colwidths = [0] - for row in range(nrows): - texts = [] - for col in range(ncols): - i = row + nrows*col - if i >= size: - x = "" - else: - x = list[i] - texts.append(x) - while texts and not texts[-1]: - del texts[-1] - for col in range(len(texts)): - texts[col] = texts[col].ljust(colwidths[col]) - self.stdout.write("%s\n"%str(" ".join(texts))) + "A simple framework for writing line-oriented command interpreters.\n\n These are often useful for test harnesses, administrative tools, and\n prototypes that will later be wrapped in a more sophisticated interface.\n\n A Cmd instance or subclass instance is a line-oriented interpreter\n framework. 
There is no good reason to instantiate Cmd itself; rather,\n it's useful as a superclass of an interpreter class you define yourself\n in order to inherit Cmd's methods and encapsulate action methods.\n\n ";prompt=PROMPT;identchars=IDENTCHARS;ruler='=';lastcmd='';intro=_A;doc_leader='';doc_header='Documented commands (type help ):';misc_header='Miscellaneous help topics:';undoc_header='Undocumented commands:';nohelp='*** No help on %s';use_rawinput=1 + def __init__(A,completekey='tab',stdin=_A,stdout=_A): + "Instantiate a line-oriented interpreter framework.\n\n The optional argument 'completekey' is the readline name of a\n completion key; it defaults to the Tab key. If completekey is\n not None and the readline module is available, command completion\n is done automatically. The optional arguments stdin and stdout\n specify alternate input and output file objects; if not specified,\n sys.stdin and sys.stdout are used.\n\n ";B=stdout;C=stdin + if C is not _A:A.stdin=C + else:A.stdin=sys.stdin + if B is not _A:A.stdout=B + else:A.stdout=sys.stdout + A.cmdqueue=[];A.completekey=completekey + def cmdloop(A,intro=_A): + 'Repeatedly issue a prompt, accept input, parse an initial prefix\n off the received input, and dispatch to action methods, passing them\n the remainder of the line as argument.\n\n ';E=intro;A.preloop() + if A.use_rawinput and A.completekey: + try:import readline as C;A.old_completer=C.get_completer();C.set_completer(A.complete);C.parse_and_bind(A.completekey+': complete') + except ImportError:pass + try: + if E is not _A:A.intro=E + if A.intro:A.stdout.write(str(A.intro)+'\n') + D=_A + while not D: + if A.cmdqueue:B=A.cmdqueue.pop(0) + elif A.use_rawinput: + try:B=input(A.prompt) + except EOFError:B=_D + else: + A.stdout.write(A.prompt);A.stdout.flush();B=A.stdin.readline() + if not len(B):B=_D + else:B=B.rstrip('\r\n') + B=A.precmd(B);D=A.onecmd(B);D=A.postcmd(D,B) + A.postloop() + finally: + if A.use_rawinput and A.completekey: + try:import 
readline as C;C.set_completer(A.old_completer) + except ImportError:pass + def precmd(A,line):'Hook method executed just before the command line is\n interpreted, but after the input prompt is generated and issued.\n\n ';return line + def postcmd(A,stop,line):'Hook method executed just after a command dispatch is finished.';return stop + def preloop(A):'Hook method executed once when the cmdloop() method is called.' + def postloop(A):'Hook method executed once when the cmdloop() method is about to\n return.\n\n ' + def parseline(C,line): + "Parse the line into a command name and a string containing\n the arguments. Returns a tuple containing (command, args, line).\n 'command' and 'args' may be None if the line couldn't be parsed.\n ";A=line;A=A.strip() + if not A:return _A,_A,A + elif A[0]=='?':A='help '+A[1:] + elif A[0]=='!': + if hasattr(C,'do_shell'):A='shell '+A[1:] + else:return _A,_A,A + B,D=0,len(A) + while B to get list of completions.\n ";E=state + if E==0: + import readline as C;F=C.get_line_buffer();D=F.lstrip();G=len(F)-len(D);H=C.get_begidx()-G;J=C.get_endidx()-G + if H>0: + I,K,L=A.parseline(D) + if I=='':B=A.completedefault + else: + try:B=getattr(A,'complete_'+I) + except AttributeError:B=A.completedefault + else:B=A.completenames + A.completion_matches=B(text,D,H,J) + try:return A.completion_matches[E] + except IndexError:return + def get_names(A):return dir(A.__class__) + def complete_help(A,*B):C=set(A.completenames(*B));D=set(A[5:]for A in A.get_names()if A.startswith(_E+B[0]));return list(C|D) + def do_help(A,arg): + 'List available commands with "help" or detailed help with "help cmd".';D=arg + if D: + try:J=getattr(A,_E+D) + except AttributeError: + try: + G=getattr(A,_C+D).__doc__ + if G:A.stdout.write(_B%str(G));return + except AttributeError:pass + A.stdout.write(_B%str(A.nohelp%(D,)));return + J() + else: + E=A.get_names();F=[];H=[];help={} + for B in E: + if B[:5]==_E:help[B[5:]]=1 + E.sort();I='' + for B in E: + if B[:3]==_C: + if 
B==I:continue + I=B;C=B[3:] + if C in help:F.append(C);del help[C] + elif getattr(A,B).__doc__:F.append(C) + else:H.append(C) + A.stdout.write(_B%str(A.doc_leader));A.print_topics(A.doc_header,F,15,80);A.print_topics(A.misc_header,list(help.keys()),15,80);A.print_topics(A.undoc_header,H,15,80) + def print_topics(A,header,cmds,cmdlen,maxcol): + B=header + if cmds: + A.stdout.write(_B%str(B)) + if A.ruler:A.stdout.write(_B%str(A.ruler*len(B))) + A.columnize(cmds,maxcol-1);A.stdout.write('\n') + def columnize(H,list,displaywidth=80): + 'Display a list of strings as a compact set of columns.\n\n Each column is only as wide as necessary.\n Columns are separated by two spaces (one was not legible enough).\n ';M=displaywidth + if not list:H.stdout.write('\n');return + N=[A for A in range(len(list))if not isinstance(list[A],str)] + if N:raise TypeError('list[i] not a string for i in %s'%', '.join(map(str,N))) + E=len(list) + if E==1:H.stdout.write(_B%str(list[0]));return + for B in range(1,len(list)): + I=(E+B-1)//B;J=[];K=-2 + for C in range(I): + F=0 + for L in range(B): + D=L+B*C + if D>=E:break + G=list[D];F=max(F,len(G)) + J.append(F);K+=F+2 + if K>M:break + if K<=M:break + else:B=len(list);I=1;J=[0] + for L in range(B): + A=[] + for C in range(I): + D=L+B*C + if D>=E:G='' + else:G=list[D] + A.append(G) + while A and not A[-1]:del A[-1] + for C in range(len(A)):A[C]=A[C].ljust(J[C]) + H.stdout.write(_B%str(' '.join(A))) \ No newline at end of file diff --git a/Lib/code.py b/Lib/code.py index 23295f4cf59..b6978e19749 100644 --- a/Lib/code.py +++ b/Lib/code.py @@ -1,314 +1,79 @@ -"""Utilities needed to emulate Python's interactive interpreter. - -""" - -# Inspired by similar code by Jeff Epler and Fredrik Lundh. 
- - -import sys -import traceback -import argparse -from codeop import CommandCompiler, compile_command - -__all__ = ["InteractiveInterpreter", "InteractiveConsole", "interact", - "compile_command"] - +"Utilities needed to emulate Python's interactive interpreter.\n\n" +_A=None +import sys,traceback,argparse +from codeop import CommandCompiler,compile_command +__all__=['InteractiveInterpreter','InteractiveConsole','interact','compile_command'] class InteractiveInterpreter: - """Base class for InteractiveConsole. - - This class deals with parsing and interpreter state (the user's - namespace); it doesn't deal with input buffering or prompting or - input file naming (the filename is always passed in explicitly). - - """ - - def __init__(self, locals=None): - """Constructor. - - The optional 'locals' argument specifies the dictionary in - which code will be executed; it defaults to a newly created - dictionary with key "__name__" set to "__console__" and key - "__doc__" set to None. - - """ - if locals is None: - locals = {"__name__": "__console__", "__doc__": None} - self.locals = locals - self.compile = CommandCompiler() - - def runsource(self, source, filename="", symbol="single"): - """Compile and run some source in the interpreter. - - Arguments are as for compile_command(). - - One several things can happen: - - 1) The input is incorrect; compile_command() raised an - exception (SyntaxError or OverflowError). A syntax traceback - will be printed by calling the showsyntaxerror() method. - - 2) The input is incomplete, and more input is required; - compile_command() returned None. Nothing happens. - - 3) The input is complete; compile_command() returned a code - object. The code is executed by calling self.runcode() (which - also handles run-time exceptions, except for SystemExit). - - The return value is True in case 2, False in the other cases (unless - an exception is raised). 
The return value can be used to - decide whether to use sys.ps1 or sys.ps2 to prompt the next - line. - - """ - try: - code = self.compile(source, filename, symbol) - except (OverflowError, SyntaxError, ValueError): - # Case 1 - self.showsyntaxerror(filename) - return False - - if code is None: - # Case 2 - return True - - # Case 3 - self.runcode(code) - return False - - def runcode(self, code): - """Execute a code object. - - When an exception occurs, self.showtraceback() is called to - display a traceback. All exceptions are caught except - SystemExit, which is reraised. - - A note about KeyboardInterrupt: this exception may occur - elsewhere in this code, and may not always be caught. The - caller should be prepared to deal with it. - - """ - try: - exec(code, self.locals) - except SystemExit: - raise - except: - self.showtraceback() - - def showsyntaxerror(self, filename=None): - """Display the syntax error that just occurred. - - This doesn't display a stack trace because there isn't one. - - If a filename is given, it is stuffed in the exception instead - of what was there before (because Python's parser always uses - "" when reading from a string). - - The output is written by self.write(), below. 
- - """ - type, value, tb = sys.exc_info() - sys.last_type = type - sys.last_value = value - sys.last_traceback = tb - if filename and type is SyntaxError: - # Work hard to stuff the correct filename in the exception - try: - msg, (dummy_filename, lineno, offset, line) = value.args - except ValueError: - # Not the format we expect; leave it alone - pass - else: - # Stuff in the right filename - value = SyntaxError(msg, (filename, lineno, offset, line)) - sys.last_value = value - if sys.excepthook is sys.__excepthook__: - lines = traceback.format_exception_only(type, value) - self.write(''.join(lines)) - else: - # If someone has set sys.excepthook, we let that take precedence - # over self.write - sys.excepthook(type, value, tb) - - def showtraceback(self): - """Display the exception that just occurred. - - We remove the first stack item because it is our own code. - - The output is written by self.write(), below. - - """ - sys.last_type, sys.last_value, last_tb = ei = sys.exc_info() - sys.last_traceback = last_tb - try: - lines = traceback.format_exception(ei[0], ei[1], last_tb.tb_next) - if sys.excepthook is sys.__excepthook__: - self.write(''.join(lines)) - else: - # If someone has set sys.excepthook, we let that take precedence - # over self.write - sys.excepthook(ei[0], ei[1], last_tb) - finally: - last_tb = ei = None - - def write(self, data): - """Write a string. - - The base implementation writes to sys.stderr; a subclass may - replace this with a different implementation. 
- - """ - sys.stderr.write(data) - - + "Base class for InteractiveConsole.\n\n This class deals with parsing and interpreter state (the user's\n namespace); it doesn't deal with input buffering or prompting or\n input file naming (the filename is always passed in explicitly).\n\n " + def __init__(self,locals=_A): + 'Constructor.\n\n The optional \'locals\' argument specifies the dictionary in\n which code will be executed; it defaults to a newly created\n dictionary with key "__name__" set to "__console__" and key\n "__doc__" set to None.\n\n ' + if locals is _A:locals={'__name__':'__console__','__doc__':_A} + self.locals=locals;self.compile=CommandCompiler() + def runsource(self,source,filename='',symbol='single'): + 'Compile and run some source in the interpreter.\n\n Arguments are as for compile_command().\n\n One several things can happen:\n\n 1) The input is incorrect; compile_command() raised an\n exception (SyntaxError or OverflowError). A syntax traceback\n will be printed by calling the showsyntaxerror() method.\n\n 2) The input is incomplete, and more input is required;\n compile_command() returned None. Nothing happens.\n\n 3) The input is complete; compile_command() returned a code\n object. The code is executed by calling self.runcode() (which\n also handles run-time exceptions, except for SystemExit).\n\n The return value is True in case 2, False in the other cases (unless\n an exception is raised). The return value can be used to\n decide whether to use sys.ps1 or sys.ps2 to prompt the next\n line.\n\n ';A=False + try:code=self.compile(source,filename,symbol) + except(OverflowError,SyntaxError,ValueError):self.showsyntaxerror(filename);return A + if code is _A:return True + self.runcode(code);return A + def runcode(self,code): + 'Execute a code object.\n\n When an exception occurs, self.showtraceback() is called to\n display a traceback. 
All exceptions are caught except\n SystemExit, which is reraised.\n\n A note about KeyboardInterrupt: this exception may occur\n elsewhere in this code, and may not always be caught. The\n caller should be prepared to deal with it.\n\n ' + try:exec(code,self.locals) + except SystemExit:raise + except:self.showtraceback() + def showsyntaxerror(self,filename=_A): + 'Display the syntax error that just occurred.\n\n This doesn\'t display a stack trace because there isn\'t one.\n\n If a filename is given, it is stuffed in the exception instead\n of what was there before (because Python\'s parser always uses\n "" when reading from a string).\n\n The output is written by self.write(), below.\n\n ';type,value,tb=sys.exc_info();sys.last_type=type;sys.last_value=value;sys.last_traceback=tb + if filename and type is SyntaxError: + try:msg,(dummy_filename,lineno,offset,line)=value.args + except ValueError:pass + else:value=SyntaxError(msg,(filename,lineno,offset,line));sys.last_value=value + if sys.excepthook is sys.__excepthook__:lines=traceback.format_exception_only(type,value);self.write(''.join(lines)) + else:sys.excepthook(type,value,tb) + def showtraceback(self): + 'Display the exception that just occurred.\n\n We remove the first stack item because it is our own code.\n\n The output is written by self.write(), below.\n\n ';sys.last_type,sys.last_value,last_tb=ei=sys.exc_info();sys.last_traceback=last_tb + try: + lines=traceback.format_exception(ei[0],ei[1],last_tb.tb_next) + if sys.excepthook is sys.__excepthook__:self.write(''.join(lines)) + else:sys.excepthook(ei[0],ei[1],last_tb) + finally:last_tb=ei=_A + def write(self,data):'Write a string.\n\n The base implementation writes to sys.stderr; a subclass may\n replace this with a different implementation.\n\n ';sys.stderr.write(data) class InteractiveConsole(InteractiveInterpreter): - """Closely emulate the behavior of the interactive Python interpreter. 
- - This class builds on InteractiveInterpreter and adds prompting - using the familiar sys.ps1 and sys.ps2, and input buffering. - - """ - - def __init__(self, locals=None, filename=""): - """Constructor. - - The optional locals argument will be passed to the - InteractiveInterpreter base class. - - The optional filename argument should specify the (file)name - of the input stream; it will show up in tracebacks. - - """ - InteractiveInterpreter.__init__(self, locals) - self.filename = filename - self.resetbuffer() - - def resetbuffer(self): - """Reset the input buffer.""" - self.buffer = [] - - def interact(self, banner=None, exitmsg=None): - """Closely emulate the interactive Python console. - - The optional banner argument specifies the banner to print - before the first interaction; by default it prints a banner - similar to the one printed by the real Python interpreter, - followed by the current class name in parentheses (so as not - to confuse this with the real interpreter -- since it's so - close!). - - The optional exitmsg argument specifies the exit message - printed when exiting. Pass the empty string to suppress - printing an exit message. If exitmsg is not given or None, - a default message is printed. - - """ - try: - sys.ps1 - except AttributeError: - sys.ps1 = ">>> " - try: - sys.ps2 - except AttributeError: - sys.ps2 = "... " - cprt = 'Type "help", "copyright", "credits" or "license" for more information.' 
- if banner is None: - self.write("Python %s on %s\n%s\n(%s)\n" % - (sys.version, sys.platform, cprt, - self.__class__.__name__)) - elif banner: - self.write("%s\n" % str(banner)) - more = 0 - while 1: - try: - if more: - prompt = sys.ps2 - else: - prompt = sys.ps1 - try: - line = self.raw_input(prompt) - except EOFError: - self.write("\n") - break - else: - more = self.push(line) - except KeyboardInterrupt: - self.write("\nKeyboardInterrupt\n") - self.resetbuffer() - more = 0 - if exitmsg is None: - self.write('now exiting %s...\n' % self.__class__.__name__) - elif exitmsg != '': - self.write('%s\n' % exitmsg) - - def push(self, line): - """Push a line to the interpreter. - - The line should not have a trailing newline; it may have - internal newlines. The line is appended to a buffer and the - interpreter's runsource() method is called with the - concatenated contents of the buffer as source. If this - indicates that the command was executed or invalid, the buffer - is reset; otherwise, the command is incomplete, and the buffer - is left as it was after the line was appended. The return - value is 1 if more input is required, 0 if the line was dealt - with in some way (this is the same as runsource()). - - """ - self.buffer.append(line) - source = "\n".join(self.buffer) - more = self.runsource(source, self.filename) - if not more: - self.resetbuffer() - return more - - def raw_input(self, prompt=""): - """Write a prompt and read a line. - - The returned line does not include the trailing newline. - When the user enters the EOF key sequence, EOFError is raised. - - The base implementation uses the built-in function - input(); a subclass may replace this with a different - implementation. - - """ - return input(prompt) - - - -def interact(banner=None, readfunc=None, local=None, exitmsg=None): - """Closely emulate the interactive Python interpreter. - - This is a backwards compatible interface to the InteractiveConsole - class. 
When readfunc is not specified, it attempts to import the - readline module to enable GNU readline if it is available. - - Arguments (all optional, all default to None): - - banner -- passed to InteractiveConsole.interact() - readfunc -- if not None, replaces InteractiveConsole.raw_input() - local -- passed to InteractiveInterpreter.__init__() - exitmsg -- passed to InteractiveConsole.interact() - - """ - console = InteractiveConsole(local) - if readfunc is not None: - console.raw_input = readfunc - else: - try: - import readline - except ImportError: - pass - console.interact(banner, exitmsg) - - -if __name__ == "__main__": - parser = argparse.ArgumentParser() - parser.add_argument('-q', action='store_true', - help="don't print version and copyright messages") - args = parser.parse_args() - if args.q or sys.flags.quiet: - banner = '' - else: - banner = None - interact(banner) + 'Closely emulate the behavior of the interactive Python interpreter.\n\n This class builds on InteractiveInterpreter and adds prompting\n using the familiar sys.ps1 and sys.ps2, and input buffering.\n\n ' + def __init__(self,locals=_A,filename=''):'Constructor.\n\n The optional locals argument will be passed to the\n InteractiveInterpreter base class.\n\n The optional filename argument should specify the (file)name\n of the input stream; it will show up in tracebacks.\n\n ';InteractiveInterpreter.__init__(self,locals);self.filename=filename;self.resetbuffer() + def resetbuffer(self):'Reset the input buffer.';self.buffer=[] + def interact(self,banner=_A,exitmsg=_A): + "Closely emulate the interactive Python console.\n\n The optional banner argument specifies the banner to print\n before the first interaction; by default it prints a banner\n similar to the one printed by the real Python interpreter,\n followed by the current class name in parentheses (so as not\n to confuse this with the real interpreter -- since it's so\n close!).\n\n The optional exitmsg argument specifies the exit 
message\n printed when exiting. Pass the empty string to suppress\n printing an exit message. If exitmsg is not given or None,\n a default message is printed.\n\n ";A='%s\n' + try:sys.ps1 + except AttributeError:sys.ps1='>>> ' + try:sys.ps2 + except AttributeError:sys.ps2='... ' + cprt='Type "help", "copyright", "credits" or "license" for more information.' + if banner is _A:self.write('Python %s on %s\n%s\n(%s)\n'%(sys.version,sys.platform,cprt,self.__class__.__name__)) + elif banner:self.write(A%str(banner)) + more=0 + while 1: + try: + if more:prompt=sys.ps2 + else:prompt=sys.ps1 + try:line=self.raw_input(prompt) + except EOFError:self.write('\n');break + else:more=self.push(line) + except KeyboardInterrupt:self.write('\nKeyboardInterrupt\n');self.resetbuffer();more=0 + if exitmsg is _A:self.write('now exiting %s...\n'%self.__class__.__name__) + elif exitmsg!='':self.write(A%exitmsg) + def push(self,line): + "Push a line to the interpreter.\n\n The line should not have a trailing newline; it may have\n internal newlines. The line is appended to a buffer and the\n interpreter's runsource() method is called with the\n concatenated contents of the buffer as source. If this\n indicates that the command was executed or invalid, the buffer\n is reset; otherwise, the command is incomplete, and the buffer\n is left as it was after the line was appended. 
The return\n value is 1 if more input is required, 0 if the line was dealt\n with in some way (this is the same as runsource()).\n\n ";self.buffer.append(line);source='\n'.join(self.buffer);more=self.runsource(source,self.filename) + if not more:self.resetbuffer() + return more + def raw_input(self,prompt=''):'Write a prompt and read a line.\n\n The returned line does not include the trailing newline.\n When the user enters the EOF key sequence, EOFError is raised.\n\n The base implementation uses the built-in function\n input(); a subclass may replace this with a different\n implementation.\n\n ';return input(prompt) +def interact(banner=_A,readfunc=_A,local=_A,exitmsg=_A): + 'Closely emulate the interactive Python interpreter.\n\n This is a backwards compatible interface to the InteractiveConsole\n class. When readfunc is not specified, it attempts to import the\n readline module to enable GNU readline if it is available.\n\n Arguments (all optional, all default to None):\n\n banner -- passed to InteractiveConsole.interact()\n readfunc -- if not None, replaces InteractiveConsole.raw_input()\n local -- passed to InteractiveInterpreter.__init__()\n exitmsg -- passed to InteractiveConsole.interact()\n\n ';console=InteractiveConsole(local) + if readfunc is not _A:console.raw_input=readfunc + else: + try:import readline + except ImportError:pass + console.interact(banner,exitmsg) +if __name__=='__main__': + parser=argparse.ArgumentParser();parser.add_argument('-q',action='store_true',help="don't print version and copyright messages");args=parser.parse_args() + if args.q or sys.flags.quiet:banner='' + else:banner=_A + interact(banner) \ No newline at end of file diff --git a/Lib/codecs.py b/Lib/codecs.py index e6ad6e3a052..024a7dc4578 100644 --- a/Lib/codecs.py +++ b/Lib/codecs.py @@ -1,1126 +1,227 @@ -""" codecs -- Python Codec Registry, API and helpers. - - -Written by Marc-Andre Lemburg (mal@lemburg.com). - -(c) Copyright CNRI, All Rights Reserved. NO WARRANTY. 
- -""" - -import builtins -import sys - -### Registry and builtin stateless codec functions - -try: - from _codecs import * -except ImportError as why: - raise SystemError('Failed to load the builtin codecs: %s' % why) - -__all__ = ["register", "lookup", "open", "EncodedFile", "BOM", "BOM_BE", - "BOM_LE", "BOM32_BE", "BOM32_LE", "BOM64_BE", "BOM64_LE", - "BOM_UTF8", "BOM_UTF16", "BOM_UTF16_LE", "BOM_UTF16_BE", - "BOM_UTF32", "BOM_UTF32_LE", "BOM_UTF32_BE", - "CodecInfo", "Codec", "IncrementalEncoder", "IncrementalDecoder", - "StreamReader", "StreamWriter", - "StreamReaderWriter", "StreamRecoder", - "getencoder", "getdecoder", "getincrementalencoder", - "getincrementaldecoder", "getreader", "getwriter", - "encode", "decode", "iterencode", "iterdecode", - "strict_errors", "ignore_errors", "replace_errors", - "xmlcharrefreplace_errors", - "backslashreplace_errors", "namereplace_errors", - "register_error", "lookup_error"] - -### Constants - -# -# Byte Order Mark (BOM = ZERO WIDTH NO-BREAK SPACE = U+FEFF) -# and its possible byte string values -# for UTF8/UTF16/UTF32 output and little/big endian machines -# - -# UTF-8 -BOM_UTF8 = b'\xef\xbb\xbf' - -# UTF-16, little endian -BOM_LE = BOM_UTF16_LE = b'\xff\xfe' - -# UTF-16, big endian -BOM_BE = BOM_UTF16_BE = b'\xfe\xff' - -# UTF-32, little endian -BOM_UTF32_LE = b'\xff\xfe\x00\x00' - -# UTF-32, big endian -BOM_UTF32_BE = b'\x00\x00\xfe\xff' - -if sys.byteorder == 'little': - - # UTF-16, native endianness - BOM = BOM_UTF16 = BOM_UTF16_LE - - # UTF-32, native endianness - BOM_UTF32 = BOM_UTF32_LE - -else: - - # UTF-16, native endianness - BOM = BOM_UTF16 = BOM_UTF16_BE - - # UTF-32, native endianness - BOM_UTF32 = BOM_UTF32_BE - -# Old broken names (don't use in new code) -BOM32_LE = BOM_UTF16_LE -BOM32_BE = BOM_UTF16_BE -BOM64_LE = BOM_UTF32_LE -BOM64_BE = BOM_UTF32_BE - - -### Codec base classes (defining the API) - +' codecs -- Python Codec Registry, API and helpers.\n\n\nWritten by Marc-Andre Lemburg 
(mal@lemburg.com).\n\n(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.\n\n' +_G='latin-1' +_F='unknown' +_E=b'' +_D=False +_C=True +_B='strict' +_A=None +import builtins,sys +try:from _codecs import* +except ImportError as why:raise SystemError('Failed to load the builtin codecs: %s'%why) +__all__=['register','lookup','open','EncodedFile','BOM','BOM_BE','BOM_LE','BOM32_BE','BOM32_LE','BOM64_BE','BOM64_LE','BOM_UTF8','BOM_UTF16','BOM_UTF16_LE','BOM_UTF16_BE','BOM_UTF32','BOM_UTF32_LE','BOM_UTF32_BE','CodecInfo','Codec','IncrementalEncoder','IncrementalDecoder','StreamReader','StreamWriter','StreamReaderWriter','StreamRecoder','getencoder','getdecoder','getincrementalencoder','getincrementaldecoder','getreader','getwriter','encode','decode','iterencode','iterdecode','strict_errors','ignore_errors','replace_errors','xmlcharrefreplace_errors','backslashreplace_errors','namereplace_errors','register_error','lookup_error'] +BOM_UTF8=b'\xef\xbb\xbf' +BOM_LE=BOM_UTF16_LE=b'\xff\xfe' +BOM_BE=BOM_UTF16_BE=b'\xfe\xff' +BOM_UTF32_LE=b'\xff\xfe\x00\x00' +BOM_UTF32_BE=b'\x00\x00\xfe\xff' +if sys.byteorder=='little':BOM=BOM_UTF16=BOM_UTF16_LE;BOM_UTF32=BOM_UTF32_LE +else:BOM=BOM_UTF16=BOM_UTF16_BE;BOM_UTF32=BOM_UTF32_BE +BOM32_LE=BOM_UTF16_LE +BOM32_BE=BOM_UTF16_BE +BOM64_LE=BOM_UTF32_LE +BOM64_BE=BOM_UTF32_BE class CodecInfo(tuple): - """Codec details when looking up the codec registry""" - - # Private API to allow Python 3.4 to denylist the known non-Unicode - # codecs in the standard library. 
A more general mechanism to - # reliably distinguish test encodings from other codecs will hopefully - # be defined for Python 3.5 - # - # See http://bugs.python.org/issue19619 - _is_text_encoding = True # Assume codecs are text encodings by default - - def __new__(cls, encode, decode, streamreader=None, streamwriter=None, - incrementalencoder=None, incrementaldecoder=None, name=None, - *, _is_text_encoding=None): - self = tuple.__new__(cls, (encode, decode, streamreader, streamwriter)) - self.name = name - self.encode = encode - self.decode = decode - self.incrementalencoder = incrementalencoder - self.incrementaldecoder = incrementaldecoder - self.streamwriter = streamwriter - self.streamreader = streamreader - if _is_text_encoding is not None: - self._is_text_encoding = _is_text_encoding - return self - - def __repr__(self): - return "<%s.%s object for encoding %s at %#x>" % \ - (self.__class__.__module__, self.__class__.__qualname__, - self.name, id(self)) - + 'Codec details when looking up the codec registry';_is_text_encoding=_C + def __new__(cls,encode,decode,streamreader=_A,streamwriter=_A,incrementalencoder=_A,incrementaldecoder=_A,name=_A,*,_is_text_encoding=_A): + self=tuple.__new__(cls,(encode,decode,streamreader,streamwriter));self.name=name;self.encode=encode;self.decode=decode;self.incrementalencoder=incrementalencoder;self.incrementaldecoder=incrementaldecoder;self.streamwriter=streamwriter;self.streamreader=streamreader + if _is_text_encoding is not _A:self._is_text_encoding=_is_text_encoding + return self + def __repr__(self):return'<%s.%s object for encoding %s at %#x>'%(self.__class__.__module__,self.__class__.__qualname__,self.name,id(self)) class Codec: - - """ Defines the interface for stateless encoders/decoders. - - The .encode()/.decode() methods may use different error - handling schemes by providing the errors argument. 
These - string values are predefined: - - 'strict' - raise a ValueError error (or a subclass) - 'ignore' - ignore the character and continue with the next - 'replace' - replace with a suitable replacement character; - Python will use the official U+FFFD REPLACEMENT - CHARACTER for the builtin Unicode codecs on - decoding and '?' on encoding. - 'surrogateescape' - replace with private code points U+DCnn. - 'xmlcharrefreplace' - Replace with the appropriate XML - character reference (only for encoding). - 'backslashreplace' - Replace with backslashed escape sequences. - 'namereplace' - Replace with \\N{...} escape sequences - (only for encoding). - - The set of allowed values can be extended via register_error. - - """ - def encode(self, input, errors='strict'): - - """ Encodes the object input and returns a tuple (output - object, length consumed). - - errors defines the error handling to apply. It defaults to - 'strict' handling. - - The method may not store state in the Codec instance. Use - StreamWriter for codecs which have to keep state in order to - make encoding efficient. - - The encoder must be able to handle zero length input and - return an empty object of the output object type in this - situation. - - """ - raise NotImplementedError - - def decode(self, input, errors='strict'): - - """ Decodes the object input and returns a tuple (output - object, length consumed). - - input must be an object which provides the bf_getreadbuf - buffer slot. Python strings, buffer objects and memory - mapped files are examples of objects providing this slot. - - errors defines the error handling to apply. It defaults to - 'strict' handling. - - The method may not store state in the Codec instance. Use - StreamReader for codecs which have to keep state in order to - make decoding efficient. - - The decoder must be able to handle zero length input and - return an empty object of the output object type in this - situation. 
- - """ - raise NotImplementedError - -class IncrementalEncoder(object): - """ - An IncrementalEncoder encodes an input in multiple steps. The input can - be passed piece by piece to the encode() method. The IncrementalEncoder - remembers the state of the encoding process between calls to encode(). - """ - def __init__(self, errors='strict'): - """ - Creates an IncrementalEncoder instance. - - The IncrementalEncoder may use different error handling schemes by - providing the errors keyword argument. See the module docstring - for a list of possible values. - """ - self.errors = errors - self.buffer = "" - - def encode(self, input, final=False): - """ - Encodes input and returns the resulting object. - """ - raise NotImplementedError - - def reset(self): - """ - Resets the encoder to the initial state. - """ - - def getstate(self): - """ - Return the current state of the encoder. - """ - return 0 - - def setstate(self, state): - """ - Set the current state of the encoder. state must have been - returned by getstate(). - """ - + " Defines the interface for stateless encoders/decoders.\n\n The .encode()/.decode() methods may use different error\n handling schemes by providing the errors argument. These\n string values are predefined:\n\n 'strict' - raise a ValueError error (or a subclass)\n 'ignore' - ignore the character and continue with the next\n 'replace' - replace with a suitable replacement character;\n Python will use the official U+FFFD REPLACEMENT\n CHARACTER for the builtin Unicode codecs on\n decoding and '?' 
on encoding.\n 'surrogateescape' - replace with private code points U+DCnn.\n 'xmlcharrefreplace' - Replace with the appropriate XML\n character reference (only for encoding).\n 'backslashreplace' - Replace with backslashed escape sequences.\n 'namereplace' - Replace with \\N{...} escape sequences\n (only for encoding).\n\n The set of allowed values can be extended via register_error.\n\n " + def encode(self,input,errors=_B):" Encodes the object input and returns a tuple (output\n object, length consumed).\n\n errors defines the error handling to apply. It defaults to\n 'strict' handling.\n\n The method may not store state in the Codec instance. Use\n StreamWriter for codecs which have to keep state in order to\n make encoding efficient.\n\n The encoder must be able to handle zero length input and\n return an empty object of the output object type in this\n situation.\n\n ";raise NotImplementedError + def decode(self,input,errors=_B):" Decodes the object input and returns a tuple (output\n object, length consumed).\n\n input must be an object which provides the bf_getreadbuf\n buffer slot. Python strings, buffer objects and memory\n mapped files are examples of objects providing this slot.\n\n errors defines the error handling to apply. It defaults to\n 'strict' handling.\n\n The method may not store state in the Codec instance. Use\n StreamReader for codecs which have to keep state in order to\n make decoding efficient.\n\n The decoder must be able to handle zero length input and\n return an empty object of the output object type in this\n situation.\n\n ";raise NotImplementedError +class IncrementalEncoder: + '\n An IncrementalEncoder encodes an input in multiple steps. The input can\n be passed piece by piece to the encode() method. 
The IncrementalEncoder\n remembers the state of the encoding process between calls to encode().\n ' + def __init__(self,errors=_B):'\n Creates an IncrementalEncoder instance.\n\n The IncrementalEncoder may use different error handling schemes by\n providing the errors keyword argument. See the module docstring\n for a list of possible values.\n ';self.errors=errors;self.buffer='' + def encode(self,input,final=_D):'\n Encodes input and returns the resulting object.\n ';raise NotImplementedError + def reset(self):'\n Resets the encoder to the initial state.\n ' + def getstate(self):'\n Return the current state of the encoder.\n ';return 0 + def setstate(self,state):'\n Set the current state of the encoder. state must have been\n returned by getstate().\n ' class BufferedIncrementalEncoder(IncrementalEncoder): - """ - This subclass of IncrementalEncoder can be used as the baseclass for an - incremental encoder if the encoder must keep some of the output in a - buffer between calls to encode(). - """ - def __init__(self, errors='strict'): - IncrementalEncoder.__init__(self, errors) - # unencoded input that is kept between calls to encode() - self.buffer = "" - - def _buffer_encode(self, input, errors, final): - # Overwrite this method in subclasses: It must encode input - # and return an (output, length consumed) tuple - raise NotImplementedError - - def encode(self, input, final=False): - # encode input (taking the buffer into account) - data = self.buffer + input - (result, consumed) = self._buffer_encode(data, self.errors, final) - # keep unencoded input until the next call - self.buffer = data[consumed:] - return result - - def reset(self): - IncrementalEncoder.reset(self) - self.buffer = "" - - def getstate(self): - return self.buffer or 0 - - def setstate(self, state): - self.buffer = state or "" - -class IncrementalDecoder(object): - """ - An IncrementalDecoder decodes an input in multiple steps. The input can - be passed piece by piece to the decode() method. 
The IncrementalDecoder - remembers the state of the decoding process between calls to decode(). - """ - def __init__(self, errors='strict'): - """ - Create an IncrementalDecoder instance. - - The IncrementalDecoder may use different error handling schemes by - providing the errors keyword argument. See the module docstring - for a list of possible values. - """ - self.errors = errors - - def decode(self, input, final=False): - """ - Decode input and returns the resulting object. - """ - raise NotImplementedError - - def reset(self): - """ - Reset the decoder to the initial state. - """ - - def getstate(self): - """ - Return the current state of the decoder. - - This must be a (buffered_input, additional_state_info) tuple. - buffered_input must be a bytes object containing bytes that - were passed to decode() that have not yet been converted. - additional_state_info must be a non-negative integer - representing the state of the decoder WITHOUT yet having - processed the contents of buffered_input. In the initial state - and after reset(), getstate() must return (b"", 0). - """ - return (b"", 0) - - def setstate(self, state): - """ - Set the current state of the decoder. - - state must have been returned by getstate(). The effect of - setstate((b"", 0)) must be equivalent to reset(). 
- """ - + '\n This subclass of IncrementalEncoder can be used as the baseclass for an\n incremental encoder if the encoder must keep some of the output in a\n buffer between calls to encode().\n ' + def __init__(self,errors=_B):IncrementalEncoder.__init__(self,errors);self.buffer='' + def _buffer_encode(self,input,errors,final):raise NotImplementedError + def encode(self,input,final=_D):data=self.buffer+input;result,consumed=self._buffer_encode(data,self.errors,final);self.buffer=data[consumed:];return result + def reset(self):IncrementalEncoder.reset(self);self.buffer='' + def getstate(self):return self.buffer or 0 + def setstate(self,state):self.buffer=state or'' +class IncrementalDecoder: + '\n An IncrementalDecoder decodes an input in multiple steps. The input can\n be passed piece by piece to the decode() method. The IncrementalDecoder\n remembers the state of the decoding process between calls to decode().\n ' + def __init__(self,errors=_B):'\n Create an IncrementalDecoder instance.\n\n The IncrementalDecoder may use different error handling schemes by\n providing the errors keyword argument. See the module docstring\n for a list of possible values.\n ';self.errors=errors + def decode(self,input,final=_D):'\n Decode input and returns the resulting object.\n ';raise NotImplementedError + def reset(self):'\n Reset the decoder to the initial state.\n ' + def getstate(self):'\n Return the current state of the decoder.\n\n This must be a (buffered_input, additional_state_info) tuple.\n buffered_input must be a bytes object containing bytes that\n were passed to decode() that have not yet been converted.\n additional_state_info must be a non-negative integer\n representing the state of the decoder WITHOUT yet having\n processed the contents of buffered_input. In the initial state\n and after reset(), getstate() must return (b"", 0).\n ';return _E,0 + def setstate(self,state):'\n Set the current state of the decoder.\n\n state must have been returned by getstate(). 
The effect of\n setstate((b"", 0)) must be equivalent to reset().\n ' class BufferedIncrementalDecoder(IncrementalDecoder): - """ - This subclass of IncrementalDecoder can be used as the baseclass for an - incremental decoder if the decoder must be able to handle incomplete - byte sequences. - """ - def __init__(self, errors='strict'): - IncrementalDecoder.__init__(self, errors) - # undecoded input that is kept between calls to decode() - self.buffer = b"" - - def _buffer_decode(self, input, errors, final): - # Overwrite this method in subclasses: It must decode input - # and return an (output, length consumed) tuple - raise NotImplementedError - - def decode(self, input, final=False): - # decode input (taking the buffer into account) - data = self.buffer + input - (result, consumed) = self._buffer_decode(data, self.errors, final) - # keep undecoded input until the next call - self.buffer = data[consumed:] - return result - - def reset(self): - IncrementalDecoder.reset(self) - self.buffer = b"" - - def getstate(self): - # additional state info is always 0 - return (self.buffer, 0) - - def setstate(self, state): - # ignore additional state info - self.buffer = state[0] - -# -# The StreamWriter and StreamReader class provide generic working -# interfaces which can be used to implement new encoding submodules -# very easily. See encodings/utf_8.py for an example on how this is -# done. 
-# - + '\n This subclass of IncrementalDecoder can be used as the baseclass for an\n incremental decoder if the decoder must be able to handle incomplete\n byte sequences.\n ' + def __init__(self,errors=_B):IncrementalDecoder.__init__(self,errors);self.buffer=_E + def _buffer_decode(self,input,errors,final):raise NotImplementedError + def decode(self,input,final=_D):data=self.buffer+input;result,consumed=self._buffer_decode(data,self.errors,final);self.buffer=data[consumed:];return result + def reset(self):IncrementalDecoder.reset(self);self.buffer=_E + def getstate(self):return self.buffer,0 + def setstate(self,state):self.buffer=state[0] class StreamWriter(Codec): - - def __init__(self, stream, errors='strict'): - - """ Creates a StreamWriter instance. - - stream must be a file-like object open for writing. - - The StreamWriter may use different error handling - schemes by providing the errors keyword argument. These - parameters are predefined: - - 'strict' - raise a ValueError (or a subclass) - 'ignore' - ignore the character and continue with the next - 'replace'- replace with a suitable replacement character - 'xmlcharrefreplace' - Replace with the appropriate XML - character reference. - 'backslashreplace' - Replace with backslashed escape - sequences. - 'namereplace' - Replace with \\N{...} escape sequences. - - The set of allowed parameter values can be extended via - register_error. - """ - self.stream = stream - self.errors = errors - - def write(self, object): - - """ Writes the object's contents encoded to self.stream. - """ - data, consumed = self.encode(object, self.errors) - self.stream.write(data) - - def writelines(self, list): - - """ Writes the concatenated list of strings to the stream - using .write(). - """ - self.write(''.join(list)) - - def reset(self): - - """ Resets the codec buffers used for keeping internal state. 
- - Calling this method should ensure that the data on the - output is put into a clean state, that allows appending - of new fresh data without having to rescan the whole - stream to recover state. - - """ - pass - - def seek(self, offset, whence=0): - self.stream.seek(offset, whence) - if whence == 0 and offset == 0: - self.reset() - - def __getattr__(self, name, - getattr=getattr): - - """ Inherit all other methods from the underlying stream. - """ - return getattr(self.stream, name) - - def __enter__(self): - return self - - def __exit__(self, type, value, tb): - self.stream.close() - -### - + def __init__(self,stream,errors=_B):" Creates a StreamWriter instance.\n\n stream must be a file-like object open for writing.\n\n The StreamWriter may use different error handling\n schemes by providing the errors keyword argument. These\n parameters are predefined:\n\n 'strict' - raise a ValueError (or a subclass)\n 'ignore' - ignore the character and continue with the next\n 'replace'- replace with a suitable replacement character\n 'xmlcharrefreplace' - Replace with the appropriate XML\n character reference.\n 'backslashreplace' - Replace with backslashed escape\n sequences.\n 'namereplace' - Replace with \\N{...} escape sequences.\n\n The set of allowed parameter values can be extended via\n register_error.\n ";self.stream=stream;self.errors=errors + def write(self,object):" Writes the object's contents encoded to self.stream.\n ";data,consumed=self.encode(object,self.errors);self.stream.write(data) + def writelines(self,list):' Writes the concatenated list of strings to the stream\n using .write().\n ';self.write(''.join(list)) + def reset(self):' Resets the codec buffers used for keeping internal state.\n\n Calling this method should ensure that the data on the\n output is put into a clean state, that allows appending\n of new fresh data without having to rescan the whole\n stream to recover state.\n\n ' + def seek(self,offset,whence=0): + 
self.stream.seek(offset,whence) + if whence==0 and offset==0:self.reset() + def __getattr__(self,name,getattr=getattr):' Inherit all other methods from the underlying stream.\n ';return getattr(self.stream,name) + def __enter__(self):return self + def __exit__(self,type,value,tb):self.stream.close() class StreamReader(Codec): - - charbuffertype = str - - def __init__(self, stream, errors='strict'): - - """ Creates a StreamReader instance. - - stream must be a file-like object open for reading. - - The StreamReader may use different error handling - schemes by providing the errors keyword argument. These - parameters are predefined: - - 'strict' - raise a ValueError (or a subclass) - 'ignore' - ignore the character and continue with the next - 'replace'- replace with a suitable replacement character - 'backslashreplace' - Replace with backslashed escape sequences; - - The set of allowed parameter values can be extended via - register_error. - """ - self.stream = stream - self.errors = errors - self.bytebuffer = b"" - self._empty_charbuffer = self.charbuffertype() - self.charbuffer = self._empty_charbuffer - self.linebuffer = None - - def decode(self, input, errors='strict'): - raise NotImplementedError - - def read(self, size=-1, chars=-1, firstline=False): - - """ Decodes data from the stream self.stream and returns the - resulting object. - - chars indicates the number of decoded code points or bytes to - return. read() will never return more data than requested, - but it might return less, if there is not enough available. - - size indicates the approximate maximum number of decoded - bytes or code points to read for decoding. The decoder - can modify this setting as appropriate. The default value - -1 indicates to read and decode as much as possible. size - is intended to prevent having to decode huge files in one - step. 
- - If firstline is true, and a UnicodeDecodeError happens - after the first line terminator in the input only the first line - will be returned, the rest of the input will be kept until the - next call to read(). - - The method should use a greedy read strategy, meaning that - it should read as much data as is allowed within the - definition of the encoding and the given size, e.g. if - optional encoding endings or state markers are available - on the stream, these should be read too. - """ - # If we have lines cached, first merge them back into characters - if self.linebuffer: - self.charbuffer = self._empty_charbuffer.join(self.linebuffer) - self.linebuffer = None - - if chars < 0: - # For compatibility with other read() methods that take a - # single argument - chars = size - - # read until we get the required number of characters (if available) - while True: - # can the request be satisfied from the character buffer? - if chars >= 0: - if len(self.charbuffer) >= chars: - break - # we need more data - if size < 0: - newdata = self.stream.read() - else: - newdata = self.stream.read(size) - # decode bytes (those remaining from the last call included) - data = self.bytebuffer + newdata - if not data: - break - try: - newchars, decodedbytes = self.decode(data, self.errors) - except UnicodeDecodeError as exc: - if firstline: - newchars, decodedbytes = \ - self.decode(data[:exc.start], self.errors) - lines = newchars.splitlines(keepends=True) - if len(lines)<=1: - raise - else: - raise - # keep undecoded bytes until the next call - self.bytebuffer = data[decodedbytes:] - # put new characters in the character buffer - self.charbuffer += newchars - # there was no data available - if not newdata: - break - if chars < 0: - # Return everything we've got - result = self.charbuffer - self.charbuffer = self._empty_charbuffer - else: - # Return the first chars characters - result = self.charbuffer[:chars] - self.charbuffer = self.charbuffer[chars:] - return result - - def 
readline(self, size=None, keepends=True): - - """ Read one line from the input stream and return the - decoded data. - - size, if given, is passed as size argument to the - read() method. - - """ - # If we have lines cached from an earlier read, return - # them unconditionally - if self.linebuffer: - line = self.linebuffer[0] - del self.linebuffer[0] - if len(self.linebuffer) == 1: - # revert to charbuffer mode; we might need more data - # next time - self.charbuffer = self.linebuffer[0] - self.linebuffer = None - if not keepends: - line = line.splitlines(keepends=False)[0] - return line - - readsize = size or 72 - line = self._empty_charbuffer - # If size is given, we call read() only once - while True: - data = self.read(readsize, firstline=True) - if data: - # If we're at a "\r" read one extra character (which might - # be a "\n") to get a proper line ending. If the stream is - # temporarily exhausted we return the wrong line ending. - if (isinstance(data, str) and data.endswith("\r")) or \ - (isinstance(data, bytes) and data.endswith(b"\r")): - data += self.read(size=1, chars=1) - - line += data - lines = line.splitlines(keepends=True) - if lines: - if len(lines) > 1: - # More than one line result; the first line is a full line - # to return - line = lines[0] - del lines[0] - if len(lines) > 1: - # cache the remaining lines - lines[-1] += self.charbuffer - self.linebuffer = lines - self.charbuffer = None - else: - # only one remaining line, put it back into charbuffer - self.charbuffer = lines[0] + self.charbuffer - if not keepends: - line = line.splitlines(keepends=False)[0] - break - line0withend = lines[0] - line0withoutend = lines[0].splitlines(keepends=False)[0] - if line0withend != line0withoutend: # We really have a line end - # Put the rest back together and keep it until the next call - self.charbuffer = self._empty_charbuffer.join(lines[1:]) + \ - self.charbuffer - if keepends: - line = line0withend - else: - line = line0withoutend - break - # we 
didn't get anything or this was our only try - if not data or size is not None: - if line and not keepends: - line = line.splitlines(keepends=False)[0] - break - if readsize < 8000: - readsize *= 2 - return line - - def readlines(self, sizehint=None, keepends=True): - - """ Read all lines available on the input stream - and return them as a list. - - Line breaks are implemented using the codec's decoder - method and are included in the list entries. - - sizehint, if given, is ignored since there is no efficient - way to finding the true end-of-line. - - """ - data = self.read() - return data.splitlines(keepends) - - def reset(self): - - """ Resets the codec buffers used for keeping internal state. - - Note that no stream repositioning should take place. - This method is primarily intended to be able to recover - from decoding errors. - - """ - self.bytebuffer = b"" - self.charbuffer = self._empty_charbuffer - self.linebuffer = None - - def seek(self, offset, whence=0): - """ Set the input stream's current position. - - Resets the codec buffers used for keeping state. - """ - self.stream.seek(offset, whence) - self.reset() - - def __next__(self): - - """ Return the next decoded line from the input stream.""" - line = self.readline() - if line: - return line - raise StopIteration - - def __iter__(self): - return self - - def __getattr__(self, name, - getattr=getattr): - - """ Inherit all other methods from the underlying stream. - """ - return getattr(self.stream, name) - - def __enter__(self): - return self - - def __exit__(self, type, value, tb): - self.stream.close() - -### - + charbuffertype=str + def __init__(self,stream,errors=_B):" Creates a StreamReader instance.\n\n stream must be a file-like object open for reading.\n\n The StreamReader may use different error handling\n schemes by providing the errors keyword argument. 
These\n parameters are predefined:\n\n 'strict' - raise a ValueError (or a subclass)\n 'ignore' - ignore the character and continue with the next\n 'replace'- replace with a suitable replacement character\n 'backslashreplace' - Replace with backslashed escape sequences;\n\n The set of allowed parameter values can be extended via\n register_error.\n ";self.stream=stream;self.errors=errors;self.bytebuffer=_E;self._empty_charbuffer=self.charbuffertype();self.charbuffer=self._empty_charbuffer;self.linebuffer=_A + def decode(self,input,errors=_B):raise NotImplementedError + def read(self,size=-1,chars=-1,firstline=_D): + ' Decodes data from the stream self.stream and returns the\n resulting object.\n\n chars indicates the number of decoded code points or bytes to\n return. read() will never return more data than requested,\n but it might return less, if there is not enough available.\n\n size indicates the approximate maximum number of decoded\n bytes or code points to read for decoding. The decoder\n can modify this setting as appropriate. The default value\n -1 indicates to read and decode as much as possible. size\n is intended to prevent having to decode huge files in one\n step.\n\n If firstline is true, and a UnicodeDecodeError happens\n after the first line terminator in the input only the first line\n will be returned, the rest of the input will be kept until the\n next call to read().\n\n The method should use a greedy read strategy, meaning that\n it should read as much data as is allowed within the\n definition of the encoding and the given size, e.g. 
if\n optional encoding endings or state markers are available\n on the stream, these should be read too.\n ' + if self.linebuffer:self.charbuffer=self._empty_charbuffer.join(self.linebuffer);self.linebuffer=_A + if chars<0:chars=size + while _C: + if chars>=0: + if len(self.charbuffer)>=chars:break + if size<0:newdata=self.stream.read() + else:newdata=self.stream.read(size) + data=self.bytebuffer+newdata + if not data:break + try:newchars,decodedbytes=self.decode(data,self.errors) + except UnicodeDecodeError as exc: + if firstline: + newchars,decodedbytes=self.decode(data[:exc.start],self.errors);lines=newchars.splitlines(keepends=_C) + if len(lines)<=1:raise + else:raise + self.bytebuffer=data[decodedbytes:];self.charbuffer+=newchars + if not newdata:break + if chars<0:result=self.charbuffer;self.charbuffer=self._empty_charbuffer + else:result=self.charbuffer[:chars];self.charbuffer=self.charbuffer[chars:] + return result + def readline(self,size=_A,keepends=_C): + ' Read one line from the input stream and return the\n decoded data.\n\n size, if given, is passed as size argument to the\n read() method.\n\n ' + if self.linebuffer: + line=self.linebuffer[0];del self.linebuffer[0] + if len(self.linebuffer)==1:self.charbuffer=self.linebuffer[0];self.linebuffer=_A + if not keepends:line=line.splitlines(keepends=_D)[0] + return line + readsize=size or 72;line=self._empty_charbuffer + while _C: + data=self.read(readsize,firstline=_C) + if data: + if isinstance(data,str)and data.endswith('\r')or isinstance(data,bytes)and data.endswith(b'\r'):data+=self.read(size=1,chars=1) + line+=data;lines=line.splitlines(keepends=_C) + if lines: + if len(lines)>1: + line=lines[0];del lines[0] + if len(lines)>1:lines[-1]+=self.charbuffer;self.linebuffer=lines;self.charbuffer=_A + else:self.charbuffer=lines[0]+self.charbuffer + if not keepends:line=line.splitlines(keepends=_D)[0] + break + line0withend=lines[0];line0withoutend=lines[0].splitlines(keepends=_D)[0] + if 
line0withend!=line0withoutend: + self.charbuffer=self._empty_charbuffer.join(lines[1:])+self.charbuffer + if keepends:line=line0withend + else:line=line0withoutend + break + if not data or size is not _A: + if line and not keepends:line=line.splitlines(keepends=_D)[0] + break + if readsize<8000:readsize*=2 + return line + def readlines(self,sizehint=_A,keepends=_C):" Read all lines available on the input stream\n and return them as a list.\n\n Line breaks are implemented using the codec's decoder\n method and are included in the list entries.\n\n sizehint, if given, is ignored since there is no efficient\n way to finding the true end-of-line.\n\n ";data=self.read();return data.splitlines(keepends) + def reset(self):' Resets the codec buffers used for keeping internal state.\n\n Note that no stream repositioning should take place.\n This method is primarily intended to be able to recover\n from decoding errors.\n\n ';self.bytebuffer=_E;self.charbuffer=self._empty_charbuffer;self.linebuffer=_A + def seek(self,offset,whence=0):" Set the input stream's current position.\n\n Resets the codec buffers used for keeping state.\n ";self.stream.seek(offset,whence);self.reset() + def __next__(self): + ' Return the next decoded line from the input stream.';line=self.readline() + if line:return line + raise StopIteration + def __iter__(self):return self + def __getattr__(self,name,getattr=getattr):' Inherit all other methods from the underlying stream.\n ';return getattr(self.stream,name) + def __enter__(self):return self + def __exit__(self,type,value,tb):self.stream.close() class StreamReaderWriter: - - """ StreamReaderWriter instances allow wrapping streams which - work in both read and write modes. - - The design is such that one can use the factory functions - returned by the codec.lookup() function to construct the - instance. 
- - """ - # Optional attributes set by the file wrappers below - encoding = 'unknown' - - def __init__(self, stream, Reader, Writer, errors='strict'): - - """ Creates a StreamReaderWriter instance. - - stream must be a Stream-like object. - - Reader, Writer must be factory functions or classes - providing the StreamReader, StreamWriter interface resp. - - Error handling is done in the same way as defined for the - StreamWriter/Readers. - - """ - self.stream = stream - self.reader = Reader(stream, errors) - self.writer = Writer(stream, errors) - self.errors = errors - - def read(self, size=-1): - - return self.reader.read(size) - - def readline(self, size=None): - - return self.reader.readline(size) - - def readlines(self, sizehint=None): - - return self.reader.readlines(sizehint) - - def __next__(self): - - """ Return the next decoded line from the input stream.""" - return next(self.reader) - - def __iter__(self): - return self - - def write(self, data): - - return self.writer.write(data) - - def writelines(self, list): - - return self.writer.writelines(list) - - def reset(self): - - self.reader.reset() - self.writer.reset() - - def seek(self, offset, whence=0): - self.stream.seek(offset, whence) - self.reader.reset() - if whence == 0 and offset == 0: - self.writer.reset() - - def __getattr__(self, name, - getattr=getattr): - - """ Inherit all other methods from the underlying stream. 
- """ - return getattr(self.stream, name) - - # these are needed to make "with StreamReaderWriter(...)" work properly - - def __enter__(self): - return self - - def __exit__(self, type, value, tb): - self.stream.close() - -### - + ' StreamReaderWriter instances allow wrapping streams which\n work in both read and write modes.\n\n The design is such that one can use the factory functions\n returned by the codec.lookup() function to construct the\n instance.\n\n ';encoding=_F + def __init__(self,stream,Reader,Writer,errors=_B):' Creates a StreamReaderWriter instance.\n\n stream must be a Stream-like object.\n\n Reader, Writer must be factory functions or classes\n providing the StreamReader, StreamWriter interface resp.\n\n Error handling is done in the same way as defined for the\n StreamWriter/Readers.\n\n ';self.stream=stream;self.reader=Reader(stream,errors);self.writer=Writer(stream,errors);self.errors=errors + def read(self,size=-1):return self.reader.read(size) + def readline(self,size=_A):return self.reader.readline(size) + def readlines(self,sizehint=_A):return self.reader.readlines(sizehint) + def __next__(self):' Return the next decoded line from the input stream.';return next(self.reader) + def __iter__(self):return self + def write(self,data):return self.writer.write(data) + def writelines(self,list):return self.writer.writelines(list) + def reset(self):self.reader.reset();self.writer.reset() + def seek(self,offset,whence=0): + self.stream.seek(offset,whence);self.reader.reset() + if whence==0 and offset==0:self.writer.reset() + def __getattr__(self,name,getattr=getattr):' Inherit all other methods from the underlying stream.\n ';return getattr(self.stream,name) + def __enter__(self):return self + def __exit__(self,type,value,tb):self.stream.close() class StreamRecoder: - - """ StreamRecoder instances translate data from one encoding to another. - - They use the complete set of APIs returned by the - codecs.lookup() function to implement their task. 
- - Data written to the StreamRecoder is first decoded into an - intermediate format (depending on the "decode" codec) and then - written to the underlying stream using an instance of the provided - Writer class. - - In the other direction, data is read from the underlying stream using - a Reader instance and then encoded and returned to the caller. - - """ - # Optional attributes set by the file wrappers below - data_encoding = 'unknown' - file_encoding = 'unknown' - - def __init__(self, stream, encode, decode, Reader, Writer, - errors='strict'): - - """ Creates a StreamRecoder instance which implements a two-way - conversion: encode and decode work on the frontend (the - data visible to .read() and .write()) while Reader and Writer - work on the backend (the data in stream). - - You can use these objects to do transparent - transcodings from e.g. latin-1 to utf-8 and back. - - stream must be a file-like object. - - encode and decode must adhere to the Codec interface; Reader and - Writer must be factory functions or classes providing the - StreamReader and StreamWriter interfaces resp. - - Error handling is done in the same way as defined for the - StreamWriter/Readers. 
- - """ - self.stream = stream - self.encode = encode - self.decode = decode - self.reader = Reader(stream, errors) - self.writer = Writer(stream, errors) - self.errors = errors - - def read(self, size=-1): - - data = self.reader.read(size) - data, bytesencoded = self.encode(data, self.errors) - return data - - def readline(self, size=None): - - if size is None: - data = self.reader.readline() - else: - data = self.reader.readline(size) - data, bytesencoded = self.encode(data, self.errors) - return data - - def readlines(self, sizehint=None): - - data = self.reader.read() - data, bytesencoded = self.encode(data, self.errors) - return data.splitlines(keepends=True) - - def __next__(self): - - """ Return the next decoded line from the input stream.""" - data = next(self.reader) - data, bytesencoded = self.encode(data, self.errors) - return data - - def __iter__(self): - return self - - def write(self, data): - - data, bytesdecoded = self.decode(data, self.errors) - return self.writer.write(data) - - def writelines(self, list): - - data = b''.join(list) - data, bytesdecoded = self.decode(data, self.errors) - return self.writer.write(data) - - def reset(self): - - self.reader.reset() - self.writer.reset() - - def seek(self, offset, whence=0): - # Seeks must be propagated to both the readers and writers - # as they might need to reset their internal buffers. - self.reader.seek(offset, whence) - self.writer.seek(offset, whence) - - def __getattr__(self, name, - getattr=getattr): - - """ Inherit all other methods from the underlying stream. - """ - return getattr(self.stream, name) - - def __enter__(self): - return self - - def __exit__(self, type, value, tb): - self.stream.close() - -### Shortcuts - -def open(filename, mode='r', encoding=None, errors='strict', buffering=-1): - - """ Open an encoded file using the given mode and return - a wrapped version providing transparent encoding/decoding. 
- - Note: The wrapped version will only accept the object format - defined by the codecs, i.e. Unicode objects for most builtin - codecs. Output is also codec dependent and will usually be - Unicode as well. - - Underlying encoded files are always opened in binary mode. - The default file mode is 'r', meaning to open the file in read mode. - - encoding specifies the encoding which is to be used for the - file. - - errors may be given to define the error handling. It defaults - to 'strict' which causes ValueErrors to be raised in case an - encoding error occurs. - - buffering has the same meaning as for the builtin open() API. - It defaults to -1 which means that the default buffer size will - be used. - - The returned wrapped file object provides an extra attribute - .encoding which allows querying the used encoding. This - attribute is only available if an encoding was specified as - parameter. - - """ - if encoding is not None and \ - 'b' not in mode: - # Force opening of the file in binary mode - mode = mode + 'b' - file = builtins.open(filename, mode, buffering) - if encoding is None: - return file - - try: - info = lookup(encoding) - srw = StreamReaderWriter(file, info.streamreader, info.streamwriter, errors) - # Add attributes to simplify introspection - srw.encoding = encoding - return srw - except: - file.close() - raise - -def EncodedFile(file, data_encoding, file_encoding=None, errors='strict'): - - """ Return a wrapped version of file which provides transparent - encoding translation. - - Data written to the wrapped file is decoded according - to the given data_encoding and then encoded to the underlying - file using file_encoding. The intermediate data type - will usually be Unicode but depends on the specified codecs. - - Bytes read from the file are decoded using file_encoding and then - passed back to the caller encoded using data_encoding. - - If file_encoding is not given, it defaults to data_encoding. 
- - errors may be given to define the error handling. It defaults - to 'strict' which causes ValueErrors to be raised in case an - encoding error occurs. - - The returned wrapped file object provides two extra attributes - .data_encoding and .file_encoding which reflect the given - parameters of the same name. The attributes can be used for - introspection by Python programs. - - """ - if file_encoding is None: - file_encoding = data_encoding - data_info = lookup(data_encoding) - file_info = lookup(file_encoding) - sr = StreamRecoder(file, data_info.encode, data_info.decode, - file_info.streamreader, file_info.streamwriter, errors) - # Add attributes to simplify introspection - sr.data_encoding = data_encoding - sr.file_encoding = file_encoding - return sr - -### Helpers for codec lookup - -def getencoder(encoding): - - """ Lookup up the codec for the given encoding and return - its encoder function. - - Raises a LookupError in case the encoding cannot be found. - - """ - return lookup(encoding).encode - -def getdecoder(encoding): - - """ Lookup up the codec for the given encoding and return - its decoder function. - - Raises a LookupError in case the encoding cannot be found. 
- - """ - return lookup(encoding).decode - + ' StreamRecoder instances translate data from one encoding to another.\n\n They use the complete set of APIs returned by the\n codecs.lookup() function to implement their task.\n\n Data written to the StreamRecoder is first decoded into an\n intermediate format (depending on the "decode" codec) and then\n written to the underlying stream using an instance of the provided\n Writer class.\n\n In the other direction, data is read from the underlying stream using\n a Reader instance and then encoded and returned to the caller.\n\n ';data_encoding=_F;file_encoding=_F + def __init__(self,stream,encode,decode,Reader,Writer,errors=_B):' Creates a StreamRecoder instance which implements a two-way\n conversion: encode and decode work on the frontend (the\n data visible to .read() and .write()) while Reader and Writer\n work on the backend (the data in stream).\n\n You can use these objects to do transparent\n transcodings from e.g. latin-1 to utf-8 and back.\n\n stream must be a file-like object.\n\n encode and decode must adhere to the Codec interface; Reader and\n Writer must be factory functions or classes providing the\n StreamReader and StreamWriter interfaces resp.\n\n Error handling is done in the same way as defined for the\n StreamWriter/Readers.\n\n ';self.stream=stream;self.encode=encode;self.decode=decode;self.reader=Reader(stream,errors);self.writer=Writer(stream,errors);self.errors=errors + def read(self,size=-1):data=self.reader.read(size);data,bytesencoded=self.encode(data,self.errors);return data + def readline(self,size=_A): + if size is _A:data=self.reader.readline() + else:data=self.reader.readline(size) + data,bytesencoded=self.encode(data,self.errors);return data + def readlines(self,sizehint=_A):data=self.reader.read();data,bytesencoded=self.encode(data,self.errors);return data.splitlines(keepends=_C) + def __next__(self):' Return the next decoded line from the input 
stream.';data=next(self.reader);data,bytesencoded=self.encode(data,self.errors);return data + def __iter__(self):return self + def write(self,data):data,bytesdecoded=self.decode(data,self.errors);return self.writer.write(data) + def writelines(self,list):data=_E.join(list);data,bytesdecoded=self.decode(data,self.errors);return self.writer.write(data) + def reset(self):self.reader.reset();self.writer.reset() + def seek(self,offset,whence=0):self.reader.seek(offset,whence);self.writer.seek(offset,whence) + def __getattr__(self,name,getattr=getattr):' Inherit all other methods from the underlying stream.\n ';return getattr(self.stream,name) + def __enter__(self):return self + def __exit__(self,type,value,tb):self.stream.close() +def open(filename,mode='r',encoding=_A,errors=_B,buffering=-1): + " Open an encoded file using the given mode and return\n a wrapped version providing transparent encoding/decoding.\n\n Note: The wrapped version will only accept the object format\n defined by the codecs, i.e. Unicode objects for most builtin\n codecs. Output is also codec dependent and will usually be\n Unicode as well.\n\n Underlying encoded files are always opened in binary mode.\n The default file mode is 'r', meaning to open the file in read mode.\n\n encoding specifies the encoding which is to be used for the\n file.\n\n errors may be given to define the error handling. It defaults\n to 'strict' which causes ValueErrors to be raised in case an\n encoding error occurs.\n\n buffering has the same meaning as for the builtin open() API.\n It defaults to -1 which means that the default buffer size will\n be used.\n\n The returned wrapped file object provides an extra attribute\n .encoding which allows querying the used encoding. 
This\n attribute is only available if an encoding was specified as\n parameter.\n\n " + if encoding is not _A and'b'not in mode:mode=mode+'b' + file=builtins.open(filename,mode,buffering) + if encoding is _A:return file + try:info=lookup(encoding);srw=StreamReaderWriter(file,info.streamreader,info.streamwriter,errors);srw.encoding=encoding;return srw + except:file.close();raise +def EncodedFile(file,data_encoding,file_encoding=_A,errors=_B): + " Return a wrapped version of file which provides transparent\n encoding translation.\n\n Data written to the wrapped file is decoded according\n to the given data_encoding and then encoded to the underlying\n file using file_encoding. The intermediate data type\n will usually be Unicode but depends on the specified codecs.\n\n Bytes read from the file are decoded using file_encoding and then\n passed back to the caller encoded using data_encoding.\n\n If file_encoding is not given, it defaults to data_encoding.\n\n errors may be given to define the error handling. It defaults\n to 'strict' which causes ValueErrors to be raised in case an\n encoding error occurs.\n\n The returned wrapped file object provides two extra attributes\n .data_encoding and .file_encoding which reflect the given\n parameters of the same name. 
The attributes can be used for\n introspection by Python programs.\n\n " + if file_encoding is _A:file_encoding=data_encoding + data_info=lookup(data_encoding);file_info=lookup(file_encoding);sr=StreamRecoder(file,data_info.encode,data_info.decode,file_info.streamreader,file_info.streamwriter,errors);sr.data_encoding=data_encoding;sr.file_encoding=file_encoding;return sr +def getencoder(encoding):' Lookup up the codec for the given encoding and return\n its encoder function.\n\n Raises a LookupError in case the encoding cannot be found.\n\n ';return lookup(encoding).encode +def getdecoder(encoding):' Lookup up the codec for the given encoding and return\n its decoder function.\n\n Raises a LookupError in case the encoding cannot be found.\n\n ';return lookup(encoding).decode def getincrementalencoder(encoding): - - """ Lookup up the codec for the given encoding and return - its IncrementalEncoder class or factory function. - - Raises a LookupError in case the encoding cannot be found - or the codecs doesn't provide an incremental encoder. - - """ - encoder = lookup(encoding).incrementalencoder - if encoder is None: - raise LookupError(encoding) - return encoder - + " Lookup up the codec for the given encoding and return\n its IncrementalEncoder class or factory function.\n\n Raises a LookupError in case the encoding cannot be found\n or the codecs doesn't provide an incremental encoder.\n\n ";encoder=lookup(encoding).incrementalencoder + if encoder is _A:raise LookupError(encoding) + return encoder def getincrementaldecoder(encoding): - - """ Lookup up the codec for the given encoding and return - its IncrementalDecoder class or factory function. - - Raises a LookupError in case the encoding cannot be found - or the codecs doesn't provide an incremental decoder. 
- - """ - decoder = lookup(encoding).incrementaldecoder - if decoder is None: - raise LookupError(encoding) - return decoder - -def getreader(encoding): - - """ Lookup up the codec for the given encoding and return - its StreamReader class or factory function. - - Raises a LookupError in case the encoding cannot be found. - - """ - return lookup(encoding).streamreader - -def getwriter(encoding): - - """ Lookup up the codec for the given encoding and return - its StreamWriter class or factory function. - - Raises a LookupError in case the encoding cannot be found. - - """ - return lookup(encoding).streamwriter - -def iterencode(iterator, encoding, errors='strict', **kwargs): - """ - Encoding iterator. - - Encodes the input strings from the iterator using an IncrementalEncoder. - - errors and kwargs are passed through to the IncrementalEncoder - constructor. - """ - encoder = getincrementalencoder(encoding)(errors, **kwargs) - for input in iterator: - output = encoder.encode(input) - if output: - yield output - output = encoder.encode("", True) - if output: - yield output - -def iterdecode(iterator, encoding, errors='strict', **kwargs): - """ - Decoding iterator. - - Decodes the input strings from the iterator using an IncrementalDecoder. - - errors and kwargs are passed through to the IncrementalDecoder - constructor. - """ - decoder = getincrementaldecoder(encoding)(errors, **kwargs) - for input in iterator: - output = decoder.decode(input) - if output: - yield output - output = decoder.decode(b"", True) - if output: - yield output - -### Helpers for charmap-based codecs - -def make_identity_dict(rng): - - """ make_identity_dict(rng) -> dict - - Return a dictionary where elements of the rng sequence are - mapped to themselves. 
- - """ - return {i:i for i in rng} - + " Lookup up the codec for the given encoding and return\n its IncrementalDecoder class or factory function.\n\n Raises a LookupError in case the encoding cannot be found\n or the codecs doesn't provide an incremental decoder.\n\n ";decoder=lookup(encoding).incrementaldecoder + if decoder is _A:raise LookupError(encoding) + return decoder +def getreader(encoding):' Lookup up the codec for the given encoding and return\n its StreamReader class or factory function.\n\n Raises a LookupError in case the encoding cannot be found.\n\n ';return lookup(encoding).streamreader +def getwriter(encoding):' Lookup up the codec for the given encoding and return\n its StreamWriter class or factory function.\n\n Raises a LookupError in case the encoding cannot be found.\n\n ';return lookup(encoding).streamwriter +def iterencode(iterator,encoding,errors=_B,**kwargs): + '\n Encoding iterator.\n\n Encodes the input strings from the iterator using an IncrementalEncoder.\n\n errors and kwargs are passed through to the IncrementalEncoder\n constructor.\n ';encoder=getincrementalencoder(encoding)(errors,**kwargs) + for input in iterator: + output=encoder.encode(input) + if output:yield output + output=encoder.encode('',_C) + if output:yield output +def iterdecode(iterator,encoding,errors=_B,**kwargs): + '\n Decoding iterator.\n\n Decodes the input strings from the iterator using an IncrementalDecoder.\n\n errors and kwargs are passed through to the IncrementalDecoder\n constructor.\n ';decoder=getincrementaldecoder(encoding)(errors,**kwargs) + for input in iterator: + output=decoder.decode(input) + if output:yield output + output=decoder.decode(_E,_C) + if output:yield output +def make_identity_dict(rng):' make_identity_dict(rng) -> dict\n\n Return a dictionary where elements of the rng sequence are\n mapped to themselves.\n\n ';return{i:i for i in rng} def make_encoding_map(decoding_map): - - """ Creates an encoding map from a decoding map. 
- - If a target mapping in the decoding map occurs multiple - times, then that target is mapped to None (undefined mapping), - causing an exception when encountered by the charmap codec - during translation. - - One example where this happens is cp875.py which decodes - multiple character to \\u001a. - - """ - m = {} - for k,v in decoding_map.items(): - if not v in m: - m[v] = k - else: - m[v] = None - return m - -### error handlers - -try: - strict_errors = lookup_error("strict") - ignore_errors = lookup_error("ignore") - replace_errors = lookup_error("replace") - xmlcharrefreplace_errors = lookup_error("xmlcharrefreplace") - backslashreplace_errors = lookup_error("backslashreplace") - namereplace_errors = lookup_error("namereplace") -except LookupError: - # In --disable-unicode builds, these error handler are missing - strict_errors = None - ignore_errors = None - replace_errors = None - xmlcharrefreplace_errors = None - backslashreplace_errors = None - namereplace_errors = None - -# Tell modulefinder that using codecs probably needs the encodings -# package -_false = 0 -if _false: - import encodings - -### Tests - -if __name__ == '__main__': - - # Make stdout translate Latin-1 output into UTF-8 output - sys.stdout = EncodedFile(sys.stdout, 'latin-1', 'utf-8') - - # Have stdin translate Latin-1 input into UTF-8 input - sys.stdin = EncodedFile(sys.stdin, 'utf-8', 'latin-1') + ' Creates an encoding map from a decoding map.\n\n If a target mapping in the decoding map occurs multiple\n times, then that target is mapped to None (undefined mapping),\n causing an exception when encountered by the charmap codec\n during translation.\n\n One example where this happens is cp875.py which decodes\n multiple character to \\u001a.\n\n ';m={} + for(k,v)in decoding_map.items(): + if not v in m:m[v]=k + else:m[v]=_A + return m 
+try:strict_errors=lookup_error(_B);ignore_errors=lookup_error('ignore');replace_errors=lookup_error('replace');xmlcharrefreplace_errors=lookup_error('xmlcharrefreplace');backslashreplace_errors=lookup_error('backslashreplace');namereplace_errors=lookup_error('namereplace') +except LookupError:strict_errors=_A;ignore_errors=_A;replace_errors=_A;xmlcharrefreplace_errors=_A;backslashreplace_errors=_A;namereplace_errors=_A +_false=0 +if _false:import encodings +if __name__=='__main__':sys.stdout=EncodedFile(sys.stdout,_G,'utf-8');sys.stdin=EncodedFile(sys.stdin,'utf-8',_G) \ No newline at end of file diff --git a/Lib/codeop.py b/Lib/codeop.py index 2213b69f231..08a99a7071d 100644 --- a/Lib/codeop.py +++ b/Lib/codeop.py @@ -1,153 +1,41 @@ -r"""Utilities to compile possibly incomplete Python source code. - -This module provides two interfaces, broadly similar to the builtin -function compile(), which take program text, a filename and a 'mode' -and: - -- Return code object if the command is complete and valid -- Return None if the command is incomplete -- Raise SyntaxError, ValueError or OverflowError if the command is a - syntax error (OverflowError and ValueError can be produced by - malformed literals). - -The two interfaces are: - -compile_command(source, filename, symbol): - - Compiles a single command in the manner described above. - -CommandCompiler(): - - Instances of this class have __call__ methods identical in - signature to compile_command; the difference is that if the - instance compiles program text containing a __future__ statement, - the instance 'remembers' and compiles all subsequent program texts - with the statement in force. - -The module also provides another class: - -Compile(): - - Instances of this class act like the built-in function compile, - but with 'memory' in the sense described above. 
-""" - -import __future__ -import warnings - -_features = [getattr(__future__, fname) - for fname in __future__.all_feature_names] - -__all__ = ["compile_command", "Compile", "CommandCompiler"] - -# The following flags match the values from Include/cpython/compile.h -# Caveat emptor: These flags are undocumented on purpose and depending -# on their effect outside the standard library is **unsupported**. -PyCF_DONT_IMPLY_DEDENT = 0x200 -PyCF_ALLOW_INCOMPLETE_INPUT = 0x4000 - -def _maybe_compile(compiler, source, filename, symbol): - # Check for source consisting of only blank lines and comments. - for line in source.split("\n"): - line = line.strip() - if line and line[0] != '#': - break # Leave it alone. - else: - if symbol != "eval": - source = "pass" # Replace it with a 'pass' statement - - # Disable compiler warnings when checking for incomplete input. - with warnings.catch_warnings(): - warnings.simplefilter("ignore", (SyntaxWarning, DeprecationWarning)) - try: - compiler(source, filename, symbol) - except SyntaxError: # Let other compile() errors propagate. - try: - compiler(source + "\n", filename, symbol) - return None - except SyntaxError as e: - if "incomplete input" in str(e): - return None - # fallthrough - - return compiler(source, filename, symbol) - - -def _is_syntax_error(err1, err2): - rep1 = repr(err1) - rep2 = repr(err2) - if "was never closed" in rep1 and "was never closed" in rep2: - return False - if rep1 == rep2: - return True - return False - -def _compile(source, filename, symbol): - return compile(source, filename, symbol, PyCF_DONT_IMPLY_DEDENT | PyCF_ALLOW_INCOMPLETE_INPUT) - -def compile_command(source, filename="", symbol="single"): - r"""Compile a command and determine whether it is incomplete. 
- - Arguments: - - source -- the source string; may contain \n characters - filename -- optional filename from which source was read; default - "" - symbol -- optional grammar start symbol; "single" (default), "exec" - or "eval" - - Return value / exceptions raised: - - - Return a code object if the command is complete and valid - - Return None if the command is incomplete - - Raise SyntaxError, ValueError or OverflowError if the command is a - syntax error (OverflowError and ValueError can be produced by - malformed literals). - """ - return _maybe_compile(_compile, source, filename, symbol) - +"Utilities to compile possibly incomplete Python source code.\n\nThis module provides two interfaces, broadly similar to the builtin\nfunction compile(), which take program text, a filename and a 'mode'\nand:\n\n- Return code object if the command is complete and valid\n- Return None if the command is incomplete\n- Raise SyntaxError, ValueError or OverflowError if the command is a\n syntax error (OverflowError and ValueError can be produced by\n malformed literals).\n\nApproach:\n\nFirst, check if the source consists entirely of blank lines and\ncomments; if so, replace it with 'pass', because the built-in\nparser doesn't always do the right thing for these.\n\nCompile three times: as is, with \\n, and with \\n\\n appended. If it\ncompiles as is, it's complete. If it compiles with one \\n appended,\nwe expect more. If it doesn't compile either way, we compare the\nerror we get when compiling with \\n or \\n\\n appended. If the errors\nare the same, the code is broken. But if the errors are different, we\nexpect more. 
Not intuitive; not even guaranteed to hold in future\nreleases; but this matches the compiler's behavior from Python 1.4\nthrough 2.2, at least.\n\nCaveat:\n\nIt is possible (but not likely) that the parser stops parsing with a\nsuccessful outcome before reaching the end of the source; in this\ncase, trailing symbols may be ignored instead of causing an error.\nFor example, a backslash followed by two newlines may be followed by\narbitrary garbage. This will be fixed once the API for the parser is\nbetter.\n\nThe two interfaces are:\n\ncompile_command(source, filename, symbol):\n\n Compiles a single command in the manner described above.\n\nCommandCompiler():\n\n Instances of this class have __call__ methods identical in\n signature to compile_command; the difference is that if the\n instance compiles program text containing a __future__ statement,\n the instance 'remembers' and compiles all subsequent program texts\n with the statement in force.\n\nThe module also provides another class:\n\nCompile():\n\n Instances of this class act like the built-in function compile,\n but with 'memory' in the sense described above.\n" +_B='single' +_A='' +import __future__,warnings +_features=[getattr(__future__,A)for A in __future__.all_feature_names] +__all__=['compile_command','Compile','CommandCompiler'] +PyCF_DONT_IMPLY_DEDENT=512 +def _maybe_compile(compiler,source,filename,symbol): + E=None;F=filename;G=compiler;B=symbol;A=source + for C in A.split('\n'): + C=C.strip() + if C and C[0]!='#':break + else: + if B!='eval':A='pass' + M=D=H=E;I=K=L=E + try:I=G(A,F,B) + except SyntaxError:pass + with warnings.catch_warnings(): + warnings.simplefilter('error') + try:K=G(A+'\n',F,B) + except SyntaxError as J:D=J + try:L=G(A+'\n\n',F,B) + except SyntaxError as J:H=J + try: + if I:return I + if not K and repr(D)==repr(H):raise D + finally:D=H=E +def _compile(source,filename,symbol):return compile(source,filename,symbol,PyCF_DONT_IMPLY_DEDENT) +def 
compile_command(source,filename=_A,symbol=_B):'Compile a command and determine whether it is incomplete.\n\n Arguments:\n\n source -- the source string; may contain \\n characters\n filename -- optional filename from which source was read; default\n ""\n symbol -- optional grammar start symbol; "single" (default), "exec"\n or "eval"\n\n Return value / exceptions raised:\n\n - Return a code object if the command is complete and valid\n - Return None if the command is incomplete\n - Raise SyntaxError, ValueError or OverflowError if the command is a\n syntax error (OverflowError and ValueError can be produced by\n malformed literals).\n ';return _maybe_compile(_compile,source,filename,symbol) class Compile: - """Instances of this class behave much like the built-in compile - function, but if one is used to compile text containing a future - statement, it "remembers" and compiles all subsequent program texts - with the statement in force.""" - def __init__(self): - self.flags = PyCF_DONT_IMPLY_DEDENT | PyCF_ALLOW_INCOMPLETE_INPUT - - def __call__(self, source, filename, symbol): - codeob = compile(source, filename, symbol, self.flags, True) - for feature in _features: - if codeob.co_flags & feature.compiler_flag: - self.flags |= feature.compiler_flag - return codeob - + 'Instances of this class behave much like the built-in compile\n function, but if one is used to compile text containing a future\n statement, it "remembers" and compiles all subsequent program texts\n with the statement in force.' 
+ def __init__(A):A.flags=PyCF_DONT_IMPLY_DEDENT + def __call__(A,source,filename,symbol): + B=compile(source,filename,symbol,A.flags,True) + for C in _features: + if B.co_flags&C.compiler_flag:A.flags|=C.compiler_flag + return B class CommandCompiler: - """Instances of this class have __call__ methods identical in - signature to compile_command; the difference is that if the - instance compiles program text containing a __future__ statement, - the instance 'remembers' and compiles all subsequent program texts - with the statement in force.""" - - def __init__(self,): - self.compiler = Compile() - - def __call__(self, source, filename="", symbol="single"): - r"""Compile a command and determine whether it is incomplete. - - Arguments: - - source -- the source string; may contain \n characters - filename -- optional filename from which source was read; - default "" - symbol -- optional grammar start symbol; "single" (default) or - "eval" - - Return value / exceptions raised: - - - Return a code object if the command is complete and valid - - Return None if the command is incomplete - - Raise SyntaxError, ValueError or OverflowError if the command is a - syntax error (OverflowError and ValueError can be produced by - malformed literals). - """ - return _maybe_compile(self.compiler, source, filename, symbol) + "Instances of this class have __call__ methods identical in\n signature to compile_command; the difference is that if the\n instance compiles program text containing a __future__ statement,\n the instance 'remembers' and compiles all subsequent program texts\n with the statement in force." 
+ def __init__(A):A.compiler=Compile() + def __call__(A,source,filename=_A,symbol=_B):'Compile a command and determine whether it is incomplete.\n\n Arguments:\n\n source -- the source string; may contain \\n characters\n filename -- optional filename from which source was read;\n default ""\n symbol -- optional grammar start symbol; "single" (default) or\n "eval"\n\n Return value / exceptions raised:\n\n - Return a code object if the command is complete and valid\n - Return None if the command is incomplete\n - Raise SyntaxError, ValueError or OverflowError if the command is a\n syntax error (OverflowError and ValueError can be produced by\n malformed literals).\n ';return _maybe_compile(A.compiler,source,filename,symbol) \ No newline at end of file diff --git a/Lib/collections/__init__.py b/Lib/collections/__init__.py index 59a2d520fea..3d3205c952c 100644 --- a/Lib/collections/__init__.py +++ b/Lib/collections/__init__.py @@ -1,1578 +1,545 @@ -'''This module implements specialized container datatypes providing -alternatives to Python's general purpose built-in containers, dict, -list, set, and tuple. 
- -* namedtuple factory function for creating tuple subclasses with named fields -* deque list-like container with fast appends and pops on either end -* ChainMap dict-like class for creating a single view of multiple mappings -* Counter dict subclass for counting hashable objects -* OrderedDict dict subclass that remembers the order entries were added -* defaultdict dict subclass that calls a factory function to supply missing values -* UserDict wrapper around dictionary objects for easier dict subclassing -* UserList wrapper around list objects for easier list subclassing -* UserString wrapper around string objects for easier string subclassing - -''' - -__all__ = [ - 'ChainMap', - 'Counter', - 'OrderedDict', - 'UserDict', - 'UserList', - 'UserString', - 'defaultdict', - 'deque', - 'namedtuple', -] - -import _collections_abc -import sys as _sys - -from itertools import chain as _chain -from itertools import repeat as _repeat -from itertools import starmap as _starmap +"This module implements specialized container datatypes providing\nalternatives to Python's general purpose built-in containers, dict,\nlist, set, and tuple.\n\n* namedtuple factory function for creating tuple subclasses with named fields\n* deque list-like container with fast appends and pops on either end\n* ChainMap dict-like class for creating a single view of multiple mappings\n* Counter dict subclass for counting hashable objects\n* OrderedDict dict subclass that remembers the order entries were added\n* defaultdict dict subclass that calls a factory function to supply missing values\n* UserDict wrapper around dictionary objects for easier dict subclassing\n* UserList wrapper around list objects for easier list subclassing\n* UserString wrapper around string objects for easier string subclassing\n\n" +_C='strict' +_B='data' +_A=None +__all__=['ChainMap','Counter','OrderedDict','UserDict','UserList','UserString','defaultdict','deque','namedtuple'] +import _collections_abc,sys as _sys +from 
itertools import chain as _chain,repeat as _repeat,starmap as _starmap from keyword import iskeyword as _iskeyword from operator import eq as _eq from operator import itemgetter as _itemgetter from reprlib import recursive_repr as _recursive_repr from _weakref import proxy as _proxy - -try: - from _collections import deque -except ImportError: - pass -else: - _collections_abc.MutableSequence.register(deque) - -try: - from _collections import defaultdict -except ImportError: - # FIXME: try to implement defaultdict in collections.rs rather than in Python - # I (coolreader18) couldn't figure out some class stuff with __new__ and - # __init__ and __missing__ and subclassing built-in types from Rust, so I went - # with this instead. - from ._defaultdict import defaultdict - - -################################################################################ -### OrderedDict -################################################################################ - +try:from _collections import deque +except ImportError:pass +else:_collections_abc.MutableSequence.register(deque) +try:from _collections import defaultdict +except ImportError:from._defaultdict import defaultdict class _OrderedDictKeysView(_collections_abc.KeysView): - - def __reversed__(self): - yield from reversed(self._mapping) - + def __reversed__(self):yield from reversed(self._mapping) class _OrderedDictItemsView(_collections_abc.ItemsView): - - def __reversed__(self): - for key in reversed(self._mapping): - yield (key, self._mapping[key]) - + def __reversed__(self): + for key in reversed(self._mapping):yield(key,self._mapping[key]) class _OrderedDictValuesView(_collections_abc.ValuesView): - - def __reversed__(self): - for key in reversed(self._mapping): - yield self._mapping[key] - -class _Link(object): - __slots__ = 'prev', 'next', 'key', '__weakref__' - + def __reversed__(self): + for key in reversed(self._mapping):yield self._mapping[key] +class _Link:__slots__='prev','next','key','__weakref__' class 
OrderedDict(dict): - 'Dictionary that remembers insertion order' - # An inherited dict maps keys to values. - # The inherited dict provides __getitem__, __len__, __contains__, and get. - # The remaining methods are order-aware. - # Big-O running times for all methods are the same as regular dictionaries. - - # The internal self.__map dict maps keys to links in a doubly linked list. - # The circular doubly linked list starts and ends with a sentinel element. - # The sentinel element never gets deleted (this simplifies the algorithm). - # The sentinel is in self.__hardroot with a weakref proxy in self.__root. - # The prev links are weakref proxies (to prevent circular references). - # Individual links are kept alive by the hard reference in self.__map. - # Those hard references disappear when a key is deleted from an OrderedDict. - - def __init__(self, other=(), /, **kwds): - '''Initialize an ordered dictionary. The signature is the same as - regular dictionaries. Keyword argument order is preserved. - ''' - try: - self.__root - except AttributeError: - self.__hardroot = _Link() - self.__root = root = _proxy(self.__hardroot) - root.prev = root.next = root - self.__map = {} - self.__update(other, **kwds) - - def __setitem__(self, key, value, - dict_setitem=dict.__setitem__, proxy=_proxy, Link=_Link): - 'od.__setitem__(i, y) <==> od[i]=y' - # Setting a new item creates a new link at the end of the linked list, - # and the inherited dictionary is updated with the new key/value pair. - if key not in self: - self.__map[key] = link = Link() - root = self.__root - last = root.prev - link.prev, link.next, link.key = last, root, key - last.next = link - root.prev = proxy(link) - dict_setitem(self, key, value) - - def __delitem__(self, key, dict_delitem=dict.__delitem__): - 'od.__delitem__(y) <==> del od[y]' - # Deleting an existing item uses self.__map to find the link which gets - # removed by updating the links in the predecessor and successor nodes. 
- dict_delitem(self, key) - link = self.__map.pop(key) - link_prev = link.prev - link_next = link.next - link_prev.next = link_next - link_next.prev = link_prev - link.prev = None - link.next = None - - def __iter__(self): - 'od.__iter__() <==> iter(od)' - # Traverse the linked list in order. - root = self.__root - curr = root.next - while curr is not root: - yield curr.key - curr = curr.next - - def __reversed__(self): - 'od.__reversed__() <==> reversed(od)' - # Traverse the linked list in reverse order. - root = self.__root - curr = root.prev - while curr is not root: - yield curr.key - curr = curr.prev - - def clear(self): - 'od.clear() -> None. Remove all items from od.' - root = self.__root - root.prev = root.next = root - self.__map.clear() - dict.clear(self) - - def popitem(self, last=True): - '''Remove and return a (key, value) pair from the dictionary. - - Pairs are returned in LIFO order if last is true or FIFO order if false. - ''' - if not self: - raise KeyError('dictionary is empty') - root = self.__root - if last: - link = root.prev - link_prev = link.prev - link_prev.next = root - root.prev = link_prev - else: - link = root.next - link_next = link.next - root.next = link_next - link_next.prev = root - key = link.key - del self.__map[key] - value = dict.pop(self, key) - return key, value - - def move_to_end(self, key, last=True): - '''Move an existing element to the end (or beginning if last is false). - - Raise KeyError if the element does not exist. 
- ''' - link = self.__map[key] - link_prev = link.prev - link_next = link.next - soft_link = link_next.prev - link_prev.next = link_next - link_next.prev = link_prev - root = self.__root - if last: - last = root.prev - link.prev = last - link.next = root - root.prev = soft_link - last.next = link - else: - first = root.next - link.prev = root - link.next = first - first.prev = soft_link - root.next = link - - def __sizeof__(self): - sizeof = _sys.getsizeof - n = len(self) + 1 # number of links including root - size = sizeof(self.__dict__) # instance dictionary - size += sizeof(self.__map) * 2 # internal dict and inherited dict - size += sizeof(self.__hardroot) * n # link objects - size += sizeof(self.__root) * n # proxy objects - return size - - update = __update = _collections_abc.MutableMapping.update - - def keys(self): - "D.keys() -> a set-like object providing a view on D's keys" - return _OrderedDictKeysView(self) - - def items(self): - "D.items() -> a set-like object providing a view on D's items" - return _OrderedDictItemsView(self) - - def values(self): - "D.values() -> an object providing a view on D's values" - return _OrderedDictValuesView(self) - - __ne__ = _collections_abc.MutableMapping.__ne__ - - __marker = object() - - def pop(self, key, default=__marker): - '''od.pop(k[,d]) -> v, remove specified key and return the corresponding - value. If key is not found, d is returned if given, otherwise KeyError - is raised. - - ''' - marker = self.__marker - result = dict.pop(self, key, marker) - if result is not marker: - # The same as in __delitem__(). - link = self.__map.pop(key) - link_prev = link.prev - link_next = link.next - link_prev.next = link_next - link_next.prev = link_prev - link.prev = None - link.next = None - return result - if default is marker: - raise KeyError(key) - return default - - def setdefault(self, key, default=None): - '''Insert key with a value of default if key is not in the dictionary. 
- - Return the value for key if key is in the dictionary, else default. - ''' - if key in self: - return self[key] - self[key] = default - return default - - @_recursive_repr() - def __repr__(self): - 'od.__repr__() <==> repr(od)' - if not self: - return '%s()' % (self.__class__.__name__,) - return '%s(%r)' % (self.__class__.__name__, list(self.items())) - - def __reduce__(self): - 'Return state information for pickling' - state = self.__getstate__() - if state: - if isinstance(state, tuple): - state, slots = state - else: - slots = {} - state = state.copy() - slots = slots.copy() - for k in vars(OrderedDict()): - state.pop(k, None) - slots.pop(k, None) - if slots: - state = state, slots - else: - state = state or None - return self.__class__, (), state, None, iter(self.items()) - - def copy(self): - 'od.copy() -> a shallow copy of od' - return self.__class__(self) - - @classmethod - def fromkeys(cls, iterable, value=None): - '''Create a new ordered dictionary with keys from iterable and values set to value. - ''' - self = cls() - for key in iterable: - self[key] = value - return self - - def __eq__(self, other): - '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive - while comparison to a regular mapping is order-insensitive. - - ''' - if isinstance(other, OrderedDict): - return dict.__eq__(self, other) and all(map(_eq, self, other)) - return dict.__eq__(self, other) - - def __ior__(self, other): - self.update(other) - return self - - def __or__(self, other): - if not isinstance(other, dict): - return NotImplemented - new = self.__class__(self) - new.update(other) - return new - - def __ror__(self, other): - if not isinstance(other, dict): - return NotImplemented - new = self.__class__(other) - new.update(self) - return new - - -try: - from _collections import OrderedDict -except ImportError: - # Leave the pure Python version in place. 
- pass - - -################################################################################ -### namedtuple -################################################################################ - -try: - from _collections import _tuplegetter -except ImportError: - _tuplegetter = lambda index, doc: property(_itemgetter(index), doc=doc) - -def namedtuple(typename, field_names, *, rename=False, defaults=None, module=None): - """Returns a new subclass of tuple with named fields. - - >>> Point = namedtuple('Point', ['x', 'y']) - >>> Point.__doc__ # docstring for the new class - 'Point(x, y)' - >>> p = Point(11, y=22) # instantiate with positional args or keywords - >>> p[0] + p[1] # indexable like a plain tuple - 33 - >>> x, y = p # unpack like a regular tuple - >>> x, y - (11, 22) - >>> p.x + p.y # fields also accessible by name - 33 - >>> d = p._asdict() # convert to a dictionary - >>> d['x'] - 11 - >>> Point(**d) # convert from a dictionary - Point(x=11, y=22) - >>> p._replace(x=100) # _replace() is like str.replace() but targets named fields - Point(x=100, y=22) - - """ - - # Validate the field names. At the user's option, either generate an error - # message or automatically replace the field name with a valid name. 
- if isinstance(field_names, str): - field_names = field_names.replace(',', ' ').split() - field_names = list(map(str, field_names)) - typename = _sys.intern(str(typename)) - - if rename: - seen = set() - for index, name in enumerate(field_names): - if (not name.isidentifier() - or _iskeyword(name) - or name.startswith('_') - or name in seen): - field_names[index] = f'_{index}' - seen.add(name) - - for name in [typename] + field_names: - if type(name) is not str: - raise TypeError('Type names and field names must be strings') - if not name.isidentifier(): - raise ValueError('Type names and field names must be valid ' - f'identifiers: {name!r}') - if _iskeyword(name): - raise ValueError('Type names and field names cannot be a ' - f'keyword: {name!r}') - - seen = set() - for name in field_names: - if name.startswith('_') and not rename: - raise ValueError('Field names cannot start with an underscore: ' - f'{name!r}') - if name in seen: - raise ValueError(f'Encountered duplicate field name: {name!r}') - seen.add(name) - - field_defaults = {} - if defaults is not None: - defaults = tuple(defaults) - if len(defaults) > len(field_names): - raise TypeError('Got more default values than field names') - field_defaults = dict(reversed(list(zip(reversed(field_names), - reversed(defaults))))) - - # Variables used in the methods and docstrings - field_names = tuple(map(_sys.intern, field_names)) - num_fields = len(field_names) - arg_list = ', '.join(field_names) - if num_fields == 1: - arg_list += ',' - repr_fmt = '(' + ', '.join(f'{name}=%r' for name in field_names) + ')' - tuple_new = tuple.__new__ - _dict, _tuple, _len, _map, _zip = dict, tuple, len, map, zip - - # Create all the named tuple methods to be added to the class namespace - - namespace = { - '_tuple_new': tuple_new, - '__builtins__': {}, - '__name__': f'namedtuple_{typename}', - } - code = f'lambda _cls, {arg_list}: _tuple_new(_cls, ({arg_list}))' - __new__ = eval(code, namespace) - __new__.__name__ = '__new__' - 
__new__.__doc__ = f'Create new instance of {typename}({arg_list})' - if defaults is not None: - __new__.__defaults__ = defaults - - @classmethod - def _make(cls, iterable): - result = tuple_new(cls, iterable) - if _len(result) != num_fields: - raise TypeError(f'Expected {num_fields} arguments, got {len(result)}') - return result - - _make.__func__.__doc__ = (f'Make a new {typename} object from a sequence ' - 'or iterable') - - def _replace(self, /, **kwds): - result = self._make(_map(kwds.pop, field_names, self)) - if kwds: - raise ValueError(f'Got unexpected field names: {list(kwds)!r}') - return result - - _replace.__doc__ = (f'Return a new {typename} object replacing specified ' - 'fields with new values') - - def __repr__(self): - 'Return a nicely formatted representation string' - return self.__class__.__name__ + repr_fmt % self - - def _asdict(self): - 'Return a new dict which maps field names to their values.' - return _dict(_zip(self._fields, self)) - - def __getnewargs__(self): - 'Return self as a plain tuple. Used by copy and pickle.' 
- return _tuple(self) - - # Modify function metadata to help with introspection and debugging - for method in ( - __new__, - _make.__func__, - _replace, - __repr__, - _asdict, - __getnewargs__, - ): - method.__qualname__ = f'{typename}.{method.__name__}' - - # Build-up the class namespace dictionary - # and use type() to build the result class - class_namespace = { - '__doc__': f'{typename}({arg_list})', - '__slots__': (), - '_fields': field_names, - '_field_defaults': field_defaults, - '__new__': __new__, - '_make': _make, - '_replace': _replace, - '__repr__': __repr__, - '_asdict': _asdict, - '__getnewargs__': __getnewargs__, - '__match_args__': field_names, - } - for index, name in enumerate(field_names): - doc = _sys.intern(f'Alias for field number {index}') - class_namespace[name] = _tuplegetter(index, doc) - - result = type(typename, (tuple,), class_namespace) - - # For pickling to work, the __module__ variable needs to be set to the frame - # where the named tuple is created. Bypass this step in environments where - # sys._getframe is not defined (Jython for example) or sys._getframe is not - # defined for arguments greater than 0 (IronPython), or where the user has - # specified a particular module. - if module is None: - try: - module = _sys._getframe(1).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): - pass - if module is not None: - result.__module__ = module - - return result - - -######################################################################## -### Counter -######################################################################## - -def _count_elements(mapping, iterable): - 'Tally elements from the iterable.' 
- mapping_get = mapping.get - for elem in iterable: - mapping[elem] = mapping_get(elem, 0) + 1 - -try: # Load C helper function if available - from _collections import _count_elements -except ImportError: - pass - + 'Dictionary that remembers insertion order' + def __init__(self,other=(),**kwds): + 'Initialize an ordered dictionary. The signature is the same as\n regular dictionaries. Keyword argument order is preserved.\n ' + try:self.__root + except AttributeError:self.__hardroot=_Link();self.__root=root=_proxy(self.__hardroot);root.prev=root.next=root;self.__map={} + self.__update(other,**kwds) + def __setitem__(self,key,value,dict_setitem=dict.__setitem__,proxy=_proxy,Link=_Link): + 'od.__setitem__(i, y) <==> od[i]=y' + if key not in self:self.__map[key]=link=Link();root=self.__root;last=root.prev;link.prev,link.next,link.key=last,root,key;last.next=link;root.prev=proxy(link) + dict_setitem(self,key,value) + def __delitem__(self,key,dict_delitem=dict.__delitem__):'od.__delitem__(y) <==> del od[y]';dict_delitem(self,key);link=self.__map.pop(key);link_prev=link.prev;link_next=link.next;link_prev.next=link_next;link_next.prev=link_prev;link.prev=_A;link.next=_A + def __iter__(self): + 'od.__iter__() <==> iter(od)';root=self.__root;curr=root.next + while curr is not root:yield curr.key;curr=curr.next + def __reversed__(self): + 'od.__reversed__() <==> reversed(od)';root=self.__root;curr=root.prev + while curr is not root:yield curr.key;curr=curr.prev + def clear(self):'od.clear() -> None. 
Remove all items from od.';root=self.__root;root.prev=root.next=root;self.__map.clear();dict.clear(self) + def popitem(self,last=True): + 'Remove and return a (key, value) pair from the dictionary.\n\n Pairs are returned in LIFO order if last is true or FIFO order if false.\n ' + if not self:raise KeyError('dictionary is empty') + root=self.__root + if last:link=root.prev;link_prev=link.prev;link_prev.next=root;root.prev=link_prev + else:link=root.next;link_next=link.next;root.next=link_next;link_next.prev=root + key=link.key;del self.__map[key];value=dict.pop(self,key);return key,value + def move_to_end(self,key,last=True): + 'Move an existing element to the end (or beginning if last is false).\n\n Raise KeyError if the element does not exist.\n ';link=self.__map[key];link_prev=link.prev;link_next=link.next;soft_link=link_next.prev;link_prev.next=link_next;link_next.prev=link_prev;root=self.__root + if last:last=root.prev;link.prev=last;link.next=root;root.prev=soft_link;last.next=link + else:first=root.next;link.prev=root;link.next=first;first.prev=soft_link;root.next=link + def __sizeof__(self):sizeof=_sys.getsizeof;n=len(self)+1;size=sizeof(self.__dict__);size+=sizeof(self.__map)*2;size+=sizeof(self.__hardroot)*n;size+=sizeof(self.__root)*n;return size + update=__update=_collections_abc.MutableMapping.update + def keys(self):"D.keys() -> a set-like object providing a view on D's keys";return _OrderedDictKeysView(self) + def items(self):"D.items() -> a set-like object providing a view on D's items";return _OrderedDictItemsView(self) + def values(self):"D.values() -> an object providing a view on D's values";return _OrderedDictValuesView(self) + __ne__=_collections_abc.MutableMapping.__ne__;__marker=object() + def pop(self,key,default=__marker): + 'od.pop(k[,d]) -> v, remove specified key and return the corresponding\n value. 
If key is not found, d is returned if given, otherwise KeyError\n is raised.\n\n ';marker=self.__marker;result=dict.pop(self,key,marker) + if result is not marker:link=self.__map.pop(key);link_prev=link.prev;link_next=link.next;link_prev.next=link_next;link_next.prev=link_prev;link.prev=_A;link.next=_A;return result + if default is marker:raise KeyError(key) + return default + def setdefault(self,key,default=_A): + 'Insert key with a value of default if key is not in the dictionary.\n\n Return the value for key if key is in the dictionary, else default.\n ' + if key in self:return self[key] + self[key]=default;return default + @_recursive_repr() + def __repr__(self): + 'od.__repr__() <==> repr(od)' + if not self:return'%s()'%(self.__class__.__name__,) + return'%s(%r)'%(self.__class__.__name__,list(self.items())) + def __reduce__(self): + 'Return state information for pickling';state=self.__getstate__() + if state: + if isinstance(state,tuple):state,slots=state + else:slots={} + state=state.copy();slots=slots.copy() + for k in vars(OrderedDict()):state.pop(k,_A);slots.pop(k,_A) + if slots:state=state,slots + else:state=state or _A + return self.__class__,(),state,_A,iter(self.items()) + def copy(self):'od.copy() -> a shallow copy of od';return self.__class__(self) + @classmethod + def fromkeys(cls,iterable,value=_A): + 'Create a new ordered dictionary with keys from iterable and values set to value.\n ';self=cls() + for key in iterable:self[key]=value + return self + def __eq__(self,other): + 'od.__eq__(y) <==> od==y. 
Comparison to another OD is order-sensitive\n while comparison to a regular mapping is order-insensitive.\n\n ' + if isinstance(other,OrderedDict):return dict.__eq__(self,other)and all(map(_eq,self,other)) + return dict.__eq__(self,other) + def __ior__(self,other):self.update(other);return self + def __or__(self,other): + if not isinstance(other,dict):return NotImplemented + new=self.__class__(self);new.update(other);return new + def __ror__(self,other): + if not isinstance(other,dict):return NotImplemented + new=self.__class__(other);new.update(self);return new +try:from _collections import OrderedDict +except ImportError:pass +try:from _collections import _tuplegetter +except ImportError:_tuplegetter=lambda index,doc:property(_itemgetter(index),doc=doc) +def namedtuple(typename,field_names,*,rename=False,defaults=_A,module=_A): + "Returns a new subclass of tuple with named fields.\n\n >>> Point = namedtuple('Point', ['x', 'y'])\n >>> Point.__doc__ # docstring for the new class\n 'Point(x, y)'\n >>> p = Point(11, y=22) # instantiate with positional args or keywords\n >>> p[0] + p[1] # indexable like a plain tuple\n 33\n >>> x, y = p # unpack like a regular tuple\n >>> x, y\n (11, 22)\n >>> p.x + p.y # fields also accessible by name\n 33\n >>> d = p._asdict() # convert to a dictionary\n >>> d['x']\n 11\n >>> Point(**d) # convert from a dictionary\n Point(x=11, y=22)\n >>> p._replace(x=100) # _replace() is like str.replace() but targets named fields\n Point(x=100, y=22)\n\n ";B='__new__';A='__name__' + if isinstance(field_names,str):field_names=field_names.replace(',',' ').split() + field_names=list(map(str,field_names));typename=_sys.intern(str(typename)) + if rename: + seen=set() + for(index,name)in enumerate(field_names): + if not name.isidentifier()or _iskeyword(name)or name.startswith('_')or name in seen:field_names[index]=f"_{index}" + seen.add(name) + for name in[typename]+field_names: + if type(name)is not str:raise TypeError('Type names and field names must 
be strings') + if not name.isidentifier():raise ValueError(f"Type names and field names must be valid identifiers: {name!r}") + if _iskeyword(name):raise ValueError(f"Type names and field names cannot be a keyword: {name!r}") + seen=set() + for name in field_names: + if name.startswith('_')and not rename:raise ValueError(f"Field names cannot start with an underscore: {name!r}") + if name in seen:raise ValueError(f"Encountered duplicate field name: {name!r}") + seen.add(name) + field_defaults={} + if defaults is not _A: + defaults=tuple(defaults) + if len(defaults)>len(field_names):raise TypeError('Got more default values than field names') + field_defaults=dict(reversed(list(zip(reversed(field_names),reversed(defaults))))) + field_names=tuple(map(_sys.intern,field_names));num_fields=len(field_names);arg_list=', '.join(field_names) + if num_fields==1:arg_list+=',' + repr_fmt='('+', '.join(f"{name}=%r"for name in field_names)+')';tuple_new=tuple.__new__;_dict,_tuple,_len,_map,_zip=dict,tuple,len,map,zip;namespace={'_tuple_new':tuple_new,'__builtins__':{},A:f"namedtuple_{typename}"};code=f"lambda _cls, {arg_list}: _tuple_new(_cls, ({arg_list}))";__new__=eval(code,namespace);__new__.__name__=B;__new__.__doc__=f"Create new instance of {typename}({arg_list})" + if defaults is not _A:__new__.__defaults__=defaults + @classmethod + def _make(cls,iterable): + result=tuple_new(cls,iterable) + if _len(result)!=num_fields:raise TypeError(f"Expected {num_fields} arguments, got {len(result)}") + return result + _make.__func__.__doc__=f"Make a new {typename} object from a sequence or iterable" + def _replace(self,**kwds): + result=self._make(_map(kwds.pop,field_names,self)) + if kwds:raise ValueError(f"Got unexpected field names: {list(kwds)!r}") + return result + _replace.__doc__=f"Return a new {typename} object replacing specified fields with new values" + def __repr__(self):'Return a nicely formatted representation string';return self.__class__.__name__+repr_fmt%self + def 
_asdict(self):'Return a new dict which maps field names to their values.';return _dict(_zip(self._fields,self)) + def __getnewargs__(self):'Return self as a plain tuple. Used by copy and pickle.';return _tuple(self) + for method in(__new__,_make.__func__,_replace,__repr__,_asdict,__getnewargs__):method.__qualname__=f"{typename}.{method.__name__}" + class_namespace={'__doc__':f"{typename}({arg_list})",'__slots__':(),'_fields':field_names,'_field_defaults':field_defaults,B:__new__,'_make':_make,'_replace':_replace,'__repr__':__repr__,'_asdict':_asdict,'__getnewargs__':__getnewargs__,'__match_args__':field_names} + for(index,name)in enumerate(field_names):doc=_sys.intern(f"Alias for field number {index}");class_namespace[name]=_tuplegetter(index,doc) + result=type(typename,(tuple,),class_namespace) + if module is _A: + try:module=_sys._getframe(1).f_globals.get(A,'__main__') + except(AttributeError,ValueError):pass + if module is not _A:result.__module__=module + return result +def _count_elements(mapping,iterable): + 'Tally elements from the iterable.';mapping_get=mapping.get + for elem in iterable:mapping[elem]=mapping_get(elem,0)+1 +try:from _collections import _count_elements +except ImportError:pass class Counter(dict): - '''Dict subclass for counting hashable items. Sometimes called a bag - or multiset. Elements are stored as dictionary keys and their counts - are stored as dictionary values. - - >>> c = Counter('abcdeabcdabcaba') # count elements from a string - - >>> c.most_common(3) # three most common elements - [('a', 5), ('b', 4), ('c', 3)] - >>> sorted(c) # list all unique elements - ['a', 'b', 'c', 'd', 'e'] - >>> ''.join(sorted(c.elements())) # list elements with repetitions - 'aaaaabbbbcccdde' - >>> sum(c.values()) # total of all counts - 15 - - >>> c['a'] # count of letter 'a' - 5 - >>> for elem in 'shazam': # update counts from an iterable - ... 
c[elem] += 1 # by adding 1 to each element's count - >>> c['a'] # now there are seven 'a' - 7 - >>> del c['b'] # remove all 'b' - >>> c['b'] # now there are zero 'b' - 0 - - >>> d = Counter('simsalabim') # make another counter - >>> c.update(d) # add in the second counter - >>> c['a'] # now there are nine 'a' - 9 - - >>> c.clear() # empty the counter - >>> c - Counter() - - Note: If a count is set to zero or reduced to zero, it will remain - in the counter until the entry is deleted or the counter is cleared: - - >>> c = Counter('aaabbc') - >>> c['b'] -= 2 # reduce the count of 'b' by two - >>> c.most_common() # 'b' is still in, but its count is zero - [('a', 3), ('c', 1), ('b', 0)] - - ''' - # References: - # http://en.wikipedia.org/wiki/Multiset - # http://www.gnu.org/software/smalltalk/manual-base/html_node/Bag.html - # http://www.demo2s.com/Tutorial/Cpp/0380__set-multiset/Catalog0380__set-multiset.htm - # http://code.activestate.com/recipes/259174/ - # Knuth, TAOCP Vol. II section 4.6.3 - - def __init__(self, iterable=None, /, **kwds): - '''Create a new, empty Counter object. And if given, count elements - from an input iterable. Or, initialize the count from another mapping - of elements to their counts. - - >>> c = Counter() # a new, empty counter - >>> c = Counter('gallahad') # a new counter from an iterable - >>> c = Counter({'a': 4, 'b': 2}) # a new counter from a mapping - >>> c = Counter(a=4, b=2) # a new counter from keyword args - - ''' - super().__init__() - self.update(iterable, **kwds) - - def __missing__(self, key): - 'The count of elements not in the Counter is zero.' - # Needed so that self[missing_item] does not raise KeyError - return 0 - - def total(self): - 'Sum of the counts' - return sum(self.values()) - - def most_common(self, n=None): - '''List the n most common elements and their counts from the most - common to the least. If n is None, then list all element counts. 
- - >>> Counter('abracadabra').most_common(3) - [('a', 5), ('b', 2), ('r', 2)] - - ''' - # Emulate Bag.sortedByCount from Smalltalk - if n is None: - return sorted(self.items(), key=_itemgetter(1), reverse=True) - - # Lazy import to speedup Python startup time - import heapq - return heapq.nlargest(n, self.items(), key=_itemgetter(1)) - - def elements(self): - '''Iterator over elements repeating each as many times as its count. - - >>> c = Counter('ABCABC') - >>> sorted(c.elements()) - ['A', 'A', 'B', 'B', 'C', 'C'] - - # Knuth's example for prime factors of 1836: 2**2 * 3**3 * 17**1 - >>> import math - >>> prime_factors = Counter({2: 2, 3: 3, 17: 1}) - >>> math.prod(prime_factors.elements()) - 1836 - - Note, if an element's count has been set to zero or is a negative - number, elements() will ignore it. - - ''' - # Emulate Bag.do from Smalltalk and Multiset.begin from C++. - return _chain.from_iterable(_starmap(_repeat, self.items())) - - # Override dict methods where necessary - - @classmethod - def fromkeys(cls, iterable, v=None): - # There is no equivalent method for counters because the semantics - # would be ambiguous in cases such as Counter.fromkeys('aaabbc', v=2). - # Initializing counters to zero values isn't necessary because zero - # is already the default value for counter lookups. Initializing - # to one is easily accomplished with Counter(set(iterable)). For - # more exotic cases, create a dictionary first using a dictionary - # comprehension or dict.fromkeys(). - raise NotImplementedError( - 'Counter.fromkeys() is undefined. Use Counter(iterable) instead.') - - def update(self, iterable=None, /, **kwds): - '''Like dict.update() but add counts instead of replacing them. - - Source can be an iterable, a dictionary, or another Counter instance. 
- - >>> c = Counter('which') - >>> c.update('witch') # add elements from another iterable - >>> d = Counter('watch') - >>> c.update(d) # add elements from another counter - >>> c['h'] # four 'h' in which, witch, and watch - 4 - - ''' - # The regular dict.update() operation makes no sense here because the - # replace behavior results in the some of original untouched counts - # being mixed-in with all of the other counts for a mismash that - # doesn't have a straight-forward interpretation in most counting - # contexts. Instead, we implement straight-addition. Both the inputs - # and outputs are allowed to contain zero and negative counts. - - if iterable is not None: - if isinstance(iterable, _collections_abc.Mapping): - if self: - self_get = self.get - for elem, count in iterable.items(): - self[elem] = count + self_get(elem, 0) - else: - # fast path when counter is empty - super().update(iterable) - else: - _count_elements(self, iterable) - if kwds: - self.update(kwds) - - def subtract(self, iterable=None, /, **kwds): - '''Like dict.update() but subtracts counts instead of replacing them. - Counts can be reduced below zero. Both the inputs and outputs are - allowed to contain zero and negative counts. - - Source can be an iterable, a dictionary, or another Counter instance. - - >>> c = Counter('which') - >>> c.subtract('witch') # subtract elements from another iterable - >>> c.subtract(Counter('watch')) # subtract elements from another counter - >>> c['h'] # 2 in which, minus 1 in witch, minus 1 in watch - 0 - >>> c['w'] # 1 in which, minus 1 in witch, minus 1 in watch - -1 - - ''' - if iterable is not None: - self_get = self.get - if isinstance(iterable, _collections_abc.Mapping): - for elem, count in iterable.items(): - self[elem] = self_get(elem, 0) - count - else: - for elem in iterable: - self[elem] = self_get(elem, 0) - 1 - if kwds: - self.subtract(kwds) - - def copy(self): - 'Return a shallow copy.' 
- return self.__class__(self) - - def __reduce__(self): - return self.__class__, (dict(self),) - - def __delitem__(self, elem): - 'Like dict.__delitem__() but does not raise KeyError for missing values.' - if elem in self: - super().__delitem__(elem) - - def __repr__(self): - if not self: - return f'{self.__class__.__name__}()' - try: - # dict() preserves the ordering returned by most_common() - d = dict(self.most_common()) - except TypeError: - # handle case where values are not orderable - d = dict(self) - return f'{self.__class__.__name__}({d!r})' - - # Multiset-style mathematical operations discussed in: - # Knuth TAOCP Volume II section 4.6.3 exercise 19 - # and at http://en.wikipedia.org/wiki/Multiset - # - # Outputs guaranteed to only include positive counts. - # - # To strip negative and zero counts, add-in an empty counter: - # c += Counter() - # - # Results are ordered according to when an element is first - # encountered in the left operand and then by the order - # encountered in the right operand. - # - # When the multiplicities are all zero or one, multiset operations - # are guaranteed to be equivalent to the corresponding operations - # for regular sets. - # Given counter multisets such as: - # cp = Counter(a=1, b=0, c=1) - # cq = Counter(c=1, d=0, e=1) - # The corresponding regular sets would be: - # sp = {'a', 'c'} - # sq = {'c', 'e'} - # All of the following relations would hold: - # set(cp + cq) == sp | sq - # set(cp - cq) == sp - sq - # set(cp | cq) == sp | sq - # set(cp & cq) == sp & sq - # (cp == cq) == (sp == sq) - # (cp != cq) == (sp != sq) - # (cp <= cq) == (sp <= sq) - # (cp < cq) == (sp < sq) - # (cp >= cq) == (sp >= sq) - # (cp > cq) == (sp > sq) - - def __eq__(self, other): - 'True if all counts agree. Missing counts are treated as zero.' - if not isinstance(other, Counter): - return NotImplemented - return all(self[e] == other[e] for c in (self, other) for e in c) - - def __ne__(self, other): - 'True if any counts disagree. 
Missing counts are treated as zero.' - if not isinstance(other, Counter): - return NotImplemented - return not self == other - - def __le__(self, other): - 'True if all counts in self are a subset of those in other.' - if not isinstance(other, Counter): - return NotImplemented - return all(self[e] <= other[e] for c in (self, other) for e in c) - - def __lt__(self, other): - 'True if all counts in self are a proper subset of those in other.' - if not isinstance(other, Counter): - return NotImplemented - return self <= other and self != other - - def __ge__(self, other): - 'True if all counts in self are a superset of those in other.' - if not isinstance(other, Counter): - return NotImplemented - return all(self[e] >= other[e] for c in (self, other) for e in c) - - def __gt__(self, other): - 'True if all counts in self are a proper superset of those in other.' - if not isinstance(other, Counter): - return NotImplemented - return self >= other and self != other - - def __add__(self, other): - '''Add counts from two counters. - - >>> Counter('abbb') + Counter('bcc') - Counter({'b': 4, 'c': 2, 'a': 1}) - - ''' - if not isinstance(other, Counter): - return NotImplemented - result = Counter() - for elem, count in self.items(): - newcount = count + other[elem] - if newcount > 0: - result[elem] = newcount - for elem, count in other.items(): - if elem not in self and count > 0: - result[elem] = count - return result - - def __sub__(self, other): - ''' Subtract count, but keep only results with positive counts. 
- - >>> Counter('abbbc') - Counter('bccd') - Counter({'b': 2, 'a': 1}) - - ''' - if not isinstance(other, Counter): - return NotImplemented - result = Counter() - for elem, count in self.items(): - newcount = count - other[elem] - if newcount > 0: - result[elem] = newcount - for elem, count in other.items(): - if elem not in self and count < 0: - result[elem] = 0 - count - return result - - def __or__(self, other): - '''Union is the maximum of value in either of the input counters. - - >>> Counter('abbb') | Counter('bcc') - Counter({'b': 3, 'c': 2, 'a': 1}) - - ''' - if not isinstance(other, Counter): - return NotImplemented - result = Counter() - for elem, count in self.items(): - other_count = other[elem] - newcount = other_count if count < other_count else count - if newcount > 0: - result[elem] = newcount - for elem, count in other.items(): - if elem not in self and count > 0: - result[elem] = count - return result - - def __and__(self, other): - ''' Intersection is the minimum of corresponding counts. - - >>> Counter('abbb') & Counter('bcc') - Counter({'b': 1}) - - ''' - if not isinstance(other, Counter): - return NotImplemented - result = Counter() - for elem, count in self.items(): - other_count = other[elem] - newcount = count if count < other_count else other_count - if newcount > 0: - result[elem] = newcount - return result - - def __pos__(self): - 'Adds an empty counter, effectively stripping negative and zero counts' - result = Counter() - for elem, count in self.items(): - if count > 0: - result[elem] = count - return result - - def __neg__(self): - '''Subtracts from an empty counter. Strips positive and zero counts, - and flips the sign on negative counts. 
- - ''' - result = Counter() - for elem, count in self.items(): - if count < 0: - result[elem] = 0 - count - return result - - def _keep_positive(self): - '''Internal method to strip elements with a negative or zero count''' - nonpositive = [elem for elem, count in self.items() if not count > 0] - for elem in nonpositive: - del self[elem] - return self - - def __iadd__(self, other): - '''Inplace add from another counter, keeping only positive counts. - - >>> c = Counter('abbb') - >>> c += Counter('bcc') - >>> c - Counter({'b': 4, 'c': 2, 'a': 1}) - - ''' - for elem, count in other.items(): - self[elem] += count - return self._keep_positive() - - def __isub__(self, other): - '''Inplace subtract counter, but keep only results with positive counts. - - >>> c = Counter('abbbc') - >>> c -= Counter('bccd') - >>> c - Counter({'b': 2, 'a': 1}) - - ''' - for elem, count in other.items(): - self[elem] -= count - return self._keep_positive() - - def __ior__(self, other): - '''Inplace union is the maximum of value from either counter. - - >>> c = Counter('abbb') - >>> c |= Counter('bcc') - >>> c - Counter({'b': 3, 'c': 2, 'a': 1}) - - ''' - for elem, other_count in other.items(): - count = self[elem] - if other_count > count: - self[elem] = other_count - return self._keep_positive() - - def __iand__(self, other): - '''Inplace intersection is the minimum of corresponding counts. - - >>> c = Counter('abbb') - >>> c &= Counter('bcc') - >>> c - Counter({'b': 1}) - - ''' - for elem, count in self.items(): - other_count = other[elem] - if other_count < count: - self[elem] = other_count - return self._keep_positive() - - -######################################################################## -### ChainMap -######################################################################## - + "Dict subclass for counting hashable items. Sometimes called a bag\n or multiset. 
class Counter(dict):
    """Dict subclass for counting hashable items.

    Elements are stored as dictionary keys and their counts are stored
    as dictionary values.

    >>> c = Counter('abracadabra')
    >>> c.most_common(3)
    [('a', 5), ('b', 2), ('r', 2)]
    >>> sorted(c.elements())[:3]
    ['a', 'a', 'a']

    Note: a count that is set or reduced to zero stays in the counter
    until the entry is deleted or the counter is cleared.
    """

    def __init__(self, iterable=None, **kwds):
        """Create a new, empty Counter object.  If given, count elements
        from an input iterable, or initialize counts from another mapping
        of elements to their counts.
        """
        super().__init__()
        self.update(iterable, **kwds)

    def __missing__(self, key):
        'The count of elements not in the Counter is zero.'
        return 0

    def total(self):
        'Sum of the counts.'
        return sum(self.values())

    def most_common(self, n=None):
        """List the n most common elements and their counts, most common
        first.  If n is None, list all element counts.
        """
        if n is None:
            return sorted(self.items(), key=_itemgetter(1), reverse=True)
        import heapq
        return heapq.nlargest(n, self.items(), key=_itemgetter(1))

    def elements(self):
        """Iterator over elements repeating each as many times as its count.

        Elements with a zero or negative count are ignored.
        """
        return _chain.from_iterable(_starmap(_repeat, self.items()))

    @classmethod
    def fromkeys(cls, iterable, v=None):
        # dict.fromkeys would give every element the same count, which is
        # never what a Counter user wants, so it is deliberately disabled.
        raise NotImplementedError(
            'Counter.fromkeys() is undefined.  Use Counter(iterable) instead.')

    def update(self, iterable=None, **kwds):
        """Like dict.update() but add counts instead of replacing them.

        Source can be an iterable, a dictionary, or another Counter.
        """
        if iterable is not None:
            if isinstance(iterable, _collections_abc.Mapping):
                if self:
                    self_get = self.get
                    for elem, count in iterable.items():
                        self[elem] = count + self_get(elem, 0)
                else:
                    # Fast path: counter is empty, so a plain dict update
                    # copies the counts directly.
                    super().update(iterable)
            else:
                # Inline counting loop (portable replacement for the
                # C-accelerated _count_elements helper).
                self_get = self.get
                for elem in iterable:
                    self[elem] = self_get(elem, 0) + 1
        if kwds:
            self.update(kwds)

    def subtract(self, iterable=None, **kwds):
        """Like dict.update() but subtract counts instead of replacing them.

        Counts may go below zero; inputs and outputs may contain zero and
        negative counts.
        """
        if iterable is not None:
            self_get = self.get
            if isinstance(iterable, _collections_abc.Mapping):
                for elem, count in iterable.items():
                    self[elem] = self_get(elem, 0) - count
            else:
                for elem in iterable:
                    self[elem] = self_get(elem, 0) - 1
        if kwds:
            self.subtract(kwds)

    def copy(self):
        'Return a shallow copy.'
        return self.__class__(self)

    def __reduce__(self):
        return self.__class__, (dict(self),)

    def __delitem__(self, elem):
        'Like dict.__delitem__() but does not raise KeyError for missing values.'
        if elem in self:
            super().__delitem__(elem)

    def __repr__(self):
        if not self:
            return f"{self.__class__.__name__}()"
        try:
            # Show counts in most-common order when the values sort.
            d = dict(self.most_common())
        except TypeError:
            # Unsortable counts: fall back to insertion order.
            d = dict(self)
        return f"{self.__class__.__name__}({d!r})"

    def __eq__(self, other):
        'True if all counts agree.  Missing counts are treated as zero.'
        if not isinstance(other, Counter):
            return NotImplemented
        return all(self[e] == other[e] for c in (self, other) for e in c)

    def __ne__(self, other):
        'True if any counts disagree.  Missing counts are treated as zero.'
        if not isinstance(other, Counter):
            return NotImplemented
        return not self == other

    def __le__(self, other):
        'True if all counts in self are a subset of those in other.'
        if not isinstance(other, Counter):
            return NotImplemented
        return all(self[e] <= other[e] for c in (self, other) for e in c)

    def __lt__(self, other):
        'True if all counts in self are a proper subset of those in other.'
        if not isinstance(other, Counter):
            return NotImplemented
        return self <= other and self != other

    def __ge__(self, other):
        'True if all counts in self are a superset of those in other.'
        if not isinstance(other, Counter):
            return NotImplemented
        return all(self[e] >= other[e] for c in (self, other) for e in c)

    def __gt__(self, other):
        'True if all counts in self are a proper superset of those in other.'
        if not isinstance(other, Counter):
            return NotImplemented
        return self >= other and self != other

    def __add__(self, other):
        """Add counts from two counters.

        >>> Counter('abbb') + Counter('bcc')
        Counter({'b': 4, 'c': 2, 'a': 1})
        """
        if not isinstance(other, Counter):
            return NotImplemented
        result = Counter()
        for elem, count in self.items():
            newcount = count + other[elem]
            if newcount > 0:
                result[elem] = newcount
        for elem, count in other.items():
            if elem not in self and count > 0:
                result[elem] = count
        return result

    def __sub__(self, other):
        """Subtract counts, keeping only results with positive counts.

        >>> Counter('abbbc') - Counter('bccd')
        Counter({'b': 2, 'a': 1})
        """
        if not isinstance(other, Counter):
            return NotImplemented
        result = Counter()
        for elem, count in self.items():
            newcount = count - other[elem]
            if newcount > 0:
                result[elem] = newcount
        for elem, count in other.items():
            if elem not in self and count < 0:
                result[elem] = 0 - count
        return result

    def __or__(self, other):
        """Union is the maximum of value in either of the input counters.

        >>> Counter('abbb') | Counter('bcc')
        Counter({'b': 3, 'c': 2, 'a': 1})
        """
        if not isinstance(other, Counter):
            return NotImplemented
        result = Counter()
        for elem, count in self.items():
            other_count = other[elem]
            newcount = other_count if count < other_count else count
            if newcount > 0:
                result[elem] = newcount
        for elem, count in other.items():
            if elem not in self and count > 0:
                result[elem] = count
        return result

    def __and__(self, other):
        """Intersection is the minimum of corresponding counts.

        >>> Counter('abbb') & Counter('bcc')
        Counter({'b': 1})
        """
        if not isinstance(other, Counter):
            return NotImplemented
        result = Counter()
        for elem, count in self.items():
            other_count = other[elem]
            newcount = count if count < other_count else other_count
            if newcount > 0:
                result[elem] = newcount
        return result

    def __pos__(self):
        'Adds an empty counter, effectively stripping negative and zero counts.'
        result = Counter()
        for elem, count in self.items():
            if count > 0:
                result[elem] = count
        return result

    def __neg__(self):
        """Subtracts from an empty counter.  Strips positive and zero counts,
        and flips the sign on negative counts.
        """
        result = Counter()
        for elem, count in self.items():
            if count < 0:
                result[elem] = 0 - count
        return result

    def _keep_positive(self):
        'Internal method to strip elements with a negative or zero count.'
        nonpositive = [elem for elem, count in self.items() if not count > 0]
        for elem in nonpositive:
            del self[elem]
        return self

    def __iadd__(self, other):
        """Inplace add from another counter, keeping only positive counts.

        >>> c = Counter('abbb'); c += Counter('bcc'); c
        Counter({'b': 4, 'c': 2, 'a': 1})
        """
        for elem, count in other.items():
            self[elem] += count
        return self._keep_positive()

    def __isub__(self, other):
        """Inplace subtract counter, keeping only results with positive counts.

        >>> c = Counter('abbbc'); c -= Counter('bccd'); c
        Counter({'b': 2, 'a': 1})
        """
        for elem, count in other.items():
            self[elem] -= count
        return self._keep_positive()

    def __ior__(self, other):
        """Inplace union is the maximum of value from either counter.

        >>> c = Counter('abbb'); c |= Counter('bcc'); c
        Counter({'b': 3, 'c': 2, 'a': 1})
        """
        for elem, other_count in other.items():
            count = self[elem]
            if other_count > count:
                self[elem] = other_count
        return self._keep_positive()

    def __iand__(self, other):
        """Inplace intersection is the minimum of corresponding counts.

        >>> c = Counter('abbb'); c &= Counter('bcc'); c
        Counter({'b': 1})
        """
        for elem, count in self.items():
            other_count = other[elem]
            if other_count < count:
                self[elem] = other_count
        return self._keep_positive()
class UserList(_collections_abc.MutableSequence):
    """A more or less complete user-defined wrapper around list objects.

    The wrapped list is exposed as the ``data`` attribute.
    """

    def __init__(self, initlist=None):
        self.data = []
        if initlist is not None:
            if type(initlist) == type(self.data):
                # Plain list: copy the contents, not the reference.
                self.data[:] = initlist
            elif isinstance(initlist, UserList):
                self.data[:] = initlist.data[:]
            else:
                self.data = list(initlist)

    def __repr__(self):
        return repr(self.data)

    # Comparisons delegate to the underlying list, unwrapping UserList
    # operands via __cast so UserList can be compared with plain lists.
    def __lt__(self, other):
        return self.data < self.__cast(other)

    def __le__(self, other):
        return self.data <= self.__cast(other)

    def __eq__(self, other):
        return self.data == self.__cast(other)

    def __gt__(self, other):
        return self.data > self.__cast(other)

    def __ge__(self, other):
        return self.data >= self.__cast(other)

    def __cast(self, other):
        return other.data if isinstance(other, UserList) else other

    def __contains__(self, item):
        return item in self.data

    def __len__(self):
        return len(self.data)

    def __getitem__(self, i):
        if isinstance(i, slice):
            # Slices return a new instance of the same class.
            return self.__class__(self.data[i])
        else:
            return self.data[i]

    def __setitem__(self, i, item):
        self.data[i] = item

    def __delitem__(self, i):
        del self.data[i]

    def __add__(self, other):
        if isinstance(other, UserList):
            return self.__class__(self.data + other.data)
        elif isinstance(other, type(self.data)):
            return self.__class__(self.data + other)
        return self.__class__(self.data + list(other))

    def __radd__(self, other):
        if isinstance(other, UserList):
            return self.__class__(other.data + self.data)
        elif isinstance(other, type(self.data)):
            return self.__class__(other + self.data)
        return self.__class__(list(other) + self.data)

    def __iadd__(self, other):
        if isinstance(other, UserList):
            self.data += other.data
        elif isinstance(other, type(self.data)):
            self.data += other
        else:
            self.data += list(other)
        return self

    def __mul__(self, n):
        return self.__class__(self.data * n)

    __rmul__ = __mul__

    def __imul__(self, n):
        self.data *= n
        return self

    def __copy__(self):
        inst = self.__class__.__new__(self.__class__)
        inst.__dict__.update(self.__dict__)
        # Create a copy and avoid triggering descriptors.
        inst.__dict__["data"] = self.__dict__["data"][:]
        return inst

    def append(self, item):
        self.data.append(item)

    def insert(self, i, item):
        self.data.insert(i, item)

    def pop(self, i=-1):
        return self.data.pop(i)

    def remove(self, item):
        self.data.remove(item)

    def clear(self):
        self.data.clear()

    def copy(self):
        return self.__class__(self)

    def count(self, item):
        return self.data.count(item)

    def index(self, item, *args):
        return self.data.index(item, *args)

    def reverse(self):
        self.data.reverse()

    def sort(self, /, *args, **kwds):
        self.data.sort(*args, **kwds)

    def extend(self, other):
        if isinstance(other, UserList):
            self.data.extend(other.data)
        else:
            self.data.extend(other)
class UserString(_collections_abc.Sequence):
    """A more or less complete user-defined wrapper around string objects.

    The wrapped string is exposed as the ``data`` attribute; methods that
    return a string return a new instance of the same class.
    """

    def __init__(self, seq):
        if isinstance(seq, str):
            self.data = seq
        elif isinstance(seq, UserString):
            self.data = seq.data[:]
        else:
            self.data = str(seq)

    def __str__(self):
        return str(self.data)

    def __repr__(self):
        return repr(self.data)

    def __int__(self):
        return int(self.data)

    def __float__(self):
        return float(self.data)

    def __complex__(self):
        return complex(self.data)

    def __hash__(self):
        return hash(self.data)

    def __getnewargs__(self):
        return (self.data[:],)

    # Comparisons unwrap a UserString operand so instances compare
    # correctly with both plain strings and other UserStrings.
    def __eq__(self, string):
        if isinstance(string, UserString):
            return self.data == string.data
        return self.data == string

    def __lt__(self, string):
        if isinstance(string, UserString):
            return self.data < string.data
        return self.data < string

    def __le__(self, string):
        if isinstance(string, UserString):
            return self.data <= string.data
        return self.data <= string

    def __gt__(self, string):
        if isinstance(string, UserString):
            return self.data > string.data
        return self.data > string

    def __ge__(self, string):
        if isinstance(string, UserString):
            return self.data >= string.data
        return self.data >= string

    def __contains__(self, char):
        if isinstance(char, UserString):
            char = char.data
        return char in self.data

    def __len__(self):
        return len(self.data)

    def __getitem__(self, index):
        return self.__class__(self.data[index])

    def __add__(self, other):
        if isinstance(other, UserString):
            return self.__class__(self.data + other.data)
        elif isinstance(other, str):
            return self.__class__(self.data + other)
        return self.__class__(self.data + str(other))

    def __radd__(self, other):
        if isinstance(other, str):
            return self.__class__(other + self.data)
        return self.__class__(str(other) + self.data)

    def __mul__(self, n):
        return self.__class__(self.data * n)

    __rmul__ = __mul__

    def __mod__(self, args):
        return self.__class__(self.data % args)

    def __rmod__(self, template):
        return self.__class__(str(template) % self)

    # The following methods are defined in alphabetical order:
    def capitalize(self):
        return self.__class__(self.data.capitalize())

    def casefold(self):
        return self.__class__(self.data.casefold())

    def center(self, width, *args):
        return self.__class__(self.data.center(width, *args))

    def count(self, sub, start=0, end=_sys.maxsize):
        if isinstance(sub, UserString):
            sub = sub.data
        return self.data.count(sub, start, end)

    def removeprefix(self, prefix, /):
        if isinstance(prefix, UserString):
            prefix = prefix.data
        return self.__class__(self.data.removeprefix(prefix))

    def removesuffix(self, suffix, /):
        if isinstance(suffix, UserString):
            suffix = suffix.data
        return self.__class__(self.data.removesuffix(suffix))

    def encode(self, encoding='utf-8', errors='strict'):
        # None arguments fall back to the defaults, matching str.encode.
        encoding = 'utf-8' if encoding is None else encoding
        errors = 'strict' if errors is None else errors
        return self.data.encode(encoding, errors)

    def endswith(self, suffix, start=0, end=_sys.maxsize):
        return self.data.endswith(suffix, start, end)

    def expandtabs(self, tabsize=8):
        return self.__class__(self.data.expandtabs(tabsize))

    def find(self, sub, start=0, end=_sys.maxsize):
        if isinstance(sub, UserString):
            sub = sub.data
        return self.data.find(sub, start, end)

    def format(self, /, *args, **kwds):
        return self.data.format(*args, **kwds)

    def format_map(self, mapping):
        return self.data.format_map(mapping)

    def index(self, sub, start=0, end=_sys.maxsize):
        return self.data.index(sub, start, end)

    def isalpha(self):
        return self.data.isalpha()

    def isalnum(self):
        return self.data.isalnum()

    def isascii(self):
        return self.data.isascii()

    def isdecimal(self):
        return self.data.isdecimal()

    def isdigit(self):
        return self.data.isdigit()

    def isidentifier(self):
        return self.data.isidentifier()

    def islower(self):
        return self.data.islower()

    def isnumeric(self):
        return self.data.isnumeric()

    def isprintable(self):
        return self.data.isprintable()

    def isspace(self):
        return self.data.isspace()

    def istitle(self):
        return self.data.istitle()

    def isupper(self):
        return self.data.isupper()

    def join(self, seq):
        return self.data.join(seq)

    def ljust(self, width, *args):
        return self.__class__(self.data.ljust(width, *args))

    def lower(self):
        return self.__class__(self.data.lower())

    def lstrip(self, chars=None):
        return self.__class__(self.data.lstrip(chars))

    maketrans = str.maketrans

    def partition(self, sep):
        return self.data.partition(sep)

    def replace(self, old, new, maxsplit=-1):
        if isinstance(old, UserString):
            old = old.data
        if isinstance(new, UserString):
            new = new.data
        return self.__class__(self.data.replace(old, new, maxsplit))

    def rfind(self, sub, start=0, end=_sys.maxsize):
        if isinstance(sub, UserString):
            sub = sub.data
        return self.data.rfind(sub, start, end)

    def rindex(self, sub, start=0, end=_sys.maxsize):
        return self.data.rindex(sub, start, end)

    def rjust(self, width, *args):
        return self.__class__(self.data.rjust(width, *args))

    def rpartition(self, sep):
        return self.data.rpartition(sep)

    def rstrip(self, chars=None):
        return self.__class__(self.data.rstrip(chars))

    def split(self, sep=None, maxsplit=-1):
        return self.data.split(sep, maxsplit)

    def rsplit(self, sep=None, maxsplit=-1):
        return self.data.rsplit(sep, maxsplit)

    def splitlines(self, keepends=False):
        return self.data.splitlines(keepends)

    def startswith(self, prefix, start=0, end=_sys.maxsize):
        return self.data.startswith(prefix, start, end)

    def strip(self, chars=None):
        return self.__class__(self.data.strip(chars))

    def swapcase(self):
        return self.__class__(self.data.swapcase())

    def title(self):
        return self.__class__(self.data.title())

    def translate(self, *args):
        return self.__class__(self.data.translate(*args))

    def upper(self):
        return self.__class__(self.data.upper())

    def zfill(self, width):
        return self.__class__(self.data.zfill(width))
_A = None  # module-level alias for None kept for compatibility

from reprlib import recursive_repr as _recursive_repr


class defaultdict(dict):
    """dict subclass that calls a factory function to supply missing values.

    The first positional argument, if present, is the default factory
    (a callable or None); remaining arguments are passed to dict().
    """

    def __init__(self, *args, **kwargs):
        if args:
            factory = args[0]
            if factory is not _A and not callable(factory):
                raise TypeError('first argument must be callable or None')
            args = args[1:]
        else:
            factory = _A
        super().__init__(*args, **kwargs)
        self.default_factory = factory

    def __missing__(self, key):
        # Invoked by dict.__getitem__ on a missing key: produce, store,
        # and return the default value, or raise if there is no factory.
        if self.default_factory is _A:
            raise KeyError(key)
        value = self.default_factory()
        self[key] = value
        return value

    @_recursive_repr()
    def __repr_factory(factory):
        # Guarded by recursive_repr so a self-referential factory
        # cannot cause infinite recursion.
        return repr(factory)

    def __repr__(self):
        return f"{type(self).__name__}({defaultdict.__repr_factory(self.default_factory)}, {dict.__repr__(self)})"

    def copy(self):
        return type(self)(self.default_factory, self)

    __copy__ = copy

    def __reduce__(self):
        factory_args = (self.default_factory,) if self.default_factory is not _A else ()
        return type(self), factory_args, _A, _A, iter(self.items())

    def __or__(self, other):
        if not isinstance(other, dict):
            return NotImplemented
        merged = defaultdict(self.default_factory, self)
        merged.update(other)
        return merged

    def __ror__(self, other):
        if not isinstance(other, dict):
            return NotImplemented
        merged = defaultdict(self.default_factory, other)
        merged.update(self)
        return merged


defaultdict.__module__ = 'collections'
"""Conversion functions between RGB and other color systems.

This modules provides two functions for each color system ABC:

  rgb_to_abc(r, g, b) --> a, b, c
  abc_to_rgb(a, b, c) --> r, g, b

All inputs and outputs are triples of floats in the range [0.0...1.0]
(with the exception of I and Q, which covers a slightly larger range).
Inputs outside the valid range may cause exceptions or invalid outputs.

Supported color systems:
RGB: Red, Green, Blue components
YIQ: Luminance, Chrominance (used by composite video signals)
HLS: Hue, Luminance, Saturation
HSV: Hue, Saturation, Value
"""

__all__ = ["rgb_to_yiq", "yiq_to_rgb", "rgb_to_hls", "hls_to_rgb",
           "rgb_to_hsv", "hsv_to_rgb"]

# Fractional constants shared by the hue helpers below.
ONE_THIRD = 1.0 / 3.0
ONE_SIXTH = 1.0 / 6.0
TWO_THIRD = 2.0 / 3.0


def rgb_to_yiq(r, g, b):
    """Convert RGB to YIQ (FCC/NTSC luminance + chrominance)."""
    y = 0.30 * r + 0.59 * g + 0.11 * b
    i = 0.74 * (r - y) - 0.27 * (b - y)
    q = 0.48 * (r - y) + 0.41 * (b - y)
    return (y, i, q)


def yiq_to_rgb(y, i, q):
    """Convert YIQ back to RGB, clamping each channel to [0.0, 1.0]."""
    r = y + 0.9468822170900693 * i + 0.6235565819861433 * q
    g = y - 0.27478764629897834 * i - 0.6356910791873801 * q
    b = y - 1.1085450346420322 * i + 1.7090069284064666 * q
    # Clamp out-of-gamut results into the valid range.
    r = min(1.0, max(0.0, r))
    g = min(1.0, max(0.0, g))
    b = min(1.0, max(0.0, b))
    return (r, g, b)


def rgb_to_hls(r, g, b):
    """Convert RGB to HLS (hue, lightness, saturation)."""
    maxc = max(r, g, b)
    minc = min(r, g, b)
    sumc = maxc + minc
    rangec = maxc - minc
    l = sumc / 2.0
    if minc == maxc:
        # Achromatic: hue and saturation are undefined, report zero.
        return 0.0, l, 0.0
    if l <= 0.5:
        s = rangec / sumc
    else:
        s = rangec / (2.0 - sumc)
    rc = (maxc - r) / rangec
    gc = (maxc - g) / rangec
    bc = (maxc - b) / rangec
    if r == maxc:
        h = bc - gc
    elif g == maxc:
        h = 2.0 + rc - bc
    else:
        h = 4.0 + gc - rc
    h = (h / 6.0) % 1.0
    return h, l, s


def hls_to_rgb(h, l, s):
    """Convert HLS back to RGB."""
    if s == 0.0:
        return l, l, l
    if l <= 0.5:
        m2 = l * (1.0 + s)
    else:
        m2 = l + s - (l * s)
    m1 = 2.0 * l - m2
    return (_v(m1, m2, h + ONE_THIRD), _v(m1, m2, h), _v(m1, m2, h - ONE_THIRD))


def _v(m1, m2, hue):
    # Piecewise hue ramp shared by hls_to_rgb.
    hue = hue % 1.0
    if hue < ONE_SIXTH:
        return m1 + (m2 - m1) * hue * 6.0
    if hue < 0.5:
        return m2
    if hue < TWO_THIRD:
        return m1 + (m2 - m1) * (TWO_THIRD - hue) * 6.0
    return m1


def rgb_to_hsv(r, g, b):
    """Convert RGB to HSV (hue, saturation, value)."""
    maxc = max(r, g, b)
    minc = min(r, g, b)
    rangec = maxc - minc
    v = maxc
    if minc == maxc:
        return 0.0, 0.0, v
    s = rangec / maxc
    rc = (maxc - r) / rangec
    gc = (maxc - g) / rangec
    bc = (maxc - b) / rangec
    if r == maxc:
        h = bc - gc
    elif g == maxc:
        h = 2.0 + rc - bc
    else:
        h = 4.0 + gc - rc
    h = (h / 6.0) % 1.0
    return h, s, v


def hsv_to_rgb(h, s, v):
    """Convert HSV back to RGB."""
    if s == 0.0:
        return v, v, v
    i = int(h * 6.0)  # XXX assume int() truncates!
    f = (h * 6.0) - i
    p = v * (1.0 - s)
    q = v * (1.0 - s * f)
    t = v * (1.0 - s * (1.0 - f))
    i = i % 6
    if i == 0:
        return v, t, p
    if i == 1:
        return q, v, p
    if i == 2:
        return p, v, t
    if i == 3:
        return p, q, v
    if i == 4:
        return t, p, v
    if i == 5:
        return v, p, q
    # Cannot get here
- force: if True, force compilation, even if timestamps are up-to-date - quiet: full output with False or 0, errors only with 1, - no output with 2 - legacy: if True, produce legacy pyc paths instead of PEP 3147 paths - optimize: optimization level or -1 for level of the interpreter - workers: maximum number of parallel workers - """ - if workers is not None and workers < 0: - raise ValueError('workers must be greater or equal to 0') - - files = _walk_dir(dir, quiet=quiet, maxlevels=maxlevels, - ddir=ddir) - success = True - if workers is not None and workers != 1 and ProcessPoolExecutor is not None: - workers = workers or None - with ProcessPoolExecutor(max_workers=workers) as executor: - results = executor.map(partial(compile_file, - ddir=ddir, force=force, - rx=rx, quiet=quiet, - legacy=legacy, - optimize=optimize), - files) - success = min(results, default=True) - else: - for file in files: - if not compile_file(file, ddir, force, rx, quiet, - legacy, optimize): - success = False - return success - -def compile_file(fullname, ddir=None, force=False, rx=None, quiet=0, - legacy=False, optimize=-1): - """Byte-compile one file. - - Arguments (only fullname is required): - - fullname: the file to byte-compile - ddir: if given, the directory name compiled in to the - byte-code file. 
- force: if True, force compilation, even if timestamps are up-to-date - quiet: full output with False or 0, errors only with 1, - no output with 2 - legacy: if True, produce legacy pyc paths instead of PEP 3147 paths - optimize: optimization level or -1 for level of the interpreter - """ - success = True - if quiet < 2 and isinstance(fullname, os.PathLike): - fullname = os.fspath(fullname) - name = os.path.basename(fullname) - if ddir is not None: - dfile = os.path.join(ddir, name) - else: - dfile = None - if rx is not None: - mo = rx.search(fullname) - if mo: - return success - if os.path.isfile(fullname): - if legacy: - cfile = fullname + 'c' - else: - if optimize >= 0: - opt = optimize if optimize >= 1 else '' - cfile = importlib.util.cache_from_source( - fullname, optimization=opt) - else: - cfile = importlib.util.cache_from_source(fullname) - cache_dir = os.path.dirname(cfile) - head, tail = name[:-3], name[-3:] - if tail == '.py': - if not force: - try: - mtime = int(os.stat(fullname).st_mtime) - expect = struct.pack('<4sl', importlib.util.MAGIC_NUMBER, - mtime) - with open(cfile, 'rb') as chandle: - actual = chandle.read(8) - if expect == actual: - return success - except OSError: - pass - if not quiet: - print('Compiling {!r}...'.format(fullname)) - try: - ok = py_compile.compile(fullname, cfile, dfile, True, - optimize=optimize) - except py_compile.PyCompileError as err: - success = False - if quiet >= 2: - return success - elif quiet: - print('*** Error compiling {!r}...'.format(fullname)) - else: - print('*** ', end='') - # escape non-printable characters in msg - msg = err.msg.encode(sys.stdout.encoding, - errors='backslashreplace') - msg = msg.decode(sys.stdout.encoding) - print(msg) - except (SyntaxError, UnicodeError, OSError) as e: - success = False - if quiet >= 2: - return success - elif quiet: - print('*** Error compiling {!r}...'.format(fullname)) - else: - print('*** ', end='') - print(e.__class__.__name__ + ':', e) - else: - if ok == 0: - 
success = False - return success - -def compile_path(skip_curdir=1, maxlevels=0, force=False, quiet=0, - legacy=False, optimize=-1): - """Byte-compile all module on sys.path. - - Arguments (all optional): - - skip_curdir: if true, skip current directory (default True) - maxlevels: max recursion level (default 0) - force: as for compile_dir() (default False) - quiet: as for compile_dir() (default 0) - legacy: as for compile_dir() (default False) - optimize: as for compile_dir() (default -1) - """ - success = True - for dir in sys.path: - if (not dir or dir == os.curdir) and skip_curdir: - if quiet < 2: - print('Skipping current directory') - else: - success = success and compile_dir(dir, maxlevels, None, - force, quiet=quiet, - legacy=legacy, optimize=optimize) - return success - - +__all__=['compile_dir','compile_file','compile_path'] +def _walk_dir(dir,ddir=_A,maxlevels=10,quiet=0): + E=maxlevels;C=quiet + if C<2 and isinstance(dir,os.PathLike):dir=os.fspath(dir) + if not C:print('Listing {!r}...'.format(dir)) + try:D=os.listdir(dir) + except OSError: + if C<2:print("Can't list {!r}".format(dir)) + D=[] + D.sort() + for A in D: + if A=='__pycache__':continue + B=os.path.join(dir,A) + if ddir is not _A:F=os.path.join(ddir,A) + else:F=_A + if not os.path.isdir(B):yield B + elif E>0 and A!=os.curdir and A!=os.pardir and os.path.isdir(B)and not os.path.islink(B):yield from _walk_dir(B,ddir=F,maxlevels=E-1,quiet=C) +def compile_dir(dir,maxlevels=10,ddir=_A,force=_B,rx=_A,quiet=0,legacy=_B,optimize=-1,workers=1): + 'Byte-compile all modules in the given directory tree.\n\n Arguments (only dir is required):\n\n dir: the directory to byte-compile\n maxlevels: maximum recursion level (default 10)\n ddir: the directory that will be prepended to the path to the\n file as it is compiled into each byte-code file.\n force: if True, force compilation, even if timestamps are up-to-date\n quiet: full output with False or 0, errors only with 1,\n no output with 2\n legacy: if True, 
produce legacy pyc paths instead of PEP 3147 paths\n optimize: optimization level or -1 for level of the interpreter\n workers: maximum number of parallel workers\n ';E=optimize;F=legacy;G=force;B=quiet;C=ddir;A=workers + if A is not _A and A<0:raise ValueError('workers must be greater or equal to 0') + H=_walk_dir(dir,quiet=B,maxlevels=maxlevels,ddir=C);D=_C + if A is not _A and A!=1 and ProcessPoolExecutor is not _A: + A=A or _A + with ProcessPoolExecutor(max_workers=A)as I:J=I.map(partial(compile_file,ddir=C,force=G,rx=rx,quiet=B,legacy=F,optimize=E),H);D=min(J,default=_C) + else: + for K in H: + if not compile_file(K,C,G,rx,B,F,E):D=_B + return D +def compile_file(fullname,ddir=_A,force=_B,rx=_A,quiet=0,legacy=_B,optimize=-1): + 'Byte-compile one file.\n\n Arguments (only fullname is required):\n\n fullname: the file to byte-compile\n ddir: if given, the directory name compiled in to the\n byte-code file.\n force: if True, force compilation, even if timestamps are up-to-date\n quiet: full output with False or 0, errors only with 1,\n no output with 2\n legacy: if True, produce legacy pyc paths instead of PEP 3147 paths\n optimize: optimization level or -1 for level of the interpreter\n ';H='*** ';I='*** Error compiling {!r}...';E=optimize;C=quiet;A=fullname;B=_C + if C<2 and isinstance(A,os.PathLike):A=os.fspath(A) + F=os.path.basename(A) + if ddir is not _A:J=os.path.join(ddir,F) + else:J=_A + if rx is not _A: + L=rx.search(A) + if L:return B + if os.path.isfile(A): + if legacy:D=A+'c' + else: + if E>=0:M=E if E>=1 else'';D=importlib.util.cache_from_source(A,optimization=M) + else:D=importlib.util.cache_from_source(A) + U=os.path.dirname(D) + V,N=F[:-3],F[-3:] + if N=='.py': + if not force: + try: + O=int(os.stat(A).st_mtime);P=struct.pack('<4sl',importlib.util.MAGIC_NUMBER,O) + with open(D,'rb')as Q:R=Q.read(8) + if P==R:return B + except OSError:pass + if not C:print('Compiling {!r}...'.format(A)) + try:S=py_compile.compile(A,D,J,_C,optimize=E) + except 
py_compile.PyCompileError as T: + B=_B + if C>=2:return B + elif C:print(I.format(A)) + else:print(H,end='') + G=T.msg.encode(sys.stdout.encoding,errors='backslashreplace');G=G.decode(sys.stdout.encoding);print(G) + except(SyntaxError,UnicodeError,OSError)as K: + B=_B + if C>=2:return B + elif C:print(I.format(A)) + else:print(H,end='') + print(K.__class__.__name__+':',K) + else: + if S==0:B=_B + return B +def compile_path(skip_curdir=1,maxlevels=0,force=_B,quiet=0,legacy=_B,optimize=-1): + 'Byte-compile all module on sys.path.\n\n Arguments (all optional):\n\n skip_curdir: if true, skip current directory (default True)\n maxlevels: max recursion level (default 0)\n force: as for compile_dir() (default False)\n quiet: as for compile_dir() (default 0)\n legacy: as for compile_dir() (default False)\n optimize: as for compile_dir() (default -1)\n ';B=quiet;A=_C + for dir in sys.path: + if(not dir or dir==os.curdir)and skip_curdir: + if B<2:print('Skipping current directory') + else:A=A and compile_dir(dir,maxlevels,_A,force,quiet=B,legacy=legacy,optimize=optimize) + return A def main(): - """Script main program.""" - import argparse - - parser = argparse.ArgumentParser( - description='Utilities to support installing Python libraries.') - parser.add_argument('-l', action='store_const', const=0, - default=10, dest='maxlevels', - help="don't recurse into subdirectories") - parser.add_argument('-r', type=int, dest='recursion', - help=('control the maximum recursion level. 
' - 'if `-l` and `-r` options are specified, ' - 'then `-r` takes precedence.')) - parser.add_argument('-f', action='store_true', dest='force', - help='force rebuild even if timestamps are up to date') - parser.add_argument('-q', action='count', dest='quiet', default=0, - help='output only error messages; -qq will suppress ' - 'the error messages as well.') - parser.add_argument('-b', action='store_true', dest='legacy', - help='use legacy (pre-PEP3147) compiled file locations') - parser.add_argument('-d', metavar='DESTDIR', dest='ddir', default=None, - help=('directory to prepend to file paths for use in ' - 'compile-time tracebacks and in runtime ' - 'tracebacks in cases where the source file is ' - 'unavailable')) - parser.add_argument('-x', metavar='REGEXP', dest='rx', default=None, - help=('skip files matching the regular expression; ' - 'the regexp is searched for in the full path ' - 'of each file considered for compilation')) - parser.add_argument('-i', metavar='FILE', dest='flist', - help=('add all the files and directories listed in ' - 'FILE to the list considered for compilation; ' - 'if "-", names are read from stdin')) - parser.add_argument('compile_dest', metavar='FILE|DIR', nargs='*', - help=('zero or more file and directory names ' - 'to compile; if no arguments given, defaults ' - 'to the equivalent of -l sys.path')) - parser.add_argument('-j', '--workers', default=1, - type=int, help='Run compileall concurrently') - - args = parser.parse_args() - compile_dests = args.compile_dest - - if args.rx: - import re - args.rx = re.compile(args.rx) - - - if args.recursion is not None: - maxlevels = args.recursion - else: - maxlevels = args.maxlevels - - # if flist is provided then load it - if args.flist: - try: - with (sys.stdin if args.flist=='-' else open(args.flist)) as f: - for line in f: - compile_dests.append(line.strip()) - except OSError: - if args.quiet < 2: - print("Error reading file list {}".format(args.flist)) - return False - - if 
args.workers is not None: - args.workers = args.workers or None - - success = True - try: - if compile_dests: - for dest in compile_dests: - if os.path.isfile(dest): - if not compile_file(dest, args.ddir, args.force, args.rx, - args.quiet, args.legacy): - success = False - else: - if not compile_dir(dest, maxlevels, args.ddir, - args.force, args.rx, args.quiet, - args.legacy, workers=args.workers): - success = False - return success - else: - return compile_path(legacy=args.legacy, force=args.force, - quiet=args.quiet) - except KeyboardInterrupt: - if args.quiet < 2: - print("\n[interrupted]") - return False - return True - - -if __name__ == '__main__': - exit_status = int(not main()) - sys.exit(exit_status) + 'Script main program.';F='store_true';import argparse as H;B=H.ArgumentParser(description='Utilities to support installing Python libraries.');B.add_argument('-l',action='store_const',const=0,default=10,dest='maxlevels',help="don't recurse into subdirectories");B.add_argument('-r',type=int,dest='recursion',help='control the maximum recursion level. 
if `-l` and `-r` options are specified, then `-r` takes precedence.');B.add_argument('-f',action=F,dest='force',help='force rebuild even if timestamps are up to date');B.add_argument('-q',action='count',dest='quiet',default=0,help='output only error messages; -qq will suppress the error messages as well.');B.add_argument('-b',action=F,dest='legacy',help='use legacy (pre-PEP3147) compiled file locations');B.add_argument('-d',metavar='DESTDIR',dest='ddir',default=_A,help='directory to prepend to file paths for use in compile-time tracebacks and in runtime tracebacks in cases where the source file is unavailable');B.add_argument('-x',metavar='REGEXP',dest='rx',default=_A,help='skip files matching the regular expression; the regexp is searched for in the full path of each file considered for compilation');B.add_argument('-i',metavar='FILE',dest='flist',help='add all the files and directories listed in FILE to the list considered for compilation; if "-", names are read from stdin');B.add_argument('compile_dest',metavar='FILE|DIR',nargs='*',help='zero or more file and directory names to compile; if no arguments given, defaults to the equivalent of -l sys.path');B.add_argument('-j','--workers',default=1,type=int,help='Run compileall concurrently');A=B.parse_args();C=A.compile_dest + if A.rx:import re;A.rx=re.compile(A.rx) + if A.recursion is not _A:G=A.recursion + else:G=A.maxlevels + if A.flist: + try: + with sys.stdin if A.flist=='-'else open(A.flist)as I: + for J in I:C.append(J.strip()) + except OSError: + if A.quiet<2:print('Error reading file list {}'.format(A.flist)) + return _B + if A.workers is not _A:A.workers=A.workers or _A + D=_C + try: + if C: + for E in C: + if os.path.isfile(E): + if not compile_file(E,A.ddir,A.force,A.rx,A.quiet,A.legacy):D=_B + elif not compile_dir(E,G,A.ddir,A.force,A.rx,A.quiet,A.legacy,workers=A.workers):D=_B + return D + else:return compile_path(legacy=A.legacy,force=A.force,quiet=A.quiet) + except KeyboardInterrupt: + if 
A.quiet<2:print('\n[interrupted]') + return _B + return _C +if __name__=='__main__':exit_status=int(not main());sys.exit(exit_status) \ No newline at end of file diff --git a/Lib/concurrent/__init__.py b/Lib/concurrent/__init__.py deleted file mode 100644 index 196d3788575..00000000000 --- a/Lib/concurrent/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# This directory is a Python package. diff --git a/Lib/concurrent/futures/__init__.py b/Lib/concurrent/futures/__init__.py deleted file mode 100644 index d746aeac50a..00000000000 --- a/Lib/concurrent/futures/__init__.py +++ /dev/null @@ -1,53 +0,0 @@ -# Copyright 2009 Brian Quinlan. All Rights Reserved. -# Licensed to PSF under a Contributor Agreement. - -"""Execute computations asynchronously using threads or processes.""" - -__author__ = 'Brian Quinlan (brian@sweetapp.com)' - -from concurrent.futures._base import (FIRST_COMPLETED, - FIRST_EXCEPTION, - ALL_COMPLETED, - CancelledError, - TimeoutError, - InvalidStateError, - BrokenExecutor, - Future, - Executor, - wait, - as_completed) - -__all__ = ( - 'FIRST_COMPLETED', - 'FIRST_EXCEPTION', - 'ALL_COMPLETED', - 'CancelledError', - 'TimeoutError', - 'BrokenExecutor', - 'Future', - 'Executor', - 'wait', - 'as_completed', - 'ProcessPoolExecutor', - 'ThreadPoolExecutor', -) - - -def __dir__(): - return __all__ + ('__author__', '__doc__') - - -def __getattr__(name): - global ProcessPoolExecutor, ThreadPoolExecutor - - if name == 'ProcessPoolExecutor': - from .process import ProcessPoolExecutor as pe - ProcessPoolExecutor = pe - return pe - - if name == 'ThreadPoolExecutor': - from .thread import ThreadPoolExecutor as te - ThreadPoolExecutor = te - return te - - raise AttributeError(f"module {__name__} has no attribute {name}") diff --git a/Lib/concurrent/futures/_base.py b/Lib/concurrent/futures/_base.py deleted file mode 100644 index cf119ac6437..00000000000 --- a/Lib/concurrent/futures/_base.py +++ /dev/null @@ -1,644 +0,0 @@ -# Copyright 2009 Brian Quinlan. 
All Rights Reserved. -# Licensed to PSF under a Contributor Agreement. - -__author__ = 'Brian Quinlan (brian@sweetapp.com)' - -import collections -import logging -import threading -import time -import types - -FIRST_COMPLETED = 'FIRST_COMPLETED' -FIRST_EXCEPTION = 'FIRST_EXCEPTION' -ALL_COMPLETED = 'ALL_COMPLETED' -_AS_COMPLETED = '_AS_COMPLETED' - -# Possible future states (for internal use by the futures package). -PENDING = 'PENDING' -RUNNING = 'RUNNING' -# The future was cancelled by the user... -CANCELLED = 'CANCELLED' -# ...and _Waiter.add_cancelled() was called by a worker. -CANCELLED_AND_NOTIFIED = 'CANCELLED_AND_NOTIFIED' -FINISHED = 'FINISHED' - -_FUTURE_STATES = [ - PENDING, - RUNNING, - CANCELLED, - CANCELLED_AND_NOTIFIED, - FINISHED -] - -_STATE_TO_DESCRIPTION_MAP = { - PENDING: "pending", - RUNNING: "running", - CANCELLED: "cancelled", - CANCELLED_AND_NOTIFIED: "cancelled", - FINISHED: "finished" -} - -# Logger for internal use by the futures package. -LOGGER = logging.getLogger("concurrent.futures") - -class Error(Exception): - """Base class for all future-related exceptions.""" - pass - -class CancelledError(Error): - """The Future was cancelled.""" - pass - -class TimeoutError(Error): - """The operation exceeded the given deadline.""" - pass - -class InvalidStateError(Error): - """The operation is not allowed in this state.""" - pass - -class _Waiter(object): - """Provides the event that wait() and as_completed() block on.""" - def __init__(self): - self.event = threading.Event() - self.finished_futures = [] - - def add_result(self, future): - self.finished_futures.append(future) - - def add_exception(self, future): - self.finished_futures.append(future) - - def add_cancelled(self, future): - self.finished_futures.append(future) - -class _AsCompletedWaiter(_Waiter): - """Used by as_completed().""" - - def __init__(self): - super(_AsCompletedWaiter, self).__init__() - self.lock = threading.Lock() - - def add_result(self, future): - with self.lock: - 
super(_AsCompletedWaiter, self).add_result(future) - self.event.set() - - def add_exception(self, future): - with self.lock: - super(_AsCompletedWaiter, self).add_exception(future) - self.event.set() - - def add_cancelled(self, future): - with self.lock: - super(_AsCompletedWaiter, self).add_cancelled(future) - self.event.set() - -class _FirstCompletedWaiter(_Waiter): - """Used by wait(return_when=FIRST_COMPLETED).""" - - def add_result(self, future): - super().add_result(future) - self.event.set() - - def add_exception(self, future): - super().add_exception(future) - self.event.set() - - def add_cancelled(self, future): - super().add_cancelled(future) - self.event.set() - -class _AllCompletedWaiter(_Waiter): - """Used by wait(return_when=FIRST_EXCEPTION and ALL_COMPLETED).""" - - def __init__(self, num_pending_calls, stop_on_exception): - self.num_pending_calls = num_pending_calls - self.stop_on_exception = stop_on_exception - self.lock = threading.Lock() - super().__init__() - - def _decrement_pending_calls(self): - with self.lock: - self.num_pending_calls -= 1 - if not self.num_pending_calls: - self.event.set() - - def add_result(self, future): - super().add_result(future) - self._decrement_pending_calls() - - def add_exception(self, future): - super().add_exception(future) - if self.stop_on_exception: - self.event.set() - else: - self._decrement_pending_calls() - - def add_cancelled(self, future): - super().add_cancelled(future) - self._decrement_pending_calls() - -class _AcquireFutures(object): - """A context manager that does an ordered acquire of Future conditions.""" - - def __init__(self, futures): - self.futures = sorted(futures, key=id) - - def __enter__(self): - for future in self.futures: - future._condition.acquire() - - def __exit__(self, *args): - for future in self.futures: - future._condition.release() - -def _create_and_install_waiters(fs, return_when): - if return_when == _AS_COMPLETED: - waiter = _AsCompletedWaiter() - elif return_when == 
FIRST_COMPLETED: - waiter = _FirstCompletedWaiter() - else: - pending_count = sum( - f._state not in [CANCELLED_AND_NOTIFIED, FINISHED] for f in fs) - - if return_when == FIRST_EXCEPTION: - waiter = _AllCompletedWaiter(pending_count, stop_on_exception=True) - elif return_when == ALL_COMPLETED: - waiter = _AllCompletedWaiter(pending_count, stop_on_exception=False) - else: - raise ValueError("Invalid return condition: %r" % return_when) - - for f in fs: - f._waiters.append(waiter) - - return waiter - - -def _yield_finished_futures(fs, waiter, ref_collect): - """ - Iterate on the list *fs*, yielding finished futures one by one in - reverse order. - Before yielding a future, *waiter* is removed from its waiters - and the future is removed from each set in the collection of sets - *ref_collect*. - - The aim of this function is to avoid keeping stale references after - the future is yielded and before the iterator resumes. - """ - while fs: - f = fs[-1] - for futures_set in ref_collect: - futures_set.remove(f) - with f._condition: - f._waiters.remove(waiter) - del f - # Careful not to keep a reference to the popped value - yield fs.pop() - - -def as_completed(fs, timeout=None): - """An iterator over the given futures that yields each as it completes. - - Args: - fs: The sequence of Futures (possibly created by different Executors) to - iterate over. - timeout: The maximum number of seconds to wait. If None, then there - is no limit on the wait time. - - Returns: - An iterator that yields the given Futures as they complete (finished or - cancelled). If any given Futures are duplicated, they will be returned - once. - - Raises: - TimeoutError: If the entire result iterator could not be generated - before the given timeout. 
- """ - if timeout is not None: - end_time = timeout + time.monotonic() - - fs = set(fs) - total_futures = len(fs) - with _AcquireFutures(fs): - finished = set( - f for f in fs - if f._state in [CANCELLED_AND_NOTIFIED, FINISHED]) - pending = fs - finished - waiter = _create_and_install_waiters(fs, _AS_COMPLETED) - finished = list(finished) - try: - yield from _yield_finished_futures(finished, waiter, - ref_collect=(fs,)) - - while pending: - if timeout is None: - wait_timeout = None - else: - wait_timeout = end_time - time.monotonic() - if wait_timeout < 0: - raise TimeoutError( - '%d (of %d) futures unfinished' % ( - len(pending), total_futures)) - - waiter.event.wait(wait_timeout) - - with waiter.lock: - finished = waiter.finished_futures - waiter.finished_futures = [] - waiter.event.clear() - - # reverse to keep finishing order - finished.reverse() - yield from _yield_finished_futures(finished, waiter, - ref_collect=(fs, pending)) - - finally: - # Remove waiter from unfinished futures - for f in fs: - with f._condition: - f._waiters.remove(waiter) - -DoneAndNotDoneFutures = collections.namedtuple( - 'DoneAndNotDoneFutures', 'done not_done') -def wait(fs, timeout=None, return_when=ALL_COMPLETED): - """Wait for the futures in the given sequence to complete. - - Args: - fs: The sequence of Futures (possibly created by different Executors) to - wait upon. - timeout: The maximum number of seconds to wait. If None, then there - is no limit on the wait time. - return_when: Indicates when this function should return. The options - are: - - FIRST_COMPLETED - Return when any future finishes or is - cancelled. - FIRST_EXCEPTION - Return when any future finishes by raising an - exception. If no future raises an exception - then it is equivalent to ALL_COMPLETED. - ALL_COMPLETED - Return when all futures finish or are cancelled. - - Returns: - A named 2-tuple of sets. 
The first set, named 'done', contains the - futures that completed (is finished or cancelled) before the wait - completed. The second set, named 'not_done', contains uncompleted - futures. Duplicate futures given to *fs* are removed and will be - returned only once. - """ - fs = set(fs) - with _AcquireFutures(fs): - done = {f for f in fs - if f._state in [CANCELLED_AND_NOTIFIED, FINISHED]} - not_done = fs - done - if (return_when == FIRST_COMPLETED) and done: - return DoneAndNotDoneFutures(done, not_done) - elif (return_when == FIRST_EXCEPTION) and done: - if any(f for f in done - if not f.cancelled() and f.exception() is not None): - return DoneAndNotDoneFutures(done, not_done) - - if len(done) == len(fs): - return DoneAndNotDoneFutures(done, not_done) - - waiter = _create_and_install_waiters(fs, return_when) - - waiter.event.wait(timeout) - for f in fs: - with f._condition: - f._waiters.remove(waiter) - - done.update(waiter.finished_futures) - return DoneAndNotDoneFutures(done, fs - done) - -class Future(object): - """Represents the result of an asynchronous computation.""" - - def __init__(self): - """Initializes the future. 
Should not be called by clients.""" - self._condition = threading.Condition() - self._state = PENDING - self._result = None - self._exception = None - self._waiters = [] - self._done_callbacks = [] - - def _invoke_callbacks(self): - for callback in self._done_callbacks: - try: - callback(self) - except Exception: - LOGGER.exception('exception calling callback for %r', self) - - def __repr__(self): - with self._condition: - if self._state == FINISHED: - if self._exception: - return '<%s at %#x state=%s raised %s>' % ( - self.__class__.__name__, - id(self), - _STATE_TO_DESCRIPTION_MAP[self._state], - self._exception.__class__.__name__) - else: - return '<%s at %#x state=%s returned %s>' % ( - self.__class__.__name__, - id(self), - _STATE_TO_DESCRIPTION_MAP[self._state], - self._result.__class__.__name__) - return '<%s at %#x state=%s>' % ( - self.__class__.__name__, - id(self), - _STATE_TO_DESCRIPTION_MAP[self._state]) - - def cancel(self): - """Cancel the future if possible. - - Returns True if the future was cancelled, False otherwise. A future - cannot be cancelled if it is running or has already completed. 
- """ - with self._condition: - if self._state in [RUNNING, FINISHED]: - return False - - if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]: - return True - - self._state = CANCELLED - self._condition.notify_all() - - self._invoke_callbacks() - return True - - def cancelled(self): - """Return True if the future was cancelled.""" - with self._condition: - return self._state in [CANCELLED, CANCELLED_AND_NOTIFIED] - - def running(self): - """Return True if the future is currently executing.""" - with self._condition: - return self._state == RUNNING - - def done(self): - """Return True if the future was cancelled or finished executing.""" - with self._condition: - return self._state in [CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED] - - def __get_result(self): - if self._exception: - try: - raise self._exception - finally: - # Break a reference cycle with the exception in self._exception - self = None - else: - return self._result - - def add_done_callback(self, fn): - """Attaches a callable that will be called when the future finishes. - - Args: - fn: A callable that will be called with this future as its only - argument when the future completes or is cancelled. The callable - will always be called by a thread in the same process in which - it was added. If the future has already completed or been - cancelled then the callable will be called immediately. These - callables are called in the order that they were added. - """ - with self._condition: - if self._state not in [CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED]: - self._done_callbacks.append(fn) - return - try: - fn(self) - except Exception: - LOGGER.exception('exception calling callback for %r', self) - - def result(self, timeout=None): - """Return the result of the call that the future represents. - - Args: - timeout: The number of seconds to wait for the result if the future - isn't done. If None, then there is no limit on the wait time. - - Returns: - The result of the call that the future represents. 
- - Raises: - CancelledError: If the future was cancelled. - TimeoutError: If the future didn't finish executing before the given - timeout. - Exception: If the call raised then that exception will be raised. - """ - try: - with self._condition: - if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]: - raise CancelledError() - elif self._state == FINISHED: - return self.__get_result() - - self._condition.wait(timeout) - - if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]: - raise CancelledError() - elif self._state == FINISHED: - return self.__get_result() - else: - raise TimeoutError() - finally: - # Break a reference cycle with the exception in self._exception - self = None - - def exception(self, timeout=None): - """Return the exception raised by the call that the future represents. - - Args: - timeout: The number of seconds to wait for the exception if the - future isn't done. If None, then there is no limit on the wait - time. - - Returns: - The exception raised by the call that the future represents or None - if the call completed without raising. - - Raises: - CancelledError: If the future was cancelled. - TimeoutError: If the future didn't finish executing before the given - timeout. - """ - - with self._condition: - if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]: - raise CancelledError() - elif self._state == FINISHED: - return self._exception - - self._condition.wait(timeout) - - if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]: - raise CancelledError() - elif self._state == FINISHED: - return self._exception - else: - raise TimeoutError() - - # The following methods should only be used by Executors and in tests. - def set_running_or_notify_cancel(self): - """Mark the future as running or process any cancel notifications. - - Should only be used by Executor implementations and unit tests. 
- - If the future has been cancelled (cancel() was called and returned - True) then any threads waiting on the future completing (though calls - to as_completed() or wait()) are notified and False is returned. - - If the future was not cancelled then it is put in the running state - (future calls to running() will return True) and True is returned. - - This method should be called by Executor implementations before - executing the work associated with this future. If this method returns - False then the work should not be executed. - - Returns: - False if the Future was cancelled, True otherwise. - - Raises: - RuntimeError: if this method was already called or if set_result() - or set_exception() was called. - """ - with self._condition: - if self._state == CANCELLED: - self._state = CANCELLED_AND_NOTIFIED - for waiter in self._waiters: - waiter.add_cancelled(self) - # self._condition.notify_all() is not necessary because - # self.cancel() triggers a notification. - return False - elif self._state == PENDING: - self._state = RUNNING - return True - else: - LOGGER.critical('Future %s in unexpected state: %s', - id(self), - self._state) - raise RuntimeError('Future in unexpected state') - - def set_result(self, result): - """Sets the return value of work associated with the future. - - Should only be used by Executor implementations and unit tests. - """ - with self._condition: - if self._state in {CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED}: - raise InvalidStateError('{}: {!r}'.format(self._state, self)) - self._result = result - self._state = FINISHED - for waiter in self._waiters: - waiter.add_result(self) - self._condition.notify_all() - self._invoke_callbacks() - - def set_exception(self, exception): - """Sets the result of the future as being the given exception. - - Should only be used by Executor implementations and unit tests. 
- """ - with self._condition: - if self._state in {CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED}: - raise InvalidStateError('{}: {!r}'.format(self._state, self)) - self._exception = exception - self._state = FINISHED - for waiter in self._waiters: - waiter.add_exception(self) - self._condition.notify_all() - self._invoke_callbacks() - - __class_getitem__ = classmethod(types.GenericAlias) - -class Executor(object): - """This is an abstract base class for concrete asynchronous executors.""" - - def submit(self, fn, /, *args, **kwargs): - """Submits a callable to be executed with the given arguments. - - Schedules the callable to be executed as fn(*args, **kwargs) and returns - a Future instance representing the execution of the callable. - - Returns: - A Future representing the given call. - """ - raise NotImplementedError() - - def map(self, fn, *iterables, timeout=None, chunksize=1): - """Returns an iterator equivalent to map(fn, iter). - - Args: - fn: A callable that will take as many arguments as there are - passed iterables. - timeout: The maximum number of seconds to wait. If None, then there - is no limit on the wait time. - chunksize: The size of the chunks the iterable will be broken into - before being passed to a child process. This argument is only - used by ProcessPoolExecutor; it is ignored by - ThreadPoolExecutor. - - Returns: - An iterator equivalent to: map(func, *iterables) but the calls may - be evaluated out-of-order. - - Raises: - TimeoutError: If the entire result iterator could not be generated - before the given timeout. - Exception: If fn(*args) raises for any values. - """ - if timeout is not None: - end_time = timeout + time.monotonic() - - fs = [self.submit(fn, *args) for args in zip(*iterables)] - - # Yield must be hidden in closure so that the futures are submitted - # before the first iterator value is required. 
- def result_iterator(): - try: - # reverse to keep finishing order - fs.reverse() - while fs: - # Careful not to keep a reference to the popped future - if timeout is None: - yield fs.pop().result() - else: - yield fs.pop().result(end_time - time.monotonic()) - finally: - for future in fs: - future.cancel() - return result_iterator() - - def shutdown(self, wait=True, *, cancel_futures=False): - """Clean-up the resources associated with the Executor. - - It is safe to call this method several times. Otherwise, no other - methods can be called after this one. - - Args: - wait: If True then shutdown will not return until all running - futures have finished executing and the resources used by the - executor have been reclaimed. - cancel_futures: If True then shutdown will cancel all pending - futures. Futures that are completed or running will not be - cancelled. - """ - pass - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - self.shutdown(wait=True) - return False - - -class BrokenExecutor(RuntimeError): - """ - Raised when a executor has become non-functional after a severe failure. - """ diff --git a/Lib/concurrent/futures/process.py b/Lib/concurrent/futures/process.py deleted file mode 100644 index 57941e485d8..00000000000 --- a/Lib/concurrent/futures/process.py +++ /dev/null @@ -1,786 +0,0 @@ -# Copyright 2009 Brian Quinlan. All Rights Reserved. -# Licensed to PSF under a Contributor Agreement. - -"""Implements ProcessPoolExecutor. - -The following diagram and text describe the data-flow through the system: - -|======================= In-process =====================|== Out-of-process ==| - -+----------+ +----------+ +--------+ +-----------+ +---------+ -| | => | Work Ids | | | | Call Q | | Process | -| | +----------+ | | +-----------+ | Pool | -| | | ... | | | | ... | +---------+ -| | | 6 | => | | => | 5, call() | => | | -| | | 7 | | | | ... | | | -| Process | | ... 
| | Local | +-----------+ | Process | -| Pool | +----------+ | Worker | | #1..n | -| Executor | | Thread | | | -| | +----------- + | | +-----------+ | | -| | <=> | Work Items | <=> | | <= | Result Q | <= | | -| | +------------+ | | +-----------+ | | -| | | 6: call() | | | | ... | | | -| | | future | | | | 4, result | | | -| | | ... | | | | 3, except | | | -+----------+ +------------+ +--------+ +-----------+ +---------+ - -Executor.submit() called: -- creates a uniquely numbered _WorkItem and adds it to the "Work Items" dict -- adds the id of the _WorkItem to the "Work Ids" queue - -Local worker thread: -- reads work ids from the "Work Ids" queue and looks up the corresponding - WorkItem from the "Work Items" dict: if the work item has been cancelled then - it is simply removed from the dict, otherwise it is repackaged as a - _CallItem and put in the "Call Q". New _CallItems are put in the "Call Q" - until "Call Q" is full. NOTE: the size of the "Call Q" is kept small because - calls placed in the "Call Q" can no longer be cancelled with Future.cancel(). 
-- reads _ResultItems from "Result Q", updates the future stored in the - "Work Items" dict and deletes the dict entry - -Process #1..n: -- reads _CallItems from "Call Q", executes the calls, and puts the resulting - _ResultItems in "Result Q" -""" - -__author__ = 'Brian Quinlan (brian@sweetapp.com)' - -import os -from concurrent.futures import _base -import queue -import multiprocessing as mp -import multiprocessing.connection -from multiprocessing.queues import Queue -import threading -import weakref -from functools import partial -import itertools -import sys -import traceback - - -_threads_wakeups = weakref.WeakKeyDictionary() -_global_shutdown = False - - -class _ThreadWakeup: - def __init__(self): - self._closed = False - self._reader, self._writer = mp.Pipe(duplex=False) - - def close(self): - if not self._closed: - self._closed = True - self._writer.close() - self._reader.close() - - def wakeup(self): - if not self._closed: - self._writer.send_bytes(b"") - - def clear(self): - if not self._closed: - while self._reader.poll(): - self._reader.recv_bytes() - - -def _python_exit(): - global _global_shutdown - _global_shutdown = True - items = list(_threads_wakeups.items()) - for _, thread_wakeup in items: - # call not protected by ProcessPoolExecutor._shutdown_lock - thread_wakeup.wakeup() - for t, _ in items: - t.join() - -# Register for `_python_exit()` to be called just before joining all -# non-daemon threads. This is used instead of `atexit.register()` for -# compatibility with subinterpreters, which no longer support daemon threads. -# See bpo-39812 for context. -threading._register_atexit(_python_exit) - -# Controls how many more calls than processes will be queued in the call queue. -# A smaller number will mean that processes spend more time idle waiting for -# work while a larger number will make Future.cancel() succeed less frequently -# (Futures in the call queue cannot be cancelled). 
-EXTRA_QUEUED_CALLS = 1 - - -# On Windows, WaitForMultipleObjects is used to wait for processes to finish. -# It can wait on, at most, 63 objects. There is an overhead of two objects: -# - the result queue reader -# - the thread wakeup reader -_MAX_WINDOWS_WORKERS = 63 - 2 - -# Hack to embed stringification of remote traceback in local traceback - -class _RemoteTraceback(Exception): - def __init__(self, tb): - self.tb = tb - def __str__(self): - return self.tb - -class _ExceptionWithTraceback: - def __init__(self, exc, tb): - tb = traceback.format_exception(type(exc), exc, tb) - tb = ''.join(tb) - self.exc = exc - # Traceback object needs to be garbage-collected as its frames - # contain references to all the objects in the exception scope - self.exc.__traceback__ = None - self.tb = '\n"""\n%s"""' % tb - def __reduce__(self): - return _rebuild_exc, (self.exc, self.tb) - -def _rebuild_exc(exc, tb): - exc.__cause__ = _RemoteTraceback(tb) - return exc - -class _WorkItem(object): - def __init__(self, future, fn, args, kwargs): - self.future = future - self.fn = fn - self.args = args - self.kwargs = kwargs - -class _ResultItem(object): - def __init__(self, work_id, exception=None, result=None): - self.work_id = work_id - self.exception = exception - self.result = result - -class _CallItem(object): - def __init__(self, work_id, fn, args, kwargs): - self.work_id = work_id - self.fn = fn - self.args = args - self.kwargs = kwargs - - -class _SafeQueue(Queue): - """Safe Queue set exception to the future object linked to a job""" - def __init__(self, max_size=0, *, ctx, pending_work_items, shutdown_lock, - thread_wakeup): - self.pending_work_items = pending_work_items - self.shutdown_lock = shutdown_lock - self.thread_wakeup = thread_wakeup - super().__init__(max_size, ctx=ctx) - - def _on_queue_feeder_error(self, e, obj): - if isinstance(obj, _CallItem): - tb = traceback.format_exception(type(e), e, e.__traceback__) - e.__cause__ = 
_RemoteTraceback('\n"""\n{}"""'.format(''.join(tb))) - work_item = self.pending_work_items.pop(obj.work_id, None) - with self.shutdown_lock: - self.thread_wakeup.wakeup() - # work_item can be None if another process terminated. In this - # case, the executor_manager_thread fails all work_items - # with BrokenProcessPool - if work_item is not None: - work_item.future.set_exception(e) - else: - super()._on_queue_feeder_error(e, obj) - - -def _get_chunks(*iterables, chunksize): - """ Iterates over zip()ed iterables in chunks. """ - it = zip(*iterables) - while True: - chunk = tuple(itertools.islice(it, chunksize)) - if not chunk: - return - yield chunk - - -def _process_chunk(fn, chunk): - """ Processes a chunk of an iterable passed to map. - - Runs the function passed to map() on a chunk of the - iterable passed to map. - - This function is run in a separate process. - - """ - return [fn(*args) for args in chunk] - - -def _sendback_result(result_queue, work_id, result=None, exception=None): - """Safely send back the given result or exception""" - try: - result_queue.put(_ResultItem(work_id, result=result, - exception=exception)) - except BaseException as e: - exc = _ExceptionWithTraceback(e, e.__traceback__) - result_queue.put(_ResultItem(work_id, exception=exc)) - - -def _process_worker(call_queue, result_queue, initializer, initargs): - """Evaluates calls from call_queue and places the results in result_queue. - - This worker is run in a separate process. - - Args: - call_queue: A ctx.Queue of _CallItems that will be read and - evaluated by the worker. - result_queue: A ctx.Queue of _ResultItems that will written - to by the worker. 
- initializer: A callable initializer, or None - initargs: A tuple of args for the initializer - """ - if initializer is not None: - try: - initializer(*initargs) - except BaseException: - _base.LOGGER.critical('Exception in initializer:', exc_info=True) - # The parent will notice that the process stopped and - # mark the pool broken - return - while True: - call_item = call_queue.get(block=True) - if call_item is None: - # Wake up queue management thread - result_queue.put(os.getpid()) - return - try: - r = call_item.fn(*call_item.args, **call_item.kwargs) - except BaseException as e: - exc = _ExceptionWithTraceback(e, e.__traceback__) - _sendback_result(result_queue, call_item.work_id, exception=exc) - else: - _sendback_result(result_queue, call_item.work_id, result=r) - del r - - # Liberate the resource as soon as possible, to avoid holding onto - # open files or shared memory that is not needed anymore - del call_item - - -class _ExecutorManagerThread(threading.Thread): - """Manages the communication between this process and the worker processes. - - The manager is run in a local thread. - - Args: - executor: A reference to the ProcessPoolExecutor that owns - this thread. A weakref will be own by the manager as well as - references to internal objects used to introspect the state of - the executor. - """ - - def __init__(self, executor): - # Store references to necessary internals of the executor. - - # A _ThreadWakeup to allow waking up the queue_manager_thread from the - # main Thread and avoid deadlocks caused by permanently locked queues. - self.thread_wakeup = executor._executor_manager_thread_wakeup - self.shutdown_lock = executor._shutdown_lock - - # A weakref.ref to the ProcessPoolExecutor that owns this thread. Used - # to determine if the ProcessPoolExecutor has been garbage collected - # and that the manager can exit. 
- # When the executor gets garbage collected, the weakref callback - # will wake up the queue management thread so that it can terminate - # if there is no pending work item. - def weakref_cb(_, - thread_wakeup=self.thread_wakeup, - shutdown_lock=self.shutdown_lock): - mp.util.debug('Executor collected: triggering callback for' - ' QueueManager wakeup') - with shutdown_lock: - thread_wakeup.wakeup() - - self.executor_reference = weakref.ref(executor, weakref_cb) - - # A list of the ctx.Process instances used as workers. - self.processes = executor._processes - - # A ctx.Queue that will be filled with _CallItems derived from - # _WorkItems for processing by the process workers. - self.call_queue = executor._call_queue - - # A ctx.SimpleQueue of _ResultItems generated by the process workers. - self.result_queue = executor._result_queue - - # A queue.Queue of work ids e.g. Queue([5, 6, ...]). - self.work_ids_queue = executor._work_ids - - # A dict mapping work ids to _WorkItems e.g. - # {5: <_WorkItem...>, 6: <_WorkItem...>, ...} - self.pending_work_items = executor._pending_work_items - - super().__init__() - - def run(self): - # Main loop for the executor manager thread. - - while True: - self.add_call_item_to_queue() - - result_item, is_broken, cause = self.wait_result_broken_or_wakeup() - - if is_broken: - self.terminate_broken(cause) - return - if result_item is not None: - self.process_result_item(result_item) - # Delete reference to result_item to avoid keeping references - # while waiting on new results. - del result_item - - # attempt to increment idle process count - executor = self.executor_reference() - if executor is not None: - executor._idle_worker_semaphore.release() - del executor - - if self.is_shutting_down(): - self.flag_executor_shutting_down() - - # Since no new work items can be added, it is safe to shutdown - # this thread if there are no pending work items. 
- if not self.pending_work_items: - self.join_executor_internals() - return - - def add_call_item_to_queue(self): - # Fills call_queue with _WorkItems from pending_work_items. - # This function never blocks. - while True: - if self.call_queue.full(): - return - try: - work_id = self.work_ids_queue.get(block=False) - except queue.Empty: - return - else: - work_item = self.pending_work_items[work_id] - - if work_item.future.set_running_or_notify_cancel(): - self.call_queue.put(_CallItem(work_id, - work_item.fn, - work_item.args, - work_item.kwargs), - block=True) - else: - del self.pending_work_items[work_id] - continue - - def wait_result_broken_or_wakeup(self): - # Wait for a result to be ready in the result_queue while checking - # that all worker processes are still running, or for a wake up - # signal send. The wake up signals come either from new tasks being - # submitted, from the executor being shutdown/gc-ed, or from the - # shutdown of the python interpreter. - result_reader = self.result_queue._reader - assert not self.thread_wakeup._closed - wakeup_reader = self.thread_wakeup._reader - readers = [result_reader, wakeup_reader] - worker_sentinels = [p.sentinel for p in list(self.processes.values())] - ready = mp.connection.wait(readers + worker_sentinels) - - cause = None - is_broken = True - result_item = None - if result_reader in ready: - try: - result_item = result_reader.recv() - is_broken = False - except BaseException as e: - cause = traceback.format_exception(type(e), e, e.__traceback__) - - elif wakeup_reader in ready: - is_broken = False - - with self.shutdown_lock: - self.thread_wakeup.clear() - - return result_item, is_broken, cause - - def process_result_item(self, result_item): - # Process the received a result_item. 
This can be either the PID of a - # worker that exited gracefully or a _ResultItem - - if isinstance(result_item, int): - # Clean shutdown of a worker using its PID - # (avoids marking the executor broken) - assert self.is_shutting_down() - p = self.processes.pop(result_item) - p.join() - if not self.processes: - self.join_executor_internals() - return - else: - # Received a _ResultItem so mark the future as completed. - work_item = self.pending_work_items.pop(result_item.work_id, None) - # work_item can be None if another process terminated (see above) - if work_item is not None: - if result_item.exception: - work_item.future.set_exception(result_item.exception) - else: - work_item.future.set_result(result_item.result) - - def is_shutting_down(self): - # Check whether we should start shutting down the executor. - executor = self.executor_reference() - # No more work items can be added if: - # - The interpreter is shutting down OR - # - The executor that owns this worker has been collected OR - # - The executor that owns this worker has been shutdown. - return (_global_shutdown or executor is None - or executor._shutdown_thread) - - def terminate_broken(self, cause): - # Terminate the executor because it is in a broken state. The cause - # argument can be used to display more information on the error that - # lead the executor into becoming broken. - - # Mark the process pool broken so that submits fail right now. 
- executor = self.executor_reference() - if executor is not None: - executor._broken = ('A child process terminated ' - 'abruptly, the process pool is not ' - 'usable anymore') - executor._shutdown_thread = True - executor = None - - # All pending tasks are to be marked failed with the following - # BrokenProcessPool error - bpe = BrokenProcessPool("A process in the process pool was " - "terminated abruptly while the future was " - "running or pending.") - if cause is not None: - bpe.__cause__ = _RemoteTraceback( - f"\n'''\n{''.join(cause)}'''") - - # Mark pending tasks as failed. - for work_id, work_item in self.pending_work_items.items(): - work_item.future.set_exception(bpe) - # Delete references to object. See issue16284 - del work_item - self.pending_work_items.clear() - - # Terminate remaining workers forcibly: the queues or their - # locks may be in a dirty state and block forever. - for p in self.processes.values(): - p.terminate() - - # clean up resources - self.join_executor_internals() - - def flag_executor_shutting_down(self): - # Flag the executor as shutting down and cancel remaining tasks if - # requested as early as possible if it is not gc-ed yet. - executor = self.executor_reference() - if executor is not None: - executor._shutdown_thread = True - # Cancel pending work items if requested. - if executor._cancel_pending_futures: - # Cancel all pending futures and update pending_work_items - # to only have futures that are currently running. - new_pending_work_items = {} - for work_id, work_item in self.pending_work_items.items(): - if not work_item.future.cancel(): - new_pending_work_items[work_id] = work_item - self.pending_work_items = new_pending_work_items - # Drain work_ids_queue since we no longer need to - # add items to the call queue. - while True: - try: - self.work_ids_queue.get_nowait() - except queue.Empty: - break - # Make sure we do this only once to not waste time looping - # on running processes over and over. 
- executor._cancel_pending_futures = False - - def shutdown_workers(self): - n_children_to_stop = self.get_n_children_alive() - n_sentinels_sent = 0 - # Send the right number of sentinels, to make sure all children are - # properly terminated. - while (n_sentinels_sent < n_children_to_stop - and self.get_n_children_alive() > 0): - for i in range(n_children_to_stop - n_sentinels_sent): - try: - self.call_queue.put_nowait(None) - n_sentinels_sent += 1 - except queue.Full: - break - - def join_executor_internals(self): - self.shutdown_workers() - # Release the queue's resources as soon as possible. - self.call_queue.close() - self.call_queue.join_thread() - with self.shutdown_lock: - self.thread_wakeup.close() - # If .join() is not called on the created processes then - # some ctx.Queue methods may deadlock on Mac OS X. - for p in self.processes.values(): - p.join() - - def get_n_children_alive(self): - # This is an upper bound on the number of children alive. - return sum(p.is_alive() for p in self.processes.values()) - - -_system_limits_checked = False -_system_limited = None - - -def _check_system_limits(): - global _system_limits_checked, _system_limited - if _system_limits_checked: - if _system_limited: - raise NotImplementedError(_system_limited) - _system_limits_checked = True - try: - import multiprocessing.synchronize - except ImportError: - _system_limited = ( - "This Python build lacks multiprocessing.synchronize, usually due " - "to named semaphores being unavailable on this platform." 
- ) - raise NotImplementedError(_system_limited) - try: - nsems_max = os.sysconf("SC_SEM_NSEMS_MAX") - except (AttributeError, ValueError): - # sysconf not available or setting not available - return - if nsems_max == -1: - # indetermined limit, assume that limit is determined - # by available memory only - return - if nsems_max >= 256: - # minimum number of semaphores available - # according to POSIX - return - _system_limited = ("system provides too few semaphores (%d" - " available, 256 necessary)" % nsems_max) - raise NotImplementedError(_system_limited) - - -def _chain_from_iterable_of_lists(iterable): - """ - Specialized implementation of itertools.chain.from_iterable. - Each item in *iterable* should be a list. This function is - careful not to keep references to yielded objects. - """ - for element in iterable: - element.reverse() - while element: - yield element.pop() - - -class BrokenProcessPool(_base.BrokenExecutor): - """ - Raised when a process in a ProcessPoolExecutor terminated abruptly - while a future was in the running state. - """ - - -class ProcessPoolExecutor(_base.Executor): - def __init__(self, max_workers=None, mp_context=None, - initializer=None, initargs=()): - """Initializes a new ProcessPoolExecutor instance. - - Args: - max_workers: The maximum number of processes that can be used to - execute the given calls. If None or not given then as many - worker processes will be created as the machine has processors. - mp_context: A multiprocessing context to launch the workers. This - object should provide SimpleQueue, Queue and Process. - initializer: A callable used to initialize worker processes. - initargs: A tuple of arguments to pass to the initializer. 
- """ - _check_system_limits() - - if max_workers is None: - self._max_workers = os.cpu_count() or 1 - if sys.platform == 'win32': - self._max_workers = min(_MAX_WINDOWS_WORKERS, - self._max_workers) - else: - if max_workers <= 0: - raise ValueError("max_workers must be greater than 0") - elif (sys.platform == 'win32' and - max_workers > _MAX_WINDOWS_WORKERS): - raise ValueError( - f"max_workers must be <= {_MAX_WINDOWS_WORKERS}") - - self._max_workers = max_workers - - if mp_context is None: - mp_context = mp.get_context() - self._mp_context = mp_context - - # https://github.com/python/cpython/issues/90622 - self._safe_to_dynamically_spawn_children = ( - self._mp_context.get_start_method(allow_none=False) != "fork") - - if initializer is not None and not callable(initializer): - raise TypeError("initializer must be a callable") - self._initializer = initializer - self._initargs = initargs - - # Management thread - self._executor_manager_thread = None - - # Map of pids to processes - self._processes = {} - - # Shutdown is a two-step process. - self._shutdown_thread = False - self._shutdown_lock = threading.Lock() - self._idle_worker_semaphore = threading.Semaphore(0) - self._broken = False - self._queue_count = 0 - self._pending_work_items = {} - self._cancel_pending_futures = False - - # _ThreadWakeup is a communication channel used to interrupt the wait - # of the main loop of executor_manager_thread from another thread (e.g. - # when calling executor.submit or executor.shutdown). We do not use the - # _result_queue to send wakeup signals to the executor_manager_thread - # as it could result in a deadlock if a worker process dies with the - # _result_queue write lock still acquired. - # - # _shutdown_lock must be locked to access _ThreadWakeup. 
- self._executor_manager_thread_wakeup = _ThreadWakeup() - - # Create communication channels for the executor - # Make the call queue slightly larger than the number of processes to - # prevent the worker processes from idling. But don't make it too big - # because futures in the call queue cannot be cancelled. - queue_size = self._max_workers + EXTRA_QUEUED_CALLS - self._call_queue = _SafeQueue( - max_size=queue_size, ctx=self._mp_context, - pending_work_items=self._pending_work_items, - shutdown_lock=self._shutdown_lock, - thread_wakeup=self._executor_manager_thread_wakeup) - # Killed worker processes can produce spurious "broken pipe" - # tracebacks in the queue's own worker thread. But we detect killed - # processes anyway, so silence the tracebacks. - self._call_queue._ignore_epipe = True - self._result_queue = mp_context.SimpleQueue() - self._work_ids = queue.Queue() - - def _start_executor_manager_thread(self): - if self._executor_manager_thread is None: - # Start the processes so that their sentinels are known. - if not self._safe_to_dynamically_spawn_children: # ie, using fork. - self._launch_processes() - self._executor_manager_thread = _ExecutorManagerThread(self) - self._executor_manager_thread.start() - _threads_wakeups[self._executor_manager_thread] = \ - self._executor_manager_thread_wakeup - - def _adjust_process_count(self): - # if there's an idle process, we don't need to spawn a new one. - if self._idle_worker_semaphore.acquire(blocking=False): - return - - process_count = len(self._processes) - if process_count < self._max_workers: - # Assertion disabled as this codepath is also used to replace a - # worker that unexpectedly dies, even when using the 'fork' start - # method. That means there is still a potential deadlock bug. If a - # 'fork' mp_context worker dies, we'll be forking a new one when - # we know a thread is running (self._executor_manager_thread). 
- #assert self._safe_to_dynamically_spawn_children or not self._executor_manager_thread, 'https://github.com/python/cpython/issues/90622' - self._spawn_process() - - def _launch_processes(self): - # https://github.com/python/cpython/issues/90622 - assert not self._executor_manager_thread, ( - 'Processes cannot be fork()ed after the thread has started, ' - 'deadlock in the child processes could result.') - for _ in range(len(self._processes), self._max_workers): - self._spawn_process() - - def _spawn_process(self): - p = self._mp_context.Process( - target=_process_worker, - args=(self._call_queue, - self._result_queue, - self._initializer, - self._initargs)) - p.start() - self._processes[p.pid] = p - - def submit(self, fn, /, *args, **kwargs): - with self._shutdown_lock: - if self._broken: - raise BrokenProcessPool(self._broken) - if self._shutdown_thread: - raise RuntimeError('cannot schedule new futures after shutdown') - if _global_shutdown: - raise RuntimeError('cannot schedule new futures after ' - 'interpreter shutdown') - - f = _base.Future() - w = _WorkItem(f, fn, args, kwargs) - - self._pending_work_items[self._queue_count] = w - self._work_ids.put(self._queue_count) - self._queue_count += 1 - # Wake up queue management thread - self._executor_manager_thread_wakeup.wakeup() - - if self._safe_to_dynamically_spawn_children: - self._adjust_process_count() - self._start_executor_manager_thread() - return f - submit.__doc__ = _base.Executor.submit.__doc__ - - def map(self, fn, *iterables, timeout=None, chunksize=1): - """Returns an iterator equivalent to map(fn, iter). - - Args: - fn: A callable that will take as many arguments as there are - passed iterables. - timeout: The maximum number of seconds to wait. If None, then there - is no limit on the wait time. - chunksize: If greater than one, the iterables will be chopped into - chunks of size chunksize and submitted to the process pool. - If set to one, the items in the list will be sent one at a time. 
- - Returns: - An iterator equivalent to: map(func, *iterables) but the calls may - be evaluated out-of-order. - - Raises: - TimeoutError: If the entire result iterator could not be generated - before the given timeout. - Exception: If fn(*args) raises for any values. - """ - if chunksize < 1: - raise ValueError("chunksize must be >= 1.") - - results = super().map(partial(_process_chunk, fn), - _get_chunks(*iterables, chunksize=chunksize), - timeout=timeout) - return _chain_from_iterable_of_lists(results) - - def shutdown(self, wait=True, *, cancel_futures=False): - with self._shutdown_lock: - self._cancel_pending_futures = cancel_futures - self._shutdown_thread = True - if self._executor_manager_thread_wakeup is not None: - # Wake up queue management thread - self._executor_manager_thread_wakeup.wakeup() - - if self._executor_manager_thread is not None and wait: - self._executor_manager_thread.join() - # To reduce the risk of opening too many files, remove references to - # objects that use file descriptors. - self._executor_manager_thread = None - self._call_queue = None - if self._result_queue is not None and wait: - self._result_queue.close() - self._result_queue = None - self._processes = None - self._executor_manager_thread_wakeup = None - - shutdown.__doc__ = _base.Executor.shutdown.__doc__ diff --git a/Lib/concurrent/futures/thread.py b/Lib/concurrent/futures/thread.py deleted file mode 100644 index 493861d314d..00000000000 --- a/Lib/concurrent/futures/thread.py +++ /dev/null @@ -1,237 +0,0 @@ -# Copyright 2009 Brian Quinlan. All Rights Reserved. -# Licensed to PSF under a Contributor Agreement. 
- -"""Implements ThreadPoolExecutor.""" - -__author__ = 'Brian Quinlan (brian@sweetapp.com)' - -from concurrent.futures import _base -import itertools -import queue -import threading -import types -import weakref -import os - - -_threads_queues = weakref.WeakKeyDictionary() -_shutdown = False -# Lock that ensures that new workers are not created while the interpreter is -# shutting down. Must be held while mutating _threads_queues and _shutdown. -_global_shutdown_lock = threading.Lock() - -def _python_exit(): - global _shutdown - with _global_shutdown_lock: - _shutdown = True - items = list(_threads_queues.items()) - for t, q in items: - q.put(None) - for t, q in items: - t.join() - -# Register for `_python_exit()` to be called just before joining all -# non-daemon threads. This is used instead of `atexit.register()` for -# compatibility with subinterpreters, which no longer support daemon threads. -# See bpo-39812 for context. -threading._register_atexit(_python_exit) - -# At fork, reinitialize the `_global_shutdown_lock` lock in the child process -# TODO RUSTPYTHON - _at_fork_reinit is not implemented yet -if hasattr(os, 'register_at_fork') and hasattr(_global_shutdown_lock, '_at_fork_reinit'): - os.register_at_fork(before=_global_shutdown_lock.acquire, - after_in_child=_global_shutdown_lock._at_fork_reinit, - after_in_parent=_global_shutdown_lock.release) - - -class _WorkItem(object): - def __init__(self, future, fn, args, kwargs): - self.future = future - self.fn = fn - self.args = args - self.kwargs = kwargs - - def run(self): - if not self.future.set_running_or_notify_cancel(): - return - - try: - result = self.fn(*self.args, **self.kwargs) - except BaseException as exc: - self.future.set_exception(exc) - # Break a reference cycle with the exception 'exc' - self = None - else: - self.future.set_result(result) - - __class_getitem__ = classmethod(types.GenericAlias) - - -def _worker(executor_reference, work_queue, initializer, initargs): - if initializer is not 
None: - try: - initializer(*initargs) - except BaseException: - _base.LOGGER.critical('Exception in initializer:', exc_info=True) - executor = executor_reference() - if executor is not None: - executor._initializer_failed() - return - try: - while True: - work_item = work_queue.get(block=True) - if work_item is not None: - work_item.run() - # Delete references to object. See issue16284 - del work_item - - # attempt to increment idle count - executor = executor_reference() - if executor is not None: - executor._idle_semaphore.release() - del executor - continue - - executor = executor_reference() - # Exit if: - # - The interpreter is shutting down OR - # - The executor that owns the worker has been collected OR - # - The executor that owns the worker has been shutdown. - if _shutdown or executor is None or executor._shutdown: - # Flag the executor as shutting down as early as possible if it - # is not gc-ed yet. - if executor is not None: - executor._shutdown = True - # Notice other workers - work_queue.put(None) - return - del executor - except BaseException: - _base.LOGGER.critical('Exception in worker', exc_info=True) - - -class BrokenThreadPool(_base.BrokenExecutor): - """ - Raised when a worker thread in a ThreadPoolExecutor failed initializing. - """ - - -class ThreadPoolExecutor(_base.Executor): - - # Used to assign unique thread names when thread_name_prefix is not supplied. - _counter = itertools.count().__next__ - - def __init__(self, max_workers=None, thread_name_prefix='', - initializer=None, initargs=()): - """Initializes a new ThreadPoolExecutor instance. - - Args: - max_workers: The maximum number of threads that can be used to - execute the given calls. - thread_name_prefix: An optional name prefix to give our threads. - initializer: A callable used to initialize worker threads. - initargs: A tuple of arguments to pass to the initializer. 
- """ - if max_workers is None: - # ThreadPoolExecutor is often used to: - # * CPU bound task which releases GIL - # * I/O bound task (which releases GIL, of course) - # - # We use cpu_count + 4 for both types of tasks. - # But we limit it to 32 to avoid consuming surprisingly large resource - # on many core machine. - max_workers = min(32, (os.cpu_count() or 1) + 4) - if max_workers <= 0: - raise ValueError("max_workers must be greater than 0") - - if initializer is not None and not callable(initializer): - raise TypeError("initializer must be a callable") - - self._max_workers = max_workers - self._work_queue = queue.SimpleQueue() - self._idle_semaphore = threading.Semaphore(0) - self._threads = set() - self._broken = False - self._shutdown = False - self._shutdown_lock = threading.Lock() - self._thread_name_prefix = (thread_name_prefix or - ("ThreadPoolExecutor-%d" % self._counter())) - self._initializer = initializer - self._initargs = initargs - - def submit(self, fn, /, *args, **kwargs): - with self._shutdown_lock, _global_shutdown_lock: - if self._broken: - raise BrokenThreadPool(self._broken) - - if self._shutdown: - raise RuntimeError('cannot schedule new futures after shutdown') - if _shutdown: - raise RuntimeError('cannot schedule new futures after ' - 'interpreter shutdown') - - f = _base.Future() - w = _WorkItem(f, fn, args, kwargs) - - self._work_queue.put(w) - self._adjust_thread_count() - return f - submit.__doc__ = _base.Executor.submit.__doc__ - - def _adjust_thread_count(self): - # if idle threads are available, don't spin new threads - if self._idle_semaphore.acquire(timeout=0): - return - - # When the executor gets lost, the weakref callback will wake up - # the worker threads. 
- def weakref_cb(_, q=self._work_queue): - q.put(None) - - num_threads = len(self._threads) - if num_threads < self._max_workers: - thread_name = '%s_%d' % (self._thread_name_prefix or self, - num_threads) - t = threading.Thread(name=thread_name, target=_worker, - args=(weakref.ref(self, weakref_cb), - self._work_queue, - self._initializer, - self._initargs)) - t.start() - self._threads.add(t) - _threads_queues[t] = self._work_queue - - def _initializer_failed(self): - with self._shutdown_lock: - self._broken = ('A thread initializer failed, the thread pool ' - 'is not usable anymore') - # Drain work queue and mark pending futures failed - while True: - try: - work_item = self._work_queue.get_nowait() - except queue.Empty: - break - if work_item is not None: - work_item.future.set_exception(BrokenThreadPool(self._broken)) - - def shutdown(self, wait=True, *, cancel_futures=False): - with self._shutdown_lock: - self._shutdown = True - if cancel_futures: - # Drain all work items from the queue, and then cancel their - # associated futures. - while True: - try: - work_item = self._work_queue.get_nowait() - except queue.Empty: - break - if work_item is not None: - work_item.future.cancel() - - # Send a wake-up to prevent threads calling - # _work_queue.get(block=True) from permanently blocking. - self._work_queue.put(None) - if wait: - for t in self._threads: - t.join() - shutdown.__doc__ = _base.Executor.shutdown.__doc__ diff --git a/Lib/configparser.py b/Lib/configparser.py deleted file mode 100644 index df2d7e335d9..00000000000 --- a/Lib/configparser.py +++ /dev/null @@ -1,1382 +0,0 @@ -"""Configuration file parser. - -A configuration file consists of sections, lead by a "[section]" header, -and followed by "name: value" entries, with continuations and such in -the style of RFC 822. - -Intrinsic defaults can be specified by passing them into the -ConfigParser constructor as a dictionary. 
- -class: - -ConfigParser -- responsible for parsing a list of - configuration files, and managing the parsed database. - - methods: - - __init__(defaults=None, dict_type=_default_dict, allow_no_value=False, - delimiters=('=', ':'), comment_prefixes=('#', ';'), - inline_comment_prefixes=None, strict=True, - empty_lines_in_values=True, default_section='DEFAULT', - interpolation=, converters=): - - Create the parser. When `defaults` is given, it is initialized into the - dictionary or intrinsic defaults. The keys must be strings, the values - must be appropriate for %()s string interpolation. - - When `dict_type` is given, it will be used to create the dictionary - objects for the list of sections, for the options within a section, and - for the default values. - - When `delimiters` is given, it will be used as the set of substrings - that divide keys from values. - - When `comment_prefixes` is given, it will be used as the set of - substrings that prefix comments in empty lines. Comments can be - indented. - - When `inline_comment_prefixes` is given, it will be used as the set of - substrings that prefix comments in non-empty lines. - - When `strict` is True, the parser won't allow for any section or option - duplicates while reading from a single source (file, string or - dictionary). Default is True. - - When `empty_lines_in_values` is False (default: True), each empty line - marks the end of an option. Otherwise, internal empty lines of - a multiline option are kept as part of the value. - - When `allow_no_value` is True (default: False), options without - values are accepted; the value presented for these is None. - - When `default_section` is given, the name of the special section is - named accordingly. By default it is called ``"DEFAULT"`` but this can - be customized to point to any other valid section name. Its current - value can be retrieved using the ``parser_instance.default_section`` - attribute and may be modified at runtime. 
- - When `interpolation` is given, it should be an Interpolation subclass - instance. It will be used as the handler for option value - pre-processing when using getters. RawConfigParser objects don't do - any sort of interpolation, whereas ConfigParser uses an instance of - BasicInterpolation. The library also provides a ``zc.buildbot`` - inspired ExtendedInterpolation implementation. - - When `converters` is given, it should be a dictionary where each key - represents the name of a type converter and each value is a callable - implementing the conversion from string to the desired datatype. Every - converter gets its corresponding get*() method on the parser object and - section proxies. - - sections() - Return all the configuration section names, sans DEFAULT. - - has_section(section) - Return whether the given section exists. - - has_option(section, option) - Return whether the given option exists in the given section. - - options(section) - Return list of configuration options for the named section. - - read(filenames, encoding=None) - Read and parse the iterable of named configuration files, given by - name. A single filename is also allowed. Non-existing files - are ignored. Return list of successfully read files. - - read_file(f, filename=None) - Read and parse one configuration file, given as a file object. - The filename defaults to f.name; it is only used in error - messages (if f has no `name` attribute, the string `` is used). - - read_string(string) - Read configuration from a given string. - - read_dict(dictionary) - Read configuration from a dictionary. Keys are section names, - values are dictionaries with keys and values that should be present - in the section. If the used dictionary type preserves order, sections - and their keys will be added in order. Values are automatically - converted to strings. - - get(section, option, raw=False, vars=None, fallback=_UNSET) - Return a string value for the named option. 
All % interpolations are - expanded in the return values, based on the defaults passed into the - constructor and the DEFAULT section. Additional substitutions may be - provided using the `vars` argument, which must be a dictionary whose - contents override any pre-existing defaults. If `option` is a key in - `vars`, the value from `vars` is used. - - getint(section, options, raw=False, vars=None, fallback=_UNSET) - Like get(), but convert value to an integer. - - getfloat(section, options, raw=False, vars=None, fallback=_UNSET) - Like get(), but convert value to a float. - - getboolean(section, options, raw=False, vars=None, fallback=_UNSET) - Like get(), but convert value to a boolean (currently case - insensitively defined as 0, false, no, off for False, and 1, true, - yes, on for True). Returns False or True. - - items(section=_UNSET, raw=False, vars=None) - If section is given, return a list of tuples with (name, value) for - each option in the section. Otherwise, return a list of tuples with - (section_name, section_proxy) for each section, including DEFAULTSECT. - - remove_section(section) - Remove the given file section and all its options. - - remove_option(section, option) - Remove the given option from the given section. - - set(section, option, value) - Set the given option. - - write(fp, space_around_delimiters=True) - Write the configuration state in .ini format. If - `space_around_delimiters` is True (the default), delimiters - between keys and values are surrounded by spaces. 
-""" - -from collections.abc import MutableMapping -from collections import ChainMap as _ChainMap -import functools -import io -import itertools -import os -import re -import sys -import warnings - -__all__ = ["NoSectionError", "DuplicateOptionError", "DuplicateSectionError", - "NoOptionError", "InterpolationError", "InterpolationDepthError", - "InterpolationMissingOptionError", "InterpolationSyntaxError", - "ParsingError", "MissingSectionHeaderError", - "ConfigParser", "SafeConfigParser", "RawConfigParser", - "Interpolation", "BasicInterpolation", "ExtendedInterpolation", - "LegacyInterpolation", "SectionProxy", "ConverterMapping", - "DEFAULTSECT", "MAX_INTERPOLATION_DEPTH"] - -_default_dict = dict -DEFAULTSECT = "DEFAULT" - -MAX_INTERPOLATION_DEPTH = 10 - - - -# exception classes -class Error(Exception): - """Base class for ConfigParser exceptions.""" - - def __init__(self, msg=''): - self.message = msg - Exception.__init__(self, msg) - - def __repr__(self): - return self.message - - __str__ = __repr__ - - -class NoSectionError(Error): - """Raised when no section matches a requested option.""" - - def __init__(self, section): - Error.__init__(self, 'No section: %r' % (section,)) - self.section = section - self.args = (section, ) - - -class DuplicateSectionError(Error): - """Raised when a section is repeated in an input source. - - Possible repetitions that raise this exception are: multiple creation - using the API or in strict parsers when a section is found more than once - in a single input file, string or dictionary. 
- """ - - def __init__(self, section, source=None, lineno=None): - msg = [repr(section), " already exists"] - if source is not None: - message = ["While reading from ", repr(source)] - if lineno is not None: - message.append(" [line {0:2d}]".format(lineno)) - message.append(": section ") - message.extend(msg) - msg = message - else: - msg.insert(0, "Section ") - Error.__init__(self, "".join(msg)) - self.section = section - self.source = source - self.lineno = lineno - self.args = (section, source, lineno) - - -class DuplicateOptionError(Error): - """Raised by strict parsers when an option is repeated in an input source. - - Current implementation raises this exception only when an option is found - more than once in a single file, string or dictionary. - """ - - def __init__(self, section, option, source=None, lineno=None): - msg = [repr(option), " in section ", repr(section), - " already exists"] - if source is not None: - message = ["While reading from ", repr(source)] - if lineno is not None: - message.append(" [line {0:2d}]".format(lineno)) - message.append(": option ") - message.extend(msg) - msg = message - else: - msg.insert(0, "Option ") - Error.__init__(self, "".join(msg)) - self.section = section - self.option = option - self.source = source - self.lineno = lineno - self.args = (section, option, source, lineno) - - -class NoOptionError(Error): - """A requested option was not found.""" - - def __init__(self, option, section): - Error.__init__(self, "No option %r in section: %r" % - (option, section)) - self.option = option - self.section = section - self.args = (option, section) - - -class InterpolationError(Error): - """Base class for interpolation-related exceptions.""" - - def __init__(self, option, section, msg): - Error.__init__(self, msg) - self.option = option - self.section = section - self.args = (option, section, msg) - - -class InterpolationMissingOptionError(InterpolationError): - """A string substitution required a setting which was not 
available.""" - - def __init__(self, option, section, rawval, reference): - msg = ("Bad value substitution: option {!r} in section {!r} contains " - "an interpolation key {!r} which is not a valid option name. " - "Raw value: {!r}".format(option, section, reference, rawval)) - InterpolationError.__init__(self, option, section, msg) - self.reference = reference - self.args = (option, section, rawval, reference) - - -class InterpolationSyntaxError(InterpolationError): - """Raised when the source text contains invalid syntax. - - Current implementation raises this exception when the source text into - which substitutions are made does not conform to the required syntax. - """ - - -class InterpolationDepthError(InterpolationError): - """Raised when substitutions are nested too deeply.""" - - def __init__(self, option, section, rawval): - msg = ("Recursion limit exceeded in value substitution: option {!r} " - "in section {!r} contains an interpolation key which " - "cannot be substituted in {} steps. Raw value: {!r}" - "".format(option, section, MAX_INTERPOLATION_DEPTH, - rawval)) - InterpolationError.__init__(self, option, section, msg) - self.args = (option, section, rawval) - - -class ParsingError(Error): - """Raised when a configuration file does not follow legal syntax.""" - - def __init__(self, source=None, filename=None): - # Exactly one of `source'/`filename' arguments has to be given. - # `filename' kept for compatibility. - if filename and source: - raise ValueError("Cannot specify both `filename' and `source'. " - "Use `source'.") - elif not filename and not source: - raise ValueError("Required argument `source' not given.") - elif filename: - source = filename - Error.__init__(self, 'Source contains parsing errors: %r' % source) - self.source = source - self.errors = [] - self.args = (source, ) - - @property - def filename(self): - """Deprecated, use `source'.""" - warnings.warn( - "The 'filename' attribute will be removed in Python 3.12. 
" - "Use 'source' instead.", - DeprecationWarning, stacklevel=2 - ) - return self.source - - @filename.setter - def filename(self, value): - """Deprecated, user `source'.""" - warnings.warn( - "The 'filename' attribute will be removed in Python 3.12. " - "Use 'source' instead.", - DeprecationWarning, stacklevel=2 - ) - self.source = value - - def append(self, lineno, line): - self.errors.append((lineno, line)) - self.message += '\n\t[line %2d]: %s' % (lineno, line) - - -class MissingSectionHeaderError(ParsingError): - """Raised when a key-value pair is found before any section header.""" - - def __init__(self, filename, lineno, line): - Error.__init__( - self, - 'File contains no section headers.\nfile: %r, line: %d\n%r' % - (filename, lineno, line)) - self.source = filename - self.lineno = lineno - self.line = line - self.args = (filename, lineno, line) - - -# Used in parser getters to indicate the default behaviour when a specific -# option is not found it to raise an exception. Created to enable `None` as -# a valid fallback value. -_UNSET = object() - - -class Interpolation: - """Dummy interpolation that passes the value through with no changes.""" - - def before_get(self, parser, section, option, value, defaults): - return value - - def before_set(self, parser, section, option, value): - return value - - def before_read(self, parser, section, option, value): - return value - - def before_write(self, parser, section, option, value): - return value - - -class BasicInterpolation(Interpolation): - """Interpolation as implemented in the classic ConfigParser. - - The option values can contain format strings which refer to other values in - the same section, or values in the special default section. - - For example: - - something: %(dir)s/whatever - - would resolve the "%(dir)s" to the value of dir. All reference - expansions are done late, on demand. If a user needs to use a bare % in - a configuration file, she can escape it by writing %%. 
Other % usage - is considered a user error and raises `InterpolationSyntaxError`.""" - - _KEYCRE = re.compile(r"%\(([^)]+)\)s") - - def before_get(self, parser, section, option, value, defaults): - L = [] - self._interpolate_some(parser, option, L, value, section, defaults, 1) - return ''.join(L) - - def before_set(self, parser, section, option, value): - tmp_value = value.replace('%%', '') # escaped percent signs - tmp_value = self._KEYCRE.sub('', tmp_value) # valid syntax - if '%' in tmp_value: - raise ValueError("invalid interpolation syntax in %r at " - "position %d" % (value, tmp_value.find('%'))) - return value - - def _interpolate_some(self, parser, option, accum, rest, section, map, - depth): - rawval = parser.get(section, option, raw=True, fallback=rest) - if depth > MAX_INTERPOLATION_DEPTH: - raise InterpolationDepthError(option, section, rawval) - while rest: - p = rest.find("%") - if p < 0: - accum.append(rest) - return - if p > 0: - accum.append(rest[:p]) - rest = rest[p:] - # p is no longer used - c = rest[1:2] - if c == "%": - accum.append("%") - rest = rest[2:] - elif c == "(": - m = self._KEYCRE.match(rest) - if m is None: - raise InterpolationSyntaxError(option, section, - "bad interpolation variable reference %r" % rest) - var = parser.optionxform(m.group(1)) - rest = rest[m.end():] - try: - v = map[var] - except KeyError: - raise InterpolationMissingOptionError( - option, section, rawval, var) from None - if "%" in v: - self._interpolate_some(parser, option, accum, v, - section, map, depth + 1) - else: - accum.append(v) - else: - raise InterpolationSyntaxError( - option, section, - "'%%' must be followed by '%%' or '(', " - "found: %r" % (rest,)) - - -class ExtendedInterpolation(Interpolation): - """Advanced variant of interpolation, supports the syntax used by - `zc.buildout`. 
Enables interpolation between sections.""" - - _KEYCRE = re.compile(r"\$\{([^}]+)\}") - - def before_get(self, parser, section, option, value, defaults): - L = [] - self._interpolate_some(parser, option, L, value, section, defaults, 1) - return ''.join(L) - - def before_set(self, parser, section, option, value): - tmp_value = value.replace('$$', '') # escaped dollar signs - tmp_value = self._KEYCRE.sub('', tmp_value) # valid syntax - if '$' in tmp_value: - raise ValueError("invalid interpolation syntax in %r at " - "position %d" % (value, tmp_value.find('$'))) - return value - - def _interpolate_some(self, parser, option, accum, rest, section, map, - depth): - rawval = parser.get(section, option, raw=True, fallback=rest) - if depth > MAX_INTERPOLATION_DEPTH: - raise InterpolationDepthError(option, section, rawval) - while rest: - p = rest.find("$") - if p < 0: - accum.append(rest) - return - if p > 0: - accum.append(rest[:p]) - rest = rest[p:] - # p is no longer used - c = rest[1:2] - if c == "$": - accum.append("$") - rest = rest[2:] - elif c == "{": - m = self._KEYCRE.match(rest) - if m is None: - raise InterpolationSyntaxError(option, section, - "bad interpolation variable reference %r" % rest) - path = m.group(1).split(':') - rest = rest[m.end():] - sect = section - opt = option - try: - if len(path) == 1: - opt = parser.optionxform(path[0]) - v = map[opt] - elif len(path) == 2: - sect = path[0] - opt = parser.optionxform(path[1]) - v = parser.get(sect, opt, raw=True) - else: - raise InterpolationSyntaxError( - option, section, - "More than one ':' found: %r" % (rest,)) - except (KeyError, NoSectionError, NoOptionError): - raise InterpolationMissingOptionError( - option, section, rawval, ":".join(path)) from None - if "$" in v: - self._interpolate_some(parser, opt, accum, v, sect, - dict(parser.items(sect, raw=True)), - depth + 1) - else: - accum.append(v) - else: - raise InterpolationSyntaxError( - option, section, - "'$' must be followed by '$' or '{', " - 
"found: %r" % (rest,)) - - -class LegacyInterpolation(Interpolation): - """Deprecated interpolation used in old versions of ConfigParser. - Use BasicInterpolation or ExtendedInterpolation instead.""" - - _KEYCRE = re.compile(r"%\(([^)]*)\)s|.") - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - warnings.warn( - "LegacyInterpolation has been deprecated since Python 3.2 " - "and will be removed from the configparser module in Python 3.13. " - "Use BasicInterpolation or ExtendedInterpolation instead.", - DeprecationWarning, stacklevel=2 - ) - - def before_get(self, parser, section, option, value, vars): - rawval = value - depth = MAX_INTERPOLATION_DEPTH - while depth: # Loop through this until it's done - depth -= 1 - if value and "%(" in value: - replace = functools.partial(self._interpolation_replace, - parser=parser) - value = self._KEYCRE.sub(replace, value) - try: - value = value % vars - except KeyError as e: - raise InterpolationMissingOptionError( - option, section, rawval, e.args[0]) from None - else: - break - if value and "%(" in value: - raise InterpolationDepthError(option, section, rawval) - return value - - def before_set(self, parser, section, option, value): - return value - - @staticmethod - def _interpolation_replace(match, parser): - s = match.group(1) - if s is None: - return match.group() - else: - return "%%(%s)s" % parser.optionxform(s) - - -class RawConfigParser(MutableMapping): - """ConfigParser that does not do interpolation.""" - - # Regular expressions for parsing section headers and options - _SECT_TMPL = r""" - \[ # [ - (?P
.+) # very permissive! - \] # ] - """ - _OPT_TMPL = r""" - (?P