bpo-40495: compileall option to hardlink duplicate pyc files by frenzymadness · Pull Request #19901 · python/cpython · GitHub

bpo-40495: compileall option to hardlink duplicate pyc files #19901

Merged: 13 commits, May 14, 2020
Changes from 1 commit
Remove duplicated optimization levels
vstinner committed May 14, 2020
commit 7e92096fe6a6c675dbacff88c3e8e44596d8ce66
12 changes: 8 additions & 4 deletions Lib/compileall.py
@@ -181,9 +181,13 @@ def compile_file(fullname, ddir=None, force=False, rx=None, quiet=0,
     if isinstance(optimize, int):
         optimize = [optimize]

-    if hardlink_dupes:
-        raise ValueError("Hardlinking of duplicated bytecode makes sense "
-                         "only for more than one optimization level")
+    # Use set() to remove duplicates.
+    # Use sorted() to create pyc files in a deterministic order.
+    optimize = sorted(set(optimize))
+
+    if hardlink_dupes and len(optimize) < 2:
+        raise ValueError("Hardlinking of duplicated bytecode makes sense "
+                         "only for more than one optimization level")

     if rx is not None:
         mo = rx.search(fullname)
@@ -229,7 +233,7 @@ def compile_file(fullname, ddir=None, force=False, rx=None, quiet=0,
     if not quiet:
         print('Compiling {!r}...'.format(fullname))
     try:
-        for index, opt_level in enumerate(sorted(optimize)):
+        for index, opt_level in enumerate(optimize):
             cfile = opt_cfiles[opt_level]
             ok = py_compile.compile(fullname, cfile, dfile, True,
                                     optimize=opt_level,
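The effect of the change, in use: compile_dir() and compile_file() accept either a single optimization level or a list, together with hardlink_dupes=True; the levels are now deduplicated and sorted before compilation, and asking for hardlink deduplication with fewer than two distinct levels raises ValueError. A minimal sketch of the resulting behavior (the "myproject" directory name is made up for illustration and is assumed to contain at least one .py file):

import compileall

# Compile a tree for levels 0, 1 and 2. Duplicates in the list are removed,
# so optimize=[0, 0, 1, 2] behaves like optimize=[0, 1, 2]; pyc files that
# come out byte-for-byte identical across levels are hardlinked together.
compileall.compile_dir("myproject", quiet=True,
                       optimize=[0, 0, 1, 2], hardlink_dupes=True)

# With fewer than two distinct levels the combination is rejected.
try:
    compileall.compile_dir("myproject", quiet=True,
                           optimize=[0, 0], hardlink_dupes=True)
except ValueError as exc:
    print(exc)  # "Hardlinking of duplicated bytecode makes sense only for ..."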
23 changes: 20 additions & 3 deletions Lib/test/test_compileall.py
@@ -911,11 +911,16 @@ def compile_dir(self, *, dedup=True, optimize=(0, 1, 2), force=False):
     def test_bad_args(self):
         # Bad arguments combination, hardlink deduplication make sense
         # only for more than one optimization level
-        with self.assertRaises(ValueError):
-            with self.temporary_directory():
-                self.make_script("pass")
+        with self.temporary_directory():
+            self.make_script("pass")
+            with self.assertRaises(ValueError):
                 compileall.compile_dir(self.path, quiet=True, optimize=0,
                                        hardlink_dupes=True)
+            with self.assertRaises(ValueError):
+                # same optimization level specified twice:
+                # compile_dir() removes duplicates
+                compileall.compile_dir(self.path, quiet=True, optimize=[0, 0],
+                                       hardlink_dupes=True)

     def create_code(self, docstring=False, assertion=False):
         lines = []
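As an aside on how these tests locate the files they compare: each optimization level gets its own bytecode cache file name, which is why the suite fetches one pyc per level via the get_pyc() helper (defined elsewhere in test_compileall.py, not shown in this hunk). The naming can be reproduced with importlib.util.cache_from_source(); "mod.py" below is only an illustrative path:

import importlib.util

# Level 0 uses the plain cache name; levels 1 and 2 add an ".opt-N" suffix,
# mirroring the opt_cfiles mapping built in compileall.compile_file().
for level in (0, 1, 2):
    opt = level if level >= 1 else ''
    print(importlib.util.cache_from_source("mod.py", optimization=opt))
# e.g. __pycache__/mod.cpython-39.pyc, ...opt-1.pyc, ...opt-2.pyc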
@@ -975,6 +980,18 @@ def test_only_two_levels(self):
         pyc2 = get_pyc(script, opts[1])
         self.assertTrue(is_hardlink(pyc1, pyc2))

+    def test_duplicated_levels(self):
+        # compile_dir() must not fail if optimize contains duplicated
+        # optimization levels and/or if optimization levels are not sorted.
+        with self.temporary_directory():
+            # code with no dostring and no assertion:
+            # same bytecode for all optimization levels
+            script = self.make_script(self.create_code())
+            self.compile_dir(optimize=[1, 0, 1, 0])
+            pyc1 = get_pyc(script, 0)
+            pyc2 = get_pyc(script, 1)
+            self.assertTrue(is_hardlink(pyc1, pyc2))
+
     def test_recompilation(self):
         # Test compile_dir() when pyc files already exists and the script
         # content changed
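The is_hardlink() helper used by these tests is defined elsewhere in test_compileall.py and is not part of this hunk; conceptually it only needs to check that the two pyc paths point at the same underlying file. A minimal sketch of such a check (not necessarily the exact helper in the test suite):

import os

def is_hardlink(path1, path2):
    # Hardlinked paths share the same device and inode numbers.
    s1, s2 = os.stat(path1), os.stat(path2)
    return (s1.st_dev, s1.st_ino) == (s2.st_dev, s2.st_ino)

# os.path.samefile(path1, path2) performs the same device/inode comparison.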