gh-108303: Move tokenize-related data to `Lib/test/tokenizedata` by sobolevn · Pull Request #109265 · python/cpython
Merged · 4 commits · Sep 12, 2023
Changes from 1 commit
gh-108303: Move tokenize-related data to Lib/test/tokenizedata
sobolevn committed Sep 11, 2023
commit 8254b4f140da196295ff54b6759f74356f0f7423
8 changes: 6 additions & 2 deletions Lib/test/test_py_compile.py
@@ -132,7 +132,9 @@ def test_exceptions_propagate(self):
             os.chmod(self.directory, mode.st_mode)

     def test_bad_coding(self):
-        bad_coding = os.path.join(os.path.dirname(__file__), 'bad_coding2.py')
+        bad_coding = os.path.join(os.path.dirname(__file__),
+                                  'tokenizedata',
+                                  'bad_coding2.py')
         with support.captured_stderr():
             self.assertIsNone(py_compile.compile(bad_coding, doraise=False))
         self.assertFalse(os.path.exists(
@@ -195,7 +197,9 @@ def test_invalidation_mode(self):
         self.assertEqual(flags, 0b1)

     def test_quiet(self):
-        bad_coding = os.path.join(os.path.dirname(__file__), 'bad_coding2.py')
+        bad_coding = os.path.join(os.path.dirname(__file__),
+                                  'tokenizedata',
+                                  'bad_coding2.py')
         with support.captured_stderr() as stderr:
             self.assertIsNone(py_compile.compile(bad_coding, doraise=False, quiet=2))
             self.assertIsNone(py_compile.compile(bad_coding, doraise=True, quiet=2))
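Both hunks above make the same change: the data file gains a 'tokenizedata' path component. A minimal standalone sketch of what the updated lookup does — it assumes a CPython install whose test package already includes this change, and prints rather than asserts:

    import os
    import py_compile
    import test
    from test import support

    # Locate Lib/test/ via the installed test package, then descend into
    # the tokenizedata/ subdirectory that this PR introduces.
    test_dir = os.path.dirname(test.__file__)
    bad_coding = os.path.join(test_dir, 'tokenizedata', 'bad_coding2.py')

    # bad_coding2.py declares an encoding that does not match its contents,
    # so compilation fails; with doraise=False, py_compile writes the error
    # to stderr and returns None instead of raising.
    with support.captured_stderr() as stderr:
        result = py_compile.compile(bad_coding, doraise=False)
    print(result)                   # None
    print(bool(stderr.getvalue()))  # True: an error message was emitted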
4 changes: 2 additions & 2 deletions Lib/test/test_source_encoding.py
@@ -100,10 +100,10 @@ def test_bad_coding2(self):
         self.verify_bad_module(module_name)

     def verify_bad_module(self, module_name):
-        self.assertRaises(SyntaxError, __import__, 'test.' + module_name)
+        self.assertRaises(SyntaxError, __import__, 'test.tokenizedata.' + module_name)

         path = os.path.dirname(__file__)
-        filename = os.path.join(path, module_name + '.py')
+        filename = os.path.join(path, 'tokenizedata', module_name + '.py')
         with open(filename, "rb") as fp:
             bytes = fp.read()
         self.assertRaises(SyntaxError, compile, bytes, filename, 'exec')
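verify_bad_module exercises the same broken source two ways — through the import system and through compile() on raw bytes. A condensed standalone sketch of that logic; it assumes the change is installed, so that the empty file added at the bottom of this diff (presumably tokenizedata's __init__.py) makes test.tokenizedata an importable package:

    import os
    import test

    module_name = 'bad_coding'
    path = os.path.dirname(test.__file__)
    filename = os.path.join(path, 'tokenizedata', module_name + '.py')

    # The import path gains a component too:
    # test.<name> becomes test.tokenizedata.<name>.
    try:
        __import__('test.tokenizedata.' + module_name)
    except SyntaxError as exc:
        print('import rejected:', exc)

    # Compiling the raw bytes trips over the same bad coding cookie.
    with open(filename, 'rb') as fp:
        source = fp.read()
    try:
        compile(source, filename, 'exec')
    except SyntaxError as exc:
        print('compile rejected:', exc)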
29 changes: 18 additions & 11 deletions Lib/test/test_tarfile.py
@@ -2587,16 +2587,17 @@ def tarfilecmd_failure(self, *args):
         return script_helper.assert_python_failure('-m', 'tarfile', *args)

     def make_simple_tarfile(self, tar_name):
-        files = [support.findfile('tokenize_tests.txt'),
+        files = [support.findfile('tokenize_tests.txt',
+                                  subdir='tokenizedata'),
                  support.findfile('tokenize_tests-no-coding-cookie-'
-                                  'and-utf8-bom-sig-only.txt')]
+                                  'and-utf8-bom-sig-only.txt',
+                                  subdir='tokenizedata')]
         self.addCleanup(os_helper.unlink, tar_name)
         with tarfile.open(tar_name, 'w') as tf:
             for tardata in files:
                 tf.add(tardata, arcname=os.path.basename(tardata))

     def make_evil_tarfile(self, tar_name):
-        files = [support.findfile('tokenize_tests.txt')]
         self.addCleanup(os_helper.unlink, tar_name)
         with tarfile.open(tar_name, 'w') as tf:
             benign = tarfile.TarInfo('benign')
@@ -2677,9 +2678,11 @@ def test_list_command_invalid_file(self):
         self.assertEqual(rc, 1)

     def test_create_command(self):
-        files = [support.findfile('tokenize_tests.txt'),
+        files = [support.findfile('tokenize_tests.txt',
+                                  subdir='tokenizedata'),
                  support.findfile('tokenize_tests-no-coding-cookie-'
-                                  'and-utf8-bom-sig-only.txt')]
+                                  'and-utf8-bom-sig-only.txt',
+                                  subdir='tokenizedata')]
         for opt in '-c', '--create':
             try:
                 out = self.tarfilecmd(opt, tmpname, *files)
@@ -2690,9 +2693,11 @@ def test_create_command_verbose(self):
             os_helper.unlink(tmpname)

     def test_create_command_verbose(self):
-        files = [support.findfile('tokenize_tests.txt'),
+        files = [support.findfile('tokenize_tests.txt',
+                                  subdir='tokenizedata'),
                  support.findfile('tokenize_tests-no-coding-cookie-'
-                                  'and-utf8-bom-sig-only.txt')]
+                                  'and-utf8-bom-sig-only.txt',
+                                  subdir='tokenizedata')]
         for opt in '-v', '--verbose':
             try:
                 out = self.tarfilecmd(opt, '-c', tmpname, *files,
@@ -2704,7 +2709,7 @@ def test_create_command_verbose(self):
             os_helper.unlink(tmpname)

     def test_create_command_dotless_filename(self):
-        files = [support.findfile('tokenize_tests.txt')]
+        files = [support.findfile('tokenize_tests.txt', subdir='tokenizedata')]
         try:
             out = self.tarfilecmd('-c', dotlessname, *files)
             self.assertEqual(out, b'')
@@ -2715,7 +2720,7 @@ def test_create_command_dotless_filename(self):

     def test_create_command_dot_started_filename(self):
         tar_name = os.path.join(TEMPDIR, ".testtar")
-        files = [support.findfile('tokenize_tests.txt')]
+        files = [support.findfile('tokenize_tests.txt', subdir='tokenizedata')]
         try:
             out = self.tarfilecmd('-c', tar_name, *files)
             self.assertEqual(out, b'')
@@ -2725,9 +2730,11 @@ def test_create_command_dot_started_filename(self):
             os_helper.unlink(tar_name)

     def test_create_command_compressed(self):
-        files = [support.findfile('tokenize_tests.txt'),
+        files = [support.findfile('tokenize_tests.txt',
+                                  subdir='tokenizedata'),
                  support.findfile('tokenize_tests-no-coding-cookie-'
-                                  'and-utf8-bom-sig-only.txt')]
+                                  'and-utf8-bom-sig-only.txt',
+                                  subdir='tokenizedata')]
         for filetype in (GzipTest, Bz2Test, LzmaTest):
             if not filetype.open:
                 continue
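Every test_tarfile hunk above makes the same one-argument change: passing subdir='tokenizedata' to test.support.findfile. As a rough model, the helper joins the subdirectory onto the filename before searching the test directory and sys.path — a simplified sketch under that assumption, not the helper's actual source:

    import os
    import sys
    import test

    def findfile_sketch(filename, subdir=None):
        """Simplified model of test.support.findfile(filename, subdir=...)."""
        if os.path.isabs(filename):
            return filename
        if subdir is not None:
            # The only behavior these hunks rely on: look one level down.
            filename = os.path.join(subdir, filename)
        for directory in [os.path.dirname(test.__file__), *sys.path]:
            candidate = os.path.join(directory, filename)
            if os.path.exists(candidate):
                return candidate
        return filename  # fall back to the bare name

    # Resolves to .../Lib/test/tokenizedata/tokenize_tests.txt after the move.
    print(findfile_sketch('tokenize_tests.txt', subdir='tokenizedata'))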
8 changes: 4 additions & 4 deletions Lib/test/test_tokenize.py
@@ -1200,7 +1200,7 @@ class TestTokenizerAdheresToPep0263(TestCase):
     """

     def _testFile(self, filename):
-        path = os.path.join(os.path.dirname(__file__), filename)
+        path = os.path.join(os.path.dirname(__file__), 'tokenizedata', filename)
         with open(path, 'rb') as f:
             TestRoundtrip.check_roundtrip(self, f)
@@ -1794,7 +1794,7 @@ def test_roundtrip(self):

         self.check_roundtrip("if x == 1 : \n"
                              "  print(x)\n")
-        fn = support.findfile("tokenize_tests.txt")
+        fn = support.findfile("tokenize_tests.txt", subdir="tokenizedata")
         with open(fn, 'rb') as f:
             self.check_roundtrip(f)
         self.check_roundtrip("if x == 1:\n"
@@ -1849,8 +1849,8 @@ def test_random_files(self):
         # pass the '-ucpu' option to process the full directory.

         import glob, random
-        fn = support.findfile("tokenize_tests.txt")
-        tempdir = os.path.dirname(fn) or os.curdir
+        fn = support.findfile("tokenize_tests.txt", subdir='tokenizedata')
+        tempdir = os.path.dirname(os.path.dirname(fn)) or os.curdir
Review comment (Member): I'm not sure about the logic to get tempdir here. Maybe start from __file__ instead? Why is it called tempdir? That's Lib/test/, the directory of test files, no?
Reply (sobolevn, Member, Author): This is not something I am modifying here. I only changed

    tempdir = os.path.dirname(fn) or os.curdir

to be

    tempdir = os.path.dirname(os.path.dirname(fn)) or os.curdir

I can open a new issue about it and dig into it later :)

         testfiles = glob.glob(os.path.join(glob.escape(tempdir), "test*.py"))

         # Tokenize is broken on test_pep3131.py because regular expressions are
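To make the tempdir exchange above concrete: fn now sits one directory deeper, so a single dirname call no longer lands in Lib/test/, where the test*.py files that test_random_files feeds to the tokenizer actually live. A worked example (the install prefix is illustrative only):

    import os

    # Assumed post-move location; only the tail of the path matters.
    fn = '/usr/lib/python3.13/test/tokenizedata/tokenize_tests.txt'

    print(os.path.dirname(fn))
    # -> /usr/lib/python3.13/test/tokenizedata (just the data directory)

    print(os.path.dirname(os.path.dirname(fn)))
    # -> /usr/lib/python3.13/test (the directory globbed for test*.py)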
2 changes: 1 addition & 1 deletion Lib/test/test_tools/test_reindent.py
@@ -25,7 +25,7 @@ def test_help(self):
         self.assertGreater(err, b'')

     def test_reindent_file_with_bad_encoding(self):
-        bad_coding_path = findfile('bad_coding.py')
+        bad_coding_path = findfile('bad_coding.py', subdir='tokenizedata')
         rc, out, err = assert_python_ok(self.script, '-r', bad_coding_path)
         self.assertEqual(out, b'')
         self.assertNotEqual(err, b'')
Empty file.
File renamed without changes.
File renamed without changes.
File renamed without changes.