diff --git a/aten/src/ATen/nnapi/codegen.py b/aten/src/ATen/nnapi/codegen.py
index 3197d670092e28..57b1e3a696fa8c 100755
--- a/aten/src/ATen/nnapi/codegen.py
+++ b/aten/src/ATen/nnapi/codegen.py
@@ -7,10 +7,11 @@
 we need with dlsym.  We also generate a "check" wrapper that
 checks return values and throws C++ exceptions on errors.
 """
-import pathlib
+
 import re
 import sys
 import textwrap
+from pathlib import Path
 
 
 PREFIX = """\
@@ -231,7 +232,7 @@ def main(argv):
         )
     )
 
-    out_dir = pathlib.Path(__file__).parent
+    out_dir = Path(__file__).parent
 
     (out_dir / "nnapi_wrapper.h").write_text(
         PREFIX
diff --git a/benchmarks/dynamo/ci_expected_accuracy/update_expected.py b/benchmarks/dynamo/ci_expected_accuracy/update_expected.py
index 5d73cf658c17c4..1f3a02b69a55f5 100644
--- a/benchmarks/dynamo/ci_expected_accuracy/update_expected.py
+++ b/benchmarks/dynamo/ci_expected_accuracy/update_expected.py
@@ -18,12 +18,12 @@
 import argparse
 import json
 import os
-import pathlib
 import subprocess
 import sys
 import urllib
 from io import BytesIO
 from itertools import product
+from pathlib import Path
 from urllib.request import urlopen
 from zipfile import ZipFile
 
@@ -34,7 +34,7 @@
 # https://console.rockset.com/lambdas/details/commons.artifacts
 ARTIFACTS_QUERY_URL = "https://api.usw2a1.rockset.com/v1/public/shared_lambdas/4ca0033e-0117-41f5-b043-59cde19eff35"
 CSV_LINTER = str(
-    pathlib.Path(__file__).absolute().parent.parent.parent.parent
+    Path(__file__).absolute().parent.parent.parent.parent
     / "tools/linter/adapters/no_merge_conflict_csv_linter.py"
 )
diff --git a/benchmarks/dynamo/common.py b/benchmarks/dynamo/common.py
index 4cfa853c30a28a..64fd831aec74f9 100644
--- a/benchmarks/dynamo/common.py
+++ b/benchmarks/dynamo/common.py
@@ -2,7 +2,6 @@
 from __future__ import annotations
 
 import abc
-
 import argparse
 import collections
 import contextlib
@@ -14,7 +13,6 @@
 import itertools
 import logging
 import os
-import pathlib
 import shutil
 import signal
 import subprocess
@@ -22,7 +20,7 @@
 import time
 import weakref
 from contextlib import contextmanager
-
+from pathlib import Path
 from typing import (
     Any,
     Callable,
@@ -60,6 +58,7 @@
     same,
 )
 
+
 try:
     from torch._dynamo.utils import (
         clone_inputs,
@@ -81,6 +80,7 @@
 from torch.utils import _pytree as pytree
 from torch.utils._pytree import tree_map, tree_map_only
 
+
 try:
     import torch_xla
     import torch_xla.core.xla_model as xm
@@ -920,7 +920,7 @@ def speedup_experiment_onnx(
     2. Running ORT with OnnxModel.
 
     Writes to ./{output_filename}, which should be
-        `pathlib.Path(self.output_dir) / f"{self.compiler}_{suite}_{self.dtype}_{self.mode}_{self.device}_{self.testing}.csv".
+        `Path(self.output_dir) / f"{self.compiler}_{suite}_{self.dtype}_{self.mode}_{self.device}_{self.testing}.csv".
 
     TODO(bowbao): Record export time and export peak memory usage.
""" @@ -1347,8 +1347,8 @@ def deepcopy_model_and_inputs_to_device(self, model, example_inputs, target_devi @classmethod def _generate_onnx_model_directory( cls, output_directory: str, compiler_name: str, model_name: str - ) -> pathlib.Path: - model_path = pathlib.Path( + ) -> Path: + model_path = Path( output_directory, ".onnx_models", model_name, @@ -2389,7 +2389,6 @@ def get_fsdp_auto_wrap_policy(self, model_name: str): from diffusers.models.transformer_2d import Transformer2DModel from torchbenchmark.models.nanogpt.model import Block from transformers.models.llama.modeling_llama import LlamaDecoderLayer - from transformers.models.t5.modeling_t5 import T5Block from transformers.models.whisper.modeling_whisper import WhisperEncoderLayer diff --git a/scripts/compile_tests/update_failures.py b/scripts/compile_tests/update_failures.py index 929ed9fe20ac4c..2842a20f249699 100755 --- a/scripts/compile_tests/update_failures.py +++ b/scripts/compile_tests/update_failures.py @@ -1,8 +1,8 @@ #!/usr/bin/env python3 import argparse import os -import pathlib import subprocess +from pathlib import Path from common import ( get_testcases, @@ -194,7 +194,7 @@ def read_test_results(directory): "filename", nargs="?", default=str( - pathlib.Path(__file__).absolute().parent.parent.parent + Path(__file__).absolute().parent.parent.parent / "torch/testing/_internal/dynamo_test_failures.py" ), help="Optional path to dynamo_test_failures.py", @@ -203,7 +203,7 @@ def read_test_results(directory): parser.add_argument( "test_dir", nargs="?", - default=str(pathlib.Path(__file__).absolute().parent.parent.parent / "test"), + default=str(Path(__file__).absolute().parent.parent.parent / "test"), help="Optional path to test folder", ) parser.add_argument( @@ -219,7 +219,7 @@ def read_test_results(directory): action="store_true", ) args = parser.parse_args() - assert pathlib.Path(args.filename).exists(), args.filename - assert pathlib.Path(args.test_dir).exists(), args.test_dir + assert Path(args.filename).exists(), args.filename + assert Path(args.test_dir).exists(), args.test_dir dynamo38, dynamo311 = download_reports(args.commit, ("dynamo38", "dynamo311")) update(args.filename, args.test_dir, dynamo38, dynamo311, args.also_remove_skips) diff --git a/test/distributed/nn/jit/test_instantiator.py b/test/distributed/nn/jit/test_instantiator.py index 03d3a6f050628a..0ece03a4be547e 100644 --- a/test/distributed/nn/jit/test_instantiator.py +++ b/test/distributed/nn/jit/test_instantiator.py @@ -1,14 +1,15 @@ #!/usr/bin/env python3 # Owner(s): ["oncall: distributed"] -import pathlib import sys +from pathlib import Path from typing import Tuple import torch import torch.distributed as dist from torch import nn, Tensor + if not dist.is_available(): print("Distributed not available, skipping tests", file=sys.stderr) sys.exit(0) @@ -45,7 +46,7 @@ def test_get_arg_return_types_from_interface(self): self.assertEqual(return_type_str, "Tuple[Tensor, int, str]") def test_instantiate_scripted_remote_module_template(self): - dir_path = pathlib.Path(instantiator.INSTANTIATED_TEMPLATE_DIR_PATH) + dir_path = Path(instantiator.INSTANTIATED_TEMPLATE_DIR_PATH) # Cleanup. 
         file_paths = dir_path.glob(f"{instantiator._FILE_PREFIX}*.py")
@@ -69,7 +70,7 @@ def test_instantiate_scripted_remote_module_template(self):
         self.assertEqual(num_files_after, 1)
 
     def test_instantiate_non_scripted_remote_module_template(self):
-        dir_path = pathlib.Path(instantiator.INSTANTIATED_TEMPLATE_DIR_PATH)
+        dir_path = Path(instantiator.INSTANTIATED_TEMPLATE_DIR_PATH)
 
         # Cleanup.
         file_paths = dir_path.glob(f"{instantiator._FILE_PREFIX}*.py")
diff --git a/test/export/test_serialize.py b/test/export/test_serialize.py
index 1e0a9edf238727..89650262e63620 100644
--- a/test/export/test_serialize.py
+++ b/test/export/test_serialize.py
@@ -7,10 +7,10 @@
 # Owner(s): ["oncall: export"]
 import copy
 import io
-import pathlib
 import tempfile
 import unittest
 import zipfile
+from pathlib import Path
 
 import torch
 import torch._dynamo as torchdynamo
@@ -38,7 +38,6 @@
     TemporaryFileName,
     TestCase,
 )
-
 from torch.testing._internal.torchbind_impls import init_torchbind_implementations
@@ -1052,7 +1051,7 @@ def forward(self, x, y):
         ep = export(f, inp)
 
         with TemporaryFileName() as fname:
-            path = pathlib.Path(fname)
+            path = Path(fname)
             save(ep, path)
             loaded_ep = load(path)
diff --git a/test/inductor/test_debug_trace.py b/test/inductor/test_debug_trace.py
index 3d11af6d995f24..a2029678f48f10 100644
--- a/test/inductor/test_debug_trace.py
+++ b/test/inductor/test_debug_trace.py
@@ -1,16 +1,17 @@
 # Owner(s): ["module: inductor"]
 import logging
 import os
-import pathlib
 import re
 import shutil
 import sys
 import unittest
+from pathlib import Path
 
 import torch
 from torch._inductor import config, test_operators
 from torch.testing._internal.inductor_utils import GPU_TYPE, HAS_GPU
 
+
 try:
     try:
         from . import test_torchinductor
@@ -22,7 +23,7 @@
     raise
 
 
-def filesize(filename: pathlib.Path):
+def filesize(filename: Path):
     assert filename.exists(), f"{filename} is missing"
     return os.stat(filename).st_size
 
@@ -43,7 +44,7 @@ def fn(a, b):
         self.assertEqual(len(cm.output), 1)
         m = re.match(r"WARNING.* debug trace: (.*)", cm.output[0])
         self.assertTrue(m)
-        filename = pathlib.Path(m.group(1))
+        filename = Path(m.group(1))
         self.assertTrue(filename.is_dir())
         self.assertGreater(filesize(filename / "fx_graph_readable.py"), 512)
         self.assertGreater(filesize(filename / "fx_graph_runnable.py"), 512)
diff --git a/test/jit/test_save_load.py b/test/jit/test_save_load.py
index d16f039798895f..7fddb75e9ecf1a 100644
--- a/test/jit/test_save_load.py
+++ b/test/jit/test_save_load.py
@@ -2,14 +2,15 @@
 
 import io
 import os
-import pathlib
 import sys
+from pathlib import Path
 from typing import NamedTuple, Optional
 
 import torch
 from torch import Tensor
 from torch.testing._internal.common_utils import skipIfTorchDynamo, TemporaryFileName
 
+
 # Make the helper files in test/ importable
 pytorch_test_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
 sys.path.append(pytorch_test_dir)
@@ -397,7 +398,7 @@ def forward(self, a):
 
         # Save then load.
         with TemporaryFileName() as fname:
-            path = pathlib.Path(fname)
+            path = Path(fname)
             m.save(path)
             m2 = torch.jit.load(path)
@@ -624,7 +625,7 @@ def get_loaded_inputs(inputs):
         traced_module = torch.jit.trace(module, input1)
         traced_inputs = list(traced_module.graph.inputs())
         with TemporaryFileName() as fname:
-            path = pathlib.Path(fname)
+            path = Path(fname)
             traced_module.save(path)
             print(traced_module.graph)
             loaded_module = torch.jit.load(path, _restore_shapes=True)
@@ -640,7 +641,7 @@ def get_loaded_inputs(inputs):
             traced_module._c._retrieve_traced_inputs()["forward"], [input_tensor]
         )
         with TemporaryFileName() as fname:
-            path = pathlib.Path(fname)
+            path = Path(fname)
             traced_module.save(path)
             loaded_module = torch.jit.load(path, _restore_shapes=True)
             loaded_inputs = list(loaded_module.graph.inputs())
@@ -659,7 +660,7 @@ def get_loaded_inputs(inputs):
         self.assertEqual(len(traced_module._c._retrieve_traced_inputs()), 0)
         with TemporaryFileName() as fname:
-            path = pathlib.Path(fname)
+            path = Path(fname)
             traced_module.save(path)
             loaded_module = torch.jit.load(path, _restore_shapes=True)
             loaded_inputs = list(loaded_module.graph.inputs())
@@ -1055,7 +1056,7 @@ def forward(self, a):
 
         # Save then load.
         with TemporaryFileName() as fname:
-            path = pathlib.Path(fname)
+            path = Path(fname)
             torch.jit.save_jit_module_to_flatbuffer(m, path)
             m2 = torch.jit.load(path)
diff --git a/test/lazy/test_ts_opinfo.py b/test/lazy/test_ts_opinfo.py
index 102dee3664bee0..ddad7b931f3841 100644
--- a/test/lazy/test_ts_opinfo.py
+++ b/test/lazy/test_ts_opinfo.py
@@ -3,7 +3,7 @@
 import functools
 import itertools
 import os
-import pathlib
+from pathlib import Path
 from typing import Sequence
 from unittest import skip
 
@@ -20,10 +20,10 @@
     ops,
 )
 from torch.testing._internal.common_methods_invocations import op_db
-
 from torch.testing._internal.common_utils import run_tests, TestCase
 from torch.testing._internal.jit_utils import JitTestCase
 
+
 torch._lazy.ts_backend.init()
@@ -36,7 +36,7 @@ def remove_suffixes(l):
 
 
 def init_lists():
-    path_to_script = pathlib.Path(os.path.abspath(os.path.dirname(__file__)))
+    path_to_script = Path(os.path.abspath(os.path.dirname(__file__)))
     TS_NATIVE_FUNCTIONS_PATH = (
         path_to_script.parent.parent / "aten/src/ATen/native/ts_native_functions.yaml"
     )
diff --git a/test/run_test.py b/test/run_test.py
index 57e69c0d979cf3..f9b1507a7de0d9 100755
--- a/test/run_test.py
+++ b/test/run_test.py
@@ -5,7 +5,6 @@
 import glob
 import json
 import os
-import pathlib
 import re
 import shutil
 import signal
@@ -16,6 +15,7 @@
 from collections import defaultdict
 from contextlib import ExitStack
 from datetime import datetime
+from pathlib import Path
 from typing import Any, cast, Dict, List, NamedTuple, Optional, Sequence, Tuple, Union
 
 import pkg_resources
@@ -38,7 +38,7 @@
     TEST_WITH_SLOW_GRADCHECK,
 )
 
-REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent
+REPO_ROOT = Path(__file__).resolve().parent.parent
 
 # using tools/ to optimize test run.
 sys.path.insert(0, str(REPO_ROOT))
@@ -61,7 +61,6 @@
         gen_additional_test_failures_file,
     )
     from tools.testing.target_determination.heuristics.utils import get_pr_number
-
     from tools.testing.test_run import TestRun
     from tools.testing.test_selections import (
         calculate_shards,
@@ -71,6 +70,7 @@
         THRESHOLD,
     )
 
+
     HAVE_TEST_SELECTION_TOOLS = True
     # Make sure to remove REPO_ROOT after import is done
     sys.path.remove(str(REPO_ROOT))
@@ -465,7 +465,7 @@ def run_test(
         )
     else:
         cpp_test = os.path.join(
-            pathlib.Path(test_directory).parent,
+            Path(test_directory).parent,
             CPP_TEST_PATH,
             test_file.replace(f"{CPP_TEST_PREFIX}/", ""),
         )
@@ -800,11 +800,9 @@ def run_doctests(test_module, test_directory, options):
     Assumes the incoming test module is called doctest, and simply executes the
     xdoctest runner on the torch library itself.
     """
-    import pathlib
-
     import xdoctest
 
-    pkgpath = pathlib.Path(torch.__file__).parent
+    pkgpath = Path(torch.__file__).parent
 
     exclude_module_list = ["torch._vendor.*"]
     enabled = {
diff --git a/test/test_serialization.py b/test/test_serialization.py
index b64960df503da5..60f6ce161e73b8 100644
--- a/test/test_serialization.py
+++ b/test/test_serialization.py
@@ -1,40 +1,58 @@
 # Owner(s): ["module: serialization"]
 
-import torch
-import unittest
-import io
-import tempfile
-import os
+import copy
 import gc
-import sys
-import zipfile
-import warnings
 import gzip
-import copy
+import io
+import os
 import pickle
-import shutil
-import pathlib
 import platform
+import shutil
+import sys
+import tempfile
+import unittest
+import warnings
+import zipfile
 from collections import namedtuple, OrderedDict
 from copy import deepcopy
 from itertools import product
+from pathlib import Path
 
-from torch._utils_internal import get_file_path_2
+import torch
 from torch._utils import _rebuild_tensor
-from torch.utils._import_utils import import_dill
-from torch.serialization import check_module_version_greater_or_equal, get_default_load_endianness, \
-    set_default_load_endianness, LoadEndianness, SourceChangeWarning
-
-from torch.testing._internal.common_utils import (
-    IS_FILESYSTEM_UTF8_ENCODING, TemporaryDirectoryName,
-    TestCase, IS_FBCODE, IS_WINDOWS, TEST_DILL, run_tests, download_file, BytesIOContext, TemporaryFileName,
-    parametrize, instantiate_parametrized_tests, AlwaysWarnTypedStorageRemoval, serialTest, skipIfTorchDynamo)
+from torch._utils_internal import get_file_path_2
+from torch.serialization import (
+    check_module_version_greater_or_equal,
+    get_default_load_endianness,
+    LoadEndianness,
+    set_default_load_endianness,
+    SourceChangeWarning,
+)
 from torch.testing._internal.common_device_type import instantiate_device_type_tests
 from torch.testing._internal.common_dtype import all_types_and_complex_and
+from torch.testing._internal.common_utils import (
+    AlwaysWarnTypedStorageRemoval,
+    BytesIOContext,
+    download_file,
+    instantiate_parametrized_tests,
+    IS_FBCODE,
+    IS_FILESYSTEM_UTF8_ENCODING,
+    IS_WINDOWS,
+    parametrize,
+    run_tests,
+    serialTest,
+    skipIfTorchDynamo,
+    TemporaryDirectoryName,
+    TemporaryFileName,
+    TEST_DILL,
+    TestCase,
+)
 from torch.testing._internal.two_tensor import TwoTensor  # noqa: F401
+from torch.utils._import_utils import import_dill
+
 
 if not IS_WINDOWS:
-    from mmap import MAP_SHARED, MAP_PRIVATE
+    from mmap import MAP_PRIVATE, MAP_SHARED
 else:
     MAP_SHARED, MAP_PRIVATE = None, None
@@ -988,7 +1006,7 @@ def test_pathlike_serialization(self, weights_only):
         model = torch.nn.Conv2d(20, 3200, kernel_size=3)
         with TemporaryFileName() as fname:
-            path = pathlib.Path(fname)
+            path = Path(fname)
             torch.save(model.state_dict(), path)
             torch.load(path, weights_only=weights_only)
@@ -4008,7 +4026,7 @@ def test_serialization_warning_s390x(self):
         finally:
             set_default_load_endianness(current_load_endian)
 
-    @parametrize('path_type', (str, pathlib.Path))
+    @parametrize('path_type', (str, Path))
     @parametrize('weights_only', (True, False))
     @unittest.skipIf(IS_WINDOWS, "NamedTemporaryFile on windows")
     def test_serialization_mmap_loading(self, weights_only, path_type):
diff --git a/test/test_tensorboard.py b/test/test_tensorboard.py
index 1e79a2bf910cec..763091e72240e6 100644
--- a/test/test_tensorboard.py
+++ b/test/test_tensorboard.py
@@ -1,13 +1,16 @@
 # Owner(s): ["module: unknown"]
-import expecttest
 import io
-import numpy as np
 import os
 import shutil
 import sys
 import tempfile
 import unittest
+from pathlib import Path
+
+import expecttest
+import numpy as np
+
 
 TEST_TENSORBOARD = True
 try:
@@ -36,14 +39,15 @@
 import torch
 from torch.testing._internal.common_utils import (
     instantiate_parametrized_tests,
+    IS_MACOS,
+    IS_WINDOWS,
     parametrize,
-    TestCase,
     run_tests,
     TEST_WITH_CROSSREF,
-    IS_WINDOWS,
-    IS_MACOS,
+    TestCase,
 )
 
+
 def tensor_N(shape, dtype=float):
     numel = np.prod(shape)
     x = (np.arange(numel, dtype=dtype)).reshape(shape)
@@ -75,15 +79,16 @@ def tearDown(self):
 
 
 if TEST_TENSORBOARD:
+    from google.protobuf import text_format
+    from PIL import Image
     from tensorboard.compat.proto.graph_pb2 import GraphDef
-    from torch.utils.tensorboard import summary, SummaryWriter
-    from torch.utils.tensorboard._utils import _prepare_video, convert_to_HWC
     from tensorboard.compat.proto.types_pb2 import DataType
-    from torch.utils.tensorboard.summary import int_to_half, tensor_proto
+
+    from torch.utils.tensorboard import summary, SummaryWriter
     from torch.utils.tensorboard._convert_np import make_np
     from torch.utils.tensorboard._pytorch_graph import graph
-    from google.protobuf import text_format
-    from PIL import Image
+    from torch.utils.tensorboard._utils import _prepare_video, convert_to_HWC
+    from torch.utils.tensorboard.summary import int_to_half, tensor_proto
 
 class TestTensorBoardPyTorchNumpy(BaseTestCase):
     def test_pytorch_np(self):
@@ -289,9 +294,8 @@ def test_summary_writer_close(self):
         self.assertTrue(passed)
 
     def test_pathlib(self):
-        import pathlib
         with tempfile.TemporaryDirectory(prefix="test_tensorboard_pathlib") as d:
-            p = pathlib.Path(d)
+            p = Path(d)
             with SummaryWriter(p) as writer:
                 writer.add_scalar('test', 1)
diff --git a/test/torch_np/check_tests_conform.py b/test/torch_np/check_tests_conform.py
index a93c1d629368ef..05ff5357b7c7c2 100644
--- a/test/torch_np/check_tests_conform.py
+++ b/test/torch_np/check_tests_conform.py
@@ -1,6 +1,6 @@
-import pathlib
 import sys
 import textwrap
+from pathlib import Path
 
 
 def check(path):
@@ -62,7 +62,7 @@ def report_violation(line, lineno, header):
     if len(argv) != 2:
         raise ValueError("Usage : python check_tests_conform path/to/file/or/dir")
 
-    path = pathlib.Path(argv[1])
+    path = Path(argv[1])
 
     if path.is_dir():
         # run for all files in the directory (no subdirs)
diff --git a/test/torch_np/numpy_tests/core/test_multiarray.py b/test/torch_np/numpy_tests/core/test_multiarray.py
index 76af79f6208418..32ebee01cf6fed 100644
--- a/test/torch_np/numpy_tests/core/test_multiarray.py
+++ b/test/torch_np/numpy_tests/core/test_multiarray.py
@@ -9,16 +9,14 @@
 import mmap
 import operator
 import os
-
-import pathlib
 import sys
 import tempfile
 import warnings
 import weakref
 from contextlib import contextmanager
 from decimal import Decimal
+from pathlib import Path
 from tempfile import mkstemp
-
 from unittest import expectedFailure as xfail, skipIf as skipif, SkipTest
 
 import numpy
@@ -37,6 +35,7 @@
     xpassIfTorchDynamo,
 )
 
+
 # If we are going to trace through these, we should use NumPy
 # If testing on eager mode, we use torch._numpy
 if TEST_WITH_TORCHDYNAMO:
@@ -3866,7 +3865,7 @@ def test_roundtrip(self, x, tmp_filename):
         assert_array_equal(y, x.flat)
 
     def test_roundtrip_dump_pathlib(self, x, tmp_filename):
-        p = pathlib.Path(tmp_filename)
+        p = Path(tmp_filename)
         x.dump(p)
         y = np.load(p, allow_pickle=True)
         assert_array_equal(y, x)
diff --git a/tools/onnx/update_default_opset_version.py b/tools/onnx/update_default_opset_version.py
index a6446b1ca8a919..8c9710da37b103 100755
--- a/tools/onnx/update_default_opset_version.py
+++ b/tools/onnx/update_default_opset_version.py
@@ -12,10 +12,10 @@
 import argparse
 import datetime
 import os
-import pathlib
 import re
 import subprocess
 import sys
+from pathlib import Path
 from subprocess import DEVNULL
 from typing import Any
 
@@ -30,7 +30,7 @@ def read_sub_write(path: str, prefix_pat: str, new_default: int) -> None:
 
 
 def main(args: Any) -> None:
-    pytorch_dir = pathlib.Path(__file__).parent.parent.parent.resolve()
+    pytorch_dir = Path(__file__).parent.parent.parent.resolve()
     onnx_dir = pytorch_dir / "third_party" / "onnx"
     os.chdir(onnx_dir)
diff --git a/tools/setup_helpers/generate_code.py b/tools/setup_helpers/generate_code.py
index 6ef951d8f021c3..cf1e39dac90421 100644
--- a/tools/setup_helpers/generate_code.py
+++ b/tools/setup_helpers/generate_code.py
@@ -2,8 +2,8 @@
 
 import argparse
 import os
-import pathlib
 import sys
+from pathlib import Path
 from typing import Any, cast
 
 import yaml
@@ -19,7 +19,7 @@
 
 
 def generate_code(
-    gen_dir: pathlib.Path,
+    gen_dir: Path,
     native_functions_path: str | None = None,
     tags_path: str | None = None,
     install_dir: str | None = None,
@@ -41,7 +41,7 @@ def generate_code(
     autograd_gen_dir = os.path.join(install_dir, "autograd", "generated")
     for d in (autograd_gen_dir, python_install_dir):
         os.makedirs(d, exist_ok=True)
-    autograd_dir = os.fspath(pathlib.Path(__file__).parent.parent / "autograd")
+    autograd_dir = os.fspath(Path(__file__).parent.parent / "autograd")
 
     if subset == "pybindings" or not subset:
         gen_autograd_python(
@@ -133,8 +133,8 @@ def main() -> None:
     parser.add_argument("--tags-path")
     parser.add_argument(
         "--gen-dir",
-        type=pathlib.Path,
-        default=pathlib.Path("."),
+        type=Path,
+        default=Path("."),
         help="Root directory where to install files. Defaults to the current working directory.",
     )
     parser.add_argument(
diff --git a/tools/stats/export_test_times.py b/tools/stats/export_test_times.py
index 2b9c0c45068303..6a82b934eb02c1 100644
--- a/tools/stats/export_test_times.py
+++ b/tools/stats/export_test_times.py
@@ -1,7 +1,7 @@
-import pathlib
 import sys
+from pathlib import Path
 
-REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent.parent
+REPO_ROOT = Path(__file__).resolve().parent.parent.parent
 sys.path.append(str(REPO_ROOT))
 
 from tools.stats.import_test_stats import get_test_class_times, get_test_times
diff --git a/tools/stats/import_test_stats.py b/tools/stats/import_test_stats.py
index 5f1d4ced59be5a..207db7dca4e2ab 100644
--- a/tools/stats/import_test_stats.py
+++ b/tools/stats/import_test_stats.py
@@ -5,12 +5,12 @@
 import datetime
 import json
 import os
-import pathlib
 import shutil
+from pathlib import Path
 from typing import Any, Callable, cast, Dict
 from urllib.request import urlopen
 
-REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent.parent
+REPO_ROOT = Path(__file__).resolve().parent.parent.parent
 
 
 def get_disabled_issues() -> list[str]:
@@ -22,7 +22,7 @@ def get_disabled_issues() -> list[str]:
 
 SLOW_TESTS_FILE = ".pytorch-slow-tests.json"
 DISABLED_TESTS_FILE = ".pytorch-disabled-tests.json"
-ADDITIONAL_CI_FILES_FOLDER = pathlib.Path(".additional_ci_files")
+ADDITIONAL_CI_FILES_FOLDER = Path(".additional_ci_files")
 TEST_TIMES_FILE = "test-times.json"
 TEST_CLASS_TIMES_FILE = "test-class-times.json"
 TEST_FILE_RATINGS_FILE = "test-file-ratings.json"
@@ -36,7 +36,7 @@ def get_disabled_issues() -> list[str]:
 
 
 def fetch_and_cache(
-    dirpath: str | pathlib.Path,
+    dirpath: str | Path,
     name: str,
     url: str,
     process_fn: Callable[[dict[str, Any]], dict[str, Any]],
@@ -44,7 +44,7 @@ def fetch_and_cache(
     """
     This fetch and cache utils allows sharing between different process.
     """
-    pathlib.Path(dirpath).mkdir(exist_ok=True)
+    Path(dirpath).mkdir(exist_ok=True)
     path = os.path.join(dirpath, name)
 
     print(f"Downloading {url} to {path}")
@@ -52,7 +52,7 @@ def fetch_and_cache(
     def is_cached_file_valid() -> bool:
         # Check if the file is new enough (see: FILE_CACHE_LIFESPAN_SECONDS). A real check
         # could make a HEAD request and check/store the file's ETag
-        fname = pathlib.Path(path)
+        fname = Path(path)
         now = datetime.datetime.now()
         mtime = datetime.datetime.fromtimestamp(fname.stat().st_mtime)
         diff = now - mtime
diff --git a/tools/test/heuristics/test_heuristics.py b/tools/test/heuristics/test_heuristics.py
index f18ad5b1eaad89..6e9ff8420d7d1a 100644
--- a/tools/test/heuristics/test_heuristics.py
+++ b/tools/test/heuristics/test_heuristics.py
@@ -3,13 +3,13 @@
 
 import io
 import json
-import pathlib
 import sys
 import unittest
+from pathlib import Path
 from typing import Any
 from unittest import mock
 
-REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent.parent.parent
+REPO_ROOT = Path(__file__).resolve().parent.parent.parent.parent
 sys.path.append(str(REPO_ROOT))
 from tools.test.heuristics.test_interface import TestTD
 from tools.testing.target_determination.determinator import TestPrioritizations
diff --git a/tools/test/heuristics/test_interface.py b/tools/test/heuristics/test_interface.py
index 78ead7d0d6234d..4941c8ada6ca50 100644
--- a/tools/test/heuristics/test_interface.py
+++ b/tools/test/heuristics/test_interface.py
@@ -1,11 +1,11 @@
 from __future__ import annotations
 
-import pathlib
 import sys
 import unittest
+from pathlib import Path
 from typing import Any
 
-REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent.parent.parent
+REPO_ROOT = Path(__file__).resolve().parent.parent.parent.parent
 sys.path.append(str(REPO_ROOT))
 import tools.testing.target_determination.heuristics.interface as interface
 from tools.testing.test_run import TestRun
diff --git a/tools/test/heuristics/test_utils.py b/tools/test/heuristics/test_utils.py
index ddc2a72a5ff8e1..23e1d155ab0dbd 100644
--- a/tools/test/heuristics/test_utils.py
+++ b/tools/test/heuristics/test_utils.py
@@ -1,12 +1,12 @@
 from __future__ import annotations
 
-import pathlib
 import sys
 import unittest
+from pathlib import Path
 from typing import Any
 
-REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent.parent.parent
+REPO_ROOT = Path(__file__).resolve().parent.parent.parent.parent
 sys.path.append(str(REPO_ROOT))
 import tools.testing.target_determination.heuristics.utils as utils
 from tools.testing.test_run import TestRun
diff --git a/tools/test/test_test_run.py b/tools/test/test_test_run.py
index 25fa8107ffb299..1e4f82c2290350 100644
--- a/tools/test/test_test_run.py
+++ b/tools/test/test_test_run.py
@@ -1,8 +1,8 @@
-import pathlib
 import sys
 import unittest
+from pathlib import Path
 
-REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent.parent
+REPO_ROOT = Path(__file__).resolve().parent.parent.parent
 try:
     # using tools/ to optimize test run.
     sys.path.append(str(REPO_ROOT))
diff --git a/tools/test/test_test_selections.py b/tools/test/test_test_selections.py
index 2fd59545414f8a..d18f0c50efb0ce 100644
--- a/tools/test/test_test_selections.py
+++ b/tools/test/test_test_selections.py
@@ -1,13 +1,13 @@
 from __future__ import annotations
 
 import functools
-import pathlib
 import random
 import sys
 import unittest
 from collections import defaultdict
+from pathlib import Path
 
-REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent.parent
+REPO_ROOT = Path(__file__).resolve().parent.parent.parent
 try:
     # using tools/ to optimize test run.
     sys.path.append(str(REPO_ROOT))
diff --git a/tools/test/test_upload_stats_lib.py b/tools/test/test_upload_stats_lib.py
index 6642aeaa421161..a1e34566cf7fdc 100644
--- a/tools/test/test_upload_stats_lib.py
+++ b/tools/test/test_upload_stats_lib.py
@@ -2,13 +2,13 @@
 
 import decimal
 import inspect
-import pathlib
 import sys
 import unittest
+from pathlib import Path
 from typing import Any
 from unittest import mock
 
-REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent.parent
+REPO_ROOT = Path(__file__).resolve().parent.parent.parent
 sys.path.insert(0, str(REPO_ROOT))
 
 from tools.stats.upload_metrics import add_global_metric, emit_metric
diff --git a/tools/testing/do_target_determination_for_s3.py b/tools/testing/do_target_determination_for_s3.py
index 32ea85b9802146..15d72174f21100 100644
--- a/tools/testing/do_target_determination_for_s3.py
+++ b/tools/testing/do_target_determination_for_s3.py
@@ -1,10 +1,10 @@
 import json
 import os
-import pathlib
 import sys
+from pathlib import Path
 
-REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent.parent
+REPO_ROOT = Path(__file__).resolve().parent.parent.parent
 sys.path.insert(0, str(REPO_ROOT))
 
 from tools.stats.import_test_stats import (
diff --git a/tools/testing/explicit_ci_jobs.py b/tools/testing/explicit_ci_jobs.py
index b81e2cb6215aa5..bc7736194f4828 100755
--- a/tools/testing/explicit_ci_jobs.py
+++ b/tools/testing/explicit_ci_jobs.py
@@ -4,15 +4,15 @@
 
 import argparse
 import fnmatch
-import pathlib
 import subprocess
 import textwrap
+from pathlib import Path
 from typing import Any
 
 import yaml
 
 
-REPO_ROOT = pathlib.Path(__file__).parent.parent.parent
+REPO_ROOT = Path(__file__).parent.parent.parent
 CONFIG_YML = REPO_ROOT / ".circleci" / "config.yml"
 WORKFLOWS_DIR = REPO_ROOT / ".github" / "workflows"
diff --git a/tools/testing/modulefinder_determinator.py b/tools/testing/modulefinder_determinator.py
index eba68d78b16b38..b9262e34548d6d 100644
--- a/tools/testing/modulefinder_determinator.py
+++ b/tools/testing/modulefinder_determinator.py
@@ -2,12 +2,12 @@
 
 import modulefinder
 import os
-import pathlib
 import sys
 import warnings
+from pathlib import Path
 from typing import Any
 
-REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent.parent
+REPO_ROOT = Path(__file__).resolve().parent.parent.parent
 
 # These tests are slow enough that it's worth calculating whether the patch
 # touched any related files first. This list was manually generated, but for every
diff --git a/tools/testing/target_determination/gen_artifact.py b/tools/testing/target_determination/gen_artifact.py
index f69924c451ffb5..0c62bd02f4d3d4 100644
--- a/tools/testing/target_determination/gen_artifact.py
+++ b/tools/testing/target_determination/gen_artifact.py
@@ -2,10 +2,10 @@
 
 import json
 import os
-import pathlib
+from pathlib import Path
 from typing import Any
 
-REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent.parent.parent
+REPO_ROOT = Path(__file__).resolve().parent.parent.parent.parent
 
 
 def gen_ci_artifact(included: list[Any], excluded: list[Any]) -> None:
diff --git a/torch/jit/_monkeytype_config.py b/torch/jit/_monkeytype_config.py
index 4662869e36835c..ecf7cd865fdeb7 100644
--- a/torch/jit/_monkeytype_config.py
+++ b/torch/jit/_monkeytype_config.py
@@ -1,14 +1,15 @@
 # mypy: allow-untyped-defs
 import inspect
-import pathlib
 import sys
 import typing
 from collections import defaultdict
+from pathlib import Path
 from types import CodeType
 from typing import Dict, Iterable, List, Optional
 
 import torch
 
+
 _IS_MONKEYTYPE_INSTALLED = True
 try:
     import monkeytype  # type: ignore[import]
@@ -189,5 +190,5 @@ def jit_code_filter(code: CodeType) -> bool:
     ):
         return False
 
-    filename = pathlib.Path(code.co_filename).resolve()
+    filename = Path(code.co_filename).resolve()
     return not any(_startswith(filename, lib_path) for lib_path in LIB_PATHS)
diff --git a/torch/utils/model_dump/__init__.py b/torch/utils/model_dump/__init__.py
index 7e2bc36d2e7131..f2cd974798f915 100644
--- a/torch/utils/model_dump/__init__.py
+++ b/torch/utils/model_dump/__init__.py
@@ -64,21 +64,18 @@
 (they probably don't work at all right now).
 """
 
-import sys
-import os
-import io
-import pathlib
-import re
 import argparse
-import zipfile
+import io
 import json
+import os
 import pickle
 import pprint
+import re
+import sys
 import urllib.parse
-
-from typing import (
-    Dict,
-)
+import zipfile
+from pathlib import Path
+from typing import Dict
 
 import torch.utils.show_pickle
@@ -201,7 +198,7 @@ def get_model_info(
         file_size = path_or_file.stat().st_size  # type: ignore[attr-defined]
     elif isinstance(path_or_file, str):
         default_title = path_or_file
-        file_size = pathlib.Path(path_or_file).stat().st_size
+        file_size = Path(path_or_file).stat().st_size
     else:
         default_title = "buffer"
         path_or_file.seek(0, io.SEEK_END)
diff --git a/torchgen/gen_backend_stubs.py b/torchgen/gen_backend_stubs.py
index a25ab66d698e85..92a897a330f903 100644
--- a/torchgen/gen_backend_stubs.py
+++ b/torchgen/gen_backend_stubs.py
@@ -2,9 +2,9 @@
 
 import argparse
 import os
-import pathlib
 import re
 from collections import Counter, defaultdict, namedtuple
+from pathlib import Path
 from typing import Sequence
 
 import yaml
@@ -529,7 +529,7 @@ def run(
     source_yaml: str, output_dir: str, dry_run: bool, impl_path: str | None = None
 ) -> None:
     # Assumes that this file lives at PYTORCH_ROOT/torchgen/gen_backend_stubs.py
-    pytorch_root = pathlib.Path(__file__).parent.parent.absolute()
+    pytorch_root = Path(__file__).parent.parent.absolute()
     template_dir = os.path.join(pytorch_root, "aten/src/ATen/templates")
 
     def make_file_manager(install_dir: str) -> FileManager:
diff --git a/torchgen/gen_lazy_tensor.py b/torchgen/gen_lazy_tensor.py
index 3f5d83613c7c09..52e8b3cfbeea4b 100644
--- a/torchgen/gen_lazy_tensor.py
+++ b/torchgen/gen_lazy_tensor.py
@@ -2,8 +2,8 @@
 
 import argparse
 import os
-import pathlib
 from collections import namedtuple
+from pathlib import Path
 from typing import Any, Callable, Iterable, Iterator, Sequence
 import yaml
@@ -252,7 +252,7 @@ def main() -> None:
     options = parser.parse_args()
 
     # Assumes that this file lives at PYTORCH_ROOT/torchgen/gen_backend_stubs.py
-    torch_root = pathlib.Path(__file__).parent.parent.parent.absolute()
+    torch_root = Path(__file__).parent.parent.parent.absolute()
     aten_path = str(torch_root / "aten" / "src" / "ATen")
     lazy_ir_generator: type[GenLazyIR] = default_args.lazy_ir_generator
     if options.gen_ts_lowerings: