8000 [BE][Easy] replace `import pathlib` with `from pathlib import Path` by XuehaiPan · Pull Request #129426 · pytorch/pytorch · GitHub

Closed · wants to merge 13 commits
Changes from all commits (32 .py files)

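The whole PR applies one mechanical pattern. A minimal before/after sketch of that pattern (the `out_dir` line is taken from the codegen.py hunk below; everything else is illustrative):

# Before: import the module, qualify every use.
import pathlib

out_dir = pathlib.Path(__file__).parent

# After: import the class directly, shortening every call site.
from pathlib import Path

out_dir = Path(__file__).parent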
5 changes: 3 additions & 2 deletions aten/src/ATen/nnapi/codegen.py
@@ -7,10 +7,11 @@
we need with dlsym. We also generate a "check" wrapper that checks
return values and throws C++ exceptions on errors.
"""
-import pathlib

import re
import sys
import textwrap
+from pathlib import Path


PREFIX = """\
@@ -231,7 +232,7 @@ def main(argv):
)
)

-out_dir = pathlib.Path(__file__).parent
+out_dir = Path(__file__).parent

(out_dir / "nnapi_wrapper.h").write_text(
PREFIX
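For readers new to pathlib, a short self-contained sketch of the two features this hunk relies on, the "/" join operator and write_text; the header name is a stand-in, not the real generated file:

from pathlib import Path

out_dir = Path(__file__).parent
header = out_dir / "example_wrapper.h"  # "/" joins Path segments
header.write_text("// generated\n")     # create or overwrite the file in one call
print(header.read_text())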
4 changes: 2 additions & 2 deletions benchmarks/dynamo/ci_expected_accuracy/update_expected.py
@@ -18,12 +18,12 @@
import argparse
import json
import os
-import pathlib
import subprocess
import sys
import urllib
from io import BytesIO
from itertools import product
+from pathlib import Path
from urllib.request import urlopen
from zipfile import ZipFile

@@ -34,7 +34,7 @@
# https://console.rockset.com/lambdas/details/commons.artifacts
ARTIFACTS_QUERY_URL = "https://api.usw2a1.rockset.com/v1/public/shared_lambdas/4ca0033e-0117-41f5-b043-59cde19eff35"
CSV_LINTER = str(
-pathlib.Path(__file__).absolute().parent.parent.parent.parent
+Path(__file__).absolute().parent.parent.parent.parent
/ "tools/linter/adapters/no_merge_conflict_csv_linter.py"
)

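The chained .parent.parent.parent.parent above is untouched apart from the rename; for reference, Path.parents offers an equivalent indexed spelling (an aside, not a change this PR makes):

from pathlib import Path

p = Path("/repo/benchmarks/dynamo/ci_expected_accuracy/update_expected.py")
assert p.parent.parent.parent.parent == p.parents[3]  # both are /repo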
13 changes: 6 additions & 7 deletions benchmarks/dynamo/common.py
@@ -2,7 +2,6 @@
from __future__ import annotations

import abc
-
import argparse
import collections
import contextlib
@@ -14,15 +13,14 @@
import itertools
import logging
import os
-import pathlib
import shutil
import signal
import subprocess
import sys
import time
import weakref
from contextlib import contextmanager
-
+from pathlib import Path
from typing import (
Any,
Callable,
@@ -60,6 +58,7 @@
same,
)

+
try:
from torch._dynamo.utils import (
clone_inputs,
@@ -81,6 +80,7 @@
from torch.utils import _pytree as pytree
from torch.utils._pytree import tree_map, tree_map_only

+
try:
import torch_xla
import torch_xla.core.xla_model as xm
@@ -920,7 +920,7 @@ def speedup_experiment_onnx(
2. Running ORT with OnnxModel.

Writes to ./{output_filename}, which should be
-`pathlib.Path(self.output_dir) / f"{self.compiler}_{suite}_{self.dtype}_{self.mode}_{self.device}_{self.testing}.csv".
+`Path(self.output_dir) / f"{self.compiler}_{suite}_{self.dtype}_{self.mode}_{self.device}_{self.testing}.csv".

TODO(bowbao): Record export time and export peak memory usage.
"""
@@ -1347,8 +1347,8 @@ def deepcopy_model_and_inputs_to_device(self, model, example_inputs, target_devi
@classmethod
def _generate_onnx_model_directory(
cls, output_directory: str, compiler_name: str, model_name: str
-) -> pathlib.Path:
-model_path = pathlib.Path(
+) -> Path:
+model_path = Path(
output_directory,
".onnx_models",
model_name,
@@ -2389,7 +2389,6 @@ def get_fsdp_auto_wrap_policy(self, model_name: str):
from diffusers.models.transformer_2d import Transformer2DModel
from torchbenchmark.models.nanogpt.model import Block
from transformers.models.llama.modeling_llama import LlamaDecoderLayer
-
from transformers.models.t5.modeling_t5 import T5Block
from transformers.models.whisper.modeling_whisper import WhisperEncoderLayer

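_generate_onnx_model_directory above passes several segments to one Path() call; Path accepts multiple segments and joins them, so the call is equivalent to chaining "/". A standalone sketch with made-up names:

from pathlib import Path

model_path = Path("output", ".onnx_models", "some_model")
assert model_path == Path("output") / ".onnx_models" / "some_model"
model_path.mkdir(parents=True, exist_ok=True)  # create missing parents, tolerate reruns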
10 changes: 5 additions & 5 deletions scripts/compile_tests/update_failures.py
@@ -1,8 +1,8 @@
#!/usr/bin/env python3
import argparse
import os
-import pathlib
import subprocess
+from pathlib import Path

from common import (
get_testcases,
@@ -194,7 +194,7 @@ def read_test_results(directory):
"filename",
nargs="?",
default=str(
-pathlib.Path(__file__).absolute().parent.parent.parent
+Path(__file__).absolute().parent.parent.parent
/ "torch/testing/_internal/dynamo_test_failures.py"
),
help="Optional path to dynamo_test_failures.py",
@@ -203,7 +203,7 @@ def read_test_results(directory):
parser.add_argument(
"test_dir",
nargs="?",
-default=str(pathlib.Path(__file__).absolute().parent.parent.parent / "test"),
+default=str(Path(__file__).absolute().parent.parent.parent / "test"),
help="Optional path to test folder",
)
parser.add_argument(
@@ -219,7 +219,7 @@ def read_test_results(directory):
action="store_true",
)
args = parser.parse_args()
-assert pathlib.Path(args.filename).exists(), args.filename
-assert pathlib.Path(args.test_dir).exists(), args.test_dir
+assert Path(args.filename).exists(), args.filename
+assert Path(args.test_dir).exists(), args.test_dir
dynamo38, dynamo311 = download_reports(args.commit, ("dynamo38", "dynamo311"))
update(args.filename, args.test_dir, dynamo38, dynamo311, args.also_remove_skips)
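The script keeps string defaults and re-wraps them with Path for the existence checks; an alternative pattern (not what this PR does) is to hand construction to argparse via type=Path:

import argparse
from pathlib import Path

parser = argparse.ArgumentParser()
parser.add_argument(
    "test_dir",
    nargs="?",
    type=Path,  # argparse builds the Path for us
    default=Path(__file__).absolute().parent / "test",
)
args = parser.parse_args([])
assert isinstance(args.test_dir, Path)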
7 changes: 4 additions & 3 deletions test/distributed/nn/jit/test_instantiator.py
@@ -1,14 +1,15 @@
#!/usr/bin/env python3
# Owner(s): ["oncall: distributed"]

-import pathlib
import sys
+from pathlib import Path
from typing import Tuple

import torch
import torch.distributed as dist
from torch import nn, Tensor

+
if not dist.is_available():
print("Distributed not available, skipping tests", file=sys.stderr)
sys.exit(0)
@@ -45,7 +46,7 @@ def test_get_arg_return_types_from_interface(self):
self.assertEqual(return_type_str, "Tuple[Tensor, int, str]")

def test_instantiate_scripted_remote_module_template(self):
-dir_path = pathlib.Path(instantiator.INSTANTIATED_TEMPLATE_DIR_PATH)
+dir_path = Path(instantiator.INSTANTIATED_TEMPLATE_DIR_PATH)

# Cleanup.
file_paths = dir_path.glob(f"{instantiator._FILE_PREFIX}*.py")
@@ -69,7 +70,7 @@ def test_instantiate_scripted_remote_module_template(self):
self.assertEqual(num_files_after, 1)

def test_instantiate_non_scripted_remote_module_template(self):
-dir_path = pathlib.Path(instantiator.INSTANTIATED_TEMPLATE_DIR_PATH)
+dir_path = Path(instantiator.INSTANTIATED_TEMPLATE_DIR_PATH)

# Cleanup.
file_paths = dir_path.glob(f"{instantiator._FILE_PREFIX}*.py")
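The cleanup step in both tests pairs Path.glob with unlink. A self-contained sketch of that idiom, using a temporary directory and a made-up prefix:

import tempfile
from pathlib import Path

with tempfile.TemporaryDirectory() as tmp:
    dir_path = Path(tmp)
    (dir_path / "_generated_a.py").touch()
    (dir_path / "_generated_b.py").touch()
    for file_path in dir_path.glob("_generated_*.py"):  # lazily yields matches
        file_path.unlink()  # remove each generated file
    assert not list(dir_path.glob("_generated_*.py"))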
5 changes: 2 additions & 3 deletions test/export/test_serialize.py
@@ -7,10 +7,10 @@
# Owner(s): ["oncall: export"]
import copy
import io
-import pathlib
import tempfile
import unittest
import zipfile
+from pathlib import Path

import torch
import torch._dynamo as torchdynamo
@@ -38,7 +38,6 @@
TemporaryFileName,
TestCase,
)
-
from torch.testing._internal.torchbind_impls import init_torchbind_implementations


@@ -1052,7 +1051,7 @@ def forward(self, x, y):
ep = export(f, inp)

with TemporaryFileName() as fname:
-path = pathlib.Path(fname)
+path = Path(fname)
save(ep, path)
loaded_ep = load(path)

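save(ep, path) accepts the Path because Path implements the os.PathLike protocol, so any API that ends up calling os.fspath() or open() takes it unchanged. A rough sketch of why (hypothetical save, not PyTorch's real implementation):

import os
from pathlib import Path

def save(obj, f):  # hypothetical API taking str or os.PathLike
    with open(f, "w") as fp:  # open() accepts os.PathLike directly
        fp.write(repr(obj))

assert os.fspath(Path("some") / "file.pt") == os.path.join("some", "file.pt")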
7 changes: 4 additions & 3 deletions test/inductor/test_debug_trace.py
@@ -1,16 +1,17 @@
# Owner(s): ["module: inductor"]
import logging
import os
-import pathlib
import re
import shutil
import sys
import unittest
+from pathlib import Path

import torch
from torch._inductor import config, test_operators
from torch.testing._internal.inductor_utils import GPU_TYPE, HAS_GPU

+
try:
try:
from . import test_torchinductor
@@ -22,7 +23,7 @@
raise


-def filesize(filename: pathlib.Path):
+def filesize(filename: Path):
assert filename.exists(), f"{filename} is missing"
return os.stat(filename).st_size

@@ -43,7 +44,7 @@ def fn(a, b):
self.assertEqual(len(cm.output), 1)
m = re.match(r"WARNING.* debug trace: (.*)", cm.output[0])
self.assertTrue(m)
-filename = pathlib.Path(m.group(1))
+filename = Path(m.group(1))
self.assertTrue(filename.is_dir())
self.assertGreater(filesize(filename / "fx_graph_readable.py"), 512)
self.assertGreater(filesize(filename / "fx_graph_runnable.py"), 512)
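filesize above still goes through os.stat, which accepts a Path; the method form Path.stat is equivalent (an observation, not part of this diff):

import os
from pathlib import Path

this_file = Path(__file__)
assert os.stat(this_file).st_size == this_file.stat().st_size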
13 changes: 7 additions & 6 deletions test/jit/test_save_load.py
@@ -2,14 +2,15 @@

import io
import os
-import pathlib
import sys
+from pathlib import Path
from typing import NamedTuple, Optional

import torch
from torch import Tensor
from torch.testing._internal.common_utils import skipIfTorchDynamo, TemporaryFileName

+
# Make the helper files in test/ importable
pytorch_test_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.append(pytorch_test_dir)
@@ -397,7 +398,7 @@ def forward(self, a):

# Save then load.
with TemporaryFileName() as fname:
-path = pathlib.Path(fname)
+path = Path(fname)
m.save(path)
m2 = torch.jit.load(path)

@@ -624,7 +625,7 @@ def get_loaded_inputs(inputs):
traced_module = torch.jit.trace(module, input1)
traced_inputs = list(traced_module.graph.inputs())
with TemporaryFileName() as fname:
-path = pathlib.Path(fname)
+path = Path(fname)
traced_module.save(path)
print(traced_module.graph)
loaded_module = torch.jit.load(path, _restore_shapes=True)
@@ -640,7 +641,7 @@ def get_loaded_inputs(inputs):
traced_module._c._retrieve_traced_inputs()["forward"], [input_tensor]
)
with TemporaryFileName() as fname:
-path = pathlib.Path(fname)
+path = Path(fname)
traced_module.save(path)
loaded_module = torch.jit.load(path, _restore_shapes=True)
loaded_inputs = list(loaded_module.graph.inputs())
@@ -659,7 +660,7 @@ def get_loaded_inputs(inputs):
self.assertEqual(len(traced_module._c._retrieve_traced_inputs()), 0)

with TemporaryFileName() as fname:
-path = pathlib.Path(fname)
+path = Path(fname)
traced_module.save(path)
loaded_module = torch.jit.load(path, _restore_shapes=True)
loaded_inputs = list(loaded_module.graph.inputs())
@@ -1055,7 +1056,7 @@ def forward(self, a):

# Save then load.
with TemporaryFileName() as fname:
-path = pathlib.Path(fname)
+path = Path(fname)
torch.jit.save_jit_module_to_flatbuffer(m, path)
m2 = torch.jit.load(path)

6 changes: 3 additions & 3 deletions test/lazy/test_ts_opinfo.py
@@ -3,7 +3,7 @@
import functools
import itertools
import os
-import pathlib
+from pathlib import Path
from typing import Sequence
from unittest import skip

@@ -20,10 +20,10 @@
ops,
)
from torch.testing._internal.common_methods_invocations import op_db
-
from torch.testing._internal.common_utils import run_tests, TestCase
from torch.testing._internal.jit_utils import JitTestCase

+
torch._lazy.ts_backend.init()


@@ -36,7 +36,7 @@ def remove_suffixes(l):


def init_lists():
-path_to_script = pathlib.Path(os.path.abspath(os.path.dirname(__file__)))
+path_to_script = Path(os.path.abspath(os.path.dirname(__file__)))
TS_NATIVE_FUNCTIONS_PATH = (
path_to_script.parent.parent / "aten/src/ATen/native/ts_native_functions.yaml"
)
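init_lists still builds the directory with os.path helpers and wraps the result; the pure-pathlib spelling would be Path(__file__).resolve().parent. The two agree unless symlinks are involved, since resolve() follows symlinks while os.path.abspath does not (an aside, not a change the PR makes):

import os
from pathlib import Path

a = Path(os.path.abspath(os.path.dirname(__file__)))
b = Path(__file__).resolve().parent
assert a == b  # holds when no symlinks appear in the path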