Suppress more warnings by tugsbayasgalan · Pull Request #148488 · pytorch/pytorch · GitHub
Suppress more warnings #148488


Status: Open · wants to merge 4 commits into base: gh/tugsbayasgalan/299/base
5 changes: 4 additions & 1 deletion torch/_export/passes/lift_constants_pass.py
@@ -1,5 +1,6 @@
 # mypy: allow-untyped-defs
 import collections
+import logging
 import warnings
 from typing import Any, Union
@@ -18,6 +19,8 @@
 )
 from torch.fx.graph_module import _get_attr

+log = logging.getLogger(__name__)
+

 class ConstantAttrMap(collections.abc.MutableMapping):
     """A mapping class that understands how to use module constants (tensors,
@@ -204,7 +207,7 @@ def lift_constants_pass(
         elif isinstance(constant_val, torch.Tensor):
             # Remove the parameterness of constant_val
             if isinstance(constant_val, torch.nn.Parameter):
-                warnings.warn(
+                log.debug(
                     f"{node.target} created when tracing {node.meta.get('stack_trace', '<unknown stack>')} is a parameter. But "
                     f"it's not registered with register_parameter(). export will treat it as a constant tensor"
                 )
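
Note: because this message now goes through `logging.getLogger(__name__)` instead of `warnings.warn`, it is silent by default but can be re-surfaced with the standard `logging` machinery. A minimal sketch (not part of this PR; the handler setup is an assumption, though the logger name follows from `__name__` in the diff above):

import logging

# Ensure the root logger has a handler so debug records are printed
# (assumption: no other logging configuration is in place).
logging.basicConfig()

# The pass creates its logger with logging.getLogger(__name__), so the
# logger name is the module path; setting it to DEBUG re-surfaces the
# "is a parameter" message that this PR downgrades from a warning.
logging.getLogger("torch._export.passes.lift_constants_pass").setLevel(logging.DEBUG)
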
7 changes: 6 additions & 1 deletion torch/export/_unlift.py
@@ -78,7 +78,12 @@ def _unlift_inputs_as_getattr(

         else:
             with gm.graph.inserting_after(input_node):
-                getattr_node = gm.graph.get_attr(lifted_node)
+                # It is fine to ignore this warning because
+                # it is guaranteed that we will populate this
+                # attr later.
+                with warnings.catch_warnings():
+                    warnings.simplefilter("ignore")
+                    getattr_node = gm.graph.get_attr(lifted_node)
             input_node.replace_all_uses_with(getattr_node)
             metadata = input_node.meta
             gm.graph.erase_node(input_node)
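
The suppression here is scoped rather than global: `warnings.catch_warnings()` snapshots the active warning filters and restores them on exit, so only warnings raised inside the `with` block are dropped. A self-contained sketch of the same pattern (the `noisy` helper is hypothetical, standing in for `graph.get_attr` on a not-yet-populated attribute):

import warnings


def noisy() -> int:
    # Stand-in for a call that warns about a transient condition the
    # caller knows is benign.
    warnings.warn("attribute not yet populated")
    return 42


with warnings.catch_warnings():
    warnings.simplefilter("ignore")  # drop warnings only inside this block
    value = noisy()                  # warning suppressed

noisy()  # filters are restored here, so the warning is shown again
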
3 changes: 1 addition & 2 deletions torch/fx/experimental/proxy_tensor.py
Original file line number Diff line number Diff line change
@@ -14,7 +14,6 @@
 import traceback
 import typing
 import typing_extensions
-import warnings
 import weakref
 from collections import defaultdict, OrderedDict
 from collections.abc import Generator, Mapping, Sequence
@@ -1807,7 +1806,7 @@ def call_module(
         try:
             return Tracer.call_module(self, m, forward, args, kwargs)
         except _ModuleNotInstalledAsSubmoduleError:
-            warnings.warn(
+            log.debug(
                 f"Unable to find the path of the module {m}. "
                 "This might be because the module was not properly registered "
                 "as a submodule, which is not good practice. We will trace "
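
For context, `_ModuleNotInstalledAsSubmoduleError` fires when a module is called during tracing without being registered on its parent. A hedged sketch of one way that situation typically arises (the `Outer` class is hypothetical, not from this PR):

import torch


class Outer(torch.nn.Module):
    def __init__(self) -> None:
        super().__init__()
        # Kept in a plain dict, so it is NOT registered as a submodule;
        # tracing this call is what can trigger the (now debug-level)
        # "Unable to find the path of the module" message.
        self.hidden = {"linear": torch.nn.Linear(2, 2)}
        # Assigned as an attribute, so it is registered properly and
        # tracing can resolve its path.
        self.linear = torch.nn.Linear(2, 2)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return self.hidden["linear"](x) + self.linear(x)
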