From 3b3d68bd1017d1ceb618ba0e54288334ecc5186f Mon Sep 17 00:00:00 2001
From: Yanbo Liang
Date: Tue, 4 Mar 2025 14:51:24 -0800
Subject: [PATCH] [Dynamo] Replace unimplemented with unimplemented_v2 for
 variables/distributed

---
 torch/_dynamo/variables/distributed.py | 20 +++++++++++++++++---
 1 file changed, 17 insertions(+), 3 deletions(-)

diff --git a/torch/_dynamo/variables/distributed.py b/torch/_dynamo/variables/distributed.py
index 3c52bb5d18ea14..1dda1200fb72b4 100644
--- a/torch/_dynamo/variables/distributed.py
+++ b/torch/_dynamo/variables/distributed.py
@@ -29,7 +29,7 @@
 
 from .. import compiled_autograd, variables
 from .._trace_wrapped_higher_order_op import trace_wrapped
-from ..exc import unimplemented
+from ..exc import unimplemented_v2
 from ..external_utils import call_module_hooks_from_backward_state
 from ..guards import GuardBuilder, install_guard
 from ..source import AttrSource
@@ -56,7 +56,14 @@ class DistributedVariable(VariableTracker):
     def __init__(self, value, **kwargs) -> None:
         super().__init__(**kwargs)
         if not DistributedVariable.is_available():
-            unimplemented("torch.distributed package is not available!")
+            unimplemented_v2(
+                gb_type="torch.distributed package is not available!",
+                context="",
+                explanation="The PyTorch package doesn't include torch.distributed when building from source.",
+                hints=[
+                    "Set USE_DISTRIBUTED=1 to enable it when building PyTorch from source."
+                ],
+            )
         self.value = value
 
     def python_type(self):
@@ -339,7 +346,14 @@ def create(
         user_pre_hooks: VariableTracker,
     ):
         if not compiled_autograd.compiled_autograd_enabled:
-            unimplemented("module-level backwards hooks require compiled autograd")
+            unimplemented_v2(
+                gb_type="Module-level backwards hooks require compiled autograd.",
+                context="",
+                explanation="",
+                hints=[
+                    "Enable compiled autograd by setting torch._dynamo.config.compiled_autograd = True."
+                ],
+            )
 
         def _in_graph_bw_hooks(bw_state: BackwardState):
             """