1 parent 71027b1 commit ddc628f
test/test_autograd.py
@@ -76,6 +76,7 @@
     skipIfNoLapack,
     skipIfTorchDynamo,
     skipIfWindows,
+    skipIfXpu,
     slowTest,
     TestCase,
     xfailIfTorchDynamo,
@@ -7432,6 +7433,7 @@ def test_checkpointing_without_reentrant_correct_grad(self):
 
     # PYTORCH_TEST_WITH_DYNAMO=1 test fails on CI but can't repro locally
     @skipIfTorchDynamo("https://github.com/pytorch/pytorch/issues/127115")
+    @skipIfXpu(msg="torch._C._scatter Not implemented on XPU, issue #143239")
     def test_checkpointing_without_reentrant_dataparallel(self):
         """
         Verifies gradient correctness when checkpoint without reentrant autograd
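
For context, skipIfXpu is imported from torch.testing._internal.common_utils alongside the other skip helpers in the first hunk, and is applied here because torch._C._scatter is not implemented on the XPU backend. A minimal sketch of the skip-decorator pattern it follows is shown below; the body is an illustrative assumption, not the actual PyTorch source, which may gate on different conditions.

    import functools
    import unittest

    import torch

    def skipIfXpu(msg="test doesn't currently work on XPU"):
        # Hypothetical sketch of a skip-if-XPU decorator in the style of
        # skipIfWindows/skipIfNoLapack; the real definition lives in
        # torch.testing._internal.common_utils and may differ in detail.
        def decorator(fn):
            @functools.wraps(fn)
            def wrapper(*args, **kwargs):
                # Skip when an XPU device is usable; otherwise run the test.
                if hasattr(torch, "xpu") and torch.xpu.is_available():
                    raise unittest.SkipTest(msg)
                return fn(*args, **kwargs)
            return wrapper
        return decorator

Used as in the diff above, @skipIfXpu(msg="...") wraps the test method so it reports as skipped (with the given reason) on XPU runners instead of failing on the unimplemented operator.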