1 parent 453ec83 commit 2c279f2
test/inductor/test_flex_decoding.py
@@ -23,7 +23,6 @@
 from torch.testing._internal import common_utils
 from torch.testing._internal.common_cuda import PLATFORM_SUPPORTS_BF16
 from torch.testing._internal.common_device_type import (
-    expectedFailureXPU,
     flex_attention_supported_platform as supported_platform,
 )
 from torch.testing._internal.common_utils import skipIfRocm