add backend_specialization kwarg to mark_dynamic · pytorch/pytorch@a62a4d9 · GitHub
[go: up one dir, main page]

Skip to content

Commit a62a4d9

Browse files
committed
add backend_specialization kwarg to mark_dynamic
ghstack-source-id: eca8398 Pull Request resolved: #152597
1 parent 70c5a71 commit a62a4d9

File tree

1 file changed

+4
-2
lines changed

1 file changed

+4
-2
lines changed

torch/_dynamo/decorators.py

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -552,7 +552,7 @@ def mark_unbacked(t, index, strict=False):
552552

553553

554554
@forbid_in_graph
555-
def mark_dynamic(t, index, *, min=None, max=None):
555+
def mark_dynamic(t, index, *, min=None, max=None, backend_specializations=None):
556556
"""
557557
Mark a tensor as having a dynamic dim and set corresponding min and max range for the dim.
558558
@@ -587,14 +587,16 @@ def mark_dynamic(t, index, *, min=None, max=None):
587587
if not hasattr(t, "_dynamo_dynamic_indices"):
588588
t._dynamo_dynamic_indices = set()
589589
t._dynamo_dynamic_range = set()
590+
t._backend_specializations = {}
590591
# TODO(voz): Should we bounds check?
591592
t._dynamo_dynamic_indices.add(index)
592593
t._dynamo_dynamic_range.add(_DimRange(index, min, max))
594+
t._backend_specializations[index] = backend_specializations
593595
return
594596

595597
assert isinstance(index, (list, tuple))
596598
for i in index:
597-
mark_dynamic(t, i, min=min, max=max)
599+
mark_dynamic(t, i, min=min, max=max, backend_specializations=backend_specializations)
598600

599601

600602
@forbid_in_graph

0 commit comments

Comments (0)