remove args · pytorch/executorch@3451182 · GitHub

Commit 3451182

angelayi authored and lucylq committed

remove args

1 parent d3bcadb commit 3451182

File tree

.ci/docker/ci_commit_pins/pytorch.txt
examples/models/llama3_2_vision/text_decoder/test/test_text_decoder.py
examples/models/llama3_2_vision/vision_encoder/test/test_vision_encoder.py
extension/llm/export/builder.py
extension/llm/modules/test/test_position_embeddings.py
install_requirements.py

6 files changed: +18 -15 lines changed

.ci/docker/ci_commit_pins/pytorch.txt

Lines changed: 1 addition & 1 deletion

@@ -1 +1 @@
-80ca6dd892613fd4f1dee9040b8273ddeadb1c50
+2ea4b56ec872424e486c4fe2d55da061067a2ed3

examples/models/llama3_2_vision/text_decoder/test/test_text_decoder.py

Lines changed: 0 additions & 2 deletions

@@ -74,8 +74,6 @@ def test_llama3_2_text_decoder_aoti(self) -> None:
         with tempfile.TemporaryDirectory() as tmpdir:
             path = torch._inductor.aoti_compile_and_package(
                 ep,
-                model.get_example_inputs(),
-                kwargs=model.get_example_kwarg_inputs(),
                 package_path=os.path.join(tmpdir, "text_decoder.pt2"),
             )
             encoder_aoti = torch._inductor.aoti_load_package(path)
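
Note: the deleted arguments track an API change in the pinned PyTorch nightly. torch._inductor.aoti_compile_and_package reads the example inputs recorded in the ExportedProgram itself, so repeating them here is redundant; this commit drops them ahead of the signature change in the nightly pinned above. A minimal sketch of the updated call pattern, assuming any exportable nn.Module (the Linear model and inputs below are illustrative, not from this repo):

import os
import tempfile

import torch

# Illustrative stand-ins for the real text decoder and its example inputs.
model = torch.nn.Linear(4, 4)
example_inputs = (torch.randn(2, 4),)

# The ExportedProgram already records the example inputs used at export time...
ep = torch.export.export(model, example_inputs)

with tempfile.TemporaryDirectory() as tmpdir:
    # ...so aoti_compile_and_package no longer takes them as arguments.
    path = torch._inductor.aoti_compile_and_package(
        ep,
        package_path=os.path.join(tmpdir, "model.pt2"),
    )
    compiled = torch._inductor.aoti_load_package(path)
    print(compiled(*example_inputs))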

examples/models/llama3_2_vision/vision_encoder/test/test_vision_encoder.py

Lines changed: 0 additions & 1 deletion

@@ -36,7 +36,6 @@ def test_flamingo_vision_encoder(self) -> None:
         with tempfile.TemporaryDirectory() as tmpdir:
             path = torch._inductor.aoti_compile_and_package(
                 ep,
-                model.get_example_inputs(),
                 package_path=os.path.join(tmpdir, "vision_encoder.pt2"),
             )
             print(path)

extension/llm/export/builder.py

Lines changed: 16 additions & 9 deletions

@@ -184,15 +184,22 @@ def export(self) -> "LLMEdgeManager":
         # 2. torch.no_grad() is for getting rid of the dropout (not sure why training ops will show up)
         with torch.nn.attention.sdpa_kernel([SDPBackend.MATH]), torch.no_grad():
             if hasattr(self.args, "qnn") and self.args.qnn:
-                # TODO: this is temporary and export_for_training doesn't work with qnn either. We need a
-                # functional graph. See issue https://github.com/pytorch/executorch/pull/4627 for more details
-                exported_module = torch.export.export(
-                    self.model,
-                    self.example_inputs,
-                    self.example_kwarg_inputs,
-                    dynamic_shapes=dynamic_shape,
-                    strict=True,
-                )
+                from unittest.mock import patch
+
+                with patch.object(
+                    torch._utils_internal,
+                    "export_training_ir_rollout_check",
+                    return_value=False,
+                ):
+                    # TODO: this is temporary and export_for_training doesn't work with qnn either. We need a
+                    # functional graph. See issue https://github.com/pytorch/executorch/pull/4627 for more details
+                    exported_module = torch.export.export(
+                        self.model,
+                        self.example_inputs,
+                        self.example_kwarg_inputs,
+                        dynamic_shapes=dynamic_shape,
+                        strict=True,
+                    )
             else:
                 logging.info("Exporting with:")
                 logging.info(f"inputs: {self.example_inputs}")
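
The QNN branch now wraps torch.export.export in unittest.mock.patch.object so that torch._utils_internal.export_training_ir_rollout_check temporarily returns False, keeping the export on the older non-training IR that the QNN flow expects. A standalone sketch of the same pattern; the patched module and flag name come from the diff above, while the model and inputs are illustrative:

from unittest.mock import patch

import torch
import torch._utils_internal

model = torch.nn.Linear(4, 4)          # stand-in for self.model
example_inputs = (torch.randn(2, 4),)  # stand-in for self.example_inputs

# While the patch is active, the rollout check reports the training IR as
# disabled, so torch.export.export emits the pre-training-IR graph.
with patch.object(
    torch._utils_internal,
    "export_training_ir_rollout_check",
    return_value=False,
):
    exported_module = torch.export.export(model, example_inputs, strict=True)

# The patch is reverted on context exit; the ExportedProgram remains usable.
print(exported_module.graph_module)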

extension/llm/modules/test/test_position_embeddings.py

Lines changed: 0 additions & 1 deletion

@@ -177,7 +177,6 @@ def test_tiled_token_positional_embedding_aoti(self):
         with tempfile.TemporaryDirectory() as tmpdir:
             path = torch._inductor.aoti_compile_and_package(
                 tpe_ep,
-                (self.x, self.aspect_ratio),
                 package_path=os.path.join(tmpdir, "tpe.pt2"),
             )
             tpe_aoti = load_package(path)

install_requirements.py

Lines changed: 1 addition & 1 deletion

@@ -112,7 +112,7 @@ def python_is_compatible():
 # NOTE: If a newly-fetched version of the executorch repo changes the value of
 # NIGHTLY_VERSION, you should re-run this script to install the necessary
 # package versions.
-NIGHTLY_VERSION = "dev20241206"
+NIGHTLY_VERSION = "dev20241218"
 
 # The pip repository that hosts nightly torch packages.
 TORCH_NIGHTLY_URL = "https://download.pytorch.org/whl/nightly/cpu"
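
The NIGHTLY_VERSION bump keeps the installed Python wheels in step with the CI commit pin updated above. For context, a hypothetical sketch of how such a dated pin typically feeds the pip install (the actual package list and base versions in install_requirements.py may differ):

import subprocess
import sys

NIGHTLY_VERSION = "dev20241218"
TORCH_NIGHTLY_URL = "https://download.pytorch.org/whl/nightly/cpu"

# Hypothetical wiring: pin the nightly torch wheel to the dated build so the
# installed package matches the pinned PyTorch commit. "2.6.0" is illustrative.
requirement = f"torch==2.6.0.{NIGHTLY_VERSION}"

subprocess.check_call(
    [
        sys.executable, "-m", "pip", "install",
        requirement,
        "--extra-index-url", TORCH_NIGHTLY_URL,
    ]
)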

0 commit comments
