Skip to content

Commit fc6d86e

Browse files
authored
Fix pyre issues from non-mypy dirs
Differential Revision: D71585381. Pull Request resolved: #9477.
1 parent 5fdfa51 commit fc6d86e

File tree

2 files changed

+2
-2
lines changed

2 files changed

+2
-2
lines changed

examples/models/llama/export_llama_lib.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1208,7 +1208,7 @@ def get_llama_model(args):
12081208
_validate_args(args)
12091209
e_mgr = _prepare_for_llama_export(args)
12101210
model = (
1211-
e_mgr.model.eval().to(device="cuda") # pyre-ignore
1211+
e_mgr.model.eval().to(device="cuda")
12121212
if torch.cuda.is_available()
12131213
else e_mgr.model.eval().to(device="cpu")
12141214
)

exir/tests/test_memory_format_ops_pass_utils.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -127,7 +127,7 @@ def detect_ambiguity(gm):
127127
Check every node's output tensor dim_order and raise if it is ambiguous for a list of formats.
128128
"""
129129

130-
def get_tensors(node: torch.fx.Node) -> torch.Tensor:
130+
def get_tensors(node: torch.fx.Node) -> List[torch.Tensor]:
131131
val = node.meta["val"]
132132
if isinstance(val, torch.Tensor):
133133
return [val]

0 commit comments

Comments (0)