
Commit a85a42b

Gasoonjia authored and YIWENX14 committed
remove redundant check_valid_op in EdgeDialectVerifier
Differential Revision: D68611967
Pull Request resolved: #7941
1 parent bbf0f9b commit a85a42b

1 file changed (+1 −26 lines)

exir/verification/verifier.py

Lines changed: 1 addition & 26 deletions
@@ -129,7 +129,7 @@ def check_valid_op(self, op):
 
 2. Sometimes inference and training gives slightly different op set. Try adding `with torch.no_grad():` context manager if you are export for inference only.
 
-3. If the error persists after 2, this is likely caused by torch.export() + core ATen decomposition producing unexpected operators for your model.
+3. If the error persists after 2, this is likely caused by torch.export() + core ATen decomposition producing unexpected operators for your model.
 If you believe this operator should be included into core ATen opset, please create an issue in https://github.com/pytorch/pytorch/issues and add `module: core aten` tag.
 """
 )
@@ -274,31 +274,6 @@ def check_additional(self, gm: GraphModule) -> None:
         _check_tensors_are_contiguous(gm)
         _check_tensor_args_matching_op_allowed_dtype(gm)
 
-    def check_valid_op(self, op):
-        if isinstance(op, OpOverload):
-            # TODO These special ops should be removable easily.
-            if op.namespace in (
-                "quantized_decomposed",
-                "boltnn_nimble",
-                "nimble",
-                "quantized",
-                "dim_order_ops",
-            ) or op in (
-                torch.ops.aten.mkldnn_rnn_layer.default,
-                torch.ops.aten._upsample_bilinear2d_aa.default,
-                torch.ops.aten.quantize_per_tensor.default,
-                torch.ops.aten.dequantize.self,
-                torch.ops.aten.max.default,
-                torch.ops.aten.full_like.default,  # TODO(T183507359)
-            ):
-                return
-            if torch.Tag.core not in op.tags and torch.Tag.view_copy not in op.tags:
-                # NOTE(qihan): whether view_copy operators are marked as canonical is still under
-                # discussion.
-                raise SpecViolationError(
-                    f"Operator {op.__module__}.{op.__name__} is not Aten Canonical."
-                )
-
     def is_valid(self, gm: GraphModule) -> bool:
         try:
             self(gm)
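
For context, a minimal sketch of how the edge dialect verifier is typically exercised after this change, assuming the standard torch.export plus executorch.exir.to_edge flow; the Add module and tensor shapes are purely illustrative, and the exact verifier wiring inside to_edge may differ between versions.

# Hedged sketch: exercising the edge dialect verifier via the usual export flow.
# Assumption: to_edge runs IR validity checks (the edge verifier) by default.
import torch
from executorch.exir import to_edge


class Add(torch.nn.Module):  # illustrative model, not from the commit
    def forward(self, x, y):
        return x + y


ep = torch.export.export(Add(), (torch.randn(2), torch.randn(2)))
# With the redundant check_valid_op override removed, operator validity is
# enforced by the shared verifier path rather than by a duplicate check here.
edge_program = to_edge(ep)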

