
Commit 43ab919

fix: Fix PTQ export (#3447)
1 parent: f699c46

2 files changed: +2 −5 lines

examples/dynamo/vgg16_ptq.py

Lines changed: 1 addition & 3 deletions
@@ -233,10 +233,8 @@ def calibrate_loop(model):
 with export_torch_mode():
     # Compile the model with Torch-TensorRT Dynamo backend
     input_tensor = images.cuda()
-    # torch.export.export() failed due to RuntimeError: Attempting to use FunctionalTensor on its own. Instead, please use it with a corresponding FunctionalTensorMode()
-    from torch.export._trace import _export
 
-    exp_program = _export(model, (input_tensor,))
+    exp_program = torch.export.export(model, (input_tensor,), strict=False)
     if args.quantize_type == "int8":
         enabled_precisions = {torch.int8}
     elif args.quantize_type == "fp8":
tests/py/dynamo/models/test_models_export.py

Lines changed: 1 addition & 2 deletions
@@ -206,7 +206,6 @@ def test_resnet18_half(ir):
 def test_base_fp8(ir):
     import modelopt.torch.quantization as mtq
     from modelopt.torch.quantization.utils import export_torch_mode
-    from torch.export._trace import _export
 
     class SimpleNetwork(torch.nn.Module):
         def __init__(self):
@@ -234,7 +233,7 @@ def calibrate_loop(model):
 
     with torch.no_grad():
         with export_torch_mode():
-            exp_program = _export(model, (input_tensor,))
+            exp_program = torch.export.export(model, (input_tensor,), strict=False)
             trt_model = torchtrt.dynamo.compile(
                 exp_program,
                 inputs=[input_tensor],
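
For context, below is a minimal, self-contained sketch of the export pattern both changed files move to: the public torch.export.export entry point called with strict=False, replacing the private torch.export._trace._export helper. The TinyNet module and input shape are illustrative assumptions, not code from this repository; the commit applies the same call to the quantized VGG16 example and the fp8 test model.

import torch


class TinyNet(torch.nn.Module):
    """Stand-in for the quantized models exported in the changed files."""

    def __init__(self):
        super().__init__()
        self.linear = torch.nn.Linear(16, 4)

    def forward(self, x):
        return torch.relu(self.linear(x))


model = TinyNet().eval()
example_input = torch.randn(2, 16)

with torch.no_grad():
    # strict=False selects the non-strict exporter. Per the comment removed in
    # vgg16_ptq.py, the default strict export previously failed with
    # "Attempting to use FunctionalTensor on its own" when the model had been
    # prepared under modelopt's export_torch_mode().
    exp_program = torch.export.export(model, (example_input,), strict=False)

print(exp_program)

In the changed files the resulting exp_program is then passed to torchtrt.dynamo.compile() exactly as before; only the export call itself changes.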
