
Commit 36f9a3f

Removing the aten.slice converter and the add_slice helper

1 parent: d7c82ab

File tree

2 files changed: 0 additions, 67 deletions


py/torch_tensorrt/fx/converters/aten_ops_converters.py

Lines changed: 0 additions & 18 deletions
@@ -481,24 +481,6 @@ def aten_ops_sym_size(
     return slice_layer.get_output(0)
 
 
-@tensorrt_converter(torch.ops.aten.slice.Tensor)
-def aten_ops_slice(
-    network: TRTNetwork,
-    target: Target,
-    args: Tuple[Argument, ...],
-    kwargs: Dict[str, Argument],
-    name: str,
-) -> Union[TRTTensor, Sequence[TRTTensor]]:
-    kwargs_new = {
-        "input": args[0],
-        "dim": args[1],
-        "start": args[2],
-        "stop": args[3],
-        "step": args[4],
-    }
-    return add_slice(network, target.kwargs_new, name)
-
-
 @tensorrt_converter(torch.ops.aten.leaky_relu.default)
 def aten_ops_leaky_relu(
     network: TRTNetwork,
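
Note on the removed converter: its deleted return statement, return add_slice(network, target.kwargs_new, name), reads a nonexistent kwargs_new attribute off target instead of passing target and kwargs_new as separate arguments, so the converter would have errored at call time. The sketch below shows what a corrected delegation would presumably have looked like; it is not code from this commit, the decorator registration is omitted, and the import paths are assumptions based on identifiers visible in the diff.

    from typing import Dict, Sequence, Tuple, Union

    from torch.fx.node import Argument, Target

    # Assumed import path, based on the type names used in this diff.
    from torch_tensorrt.fx.types import TRTNetwork, TRTTensor

    # add_slice is the operator.py helper deleted below; this import is only
    # valid on the parent commit (d7c82ab), before this removal.
    from torch_tensorrt.fx.converters.operator import add_slice


    def aten_ops_slice(
        network: TRTNetwork,
        target: Target,
        args: Tuple[Argument, ...],
        kwargs: Dict[str, Argument],
        name: str,
    ) -> Union[TRTTensor, Sequence[TRTTensor]]:
        # Map the positional schema of torch.ops.aten.slice.Tensor
        # (input, dim, start, end, step) onto the keywords add_slice expects;
        # note aten calls the bound "end" while add_slice calls it "stop".
        kwargs_new = {
            "input": args[0],
            "dim": args[1],
            "start": args[2],
            "stop": args[3],
            "step": args[4],
        }
        # Pass target and kwargs_new separately, matching the
        # add_slice(network, target, kwargs, name) signature.
        return add_slice(network, target, kwargs_new, name)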

py/torch_tensorrt/fx/converters/operator.py

Lines changed: 0 additions & 49 deletions
@@ -1016,52 +1016,3 @@ def add_expand(network, target, kwargs, name):
     layer = network.add_slice(input_val, start=start, shape=shape, stride=stride)
     set_layer_name(layer, target, name)
     return layer.get_output(0)
-
-
-def add_slice(network, target, kwargs, name):
-    input_val = kwargs["input"]
-
-    if not isinstance(input_val, TRTTensor):
-        raise RuntimeError(
-            f"slice_tensor received input {input_val} that is not part "
-            "of the TensorRT region!"
-        )
-
-    ranks = len(input_val.shape) + (1 if network.has_implicit_batch_dimension else 0)
-    dim = get_positive_dim(cast(int, kwargs["dim"]), ranks)
-    dynamic_shape = has_dynamic_shape(input_val.shape)
-    if network.has_implicit_batch_dimension:
-        if dim == 0:
-            raise RuntimeError(
-                f"We do not support slice_tensor at batch dim when it's implicit, got {dim}!"
-            )
-        dim = dim - 1
-    else:
-        if dynamic_shape:
-            # Check whether slice target dim is dynamic shape dim
-            assert input_val.shape[dim] != -1, "Can't chunk on dynamic shape dimension!"
-
-    start_int = cast(int, kwargs["start"])
-    stop_int = cast(int, kwargs["stop"])
-    step_int = cast(int, kwargs["step"])
-    start = [0] * len(input_val.shape)
-    start[dim] = start_int
-    stride = [1] * len(start)
-    stride[dim] = step_int
-    output_shape = list(input_val.shape)
-    output_shape[dim] = (stop_int - start_int) // step_int
-
-    if dynamic_shape > 0:
-        output_shape = get_shape_with_dynamic_shape(
-            network, output_shape, input_val, target, name
-        )
-    layer = network.add_slice(
-        input_val,
-        start=start,
-        shape=[] if dynamic_shape else output_shape,
-        stride=stride,
-    )
-    if dynamic_shape:
-        layer.set_input(2, output_shape)
-    set_layer_name(layer, target, name)
-    return layer.get_output(0)
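
For reference, the heart of the deleted add_slice is the mapping from a PyTorch-style slice (dim, start, stop, step) onto the per-dimension start/shape/stride vectors that TensorRT's ISliceLayer consumes: every dimension keeps start 0 and stride 1 except the sliced one. The standalone helper below is a minimal sketch of that arithmetic for the static-shape path only; the name slice_params is hypothetical and does not appear in the repository. Two quirks of the deleted code are worth noting: (stop_int - start_int) // step_int floor-divides, which under-counts the output extent whenever step does not evenly divide the span (ceiling division would match aten.slice and Python slicing semantics), and "if dynamic_shape > 0:" compares a boolean against an integer where a plain truth test would do.

    from typing import List, Sequence, Tuple


    def slice_params(
        shape: Sequence[int], dim: int, start: int, stop: int, step: int
    ) -> Tuple[List[int], List[int], List[int]]:
        """Hypothetical helper: compute ISliceLayer start/shape/stride vectors."""
        rank = len(shape)
        dim = dim % rank  # normalize a negative dim, as get_positive_dim did
        starts = [0] * rank    # every other dimension is taken from index 0
        strides = [1] * rank   # unit stride everywhere but the sliced dim
        out_shape = list(shape)
        starts[dim] = start
        strides[dim] = step
        # Same floor-division arithmetic as the deleted code.
        out_shape[dim] = (stop - start) // step
        return starts, out_shape, strides


    # Example: slicing dim 1 of a (2, 10, 4) tensor with start=1, stop=9, step=2
    # selects indices 1, 3, 5, 7, so the output shape is (2, 4, 4).
    assert slice_params((2, 10, 4), 1, 1, 9, 2) == ([0, 1, 0], [2, 4, 4], [1, 2, 1])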
