Skip to content

Arm backend: Fix sigmoid int16 and int32 flakiness #10548

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 2 commits into from
May 16, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -306,6 +306,7 @@ class CheckProperQuantization(OperatorSupportBase):
exir_ops.edge.aten.sub.Tensor,
exir_ops.edge.aten.upsample_bilinear2d.vec,
exir_ops.edge.aten.upsample_nearest2d.vec,
torch.ops.aten.scalar_tensor.default,
*TableOps.included_ops(),
)

Expand Down
20 changes: 12 additions & 8 deletions backends/arm/test/models/test_conformer.py
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,6 @@ def test_conformer_tosa_MI(self):
)
)

@unittest.expectedFailure # TODO(MLETORCH-635)
def test_conformer_tosa_BI(self):
(
ArmTester(
Expand Down Expand Up @@ -111,7 +110,6 @@ def test_conformer_u55_BI(self):
except Exception:
pass

@unittest.expectedFailure # TODO(MLETORCH-635)
def test_conformer_u85_BI(self):
tester = (
ArmTester(
Expand All @@ -126,9 +124,15 @@ def test_conformer_u85_BI(self):
.serialize()
)
if conftest.is_option_enabled("corstone_fvp"):
tester.run_method_and_compare_outputs(
qtol=1.0,
rtol=1.0,
atol=5.0,
inputs=get_test_inputs(self.dim, self.lengths, self.num_examples),
)
try:
tester.run_method_and_compare_outputs(
qtol=1.0,
rtol=1.0,
atol=5.0,
inputs=get_test_inputs(self.dim, self.lengths, self.num_examples),
)
self.fail(
"TODO(MLETORCH-635): Expected failure under FVP option, but test passed."
)
except Exception:
pass
21 changes: 17 additions & 4 deletions backends/arm/test/ops/test_depthwise_conv.py
Original file line number Diff line number Diff line change
Expand Up @@ -202,13 +202,13 @@ def test_convolution_2d_tosa_BI_depth_wise(test_module: torch.nn.Module):


x_fails = {
"3x3_2x8x198x198_gp8_st3": "MLETORCH-516: AssertionError: Output 0 does not match reference output.",
"two_dw_conv2d": "MLETORCH-516: AssertionError: Output 0 does not match reference output.",
"3x3_2x8x198x198_gp8_st3": "MLETORCH-517: Operators fail with batches > 1",
"two_dw_conv2d": "MLETORCH-517: Operators fail with batches > 1",
}


@common.parametrize("test_module", testsuite_conv2d, x_fails)
@common.XfailIfNoCorstone300 # TODO: MLETORCH-516
@common.parametrize("test_module", testsuite_conv2d, x_fails)
def test_convolution_2d_u55_BI_depth_wise(test_module: torch.nn.Module):
pipeline = EthosU55PipelineBI[input_t](
test_module(),
Expand All @@ -233,8 +233,8 @@ def test_convolution_1d_u55_BI_depth_wise(test_module: torch.nn.Module):
pipeline.run()


@common.parametrize("test_module", testsuite_conv1d | testsuite_conv2d, x_fails)
@common.XfailIfNoCorstone320 # TODO: MLETORCH-516
@common.parametrize("test_module", testsuite_conv2d, x_fails)
def test_convolution_2d_u85_BI_depth_wise(test_module: torch.nn.Module):
pipeline = EthosU85PipelineBI[input_t](
test_module(),
Expand All @@ -244,3 +244,16 @@ def test_convolution_2d_u85_BI_depth_wise(test_module: torch.nn.Module):
run_on_fvp=True,
)
pipeline.run()


@common.XfailIfNoCorstone320 # TODO: MLETORCH-516
@common.parametrize("test_module", testsuite_conv1d, x_fails)
def test_convolution_1d_u85_BI_depth_wise(test_module: torch.nn.Module):
pipeline = EthosU85PipelineBI[input_t](
test_module(),
test_module().get_inputs(),
aten_ops=[],
exir_ops=exir_op,
run_on_fvp=True,
)
pipeline.run()
17 changes: 8 additions & 9 deletions backends/arm/test/ops/test_sigmoid_16bit.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@
# LICENSE file in the root directory of this source tree.

import pytest

import torch
from executorch.backends.arm.quantizer import (
get_symmetric_quantization_config,
Expand Down Expand Up @@ -91,10 +90,13 @@ def forward(self, x):


@common.parametrize("test_data", test_data_suite)
@pytest.mark.flaky(reruns=32) # Flaky due to Vela bug: MLBEDSW-10642
def test_sigmoid_tosa_BI(test_data):
pipeline = TosaPipelineBI(
Sigmoid(), (test_data(),), Sigmoid.aten_op, Sigmoid.exir_op
Sigmoid(),
(test_data(),),
Sigmoid.aten_op,
Sigmoid.exir_op,
qtol=1,
)
pipeline.change_args("quantize", get_16bit_sigmoid_quantizer())
pipeline.run()
Expand All @@ -108,13 +110,13 @@ def test_sigmoid_tosa_BI(test_data):
},
strict=False,
)
@pytest.mark.flaky(reruns=32) # Flaky due to Vela bug: MLBEDSW-10642
def test_sigmoid_tosa_BI_add_sigmoid(test_data):
pipeline = TosaPipelineBI(
SigmoidAddSigmoid(),
(test_data(),),
Sigmoid.aten_op,
Sigmoid.exir_op,
qtol=1,
)
pipeline.run()

Expand All @@ -131,7 +133,6 @@ def test_sigmoid_tosa_BI_add_sigmoid(test_data):
"test_data",
test_data_suite,
)
@pytest.mark.flaky(reruns=32) # Flaky due to Vela bug: MLBEDSW-10642
def test_sigmoid_u55_BI(test_data):
pipeline = OpNotSupportedPipeline(
Sigmoid(),
Expand All @@ -148,7 +149,6 @@ def test_sigmoid_u55_BI(test_data):
"test_data",
test_data_suite,
)
@pytest.mark.flaky(reruns=32) # Flaky due to Vela bug: MLBEDSW-10642
def test_sigmoid_u55_BI_add_sigmoid(test_data):
pipeline = OpNotSupportedPipeline(
SigmoidAddSigmoid(),
Expand All @@ -163,7 +163,6 @@ def test_sigmoid_u55_BI_add_sigmoid(test_data):


@common.parametrize("test_data", test_data_suite)
@pytest.mark.flaky(reruns=32) # Flaky due to Vela bug: MLBEDSW-10642
@common.XfailIfNoCorstone320
def test_sigmoid_u85_BI(test_data):
pipeline = EthosU85PipelineBI(
Expand All @@ -181,10 +180,10 @@ def test_sigmoid_u85_BI(test_data):
"test_data",
test_data_suite,
xfails={
"ramp": "AssertionError: Output 0 does not match reference output.",
"ramp": "AssertionError: Output 0 does not match reference output. MLETORCH-787"
},
)
@pytest.mark.flaky(reruns=32) # Flaky due to Vela bug: MLBEDSW-10642
@pytest.mark.flaky(reruns=5) # MLETORCH-787: Investigate int16-int8 rescaling precision
@common.XfailIfNoCorstone320
def test_sigmoid_u85_BI_add_sigmoid(test_data):
pipeline = EthosU85PipelineBI(
Expand Down
16 changes: 2 additions & 14 deletions backends/arm/test/ops/test_sigmoid_32bit.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

import pytest
import torch
from executorch.backends.arm.quantizer import TOSAQuantizer
from executorch.backends.arm.quantizer.quantization_config import QuantizationConfig
Expand Down Expand Up @@ -107,33 +106,32 @@ def forward(self, x):


@common.parametrize("test_data", test_data_suite)
@pytest.mark.flaky(reruns=32) # Flaky due to Vela bug: MLBEDSW-10642
def test_sigmoid_tosa_BI(test_data):
pipeline = TosaPipelineBI(
Sigmoid(),
(test_data(),),
Sigmoid.aten_op,
Sigmoid.exir_op,
qtol=1,
)
pipeline.change_args("quantize", get_32bit_sigmoid_quantizer())
pipeline.run()


@common.parametrize("test_data", test_data_suite)
@pytest.mark.flaky(reruns=32) # Flaky due to Vela bug: MLBEDSW-10642
def test_sigmoid_tosa_BI_add_sigmoid(test_data):
pipeline = TosaPipelineBI(
SigmoidAddSigmoid(),
(test_data(),),
Sigmoid.aten_op,
Sigmoid.exir_op,
qtol=1,
)
pipeline.change_args("quantize", get_32bit_sigmoid_quantizer())
pipeline.run()


@common.parametrize("test_data", test_data_suite)
@pytest.mark.flaky(reruns=32) # Flaky due to Vela bug: MLBEDSW-10642
def test_sigmoid_u55_BI(test_data):
pipeline = OpNotSupportedPipeline(
Sigmoid(),
Expand All @@ -147,7 +145,6 @@ def test_sigmoid_u55_BI(test_data):


@common.parametrize("test_data", test_data_suite)
@pytest.mark.flaky(reruns=32) # Flaky due to Vela bug: MLBEDSW-10642
def test_sigmoid_u55_BI_add_sigmoid(test_data):
pipeline = OpNotSupportedPipeline(
SigmoidAddSigmoid(),
Expand All @@ -162,9 +159,7 @@ def test_sigmoid_u55_BI_add_sigmoid(test_data):


@common.parametrize("test_data", test_data_suite)
@pytest.mark.flaky(reruns=32) # Flaky due to Vela bug: MLBEDSW-10642
@common.XfailIfNoCorstone320
@pytest.mark.flaky(reruns=5)
def test_sigmoid_u85_BI(test_data):
pipeline = EthosU85PipelineBI(
Sigmoid(),
Expand All @@ -180,15 +175,8 @@ def test_sigmoid_u85_BI(test_data):
@common.parametrize(
"test_data",
test_data_suite,
xfails={
"ramp": "AssertionError: Output 0 does not match reference output.",
"rand": "AssertionError: Output 0 does not match reference output.",
"rand_4d": "AssertionError: Output 0 does not match reference output.",
},
)
@pytest.mark.flaky(reruns=32) # Flaky due to Vela bug: MLBEDSW-10642
@common.XfailIfNoCorstone320
@pytest.mark.flaky(reruns=5)
def test_sigmoid_u85_BI_add_sigmoid(test_data):
pipeline = EthosU85PipelineBI(
SigmoidAddSigmoid(),
Expand Down
2 changes: 1 addition & 1 deletion examples/arm/setup.sh
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ fi

# vela
vela_repo_url="https://gitlab.arm.com/artificial-intelligence/ethos-u/ethos-u-vela"
vela_rev="859cc066178a87ff28230c1ce9bd370f1e98aa5a"
vela_rev="8cac2b9a7204b57125a8718049519b091a98846c"

########
### Functions
Expand Down
Loading