Skip to content

Commit f147c7b

Browse files
Ninja91 authored and facebook-github-bot committed
Add a16w8 reduce_sum FVP coverage for Ethos-U85 (pytorch#19319)
Summary: Adds an a16w8 (int16 IO + int8 weights) sweep for `aten.sum.dim_IntList` reducing the last dim with `keepdim=True`. The new tests `test_sum_dim_intlist_a16w8_{u55,u85}_INT` run on the standard Corstone-300 / Corstone-320 FVP harness. The U85 case surfaces a known numerics issue in the Vela `regor` lowering at int16 IO precision (silent zero output), tracked upstream at https://gitlab.arm.com/artificial-intelligence/ethos-u/ethos-u-vela/-/issues/23. The Ethos-U55 path uses a different accumulator and is correct on the same OFM rescale. Also annotates the four `dim_None{,_4d_tensor}` parametrize ids on `test_sum_u{55,85}_INT_1_0` (and the corresponding fp16 / bf16 variants) with `skips=` -- those cases cannot be exercised through the FVP harness because `executorch.devtools.bundled_program.config` rejects `None` as a model input. The dim=None case is properly covered by the existing `SumDefault` class. Test design: - Standard `pipeline.run()` with the same a16w8 kwargs other arm a16w8 tests use (e.g. `test_native_layer_norm_16a8w_u85_INT` in `test_layer_norm.py`): `a16w8_quantization=True, symmetric_io_quantization=True, qtol=128, epsilon=2**-16`. - Numerical comparison is the standard `atol`/`rtol` check from `pipeline.run()` -- no SQNR helpers. - The U85 a16w8 test is wrapped with both `common.XfailIfNoCorstone320` (handles missing-FVP environments via `FileNotFoundError`) and `pytest.mark.xfail(strict=False, reason="...")` (handles the silent-zero bug). Both are function-level decorators that compose cleanly -- pattern matches `test_max_pool1d.py:111-114`. `strict=False` keeps the test target green both on stock Vela 5.0 (cases XFAIL) and once the upstream Vela fix is in tree (cases XPASS allowed). bypass-pytorch-oss-checks Reviewed By: digantdesai Differential Revision: D103667823
1 parent 91aef57 commit f147c7b

2 files changed

Lines changed: 71 additions & 2 deletions

File tree

backends/arm/test/ops/test_sum.py

Lines changed: 70 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,8 @@
55

66
from typing import Callable, Tuple
77

8+
import pytest
9+
810
import torch
911
from executorch.backends.arm.test import common
1012

@@ -96,7 +98,16 @@ def test_sum_dim_intlist_tosa_INT(test_data: input_t1):
9698
pipeline.run()
9799

98100

99-
@common.parametrize("test_data", Sum.test_parameters)
101+
# dim=None cases skipped: executorch.devtools.bundled_program.config rejects
102+
# None as a model input (cannot be serialized into the bundled program).
103+
_DIM_NONE_SKIP_REASON = "bundled_program cannot serialize None as a model input"
104+
_dim_none_skips = {
105+
"dim_None": _DIM_NONE_SKIP_REASON,
106+
"dim_None_4d_tensor": _DIM_NONE_SKIP_REASON,
107+
}
108+
109+
110+
@common.parametrize("test_data", Sum.test_parameters, skips=_dim_none_skips)
100111
@common.XfailIfNoCorstone300
101112
def test_sum_u55_INT_1_0(test_data: Tuple):
102113
pipeline = EthosU55PipelineINT[input_t1](
@@ -108,7 +119,7 @@ def test_sum_u55_INT_1_0(test_data: Tuple):
108119
pipeline.run()
109120

110121

111-
@common.parametrize("test_data", Sum.test_parameters)
122+
@common.parametrize("test_data", Sum.test_parameters, skips=_dim_none_skips)
112123
@common.XfailIfNoCorstone320
113124
def test_sum_u85_INT_1_0(test_data: Tuple):
114125
pipeline = EthosU85PipelineINT[input_t1](
@@ -220,3 +231,60 @@ def test_sum_tosa_FP(test_data: Callable[[], input_t2]):
220231
def test_sum_tosa_INT(test_data: Callable[[], input_t2]):
221232
pipeline = TosaPipelineINT[input_t1](SumDefault(), test_data(), SumDefault.aten_op)
222233
pipeline.run()
234+
235+
236+
# a16w8 (int16 IO + int8 weights) coverage for sum.dim_IntList. Surfaces the
237+
# Ethos-U85 int16 ReduceSum silent-zero issue tracked upstream at
238+
# https://gitlab.arm.com/artificial-intelligence/ethos-u/ethos-u-vela/-/issues/23.
239+
240+
241+
class SumLastDim(torch.nn.Module):
242+
"""Reduce the last dim with keepdim=True."""
243+
244+
def forward(self, x: torch.Tensor) -> torch.Tensor:
245+
return x.sum(dim=-1, keepdim=True)
246+
247+
248+
a16w8_sum_test_parameters = {
249+
"rank1_16": lambda: (torch.rand(16),),
250+
"rank3_8x1x16": lambda: (torch.rand(8, 1, 16),),
251+
"rank3_4x4x16": lambda: (torch.rand(4, 4, 16),),
252+
}
253+
254+
255+
@common.parametrize("test_data", a16w8_sum_test_parameters)
256+
@common.XfailIfNoCorstone300
257+
def test_sum_dim_intlist_a16w8_u55_INT(test_data: Callable[[], input_t1]):
258+
pipeline = EthosU55PipelineINT[input_t1](
259+
SumLastDim(),
260+
test_data(),
261+
aten_op,
262+
exir_ops=[],
263+
a16w8_quantization=True,
264+
symmetric_io_quantization=True,
265+
qtol=128,
266+
epsilon=2**-16,
267+
)
268+
pipeline.run()
269+
270+
271+
# All cases hit upstream Vela issue #23 (linked above). strict=False so the
272+
# test target stays green both on stock Vela 5.0 (cases XFAIL) and once the
273+
# Vela fix is in tree (cases XPASS).
274+
@common.parametrize("test_data", a16w8_sum_test_parameters)
275+
@common.XfailIfNoCorstone320
276+
@pytest.mark.xfail(
277+
reason="Ethos-U85 int16 ReduceSum returns zero (vela#23)", strict=False
278+
)
279+
def test_sum_dim_intlist_a16w8_u85_INT(test_data: Callable[[], input_t1]):
280+
pipeline = EthosU85PipelineINT[input_t1](
281+
SumLastDim(),
282+
test_data(),
283+
aten_op,
284+
exir_ops=[],
285+
a16w8_quantization=True,
286+
symmetric_io_quantization=True,
287+
qtol=128,
288+
epsilon=2**-16,
289+
)
290+
pipeline.run()

backends/arm/test/targets.bzl

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -30,6 +30,7 @@ def define_arm_tests():
3030
"ops/test_slice.py",
3131
"ops/test_sigmoid.py",
3232
"ops/test_sub.py",
33+
"ops/test_sum.py",
3334
"ops/test_tanh.py",
3435
"ops/test_view.py",
3536
"ops/test_cos.py",

0 commit comments

Comments (0)