# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

import unittest

import torch
from executorch.backends.xnnpack.test.tester import Tester


class TestRelu(unittest.TestCase):
    class Relu(torch.nn.Module):
        # Minimal module wrapping torch.nn.ReLU; its single op is expected to
        # be fully delegated to the XNNPACK backend.
        def __init__(self):
            super().__init__()
            self.relu = torch.nn.ReLU()

        def forward(self, x):
            z = self.relu(x)
            return z

    def test_fp32_relu(self):
        inputs = (torch.randn(8),)
        (
            Tester(self.Relu(), inputs)
            .export()
            # The exported graph should contain exactly one aten relu op.
            .check_count({"torch.ops.aten.relu.default": 1})
            .to_edge_transform_and_lower()
            # After lowering, the graph should be a single XNNPACK delegate
            # call, with no aten relu left in the edge dialect graph.
            .check_count({"torch.ops.higher_order.executorch_call_delegate": 1})
            .check_not(["executorch_exir_dialects_edge__ops_aten_relu_default"])
            .to_executorch()
            .serialize()
            # Execute the serialized program and compare against eager outputs.
            .run_method_and_compare_outputs()
        )

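    # Illustrative sketch, not part of the original file: a quantized (qs8)
    # variant of the same lowering flow. It assumes the Tester exposes a
    # quantize() stage with the default XNNPACK quantizer; adjust the stage
    # names to the Tester API in your tree before relying on it.
    def test_qs8_relu(self):
        inputs = (torch.randn(8),)
        (
            Tester(self.Relu(), inputs)
            .quantize()  # assumed Tester stage; inserts quantize/dequantize ops
            .export()
            .check_count({"torch.ops.aten.relu.default": 1})
            .to_edge_transform_and_lower()
            # The quantized graph should still collapse into one delegate call.
            .check_count({"torch.ops.higher_order.executorch_call_delegate": 1})
            .check_not(["executorch_exir_dialects_edge__ops_aten_relu_default"])
            .to_executorch()
            .serialize()
            .run_method_and_compare_outputs()
        )


if __name__ == "__main__":
    # Allow running this test file directly: python relu.py
    unittest.main()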