xref: /aosp_15_r20/external/executorch/backends/xnnpack/operators/op_relu.py (revision 523fa7a60841cd1ecfb9cc4201f1ca8b03ed023a)
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

from typing import Dict

import torch
from executorch.backends.xnnpack.operators.node_visitor import (
    NodeVisitor,
    register_node_visitor,
)
from executorch.backends.xnnpack.serialization.xnnpack_graph_schema import (
    OutputMinMax,
    XNNClamp,
    XNNGraph,
    XNode,
)
from executorch.backends.xnnpack.utils.utils import get_input_node


@register_node_visitor
class ReluVisitor(NodeVisitor):
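    """Serializes aten.relu.default as an XNNPACK clamp node with a lower bound of 0."""
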
    target = "aten.relu.default"

    def __init__(self, *args) -> None:
        super().__init__(*args)

    def define_node(
        self,
        node: torch.fx.Node,
        xnn_graph: XNNGraph,
        vals_to_ids: Dict[torch.fx.Node, int],
        debug_handle: int,
    ) -> None:
        input_node = get_input_node(node, 0)

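        # If this relu has already been fused with another operator (marked via
        # "XNNPACK_FUSED" in node.meta), no standalone clamp node is serialized.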
        if "XNNPACK_FUSED" in node.meta and node.meta["XNNPACK_FUSED"]:
            return

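        # Define this node's input and output tensors in the serialized graph and
        # record their ids in vals_to_ids.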
        self.define_nodes_tensor_inputs_outputs(node, xnn_graph, vals_to_ids)

        # input
        input_id = vals_to_ids[input_node]

        # output
        output_id = vals_to_ids[node]

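        # ReLU is max(x, 0): a clamp with a lower bound of 0 and no upper bound.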
        output_min_max = OutputMinMax(output_min=0, output_max="+inf")

        ser_node = XNode(
            xnode_union=XNNClamp(
                input_id=input_id,
                output_id=output_id,
                flags=0,
            ),
            debug_handle=debug_handle,
            output_min_max=output_min_max,
        )
        xnn_graph.xnodes.append(ser_node)