//
// Copyright © 2023 Arm Ltd and contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//
#pragma once

#include "CommonTestUtils.hpp"

#include <ResolveType.hpp>

#include <armnn/Exceptions.hpp>
#include <armnn/INetwork.hpp>
#include <armnn/utility/NumericCast.hpp>

#include <doctest/doctest.h>

#include <map>
#include <vector>

namespace
{

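// Builds a minimal two-input network: Input(0) and Input(1) feed an ElementwiseBinary layer
// configured with the requested BinaryOperation, whose result is connected to a single output.
// The optional qScale/qOffset are applied to the tensor infos so the same helper serves both
// float and quantized data types.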
template<armnn::DataType ArmnnTypeInput>
INetworkPtr CreateElementwiseBinaryNetwork(const TensorShape& input1Shape,
                                           const TensorShape& input2Shape,
                                           const TensorShape& outputShape,
                                           BinaryOperation operation,
                                           const float qScale = 1.0f,
                                           const int32_t qOffset = 0)
{
    using namespace armnn;

    INetworkPtr net(INetwork::Create());

    TensorInfo input1TensorInfo(input1Shape, ArmnnTypeInput, qScale, qOffset, true);
    TensorInfo input2TensorInfo(input2Shape, ArmnnTypeInput, qScale, qOffset, true);
    TensorInfo outputTensorInfo(outputShape, ArmnnTypeInput, qScale, qOffset);

    IConnectableLayer* input1 = net->AddInputLayer(armnn::numeric_cast<LayerBindingId>(0));
    IConnectableLayer* input2 = net->AddInputLayer(armnn::numeric_cast<LayerBindingId>(1));
    IConnectableLayer* elementwiseBinaryLayer = net->AddElementwiseBinaryLayer(operation, "elementwiseBinary");
    IConnectableLayer* output = net->AddOutputLayer(0, "output");

    Connect(input1, elementwiseBinaryLayer, input1TensorInfo, 0, 0);
    Connect(input2, elementwiseBinaryLayer, input2TensorInfo, 0, 1);
    Connect(elementwiseBinaryLayer, output, outputTensorInfo, 0, 0);

    return net;
}

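// Runs the network above end to end on the given backends for one BinaryOperation and
// compares the backend output against a hard-coded reference result. For quantized data
// types the inputs and expected outputs are quantized with a fixed scale and offset.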
template<armnn::DataType ArmnnInType,
         typename TInput = armnn::ResolveType<ArmnnInType>>
void ElementwiseBinarySimpleEndToEnd(const std::vector<BackendId>& backends,
                                     BinaryOperation operation)
{
    using namespace armnn;

    const float   qScale  = IsQuantizedType<TInput>() ? 0.25f : 1.0f;
    const int32_t qOffset = IsQuantizedType<TInput>() ? 50    : 0;

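    // The second input holds a single element so the operation also exercises broadcasting
    // against the 2x2x2x2 first input.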
    const TensorShape& input1Shape  = { 2, 2, 2, 2 };
    const TensorShape& input2Shape  = { 1 };
    const TensorShape& outputShape = { 2, 2, 2, 2 };

    // Builds up the structure of the network
    INetworkPtr net = CreateElementwiseBinaryNetwork<ArmnnInType>(input1Shape, input2Shape, outputShape,
                                                                  operation, qScale, qOffset);

    CHECK(net);

    const std::vector<float> input1({ 1, -1, 1, 1,  5, -5, 5, 5,  -3, 3, 3, 3,  4, 4, -4, 4 });

    const std::vector<float> input2({ 2 });
    std::vector<float> expectedOutput;
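    // Select the reference result for the requested operation: each element of input1
    // combined with the broadcast scalar 2.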
    switch (operation) {
        case armnn::BinaryOperation::Add:
            expectedOutput = { 3, 1, 3, 3,  7, -3, 7, 7,  -1, 5, 5, 5,  6, 6, -2, 6 };
            break;
        case armnn::BinaryOperation::Div:
            expectedOutput = {0.5f, -0.5f, 0.5f, 0.5f, 2.5f, -2.5f, 2.5f, 2.5f, -1.5f, 1.5f, 1.5f, 1.5f, 2, 2, -2, 2};
            break;
        case armnn::BinaryOperation::Maximum:
            expectedOutput = { 2, 2, 2, 2,  5, 2, 5, 5,  2, 3, 3, 3,  4, 4, 2, 4 };
            break;
        case armnn::BinaryOperation::Minimum:
            expectedOutput = { 1, -1, 1, 1,  2, -5, 2, 2,  -3, 2, 2, 2,  2, 2, -4, 2 };
            break;
        case armnn::BinaryOperation::Mul:
            expectedOutput = { 2, -2, 2, 2,  10, -10, 10, 10,  -6, 6, 6, 6,  8, 8, -8, 8 };
            break;
        case armnn::BinaryOperation::Sub:
            expectedOutput = { -1, -3, -1, -1,  3, -7, 3, 3,  -5, 1, 1, 1,  2, 2, -6, 2 };
            break;
        default:
            throw armnn::InvalidArgumentException("Invalid Elementwise Binary operation");
    }
    const std::vector<float> expectedOutput_const = expectedOutput;
    // quantize data
    std::vector<TInput> qInput1Data     = armnnUtils::QuantizedVector<TInput>(input1, qScale, qOffset);
    std::vector<TInput> qInput2Data     = armnnUtils::QuantizedVector<TInput>(input2, qScale, qOffset);
    std::vector<TInput> qExpectedOutput = armnnUtils::QuantizedVector<TInput>(expectedOutput_const, qScale, qOffset);

    std::map<int, std::vector<TInput>> inputTensorData    = {{ 0, qInput1Data }, { 1, qInput2Data }};
    std::map<int, std::vector<TInput>> expectedOutputData = {{ 0, qExpectedOutput }};

    EndToEndLayerTestImpl<ArmnnInType, ArmnnInType>(std::move(net), inputTensorData, expectedOutputData, backends);
}
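
// Illustrative usage from a backend's end-to-end test file (a sketch, not part of this header;
// the suite and test names and the Compute::CpuRef backend id below are assumptions for the example):
//
//     TEST_SUITE("RefEndToEnd")
//     {
//         TEST_CASE("RefAddEndToEndTestFloat32")
//         {
//             ElementwiseBinarySimpleEndToEnd<armnn::DataType::Float32>({ armnn::Compute::CpuRef },
//                                                                       armnn::BinaryOperation::Add);
//         }
//     }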

} // anonymous namespace