//
// Copyright © 2019 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "EndToEndTestImpl.hpp"
#include "LogSoftmaxEndToEndTestImpl.hpp"

#include <armnn/INetwork.hpp>

#include <TestUtils.hpp>

#include <doctest/doctest.h>

namespace {

template <armnn::DataType DataType>
armnn::INetworkPtr CreateLogSoftmaxNetwork(const armnn::TensorShape& inputShape,
                                           const armnn::TensorShape& outputShape,
                                           const float beta,
                                           const int axis,
                                           const float qScale = 1.0f,
                                           const int32_t qOffset = 0)
{
    using namespace armnn;

    // Builds up the structure of the network.
    INetworkPtr net(INetwork::Create());

    TensorInfo inputTensorInfo(inputShape, DataType, qScale, qOffset, true);

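    // The descriptor controls the operation: m_Beta scales the inputs before the
    // log softmax is taken and m_Axis selects the dimension to normalise over.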
    LogSoftmaxDescriptor logSoftmaxDesc;
    logSoftmaxDesc.m_Beta = beta;
    logSoftmaxDesc.m_Axis = axis;

    IConnectableLayer* logSoftmax = net->AddLogSoftmaxLayer(logSoftmaxDesc, "Log_Softmax");
    IConnectableLayer* input = net->AddInputLayer(0, "input");
    Connect(input, logSoftmax, inputTensorInfo, 0, 0);

    TensorInfo outputTensorInfo(outputShape, DataType, qScale, qOffset);
    IConnectableLayer* output = net->AddOutputLayer(0, "output");
    Connect(logSoftmax, output, outputTensorInfo, 0, 0);

    return net;
}

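// Builds a Float32 log softmax network for the given tensor infos, runs it on the
// supplied backends and checks the result against the expected output data.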
void LogSoftmaxEndToEnd(const std::vector<armnn::BackendId>& backends,
                        armnn::TensorInfo& inputTensorInfo,
                        armnn::TensorInfo& outputTensorInfo,
                        std::vector<float>& inputData,
                        std::vector<float>& expectedOutputData,
                        const float beta,
                        const int axis)
{
    using namespace armnn;

    // Builds up the structure of the network
    INetworkPtr net = CreateLogSoftmaxNetwork<DataType::Float32>(inputTensorInfo.GetShape(),
                                                                 outputTensorInfo.GetShape(),
                                                                 beta,
                                                                 axis);

    CHECK(net);

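    // Both the single input and the single output use binding id 0.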
    std::map<int, std::vector<float>> inputTensorData = { {0, inputData} };
    std::map<int, std::vector<float>> expectedOutputTensorData = { {0, expectedOutputData} };

    EndToEndLayerTestImpl<DataType::Float32, DataType::Float32>(std::move(net),
                                                                 inputTensorData,
                                                                 expectedOutputTensorData,
                                                                 backends);
}

} // anonymous namespace

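// End-to-end test case: 1x1x2x4 Float32 log softmax with a non-default beta and a
// positive axis, compared against precomputed reference values.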
void LogSoftmaxEndToEndTest(const std::vector<armnn::BackendId>& defaultBackends)
{
    using namespace armnn;

    const float beta = 10.0f; // non-default beta
    const int axis = 3; // positive axis

    const TensorShape inputShape{1, 1, 2, 4};
    TensorInfo inputTensorInfo(inputShape, DataType::Float32);

    const TensorShape outputShape{1, 1, 2, 4};
    TensorInfo outputTensorInfo(outputShape, DataType::Float32);

    std::vector<float> inputData = std::vector<float>({
        0.0f, -0.6f, 0.2f, 0.4f,
        0.3f, -0.2f, 1.0f, 0.1f
    });

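    // Reference values computed as log softmax along axis 3:
    // output_i = beta * x_i - log(sum_j exp(beta * x_j))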
    std::vector<float> expectedOutputData = std::vector<float>({
        -4.14297f, -10.14297f, -2.14297f, -0.14297f,
        -7.00104f, -12.00104f, -0.00104087f, -9.00104f
    });

    LogSoftmaxEndToEnd(defaultBackends,
                       inputTensorInfo,
                       outputTensorInfo,
                       inputData,
                       expectedOutputData,
                       beta,
                       axis);
}