//
// Copyright © 2017, 2023 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "JsonPrinterTestImpl.hpp"
#include "armnn/utility/StringUtils.hpp"

#include <Profiling.hpp>

#include <armnn/Descriptors.hpp>
#include <armnn/IRuntime.hpp>
#include <armnn/INetwork.hpp>

#include <doctest/doctest.h>

#include <sstream>
#include <stack>
#include <string>
#include <algorithm>

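// Returns true when the opening and closing characters form a matching pair of braces or brackets.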
inline bool AreMatchingPair(const char opening, const char closing)
{
    return (opening == '{' && closing == '}') || (opening == '[' && closing == ']');
}

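// Returns true if every '{' or '[' in the string is closed by the matching '}' or ']' in the correct order.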
bool AreParenthesesMatching(const std::string& exp)
{
    std::stack<char> expStack;
    for (size_t i = 0; i < exp.length(); ++i)
    {
        if (exp[i] == '{' || exp[i] == '[')
        {
            expStack.push(exp[i]);
        }
        else if (exp[i] == '}' || exp[i] == ']')
        {
            if (expStack.empty() || !AreMatchingPair(expStack.top(), exp[i]))
            {
                return false;
            }
            else
            {
                expStack.pop();
            }
        }
    }
    return expStack.empty();
}

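// Collects every numeric value found inside the square-bracketed measurement arrays of the profiler output.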
std::vector<double> ExtractMeasurements(const std::string& exp)
{
    std::vector<double> numbers;
    bool inArray = false;
    std::string numberString;
    for (size_t i = 0; i < exp.size(); ++i)
    {
        if (exp[i] == '[')
        {
            inArray = true;
        }
        else if (exp[i] == ']' && inArray)
        {
            try
            {
                armnn::stringUtils::StringTrim(numberString, "\t,\n");
                numbers.push_back(std::stod(numberString));
            }
            catch (std::invalid_argument const&)
            {
                FAIL(("Could not convert measurements to double: " + numberString));
            }

            numberString.clear();
            inArray = false;
        }
        else if (exp[i] == ',' && inArray)
        {
            try
            {
                armnn::stringUtils::StringTrim(numberString, "\t,\n");
                numbers.push_back(std::stod(numberString));
            }
            catch (std::invalid_argument const&)
            {
                FAIL(("Could not convert measurements to double: " + numberString));
            }
            numberString.clear();
        }
        else if (exp[i] != '[' && inArray && exp[i] != ',' && exp[i] != ' ')
        {
            numberString += exp[i];
        }
    }
    return numbers;
}

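// Returns every brace-delimited section of the JSON output; nested sections appear before the sections that contain them.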
std::vector<std::string> ExtractSections(const std::string& exp)
{
    std::vector<std::string> sections;

    std::stack<size_t> s;
    for (size_t i = 0; i < exp.size(); i++)
    {
        if (exp.at(i) == '{')
        {
            s.push(i);
        }
        else if (exp.at(i) == '}')
        {
            size_t from = s.top();
            s.pop();
            sections.push_back(exp.substr(from, i - from + 1));
        }
    }

    return sections;
}

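// Builds a small quantized softmax network, runs it several times on the requested backends with profiling
// enabled, and returns the profiler's JSON output.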
std::string GetSoftmaxProfilerJson(const std::vector<armnn::BackendId>& backends)
{
    using namespace armnn;

    CHECK(!backends.empty());

    ProfilerManager& profilerManager = armnn::ProfilerManager::GetInstance();

    // Create runtime in which test will run
    IRuntime::CreationOptions options;
    options.m_EnableGpuProfiling = backends.front() == armnn::Compute::GpuAcc;
    IRuntimePtr runtime(IRuntime::Create(options));

    // build up the structure of the network
    INetworkPtr net(INetwork::Create());
    IConnectableLayer* input = net->AddInputLayer(0, "input");
    SoftmaxDescriptor softmaxDescriptor;
    // Set Axis to -1 if CL or Neon until further Axes are supported.
    if (backends.front() == armnn::Compute::CpuAcc || backends.front() == armnn::Compute::GpuAcc)
    {
        softmaxDescriptor.m_Axis = -1;
    }
    IConnectableLayer* softmax = net->AddSoftmaxLayer(softmaxDescriptor, "softmax");
    IConnectableLayer* output  = net->AddOutputLayer(0, "output");

    input->GetOutputSlot(0).Connect(softmax->GetInputSlot(0));
    softmax->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    // set the tensors in the network
    TensorInfo inputTensorInfo(TensorShape({1, 5}), DataType::QAsymmU8);
    inputTensorInfo.SetQuantizationOffset(100);
    inputTensorInfo.SetQuantizationScale(10000.0f);
    input->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);

    TensorInfo outputTensorInfo(TensorShape({1, 5}), DataType::QAsymmU8);
    outputTensorInfo.SetQuantizationOffset(0);
    outputTensorInfo.SetQuantizationScale(1.0f / 256.0f);
    softmax->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    // optimize the network
    armnn::OptimizerOptionsOpaque optOptions;
    optOptions.SetProfilingEnabled(true);
    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec(), optOptions);
    if (!optNet)
    {
        FAIL("Error occurred during Optimization, Optimize() returned nullptr.");
    }
    // load it into the runtime
    NetworkId netId;
    auto error = runtime->LoadNetwork(netId, std::move(optNet));
    CHECK(error == Status::Success);

    // create structures for input & output
    std::vector<uint8_t> inputData
        {
            1, 10, 3, 200, 5
            // one of the inputs is sufficiently larger than the others to saturate the softmax
        };
    std::vector<uint8_t> outputData(5);

    TensorInfo inputTensorInfo2 = runtime->GetInputTensorInfo(netId, 0);
    inputTensorInfo2.SetConstant(true);
    armnn::InputTensors inputTensors
        {
            {0, armnn::ConstTensor(inputTensorInfo2, inputData.data())}
        };
    armnn::OutputTensors outputTensors
        {
            {0, armnn::Tensor(runtime->GetOutputTensorInfo(netId, 0), outputData.data())}
        };

    runtime->GetProfiler(netId)->EnableProfiling(true);

    // do the inferences
    runtime->EnqueueWorkload(netId, inputTensors, outputTensors);
    runtime->EnqueueWorkload(netId, inputTensors, outputTensors);
    runtime->EnqueueWorkload(netId, inputTensors, outputTensors);

    // retrieve the Profiler.Print() output
    std::stringstream ss;
    profilerManager.GetProfiler()->Print(ss);

    return ss.str();
}

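// Validates the structure of the profiler's JSON output: measurements must be present, every section other than
// the top-level grouping sections must carry "raw" and "unit" tags, and the braces/brackets must stay balanced
// once the timing values have been stripped out.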
inline void ValidateProfilerJson(std::string& result)
{
    // ensure measurements are present
    std::vector<double> measurementsVector = ExtractMeasurements(result);
    CHECK(!measurementsVector.empty());

    // check sections contain raw and unit tags
    // first ensure parentheses are balanced
    if (AreParenthesesMatching(result))
    {
        // remove parent sections that will not have raw or unit tag
        std::vector<std::string> sectionVector = ExtractSections(result);
        for (size_t i = 0; i < sectionVector.size(); ++i)
        {
            if (sectionVector[i].find("\"ArmNN\":") != std::string::npos
                || sectionVector[i].find("\"optimize_measurements\":") != std::string::npos
                || sectionVector[i].find("\"loaded_network_measurements\":") != std::string::npos
                || sectionVector[i].find("\"inference_measurements\":") != std::string::npos)
            {
                sectionVector.erase(sectionVector.begin() + static_cast<int>(i));
                // step back so the element shifted into this slot is not skipped on the next iteration
                --i;
            }
        }
        CHECK(!sectionVector.empty());

        CHECK(std::all_of(sectionVector.begin(), sectionVector.end(),
                                [](std::string i) { return (i.find("\"raw\":") != std::string::npos); }));

        CHECK(std::all_of(sectionVector.begin(), sectionVector.end(),
                                [](std::string i) { return (i.find("\"unit\":") != std::string::npos); }));
    }

    // remove the time measurements as they vary from test to test
    result.erase(std::remove_if(result.begin(), result.end(),
                                [](char c) { return c == '.'; }), result.end());
    result.erase(std::remove_if(result.begin(), result.end(), &isdigit), result.end());
    result.erase(std::remove_if(result.begin(), result.end(),
                                [](char c) { return c == '\t'; }), result.end());

    CHECK(result.find("ArmNN") != std::string::npos);
    CHECK(result.find("inference_measurements") != std::string::npos);

    // ensure no spare parentheses are present in the print output
    CHECK(AreParenthesesMatching(result));
}

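// Runs the softmax profiling network on the given backends and checks that the JSON Printer output is well formed
// and contains the expected backend-specific kernel timer entries.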
void RunSoftmaxProfilerJsonPrinterTest(const std::vector<armnn::BackendId>& backends)
{
    // setup the test fixture and obtain JSON Printer result
    std::string result = GetSoftmaxProfilerJson(backends);

    // validate the JSON Printer result
    ValidateProfilerJson(result);

    const armnn::BackendId& firstBackend = backends.at(0);
    if (firstBackend == armnn::Compute::GpuAcc)
    {
        CHECK(result.find("OpenClKernelTimer/: softmax_layer_max_shift_exp_sum_quantized_serial GWS[,,]")
                    != std::string::npos);
    }
    else if (firstBackend == armnn::Compute::CpuAcc)
    {
        CHECK(result.find("NeonKernelTimer") != std::string::npos);     // Validate backend

        bool softmaxCheck = ((result.find("softmax") != std::string::npos) ||            // Validate softmax
                             (result.find("Softmax") != std::string::npos) ||
                             (result.find("SoftMax") != std::string::npos));
        CHECK(softmaxCheck);
    }
}