//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
#include "../MobileNetSsdInferenceTest.hpp"

#include "armnnTfLiteParser/ITfLiteParser.hpp"

#include <algorithm>
#include <cstdlib>
#include <iostream>
#include <iterator>
#include <memory>
#include <vector>

using namespace armnnTfLiteParser;

int main(int argc, char* argv[])
{
    int retVal = EXIT_FAILURE;
    try
    {
        using DataType = float;
        using Parser   = armnnTfLiteParser::ITfLiteParser;
        using Model    = InferenceModel<Parser, DataType>;

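        // MobileNet SSD v1 expects a single 300x300 RGB image in NHWC layout.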
        armnn::TensorShape inputTensorShape({ 1, 300, 300, 3 });

        std::vector<const char*> inputLayerNames  =
        {
            "normalized_input_image_tensor"
        };

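        // The custom TFLite_Detection_PostProcess operator has four outputs:
        // detection boxes, detection classes, detection scores and the number
        // of valid detections.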
        std::vector<const char*> outputLayerNames =
        {
            "TFLite_Detection_PostProcess",
            "TFLite_Detection_PostProcess:1",
            "TFLite_Detection_PostProcess:2",
            "TFLite_Detection_PostProcess:3"
        };

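        // Run the common inference-test harness. { 0 } is the default test
        // case ID used when none is supplied on the command line; the factory
        // lambda below constructs the model once the options are parsed.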
        retVal = InferenceTestMain(argc, argv, { 0 },
            [&inputTensorShape, inputLayerNames, outputLayerNames]()
            {
                return std::make_unique<MobileNetSsdTestCaseProvider<Model>>(
                    [&]
                    (const InferenceTestOptions& commonOptions,
                     typename Model::CommandLineOptions modelOptions)
                    {
                        if (!ValidateDirectory(modelOptions.m_ModelDir))
                        {
                            return std::unique_ptr<Model>();
                        }

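                        // Model parameters: locate the .tflite file and bind
                        // the input/output layer names declared above.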
                        typename Model::Params modelParams;
                        modelParams.m_ModelPath =
                            modelOptions.m_ModelDir + "ssd_mobilenet_v1.tflite";

                        std::copy(inputLayerNames.begin(), inputLayerNames.end(),
                                  std::back_inserter(modelParams.m_InputBindings));

                        std::copy(outputLayerNames.begin(), outputLayerNames.end(),
                                  std::back_inserter(modelParams.m_OutputBindings));

                        modelParams.m_InputShapes                    = { inputTensorShape };
                        modelParams.m_IsModelBinary                  = true;
                        modelParams.m_ComputeDevices                 = modelOptions.GetComputeDevicesAsBackendIds();
                        modelParams.m_VisualizePostOptimizationModel = modelOptions.m_VisualizePostOptimizationModel;
                        modelParams.m_EnableFp16TurboMode            = modelOptions.m_EnableFp16TurboMode;

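                        // Parse, optimize and load the network on the
                        // selected backends.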
                        return std::make_unique<Model>(modelParams,
                                                       commonOptions.m_EnableProfiling,
                                                       commonOptions.m_DynamicBackendsPath);
                    });
            });
    }
    catch (const std::exception& e)
    {
        std::cerr << "WARNING: " << *argv << ": An error has occurred when running "
                     "the MobileNet SSD inference tests: " << e.what() << std::endl;
    }
    return retVal;
}