/*
 * Copyright (c) 2017-2021 Arm Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#include "arm_compute/graph.h"
#include "arm_compute/core/Types.h"
#include "arm_compute/core/Utils.h"
#include "support/ToolchainSupport.h"
#include "utils/CommonGraphOptions.h"
#include "utils/GraphUtils.h"
#include "utils/Utils.h"

using namespace arm_compute::utils;
using namespace arm_compute::graph::frontend;
using namespace arm_compute::graph_utils;

/** Example demonstrating how to implement InceptionV3's network using the Compute Library's graph API */
class InceptionV3Example : public Example
{
public:
    InceptionV3Example()
        : cmd_parser(), common_opts(cmd_parser), common_params(), graph(0, "InceptionV3")
    {
    }
    bool do_setup(int argc, char **argv) override
    {
        // Parse arguments
        cmd_parser.parse(argc, argv);
        cmd_parser.validate();

        // Consume common parameters
        common_params = consume_common_graph_parameters(common_opts);

        // Return when help menu is requested
        if(common_params.help)
        {
            cmd_parser.print_help(argv[0]);
            return false;
        }

        // Print parameter values
        std::cout << common_params << std::endl;

        // Get trainable parameters data path
        std::string data_path = common_params.data_path;

        // Create a preprocessor object
        std::unique_ptr<IPreprocessor> preprocessor = std::make_unique<TFPreproccessor>();

        // Create input descriptor
        const auto        operation_layout = common_params.data_layout;
        const TensorShape tensor_shape     = permute_shape(TensorShape(299U, 299U, 3U, common_params.batches), DataLayout::NCHW, operation_layout);
        TensorDescriptor  input_descriptor = TensorDescriptor(tensor_shape, common_params.data_type).set_layout(operation_layout);

        // Set weights trained layout
        const DataLayout weights_layout = DataLayout::NCHW;

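        // Network stem: Conv2d_1a_3x3 through MaxPool_5a_3x3. Each convolution below is followed by
        // batch normalization and a ReLU activation, with weights loaded from .npy files under data_path.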
        graph << common_params.target
              << common_params.fast_math_hint
              << InputLayer(input_descriptor, get_input_accessor(common_params, std::move(preprocessor), false))
              << ConvolutionLayer(3U, 3U, 32U,
                                  get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
              .set_name("Conv2d_1a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                         nullptr, get_weights_accessor(data_path,
                                                                       "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                         0.001f)
              .set_name("Conv2d_1a_3x3/BatchNorm/batchnorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_1a_3x3/Relu")
              << ConvolutionLayer(3U, 3U, 32U,
                                  get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_2a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_BatchNorm_moving_variance.npy"),
                                         nullptr, get_weights_accessor(data_path,
                                                                       "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_BatchNorm_beta.npy"),
                                         0.001f)
              .set_name("Conv2d_2a_3x3/BatchNorm/batchnorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2a_3x3/Relu")

              << ConvolutionLayer(3U, 3U, 64U,
                                  get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 1))
              .set_name("Conv2d_2b_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_BatchNorm_moving_variance.npy"),
                                         nullptr, get_weights_accessor(data_path,
                                                                       "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_BatchNorm_beta.npy"),
                                         0.001f)
              .set_name("Conv2d_2b_3x3/BatchNorm/batchnorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2b_3x3/Relu")

              << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, operation_layout, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))).set_name("MaxPool_3a_3x3/MaxPool")

              << ConvolutionLayer(1U, 1U, 80U,
                                  get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_3b_1x1/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_BatchNorm_moving_variance.npy"),
                                         nullptr, get_weights_accessor(data_path,
                                                                       "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_BatchNorm_beta.npy"),
                                         0.001f)
              .set_name("Conv2d_3b_1x1/BatchNorm/batchnorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_3b_1x1/Relu")

              << ConvolutionLayer(3U, 3U, 192U,
                                  get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_4a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_BatchNorm_moving_variance.npy"),
                                         nullptr, get_weights_accessor(data_path,
                                                                       "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_BatchNorm_beta.npy"),
                                         0.001f)
              .set_name("Conv2d_4a_3x3/BatchNorm/batchnorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_4a_3x3/Relu")

              << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, operation_layout, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))).set_name("MaxPool_5a_3x3/MaxPool");

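        // Inception blocks Mixed_5b .. Mixed_7c, assembled by the private helper functions below and
        // appended to the stream one concatenation node at a time.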
        graph << get_inception_node_A(data_path, "Mixed_5b", weights_layout, 64U, std::make_tuple(48U, 64U), std::make_tuple(64U, 96U, 96U),
                                      32U)
              .set_name("Mixed_5b/concat");
        graph << get_inception_node_A(data_path, "Mixed_5c", weights_layout, 64U, std::make_tuple(48U, 64U), std::make_tuple(64U, 96U, 96U),
                                      64U, true)
              .set_name("Mixed_5c/concat");
        graph << get_inception_node_A(data_path, "Mixed_5d", weights_layout, 64U, std::make_tuple(48U, 64U), std::make_tuple(64U, 96U, 96U),
                                      64U)
              .set_name("Mixed_5d/concat");

        graph << get_inception_node_B(data_path, "Mixed_6a", weights_layout, 384U, std::make_tuple(64U, 96U, 96U)).set_name("Mixed_6a/concat");

        graph << get_inception_node_C(data_path, "Mixed_6b", weights_layout, 192U, std::make_tuple(128U, 128U, 192U),
                                      std::make_tuple(128U, 128U, 128U, 128U, 192U), 192U)
              .set_name("Mixed_6b/concat");
        graph << get_inception_node_C(data_path, "Mixed_6c", weights_layout, 192U, std::make_tuple(160U, 160U, 192U),
                                      std::make_tuple(160U, 160U, 160U, 160U, 192U), 192U)
              .set_name("Mixed_6c/concat");
        graph << get_inception_node_C(data_path, "Mixed_6d", weights_layout, 192U, std::make_tuple(160U, 160U, 192U),
                                      std::make_tuple(160U, 160U, 160U, 160U, 192U), 192U)
              .set_name("Mixed_6d/concat");
        graph << get_inception_node_C(data_path, "Mixed_6e", weights_layout, 192U, std::make_tuple(192U, 192U, 192U),
                                      std::make_tuple(192U, 192U, 192U, 192U, 192U), 192U)
              .set_name("Mixed_6e/concat");

        graph << get_inception_node_D(data_path, "Mixed_7a", weights_layout, std::make_tuple(192U, 320U),
                                      std::make_tuple(192U, 192U, 192U, 192U))
              .set_name("Mixed_7a/concat");

        graph << get_inception_node_E(data_path, "Mixed_7b", weights_layout, 320U, std::make_tuple(384U, 384U, 384U),
                                      std::make_tuple(448U, 384U, 384U, 384U), 192U)
              .set_name("Mixed_7b/concat");
        graph << get_inception_node_E(data_path, "Mixed_7c", weights_layout, 320U, std::make_tuple(384U, 384U, 384U),
                                      std::make_tuple(448U, 384U, 384U, 384U), 192U, true)
              .set_name("Mixed_7c/concat");

        graph << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 8, operation_layout, PadStrideInfo(1, 1, 0, 0, DimensionRoundingType::CEIL))).set_name("Logits/AvgPool_1a_8x8/AvgPool")
              << ConvolutionLayer(1U, 1U, 1001U, get_weights_accessor(data_path,
                                                                      "/cnn_data/inceptionv3_model/Logits_Conv2d_1c_1x1_weights.npy", weights_layout),
                                  get_weights_accessor(data_path,
                                                       "/cnn_data/inceptionv3_model/Logits_Conv2d_1c_1x1_biases.npy"),
                                  PadStrideInfo(1, 1, 0, 0))
              .set_name("Logits/Conv2d_1c_1x1/convolution")
              << ReshapeLayer(TensorShape(1001U)).set_name("Predictions/Reshape")
              << SoftmaxLayer().set_name("Predictions/Softmax")
              << OutputLayer(get_output_accessor(common_params, 5));

        // Finalize graph
        GraphConfig config;
        config.num_threads        = common_params.threads;
        config.use_tuner          = common_params.enable_tuner;
        config.tuner_mode         = common_params.tuner_mode;
        config.tuner_file         = common_params.tuner_file;
        config.mlgo_file          = common_params.mlgo_file;
        config.use_synthetic_type = arm_compute::is_data_type_quantized(common_params.data_type);
        config.synthetic_type     = common_params.data_type;
        graph.finalize(common_params.target, config);

        return true;
    }

    void do_run() override
    {
        graph.run();
    }

private:
    CommandLineParser  cmd_parser;
    CommonGraphOptions common_opts;
    CommonGraphParams  common_params;
    Stream             graph;

private:
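    /** Builds an Inception-A block (Mixed_5b/5c/5d): four parallel branches - a 1x1 convolution,
     *  a 1x1 -> 5x5 pair, a 1x1 -> 3x3 -> 3x3 chain and a 3x3 average-pool -> 1x1 branch - each
     *  convolution followed by batch normalization and ReLU, concatenated along the channel axis. */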
    ConcatLayer get_inception_node_A(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
                                     unsigned int a_filt,
                                     std::tuple<unsigned int, unsigned int> b_filters,
                                     std::tuple<unsigned int, unsigned int, unsigned int> c_filters,
                                     unsigned int d_filt,
                                     bool         is_name_different = false)
    {
        std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";

        // This is due to a naming issue in the tf model
        std::string conv_id0 = "_0a_";
        std::string conv_id1 = "2d_0b_";
        if(is_name_different)
        {
            conv_id0 = "_0b_";
            conv_id1 = "_1_0c_";
        }

        SubStream i_a(graph);
        i_a << ConvolutionLayer(
                1U, 1U, a_filt,
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 0))
            .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                nullptr,
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu");

        SubStream i_b(graph);
        i_b << ConvolutionLayer(
                1U, 1U, std::get<0>(b_filters),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 0))
            .set_name(param_path + "/Branch_1/Conv2d" + conv_id0 + "1x1/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_BatchNorm_moving_variance.npy"),
                nullptr,
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_1/Conv2d" + conv_id0 + "1x1/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d" + conv_id0 + "1x1/Relu")
            << ConvolutionLayer(
                5U, 5U, std::get<1>(b_filters),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 2, 2))
            .set_name(param_path + "/Branch_1/Conv2d" + conv_id1 + "5x5/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_BatchNorm_moving_variance.npy"),
                nullptr,
                get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_1/Conv2d" + conv_id1 + "5x5/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d" + conv_id1 + "5x5/Relu");

        SubStream i_c(graph);
        i_c << ConvolutionLayer(
                1U, 1U, std::get<0>(c_filters),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 0))
            .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                nullptr,
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(
                3U, 3U, std::get<1>(c_filters),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 1, 1))
            .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                nullptr,
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0b_3x3/Relu")
            << ConvolutionLayer(
                3U, 3U, std::get<2>(c_filters),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 1, 1))
            .set_name(param_path + "/Branch_2/Conv2d_0c_3x3/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_variance.npy"),
                nullptr,
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_2/Conv2d_0c_3x3/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0c_3x3/Relu");

        SubStream i_d(graph);
        i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, common_params.data_layout, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL),
                                             true))
            .set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool")
            << ConvolutionLayer(
                1U, 1U, d_filt,
                get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 0))
            .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
                nullptr,
                get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu");

        return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
    }

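    /** Builds the grid-size reduction block used for Mixed_6a: a stride-2 3x3 convolution branch,
     *  a 1x1 -> 3x3 -> stride-2 3x3 branch and a stride-2 3x3 max-pool branch, concatenated. */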
    ConcatLayer get_inception_node_B(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
                                     unsigned int a_filt,
                                     std::tuple<unsigned int, unsigned int, unsigned int> b_filters)
    {
        std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
        SubStream   i_a(graph);
        i_a << ConvolutionLayer(
                3U, 3U, a_filt,
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(2, 2, 0, 0))
            .set_name(param_path + "/Branch_0/Conv2d_1a_1x1/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_BatchNorm_moving_variance.npy"),
                nullptr,
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_0/Conv2d_1a_1x1/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_1a_1x1/Relu");

        SubStream i_b(graph);
        i_b << ConvolutionLayer(
                1U, 1U, std::get<0>(b_filters),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 0))
            .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                nullptr,
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(
                3U, 3U, std::get<1>(b_filters),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 1, 1))
            .set_name(param_path + "/Branch_1/Conv2d_0b_3x3/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                nullptr,
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_1/Conv2d_0b_3x3/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0b_3x3/Relu")
            << ConvolutionLayer(
                3U, 3U, std::get<2>(b_filters),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(2, 2, 0, 0))
            .set_name(param_path + "/Branch_1/Conv2d_1a_1x1/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_BatchNorm_moving_variance.npy"),
                nullptr,
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_1/Conv2d_1a_1x1/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_1a_1x1/Relu");

        SubStream i_c(graph);
        i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))).set_name(param_path + "/Branch_2/MaxPool_1a_3x3/MaxPool");

        return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c));
    }

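    /** Builds an Inception block with factorized 7x7 convolutions (Mixed_6b..6e): a 1x1 branch,
     *  a 1x1 -> 1x7 -> 7x1 branch, a 1x1 -> 7x1 -> 1x7 -> 7x1 -> 1x7 branch and an
     *  average-pool -> 1x1 branch, each convolution followed by batch normalization and ReLU. */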
    ConcatLayer get_inception_node_C(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
                                     unsigned int a_filt,
                                     std::tuple<unsigned int, unsigned int, unsigned int> b_filters,
                                     std::tuple<unsigned int, unsigned int, unsigned int, unsigned int, unsigned int> c_filters,
                                     unsigned int d_filt)
    {
        std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
        SubStream   i_a(graph);
        i_a << ConvolutionLayer(
                1U, 1U, a_filt,
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 0))
            .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                nullptr,
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu");

        SubStream i_b(graph);
        i_b << ConvolutionLayer(
                1U, 1U, std::get<0>(b_filters),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 0))
            .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                nullptr,
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(
                7U, 1U, std::get<1>(b_filters),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 3, 0))
            .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
                nullptr,
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0b_1x7/Relu")
            << ConvolutionLayer(
                1U, 7U, std::get<2>(b_filters),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 3))
            .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
                nullptr,
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0c_7x1/Relu");

        SubStream i_c(graph);
        i_c << ConvolutionLayer(
                1U, 1U, std::get<0>(c_filters),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 0))
            .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                nullptr,
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(
                1U, 7U, std::get<1>(c_filters),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 3))
            .set_name(param_path + "/Branch_2/Conv2d_0b_7x1/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_moving_variance.npy"),
                nullptr,
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_2/Conv2d_0b_7x1/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0b_7x1/Relu")
            << ConvolutionLayer(
                7U, 1U, std::get<2>(c_filters),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 3, 0))
            .set_name(param_path + "/Branch_2/Conv2d_0c_1x7/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_moving_variance.npy"),
                nullptr,
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_2/Conv2d_0c_1x7/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0c_1x7/Relu")
            << ConvolutionLayer(
                1U, 7U, std::get<3>(c_filters),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 3))
            .set_name(param_path + "/Branch_2/Conv2d_0d_7x1/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_moving_variance.npy"),
                nullptr,
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_2/Conv2d_0d_7x1/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0d_7x1/Relu")
            << ConvolutionLayer(
                7U, 1U, std::get<4>(c_filters),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 3, 0))
            .set_name(param_path + "/Branch_2/Conv2d_0e_1x7/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_moving_variance.npy"),
                nullptr,
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_2/Conv2d_0e_1x7/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0e_1x7/Relu");

        SubStream i_d(graph);
        i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, common_params.data_layout, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL),
                                             true))
            .set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool")
            << ConvolutionLayer(
                1U, 1U, d_filt,
                get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 0))
            .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
                nullptr,
                get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu");

        return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
    }

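    /** Builds the grid-size reduction block used for Mixed_7a: a 1x1 -> stride-2 3x3 branch and a
     *  1x1 branch with factorized 7x7 convolutions; filter counts are taken from a_filters and b_filters. */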
    ConcatLayer get_inception_node_D(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
                                     std::tuple<unsigned int, unsigned int> a_filters,
                                     std::tuple<unsigned int, unsigned int, unsigned int, unsigned int> b_filters)
    {
        std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
        SubStream   i_a(graph);
        i_a << ConvolutionLayer(
                1U, 1U, std::get<0>(a_filters),
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 0))
            .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                nullptr,
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(
                3U, 3U, std::get<1>(a_filters),
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(2, 2, 0, 0))
            .set_name(param_path + "/Branch_0/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                nullptr,
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_0/Conv2d_1a_3x3/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_1a_3x3/Relu");

        SubStream i_b(graph);
        i_b << ConvolutionLayer(
                1U, 1U, std::get<0>(b_filters),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 0))
            .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                nullptr,
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(
                7U, 1U, std::get<1>(b_filters),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 3, 0))
            .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
                nullptr,
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"),
649*c217d954SCole Faust                 0.001f)
650*c217d954SCole Faust             .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/BatchNorm/batchnorm")
651*c217d954SCole Faust             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0b_1x7/Relu")
652*c217d954SCole Faust             << ConvolutionLayer(
653*c217d954SCole Faust                 1U, 7U, std::get<2>(b_filters),
654*c217d954SCole Faust                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_weights.npy", weights_layout),
655*c217d954SCole Faust                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
656*c217d954SCole Faust                 PadStrideInfo(1, 1, 0, 3))
657*c217d954SCole Faust             .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/convolution")
658*c217d954SCole Faust             << BatchNormalizationLayer(
659*c217d954SCole Faust                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
660*c217d954SCole Faust                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
661*c217d954SCole Faust                 nullptr,
662*c217d954SCole Faust                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"),
663*c217d954SCole Faust                 0.001f)
664*c217d954SCole Faust             .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/BatchNorm/batchnorm")
665*c217d954SCole Faust             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0c_7x1/Relu")
666*c217d954SCole Faust             << ConvolutionLayer(
667*c217d954SCole Faust                 3U, 3U, std::get<3>(b_filters),
668*c217d954SCole Faust                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout),
669*c217d954SCole Faust                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
670*c217d954SCole Faust                 PadStrideInfo(2, 2, 0, 0))
671*c217d954SCole Faust             .set_name(param_path + "/Branch_1/Conv2d_1a_3x3/convolution")
672*c217d954SCole Faust             << BatchNormalizationLayer(
673*c217d954SCole Faust                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
674*c217d954SCole Faust                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
675*c217d954SCole Faust                 nullptr,
676*c217d954SCole Faust                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"),
677*c217d954SCole Faust                 0.001f)
678*c217d954SCole Faust             .set_name(param_path + "/Branch_1/Conv2d_1a_3x3/BatchNorm/batchnorm")
679*c217d954SCole Faust             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_1a_3x3/Relu");
680*c217d954SCole Faust 
681*c217d954SCole Faust         SubStream i_c(graph);
682*c217d954SCole Faust         i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))).set_name(param_path + "/Branch_2/MaxPool_1a_3x3/MaxPool");
683*c217d954SCole Faust 
684*c217d954SCole Faust         return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c));
685*c217d954SCole Faust     }
686*c217d954SCole Faust 
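    // get_inception_node_E below assembles the expanded-filter-bank block of Inception V3
    // (the "Mixed_7b"/"Mixed_7c"-style node). Branch_1 and Branch_2 each fork via SubStream
    // copies (i_b1/i_b2 and i_c1/i_c2) so that parallel 1x3 and 3x1 convolutions consume the
    // same input, and each fork pair is re-joined with a ConcatLayer before the final four-way
    // concatenation; is_name_different selects the "_0c_" weight-file names used by the second
    // of the two blocks in the TF checkpoint. A sketch of a typical invocation, with filter
    // counts assumed from the reference Mixed_7b definition rather than copied from this file
    // (Mixed_7c would additionally pass true for is_name_different):
    //
    //     graph << get_inception_node_E(data_path, "Mixed_7b", weights_layout, 320U,
    //                                   std::make_tuple(384U, 384U, 384U),
    //                                   std::make_tuple(448U, 384U, 384U, 384U), 192U)
    //           .set_name("Mixed_7b/concat");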
687*c217d954SCole Faust     ConcatLayer get_inception_node_E(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
688*c217d954SCole Faust                                      unsigned int a_filt,
689*c217d954SCole Faust                                      std::tuple<unsigned int, unsigned int, unsigned int> b_filters,
690*c217d954SCole Faust                                      std::tuple<unsigned int, unsigned int, unsigned int, unsigned int> c_filters,
691*c217d954SCole Faust                                      unsigned int d_filt,
692*c217d954SCole Faust                                      bool         is_name_different = false)
693*c217d954SCole Faust     {
694*c217d954SCole Faust         // The TF model names Branch_1's second 3x1 convolution inconsistently across the two blocks that use this builder (_0b_ vs _0c_), so select the matching suffix
695*c217d954SCole Faust         std::string conv_id = "_0b_";
696*c217d954SCole Faust         if(is_name_different)
697*c217d954SCole Faust         {
698*c217d954SCole Faust             conv_id = "_0c_";
699*c217d954SCole Faust         }
700*c217d954SCole Faust 
701*c217d954SCole Faust         std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
702*c217d954SCole Faust         SubStream   i_a(graph);
703*c217d954SCole Faust         i_a << ConvolutionLayer(
704*c217d954SCole Faust                 1U, 1U, a_filt,
705*c217d954SCole Faust                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
706*c217d954SCole Faust                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
707*c217d954SCole Faust                 PadStrideInfo(1, 1, 0, 0))
708*c217d954SCole Faust             .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/convolution")
709*c217d954SCole Faust             << BatchNormalizationLayer(
710*c217d954SCole Faust                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
711*c217d954SCole Faust                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
712*c217d954SCole Faust                 nullptr,
713*c217d954SCole Faust                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
714*c217d954SCole Faust                 0.001f)
715*c217d954SCole Faust             .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm/batchnorm")
716*c217d954SCole Faust             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu");
717*c217d954SCole Faust 
718*c217d954SCole Faust         SubStream i_b(graph);
719*c217d954SCole Faust         i_b << ConvolutionLayer(
720*c217d954SCole Faust                 1U, 1U, std::get<0>(b_filters),
721*c217d954SCole Faust                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
722*c217d954SCole Faust                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
723*c217d954SCole Faust                 PadStrideInfo(1, 1, 0, 0))
724*c217d954SCole Faust             .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/convolution")
725*c217d954SCole Faust             << BatchNormalizationLayer(
726*c217d954SCole Faust                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
727*c217d954SCole Faust                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
728*c217d954SCole Faust                 nullptr,
729*c217d954SCole Faust                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
730*c217d954SCole Faust                 0.001f)
731*c217d954SCole Faust             .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm/batchnorm")
732*c217d954SCole Faust             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu");
733*c217d954SCole Faust 
734*c217d954SCole Faust         SubStream i_b1(i_b);
735*c217d954SCole Faust         i_b1 << ConvolutionLayer(
736*c217d954SCole Faust                  3U, 1U, std::get<1>(b_filters),
737*c217d954SCole Faust                  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_weights.npy", weights_layout),
738*c217d954SCole Faust                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
739*c217d954SCole Faust                  PadStrideInfo(1, 1, 1, 0))
740*c217d954SCole Faust              .set_name(param_path + "/Branch_1/Conv2d_0b_1x3/convolution")
741*c217d954SCole Faust              << BatchNormalizationLayer(
742*c217d954SCole Faust                  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_mean.npy"),
743*c217d954SCole Faust                  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_variance.npy"),
744*c217d954SCole Faust                  nullptr,
745*c217d954SCole Faust                  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_beta.npy"),
746*c217d954SCole Faust                  0.001f)
747*c217d954SCole Faust              .set_name(param_path + "/Branch_1/Conv2d_0b_1x3/BatchNorm/batchnorm")
748*c217d954SCole Faust              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0b_1x3/Relu");
749*c217d954SCole Faust 
750*c217d954SCole Faust         SubStream i_b2(i_b);
751*c217d954SCole Faust         i_b2 << ConvolutionLayer(
752*c217d954SCole Faust                  1U, 3U, std::get<2>(b_filters),
753*c217d954SCole Faust                  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_weights.npy", weights_layout),
754*c217d954SCole Faust                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
755*c217d954SCole Faust                  PadStrideInfo(1, 1, 0, 1))
756*c217d954SCole Faust              .set_name(param_path + "/Branch_1/Conv2d" + conv_id + "3x1/convolution")
757*c217d954SCole Faust              << BatchNormalizationLayer(
758*c217d954SCole Faust                  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_BatchNorm_moving_mean.npy"),
759*c217d954SCole Faust                  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_BatchNorm_moving_variance.npy"),
760*c217d954SCole Faust                  nullptr,
761*c217d954SCole Faust                  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_BatchNorm_beta.npy"),
762*c217d954SCole Faust                  0.001f)
763*c217d954SCole Faust              .set_name(param_path + "/Branch_1/Conv2d" + conv_id + "3x1/BatchNorm/batchnorm")
764*c217d954SCole Faust              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d" + conv_id + "3x1/Relu");
765*c217d954SCole Faust 
766*c217d954SCole Faust         // Merge b1 and b2
767*c217d954SCole Faust         i_b << ConcatLayer(std::move(i_b1), std::move(i_b2)).set_name(param_path + "/Branch_1/concat");
768*c217d954SCole Faust 
769*c217d954SCole Faust         SubStream i_c(graph);
770*c217d954SCole Faust         i_c << ConvolutionLayer(
771*c217d954SCole Faust                 1U, 1U, std::get<0>(c_filters),
772*c217d954SCole Faust                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
773*c217d954SCole Faust                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
774*c217d954SCole Faust                 PadStrideInfo(1, 1, 0, 0))
775*c217d954SCole Faust             .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/convolution")
776*c217d954SCole Faust             << BatchNormalizationLayer(
777*c217d954SCole Faust                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
778*c217d954SCole Faust                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
779*c217d954SCole Faust                 nullptr,
780*c217d954SCole Faust                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
781*c217d954SCole Faust                 0.001f)
782*c217d954SCole Faust             .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/BatchNorm/batchnorm")
783*c217d954SCole Faust             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Relu")
784*c217d954SCole Faust             << ConvolutionLayer(
785*c217d954SCole Faust                 3U, 3U, std::get<1>(c_filters),
786*c217d954SCole Faust                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout),
787*c217d954SCole Faust                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
788*c217d954SCole Faust                 PadStrideInfo(1, 1, 1, 1))
789*c217d954SCole Faust             .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/convolution")
790*c217d954SCole Faust             << BatchNormalizationLayer(
791*c217d954SCole Faust                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
792*c217d954SCole Faust                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
793*c217d954SCole Faust                 nullptr,
794*c217d954SCole Faust                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
795*c217d954SCole Faust                 0.001f)
796*c217d954SCole Faust             .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/BatchNorm/batchnorm")
797*c217d954SCole Faust             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0b_3x3/Relu");
798*c217d954SCole Faust 
799*c217d954SCole Faust         SubStream i_c1(i_c);
800*c217d954SCole Faust         i_c1 << ConvolutionLayer(
801*c217d954SCole Faust                  3U, 1U, std::get<2>(c_filters),
802*c217d954SCole Faust                  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_weights.npy", weights_layout),
803*c217d954SCole Faust                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
804*c217d954SCole Faust                  PadStrideInfo(1, 1, 1, 0))
805*c217d954SCole Faust              .set_name(param_path + "/Branch_2/Conv2d_0c_1x3/convolution")
806*c217d954SCole Faust              << BatchNormalizationLayer(
807*c217d954SCole Faust                  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_moving_mean.npy"),
808*c217d954SCole Faust                  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_moving_variance.npy"),
809*c217d954SCole Faust                  nullptr,
810*c217d954SCole Faust                  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_beta.npy"),
811*c217d954SCole Faust                  0.001f)
812*c217d954SCole Faust              .set_name(param_path + "/Branch_2/Conv2d_0c_1x3/BatchNorm/batchnorm")
813*c217d954SCole Faust              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0c_1x3/Relu");
814*c217d954SCole Faust 
815*c217d954SCole Faust         SubStream i_c2(i_c);
816*c217d954SCole Faust         i_c2 << ConvolutionLayer(
817*c217d954SCole Faust                  1U, 3U, std::get<3>(c_filters),
818*c217d954SCole Faust                  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_weights.npy", weights_layout),
819*c217d954SCole Faust                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
820*c217d954SCole Faust                  PadStrideInfo(1, 1, 0, 1))
821*c217d954SCole Faust              .set_name(param_path + "/Branch_2/Conv2d_0d_3x1/convolution")
822*c217d954SCole Faust              << BatchNormalizationLayer(
823*c217d954SCole Faust                  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_BatchNorm_moving_mean.npy"),
824*c217d954SCole Faust                  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_BatchNorm_moving_variance.npy"),
825*c217d954SCole Faust                  nullptr,
826*c217d954SCole Faust                  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_BatchNorm_beta.npy"),
827*c217d954SCole Faust                  0.001f)
828*c217d954SCole Faust              .set_name(param_path + "/Branch_2/Conv2d_0d_3x1/BatchNorm/batchnorm")
829*c217d954SCole Faust              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0d_3x1/Relu");
830*c217d954SCole Faust 
831*c217d954SCole Faust         // Merge i_c1 and i_c2
832*c217d954SCole Faust         i_c << ConcatLayer(std::move(i_c1), std::move(i_c2)).set_name(param_path + "/Branch_2/concat");
833*c217d954SCole Faust 
834*c217d954SCole Faust         SubStream i_d(graph);
835*c217d954SCole Faust         i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, common_params.data_layout, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL),
836*c217d954SCole Faust                                              true))
837*c217d954SCole Faust             .set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool")
838*c217d954SCole Faust             << ConvolutionLayer(
839*c217d954SCole Faust                 1U, 1U, d_filt,
840*c217d954SCole Faust                 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout),
841*c217d954SCole Faust                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
842*c217d954SCole Faust                 PadStrideInfo(1, 1, 0, 0))
843*c217d954SCole Faust             .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/convolution")
844*c217d954SCole Faust             << BatchNormalizationLayer(
845*c217d954SCole Faust                 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
846*c217d954SCole Faust                 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
847*c217d954SCole Faust                 nullptr,
848*c217d954SCole Faust                 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
849*c217d954SCole Faust                 0.001f)
850*c217d954SCole Faust             .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm/batchnorm")
851*c217d954SCole Faust             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu");
852*c217d954SCole Faust 
853*c217d954SCole Faust         return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
854*c217d954SCole Faust     }
855*c217d954SCole Faust };
856*c217d954SCole Faust 
857*c217d954SCole Faust /** Main program for Inception V3
858*c217d954SCole Faust  *
859*c217d954SCole Faust  * Model is based on:
860*c217d954SCole Faust  *      https://arxiv.org/abs/1512.00567
861*c217d954SCole Faust  *      "Rethinking the Inception Architecture for Computer Vision"
862*c217d954SCole Faust  *      Christian Szegedy, Vincent Vanhoucke, Sergey Ioffe, Jonathon Shlens, Zbigniew Wojna
863*c217d954SCole Faust  *
864*c217d954SCole Faust  * Provenance: download.tensorflow.org/models/inception_v3_2016_08_28.tar.gz
865*c217d954SCole Faust  *
866*c217d954SCole Faust  * @note To list all the possible arguments, run the binary with the --help option
867*c217d954SCole Faust  *
868*c217d954SCole Faust  * @param[in] argc Number of arguments
869*c217d954SCole Faust  * @param[in] argv Arguments
870*c217d954SCole Faust  */
871*c217d954SCole Faust int main(int argc, char **argv)
872*c217d954SCole Faust {
873*c217d954SCole Faust     return arm_compute::utils::run_example<InceptionV3Example>(argc, argv);
874*c217d954SCole Faust }
875
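// A sketch of how this example is typically launched once built (flag names come from
// CommonGraphOptions; the paths and exact accepted values shown here are assumptions):
//
//     ./graph_inception_v3 --target=NEON --data=/path/to/assets \
//         --image=/path/to/input.ppm --labels=/path/to/labels.txt
//
// where /path/to/assets is expected to contain the extracted
// cnn_data/inceptionv3_model/*.npy weights referenced above; run with --help for the
// full option list.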