xref: /aosp_15_r20/external/ComputeLibrary/examples/graph_inception_v4.cpp (revision c217d954acce2dbc11938adb493fc0abd69584f3)
/*
 * Copyright (c) 2018-2021 Arm Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#include "arm_compute/graph.h"
#ifdef ARM_COMPUTE_CL
#include "arm_compute/runtime/CL/Utils.h"
#endif /* ARM_COMPUTE_CL */
#include "support/ToolchainSupport.h"
#include "utils/CommonGraphOptions.h"
#include "utils/GraphUtils.h"
#include "utils/Utils.h"

using namespace arm_compute;
using namespace arm_compute::utils;
using namespace arm_compute::graph::frontend;
using namespace arm_compute::graph_utils;

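// A typical invocation looks like the following (option names come from utils/CommonGraphOptions;
// the paths are illustrative):
//   ./graph_inception_v4 --target=NEON --threads=4 --data=/path/to/cnn_data --image=img.ppm --labels=labels.txt
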
/** Example demonstrating how to implement InceptionV4's network using the Compute Library's graph API */
class InceptionV4Example final : public Example
{
public:
    InceptionV4Example()
        : cmd_parser(), common_opts(cmd_parser), common_params(), graph(0, "InceptionV4")
    {
    }
    bool do_setup(int argc, char **argv) override
    {
        // Parse arguments
        cmd_parser.parse(argc, argv);
        cmd_parser.validate();

        // Consume common parameters
        common_params = consume_common_graph_parameters(common_opts);

        // Return when help menu is requested
        if(common_params.help)
        {
            cmd_parser.print_help(argv[0]);
            return false;
        }

        // Print parameter values
        std::cout << common_params << std::endl;

        // Get trainable parameters data path
        std::string data_path = common_params.data_path;

        // Create a preprocessor object
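        // (TFPreproccessor follows the TensorFlow convention: by default it scales input pixel
        //  values from [0, 255] into the [-1, 1] range expected by the network.)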
        std::unique_ptr<IPreprocessor> preprocessor = std::make_unique<TFPreproccessor>();

        // Create input descriptor
        const auto        operation_layout = common_params.data_layout;
        const TensorShape tensor_shape     = permute_shape(TensorShape(299U, 299U, 3U, common_params.batches), DataLayout::NCHW, operation_layout);
        TensorDescriptor  input_descriptor = TensorDescriptor(tensor_shape, common_params.data_type).set_layout(operation_layout);
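        // (permute_shape re-orders the NCHW-style shape (W, H, C, N) = (299, 299, 3, N) into the
        //  requested operation layout; for NHWC this yields (3, 299, 299, N), with the channel
        //  dimension innermost.)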

        // Set weights trained layout
        const DataLayout weights_layout = DataLayout::NCHW;

        graph << common_params.target
              << common_params.fast_math_hint
              << InputLayer(input_descriptor, get_input_accessor(common_params, std::move(preprocessor), false))
              // Conv2d_1a_3x3
              << ConvolutionLayer(3U, 3U, 32U,
                                  get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_1a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
              .set_name("Conv2d_1a_3x3/Conv2D")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                         0.001f)
              .set_name("Conv2d_1a_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_1a_3x3/Relu")
              // Conv2d_2a_3x3
              << ConvolutionLayer(3U, 3U, 32U,
                                  get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_2a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_2a_3x3/Conv2D")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_2a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_2a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_2a_3x3_BatchNorm_beta.npy"),
                                         0.001f)
              .set_name("Conv2d_2a_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2a_3x3/Relu")
              // Conv2d_2b_3x3
              << ConvolutionLayer(3U, 3U, 64U,
                                  get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_2b_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 1))
              .set_name("Conv2d_2b_3x3/Conv2D")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_2b_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_2b_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_2b_3x3_BatchNorm_beta.npy"),
                                         0.001f)
              .set_name("Conv2d_2b_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2b_3x3/Relu");

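        // The remainder of the network follows the InceptionV4 topology: the Mixed_3a/4a/5a stem
        // blocks, 4 Inception-A blocks, a Reduction-A block, 7 Inception-B blocks, a Reduction-B
        // block and 3 Inception-C blocks, followed by average pooling, the logits and a softmax.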
        graph << get_mixed_3a(data_path, weights_layout).set_name("Mixed_3a/concat");
        graph << get_mixed_4a(data_path, weights_layout).set_name("Mixed_4a/concat");
        graph << get_mixed_5a(data_path, weights_layout).set_name("Mixed_5a/concat");
        // 4 inception A blocks
        graph << get_inceptionA_block(data_path, weights_layout, "Mixed_5b").set_name("Mixed_5b/concat");
        graph << get_inceptionA_block(data_path, weights_layout, "Mixed_5c").set_name("Mixed_5c/concat");
        graph << get_inceptionA_block(data_path, weights_layout, "Mixed_5d").set_name("Mixed_5d/concat");
        graph << get_inceptionA_block(data_path, weights_layout, "Mixed_5e").set_name("Mixed_5e/concat");
        // reduction A block
        graph << get_reductionA_block(data_path, weights_layout).set_name("Mixed_6a/concat");
        // 7 inception B blocks
        graph << get_inceptionB_block(data_path, weights_layout, "Mixed_6b").set_name("Mixed_6b/concat");
        graph << get_inceptionB_block(data_path, weights_layout, "Mixed_6c").set_name("Mixed_6c/concat");
        graph << get_inceptionB_block(data_path, weights_layout, "Mixed_6d").set_name("Mixed_6d/concat");
        graph << get_inceptionB_block(data_path, weights_layout, "Mixed_6e").set_name("Mixed_6e/concat");
        graph << get_inceptionB_block(data_path, weights_layout, "Mixed_6f").set_name("Mixed_6f/concat");
        graph << get_inceptionB_block(data_path, weights_layout, "Mixed_6g").set_name("Mixed_6g/concat");
        graph << get_inceptionB_block(data_path, weights_layout, "Mixed_6h").set_name("Mixed_6h/concat");
        // reduction B block
        graph << get_reductionB_block(data_path, weights_layout).set_name("Mixed_7a/concat");
        // 3 inception C blocks
        graph << get_inceptionC_block(data_path, weights_layout, "Mixed_7b").set_name("Mixed_7b/concat");
        graph << get_inceptionC_block(data_path, weights_layout, "Mixed_7c").set_name("Mixed_7c/concat");
        graph << get_inceptionC_block(data_path, weights_layout, "Mixed_7d").set_name("Mixed_7d/concat");
        graph << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, operation_layout)).set_name("Logits/AvgPool_1a/AvgPool")
              << FlattenLayer().set_name("Logits/Flatten")
              << FullyConnectedLayer(
                  1001U,
                  get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Logits_Logits_weights.npy", weights_layout),
                  get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Logits_Logits_biases.npy"))
              .set_name("Logits/MatMul")
              << SoftmaxLayer().set_name("Logits/Predictions")
              << OutputLayer(get_output_accessor(common_params, 5));
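        // (The fully connected layer has 1001 outputs because the TensorFlow-slim checkpoint keeps
        //  a background class alongside the 1000 ImageNet classes; get_output_accessor(..., 5)
        //  prints the top-5 predictions.)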

        // Finalize graph
        GraphConfig config;
        config.num_threads        = common_params.threads;
        config.use_tuner          = common_params.enable_tuner;
        config.tuner_mode         = common_params.tuner_mode;
        config.tuner_file         = common_params.tuner_file;
        config.mlgo_file          = common_params.mlgo_file;
        config.use_synthetic_type = arm_compute::is_data_type_quantized(common_params.data_type);
        config.synthetic_type     = common_params.data_type;
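        // (use_tuner/tuner_mode/tuner_file drive the OpenCL kernel tuner and only take effect on
        //  CL targets; mlgo_file optionally points to MLGO heuristics used for kernel selection.)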

        // Load precompiled kernels from a file into the kernel library so that they do not have to
        // be compiled again the next time they are needed.
        if(common_params.enable_cl_cache)
        {
#ifdef ARM_COMPUTE_CL
            restore_program_cache_from_file();
#endif /* ARM_COMPUTE_CL */
        }

        graph.finalize(common_params.target, config);

        // Save the OpenCL kernels to a file
        if(common_params.enable_cl_cache)
        {
#ifdef ARM_COMPUTE_CL
            save_program_cache_to_file();
#endif /* ARM_COMPUTE_CL */
        }

        return true;
    }

    void do_run() override
    {
        graph.run();
    }

private:
    CommandLineParser  cmd_parser;
    CommonGraphOptions common_opts;
    CommonGraphParams  common_params;
    Stream             graph;

private:
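    /** Mixed_3a stem block: concatenation of a stride-2 3x3 max-pool branch and a
     *  stride-2 3x3 convolution branch (96 filters). */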
    ConcatLayer get_mixed_3a(const std::string &data_path, DataLayout weights_layout)
    {
        std::string total_path = "/cnn_data/inceptionv4_model/Mixed_3a_";

        SubStream i_a(graph);
        i_a << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL),
                                             true))
            .set_name("Mixed_3a/Branch_0/MaxPool_0a_3x3/MaxPool");

        SubStream i_b(graph);
        i_b << ConvolutionLayer(3U, 3U, 96U,
                                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_3a/Branch_1/Conv2d_0a_3x3/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_3x3_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name("Mixed_3a/Branch_1/Conv2d_0a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_3a/Branch_1/Conv2d_0a_3x3/Relu");

        return ConcatLayer(std::move(i_a), std::move(i_b));
    }

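    /** Mixed_4a stem block: concatenation of a 1x1 -> 3x3 convolution branch and a
     *  1x1 -> 1x7 -> 7x1 -> 3x3 factorised-convolution branch. */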
    ConcatLayer get_mixed_4a(const std::string &data_path, DataLayout weights_layout)
    {
        std::string total_path = "/cnn_data/inceptionv4_model/Mixed_4a_";

        SubStream i_a(graph);
        i_a << ConvolutionLayer(1U, 1U, 64U,
                                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_4a/Branch_0/Conv2d_0a_1x1/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name("Mixed_4a/Branch_0/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_4a/Branch_0/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 96U,
                                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_4a/Branch_0/Conv2d_1a_3x3/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name("Mixed_4a/Branch_0/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_4a/Branch_0/Conv2d_1a_3x3/Relu");

        SubStream i_b(graph);
        i_b << ConvolutionLayer(1U, 1U, 64U,
                                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_4a/Branch_1/Conv2d_0a_1x1/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name("Mixed_4a/Branch_1/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_4a/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(7U, 1U, 64U,
                                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 3, 0))
            .set_name("Mixed_4a/Branch_1/Conv2d_0b_1x7/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name("Mixed_4a/Branch_1/Conv2d_0b_1x7/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_4a/Branch_1/Conv2d_0b_1x7/Relu")
            << ConvolutionLayer(1U, 7U, 64U,
                                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 3))
            .set_name("Mixed_4a/Branch_1/Conv2d_0c_7x1/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name("Mixed_4a/Branch_1/Conv2d_0c_7x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_4a/Branch_1/Conv2d_0c_7x1/Relu")
            << ConvolutionLayer(3U, 3U, 96U,
                                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_4a/Branch_1/Conv2d_1a_3x3/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name("Mixed_4a/Branch_1/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_4a/Branch_1/Conv2d_1a_3x3/Relu");

        return ConcatLayer(std::move(i_a), std::move(i_b));
    }

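    /** Mixed_5a stem block: concatenation of a stride-2 3x3 convolution branch (192 filters)
     *  and a stride-2 3x3 max-pool branch. */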
    ConcatLayer get_mixed_5a(const std::string &data_path, DataLayout weights_layout)
    {
        std::string total_path = "/cnn_data/inceptionv4_model/Mixed_5a_";

        SubStream i_a(graph);
        i_a << ConvolutionLayer(3U, 3U, 192U,
                                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_5a/Branch_0/Conv2d_1a_3x3/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name("Mixed_5a/Branch_0/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5a/Branch_0/Conv2d_1a_3x3/Relu");

        SubStream i_b(graph);
        i_b << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL),
                                             true))
            .set_name("Mixed_5a/Branch_1/MaxPool_1a_3x3/MaxPool");

        return ConcatLayer(std::move(i_a), std::move(i_b));
    }

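    /** Inception-A block (Mixed_5b..Mixed_5e): four parallel branches (1x1; 1x1 -> 3x3;
     *  1x1 -> 3x3 -> 3x3; average-pool -> 1x1) concatenated along the channel axis. */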
    ConcatLayer get_inceptionA_block(const std::string &data_path, DataLayout weights_layout, std::string &&param_path)
    {
        std::string total_path = "/cnn_data/inceptionv4_model/" + param_path + "_";

        SubStream i_a(graph);
        i_a << ConvolutionLayer(1U, 1U, 96U,
                                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
            .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu");

        SubStream i_b(graph);
        i_b << ConvolutionLayer(1U, 1U, 64U,
                                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
            .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 96U,
                                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 1))
            .set_name(param_path + "/Branch_1/Conv2d_0b_3x3/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name(param_path + "/Branch_1/Conv2d_0b_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0b_3x3/Relu");

        SubStream i_c(graph);
        i_c << ConvolutionLayer(1U, 1U, 64U,
                                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
            .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 96U,
                                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 1))
            .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0b_3x3/Relu")
            << ConvolutionLayer(3U, 3U, 96U,
                                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 1))
            .set_name(param_path + "/Branch_2/Conv2d_0c_3x3/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name(param_path + "/Branch_2/Conv2d_0c_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0c_3x3/Relu");

        SubStream i_d(graph);
        i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, common_params.data_layout, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL),
                                             true))
            .set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool")
            << ConvolutionLayer(1U, 1U, 96U,
                                get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
            .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu");

        return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
    }

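    /** Reduction-A block (Mixed_6a): halves the spatial resolution using a stride-2 3x3
     *  convolution branch, a 1x1 -> 3x3 -> stride-2 3x3 branch and a stride-2 max-pool branch. */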
    ConcatLayer get_reductionA_block(const std::string &data_path, DataLayout weights_layout)
    {
        std::string total_path = "/cnn_data/inceptionv4_model/Mixed_6a_";

        SubStream i_a(graph);
        i_a << ConvolutionLayer(3U, 3U, 384U,
                                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/Relu");

        SubStream i_b(graph);
        i_b << ConvolutionLayer(1U, 1U, 192U,
                                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 224U,
                                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 1))
            .set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/Relu")
            << ConvolutionLayer(3U, 3U, 256U,
                                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/Relu");

        SubStream i_c(graph);
        i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL),
                                             true))
            .set_name("Mixed_6a/Branch_2/MaxPool_1a_3x3/MaxPool");

        return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c));
    }

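    /** Inception-B block (Mixed_6b..Mixed_6h): four parallel branches (1x1; 1x1 -> 1x7 -> 7x1;
     *  1x1 -> 7x1 -> 1x7 -> 7x1 -> 1x7; average-pool -> 1x1) concatenated along the channel axis. */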
    ConcatLayer get_inceptionB_block(const std::string &data_path, DataLayout weights_layout, std::string &&param_path)
    {
        std::string total_path = "/cnn_data/inceptionv4_model/" + param_path + "_";

        SubStream i_a(graph);
        i_a << ConvolutionLayer(1U, 1U, 384U,
                                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
            .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu");

        SubStream i_b(graph);
        i_b << ConvolutionLayer(1U, 1U, 192U,
                                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
            .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(7U, 1U, 224U,
                                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 3, 0))
            .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0b_1x7/Relu")
            << ConvolutionLayer(1U, 7U, 256U,
                                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 3))
            .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0c_7x1/Relu");

        SubStream i_c(graph);
        i_c << ConvolutionLayer(1U, 1U, 192U,
                                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
            .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(1U, 7U, 192U,
                                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 3))
            .set_name(param_path + "/Branch_2/Conv2d_0b_7x1/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name(param_path + "/Branch_2/Conv2d_0b_7x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0b_7x1/Relu")
            << ConvolutionLayer(7U, 1U, 224U,
                                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 3, 0))
            .set_name(param_path + "/Branch_2/Conv2d_0c_1x7/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name(param_path + "/Branch_2/Conv2d_0c_1x7/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0c_1x7/Relu")
            << ConvolutionLayer(1U, 7U, 224U,
                                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 3))
            .set_name(param_path + "/Branch_2/Conv2d_0d_7x1/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name(param_path + "/Branch_2/Conv2d_0d_7x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0d_7x1/Relu")
            << ConvolutionLayer(7U, 1U, 256U,
                                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 3, 0))
            .set_name(param_path + "/Branch_2/Conv2d_0e_1x7/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name(param_path + "/Branch_2/Conv2d_0e_1x7/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0e_1x7/Relu");

        SubStream i_d(graph);
        i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, common_params.data_layout, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL),
                                             true))
            .set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool")
            << ConvolutionLayer(1U, 1U, 128U,
                                get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
            .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu");

        return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
    }

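    /** Reduction-B block (Mixed_7a): halves the spatial resolution using a 1x1 -> stride-2 3x3
     *  branch, a 1x1 -> 1x7 -> 7x1 -> stride-2 3x3 branch and a stride-2 max-pool branch. */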
    ConcatLayer get_reductionB_block(const std::string &data_path, DataLayout weights_layout)
    {
        std::string total_path = "/cnn_data/inceptionv4_model/Mixed_7a_";

        SubStream i_a(graph);
        i_a << ConvolutionLayer(1U, 1U, 192U,
                                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 192U,
                                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/Relu");
634 
635         SubStream i_b(graph);
636         i_b << ConvolutionLayer(1U, 1U, 256U,
637                                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
638                                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
639             .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/Conv2D")
640             << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
641                                        get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
642                                        get_random_accessor(1.f, 1.f),
643                                        get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
644                                        0.001f)
645             .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/BatchNorm")
646             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/Relu")
647             << ConvolutionLayer(7U, 1U, 256U,
648                                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_weights.npy", weights_layout),
649                                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 3, 0))
650             .set_name("Mixed_7a/Branch_1/Conv2d_0b_1x7/Conv2D")
651             << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
652                                        get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
653                                        get_random_accessor(1.f, 1.f),
654                                        get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"),
655                                        0.001f)
656             .set_name("Mixed_7a/Branch_1/Conv2d_0b_1x7/BatchNorm")
657             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_1/Conv2d_0b_1x7/Relu")
658             << ConvolutionLayer(1U, 7U, 320U,
659                                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_weights.npy", weights_layout),
660                                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 3))
661             .set_name("Mixed_7a/Branch_1/Conv2d_0c_7x1/Conv2D")
662             << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
663                                        get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
664                                        get_random_accessor(1.f, 1.f),
665                                        get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"),
666                                        0.001f)
667             .set_name("Mixed_7a/Branch_1/Conv2d_0c_7x1/BatchNorm")
668             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_1/Conv2d_0c_7x1/Relu")
669             << ConvolutionLayer(3U, 3U, 320U,
670                                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout),
671                                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
672             .set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/Conv2D")
673             << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
674                                        get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
675                                        get_random_accessor(1.f, 1.f),
676                                        get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"),
677                                        0.001f)
678             .set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/BatchNorm")
679             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/Relu");
680 
        SubStream i_c(graph);
        i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL),
                                             true))
            .set_name("Mixed_7a/Branch_2/MaxPool_1a_3x3/MaxPool");

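        // Concatenate the three reduction branches along the channel axis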
        return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c));
    }

    ConcatLayer get_inceptionC_block(const std::string &data_path, DataLayout weights_layout, std::string &&param_path)
    {
        std::string total_path = "/cnn_data/inceptionv4_model/" + param_path + "_";

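        // Inception-C block: four parallel branches that are concatenated at the end.
        // Branches 1 and 2 factorise square convolutions into asymmetric 1x3/3x1 pairs,
        // following the Inception-v4 paper (https://arxiv.org/abs/1602.07261). Note that
        // get_random_accessor(1.f, 1.f) yields a constant gamma of 1, since the TensorFlow
        // checkpoint stores no scale term for these BatchNorm layers.

        // Branch 0: a single 1x1 convolution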
        SubStream i_a(graph);
        i_a << ConvolutionLayer(1U, 1U, 256U,
                                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
            .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu");

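        // Branch 1: a 1x1 convolution that then forks into parallel 1x3 and 3x1 convolutions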
        SubStream i_b(graph);
        i_b << ConvolutionLayer(
                1U, 1U, 384U,
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 0))
            .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Conv2D")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu");

        SubStream i_b1(i_b);
        i_b1 << ConvolutionLayer(
                 3U, 1U, 256U,
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_weights.npy", weights_layout),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                 PadStrideInfo(1, 1, 1, 0))
             .set_name(param_path + "/Branch_1/Conv2d_0b_1x3/Conv2D")
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_beta.npy"),
                 0.001f)
             .set_name(param_path + "/Branch_1/Conv2d_0b_1x3/BatchNorm")
             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0b_1x3/Relu");

        SubStream i_b2(i_b);
        i_b2 << ConvolutionLayer(
                 1U, 3U, 256U,
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_3x1_weights.npy", weights_layout),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                 PadStrideInfo(1, 1, 0, 1))
             .set_name(param_path + "/Branch_1/Conv2d_0c_3x1/Conv2D")
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_beta.npy"),
                 0.001f)
             .set_name(param_path + "/Branch_1/Conv2d_0c_3x1/BatchNorm")
             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0c_3x1/Relu");

        // Merge b1 and b2
        i_b << ConcatLayer(std::move(i_b1), std::move(i_b2)).set_name(param_path + "/Branch_1/concat");

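        // Branch 2: a 1x1 reduction followed by 3x1 and 1x3 convolutions, ending in another asymmetric fork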
        SubStream i_c(graph);
        i_c << ConvolutionLayer(
                1U, 1U, 384U,
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 0))
            .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Conv2D")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(
                1U, 3U, 448U,
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x1_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 1))
            .set_name(param_path + "/Branch_2/Conv2d_0b_3x1/Conv2D")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x1_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x1_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_2/Conv2d_0b_3x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0b_3x1/Relu")
            << ConvolutionLayer(
                3U, 1U, 512U,
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 1, 0))
            .set_name(param_path + "/Branch_2/Conv2d_0c_1x3/Conv2D")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_moving_variance.npy"),
                get_random_accessor(1.f, 1.f),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_2/Conv2d_0c_1x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0c_1x3/Relu");

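        // Fork of Branch 2: parallel 1x3 and 3x1 convolutions, merged below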
        SubStream i_c1(i_c);
        i_c1 << ConvolutionLayer(
                 3U, 1U, 256U,
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_1x3_weights.npy", weights_layout),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                 PadStrideInfo(1, 1, 1, 0))
             .set_name(param_path + "/Branch_2/Conv2d_0d_1x3/Conv2D")
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_1x3_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_1x3_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_1x3_BatchNorm_beta.npy"),
                 0.001f)
             .set_name(param_path + "/Branch_2/Conv2d_0d_1x3/BatchNorm")
             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0d_1x3/Relu");

        SubStream i_c2(i_c);
        i_c2 << ConvolutionLayer(
                 1U, 3U, 256U,
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_3x1_weights.npy", weights_layout),
                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                 PadStrideInfo(1, 1, 0, 1))
             .set_name(param_path + "/Branch_2/Conv2d_0e_3x1/Conv2D")
             << BatchNormalizationLayer(
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_3x1_BatchNorm_moving_mean.npy"),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_3x1_BatchNorm_moving_variance.npy"),
                 get_random_accessor(1.f, 1.f),
                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_3x1_BatchNorm_beta.npy"),
                 0.001f)
             .set_name(param_path + "/Branch_2/Conv2d_0e_3x1/BatchNorm")
             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0e_3x1/Relu");

        // Merge i_c1 and i_c2
        i_c << ConcatLayer(std::move(i_c1), std::move(i_c2)).set_name(param_path + "/Branch_2/concat");

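        // Branch 3: 3x3 average pooling (stride 1, padded) followed by a 1x1 projection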
        SubStream i_d(graph);
        i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, common_params.data_layout, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL),
                                             true))
            .set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool")
            << ConvolutionLayer(1U, 1U, 256U,
                                get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
            .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu");

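        // Concatenate the four branches along the channel axis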
        return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
    }
};

/** Main program for Inception V4
 *
 * Model is based on:
 *      https://arxiv.org/abs/1602.07261
 *      "Inception-v4, Inception-ResNet and the Impact of Residual Connections on Learning"
 *      Christian Szegedy, Sergey Ioffe, Vincent Vanhoucke, Alex Alemi
 *
 * Provenance: download.tensorflow.org/models/inception_v4_2016_09_09.tar.gz
 *
 * @note To list all the possible arguments, run the binary with the --help option
 *
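 * Example invocation (a sketch; the paths are placeholders and the option set comes from
 * CommonGraphOptions, so consult --help for the values supported by your build):
 *
 *     ./graph_inception_v4 --data=/path/to/cnn_data --image=/path/to/image.ppm --labels=/path/to/labels.txt
 *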
 * @param[in] argc Number of arguments
 * @param[in] argv Arguments
 */
int main(int argc, char **argv)
{
    return arm_compute::utils::run_example<InceptionV4Example>(argc, argv);
}