/*
 * Copyright (c) 2018-2021 Arm Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#include "arm_compute/graph.h"
#ifdef ARM_COMPUTE_CL
#include "arm_compute/runtime/CL/Utils.h"
#endif /* ARM_COMPUTE_CL */
#include "support/ToolchainSupport.h"
#include "utils/CommonGraphOptions.h"
#include "utils/GraphUtils.h"
#include "utils/Utils.h"
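// CommonGraphOptions supplies the command-line interface shared by the graph examples, while
// GraphUtils provides the accessors used below to stream .npy weights, input images and
// classification results in and out of the graph.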

using namespace arm_compute;
using namespace arm_compute::utils;
using namespace arm_compute::graph::frontend;
using namespace arm_compute::graph_utils;

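// Typical invocation (illustrative paths; run with --help for the full option list provided by
// CommonGraphOptions):
//   ./graph_inception_v4 --target=CL --data=/path/to/assets --image=/path/to/image.ppm --labels=/path/to/labels.txt
// Without --data the weight accessors fall back to dummy data (no real weights are loaded),
// which is still enough to exercise and benchmark the graph.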
/** Example demonstrating how to implement InceptionV4's network using the Compute Library's graph API */
class InceptionV4Example final : public Example
{
public:
    InceptionV4Example()
        : cmd_parser(), common_opts(cmd_parser), common_params(), graph(0, "InceptionV4")
    {
    }
    bool do_setup(int argc, char **argv) override
    {
        // Parse arguments
        cmd_parser.parse(argc, argv);
        cmd_parser.validate();

        // Consume common parameters
        common_params = consume_common_graph_parameters(common_opts);

        // Return when help menu is requested
        if(common_params.help)
        {
            cmd_parser.print_help(argv[0]);
            return false;
        }

        // Print parameter values
        std::cout << common_params << std::endl;

        // Get trainable parameters data path
        std::string data_path = common_params.data_path;

        // Create a preprocessor object
        std::unique_ptr<IPreprocessor> preprocessor = std::make_unique<TFPreproccessor>();
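        // Note: TFPreproccessor (spelling as declared in utils/GraphUtils.h) rescales the input
        // pixels to the [-1, 1] range expected by TF-slim Inception checkpoints (its default range).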

        // Create input descriptor
        const auto        operation_layout = common_params.data_layout;
        const TensorShape tensor_shape     = permute_shape(TensorShape(299U, 299U, 3U, common_params.batches), DataLayout::NCHW, operation_layout);
        TensorDescriptor  input_descriptor = TensorDescriptor(tensor_shape, common_params.data_type).set_layout(operation_layout);

        // Set weights trained layout
        const DataLayout weights_layout = DataLayout::NCHW;

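        // The .npy weights were exported in NCHW order, so the weight accessors are told about that
        // layout and the graph is expected to handle any permutation needed when running in NHWC.
        // The batch-norm gamma is supplied as a constant 1 via get_random_accessor(1.f, 1.f),
        // presumably because the TF-slim checkpoint does not store a scale term for these layers.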
        graph << common_params.target
              << common_params.fast_math_hint
              << InputLayer(input_descriptor, get_input_accessor(common_params, std::move(preprocessor), false))
              // Conv2d_1a_3x3
              << ConvolutionLayer(3U, 3U, 32U,
                                  get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_1a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
              .set_name("Conv2d_1a_3x3/Conv2D")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                         0.001f)
              .set_name("Conv2d_1a_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_1a_3x3/Relu")
              // Conv2d_2a_3x3
              << ConvolutionLayer(3U, 3U, 32U,
                                  get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_2a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_2a_3x3/Conv2D")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_2a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_2a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_2a_3x3_BatchNorm_beta.npy"),
                                         0.001f)
              .set_name("Conv2d_2a_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2a_3x3/Relu")
              // Conv2d_2b_3x3
              << ConvolutionLayer(3U, 3U, 64U,
                                  get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_2b_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 1))
              .set_name("Conv2d_2b_3x3/Conv2D")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_2b_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_2b_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Conv2d_2b_3x3_BatchNorm_beta.npy"),
                                         0.001f)
              .set_name("Conv2d_2b_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2b_3x3/Relu");
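
        // Everything below the stem is assembled from helper functions. Each helper builds a set of
        // parallel SubStreams that branch off the current end of the main stream and returns them
        // joined by a ConcatLayer, which operator<< then appends back onto the main graph.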
        graph << get_mixed_3a(data_path, weights_layout).set_name("Mixed_3a/concat");
        graph << get_mixed_4a(data_path, weights_layout).set_name("Mixed_4a/concat");
        graph << get_mixed_5a(data_path, weights_layout).set_name("Mixed_5a/concat");
        // 4 inception A blocks
        graph << get_inceptionA_block(data_path, weights_layout, "Mixed_5b").set_name("Mixed_5b/concat");
        graph << get_inceptionA_block(data_path, weights_layout, "Mixed_5c").set_name("Mixed_5c/concat");
        graph << get_inceptionA_block(data_path, weights_layout, "Mixed_5d").set_name("Mixed_5d/concat");
        graph << get_inceptionA_block(data_path, weights_layout, "Mixed_5e").set_name("Mixed_5e/concat");
        // reduction A block
        graph << get_reductionA_block(data_path, weights_layout).set_name("Mixed_6a/concat");
        // 7 inception B blocks
        graph << get_inceptionB_block(data_path, weights_layout, "Mixed_6b").set_name("Mixed_6b/concat");
        graph << get_inceptionB_block(data_path, weights_layout, "Mixed_6c").set_name("Mixed_6c/concat");
        graph << get_inceptionB_block(data_path, weights_layout, "Mixed_6d").set_name("Mixed_6d/concat");
        graph << get_inceptionB_block(data_path, weights_layout, "Mixed_6e").set_name("Mixed_6e/concat");
        graph << get_inceptionB_block(data_path, weights_layout, "Mixed_6f").set_name("Mixed_6f/concat");
        graph << get_inceptionB_block(data_path, weights_layout, "Mixed_6g").set_name("Mixed_6g/concat");
        graph << get_inceptionB_block(data_path, weights_layout, "Mixed_6h").set_name("Mixed_6h/concat");
        // reduction B block
        graph << get_reductionB_block(data_path, weights_layout).set_name("Mixed_7a/concat");
        // 3 inception C blocks
        graph << get_inceptionC_block(data_path, weights_layout, "Mixed_7b").set_name("Mixed_7b/concat");
        graph << get_inceptionC_block(data_path, weights_layout, "Mixed_7c").set_name("Mixed_7c/concat");
        graph << get_inceptionC_block(data_path, weights_layout, "Mixed_7d").set_name("Mixed_7d/concat");
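        // Classification head: a global average pool (a PoolingLayerInfo constructed with only a
        // pooling type and layout pools over the full spatial extent), a flatten, a fully connected
        // layer with 1001 outputs (the 1000 ImageNet classes plus the background class used by
        // TF-slim) and a softmax. get_output_accessor(common_params, 5) reports the top-5
        // predictions when a labels file is supplied.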
        graph << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, operation_layout)).set_name("Logits/AvgPool_1a/AvgPool")
              << FlattenLayer().set_name("Logits/Flatten")
              << FullyConnectedLayer(
                  1001U,
                  get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Logits_Logits_weights.npy", weights_layout),
                  get_weights_accessor(data_path, "/cnn_data/inceptionv4_model/Logits_Logits_biases.npy"))
              .set_name("Logits/MatMul")
              << SoftmaxLayer().set_name("Logits/Predictions")
              << OutputLayer(get_output_accessor(common_params, 5));

        // Finalize graph
        GraphConfig config;
        config.num_threads        = common_params.threads;
        config.use_tuner          = common_params.enable_tuner;
        config.tuner_mode         = common_params.tuner_mode;
        config.tuner_file         = common_params.tuner_file;
        config.mlgo_file          = common_params.mlgo_file;
        config.use_synthetic_type = arm_compute::is_data_type_quantized(common_params.data_type);
        config.synthetic_type     = common_params.data_type;

        // Load the precompiled kernels from a file into the kernel library so that they
        // will not need to be compiled again the next time they are required.
        if(common_params.enable_cl_cache)
        {
#ifdef ARM_COMPUTE_CL
            restore_program_cache_from_file();
#endif /* ARM_COMPUTE_CL */
        }

        graph.finalize(common_params.target, config);

        // Save the OpenCL kernels to a file
        if(common_params.enable_cl_cache)
        {
#ifdef ARM_COMPUTE_CL
            save_program_cache_to_file();
#endif /* ARM_COMPUTE_CL */
        }

        return true;
    }

    void do_run() override
    {
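        // Run a single inference; the input/output accessors attached in do_setup() feed the
        // preprocessed image in and report the predictions out.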
        graph.run();
    }

private:
    CommandLineParser  cmd_parser;
    CommonGraphOptions common_opts;
    CommonGraphParams  common_params;
    Stream             graph;

private:
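    /** Mixed_3a: concatenates a stride-2 3x3 max-pool branch with a stride-2 3x3 convolution
     *  branch (96 filters), halving the spatial resolution of the stem output. */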
    ConcatLayer get_mixed_3a(const std::string &data_path, DataLayout weights_layout)
    {
        std::string total_path = "/cnn_data/inceptionv4_model/Mixed_3a_";

        SubStream i_a(graph);
        i_a << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL),
                                             true))
            .set_name("Mixed_3a/Branch_0/MaxPool_0a_3x3/MaxPool");

        SubStream i_b(graph);
        i_b << ConvolutionLayer(3U, 3U, 96U,
                                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_3a/Branch_1/Conv2d_0a_3x3/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_3x3_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name("Mixed_3a/Branch_1/Conv2d_0a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_3a/Branch_1/Conv2d_0a_3x3/Relu");

        return ConcatLayer(std::move(i_a), std::move(i_b));
    }

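    /** Mixed_4a: two branches, both producing 96 feature maps: a 1x1 -> 3x3 convolution stack and a
     *  1x1 -> 1x7 -> 7x1 -> 3x3 factorised stack; their outputs are concatenated. */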
    ConcatLayer get_mixed_4a(const std::string &data_path, DataLayout weights_layout)
    {
        std::string total_path = "/cnn_data/inceptionv4_model/Mixed_4a_";

        SubStream i_a(graph);
        i_a << ConvolutionLayer(1U, 1U, 64U,
                                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_4a/Branch_0/Conv2d_0a_1x1/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name("Mixed_4a/Branch_0/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_4a/Branch_0/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 96U,
                                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_4a/Branch_0/Conv2d_1a_3x3/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name("Mixed_4a/Branch_0/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_4a/Branch_0/Conv2d_1a_3x3/Relu");

        SubStream i_b(graph);
        i_b << ConvolutionLayer(1U, 1U, 64U,
                                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_4a/Branch_1/Conv2d_0a_1x1/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name("Mixed_4a/Branch_1/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_4a/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(7U, 1U, 64U,
                                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 3, 0))
            .set_name("Mixed_4a/Branch_1/Conv2d_0b_1x7/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name("Mixed_4a/Branch_1/Conv2d_0b_1x7/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_4a/Branch_1/Conv2d_0b_1x7/Relu")
            << ConvolutionLayer(1U, 7U, 64U,
                                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 3))
            .set_name("Mixed_4a/Branch_1/Conv2d_0c_7x1/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name("Mixed_4a/Branch_1/Conv2d_0c_7x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_4a/Branch_1/Conv2d_0c_7x1/Relu")
            << ConvolutionLayer(3U, 3U, 96U,
                                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_4a/Branch_1/Conv2d_1a_3x3/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name("Mixed_4a/Branch_1/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_4a/Branch_1/Conv2d_1a_3x3/Relu");

        return ConcatLayer(std::move(i_a), std::move(i_b));
    }

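    /** Mixed_5a: concatenates a stride-2 3x3 convolution branch (192 filters) with a stride-2 3x3
     *  max-pool branch; the last reduction before the Inception-A blocks. */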
    ConcatLayer get_mixed_5a(const std::string &data_path, DataLayout weights_layout)
    {
        std::string total_path = "/cnn_data/inceptionv4_model/Mixed_5a_";

        SubStream i_a(graph);
        i_a << ConvolutionLayer(3U, 3U, 192U,
                                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_5a/Branch_0/Conv2d_1a_3x3/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name("Mixed_5a/Branch_0/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5a/Branch_0/Conv2d_1a_3x3/Relu");

        SubStream i_b(graph);
        i_b << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL),
                                             true))
            .set_name("Mixed_5a/Branch_1/MaxPool_1a_3x3/MaxPool");

        return ConcatLayer(std::move(i_a), std::move(i_b));
    }

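    /** Inception-A block (Mixed_5b..Mixed_5e): four branches producing 96 feature maps each
     *  (1x1; 1x1 -> 3x3; 1x1 -> 3x3 -> 3x3; 3x3 avg-pool -> 1x1), concatenated into 384 channels.
     *  param_path selects which block's weights to load and prefixes the layer names. */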
    ConcatLayer get_inceptionA_block(const std::string &data_path, DataLayout weights_layout, std::string &&param_path)
    {
        std::string total_path = "/cnn_data/inceptionv4_model/" + param_path + "_";

        SubStream i_a(graph);
        i_a << ConvolutionLayer(1U, 1U, 96U,
                                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
            .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu");

        SubStream i_b(graph);
        i_b << ConvolutionLayer(1U, 1U, 64U,
                                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
            .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 96U,
                                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 1))
            .set_name(param_path + "/Branch_1/Conv2d_0b_3x3/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name(param_path + "/Branch_1/Conv2d_0b_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0b_3x3/Relu");

        SubStream i_c(graph);
        i_c << ConvolutionLayer(1U, 1U, 64U,
                                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
            .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 96U,
                                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 1))
            .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0b_3x3/Relu")
            << ConvolutionLayer(3U, 3U, 96U,
                                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 1))
            .set_name(param_path + "/Branch_2/Conv2d_0c_3x3/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name(param_path + "/Branch_2/Conv2d_0c_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0c_3x3/Relu");

        SubStream i_d(graph);
        i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, common_params.data_layout, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL),
                                             true))
            .set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool")
            << ConvolutionLayer(1U, 1U, 96U,
                                get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
            .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu");

        return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
    }

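    /** Reduction-A block (Mixed_6a): halves the spatial resolution via a stride-2 3x3 convolution
     *  (384 filters), a 1x1 -> 3x3 -> stride-2 3x3 stack (256 filters) and a stride-2 3x3 max pool,
     *  all concatenated. */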
    ConcatLayer get_reductionA_block(const std::string &data_path, DataLayout weights_layout)
    {
        std::string total_path = "/cnn_data/inceptionv4_model/Mixed_6a_";

        SubStream i_a(graph);
        i_a << ConvolutionLayer(3U, 3U, 384U,
                                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/Relu");

        SubStream i_b(graph);
        i_b << ConvolutionLayer(1U, 1U, 192U,
                                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 224U,
                                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 1))
            .set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/Relu")
            << ConvolutionLayer(3U, 3U, 256U,
                                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/Conv2D")
            << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       0.001f)
            .set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/Relu");

        SubStream i_c(graph);
        i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL),
                                             true))
            .set_name("Mixed_6a/Branch_2/MaxPool_1a_3x3/MaxPool");

        return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c));
    }

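    /** Inception-B block (Mixed_6b..Mixed_6h): four branches built on factorised 1x7/7x1 convolutions
     *  (1x1; 1x1 -> 1x7 -> 7x1; 1x1 -> 7x1 -> 1x7 -> 7x1 -> 1x7; 3x3 avg-pool -> 1x1), concatenated.
     *  param_path selects which block's weights to load and prefixes the layer names. */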
get_inceptionB_block(const std::string & data_path,DataLayout weights_layout,std::string && param_path)479*c217d954SCole Faust     ConcatLayer get_inceptionB_block(const std::string &data_path, DataLayout weights_layout, std::string &&param_path)
480*c217d954SCole Faust     {
481*c217d954SCole Faust         std::string total_path = "/cnn_data/inceptionv4_model/" + param_path + "_";
482*c217d954SCole Faust 
483*c217d954SCole Faust         SubStream i_a(graph);
484*c217d954SCole Faust         i_a << ConvolutionLayer(1U, 1U, 384U,
485*c217d954SCole Faust                                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
486*c217d954SCole Faust                                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
487*c217d954SCole Faust             .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Conv2D")
488*c217d954SCole Faust             << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
489*c217d954SCole Faust                                        get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
490*c217d954SCole Faust                                        get_random_accessor(1.f, 1.f),
491*c217d954SCole Faust                                        get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
492*c217d954SCole Faust                                        0.001f)
493*c217d954SCole Faust             .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm")
494*c217d954SCole Faust             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu");
495*c217d954SCole Faust 
496*c217d954SCole Faust         SubStream i_b(graph);
497*c217d954SCole Faust         i_b << ConvolutionLayer(1U, 1U, 192U,
498*c217d954SCole Faust                                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
499*c217d954SCole Faust                                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
500*c217d954SCole Faust             .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Conv2D")
501*c217d954SCole Faust             << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
502*c217d954SCole Faust                                        get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
503*c217d954SCole Faust                                        get_random_accessor(1.f, 1.f),
504*c217d954SCole Faust                                        get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
505*c217d954SCole Faust                                        0.001f)
506*c217d954SCole Faust             .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm")
507*c217d954SCole Faust             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu")
508*c217d954SCole Faust             << ConvolutionLayer(7U, 1U, 224U,
509*c217d954SCole Faust                                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_weights.npy", weights_layout),
510*c217d954SCole Faust                                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 3, 0))
511*c217d954SCole Faust             .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/Conv2D")
512*c217d954SCole Faust             << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
513*c217d954SCole Faust                                        get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
514*c217d954SCole Faust                                        get_random_accessor(1.f, 1.f),
515*c217d954SCole Faust                                        get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"),
516*c217d954SCole Faust                                        0.001f)
517*c217d954SCole Faust             .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/BatchNorm")
518*c217d954SCole Faust             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0b_1x7/Relu")
519*c217d954SCole Faust             << ConvolutionLayer(1U, 7U, 256U,
520*c217d954SCole Faust                                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_weights.npy", weights_layout),
521*c217d954SCole Faust                                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 3))
522*c217d954SCole Faust             .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/Conv2D")
523*c217d954SCole Faust             << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
524*c217d954SCole Faust                                        get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
525*c217d954SCole Faust                                        get_random_accessor(1.f, 1.f),
526*c217d954SCole Faust                                        get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"),
527*c217d954SCole Faust                                        0.001f)
528*c217d954SCole Faust             .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/BatchNorm")
529*c217d954SCole Faust             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0c_7x1/Relu");
530*c217d954SCole Faust 
531*c217d954SCole Faust         SubStream i_c(graph);
532*c217d954SCole Faust         i_c << ConvolutionLayer(1U, 1U, 192U,
533*c217d954SCole Faust                                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
534*c217d954SCole Faust                                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
535*c217d954SCole Faust             .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Conv2D")
536*c217d954SCole Faust             << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
537*c217d954SCole Faust                                        get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
538*c217d954SCole Faust                                        get_random_accessor(1.f, 1.f),
539*c217d954SCole Faust                                        get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
540*c217d954SCole Faust                                        0.001f)
541*c217d954SCole Faust             .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/BatchNorm")
542*c217d954SCole Faust             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Relu")
543*c217d954SCole Faust             << ConvolutionLayer(1U, 7U, 192U,
544*c217d954SCole Faust                                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_weights.npy", weights_layout),
545*c217d954SCole Faust                                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 3))
546*c217d954SCole Faust             .set_name(param_path + "/Branch_2/Conv2d_0b_7x1/Conv2D")
547*c217d954SCole Faust             << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_moving_mean.npy"),
548*c217d954SCole Faust                                        get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_moving_variance.npy"),
549*c217d954SCole Faust                                        get_random_accessor(1.f, 1.f),
550*c217d954SCole Faust                                        get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_beta.npy"),
551*c217d954SCole Faust                                        0.001f)
552*c217d954SCole Faust             .set_name(param_path + "/Branch_2/Conv2d_0b_7x1/BatchNorm")
553*c217d954SCole Faust             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0b_7x1/Relu")
554*c217d954SCole Faust             << ConvolutionLayer(7U, 1U, 224U,
555*c217d954SCole Faust                                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_weights.npy", weights_layout),
556*c217d954SCole Faust                                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 3, 0))
557*c217d954SCole Faust             .set_name(param_path + "/Branch_2/Conv2d_0c_1x7/Conv2D")
558*c217d954SCole Faust             << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_moving_mean.npy"),
559*c217d954SCole Faust                                        get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_moving_variance.npy"),
560*c217d954SCole Faust                                        get_random_accessor(1.f, 1.f),
561*c217d954SCole Faust                                        get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_beta.npy"),
562*c217d954SCole Faust                                        0.001f)
563*c217d954SCole Faust             .set_name(param_path + "/Branch_2/Conv2d_0c_1x7/BatchNorm")
564*c217d954SCole Faust             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0c_1x7/Relu")
565*c217d954SCole Faust             << ConvolutionLayer(1U, 7U, 224U,
566*c217d954SCole Faust                                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_weights.npy", weights_layout),
567*c217d954SCole Faust                                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 3))
568*c217d954SCole Faust             .set_name(param_path + "/Branch_2/Conv2d_0d_7x1/Conv2D")
569*c217d954SCole Faust             << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_moving_mean.npy"),
570*c217d954SCole Faust                                        get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_moving_variance.npy"),
571*c217d954SCole Faust                                        get_random_accessor(1.f, 1.f),
572*c217d954SCole Faust                                        get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_beta.npy"),
573*c217d954SCole Faust                                        0.001f)
574*c217d954SCole Faust             .set_name(param_path + "/Branch_2/Conv2d_0d_7x1/BatchNorm")
575*c217d954SCole Faust             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0d_7x1/Relu")
576*c217d954SCole Faust             << ConvolutionLayer(7U, 1U, 256U,
577*c217d954SCole Faust                                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_weights.npy", weights_layout),
578*c217d954SCole Faust                                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 3, 0))
579*c217d954SCole Faust             .set_name(param_path + "/Branch_2/Conv2d_0e_1x7/Conv2D")
580*c217d954SCole Faust             << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_moving_mean.npy"),
581*c217d954SCole Faust                                        get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_moving_variance.npy"),
582*c217d954SCole Faust                                        get_random_accessor(1.f, 1.f),
583*c217d954SCole Faust                                        get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_beta.npy"),
584*c217d954SCole Faust                                        0.001f)
585*c217d954SCole Faust             .set_name(param_path + "/Branch_2/Conv2d_0e_1x7/BatchNorm")
586*c217d954SCole Faust             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0e_1x7/Relu");
587*c217d954SCole Faust 
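        // Branch 3: 3x3 average pooling followed by a 1x1 convolution (128 output feature maps), batch normalization and ReLU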
588*c217d954SCole Faust         SubStream i_d(graph);
589*c217d954SCole Faust         i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, common_params.data_layout, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL),
590*c217d954SCole Faust                                              true))
591*c217d954SCole Faust             .set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool")
592*c217d954SCole Faust             << ConvolutionLayer(1U, 1U, 128U,
593*c217d954SCole Faust                                 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout),
594*c217d954SCole Faust                                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
595*c217d954SCole Faust             .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Conv2D")
596*c217d954SCole Faust             << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
597*c217d954SCole Faust                                        get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
598*c217d954SCole Faust                                        get_random_accessor(1.f, 1.f),
599*c217d954SCole Faust                                        get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
600*c217d954SCole Faust                                        0.001f)
601*c217d954SCole Faust             .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm")
602*c217d954SCole Faust             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu");
603*c217d954SCole Faust 
604*c217d954SCole Faust         return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
605*c217d954SCole Faust     }
606*c217d954SCole Faust 
607*c217d954SCole Faust     ConcatLayer get_reductionB_block(const std::string &data_path, DataLayout weights_layout)
608*c217d954SCole Faust     {
609*c217d954SCole Faust         std::string total_path = "/cnn_data/inceptionv4_model/Mixed_7a_";
610*c217d954SCole Faust 
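        // Branch 0: 1x1 convolution (192) followed by a stride-2 3x3 convolution (192), each with batch normalization and ReLU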
611*c217d954SCole Faust         SubStream i_a(graph);
612*c217d954SCole Faust         i_a << ConvolutionLayer(1U, 1U, 192U,
613*c217d954SCole Faust                                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
614*c217d954SCole Faust                                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
615*c217d954SCole Faust             .set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/Conv2D")
616*c217d954SCole Faust             << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
617*c217d954SCole Faust                                        get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
618*c217d954SCole Faust                                        get_random_accessor(1.f, 1.f),
619*c217d954SCole Faust                                        get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
620*c217d954SCole Faust                                        0.001f)
621*c217d954SCole Faust             .set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/BatchNorm")
622*c217d954SCole Faust             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/Relu")
623*c217d954SCole Faust             << ConvolutionLayer(3U, 3U, 192U,
624*c217d954SCole Faust                                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout),
625*c217d954SCole Faust                                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
626*c217d954SCole Faust             .set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/Conv2D")
627*c217d954SCole Faust             << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
628*c217d954SCole Faust                                        get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
629*c217d954SCole Faust                                        get_random_accessor(1.f, 1.f),
630*c217d954SCole Faust                                        get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"),
631*c217d954SCole Faust                                        0.001f)
632*c217d954SCole Faust             .set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/BatchNorm")
633*c217d954SCole Faust             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/Relu");
634*c217d954SCole Faust 
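        // Branch 1: 1x1 (256) -> 1x7 (256) -> 7x1 (320) -> stride-2 3x3 (320) convolutions, each with batch normalization and ReLU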
635*c217d954SCole Faust         SubStream i_b(graph);
636*c217d954SCole Faust         i_b << ConvolutionLayer(1U, 1U, 256U,
637*c217d954SCole Faust                                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
638*c217d954SCole Faust                                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
639*c217d954SCole Faust             .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/Conv2D")
640*c217d954SCole Faust             << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
641*c217d954SCole Faust                                        get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
642*c217d954SCole Faust                                        get_random_accessor(1.f, 1.f),
643*c217d954SCole Faust                                        get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
644*c217d954SCole Faust                                        0.001f)
645*c217d954SCole Faust             .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/BatchNorm")
646*c217d954SCole Faust             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/Relu")
647*c217d954SCole Faust             << ConvolutionLayer(7U, 1U, 256U,
648*c217d954SCole Faust                                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_weights.npy", weights_layout),
649*c217d954SCole Faust                                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 3, 0))
650*c217d954SCole Faust             .set_name("Mixed_7a/Branch_1/Conv2d_0b_1x7/Conv2D")
651*c217d954SCole Faust             << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
652*c217d954SCole Faust                                        get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
653*c217d954SCole Faust                                        get_random_accessor(1.f, 1.f),
654*c217d954SCole Faust                                        get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"),
655*c217d954SCole Faust                                        0.001f)
656*c217d954SCole Faust             .set_name("Mixed_7a/Branch_1/Conv2d_0b_1x7/BatchNorm")
657*c217d954SCole Faust             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_1/Conv2d_0b_1x7/Relu")
658*c217d954SCole Faust             << ConvolutionLayer(1U, 7U, 320U,
659*c217d954SCole Faust                                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_weights.npy", weights_layout),
660*c217d954SCole Faust                                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 3))
661*c217d954SCole Faust             .set_name("Mixed_7a/Branch_1/Conv2d_0c_7x1/Conv2D")
662*c217d954SCole Faust             << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
663*c217d954SCole Faust                                        get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
664*c217d954SCole Faust                                        get_random_accessor(1.f, 1.f),
665*c217d954SCole Faust                                        get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"),
666*c217d954SCole Faust                                        0.001f)
667*c217d954SCole Faust             .set_name("Mixed_7a/Branch_1/Conv2d_0c_7x1/BatchNorm")
668*c217d954SCole Faust             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_1/Conv2d_0c_7x1/Relu")
669*c217d954SCole Faust             << ConvolutionLayer(3U, 3U, 320U,
670*c217d954SCole Faust                                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout),
671*c217d954SCole Faust                                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
672*c217d954SCole Faust             .set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/Conv2D")
673*c217d954SCole Faust             << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
674*c217d954SCole Faust                                        get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
675*c217d954SCole Faust                                        get_random_accessor(1.f, 1.f),
676*c217d954SCole Faust                                        get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"),
677*c217d954SCole Faust                                        0.001f)
678*c217d954SCole Faust             .set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/BatchNorm")
679*c217d954SCole Faust             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/Relu");
680*c217d954SCole Faust 
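        // Branch 2: stride-2 3x3 max pooling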
681*c217d954SCole Faust         SubStream i_c(graph);
682*c217d954SCole Faust         i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL),
683*c217d954SCole Faust                                              true))
684*c217d954SCole Faust             .set_name("Mixed_7a/Branch_2/MaxPool_1a_3x3/MaxPool");
685*c217d954SCole Faust 
686*c217d954SCole Faust         return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c));
687*c217d954SCole Faust     }
688*c217d954SCole Faust 
689*c217d954SCole Faust     ConcatLayer get_inceptionC_block(const std::string &data_path, DataLayout weights_layout, std::string &&param_path)
690*c217d954SCole Faust     {
691*c217d954SCole Faust         std::string total_path = "/cnn_data/inceptionv4_model/" + param_path + "_";
692*c217d954SCole Faust 
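        // Branch 0: single 1x1 convolution (256 output feature maps) with batch normalization and ReLU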
693*c217d954SCole Faust         SubStream i_a(graph);
694*c217d954SCole Faust         i_a << ConvolutionLayer(1U, 1U, 256U,
695*c217d954SCole Faust                                 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
696*c217d954SCole Faust                                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
697*c217d954SCole Faust             .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Conv2D")
698*c217d954SCole Faust             << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
699*c217d954SCole Faust                                        get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
700*c217d954SCole Faust                                        get_random_accessor(1.f, 1.f),
701*c217d954SCole Faust                                        get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
702*c217d954SCole Faust                                        0.001f)
703*c217d954SCole Faust             .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm")
704*c217d954SCole Faust             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu");
705*c217d954SCole Faust 
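        // Branch 1: 1x1 convolution (384) whose output then splits into parallel 1x3 and 3x1 convolutions (i_b1 and i_b2 below)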
706*c217d954SCole Faust         SubStream i_b(graph);
707*c217d954SCole Faust         i_b << ConvolutionLayer(
708*c217d954SCole Faust                 1U, 1U, 384U,
709*c217d954SCole Faust                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
710*c217d954SCole Faust                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
711*c217d954SCole Faust                 PadStrideInfo(1, 1, 0, 0))
712*c217d954SCole Faust             .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Conv2D")
713*c217d954SCole Faust             << BatchNormalizationLayer(
714*c217d954SCole Faust                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
715*c217d954SCole Faust                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
716*c217d954SCole Faust                 get_random_accessor(1.f, 1.f),
717*c217d954SCole Faust                 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
718*c217d954SCole Faust                 0.001f)
719*c217d954SCole Faust             .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm")
720*c217d954SCole Faust             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu");
721*c217d954SCole Faust 
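        // Branch 1, first split: 1x3 convolution (256)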
722*c217d954SCole Faust         SubStream i_b1(i_b);
723*c217d954SCole Faust         i_b1 << ConvolutionLayer(
724*c217d954SCole Faust                  3U, 1U, 256U,
725*c217d954SCole Faust                  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_weights.npy", weights_layout),
726*c217d954SCole Faust                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
727*c217d954SCole Faust                  PadStrideInfo(1, 1, 1, 0))
728*c217d954SCole Faust              .set_name(param_path + "/Branch_1/Conv2d_0b_1x3/Conv2D")
729*c217d954SCole Faust              << BatchNormalizationLayer(
730*c217d954SCole Faust                  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_mean.npy"),
731*c217d954SCole Faust                  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_variance.npy"),
732*c217d954SCole Faust                  get_random_accessor(1.f, 1.f),
733*c217d954SCole Faust                  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_beta.npy"),
734*c217d954SCole Faust                  0.001f)
735*c217d954SCole Faust              .set_name(param_path + "/Branch_1/Conv2d_0b_1x3/BatchNorm")
736*c217d954SCole Faust              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0b_1x3/Relu");
737*c217d954SCole Faust 
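        // Branch 1, second split: 3x1 convolution (256)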
738*c217d954SCole Faust         SubStream i_b2(i_b);
739*c217d954SCole Faust         i_b2 << ConvolutionLayer(
740*c217d954SCole Faust                  1U, 3U, 256U,
741*c217d954SCole Faust                  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_3x1_weights.npy", weights_layout),
742*c217d954SCole Faust                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
743*c217d954SCole Faust                  PadStrideInfo(1, 1, 0, 1))
744*c217d954SCole Faust              .set_name(param_path + "/Branch_1/Conv2d_0c_3x1/Conv2D")
745*c217d954SCole Faust              << BatchNormalizationLayer(
746*c217d954SCole Faust                  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_moving_mean.npy"),
747*c217d954SCole Faust                  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_moving_variance.npy"),
748*c217d954SCole Faust                  get_random_accessor(1.f, 1.f),
749*c217d954SCole Faust                  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_beta.npy"),
750*c217d954SCole Faust                  0.001f)
751*c217d954SCole Faust              .set_name(param_path + "/Branch_1/Conv2d_0c_3x1/BatchNorm")
752*c217d954SCole Faust              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0c_3x1/Relu");
753*c217d954SCole Faust 
754*c217d954SCole Faust         // Merge b1 and b2
755*c217d954SCole Faust         i_b << ConcatLayer(std::move(i_b1), std::move(i_b2)).set_name(param_path + "/Branch_1/concat");
756*c217d954SCole Faust 
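        // Branch 2: 1x1 (384) -> 3x1 (448) -> 1x3 (512) convolutions, then a split into parallel 1x3 and 3x1 convolutions (i_c1 and i_c2 below)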
757*c217d954SCole Faust         SubStream i_c(graph);
758*c217d954SCole Faust         i_c << ConvolutionLayer(
759*c217d954SCole Faust                 1U, 1U, 384U,
760*c217d954SCole Faust                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
761*c217d954SCole Faust                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
762*c217d954SCole Faust                 PadStrideInfo(1, 1, 0, 0))
763*c217d954SCole Faust             .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Conv2D")
764*c217d954SCole Faust             << BatchNormalizationLayer(
765*c217d954SCole Faust                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
766*c217d954SCole Faust                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
767*c217d954SCole Faust                 get_random_accessor(1.f, 1.f),
768*c217d954SCole Faust                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
769*c217d954SCole Faust                 0.001f)
770*c217d954SCole Faust             .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/BatchNorm")
771*c217d954SCole Faust             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Relu")
772*c217d954SCole Faust             << ConvolutionLayer(
773*c217d954SCole Faust                 1U, 3U, 448U,
774*c217d954SCole Faust                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x1_weights.npy", weights_layout),
775*c217d954SCole Faust                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
776*c217d954SCole Faust                 PadStrideInfo(1, 1, 0, 1))
777*c217d954SCole Faust             .set_name(param_path + "/Branch_2/Conv2d_0b_3x1/Conv2D")
778*c217d954SCole Faust             << BatchNormalizationLayer(
779*c217d954SCole Faust                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x1_BatchNorm_moving_mean.npy"),
780*c217d954SCole Faust                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x1_BatchNorm_moving_variance.npy"),
781*c217d954SCole Faust                 get_random_accessor(1.f, 1.f),
782*c217d954SCole Faust                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x1_BatchNorm_beta.npy"),
783*c217d954SCole Faust                 0.001f)
784*c217d954SCole Faust             .set_name(param_path + "/Branch_2/Conv2d_0b_3x1/BatchNorm")
785*c217d954SCole Faust             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0b_3x1/Relu")
786*c217d954SCole Faust             << ConvolutionLayer(
787*c217d954SCole Faust                 3U, 1U, 512U,
788*c217d954SCole Faust                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_weights.npy", weights_layout),
789*c217d954SCole Faust                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
790*c217d954SCole Faust                 PadStrideInfo(1, 1, 1, 0))
791*c217d954SCole Faust             .set_name(param_path + "/Branch_2/Conv2d_0c_1x3/Conv2D")
792*c217d954SCole Faust             << BatchNormalizationLayer(
793*c217d954SCole Faust                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_moving_mean.npy"),
794*c217d954SCole Faust                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_moving_variance.npy"),
795*c217d954SCole Faust                 get_random_accessor(1.f, 1.f),
796*c217d954SCole Faust                 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_beta.npy"),
797*c217d954SCole Faust                 0.001f)
798*c217d954SCole Faust             .set_name(param_path + "/Branch_2/Conv2d_0c_1x3/BatchNorm")
799*c217d954SCole Faust             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0c_1x3/Relu");
800*c217d954SCole Faust 
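        // Branch 2, first split: 1x3 convolution (256)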
801*c217d954SCole Faust         SubStream i_c1(i_c);
802*c217d954SCole Faust         i_c1 << ConvolutionLayer(
803*c217d954SCole Faust                  3U, 1U, 256U,
804*c217d954SCole Faust                  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_1x3_weights.npy", weights_layout),
805*c217d954SCole Faust                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
806*c217d954SCole Faust                  PadStrideInfo(1, 1, 1, 0))
807*c217d954SCole Faust              .set_name(param_path + "/Branch_2/Conv2d_0d_1x3/Conv2D")
808*c217d954SCole Faust              << BatchNormalizationLayer(
809*c217d954SCole Faust                  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_1x3_BatchNorm_moving_mean.npy"),
810*c217d954SCole Faust                  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_1x3_BatchNorm_moving_variance.npy"),
811*c217d954SCole Faust                  get_random_accessor(1.f, 1.f),
812*c217d954SCole Faust                  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_1x3_BatchNorm_beta.npy"),
813*c217d954SCole Faust                  0.001f)
814*c217d954SCole Faust              .set_name(param_path + "/Branch_2/Conv2d_0d_1x3/BatchNorm")
815*c217d954SCole Faust              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0d_1x3/Relu");
816*c217d954SCole Faust 
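        // Branch 2, second split: 3x1 convolution (256)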
817*c217d954SCole Faust         SubStream i_c2(i_c);
818*c217d954SCole Faust         i_c2 << ConvolutionLayer(
819*c217d954SCole Faust                  1U, 3U, 256U,
820*c217d954SCole Faust                  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_3x1_weights.npy", weights_layout),
821*c217d954SCole Faust                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
822*c217d954SCole Faust                  PadStrideInfo(1, 1, 0, 1))
823*c217d954SCole Faust              .set_name(param_path + "/Branch_2/Conv2d_0e_3x1/Conv2D")
824*c217d954SCole Faust              << BatchNormalizationLayer(
825*c217d954SCole Faust                  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_3x1_BatchNorm_moving_mean.npy"),
826*c217d954SCole Faust                  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_3x1_BatchNorm_moving_variance.npy"),
827*c217d954SCole Faust                  get_random_accessor(1.f, 1.f),
828*c217d954SCole Faust                  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_3x1_BatchNorm_beta.npy"),
829*c217d954SCole Faust                  0.001f)
830*c217d954SCole Faust              .set_name(param_path + "/Branch_2/Conv2d_0e_3x1/BatchNorm")
831*c217d954SCole Faust              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0e_3x1/Relu");
832*c217d954SCole Faust 
833*c217d954SCole Faust         // Merge i_c1 and i_c2
834*c217d954SCole Faust         i_c << ConcatLayer(std::move(i_c1), std::move(i_c2)).set_name(param_path + "/Branch_2/concat");
835*c217d954SCole Faust 
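        // Branch 3: 3x3 average pooling followed by a 1x1 convolution (256), batch normalization and ReLU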
836*c217d954SCole Faust         SubStream i_d(graph);
837*c217d954SCole Faust         i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, common_params.data_layout, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL),
838*c217d954SCole Faust                                              true))
839*c217d954SCole Faust             .set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool")
840*c217d954SCole Faust             << ConvolutionLayer(1U, 1U, 256U,
841*c217d954SCole Faust                                 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout),
842*c217d954SCole Faust                                 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
843*c217d954SCole Faust             .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Conv2D")
844*c217d954SCole Faust             << BatchNormalizationLayer(get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
845*c217d954SCole Faust                                        get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
846*c217d954SCole Faust                                        get_random_accessor(1.f, 1.f),
847*c217d954SCole Faust                                        get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
848*c217d954SCole Faust                                        0.001f)
849*c217d954SCole Faust             .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm")
850*c217d954SCole Faust             << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu");
851*c217d954SCole Faust 
852*c217d954SCole Faust         return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
853*c217d954SCole Faust     }
854*c217d954SCole Faust };
855*c217d954SCole Faust 
856*c217d954SCole Faust /** Main program for Inception V4
857*c217d954SCole Faust  *
858*c217d954SCole Faust  * Model is based on:
859*c217d954SCole Faust  *      https://arxiv.org/abs/1602.07261
860*c217d954SCole Faust  *      "Inception-v4, Inception-ResNet and the Impact of Residual Connections on Learning"
861*c217d954SCole Faust  *      Christian Szegedy, Sergey Ioffe, Vincent Vanhoucke, Alex Alemi
862*c217d954SCole Faust  *
863*c217d954SCole Faust  * Provenance: download.tensorflow.org/models/inception_v4_2016_09_09.tar.gz
864*c217d954SCole Faust  *
865*c217d954SCole Faust  * @note To list all the possible arguments, execute the binary with the --help option
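 *
 * @note Example invocation (a sketch only; the paths and option values below are placeholders and the
 *       exact set of options depends on your build, so run the binary with --help to confirm):
 *       ./graph_inception_v4 --target=NEON --threads=4 --data=/path/to/cnn_data --image=/path/to/image.ppm --labels=/path/to/labels.txt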
866*c217d954SCole Faust  *
867*c217d954SCole Faust  * @param[in] argc Number of arguments
868*c217d954SCole Faust  * @param[in] argv Arguments
869*c217d954SCole Faust  */
870*c217d954SCole Faust int main(int argc, char **argv)
871*c217d954SCole Faust {
872*c217d954SCole Faust     return arm_compute::utils::run_example<InceptionV4Example>(argc, argv);
873*c217d954SCole Faust }