1 /*
2 * Copyright (c) 2017-2021 Arm Limited.
3 *
4 * SPDX-License-Identifier: MIT
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a copy
7 * of this software and associated documentation files (the "Software"), to
8 * deal in the Software without restriction, including without limitation the
9 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
10 * sell copies of the Software, and to permit persons to whom the Software is
11 * furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included in all
14 * copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
19 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
22 * SOFTWARE.
23 */
24 #include "arm_compute/graph.h"
25 #include "arm_compute/core/Types.h"
26 #include "arm_compute/core/Utils.h"
27 #include "support/ToolchainSupport.h"
28 #include "utils/CommonGraphOptions.h"
29 #include "utils/GraphUtils.h"
30 #include "utils/Utils.h"
31
32 using namespace arm_compute::utils;
33 using namespace arm_compute::graph::frontend;
34 using namespace arm_compute::graph_utils;
35
36 /** Example demonstrating how to implement InceptionV3's network using the Compute Library's graph API */
37 class InceptionV3Example : public Example
38 {
39 public:
    /** Default constructor.
     *
     * Registers the common graph options with the command line parser and
     * creates the graph stream (id 0) named "InceptionV3".
     */
    InceptionV3Example()
        : cmd_parser(), common_opts(cmd_parser), common_params(), graph(0, "InceptionV3")
    {
    }
    /** Parse the command line, construct the InceptionV3 network and finalize the graph.
     *
     * @param[in] argc Number of command line arguments
     * @param[in] argv Command line arguments
     *
     * @return False if only the help menu was requested, true once the graph has been finalized
     */
    bool do_setup(int argc, char **argv) override
    {
        // Parse arguments
        cmd_parser.parse(argc, argv);
        cmd_parser.validate();

        // Consume common parameters
        common_params = consume_common_graph_parameters(common_opts);

        // Return when help menu is requested
        if(common_params.help)
        {
            cmd_parser.print_help(argv[0]);
            return false;
        }

        // Print parameter values
        std::cout << common_params << std::endl;

        // Get trainable parameters data path
        std::string data_path = common_params.data_path;

        // Create a preprocessor object (TF-style preprocessing; class name spelling comes from GraphUtils)
        std::unique_ptr<IPreprocessor> preprocessor = std::make_unique<TFPreproccessor>();

        // Create input descriptor: 299x299x3 image, permuted from NCHW to the requested operation layout
        const auto        operation_layout = common_params.data_layout;
        const TensorShape tensor_shape     = permute_shape(TensorShape(299U, 299U, 3U, common_params.batches), DataLayout::NCHW, operation_layout);
        TensorDescriptor  input_descriptor = TensorDescriptor(tensor_shape, common_params.data_type).set_layout(operation_layout);

        // Set weights trained layout (the .npy files were exported in NCHW)
        const DataLayout weights_layout = DataLayout::NCHW;

        // Network stem: Conv2d_1a..Conv2d_4a with BatchNorm+ReLU after each convolution,
        // separated by two max-pooling stages. Convolutions have no bias (nullptr accessor);
        // the bias is folded into the BatchNorm beta term.
        graph << common_params.target
              << common_params.fast_math_hint
              << InputLayer(input_descriptor, get_input_accessor(common_params, std::move(preprocessor), false))
              << ConvolutionLayer(3U, 3U, 32U,
                                  get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
              .set_name("Conv2d_1a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                         nullptr, get_weights_accessor(data_path,
                                                                       "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                         0.001f)
              .set_name("Conv2d_1a_3x3/BatchNorm/batchnorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_1a_3x3/Relu")
              << ConvolutionLayer(3U, 3U, 32U,
                                  get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_2a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_BatchNorm_moving_variance.npy"),
                                         nullptr, get_weights_accessor(data_path,
                                                                       "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_BatchNorm_beta.npy"),
                                         0.001f)
              .set_name("Conv2d_2a_3x3/BatchNorm/batchnorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2a_3x3/Relu")

              << ConvolutionLayer(3U, 3U, 64U,
                                  get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 1))
              .set_name("Conv2d_2b_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_BatchNorm_moving_variance.npy"),
                                         nullptr, get_weights_accessor(data_path,
                                                                       "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_BatchNorm_beta.npy"),
                                         0.001f)
              .set_name("Conv2d_2b_3x3/BatchNorm/batchnorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2b_3x3/Relu")

              << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, operation_layout, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))).set_name("MaxPool_3a_3x3/MaxPool")

              << ConvolutionLayer(1U, 1U, 80U,
                                  get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_3b_1x1/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_BatchNorm_moving_variance.npy"),
                                         nullptr, get_weights_accessor(data_path,
                                                                       "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_BatchNorm_beta.npy"),
                                         0.001f)
              .set_name("Conv2d_3b_1x1/BatchNorm/batchnorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_3b_1x1/Relu")

              << ConvolutionLayer(3U, 3U, 192U,
                                  get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_4a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path,
                                                              "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_BatchNorm_moving_variance.npy"),
                                         nullptr, get_weights_accessor(data_path,
                                                                       "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_BatchNorm_beta.npy"),
                                         0.001f)
              .set_name("Conv2d_4a_3x3/BatchNorm/batchnorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_4a_3x3/Relu")

              << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, operation_layout, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))).set_name("MaxPool_5a_3x3/MaxPool");

        // Inception blocks: 3x A (Mixed_5b..5d), 1x B (Mixed_6a), 4x C (Mixed_6b..6e),
        // 1x D (Mixed_7a), 2x E (Mixed_7b..7c). Filter counts follow the TF-slim model.
        graph << get_inception_node_A(data_path, "Mixed_5b", weights_layout, 64U, std::make_tuple(48U, 64U), std::make_tuple(64U, 96U, 96U),
                                      32U)
              .set_name("Mixed_5b/concat");
        graph << get_inception_node_A(data_path, "Mixed_5c", weights_layout, 64U, std::make_tuple(48U, 64U), std::make_tuple(64U, 96U, 96U),
                                      64U, true)
              .set_name("Mixed_5c/concat");
        graph << get_inception_node_A(data_path, "Mixed_5d", weights_layout, 64U, std::make_tuple(48U, 64U), std::make_tuple(64U, 96U, 96U),
                                      64U)
              .set_name("Mixed_5d/concat");

        graph << get_inception_node_B(data_path, "Mixed_6a", weights_layout, 384U, std::make_tuple(64U, 96U, 96U)).set_name("Mixed_6a/concat");

        graph << get_inception_node_C(data_path, "Mixed_6b", weights_layout, 192U, std::make_tuple(128U, 128U, 192U),
                                      std::make_tuple(128U, 128U, 128U, 128U, 192U), 192U)
              .set_name("Mixed_6b/concat");
        graph << get_inception_node_C(data_path, "Mixed_6c", weights_layout, 192U, std::make_tuple(160U, 160U, 192U),
                                      std::make_tuple(160U, 160U, 160U, 160U, 192U), 192U)
              .set_name("Mixed_6c/concat");
        graph << get_inception_node_C(data_path, "Mixed_6d", weights_layout, 192U, std::make_tuple(160U, 160U, 192U),
                                      std::make_tuple(160U, 160U, 160U, 160U, 192U), 192U)
              .set_name("Mixed_6d/concat");
        graph << get_inception_node_C(data_path, "Mixed_6e", weights_layout, 192U, std::make_tuple(192U, 192U, 192U),
                                      std::make_tuple(192U, 192U, 192U, 192U, 192U), 192U)
              .set_name("Mixed_6e/concat");

        graph << get_inception_node_D(data_path, "Mixed_7a", weights_layout, std::make_tuple(192U, 320U),
                                      std::make_tuple(192U, 192U, 192U, 192U))
              .set_name("Mixed_7a/concat");

        graph << get_inception_node_E(data_path, "Mixed_7b", weights_layout, 320U, std::make_tuple(384U, 384U, 384U),
                                      std::make_tuple(448U, 384U, 384U, 384U), 192U)
              .set_name("Mixed_7b/concat");
        graph << get_inception_node_E(data_path, "Mixed_7c", weights_layout, 320U, std::make_tuple(384U, 384U, 384U),
                                      std::make_tuple(448U, 384U, 384U, 384U), 192U, true)
              .set_name("Mixed_7c/concat");

        // Classifier head: global 8x8 average pool, 1x1 conv producing the 1001 logits
        // (with bias this time), reshape and softmax, then print the top-5 predictions.
        graph << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 8, operation_layout, PadStrideInfo(1, 1, 0, 0, DimensionRoundingType::CEIL))).set_name("Logits/AvgPool_1a_8x8/AvgPool")
              << ConvolutionLayer(1U, 1U, 1001U, get_weights_accessor(data_path,
                                                                      "/cnn_data/inceptionv3_model/Logits_Conv2d_1c_1x1_weights.npy", weights_layout),
                                  get_weights_accessor(data_path,
                                                       "/cnn_data/inceptionv3_model/Logits_Conv2d_1c_1x1_biases.npy"),
                                  PadStrideInfo(1, 1, 0, 0))
              .set_name("Logits/Conv2d_1c_1x1/convolution")
              << ReshapeLayer(TensorShape(1001U)).set_name("Predictions/Reshape")
              << SoftmaxLayer().set_name("Predictions/Softmax")
              << OutputLayer(get_output_accessor(common_params, 5));

        // Finalize graph
        GraphConfig config;
        config.num_threads = common_params.threads;
        config.use_tuner   = common_params.enable_tuner;
        config.tuner_mode  = common_params.tuner_mode;
        config.tuner_file  = common_params.tuner_file;
        config.mlgo_file   = common_params.mlgo_file;
        // Quantized runs use synthetic data of the requested type
        config.use_synthetic_type = arm_compute::is_data_type_quantized(common_params.data_type);
        config.synthetic_type     = common_params.data_type;
        graph.finalize(common_params.target, config);

        return true;
    }
213
    /** Run one inference pass over the finalized graph. */
    void do_run() override
    {
        graph.run();
    }
218
private:
    CommandLineParser  cmd_parser;    // Parses argc/argv
    CommonGraphOptions common_opts;   // Options shared by all graph examples, registered with cmd_parser
    CommonGraphParams  common_params; // Parsed option values consumed in do_setup()
    Stream             graph;         // The graph stream the whole network is built into

private:
get_inception_node_A(const std::string & data_path,std::string && param_path,DataLayout weights_layout,unsigned int a_filt,std::tuple<unsigned int,unsigned int> b_filters,std::tuple<unsigned int,unsigned int,unsigned int> c_filters,unsigned int d_filt,bool is_name_different=false)226 ConcatLayer get_inception_node_A(const std::string &data_path, std::string &¶m_path, DataLayout weights_layout,
227 unsigned int a_filt,
228 std::tuple<unsigned int, unsigned int> b_filters,
229 std::tuple<unsigned int, unsigned int, unsigned int> c_filters,
230 unsigned int d_filt,
231 bool is_name_different = false)
232 {
233 std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
234
235 // This is due to a naming issue in the tf model
236 std::string conv_id0 = "_0a_";
237 std::string conv_id1 = "2d_0b_";
238 if(is_name_different)
239 {
240 conv_id0 = "_0b_";
241 conv_id1 = "_1_0c_";
242 }
243
244 SubStream i_a(graph);
245 i_a << ConvolutionLayer(
246 1U, 1U, a_filt,
247 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
248 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
249 PadStrideInfo(1, 1, 0, 0))
250 .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/convolution")
251 << BatchNormalizationLayer(
252 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
253 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
254 nullptr,
255 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
256 0.001f)
257 .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm/batchnorm")
258 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu");
259
260 SubStream i_b(graph);
261 i_b << ConvolutionLayer(
262 1U, 1U, std::get<0>(b_filters),
263 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_weights.npy", weights_layout),
264 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
265 PadStrideInfo(1, 1, 0, 0))
266 .set_name(param_path + "/Branch_1/Conv2d" + conv_id0 + "1x1/convolution")
267 << BatchNormalizationLayer(
268 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_BatchNorm_moving_mean.npy"),
269 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_BatchNorm_moving_variance.npy"),
270 nullptr,
271 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_BatchNorm_beta.npy"),
272 0.001f)
273 .set_name(param_path + "/Branch_1/Conv2d" + conv_id0 + "1x1/BatchNorm/batchnorm")
274 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d" + conv_id0 + "1x1/Relu")
275 << ConvolutionLayer(
276 5U, 5U, std::get<1>(b_filters),
277 get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_weights.npy", weights_layout),
278 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
279 PadStrideInfo(1, 1, 2, 2))
280 .set_name(param_path + "/Branch_1/Conv2d" + conv_id1 + "5x5/convolution")
281 << BatchNormalizationLayer(
282 get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_BatchNorm_moving_mean.npy"),
283 get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_BatchNorm_moving_variance.npy"),
284 nullptr,
285 get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_BatchNorm_beta.npy"),
286 0.001f)
287 .set_name(param_path + "/Branch_1/Conv2d" + conv_id1 + "5x5/BatchNorm/batchnorm")
288 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d" + conv_id1 + "5x5/Relu");
289
290 SubStream i_c(graph);
291 i_c << ConvolutionLayer(
292 1U, 1U, std::get<0>(c_filters),
293 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
294 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
295 PadStrideInfo(1, 1, 0, 0))
296 .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/convolution")
297 << BatchNormalizationLayer(
298 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
299 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
300 nullptr,
301 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
302 0.001f)
303 .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/BatchNorm/batchnorm")
304 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Relu")
305 << ConvolutionLayer(
306 3U, 3U, std::get<1>(c_filters),
307 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout),
308 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
309 PadStrideInfo(1, 1, 1, 1))
310 .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/convolution")
311 << BatchNormalizationLayer(
312 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
313 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
314 nullptr,
315 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
316 0.001f)
317 .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/BatchNorm/batchnorm")
318 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0b_3x3/Relu")
319 << ConvolutionLayer(
320 3U, 3U, std::get<2>(c_filters),
321 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_weights.npy", weights_layout),
322 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
323 PadStrideInfo(1, 1, 1, 1))
324 .set_name(param_path + "/Branch_2/Conv2d_0c_3x3/convolution")
325 << BatchNormalizationLayer(
326 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_mean.npy"),
327 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_variance.npy"),
328 nullptr,
329 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_beta.npy"),
330 0.001f)
331 .set_name(param_path + "/Branch_2/Conv2d_0c_3x3/BatchNorm/batcnorm")
332 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0c_3x3/Relu");
333
334 SubStream i_d(graph);
335 i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, common_params.data_layout, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL),
336 true))
337 .set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool")
338 << ConvolutionLayer(
339 1U, 1U, d_filt,
340 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout),
341 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
342 PadStrideInfo(1, 1, 0, 0))
343 .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/convolution")
344 << BatchNormalizationLayer(
345 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
346 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
347 nullptr,
348 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
349 0.001f)
350 .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm/batchnorm")
351 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu");
352
353 return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
354 }
355
get_inception_node_B(const std::string & data_path,std::string && param_path,DataLayout weights_layout,unsigned int a_filt,std::tuple<unsigned int,unsigned int,unsigned int> b_filters)356 ConcatLayer get_inception_node_B(const std::string &data_path, std::string &¶m_path, DataLayout weights_layout,
357 unsigned int a_filt,
358 std::tuple<unsigned int, unsigned int, unsigned int> b_filters)
359 {
360 std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
361 SubStream i_a(graph);
362 i_a << ConvolutionLayer(
363 3U, 3U, a_filt,
364 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_weights.npy", weights_layout),
365 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
366 PadStrideInfo(2, 2, 0, 0))
367 .set_name(param_path + "/Branch_0/Conv2d_1a_1x1/convolution")
368 << BatchNormalizationLayer(
369 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_BatchNorm_moving_mean.npy"),
370 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_BatchNorm_moving_variance.npy"),
371 nullptr,
372 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_BatchNorm_beta.npy"),
373 0.001f)
374 .set_name(param_path + "/Branch_0/Conv2d_1a_1x1/BatchNorm/batchnorm")
375 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_1a_1x1/Relu");
376
377 SubStream i_b(graph);
378 i_b << ConvolutionLayer(
379 1U, 1U, std::get<0>(b_filters),
380 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
381 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
382 PadStrideInfo(1, 1, 0, 0))
383 .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/convolution")
384 << BatchNormalizationLayer(
385 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
386 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
387 nullptr,
388 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
389 0.001f)
390 .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm/batchnorm")
391 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu")
392 << ConvolutionLayer(
393 3U, 3U, std::get<1>(b_filters),
394 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_weights.npy", weights_layout),
395 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
396 PadStrideInfo(1, 1, 1, 1))
397 .set_name(param_path + "/Branch_1/Conv2d_0b_3x3/convolution")
398 << BatchNormalizationLayer(
399 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
400 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
401 nullptr,
402 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"),
403 0.001f)
404 .set_name(param_path + "/Branch_1/Conv2d_0b_3x3/BatchNorm/batchnorm")
405 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0b_3x3/Relu")
406 << ConvolutionLayer(
407 3U, 3U, std::get<2>(b_filters),
408 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_weights.npy", weights_layout),
409 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
410 PadStrideInfo(2, 2, 0, 0))
411 .set_name(param_path + "/Branch_1/Conv2d_1a_1x1/convolution")
412 << BatchNormalizationLayer(
413 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_BatchNorm_moving_mean.npy"),
414 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_BatchNorm_moving_variance.npy"),
415 nullptr,
416 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_BatchNorm_beta.npy"),
417 0.001f)
418 .set_name(param_path + "/Branch_1/Conv2d_1a_1x1/BatchNorm/batchnorm")
419 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_1a_1x1/Relu");
420
421 SubStream i_c(graph);
422 i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))).set_name(param_path + "/Branch_2/MaxPool_1a_3x3/MaxPool");
423
424 return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c));
425 }
426
get_inception_node_C(const std::string & data_path,std::string && param_path,DataLayout weights_layout,unsigned int a_filt,std::tuple<unsigned int,unsigned int,unsigned int> b_filters,std::tuple<unsigned int,unsigned int,unsigned int,unsigned int,unsigned int> c_filters,unsigned int d_filt)427 ConcatLayer get_inception_node_C(const std::string &data_path, std::string &¶m_path, DataLayout weights_layout,
428 unsigned int a_filt,
429 std::tuple<unsigned int, unsigned int, unsigned int> b_filters,
430 std::tuple<unsigned int, unsigned int, unsigned int, unsigned int, unsigned int> c_filters,
431 unsigned int d_filt)
432 {
433 std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
434 SubStream i_a(graph);
435 i_a << ConvolutionLayer(
436 1U, 1U, a_filt,
437 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
438 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
439 PadStrideInfo(1, 1, 0, 0))
440 .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/convolution")
441 << BatchNormalizationLayer(
442 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
443 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
444 nullptr,
445 get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
446 0.001f)
447 .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm/batchnorm")
448 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu");
449
450 SubStream i_b(graph);
451 i_b << ConvolutionLayer(
452 1U, 1U, std::get<0>(b_filters),
453 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
454 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
455 PadStrideInfo(1, 1, 0, 0))
456 .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/convolution")
457 << BatchNormalizationLayer(
458 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
459 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
460 nullptr,
461 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
462 0.001f)
463 .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm/batchnorm")
464 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu")
465 << ConvolutionLayer(
466 7U, 1U, std::get<1>(b_filters),
467 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_weights.npy", weights_layout),
468 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
469 PadStrideInfo(1, 1, 3, 0))
470 .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/convolution")
471 << BatchNormalizationLayer(
472 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
473 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
474 nullptr,
475 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"),
476 0.001f)
477 .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/BatchNorm/batchnorm")
478 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0b_1x7/Relu")
479 << ConvolutionLayer(
480 1U, 7U, std::get<2>(b_filters),
481 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_weights.npy", weights_layout),
482 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
483 PadStrideInfo(1, 1, 0, 3))
484 .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/convolution")
485 << BatchNormalizationLayer(
486 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
487 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
488 nullptr,
489 get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"),
490 0.001f)
491 .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/BatchNorm/batchnorm")
492 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0c_7x1/Relu");
493
494 SubStream i_c(graph);
495 i_c << ConvolutionLayer(
496 1U, 1U, std::get<0>(c_filters),
497 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
498 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
499 PadStrideInfo(1, 1, 0, 0))
500 .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/convolution")
501 << BatchNormalizationLayer(
502 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
503 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
504 nullptr,
505 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
506 0.001f)
507 .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/BatchNorm/batchnorm")
508 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Relu")
509 << ConvolutionLayer(
510 1U, 7U, std::get<1>(c_filters),
511 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_weights.npy", weights_layout),
512 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
513 PadStrideInfo(1, 1, 0, 3))
514 .set_name(param_path + "/Branch_2/Conv2d_0b_7x1/convolution")
515 << BatchNormalizationLayer(
516 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_moving_mean.npy"),
517 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_moving_variance.npy"),
518 nullptr,
519 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_beta.npy"),
520 0.001f)
521 .set_name(param_path + "/Branch_2/Conv2d_0b_7x1/BatchNorm/batchnorm")
522 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0b_7x1/Relu")
523 << ConvolutionLayer(
524 7U, 1U, std::get<2>(c_filters),
525 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_weights.npy", weights_layout),
526 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
527 PadStrideInfo(1, 1, 3, 0))
528 .set_name(param_path + "/Branch_2/Conv2d_0c_1x7/convolution")
529 << BatchNormalizationLayer(
530 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_moving_mean.npy"),
531 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_moving_variance.npy"),
532 nullptr,
533 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_beta.npy"),
534 0.001f)
535 .set_name(param_path + "/Branch_2/Conv2d_0c_1x7/BatchNorm/batchnorm")
536 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0c_1x7/Relu")
537 << ConvolutionLayer(
538 1U, 7U, std::get<3>(c_filters),
539 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_weights.npy", weights_layout),
540 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
541 PadStrideInfo(1, 1, 0, 3))
542 .set_name(param_path + "/Branch_2/Conv2d_0d_7x1/convolution")
543 << BatchNormalizationLayer(
544 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_moving_mean.npy"),
545 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_moving_variance.npy"),
546 nullptr,
547 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_beta.npy"),
548 0.001f)
549 .set_name(param_path + "/Branch_2/Conv2d_0d_7x1/BatchNorm/batchnorm")
550 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0d_7x1/Relu")
551 << ConvolutionLayer(
552 7U, 1U, std::get<4>(c_filters),
553 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_weights.npy", weights_layout),
554 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
555 PadStrideInfo(1, 1, 3, 0))
556 .set_name(param_path + "/Branch_2/Conv2d_0e_1x7/convolution")
557 << BatchNormalizationLayer(
558 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_moving_mean.npy"),
559 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_moving_variance.npy"),
560 nullptr,
561 get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_beta.npy"),
562 0.001f)
563 .set_name(param_path + "/Branch_2/Conv2d_0e_1x7/BatchNorm/batchnorm")
564 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0e_1x7/Relu");
565
566 SubStream i_d(graph);
567 i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, common_params.data_layout, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL),
568 true))
569 .set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool")
570 << ConvolutionLayer(
571 1U, 1U, d_filt,
572 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout),
573 std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
574 PadStrideInfo(1, 1, 0, 0))
575 .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/convolution")
576 << BatchNormalizationLayer(
577 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
578 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
579 nullptr,
580 get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
581 0.001f)
582 .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm/batchnorm")
583 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu");
584
585 return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
586 }
587
    /** Builds an Inception v3 module of type "D" (a spatial-reduction block).
     *
     * Constructs three parallel sub-streams on top of the current graph and
     * joins them with a ConcatLayer:
     *  - Branch_0: 1x1 convolution -> stride-2 3x3 convolution
     *  - Branch_1: 1x1 -> 1x7 -> 7x1 convolutions -> stride-2 3x3 convolution
     *  - Branch_2: stride-2 3x3 max-pooling
     * Every convolution is bias-free (a null bias accessor is passed) and is
     * followed by batch normalization (no gamma weights, epsilon 0.001f) and a
     * ReLU activation. The stride-2 operations in all branches reduce the
     * spatial dimensions of the output.
     *
     * @param[in] data_path      Root path of the trainable-parameter .npy files
     * @param[in] param_path     Module name; used both for layer names and to build the weight-file paths
     * @param[in] weights_layout Data layout the convolution weights are stored in on disk
     * @param[in] a_filters      Output filter counts of the two Branch_0 convolutions, in order
     * @param[in] b_filters      Output filter counts of the four Branch_1 convolutions, in order
     *
     * @return ConcatLayer joining the three branch sub-streams
     */
    ConcatLayer get_inception_node_D(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
                                     std::tuple<unsigned int, unsigned int> a_filters,
                                     std::tuple<unsigned int, unsigned int, unsigned int, unsigned int> b_filters)
    {
        // Common prefix shared by every weight/statistics file of this module
        std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";

        // Branch_0: 1x1 conv -> BN -> ReLU -> 3x3 conv (stride 2) -> BN -> ReLU
        SubStream i_a(graph);
        i_a << ConvolutionLayer(
                1U, 1U, std::get<0>(a_filters),
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 0))
            .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                nullptr, // no gamma/scale weights in the pretrained model
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(
                3U, 3U, std::get<1>(a_filters),
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(2, 2, 0, 0)) // stride 2: spatial reduction
            .set_name(param_path + "/Branch_0/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                nullptr,
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_0/Conv2d_1a_3x3/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_1a_3x3/Relu");

        // Branch_1: 1x1 conv -> 1x7 conv -> 7x1 conv -> 3x3 conv (stride 2),
        // each followed by BN + ReLU. Kernel args are (width, height), so the
        // "1x7" layer uses a 7-wide/1-tall kernel with horizontal padding 3,
        // and "7x1" the transposed shape with vertical padding 3.
        SubStream i_b(graph);
        i_b << ConvolutionLayer(
                1U, 1U, std::get<0>(b_filters),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 0))
            .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                nullptr,
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(
                7U, 1U, std::get<1>(b_filters),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 3, 0))
            .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
                nullptr,
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0b_1x7/Relu")
            << ConvolutionLayer(
                1U, 7U, std::get<2>(b_filters),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 3))
            .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
                nullptr,
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0c_7x1/Relu")
            << ConvolutionLayer(
                3U, 3U, std::get<3>(b_filters),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(2, 2, 0, 0)) // stride 2: spatial reduction
            .set_name(param_path + "/Branch_1/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                nullptr,
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_1/Conv2d_1a_3x3/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_1a_3x3/Relu");

        // Branch_2: parameter-free stride-2 3x3 max-pooling
        SubStream i_c(graph);
        i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))).set_name(param_path + "/Branch_2/MaxPool_1a_3x3/MaxPool");

        return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c));
    }
686
    /** Builds an Inception v3 module of type "E" (the expanded-filter-bank block).
     *
     * Constructs four parallel sub-streams on top of the current graph and
     * joins them with a ConcatLayer:
     *  - Branch_0: 1x1 convolution
     *  - Branch_1: 1x1 convolution, then a split into parallel 1x3 and 3x1
     *              convolutions whose outputs are concatenated
     *  - Branch_2: 1x1 -> 3x3 convolutions, then the same 1x3 / 3x1 split
     *              and concatenation
     *  - Branch_3: 3x3 average pooling followed by a 1x1 convolution
     * Every convolution is bias-free (a null bias accessor is passed) and is
     * followed by batch normalization (no gamma weights, epsilon 0.001f) and a
     * ReLU activation.
     *
     * @param[in] data_path         Root path of the trainable-parameter .npy files
     * @param[in] param_path        Module name; used both for layer names and to build the weight-file paths
     * @param[in] weights_layout    Data layout the convolution weights are stored in on disk
     * @param[in] a_filt            Output filter count of the Branch_0 convolution
     * @param[in] b_filters         Output filter counts of the three Branch_1 convolutions, in order
     * @param[in] c_filters         Output filter counts of the four Branch_2 convolutions, in order
     * @param[in] d_filt            Output filter count of the Branch_3 convolution
     * @param[in] is_name_different True to name the Branch_1 3x1 sub-layer "_0c_" instead of "_0b_"
     *                              (works around an inconsistency in the TensorFlow model's naming)
     *
     * @return ConcatLayer joining the four branch sub-streams
     */
    ConcatLayer get_inception_node_E(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
                                     unsigned int a_filt,
                                     std::tuple<unsigned int, unsigned int, unsigned int> b_filters,
                                     std::tuple<unsigned int, unsigned int, unsigned int, unsigned int> c_filters,
                                     unsigned int d_filt,
                                     bool is_name_different = false)
    {
        // This is due to a naming issue in the tf model: one instance of this
        // module labels the Branch_1 3x1 convolution "_0c_" rather than "_0b_".
        std::string conv_id = "_0b_";
        if(is_name_different)
        {
            conv_id = "_0c_";
        }

        // Common prefix shared by every weight/statistics file of this module
        std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";

        // Branch_0: single 1x1 conv -> BN -> ReLU
        SubStream i_a(graph);
        i_a << ConvolutionLayer(
                1U, 1U, a_filt,
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 0))
            .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                nullptr, // no gamma/scale weights in the pretrained model
                get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu");

        // Branch_1 trunk: 1x1 conv -> BN -> ReLU, then forks into i_b1/i_b2 below
        SubStream i_b(graph);
        i_b << ConvolutionLayer(
                1U, 1U, std::get<0>(b_filters),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 0))
            .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                nullptr,
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu");

        // Branch_1, left fork: 1x3 conv (kernel args are width, height)
        SubStream i_b1(i_b);
        i_b1 << ConvolutionLayer(
                3U, 1U, std::get<1>(b_filters),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 1, 0))
            .set_name(param_path + "/Branch_1/Conv2d_0b_1x3/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_variance.npy"),
                nullptr,
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_1/Conv2d_0b_1x3/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0b_1x3/Relu");

        // Branch_1, right fork: 3x1 conv; layer/file names use conv_id (see above)
        SubStream i_b2(i_b);
        i_b2 << ConvolutionLayer(
                1U, 3U, std::get<2>(b_filters),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 1))
            .set_name(param_path + "/Branch_1/Conv2d" + conv_id + "3x1/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_BatchNorm_moving_variance.npy"),
                nullptr,
                get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_1/Conv2d" + conv_id + "3x1/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d" + conv_id + "3x1/Relu");

        // Merge b1 and b2
        i_b << ConcatLayer(std::move(i_b1), std::move(i_b2)).set_name(param_path + "/Branch_1/concat");

        // Branch_2 trunk: 1x1 conv -> 3x3 conv, then forks into i_c1/i_c2 below
        SubStream i_c(graph);
        i_c << ConvolutionLayer(
                1U, 1U, std::get<0>(c_filters),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 0))
            .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                nullptr,
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(
                3U, 3U, std::get<1>(c_filters),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 1, 1))
            .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                nullptr,
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0b_3x3/Relu");

        // Branch_2, left fork: 1x3 conv
        SubStream i_c1(i_c);
        i_c1 << ConvolutionLayer(
                3U, 1U, std::get<2>(c_filters),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 1, 0))
            .set_name(param_path + "/Branch_2/Conv2d_0c_1x3/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_moving_variance.npy"),
                nullptr,
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_2/Conv2d_0c_1x3/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0c_1x3/Relu");

        // Branch_2, right fork: 3x1 conv
        SubStream i_c2(i_c);
        i_c2 << ConvolutionLayer(
                1U, 3U, std::get<3>(c_filters),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 1))
            .set_name(param_path + "/Branch_2/Conv2d_0d_3x1/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_BatchNorm_moving_variance.npy"),
                nullptr,
                get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_2/Conv2d_0d_3x1/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0d_3x1/Relu");

        // Merge i_c1 and i_c2
        i_c << ConcatLayer(std::move(i_c1), std::move(i_c2)).set_name(param_path + "/Branch_2/concat");

        // Branch_3: 3x3 average pool (stride 1, pad 1; the trailing 'true'
        // presumably excludes padding from the average — confirm against
        // PoolingLayerInfo's constructor) followed by a 1x1 conv -> BN -> ReLU
        SubStream i_d(graph);
        i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, common_params.data_layout, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL),
                                             true))
            .set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool")
            << ConvolutionLayer(
                1U, 1U, d_filt,
                get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout),
                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                PadStrideInfo(1, 1, 0, 0))
            .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/convolution")
            << BatchNormalizationLayer(
                get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
                get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
                nullptr,
                get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
                0.001f)
            .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm/batchnorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu");

        return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
    }
855 };
856
857 /** Main program for Inception V3
858 *
859 * Model is based on:
860 * https://arxiv.org/abs/1512.00567
861 * "Rethinking the Inception Architecture for Computer Vision"
862 * Christian Szegedy, Vincent Vanhoucke, Sergey Ioffe, Jonathon Shlens, Zbigniew Wojna
863 *
864 * Provenance: download.tensorflow.org/models/inception_v3_2016_08_28.tar.gz
865 *
866 * @note To list all the possible arguments execute the binary appended with the --help option
867 *
868 * @param[in] argc Number of arguments
869 * @param[in] argv Arguments
870 */
main(int argc,char ** argv)871 int main(int argc, char **argv)
872 {
873 return arm_compute::utils::run_example<InceptionV3Example>(argc, argv);
874 }
875