/*
 * Copyright (c) 2018-2021 Arm Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#include "arm_compute/graph.h"
#include "support/ToolchainSupport.h"
#include "utils/CommonGraphOptions.h"
#include "utils/GraphUtils.h"
#include "utils/Utils.h"

using namespace arm_compute::utils;
using namespace arm_compute::graph::frontend;
using namespace arm_compute::graph_utils;

/** Example demonstrating how to implement the Inception-ResNet-v2 network using the Compute Library's graph API */
class InceptionResNetV2Example final : public Example
{
public:
    InceptionResNetV2Example()
        : cmd_parser(), common_opts(cmd_parser), common_params(), graph(0, "InceptionResNetV2")
    {
    }
    bool do_setup(int argc, char **argv) override
    {
        // Parse arguments
        cmd_parser.parse(argc, argv);
        cmd_parser.validate();

        // Consume common parameters
        common_params = consume_common_graph_parameters(common_opts);

        // Return when help menu is requested
        if(common_params.help)
        {
            cmd_parser.print_help(argv[0]);
            return false;
        }

        // Set default layout if needed
        if(!common_opts.data_layout->is_set() && common_params.target == Target::NEON)
        {
            common_params.data_layout = DataLayout::NCHW;
        }

        // Checks
        ARM_COMPUTE_EXIT_ON_MSG(arm_compute::is_data_type_quantized_asymmetric(common_params.data_type), "QASYMM8 not supported for this graph");

        // Print parameter values
        std::cout << common_params << std::endl;

        // Create model path
        std::string data_path  = common_params.data_path;
        std::string model_path = "/cnn_data/inception_resnet_v2_model/";
        if(!data_path.empty())
        {
            data_path += model_path;
        }

        // Create a preprocessor object
        std::unique_ptr<IPreprocessor> preprocessor = std::make_unique<TFPreproccessor>(0.f, 1.f);
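        // Note: TFPreproccessor (spelled as in the library's GraphUtils helpers) is assumed
        // here to rescale the 8-bit input image into the [0.f, 1.f] range given the bounds
        // passed above.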

        // Create input descriptor
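        // permute_shape converts the canonical NCHW shape (299x299x3xN) into the layout
        // selected on the command line, so the same example runs in NCHW and NHWC.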
        const auto        operation_layout = common_params.data_layout;
        const TensorShape tensor_shape     = permute_shape(TensorShape(299U, 299U, 3U, common_params.batches), DataLayout::NCHW, operation_layout);
        TensorDescriptor  input_descriptor = TensorDescriptor(tensor_shape, common_params.data_type).set_layout(operation_layout);

        // Set weights trained layout
        const DataLayout weights_layout = DataLayout::NCHW;

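        // The convolutions below pass a null bias accessor because each one is followed by a
        // BatchNorm layer; get_random_accessor(1.f, 1.f) stands in for the BatchNorm gamma,
        // which is presumed fixed at 1 in the trained checkpoint (only beta, mean and
        // variance are loaded from .npy files).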
        graph << common_params.target
              << common_params.fast_math_hint
              << InputLayer(input_descriptor, get_input_accessor(common_params, std::move(preprocessor), false))
              // Conv2d_1a_3x3
              << ConvolutionLayer(3U, 3U, 32U,
                     get_weights_accessor(data_path, "Conv2d_1a_3x3_weights.npy", weights_layout),
                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                     PadStrideInfo(2, 2, 0, 0))
              .set_name("Conv2d_1a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                     get_weights_accessor(data_path, "Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                     get_random_accessor(1.f, 1.f),
                     get_weights_accessor(data_path, "Conv2d_1a_3x3_BatchNorm_beta.npy"),
                     0.0010000000474974513f)
              .set_name("Conv2d_1a_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_1a_3x3/Relu")
              // Conv2d_2a_3x3
              << ConvolutionLayer(3U, 3U, 32U,
                     get_weights_accessor(data_path, "Conv2d_2a_3x3_weights.npy", weights_layout),
                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                     PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_2a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_2a_3x3_BatchNorm_moving_mean.npy"),
                     get_weights_accessor(data_path, "Conv2d_2a_3x3_BatchNorm_moving_variance.npy"),
                     get_random_accessor(1.f, 1.f),
                     get_weights_accessor(data_path, "Conv2d_2a_3x3_BatchNorm_beta.npy"),
                     0.0010000000474974513f)
              .set_name("Conv2d_2a_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2a_3x3/Relu")
              // Conv2d_2b_3x3
              << ConvolutionLayer(3U, 3U, 64U,
                     get_weights_accessor(data_path, "Conv2d_2b_3x3_weights.npy", weights_layout),
                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                     PadStrideInfo(1, 1, 1, 1))
              .set_name("Conv2d_2b_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_2b_3x3_BatchNorm_moving_mean.npy"),
                     get_weights_accessor(data_path, "Conv2d_2b_3x3_BatchNorm_moving_variance.npy"),
                     get_random_accessor(1.f, 1.f),
                     get_weights_accessor(data_path, "Conv2d_2b_3x3_BatchNorm_beta.npy"),
                     0.0010000000474974513f)
              .set_name("Conv2d_2b_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2b_3x3/Relu")
              // MaxPool_3a_3x3
              << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, operation_layout, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL), true)).set_name("MaxPool_3a_3x3/MaxPool")
              // Conv2d_3b_1x1
              << ConvolutionLayer(1U, 1U, 80U,
                     get_weights_accessor(data_path, "Conv2d_3b_1x1_weights.npy", weights_layout),
                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                     PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_3b_1x1/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_3b_1x1_BatchNorm_moving_mean.npy"),
                     get_weights_accessor(data_path, "Conv2d_3b_1x1_BatchNorm_moving_variance.npy"),
                     get_random_accessor(1.f, 1.f),
                     get_weights_accessor(data_path, "Conv2d_3b_1x1_BatchNorm_beta.npy"),
                     0.0010000000474974513f)
              .set_name("Conv2d_3b_1x1/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_3b_1x1/Relu")
              // Conv2d_4a_3x3
              << ConvolutionLayer(3U, 3U, 192U,
                     get_weights_accessor(data_path, "Conv2d_4a_3x3_weights.npy", weights_layout),
                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                     PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_4a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_4a_3x3_BatchNorm_moving_mean.npy"),
                     get_weights_accessor(data_path, "Conv2d_4a_3x3_BatchNorm_moving_variance.npy"),
                     get_random_accessor(1.f, 1.f),
                     get_weights_accessor(data_path, "Conv2d_4a_3x3_BatchNorm_beta.npy"),
                     0.0010000000474974513f)
              .set_name("Conv2d_4a_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_4a_3x3/Relu")
              // MaxPool_5a_3x3
              << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, operation_layout, PadStrideInfo(2, 2, 0, 0), true)).set_name("MaxPool_5a_3x3/MaxPool");

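        // Inception-ResNet-v2 body: the Mixed_5b inception block, 10 repeated block35 units,
        // the Mixed_6a reduction, 20 repeated block17 units, the Mixed_7a reduction, and
        // nine scaled block8 units followed by one final unscaled block8 unit without the
        // trailing activation.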
        block_mixed_5b(data_path, weights_layout);
        block35_repeat(data_path, weights_layout, 10);
        block_mixed_6a(data_path, weights_layout);
        block17_repeat(data_path, weights_layout, 20);
        block_mixed_7a(data_path, weights_layout);
        block8_repeat(data_path, weights_layout, 9, 0.2f, true);
        block8_repeat(data_path, weights_layout, 1, 1.f, false);

        // Conv2d_7b_1x1
        graph << ConvolutionLayer(1U, 1U, 1536U,
                     get_weights_accessor(data_path, "Conv2d_7b_1x1_weights.npy", weights_layout),
                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                     PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_7b_1x1/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_7b_1x1_BatchNorm_moving_mean.npy"),
                     get_weights_accessor(data_path, "Conv2d_7b_1x1_BatchNorm_moving_variance.npy"),
                     get_random_accessor(1.f, 1.f),
                     get_weights_accessor(data_path, "Conv2d_7b_1x1_BatchNorm_beta.npy"),
                     0.0010000000474974513f)
              .set_name("Conv2d_7b_1x1/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_7b_1x1/Relu")
              << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, operation_layout)).set_name("Logits/AvgPool_1a_8x8")
              << FlattenLayer().set_name("Logits/Flatten")
              << FullyConnectedLayer(
                     1001U,
                     get_weights_accessor(data_path, "Logits_Logits_weights.npy", weights_layout),
                     get_weights_accessor(data_path, "Logits_Logits_biases.npy"))
              .set_name("Logits/Logits")
              << SoftmaxLayer().set_name("Logits/Predictions")
              << OutputLayer(get_output_accessor(common_params, 5));

        // Finalize graph
        GraphConfig config;
        config.num_threads = common_params.threads;
        config.use_tuner   = common_params.enable_tuner;
        config.tuner_mode  = common_params.tuner_mode;
        config.tuner_file  = common_params.tuner_file;
        config.mlgo_file   = common_params.mlgo_file;
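        // Note: the tuner and MLGO heuristics configured above are typically only honoured
        // by the OpenCL backend; other targets are expected to ignore them.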

        graph.finalize(common_params.target, config);

        return true;
    }

    void do_run() override
    {
        graph.run();
    }

private:
    CommandLineParser  cmd_parser;
    CommonGraphOptions common_opts;
    CommonGraphParams  common_params;
    Stream             graph;

private:
    void block_mixed_5b(const std::string &data_path, DataLayout weights_layout)
    {
        // Branch 0
        SubStream i_a(graph);
        i_a << ConvolutionLayer(1U, 1U, 96U,
                   get_weights_accessor(data_path, "Mixed_5b_Branch_0_Conv2d_1x1_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                   PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_5b/Branch_0/Conv2d_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_5b_Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, "Mixed_5b_Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, "Mixed_5b_Branch_0_Conv2d_1x1_BatchNorm_beta.npy"),
                   0.0010000000474974513f)
            .set_name("Mixed_5b/Branch_0/Conv2d_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5b/Branch_0/Conv2d_1x1/Relu");

        // Branch 1
        SubStream i_b(graph);
        i_b << ConvolutionLayer(1U, 1U, 48U,
                   get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                   PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_5b/Branch_1/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                   0.0010000000474974513f)
            .set_name("Mixed_5b/Branch_1/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5b/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(5U, 5U, 64U,
                   get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0b_5x5_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                   PadStrideInfo(1, 1, 2, 2))
            .set_name("Mixed_5b/Branch_1/Conv2d_0b_5x5/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0b_5x5_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0b_5x5_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0b_5x5_BatchNorm_beta.npy"),
                   0.0010000000474974513f)
            .set_name("Mixed_5b/Branch_1/Conv2d_0b_5x5/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5b/Branch_1/Conv2d_0b_5x5/Relu");

        // Branch 2
        SubStream i_c(graph);
        i_c << ConvolutionLayer(1U, 1U, 64U,
                   get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                   PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_5b/Branch_2/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                   0.0010000000474974513f)
            .set_name("Mixed_5b/Branch_2/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5b/Branch_2/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 96U,
                   get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                   PadStrideInfo(1, 1, 1, 1))
            .set_name("Mixed_5b/Branch_2/Conv2d_0b_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                   0.0010000000474974513f)
            .set_name("Mixed_5b/Branch_2/Conv2d_0b_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5b/Branch_2/Conv2d_0b_3x3/Relu")
            << ConvolutionLayer(3U, 3U, 96U,
                   get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0c_3x3_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                   PadStrideInfo(1, 1, 1, 1))
            .set_name("Mixed_5b/Branch_2/Conv2d_0c_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0c_3x3_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0c_3x3_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0c_3x3_BatchNorm_beta.npy"),
                   0.0010000000474974513f)
            .set_name("Mixed_5b/Branch_2/Conv2d_0c_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5b/Branch_2/Conv2d_0c_3x3/Relu");

        // Branch 3
        SubStream i_d(graph);
        i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, common_params.data_layout, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true)).set_name("Mixed_5b/Branch_3/AvgPool_0a_3x3")
            << ConvolutionLayer(1U, 1U, 64U,
                   get_weights_accessor(data_path, "Mixed_5b_Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                   PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_5b/Branch_3/Conv2d_0b_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_5b_Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, "Mixed_5b_Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, "Mixed_5b_Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
                   0.0010000000474974513f)
            .set_name("Mixed_5b/Branch_3/Conv2d_0b_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5b/Branch_3/Conv2d_0b_1x1/Relu");

        // Concatenate
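        // ConcatLayer joins the four branch sub-streams along the channel dimension (the
        // graph frontend's default concatenation axis).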
        graph << ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d)).set_name("Mixed_5b/concat");
    }

    void block_mixed_6a(const std::string &data_path, DataLayout weights_layout)
    {
        // Branch 0
        SubStream i_a(graph);
        i_a << ConvolutionLayer(3U, 3U, 384U,
                   get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                   PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                   0.0010000000474974513f)
            .set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/Relu");

        // Branch 1
        SubStream i_b(graph);
        i_b << ConvolutionLayer(1U, 1U, 256U,
                   get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                   PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                   0.0010000000474974513f)
            .set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 256U,
                   get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                   PadStrideInfo(1, 1, 1, 1))
            .set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                   0.0010000000474974513f)
            .set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/Relu")
            << ConvolutionLayer(3U, 3U, 384U,
                   get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                   PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                   0.0010000000474974513f)
            .set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/Relu");

        // Branch 2
        SubStream i_c(graph);
        i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout, PadStrideInfo(2, 2, 0, 0), true)).set_name("Mixed_6a/Branch_2/MaxPool_1a_3x3");

        // Concatenate
        graph << ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c)).set_name("Mixed_6a/concat");
    }

    void block_mixed_7a(const std::string &data_path, DataLayout weights_layout)
    {
        // Branch 0
        SubStream i_a(graph);
        i_a << ConvolutionLayer(1U, 1U, 256U,
                   get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                   PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                   0.0010000000474974513f)
            .set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 384U,
                   get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                   PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                   0.0010000000474974513f)
            .set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/Relu");

        // Branch 1
        SubStream i_b(graph);
        i_b << ConvolutionLayer(1U, 1U, 256U,
                   get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                   PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                   0.0010000000474974513f)
            .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 288U,
                   get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                   PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                   0.0010000000474974513f)
            .set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/Relu");

        // Branch 2
        SubStream i_c(graph);
        i_c << ConvolutionLayer(1U, 1U, 256U,
                   get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                   PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_7a/Branch_2/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                   0.0010000000474974513f)
            .set_name("Mixed_7a/Branch_2/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_2/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 288U,
                   get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                   PadStrideInfo(1, 1, 1, 1))
            .set_name("Mixed_7a/Branch_2/Conv2d_0b_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                   0.0010000000474974513f)
            .set_name("Mixed_7a/Branch_2/Conv2d_0b_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_2/Conv2d_0b_3x3/Relu")
            << ConvolutionLayer(3U, 3U, 320U,
                   get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_weights.npy", weights_layout),
                   std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                   PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_7a/Branch_2/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                   get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                   get_random_accessor(1.f, 1.f),
                   get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                   0.0010000000474974513f)
            .set_name("Mixed_7a/Branch_2/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_2/Conv2d_1a_3x3/Relu");

        // Branch 3
        SubStream i_d(graph);
        i_d << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL), true)).set_name("Mixed_7a/Branch_3/MaxPool_1a_3x3");

        // Concatenate
        graph << ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d)).set_name("Mixed_7a/concat");
    }

    void block35_repeat(const std::string &data_path, DataLayout weights_layout, unsigned int num_blocks)
    {
        for(unsigned int i = 0; i < num_blocks; ++i)
        {
            std::stringstream unit_path_ss;
            unit_path_ss << "Repeat_block35_" << (i + 1) << "_";
            std::stringstream unit_name_ss;
            unit_name_ss << "Repeat/block35_" << (i + 1) << "/";

            std::string unit_path = unit_path_ss.str();
            std::string unit_name = unit_name_ss.str();

            // Create left and right substreams
            SubStream i_l(graph);
            SubStream i_r(graph);

            // Branch 0
            SubStream i_la(i_l);
            i_la << ConvolutionLayer(1U, 1U, 32U,
                        get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_weights.npy", weights_layout),
                        std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                        PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"),
                        get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
                        get_random_accessor(1.f, 1.f),
                        get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_beta.npy"),
                        0.0010000000474974513f)
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_0/Conv2d_1x1/Relu");

            // Branch 1
            SubStream i_lb(i_l);
            i_lb << ConvolutionLayer(1U, 1U, 32U,
                        get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                        std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                        PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                        get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                        get_random_accessor(1.f, 1.f),
                        get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                        0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0a_1x1/Relu")
                 << ConvolutionLayer(3U, 3U, 32U,
                        get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_weights.npy", weights_layout),
                        std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                        PadStrideInfo(1, 1, 1, 1))
                 .set_name(unit_name + "Branch_1/Conv2d_0b_3x3/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                        get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                        get_random_accessor(1.f, 1.f),
                        get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                        0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0b_3x3/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0b_3x3/Relu");

            // Branch 2
            SubStream i_lc(i_l);
            i_lc << ConvolutionLayer(1U, 1U, 32U,
                        get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
                        std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                        PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_2/Conv2d_0a_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                        get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                        get_random_accessor(1.f, 1.f),
                        get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                        0.0010000000474974513f)
                 .set_name(unit_name + "Branch_2/Conv2d_0a_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_2/Conv2d_0a_1x1/Relu")
                 << ConvolutionLayer(3U, 3U, 48U,
                        get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout),
                        std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                        PadStrideInfo(1, 1, 1, 1))
                 .set_name(unit_name + "Branch_2/Conv2d_0b_3x3/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                        get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                        get_random_accessor(1.f, 1.f),
                        get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                        0.0010000000474974513f)
                 .set_name(unit_name + "Branch_2/Conv2d_0b_3x3/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_2/Conv2d_0b_3x3/Relu")
                 << ConvolutionLayer(3U, 3U, 64U,
                        get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_weights.npy", weights_layout),
                        std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                        PadStrideInfo(1, 1, 1, 1))
                 .set_name(unit_name + "Branch_2/Conv2d_0c_3x3/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_mean.npy"),
                        get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_variance.npy"),
                        get_random_accessor(1.f, 1.f),
                        get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_beta.npy"),
                        0.0010000000474974513f)
                 .set_name(unit_name + "Branch_2/Conv2d_0c_3x3/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_2/Conv2d_0c_3x3/Relu");

            // Concatenate
            i_l << ConcatLayer(std::move(i_la), std::move(i_lb), std::move(i_lc)).set_name(unit_name + "concat")
                << ConvolutionLayer(1U, 1U, 320U,
                       get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout),
                       get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout),
                       PadStrideInfo(1, 1, 0, 0))
                .set_name(unit_name + "Conv2d_1x1/convolution")
                << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, 0.17f, 0.f)).set_name(unit_name + "mul");

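            // The LINEAR activation above scales the residual branch by 0.17; the element-wise
            // add below merges it with the untouched identity sub-stream (i_r), mirroring the
            // residual scaling described in the Inception-ResNet paper.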
            graph << EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).set_name(unit_name + "add")
                  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Relu");
        }
    }

    void block17_repeat(const std::string &data_path, DataLayout weights_layout, unsigned int num_blocks)
    {
        for(unsigned int i = 0; i < num_blocks; ++i)
        {
            std::stringstream unit_path_ss;
            unit_path_ss << "Repeat_1_block17_" << (i + 1) << "_";
            std::stringstream unit_name_ss;
            unit_name_ss << "Repeat_1/block17_" << (i + 1) << "/";

            std::string unit_path = unit_path_ss.str();
            std::string unit_name = unit_name_ss.str();

            // Create left and right substreams
            SubStream i_l(graph);
            SubStream i_r(graph);

            // Branch 0
            SubStream i_la(i_l);
            i_la << ConvolutionLayer(1U, 1U, 192U,
                        get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_weights.npy", weights_layout),
                        std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                        PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"),
                        get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
                        get_random_accessor(1.f, 1.f),
                        get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_beta.npy"),
                        0.0010000000474974513f)
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_0/Conv2d_1x1/Relu");

            // Branch 1
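            // The 1x7 followed by 7x1 convolutions below factorise a 7x7 kernel into two
            // cheaper asymmetric convolutions, as in the Inception family of models.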
            SubStream i_lb(i_l);
            i_lb << ConvolutionLayer(1U, 1U, 128U,
                        get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                        std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                        PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                        get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                        get_random_accessor(1.f, 1.f),
                        get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                        0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0a_1x1/Relu")
                 << ConvolutionLayer(7U, 1U, 160U,
                        get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_weights.npy", weights_layout),
                        std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                        PadStrideInfo(1, 1, 3, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0b_1x7/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
                        get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
                        get_random_accessor(1.f, 1.f),
                        get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"),
                        0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0b_1x7/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0b_1x7/Relu")
                 << ConvolutionLayer(1U, 7U, 192U,
                        get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_weights.npy", weights_layout),
                        std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                        PadStrideInfo(1, 1, 0, 3))
                 .set_name(unit_name + "Branch_1/Conv2d_0c_7x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
                        get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
                        get_random_accessor(1.f, 1.f),
                        get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"),
                        0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0c_7x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0c_7x1/Relu");

            // Concatenate
            i_l << ConcatLayer(std::move(i_la), std::move(i_lb)).set_name(unit_name + "concat")
                << ConvolutionLayer(1U, 1U, 1088U,
                       get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout),
                       get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout),
                       PadStrideInfo(1, 1, 0, 0))
                .set_name(unit_name + "Conv2d_1x1/convolution")
                << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, 0.10f, 0.f)).set_name(unit_name + "mul");

            graph << EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).set_name(unit_name + "add")
                  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Relu");
        }
    }

    void block8_repeat(const std::string &data_path, DataLayout weights_layout, unsigned int num_blocks, float scale, bool has_activation)
    {
        for(unsigned int i = 0; i < num_blocks; ++i)
        {
            std::stringstream unit_path_ss;
            std::stringstream unit_name_ss;
            if(num_blocks != 1)
            {
                unit_path_ss << "Repeat_2_block8_" << (i + 1) << "_";
                unit_name_ss << "Repeat_2/block8_" << (i + 1) << "/";
            }
            else
            {
                unit_path_ss << "Block8_";
                unit_name_ss << "Block8/";
            }

            std::string unit_path = unit_path_ss.str();
            std::string unit_name = unit_name_ss.str();

            // Create left and right substreams
            SubStream i_l(graph);
            SubStream i_r(graph);

            // Branch 0
            SubStream i_la(i_l);
            i_la << ConvolutionLayer(1U, 1U, 192U,
                        get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_weights.npy", weights_layout),
                        std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                        PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"),
                        get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
                        get_random_accessor(1.f, 1.f),
                        get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_beta.npy"),
                        0.0010000000474974513f)
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_0/Conv2d_1x1/Relu");

            // Branch 1
            SubStream i_lb(i_l);
            i_lb << ConvolutionLayer(1U, 1U, 192U,
                        get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                        std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                        PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                        get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                        get_random_accessor(1.f, 1.f),
                        get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                        0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0a_1x1/Relu")
                 << ConvolutionLayer(3U, 1U, 224U,
                        get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_weights.npy", weights_layout),
                        std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                        PadStrideInfo(1, 1, 1, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0b_1x3/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_mean.npy"),
                        get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_variance.npy"),
                        get_random_accessor(1.f, 1.f),
                        get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_beta.npy"),
                        0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0b_1x3/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0b_1x3/Relu")
                 << ConvolutionLayer(1U, 3U, 256U,
                        get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_weights.npy", weights_layout),
                        std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                        PadStrideInfo(1, 1, 0, 1))
                 .set_name(unit_name + "Branch_1/Conv2d_0c_3x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_moving_mean.npy"),
                        get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_moving_variance.npy"),
                        get_random_accessor(1.f, 1.f),
                        get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_beta.npy"),
                        0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0c_3x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0c_3x1/Relu");

            // Concatenate
            i_l << ConcatLayer(std::move(i_la), std::move(i_lb)).set_name(unit_name + "concat")
                << ConvolutionLayer(1U, 1U, 2080U,
                       get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout),
                       get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout),
                       PadStrideInfo(1, 1, 0, 0))
                .set_name(unit_name + "Conv2d_1x1/convolution");

            // Scale result
            if(scale != 1.f)
            {
                i_l << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, scale, 0.f)).set_name(unit_name + "mul");
            }

            // Residual add
            graph << EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).set_name(unit_name + "add");

            // Apply activation if needed
            if(has_activation)
            {
                graph << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Relu");
            }
        }
    }
};

/** Main program for Inception ResNet V2
 *
 * Model is based on:
 *      https://arxiv.org/abs/1602.07261
 *      "Inception-v4, Inception-ResNet and the Impact of Residual Connections on Learning"
 *      Christian Szegedy, Sergey Ioffe, Vincent Vanhoucke, Alex Alemi
 *
 * Provenance: download.tensorflow.org/models/inception_resnet_v2_2016_08_30.tar.gz
 *
 * @note To list all the possible arguments execute the binary appended with the --help option
 *
 * @param[in] argc Number of arguments
 * @param[in] argv Arguments
 */
int main(int argc, char **argv)
{
    return arm_compute::utils::run_example<InceptionResNetV2Example>(argc, argv);
}