1*c217d954SCole Faust /*
2*c217d954SCole Faust * Copyright (c) 2018-2021 Arm Limited.
3*c217d954SCole Faust *
4*c217d954SCole Faust * SPDX-License-Identifier: MIT
5*c217d954SCole Faust *
6*c217d954SCole Faust * Permission is hereby granted, free of charge, to any person obtaining a copy
7*c217d954SCole Faust * of this software and associated documentation files (the "Software"), to
8*c217d954SCole Faust * deal in the Software without restriction, including without limitation the
9*c217d954SCole Faust * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
10*c217d954SCole Faust * sell copies of the Software, and to permit persons to whom the Software is
11*c217d954SCole Faust * furnished to do so, subject to the following conditions:
12*c217d954SCole Faust *
13*c217d954SCole Faust * The above copyright notice and this permission notice shall be included in all
14*c217d954SCole Faust * copies or substantial portions of the Software.
15*c217d954SCole Faust *
16*c217d954SCole Faust * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17*c217d954SCole Faust * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18*c217d954SCole Faust * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
19*c217d954SCole Faust * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20*c217d954SCole Faust * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21*c217d954SCole Faust * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
22*c217d954SCole Faust * SOFTWARE.
23*c217d954SCole Faust */
24*c217d954SCole Faust #ifndef ARM_COMPUTE_GRAPH_BACKENDS_DETAIL_FUNCTION_HELPERS_H
25*c217d954SCole Faust #define ARM_COMPUTE_GRAPH_BACKENDS_DETAIL_FUNCTION_HELPERS_H
26*c217d954SCole Faust
27*c217d954SCole Faust #include "arm_compute/core/experimental/IPostOp.h"
28*c217d954SCole Faust #include "arm_compute/core/experimental/PostOps.h"
29*c217d954SCole Faust #include "arm_compute/graph/Logger.h"
30*c217d954SCole Faust #include "arm_compute/graph/Tensor.h"
31*c217d954SCole Faust #include "arm_compute/graph/TypePrinter.h"
32*c217d954SCole Faust #include "arm_compute/graph/Types.h"
33*c217d954SCole Faust #include "arm_compute/graph/Utils.h"
34*c217d954SCole Faust #include "arm_compute/graph/backends/FusedConvolutionBatchNormalizationFunction.h"
35*c217d954SCole Faust #include "arm_compute/graph/backends/FusedConvolutionBatchNormalizationWithPostOpsFunction.h"
36*c217d954SCole Faust #include "arm_compute/graph/backends/FusedDepthwiseConvolutionBatchNormalizationFunction.h"
37*c217d954SCole Faust #include "arm_compute/graph/backends/Utils.h"
38*c217d954SCole Faust #include "arm_compute/graph/nodes/Nodes.h"
39*c217d954SCole Faust
40*c217d954SCole Faust #include "arm_compute/core/Error.h"
41*c217d954SCole Faust #include "arm_compute/core/Helpers.h"
42*c217d954SCole Faust #include "arm_compute/core/ITensorInfo.h"
43*c217d954SCole Faust #include "support/Cast.h"
44*c217d954SCole Faust
45*c217d954SCole Faust namespace arm_compute
46*c217d954SCole Faust {
47*c217d954SCole Faust namespace graph
48*c217d954SCole Faust {
49*c217d954SCole Faust namespace backends
50*c217d954SCole Faust {
51*c217d954SCole Faust namespace detail
52*c217d954SCole Faust {
53*c217d954SCole Faust /** Returns backing tensor of a given tensor
54*c217d954SCole Faust *
55*c217d954SCole Faust * @tparam TargetInfo Target information
56*c217d954SCole Faust *
57*c217d954SCole Faust * @param[in] tensor Tensor to extract the backing tensor from
58*c217d954SCole Faust *
59*c217d954SCole Faust * @return Backing tensor if present else nullptr
60*c217d954SCole Faust */
61*c217d954SCole Faust template <typename TargetInfo>
get_backing_tensor(arm_compute::graph::Tensor * tensor)62*c217d954SCole Faust typename TargetInfo::TensorType *get_backing_tensor(arm_compute::graph::Tensor *tensor)
63*c217d954SCole Faust {
64*c217d954SCole Faust typename TargetInfo::TensorType *backing_tensor = nullptr;
65*c217d954SCole Faust if(tensor != nullptr)
66*c217d954SCole Faust {
67*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(tensor->desc().target != TargetInfo::TargetType);
68*c217d954SCole Faust // Get backing tensor handle
69*c217d954SCole Faust ITensorHandle *tensor_handle = tensor->handle();
70*c217d954SCole Faust // Get backing tensor
71*c217d954SCole Faust backing_tensor = (tensor_handle != nullptr) ? arm_compute::utils::cast::polymorphic_cast<typename TargetInfo::TensorType *>(&tensor_handle->tensor()) : nullptr;
72*c217d954SCole Faust }
73*c217d954SCole Faust
74*c217d954SCole Faust return backing_tensor;
75*c217d954SCole Faust }
76*c217d954SCole Faust
77*c217d954SCole Faust template <typename TargetInfo>
validate_node(const INode & node,size_t num_expected_inputs,size_t num_expected_outputs)78*c217d954SCole Faust void validate_node(const INode &node, size_t num_expected_inputs, size_t num_expected_outputs)
79*c217d954SCole Faust {
80*c217d954SCole Faust ARM_COMPUTE_LOG_GRAPH_VERBOSE("Creating " << node.type()
81*c217d954SCole Faust << " Target: " << TargetInfo::TargetType
82*c217d954SCole Faust << " ID: " << node.id()
83*c217d954SCole Faust << node.name()
84*c217d954SCole Faust << std::endl);
85*c217d954SCole Faust
86*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(TargetInfo::TargetType != node.assigned_target());
87*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(node.num_inputs() != num_expected_inputs);
88*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(node.num_outputs() != num_expected_outputs);
89*c217d954SCole Faust ARM_COMPUTE_UNUSED(node, num_expected_inputs, num_expected_outputs);
90*c217d954SCole Faust }
91*c217d954SCole Faust
92*c217d954SCole Faust /** Creates a backend activation layer function
93*c217d954SCole Faust *
94*c217d954SCole Faust * @tparam ActivationLayerFunction Backend activation function
95*c217d954SCole Faust * @tparam TargetInfo Target-specific information
96*c217d954SCole Faust *
97*c217d954SCole Faust * @param[in] node Node to create the backend function for
98*c217d954SCole Faust *
99*c217d954SCole Faust * @return Backend activation layer function
100*c217d954SCole Faust */
101*c217d954SCole Faust template <typename ActivationLayerFunction, typename TargetInfo>
create_activation_layer(ActivationLayerNode & node)102*c217d954SCole Faust std::unique_ptr<IFunction> create_activation_layer(ActivationLayerNode &node)
103*c217d954SCole Faust {
104*c217d954SCole Faust validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
105*c217d954SCole Faust
106*c217d954SCole Faust // Extract IO and info
107*c217d954SCole Faust typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
108*c217d954SCole Faust typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
109*c217d954SCole Faust const ActivationLayerInfo act_info = node.activation_info();
110*c217d954SCole Faust
111*c217d954SCole Faust // Create function
112*c217d954SCole Faust auto func = std::make_unique<ActivationLayerFunction>();
113*c217d954SCole Faust func->configure(input, output, act_info);
114*c217d954SCole Faust
115*c217d954SCole Faust ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
116*c217d954SCole Faust << node.name()
117*c217d954SCole Faust << " Type: " << node.type()
118*c217d954SCole Faust << " Target: " << TargetInfo::TargetType
119*c217d954SCole Faust << " Data Type: " << input->info()->data_type()
120*c217d954SCole Faust << " Shape: " << input->info()->tensor_shape()
121*c217d954SCole Faust << " Activation function: " << act_info.activation()
122*c217d954SCole Faust << " a: " << act_info.a()
123*c217d954SCole Faust << " b: " << act_info.b()
124*c217d954SCole Faust << " InPlace : " << is_in_place_operation(input, output)
125*c217d954SCole Faust << std::endl);
126*c217d954SCole Faust
127*c217d954SCole Faust return std::move(func);
128*c217d954SCole Faust }
129*c217d954SCole Faust
130*c217d954SCole Faust /** Creates a backend argminmax layer function
131*c217d954SCole Faust *
132*c217d954SCole Faust * @tparam ArgMinMaxLayerFunction Backend activation function
133*c217d954SCole Faust * @tparam TargetInfo Target-specific information
134*c217d954SCole Faust *
135*c217d954SCole Faust * @param[in] node Node to create the backend function for
136*c217d954SCole Faust *
137*c217d954SCole Faust * @return Backend argminmax layer function
138*c217d954SCole Faust */
139*c217d954SCole Faust template <typename ArgMinMaxLayerFunction, typename TargetInfo>
create_arg_min_max_layer(ArgMinMaxLayerNode & node)140*c217d954SCole Faust std::unique_ptr<IFunction> create_arg_min_max_layer(ArgMinMaxLayerNode &node)
141*c217d954SCole Faust {
142*c217d954SCole Faust validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
143*c217d954SCole Faust
144*c217d954SCole Faust // Extract IO and info
145*c217d954SCole Faust typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
146*c217d954SCole Faust typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
147*c217d954SCole Faust const ReductionOperation op = node.reduction_operation();
148*c217d954SCole Faust unsigned int axis = node.axis();
149*c217d954SCole Faust
150*c217d954SCole Faust // Create function
151*c217d954SCole Faust auto func = std::make_unique<ArgMinMaxLayerFunction>();
152*c217d954SCole Faust func->configure(input, axis, output, op);
153*c217d954SCole Faust
154*c217d954SCole Faust ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
155*c217d954SCole Faust << node.name()
156*c217d954SCole Faust << " Type: " << node.type()
157*c217d954SCole Faust << " Target: " << TargetInfo::TargetType
158*c217d954SCole Faust << " Data Type: " << input->info()->data_type()
159*c217d954SCole Faust << " Shape: " << input->info()->tensor_shape()
160*c217d954SCole Faust << " Reduction Operation: " << op
161*c217d954SCole Faust << " axis: " << axis
162*c217d954SCole Faust << std::endl);
163*c217d954SCole Faust
164*c217d954SCole Faust return std::move(func);
165*c217d954SCole Faust }
166*c217d954SCole Faust
167*c217d954SCole Faust /** Create a backend batch normalization layer function
168*c217d954SCole Faust *
169*c217d954SCole Faust * @tparam BatchNormalizationLayerFunction Backend batch normalization function
170*c217d954SCole Faust * @tparam TargetInfo Target-specific information
171*c217d954SCole Faust *
172*c217d954SCole Faust * @param[in] node Node to create the backend function for
173*c217d954SCole Faust *
174*c217d954SCole Faust * @return Backend batch normalization layer function
175*c217d954SCole Faust */
176*c217d954SCole Faust template <typename BatchNormalizationLayerFunction, typename TargetInfo>
create_batch_normalization_layer(BatchNormalizationLayerNode & node)177*c217d954SCole Faust std::unique_ptr<IFunction> create_batch_normalization_layer(BatchNormalizationLayerNode &node)
178*c217d954SCole Faust {
179*c217d954SCole Faust validate_node<TargetInfo>(node, 5 /* expected inputs */, 1 /* expected outputs */);
180*c217d954SCole Faust
181*c217d954SCole Faust // Extract IO and info
182*c217d954SCole Faust typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
183*c217d954SCole Faust typename TargetInfo::TensorType *mean = get_backing_tensor<TargetInfo>(node.input(1));
184*c217d954SCole Faust typename TargetInfo::TensorType *var = get_backing_tensor<TargetInfo>(node.input(2));
185*c217d954SCole Faust typename TargetInfo::TensorType *beta = get_backing_tensor<TargetInfo>(node.input(3));
186*c217d954SCole Faust typename TargetInfo::TensorType *gamma = get_backing_tensor<TargetInfo>(node.input(4));
187*c217d954SCole Faust
188*c217d954SCole Faust typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
189*c217d954SCole Faust const float epsilon = node.epsilon();
190*c217d954SCole Faust const ActivationLayerInfo fused_act = node.fused_activation();
191*c217d954SCole Faust
192*c217d954SCole Faust // Create and configure function
193*c217d954SCole Faust auto func = std::make_unique<BatchNormalizationLayerFunction>();
194*c217d954SCole Faust func->configure(input, output, mean, var, beta, gamma, epsilon, fused_act);
195*c217d954SCole Faust
196*c217d954SCole Faust // Log info
197*c217d954SCole Faust ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
198*c217d954SCole Faust << node.name()
199*c217d954SCole Faust << " Type: " << node.type()
200*c217d954SCole Faust << " Target: " << TargetInfo::TargetType
201*c217d954SCole Faust << " Data Type: " << input->info()->data_type()
202*c217d954SCole Faust << " Shape: " << input->info()->tensor_shape()
203*c217d954SCole Faust << " Epsilon: " << epsilon << " "
204*c217d954SCole Faust << (fused_act.enabled() ? to_string(fused_act.activation()) : "")
205*c217d954SCole Faust << " InPlace: " << is_in_place_operation(input, output)
206*c217d954SCole Faust << std::endl);
207*c217d954SCole Faust
208*c217d954SCole Faust return std::move(func);
209*c217d954SCole Faust }
210*c217d954SCole Faust
/** Create a backend fused convolution batch normalization layer function
 *
 * @tparam FusedLayerTypes Fused layer types
 * @tparam TargetInfo      Target-specific information
 *
 * @param[in] node Node to create the backend function for
 * @param[in] ctx  Graph context
 *
 * @return Backend fused convolution batch normalization layer function
 */
template <typename FusedLayerTypes, typename TargetInfo>
std::unique_ptr<IFunction> create_fused_convolution_batch_normalization_layer(FusedConvolutionBatchNormalizationNode &node, GraphContext &ctx)
{
    validate_node<TargetInfo>(node, 7 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input   = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
    typename TargetInfo::TensorType *biases  = get_backing_tensor<TargetInfo>(node.input(2));
    typename TargetInfo::TensorType *mean    = get_backing_tensor<TargetInfo>(node.input(3));
    typename TargetInfo::TensorType *var     = get_backing_tensor<TargetInfo>(node.input(4));
    typename TargetInfo::TensorType *beta    = get_backing_tensor<TargetInfo>(node.input(5));
    typename TargetInfo::TensorType *gamma   = get_backing_tensor<TargetInfo>(node.input(6));

    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));

    const PadStrideInfo       conv_info  = node.convolution_info();
    const unsigned int        num_groups = node.num_groups();
    const bool                fast_math  = node.fast_math_hint() == FastMathHint::Enabled;
    const ActivationLayerInfo fused_act  = node.fused_activation();
    const float               epsilon    = node.epsilon();

    // Create and configure function (we assume that functions have been validated before creation)
    std::shared_ptr<IMemoryManager> mm = get_memory_manager(ctx, TargetInfo::TargetType);
    std::unique_ptr<IFunction>      func;
    std::string                     func_name;

    using FType = FusedConvolutionBatchNormalizationFunction<TargetInfo, FusedLayerTypes>;

    // Create and configure function
    std::tie(func, func_name) = create_named_memory_managed_function<FType>(
        std::string("FusedConvolutionBatchNormalizationLayer"), mm, input, weights, biases, output, mean, var, beta, gamma, epsilon, conv_info, num_groups, fast_math, fused_act);

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Weights shape: " << weights->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << (fused_act.enabled() ? " " + to_string(fused_act.activation()) : "")
                               << std::endl);
    return std::move(func);
}
267*c217d954SCole Faust
268*c217d954SCole Faust /** Create a backend fused depthwise convolution batch normalization layer function
269*c217d954SCole Faust *
270*c217d954SCole Faust * @tparam FusedLayerTypes Fused layer types
271*c217d954SCole Faust * @tparam TargetInfo Target-specific information
272*c217d954SCole Faust *
273*c217d954SCole Faust * @param[in] node Node to create the backend function for
274*c217d954SCole Faust * @param[in] ctx Graph context
275*c217d954SCole Faust *
276*c217d954SCole Faust * @return Backend fused depthwise convolution batch normalization layer function
277*c217d954SCole Faust */
278*c217d954SCole Faust template <typename FusedLayerTypes, typename TargetInfo>
create_fused_depthwise_convolution_batch_normalization_layer(FusedDepthwiseConvolutionBatchNormalizationNode & node,GraphContext & ctx)279*c217d954SCole Faust std::unique_ptr<IFunction> create_fused_depthwise_convolution_batch_normalization_layer(FusedDepthwiseConvolutionBatchNormalizationNode &node, GraphContext &ctx)
280*c217d954SCole Faust {
281*c217d954SCole Faust validate_node<TargetInfo>(node, 7 /* expected inputs */, 1 /* expected outputs */);
282*c217d954SCole Faust
283*c217d954SCole Faust // Extract IO and info
284*c217d954SCole Faust typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
285*c217d954SCole Faust typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
286*c217d954SCole Faust typename TargetInfo::TensorType *biases = get_backing_tensor<TargetInfo>(node.input(2));
287*c217d954SCole Faust typename TargetInfo::TensorType *mean = get_backing_tensor<TargetInfo>(node.input(3));
288*c217d954SCole Faust typename TargetInfo::TensorType *var = get_backing_tensor<TargetInfo>(node.input(4));
289*c217d954SCole Faust typename TargetInfo::TensorType *beta = get_backing_tensor<TargetInfo>(node.input(5));
290*c217d954SCole Faust typename TargetInfo::TensorType *gamma = get_backing_tensor<TargetInfo>(node.input(6));
291*c217d954SCole Faust
292*c217d954SCole Faust typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
293*c217d954SCole Faust
294*c217d954SCole Faust const PadStrideInfo conv_info = node.convolution_info();
295*c217d954SCole Faust const unsigned int depth_multiplier = node.depth_multiplier();
296*c217d954SCole Faust const ActivationLayerInfo fused_act = node.fused_activation();
297*c217d954SCole Faust const float epsilon = node.epsilon();
298*c217d954SCole Faust
299*c217d954SCole Faust // Create and configure function (we assume that functions have been validated before creation)
300*c217d954SCole Faust std::shared_ptr<IMemoryManager> mm = get_memory_manager(ctx, TargetInfo::TargetType);
301*c217d954SCole Faust std::unique_ptr<IFunction> func;
302*c217d954SCole Faust std::string func_name;
303*c217d954SCole Faust
304*c217d954SCole Faust using FType = FusedDepthwiseConvolutionBatchNormalizationFunction<TargetInfo, FusedLayerTypes>;
305*c217d954SCole Faust
306*c217d954SCole Faust // Create and configure function
307*c217d954SCole Faust std::tie(func, func_name) = create_named_memory_managed_function<FType>(
308*c217d954SCole Faust std::string("FusedDepthwiseConvolutionBatchNormalizationLayer"), mm, input, weights, biases, output, mean, var, beta, gamma, epsilon, conv_info, depth_multiplier, fused_act);
309*c217d954SCole Faust
310*c217d954SCole Faust // Log info
311*c217d954SCole Faust ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
312*c217d954SCole Faust << node.name()
313*c217d954SCole Faust << " Type: " << node.type()
314*c217d954SCole Faust << " Target: " << TargetInfo::TargetType
315*c217d954SCole Faust << " Data Type: " << input->info()->data_type()
316*c217d954SCole Faust << " Input shape: " << input->info()->tensor_shape()
317*c217d954SCole Faust << " Weights shape: " << weights->info()->tensor_shape()
318*c217d954SCole Faust << " Output shape: " << output->info()->tensor_shape()
319*c217d954SCole Faust << (fused_act.enabled() ? " " + to_string(fused_act.activation()) : "")
320*c217d954SCole Faust << std::endl);
321*c217d954SCole Faust return std::move(func);
322*c217d954SCole Faust }
323*c217d954SCole Faust
324*c217d954SCole Faust /** Create a backend bounding box transform layer function
325*c217d954SCole Faust *
326*c217d954SCole Faust * @tparam BoundingBoxTransformLayerFunction Backend bounding box transform function
327*c217d954SCole Faust * @tparam TargetInfo Target-specific information
328*c217d954SCole Faust *
329*c217d954SCole Faust * @param[in] node Node to create the backend function for
330*c217d954SCole Faust *
331*c217d954SCole Faust * @return Backend bounding box transform layer function
332*c217d954SCole Faust */
333*c217d954SCole Faust template <typename BoundingBoxTransformLayerFunction, typename TargetInfo>
create_bounding_box_transform_layer(BoundingBoxTransformLayerNode & node)334*c217d954SCole Faust std::unique_ptr<IFunction> create_bounding_box_transform_layer(BoundingBoxTransformLayerNode &node)
335*c217d954SCole Faust {
336*c217d954SCole Faust validate_node<TargetInfo>(node, 2 /* expected inputs */, 1 /* expected outputs */);
337*c217d954SCole Faust
338*c217d954SCole Faust // Extract IO and info
339*c217d954SCole Faust typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
340*c217d954SCole Faust typename TargetInfo::TensorType *deltas = get_backing_tensor<TargetInfo>(node.input(1));
341*c217d954SCole Faust typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
342*c217d954SCole Faust const BoundingBoxTransformInfo bbox_info = node.info();
343*c217d954SCole Faust
344*c217d954SCole Faust // Create and configure function
345*c217d954SCole Faust auto func = std::make_unique<BoundingBoxTransformLayerFunction>();
346*c217d954SCole Faust func->configure(input, output, deltas, bbox_info);
347*c217d954SCole Faust
348*c217d954SCole Faust // Log info
349*c217d954SCole Faust ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
350*c217d954SCole Faust << node.name()
351*c217d954SCole Faust << " Type: " << node.type()
352*c217d954SCole Faust << " Target: " << TargetInfo::TargetType
353*c217d954SCole Faust << " Data Type: " << input->info()->data_type()
354*c217d954SCole Faust << " Shape: " << input->info()->tensor_shape()
355*c217d954SCole Faust << " BoundingBox Info img W: " << bbox_info.img_width() << " "
356*c217d954SCole Faust << " BoundingBox Info img H: " << bbox_info.img_height() << " "
357*c217d954SCole Faust << std::endl);
358*c217d954SCole Faust
359*c217d954SCole Faust return std::move(func);
360*c217d954SCole Faust }
361*c217d954SCole Faust
362*c217d954SCole Faust /** Create a backend channel shuffle layer function
363*c217d954SCole Faust *
364*c217d954SCole Faust * @tparam ChannelShuffleLayerFunction Backend channel shuffle function
365*c217d954SCole Faust * @tparam TargetInfo Target-specific information
366*c217d954SCole Faust *
367*c217d954SCole Faust * @param[in] node Node to create the backend function for
368*c217d954SCole Faust *
369*c217d954SCole Faust * @return Backend channel shuffle layer function
370*c217d954SCole Faust */
371*c217d954SCole Faust template <typename ChannelShuffleLayerFunction, typename TargetInfo>
create_channel_shuffle_layer(ChannelShuffleLayerNode & node)372*c217d954SCole Faust std::unique_ptr<IFunction> create_channel_shuffle_layer(ChannelShuffleLayerNode &node)
373*c217d954SCole Faust {
374*c217d954SCole Faust validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
375*c217d954SCole Faust
376*c217d954SCole Faust // Extract IO and info
377*c217d954SCole Faust typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
378*c217d954SCole Faust typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
379*c217d954SCole Faust const unsigned int num_groups = node.num_groups();
380*c217d954SCole Faust
381*c217d954SCole Faust // Create function
382*c217d954SCole Faust auto func = std::make_unique<ChannelShuffleLayerFunction>();
383*c217d954SCole Faust func->configure(input, output, num_groups);
384*c217d954SCole Faust
385*c217d954SCole Faust ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
386*c217d954SCole Faust << node.name()
387*c217d954SCole Faust << " Type: " << node.type()
388*c217d954SCole Faust << " Target: " << TargetInfo::TargetType
389*c217d954SCole Faust << " Data Type: " << input->info()->data_type()
390*c217d954SCole Faust << " Shape: " << input->info()->tensor_shape()
391*c217d954SCole Faust << " Num groups: " << num_groups
392*c217d954SCole Faust << std::endl);
393*c217d954SCole Faust
394*c217d954SCole Faust return std::move(func);
395*c217d954SCole Faust }
396*c217d954SCole Faust
397*c217d954SCole Faust /** Create a backend layer concatenate function
398*c217d954SCole Faust *
399*c217d954SCole Faust * @tparam ConcatenateLayerFunction Backend concatenate function
400*c217d954SCole Faust * @tparam TargetInfo Target-specific information
401*c217d954SCole Faust *
402*c217d954SCole Faust * @param[in] node Node to create the backend function for
403*c217d954SCole Faust *
404*c217d954SCole Faust * @return Backend concatenate layer function
405*c217d954SCole Faust */
406*c217d954SCole Faust template <typename ConcatenateLayerFunction, typename TargetInfo>
create_concatenate_layer(ConcatenateLayerNode & node)407*c217d954SCole Faust std::unique_ptr<arm_compute::IFunction> create_concatenate_layer(ConcatenateLayerNode &node)
408*c217d954SCole Faust {
409*c217d954SCole Faust ARM_COMPUTE_LOG_GRAPH_VERBOSE("Creating Concatenate node with ID : " << node.id() << " and Name: " << node.name() << std::endl);
410*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(node.num_outputs() != 1);
411*c217d954SCole Faust
412*c217d954SCole Faust // Return nullptr if depth concatenate is switched off
413*c217d954SCole Faust if(!node.is_enabled())
414*c217d954SCole Faust {
415*c217d954SCole Faust return nullptr;
416*c217d954SCole Faust }
417*c217d954SCole Faust
418*c217d954SCole Faust // Extract IO and info
419*c217d954SCole Faust std::vector<typename TargetInfo::SrcTensorType *> inputs;
420*c217d954SCole Faust for(unsigned int i = 0; i < node.num_inputs(); ++i)
421*c217d954SCole Faust {
422*c217d954SCole Faust inputs.push_back(get_backing_tensor<TargetInfo>(node.input(i)));
423*c217d954SCole Faust }
424*c217d954SCole Faust typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
425*c217d954SCole Faust const DataLayout data_layout = node.output(0) != nullptr ? node.output(0)->desc().layout : DataLayout::UNKNOWN;
426*c217d954SCole Faust const size_t concat_axis = get_dimension_idx(data_layout, node.concatenation_axis());
427*c217d954SCole Faust
428*c217d954SCole Faust // Create and configure function
429*c217d954SCole Faust auto func = std::make_unique<ConcatenateLayerFunction>();
430*c217d954SCole Faust func->configure(inputs, output, concat_axis);
431*c217d954SCole Faust
432*c217d954SCole Faust // Log info
433*c217d954SCole Faust const bool is_quantized = is_data_type_quantized_asymmetric(output->info()->data_type());
434*c217d954SCole Faust std::ostringstream qss;
435*c217d954SCole Faust if(is_quantized)
436*c217d954SCole Faust {
437*c217d954SCole Faust qss << " Output QuantInfo: " << output->info()->quantization_info();
438*c217d954SCole Faust }
439*c217d954SCole Faust ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
440*c217d954SCole Faust << node.name()
441*c217d954SCole Faust << " Type: " << node.type()
442*c217d954SCole Faust << " Target: " << TargetInfo::TargetType
443*c217d954SCole Faust << " Data Type: " << output->info()->data_type()
444*c217d954SCole Faust << " Shape: " << output->info()->tensor_shape()
445*c217d954SCole Faust << " Num Inputs: " << inputs.size()
446*c217d954SCole Faust << " Axis: " << concat_axis
447*c217d954SCole Faust << qss.str()
448*c217d954SCole Faust << std::endl);
449*c217d954SCole Faust
450*c217d954SCole Faust return std::move(func);
451*c217d954SCole Faust }
452*c217d954SCole Faust
453*c217d954SCole Faust /** Create a backend convolution layer function
454*c217d954SCole Faust *
455*c217d954SCole Faust * @tparam ConvolutionLayerFunctions Backend convolution functions
456*c217d954SCole Faust * @tparam TargetInfo Target-specific information
457*c217d954SCole Faust *
458*c217d954SCole Faust * @param[in] node Node to create the backend function for
459*c217d954SCole Faust * @param[in] ctx Graph context
460*c217d954SCole Faust *
461*c217d954SCole Faust * @return Backend convolution layer function
462*c217d954SCole Faust */
463*c217d954SCole Faust template <typename ConvolutionLayerFunctions, typename TargetInfo>
create_convolution_layer(ConvolutionLayerNode & node,GraphContext & ctx)464*c217d954SCole Faust std::unique_ptr<IFunction> create_convolution_layer(ConvolutionLayerNode &node, GraphContext &ctx)
465*c217d954SCole Faust {
466*c217d954SCole Faust validate_node<TargetInfo>(node, 3 /* expected inputs */, 1 /* expected outputs */);
467*c217d954SCole Faust
468*c217d954SCole Faust // Extract IO and info
469*c217d954SCole Faust typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
470*c217d954SCole Faust typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
471*c217d954SCole Faust typename TargetInfo::TensorType *biases = get_backing_tensor<TargetInfo>(node.input(2));
472*c217d954SCole Faust typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
473*c217d954SCole Faust
474*c217d954SCole Faust const bool is_quantized = is_data_type_quantized_asymmetric(input->info()->data_type());
475*c217d954SCole Faust
476*c217d954SCole Faust if(is_quantized)
477*c217d954SCole Faust {
478*c217d954SCole Faust biases->info()->set_data_type(DataType::S32);
479*c217d954SCole Faust }
480*c217d954SCole Faust
481*c217d954SCole Faust const PadStrideInfo conv_info = node.convolution_info();
482*c217d954SCole Faust const unsigned int num_groups = node.num_groups();
483*c217d954SCole Faust const ConvolutionMethod conv_algorithm = node.convolution_method();
484*c217d954SCole Faust const bool fast_math = node.fast_math_hint() == FastMathHint::Enabled;
485*c217d954SCole Faust const ActivationLayerInfo fused_act = node.fused_activation();
486*c217d954SCole Faust
487*c217d954SCole Faust // Create and configure function (we assume that functions have been validated before creation)
488*c217d954SCole Faust std::shared_ptr<IMemoryManager> mm = get_memory_manager(ctx, TargetInfo::TargetType);
489*c217d954SCole Faust std::unique_ptr<IFunction> func;
490*c217d954SCole Faust std::string func_name;
491*c217d954SCole Faust
492*c217d954SCole Faust if(conv_algorithm == ConvolutionMethod::Winograd)
493*c217d954SCole Faust {
494*c217d954SCole Faust ARM_COMPUTE_ERROR_ON_MSG(num_groups != 1, "WinogradConvolutionLayer does not support grouping!");
495*c217d954SCole Faust std::tie(func, func_name) = create_named_memory_managed_function<typename ConvolutionLayerFunctions::WinogradConvolutionLayer>(
496*c217d954SCole Faust std::string("WinogradConvolutionLayer"), mm,
497*c217d954SCole Faust input, weights, biases, output, conv_info, fused_act, fast_math);
498*c217d954SCole Faust }
499*c217d954SCole Faust else if(conv_algorithm == ConvolutionMethod::Direct)
500*c217d954SCole Faust {
501*c217d954SCole Faust ARM_COMPUTE_ERROR_ON_MSG(num_groups != 1, "DirectConvolutionLayer does not support grouping!");
502*c217d954SCole Faust std::tie(func, func_name) = create_named_function<typename ConvolutionLayerFunctions::DirectConvolutionLayer>(
503*c217d954SCole Faust std::string("DirectConvolutionLayer"),
504*c217d954SCole Faust input, weights, biases, output, conv_info, fused_act);
505*c217d954SCole Faust }
506*c217d954SCole Faust else if(conv_algorithm == ConvolutionMethod::GEMM)
507*c217d954SCole Faust {
508*c217d954SCole Faust std::tie(func, func_name) = create_named_memory_managed_function<typename ConvolutionLayerFunctions::GEMMConvolutionLayer>(
509*c217d954SCole Faust std::string("GEMMConvolutionLayer"), mm,
510*c217d954SCole Faust input, weights, biases, output, conv_info,
511*c217d954SCole Faust WeightsInfo(), Size2D(1U, 1U), fused_act, num_groups);
512*c217d954SCole Faust }
513*c217d954SCole Faust else
514*c217d954SCole Faust {
515*c217d954SCole Faust std::tie(func, func_name) = create_named_memory_managed_function<typename ConvolutionLayerFunctions::GenericConvolutionLayer>(
516*c217d954SCole Faust std::string("GenericConvolutionLayer"), mm,
517*c217d954SCole Faust input, weights, biases, output, conv_info,
518*c217d954SCole Faust WeightsInfo(), Size2D(1U, 1U), fused_act, fast_math, num_groups);
519*c217d954SCole Faust }
520*c217d954SCole Faust
521*c217d954SCole Faust // Log info
522*c217d954SCole Faust std::ostringstream qss;
523*c217d954SCole Faust if(is_quantized)
524*c217d954SCole Faust {
525*c217d954SCole Faust qss << " Input QuantInfo: " << input->info()->quantization_info()
526*c217d954SCole Faust << " Weights QuantInfo: " << weights->info()->quantization_info()
527*c217d954SCole Faust << " Output QuantInfo: " << output->info()->quantization_info();
528*c217d954SCole Faust }
529*c217d954SCole Faust ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
530*c217d954SCole Faust << node.name()
531*c217d954SCole Faust << " Type: " << func_name
532*c217d954SCole Faust << " Target: " << TargetInfo::TargetType
533*c217d954SCole Faust << " Data Type: " << input->info()->data_type()
534*c217d954SCole Faust << " Groups: " << num_groups
535*c217d954SCole Faust << " Input shape: " << input->info()->tensor_shape()
536*c217d954SCole Faust << " Weights shape: " << weights->info()->tensor_shape()
537*c217d954SCole Faust << " Output shape: " << output->info()->tensor_shape()
538*c217d954SCole Faust << qss.str()
539*c217d954SCole Faust << (fused_act.enabled() ? " " + to_string(fused_act.activation()) : "")
540*c217d954SCole Faust << std::endl);
541*c217d954SCole Faust return std::move(func);
542*c217d954SCole Faust }
543*c217d954SCole Faust
544*c217d954SCole Faust /** Create a backend convolution layer function with post operator
545*c217d954SCole Faust *
546*c217d954SCole Faust * @tparam ConvolutionLayerFunctions Backend convolution functions
547*c217d954SCole Faust * @tparam TargetInfo Target-specific information
548*c217d954SCole Faust *
549*c217d954SCole Faust * @param[in] node Node to create the backend function for
550*c217d954SCole Faust * @param[in] ctx Graph context
551*c217d954SCole Faust *
552*c217d954SCole Faust * @return Backend convolution layer function
553*c217d954SCole Faust */
554*c217d954SCole Faust template <typename ConvolutionLayerFunctions, typename TargetInfo>
create_fused_convolution_with_post_op(FusedConvolutionWithPostOpNode & node,GraphContext & ctx)555*c217d954SCole Faust std::unique_ptr<IFunction> create_fused_convolution_with_post_op(FusedConvolutionWithPostOpNode &node, GraphContext &ctx)
556*c217d954SCole Faust {
557*c217d954SCole Faust validate_node<TargetInfo>(node, 4 /* expected inputs */, 1 /* expected outputs */);
558*c217d954SCole Faust
559*c217d954SCole Faust // Extract IO and info
560*c217d954SCole Faust typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
561*c217d954SCole Faust typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
562*c217d954SCole Faust typename TargetInfo::TensorType *biases = get_backing_tensor<TargetInfo>(node.input(2));
563*c217d954SCole Faust typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
564*c217d954SCole Faust
565*c217d954SCole Faust const bool is_quantized = is_data_type_quantized_asymmetric(input->info()->data_type());
566*c217d954SCole Faust
567*c217d954SCole Faust if(is_quantized)
568*c217d954SCole Faust {
569*c217d954SCole Faust biases->info()->set_data_type(DataType::S32);
570*c217d954SCole Faust }
571*c217d954SCole Faust
572*c217d954SCole Faust const PadStrideInfo conv_info = node.convolution_info();
573*c217d954SCole Faust const unsigned int num_groups = node.num_groups();
574*c217d954SCole Faust const ActivationLayerInfo fused_act = node.fused_activation();
575*c217d954SCole Faust
576*c217d954SCole Faust experimental::PostOpList<typename TargetInfo::TensorType *> post_ops;
577*c217d954SCole Faust
578*c217d954SCole Faust auto &post_op_info_list = node.post_op_info_list();
579*c217d954SCole Faust for(const auto &post_op_info : post_op_info_list)
580*c217d954SCole Faust {
581*c217d954SCole Faust switch(post_op_info->type())
582*c217d954SCole Faust {
583*c217d954SCole Faust case PostOpType::Activation:
584*c217d954SCole Faust {
585*c217d954SCole Faust const auto act_info = utils::cast::polymorphic_downcast<const ConvPostOpInfoActivation *>(post_op_info.get());
586*c217d954SCole Faust post_ops.template push_back_op<experimental::PostOpAct<typename TargetInfo::TensorType *>>(act_info->_act);
587*c217d954SCole Faust break;
588*c217d954SCole Faust }
589*c217d954SCole Faust case PostOpType::Eltwise_Add:
590*c217d954SCole Faust {
591*c217d954SCole Faust typename TargetInfo::TensorType *add_input = get_backing_tensor<TargetInfo>(node.input(3));
592*c217d954SCole Faust const auto eltwise_info = utils::cast::polymorphic_downcast<const ConvPostOpInfoEltwiseAdd *>(post_op_info.get());
593*c217d954SCole Faust post_ops.template push_back_op<experimental::PostOpEltwiseAdd<typename TargetInfo::TensorType *>>(add_input, eltwise_info->_prev_op_dst_pos, eltwise_info->_policy);
594*c217d954SCole Faust break;
595*c217d954SCole Faust }
596*c217d954SCole Faust default:
597*c217d954SCole Faust {
598*c217d954SCole Faust ARM_COMPUTE_ERROR("Unsupported PostOpType");
599*c217d954SCole Faust }
600*c217d954SCole Faust }
601*c217d954SCole Faust }
602*c217d954SCole Faust
603*c217d954SCole Faust // Create and configure function (we assume that functions have been validated before creation)
604*c217d954SCole Faust std::shared_ptr<IMemoryManager> mm = get_memory_manager(ctx, TargetInfo::TargetType);
605*c217d954SCole Faust std::unique_ptr<IFunction> func;
606*c217d954SCole Faust std::string func_name;
607*c217d954SCole Faust
608*c217d954SCole Faust // Fuse convolution with post ops is only supported for conv1x1, which is only implemented as gemmconv2d
609*c217d954SCole Faust std::tie(func, func_name) = create_named_memory_managed_function<typename ConvolutionLayerFunctions::GEMMConvolutionLayer>(
610*c217d954SCole Faust std::string("GEMMConvolutionLayer"), mm,
611*c217d954SCole Faust input, weights, biases, output, conv_info,
612*c217d954SCole Faust WeightsInfo(), Size2D(1U, 1U), fused_act, num_groups, post_ops);
613*c217d954SCole Faust
614*c217d954SCole Faust // Log info
615*c217d954SCole Faust std::ostringstream qss;
616*c217d954SCole Faust if(is_quantized)
617*c217d954SCole Faust {
618*c217d954SCole Faust qss << " Input QuantInfo: " << input->info()->quantization_info()
619*c217d954SCole Faust << " Weights QuantInfo: " << weights->info()->quantization_info()
620*c217d954SCole Faust << " Output QuantInfo: " << output->info()->quantization_info();
621*c217d954SCole Faust }
622*c217d954SCole Faust ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
623*c217d954SCole Faust << node.name()
624*c217d954SCole Faust << " Type: " << func_name
625*c217d954SCole Faust << " Target: " << TargetInfo::TargetType
626*c217d954SCole Faust << " Data Type: " << input->info()->data_type()
627*c217d954SCole Faust << " Groups: " << num_groups
628*c217d954SCole Faust << " Input shape: " << input->info()->tensor_shape()
629*c217d954SCole Faust << " Weights shape: " << weights->info()->tensor_shape()
630*c217d954SCole Faust << " Output shape: " << output->info()->tensor_shape()
631*c217d954SCole Faust << qss.str()
632*c217d954SCole Faust << (fused_act.enabled() ? " " + to_string(fused_act.activation()) : "")
633*c217d954SCole Faust << " Post ops" << post_ops
634*c217d954SCole Faust << std::endl);
635*c217d954SCole Faust return std::move(func);
636*c217d954SCole Faust }
637*c217d954SCole Faust
638*c217d954SCole Faust /** Create a backend convolution batch normalization layer function with post operator
639*c217d954SCole Faust *
640*c217d954SCole Faust * @tparam FusedLayerTypes Backend convolution functions
641*c217d954SCole Faust * @tparam TargetInfo Target-specific information
642*c217d954SCole Faust *
643*c217d954SCole Faust * @param[in] node Node to create the backend function for
644*c217d954SCole Faust * @param[in] ctx Graph context
645*c217d954SCole Faust *
646*c217d954SCole Faust * @return Backend fused convolution with batch normalization layer function
647*c217d954SCole Faust */
648*c217d954SCole Faust template <typename FusedLayerTypes, typename TargetInfo>
create_fused_convolution_batch_normalization_with_post_op(FusedConvolutionBatchNormalizationWithPostOpsNode & node,GraphContext & ctx)649*c217d954SCole Faust std::unique_ptr<IFunction> create_fused_convolution_batch_normalization_with_post_op(FusedConvolutionBatchNormalizationWithPostOpsNode &node, GraphContext &ctx)
650*c217d954SCole Faust {
651*c217d954SCole Faust validate_node<TargetInfo>(node, 8 /* expected inputs */, 1 /* expected outputs */);
652*c217d954SCole Faust
653*c217d954SCole Faust // Extract IO and info
654*c217d954SCole Faust typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
655*c217d954SCole Faust typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
656*c217d954SCole Faust typename TargetInfo::TensorType *biases = get_backing_tensor<TargetInfo>(node.input(2));
657*c217d954SCole Faust typename TargetInfo::TensorType *mean = get_backing_tensor<TargetInfo>(node.input(3));
658*c217d954SCole Faust typename TargetInfo::TensorType *var = get_backing_tensor<TargetInfo>(node.input(4));
659*c217d954SCole Faust typename TargetInfo::TensorType *beta = get_backing_tensor<TargetInfo>(node.input(5));
660*c217d954SCole Faust typename TargetInfo::TensorType *gamma = get_backing_tensor<TargetInfo>(node.input(6));
661*c217d954SCole Faust
662*c217d954SCole Faust typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
663*c217d954SCole Faust
664*c217d954SCole Faust const PadStrideInfo conv_info = node.convolution_info();
665*c217d954SCole Faust const unsigned int num_groups = node.num_groups();
666*c217d954SCole Faust const bool fast_math = node.fast_math_hint() == FastMathHint::Enabled;
667*c217d954SCole Faust const float epsilon = node.epsilon();
668*c217d954SCole Faust
669*c217d954SCole Faust experimental::PostOpList<typename TargetInfo::TensorType *> post_ops;
670*c217d954SCole Faust
671*c217d954SCole Faust auto &post_op_info_list = node.post_op_info_list();
672*c217d954SCole Faust for(const auto &post_op_info : post_op_info_list)
673*c217d954SCole Faust {
674*c217d954SCole Faust switch(post_op_info->type())
675*c217d954SCole Faust {
676*c217d954SCole Faust case PostOpType::Activation:
677*c217d954SCole Faust {
678*c217d954SCole Faust const auto act_info = utils::cast::polymorphic_downcast<const ConvPostOpInfoActivation *>(post_op_info.get());
679*c217d954SCole Faust post_ops.template push_back_op<experimental::PostOpAct<typename TargetInfo::TensorType *>>(act_info->_act);
680*c217d954SCole Faust break;
681*c217d954SCole Faust }
682*c217d954SCole Faust case PostOpType::Eltwise_Add:
683*c217d954SCole Faust {
684*c217d954SCole Faust typename TargetInfo::TensorType *add_input = get_backing_tensor<TargetInfo>(node.input(3));
685*c217d954SCole Faust const auto eltwise_info = utils::cast::polymorphic_downcast<const ConvPostOpInfoEltwiseAdd *>(post_op_info.get());
686*c217d954SCole Faust post_ops.template push_back_op<experimental::PostOpEltwiseAdd<typename TargetInfo::TensorType *>>(add_input, eltwise_info->_prev_op_dst_pos, eltwise_info->_policy);
687*c217d954SCole Faust break;
688*c217d954SCole Faust }
689*c217d954SCole Faust default:
690*c217d954SCole Faust {
691*c217d954SCole Faust ARM_COMPUTE_ERROR("Unsupported PostOpType");
692*c217d954SCole Faust }
693*c217d954SCole Faust }
694*c217d954SCole Faust }
695*c217d954SCole Faust
696*c217d954SCole Faust // Create and configure function (we assume that functions have been validated before creation)
697*c217d954SCole Faust std::shared_ptr<IMemoryManager> mm = get_memory_manager(ctx, TargetInfo::TargetType);
698*c217d954SCole Faust std::unique_ptr<IFunction> func;
699*c217d954SCole Faust std::string func_name;
700*c217d954SCole Faust
701*c217d954SCole Faust using FType = FusedConvolutionBatchNormalizationWithPostOpsFunction<TargetInfo, FusedLayerTypes>;
702*c217d954SCole Faust
703*c217d954SCole Faust // Create and configure function
704*c217d954SCole Faust std::tie(func, func_name) = create_named_memory_managed_function<FType>(
705*c217d954SCole Faust std::string("FusedConvolutionBatchNormalizationLayerWithPostOpsLayer"), mm, input, weights, biases, output, mean, var, beta, gamma, epsilon, conv_info, num_groups, fast_math, post_ops);
706*c217d954SCole Faust
707*c217d954SCole Faust // Log info
708*c217d954SCole Faust ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
709*c217d954SCole Faust << node.name()
710*c217d954SCole Faust << " Type: " << node.type()
711*c217d954SCole Faust << " Target: " << TargetInfo::TargetType
712*c217d954SCole Faust << " Data Type: " << input->info()->data_type()
713*c217d954SCole Faust << " Input shape: " << input->info()->tensor_shape()
714*c217d954SCole Faust << " Weights shape: " << weights->info()->tensor_shape()
715*c217d954SCole Faust << " Output shape: " << output->info()->tensor_shape()
716*c217d954SCole Faust << " Post Ops:" << post_ops
717*c217d954SCole Faust << std::endl);
718*c217d954SCole Faust return std::move(func);
719*c217d954SCole Faust }
720*c217d954SCole Faust
721*c217d954SCole Faust /** Create a backend deconvolution layer function
722*c217d954SCole Faust *
723*c217d954SCole Faust * @tparam DeconvolutionLayerFunction Backend deconvolution function
724*c217d954SCole Faust * @tparam TargetInfo Target-specific information
725*c217d954SCole Faust *
726*c217d954SCole Faust * @param[in] node Node to create the backend function for
727*c217d954SCole Faust * @param[in] ctx Graph context
728*c217d954SCole Faust *
729*c217d954SCole Faust * @return Backend deconvolution layer function
730*c217d954SCole Faust */
731*c217d954SCole Faust template <typename DeconvolutionLayerFunction, typename TargetInfo>
create_deconvolution_layer(DeconvolutionLayerNode & node,GraphContext & ctx)732*c217d954SCole Faust std::unique_ptr<IFunction> create_deconvolution_layer(DeconvolutionLayerNode &node, GraphContext &ctx)
733*c217d954SCole Faust {
734*c217d954SCole Faust validate_node<TargetInfo>(node, 3 /* expected inputs */, 1 /* expected outputs */);
735*c217d954SCole Faust
736*c217d954SCole Faust // Extract IO and info
737*c217d954SCole Faust typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
738*c217d954SCole Faust typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
739*c217d954SCole Faust typename TargetInfo::TensorType *biases = get_backing_tensor<TargetInfo>(node.input(2));
740*c217d954SCole Faust typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
741*c217d954SCole Faust
742*c217d954SCole Faust const PadStrideInfo deconv_info = node.deconvolution_info();
743*c217d954SCole Faust
744*c217d954SCole Faust // Create and configure function (we assume that functions have been validated before creation)
745*c217d954SCole Faust std::shared_ptr<IMemoryManager> mm = get_memory_manager(ctx, TargetInfo::TargetType);
746*c217d954SCole Faust std::unique_ptr<IFunction> func;
747*c217d954SCole Faust
748*c217d954SCole Faust std::tie(func, std::ignore) = create_named_memory_managed_function<DeconvolutionLayerFunction>(
749*c217d954SCole Faust std::string(), mm,
750*c217d954SCole Faust input, weights, biases, output, deconv_info);
751*c217d954SCole Faust
752*c217d954SCole Faust // Log info
753*c217d954SCole Faust ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
754*c217d954SCole Faust << node.name()
755*c217d954SCole Faust << " Type: " << node.type()
756*c217d954SCole Faust << " Target: " << TargetInfo::TargetType
757*c217d954SCole Faust << " Data Type: " << input->info()->data_type()
758*c217d954SCole Faust << " Input shape: " << input->info()->tensor_shape()
759*c217d954SCole Faust << " Weights shape: " << weights->info()->tensor_shape()
760*c217d954SCole Faust << " Output shape: " << output->info()->tensor_shape()
761*c217d954SCole Faust << std::endl);
762*c217d954SCole Faust return func;
763*c217d954SCole Faust }
764*c217d954SCole Faust
765*c217d954SCole Faust /** Create a backend layer depth-wise convolution function
766*c217d954SCole Faust *
 * @tparam DepthwiseConvolutionLayer Backend depthwise convolution function
768*c217d954SCole Faust * @tparam TargetInfo Target-specific information
769*c217d954SCole Faust *
770*c217d954SCole Faust * @param[in] node Node to create the backend function for
771*c217d954SCole Faust *
772*c217d954SCole Faust * @return Backend depth-wise convolution layer function
773*c217d954SCole Faust */
774*c217d954SCole Faust template <typename DepthwiseConvolutionLayer, typename TargetInfo>
create_depthwise_convolution_layer(DepthwiseConvolutionLayerNode & node)775*c217d954SCole Faust std::unique_ptr<IFunction> create_depthwise_convolution_layer(DepthwiseConvolutionLayerNode &node)
776*c217d954SCole Faust {
777*c217d954SCole Faust validate_node<TargetInfo>(node, 3 /* expected inputs */, 1 /* expected outputs */);
778*c217d954SCole Faust
779*c217d954SCole Faust // Extract IO and info
780*c217d954SCole Faust typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
781*c217d954SCole Faust typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
782*c217d954SCole Faust typename TargetInfo::TensorType *biases = get_backing_tensor<TargetInfo>(node.input(2));
783*c217d954SCole Faust typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
784*c217d954SCole Faust
785*c217d954SCole Faust const bool is_quantized = is_data_type_quantized_asymmetric(input->info()->data_type());
786*c217d954SCole Faust
787*c217d954SCole Faust if(is_quantized)
788*c217d954SCole Faust {
789*c217d954SCole Faust biases->info()->set_data_type(DataType::S32);
790*c217d954SCole Faust }
791*c217d954SCole Faust
792*c217d954SCole Faust const PadStrideInfo conv_info = node.convolution_info();
793*c217d954SCole Faust const unsigned int depth_multiplier = node.depth_multiplier();
794*c217d954SCole Faust const ActivationLayerInfo fused_act = node.fused_activation();
795*c217d954SCole Faust
796*c217d954SCole Faust // Create and configure function (we assume that functions have been validated before creation)
797*c217d954SCole Faust std::unique_ptr<IFunction> func;
798*c217d954SCole Faust std::string func_name;
799*c217d954SCole Faust
800*c217d954SCole Faust std::tie(func, func_name) = create_named_function<DepthwiseConvolutionLayer>(
801*c217d954SCole Faust std::string("DepthwiseConvolutionLayer"),
802*c217d954SCole Faust input, weights, biases, output, conv_info, depth_multiplier, fused_act);
803*c217d954SCole Faust
804*c217d954SCole Faust // Log info
805*c217d954SCole Faust std::ostringstream qss;
806*c217d954SCole Faust if(is_quantized)
807*c217d954SCole Faust {
808*c217d954SCole Faust qss << " Input QuantInfo: " << input->info()->quantization_info()
809*c217d954SCole Faust << " Weights QuantInfo: " << weights->info()->quantization_info()
810*c217d954SCole Faust << " Output QuantInfo: " << output->info()->quantization_info();
811*c217d954SCole Faust }
812*c217d954SCole Faust ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
813*c217d954SCole Faust << node.name()
814*c217d954SCole Faust << " Type: " << func_name
815*c217d954SCole Faust << " Target: " << TargetInfo::TargetType
816*c217d954SCole Faust << " Data Type: " << input->info()->data_type()
817*c217d954SCole Faust << " Input shape: " << input->info()->tensor_shape()
818*c217d954SCole Faust << " Weights shape: " << weights->info()->tensor_shape()
819*c217d954SCole Faust << " Output shape: " << output->info()->tensor_shape()
820*c217d954SCole Faust << " Depth multiplier: " << depth_multiplier
821*c217d954SCole Faust << qss.str()
822*c217d954SCole Faust << (fused_act.enabled() ? " " + to_string(fused_act.activation()) : "")
823*c217d954SCole Faust << std::endl);
824*c217d954SCole Faust return std::move(func);
825*c217d954SCole Faust }
826*c217d954SCole Faust
827*c217d954SCole Faust /** Create a backend depth to space layer function
828*c217d954SCole Faust *
 * @tparam DepthToSpaceLayerFunction Backend depth to space function
830*c217d954SCole Faust * @tparam TargetInfo Target-specific information
831*c217d954SCole Faust *
832*c217d954SCole Faust * @param[in] node Node to create the backend function for
833*c217d954SCole Faust *
834*c217d954SCole Faust * @return Backend depth to space layer function
835*c217d954SCole Faust */
836*c217d954SCole Faust template <typename DepthToSpaceLayerFunction, typename TargetInfo>
create_depth_to_space_layer(DepthToSpaceLayerNode & node)837*c217d954SCole Faust std::unique_ptr<IFunction> create_depth_to_space_layer(DepthToSpaceLayerNode &node)
838*c217d954SCole Faust {
839*c217d954SCole Faust validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
840*c217d954SCole Faust
841*c217d954SCole Faust // Extract IO and info
842*c217d954SCole Faust typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
843*c217d954SCole Faust typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
844*c217d954SCole Faust
845*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(input == nullptr);
846*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(output == nullptr);
847*c217d954SCole Faust
848*c217d954SCole Faust // Create and configure function
849*c217d954SCole Faust auto func = std::make_unique<DepthToSpaceLayerFunction>();
850*c217d954SCole Faust func->configure(input, output, node.block_shape());
851*c217d954SCole Faust
852*c217d954SCole Faust // Log info
853*c217d954SCole Faust ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
854*c217d954SCole Faust << node.name()
855*c217d954SCole Faust << " Type: " << node.type()
856*c217d954SCole Faust << " Target: " << TargetInfo::TargetType
857*c217d954SCole Faust << " Data Type: " << input->info()->data_type()
858*c217d954SCole Faust << " Input shape: " << input->info()->tensor_shape()
859*c217d954SCole Faust << " Block Size: " << node.block_shape()
860*c217d954SCole Faust << " Output shape: " << output->info()->tensor_shape()
861*c217d954SCole Faust << std::endl);
862*c217d954SCole Faust
863*c217d954SCole Faust return std::move(func);
864*c217d954SCole Faust }
865*c217d954SCole Faust
866*c217d954SCole Faust /** Create a backend dequantize layer function
867*c217d954SCole Faust *
 * @tparam DequantizationLayerFunction Backend dequantize function
869*c217d954SCole Faust * @tparam TargetInfo Target-specific information
870*c217d954SCole Faust *
871*c217d954SCole Faust * @param[in] node Node to create the backend function for
872*c217d954SCole Faust *
873*c217d954SCole Faust * @return Backend dequantize layer function
874*c217d954SCole Faust */
875*c217d954SCole Faust template <typename DequantizationLayerFunction, typename TargetInfo>
create_dequantization_layer(DequantizationLayerNode & node)876*c217d954SCole Faust std::unique_ptr<IFunction> create_dequantization_layer(DequantizationLayerNode &node)
877*c217d954SCole Faust {
878*c217d954SCole Faust validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
879*c217d954SCole Faust
880*c217d954SCole Faust // Extract IO and info
881*c217d954SCole Faust typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
882*c217d954SCole Faust typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
883*c217d954SCole Faust
884*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(input == nullptr);
885*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(output == nullptr);
886*c217d954SCole Faust
887*c217d954SCole Faust // Create and configure function
888*c217d954SCole Faust auto func = std::make_unique<DequantizationLayerFunction>();
889*c217d954SCole Faust func->configure(input, output);
890*c217d954SCole Faust
891*c217d954SCole Faust // Log info
892*c217d954SCole Faust ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
893*c217d954SCole Faust << node.name()
894*c217d954SCole Faust << " Type: " << node.type()
895*c217d954SCole Faust << " Target: " << TargetInfo::TargetType
896*c217d954SCole Faust << " Data Type: " << input->info()->data_type()
897*c217d954SCole Faust << " Input shape: " << input->info()->tensor_shape()
898*c217d954SCole Faust << " Input quantization info: " << output->info()->quantization_info()
899*c217d954SCole Faust << " Output shape: " << output->info()->tensor_shape()
900*c217d954SCole Faust << std::endl);
901*c217d954SCole Faust
902*c217d954SCole Faust return std::move(func);
903*c217d954SCole Faust }
904*c217d954SCole Faust /** Create a backend detection output layer function
905*c217d954SCole Faust *
 * @tparam DetectionOutputLayerFunction Backend detection output function
907*c217d954SCole Faust * @tparam TargetInfo Target-specific information
908*c217d954SCole Faust *
909*c217d954SCole Faust * @param[in] node Node to create the backend function for
910*c217d954SCole Faust *
911*c217d954SCole Faust * @return Backend detection output layer function
912*c217d954SCole Faust */
913*c217d954SCole Faust template <typename DetectionOutputLayerFunction, typename TargetInfo>
create_detection_output_layer(DetectionOutputLayerNode & node)914*c217d954SCole Faust std::unique_ptr<IFunction> create_detection_output_layer(DetectionOutputLayerNode &node)
915*c217d954SCole Faust {
916*c217d954SCole Faust validate_node<TargetInfo>(node, 3 /* expected inputs */, 1 /* expected outputs */);
917*c217d954SCole Faust
918*c217d954SCole Faust // Extract IO and info
919*c217d954SCole Faust typename TargetInfo::TensorType *input0 = get_backing_tensor<TargetInfo>(node.input(0));
920*c217d954SCole Faust typename TargetInfo::TensorType *input1 = get_backing_tensor<TargetInfo>(node.input(1));
921*c217d954SCole Faust typename TargetInfo::TensorType *input2 = get_backing_tensor<TargetInfo>(node.input(2));
922*c217d954SCole Faust typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
923*c217d954SCole Faust const DetectionOutputLayerInfo detect_info = node.detection_output_info();
924*c217d954SCole Faust
925*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(input0 == nullptr);
926*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(input1 == nullptr);
927*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(input2 == nullptr);
928*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(output == nullptr);
929*c217d954SCole Faust
930*c217d954SCole Faust // Create and configure function
931*c217d954SCole Faust auto func = std::make_unique<DetectionOutputLayerFunction>();
932*c217d954SCole Faust func->configure(input0, input1, input2, output, detect_info);
933*c217d954SCole Faust
934*c217d954SCole Faust // Log info
935*c217d954SCole Faust ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
936*c217d954SCole Faust << node.name()
937*c217d954SCole Faust << " Type: " << node.type()
938*c217d954SCole Faust << " Target: " << TargetInfo::TargetType
939*c217d954SCole Faust << " Data Type: " << input0->info()->data_type()
940*c217d954SCole Faust << " Input0 shape: " << input0->info()->tensor_shape()
941*c217d954SCole Faust << " Input1 shape: " << input1->info()->tensor_shape()
942*c217d954SCole Faust << " Input2 shape: " << input2->info()->tensor_shape()
943*c217d954SCole Faust << " Output shape: " << output->info()->tensor_shape()
944*c217d954SCole Faust << " DetectionOutputLayer info: " << detect_info
945*c217d954SCole Faust << std::endl);
946*c217d954SCole Faust
947*c217d954SCole Faust return std::move(func);
948*c217d954SCole Faust }
949*c217d954SCole Faust
950*c217d954SCole Faust /** Create a backend detection post process layer function
951*c217d954SCole Faust *
 * @tparam DetectionPostProcessLayerFunction Backend detection post process function
953*c217d954SCole Faust * @tparam TargetInfo Target-specific information
954*c217d954SCole Faust *
955*c217d954SCole Faust * @param[in] node Node to create the backend function for
956*c217d954SCole Faust *
957*c217d954SCole Faust * @return Backend detection post process layer function
958*c217d954SCole Faust */
959*c217d954SCole Faust template <typename DetectionPostProcessLayerFunction, typename TargetInfo>
create_detection_post_process_layer(DetectionPostProcessLayerNode & node)960*c217d954SCole Faust std::unique_ptr<IFunction> create_detection_post_process_layer(DetectionPostProcessLayerNode &node)
961*c217d954SCole Faust {
962*c217d954SCole Faust validate_node<TargetInfo>(node, 3 /* expected inputs */, 4 /* expected outputs */);
963*c217d954SCole Faust
964*c217d954SCole Faust // Extract IO and info
965*c217d954SCole Faust typename TargetInfo::TensorType *input0 = get_backing_tensor<TargetInfo>(node.input(0));
966*c217d954SCole Faust typename TargetInfo::TensorType *input1 = get_backing_tensor<TargetInfo>(node.input(1));
967*c217d954SCole Faust typename TargetInfo::TensorType *input2 = get_backing_tensor<TargetInfo>(node.input(2));
968*c217d954SCole Faust typename TargetInfo::TensorType *output0 = get_backing_tensor<TargetInfo>(node.output(0));
969*c217d954SCole Faust typename TargetInfo::TensorType *output1 = get_backing_tensor<TargetInfo>(node.output(1));
970*c217d954SCole Faust typename TargetInfo::TensorType *output2 = get_backing_tensor<TargetInfo>(node.output(2));
971*c217d954SCole Faust typename TargetInfo::TensorType *output3 = get_backing_tensor<TargetInfo>(node.output(3));
972*c217d954SCole Faust const DetectionPostProcessLayerInfo detect_info = node.detection_post_process_info();
973*c217d954SCole Faust
974*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(input0 == nullptr);
975*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(input1 == nullptr);
976*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(input2 == nullptr);
977*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(output0 == nullptr);
978*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(output1 == nullptr);
979*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(output2 == nullptr);
980*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(output3 == nullptr);
981*c217d954SCole Faust
982*c217d954SCole Faust // Create and configure function
983*c217d954SCole Faust auto func = std::make_unique<DetectionPostProcessLayerFunction>();
984*c217d954SCole Faust func->configure(input0, input1, input2, output0, output1, output2, output3, detect_info);
985*c217d954SCole Faust
986*c217d954SCole Faust // Log info
987*c217d954SCole Faust ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
988*c217d954SCole Faust << node.name()
989*c217d954SCole Faust << " Type: " << node.type()
990*c217d954SCole Faust << " Target: " << TargetInfo::TargetType
991*c217d954SCole Faust << " Data Type: " << input0->info()->data_type()
992*c217d954SCole Faust << " Input0 shape: " << input0->info()->tensor_shape()
993*c217d954SCole Faust << " Input1 shape: " << input1->info()->tensor_shape()
994*c217d954SCole Faust << " Input2 shape: " << input2->info()->tensor_shape()
995*c217d954SCole Faust << " Output0 shape: " << output0->info()->tensor_shape()
996*c217d954SCole Faust << " Output1 shape: " << output1->info()->tensor_shape()
997*c217d954SCole Faust << " Output2 shape: " << output2->info()->tensor_shape()
998*c217d954SCole Faust << " Output3 shape: " << output3->info()->tensor_shape()
999*c217d954SCole Faust << " DetectionPostProcessLayer info: " << detect_info
1000*c217d954SCole Faust << std::endl);
1001*c217d954SCole Faust
1002*c217d954SCole Faust return std::move(func);
1003*c217d954SCole Faust }
1004*c217d954SCole Faust
1005*c217d954SCole Faust /** Create a backend element-wise operation layer function
1006*c217d954SCole Faust *
1007*c217d954SCole Faust * @tparam EltwiseFunctions Backend element-wise function
1008*c217d954SCole Faust * @tparam TargetInfo Target-specific information
1009*c217d954SCole Faust *
1010*c217d954SCole Faust * @param[in] node Node to create the backend function for
1011*c217d954SCole Faust *
1012*c217d954SCole Faust * @return Backend element-wise operation layer function
1013*c217d954SCole Faust */
1014*c217d954SCole Faust template <typename EltwiseFunctions, typename TargetInfo>
create_eltwise_layer(EltwiseLayerNode & node)1015*c217d954SCole Faust std::unique_ptr<IFunction> create_eltwise_layer(EltwiseLayerNode &node)
1016*c217d954SCole Faust {
1017*c217d954SCole Faust validate_node<TargetInfo>(node, 2 /* expected inputs */, 1 /* expected outputs */);
1018*c217d954SCole Faust
1019*c217d954SCole Faust // Extract IO and info
1020*c217d954SCole Faust typename TargetInfo::TensorType *input1 = get_backing_tensor<TargetInfo>(node.input(0));
1021*c217d954SCole Faust typename TargetInfo::TensorType *input2 = get_backing_tensor<TargetInfo>(node.input(1));
1022*c217d954SCole Faust typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1023*c217d954SCole Faust const EltwiseOperation eltwise_op = node.eltwise_operation();
1024*c217d954SCole Faust const ConvertPolicy convert_policy = node.convert_policy();
1025*c217d954SCole Faust const ActivationLayerInfo act_info = node.fused_activation();
1026*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(input1 == nullptr);
1027*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(input2 == nullptr);
1028*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(output == nullptr);
1029*c217d954SCole Faust
1030*c217d954SCole Faust std::unique_ptr<IFunction> func = nullptr;
1031*c217d954SCole Faust std::string func_name;
1032*c217d954SCole Faust if(eltwise_op == EltwiseOperation::Add)
1033*c217d954SCole Faust {
1034*c217d954SCole Faust std::tie(func, func_name) = create_named_function<typename EltwiseFunctions::Addition>(
1035*c217d954SCole Faust std::string("ArithmeticAddition"),
1036*c217d954SCole Faust input1, input2, output, convert_policy, act_info);
1037*c217d954SCole Faust }
1038*c217d954SCole Faust else if(eltwise_op == EltwiseOperation::Sub)
1039*c217d954SCole Faust {
1040*c217d954SCole Faust std::tie(func, func_name) = create_named_function<typename EltwiseFunctions::Subtraction>(
1041*c217d954SCole Faust std::string("ArithmeticSubtraction"),
1042*c217d954SCole Faust input1, input2, output, convert_policy, act_info);
1043*c217d954SCole Faust }
1044*c217d954SCole Faust else if(eltwise_op == EltwiseOperation::Mul)
1045*c217d954SCole Faust {
1046*c217d954SCole Faust std::tie(func, func_name) = create_named_function<typename EltwiseFunctions::Multiplication>(
1047*c217d954SCole Faust std::string("PixelWiseMultiplication"),
1048*c217d954SCole Faust input1, input2, output, 1.f, convert_policy, node.rounding_policy(), act_info);
1049*c217d954SCole Faust }
1050*c217d954SCole Faust else if(eltwise_op == EltwiseOperation::Max)
1051*c217d954SCole Faust {
1052*c217d954SCole Faust std::tie(func, func_name) = create_named_function<typename EltwiseFunctions::Maximum>(
1053*c217d954SCole Faust std::string("ElementwiseMaximum"),
1054*c217d954SCole Faust input1, input2, output, act_info);
1055*c217d954SCole Faust }
1056*c217d954SCole Faust else if(eltwise_op == EltwiseOperation::Div)
1057*c217d954SCole Faust {
1058*c217d954SCole Faust std::tie(func, func_name) = create_named_function<typename EltwiseFunctions::Division>(
1059*c217d954SCole Faust std::string("ArithmeticDivision"),
1060*c217d954SCole Faust input1, input2, output, act_info);
1061*c217d954SCole Faust }
1062*c217d954SCole Faust else
1063*c217d954SCole Faust {
1064*c217d954SCole Faust ARM_COMPUTE_ERROR("Unsupported element-wise operation!");
1065*c217d954SCole Faust }
1066*c217d954SCole Faust
1067*c217d954SCole Faust // Log info
1068*c217d954SCole Faust ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1069*c217d954SCole Faust << node.name()
1070*c217d954SCole Faust << " Type: " << node.type()
1071*c217d954SCole Faust << " Target: " << TargetInfo::TargetType
1072*c217d954SCole Faust << " Operation: " << func_name
1073*c217d954SCole Faust << " Data Type: " << input1->info()->data_type()
1074*c217d954SCole Faust << " Shape: " << input1->info()->tensor_shape()
1075*c217d954SCole Faust << std::endl);
1076*c217d954SCole Faust
1077*c217d954SCole Faust return std::move(func);
1078*c217d954SCole Faust }
1079*c217d954SCole Faust
1080*c217d954SCole Faust /** Create a backend unary element-wise operation layer function
1081*c217d954SCole Faust *
1082*c217d954SCole Faust * @tparam UnaryEltwiseFunctions Backend unary element-wise function
1083*c217d954SCole Faust * @tparam TargetInfo Target-specific information
1084*c217d954SCole Faust *
1085*c217d954SCole Faust * @param[in] node Node to create the backend function for
1086*c217d954SCole Faust *
1087*c217d954SCole Faust * @return Backend unary element-wise operation layer function
1088*c217d954SCole Faust */
1089*c217d954SCole Faust template <typename UnaryEltwiseFunctions, typename TargetInfo>
create_unary_eltwise_layer(UnaryEltwiseLayerNode & node)1090*c217d954SCole Faust std::unique_ptr<IFunction> create_unary_eltwise_layer(UnaryEltwiseLayerNode &node)
1091*c217d954SCole Faust {
1092*c217d954SCole Faust validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1093*c217d954SCole Faust
1094*c217d954SCole Faust // Extract IO and info
1095*c217d954SCole Faust typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1096*c217d954SCole Faust typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1097*c217d954SCole Faust const UnaryEltwiseOperation eltwise_op = node.eltwise_descriptor().op;
1098*c217d954SCole Faust
1099*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(input == nullptr);
1100*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(output == nullptr);
1101*c217d954SCole Faust
1102*c217d954SCole Faust std::unique_ptr<IFunction> func = nullptr;
1103*c217d954SCole Faust std::string func_name;
1104*c217d954SCole Faust if(eltwise_op == UnaryEltwiseOperation::Exp)
1105*c217d954SCole Faust {
1106*c217d954SCole Faust std::tie(func, func_name) = create_named_function<typename UnaryEltwiseFunctions::Exp>(
1107*c217d954SCole Faust std::string("Exp"),
1108*c217d954SCole Faust input, output);
1109*c217d954SCole Faust }
1110*c217d954SCole Faust else
1111*c217d954SCole Faust {
1112*c217d954SCole Faust ARM_COMPUTE_ERROR("Unsupported unary element-wise operation!");
1113*c217d954SCole Faust }
1114*c217d954SCole Faust
1115*c217d954SCole Faust // Log info
1116*c217d954SCole Faust ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1117*c217d954SCole Faust << node.name()
1118*c217d954SCole Faust << " Type: " << node.type()
1119*c217d954SCole Faust << " Target: " << TargetInfo::TargetType
1120*c217d954SCole Faust << " Operation: " << func_name
1121*c217d954SCole Faust << " Data Type: " << input->info()->data_type()
1122*c217d954SCole Faust << " Shape: " << input->info()->tensor_shape()
1123*c217d954SCole Faust << std::endl);
1124*c217d954SCole Faust
1125*c217d954SCole Faust return std::move(func);
1126*c217d954SCole Faust }
1127*c217d954SCole Faust
1128*c217d954SCole Faust /** Create a backend flatten layer function
1129*c217d954SCole Faust *
1130*c217d954SCole Faust * @tparam FlattenLayerFunction Backend flatten function
1131*c217d954SCole Faust * @tparam TargetInfo Target-specific information
1132*c217d954SCole Faust *
1133*c217d954SCole Faust * @param[in] node Node to create the backend function for
1134*c217d954SCole Faust *
1135*c217d954SCole Faust * @return Backend flatten layer function
1136*c217d954SCole Faust */
1137*c217d954SCole Faust template <typename FlattenLayerFunction, typename TargetInfo>
create_flatten_layer(FlattenLayerNode & node)1138*c217d954SCole Faust std::unique_ptr<IFunction> create_flatten_layer(FlattenLayerNode &node)
1139*c217d954SCole Faust {
1140*c217d954SCole Faust validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1141*c217d954SCole Faust
1142*c217d954SCole Faust // Extract IO and info
1143*c217d954SCole Faust typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1144*c217d954SCole Faust typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1145*c217d954SCole Faust
1146*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(input == nullptr);
1147*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(output == nullptr);
1148*c217d954SCole Faust
1149*c217d954SCole Faust // Create and configure function
1150*c217d954SCole Faust auto func = std::make_unique<FlattenLayerFunction>();
1151*c217d954SCole Faust func->configure(input, output);
1152*c217d954SCole Faust
1153*c217d954SCole Faust // Log info
1154*c217d954SCole Faust ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1155*c217d954SCole Faust << node.name()
1156*c217d954SCole Faust << " Type: " << node.type()
1157*c217d954SCole Faust << " Target: " << TargetInfo::TargetType
1158*c217d954SCole Faust << " Data Type: " << input->info()->data_type()
1159*c217d954SCole Faust << " Input shape: " << input->info()->tensor_shape()
1160*c217d954SCole Faust << " Output shape: " << output->info()->tensor_shape()
1161*c217d954SCole Faust << std::endl);
1162*c217d954SCole Faust
1163*c217d954SCole Faust return std::move(func);
1164*c217d954SCole Faust }
1165*c217d954SCole Faust
1166*c217d954SCole Faust /** Create a backend fully connected layer function
1167*c217d954SCole Faust *
1168*c217d954SCole Faust * @tparam FullyConnectedLayerFunction Backend fully-connected function
1169*c217d954SCole Faust * @tparam TargetInfo Target-specific information
1170*c217d954SCole Faust *
1171*c217d954SCole Faust * @param[in] node Node to create the backend function for
1172*c217d954SCole Faust * @param[in] ctx Graph context
1173*c217d954SCole Faust *
1174*c217d954SCole Faust * @return Backend fully connected layer function
1175*c217d954SCole Faust */
1176*c217d954SCole Faust template <typename FullyConnectedLayerFunction, typename TargetInfo>
create_fully_connected_layer(FullyConnectedLayerNode & node,GraphContext & ctx)1177*c217d954SCole Faust std::unique_ptr<IFunction> create_fully_connected_layer(FullyConnectedLayerNode &node, GraphContext &ctx)
1178*c217d954SCole Faust {
1179*c217d954SCole Faust validate_node<TargetInfo>(node, 3 /* expected inputs */, 1 /* expected outputs */);
1180*c217d954SCole Faust
1181*c217d954SCole Faust // Extract IO and info
1182*c217d954SCole Faust typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1183*c217d954SCole Faust typename TargetInfo::TensorType *weights = get_backing_tensor<TargetInfo>(node.input(1));
1184*c217d954SCole Faust typename TargetInfo::TensorType *biases = get_backing_tensor<TargetInfo>(node.input(2));
1185*c217d954SCole Faust typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1186*c217d954SCole Faust FullyConnectedLayerInfo fc_info = node.info();
1187*c217d954SCole Faust fc_info.enable_fast_math = (node.fast_math_hint() == FastMathHint::Enabled);
1188*c217d954SCole Faust
1189*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(input == nullptr);
1190*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(weights == nullptr);
1191*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(output == nullptr);
1192*c217d954SCole Faust
1193*c217d954SCole Faust // Create and configure function
1194*c217d954SCole Faust auto wm = get_weights_manager(ctx, TargetInfo::TargetType);
1195*c217d954SCole Faust auto mm = get_memory_manager(ctx, TargetInfo::TargetType);
1196*c217d954SCole Faust auto func = std::make_unique<FullyConnectedLayerFunction>(mm, wm.get());
1197*c217d954SCole Faust func->configure(input, weights, biases, output, fc_info);
1198*c217d954SCole Faust
1199*c217d954SCole Faust const bool is_quantized = is_data_type_quantized_asymmetric(input->info()->data_type());
1200*c217d954SCole Faust
1201*c217d954SCole Faust // Log info
1202*c217d954SCole Faust std::ostringstream qss;
1203*c217d954SCole Faust if(is_quantized)
1204*c217d954SCole Faust {
1205*c217d954SCole Faust qss << " Input QuantInfo: " << input->info()->quantization_info()
1206*c217d954SCole Faust << " Weights QuantInfo: " << weights->info()->quantization_info()
1207*c217d954SCole Faust << " Output QuantInfo: " << output->info()->quantization_info();
1208*c217d954SCole Faust }
1209*c217d954SCole Faust ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1210*c217d954SCole Faust << node.name()
1211*c217d954SCole Faust << " Type: " << node.type()
1212*c217d954SCole Faust << " Target: " << TargetInfo::TargetType
1213*c217d954SCole Faust << " Data Type: " << input->info()->data_type()
1214*c217d954SCole Faust << qss.str()
1215*c217d954SCole Faust << " Input shape: " << input->info()->tensor_shape()
1216*c217d954SCole Faust << " Weights shape: " << weights->info()->tensor_shape()
1217*c217d954SCole Faust << " Output shape: " << output->info()->tensor_shape()
1218*c217d954SCole Faust << std::endl);
1219*c217d954SCole Faust
1220*c217d954SCole Faust return std::move(func);
1221*c217d954SCole Faust }
1222*c217d954SCole Faust
1223*c217d954SCole Faust /** Create a backend generate proposals layer function
1224*c217d954SCole Faust *
1225*c217d954SCole Faust * @tparam GenerateProposalsLayerFunction Backend generate proposals function
1226*c217d954SCole Faust * @tparam TargetInfo Target-specific information
1227*c217d954SCole Faust *
1228*c217d954SCole Faust * @param[in] node Node to create the backend function for
1229*c217d954SCole Faust * @param[in] ctx Graph context
1230*c217d954SCole Faust *
1231*c217d954SCole Faust * @return Backend generate proposals layer function
1232*c217d954SCole Faust */
1233*c217d954SCole Faust template <typename GenerateProposalsLayerFunction, typename TargetInfo>
create_generate_proposals_layer(GenerateProposalsLayerNode & node,GraphContext & ctx)1234*c217d954SCole Faust std::unique_ptr<IFunction> create_generate_proposals_layer(GenerateProposalsLayerNode &node, GraphContext &ctx)
1235*c217d954SCole Faust {
1236*c217d954SCole Faust validate_node<TargetInfo>(node, 3 /* expected inputs */, 3 /* expected outputs */);
1237*c217d954SCole Faust
1238*c217d954SCole Faust // Extract IO and info
1239*c217d954SCole Faust typename TargetInfo::TensorType *scores = get_backing_tensor<TargetInfo>(node.input(0));
1240*c217d954SCole Faust typename TargetInfo::TensorType *deltas = get_backing_tensor<TargetInfo>(node.input(1));
1241*c217d954SCole Faust typename TargetInfo::TensorType *anchors = get_backing_tensor<TargetInfo>(node.input(2));
1242*c217d954SCole Faust typename TargetInfo::TensorType *proposals = get_backing_tensor<TargetInfo>(node.output(0));
1243*c217d954SCole Faust typename TargetInfo::TensorType *scores_out = get_backing_tensor<TargetInfo>(node.output(1));
1244*c217d954SCole Faust typename TargetInfo::TensorType *num_valid_proposals = get_backing_tensor<TargetInfo>(node.output(2));
1245*c217d954SCole Faust const GenerateProposalsInfo info = node.info();
1246*c217d954SCole Faust
1247*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(scores == nullptr);
1248*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(deltas == nullptr);
1249*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(anchors == nullptr);
1250*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(proposals == nullptr);
1251*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(scores_out == nullptr);
1252*c217d954SCole Faust
1253*c217d954SCole Faust // Create and configure function
1254*c217d954SCole Faust auto func = std::make_unique<GenerateProposalsLayerFunction>(get_memory_manager(ctx, TargetInfo::TargetType));
1255*c217d954SCole Faust func->configure(scores, deltas, anchors, proposals, scores_out, num_valid_proposals, info);
1256*c217d954SCole Faust
1257*c217d954SCole Faust // Log info
1258*c217d954SCole Faust ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated " << node.type()
1259*c217d954SCole Faust << " Target " << TargetInfo::TargetType
1260*c217d954SCole Faust << " Data Type: " << scores->info()->data_type()
1261*c217d954SCole Faust << " Scores shape: " << scores->info()->tensor_shape()
1262*c217d954SCole Faust << " Deltas shape: " << deltas->info()->tensor_shape()
1263*c217d954SCole Faust << " Anchors shape: " << anchors->info()->tensor_shape()
1264*c217d954SCole Faust << " Proposals shape: " << proposals->info()->tensor_shape()
1265*c217d954SCole Faust << " Num valid proposals shape: " << num_valid_proposals->info()->tensor_shape()
1266*c217d954SCole Faust << " Scores Out shape: " << scores_out->info()->tensor_shape()
1267*c217d954SCole Faust << std::endl);
1268*c217d954SCole Faust
1269*c217d954SCole Faust return std::move(func);
1270*c217d954SCole Faust }
1271*c217d954SCole Faust
1272*c217d954SCole Faust /** Create a backend l2 normalization layer function
1273*c217d954SCole Faust *
1274*c217d954SCole Faust * @tparam NormalizationLayerFunction Backend normalization function
1275*c217d954SCole Faust * @tparam TargetInfo Target-specific information
1276*c217d954SCole Faust *
1277*c217d954SCole Faust * @param[in] node Node to create the backend function for
1278*c217d954SCole Faust * @param[in] ctx Graph context
1279*c217d954SCole Faust *
1280*c217d954SCole Faust * @return Backend normalization layer function
1281*c217d954SCole Faust */
1282*c217d954SCole Faust template <typename L2NormalizeLayerFunction, typename TargetInfo>
create_l2_normalize_layer(L2NormalizeLayerNode & node,GraphContext & ctx)1283*c217d954SCole Faust std::unique_ptr<IFunction> create_l2_normalize_layer(L2NormalizeLayerNode &node, GraphContext &ctx)
1284*c217d954SCole Faust {
1285*c217d954SCole Faust validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1286*c217d954SCole Faust
1287*c217d954SCole Faust // Extract IO and info
1288*c217d954SCole Faust typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1289*c217d954SCole Faust typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1290*c217d954SCole Faust int axis = node.axis();
1291*c217d954SCole Faust float epsilon = node.epsilon();
1292*c217d954SCole Faust
1293*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(input == nullptr);
1294*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(output == nullptr);
1295*c217d954SCole Faust
1296*c217d954SCole Faust // Create and configure function
1297*c217d954SCole Faust auto mm = get_memory_manager(ctx, TargetInfo::TargetType);
1298*c217d954SCole Faust auto func = std::make_unique<L2NormalizeLayerFunction>(mm);
1299*c217d954SCole Faust func->configure(input, output, axis, epsilon);
1300*c217d954SCole Faust
1301*c217d954SCole Faust // Log info
1302*c217d954SCole Faust ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1303*c217d954SCole Faust << node.name()
1304*c217d954SCole Faust << " Type: " << node.type()
1305*c217d954SCole Faust << " Target: " << TargetInfo::TargetType
1306*c217d954SCole Faust << " Data Type: " << input->info()->data_type()
1307*c217d954SCole Faust << " Input shape: " << input->info()->tensor_shape()
1308*c217d954SCole Faust << " Output shape: " << output->info()->tensor_shape()
1309*c217d954SCole Faust << " Axis: " << axis
1310*c217d954SCole Faust << " Epsilon: " << epsilon
1311*c217d954SCole Faust << std::endl);
1312*c217d954SCole Faust
1313*c217d954SCole Faust return std::move(func);
1314*c217d954SCole Faust }
1315*c217d954SCole Faust
1316*c217d954SCole Faust /** Create a backend normalization layer function
1317*c217d954SCole Faust *
1318*c217d954SCole Faust * @tparam NormalizationLayerFunction Backend normalization function
1319*c217d954SCole Faust * @tparam TargetInfo Target-specific information
1320*c217d954SCole Faust *
1321*c217d954SCole Faust * @param[in] node Node to create the backend function for
1322*c217d954SCole Faust * @param[in] ctx Graph context
1323*c217d954SCole Faust *
1324*c217d954SCole Faust * @return Backend normalization layer function
1325*c217d954SCole Faust */
1326*c217d954SCole Faust template <typename NormalizationLayerFunction, typename TargetInfo>
create_normalization_layer(NormalizationLayerNode & node,GraphContext & ctx)1327*c217d954SCole Faust std::unique_ptr<IFunction> create_normalization_layer(NormalizationLayerNode &node, GraphContext &ctx)
1328*c217d954SCole Faust {
1329*c217d954SCole Faust ARM_COMPUTE_UNUSED(ctx);
1330*c217d954SCole Faust
1331*c217d954SCole Faust validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1332*c217d954SCole Faust
1333*c217d954SCole Faust // Extract IO and info
1334*c217d954SCole Faust typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1335*c217d954SCole Faust typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1336*c217d954SCole Faust const NormalizationLayerInfo norm_info = node.normalization_info();
1337*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(input == nullptr);
1338*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(output == nullptr);
1339*c217d954SCole Faust
1340*c217d954SCole Faust // Create and configure function
1341*c217d954SCole Faust auto func = std::make_unique<NormalizationLayerFunction>();
1342*c217d954SCole Faust func->configure(input, output, norm_info);
1343*c217d954SCole Faust
1344*c217d954SCole Faust // Log info
1345*c217d954SCole Faust ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1346*c217d954SCole Faust << node.name()
1347*c217d954SCole Faust << " Type: " << node.type()
1348*c217d954SCole Faust << " Target: " << TargetInfo::TargetType
1349*c217d954SCole Faust << " Data Type: " << input->info()->data_type()
1350*c217d954SCole Faust << " Input shape: " << input->info()->tensor_shape()
1351*c217d954SCole Faust << " Output shape: " << output->info()->tensor_shape()
1352*c217d954SCole Faust << " Normalization info: " << norm_info.type()
1353*c217d954SCole Faust << std::endl);
1354*c217d954SCole Faust
1355*c217d954SCole Faust return std::move(func);
1356*c217d954SCole Faust }
1357*c217d954SCole Faust
1358*c217d954SCole Faust /** Create a backend normalize planar YUV layer function
1359*c217d954SCole Faust *
1360*c217d954SCole Faust * @tparam NormalizePlanarYUVLayerFunction Backend normalize planar YUV function
1361*c217d954SCole Faust * @tparam TargetInfo Target-specific information
1362*c217d954SCole Faust *
1363*c217d954SCole Faust * @param[in] node Node to create the backend function for
1364*c217d954SCole Faust *
1365*c217d954SCole Faust * @return Backend normalize plnar YUV layer function
1366*c217d954SCole Faust */
1367*c217d954SCole Faust template <typename NormalizePlanarYUVLayerFunction, typename TargetInfo>
create_normalize_planar_yuv_layer(NormalizePlanarYUVLayerNode & node)1368*c217d954SCole Faust std::unique_ptr<IFunction> create_normalize_planar_yuv_layer(NormalizePlanarYUVLayerNode &node)
1369*c217d954SCole Faust {
1370*c217d954SCole Faust validate_node<TargetInfo>(node, 3 /* expected inputs */, 1 /* expected outputs */);
1371*c217d954SCole Faust
1372*c217d954SCole Faust // Extract IO and info
1373*c217d954SCole Faust typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1374*c217d954SCole Faust typename TargetInfo::TensorType *mean = get_backing_tensor<TargetInfo>(node.input(1));
1375*c217d954SCole Faust typename TargetInfo::TensorType *std = get_backing_tensor<TargetInfo>(node.input(2));
1376*c217d954SCole Faust typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1377*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(input == nullptr);
1378*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(mean == nullptr);
1379*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(std == nullptr);
1380*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(output == nullptr);
1381*c217d954SCole Faust
1382*c217d954SCole Faust // Create and configure function
1383*c217d954SCole Faust auto func = std::make_unique<NormalizePlanarYUVLayerFunction>();
1384*c217d954SCole Faust func->configure(input, output, mean, std);
1385*c217d954SCole Faust
1386*c217d954SCole Faust // Log info
1387*c217d954SCole Faust ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1388*c217d954SCole Faust << node.name()
1389*c217d954SCole Faust << " Type: " << node.type()
1390*c217d954SCole Faust << " Target: " << TargetInfo::TargetType
1391*c217d954SCole Faust << " Data Type: " << input->info()->data_type()
1392*c217d954SCole Faust << " Shape: " << input->info()->tensor_shape()
1393*c217d954SCole Faust << std::endl);
1394*c217d954SCole Faust
1395*c217d954SCole Faust return std::move(func);
1396*c217d954SCole Faust }
1397*c217d954SCole Faust
1398*c217d954SCole Faust /** Create a backend pad layer function
1399*c217d954SCole Faust *
1400*c217d954SCole Faust * @tparam PadLayerFunction Backend pad function
1401*c217d954SCole Faust * @tparam TargetInfo Target-specific information
1402*c217d954SCole Faust *
1403*c217d954SCole Faust * @param[in] node Node to create the backend function for
1404*c217d954SCole Faust *
1405*c217d954SCole Faust * @return Backend pad layer function
1406*c217d954SCole Faust */
1407*c217d954SCole Faust template <typename PadLayerFunction, typename TargetInfo>
create_pad_layer(PadLayerNode & node)1408*c217d954SCole Faust std::unique_ptr<IFunction> create_pad_layer(PadLayerNode &node)
1409*c217d954SCole Faust {
1410*c217d954SCole Faust validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1411*c217d954SCole Faust
1412*c217d954SCole Faust // Extract IO and info
1413*c217d954SCole Faust typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1414*c217d954SCole Faust typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1415*c217d954SCole Faust const PaddingList &padding = node.padding();
1416*c217d954SCole Faust const PixelValue pad_value = node.pad_value();
1417*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(input == nullptr);
1418*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(output == nullptr);
1419*c217d954SCole Faust
1420*c217d954SCole Faust // Create and configure function
1421*c217d954SCole Faust auto func = std::make_unique<PadLayerFunction>();
1422*c217d954SCole Faust func->configure(input, output, padding, pad_value);
1423*c217d954SCole Faust
1424*c217d954SCole Faust // Log info
1425*c217d954SCole Faust ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1426*c217d954SCole Faust << node.name()
1427*c217d954SCole Faust << " Type: " << node.type()
1428*c217d954SCole Faust << " Target: " << TargetInfo::TargetType
1429*c217d954SCole Faust << " Data Type: " << input->info()->data_type()
1430*c217d954SCole Faust << " Input shape: " << input->info()->tensor_shape()
1431*c217d954SCole Faust << " Output shape: " << output->info()->tensor_shape()
1432*c217d954SCole Faust << std::endl);
1433*c217d954SCole Faust
1434*c217d954SCole Faust return std::move(func);
1435*c217d954SCole Faust }
1436*c217d954SCole Faust
1437*c217d954SCole Faust /** Create a backend permute layer function
1438*c217d954SCole Faust *
1439*c217d954SCole Faust * @tparam PermuteLayerFunction Backend permute function
1440*c217d954SCole Faust * @tparam TargetInfo Target-specific information
1441*c217d954SCole Faust *
1442*c217d954SCole Faust * @param[in] node Node to create the backend function for
1443*c217d954SCole Faust *
1444*c217d954SCole Faust * @return Backend permute layer function
1445*c217d954SCole Faust */
1446*c217d954SCole Faust template <typename PermuteLayerFunction, typename TargetInfo>
create_permute_layer(PermuteLayerNode & node)1447*c217d954SCole Faust std::unique_ptr<IFunction> create_permute_layer(PermuteLayerNode &node)
1448*c217d954SCole Faust {
1449*c217d954SCole Faust validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1450*c217d954SCole Faust
1451*c217d954SCole Faust // Extract IO and info
1452*c217d954SCole Faust typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1453*c217d954SCole Faust typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1454*c217d954SCole Faust const PermutationVector &perm = node.permutation_vector();
1455*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(input == nullptr);
1456*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(output == nullptr);
1457*c217d954SCole Faust
1458*c217d954SCole Faust // Create and configure function
1459*c217d954SCole Faust auto func = std::make_unique<PermuteLayerFunction>();
1460*c217d954SCole Faust func->configure(input, output, perm);
1461*c217d954SCole Faust
1462*c217d954SCole Faust // Log info
1463*c217d954SCole Faust ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1464*c217d954SCole Faust << node.name()
1465*c217d954SCole Faust << " Type: " << node.type()
1466*c217d954SCole Faust << " Target: " << TargetInfo::TargetType
1467*c217d954SCole Faust << " Data Type: " << input->info()->data_type()
1468*c217d954SCole Faust << " Input shape: " << input->info()->tensor_shape()
1469*c217d954SCole Faust << " Output shape: " << output->info()->tensor_shape()
1470*c217d954SCole Faust << " Permutation vector: " << perm
1471*c217d954SCole Faust << std::endl);
1472*c217d954SCole Faust
1473*c217d954SCole Faust return std::move(func);
1474*c217d954SCole Faust }
1475*c217d954SCole Faust
1476*c217d954SCole Faust /** Create a backend pooling layer function
1477*c217d954SCole Faust *
1478*c217d954SCole Faust * @tparam PoolingLayerFunction Backend pooling function
1479*c217d954SCole Faust * @tparam TargetInfo Target-specific information
1480*c217d954SCole Faust *
1481*c217d954SCole Faust * @param[in] node Node to create the backend function for
1482*c217d954SCole Faust *
1483*c217d954SCole Faust * @return Backend pooling layer function
1484*c217d954SCole Faust */
1485*c217d954SCole Faust template <typename PoolingLayerFunction, typename TargetInfo>
create_pooling_layer(PoolingLayerNode & node)1486*c217d954SCole Faust std::unique_ptr<IFunction> create_pooling_layer(PoolingLayerNode &node)
1487*c217d954SCole Faust {
1488*c217d954SCole Faust validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1489*c217d954SCole Faust
1490*c217d954SCole Faust // Extract IO and info
1491*c217d954SCole Faust typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1492*c217d954SCole Faust typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1493*c217d954SCole Faust const PoolingLayerInfo pool_info = node.pooling_info();
1494*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(input == nullptr);
1495*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(output == nullptr);
1496*c217d954SCole Faust
1497*c217d954SCole Faust // Create and configure function
1498*c217d954SCole Faust auto func = std::make_unique<PoolingLayerFunction>();
1499*c217d954SCole Faust func->configure(input, output, pool_info);
1500*c217d954SCole Faust
1501*c217d954SCole Faust // Log info
1502*c217d954SCole Faust ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1503*c217d954SCole Faust << node.name()
1504*c217d954SCole Faust << " Type: " << node.type()
1505*c217d954SCole Faust << " Target: " << TargetInfo::TargetType
1506*c217d954SCole Faust << " Data Type: " << input->info()->data_type()
1507*c217d954SCole Faust << " Input shape: " << input->info()->tensor_shape()
1508*c217d954SCole Faust << " Output shape: " << output->info()->tensor_shape()
1509*c217d954SCole Faust << " Pooling info: " << pool_info.pool_type
1510*c217d954SCole Faust << std::endl);
1511*c217d954SCole Faust
1512*c217d954SCole Faust return std::move(func);
1513*c217d954SCole Faust }
1514*c217d954SCole Faust
1515*c217d954SCole Faust /** Create a backend PRelu layer function
1516*c217d954SCole Faust *
1517*c217d954SCole Faust * @tparam PReluFunction Backend PRelu function
1518*c217d954SCole Faust * @tparam TargetInfo Target-specific information
1519*c217d954SCole Faust *
1520*c217d954SCole Faust * @param[in] node Node to create the backend function for
1521*c217d954SCole Faust *
1522*c217d954SCole Faust * @return Backend PRelu layer function
1523*c217d954SCole Faust */
1524*c217d954SCole Faust template <typename PReluFunction, typename TargetInfo>
create_prelu_layer(PReluLayerNode & node)1525*c217d954SCole Faust std::unique_ptr<IFunction> create_prelu_layer(PReluLayerNode &node)
1526*c217d954SCole Faust {
1527*c217d954SCole Faust validate_node<TargetInfo>(node, 2 /* expected inputs */, 1 /* expected outputs */);
1528*c217d954SCole Faust
1529*c217d954SCole Faust // Extract IO and info
1530*c217d954SCole Faust typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1531*c217d954SCole Faust typename TargetInfo::TensorType *alpha = get_backing_tensor<TargetInfo>(node.input(1));
1532*c217d954SCole Faust typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1533*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(input == nullptr || alpha == nullptr);
1534*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(output == nullptr);
1535*c217d954SCole Faust
1536*c217d954SCole Faust // Create and configure function
1537*c217d954SCole Faust auto func = std::make_unique<PReluFunction>();
1538*c217d954SCole Faust func->configure(input, alpha, output);
1539*c217d954SCole Faust
1540*c217d954SCole Faust // Log info
1541*c217d954SCole Faust ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1542*c217d954SCole Faust << node.name()
1543*c217d954SCole Faust << " Type: " << node.type()
1544*c217d954SCole Faust << " Target: " << TargetInfo::TargetType
1545*c217d954SCole Faust << " Data Type: " << input->info()->data_type()
1546*c217d954SCole Faust << " Input shape: " << input->info()->tensor_shape()
1547*c217d954SCole Faust << " Output shape: " << output->info()->tensor_shape()
1548*c217d954SCole Faust << std::endl);
1549*c217d954SCole Faust
1550*c217d954SCole Faust return std::move(func);
1551*c217d954SCole Faust }
1552*c217d954SCole Faust
1553*c217d954SCole Faust /** Create a backend print layer function
1554*c217d954SCole Faust *
1555*c217d954SCole Faust * @tparam TargetInfo Target-specific information
1556*c217d954SCole Faust *
1557*c217d954SCole Faust * @param[in] node Node to create the backend function for
1558*c217d954SCole Faust *
1559*c217d954SCole Faust * @return Backend print layer function
1560*c217d954SCole Faust */
1561*c217d954SCole Faust template <typename TargetInfo>
create_print_layer(PrintLayerNode & node)1562*c217d954SCole Faust std::unique_ptr<IFunction> create_print_layer(PrintLayerNode &node)
1563*c217d954SCole Faust {
1564*c217d954SCole Faust validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1565*c217d954SCole Faust
1566*c217d954SCole Faust typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1567*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(input == nullptr);
1568*c217d954SCole Faust ARM_COMPUTE_UNUSED(input);
1569*c217d954SCole Faust
1570*c217d954SCole Faust // Log info
1571*c217d954SCole Faust ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1572*c217d954SCole Faust << node.name()
1573*c217d954SCole Faust << " Type: " << node.type()
1574*c217d954SCole Faust << " Target: " << TargetInfo::TargetType
1575*c217d954SCole Faust << " Data Type: " << input->info()->data_type()
1576*c217d954SCole Faust << " Input shape: " << input->info()->tensor_shape()
1577*c217d954SCole Faust << std::endl);
1578*c217d954SCole Faust
1579*c217d954SCole Faust return nullptr;
1580*c217d954SCole Faust }
1581*c217d954SCole Faust
1582*c217d954SCole Faust /** Create a backend priorbox layer function
1583*c217d954SCole Faust *
1584*c217d954SCole Faust * @tparam PriorBoxLayerFunction Backend priorbox function
1585*c217d954SCole Faust * @tparam TargetInfo Target-specific information
1586*c217d954SCole Faust *
1587*c217d954SCole Faust * @param[in] node Node to create the backend function for
1588*c217d954SCole Faust *
1589*c217d954SCole Faust * @return Backend priorbox layer function
1590*c217d954SCole Faust */
1591*c217d954SCole Faust template <typename PriorBoxLayerFunction, typename TargetInfo>
create_priorbox_layer(PriorBoxLayerNode & node)1592*c217d954SCole Faust std::unique_ptr<IFunction> create_priorbox_layer(PriorBoxLayerNode &node)
1593*c217d954SCole Faust {
1594*c217d954SCole Faust validate_node<TargetInfo>(node, 2 /* expected inputs */, 1 /* expected outputs */);
1595*c217d954SCole Faust
1596*c217d954SCole Faust // Extract IO and info
1597*c217d954SCole Faust typename TargetInfo::TensorType *input0 = get_backing_tensor<TargetInfo>(node.input(0));
1598*c217d954SCole Faust typename TargetInfo::TensorType *input1 = get_backing_tensor<TargetInfo>(node.input(1));
1599*c217d954SCole Faust typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1600*c217d954SCole Faust const PriorBoxLayerInfo prior_info = node.priorbox_info();
1601*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(input0 == nullptr);
1602*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(input1 == nullptr);
1603*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(output == nullptr);
1604*c217d954SCole Faust
1605*c217d954SCole Faust // Create and configure function
1606*c217d954SCole Faust auto func = std::make_unique<PriorBoxLayerFunction>();
1607*c217d954SCole Faust func->configure(input0, input1, output, prior_info);
1608*c217d954SCole Faust
1609*c217d954SCole Faust // Log info
1610*c217d954SCole Faust ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1611*c217d954SCole Faust << node.name()
1612*c217d954SCole Faust << " Type: " << node.type()
1613*c217d954SCole Faust << " Target: " << TargetInfo::TargetType
1614*c217d954SCole Faust << " Data Type: " << input0->info()->data_type()
1615*c217d954SCole Faust << " Input0 shape: " << input0->info()->tensor_shape()
1616*c217d954SCole Faust << " Input1 shape: " << input1->info()->tensor_shape()
1617*c217d954SCole Faust << " Output shape: " << output->info()->tensor_shape()
1618*c217d954SCole Faust << " PriorBoxLayer info: " << prior_info
1619*c217d954SCole Faust << std::endl);
1620*c217d954SCole Faust
1621*c217d954SCole Faust return std::move(func);
1622*c217d954SCole Faust }
1623*c217d954SCole Faust
1624*c217d954SCole Faust /** Create a backend quantization layer function
1625*c217d954SCole Faust *
1626*c217d954SCole Faust * @tparam QuantizationLayerFunction Backend quantization function
1627*c217d954SCole Faust * @tparam TargetInfo Target-specific information
1628*c217d954SCole Faust *
1629*c217d954SCole Faust * @param[in] node Node to create the backend function for
1630*c217d954SCole Faust *
1631*c217d954SCole Faust * @return Backend quantization layer function
1632*c217d954SCole Faust */
1633*c217d954SCole Faust template <typename QuantizationLayerFunction, typename TargetInfo>
create_quantization_layer(QuantizationLayerNode & node)1634*c217d954SCole Faust std::unique_ptr<IFunction> create_quantization_layer(QuantizationLayerNode &node)
1635*c217d954SCole Faust {
1636*c217d954SCole Faust validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1637*c217d954SCole Faust
1638*c217d954SCole Faust // Extract IO and info
1639*c217d954SCole Faust typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1640*c217d954SCole Faust typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1641*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(input == nullptr);
1642*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(output == nullptr);
1643*c217d954SCole Faust
1644*c217d954SCole Faust // Create and configure function
1645*c217d954SCole Faust auto func = std::make_unique<QuantizationLayerFunction>();
1646*c217d954SCole Faust func->configure(input, output);
1647*c217d954SCole Faust
1648*c217d954SCole Faust // Log info
1649*c217d954SCole Faust ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1650*c217d954SCole Faust << node.name()
1651*c217d954SCole Faust << " Type: " << node.type()
1652*c217d954SCole Faust << " Target: " << TargetInfo::TargetType
1653*c217d954SCole Faust << " Data Type: " << input->info()->data_type()
1654*c217d954SCole Faust << " Input shape: " << input->info()->tensor_shape()
1655*c217d954SCole Faust << " Output shape: " << output->info()->tensor_shape()
1656*c217d954SCole Faust << std::endl);
1657*c217d954SCole Faust
1658*c217d954SCole Faust return std::move(func);
1659*c217d954SCole Faust }
1660*c217d954SCole Faust
1661*c217d954SCole Faust /** Create a backend reduction operation layer function
1662*c217d954SCole Faust *
1663*c217d954SCole Faust * @tparam ReductionOperationFunction Backend reduction operation function
1664*c217d954SCole Faust * @tparam TargetInfo Target-specific information
1665*c217d954SCole Faust *
1666*c217d954SCole Faust * @param[in] node Node to create the backend function for
1667*c217d954SCole Faust * @param[in] ctx Graph context
1668*c217d954SCole Faust *
1669*c217d954SCole Faust * @return Backend reduction sum layer function
1670*c217d954SCole Faust */
1671*c217d954SCole Faust template <typename ReductionOperationFunction, typename TargetInfo>
create_reduction_operation_layer(ReductionLayerNode & node,GraphContext & ctx)1672*c217d954SCole Faust std::unique_ptr<IFunction> create_reduction_operation_layer(ReductionLayerNode &node, GraphContext &ctx)
1673*c217d954SCole Faust {
1674*c217d954SCole Faust validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1675*c217d954SCole Faust
1676*c217d954SCole Faust // Extract IO and info
1677*c217d954SCole Faust typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1678*c217d954SCole Faust typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1679*c217d954SCole Faust ReductionOperation op = node.op();
1680*c217d954SCole Faust int axis = node.axis();
1681*c217d954SCole Faust bool keep_dims = node.keep_dims();
1682*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(input == nullptr);
1683*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(output == nullptr);
1684*c217d954SCole Faust
1685*c217d954SCole Faust // Create and configure function
1686*c217d954SCole Faust auto func = std::make_unique<ReductionOperationFunction>(get_memory_manager(ctx, TargetInfo::TargetType));
1687*c217d954SCole Faust func->configure(input, output, axis, op, keep_dims);
1688*c217d954SCole Faust
1689*c217d954SCole Faust // Log info
1690*c217d954SCole Faust ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1691*c217d954SCole Faust << node.name()
1692*c217d954SCole Faust << " Type: " << node.type()
1693*c217d954SCole Faust << " Target: " << TargetInfo::TargetType
1694*c217d954SCole Faust << " Data Type: " << input->info()->data_type()
1695*c217d954SCole Faust << " Input shape: " << input->info()->tensor_shape()
1696*c217d954SCole Faust << " Output shape: " << output->info()->tensor_shape()
1697*c217d954SCole Faust << " Operation: " << op
1698*c217d954SCole Faust << " Axis: " << axis
1699*c217d954SCole Faust << " Keep dimensions:" << keep_dims
1700*c217d954SCole Faust << std::endl);
1701*c217d954SCole Faust
1702*c217d954SCole Faust return std::move(func);
1703*c217d954SCole Faust }
1704*c217d954SCole Faust
/** Create a backend reorg layer function
 *
 * @tparam ReorgLayerFunction Backend reorg function
 * @tparam TargetInfo         Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend reorg layer function
 */
template <typename ReorgLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_reorg_layer(ReorgLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function; the node carries the reorg stride
    auto func = std::make_unique<ReorgLayerFunction>();
    func->configure(input, output, node.stride());

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}
1741*c217d954SCole Faust
1742*c217d954SCole Faust /** Create a backend reshape layer function
1743*c217d954SCole Faust *
1744*c217d954SCole Faust * @tparam ReshapeLayerFunction Backend reshape function
1745*c217d954SCole Faust * @tparam TargetInfo Target-specific information
1746*c217d954SCole Faust *
1747*c217d954SCole Faust * @param[in] node Node to create the backend function for
1748*c217d954SCole Faust *
1749*c217d954SCole Faust * @return Backend reshape layer function
1750*c217d954SCole Faust */
1751*c217d954SCole Faust template <typename ReshapeLayerFunction, typename TargetInfo>
create_reshape_layer(ReshapeLayerNode & node)1752*c217d954SCole Faust std::unique_ptr<IFunction> create_reshape_layer(ReshapeLayerNode &node)
1753*c217d954SCole Faust {
1754*c217d954SCole Faust validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1755*c217d954SCole Faust
1756*c217d954SCole Faust // Extract IO and info
1757*c217d954SCole Faust typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1758*c217d954SCole Faust typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1759*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(input == nullptr);
1760*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(output == nullptr);
1761*c217d954SCole Faust
1762*c217d954SCole Faust // Create and configure function
1763*c217d954SCole Faust auto func = std::make_unique<ReshapeLayerFunction>();
1764*c217d954SCole Faust func->configure(input, output);
1765*c217d954SCole Faust
1766*c217d954SCole Faust // Log info
1767*c217d954SCole Faust ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1768*c217d954SCole Faust << node.name()
1769*c217d954SCole Faust << " Type: " << node.type()
1770*c217d954SCole Faust << " Target: " << TargetInfo::TargetType
1771*c217d954SCole Faust << " Data Type: " << input->info()->data_type()
1772*c217d954SCole Faust << " Input shape: " << input->info()->tensor_shape()
1773*c217d954SCole Faust << " Output shape: " << output->info()->tensor_shape()
1774*c217d954SCole Faust << std::endl);
1775*c217d954SCole Faust
1776*c217d954SCole Faust return std::move(func);
1777*c217d954SCole Faust }
1778*c217d954SCole Faust
1779*c217d954SCole Faust /** Create a backend resize layer function
1780*c217d954SCole Faust *
1781*c217d954SCole Faust * @tparam ResizeLayerFunction Backend resize function
1782*c217d954SCole Faust * @tparam TargetInfo Target-specific information
1783*c217d954SCole Faust *
1784*c217d954SCole Faust * @param[in] node Node to create the backend function for
1785*c217d954SCole Faust *
1786*c217d954SCole Faust * @return Backend resize layer function
1787*c217d954SCole Faust */
1788*c217d954SCole Faust template <typename ResizeLayerFunction, typename TargetInfo>
create_resize_layer(ResizeLayerNode & node)1789*c217d954SCole Faust std::unique_ptr<IFunction> create_resize_layer(ResizeLayerNode &node)
1790*c217d954SCole Faust {
1791*c217d954SCole Faust validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1792*c217d954SCole Faust
1793*c217d954SCole Faust // Extract IO and info
1794*c217d954SCole Faust typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1795*c217d954SCole Faust typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1796*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(input == nullptr);
1797*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(output == nullptr);
1798*c217d954SCole Faust const InterpolationPolicy policy = node.policy();
1799*c217d954SCole Faust
1800*c217d954SCole Faust // Create and configure function
1801*c217d954SCole Faust auto func = std::make_unique<ResizeLayerFunction>();
1802*c217d954SCole Faust func->configure(input, output, ScaleKernelInfo{ policy, BorderMode::CONSTANT, PixelValue(), SamplingPolicy::CENTER, false, false });
1803*c217d954SCole Faust
1804*c217d954SCole Faust // Log info
1805*c217d954SCole Faust ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1806*c217d954SCole Faust << node.name()
1807*c217d954SCole Faust << " Type: " << node.type()
1808*c217d954SCole Faust << " Target: " << TargetInfo::TargetType
1809*c217d954SCole Faust << " Data Type: " << input->info()->data_type()
1810*c217d954SCole Faust << " Input shape: " << input->info()->tensor_shape()
1811*c217d954SCole Faust << " Output shape: " << output->info()->tensor_shape()
1812*c217d954SCole Faust << " Interpolation: " << policy
1813*c217d954SCole Faust << std::endl);
1814*c217d954SCole Faust
1815*c217d954SCole Faust return std::move(func);
1816*c217d954SCole Faust }
1817*c217d954SCole Faust
1818*c217d954SCole Faust /** Create a backend ROI align layer function
1819*c217d954SCole Faust *
1820*c217d954SCole Faust * @tparam ROIAlignLayerFunction ROI Align function
1821*c217d954SCole Faust * @tparam TargetInfo Target-specific information
1822*c217d954SCole Faust *
1823*c217d954SCole Faust * @param[in] node Node to create the backend function for
1824*c217d954SCole Faust *
1825*c217d954SCole Faust * @return ROI Align layer function
1826*c217d954SCole Faust */
1827*c217d954SCole Faust template <typename ROIAlignLayerFunction, typename TargetInfo>
create_roi_align_layer(ROIAlignLayerNode & node)1828*c217d954SCole Faust std::unique_ptr<IFunction> create_roi_align_layer(ROIAlignLayerNode &node)
1829*c217d954SCole Faust {
1830*c217d954SCole Faust validate_node<TargetInfo>(node, 2 /* expected inputs */, 1 /* expected outputs */);
1831*c217d954SCole Faust
1832*c217d954SCole Faust // Extract IO and info
1833*c217d954SCole Faust typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1834*c217d954SCole Faust typename TargetInfo::TensorType *rois = get_backing_tensor<TargetInfo>(node.input(1));
1835*c217d954SCole Faust typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1836*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(input == nullptr);
1837*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(output == nullptr);
1838*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(rois == nullptr);
1839*c217d954SCole Faust
1840*c217d954SCole Faust const ROIPoolingLayerInfo pool_info = node.pooling_info();
1841*c217d954SCole Faust
1842*c217d954SCole Faust // Create and configure function
1843*c217d954SCole Faust auto func = std::make_unique<ROIAlignLayerFunction>();
1844*c217d954SCole Faust
1845*c217d954SCole Faust func->configure(input, rois, output, pool_info);
1846*c217d954SCole Faust
1847*c217d954SCole Faust // Log info
1848*c217d954SCole Faust ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1849*c217d954SCole Faust << node.name()
1850*c217d954SCole Faust << " Type: " << node.type()
1851*c217d954SCole Faust << " Target: " << TargetInfo::TargetType
1852*c217d954SCole Faust << " Data Type: " << input->info()->data_type()
1853*c217d954SCole Faust << " Input shape: " << input->info()->tensor_shape()
1854*c217d954SCole Faust << " Output shape: " << output->info()->tensor_shape()
1855*c217d954SCole Faust << " ROIs shape: " << rois->info()->tensor_shape()
1856*c217d954SCole Faust << " ROIPooling width: " << pool_info.pooled_width()
1857*c217d954SCole Faust << " ROIPooling height: " << pool_info.pooled_height()
1858*c217d954SCole Faust << std::endl);
1859*c217d954SCole Faust
1860*c217d954SCole Faust return std::move(func);
1861*c217d954SCole Faust }
1862*c217d954SCole Faust
1863*c217d954SCole Faust /** Create a backend slice layer function
1864*c217d954SCole Faust *
1865*c217d954SCole Faust * @tparam SliceLayerFunction Backend slice function
1866*c217d954SCole Faust * @tparam TargetInfo Target-specific information
1867*c217d954SCole Faust *
1868*c217d954SCole Faust * @param[in] node Node to create the backend function for
1869*c217d954SCole Faust *
1870*c217d954SCole Faust * @return Backend slice layer function
1871*c217d954SCole Faust */
1872*c217d954SCole Faust template <typename SliceLayerFunction, typename TargetInfo>
create_slice_layer(SliceLayerNode & node)1873*c217d954SCole Faust std::unique_ptr<IFunction> create_slice_layer(SliceLayerNode &node)
1874*c217d954SCole Faust {
1875*c217d954SCole Faust validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1876*c217d954SCole Faust
1877*c217d954SCole Faust // Extract IO and info
1878*c217d954SCole Faust typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1879*c217d954SCole Faust typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1880*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(input == nullptr);
1881*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(output == nullptr);
1882*c217d954SCole Faust
1883*c217d954SCole Faust // Create and configure function
1884*c217d954SCole Faust auto func = std::make_unique<SliceLayerFunction>();
1885*c217d954SCole Faust func->configure(input, output, node.starts(), node.ends());
1886*c217d954SCole Faust
1887*c217d954SCole Faust // Log info
1888*c217d954SCole Faust ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1889*c217d954SCole Faust << node.name()
1890*c217d954SCole Faust << " Type: " << node.type()
1891*c217d954SCole Faust << " Target: " << TargetInfo::TargetType
1892*c217d954SCole Faust << " Data Type: " << input->info()->data_type()
1893*c217d954SCole Faust << " Input shape: " << input->info()->tensor_shape()
1894*c217d954SCole Faust << " Output shape: " << output->info()->tensor_shape()
1895*c217d954SCole Faust << std::endl);
1896*c217d954SCole Faust
1897*c217d954SCole Faust return std::move(func);
1898*c217d954SCole Faust }
1899*c217d954SCole Faust
1900*c217d954SCole Faust /** Create a backend softmax layer function
1901*c217d954SCole Faust *
1902*c217d954SCole Faust * @tparam SoftmaxLayerFunction Backend softmax function
1903*c217d954SCole Faust * @tparam TargetInfo Target-specific information
1904*c217d954SCole Faust *
1905*c217d954SCole Faust * @param[in] node Node to create the backend function for
1906*c217d954SCole Faust * @param[in] ctx Graph context
1907*c217d954SCole Faust *
1908*c217d954SCole Faust * @return Backend softmax layer function
1909*c217d954SCole Faust */
1910*c217d954SCole Faust template <typename SoftmaxLayerFunction, typename TargetInfo>
create_softmax_layer(SoftmaxLayerNode & node,GraphContext & ctx)1911*c217d954SCole Faust std::unique_ptr<IFunction> create_softmax_layer(SoftmaxLayerNode &node, GraphContext &ctx)
1912*c217d954SCole Faust {
1913*c217d954SCole Faust validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
1914*c217d954SCole Faust
1915*c217d954SCole Faust // Extract IO and info
1916*c217d954SCole Faust typename TargetInfo::TensorType *input = get_backing_tensor<TargetInfo>(node.input(0));
1917*c217d954SCole Faust typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1918*c217d954SCole Faust const float beta = node.beta();
1919*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(input == nullptr);
1920*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(output == nullptr);
1921*c217d954SCole Faust
1922*c217d954SCole Faust // Create and configure function
1923*c217d954SCole Faust auto func = std::make_unique<SoftmaxLayerFunction>(get_memory_manager(ctx, TargetInfo::TargetType));
1924*c217d954SCole Faust func->configure(input, output, beta);
1925*c217d954SCole Faust
1926*c217d954SCole Faust // Log info
1927*c217d954SCole Faust ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1928*c217d954SCole Faust << node.name()
1929*c217d954SCole Faust << " Type: " << node.type()
1930*c217d954SCole Faust << " Target: " << TargetInfo::TargetType
1931*c217d954SCole Faust << " Data Type: " << input->info()->data_type()
1932*c217d954SCole Faust << " Input shape: " << input->info()->tensor_shape()
1933*c217d954SCole Faust << " Output shape: " << output->info()->tensor_shape()
1934*c217d954SCole Faust << std::endl);
1935*c217d954SCole Faust
1936*c217d954SCole Faust return std::move(func);
1937*c217d954SCole Faust }
1938*c217d954SCole Faust
1939*c217d954SCole Faust /** Create a backend layer stack function
1940*c217d954SCole Faust *
1941*c217d954SCole Faust * @tparam StackLayerFunction Backend stack function
1942*c217d954SCole Faust * @tparam TargetInfo Target-specific information
1943*c217d954SCole Faust *
1944*c217d954SCole Faust * @param[in] node Node to create the backend function for
1945*c217d954SCole Faust *
1946*c217d954SCole Faust * @return Backend stack layer function
1947*c217d954SCole Faust */
1948*c217d954SCole Faust template <typename StackLayerFunction, typename TargetInfo>
create_stack_layer(StackLayerNode & node)1949*c217d954SCole Faust std::unique_ptr<arm_compute::IFunction> create_stack_layer(StackLayerNode &node)
1950*c217d954SCole Faust {
1951*c217d954SCole Faust ARM_COMPUTE_LOG_GRAPH_VERBOSE("Creating Stack node with ID : " << node.id() << " and Name: " << node.name() << std::endl);
1952*c217d954SCole Faust ARM_COMPUTE_ERROR_ON(node.num_outputs() != 1);
1953*c217d954SCole Faust
1954*c217d954SCole Faust // Extract IO and info
1955*c217d954SCole Faust std::vector<typename TargetInfo::TensorType *> inputs;
1956*c217d954SCole Faust for(unsigned int i = 0; i < node.num_inputs(); ++i)
1957*c217d954SCole Faust {
1958*c217d954SCole Faust inputs.push_back(get_backing_tensor<TargetInfo>(node.input(i)));
1959*c217d954SCole Faust }
1960*c217d954SCole Faust typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
1961*c217d954SCole Faust const int axis = node.axis();
1962*c217d954SCole Faust
1963*c217d954SCole Faust // Create and configure function
1964*c217d954SCole Faust auto func = std::make_unique<StackLayerFunction>();
1965*c217d954SCole Faust func->configure(inputs, axis, output);
1966*c217d954SCole Faust
1967*c217d954SCole Faust // Log info
1968*c217d954SCole Faust ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
1969*c217d954SCole Faust << node.name()
1970*c217d954SCole Faust << " Type: " << node.type()
1971*c217d954SCole Faust << " Target: " << TargetInfo::TargetType
1972*c217d954SCole Faust << " Data Type: " << output->info()->data_type()
1973*c217d954SCole Faust << " Inputs shape: " << inputs[0]->info()->tensor_shape()
1974*c217d954SCole Faust << " Output shape: " << output->info()->tensor_shape()
1975*c217d954SCole Faust << " Num Inputs: " << inputs.size()
1976*c217d954SCole Faust << " Axis: " << axis
1977*c217d954SCole Faust << std::endl);
1978*c217d954SCole Faust
1979*c217d954SCole Faust return std::move(func);
1980*c217d954SCole Faust }
1981*c217d954SCole Faust
/** Create a backend strided slice layer function
 *
 * @tparam StridedSliceLayerFunction Backend strided slice function
 * @tparam TargetInfo                Target-specific information
 *
 * @param[in] node Node to create the backend function for
 *
 * @return Backend strided slice layer function
 */
template <typename StridedSliceLayerFunction, typename TargetInfo>
std::unique_ptr<IFunction> create_strided_slice_layer(StridedSliceLayerNode &node)
{
    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);

    // Extract IO and info
    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
    Coordinates                      starts  = node.starts();
    Coordinates                      ends    = node.ends();
    BiStrides                        strides = node.strides();
    StridedSliceLayerInfo            info    = node.strided_slice_info();

    ARM_COMPUTE_ERROR_ON(input == nullptr);
    ARM_COMPUTE_ERROR_ON(output == nullptr);

    // Create and configure function; masks select which dimensions use full range / shrink
    auto func = std::make_unique<StridedSliceLayerFunction>();
    func->configure(input, output, starts, ends, strides, info.begin_mask(), info.end_mask(), info.shrink_axis_mask());

    // Log info
    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
                               << node.name()
                               << " Type: " << node.type()
                               << " Target: " << TargetInfo::TargetType
                               << " Data Type: " << input->info()->data_type()
                               << " Input shape: " << input->info()->tensor_shape()
                               << " Output shape: " << output->info()->tensor_shape()
                               << std::endl);

    return std::move(func);
}
2023*c217d954SCole Faust } // namespace detail
2024*c217d954SCole Faust } // namespace backends
2025*c217d954SCole Faust } // namespace graph
2026*c217d954SCole Faust } // namespace arm_compute
2027*c217d954SCole Faust
2028*c217d954SCole Faust #endif /* ARM_COMPUTE_GRAPH_BACKENDS_DETAIL_FUNCTION_HELPERS_H */
2029