/*
 * Copyright (c) 2018-2021 Arm Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#ifndef ARM_COMPUTE_GRAPH_LAYERS_H
#define ARM_COMPUTE_GRAPH_LAYERS_H

#include "arm_compute/graph/GraphBuilder.h"
#include "arm_compute/graph/Types.h"
#include "arm_compute/graph/frontend/ILayer.h"
#include "arm_compute/graph/frontend/IStream.h"
#include "arm_compute/graph/frontend/SubStream.h"

#include "arm_compute/core/utils/misc/Utility.h"

#include <memory>
#include <string>

namespace arm_compute
{
namespace graph
{
namespace frontend
{
/** Input Layer */
class InputLayer final : public ILayer
{
public:
    /** Construct an input layer.
     *
     * @param[in] desc     Description of input tensor.
     * @param[in] accessor Accessor to get input tensor data from.
     */
    InputLayer(TensorDescriptor desc, ITensorAccessorUPtr accessor)
        : _desc(desc), _accessor(std::move(accessor))
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams common_params = { name(), s.hints().target_hint };
        return GraphBuilder::add_input_node(s.graph(), common_params, _desc, std::move(_accessor));
    }

private:
    TensorDescriptor    _desc;
    ITensorAccessorUPtr _accessor;
};

/** Constant Layer */
class ConstantLayer final : public ILayer
{
public:
    /** Construct a constant layer.
     *
     * @param[in] desc     Description of input tensor.
     * @param[in] accessor Accessor to get input tensor data from.
     */
    ConstantLayer(TensorDescriptor desc, ITensorAccessorUPtr accessor)
        : _desc(desc), _accessor(std::move(accessor))
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams common_params = { name(), s.hints().target_hint };
        return GraphBuilder::add_const_node(s.graph(), common_params, _desc, std::move(_accessor));
    }

private:
    TensorDescriptor    _desc;
    ITensorAccessorUPtr _accessor;
};

/** Output Layer */
class OutputLayer final : public ILayer
{
public:
    /** Construct an output layer.
     *
     * @param[in] accessor       Accessor to give output tensor data to.
     * @param[in] connection_idx (Optional) Input connection index
     */
    OutputLayer(ITensorAccessorUPtr accessor, unsigned int connection_idx = 0)
        : _accessor(std::move(accessor)), _connection_idx(connection_idx)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), _connection_idx };
        return GraphBuilder::add_output_node(s.graph(), common_params, input, std::move(_accessor));
    }

private:
    ITensorAccessorUPtr _accessor;
    unsigned int        _connection_idx;
};

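// A minimal usage sketch (illustrative, not part of the original header): layers
// are chained into a Stream with operator<<. The descriptor values and the
// get_input_accessor()/get_output_accessor() helpers are assumptions borrowed
// from the library's examples utilities; any ITensorAccessorUPtr works.
//
//   Stream graph(0, "example");
//   graph << Target::NEON
//         << InputLayer(TensorDescriptor(TensorShape(224U, 224U, 3U, 1U), DataType::F32),
//                       get_input_accessor(common_params))
//         << OutputLayer(get_output_accessor(common_params, 5));
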
/** Activation Layer */
class ActivationLayer final : public ILayer
{
public:
    /** Construct an activation layer.
     *
     * @param[in] act_info       Activation information
     * @param[in] out_quant_info (Optional) Output quantization info
     */
    ActivationLayer(ActivationLayerInfo    act_info,
                    const QuantizationInfo out_quant_info = QuantizationInfo())
        : _act_info(act_info),
          _out_quant_info(std::move(out_quant_info))
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_activation_node(s.graph(), common_params, input, _act_info, std::move(_out_quant_info));
    }

private:
    ActivationLayerInfo    _act_info;
    const QuantizationInfo _out_quant_info;
};

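// Hedged example: a standalone ReLU appended to a stream; the enum value follows
// the core ActivationLayerInfo API.
//
//   graph << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
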
/** ArgMinMax Layer */
class ArgMinMaxLayer final : public ILayer
{
public:
    /** Construct an arg min max layer.
     *
     * @param[in] op             Reduction Operation: min or max
     * @param[in] axis           Axis to perform reduction along
     * @param[in] out_data_type  (Optional) Output tensor data type
     * @param[in] out_quant_info (Optional) Output quantization info
     */
    ArgMinMaxLayer(ReductionOperation     op,
                   unsigned int           axis,
                   DataType               out_data_type  = DataType::UNKNOWN,
                   const QuantizationInfo out_quant_info = QuantizationInfo())
        : _op(op),
          _axis(axis),
          _out_data_type(out_data_type),
          _out_quant_info(std::move(out_quant_info))
    {
    }

    /** Create layer and add to the given stream.
     *
     * @param[in] s Stream to add layer to.
     *
     * @return ID of the created node.
     */
    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_arg_min_max_node(s.graph(), common_params, input, _op, _axis, _out_data_type, std::move(_out_quant_info));
    }

private:
    ReductionOperation _op;
    unsigned int       _axis;
    DataType           _out_data_type;
    QuantizationInfo   _out_quant_info;
};

/** Batch Normalization Layer */
class BatchNormalizationLayer final : public ILayer
{
public:
    /** Construct a batch normalization layer.
     *
     * @param[in] mean    Accessor to get mean tensor data from.
     * @param[in] var     Accessor to get var tensor data from.
     * @param[in] gamma   (Optional) Accessor to get gamma tensor data from. Default: nullptr.
     * @param[in] beta    (Optional) Accessor to get beta tensor data from. Default: nullptr.
     * @param[in] epsilon (Optional) Epsilon value. Default: 0.001.
     */
    BatchNormalizationLayer(ITensorAccessorUPtr mean,
                            ITensorAccessorUPtr var,
                            ITensorAccessorUPtr gamma   = nullptr,
                            ITensorAccessorUPtr beta    = nullptr,
                            float               epsilon = 0.001f)
        : _mean(std::move(mean)), _var(std::move(var)), _gamma(std::move(gamma)), _beta(std::move(beta)), _epsilon(epsilon)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        ARM_COMPUTE_ERROR_ON(_mean == nullptr);
        ARM_COMPUTE_ERROR_ON(_var == nullptr);

        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_batch_normalization_node(s.graph(), common_params, input, _epsilon,
                                                          std::move(_mean), std::move(_var), std::move(_beta), std::move(_gamma));
    }

private:
    ITensorAccessorUPtr _mean;
    ITensorAccessorUPtr _var;
    ITensorAccessorUPtr _gamma;
    ITensorAccessorUPtr _beta;
    float               _epsilon;
};

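// Hedged example: batch normalization statistics loaded through accessors.
// get_weights_accessor() and data_path are assumed helpers from the examples'
// GraphUtils, shown for illustration only.
//
//   graph << BatchNormalizationLayer(get_weights_accessor(data_path, "bn_mean.npy"),
//                                    get_weights_accessor(data_path, "bn_var.npy"),
//                                    get_weights_accessor(data_path, "bn_gamma.npy"),
//                                    get_weights_accessor(data_path, "bn_beta.npy"),
//                                    0.001f);
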
/** Bounding Box Transform Layer */
class BoundingBoxTransformLayer final : public ILayer
{
public:
    /** Construct a bounding box transform layer.
     *
     * @param[in] sub_stream_input  Graph sub-stream for the input
     * @param[in] sub_stream_deltas Graph sub-stream for the deltas
     * @param[in] info              Contains BoundingBox operation information described in @ref BoundingBoxTransformInfo.
     */
    BoundingBoxTransformLayer(SubStream &&sub_stream_input, SubStream &&sub_stream_deltas, BoundingBoxTransformInfo info)
        : _ss_input(sub_stream_input), _ss_deltas(sub_stream_deltas), _bbox_info(info)
    {
    }

    /** Create layer and add to the given stream.
     *
     * @param[in] s Stream to add layer to.
     *
     * @return ID of the created node.
     */
    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { _ss_input.tail_node(), 0 };
        NodeIdxPair deltas        = { _ss_deltas.tail_node(), 0 };
        return GraphBuilder::add_bounding_box_transform_node(s.graph(), common_params, input, deltas, _bbox_info);
    }

private:
    SubStream                _ss_input;
    SubStream                _ss_deltas;
    BoundingBoxTransformInfo _bbox_info;
};

/** Channel Shuffle Layer */
class ChannelShuffleLayer final : public ILayer
{
public:
    /** Construct a Channel Shuffle layer.
     *
     * @param[in] num_groups Number of groups
     */
    ChannelShuffleLayer(unsigned int num_groups)
        : _num_groups(num_groups)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_channel_shuffle_node(s.graph(), common_params, input, _num_groups);
    }

private:
    unsigned int _num_groups;
};

/** Concat Layer */
class ConcatLayer final : public ILayer
{
public:
    /** Construct a concatenation layer
     *
     * @param[in] sub_stream1      First graph branch
     * @param[in] sub_stream2      Second graph branch
     * @param[in] rest_sub_streams Rest sub-graph branches
     */
    template <typename... Ts>
    ConcatLayer(SubStream &&sub_stream1, SubStream &&sub_stream2, Ts &&... rest_sub_streams)
        : _sub_streams(), _concat_descriptor(DataLayoutDimension::CHANNEL)
    {
        _sub_streams.push_back(std::make_unique<SubStream>(std::move(sub_stream1)));
        _sub_streams.push_back(std::make_unique<SubStream>(std::move(sub_stream2)));

        utility::for_each([&](SubStream && sub_stream)
        {
            _sub_streams.push_back(std::make_unique<SubStream>(std::move(sub_stream)));
        },
        std::move(rest_sub_streams)...);
    }
    /** Construct a concatenation layer
     *
     * @param[in] concat_descriptor Concat layer descriptor
     * @param[in] sub_stream1       First graph branch
     * @param[in] sub_stream2       Second graph branch
     * @param[in] rest_sub_streams  Rest sub-graph branches
     */
    template <typename... Ts>
    ConcatLayer(descriptors::ConcatLayerDescriptor concat_descriptor, SubStream &&sub_stream1, SubStream &&sub_stream2, Ts &&... rest_sub_streams)
        : _sub_streams(), _concat_descriptor(concat_descriptor)
    {
        _sub_streams.push_back(std::make_unique<SubStream>(std::move(sub_stream1)));
        _sub_streams.push_back(std::make_unique<SubStream>(std::move(sub_stream2)));

        utility::for_each([&](SubStream && sub_stream)
        {
            _sub_streams.push_back(std::make_unique<SubStream>(std::move(sub_stream)));
        },
        std::move(rest_sub_streams)...);
    }
    /** Construct a concat layer
     *
     * @param[in] sub_stream Sub-stream
     */
    template <typename... Ts>
    ConcatLayer(SubStream &&sub_stream)
        : _sub_streams(), _concat_descriptor(DataLayoutDimension::CHANNEL)
    {
        _sub_streams.push_back(std::make_unique<SubStream>(std::move(sub_stream)));
    }
    NodeID create_layer(IStream &s) override
    {
        NodeID     nid           = EmptyNodeID;
        NodeParams common_params = { name(), s.hints().target_hint };
        if(_sub_streams.size() == 1 && _sub_streams.at(0) != nullptr)
        {
            nid = _sub_streams[0]->tail_node();
        }
        else
        {
            // Collect tail nodes and concatenate
            std::vector<NodeIdxPair> nodes;
            for(auto &ss : _sub_streams)
            {
                if(ss && (ss->tail_node() != EmptyNodeID))
                {
                    const auto tail_node = s.graph().node(ss->tail_node());
                    if(tail_node != nullptr && tail_node->type() != NodeType::Output)
                    {
                        nodes.push_back({ ss->tail_node(), 0 });
                    }
                }
            }
            nid = GraphBuilder::add_concatenate_node(s.graph(), common_params, nodes, _concat_descriptor);
        }
        return nid;
    }

private:
    std::vector<std::unique_ptr<SubStream>> _sub_streams;
    descriptors::ConcatLayerDescriptor      _concat_descriptor;
};

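// Hedged sketch of branching (not from the original source): SubStream forks
// share the parent stream's graph, and ConcatLayer joins their tail nodes
// (along CHANNEL by default). Layer arguments are elided placeholders.
//
//   SubStream branch_a(graph);
//   branch_a << ConvolutionLayer(/* ... */);
//   SubStream branch_b(graph);
//   branch_b << ConvolutionLayer(/* ... */);
//   graph << ConcatLayer(std::move(branch_a), std::move(branch_b));
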
/** Convolution Layer */
class ConvolutionLayer final : public ILayer
{
public:
    /** Construct a convolution layer.
     *
     * @param[in] conv_width         Convolution width.
     * @param[in] conv_height        Convolution height.
     * @param[in] ofm                Output feature map.
     * @param[in] weights            Accessor to get kernel weights from.
     * @param[in] bias               Accessor to get kernel bias from.
     * @param[in] conv_info          Padding and stride information.
     * @param[in] num_groups         (Optional) Number of groups. Default: 1.
     * @param[in] weights_quant_info (Optional) Weights quantization information
     * @param[in] out_quant_info     (Optional) Output quantization info
     */
    ConvolutionLayer(unsigned int           conv_width,
                     unsigned int           conv_height,
                     unsigned int           ofm,
                     ITensorAccessorUPtr    weights,
                     ITensorAccessorUPtr    bias,
                     PadStrideInfo          conv_info,
                     unsigned int           num_groups         = 1,
                     const QuantizationInfo weights_quant_info = QuantizationInfo(),
                     const QuantizationInfo out_quant_info     = QuantizationInfo())
        : _conv_width(conv_width),
          _conv_height(conv_height),
          _ofm(ofm),
          _conv_info(std::move(conv_info)),
          _num_groups(num_groups),
          _weights(std::move(weights)),
          _bias(std::move(bias)),
          _weights_quant_info(std::move(weights_quant_info)),
          _out_quant_info(std::move(out_quant_info))
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeIdxPair input         = { s.tail_node(), 0 };
        NodeParams  common_params = { name(), s.hints().target_hint };
        return GraphBuilder::add_convolution_node(s.graph(), common_params, input,
                                                  Size2D(_conv_width, _conv_height), _ofm, _conv_info, _num_groups,
                                                  s.hints().convolution_method_hint, s.hints().fast_math_hint,
                                                  std::move(_weights), std::move(_bias), std::move(_weights_quant_info), std::move(_out_quant_info));
    }

private:
    unsigned int           _conv_width;
    unsigned int           _conv_height;
    unsigned int           _ofm;
    const PadStrideInfo    _conv_info;
    unsigned int           _num_groups;
    ITensorAccessorUPtr    _weights;
    ITensorAccessorUPtr    _bias;
    const QuantizationInfo _weights_quant_info;
    const QuantizationInfo _out_quant_info;
};

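// Illustrative only: a 3x3 convolution producing 64 feature maps with stride 1
// and padding 1. get_weights_accessor() and data_path are assumed example
// helpers; any ITensorAccessorUPtr works.
//
//   graph << ConvolutionLayer(3U, 3U, 64U,
//                             get_weights_accessor(data_path, "conv_w.npy"),
//                             get_weights_accessor(data_path, "conv_b.npy"),
//                             PadStrideInfo(1, 1, 1, 1));
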
/** Deconvolution Layer */
class DeconvolutionLayer final : public ILayer
{
public:
    /** Construct a deconvolution layer.
     *
     * @param[in] conv_width  Convolution width.
     * @param[in] conv_height Convolution height.
     * @param[in] ofm         Output feature map.
     * @param[in] weights     Accessor to get kernel weights from.
     * @param[in] bias        Accessor to get kernel bias from.
     * @param[in] deconv_info Padding and stride information.
     */
    DeconvolutionLayer(unsigned int        conv_width,
                       unsigned int        conv_height,
                       unsigned int        ofm,
                       ITensorAccessorUPtr weights,
                       ITensorAccessorUPtr bias,
                       PadStrideInfo       deconv_info)
        : _conv_width(conv_width),
          _conv_height(conv_height),
          _ofm(ofm),
          _deconv_info(std::move(deconv_info)),
          _weights(std::move(weights)),
          _bias(std::move(bias))
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeIdxPair input         = { s.tail_node(), 0 };
        NodeParams  common_params = { name(), s.hints().target_hint };
        return GraphBuilder::add_deconvolution_node(s.graph(), common_params, input,
                                                    Size2D(_conv_width, _conv_height), _ofm, _deconv_info,
                                                    std::move(_weights), std::move(_bias));
    }

private:
    unsigned int        _conv_width;
    unsigned int        _conv_height;
    unsigned int        _ofm;
    const PadStrideInfo _deconv_info;
    ITensorAccessorUPtr _weights;
    ITensorAccessorUPtr _bias;
};

/** Depthwise Convolution Layer */
class DepthwiseConvolutionLayer final : public ILayer
{
public:
    /** Construct a depthwise convolution layer.
     *
     * @param[in] conv_width         Convolution width.
     * @param[in] conv_height        Convolution height.
     * @param[in] weights            Accessor to get kernel weights from.
     * @param[in] bias               Accessor to get kernel bias from.
     * @param[in] conv_info          Padding and stride information.
     * @param[in] depth_multiplier   (Optional) Depth multiplier parameter.
     * @param[in] weights_quant_info (Optional) Quantization info used for weights
     * @param[in] out_quant_info     (Optional) Output quantization info
     */
    DepthwiseConvolutionLayer(unsigned int           conv_width,
                              unsigned int           conv_height,
                              ITensorAccessorUPtr    weights,
                              ITensorAccessorUPtr    bias,
                              PadStrideInfo          conv_info,
                              int                    depth_multiplier   = 1,
                              const QuantizationInfo weights_quant_info = QuantizationInfo(),
                              const QuantizationInfo out_quant_info     = QuantizationInfo())
        : _conv_width(conv_width),
          _conv_height(conv_height),
          _conv_info(std::move(conv_info)),
          _weights(std::move(weights)),
          _bias(std::move(bias)),
          _depth_multiplier(depth_multiplier),
          _weights_quant_info(std::move(weights_quant_info)),
          _out_quant_info(std::move(out_quant_info))
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeIdxPair input         = { s.tail_node(), 0 };
        NodeParams  common_params = { name(), s.hints().target_hint };
        return GraphBuilder::add_depthwise_convolution_node(s.graph(), common_params,
                                                            input, Size2D(_conv_width, _conv_height), _conv_info, _depth_multiplier,
                                                            s.hints().depthwise_convolution_method_hint,
                                                            std::move(_weights), std::move(_bias), std::move(_weights_quant_info), std::move(_out_quant_info));
    }

private:
    unsigned int           _conv_width;
    unsigned int           _conv_height;
    const PadStrideInfo    _conv_info;
    ITensorAccessorUPtr    _weights;
    ITensorAccessorUPtr    _bias;
    int                    _depth_multiplier;
    const QuantizationInfo _weights_quant_info;
    const QuantizationInfo _out_quant_info;
};

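// Hedged sketch: a 3x3 depthwise convolution with the default depth multiplier
// of 1; the accessor helpers are illustrative assumptions.
//
//   graph << DepthwiseConvolutionLayer(3U, 3U,
//                                      get_weights_accessor(data_path, "dw_w.npy"),
//                                      get_weights_accessor(data_path, "dw_b.npy"),
//                                      PadStrideInfo(1, 1, 1, 1));
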
/** DepthToSpace Layer */
class DepthToSpaceLayer final : public ILayer
{
public:
    /** Construct a DepthToSpace layer.
     *
     * @param[in] block_shape Block shape to rearrange the input by
     */
    DepthToSpaceLayer(int32_t block_shape)
        : _block_shape(block_shape)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_depth_to_space_node(s.graph(), common_params, input, _block_shape);
    }

private:
    int32_t _block_shape;
};

/** Dequantization Layer */
class DequantizationLayer final : public ILayer
{
public:
    /** Construct a dequantization layer. */
    DequantizationLayer()
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_dequantization_node(s.graph(), common_params, input);
    }
};

/** DetectionOutput Layer */
class DetectionOutputLayer final : public ILayer
{
public:
    /** Construct a detection output layer.
     *
     * @param[in] sub_stream_conf  Confidence graph sub-stream.
     * @param[in] sub_stream_prior PriorBox graph sub-stream.
     * @param[in] detect_info      DetectionOutput parameters.
     */
    DetectionOutputLayer(SubStream &&sub_stream_conf, SubStream &&sub_stream_prior, const DetectionOutputLayerInfo &detect_info)
        : _ss_conf(std::move(sub_stream_conf)), _ss_prior(std::move(sub_stream_prior)), _detect_info(detect_info)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params  = { name(), s.hints().target_hint };
        NodeIdxPair input_loc      = { s.tail_node(), 0 };
        NodeIdxPair input_conf     = { _ss_conf.tail_node(), 0 };
        NodeIdxPair input_priorbox = { _ss_prior.tail_node(), 0 };
        return GraphBuilder::add_detection_output_node(s.graph(), common_params, input_loc, input_conf, input_priorbox, _detect_info);
    }

private:
    SubStream                _ss_conf;
    SubStream                _ss_prior;
    DetectionOutputLayerInfo _detect_info;
};
/** DetectionPostProcess Layer */
class DetectionPostProcessLayer final : public ILayer
{
public:
    /** Construct a detection post-process layer.
     *
     * @param[in] sub_stream_class_prediction Class prediction graph sub-stream.
     * @param[in] detect_info                 DetectionOutput parameters.
     * @param[in] anchors                     Accessor to get anchors tensor data from.
     * @param[in] out_quant_info              (Optional) Output quantization info
     */
    DetectionPostProcessLayer(SubStream &&sub_stream_class_prediction, DetectionPostProcessLayerInfo detect_info, ITensorAccessorUPtr anchors,
                              const QuantizationInfo out_quant_info = QuantizationInfo())
        : _sub_stream_class_prediction(std::move(sub_stream_class_prediction)), _detect_info(detect_info), _anchors(std::move(anchors)), _out_quant_info(std::move(out_quant_info))
    {
    }

    NodeID create_layer(IStream &s) override
    {
        ARM_COMPUTE_ERROR_ON(_anchors == nullptr);

        NodeParams  common_params          = { name(), s.hints().target_hint };
        NodeIdxPair input_box_encoding     = { s.tail_node(), 0 };
        NodeIdxPair input_class_prediction = { _sub_stream_class_prediction.tail_node(), 0 };
        return GraphBuilder::add_detection_post_process_node(s.graph(), common_params, input_box_encoding, input_class_prediction, _detect_info, std::move(_anchors), std::move(_out_quant_info));
    }

private:
    SubStream                     _sub_stream_class_prediction;
    DetectionPostProcessLayerInfo _detect_info;
    ITensorAccessorUPtr           _anchors;
    const QuantizationInfo        _out_quant_info;
};
/** Dummy Layer */
class DummyLayer final : public ILayer
{
public:
    /** Construct a dummy layer.
     *
     * @param[in] shape Output shape
     */
    DummyLayer(TensorShape shape)
        : _shape(shape)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_dummy_node(s.graph(), common_params, input, _shape);
    }

private:
    TensorShape _shape;
};

/** Eltwise Layer */
class EltwiseLayer final : public ILayer
{
public:
    /** Construct an element-wise operation layer
     *
     * @param[in] sub_stream0 First graph sub-stream
     * @param[in] sub_stream1 Second graph sub-stream
     * @param[in] op          Element-wise operation to perform
     */
    EltwiseLayer(SubStream &&sub_stream0, SubStream &&sub_stream1, EltwiseOperation op)
        : _ss0(std::move(sub_stream0)), _ss1(std::move(sub_stream1)), _op(op)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input0        = { _ss0.tail_node(), 0 };
        NodeIdxPair input1        = { _ss1.tail_node(), 0 };

        return GraphBuilder::add_elementwise_node(s.graph(), common_params, input0, input1, _op);
    }

private:
    SubStream        _ss0;
    SubStream        _ss1;
    EltwiseOperation _op;
};
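
// Hedged sketch of a residual connection: two SubStream forks of the same graph
// combined with an element-wise Add; EltwiseOperation values come from the
// graph Types.
//
//   SubStream identity(graph);
//   SubStream residual(graph);
//   residual << ConvolutionLayer(/* ... */);
//   graph << EltwiseLayer(std::move(identity), std::move(residual), EltwiseOperation::Add);
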
/** Flatten Layer */
class FlattenLayer final : public ILayer
{
public:
    /** Construct a flatten layer. */
    FlattenLayer()
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_flatten_node(s.graph(), common_params, input);
    }
};

/** Fully Connected Layer */
class FullyConnectedLayer final : public ILayer
{
public:
    /** Construct a fully connected layer.
     *
     * @param[in] num_outputs        Number of outputs.
     * @param[in] weights            Accessor to get weights from.
     * @param[in] bias               Accessor to get bias from.
     * @param[in] fc_info            (Optional) Fully connected layer metadata
     * @param[in] weights_quant_info (Optional) Weights quantization information
     * @param[in] out_quant_info     (Optional) Output quantization info
     */
    FullyConnectedLayer(unsigned int                  num_outputs,
                        ITensorAccessorUPtr           weights,
                        ITensorAccessorUPtr           bias,
                        const FullyConnectedLayerInfo fc_info            = FullyConnectedLayerInfo(),
                        const QuantizationInfo        weights_quant_info = QuantizationInfo(),
                        const QuantizationInfo        out_quant_info     = QuantizationInfo())
        : _num_outputs(num_outputs),
          _weights(std::move(weights)),
          _bias(std::move(bias)),
          _weights_ss(nullptr),
          _bias_ss(nullptr),
          _fc_info(fc_info),
          _weights_quant_info(std::move(weights_quant_info)),
          _out_quant_info(std::move(out_quant_info))
    {
    }

    /** Construct a fully connected layer.
     *
     * @param[in] num_outputs        Number of outputs.
     * @param[in] sub_stream_weights Graph sub-stream for the weights.
     * @param[in] sub_stream_bias    Graph sub-stream for the bias.
     * @param[in] fc_info            (Optional) Fully connected layer metadata
     * @param[in] weights_quant_info (Optional) Weights quantization information
     * @param[in] out_quant_info     (Optional) Output quantization info
     */
    FullyConnectedLayer(unsigned int                  num_outputs,
                        SubStream                     sub_stream_weights,
                        SubStream                     sub_stream_bias,
                        const FullyConnectedLayerInfo fc_info            = FullyConnectedLayerInfo(),
                        const QuantizationInfo        weights_quant_info = QuantizationInfo(),
                        const QuantizationInfo        out_quant_info     = QuantizationInfo())
        : _num_outputs(num_outputs),
          _weights(nullptr),
          _bias(nullptr),
          _weights_ss(std::make_unique<SubStream>(std::move(sub_stream_weights))),
          _bias_ss(std::make_unique<SubStream>(std::move(sub_stream_bias))),
          _fc_info(fc_info),
          _weights_quant_info(std::move(weights_quant_info)),
          _out_quant_info(std::move(out_quant_info))
    {
    }

    /** Create layer and add to the given stream.
     *
     * @param[in] s Stream to add layer to.
     *
     * @return ID of the created node.
     */
    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        if(_weights != nullptr)
        {
            return GraphBuilder::add_fully_connected_layer(s.graph(), common_params, input, _num_outputs,
                                                           std::move(_weights), std::move(_bias), _fc_info,
                                                           std::move(_weights_quant_info), std::move(_out_quant_info), s.hints().fast_math_hint);
        }
        else
        {
            ARM_COMPUTE_ERROR_ON(_weights_ss == nullptr);

            NodeID bias_nid = (_bias_ss == nullptr) ? EmptyNodeID : _bias_ss->tail_node();
            return GraphBuilder::add_fully_connected_layer(s.graph(), common_params, input, _num_outputs,
                                                           _weights_ss->tail_node(), bias_nid, _fc_info,
                                                           std::move(_out_quant_info), s.hints().fast_math_hint);
        }
    }

private:
    unsigned int                  _num_outputs;
    ITensorAccessorUPtr           _weights;
    ITensorAccessorUPtr           _bias;
    std::unique_ptr<SubStream>    _weights_ss;
    std::unique_ptr<SubStream>    _bias_ss;
    const FullyConnectedLayerInfo _fc_info;
    const QuantizationInfo        _weights_quant_info;
    const QuantizationInfo        _out_quant_info;
};

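// Illustrative only: a classifier head with 1000 outputs followed by softmax;
// the accessor helpers are assumed from the examples utilities.
//
//   graph << FullyConnectedLayer(1000U,
//                                get_weights_accessor(data_path, "fc_w.npy"),
//                                get_weights_accessor(data_path, "fc_b.npy"))
//         << SoftmaxLayer();
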
/** Generate Proposals Layer */
class GenerateProposalsLayer final : public ILayer
{
public:
    /** Construct a generate proposals layer.
     *
     * @param[in] ss_scores  Graph sub-stream for the scores.
     * @param[in] ss_deltas  Graph sub-stream for the deltas.
     * @param[in] ss_anchors Graph sub-stream for the anchors.
     * @param[in] info       Generate Proposals operation information.
     */
    GenerateProposalsLayer(SubStream &&ss_scores, SubStream &&ss_deltas, SubStream &&ss_anchors, GenerateProposalsInfo info)
        : _ss_scores(std::move(ss_scores)), _ss_deltas(std::move(ss_deltas)), _ss_anchors(std::move(ss_anchors)), _info(info)
    {
    }

    /** Create layer and add to the given stream.
     *
     * @param[in] s Stream to add layer to.
     *
     * @return ID of the created node.
     */
    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair scores        = { _ss_scores.tail_node(), 0 };
        NodeIdxPair deltas        = { _ss_deltas.tail_node(), 0 };
        NodeIdxPair anchors       = { _ss_anchors.tail_node(), 0 };
        return GraphBuilder::add_generate_proposals_node(s.graph(), common_params, scores, deltas, anchors, _info);
    }

private:
    SubStream             _ss_scores;
    SubStream             _ss_deltas;
    SubStream             _ss_anchors;
    GenerateProposalsInfo _info;
};

/** L2 Normalize Layer */
class L2NormalizeLayer final : public ILayer
{
public:
    /** Construct an L2 Normalize layer.
     *
     * @param[in] axis    Axis to perform normalization on
     * @param[in] epsilon Lower bound value for the normalization
     */
    L2NormalizeLayer(int axis, float epsilon)
        : _axis(axis), _epsilon(epsilon)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_l2_normalize_node(s.graph(), common_params, input, _axis, _epsilon);
    }

private:
    int   _axis;
    float _epsilon;
};

/** Normalization Layer */
class NormalizationLayer final : public ILayer
{
public:
    /** Construct a normalization layer.
     *
     * @param[in] norm_info Normalization information.
     */
    NormalizationLayer(NormalizationLayerInfo norm_info)
        : _norm_info(norm_info)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_normalization_node(s.graph(), common_params, input, _norm_info);
    }

private:
    NormalizationLayerInfo _norm_info;
};

/** Normalize planar YUV Layer */
class NormalizePlanarYUVLayer final : public ILayer
{
public:
    /** Construct a normalize planar YUV layer.
     *
     * @param[in] mean Accessor to get mean tensor data from.
     * @param[in] std  Accessor to get std tensor data from.
     */
    NormalizePlanarYUVLayer(ITensorAccessorUPtr mean,
                            ITensorAccessorUPtr std)
        : _mean(std::move(mean)), _std(std::move(std))
    {
    }

    NodeID create_layer(IStream &s) override
    {
        ARM_COMPUTE_ERROR_ON(_mean == nullptr);
        ARM_COMPUTE_ERROR_ON(_std == nullptr);

        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_normalize_planar_yuv_node(s.graph(), common_params, input,
                                                           std::move(_mean), std::move(_std));
    }

private:
    ITensorAccessorUPtr _mean;
    ITensorAccessorUPtr _std;
};

/** Pad Layer */
class PadLayer final : public ILayer
{
public:
    /** Construct a pad layer.
     *
     * @param[in] padding   The padding for each spatial dimension of the input tensor. The pair padding[i]
     *                      specifies the front and the end padding in the i-th dimension.
     * @param[in] pad_value Padding value to use. Defaults to 0.
     */
    PadLayer(PaddingList padding, PixelValue pad_value = PixelValue())
        : _padding(padding), _pad_value(pad_value)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_pad_node(s.graph(), common_params, input, _padding, _pad_value);
    }

private:
    PaddingList _padding;
    PixelValue  _pad_value;
};

/** Permute Layer */
class PermuteLayer final : public ILayer
{
public:
    /** Construct a permute layer.
     *
     * @param[in] perm   Permutation vector.
     * @param[in] layout (Optional) Data layout to assign to permuted tensor.
     *                   If UNKNOWN then the input's layout will be used.
     */
    PermuteLayer(PermutationVector perm, DataLayout layout = DataLayout::UNKNOWN)
        : _perm(perm), _layout(layout)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_permute_node(s.graph(), common_params, input, _perm, _layout);
    }

private:
    PermutationVector _perm;
    DataLayout        _layout;
};

/** Pooling Layer */
class PoolingLayer final : public ILayer
{
public:
    /** Construct a pooling layer.
     *
     * @param[in] pool_info Pooling information.
     */
    PoolingLayer(PoolingLayerInfo pool_info)
        : _pool_info(pool_info)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_pooling_node(s.graph(), common_params, input, _pool_info);
    }

private:
    PoolingLayerInfo _pool_info;
};

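// Hedged example: 3x3 max pooling with stride 2 in NCHW; the PoolingLayerInfo
// constructor shown follows the core library's API.
//
//   graph << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, Size2D(3, 3),
//                                          DataLayout::NCHW, PadStrideInfo(2, 2, 0, 0)));
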
/** PRelu Layer */
class PReluLayer final : public ILayer
{
public:
    /** Construct a PRelu operation layer
     *
     * @param[in] sub_stream0 First graph sub-stream
     * @param[in] sub_stream1 Second graph sub-stream
     */
    PReluLayer(SubStream &&sub_stream0, SubStream &&sub_stream1)
        : _ss0(std::move(sub_stream0)), _ss1(std::move(sub_stream1))
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { _ss0.tail_node(), 0 };
        NodeIdxPair alpha         = { _ss1.tail_node(), 0 };

        return GraphBuilder::add_prelu_node(s.graph(), common_params, input, alpha);
    }

private:
    SubStream _ss0;
    SubStream _ss1;
};

/** Print Layer */
class PrintLayer final : public ILayer
{
public:
    /** Construct a print layer.
     *
     * Example usage to locally dequantize and print a tensor:
     *
     * Tensor *output = new Tensor();
     * const auto transform = [output](ITensor *input)
     * {
     *     output->allocator()->init(*input->info());
     *     output->info()->set_data_type(DataType::F32);
     *     output->allocator()->allocate();
     *
     *     Window win;
     *     win.use_tensor_dimensions(input->info()->tensor_shape());
     *     Iterator in(input, win);
     *     Iterator out(output, win);
     *     execute_window_loop(win, [&](const Coordinates &)
     *     {
     *         *(reinterpret_cast<float *>(out.ptr())) = dequantize_qasymm8(*in.ptr(), input->info()->quantization_info().uniform());
     *     }, in, out);
     *
     *     return output;
     * };
     *
     * graph << InputLayer(input_descriptor.set_quantization_info(in_quant_info), get_input_accessor(common_params, nullptr, false))
     *       << ...
     *       << // CNN Layers
     *       << ...
     *       << PrintLayer(std::cout, IOFormatInfo(), transform)
     *       << ...
     *       << OutputLayer(get_output_accessor(common_params, 5));
     *
     * @param[in] stream      Output stream.
     * @param[in] format_info (Optional) Format info.
     * @param[in] transform   (Optional) Input transform function.
     */
    PrintLayer(std::ostream &stream, const IOFormatInfo &format_info = IOFormatInfo(), const std::function<ITensor *(ITensor *)> transform = nullptr)
        : _stream(stream), _format_info(format_info), _transform(transform)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_print_node(s.graph(), common_params, input, _stream, _format_info, _transform);
    }

private:
    std::ostream                             &_stream;
    const IOFormatInfo                       &_format_info;
    const std::function<ITensor *(ITensor *)> _transform;
};

/** PriorBox Layer */
class PriorBoxLayer final : public ILayer
{
public:
    /** Construct a priorbox layer.
     *
     * @param[in] sub_stream First graph sub-stream
     * @param[in] prior_info PriorBox parameters.
     */
    PriorBoxLayer(SubStream &&sub_stream, const PriorBoxLayerInfo &prior_info)
        : _ss(std::move(sub_stream)), _prior_info(prior_info)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input0        = { s.tail_node(), 0 };
        NodeIdxPair input1        = { _ss.tail_node(), 0 };
        return GraphBuilder::add_priorbox_node(s.graph(), common_params, input0, input1, _prior_info);
    }

private:
    SubStream         _ss;
    PriorBoxLayerInfo _prior_info;
};

/** Quantization Layer */
class QuantizationLayer final : public ILayer
{
public:
    /** Construct a quantization layer.
     *
     * @param[in] out_quant_info Output tensor quantization info
     */
    QuantizationLayer(QuantizationInfo out_quant_info)
        : _out_quant_info(out_quant_info)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_quantization_node(s.graph(), common_params, input, _out_quant_info);
    }

private:
    QuantizationInfo _out_quant_info;
};

/** Reduction Layer */
class ReductionLayer final : public ILayer
{
public:
    /** Construct a reduction layer.
     *
     * @param[in] op        Reduction operation
     * @param[in] axis      Reduction axis
     * @param[in] keep_dims Whether to keep the reduced dimension after the operation.
     */
    ReductionLayer(ReductionOperation op, unsigned int axis, bool keep_dims)
        : _op(op), _axis(axis), _keep_dims(keep_dims)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_reduction_operation_node(s.graph(), common_params, input, _op, _axis, _keep_dims);
    }

private:
    ReductionOperation _op;
    unsigned int       _axis;
    bool               _keep_dims;
};

/** Reorg Layer */
class ReorgLayer final : public ILayer
{
public:
    /** Construct a reorg layer.
     *
     * @param[in] stride Stride value to use for reorganizing the values in the output tensor.
     *                   It defines the spatial distance between 2 consecutive pixels in the x and y direction
     */
    ReorgLayer(int stride)
        : _stride(stride)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_reorg_node(s.graph(), common_params, input, _stride);
    }

private:
    int _stride;
};

/** Reshape Layer */
class ReshapeLayer final : public ILayer
{
public:
    /** Construct a reshape layer.
     *
     * @param[in] shape Target shape.
     */
    ReshapeLayer(TensorShape shape)
        : _shape(shape)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_reshape_node(s.graph(), common_params, input, _shape);
    }

private:
    TensorShape _shape;
};

/** Resize Layer */
class ResizeLayer final : public ILayer
{
public:
    /** Construct a resize layer.
     *
     * @param[in] policy       Interpolation policy to use for resizing.
     * @param[in] width_scale  Scale factor to apply to the output width.
     * @param[in] height_scale Scale factor to apply to the output height.
     */
    ResizeLayer(InterpolationPolicy policy, float width_scale, float height_scale)
        : _policy(policy), _width_scale(width_scale), _height_scale(height_scale)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_resize_node(s.graph(), common_params, input, _policy, _width_scale, _height_scale);
    }

private:
    InterpolationPolicy _policy;
    float               _width_scale;
    float               _height_scale;
};

/** ROIAlign Layer */
class ROIAlignLayer final : public ILayer
{
public:
    /** Construct a RoiAlign layer.
     *
     * @param[in] sub_stream_input Graph sub-stream for the input
     * @param[in] sub_stream_rois  Graph sub-stream for the rois
     * @param[in] pool_info        Pooling information.
     */
    ROIAlignLayer(SubStream &&sub_stream_input, SubStream &&sub_stream_rois, ROIPoolingLayerInfo pool_info)
        : _ss_input(sub_stream_input), _ss_rois(sub_stream_rois), _pool_info(pool_info)
    {
    }

    /** Prevent instances of this class from being copy constructed */
    ROIAlignLayer(const ROIAlignLayer &) = delete;
    /** Prevent instances of this class from being copied */
    ROIAlignLayer &operator=(const ROIAlignLayer &) = delete;

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { _ss_input.tail_node(), 0 };
        NodeIdxPair rois          = { _ss_rois.tail_node(), 0 };
        return GraphBuilder::add_roi_align_node(s.graph(), common_params, input, rois, _pool_info);
    }

private:
    SubStream           _ss_input;
    SubStream           _ss_rois;
    ROIPoolingLayerInfo _pool_info;
};

/** Scale Layer */
class ScaleLayer final : public ILayer
{
public:
    /** Construct a scale layer.
     *
     * @param[in] mul_w Accessor to get mul weight from.
     * @param[in] add_w Accessor to get add weight from.
     */
    ScaleLayer(ITensorAccessorUPtr mul_w,
               ITensorAccessorUPtr add_w)
        : _mul_w(std::move(mul_w)), _add_w(std::move(add_w))
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_scale_layer(s.graph(), common_params, input, std::move(_mul_w), std::move(_add_w));
    }

private:
    ITensorAccessorUPtr _mul_w;
    ITensorAccessorUPtr _add_w;
};

/** Slice Layer */
class SliceLayer final : public ILayer
{
public:
    /** Construct a slice layer.
     *
     * @param[in] starts The starts of the dimensions of the input tensor to be sliced. The length must be of rank(input).
     * @param[in] ends   The ends of the dimensions of the input tensor to be sliced. The length must be of rank(input).
     */
    SliceLayer(Coordinates &starts, Coordinates &ends)
        : _starts(starts), _ends(ends)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_slice_node(s.graph(), common_params, input, _starts, _ends);
    }

private:
    Coordinates _starts;
    Coordinates _ends;
};

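// Hedged sketch: slice the first 100 elements along dimension 0 of a rank-2
// tensor; Coordinates takes one entry per input dimension, and SliceLayer
// expects lvalues.
//
//   Coordinates starts(0, 0);
//   Coordinates ends(100, 1);
//   graph << SliceLayer(starts, ends);
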
/** Softmax Layer */
class SoftmaxLayer final : public ILayer
{
public:
    /** Construct a softmax layer.
     *
     * @param[in] beta (Optional) Beta value. Default 1.0.
     */
    SoftmaxLayer(float beta = 1.0f)
        : _beta(beta)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_softmax_node(s.graph(), common_params, input, _beta);
    }

private:
    float _beta;
};

/** Stack Layer */
class StackLayer final : public ILayer
{
public:
    /** Construct a stack layer
     *
     * @param[in] sub_stream1      First graph branch
     * @param[in] sub_stream2      Second graph branch
     * @param[in] rest_sub_streams Rest sub-graph branches
     */
    template <typename... Ts>
    StackLayer(SubStream &&sub_stream1, SubStream &&sub_stream2, Ts &&... rest_sub_streams)
        : _sub_streams(), _axis(0)
    {
        _sub_streams.push_back(std::make_unique<SubStream>(std::move(sub_stream1)));
        _sub_streams.push_back(std::make_unique<SubStream>(std::move(sub_stream2)));

        utility::for_each([&](SubStream && sub_stream)
        {
            _sub_streams.push_back(std::make_unique<SubStream>(std::move(sub_stream)));
        },
        std::move(rest_sub_streams)...);
    }
    /** Construct a stack layer
     *
     * @param[in] axis             Stack layer axis along which to stack the inputs
     * @param[in] sub_stream1      First graph branch
     * @param[in] sub_stream2      Second graph branch
     * @param[in] rest_sub_streams Rest sub-graph branches
     */
    template <typename... Ts>
    StackLayer(int axis, SubStream &&sub_stream1, SubStream &&sub_stream2, Ts &&... rest_sub_streams)
        : _sub_streams(), _axis(axis)
    {
        _sub_streams.push_back(std::make_unique<SubStream>(std::move(sub_stream1)));
        _sub_streams.push_back(std::make_unique<SubStream>(std::move(sub_stream2)));

        utility::for_each([&](SubStream && sub_stream)
        {
            _sub_streams.push_back(std::make_unique<SubStream>(std::move(sub_stream)));
        },
        std::move(rest_sub_streams)...);
    }
    /** Construct a stack layer
     *
     * @param[in] sub_stream Sub-stream
     */
    template <typename... Ts>
    StackLayer(SubStream &&sub_stream)
        : _sub_streams(), _axis(0)
    {
        _sub_streams.push_back(std::make_unique<SubStream>(std::move(sub_stream)));
    }
    NodeID create_layer(IStream &s) override
    {
        NodeID     nid           = EmptyNodeID;
        NodeParams common_params = { name(), s.hints().target_hint };
        if(_sub_streams.size() == 1 && _sub_streams.at(0) != nullptr)
        {
            nid = _sub_streams[0]->tail_node();
        }
        else
        {
            // Collect tail nodes and stack
            std::vector<NodeIdxPair> nodes;
            for(auto &ss : _sub_streams)
            {
                if(ss && (ss->tail_node() != EmptyNodeID))
                {
                    const auto tail_node = s.graph().node(ss->tail_node());
                    if(tail_node != nullptr && tail_node->type() != NodeType::Output)
                    {
                        nodes.push_back({ ss->tail_node(), 0 });
                    }
                }
            }
            nid = GraphBuilder::add_stack_node(s.graph(), common_params, nodes, _axis);
        }
        return nid;
    }

private:
    std::vector<std::unique_ptr<SubStream>> _sub_streams;
    int                                     _axis;
};

/** StridedSlice Layer */
class StridedSliceLayer final : public ILayer
{
public:
    /** Construct a strided slice layer.
     *
     * @param[in] starts             The starts of the dimensions of the input tensor to be sliced. The length must be of rank(input).
     * @param[in] ends               The ends of the dimensions of the input tensor to be sliced. The length must be of rank(input).
     * @param[in] strides            The strides of the dimensions of the input tensor to be sliced. The length must be of rank(input).
     * @param[in] strided_slice_info Contains masks for the starts, ends and strides
     */
    StridedSliceLayer(Coordinates &starts, Coordinates &ends, BiStrides &strides, StridedSliceLayerInfo strided_slice_info)
        : _starts(starts), _ends(ends), _strides(strides), _info(strided_slice_info)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_strided_slice_node(s.graph(), common_params, input, _starts, _ends, _strides, _info);
    }

private:
    Coordinates           _starts;
    Coordinates           _ends;
    BiStrides             _strides;
    StridedSliceLayerInfo _info;
};

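// Hedged example: keep every second element along dimension 0 of a rank-1
// tensor; a default-constructed StridedSliceLayerInfo applies no begin/end/
// shrink masks.
//
//   Coordinates starts(0), ends(10);
//   BiStrides strides(2);
//   graph << StridedSliceLayer(starts, ends, strides, StridedSliceLayerInfo());
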
/** YOLO Layer */
class YOLOLayer final : public ILayer
{
public:
    /** Construct a YOLO layer.
     *
     * @param[in] act_info Activation info
     */
    YOLOLayer(ActivationLayerInfo act_info)
        : _act_info(act_info)
    {
    }

    NodeID create_layer(IStream &s) override
    {
        NodeParams  common_params = { name(), s.hints().target_hint };
        NodeIdxPair input         = { s.tail_node(), 0 };
        return GraphBuilder::add_yolo_node(s.graph(), common_params, input, _act_info);
    }

private:
    ActivationLayerInfo _act_info;
};
} // namespace frontend
} // namespace graph
} // namespace arm_compute
#endif /* ARM_COMPUTE_GRAPH_LAYERS_H */