//
// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "PadLayer.hpp"
#include "LayerCloneBase.hpp"

#include <armnn/backends/TensorHandle.hpp>
#include <armnn/backends/WorkloadData.hpp>
#include <armnn/backends/WorkloadFactory.hpp>

#include <cstring>

namespace armnn
{

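// A Pad layer has one input slot and one output slot; the pad amounts and the
// padding mode are carried in the PadDescriptor passed to the constructor.
//
// Minimal front-end usage sketch (an illustrative assumption, using the usual
// INetwork::AddPadLayer entry point; the values are made up):
//
//   armnn::PadDescriptor desc;
//   desc.m_PadList     = { {1, 1}, {0, 2} };   // (before, after) padding per dimension
//   desc.m_PaddingMode = armnn::PaddingMode::Constant;
//   armnn::IConnectableLayer* pad = network->AddPadLayer(desc, "pad");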
PadLayer::PadLayer(const armnn::PadDescriptor& param, const char* name)
    : LayerWithParameters(1, 1, LayerType::Pad, param, name)
{}

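// Builds a PadQueueDescriptor from this layer's parameters (pad list and
// padding mode) and asks the backend workload factory to create the
// corresponding Pad workload.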
std::unique_ptr<IWorkload> PadLayer::CreateWorkload(const armnn::IWorkloadFactory& factory) const
{
    PadQueueDescriptor descriptor;
    descriptor.m_Parameters.m_PadList = m_Param.m_PadList;
    descriptor.m_Parameters.m_PaddingMode = m_Param.m_PaddingMode;
    SetAdditionalInfo(descriptor);

    return factory.CreateWorkload(LayerType::Pad, descriptor, PrepInfoAndDesc(descriptor));
}

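// Creates a copy of this layer in the target graph. CloneBase already copies
// m_Param wholesale; the pad list and padding mode are re-assigned explicitly
// below as well.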
PadLayer* PadLayer::Clone(Graph& graph) const
{
    auto layer = CloneBase<PadLayer>(graph, m_Param, GetName());

    layer->m_Param.m_PadList = m_Param.m_PadList;
    layer->m_Param.m_PaddingMode = m_Param.m_PaddingMode;

    return std::move(layer);
}

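// Each output dimension is the matching input dimension plus the pre- and
// post-padding taken from the pad list:
//
//   output[i] = input[i] + padList[i].first + padList[i].second
//
// For example, an input of shape { 2, 3 } with a pad list of { {1, 1}, {0, 2} }
// yields an output shape of { 4, 5 }.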
std::vector<TensorShape> PadLayer::InferOutputShapes(const std::vector<TensorShape>& inputShapes) const
{
    ARMNN_ASSERT(inputShapes.size() == 1);
    const TensorShape& inputShape = inputShapes[0];

    unsigned int rank = inputShape.GetNumDimensions();
    ARMNN_ASSERT(m_Param.m_PadList.size() == rank);
    ARMNN_ASSERT(rank != 0);

    std::vector<unsigned int> outputDimensionSizes(rank);
    for (unsigned int i = 0; i < rank; ++i)
    {
        outputDimensionSizes[i] = inputShape[i] + m_Param.m_PadList[i].first + m_Param.m_PadList[i].second;
    }

    TensorShape tensorShape = TensorShape(rank, outputDimensionSizes.data());
    return std::vector<TensorShape>({ tensorShape });
}

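// Verifies that the layer is fully connected, infers the output shape from the
// connected input tensor, and validates it against (or copies it onto) the
// output slot according to the active shape inference method.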
void PadLayer::ValidateTensorShapesFromInputs()
{
    VerifyLayerConnections(1, CHECK_LOCATION());

    const TensorShape& outputShape = GetOutputSlot(0).GetTensorInfo().GetShape();

    VerifyShapeInferenceType(outputShape, m_ShapeInferenceMethod);

    auto inferredShapes = InferOutputShapes({ GetInputSlot(0).GetConnection()->GetTensorInfo().GetShape() });

    ARMNN_ASSERT(inferredShapes.size() == 1);

    ValidateAndCopyShape(outputShape, inferredShapes[0], m_ShapeInferenceMethod, "PadLayer");
}

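// Visitor entry point: forwards this layer, its parameters, and its name to
// the supplied strategy.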
void PadLayer::ExecuteStrategy(IStrategy& strategy) const
{
    strategy.ExecuteStrategy(this, GetParameters(), {}, GetName());
}

} // namespace armnn