//
// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//
#include "MemImportLayer.hpp"

#include "LayerCloneBase.hpp"

#include <armnn/TypesUtils.hpp>
#include <armnn/backends/WorkloadData.hpp>
#include <armnn/backends/WorkloadFactory.hpp>
#include <backendsCommon/MemImportWorkload.hpp>

namespace armnn
{

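// This layer represents a memory import operation between backends.
// It has a single input slot and a single output slot.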
MemImportLayer::MemImportLayer(const char* name)
    : Layer(1, 1, LayerType::MemImport, name)
{
}

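// Creates a dynamically-allocated copy of this layer, with the same name, in the given graph.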
MemImportLayer* MemImportLayer::Clone(Graph& graph) const
{
    return CloneBase<MemImportLayer>(graph, GetName());
}

std::unique_ptr<IWorkload> MemImportLayer::CreateWorkload(const IWorkloadFactory& factory) const
{
    IgnoreUnused(factory);
    MemImportQueueDescriptor descriptor;
    SetAdditionalInfo(descriptor);

    // Unlike other workloads, this one is not created by the workload factory:
    // the ImportMemGenericWorkload is constructed directly.
    return std::make_unique<ImportMemGenericWorkload>(descriptor, PrepInfoAndDesc(descriptor));
}

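// Verifies the layer's connections and checks that the output tensor shape is consistent
// with the shape inferred from the input slot's connection.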
void MemImportLayer::ValidateTensorShapesFromInputs()
{
    VerifyLayerConnections(1, CHECK_LOCATION());

    const TensorShape& outputShape = GetOutputSlot(0).GetTensorInfo().GetShape();

    VerifyShapeInferenceType(outputShape, m_ShapeInferenceMethod);

    auto inferredShapes = InferOutputShapes({ GetInputSlot(0).GetConnection()->GetTensorInfo().GetShape() });

    ARMNN_ASSERT(inferredShapes.size() == 1);

    ValidateAndCopyShape(outputShape, inferredShapes[0], m_ShapeInferenceMethod, "MemImportLayer");
}

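// MemImport layers are inserted internally by Arm NN and should never appear in a graph
// supplied as input, so visiting one with a strategy is treated as an error.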
void MemImportLayer::ExecuteStrategy(IStrategy& strategy) const
{
    IgnoreUnused(strategy);
    throw armnn::Exception("MemImportLayer should not appear in an input graph");
}

} // namespace armnn