1 //
2 // Copyright © 2022-2023 Arm Ltd and Contributors. All rights reserved.
3 // SPDX-License-Identifier: MIT
4 //
5
#include <armnn/BackendRegistry.hpp>
#include <armnn/backends/MemCopyWorkload.hpp>
#include <armnnTestUtils/MockBackend.hpp>
#include <armnnTestUtils/MockTensorHandle.hpp>
#include <backendsCommon/DefaultAllocator.hpp>
#include <backendsCommon/test/MockBackendId.hpp>
#include <SubgraphViewSelector.hpp>

#include "Layer.hpp"

#include <memory>
15
16 namespace armnn
17 {
18
GetIdStatic()19 const BackendId& MockBackend::GetIdStatic()
20 {
21 static const BackendId s_Id{MockBackendId()};
22 return s_Id;
23 }
24
namespace
{
// File-local copy of the mock backend id, reported by MockWorkloadFactory::GetBackendId().
static const BackendId s_Id{ MockBackendId() };
}
29
// Constructs a factory that shares an externally supplied memory manager.
MockWorkloadFactory::MockWorkloadFactory(const std::shared_ptr<MockMemoryManager>& memoryManager)
    : m_MemoryManager(memoryManager)
{}
33
MockWorkloadFactory()34 MockWorkloadFactory::MockWorkloadFactory()
35 : m_MemoryManager(new MockMemoryManager())
36 {}
37
// Returns the mock backend id (the file-local s_Id initialized from MockBackendId()).
const BackendId& MockWorkloadFactory::GetBackendId() const
{
    return s_Id;
}
42
CreateWorkload(LayerType type,const QueueDescriptor & descriptor,const WorkloadInfo & info) const43 std::unique_ptr<IWorkload> MockWorkloadFactory::CreateWorkload(LayerType type,
44 const QueueDescriptor& descriptor,
45 const WorkloadInfo& info) const
46 {
47 switch (type)
48 {
49 case LayerType::MemCopy: {
50 auto memCopyQueueDescriptor = PolymorphicDowncast<const MemCopyQueueDescriptor*>(&descriptor);
51 if (descriptor.m_Inputs.empty())
52 {
53 throw InvalidArgumentException("MockWorkloadFactory: CreateMemCopy() expected an input tensor.");
54 }
55 return std::make_unique<CopyMemGenericWorkload>(*memCopyQueueDescriptor, info);
56 }
57 default:
58 return nullptr;
59 }
60 }
61
IsLayerSupported(const armnn::Layer * layer)62 bool IsLayerSupported(const armnn::Layer* layer)
63 {
64 ARMNN_ASSERT(layer != nullptr);
65
66 armnn::LayerType layerType = layer->GetType();
67 switch (layerType)
68 {
69 case armnn::LayerType::Input:
70 case armnn::LayerType::Output:
71 case armnn::LayerType::Constant:
72 case armnn::LayerType::Addition:
73 case armnn::LayerType::Convolution2d:
74 case armnn::LayerType::ElementwiseBinary:
75 // Layer supported
76 return true;
77 default:
78 // Layer unsupported
79 return false;
80 }
81 }
82
// Reference overload; forwards to IsLayerSupported(const armnn::Layer*).
bool IsLayerSupported(const armnn::Layer& layer)
{
    return IsLayerSupported(&layer);
}
87
IsLayerOptimizable(const armnn::Layer * layer)88 bool IsLayerOptimizable(const armnn::Layer* layer)
89 {
90 ARMNN_ASSERT(layer != nullptr);
91
92 // A Layer is not optimizable if its name contains "unoptimizable"
93 const std::string layerName(layer->GetName());
94 bool optimizable = layerName.find("unoptimizable") == std::string::npos;
95
96 return optimizable;
97 }
98
// Reference overload; forwards to IsLayerOptimizable(const armnn::Layer*).
bool IsLayerOptimizable(const armnn::Layer& layer)
{
    return IsLayerOptimizable(&layer);
}
103
} // namespace armnn
105
106 namespace armnn
107 {
108
MockBackendInitialiser()109 MockBackendInitialiser::MockBackendInitialiser()
110 {
111 BackendRegistryInstance().Register(MockBackend::GetIdStatic(),
112 []()
113 {
114 return IBackendInternalUniquePtr(new MockBackend);
115 });
116 }
117
// Removes the mock backend from the registry. All exceptions are swallowed
// because a destructor must not throw; failure is only reported on stderr.
MockBackendInitialiser::~MockBackendInitialiser()
{
    try
    {
        BackendRegistryInstance().Deregister(MockBackend::GetIdStatic());
    }
    catch (...)
    {
        std::cerr << "could not deregister mock backend" << std::endl;
    }
}
129
// The mock backend does not create a workload factory through this interface;
// an empty pointer is returned.
IBackendInternal::IWorkloadFactoryPtr MockBackend::CreateWorkloadFactory(
    const IBackendInternal::IMemoryManagerSharedPtr& /*memoryManager*/) const
{
    return IWorkloadFactoryPtr{};
}
135
// The mock backend needs no backend context; an empty pointer is returned.
IBackendInternal::IBackendContextPtr MockBackend::CreateBackendContext(const IRuntime::CreationOptions&) const
{
    return IBackendContextPtr{};
}
140
CreateBackendProfilingContext(const IRuntime::CreationOptions & options,IBackendProfilingPtr & backendProfiling)141 IBackendInternal::IBackendProfilingContextPtr MockBackend::CreateBackendProfilingContext(
142 const IRuntime::CreationOptions& options, IBackendProfilingPtr& backendProfiling)
143 {
144 IgnoreUnused(options);
145 std::shared_ptr<armnn::MockBackendProfilingContext> context =
146 std::make_shared<MockBackendProfilingContext>(backendProfiling);
147 MockBackendProfilingService::Instance().SetProfilingContextPtr(context);
148 return context;
149 }
150
// The mock backend supplies no memory manager; an empty pointer is returned.
IBackendInternal::IMemoryManagerUniquePtr MockBackend::CreateMemoryManager() const
{
    return IMemoryManagerUniquePtr{};
}
155
GetLayerSupport() const156 IBackendInternal::ILayerSupportSharedPtr MockBackend::GetLayerSupport() const
157 {
158 static ILayerSupportSharedPtr layerSupport{new MockLayerSupport};
159 return layerSupport;
160 }
161
// Partitions the given sub-graph and reports the result via OptimizationViews:
//  - supported AND optimizable layers -> each grouped sub-graph is substituted by a PreCompiled layer
//  - unsupported layers               -> reported as failed sub-graphs
//  - supported but NOT optimizable    -> reported as untouched sub-graphs
// "Supported" is decided by the fixed type list in IsLayerSupported(); a layer is
// "unoptimizable" when its name contains the substring "unoptimizable".
OptimizationViews MockBackend::OptimizeSubgraphView(const SubgraphView& subgraph) const
{
    // Prepare the optimization views
    OptimizationViews optimizationViews;

    // Get the layers of the input sub-graph
    const SubgraphView::IConnectableLayers& subgraphLayers = subgraph.GetIConnectableLayers();

    // Classify every layer into exactly one of the three buckets below
    SubgraphView::IConnectableLayers supportedLayers;
    SubgraphView::IConnectableLayers unsupportedLayers;
    SubgraphView::IConnectableLayers untouchedLayers;
    std::for_each(subgraphLayers.begin(),
                  subgraphLayers.end(),
                  [&](IConnectableLayer* layer)
                  {
                      bool supported = IsLayerSupported(PolymorphicDowncast<Layer*>(layer));
                      if (supported)
                      {
                          // Layer supported, check if it's optimizable
                          bool optimizable = IsLayerOptimizable(PolymorphicDowncast<Layer*>(layer));
                          if (optimizable)
                          {
                              // Layer fully supported
                              supportedLayers.push_back(layer);
                          }
                          else
                          {
                              // Layer supported but not optimizable
                              untouchedLayers.push_back(layer);
                          }
                      }
                      else
                      {
                          // Layer unsupported
                          unsupportedLayers.push_back(layer);
                      }
                  });

    // Check if there are supported layers
    if (!supportedLayers.empty())
    {
        // Select the layers that are neither inputs nor outputs, but that are optimizable
        auto supportedSubgraphSelector = [](const Layer& layer)
        {
            return layer.GetType() != LayerType::Input &&
                   layer.GetType() != LayerType::Output &&
                   IsLayerSupported(layer) &&
                   IsLayerOptimizable(layer);
        };

        // Apply the subgraph selector to the supported layers to group them into sub-graphs where appropriate
        SubgraphView mutableSubgraph(subgraph);
        SubgraphViewSelector::Subgraphs supportedSubgraphs =
            SubgraphViewSelector::SelectSubgraphs(mutableSubgraph, supportedSubgraphSelector);

        // Create a substitution pair for each supported sub-graph
        std::for_each(supportedSubgraphs.begin(),
                      supportedSubgraphs.end(),
                      [&optimizationViews](const SubgraphView::SubgraphViewPtr& supportedSubgraph)
                      {
                          ARMNN_ASSERT(supportedSubgraph != nullptr);

                          // NOTE(review): the compiled blob is deliberately left empty —
                          // the mock backend only models the substitution, it does not compile anything.
                          CompiledBlobPtr blobPtr;
                          BackendId backend = MockBackendId();

                          IConnectableLayer* preCompiledLayer =
                              optimizationViews.GetINetwork()->AddPrecompiledLayer(
                                  PreCompiledDescriptor(supportedSubgraph->GetNumInputSlots(),
                                                        supportedSubgraph->GetNumOutputSlots()),
                                  std::move(blobPtr),
                                  backend,
                                  nullptr);

                          SubgraphView substitutionSubgraph(*supportedSubgraph);
                          SubgraphView replacementSubgraph(preCompiledLayer);

                          optimizationViews.AddSubstitution({ substitutionSubgraph, replacementSubgraph });
                      });
    }

    // Check if there are unsupported layers
    if (!unsupportedLayers.empty())
    {
        // Select the layers that are neither inputs nor outputs, and are not supported
        auto unsupportedSubgraphSelector = [](const Layer& layer)
        {
            return layer.GetType() != LayerType::Input &&
                   layer.GetType() != LayerType::Output &&
                   !IsLayerSupported(layer);
        };

        // Apply the subgraph selector to the unsupported layers to group them into sub-graphs where appropriate
        SubgraphView mutableSubgraph(subgraph);
        SubgraphViewSelector::Subgraphs unsupportedSubgraphs =
            SubgraphViewSelector::SelectSubgraphs(mutableSubgraph, unsupportedSubgraphSelector);

        // Add each unsupported sub-graph to the list of failed sub-graphs in the optimization views
        std::for_each(unsupportedSubgraphs.begin(),
                      unsupportedSubgraphs.end(),
                      [&optimizationViews](const SubgraphView::SubgraphViewPtr& unsupportedSubgraph)
                      {
                          ARMNN_ASSERT(unsupportedSubgraph != nullptr);

                          optimizationViews.AddFailedSubgraph(SubgraphView(*unsupportedSubgraph));
                      });
    }

    // Check if there are untouched layers
    if (!untouchedLayers.empty())
    {
        // Select the layers that are neither inputs nor outputs, that are supported but not optimizable
        auto untouchedSubgraphSelector = [](const Layer& layer)
        {
            return layer.GetType() != LayerType::Input &&
                   layer.GetType() != LayerType::Output &&
                   IsLayerSupported(layer) &&
                   !IsLayerOptimizable(layer);
        };

        // Apply the subgraph selector to the untouched layers to group them into sub-graphs where appropriate
        SubgraphView mutableSubgraph(subgraph);
        SubgraphViewSelector::Subgraphs untouchedSubgraphs =
            SubgraphViewSelector::SelectSubgraphs(mutableSubgraph, untouchedSubgraphSelector);

        // Add each untouched sub-graph to the list of untouched sub-graphs in the optimization views
        std::for_each(untouchedSubgraphs.begin(),
                      untouchedSubgraphs.end(),
                      [&optimizationViews](const SubgraphView::SubgraphViewPtr& untouchedSubgraph)
                      {
                          ARMNN_ASSERT(untouchedSubgraph != nullptr);

                          optimizationViews.AddUntouchedSubgraph(SubgraphView(*untouchedSubgraph));
                      });
    }

    return optimizationViews;
}
300
GetDefaultAllocator() const301 std::unique_ptr<ICustomAllocator> MockBackend::GetDefaultAllocator() const
302 {
303 return std::make_unique<DefaultAllocator>();
304 }
305
306 } // namespace armnn
307