//
// Copyright © 2017, 2022-2023 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include <CommonTestUtils.hpp>
#include "MockBackendId.hpp"

#include <Graph.hpp>
#include <Network.hpp>

#include <armnn/BackendRegistry.hpp>
#include <armnnTestUtils/MockBackend.hpp>

#include <doctest/doctest.h>
#include <unordered_map>

using namespace armnn;

namespace
{

// The expected number of layers, input and output slots in a subgraph after a test
struct ExpectedSubgraphSize
{
    size_t m_NumInputSlots  = 0;
    size_t m_NumOutputSlots = 0;
    size_t m_NumLayers      = 0;
};

// Keep the layers organized by layer name
using LayerNameToLayerMap = std::unordered_map<std::string, Layer*>;

// Used to convert input and output slots from reference type (as stored in graphs) to
// pointer type (as stored in subgraphs)
template <typename SlotType>
SlotType* ConvertReferenceTypeToPointerType(const SlotType& input)
{
    return const_cast<SlotType*>(&input);
}

// Used to convert input and output slots from reference type (as stored in graphs) to
// pointer type (as stored in subgraphs), array version
template <typename SlotType>
std::vector<SlotType*> ConvertReferenceTypeToPointerType(const std::vector<SlotType>& input)
{
    std::vector<SlotType*> output;
    std::transform(input.begin(),
                   input.end(),
                   std::back_inserter(output),
                   [](const SlotType& inputItem)
    {
        return ConvertReferenceTypeToPointerType(inputItem);
    });

    return output;
}

// Convert from vector of Slots* (Input/Output) to vector of ISlots* (IInput/IOutput)
template <typename SlotType, typename ResultSlotType>
std::vector<ResultSlotType*> ConvertSlotsToISlots(const std::vector<SlotType*> input)
{
    std::vector<ResultSlotType*> output;
    for (auto slot : input)
    {
        output.push_back(PolymorphicDowncast<ResultSlotType*>(slot));
    }
    return output;
}

// Convenience function to add an input layer to a graph
Layer* AddInputLayer(Graph& graph,
                     const std::string& layerName,
                     const TensorInfo& inputInfo,
                     LayerBindingId inputId = 0)
{
    Layer* const inputLayer = graph.AddLayer<InputLayer>(inputId, layerName.c_str());
    CHECK(inputLayer);
    inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
    return inputLayer;
}

// Convenience function to add an output layer to a graph
Layer* AddOutputLayer(Graph& graph,
                      const std::string& layerName)
{
    Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, layerName.c_str());
    CHECK(outputLayer);
    return outputLayer;
}

// Convenience function to add a convolution layer to a graph
Convolution2dLayer* AddConvolutionLayer(Graph& graph,
                                        LayerNameToLayerMap& layersInGraph,
                                        const Convolution2dDescriptor& convolutionDescriptor,
                                        const std::string& layerName,
                                        const TensorInfo& outputInfo)
{
    Convolution2dLayer* const convLayer = graph.AddLayer<Convolution2dLayer>(convolutionDescriptor, layerName.c_str());
    CHECK(convLayer);
    convLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
    layersInGraph.insert(std::make_pair(convLayer->GetName(), convLayer));
    return convLayer;
}

// Convenience function to add a constant layer to a graph
ConstantLayer* AddConstantLayer(Graph& graph,
                                LayerNameToLayerMap& layersInGraph,
                                const std::string& layerName,
                                const ConstTensor& constTensor,
                                const TensorInfo& outputInfo)
{
    ConstantLayer* const constantLayer = graph.AddLayer<ConstantLayer>(layerName.c_str());
    CHECK(constantLayer);
    constantLayer->m_LayerOutput = std::make_shared<ScopedTensorHandle>(constTensor);
    constantLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
    layersInGraph.insert(std::make_pair(constantLayer->GetName(), constantLayer));
    return constantLayer;
}

// Convenience function to add a pooling layer to a graph
Pooling2dLayer* AddPoolingLayer(Graph& graph,
                                LayerNameToLayerMap& layersInGraph,
                                const Pooling2dDescriptor& poolingDescriptor,
                                const std::string& layerName,
                                const TensorInfo& outputInfo)
{
    Pooling2dLayer* const poolingLayer = graph.AddLayer<Pooling2dLayer>(poolingDescriptor, layerName.c_str());
    CHECK(poolingLayer);
    poolingLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
    layersInGraph.insert(std::make_pair(poolingLayer->GetName(), poolingLayer));
    return poolingLayer;
}

// Convenience function to add an addition layer to a graph
AdditionLayer* AddAdditionaLayer(Graph& graph,
                                 LayerNameToLayerMap& layersInGraph,
                                 const std::string& layerName,
                                 const TensorInfo& outputInfo)
{
    AdditionLayer* const additionLayer = graph.AddLayer<AdditionLayer>(layerName.c_str());
    CHECK(additionLayer);
    additionLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
    layersInGraph.insert(std::make_pair(additionLayer->GetName(), additionLayer));
    return additionLayer;
}

// Convenience function to check that the given substitution matches the specified expected values
void CheckSubstitution(const OptimizationViews::SubstitutionPair& substitution,
                       const ExpectedSubgraphSize& expectedSubstitutableSubgraphSize,
                       const ExpectedSubgraphSize& expectedReplacementSubgraphSize,
                       const SubgraphView::IInputSlots& expectedSubstitutableInputSlots,
                       const SubgraphView::IOutputSlots& expectedSubstitutableOutputSlots,
                       const SubgraphView::IConnectableLayers& expectedSubstitutableLayers)
{
    const SubgraphView& substitutableSubgraph = substitution.m_SubstitutableSubgraph;
    const SubgraphView::IInputSlots& substitutableSubgraphInputSlots = substitutableSubgraph.GetIInputSlots();
    const SubgraphView::IOutputSlots& substitutableSubgraphOutputSlots = substitutableSubgraph.GetIOutputSlots();
    const SubgraphView::IConnectableLayers& substitutableSubgraphLayers =
            substitutableSubgraph.GetIConnectableLayers();

    const SubgraphView& replacementSubgraph                          = substitution.m_ReplacementSubgraph;
    const SubgraphView::IInputSlots& replacementSubgraphInputSlots   = replacementSubgraph.GetIInputSlots();
    const SubgraphView::IOutputSlots& replacementSubgraphOutputSlots = replacementSubgraph.GetIOutputSlots();
    const SubgraphView::IConnectableLayers& replacementSubgraphLayers = replacementSubgraph.GetIConnectableLayers();

    CHECK(substitutableSubgraphInputSlots.size()  == expectedSubstitutableSubgraphSize.m_NumInputSlots);
    CHECK(substitutableSubgraphOutputSlots.size() == expectedSubstitutableSubgraphSize.m_NumOutputSlots);
    CHECK(substitutableSubgraphLayers.size()      == expectedSubstitutableSubgraphSize.m_NumLayers);

    CHECK(AreEqual(substitutableSubgraphInputSlots,  expectedSubstitutableInputSlots));
    CHECK(AreEqual(substitutableSubgraphOutputSlots, expectedSubstitutableOutputSlots));
    CHECK(AreEqual(substitutableSubgraphLayers,      expectedSubstitutableLayers));

    CHECK(replacementSubgraphInputSlots.size()  == expectedReplacementSubgraphSize.m_NumInputSlots);
    CHECK(replacementSubgraphOutputSlots.size() == expectedReplacementSubgraphSize.m_NumOutputSlots);
    CHECK(replacementSubgraphLayers.size()      == expectedReplacementSubgraphSize.m_NumLayers);

    CHECK(!AreEqual(replacementSubgraphInputSlots,  expectedSubstitutableInputSlots));
    CHECK(!AreEqual(replacementSubgraphOutputSlots, expectedSubstitutableOutputSlots));
    CHECK(!AreEqual(replacementSubgraphLayers,      expectedSubstitutableLayers));

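    // Every layer in the replacement subgraph is expected to be a PreCompiled layer created by the backend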
    CHECK(std::all_of(replacementSubgraphLayers.begin(),
                      replacementSubgraphLayers.end(),
                      [](const IConnectableLayer* layer)
    {
        return layer->GetType() == LayerType::PreCompiled;
    }));
}

// Convenience function to check that the given failed subgraph matches the specified expected values
void CheckFailedSubgraph(const SubgraphView& failedSubgraph,
                         const ExpectedSubgraphSize& expectedFailedSubgraphSize,
                         const SubgraphView::IInputSlots& expectedFailedInputSlots,
                         const SubgraphView::IOutputSlots& expectedFailedOutputSlots,
                         const SubgraphView::IConnectableLayers& expectedFailedLayers)
{
    const SubgraphView::IInputSlots&  failedSubgraphInputSlots  = failedSubgraph.GetIInputSlots();
    const SubgraphView::IOutputSlots& failedSubgraphOutputSlots = failedSubgraph.GetIOutputSlots();
    const SubgraphView::IConnectableLayers& failedSubgraphLayers = failedSubgraph.GetIConnectableLayers();

    CHECK(failedSubgraphInputSlots.size()  == expectedFailedSubgraphSize.m_NumInputSlots);
    CHECK(failedSubgraphOutputSlots.size() == expectedFailedSubgraphSize.m_NumOutputSlots);
    CHECK(failedSubgraphLayers.size()      == expectedFailedSubgraphSize.m_NumLayers);

    CHECK(AreEqual(failedSubgraphInputSlots,  expectedFailedInputSlots));
    CHECK(AreEqual(failedSubgraphOutputSlots, expectedFailedOutputSlots));
    CHECK(AreEqual(failedSubgraphLayers,      expectedFailedLayers));
}

// Convenience function to check that the given untouched subgraph matches the specified expected values
void CheckUntouchedSubgraph(const SubgraphView& untouchedSubgraph,
                            const ExpectedSubgraphSize& expectedUntouchedSubgraphSize,
                            const SubgraphView::IInputSlots& expectedUntouchedInputSlots,
                            const SubgraphView::IOutputSlots& expectedUntouchedOutputSlots,
                            const SubgraphView::IConnectableLayers& expectedUntouchedLayers)
{
    const SubgraphView::IInputSlots& untouchedSubgraphInputSlots = untouchedSubgraph.GetIInputSlots();
    const SubgraphView::IOutputSlots& untouchedSubgraphOutputSlots = untouchedSubgraph.GetIOutputSlots();
    const SubgraphView::IConnectableLayers& untouchedSubgraphLayers = untouchedSubgraph.GetIConnectableLayers();

    CHECK(untouchedSubgraphInputSlots.size()  == expectedUntouchedSubgraphSize.m_NumInputSlots);
    CHECK(untouchedSubgraphOutputSlots.size() == expectedUntouchedSubgraphSize.m_NumOutputSlots);
    CHECK(untouchedSubgraphLayers.size()      == expectedUntouchedSubgraphSize.m_NumLayers);

    CHECK(AreEqual(untouchedSubgraphInputSlots,  expectedUntouchedInputSlots));
    CHECK(AreEqual(untouchedSubgraphOutputSlots, expectedUntouchedOutputSlots));
    CHECK(AreEqual(untouchedSubgraphLayers,      expectedUntouchedLayers));
}

// Creates a subgraph containing only a single unsupported layer (only convolutions are supported by the mock backend)
SubgraphView::SubgraphViewPtr BuildFullyUnsupportedSubgraph1(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
    const TensorInfo inputInfo ({  1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    const TensorInfo outputInfo({  1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);

    Pooling2dDescriptor poolingDescriptor;
    poolingDescriptor.m_PoolType      = armnn::PoolingAlgorithm::Average;
    poolingDescriptor.m_PoolWidth     = 2;
    poolingDescriptor.m_PoolHeight    = 2;
    poolingDescriptor.m_StrideX       = 2;
    poolingDescriptor.m_StrideY       = 2;
    poolingDescriptor.m_PadLeft       = 1;
    poolingDescriptor.m_PadRight      = 1;
    poolingDescriptor.m_PadTop        = 1;
    poolingDescriptor.m_PadBottom     = 1;
    poolingDescriptor.m_PaddingMethod = armnn::PaddingMethod::Exclude;
    poolingDescriptor.m_DataLayout    = DataLayout::NHWC;

    // Construct the graph
    Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
    Pooling2dLayer* const poolingLayer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
                                                         "pooling layer", outputInfo);
    Layer* const outputLayer = AddOutputLayer(graph, "output layer");

    // Connect the network
    inputLayer->GetOutputSlot(0).Connect(poolingLayer->GetInputSlot(0));
    poolingLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // Create the subgraph view for the whole network
    return CreateSubgraphViewFrom(CreateInputsFrom(poolingLayer),
                                  CreateOutputsFrom({poolingLayer}),
                                  {poolingLayer});
}

// Creates a subgraph containing only unsupported layers (only convolutions are supported by the mock backend)
SubgraphView::SubgraphViewPtr BuildFullyUnsupportedSubgraph2(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
    const TensorInfo inputInfo ({  1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    const TensorInfo outputInfo({  1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);

    Pooling2dDescriptor poolingDescriptor;
    poolingDescriptor.m_PoolType      = armnn::PoolingAlgorithm::Average;
    poolingDescriptor.m_PoolWidth     = 2;
    poolingDescriptor.m_PoolHeight    = 2;
    poolingDescriptor.m_StrideX       = 2;
    poolingDescriptor.m_StrideY       = 2;
    poolingDescriptor.m_PadLeft       = 1;
    poolingDescriptor.m_PadRight      = 1;
    poolingDescriptor.m_PadTop        = 1;
    poolingDescriptor.m_PadBottom     = 1;
    poolingDescriptor.m_PaddingMethod = armnn::PaddingMethod::Exclude;
    poolingDescriptor.m_DataLayout    = DataLayout::NHWC;

    // Construct the graph
    Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
    Pooling2dLayer* const pooling1Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
                                                          "pooling1 layer", outputInfo);
    Pooling2dLayer* const pooling2Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
                                                          "pooling2 layer", outputInfo);
    Pooling2dLayer* const pooling3Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
                                                          "pooling3 layer", outputInfo);
    Layer* const outputLayer = AddOutputLayer(graph, "output layer");

    // Connect the network
    inputLayer->GetOutputSlot(0).Connect(pooling1Layer->GetInputSlot(0));
    pooling1Layer->GetOutputSlot(0).Connect(pooling2Layer->GetInputSlot(0));
    pooling2Layer->GetOutputSlot(0).Connect(pooling3Layer->GetInputSlot(0));
    pooling3Layer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // Create the subgraph view for the whole network
    return CreateSubgraphViewFrom(CreateInputsFrom(pooling1Layer),
                                  CreateOutputsFrom({pooling3Layer}),
                                  {pooling1Layer,
                                   pooling2Layer,
                                   pooling3Layer});
}

// Creates a simple subgraph with only one convolution layer, supported by the mock backend
SubgraphView::SubgraphViewPtr BuildFullyOptimizableSubgraph1(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
    const TensorInfo inputInfo ({  1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    const TensorInfo outputInfo({  1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    TensorInfo weightInfo({ 16,  1,  1, 16 }, DataType::QAsymmU8, 0.9f, 0);
    TensorInfo biasInfo({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);

    weightInfo.SetConstant(true);
    biasInfo.SetConstant(true);

    Convolution2dDescriptor convolutionDescriptor;
    convolutionDescriptor.m_StrideX     = 1;
    convolutionDescriptor.m_StrideY     = 1;
    convolutionDescriptor.m_BiasEnabled = true;
    convolutionDescriptor.m_DataLayout  = DataLayout::NHWC;

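    // Note: the float vectors only provide backing memory for the constant tensors; 64 floats give the
    // 256 bytes needed for the 16x1x1x16 QAsymmU8 weights, and 16 floats give the 64 bytes needed for
    // the 1x1x1x16 Signed32 bias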
    std::vector<float> weightsVector(64);
    ConstTensor constWeightsTensor(weightInfo, weightsVector);

    std::vector<float> biasVector(16);
    ConstTensor constBiasTensor(biasInfo, biasVector);

    // Construct the graph
    Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
    Convolution2dLayer* const convLayer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                              "conv layer", outputInfo);

    ConstantLayer* const weightsLayer =
        AddConstantLayer(graph, layersInGraph, "Weights Layer", constWeightsTensor, weightInfo);
    ConstantLayer* const biasLayer = AddConstantLayer(graph, layersInGraph, "Bias Layer", constBiasTensor, biasInfo);

    Layer* const outputLayer = AddOutputLayer(graph, "output layer");

    // Connect the network
    inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
    weightsLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(1));
    biasLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(2));
    convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

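    // Input slots 1 and 2 of the convolution (weights and bias) are fed by constant layers that are part of
    // the subgraph, so they are excluded from the subgraph's input slots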
    std::vector<unsigned int> ignoreSlots = {1, 2};
    // Create the subgraph view for the whole network
    return CreateSubgraphViewFrom(CreateInputsFrom(convLayer, ignoreSlots),
                                  CreateOutputsFrom({convLayer}),
                                  {convLayer, weightsLayer, biasLayer});
}

// Creates a subgraph with five convolution layers, all supported by the mock backend
SubgraphView::SubgraphViewPtr BuildFullyOptimizableSubgraph2(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
    const TensorInfo inputInfo ({  1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    const TensorInfo outputInfo({  1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    TensorInfo weightInfo({ 16,  1,  1, 16 }, DataType::QAsymmU8, 0.9f, 0);
    TensorInfo biasInfo  ({  1,  1,  1, 16 }, DataType::Signed32,        0.9f, 0);

    weightInfo.SetConstant(true);
    biasInfo.SetConstant(true);

    std::vector<float> weightsVector(64);
    ConstTensor constWeightsTensor(weightInfo, weightsVector);

    std::vector<float> biasVector(16);
    ConstTensor constBiasTensor(biasInfo, biasVector);

    Convolution2dDescriptor convolutionDescriptor;
    convolutionDescriptor.m_StrideX     = 1;
    convolutionDescriptor.m_StrideY     = 1;
    convolutionDescriptor.m_BiasEnabled = true;
    convolutionDescriptor.m_DataLayout  = DataLayout::NHWC;

    // Construct the graph
    Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
    Convolution2dLayer* const conv1Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv1 layer", outputInfo);
    ConstantLayer* const weightsLayer1 =
        AddConstantLayer(graph, layersInGraph, "Weights Layer 1", constWeightsTensor, weightInfo);
    ConstantLayer* const biasLayer1 =
        AddConstantLayer(graph, layersInGraph, "Bias Layer 1", constBiasTensor, biasInfo);

    Convolution2dLayer* const conv2Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv2 layer", outputInfo);
    ConstantLayer* const weightsLayer2 =
        AddConstantLayer(graph, layersInGraph, "Weights Layer 2", constWeightsTensor, weightInfo);
    ConstantLayer* const biasLayer2 =
        AddConstantLayer(graph, layersInGraph, "Bias Layer 2", constBiasTensor, biasInfo);


    Convolution2dLayer* const conv3Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv3 layer", outputInfo);
    ConstantLayer* const weightsLayer3 =
        AddConstantLayer(graph, layersInGraph, "Weights Layer 3", constWeightsTensor, weightInfo);
    ConstantLayer* const biasLayer3 =
        AddConstantLayer(graph, layersInGraph, "Bias Layer 3", constBiasTensor, biasInfo);

    Convolution2dLayer* const conv4Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv4 layer", outputInfo);
    ConstantLayer* const weightsLayer4 =
        AddConstantLayer(graph, layersInGraph, "Weights Layer 4", constWeightsTensor, weightInfo);
    ConstantLayer* const biasLayer4 =
        AddConstantLayer(graph, layersInGraph, "Bias Layer 4", constBiasTensor, biasInfo);

    Convolution2dLayer* const conv5Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv5 layer", outputInfo);
    ConstantLayer* const weightsLayer5 =
        AddConstantLayer(graph, layersInGraph, "Weights Layer 5", constWeightsTensor, weightInfo);
    ConstantLayer* const biasLayer5 =
        AddConstantLayer(graph, layersInGraph, "Bias Layer 5", constBiasTensor, biasInfo);


    Layer* const outputLayer = AddOutputLayer(graph, "output layer");

    // Connect the network
    inputLayer->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(0));
    weightsLayer1->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(1));
    biasLayer1->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(2));

    conv1Layer->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(0));
    weightsLayer2->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(1));
    biasLayer2->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(2));

    conv2Layer->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(0));
    weightsLayer3->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(1));
    biasLayer3->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(2));

    conv3Layer->GetOutputSlot(0).Connect(conv4Layer->GetInputSlot(0));
    weightsLayer4->GetOutputSlot(0).Connect(conv4Layer->GetInputSlot(1));
    biasLayer4->GetOutputSlot(0).Connect(conv4Layer->GetInputSlot(2));

    conv4Layer->GetOutputSlot(0).Connect(conv5Layer->GetInputSlot(0));
    weightsLayer5->GetOutputSlot(0).Connect(conv5Layer->GetInputSlot(1));
    biasLayer5->GetOutputSlot(0).Connect(conv5Layer->GetInputSlot(2));

    conv5Layer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
    std::vector<unsigned int> ignoreSlots = {1, 2};
    // Create the subgraph view for the whole network
    return CreateSubgraphViewFrom(CreateInputsFrom(conv1Layer, ignoreSlots),
                                  CreateOutputsFrom({ conv5Layer }),
                                  { weightsLayer1,
                                    biasLayer1,
                                    conv1Layer,
                                    weightsLayer2,
                                    biasLayer2,
                                    conv2Layer,
                                    weightsLayer3,
                                    biasLayer3,
                                    conv3Layer,
                                    weightsLayer4,
                                    biasLayer4,
                                    conv4Layer,
                                    weightsLayer5,
                                    biasLayer5,
                                    conv5Layer });
}

// Creates a subgraph with both supported and unsupported layers
// (only convolutions are supported by the mock backend)
SubgraphView::SubgraphViewPtr BuildPartiallySupportedSubgraph(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
    const TensorInfo inputInfo ({  1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    const TensorInfo outputInfo({  1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    TensorInfo weightInfo({ 16,  1,  1, 16 }, DataType::QAsymmU8, 0.9f, 0);
    TensorInfo biasInfo  ({  1,  1,  1, 16 }, DataType::Signed32,        0.9f, 0);

    weightInfo.SetConstant(true);
    biasInfo.SetConstant(true);

    std::vector<float> weightsVector(64);
    ConstTensor constWeightsTensor(weightInfo, weightsVector);

    std::vector<float> biasVector(16);
    ConstTensor constBiasTensor(biasInfo, biasVector);

    Convolution2dDescriptor convolutionDescriptor;
    convolutionDescriptor.m_StrideX     = 1;
    convolutionDescriptor.m_StrideY     = 1;
    convolutionDescriptor.m_BiasEnabled = true;
    convolutionDescriptor.m_DataLayout  = DataLayout::NHWC;

    Pooling2dDescriptor poolingDescriptor;
    poolingDescriptor.m_PoolType      = armnn::PoolingAlgorithm::Average;
    poolingDescriptor.m_PoolWidth     = 2;
    poolingDescriptor.m_PoolHeight    = 2;
    poolingDescriptor.m_StrideX       = 2;
    poolingDescriptor.m_StrideY       = 2;
    poolingDescriptor.m_PadLeft       = 1;
    poolingDescriptor.m_PadRight      = 1;
    poolingDescriptor.m_PadTop        = 1;
    poolingDescriptor.m_PadBottom     = 1;
    poolingDescriptor.m_PaddingMethod = armnn::PaddingMethod::Exclude;
    poolingDescriptor.m_DataLayout    = DataLayout::NHWC;

    // Construct the graph
    Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);
    ConstantLayer* const weightsLayer1 =
        AddConstantLayer(graph, layersInGraph, "Weights Layer 1", constWeightsTensor, weightInfo);

    ConstantLayer* const biasLayer1 =
        AddConstantLayer(graph, layersInGraph, "Bias Layer 1", constBiasTensor, biasInfo);

    Convolution2dLayer* const conv1Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv1 layer", outputInfo);
    Pooling2dLayer* const pooling1Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
                                                          "pooling1 layer", outputInfo);
    Pooling2dLayer* const pooling2Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
                                                          "pooling2 layer", outputInfo);

    ConstantLayer* const weightsLayer2 =
        AddConstantLayer(graph, layersInGraph, "Weights Layer 2", constWeightsTensor, weightInfo);

    ConstantLayer* const biasLayer2 =
        AddConstantLayer(graph, layersInGraph, "Bias Layer 2", constBiasTensor, biasInfo);

    Convolution2dLayer* const conv2Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv2 layer", outputInfo);
    Pooling2dLayer* const pooling3Layer = AddPoolingLayer(graph, layersInGraph, poolingDescriptor,
                                                          "pooling3 layer", outputInfo);
    Layer* const outputLayer = AddOutputLayer(graph, "output layer");

    // Connect the network
    inputLayer->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(0));
    weightsLayer1->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(1));
    biasLayer1->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(2));
    conv1Layer->GetOutputSlot(0).Connect(pooling1Layer->GetInputSlot(0));
    pooling1Layer->GetOutputSlot(0).Connect(pooling2Layer->GetInputSlot(0));
    pooling2Layer->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(0));
    weightsLayer2->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(1));
    biasLayer2->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(2));
    conv2Layer->GetOutputSlot(0).Connect(pooling3Layer->GetInputSlot(0));
    pooling3Layer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    std::vector<unsigned int> ignoreSlots = {1, 2};
    // Create the subgraph view for the whole network
    return CreateSubgraphViewFrom(CreateInputsFrom(conv1Layer, ignoreSlots),
                                  CreateOutputsFrom({pooling3Layer}),
                                  {weightsLayer1,
                                   biasLayer1,
                                   conv1Layer,
                                   pooling1Layer,
                                   pooling2Layer,
                                   weightsLayer2,
                                   biasLayer2,
                                   conv2Layer,
                                   pooling3Layer});
}

// Creates a subgraph with only unoptimizable layers ("unoptimizable" is added to the layer's name)
SubgraphView::SubgraphViewPtr BuildFullyUnoptimizableSubgraph1(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
    const TensorInfo inputInfo ({  1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    const TensorInfo outputInfo({  1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    TensorInfo weightInfo({ 16,  1,  1, 16 }, DataType::QAsymmU8, 0.9f, 0);
    TensorInfo biasInfo  ({  1,  1,  1, 16 }, DataType::Signed32,        0.9f, 0);

    weightInfo.SetConstant(true);
    biasInfo.SetConstant(true);

    std::vector<float> weightsVector(64);
    ConstTensor constWeightsTensor(weightInfo, weightsVector);

    std::vector<float> biasVector(16);
    ConstTensor constBiasTensor(biasInfo, biasVector);
    Convolution2dDescriptor convolutionDescriptor;
    convolutionDescriptor.m_StrideX     = 1;
    convolutionDescriptor.m_StrideY     = 1;
    convolutionDescriptor.m_BiasEnabled = true;
    convolutionDescriptor.m_DataLayout  = DataLayout::NHWC;

    // Construct the graph
    Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);

    ConstantLayer* const weightsLayer =
        AddConstantLayer(graph, layersInGraph, "Weights Layer unoptimizable", constWeightsTensor, weightInfo);

    ConstantLayer* const biasLayer =
        AddConstantLayer(graph, layersInGraph, "Bias Layer unoptimizable", constBiasTensor, biasInfo);

    Convolution2dLayer* const convLayer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                              "conv layer unoptimizable", outputInfo);
    Layer* const outputLayer = AddOutputLayer(graph, "output layer");

    // Connect the network
    inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
    weightsLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(1));
    biasLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(2));
    convLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    std::vector<unsigned int> ignoreSlots = {1, 2};
    // Create the subgraph view for the whole network
    return CreateSubgraphViewFrom(CreateInputsFrom(convLayer, ignoreSlots),
                                  CreateOutputsFrom({convLayer}),
                                  {convLayer, weightsLayer, biasLayer});
}

// Creates a subgraph with some unoptimizable layers ("unoptimizable" is added to the layer's name)
SubgraphView::SubgraphViewPtr BuildPartiallyOptimizableSubgraph1(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
    const TensorInfo inputInfo ({  1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    const TensorInfo outputInfo({  1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    TensorInfo weightInfo({ 16,  1,  1, 16 }, DataType::QAsymmU8, 0.9f, 0);
    TensorInfo biasInfo  ({  1,  1,  1, 16 }, DataType::Signed32,        0.9f, 0);

    weightInfo.SetConstant(true);
    biasInfo.SetConstant(true);

    std::vector<float> weightsVector(64);
    ConstTensor constWeightsTensor(weightInfo, weightsVector);

    std::vector<float> biasVector(16);
    ConstTensor constBiasTensor(biasInfo, biasVector);

    Convolution2dDescriptor convolutionDescriptor;
    convolutionDescriptor.m_StrideX     = 1;
    convolutionDescriptor.m_StrideY     = 1;
    convolutionDescriptor.m_BiasEnabled = true;
    convolutionDescriptor.m_DataLayout  = DataLayout::NHWC;

    // Construct the graph
    Layer* const inputLayer = AddInputLayer(graph, "input layer", inputInfo);

    ConstantLayer* const weightsLayer1 =
        AddConstantLayer(graph, layersInGraph, "Weights Layer 1", constWeightsTensor, weightInfo);
    ConstantLayer* const biasLayer1 =
        AddConstantLayer(graph, layersInGraph, "Bias Layer 1", constBiasTensor, biasInfo);
    ConstantLayer* const weightsLayer2 =
        AddConstantLayer(graph, layersInGraph, "Weights Layer 2 unoptimizable", constWeightsTensor, weightInfo);
    ConstantLayer* const biasLayer2 =
        AddConstantLayer(graph, layersInGraph, "Bias Layer 2 unoptimizable", constBiasTensor, biasInfo);
    ConstantLayer* const weightsLayer3 =
        AddConstantLayer(graph, layersInGraph, "Weights Layer 3", constWeightsTensor, weightInfo);
    ConstantLayer* const biasLayer3 =
        AddConstantLayer(graph, layersInGraph, "Bias Layer 3", constBiasTensor, biasInfo);
    ConstantLayer* const weightsLayer4 =
        AddConstantLayer(graph, layersInGraph, "Weights Layer 4 unoptimizable", constWeightsTensor, weightInfo);
    ConstantLayer* const biasLayer4 =
        AddConstantLayer(graph, layersInGraph, "Bias Layer 4 unoptimizable", constBiasTensor, biasInfo);
    ConstantLayer* const weightsLayer5 =
        AddConstantLayer(graph, layersInGraph, "Weights Layer 5", constWeightsTensor, weightInfo);
    ConstantLayer* const biasLayer5 =
        AddConstantLayer(graph, layersInGraph, "Bias Layer 5", constBiasTensor, biasInfo);

    Convolution2dLayer* const conv1Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv1 layer", outputInfo);
    Convolution2dLayer* const conv2Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv2 layer unoptimizable", outputInfo);
    Convolution2dLayer* const conv3Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv3 layer", outputInfo);
    Convolution2dLayer* const conv4Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv4 layer unoptimizable", outputInfo);
    Convolution2dLayer* const conv5Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv5 layer", outputInfo);

    Layer* const outputLayer = AddOutputLayer(graph, "output layer");

    // Connect the network
    inputLayer->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(0));
    weightsLayer1->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(1));
    biasLayer1->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(2));

    conv1Layer->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(0));
    weightsLayer2->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(1));
    biasLayer2->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(2));

    conv2Layer->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(0));
    weightsLayer3->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(1));
    biasLayer3->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(2));

    conv3Layer->GetOutputSlot(0).Connect(conv4Layer->GetInputSlot(0));
    weightsLayer4->GetOutputSlot(0).Connect(conv4Layer->GetInputSlot(1));
    biasLayer4->GetOutputSlot(0).Connect(conv4Layer->GetInputSlot(2));

    conv4Layer->GetOutputSlot(0).Connect(conv5Layer->GetInputSlot(0));
    weightsLayer5->GetOutputSlot(0).Connect(conv5Layer->GetInputSlot(1));
    biasLayer5->GetOutputSlot(0).Connect(conv5Layer->GetInputSlot(2));

    conv5Layer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    std::vector<unsigned int> ignoreSlots = {1, 2};
    // Create the subgraph view for the whole network
    return CreateSubgraphViewFrom(CreateInputsFrom(conv1Layer, ignoreSlots),
                                  CreateOutputsFrom({conv5Layer}),
                                  {weightsLayer1,
                                   biasLayer1,
                                   conv1Layer,
                                   weightsLayer2,
                                   biasLayer2,
                                   conv2Layer,
                                   weightsLayer3,
                                   biasLayer3,
                                   conv3Layer,
                                   weightsLayer4,
                                   biasLayer4,
                                   conv4Layer,
                                   weightsLayer5,
                                   biasLayer5,
                                   conv5Layer});
}

// Creates a subgraph where some of the layers feeding the inputs are unoptimizable ("unoptimizable" is added
// to the layer's name); this is meant to test input slots coming from different layers
SubgraphView::SubgraphViewPtr BuildPartiallyOptimizableSubgraph2(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
    const TensorInfo inputInfo ({  1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    const TensorInfo outputInfo({  1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
    TensorInfo weightInfo({ 16,  1,  1, 16 }, DataType::QAsymmU8, 0.9f, 0);
    TensorInfo biasInfo  ({  1,  1,  1, 16 }, DataType::Signed32,        0.9f, 0);

    weightInfo.SetConstant(true);
    biasInfo.SetConstant(true);

    std::vector<float> weightsVector(64);
    ConstTensor constWeightsTensor(weightInfo, weightsVector);

    std::vector<float> biasVector(16);
    ConstTensor constBiasTensor(biasInfo, biasVector);

    Convolution2dDescriptor convolutionDescriptor;
    convolutionDescriptor.m_StrideX     = 1;
    convolutionDescriptor.m_StrideY     = 1;
    convolutionDescriptor.m_BiasEnabled = true;
    convolutionDescriptor.m_DataLayout  = DataLayout::NHWC;

    // Construct the graph
    Layer* const input1Layer = AddInputLayer(graph, "input1 layer", inputInfo, 0);
    Layer* const input2Layer = AddInputLayer(graph, "input2 layer", inputInfo, 1);

    ConstantLayer* const weightsLayer1 =
        AddConstantLayer(graph, layersInGraph, "Weights Layer 1", constWeightsTensor, weightInfo);
    ConstantLayer* const biasLayer1 =
        AddConstantLayer(graph, layersInGraph, "Bias Layer 1", constBiasTensor, biasInfo);
    ConstantLayer* const weightsLayer2 =
        AddConstantLayer(graph, layersInGraph, "Weights Layer 2 unoptimizable", constWeightsTensor, weightInfo);
    ConstantLayer* const biasLayer2 =
        AddConstantLayer(graph, layersInGraph, "Bias Layer 2 unoptimizable", constBiasTensor, biasInfo);
    ConstantLayer* const weightsLayer3 =
        AddConstantLayer(graph, layersInGraph, "Weights Layer 3", constWeightsTensor, weightInfo);
    ConstantLayer* const biasLayer3 =
        AddConstantLayer(graph, layersInGraph, "Bias Layer 3", constBiasTensor, biasInfo);

    Convolution2dLayer* const conv1Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv1 layer", outputInfo);
    Convolution2dLayer* const conv2Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv2 layer unoptimizable", outputInfo);
    Convolution2dLayer* const conv3Layer = AddConvolutionLayer(graph, layersInGraph, convolutionDescriptor,
                                                               "conv3 layer", outputInfo);
    AdditionLayer* const addLayer = AddAdditionaLayer(graph, layersInGraph, "add layer", outputInfo);
    Layer* const outputLayer = AddOutputLayer(graph, "output layer");

    // Connect the network
    input1Layer->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(0));
    weightsLayer1->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(1));
    biasLayer1->GetOutputSlot(0).Connect(conv1Layer->GetInputSlot(2));
    conv1Layer->GetOutputSlot(0).Connect(addLayer->GetInputSlot(0));

    input2Layer->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(0));
    weightsLayer2->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(1));
    biasLayer2->GetOutputSlot(0).Connect(conv2Layer->GetInputSlot(2));
    conv2Layer->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(0));
    weightsLayer3->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(1));
    biasLayer3->GetOutputSlot(0).Connect(conv3Layer->GetInputSlot(2));
    conv3Layer->GetOutputSlot(0).Connect(addLayer->GetInputSlot(1));

    addLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // Create the subgraph view for the whole network
    std::vector<unsigned int> ignoreSlots = {1, 2};
    return CreateSubgraphViewFrom(CreateInputsFrom({conv1Layer,
                                                    conv2Layer}, ignoreSlots),
                                  CreateOutputsFrom({addLayer}),
                                  { weightsLayer1,
                                    biasLayer1,
                                    weightsLayer2,
                                    biasLayer2,
                                    weightsLayer3,
                                    biasLayer3,
                                    conv1Layer,
                                    conv2Layer,
                                    conv3Layer,
                                    addLayer });
}

// The input subgraph contains only a single unsupported layer (only convolutions are supported by the mock backend)
void FullyUnsupporteSubgraphTestImpl1()
{
    Graph graph;
    LayerNameToLayerMap layersInGraph;

    // Create an unsupported subgraph
    SubgraphView::SubgraphViewPtr subgraphPtr = BuildFullyUnsupportedSubgraph1(graph, layersInGraph);
    CHECK((subgraphPtr != nullptr));

    const SubgraphView::IInputSlots& subgraphInputSlots = subgraphPtr->GetIInputSlots();
    const SubgraphView::IOutputSlots& subgraphOutputSlots = subgraphPtr->GetIOutputSlots();
    const SubgraphView::IConnectableLayers& subgraphLayers = subgraphPtr->GetIConnectableLayers();

    CHECK(subgraphInputSlots.size()  == 1);
    CHECK(subgraphOutputSlots.size() == 1);
    CHECK(subgraphLayers.size()      == 1);

    CHECK(Contains(layersInGraph, "pooling layer"));

    // Create a mock backend object
    MockBackendInitialiser initialiser; // Register the Mock Backend
    auto backendObjPtr = CreateBackendObject(MockBackendId());
    CHECK((backendObjPtr != nullptr));

    // Optimize the subgraph
    OptimizationViews optimizationViews;

    // Check that the optimization is carried out correctly, but no optimization is performed
    CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));

    // =======================================================================
    // The expected results are:
    //  - No substitutions
    //  - Exactly one failed subgraph, corresponding to the whole original one
    //  - No untouched subgraphs
    // =======================================================================

    // -----------------------
    // Check the substitutions
    // -----------------------

    CHECK(optimizationViews.GetSubstitutions().empty());

    // --------------------------
    // Check the failed subgraphs
    // --------------------------

    const OptimizationViews::Subgraphs& failedSubgraphs = optimizationViews.GetFailedSubgraphs();
    CHECK(failedSubgraphs.size() == 1);

    CheckFailedSubgraph(failedSubgraphs.at(0),
                        { subgraphInputSlots.size(), subgraphOutputSlots.size(), subgraphLayers.size() },
                        subgraphInputSlots,
                        subgraphOutputSlots,
                        subgraphLayers);

    // -----------------------------
    // Check the untouched subgraphs
    // -----------------------------

    CHECK(optimizationViews.GetUntouchedSubgraphs().empty());
}

// The input subgraph contains only unsupported layers (only convolutions are supported by the mock backend)
void FullyUnsupporteSubgraphTestImpl2()
{
    Graph graph;
    LayerNameToLayerMap layersInGraph;

    // Create an unsupported subgraph
    SubgraphView::SubgraphViewPtr subgraphPtr = BuildFullyUnsupportedSubgraph2(graph, layersInGraph);
    CHECK((subgraphPtr != nullptr));

    const SubgraphView::IInputSlots& subgraphInputSlots = subgraphPtr->GetIInputSlots();
    const SubgraphView::IOutputSlots& subgraphOutputSlots = subgraphPtr->GetIOutputSlots();
    const SubgraphView::IConnectableLayers& subgraphLayers = subgraphPtr->GetIConnectableLayers();

    CHECK(subgraphInputSlots.size()  == 1);
    CHECK(subgraphOutputSlots.size() == 1);
    CHECK(subgraphLayers.size()      == 3);

    CHECK(Contains(layersInGraph, "pooling1 layer"));
    CHECK(Contains(layersInGraph, "pooling2 layer"));
    CHECK(Contains(layersInGraph, "pooling3 layer"));

    // Create a mock backend object
    MockBackendInitialiser initialiser; // Register the Mock Backend
    auto backendObjPtr = CreateBackendObject(MockBackendId());
    CHECK((backendObjPtr != nullptr));

    // Optimize the subgraph
    OptimizationViews optimizationViews;

    // Check that the optimization is carried out correctly, but no optimization is performed
    CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));

    // =======================================================================
    // The expected results are:
    //  - No substitutions
    //  - Exactly one failed subgraph, corresponding to the whole original one
    //  - No untouched subgraphs
    // =======================================================================

    // -----------------------
    // Check the substitutions
    // -----------------------

    CHECK(optimizationViews.GetSubstitutions().empty());

    // --------------------------
    // Check the failed subgraphs
    // --------------------------

    const OptimizationViews::Subgraphs& failedSubgraphs = optimizationViews.GetFailedSubgraphs();
    CHECK(failedSubgraphs.size() == 1);

    std::list<IConnectableLayer*> expectedFailedLayers{ layersInGraph.at("pooling1 layer"),
                                                        layersInGraph.at("pooling2 layer"),
                                                        layersInGraph.at("pooling3 layer") };

    const SubgraphView& failedSubgraph = failedSubgraphs.at(0);

    CheckFailedSubgraph(failedSubgraph,
                        { subgraphInputSlots.size(), subgraphOutputSlots.size(), subgraphLayers.size() },
                        subgraphInputSlots,
                        subgraphOutputSlots,
                        subgraphLayers);

    const SubgraphView::IConnectableLayers& failedSubgraphLayers = failedSubgraph.GetIConnectableLayers();

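    // Note: these checks only compare the front pointers of the two layer lists (at offsets 0, 1 and 2);
    // they do not iterate over every expected layer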
920     CHECK_EQ(failedSubgraphLayers.front() + 0, expectedFailedLayers.front() + 0);
921     CHECK_EQ(failedSubgraphLayers.front() + 1, expectedFailedLayers.front() + 1);
922     CHECK_EQ(failedSubgraphLayers.front() + 2, expectedFailedLayers.front() + 2);
923 
924     // -----------------------------
925     // Check the untouched subgraphs
926     // -----------------------------
927 
928     CHECK(optimizationViews.GetUntouchedSubgraphs().empty());
929 }
930 
931 // A simple case with only one layer (convolution) to optimize, supported by the mock backend
FullyOptimizableSubgraphTestImpl1()932 void FullyOptimizableSubgraphTestImpl1()
933 {
934     Graph graph;
935     LayerNameToLayerMap layersInGraph;
936 
937     // Create a fully optimizable subgraph
938     SubgraphView::SubgraphViewPtr subgraphPtr = BuildFullyOptimizableSubgraph1(graph, layersInGraph);
939     CHECK((subgraphPtr != nullptr));
940 
941     const SubgraphView::IInputSlots& subgraphInputSlots = subgraphPtr->GetIInputSlots();
942     const SubgraphView::IOutputSlots& subgraphOutputSlots = subgraphPtr->GetIOutputSlots();
943     const SubgraphView::IConnectableLayers& subgraphLayers = subgraphPtr->GetIConnectableLayers();
944 
945     CHECK(subgraphInputSlots.size()  == 1);
946     CHECK(subgraphOutputSlots.size() == 1);
947     CHECK(subgraphLayers.size()      == 3);
948 
949     CHECK(Contains(layersInGraph, "conv layer"));
950     CHECK(Contains(layersInGraph, "Weights Layer"));
951     CHECK(Contains(layersInGraph, "Bias Layer"));
952 
953     // Create a mock backend object
954     MockBackendInitialiser initialiser; // Register the Mock Backend
955     auto backendObjPtr = CreateBackendObject(MockBackendId());
956     CHECK((backendObjPtr != nullptr));
957 
958     // Optimize the subgraph
959     OptimizationViews optimizationViews;
960 
961     // Check that the optimization is carried out correctly
962     CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
963 
964     // ===========================================================================================
965     // The expected results are:
966     //  - Exactly one substitution, mapping the whole input subgraph to a new replacement subgraph
967     //  - No failed subgraphs
968     //  - No untouched subgraphs
969     // ===========================================================================================
970 
971     // -----------------------
972     // Check the substitutions
973     // -----------------------
974 
975     const OptimizationViews::Substitutions& substitutions = optimizationViews.GetSubstitutions();
976     CHECK(substitutions.size() == 1);
977 
978     CheckSubstitution(substitutions.at(0),
979                       { subgraphInputSlots.size(), subgraphOutputSlots.size(), subgraphLayers.size() },
980                       { subgraphInputSlots.size(), subgraphOutputSlots.size(), 1 },
981                       subgraphInputSlots,
982                       subgraphOutputSlots,
983                       subgraphLayers);
984 
985     // --------------------------
986     // Check the failed subgraphs
987     // --------------------------
988 
989     CHECK(optimizationViews.GetFailedSubgraphs().empty());
990 
991     // -----------------------------
992     // Check the untouched subgraphs
993     // -----------------------------
994 
995     CHECK(optimizationViews.GetUntouchedSubgraphs().empty());
996 }
997 
998 // A case with five layers (all convolutions) to optimize, all supported by the mock backend
FullyOptimizableSubgraphTestImpl2()999 void FullyOptimizableSubgraphTestImpl2()
1000 {
1001     Graph graph;
1002     LayerNameToLayerMap layersInGraph;
1003 
1004     // Create a fully optimizable subgraph
1005     SubgraphView::SubgraphViewPtr subgraphPtr = BuildFullyOptimizableSubgraph2(graph, layersInGraph);
1006     CHECK((subgraphPtr != nullptr));
1007 
1008     const SubgraphView::IInputSlots& subgraphInputSlots = subgraphPtr->GetIInputSlots();
1009     const SubgraphView::IOutputSlots& subgraphOutputSlots = subgraphPtr->GetIOutputSlots();
1010     const SubgraphView::IConnectableLayers& subgraphLayers = subgraphPtr->GetIConnectableLayers();
1011 
1012     CHECK(subgraphInputSlots.size()  == 1);
1013     CHECK(subgraphOutputSlots.size() == 1);
1014     CHECK(subgraphLayers.size()      == 15);
1015 
1016     CHECK(Contains(layersInGraph, "conv1 layer"));
1017     CHECK(Contains(layersInGraph, "conv2 layer"));
1018     CHECK(Contains(layersInGraph, "conv3 layer"));
1019     CHECK(Contains(layersInGraph, "conv4 layer"));
1020     CHECK(Contains(layersInGraph, "conv5 layer"));
1021     CHECK(Contains(layersInGraph, "Weights Layer 1"));
1022     CHECK(Contains(layersInGraph, "Weights Layer 2"));
1023     CHECK(Contains(layersInGraph, "Weights Layer 3"));
1024     CHECK(Contains(layersInGraph, "Weights Layer 4"));
1025     CHECK(Contains(layersInGraph, "Weights Layer 5"));
1026     CHECK(Contains(layersInGraph, "Bias Layer 1"));
1027     CHECK(Contains(layersInGraph, "Bias Layer 2"));
1028     CHECK(Contains(layersInGraph, "Bias Layer 3"));
1029     CHECK(Contains(layersInGraph, "Bias Layer 4"));
1030     CHECK(Contains(layersInGraph, "Bias Layer 5"));
1031 
1032     // Create a mock backend object
1033     MockBackendInitialiser initialiser; // Register the Mock Backend
1034     auto backendObjPtr = CreateBackendObject(MockBackendId());
1035     CHECK((backendObjPtr != nullptr));
1036 
1037     // Optimize the subgraph
1038     OptimizationViews optimizationViews;
1039 
1040     // Check that the optimization is carried out correctly
1041     CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
1042 
1043     // ===========================================================================================
1044     // The expected results are:
1045     //  - Exactly one substitution, mapping the whole input subgraph to a new replacement subgraph
1046     //  - No failed subgraphs
1047     //  - No untouched subgraphs
1048     // ===========================================================================================
1049 
1050     // -----------------------
1051     // Check the substitutions
1052     // -----------------------
1053 
1054     const OptimizationViews::Substitutions& substitutions = optimizationViews.GetSubstitutions();
1055     CHECK(substitutions.size() == 1);
1056 
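    // The expected substitutable subgraph contains all 15 layers: the constant weight and bias layers
    // followed by the five convolutions, collapsed into a single replacement layer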
1057     std::list<IConnectableLayer*> expectedSubstitutableLayers{
1058                                                    layersInGraph.at("Weights Layer 1"),
1059                                                    layersInGraph.at("Weights Layer 2"),
1060                                                    layersInGraph.at("Weights Layer 3"),
1061                                                    layersInGraph.at("Weights Layer 4"),
1062                                                    layersInGraph.at("Weights Layer 5"),
1063                                                    layersInGraph.at("Bias Layer 1"),
1064                                                    layersInGraph.at("Bias Layer 2"),
1065                                                    layersInGraph.at("Bias Layer 3"),
1066                                                    layersInGraph.at("Bias Layer 4"),
1067                                                    layersInGraph.at("Bias Layer 5"),
1068                                                    layersInGraph.at("conv1 layer"),
1069                                                    layersInGraph.at("conv2 layer"),
1070                                                    layersInGraph.at("conv3 layer"),
1071                                                    layersInGraph.at("conv4 layer"),
1072                                                    layersInGraph.at("conv5 layer")};
1073 
1074     const OptimizationViews::SubstitutionPair& substitution = substitutions.at(0);
1075 
1076     CheckSubstitution(
1077         substitution,
1078         {subgraphInputSlots.size(), subgraphOutputSlots.size(),
1079          subgraphLayers.size()},
1080         {subgraphInputSlots.size(), subgraphOutputSlots.size(), 1},
1081         subgraphInputSlots, subgraphOutputSlots, expectedSubstitutableLayers);
1082 
1083     const SubgraphView::IConnectableLayers& substitutableSubgraphLayers =
1084             substitution.m_SubstitutableSubgraph.GetIConnectableLayers();
1085 
1086     // Sanity-check the substitutable subgraph against the expected layer list
1087     CHECK_EQ(substitutableSubgraphLayers.size(), expectedSubstitutableLayers.size());
1088     CHECK_EQ(substitutableSubgraphLayers.front(), expectedSubstitutableLayers.front());
1091 
1092     // --------------------------
1093     // Check the failed subgraphs
1094     // --------------------------
1095 
1096     CHECK(optimizationViews.GetFailedSubgraphs().empty());
1097 
1098     // -----------------------------
1099     // Check the untouched subgraphs
1100     // -----------------------------
1101 
1102     CHECK(optimizationViews.GetUntouchedSubgraphs().empty());
1103 }
1104 
1105 // The input subgraph contains both supported and unsupported layers
1106 // (but only convolutions are unsupported by the mock backend)
1107 void PartiallySupportedSubgraphTestImpl()
1108 {
1109     Graph graph;
1110     LayerNameToLayerMap layersInGraph;
1111 
1112     // Create a partially supported subgraph
1113     SubgraphView::SubgraphViewPtr subgraphPtr = BuildPartiallySupportedSubgraph(graph, layersInGraph);
1114     CHECK((subgraphPtr != nullptr));
1115 
1116     const SubgraphView::IInputSlots& subgraphInputSlots = subgraphPtr->GetIInputSlots();
1117     const SubgraphView::IOutputSlots& subgraphOutputSlots = subgraphPtr->GetIOutputSlots();
1118     const SubgraphView::IConnectableLayers& subgraphLayers = subgraphPtr->GetIConnectableLayers();
1119 
1120     CHECK(subgraphInputSlots.size()  == 1);
1121     CHECK(subgraphOutputSlots.size() == 1);
1122     CHECK(subgraphLayers.size()      == 9);
1123 
1124     CHECK(Contains(layersInGraph, "Weights Layer 1"));
1125     CHECK(Contains(layersInGraph, "Bias Layer 1"));
1126     CHECK(Contains(layersInGraph, "conv1 layer"));
1127     CHECK(Contains(layersInGraph, "pooling1 layer"));
1128     CHECK(Contains(layersInGraph, "pooling2 layer"));
1129     CHECK(Contains(layersInGraph, "Weights Layer 2"));
1130     CHECK(Contains(layersInGraph, "Bias Layer 2"));
1131     CHECK(Contains(layersInGraph, "conv2 layer"));
1132     CHECK(Contains(layersInGraph, "pooling3 layer"));
1133 
1134     // Create a mock backend object
1135     MockBackendInitialiser initialiser; // Register the Mock Backend
1136     auto backendObjPtr = CreateBackendObject(MockBackendId());
1137     CHECK((backendObjPtr != nullptr));
1138 
1139     // Optimize the subgraph
1140     OptimizationViews optimizationViews;
1141 
1142     // Check that the optimization is carried out correctly
1143     CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
1144 
1145     // ========================================================================
1146     // The expected results are:
1147     //  - Exactly two substitutions, corresponding to the supported layers
1148     //  - Exactly two failed subgraphs, corresponding to the unsupported layers
1149     //  - No untouched subgraphs
1150     // ========================================================================
1151 
1152     // -----------------------
1153     // Check the substitutions
1154     // -----------------------
1155 
1156     OptimizationViews::Substitutions substitutions = optimizationViews.GetSubstitutions();
1157     CHECK(substitutions.size() == 2);
1158     // Sort into a consistent order
1159     std::sort(substitutions.begin(), substitutions.end(), [](auto s1, auto s2) {
1160         return strcmp(s1.m_SubstitutableSubgraph.GetIConnectableLayers().front()->GetName(),
1161                       s2.m_SubstitutableSubgraph.GetIConnectableLayers().front()->GetName()) < 0;
1162     });
1163 
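    // Expectations for the two supported chains (conv1 and conv2, each with its constant weight and bias layers),
    // each substituted by a single replacement layer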
1164     std::vector<ExpectedSubgraphSize> expectedSubstitutableSubgraphSizes{ { 1, 1, 3 },
1165                                                                           { 1, 1, 3 } };
1166     std::vector<ExpectedSubgraphSize> expectedReplacementSubgraphSizes{ { 1, 1, 1 },
1167                                                                         { 1, 1, 1 } };
1168     std::vector<SubgraphView::IInputSlots> expectedSubstitutableInputSlots
1169     {
1170             ConvertSlotsToISlots<InputSlot, IInputSlot>(
1171                 {ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetInputSlot(0))}),
1172             ConvertSlotsToISlots<InputSlot, IInputSlot>(
1173                 {ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer")->GetInputSlot(0))})
1174     };
1175 
1176     std::vector<SubgraphView::IOutputSlots> expectedSubstitutableOutputSlots
1177     {
1178         ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
1179                 ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetOutputSlots())),
1180         ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
1181                 ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer")->GetOutputSlots()))
1182     };
1183     std::vector<SubgraphView::IConnectableLayers> expectedSubstitutableLayers
1184     {
1185         { layersInGraph.at("Weights Layer 1"), layersInGraph.at("Bias Layer 1"), layersInGraph.at("conv1 layer") },
1186         { layersInGraph.at("Weights Layer 2"), layersInGraph.at("Bias Layer 2"), layersInGraph.at("conv2 layer") }
1187     };
1188 
1189     for (size_t substitutionIndex = 0; substitutionIndex < substitutions.size(); substitutionIndex++)
1190     {
1191         CheckSubstitution(substitutions.at(substitutionIndex),
1192                           expectedSubstitutableSubgraphSizes.at(substitutionIndex),
1193                           expectedReplacementSubgraphSizes.at(substitutionIndex),
1194                           expectedSubstitutableInputSlots.at(substitutionIndex),
1195                           expectedSubstitutableOutputSlots.at(substitutionIndex),
1196                           expectedSubstitutableLayers.at(substitutionIndex));
1197     }
1198 
1199     // --------------------------
1200     // Check the failed subgraphs
1201     // --------------------------
1202 
1203     OptimizationViews::Subgraphs failedSubgraphs = optimizationViews.GetFailedSubgraphs();
1204     CHECK(failedSubgraphs.size() == 2);
1205     // Sort into a consistent order
1206     std::sort(failedSubgraphs.begin(), failedSubgraphs.end(), [](auto s1, auto s2) {
1207         return strcmp(s1.GetIConnectableLayers().front()->GetName(),
1208                       s2.GetIConnectableLayers().front()->GetName()) < 0;
1209     });
1210 
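    // Two failed subgraphs are expected: the adjacent unsupported pooling1 and pooling2 layers grouped together,
    // and pooling3 on its own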
1211     std::vector<ExpectedSubgraphSize> expectedFailedSubgraphSizes{ { 1, 1, 2 },
1212                                                                    { 1, 1, 1 } };
1213     std::vector<SubgraphView::IInputSlots> expectedFailedInputSlots
1214     {
1215         ConvertSlotsToISlots<InputSlot, IInputSlot>(
1216         ConvertReferenceTypeToPointerType(layersInGraph.at("pooling1 layer")->GetInputSlots())),
1217         ConvertSlotsToISlots<InputSlot, IInputSlot>(
1218         ConvertReferenceTypeToPointerType(layersInGraph.at("pooling3 layer")->GetInputSlots()))
1219     };
1220     std::vector<SubgraphView::IOutputSlots> expectedFailedOutputSlots
1221     {
1222         ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
1223         ConvertReferenceTypeToPointerType(layersInGraph.at("pooling2 layer")->GetOutputSlots())),
1224         ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
1225         ConvertReferenceTypeToPointerType(layersInGraph.at("pooling3 layer")->GetOutputSlots()))
1226     };
1227     std::vector<SubgraphView::IConnectableLayers> expectedFailedLayers
1228     {
1229         { layersInGraph.at("pooling1 layer"),
1230           layersInGraph.at("pooling2 layer") },
1231         { layersInGraph.at("pooling3 layer") }
1232     };
1233 
1234     for (size_t failedIndex = 0; failedIndex < failedSubgraphs.size(); failedIndex++)
1235     {
1236         CheckFailedSubgraph(failedSubgraphs.at(failedIndex),
1237                             expectedFailedSubgraphSizes.at(failedIndex),
1238                             expectedFailedInputSlots.at(failedIndex),
1239                             expectedFailedOutputSlots.at(failedIndex),
1240                             expectedFailedLayers.at(failedIndex));
1241     }
1242 
1243     // -----------------------------
1244     // Check the untouched subgraphs
1245     // -----------------------------
1246 
1247     CHECK(optimizationViews.GetUntouchedSubgraphs().empty());
1248 }
1249 
1250 // The input subgraph contains only unoptimizable layers ("unoptimizable" is added to the layer's name)
1251 void FullyUnoptimizableSubgraphTestImpl1()
1252 {
1253     Graph graph;
1254     LayerNameToLayerMap layersInGraph;
1255 
1256     // Create a fully unoptimizable subgraph
1257     SubgraphView::SubgraphViewPtr subgraphPtr = BuildFullyUnoptimizableSubgraph1(graph, layersInGraph);
1258     CHECK((subgraphPtr != nullptr));
1259 
1260     const SubgraphView::IInputSlots& subgraphInputSlots = subgraphPtr->GetIInputSlots();
1261     const SubgraphView::IOutputSlots& subgraphOutputSlots = subgraphPtr->GetIOutputSlots();
1262     const SubgraphView::IConnectableLayers& subgraphLayers = subgraphPtr->GetIConnectableLayers();
1263 
1264     CHECK(subgraphInputSlots.size()  == 1);
1265     CHECK(subgraphOutputSlots.size() == 1);
1266     CHECK(subgraphLayers.size()      == 3);
1267 
1268     CHECK(Contains(layersInGraph, "conv layer unoptimizable"));
1269 
1270     // Create a mock backend object
1271     MockBackendInitialiser initialiser; // Register the Mock Backend
1272     auto backendObjPtr = CreateBackendObject(MockBackendId());
1273     CHECK((backendObjPtr != nullptr));
1274 
1275     // Optimize the subgraph
1276     OptimizationViews optimizationViews;
1277 
1278     // Check that the optimization is carried out correctly
1279     CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
1280 
1281     // ============================================================================
1282     // The expected results are:
1283     //  - No substitutions
1284     //  - No failed subgraphs
1285     //  - Exactly one untouched subgraph, corresponding to the whole input subgraph
1286     // ============================================================================
1287 
1288     // -----------------------
1289     // Check the substitutions
1290     // -----------------------
1291 
1292     CHECK(optimizationViews.GetSubstitutions().empty());
1293 
1294     // --------------------------
1295     // Check the failed subgraphs
1296     // --------------------------
1297 
1298     CHECK(optimizationViews.GetFailedSubgraphs().empty());
1299 
1300     // -----------------------------
1301     // Check the untouched subgraphs
1302     // -----------------------------
1303 
1304     const OptimizationViews::Subgraphs& untouchedSubgraphs = optimizationViews.GetUntouchedSubgraphs();
1305     CHECK(untouchedSubgraphs.size() == 1);
1306 
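    // The single untouched subgraph is expected to be identical to the whole input subgraph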
1307     CheckUntouchedSubgraph(untouchedSubgraphs.at(0),
1308                            {subgraphInputSlots.size(),
1309                             subgraphOutputSlots.size(), subgraphLayers.size()},
1310                            subgraphInputSlots, subgraphOutputSlots,
1311                            subgraphLayers);
1312 }
1313 
1314 // The input subgraph contains some unoptimizable layers ("unoptimizable" is added to the layer's name)
1315 void PartiallyOptimizableSubgraphTestImpl1()
1316 {
1317     Graph graph;
1318     LayerNameToLayerMap layersInGraph;
1319 
1320     // Create a partially optimizable subgraph
1321     SubgraphView::SubgraphViewPtr subgraphPtr = BuildPartiallyOptimizableSubgraph1(graph, layersInGraph);
1322     CHECK((subgraphPtr != nullptr));
1323 
1324     const SubgraphView::IInputSlots& subgraphInputSlots = subgraphPtr->GetIInputSlots();
1325     const SubgraphView::IOutputSlots& subgraphOutputSlots = subgraphPtr->GetIOutputSlots();
1326     const SubgraphView::IConnectableLayers& subgraphLayers = subgraphPtr->GetIConnectableLayers();
1327 
1328     CHECK(subgraphInputSlots.size()  == 1);
1329     CHECK(subgraphOutputSlots.size() == 1);
1330     CHECK(subgraphLayers.size()      == 15);
1331 
1332     CHECK(Contains(layersInGraph, "conv1 layer"));
1333     CHECK(Contains(layersInGraph, "conv2 layer unoptimizable"));
1334     CHECK(Contains(layersInGraph, "conv3 layer"));
1335     CHECK(Contains(layersInGraph, "conv4 layer unoptimizable"));
1336     CHECK(Contains(layersInGraph, "conv5 layer"));
1337 
1338     // Create a mock backend object
1339     MockBackendInitialiser initialiser; // Register the Mock Backend
1340     auto backendObjPtr = CreateBackendObject(MockBackendId());
1341     CHECK((backendObjPtr != nullptr));
1342 
1343     // Optimize the subgraph
1344     OptimizationViews optimizationViews;
1345 
1346     // Check that the optimization is carried out correctly
1347     CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
1348 
1349     // ===============================================================================
1350     // The expected results are:
1351     //  - Exactly three substitutions, corresponding to the optimizable layers
1352     //  - No failed subgraphs
1353     //  - Exactly two untouched subgraphs, corresponding to the non-optimizable layers
1354     // ===============================================================================
1355 
1356     // -----------------------
1357     // Check the substitutions
1358     // -----------------------
1359 
1360     OptimizationViews::Substitutions substitutions = optimizationViews.GetSubstitutions();
1361     CHECK(substitutions.size() == 3);
1362     // Sort into a consistent order
1363     std::sort(substitutions.begin(), substitutions.end(),
1364         [](auto s1, auto s2)
1365         { return strcmp(s1.m_SubstitutableSubgraph.GetIConnectableLayers().front()->GetName(),
1366                         s2.m_SubstitutableSubgraph.GetIConnectableLayers().front()->GetName()) < 0; });
1367 
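    // One substitution is expected per supported convolution (conv1, conv3, conv5), each taking its
    // constant weight and bias layers with it and collapsing into a single replacement layer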
1368     std::vector<ExpectedSubgraphSize> expectedSubstitutableSubgraphSizes{ { 1, 1, 3 },
1369                                                                           { 1, 1, 3 },
1370                                                                           { 1, 1, 3 } };
1371     std::vector<ExpectedSubgraphSize> expectedReplacementSubgraphSizes{ { 1, 1, 1 },
1372                                                                         { 1, 1, 1 },
1373                                                                         { 1, 1, 1 } };
1374     std::vector<SubgraphView::IInputSlots> expectedSubstitutableInputSlots
1375     {
1376         ConvertSlotsToISlots<InputSlot, IInputSlot>(
1377             {ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetInputSlot(0))}),
1378         ConvertSlotsToISlots<InputSlot, IInputSlot>(
1379         {ConvertReferenceTypeToPointerType(layersInGraph.at("conv3 layer")->GetInputSlot(0))}),
1380         ConvertSlotsToISlots<InputSlot, IInputSlot>(
1381         {ConvertReferenceTypeToPointerType(layersInGraph.at("conv5 layer")->GetInputSlot(0))})
1382     };
1383     std::vector<SubgraphView::IOutputSlots> expectedSubstitutableOutputSlots
1384     {
1385         ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
1386         ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetOutputSlots())),
1387         ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
1388         ConvertReferenceTypeToPointerType(layersInGraph.at("conv3 layer")->GetOutputSlots())),
1389         ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
1390         ConvertReferenceTypeToPointerType(layersInGraph.at("conv5 layer")->GetOutputSlots()))
1391     };
1392     std::vector<SubgraphView::IConnectableLayers> expectedSubstitutableLayers
1393     {
1394         { layersInGraph.at("Weights Layer 1"), layersInGraph.at("Bias Layer 1"), layersInGraph.at("conv1 layer") },
1395         { layersInGraph.at("Weights Layer 3"), layersInGraph.at("Bias Layer 3"), layersInGraph.at("conv3 layer") },
1396         { layersInGraph.at("Weights Layer 5"), layersInGraph.at("Bias Layer 5"), layersInGraph.at("conv5 layer") }
1397     };
1398 
1399     for (size_t substitutionIndex = 0; substitutionIndex < substitutions.size(); substitutionIndex++)
1400     {
1401         CheckSubstitution(substitutions.at(substitutionIndex),
1402                           expectedSubstitutableSubgraphSizes.at(substitutionIndex),
1403                           expectedReplacementSubgraphSizes.at(substitutionIndex),
1404                           expectedSubstitutableInputSlots.at(substitutionIndex),
1405                           expectedSubstitutableOutputSlots.at(substitutionIndex),
1406                           expectedSubstitutableLayers.at(substitutionIndex));
1407     }
1408 
1409     // --------------------------
1410     // Check the failed subgraphs
1411     // --------------------------
1412 
1413     CHECK(optimizationViews.GetFailedSubgraphs().empty());
1414 
1415     // -----------------------------
1416     // Check the untouched subgraphs
1417     // -----------------------------
1418 
1419     OptimizationViews::Subgraphs untouchedSubgraphs = optimizationViews.GetUntouchedSubgraphs();
1420     CHECK(untouchedSubgraphs.size() == 2);
1421     // Sort into a consistent order
1422     std::sort(untouchedSubgraphs.begin(), untouchedSubgraphs.end(), [](auto s1, auto s2) {
1423         return strcmp(s1.GetIConnectableLayers().front()->GetName(),
1424                       s2.GetIConnectableLayers().front()->GetName()) < 0;
1425     });
1426 
1427     std::vector<ExpectedSubgraphSize> expectedUntouchedSubgraphSizes{ { 1, 1, 3 },
1428                                                                       { 1, 1, 3 } };
1429     std::vector<SubgraphView::IInputSlots> expectedUntouchedInputSlots{
1430         ConvertSlotsToISlots<InputSlot,
1431                              IInputSlot>({ConvertReferenceTypeToPointerType(
1432             layersInGraph.at("conv2 layer unoptimizable")->GetInputSlot(0))}),
1433         ConvertSlotsToISlots<InputSlot,
1434                              IInputSlot>({ConvertReferenceTypeToPointerType(
1435             layersInGraph.at("conv4 layer unoptimizable")->GetInputSlot(0))})};
1436 
1437     std::vector<SubgraphView::IOutputSlots> expectedUntouchedOutputSlots
1438         {
1439             ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
1440                 ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer unoptimizable")->GetOutputSlots())),
1441             ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
1442                 ConvertReferenceTypeToPointerType(layersInGraph.at("conv4 layer unoptimizable")->GetOutputSlots()))
1443         };
1444 
1445     std::vector<SubgraphView::IConnectableLayers> expectedUntouchedLayers
1446         {
1447             { layersInGraph.at("Weights Layer 2 unoptimizable"),
1448               layersInGraph.at("Bias Layer 2 unoptimizable"),
1449               layersInGraph.at("conv2 layer unoptimizable") },
1450             { layersInGraph.at("Weights Layer 4 unoptimizable"),
1451               layersInGraph.at("Bias Layer 4 unoptimizable"),
1452               layersInGraph.at("conv4 layer unoptimizable") }
1453         };
1454 
1455     for (size_t untouchedIndex = 0; untouchedIndex < untouchedSubgraphs.size(); untouchedIndex++)
1456     {
1457         CheckUntouchedSubgraph(untouchedSubgraphs.at(untouchedIndex),
1458                                expectedUntouchedSubgraphSizes.at(untouchedIndex),
1459                                expectedUntouchedInputSlots.at(untouchedIndex),
1460                                expectedUntouchedOutputSlots.at(untouchedIndex),
1461                                expectedUntouchedLayers.at(untouchedIndex));
1462     }
1463 }
1464 
1465 // The input subgraph contains some unoptimizable layers ("unoptimizable" is added to the layer's name),
1466 // this is meant to test input slots coming from different layers
1467 void PartiallyOptimizableSubgraphTestImpl2()
1468 {
1469     Graph graph;
1470     LayerNameToLayerMap layersInGraph;
1471 
1472     // Create a partially optimizable subgraph
1473     SubgraphView::SubgraphViewPtr subgraphPtr = BuildPartiallyOptimizableSubgraph2(graph, layersInGraph);
1474     CHECK((subgraphPtr != nullptr));
1475 
1476     const SubgraphView::IInputSlots& subgraphInputSlots = subgraphPtr->GetIInputSlots();
1477     const SubgraphView::IOutputSlots& subgraphOutputSlots = subgraphPtr->GetIOutputSlots();
1478     const SubgraphView::IConnectableLayers& subgraphLayers = subgraphPtr->GetIConnectableLayers();
1479 
1480     CHECK(subgraphInputSlots.size()  == 2);
1481     CHECK(subgraphOutputSlots.size() == 1);
1482     CHECK(subgraphLayers.size()      == 10);
1483 
1484     CHECK(Contains(layersInGraph, "conv1 layer"));
1485     CHECK(Contains(layersInGraph, "conv2 layer unoptimizable"));
1486     CHECK(Contains(layersInGraph, "conv3 layer"));
1487     CHECK(Contains(layersInGraph, "add layer"));
1488 
1489     // Create a mock backend object
1490     MockBackendInitialiser initialiser; // Register the Mock Backend
1491     auto backendObjPtr = CreateBackendObject(MockBackendId());
1492     CHECK((backendObjPtr != nullptr));
1493 
1494     // Optimize the subgraph
1495     OptimizationViews optimizationViews;
1496 
1497     // Check that the optimization is carried out correctly
1498     CHECK_NOTHROW(optimizationViews = backendObjPtr->OptimizeSubgraphView(*subgraphPtr));
1499 
1500     // ==============================================================================
1501     // The expected results are:
1502     //  - Exactly one substitution, corresponding to the optimizable layers
1503     //  - No failed subgraphs
1504     //  - Exactly one untouched subgraph, corresponding to the non-optimizable layer
1505     // ==============================================================================
1506 
1507     // -----------------------
1508     // Check the substitutions
1509     // -----------------------
1510 
1511     const OptimizationViews::Substitutions& substitutions = optimizationViews.GetSubstitutions();
1512     CHECK(substitutions.size() == 1);
1513 
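    // The single substitution covers both branches feeding the addition: conv1 and conv3 with their constant
    // weight and bias layers, plus the add layer, collapsed into one replacement layer with two inputs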
1514     ExpectedSubgraphSize expectedSubstitutableSubgraphSizes{ 2, 1, 7 };
1515     ExpectedSubgraphSize expectedReplacementSubgraphSizes{ 2, 1, 1 };
1516 
1517     SubgraphView::IInputSlots expectedSubstitutableInputSlots
1518     {
1519             ConvertSlotsToISlots<InputSlot, IInputSlot>({
1520                     ConvertReferenceTypeToPointerType(layersInGraph.at("conv1 layer")->GetInputSlots()[0])})[0],
1521             ConvertSlotsToISlots<InputSlot, IInputSlot>({
1522                     ConvertReferenceTypeToPointerType(layersInGraph.at("conv3 layer")->GetInputSlots()[0])})[0]
1523     };
1524 
1525     SubgraphView::IOutputSlots expectedSubstitutableOutputSlots
1526     {
1527             ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
1528                     ConvertReferenceTypeToPointerType(layersInGraph.at("add layer")->GetOutputSlots()))
1529     };
1530 
1531     SubgraphView::IConnectableLayers expectedSubstitutableLayers
1532     {
1533         layersInGraph.at("Weights Layer 1"),
1534         layersInGraph.at("Weights Layer 3"),
1535         layersInGraph.at("Bias Layer 1"),
1536         layersInGraph.at("Bias Layer 3"),
1537         layersInGraph.at("conv1 layer"),
1538         layersInGraph.at("conv3 layer"),
1539         layersInGraph.at("add layer")
1540     };
1541 
1542     CheckSubstitution(substitutions[0],
1543                       expectedSubstitutableSubgraphSizes,
1544                       expectedReplacementSubgraphSizes,
1545                       expectedSubstitutableInputSlots,
1546                       expectedSubstitutableOutputSlots,
1547                       expectedSubstitutableLayers);
1548 
1549     // --------------------------
1550     // Check the failed subgraphs
1551     // --------------------------
1552 
1553     CHECK(optimizationViews.GetFailedSubgraphs().empty());
1554 
1555     // -----------------------------
1556     // Check the untouched subgraphs
1557     // -----------------------------
1558 
1559     const OptimizationViews::Subgraphs& untouchedSubgraphs = optimizationViews.GetUntouchedSubgraphs();
1560     CHECK(untouchedSubgraphs.size() == 1);
1561 
1562     std::vector<ExpectedSubgraphSize> expectedUntouchedSubgraphSizes{ { 1, 1, 3 } };
1563     std::vector<SubgraphView::IInputSlots> expectedUntouchedInputSlots
1564     {
1565         ConvertSlotsToISlots<InputSlot,
1566                              IInputSlot>({ConvertReferenceTypeToPointerType(
1567             layersInGraph.at("conv2 layer unoptimizable")->GetInputSlot(0))})};
1568     std::vector<SubgraphView::IOutputSlots> expectedUntouchedOutputSlots
1569     {
1570             ConvertSlotsToISlots<OutputSlot, IOutputSlot>(
1571         ConvertReferenceTypeToPointerType(layersInGraph.at("conv2 layer unoptimizable")->GetOutputSlots()))
1572     };
1573     std::vector<SubgraphView::IConnectableLayers> expectedUntouchedLayers
1574     {
1575         { layersInGraph.at("conv2 layer unoptimizable"), layersInGraph.at("Weights Layer 2 unoptimizable"),
1576         layersInGraph.at("Bias Layer 2 unoptimizable") }
1577     };
1578 
1579     for (size_t untouchedIndex = 0; untouchedIndex < untouchedSubgraphs.size(); untouchedIndex++)
1580     {
1581         CheckUntouchedSubgraph(untouchedSubgraphs.at(untouchedIndex),
1582                                expectedUntouchedSubgraphSizes.at(untouchedIndex),
1583                                expectedUntouchedInputSlots.at(untouchedIndex),
1584                                expectedUntouchedOutputSlots.at(untouchedIndex),
1585                                expectedUntouchedLayers.at(untouchedIndex));
1586     }
1587 }
1588 
1589 } // Anonymous namespace
1590 
1591 TEST_SUITE("OptimizeSubGraph")
1592 {
1593 TEST_CASE("FullyUnsupportedSubgraph1")     { FullyUnsupporteSubgraphTestImpl1();      }
1594 TEST_CASE("FullyUnsupportedSubgraph2")     { FullyUnsupporteSubgraphTestImpl2();      }
1595 TEST_CASE("FullyOptimizableSubgraph1")     { FullyOptimizableSubgraphTestImpl1();     }
1596 TEST_CASE("FullyOptimizableSubgraph2")     { FullyOptimizableSubgraphTestImpl2();     }
1597 TEST_CASE("PartiallySupportedSubgraph")    { PartiallySupportedSubgraphTestImpl();    }
1598 TEST_CASE("FullyUnoptimizableSubgraph")    { FullyUnoptimizableSubgraphTestImpl1();   }
1599 TEST_CASE("PartiallyOptimizableSubgraph1") { PartiallyOptimizableSubgraphTestImpl1(); }
1600 TEST_CASE("PartiallyOptimizableSubgraph2") { PartiallyOptimizableSubgraphTestImpl2(); }
1601 
1602 }
1603