Lines Matching full:layer
32 // Calls CreateWorkload for a layer, and checks the returned pointer is of the correct type.
34 std::unique_ptr<Workload> MakeAndCheckWorkload(Layer& layer, in MakeAndCheckWorkload() argument
38 std::unique_ptr<IWorkload> workload = layer.CreateWorkload(factory);
42 layer.SetBackendId(factory.GetBackendId());
43 CHECK(factory.IsLayerSupported(layer, layer.GetDataType(), reasonIfUnsupported, modelOptions));
52 for (auto&& layer : graph.TopologicalSort()) in CreateTensorHandles()
54 layer->CreateTensorHandles(tmpRegistry, factory); in CreateTensorHandles()
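The fragments at lines 32-54 only show parts of the two shared helpers. Below is a minimal sketch of how they plausibly fit together, reconstructed purely from the calls visible in this listing (and assuming the file's `using namespace armnn`); the parameter types (IWorkloadFactory, ModelOptions, TensorHandleFactoryRegistry) and the dynamic_cast type check are assumptions, not the file's actual definitions.

    // Sketch only: reconstructed from the fragments above, not copied from the file.
    template <typename Workload>
    std::unique_ptr<Workload> MakeAndCheckWorkload(Layer& layer,
                                                   const IWorkloadFactory& factory,
                                                   const ModelOptions& modelOptions = {})
    {
        std::unique_ptr<IWorkload> workload = layer.CreateWorkload(factory);
        // "Checks the returned pointer is of the correct type" (line 32); a plain
        // dynamic_cast stands in here for whatever check the real helper performs.
        CHECK(dynamic_cast<Workload*>(workload.get()) != nullptr);
        std::string reasonIfUnsupported;
        layer.SetBackendId(factory.GetBackendId());
        CHECK(factory.IsLayerSupported(layer, layer.GetDataType(), reasonIfUnsupported, modelOptions));
        return std::unique_ptr<Workload>(static_cast<Workload*>(workload.release()));
    }

    // Gives every layer in the graph its tensor handles, in topological order (lines 52-54).
    void CreateTensorHandles(Graph& graph, IWorkloadFactory& factory)
    {
        TensorHandleFactoryRegistry tmpRegistry; // assumed type for the tmpRegistry seen on line 54
        for (auto&& layer : graph.TopologicalSort())
        {
            layer->CreateTensorHandles(tmpRegistry, factory);
        }
    }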
69 // Creates the layer we're testing. in CreateActivationWorkloadTest()
75 ActivationLayer* const layer = graph.AddLayer<ActivationLayer>(layerDesc, "layer"); in CreateActivationWorkloadTest() local
78 Layer* const input = graph.AddLayer<InputLayer>(0, "input"); in CreateActivationWorkloadTest()
79 Layer* const output = graph.AddLayer<OutputLayer>(0, "output"); in CreateActivationWorkloadTest()
84 Connect(input, layer, tensorInfo); in CreateActivationWorkloadTest()
85 Connect(layer, output, tensorInfo); in CreateActivationWorkloadTest()
90 auto workload = MakeAndCheckWorkload<ActivationWorkload>(*layer, factory); in CreateActivationWorkloadTest()
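Taken together, lines 69-90 form the template that every test in this file follows: build a small graph around the layer under test, connect the slots, then hand the layer to MakeAndCheckWorkload. A condensed sketch of that shape is given below; the descriptor value and tensor shape are placeholders, the CreateTensorHandles call is assumed from the helper sketched above, and unqualified names assume the file's `using namespace armnn`.

    template <typename ActivationWorkload, armnn::DataType DataType>
    std::unique_ptr<ActivationWorkload> CreateActivationWorkloadTest(IWorkloadFactory& factory,
                                                                     Graph& graph)
    {
        // Creates the layer we're testing.
        ActivationDescriptor layerDesc;
        layerDesc.m_Function = ActivationFunction::ReLu;          // placeholder function

        ActivationLayer* const layer = graph.AddLayer<ActivationLayer>(layerDesc, "layer");

        // Creates extra layers so the tested layer has a producer and a consumer.
        Layer* const input  = graph.AddLayer<InputLayer>(0, "input");
        Layer* const output = graph.AddLayer<OutputLayer>(0, "output");

        // Connects up the slots and records the tensor shape on each connection.
        TensorInfo tensorInfo({1, 1}, DataType);                  // placeholder shape
        Connect(input, layer, tensorInfo);
        Connect(layer, output, tensorInfo);
        CreateTensorHandles(graph, factory);

        // Makes the workload and checks it.
        return MakeAndCheckWorkload<ActivationWorkload>(*layer, factory);
    }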
110 // Creates the layer we're testing. in CreateElementwiseWorkloadTest()
111 Layer* const layer = graph.AddLayer<LayerType>("layer"); in CreateElementwiseWorkloadTest() local
114 Layer* const input1 = graph.AddLayer<InputLayer>(1, "input1"); in CreateElementwiseWorkloadTest()
115 Layer* const input2 = graph.AddLayer<InputLayer>(2, "input2"); in CreateElementwiseWorkloadTest()
116 Layer* const output = graph.AddLayer<OutputLayer>(0, "output"); in CreateElementwiseWorkloadTest()
120 Connect(input1, layer, tensorInfo, 0, 0); in CreateElementwiseWorkloadTest()
121 Connect(input2, layer, tensorInfo, 0, 1); in CreateElementwiseWorkloadTest()
122 Connect(layer, output, tensorInfo); in CreateElementwiseWorkloadTest()
126 auto workload = MakeAndCheckWorkload<WorkloadType>(*layer, factory); in CreateElementwiseWorkloadTest()
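From the elementwise tests onwards, Connect takes two extra arguments: the producing layer's output-slot index and the consuming layer's input-slot index, which is how input2 reaches the second input slot of the layer on line 121. The helper itself lives in the shared test utilities and is not part of this listing; a hypothetical sketch of what it presumably does (assuming `using namespace armnn`):

    // Hypothetical sketch of the Connect(...) helper used throughout these tests:
    // join the two slots, then record the TensorInfo on the producing output slot.
    void Connect(Layer* from, Layer* to, const TensorInfo& tensorInfo,
                 unsigned int fromOutputSlot = 0, unsigned int toInputSlot = 0)
    {
        from->GetOutputSlot(fromOutputSlot).Connect(to->GetInputSlot(toInputSlot));
        from->GetOutputSlot(fromOutputSlot).SetTensorInfo(tensorInfo);
    }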
141 // Creates the layer we're testing. in CreateElementwiseBinaryWorkloadTest()
145 Layer* const layer = graph.AddLayer<ElementwiseBinaryLayer>(descriptor, "layer"); in CreateElementwiseBinaryWorkloadTest() local
148 Layer* const input1 = graph.AddLayer<InputLayer>(1, "input1"); in CreateElementwiseBinaryWorkloadTest()
149 Layer* const input2 = graph.AddLayer<InputLayer>(2, "input2"); in CreateElementwiseBinaryWorkloadTest()
150 Layer* const output = graph.AddLayer<OutputLayer>(0, "output"); in CreateElementwiseBinaryWorkloadTest()
154 Connect(input1, layer, tensorInfo, 0, 0); in CreateElementwiseBinaryWorkloadTest()
155 Connect(input2, layer, tensorInfo, 0, 1); in CreateElementwiseBinaryWorkloadTest()
156 Connect(layer, output, tensorInfo); in CreateElementwiseBinaryWorkloadTest()
160 auto workload = MakeAndCheckWorkload<WorkloadType>(*layer, factory); in CreateElementwiseBinaryWorkloadTest()
176 // Creates the layer we're testing. in CreateSubtractionWithBlobWorkloadTest()
177 SubtractionLayer* const layer = graph.AddLayer<SubtractionLayer>("layer"); in CreateSubtractionWithBlobWorkloadTest() local
184 layer->SetAdditionalInfoForObject(activationDesc); in CreateSubtractionWithBlobWorkloadTest()
187 Layer* const input1 = graph.AddLayer<InputLayer>(1, "input1"); in CreateSubtractionWithBlobWorkloadTest()
188 Layer* const input2 = graph.AddLayer<InputLayer>(2, "input2"); in CreateSubtractionWithBlobWorkloadTest()
189 Layer* const output = graph.AddLayer<OutputLayer>(0, "output"); in CreateSubtractionWithBlobWorkloadTest()
193 Connect(input1, layer, tensorInfo, 0, 0); in CreateSubtractionWithBlobWorkloadTest()
194 Connect(input2, layer, tensorInfo, 0, 1); in CreateSubtractionWithBlobWorkloadTest()
195 Connect(layer, output, tensorInfo); in CreateSubtractionWithBlobWorkloadTest()
198 // Check that the additional information can be queried from the layer in CreateSubtractionWithBlobWorkloadTest()
200 activationDescPtr = layer->GetAdditionalInformation<ActivationDescriptor>(); in CreateSubtractionWithBlobWorkloadTest()
209 auto workload = MakeAndCheckWorkload<WorkloadType>(*layer, factory); in CreateSubtractionWithBlobWorkloadTest()
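The three "WithBlob" tests (subtraction, multiplication, addition) all exercise the same additional-information mechanism: an ActivationDescriptor is attached to the arithmetic layer with SetAdditionalInfoForObject and read back with GetAdditionalInformation<ActivationDescriptor>(). The listing never shows how that descriptor is constructed; the sketch below assumes it is held as a std::shared_ptr (as the return type at line 466 later in this listing suggests) and uses placeholder activation values.

    // Minimal sketch of the fused-activation "blob" round trip (values are placeholders).
    auto activationDesc = std::make_shared<ActivationDescriptor>();
    activationDesc->m_A        = 10.0f;
    activationDesc->m_B        = 5.0f;
    activationDesc->m_Function = ActivationFunction::BoundedReLu;

    SubtractionLayer* const layer = graph.AddLayer<SubtractionLayer>("layer");
    layer->SetAdditionalInfoForObject(activationDesc);

    // Check that the additional information can be queried from the layer (line 198).
    std::shared_ptr<ActivationDescriptor> activationDescPtr =
        layer->GetAdditionalInformation<ActivationDescriptor>();
    CHECK(activationDescPtr->m_Function == ActivationFunction::BoundedReLu);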
235 // Creates the layer we're testing. in CreateMultiplicationWithBlobWorkloadTest()
236 MultiplicationLayer* const layer = graph.AddLayer<MultiplicationLayer>("layer"); in CreateMultiplicationWithBlobWorkloadTest() local
243 layer->SetAdditionalInfoForObject(activationDesc); in CreateMultiplicationWithBlobWorkloadTest()
246 Layer* const input1 = graph.AddLayer<InputLayer>(1, "input1"); in CreateMultiplicationWithBlobWorkloadTest()
247 Layer* const input2 = graph.AddLayer<InputLayer>(2, "input2"); in CreateMultiplicationWithBlobWorkloadTest()
248 Layer* const output = graph.AddLayer<OutputLayer>(0, "output"); in CreateMultiplicationWithBlobWorkloadTest()
252 Connect(input1, layer, tensorInfo, 0, 0); in CreateMultiplicationWithBlobWorkloadTest()
253 Connect(input2, layer, tensorInfo, 0, 1); in CreateMultiplicationWithBlobWorkloadTest()
254 Connect(layer, output, tensorInfo); in CreateMultiplicationWithBlobWorkloadTest()
257 // Check that the additional information can be queried from the layer in CreateMultiplicationWithBlobWorkloadTest()
259 activationDescPtr = layer->GetAdditionalInformation<ActivationDescriptor>(); in CreateMultiplicationWithBlobWorkloadTest()
268 auto workload = MakeAndCheckWorkload<WorkloadType>(*layer, factory); in CreateMultiplicationWithBlobWorkloadTest()
291 // Creates the layer we're testing. in CreateAdditionWithBlobWorkloadTest()
292 AdditionLayer* const layer = graph.AddLayer<AdditionLayer>("layer"); in CreateAdditionWithBlobWorkloadTest() local
299 layer->SetAdditionalInfoForObject(activationDesc); in CreateAdditionWithBlobWorkloadTest()
302 Layer* const input1 = graph.AddLayer<InputLayer>(1, "input1"); in CreateAdditionWithBlobWorkloadTest()
303 Layer* const input2 = graph.AddLayer<InputLayer>(2, "input2"); in CreateAdditionWithBlobWorkloadTest()
304 Layer* const output = graph.AddLayer<OutputLayer>(0, "output"); in CreateAdditionWithBlobWorkloadTest()
308 Connect(input1, layer, tensorInfo, 0, 0); in CreateAdditionWithBlobWorkloadTest()
309 Connect(input2, layer, tensorInfo, 0, 1); in CreateAdditionWithBlobWorkloadTest()
310 Connect(layer, output, tensorInfo); in CreateAdditionWithBlobWorkloadTest()
313 // Check that the additional information can be queried from the layer in CreateAdditionWithBlobWorkloadTest()
315 activationDescPtr = layer->template GetAdditionalInformation<ActivationDescriptor>(); in CreateAdditionWithBlobWorkloadTest()
324 auto workload = MakeAndCheckWorkload<WorkloadType>(*layer, factory); in CreateAdditionWithBlobWorkloadTest()
349 Layer* const layer = graph.AddLayer<armnn::ElementwiseUnaryLayer>(desc, "layer"); in CreateElementwiseUnaryWorkloadTest() local
351 Layer* const input = graph.AddLayer<InputLayer>(0, "input"); in CreateElementwiseUnaryWorkloadTest()
352 Layer* const output = graph.AddLayer<OutputLayer>(0, "output"); in CreateElementwiseUnaryWorkloadTest()
355 Connect(input, layer, tensorInfo, 0, 0); in CreateElementwiseUnaryWorkloadTest()
356 Connect(layer, output, tensorInfo, 0, 0); in CreateElementwiseUnaryWorkloadTest()
359 auto workload = MakeAndCheckWorkload<WorkloadType>(*layer, factory); in CreateElementwiseUnaryWorkloadTest()
383 // Creates the layer we're testing. in CreateBatchNormalizationWorkloadTest()
388 BatchNormalizationLayer* const layer = graph.AddLayer<BatchNormalizationLayer>(layerDesc, "layer"); in CreateBatchNormalizationWorkloadTest() local
391 layer->m_Mean = std::make_unique<ScopedTensorHandle>(weightInfo); in CreateBatchNormalizationWorkloadTest()
392 layer->m_Variance = std::make_unique<ScopedTensorHandle>(weightInfo); in CreateBatchNormalizationWorkloadTest()
393 layer->m_Beta = std::make_unique<ScopedTensorHandle>(weightInfo); in CreateBatchNormalizationWorkloadTest()
394 layer->m_Gamma = std::make_unique<ScopedTensorHandle>(weightInfo); in CreateBatchNormalizationWorkloadTest()
395 layer->m_Mean->Allocate(); in CreateBatchNormalizationWorkloadTest()
396 layer->m_Variance->Allocate(); in CreateBatchNormalizationWorkloadTest()
397 layer->m_Beta->Allocate(); in CreateBatchNormalizationWorkloadTest()
398 layer->m_Gamma->Allocate(); in CreateBatchNormalizationWorkloadTest()
401 Layer* const input = graph.AddLayer<InputLayer>(0, "input"); in CreateBatchNormalizationWorkloadTest()
402 Layer* const output = graph.AddLayer<OutputLayer>(0, "output"); in CreateBatchNormalizationWorkloadTest()
406 Connect(input, layer, tensorInfo); in CreateBatchNormalizationWorkloadTest()
407 Connect(layer, output, tensorInfo); in CreateBatchNormalizationWorkloadTest()
411 auto workload = MakeAndCheckWorkload<BatchNormalizationWorkloadType>(*layer, factory); in CreateBatchNormalizationWorkloadTest()
441 // Creates the layer we're testing. in CreateBatchNormalizationWithBlobWorkloadTest()
446 BatchNormalizationLayer* const layer = graph.AddLayer<BatchNormalizationLayer>(layerDesc, "layer"); in CreateBatchNormalizationWithBlobWorkloadTest() local
449 layer->m_Mean = std::make_unique<ScopedTensorHandle>(weightInfo); in CreateBatchNormalizationWithBlobWorkloadTest()
450 layer->m_Variance = std::make_unique<ScopedTensorHandle>(weightInfo); in CreateBatchNormalizationWithBlobWorkloadTest()
451 layer->m_Beta = std::make_unique<ScopedTensorHandle>(weightInfo); in CreateBatchNormalizationWithBlobWorkloadTest()
452 layer->m_Gamma = std::make_unique<ScopedTensorHandle>(weightInfo); in CreateBatchNormalizationWithBlobWorkloadTest()
453 layer->m_Mean->Allocate(); in CreateBatchNormalizationWithBlobWorkloadTest()
454 layer->m_Variance->Allocate(); in CreateBatchNormalizationWithBlobWorkloadTest()
455 layer->m_Beta->Allocate(); in CreateBatchNormalizationWithBlobWorkloadTest()
456 layer->m_Gamma->Allocate(); in CreateBatchNormalizationWithBlobWorkloadTest()
463 layer->SetAdditionalInfoForObject(activationDesc); in CreateBatchNormalizationWithBlobWorkloadTest()
465 // Check that the additional information can be queried from the layer in CreateBatchNormalizationWithBlobWorkloadTest()
466 std::shared_ptr<ActivationDescriptor> activationDescPtr = layer->GetAdditionalInformation<ActivationDescriptor>(); in CreateBatchNormalizationWithBlobWorkloadTest()
474 Layer* const input = graph.AddLayer<InputLayer>(0, "input"); in CreateBatchNormalizationWithBlobWorkloadTest()
475 Layer* const output = graph.AddLayer<OutputLayer>(0, "output"); in CreateBatchNormalizationWithBlobWorkloadTest()
479 Connect(input, layer, tensorInfo); in CreateBatchNormalizationWithBlobWorkloadTest()
480 Connect(layer, output, tensorInfo); in CreateBatchNormalizationWithBlobWorkloadTest()
484 auto workload = MakeAndCheckWorkload<BatchNormalizationWorkloadType>(*layer, factory); in CreateBatchNormalizationWithBlobWorkloadTest()
513 // Creates the layer we're testing.
527 Convolution2dLayer* const layer = graph.AddLayer<Convolution2dLayer>(layerDesc, "layer"); local
537 Layer* const input = graph.AddLayer<InputLayer>(0, "input");
539 Layer* const output = graph.AddLayer<OutputLayer>(0, "output");
545 Connect(input, layer, TensorInfo(inputShape, DataType, inputsQScale));
546 Connect(weights, layer, weightsTensorInfo, 0, 1);
547 Connect(layer, output, TensorInfo(outputShape, DataType, outputQScale));
551 auto workload = MakeAndCheckWorkload<Convolution2dWorkload>(*layer, factory, modelOptions);
577 // Creates the layer we're testing.
591 Convolution2dLayer* const layer = graph.AddLayer<Convolution2dLayer>(layerDesc, "layer"); local
607 layer->SetAdditionalInfoForObject(activationDesc);
609 // Check that the additional information can be queried from the layer
610 std::shared_ptr<ActivationDescriptor> activationDescPtr = layer->GetAdditionalInformation<ActivationDescriptor>();
619 Layer* const input = graph.AddLayer<InputLayer>(0, "input");
622 Layer* const output = graph.AddLayer<OutputLayer>(0, "output");
630 Connect(input, layer, TensorInfo(inputShape, DataType, inputsQScale));
631 Connect(weights, layer, weightsTensorInfo, 0, 1);
632 Connect(bias, layer, biasTensorInfo, 0, 2);
633 Connect(layer, output, TensorInfo(outputShape, DataType, outputQScale));
637 auto workload = MakeAndCheckWorkload<Convolution2dWorkload>(*layer, factory, modelOptions);
670 // Creates the layer we're testing.
684 Convolution2dLayer* const layer = graph.AddLayer<Convolution2dLayer>(layerDesc, "layer"); local
697 Layer* const input = graph.AddLayer<InputLayer>(0, "input");
700 Layer* const output = graph.AddLayer<OutputLayer>(0, "output");
703 Connect(input, layer, TensorInfo(inputShape, DataType));
704 Connect(weights, layer, weightsTensorInfo, 0, 1);
705 Connect(bias, layer, biasTensorInfo, 0, 2);
706 Connect(layer, output, TensorInfo(outputShape, DataType, outputQScale));
710 auto workload = MakeAndCheckWorkload<Convolution2dWorkload>(*layer, factory, modelOptions);
740 LstmLayer* const layer = graph.AddLayer<LstmLayer>(layerDesc, "layer"); in CreateLstmWorkloadTest() local
746 layer->m_BasicParameters.m_InputToForgetWeights = std::make_unique<ScopedTensorHandle> in CreateLstmWorkloadTest()
748 layer->m_BasicParameters.m_InputToCellWeights = std::make_unique<ScopedTensorHandle> in CreateLstmWorkloadTest()
750 layer->m_BasicParameters.m_InputToOutputWeights = std::make_unique<ScopedTensorHandle> in CreateLstmWorkloadTest()
752 layer->m_BasicParameters.m_RecurrentToForgetWeights = std::make_unique<ScopedTensorHandle> in CreateLstmWorkloadTest()
754 layer->m_BasicParameters.m_RecurrentToCellWeights = std::make_unique<ScopedTensorHandle> in CreateLstmWorkloadTest()
756 layer->m_BasicParameters.m_RecurrentToOutputWeights = std::make_unique<ScopedTensorHandle> in CreateLstmWorkloadTest()
758 layer->m_BasicParameters.m_ForgetGateBias = std::make_unique<ScopedTensorHandle> in CreateLstmWorkloadTest()
760 layer->m_BasicParameters.m_CellBias = std::make_unique<ScopedTensorHandle> in CreateLstmWorkloadTest()
762 layer->m_BasicParameters.m_OutputGateBias = std::make_unique<ScopedTensorHandle> in CreateLstmWorkloadTest()
765 layer->m_BasicParameters.m_InputToForgetWeights->Allocate(); in CreateLstmWorkloadTest()
766 layer->m_BasicParameters.m_InputToCellWeights->Allocate(); in CreateLstmWorkloadTest()
767 layer->m_BasicParameters.m_InputToOutputWeights->Allocate(); in CreateLstmWorkloadTest()
768 layer->m_BasicParameters.m_RecurrentToForgetWeights->Allocate(); in CreateLstmWorkloadTest()
769 layer->m_BasicParameters.m_RecurrentToCellWeights->Allocate(); in CreateLstmWorkloadTest()
770 layer->m_BasicParameters.m_RecurrentToOutputWeights->Allocate(); in CreateLstmWorkloadTest()
771 layer->m_BasicParameters.m_ForgetGateBias->Allocate(); in CreateLstmWorkloadTest()
772 layer->m_BasicParameters.m_CellBias->Allocate(); in CreateLstmWorkloadTest()
773 layer->m_BasicParameters.m_OutputGateBias->Allocate(); in CreateLstmWorkloadTest()
778 layer->m_PeepholeParameters.m_CellToForgetWeights = std::make_unique<ScopedTensorHandle> in CreateLstmWorkloadTest()
780 layer->m_PeepholeParameters.m_CellToOutputWeights = std::make_unique<ScopedTensorHandle> in CreateLstmWorkloadTest()
782 layer->m_PeepholeParameters.m_CellToForgetWeights->Allocate(); in CreateLstmWorkloadTest()
783 layer->m_PeepholeParameters.m_CellToOutputWeights->Allocate(); in CreateLstmWorkloadTest()
787 Layer* const input = graph.AddLayer<InputLayer>(0, "input"); in CreateLstmWorkloadTest()
788 Layer* const outputStateIn = graph.AddLayer<InputLayer>(1, "outputStateIn"); in CreateLstmWorkloadTest()
789 Layer* const cellStateIn = graph.AddLayer<InputLayer>(2, "cellStateIn"); in CreateLstmWorkloadTest()
790 Layer* const scratchBuffer = graph.AddLayer<OutputLayer>(0, "scratchBuffer"); in CreateLstmWorkloadTest()
791 Layer* const outputStateOut = graph.AddLayer<OutputLayer>(1, "outputStateOut"); in CreateLstmWorkloadTest()
792 Layer* const cellStateOut = graph.AddLayer<OutputLayer>(2, "cellStateOut"); in CreateLstmWorkloadTest()
793 Layer* const output = graph.AddLayer<OutputLayer>(3, "output"); in CreateLstmWorkloadTest()
801 Connect(input, layer, lstmTensorInfo1, 0, 0); in CreateLstmWorkloadTest()
802 Connect(cellStateIn, layer, lstmTensorInfo2, 0, 1); in CreateLstmWorkloadTest()
803 Connect(outputStateIn, layer, lstmTensorInfo3, 0, 2); in CreateLstmWorkloadTest()
804 Connect(layer, scratchBuffer, lstmTensorInfoScratchBuff, 0, 0); in CreateLstmWorkloadTest()
805 Connect(layer, outputStateOut, lstmTensorInfo3, 1, 0); in CreateLstmWorkloadTest()
806 Connect(layer, cellStateOut, lstmTensorInfo2, 2, 0); in CreateLstmWorkloadTest()
807 Connect(layer, output, lstmTensorInfo3, 3, 0); in CreateLstmWorkloadTest()
812 auto workload = MakeAndCheckWorkload<LstmWorkload>(*layer, factory); in CreateLstmWorkloadTest()
832 auto layer = graph.AddLayer<QuantizedLstmLayer>("quantizedLstmlayer"); in CreateQuantizedLstmWorkloadTest() local
867 layer->m_QuantizedLstmParameters.m_InputToInputWeights = in CreateQuantizedLstmWorkloadTest()
869 layer->m_QuantizedLstmParameters.m_InputToForgetWeights = in CreateQuantizedLstmWorkloadTest()
871 layer->m_QuantizedLstmParameters.m_InputToCellWeights = in CreateQuantizedLstmWorkloadTest()
873 layer->m_QuantizedLstmParameters.m_InputToOutputWeights = in CreateQuantizedLstmWorkloadTest()
876 layer->m_QuantizedLstmParameters.m_RecurrentToInputWeights = in CreateQuantizedLstmWorkloadTest()
878 layer->m_QuantizedLstmParameters.m_RecurrentToForgetWeights = in CreateQuantizedLstmWorkloadTest()
880 layer->m_QuantizedLstmParameters.m_RecurrentToCellWeights = in CreateQuantizedLstmWorkloadTest()
882 layer->m_QuantizedLstmParameters.m_RecurrentToOutputWeights = in CreateQuantizedLstmWorkloadTest()
885 … layer->m_QuantizedLstmParameters.m_InputGateBias = std::make_unique<ScopedTensorHandle>(biasInfo); in CreateQuantizedLstmWorkloadTest()
886 …layer->m_QuantizedLstmParameters.m_ForgetGateBias = std::make_unique<ScopedTensorHandle>(biasInfo); in CreateQuantizedLstmWorkloadTest()
887 layer->m_QuantizedLstmParameters.m_CellBias = std::make_unique<ScopedTensorHandle>(biasInfo); in CreateQuantizedLstmWorkloadTest()
888 …layer->m_QuantizedLstmParameters.m_OutputGateBias = std::make_unique<ScopedTensorHandle>(biasInfo); in CreateQuantizedLstmWorkloadTest()
891 layer->m_QuantizedLstmParameters.m_InputToInputWeights->Allocate(); in CreateQuantizedLstmWorkloadTest()
892 layer->m_QuantizedLstmParameters.m_InputToForgetWeights->Allocate(); in CreateQuantizedLstmWorkloadTest()
893 layer->m_QuantizedLstmParameters.m_InputToCellWeights->Allocate(); in CreateQuantizedLstmWorkloadTest()
894 layer->m_QuantizedLstmParameters.m_InputToOutputWeights->Allocate(); in CreateQuantizedLstmWorkloadTest()
896 layer->m_QuantizedLstmParameters.m_RecurrentToInputWeights->Allocate(); in CreateQuantizedLstmWorkloadTest()
897 layer->m_QuantizedLstmParameters.m_RecurrentToForgetWeights->Allocate(); in CreateQuantizedLstmWorkloadTest()
898 layer->m_QuantizedLstmParameters.m_RecurrentToCellWeights->Allocate(); in CreateQuantizedLstmWorkloadTest()
899 layer->m_QuantizedLstmParameters.m_RecurrentToOutputWeights->Allocate(); in CreateQuantizedLstmWorkloadTest()
901 layer->m_QuantizedLstmParameters.m_InputGateBias->Allocate(); in CreateQuantizedLstmWorkloadTest()
902 layer->m_QuantizedLstmParameters.m_ForgetGateBias->Allocate(); in CreateQuantizedLstmWorkloadTest()
903 layer->m_QuantizedLstmParameters.m_CellBias->Allocate(); in CreateQuantizedLstmWorkloadTest()
904 layer->m_QuantizedLstmParameters.m_OutputGateBias->Allocate(); in CreateQuantizedLstmWorkloadTest()
907 Layer* const input = graph.AddLayer<InputLayer>(0, "input"); in CreateQuantizedLstmWorkloadTest()
908 Layer* const cellStateIn = graph.AddLayer<InputLayer>(1, "cellStateIn"); in CreateQuantizedLstmWorkloadTest()
909 Layer* const outputStateIn = graph.AddLayer<InputLayer>(2, "outputStateIn"); in CreateQuantizedLstmWorkloadTest()
911 Layer* const cellStateOut = graph.AddLayer<OutputLayer>(0, "cellStateOut"); in CreateQuantizedLstmWorkloadTest()
912 Layer* const outputStateOut = graph.AddLayer<OutputLayer>(1, "outputStateOut"); in CreateQuantizedLstmWorkloadTest()
931 Connect(input, layer, inputInfo, 0, 0); in CreateQuantizedLstmWorkloadTest()
932 Connect(cellStateIn, layer, cellStateInfo, 0, 1); in CreateQuantizedLstmWorkloadTest()
933 Connect(outputStateIn, layer, outputStateInfo, 0, 2); in CreateQuantizedLstmWorkloadTest()
935 Connect(layer, cellStateOut, cellStateInfo, 0, 0); in CreateQuantizedLstmWorkloadTest()
936 Connect(layer, outputStateOut, outputStateInfo, 1, 0); in CreateQuantizedLstmWorkloadTest()
940 // Create workload and check layer support in CreateQuantizedLstmWorkloadTest()
941 auto workload = MakeAndCheckWorkload<QuantizedLstmWorkload>(*layer, factory); in CreateQuantizedLstmWorkloadTest()
988 QLstmLayer* const layer = graph.AddLayer<QLstmLayer>(layerDesc, "qLstm"); in CreateQLstmWorkloadTest() local
1031 …layer->m_BasicParameters.m_InputToForgetWeights = std::make_unique<ScopedTensorHandle>(inputWeight… in CreateQLstmWorkloadTest()
1032 …layer->m_BasicParameters.m_InputToCellWeights = std::make_unique<ScopedTensorHandle>(inputWeightsI… in CreateQLstmWorkloadTest()
1033 …layer->m_BasicParameters.m_InputToOutputWeights = std::make_unique<ScopedTensorHandle>(inputWeight… in CreateQLstmWorkloadTest()
1035 layer->m_BasicParameters.m_RecurrentToForgetWeights = in CreateQLstmWorkloadTest()
1037 layer->m_BasicParameters.m_RecurrentToCellWeights = in CreateQLstmWorkloadTest()
1039 layer->m_BasicParameters.m_RecurrentToOutputWeights = in CreateQLstmWorkloadTest()
1042 layer->m_BasicParameters.m_ForgetGateBias = std::make_unique<ScopedTensorHandle>(biasInfo); in CreateQLstmWorkloadTest()
1043 layer->m_BasicParameters.m_CellBias = std::make_unique<ScopedTensorHandle>(biasInfo); in CreateQLstmWorkloadTest()
1044 layer->m_BasicParameters.m_OutputGateBias = std::make_unique<ScopedTensorHandle>(biasInfo); in CreateQLstmWorkloadTest()
1046 layer->m_LayerNormParameters.m_ForgetLayerNormWeights = in CreateQLstmWorkloadTest()
1048 layer->m_LayerNormParameters.m_CellLayerNormWeights = in CreateQLstmWorkloadTest()
1050 layer->m_LayerNormParameters.m_OutputLayerNormWeights = in CreateQLstmWorkloadTest()
1053 layer->m_BasicParameters.m_InputToForgetWeights->Allocate(); in CreateQLstmWorkloadTest()
1054 layer->m_BasicParameters.m_InputToCellWeights->Allocate(); in CreateQLstmWorkloadTest()
1055 layer->m_BasicParameters.m_InputToOutputWeights->Allocate(); in CreateQLstmWorkloadTest()
1057 layer->m_BasicParameters.m_RecurrentToForgetWeights->Allocate(); in CreateQLstmWorkloadTest()
1058 layer->m_BasicParameters.m_RecurrentToCellWeights->Allocate(); in CreateQLstmWorkloadTest()
1059 layer->m_BasicParameters.m_RecurrentToOutputWeights->Allocate(); in CreateQLstmWorkloadTest()
1061 layer->m_BasicParameters.m_ForgetGateBias->Allocate(); in CreateQLstmWorkloadTest()
1062 layer->m_BasicParameters.m_CellBias->Allocate(); in CreateQLstmWorkloadTest()
1063 layer->m_BasicParameters.m_OutputGateBias->Allocate(); in CreateQLstmWorkloadTest()
1065 layer->m_LayerNormParameters.m_ForgetLayerNormWeights->Allocate(); in CreateQLstmWorkloadTest()
1066 layer->m_LayerNormParameters.m_CellLayerNormWeights->Allocate(); in CreateQLstmWorkloadTest()
1067 layer->m_LayerNormParameters.m_OutputLayerNormWeights->Allocate(); in CreateQLstmWorkloadTest()
1070 Layer* const input = graph.AddLayer<InputLayer>(0, "input"); in CreateQLstmWorkloadTest()
1071 Layer* const outputStateIn = graph.AddLayer<InputLayer>(1, "outputStateIn"); in CreateQLstmWorkloadTest()
1072 Layer* const cellStateIn = graph.AddLayer<InputLayer>(2, "cellStateIn"); in CreateQLstmWorkloadTest()
1074 Layer* const outputStateOut = graph.AddLayer<OutputLayer>(0, "outputStateOut"); in CreateQLstmWorkloadTest()
1075 Layer* const cellStateOut = graph.AddLayer<OutputLayer>(1, "cellStateOut"); in CreateQLstmWorkloadTest()
1076 Layer* const output = graph.AddLayer<OutputLayer>(2, "output"); in CreateQLstmWorkloadTest()
1095 Connect(input, layer, inputInfo, 0, 0); in CreateQLstmWorkloadTest()
1096 Connect(outputStateIn, layer, outputStateInfo, 0, 1); in CreateQLstmWorkloadTest()
1097 Connect(cellStateIn, layer, cellStateInfo, 0, 2); in CreateQLstmWorkloadTest()
1099 Connect(layer, outputStateOut, outputStateInfo, 0, 0); in CreateQLstmWorkloadTest()
1100 Connect(layer, cellStateOut, cellStateInfo, 1, 0); in CreateQLstmWorkloadTest()
1101 Connect(layer, output, outputStateInfo, 2, 0); in CreateQLstmWorkloadTest()
1106 auto workload = MakeAndCheckWorkload<QLstmWorkload>(*layer, factory); in CreateQLstmWorkloadTest()
1132 // Creates the layer we're testing. in CreateDirectConvolution2dWorkloadTest()
1142 Convolution2dLayer* const layer = graph.AddLayer<Convolution2dLayer>(layerDesc, "layer"); in CreateDirectConvolution2dWorkloadTest() local
1155 Layer* const input = graph.AddLayer<InputLayer>(0, "input"); in CreateDirectConvolution2dWorkloadTest()
1158 Layer* const output = graph.AddLayer<OutputLayer>(0, "output"); in CreateDirectConvolution2dWorkloadTest()
1166 Connect(input, layer, TensorInfo({2, 3, 6, 6}, DataType, inputsQScale)); in CreateDirectConvolution2dWorkloadTest()
1167 Connect(weights, layer, weightsTensorInfo, 0, 1); in CreateDirectConvolution2dWorkloadTest()
1168 Connect(bias, layer, biasTensorInfo, 0, 2); in CreateDirectConvolution2dWorkloadTest()
1169 Connect(layer, output, TensorInfo({2, 2, 6, 6}, DataType, outputQScale)); in CreateDirectConvolution2dWorkloadTest()
1173 auto workload = MakeAndCheckWorkload<Convolution2dWorkload>(*layer, factory); in CreateDirectConvolution2dWorkloadTest()
1195 // Creates the layer we're testing. in CreateDepthwiseConvolution2dWorkloadTest()
1215 DepthwiseConvolution2dLayer* const layer = graph.AddLayer<DepthwiseConvolution2dLayer>(layerDesc, "layer"); in CreateDepthwiseConvolution2dWorkloadTest() local
1219 Layer* const input = graph.AddLayer<InputLayer>(0, "input"); in CreateDepthwiseConvolution2dWorkloadTest()
1220 Layer* const weights = graph.AddLayer<ConstantLayer>("weights"); in CreateDepthwiseConvolution2dWorkloadTest()
1221 Layer* const output = graph.AddLayer<OutputLayer>(0, "output"); in CreateDepthwiseConvolution2dWorkloadTest()
1224 Connect(input, layer, TensorInfo(inputShape, DataType, inputsQScale)); in CreateDepthwiseConvolution2dWorkloadTest()
1225 Connect(weights, layer, TensorInfo(weightShape, DataType, inputsQScale, 0.0f, true), 0, 1); in CreateDepthwiseConvolution2dWorkloadTest()
1226 Connect(layer, output, TensorInfo(outputShape, DataType, outputQScale)); in CreateDepthwiseConvolution2dWorkloadTest()
1230 auto workload = MakeAndCheckWorkload<DepthwiseConvolution2dFloat32Workload>(*layer, factory); in CreateDepthwiseConvolution2dWorkloadTest()
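Unlike the earlier tests, the convolution tests feed weights (and biases) into extra input slots rather than into layer members: line 1220 creates a ConstantLayer for the weights and line 1225 connects it to input slot 1 with a TensorInfo whose last constructor argument marks the data as constant. A fragment-level sketch of that wiring, reusing the surrounding test's weightShape/inputsQScale/DataType and omitting whatever backing data the real test attaches to the constant layer:

    // Weights arrive through input slot 1 of the convolution layer.
    Layer* const weights = graph.AddLayer<ConstantLayer>("weights");
    TensorInfo weightsInfo(weightShape, DataType, inputsQScale, 0.0f, /*isConstant=*/true);
    Connect(weights, layer, weightsInfo, 0, 1);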
1253 // Creates the layer we're testing. in CreateFullyConnectedWorkloadTest()
1258 FullyConnectedLayer* const layer = graph.AddLayer<FullyConnectedLayer>(layerDesc, "layer"); in CreateFullyConnectedWorkloadTest() local
1267 Layer* const input = graph.AddLayer<InputLayer>(0, "input"); in CreateFullyConnectedWorkloadTest()
1269 Layer* const output = graph.AddLayer<OutputLayer>(0, "output"); in CreateFullyConnectedWorkloadTest()
1275 Connect(input, layer, TensorInfo({3, 1, 4, 5}, DataType, inputsQScale), 0, 0); in CreateFullyConnectedWorkloadTest()
1276 Connect(weights, layer, weightsTensorInfo, 0, 1); in CreateFullyConnectedWorkloadTest()
1277 Connect(layer, output, TensorInfo({3, 7}, DataType, outputQScale)); in CreateFullyConnectedWorkloadTest()
1281 auto workload = MakeAndCheckWorkload<FullyConnectedWorkload>(*layer, factory); in CreateFullyConnectedWorkloadTest()
1298 // Creates the layer we're testing. in CreateFullyConnectedWithBlobWorkloadTest()
1303 FullyConnectedLayer* const layer = graph.AddLayer<FullyConnectedLayer>(layerDesc, "layer"); in CreateFullyConnectedWithBlobWorkloadTest() local
1318 layer->SetAdditionalInfoForObject(activationDesc); in CreateFullyConnectedWithBlobWorkloadTest()
1320 // Check that the additional information can be queried from the layer in CreateFullyConnectedWithBlobWorkloadTest()
1321 std::shared_ptr<ActivationDescriptor> activationDescPtr = layer->GetAdditionalInformation<ActivationDescriptor>(); in CreateFullyConnectedWithBlobWorkloadTest()
1328 Layer* const input = graph.AddLayer<InputLayer>(0, "input"); in CreateFullyConnectedWithBlobWorkloadTest()
1331 Layer* const output = graph.AddLayer<OutputLayer>(0, "output"); in CreateFullyConnectedWithBlobWorkloadTest()
1339 Connect(input, layer, TensorInfo({3, 1, 4, 5}, DataType, inputsQScale), 0, 0); in CreateFullyConnectedWithBlobWorkloadTest()
1340 Connect(weights, layer, weightsTensorInfo, 0, 1); in CreateFullyConnectedWithBlobWorkloadTest()
1341 Connect(biases, layer, biasesTensorInfo, 0, 2); in CreateFullyConnectedWithBlobWorkloadTest()
1342 Connect(layer, output, TensorInfo({3, 7}, DataType, outputQScale)); in CreateFullyConnectedWithBlobWorkloadTest()
1346 auto workload = MakeAndCheckWorkload<FullyConnectedWorkload>(*layer, factory); in CreateFullyConnectedWithBlobWorkloadTest()
1373 // Creates the layer we're testing. in CreateFullyConnectedWorkloadWeightsBiasesAsInputsTest()
1379 FullyConnectedLayer* const layer = graph.AddLayer<FullyConnectedLayer>(layerDesc, "layer"); in CreateFullyConnectedWorkloadWeightsBiasesAsInputsTest() local
1385 Layer* const input = graph.AddLayer<InputLayer>(1, "input"); in CreateFullyConnectedWorkloadWeightsBiasesAsInputsTest()
1386 Layer* const weights = graph.AddLayer<InputLayer>(2, "weights"); in CreateFullyConnectedWorkloadWeightsBiasesAsInputsTest()
1387 Layer* const biases = graph.AddLayer<InputLayer>(3, "biases"); in CreateFullyConnectedWorkloadWeightsBiasesAsInputsTest()
1388 Layer* const output = graph.AddLayer<OutputLayer>(0, "output"); in CreateFullyConnectedWorkloadWeightsBiasesAsInputsTest()
1391 Connect(input, layer, TensorInfo({3, 1, 4, 5}, DataType, inputsQScale), 0, 0); in CreateFullyConnectedWorkloadWeightsBiasesAsInputsTest()
1392 Connect(weights, layer, TensorInfo({7, 20}, DataType, inputsQScale), 0, 1); in CreateFullyConnectedWorkloadWeightsBiasesAsInputsTest()
1393 Connect(biases, layer, TensorInfo({7}, GetBiasDataType(DataType), inputsQScale), 0, 2); in CreateFullyConnectedWorkloadWeightsBiasesAsInputsTest()
1394 Connect(layer, output, TensorInfo({3, 7}, DataType, outputQScale)); in CreateFullyConnectedWorkloadWeightsBiasesAsInputsTest()
1398 auto workload = MakeAndCheckWorkload<FullyConnectedWorkload>(*layer, factory); in CreateFullyConnectedWorkloadWeightsBiasesAsInputsTest()
1418 // Creates the layer we're testing. in CreateNormalizationWorkloadTest()
1428 NormalizationLayer* layer = graph.AddLayer<NormalizationLayer>(layerDesc, "layer"); in CreateNormalizationWorkloadTest() local
1431 Layer* const input = graph.AddLayer<InputLayer>(0, "input"); in CreateNormalizationWorkloadTest()
1432 Layer* const output = graph.AddLayer<OutputLayer>(0, "output"); in CreateNormalizationWorkloadTest()
1442 Connect(input, layer, inputTensorInfo); in CreateNormalizationWorkloadTest()
1443 Connect(layer, output, outputTensorInfo); in CreateNormalizationWorkloadTest()
1447 auto workload = MakeAndCheckWorkload<NormalizationWorkload>(*layer, factory); in CreateNormalizationWorkloadTest()
1470 // Creates the layer we're testing. in CreatePooling2dWorkloadTest()
1484 Pooling2dLayer* const layer = graph.AddLayer<Pooling2dLayer>(layerDesc, "layer"); in CreatePooling2dWorkloadTest() local
1487 Layer* const input = graph.AddLayer<InputLayer>(0, "input"); in CreatePooling2dWorkloadTest()
1488 Layer* const output = graph.AddLayer<OutputLayer>(0, "output"); in CreatePooling2dWorkloadTest()
1494 Connect(input, layer, TensorInfo(inputShape, DataType)); in CreatePooling2dWorkloadTest()
1495 Connect(layer, output, TensorInfo(outputShape, DataType)); in CreatePooling2dWorkloadTest()
1499 auto workload = MakeAndCheckWorkload<Pooling2dWorkload>(*layer, factory); in CreatePooling2dWorkloadTest()
1525 // Create the layer we're testing. in CreateSoftmaxWorkloadTest()
1533 Layer* const layer = graph.AddLayer<SoftmaxLayer>(softmaxDescriptor, "layer"); in CreateSoftmaxWorkloadTest() local
1535 Layer* const input = graph.AddLayer<InputLayer>(0, "input"); in CreateSoftmaxWorkloadTest()
1536 Layer* const output = graph.AddLayer<OutputLayer>(0, "output"); in CreateSoftmaxWorkloadTest()
1551 Connect(input, layer, tensorInfo); in CreateSoftmaxWorkloadTest()
1552 Connect(layer, output, tensorInfo); in CreateSoftmaxWorkloadTest()
1556 auto workload = MakeAndCheckWorkload<SoftmaxWorkload>(*layer, factory); in CreateSoftmaxWorkloadTest()
1570 // Create the layer we're testing. in CreateSplitterWorkloadTest()
1582 Layer* const layer = graph.AddLayer<SplitterLayer>(layerDesc, "layer"); in CreateSplitterWorkloadTest() local
1585 Layer* const input = graph.AddLayer<InputLayer>(0, "input"); in CreateSplitterWorkloadTest()
1586 Layer* const output0 = graph.AddLayer<OutputLayer>(0, "output0"); in CreateSplitterWorkloadTest()
1587 Layer* const output1 = graph.AddLayer<OutputLayer>(1, "output1"); in CreateSplitterWorkloadTest()
1588 Layer* const output2 = graph.AddLayer<OutputLayer>(2, "output2"); in CreateSplitterWorkloadTest()
1592 Connect(input, layer, tensorInfo); in CreateSplitterWorkloadTest()
1598 Connect(layer, output0, output0Info, 0, 0); in CreateSplitterWorkloadTest()
1599 Connect(layer, output1, output1Info, 1, 0); in CreateSplitterWorkloadTest()
1600 Connect(layer, output2, output2Info, 2, 0); in CreateSplitterWorkloadTest()
1605 auto workload = MakeAndCheckWorkload<SplitterWorkload>(*layer, factory); in CreateSplitterWorkloadTest()
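The splitter test is the first with several output slots: the single input is connected once, and each view of the split is then connected from its own output slot (0, 1, 2) to a separate OutputLayer. The ViewsDescriptor setup is not captured by this listing; the sketch below shows what it plausibly looks like, assuming the SetViewOriginCoord/SetViewSize setters and placeholder view shapes.

    // Sketch: describe three views over the input, then wire each splitter
    // output slot to its own OutputLayer (shapes are placeholders).
    ViewsDescriptor layerDesc(/*numViews=*/3, /*numDimensions=*/2);
    for (unsigned int view = 0; view < 3; ++view)
    {
        layerDesc.SetViewOriginCoord(view, 0, view);  // each view starts one row lower
        layerDesc.SetViewSize(view, 0, 1);
        layerDesc.SetViewSize(view, 1, 7);
    }

    Layer* const layer = graph.AddLayer<SplitterLayer>(layerDesc, "layer");
    Connect(input, layer, tensorInfo);
    Connect(layer, output0, output0Info, 0, 0);   // output slot 0 -> output0
    Connect(layer, output1, output1Info, 1, 0);   // output slot 1 -> output1
    Connect(layer, output2, output2Info, 2, 0);   // output slot 2 -> output2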
1637 Layer* const input = graph.AddLayer<InputLayer>(0, "input"); in CreateSplitterConcatWorkloadTest()
1650 // create splitter layer in CreateSplitterConcatWorkloadTest()
1651 Layer* const splitter = graph.AddLayer<SplitterLayer>(splitterViews, "splitter"); in CreateSplitterConcatWorkloadTest()
1665 // create concat layer in CreateSplitterConcatWorkloadTest()
1666 Layer* const concat = graph.AddLayer<ConcatLayer>(concatViews, "concat"); in CreateSplitterConcatWorkloadTest()
1669 Layer* const output = graph.AddLayer<OutputLayer>(0, "output"); in CreateSplitterConcatWorkloadTest()
1710 Layer* const input = graph.AddLayer<InputLayer>(0, "input"); in CreateSplitterMultipleInputsOneOutputWorkloadTest()
1724 Layer* const splitter = graph.AddLayer<SplitterLayer>(splitterViews, "splitter"); in CreateSplitterMultipleInputsOneOutputWorkloadTest()
1728 Layer* const activ0_0 = graph.AddLayer<ActivationLayer>(activationDesc, "activ0_0"); in CreateSplitterMultipleInputsOneOutputWorkloadTest()
1729 Layer* const activ0_1 = graph.AddLayer<ActivationLayer>(activationDesc, "activ0_1"); in CreateSplitterMultipleInputsOneOutputWorkloadTest()
1730 Layer* const activ1_0 = graph.AddLayer<ActivationLayer>(activationDesc, "activ1_0"); in CreateSplitterMultipleInputsOneOutputWorkloadTest()
1731 Layer* const activ1_1 = graph.AddLayer<ActivationLayer>(activationDesc, "activ1_1"); in CreateSplitterMultipleInputsOneOutputWorkloadTest()
1733 Layer* const output1 = graph.AddLayer<OutputLayer>(1, "output1"); in CreateSplitterMultipleInputsOneOutputWorkloadTest()
1734 Layer* const output2 = graph.AddLayer<OutputLayer>(2, "output2"); in CreateSplitterMultipleInputsOneOutputWorkloadTest()
1735 Layer* const output3 = graph.AddLayer<OutputLayer>(3, "output3"); in CreateSplitterMultipleInputsOneOutputWorkloadTest()
1736 Layer* const output4 = graph.AddLayer<OutputLayer>(4, "output4"); in CreateSplitterMultipleInputsOneOutputWorkloadTest()
1785 // Creates the layer we're testing. in CreateResizeBilinearWorkloadTest()
1792 Layer* const layer = graph.AddLayer<ResizeLayer>(resizeDesc, "resize"); in CreateResizeBilinearWorkloadTest() local
1795 Layer* const input = graph.AddLayer<InputLayer>(0, "input"); in CreateResizeBilinearWorkloadTest()
1796 Layer* const output = graph.AddLayer<OutputLayer>(0, "output"); in CreateResizeBilinearWorkloadTest()
1801 Connect(input, layer, inputTensorInfo); in CreateResizeBilinearWorkloadTest()
1802 Connect(layer, output, outputTensorInfo); in CreateResizeBilinearWorkloadTest()
1806 auto workload = MakeAndCheckWorkload<ResizeWorkload>(*layer, factory); in CreateResizeBilinearWorkloadTest()
1822 Layer* const layer = graph.AddLayer<BatchToSpaceNdLayer>(desc, "batchToSpace"); in CreateBatchToSpaceNdWorkloadTest() local
1825 Layer* const input = graph.AddLayer<InputLayer>(0, "input"); in CreateBatchToSpaceNdWorkloadTest()
1826 Layer* const output = graph.AddLayer<OutputLayer>(0, "output"); in CreateBatchToSpaceNdWorkloadTest()
1831 Connect(input, layer, tensorInfo); in CreateBatchToSpaceNdWorkloadTest()
1832 Connect(layer, output, tensorInfo); in CreateBatchToSpaceNdWorkloadTest()
1837 auto workload = MakeAndCheckWorkload<BatchToSpaceNdWorkload>(*layer, factory); in CreateBatchToSpaceNdWorkloadTest()
1850 // Create the layer we're testing. in CreateLogSoftmaxWorkloadTest()
1858 Layer* const layer = graph.AddLayer<LogSoftmaxLayer>(logSoftmaxDescriptor, "layer"); in CreateLogSoftmaxWorkloadTest() local
1860 Layer* const input = graph.AddLayer<InputLayer>(0, "input"); in CreateLogSoftmaxWorkloadTest()
1861 Layer* const output = graph.AddLayer<OutputLayer>(0, "output"); in CreateLogSoftmaxWorkloadTest()
1866 Connect(input, layer, tensorInfo); in CreateLogSoftmaxWorkloadTest()
1867 Connect(layer, output, tensorInfo); in CreateLogSoftmaxWorkloadTest()
1871 auto workload = MakeAndCheckWorkload<LogSoftmaxWorkload>(*layer, factory); in CreateLogSoftmaxWorkloadTest()
1885 // Creates the layer we're testing. in CreateL2NormalizationWorkloadTest()
1889 Layer* const layer = graph.AddLayer<L2NormalizationLayer>(layerDesc, "l2norm"); in CreateL2NormalizationWorkloadTest() local
1892 Layer* const input = graph.AddLayer<InputLayer>(0, "input"); in CreateL2NormalizationWorkloadTest()
1893 Layer* const output = graph.AddLayer<OutputLayer>(0, "output"); in CreateL2NormalizationWorkloadTest()
1903 Connect(input, layer, inputTensorInfo); in CreateL2NormalizationWorkloadTest()
1904 Connect(layer, output, outputTensorInfo); in CreateL2NormalizationWorkloadTest()
1908 auto workload = MakeAndCheckWorkload<L2NormalizationWorkload>(*layer, factory); in CreateL2NormalizationWorkloadTest()
1923 // Creates the layer we're testing. in CreateReshapeWorkloadTest()
1927 Layer* const layer = graph.AddLayer<ReshapeLayer>(reshapeDesc, "layer"); in CreateReshapeWorkloadTest() local
1930 Layer* const input = graph.AddLayer<InputLayer>(0, "input"); in CreateReshapeWorkloadTest()
1931 Layer* const output = graph.AddLayer<OutputLayer>(0, "output"); in CreateReshapeWorkloadTest()
1936 Connect(input, layer, inputTensorInfo); in CreateReshapeWorkloadTest()
1937 Connect(layer, output, outputTensorInfo); in CreateReshapeWorkloadTest()
1941 auto workload = MakeAndCheckWorkload<ReshapeWorkload>(*layer, factory); in CreateReshapeWorkloadTest()
1955 // Creates the layer we're testing. in CreateConvertFp16ToFp32WorkloadTest()
1956 ConvertFp16ToFp32Layer* const layer = graph.AddLayer<ConvertFp16ToFp32Layer>("Fp16ToFp32Converter"); in CreateConvertFp16ToFp32WorkloadTest() local
1959 Layer* const input = graph.AddLayer<InputLayer>(0, "input"); in CreateConvertFp16ToFp32WorkloadTest()
1960 Layer* const output = graph.AddLayer<OutputLayer>(0, "output"); in CreateConvertFp16ToFp32WorkloadTest()
1965 Connect(input, layer, inputTensorInfo); in CreateConvertFp16ToFp32WorkloadTest()
1966 Connect(layer, output, outputTensorInfo); in CreateConvertFp16ToFp32WorkloadTest()
1970 auto workload = MakeAndCheckWorkload<ConvertFp16ToFp32Float32Workload>(*layer, factory); in CreateConvertFp16ToFp32WorkloadTest()
1984 // Creates the layer we're testing. in CreateConvertFp32ToFp16WorkloadTest()
1985 ConvertFp32ToFp16Layer* const layer = graph.AddLayer<ConvertFp32ToFp16Layer>("Fp32ToFp16Converter"); in CreateConvertFp32ToFp16WorkloadTest() local
1988 Layer* const input = graph.AddLayer<InputLayer>(0, "input"); in CreateConvertFp32ToFp16WorkloadTest()
1989 Layer* const output = graph.AddLayer<OutputLayer>(0, "output"); in CreateConvertFp32ToFp16WorkloadTest()
1994 Connect(input, layer, inputTensorInfo); in CreateConvertFp32ToFp16WorkloadTest()
1995 Connect(layer, output, outputTensorInfo); in CreateConvertFp32ToFp16WorkloadTest()
1999 auto workload = MakeAndCheckWorkload<ConvertFp32ToFp16Float16Workload>(*layer, factory); in CreateConvertFp32ToFp16WorkloadTest()
2015 // Creates the layer we're testing. in CreateMeanWorkloadTest()
2016 Layer* const layer = graph.AddLayer<MeanLayer>(descriptor, "mean"); in CreateMeanWorkloadTest() local
2019 Layer* const input = graph.AddLayer<InputLayer>(0, "input"); in CreateMeanWorkloadTest()
2020 Layer* const output = graph.AddLayer<OutputLayer>(0, "output"); in CreateMeanWorkloadTest()
2025 Connect(input, layer, inputTensorInfo); in CreateMeanWorkloadTest()
2026 Connect(layer, output, outputTensorInfo); in CreateMeanWorkloadTest()
2030 auto workload = MakeAndCheckWorkload<MeanWorkload>(*layer, factory); in CreateMeanWorkloadTest()
2052 Layer* const input0 = graph.AddLayer<InputLayer>(0, "input0"); in CreateConcatWorkloadTest()
2053 Layer* const input1 = graph.AddLayer<InputLayer>(1, "input1"); in CreateConcatWorkloadTest()
2062 // create concat layer in CreateConcatWorkloadTest()
2063 Layer* const concat = graph.AddLayer<ConcatLayer>(descriptor, "concat"); in CreateConcatWorkloadTest()
2066 Layer* const output = graph.AddLayer<OutputLayer>(0, "output"); in CreateConcatWorkloadTest()
2097 // Add an input layer in CreatePreCompiledWorkloadTest()
2098 armnn::IConnectableLayer* const inputLayer = net->AddInputLayer(0, "input layer"); in CreatePreCompiledWorkloadTest()
2116 // Add a layer that can be used in the PreCompiled layer in CreatePreCompiledWorkloadTest()
2124 const std::string convLayerName("conv layer"); in CreatePreCompiledWorkloadTest()
2154 // Add an output layer in CreatePreCompiledWorkloadTest()
2155 armnn::IConnectableLayer* const outputLayer = net->AddOutputLayer(0, "output layer"); in CreatePreCompiledWorkloadTest()
2189 // Find the PreCompiled layer in the optimised graph in CreatePreCompiledWorkloadTest()
2191 Layer* preCompiledLayer = nullptr; in CreatePreCompiledWorkloadTest()
2192 for (auto& layer : optimisedGraph) in CreatePreCompiledWorkloadTest() local
2194 if (layer->GetType() == LayerType::PreCompiled) in CreatePreCompiledWorkloadTest()
2196 preCompiledLayer = layer; in CreatePreCompiledWorkloadTest()
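The pre-compiled test builds its network through the INetwork interface (AddInputLayer, a convolution, AddOutputLayer), optimises it, and then has to locate the resulting PreCompiled layer by scanning the optimised graph. Completing the loop from lines 2191-2196 with the obvious termination and check (the break and the final CHECK are assumptions, not visible in this listing):

    Layer* preCompiledLayer = nullptr;
    for (auto& layer : optimisedGraph)
    {
        if (layer->GetType() == LayerType::PreCompiled)
        {
            preCompiledLayer = layer;
            break;   // assumed: a single PreCompiled layer is expected
        }
    }
    CHECK(preCompiledLayer != nullptr);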
2224 // create constant layer in CreateConstantWorkloadTest()
2229 Layer* const output = graph.AddLayer<OutputLayer>(0, "output"); in CreateConstantWorkloadTest()
2253 // Creates the PReLU layer in CreatePreluWorkloadTest()
2254 Layer* const layer = graph.AddLayer<PreluLayer>("prelu"); in CreatePreluWorkloadTest() local
2255 CHECK(layer != nullptr); in CreatePreluWorkloadTest()
2258 Layer* const input = graph.AddLayer<InputLayer> (0, "input"); in CreatePreluWorkloadTest()
2259 Layer* const alpha = graph.AddLayer<InputLayer> (1, "alpha"); in CreatePreluWorkloadTest()
2260 Layer* const output = graph.AddLayer<OutputLayer>(0, "output"); in CreatePreluWorkloadTest()
2269 Connect(input, layer, inputTensorInfo, 0, 0); in CreatePreluWorkloadTest()
2270 Connect(alpha, layer, alphaTensorInfo, 0, 1); in CreatePreluWorkloadTest()
2271 Connect(layer, output, outputTensorInfo, 0, 0); in CreatePreluWorkloadTest()
2275 auto workload = MakeAndCheckWorkload<PreluWorkload>(*layer, factory); in CreatePreluWorkloadTest()
2291 Layer* const layer = graph.AddLayer<SpaceToDepthLayer>(desc, "spaceToDepth"); in CreateSpaceToDepthWorkloadTest() local
2294 Layer* const input = graph.AddLayer<InputLayer>(0, "input"); in CreateSpaceToDepthWorkloadTest()
2295 Layer* const output = graph.AddLayer<OutputLayer>(0, "output"); in CreateSpaceToDepthWorkloadTest()
2301 Connect(input, layer, inputTensorInfo); in CreateSpaceToDepthWorkloadTest()
2302 Connect(layer, output, outputTensorInfo); in CreateSpaceToDepthWorkloadTest()
2307 auto workload = MakeAndCheckWorkload<SpaceToDepthWorkload>(*layer, factory); in CreateSpaceToDepthWorkloadTest()
2327 // Constructs the Stack layer. in CreateStackWorkloadTest()
2329 Layer* const stackLayer = graph.AddLayer<StackLayer>(descriptor, "stack"); in CreateStackWorkloadTest()
2332 // Constructs layer inputs and output. in CreateStackWorkloadTest()
2333 std::vector<Layer*> inputs; in CreateStackWorkloadTest()
2342 Layer* const output = graph.AddLayer<OutputLayer>(0, "output"); in CreateStackWorkloadTest()