// xref: /aosp_15_r20/external/armnn/src/armnn/layers/SoftmaxLayer.cpp (revision 89c4ff92f2867872bb9e2354d150bf0c8c502810)
//
// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//
5 #include "SoftmaxLayer.hpp"
6 
7 #include "LayerCloneBase.hpp"
8 
9 #include <armnn/TypesUtils.hpp>
10 #include <armnn/backends/WorkloadData.hpp>
11 #include <armnn/backends/WorkloadFactory.hpp>
12 
13 namespace armnn
14 {
15 
SoftmaxLayer(const SoftmaxDescriptor & param,const char * name)16 SoftmaxLayer::SoftmaxLayer(const SoftmaxDescriptor &param, const char* name)
17     : LayerWithParameters(1, 1, LayerType::Softmax, param, name)
18 {
19 }
20 
CreateWorkload(const IWorkloadFactory & factory) const21 std::unique_ptr<IWorkload> SoftmaxLayer::CreateWorkload(const IWorkloadFactory& factory) const
22 {
23     SoftmaxQueueDescriptor descriptor;
24     SetAdditionalInfo(descriptor);
25 
26     return factory.CreateWorkload(LayerType::Softmax, descriptor, PrepInfoAndDesc(descriptor));
27 }
28 
Clone(Graph & graph) const29 SoftmaxLayer* SoftmaxLayer::Clone(Graph& graph) const
30 {
31     return CloneBase<SoftmaxLayer>(graph, m_Param, GetName());
32 }
33 
ValidateTensorShapesFromInputs()34 void SoftmaxLayer::ValidateTensorShapesFromInputs()
35 {
36     VerifyLayerConnections(1, CHECK_LOCATION());
37 
38     const TensorShape& outputShape = GetOutputSlot(0).GetTensorInfo().GetShape();
39 
40     VerifyShapeInferenceType(outputShape, m_ShapeInferenceMethod);
41 
42     auto inferredShapes = InferOutputShapes({ GetInputSlot(0).GetConnection()->GetTensorInfo().GetShape() });
43 
44     ARMNN_ASSERT(inferredShapes.size() == 1);
45 
46     ValidateAndCopyShape(outputShape, inferredShapes[0], m_ShapeInferenceMethod, "SoftmaxLayer");
47 }
48 
ExecuteStrategy(IStrategy & strategy) const49 void SoftmaxLayer::ExecuteStrategy(IStrategy& strategy) const
50 {
51     strategy.ExecuteStrategy(this, GetParameters(), {}, GetName());
52 }
53 
54 } // namespace armnn
55