//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "Activation.hpp"

#include <algorithm>
#include <cmath>

namespace armnn
{

float Activation(float in,
                 ActivationFunction function,
                 float a,
                 float b)
{
    float output;

    // Compute the result of the activation function.
    switch (function)
    {
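        // Linear: y = a*x + b.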
        case ActivationFunction::Linear:
        {
            output = a * in + b;
            break;
        }
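        // Sigmoid (logistic): y = 1 / (1 + e^-x).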
        case ActivationFunction::Sigmoid:
        {
            output = 1.f / (1.f + expf(-in));
            break;
        }
        case ActivationFunction::ReLu:
        {
            output = std::max(0.f, in);
            break;
        }
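        // BoundedReLu: clamp the input to [b, a] (b = lower bound, a = upper bound).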
        case ActivationFunction::BoundedReLu:
        {
            output = std::min(a, std::max(b, in));
            break;
        }
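        // SoftReLu (softplus): y = log(1 + e^x).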
        case ActivationFunction::SoftReLu:
        {
            output = logf(1.0f + expf(in));
            break;
        }
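        // LeakyReLu: pass positive inputs through, scale negative inputs by the slope a.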
        case ActivationFunction::LeakyReLu:
        {
            output = in > 0.0f ? in : (in * a);
            break;
        }
        case ActivationFunction::Abs:
        {
            output = in < 0 ? -in : in;
            break;
        }
        case ActivationFunction::Sqrt:
        {
            output = sqrtf(in);
            break;
        }
        case ActivationFunction::Square:
        {
            output = in * in;
            break;
        }
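        // TanH: scaled hyperbolic tangent, y = a * tanh(b * x).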
        case ActivationFunction::TanH:
        {
            output = a * tanhf(b * in);
            break;
        }
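        // ELU: y = x for x >= 0, otherwise a * (e^x - 1).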
        case ActivationFunction::Elu:
        {
            output = (in >= 0) ? in : a * (expf(in) - 1);
            break;
        }
        case ActivationFunction::HardSwish:
        {
            // hard_swish(x) = x * relu6(x+3) / 6
            // relu6(x) = min(max(x,0),6)
            output = in * (std::min(std::max((in + 3), 0.0f), 6.0f)) / 6;
            break;
        }
        default:
        {
            throw InvalidArgumentException("Unsupported activation function");
        }
    }

    return output;
}

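// Example use of the scalar overload above (illustrative values, not taken from this file):
//   Activation(-1.0f, ActivationFunction::LeakyReLu,   0.1f, 0.0f) == -0.1f
//   Activation( 2.0f, ActivationFunction::BoundedReLu, 6.0f, 0.0f) ==  2.0f
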
void Activation(Decoder<float>& in,
                Encoder<float>& out,
                const TensorInfo& tensorInfo,
                ActivationFunction function,
                float a,
                float b)
{
    unsigned int numElements = tensorInfo.GetNumElements();

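    // Apply the scalar activation to every element of the tensor.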
    for (unsigned int i = 0; i < numElements; i++)
    {
        out.Set(Activation(in.Get(), function, a, b));
        ++in;
        ++out;
    }
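    // Rewind the decoder and encoder so the iterators can be reused by the caller.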
    in -= numElements;
    out -= numElements;
}

} // namespace armnn