#pragma once

#include <ATen/native/DispatchStub.h>
#include <c10/util/Exception.h>
#include <c10/util/string_view.h>

#include <string>

namespace c10 {
class Scalar;
}

namespace at {
struct TensorIterator;
struct TensorIteratorBase;
class TensorBase;
}

namespace at::native {

// These constants control the approximation behavior of the gelu function.
enum class GeluType {
  None,             // Baseline Gelu
  Tanh,             // Tanh Gelu Approximation
  END
};

inline GeluType get_gelutype_enum(const c10::string_view approximate) {
  if (approximate == "none") {
    return GeluType::None;
  } else if (approximate == "tanh") {
    return GeluType::Tanh;
  } else {
    TORCH_CHECK(false, "approximate argument must be either none or tanh.");
  }
}

inline std::string gelutype_to_string(const GeluType type) {
  switch(type) {
    case GeluType::None: return "none";
    case GeluType::Tanh: return "tanh";
    default: TORCH_CHECK(false, "unknown GELU type: ", static_cast<int>(type));
  }
}

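// Usage sketch (illustrative): the user-facing `approximate` string
// round-trips through the enum; "none" selects the exact (erf-based) GELU,
// "tanh" selects the tanh approximation.
//   GeluType t = get_gelutype_enum("tanh");   // GeluType::Tanh
//   std::string s = gelutype_to_string(t);    // "tanh"
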
using structured_activation_fn = void (*)(TensorIteratorBase&);
using structured_activation_backward_fn = void (*)(TensorIteratorBase&);

using activation_fn = void (*)(TensorIterator&);
using activation_backward_fn = void (*)(TensorIterator&);
using softplus_fn = void (*)(TensorIteratorBase&, const c10::Scalar&, const c10::Scalar&);
using softplus_backward_fn = void (*)(TensorIteratorBase&, const c10::Scalar&, const c10::Scalar&);
using threshold_fn = void (*)(TensorIteratorBase&, const c10::Scalar&, const c10::Scalar&);
using hardtanh_backward_fn = void (*)(TensorIterator&, const c10::Scalar&, const c10::Scalar&);
using hardsigmoid_fn = void (*)(TensorIteratorBase&);
using hardsigmoid_backward_fn = void (*)(TensorIteratorBase&);
using hardswish_fn = void (*)(TensorIterator&);
using hardswish_backward_fn = void (*)(TensorIterator&);
using shrink_fn = void (*)(TensorIteratorBase&, const c10::Scalar&);
using softshrink_fn = void (*)(TensorIteratorBase&, const c10::Scalar&);
using shrink_backward_fn = void (*)(TensorIteratorBase&, const c10::Scalar&);
using elu_fn = void (*)(TensorIteratorBase&, const c10::Scalar&, const c10::Scalar&, const c10::Scalar&);
using elu_backward_fn = void (*)(TensorIteratorBase&, const c10::Scalar&, const c10::Scalar&, const c10::Scalar&, bool);
using leaky_relu_fn = void (*)(TensorIteratorBase&, const c10::Scalar&);
using leaky_relu_backward_fn = void (*)(TensorIteratorBase&, const c10::Scalar&);
using log_sigmoid_cpu_fn = void (*)(TensorBase&, TensorBase&, const TensorBase&);
using gelu_fn = void (*)(TensorIteratorBase&, GeluType);
using gelu_backward_fn = void (*)(TensorIteratorBase&, GeluType);
using glu_jvp_fn = void (*)(TensorIteratorBase&);

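// A kernel matching one of these signatures is defined per backend (e.g. in
// aten/src/ATen/native/cpu/Activation.cpp); hypothetical sketch:
//   static void gelu_kernel(at::TensorIteratorBase& it, GeluType approximate) {
//     // elementwise GELU over the iterator's operands, exact or tanh form
//   }
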
DECLARE_DISPATCH(elu_fn, elu_stub);
DECLARE_DISPATCH(elu_backward_fn, elu_backward_stub);
DECLARE_DISPATCH(softplus_fn, softplus_stub);
DECLARE_DISPATCH(softplus_backward_fn, softplus_backward_stub);
DECLARE_DISPATCH(log_sigmoid_cpu_fn, log_sigmoid_cpu_stub);
DECLARE_DISPATCH(activation_backward_fn, log_sigmoid_backward_stub);
DECLARE_DISPATCH(threshold_fn, threshold_stub);
DECLARE_DISPATCH(gelu_fn, GeluKernel);
DECLARE_DISPATCH(gelu_backward_fn, GeluBackwardKernel);
DECLARE_DISPATCH(hardtanh_backward_fn, hardtanh_backward_stub);
DECLARE_DISPATCH(hardsigmoid_fn, hardsigmoid_stub);
DECLARE_DISPATCH(hardsigmoid_backward_fn, hardsigmoid_backward_stub);
DECLARE_DISPATCH(hardswish_fn, hardswish_stub);
DECLARE_DISPATCH(hardswish_backward_fn, hardswish_backward_stub);
DECLARE_DISPATCH(shrink_fn, hardshrink_stub);
DECLARE_DISPATCH(softshrink_fn, softshrink_stub);
DECLARE_DISPATCH(shrink_backward_fn, shrink_backward_stub);
DECLARE_DISPATCH(leaky_relu_fn, leaky_relu_stub);
DECLARE_DISPATCH(leaky_relu_backward_fn, leaky_relu_backward_stub);
DECLARE_DISPATCH(structured_activation_fn, glu_stub);
DECLARE_DISPATCH(activation_backward_fn, glu_backward_stub);
DECLARE_DISPATCH(glu_jvp_fn, glu_jvp_stub);
DECLARE_DISPATCH(structured_activation_fn, silu_stub);
DECLARE_DISPATCH(structured_activation_backward_fn, silu_backward_stub);
DECLARE_DISPATCH(structured_activation_fn, mish_stub);
DECLARE_DISPATCH(activation_backward_fn, mish_backward_stub);
DECLARE_DISPATCH(activation_fn, prelu_stub);
DECLARE_DISPATCH(activation_backward_fn, prelu_backward_stub);

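// Each stub above is filled in by a backend via REGISTER_DISPATCH and then
// called with an explicit device type; a minimal sketch (kernel name
// hypothetical):
//   REGISTER_DISPATCH(GeluKernel, &gelu_kernel);           // backend .cpp
//   GeluKernel(iter.device_type(), iter, GeluType::None);  // call site
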
} // namespace at::native