
Searched full:selu (Results 1 – 25 of 127) sorted by relevance


/aosp_15_r20/external/pytorch/test/onnx/
test_onnxscript_no_runtime.py 27 # 1. Register Selu onnxscript function as custom Op
31 def Selu(X): function
43 return g.onnxscript_op(Selu, X).setType(X.type())
46 symbolic_name="aten::selu",
88 model_selu = torch.nn.SELU()
106 self.assertEqual(selu_proto.functions[0].name, "Selu")
115 self.selu = torch.nn.SELU()
122 y = self.selu(x)
135 def Selu(X): function
150 return g.onnxscript_op(Selu, X).setType(X.type())
[all …]
test_onnxscript_runtime.py 24 model = torch.nn.SELU()
31 def Selu( function
45 return g.onnxscript_op(Selu, X).setType(X.type())
48 symbolic_name="aten::selu",
/aosp_15_r20/external/tensorflow/tensorflow/python/keras/
activations.py 146 @keras_export('keras.activations.selu')
148 def selu(x): function
149 """Scaled Exponential Linear Unit (SELU).
151 The Scaled Exponential Linear Unit (SELU) activation function is defined as:
159 Basically, the SELU activation function multiplies `scale` (> 1) with the
175 ... activation='selu'))
177 ... activation='selu'))
179 ... activation='selu'))
197 return nn.selu(x)
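The activations.py docstring quoted above defines SELU as scale * x for x > 0 and scale * alpha * (exp(x) - 1) otherwise, with the alpha and scale constants spelled out in the torch/_refs entry further down. A minimal NumPy sketch of that definition, for illustration only (the Keras function itself just dispatches to nn.selu(x)):

    import numpy as np

    ALPHA = 1.6732632423543772848170429916717   # constants as quoted in the torch/_refs entry below
    SCALE = 1.0507009873554804934193349852946

    def selu(x):
        x = np.asarray(x, dtype=np.float64)
        # scale * x for positive inputs, scale * alpha * (exp(x) - 1) otherwise
        return SCALE * np.where(x > 0, x, ALPHA * np.expm1(x))

    print(selu([-1.0, 0.0, 1.0]))   # approx [-1.1113, 0.0, 1.0507]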
/aosp_15_r20/external/tensorflow/tensorflow/core/kernels/
unary_ops_composition.cc 171 // Register compute function for the Relu/Relu6/Elu/Selu.
204 auto selu = functor::Selu<Eigen::DefaultDevice, T>(); \
205 selu(Eigen::DefaultDevice(), in, *out); \
255 REGISTER_COMPUTE_FN(Selu); in UnaryOpsCompositionSupport()
320 REGISTER_COMPUTE_FN(Selu); in UnaryOpsCompositionSupport()
384 REGISTER_COMPUTE_FN(Selu); in UnaryOpsCompositionSupport()
relu_op.cc 75 Name("Selu").Device(DEVICE_CPU).TypeConstraint<type>("T"), \
81 // Elu and Selu only make sense with float or double.
104 void Selu<GPUDevice, T>::operator()( \
107 extern template struct Selu<GPUDevice, T>;
120 Name("Selu").Device(DEVICE_GPU).TypeConstraint<type>("T"), \
relu_op_functor.h 174 struct Selu { struct
175 // Computes Selu activation.
198 // gradients: gradients backpropagated to the Selu op. argument
199 // activations: outputs of the Selu op.
200 // backprops: gradients to backpropagate to the Selu inputs.
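The relu_op_functor.h comments above spell out the SeluGrad contract: the incoming gradients together with the Selu outputs produce the gradients for the Selu inputs. A hedged NumPy sketch of that backward rule (the branch on the sign of the activation mirrors the comments, not the exact Eigen expression):

    import numpy as np

    ALPHA = 1.6732632423543772848170429916717
    SCALE = 1.0507009873554804934193349852946

    def selu_grad(gradients, activations):
        # x > 0: activation is scale * x, so the local derivative is scale.
        # x <= 0: activation is scale * alpha * (exp(x) - 1), whose derivative
        #         scale * alpha * exp(x) equals activation + scale * alpha.
        return np.where(activations > 0,
                        gradients * SCALE,
                        gradients * (activations + SCALE * ALPHA))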
/aosp_15_r20/external/tensorflow/tensorflow/python/ops/
nn_grad_test.py 228 selu = gen_nn_ops.selu(inputs)
229 selu_grad = gradients_impl.gradients(selu, inputs, grad_ys=dummy)[0]
245 selu = gen_nn_ops.selu(inputs)
246 selu_grad = gradients_impl.gradients(selu, inputs, grad_ys=dummy)[0]
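The nn_grad_test.py lines above exercise the registered Selu gradient through gradients_impl in graph mode; a rough eager-mode equivalent, assuming TensorFlow 2.x, is:

    import tensorflow as tf

    x = tf.constant([-2.0, -0.5, 0.0, 0.5, 2.0])
    with tf.GradientTape() as tape:
        tape.watch(x)              # x is a constant, so it must be watched explicitly
        y = tf.nn.selu(x)
    dy_dx = tape.gradient(y, x)    # scale for x > 0, scale * alpha * exp(x) for x <= 0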
/aosp_15_r20/external/pytorch/torch/_refs/nn/functional/
__init__.py 57 "selu",
119 # alpha = - SELU.alpha * SELU.scale, here
120 # SELU.alpha = 1.6732632423543772848170429916717 and
121 # SELU.scale = 1.0507009873554804934193349852946
400 @register_decomposition(aten.selu)
407 def selu(a: TensorLikeType, inplace: bool = False) -> TensorLikeType: function
409 Reference implementation of torch.nn.functional.selu
1278 selu_ = _make_inplace(selu)
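Combining the alpha/scale constants quoted above with the selu decomposition they annotate, a reference-style SELU in plain PyTorch ops might look like the following sketch (an illustration of the math, not the literal _refs code):

    import torch

    def selu_ref(a: torch.Tensor) -> torch.Tensor:
        alpha = 1.6732632423543772848170429916717
        scale = 1.0507009873554804934193349852946
        # expm1 keeps precision for inputs near zero
        return scale * torch.where(a > 0, a, alpha * torch.expm1(a))

    x = torch.randn(4)
    torch.testing.assert_close(selu_ref(x), torch.nn.functional.selu(x))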
/aosp_15_r20/external/tensorflow/tensorflow/core/api_def/base_api/
api_def_SeluGrad.pbtxt 7 The backpropagated gradients to the corresponding Selu operation.
13 The outputs of the corresponding Selu operation.
23 summary: "Computes gradients for the scaled exponential linear (Selu) operation."
/aosp_15_r20/external/tensorflow/tensorflow/core/kernels/mlir_generated/
gpu_op_selu.cc 21 GENERATE_AND_REGISTER_UNARY_GPU_KERNEL(Selu, DT_HALF);
22 GENERATE_AND_REGISTER_UNARY_GPU_KERNEL(Selu, DT_FLOAT);
23 GENERATE_AND_REGISTER_UNARY_GPU_KERNEL(Selu, DT_DOUBLE);
/aosp_15_r20/external/tensorflow/tensorflow/compiler/tf2xla/kernels/
elu_op.cc 35 XlaOp Selu(XlaOp x) { in Selu() function
82 ctx->SetOutput(0, xla::Selu(ctx->Input(0))); in Compile()
107 REGISTER_XLA_OP(Name("Selu"), SeluOp);
/aosp_15_r20/external/pytorch/torch/csrc/api/include/torch/nn/modules/
activation.h 49 // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ SELU ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
51 /// Applies the selu function element-wise.
52 /// See https://pytorch.org/docs/main/nn.html#torch.nn.SELU to learn
60 /// SELU model(SELUOptions().inplace(true));
70 /// Pretty prints the `SELU` module into the given `stream`.
79 /// provides, and examples of how to use `SELU` with `torch::nn::SELUOptions`.
82 TORCH_MODULE(SELU);
/aosp_15_r20/external/pytorch/torch/nn/modules/
activation.py 29 "SELU",
613 class SELU(Module): class
614 r"""Applies the SELU function element-wise.
617 \text{SELU}(x) = \text{scale} * (\max(0,x) + \min(0, \alpha * (\exp(x) - 1)))
624 ``nonlinearity='linear'`` should be used instead of ``nonlinearity='selu'``
637 .. image:: ../scripts/activation_images/SELU.png
641 >>> m = nn.SELU()
656 return F.selu(input, self.inplace)
__init__.py 21 SELU,
300 "SELU",
/aosp_15_r20/external/tensorflow/tensorflow/python/kernel_tests/nn_ops/
relu_op_test.py 519 tf_selu = nn_ops.selu(np_features)
537 nn_ops.selu, [x], delta=1.0 / 1024))
545 *gradient_checker_v2.compute_gradient(nn_ops.selu, [x]))
555 y = nn_ops.selu(x)
573 y = nn_ops.selu(x)
/aosp_15_r20/external/pytorch/torch/csrc/api/include/torch/nn/functional/
activation.h 49 inline Tensor selu(Tensor input, bool inplace) { in selu() function
53 return torch::selu(input); in selu()
60 /// https://pytorch.org/docs/main/nn.functional.html#torch.nn.functional.selu
69 /// F::selu(input, F::SELUFuncOptions(false));
71 inline Tensor selu(Tensor input, const SELUFuncOptions& options = {}) {
72 return detail::selu(std::move(input), options.inplace());
/aosp_15_r20/external/tensorflow/tensorflow/core/api_def/python_api/
api_def_Selu.pbtxt 2 graph_op_name: "Selu"
4 name: "nn.selu"
/aosp_15_r20/external/tensorflow/tensorflow/core/api_def/java_api/
api_def_Selu.pbtxt 2 graph_op_name: "Selu"
4 name: "nn.Selu"
/aosp_15_r20/external/tensorflow/tensorflow/core/ops/compat/ops_history_v2/
Selu.pbtxt 2 name: "Selu"
24 name: "Selu"
/aosp_15_r20/external/tensorflow/tensorflow/core/ops/compat/ops_history_v1/
Selu.pbtxt 2 name: "Selu"
24 name: "Selu"
/aosp_15_r20/external/pytorch/torch/nn/
init.py 86 SELU :math:`\frac{3}{4}`
91 you should use ``nonlinearity='linear'`` instead of ``nonlinearity='selu'``.
94 In contrast, the default gain for ``SELU`` sacrifices the normalization
134 elif nonlinearity == "selu":
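The init.py table above lists 3/4 as the recommended gain for SELU, while the surrounding note suggests nonlinearity='linear' (gain 1) when the goal is a self-normalizing network. A quick check against torch.nn.init.calculate_gain:

    import torch.nn as nn

    print(nn.init.calculate_gain("selu"))    # 0.75, the 3/4 value from the table above
    print(nn.init.calculate_gain("linear"))  # 1.0, what the note recommends for SNNs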
/aosp_15_r20/external/pytorch/torch/csrc/api/include/torch/nn/options/
activation.h 41 /// Options for the `SELU` module.
45 /// SELU model(SELUOptions().inplace(true));
55 /// Options for `torch::nn::functional::selu`.
63 /// F::selu(input, F::SELUFuncOptions(false));
/aosp_15_r20/external/pytorch/functorch/dim/
op_properties.py 271 torch.selu,
272 torch.nn.functional.selu,
/aosp_15_r20/external/pytorch/docs/source/
onnx_torchscript.rst 544 model = torch.nn.SELU()
549 def Selu(X):
561 return g.onnxscript_op(Selu, X).setType(X.type())
567 symbolic_name="aten::selu",
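The onnx_torchscript.rst excerpt above and the two ONNX tests at the top of these results follow the same recipe: write Selu as an onnxscript function, wrap it in a symbolic that emits it via g.onnxscript_op, and register that symbolic for aten::selu. A condensed sketch of that recipe, with the opset and domain choices treated as assumptions:

    import torch
    import onnxscript
    from onnxscript.onnx_opset import opset15 as op

    custom_opset = onnxscript.values.Opset(domain="onnx-script", version=1)

    @onnxscript.script(custom_opset)
    def Selu(X):
        # Python constants are captured as ONNX Constant nodes by onnxscript.
        alpha = 1.67326
        gamma = 1.0507
        alphaX = op.CastLike(alpha, X)
        gammaX = op.CastLike(gamma, X)
        neg = gammaX * (alphaX * op.Exp(X) - alphaX)
        pos = gammaX * X
        zero = op.CastLike(0, X)
        return op.Where(X <= zero, neg, pos)

    def custom_selu(g, X):
        # setType() keeps the original shape/type information on the new node.
        return g.onnxscript_op(Selu, X).setType(X.type())

    torch.onnx.register_custom_op_symbolic(
        symbolic_name="aten::selu",
        symbolic_fn=custom_selu,
        opset_version=15,
    )
    torch.onnx.export(torch.nn.SELU(), torch.randn(1, 2, 3, 4), "selu_custom.onnx",
                      opset_version=15)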
/aosp_15_r20/external/pytorch/torch/ao/pruning/_experimental/pruner/
base_structured_sparsifier.py 53 F.selu,
80 nn.SELU,
