Searched full:relu_ (Results 1 – 25 of 167) sorted by relevance

/aosp_15_r20/external/pytorch/torch/csrc/jit/passes/quantization/
quantization_patterns.h
307 // aten::conv1d - aten::relu_ in quant_fusion_pattern_and_replacements()
314 %r = aten::relu_(%conv_out) in quant_fusion_pattern_and_replacements()
351 // aten::conv2d - aten::relu_ in quant_fusion_pattern_and_replacements()
358 %r = aten::relu_(%conv_out) in quant_fusion_pattern_and_replacements()
395 // aten::conv3d - aten::relu_ in quant_fusion_pattern_and_replacements()
402 %r = aten::relu_(%conv_out) in quant_fusion_pattern_and_replacements()
464 %r_relu = aten::relu_(%r_add) in quant_fusion_pattern_and_replacements()
482 %r_relu = aten::relu_(%r_add) in quant_fusion_pattern_and_replacements()
517 %r = aten::relu_(%linear_out) in quant_fusion_pattern_and_replacements()
595 %r = aten::relu_(%r_add) in quant_fusion_pattern_and_replacements()
[all …]
insert_observers.cpp
565 // nn.Linear + aten::relu_
570 %second_output = aten::relu_(%first_output)
600 // aten::linear + aten::relu_
605 %second_output = aten::relu_(%first_output)
636 %second_output = aten::relu_(%first_output)
668 %second_output = aten::relu_(%first_output)
700 %second_output = aten::relu_(%first_output)
745 %second_output = aten::relu_(%first_output)
757 %second_output = aten::relu_(%first_output)
788 %second_output = aten::relu_(%first_output)
[all …]
/aosp_15_r20/external/pytorch/test/jit/
test_batch_mm.py
108 torch.relu_(T1)
136 torch.relu_(T1)
172 torch.relu_(T1)
236 torch.relu_(T1)
272 torch.relu_(A)
test_optimize_for_mobile_preserve_debug_info.py
256 conv2d_activation=F.relu_,
257 conv2d_activation_kind="aten::relu_",
263 linear_activation=F.relu_,
264 linear_activation_kind="aten::relu_",
test_convert_activation.py
113 FileCheck().check_not("aten::relu_").run(fn.graph)
128 FileCheck().check_not("aten::relu_").run(fn.graph)
168 torch.relu_,
test_device_analysis.py
115 def relu_(x): function
116 return torch.nn.functional.relu_(x)
118 functions = [add_self, relu_]
/aosp_15_r20/external/pytorch/test/dynamo/
test_subclasses.py
957 return x.add_(1.0) + torch.nn.functional.relu_(x)
974 relu_: "f32[3, 4]" = torch.relu_(l_x_); l_x_ = None
975 add: "f32[3, 4]" = add_ + relu_; add_ = relu_ = None
996 relu_: "f32[3, 4]" = torch.relu_(l_x_); l_x_ = None
997 add: "f32[3, 4]" = add_ + relu_; add_ = relu_ = None
1038 relu_: "f32[3, 4]" = torch.relu_(l_x_); l_x_ = None
1039 add: "f32[3, 4]" = add_ + relu_; add_ = relu_ = None
/aosp_15_r20/external/pytorch/torch/csrc/jit/passes/
fuse_relu.cpp
27 %res = aten::relu_(%add_res) in fuseAddReluImpl()
34 %res = aten::relu_(%add_res) in fuseAddReluImpl()
45 %res = aten::relu_(%add_res) in fuseAddReluImpl()
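Note: fuse_relu.cpp pattern-matches an aten::add whose result feeds an aten::relu_ (or relu) and rewrites the pair into a single fused op. A minimal TorchScript sketch, not taken from the source tree, of eager code that produces exactly that add -> relu_ shape:

    import torch

    @torch.jit.script
    def add_relu(a, b):
        out = a + b
        return torch.relu_(out)

    # The scripted graph contains aten::add followed by aten::relu_,
    # which is the shape fuseAddReluImpl() looks for; whether the fusion
    # actually fires depends on the optimization passes that run.
    print(add_relu.graph)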
metal_rewrite.cpp
119 %res = aten::relu_(%linear_res) in fuseReluWithPackedOps()
129 %r = aten::relu_(%r) in fuseReluWithPackedOps()
xnnpack_rewrite.cpp
325 %res = aten::relu_(%linear_res) in fuseReluWithPackedOps()
335 %res = aten::relu_(%conv2d_res) in fuseReluWithPackedOps()
/aosp_15_r20/external/pytorch/aten/src/ATen/native/metal/ops/
MetalNeurons.mm
59 static Tensor& relu_(Tensor& input) {
84 m.impl(TORCH_SELECTIVE_NAME("aten::relu_"), TORCH_FN(relu_));
/aosp_15_r20/external/executorch/backends/qualcomm/quantizer/
README.md
57 @register_annotator([torch.ops.aten.relu.default, torch.ops.aten.relu_.default])
59 Where `torch.ops.aten.relu.default` / `torch.ops.aten.relu_.default` map to `copy` / `in-place` ver…
162 @register_annotator([torch.ops.aten.relu.default, torch.ops.aten.relu_.default])
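Note: the README entry above registers one annotator for both ReLU overloads. As a quick illustration of the copy/in-place distinction it mentions (plain PyTorch, not part of the Qualcomm quantizer), the two aten overloads differ only in whether the input tensor is mutated:

    import torch

    x = torch.tensor([-1.0, 0.5, 2.0])

    # aten::relu.default is the "copy" variant: it returns a new tensor
    # and leaves x untouched.
    y = torch.ops.aten.relu.default(x)

    # aten::relu_.default is the "in-place" variant: it clamps x itself.
    torch.ops.aten.relu_.default(x)

    print(x)  # tensor([0.0000, 0.5000, 2.0000])
    print(y)  # same values, but a separate tensor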
/aosp_15_r20/external/pytorch/torch/ao/quantization/quantizer/
xnnpack_quantizer_utils.py
237 torch.ops.aten.relu_.default,
343 torch.ops.aten.relu_.default,
486 output = F.relu_(bn) if relu_is_inplace else F.relu(bn)
720 torch.ops.aten.relu_.default,
842 torch.ops.aten.relu_.default,
/aosp_15_r20/external/pytorch/benchmarks/operator_benchmark/pt/
unary_test.py
124 ["relu_", torch.relu_],
/aosp_15_r20/external/pytorch/torch/ao/quantization/backend_config/
_qnnpack_pt2e.py
98 (torch.ops.aten.convolution.default, torch.ops.aten.relu_.default)
160 (op_with_quantized_bop_scalar_variant, torch.ops.aten.relu_.default),
/aosp_15_r20/external/pytorch/aten/src/ATen/native/vulkan/ops/
Clamp.cpp
305 Tensor& relu_(Tensor& self) { in relu_() function
618 m.impl(TORCH_SELECTIVE_NAME("aten::relu_"), relu_); in TORCH_LIBRARY_IMPL()
/aosp_15_r20/external/pytorch/test/distributed/_tools/
test_mod_tracker.py
20 x = x["a"].relu_()
88 return self.foo(x).relu_()
/aosp_15_r20/external/pytorch/test/quantization/pt2e/
test_x86inductor_quantizer.py
622 "relu_inplace": [torch.nn.ReLU(inplace=True), torch.ops.aten.relu_.default],
1283 self._test_linear_unary_helper(nn.ReLU, aten.relu.default, aten.relu_.default)
1295 nn.ReLU, aten.relu.default, aten.relu_.default, is_qat=True
1308 nn.ReLU, aten.relu.default, aten.relu_.default, is_dynamic=True
1324 nn.ReLU, aten.relu.default, aten.relu_.default, is_qat=True, is_dynamic=True
1611 relu_op = aten.relu_.default if inplace_relu else aten.relu.default
1755 "relu_inplace": [torch.nn.ReLU(inplace=True), torch.ops.aten.relu_.default],
/aosp_15_r20/external/pytorch/torch/ao/ns/fx/
mappings.py
105 "relu_",
704 "relu_",
/aosp_15_r20/external/pytorch/test/quantization/jit/
test_quantize_jit.py
1907 ).check_not("aten::relu_(").check_not("quantized::add(").check_not(
2116 ).check_not("aten::relu_(").check_not("quantized::add(").check_not(
2202 ).check_not("aten::relu_(").check_not(
2292 ).check_not("aten::relu_").run(model.graph)
2313 "aten::relu_"
2335 "aten::relu_"
2541 ).check_not("aten::relu_(").check_not("quantized::mul(").check_not(
2626 ).check_not("aten::relu_(").check_not(
2837 x.relu_()
/aosp_15_r20/external/pytorch/test/
test_schema_check.py
332 expected.relu_()
338 actual.relu_()
test_module_tracker.py
21 x = x["a"].relu_()
/aosp_15_r20/external/executorch/docs/source/
compiler-custom-compiler-passes.md
55 relu_ is the in-place version. Replace it with relu, which is the
60 if op != torch.ops.aten.relu_.default:
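Note: the ExecuTorch doc above describes a pass that swaps the in-place relu_ for its functional counterpart. A minimal fx-level sketch of that rewrite (make_fx is used here only for illustration; the real ExecuTorch pass infrastructure differs, and the swap assumes nothing else observes the in-place mutation):

    import torch
    from torch.fx.experimental.proxy_tensor import make_fx

    def f(x):
        y = x + 1.0
        return torch.relu_(y)  # traced as torch.ops.aten.relu_.default

    gm = make_fx(f)(torch.randn(3))

    # Swap every in-place relu_ node for the functional relu.
    for node in gm.graph.nodes:
        if node.op == "call_function" and node.target == torch.ops.aten.relu_.default:
            node.target = torch.ops.aten.relu.default

    gm.graph.lint()
    gm.recompile()
    print(gm.code)  # the graph now calls torch.ops.aten.relu.default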
/aosp_15_r20/external/pytorch/torch/ao/quantization/pt2e/
graph_utils.py
25 {torch.nn.ReLU, torch.nn.functional.relu, torch.nn.functional.relu_},
/aosp_15_r20/external/pytorch/docs/source/
nn.functional.rst
75 relu_
