#pragma once

// ${generated_comment}

#ifdef TORCH_ASSERT_ONLY_METHOD_OPERATORS
#error This change adds a dependency on all pytorch operators, meaning the \
  file will need to be re-compiled every time an operator is changed or added. \
  Consider using the at::_ops::{name}::redispatch() interface by including \
  the specific operator from <ATen/ops/{my_operator}_ops.h>
#endif

#include <c10/core/Scalar.h>
#include <ATen/Tensor.h>
#include <c10/core/Storage.h>
#include <ATen/core/Generator.h>
#include <c10/util/Deprecated.h>
#include <ATen/DeviceGuard.h>
#include <c10/core/TensorOptions.h>
#include <ATen/core/Reduction.h>
#include <optional>
#include <ATen/TensorUtils.h>
#include <ATen/Context.h>
#include <ATen/TracerMode.h>
#include <ATen/Operators.h>

namespace at {

namespace redispatch {
    ${function_redispatch_definitions}
} // namespace redispatch

} // namespace at
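
// Illustrative sketch (not part of the generated header): the #error above
// points callers at the per-operator at::_ops::{name}::redispatch() interface,
// which avoids depending on every operator at once. Assuming the add.Tensor
// overload and its generated signature in <ATen/ops/add_ops.h>, usage looks
// roughly like:
//
//   #include <ATen/ops/add_ops.h>
//
//   at::Tensor redispatch_add(c10::DispatchKeySet ks,
//                             const at::Tensor& self,
//                             const at::Tensor& other) {
//     // Re-enters the dispatcher for add.Tensor, restricted to the keys in `ks`.
//     return at::_ops::add_Tensor::redispatch(ks, self, other, /*alpha=*/1);
//   }
//
// Including only the specific <ATen/ops/*_ops.h> header means this file is
// recompiled only when that one operator changes, not on every operator edit.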