
Searched full:triu (Results 1 – 25 of 85) sorted by relevance


/aosp_15_r20/external/pytorch/test/torch_np/numpy_tests/lib/
test_twodim_base.py
338 c = np.triu(a)
371 a_triu_observed = np.triu(a)
383 assert_array_equal(np.triu(arr), out_triu)
388 # tril and triu should return the same dtype as input
391 assert_equal(np.triu(arr).dtype, arr.dtype)
397 iu = mask_indices(3, np.triu)
401 iu1 = mask_indices(3, np.triu, 1)
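For context, a minimal NumPy sketch of the properties these matched lines exercise (dtype preservation and mask_indices); illustrative only, not code from the file above:

    import numpy as np

    a = np.arange(12, dtype=np.float32).reshape(3, 4)
    assert np.triu(a).dtype == a.dtype        # tril/triu keep the input dtype

    # mask_indices builds index arrays from a mask function such as np.triu
    iu = np.mask_indices(3, np.triu)          # upper triangle of a 3x3, k=0
    m = np.arange(9).reshape(3, 3)
    assert np.array_equal(m[iu], np.array([0, 1, 2, 4, 5, 8]))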
/aosp_15_r20/external/pytorch/aten/src/ATen/native/
TriangularOps.cpp
29 TORCH_META_FUNC(triu)(const Tensor& self, int64_t k) { in TORCH_META_FUNC() argument
30 TORCH_CHECK(self.dim() >= 2, "triu: input tensor must have at least 2 dimensions") in TORCH_META_FUNC()
39 // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ triu/tril ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
129 static constexpr const char* op_name = "triu";
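The shape check in the meta function above is observable from Python; a quick illustration of the assumed behavior of a recent PyTorch build:

    import torch

    x = torch.arange(16.).reshape(4, 4)
    print(torch.triu(x, diagonal=1))   # keep only elements strictly above the diagonal

    try:
        torch.triu(torch.ones(3))      # 1-D input is rejected
    except RuntimeError as e:
        print(e)                       # "triu: input tensor must have at least 2 dimensions"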
TriangularOpsUtils.h
8 * computes the number of batches for Triu and Tril. This ignores stride 0 dimension
20 /* Checks a necessary property for the triu and tril implementations, hence the name.
ts_native_functions.yaml
137 - triu
/aosp_15_r20/external/pytorch/torch/csrc/autograd/
FunctionsManual.cpp
1956 // tril(L^H gL) = tril(L^H (triu(gL, 1) + tril(gL))) in cholesky_backward()
1957 // = tril(L^H tril(gL)) + tril(L^H triu(gL, 1)) in cholesky_backward()
1959 // since tril(L^H triu(gL, 1)) = 0, as L^H triu(gL, 1) is upper triangular in cholesky_backward()
3917 // and define syminv(X) = triu(X) - 0.5 * diag(X) the inverse of in linalg_qr_jvp()
3918 // sym : Triu(k, diag \in \mathbb{R}) -> Her(k) to give in linalg_qr_jvp()
3952 auto ret = X.triu(); in linalg_qr_jvp()
4009 // need are syminv*(R) = 0.5 * (R.triu() + R.triu()^H - Re diag(R)) sym*(X) = in linalg_qr_backward()
4011 // syminvadj(triu(gR R^H - Q^H gQ)))R^{-H} in linalg_qr_backward()
4065 gA = Q.matmul(syminvadj(gA.triu())); in linalg_qr_backward()
4378 grad_a = grad_a.triu((int)unitriangular); in triangular_solve_backward()
[all …]
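The cholesky_backward comment above relies on tril(L^H triu(gL, 1)) = 0: L^H is upper triangular, so its product with a strictly upper-triangular matrix is itself strictly upper triangular. A small numeric check of that identity (illustrative only, not the library's code):

    import torch

    A = torch.randn(5, 5, dtype=torch.complex128)
    L = torch.linalg.cholesky(A @ A.mH + 5 * torch.eye(5))   # lower-triangular factor
    gL = torch.randn_like(L)
    # upper-triangular L^H times strictly upper-triangular triu(gL, 1)
    assert torch.allclose(torch.tril(L.mH @ torch.triu(gL, 1)),
                          torch.zeros_like(L))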
/aosp_15_r20/external/executorch/extension/llm/custom_ops/
test_sdpa_with_kv_cache.py
53 self.mask = torch.triu(self.mask, diagonal=1)
250 self.mask = torch.triu(self.mask, diagonal=1)
318 self.mask = torch.triu(self.mask, diagonal=1)
385 self.mask = torch.triu(self.mask, diagonal=1)
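The pattern repeated above is the standard causal-attention mask; a minimal sketch of how such a mask is typically used (not code from the test):

    import torch

    seq_len = 4
    mask = torch.full((seq_len, seq_len), float("-inf"))
    mask = torch.triu(mask, diagonal=1)    # -inf above the diagonal, 0 elsewhere
    scores = torch.randn(seq_len, seq_len) + mask
    attn = torch.softmax(scores, dim=-1)   # each query attends only to itself and the past
    assert torch.allclose(attn.triu(1), torch.zeros_like(attn))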
/aosp_15_r20/external/pytorch/functorch/dim/
README.md
508 def triu(A):
512 triu(torch.rand(3, 4))
606 ### Puzzle 6 - triu
608 Compute [triu](https://numpy.org/doc/stable/reference/generated/numpy.triu.html) - the upper triang…
619 def triu(j: int):
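A plain-torch sketch of the puzzle (not the first-class-dims solution from the README): triu can be computed by comparing broadcasted row and column indices.

    import torch

    def triu(A: torch.Tensor) -> torch.Tensor:
        i = torch.arange(A.shape[0]).unsqueeze(-1)   # row indices as a column
        j = torch.arange(A.shape[1])                 # column indices as a row
        return A * (i <= j)                          # keep entries on/above the diagonal

    A = torch.rand(3, 4)
    assert torch.allclose(triu(A), torch.triu(A))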
/aosp_15_r20/external/pytorch/torch/onnx/
symbolic_opset14.py
32 "triu",
53 @_onnx_symbolic("aten::triu")
54 def triu(g: jit_utils.GraphContext, self, diagonal, out=None): function
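Because aten::triu is registered in symbolic_opset14.py, exporting a model that calls torch.triu presumably requires opset_version >= 14 (where ONNX gained the Trilu operator); a hedged sketch, with the module name chosen here for illustration:

    import torch

    class CausalMask(torch.nn.Module):
        def forward(self, x):
            return torch.triu(x, diagonal=1)

    torch.onnx.export(CausalMask(), torch.randn(4, 4), "triu.onnx", opset_version=14)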
/aosp_15_r20/external/pytorch/test/jit/
test_parametrization.py
22 return X.triu() + X.triu(1).mT
/aosp_15_r20/external/pytorch/aten/src/ATen/native/cuda/
TensorFactories.cu
138 // for triu: in resolve_root_int()
197 // View the triu as a top rectangle stacked on a bottom trapezoid, where the
361 // # of triu elements in the first row in triu_indices_cuda()
TriangularOps.cu
35 // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ triu/tril ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
155 triu_tril_cuda_template<true>(result, self, k, "triu"); in TORCH_IMPL_FUNC()
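The CUDA kernels above back torch.triu_indices, which enumerates upper-triangle coordinates directly instead of materializing a mask; a small check of that correspondence (illustrative only):

    import torch

    idx = torch.triu_indices(4, 4, offset=1)          # 2 x N (row, col) coordinates
    mask = torch.triu(torch.ones(4, 4), diagonal=1)
    assert torch.equal(idx, mask.nonzero().t())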
/aosp_15_r20/external/pytorch/test/torch_np/numpy_tests/linalg/
test_linalg.py
1741 assert_almost_equal(np.triu(r), r)
1753 assert_almost_equal(np.triu(r1), r1)
1835 assert_almost_equal(np.triu(r[..., :, :]), r)
1849 assert_almost_equal(np.triu(r1[..., :, :]), r1)
2164 assert_almost_equal(np.triu(s[:-1, 1:]), np.triu(s_expected[:-1, 1:]))
2165 assert_almost_equal(np.triu(m), np.triu(m_expected))
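What the assertions above check: the R factor returned by QR is upper triangular, so np.triu(r) leaves it unchanged. A short illustration (not the test code itself):

    import numpy as np

    a = np.random.rand(5, 3)
    q, r = np.linalg.qr(a)                        # reduced QR: q is 5x3, r is 3x3
    np.testing.assert_almost_equal(np.triu(r), r)
    np.testing.assert_almost_equal(q @ r, a)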
/aosp_15_r20/external/pytorch/aten/src/ATen/functorch/
BatchRulesViews.cpp
547 TORCH_CHECK(self.dim() >= 2, "triu: The input tensor must have at least 2 dimensions."); in triu_batch_rule()
549 auto result = at::triu(self_, diagonal); in triu_batch_rule()
559 VMAP_SUPPORT(triu, triu_batch_rule); in TORCH_LIBRARY_IMPL()
/aosp_15_r20/external/pytorch/torch/nn/utils/
parametrize.py
513 >>> return X.triu() + X.triu(1).T # Return a symmetric matrix
516 >>> return A.triu()
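The docstring lines above describe a triu-based parametrization; a minimal runnable sketch of that pattern (the class name here is illustrative):

    import torch
    from torch import nn
    from torch.nn.utils import parametrize

    class Symmetric(nn.Module):
        def forward(self, X):
            return X.triu() + X.triu(1).T   # symmetric matrix built from the upper triangle

    layer = nn.Linear(4, 4)
    parametrize.register_parametrization(layer, "weight", Symmetric())
    assert torch.allclose(layer.weight, layer.weight.T)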
/aosp_15_r20/external/pytorch/test/functorch/
test_dims.py
66 def triu(A): function
143 self.assertTrue(torch.allclose(torch.triu(A_, 0), triu(A_)))
/aosp_15_r20/external/tensorflow/tensorflow/python/kernel_tests/linalg/
lu_op_test.py
137 complex_data += np.triu(-1j * data, 1).astype(dtype)
158 complex_data += np.triu(-1j * data, 1).astype(dtype)
/aosp_15_r20/external/pytorch/test/nn/
test_parametrization.py
398 A = X.triu(1)
443 A = X.triu(1)
452 return X.triu(1)
490 self.assertEqual(model.parametrizations.weight.original, X.triu(1))
1686 zeros_grad = grad.triu(1)
/aosp_15_r20/external/pytorch/torch/csrc/api/src/nn/modules/
transformer.cpp
475 return torch::triu( in generate_square_subsequent_mask()
484 return torch::triu( in generate_square_subsequent_mask()
/aosp_15_r20/external/tensorflow/tensorflow/python/ops/numpy_ops/
np_array_ops.py
1224 @np_utils.np_doc('triu')
1225 def triu(m, k=0): # pylint: disable=missing-docstring function
1228 raise ValueError('Argument to triu should have known rank')
1232 raise ValueError('Argument to triu must have rank at least 2')
/aosp_15_r20/external/apache-commons-math/src/main/java/org/apache/commons/math3/optim/nonlinear/scalar/noderiv/
CMAESOptimizer.java
777 C = triu(C, 0).add(triu(C, 1).transpose());
1107 private static RealMatrix triu(final RealMatrix m, int k) {
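The Java lines above rebuild a symmetric covariance matrix from its upper triangle: C = triu(C, 0) + triu(C, 1)^T. A NumPy sketch of the same identity:

    import numpy as np

    C = np.random.rand(4, 4)
    C_sym = np.triu(C, 0) + np.triu(C, 1).T
    assert np.allclose(C_sym, C_sym.T)               # result is symmetric
    assert np.allclose(np.triu(C_sym), np.triu(C))   # upper triangle unchanged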
/aosp_15_r20/external/executorch/extension/gguf_util/converters/
llama_converter.py
86 mask = torch.triu(mask, diagonal=1)
/aosp_15_r20/external/apache-commons-math/src/main/java/org/apache/commons/math3/optimization/direct/
CMAESOptimizer.java
894 C = triu(C, 0).add(triu(C, 1).transpose());
1194 private static RealMatrix triu(final RealMatrix m, int k) {
/aosp_15_r20/external/tensorflow/tensorflow/compiler/tests/
matrix_band_part_test.py
179 band_np = np.triu(band_np, -lower)
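As the test above uses it, np.triu(x, -lower) keeps the main diagonal plus `lower` subdiagonals; composing with np.tril yields a band. A small sketch (the variable names are assumed):

    import numpy as np

    x = np.random.rand(5, 5)
    lower, upper = 1, 2
    band = np.tril(np.triu(x, -lower), upper)   # keep `lower` subdiagonals and `upper` superdiagonals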
/aosp_15_r20/external/pytorch/functorch/op_analysis/
public_api
304 triu
/aosp_15_r20/external/executorch/examples/models/llama/experimental/
load_gguf_q4_0.py
158 mask = torch.triu(mask, diagonal=1)
