Home
last modified time | relevance | path

Searched defs:embed_dim (Results 1 – 16 of 16) sorted by relevance

/aosp_15_r20/external/libopus/dnn/torch/lpcnet/utils/layers/
pcm_embeddings.py:39  def __init__(self, embed_dim=128, num_levels=256)  (argument)
pcm_embeddings.py:60  def __init__(self, embed_dim, num_levels=256)  (argument)
/aosp_15_r20/external/pytorch/benchmarks/transformer/
attention_bias_benchmarks.py:70  batch_size, q_sequence_length, kv_sequence_length, embed_dim, dtype, device  (argument)
attention_bias_benchmarks.py:81  def __init__(self, num_heads, embed_dim, device=None, dtype=None)  (argument)
/aosp_15_r20/external/pytorch/aten/src/ATen/native/transformers/
transformer.cpp:65  const int64_t embed_dim  (in norm())
transformer.cpp:77  const int64_t embed_dim  (in transformer_encoder_layer_forward())
attention.cpp:189  const int64_t embed_dim  (in qkv_projection())
attention.cpp:265  const int64_t embed_dim  (in native_multi_head_attention_cpu())
attention.cpp:856  const int64_t embed_dim  (in triton_multi_head_attention())
/aosp_15_r20/external/executorch/exir/tests/
transformer.py:15  def __init__(self, embed_dim, num_heads=2)  (argument)
/aosp_15_r20/external/pytorch/torch/csrc/api/src/nn/options/
activation.cpp:25  int64_t embed_dim  (in MultiheadAttentionOptions())
/aosp_15_r20/external/pytorch/torch/csrc/api/include/torch/nn/modules/
activation.h:826  MultiheadAttentionImpl(int64_t embed_dim, int64_t num_heads)  (in MultiheadAttentionImpl())
/aosp_15_r20/external/pytorch/test/
test_native_mha.py:160  def __init__(self, embed_dim, num_heads, qkv, proj)  (argument)
test_transformers.py:902  embed_dim  (argument)
test_jit.py:14992  def __init__(self, embed_dim, num_heads)  (argument)
/aosp_15_r20/external/pytorch/torch/csrc/api/include/torch/nn/functional/
activation.h:673  const auto& embed_dim = query_sizes[2];  (variable)
/aosp_15_r20/external/tensorflow/tensorflow/python/kernel_tests/nn_ops/
embedding_ops_test.py:812  def _random_weights(self, vocab_size=4, embed_dim=4, num_shards=1)  (argument)
/aosp_15_r20/external/pytorch/test/cpp/api/
sequential.cpp:500  int64_t embed_dim = 8;  (in TEST_F(), local)
/aosp_15_r20/external/pytorch/torch/nn/modules/
activation.py:1042  embed_dim  (argument)
/aosp_15_r20/external/pytorch/aten/src/ATen/native/transformers/cuda/
attention.cu:483  const int64_t embed_dim  (in native_multi_head_attention_cuda())
/aosp_15_r20/external/pytorch/test/inductor/
test_cpu_repro.py:295  embed_dim  (argument)