.. xref: /aosp_15_r20/external/pytorch/docs/source/nn.functional.rst (revision da0073e96a02ea20f0ac840b70461e3646d07c45)
.. role:: hidden
    :class: hidden-section

torch.nn.functional
===================

.. currentmodule:: torch.nn.functional

Convolution functions
----------------------------------

.. autosummary::
    :toctree: generated
    :nosignatures:

    conv1d
    conv2d
    conv3d
    conv_transpose1d
    conv_transpose2d
    conv_transpose3d
    unfold
    fold

Pooling functions
----------------------------------

.. autosummary::
    :toctree: generated
    :nosignatures:

    avg_pool1d
    avg_pool2d
    avg_pool3d
    max_pool1d
    max_pool2d
    max_pool3d
    max_unpool1d
    max_unpool2d
    max_unpool3d
    lp_pool1d
    lp_pool2d
    lp_pool3d
    adaptive_max_pool1d
    adaptive_max_pool2d
    adaptive_max_pool3d
    adaptive_avg_pool1d
    adaptive_avg_pool2d
    adaptive_avg_pool3d
    fractional_max_pool2d
    fractional_max_pool3d

Attention Mechanisms
-------------------------------

The :mod:`torch.nn.attention.bias` module contains attention biases that are
designed to be used with :func:`scaled_dot_product_attention`.

.. autosummary::
    :toctree: generated
    :nosignatures:

    scaled_dot_product_attention

Non-linear activation functions
-------------------------------

.. autosummary::
    :toctree: generated
    :nosignatures:

    threshold
    threshold_
    relu
    relu_
    hardtanh
    hardtanh_
    hardswish
    relu6
    elu
    elu_
    selu
    celu
    leaky_relu
    leaky_relu_
    prelu
    rrelu
    rrelu_
    glu
    gelu
    logsigmoid
    hardshrink
    tanhshrink
    softsign
    softplus
    softmin
    softmax
    softshrink
    gumbel_softmax
    log_softmax
    tanh
    sigmoid
    hardsigmoid
    silu
    mish
    batch_norm
    group_norm
    instance_norm
    layer_norm
    local_response_norm
    rms_norm
    normalize

.. _Link 1: https://arxiv.org/abs/1611.00712
.. _Link 2: https://arxiv.org/abs/1611.01144

Linear functions
----------------

.. autosummary::
    :toctree: generated
    :nosignatures:

    linear
    bilinear

Dropout functions
-----------------

.. autosummary::
    :toctree: generated
    :nosignatures:

    dropout
    alpha_dropout
    feature_alpha_dropout
    dropout1d
    dropout2d
    dropout3d

Sparse functions
----------------------------------

.. autosummary::
    :toctree: generated
    :nosignatures:

    embedding
    embedding_bag
    one_hot

Distance functions
----------------------------------

.. autosummary::
    :toctree: generated
    :nosignatures:

    pairwise_distance
    cosine_similarity
    pdist


Loss functions
--------------

.. autosummary::
    :toctree: generated
    :nosignatures:

    binary_cross_entropy
    binary_cross_entropy_with_logits
    poisson_nll_loss
    cosine_embedding_loss
    cross_entropy
    ctc_loss
    gaussian_nll_loss
    hinge_embedding_loss
    kl_div
    l1_loss
    mse_loss
    margin_ranking_loss
    multilabel_margin_loss
    multilabel_soft_margin_loss
    multi_margin_loss
    nll_loss
    huber_loss
    smooth_l1_loss
    soft_margin_loss
    triplet_margin_loss
    triplet_margin_with_distance_loss

Vision functions
----------------

.. autosummary::
    :toctree: generated
    :nosignatures:

    pixel_shuffle
    pixel_unshuffle
    pad
    interpolate
    upsample
    upsample_nearest
    upsample_bilinear
    grid_sample
    affine_grid

DataParallel functions (multi-GPU, distributed)
-----------------------------------------------

:hidden:`data_parallel`
~~~~~~~~~~~~~~~~~~~~~~~

.. autosummary::
    :toctree: generated
    :nosignatures:

    torch.nn.parallel.data_parallel
221