/*
 * Copyright © 2022 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include "anv_nir.h"

#include "compiler/brw_nir.h"
const struct anv_descriptor_set_layout *
anv_pipeline_layout_get_push_set(const struct anv_pipeline_sets_layout *layout,
                                 uint8_t *set_idx)
{
   for (unsigned s = 0; s < ARRAY_SIZE(layout->set); s++) {
      struct anv_descriptor_set_layout *set_layout = layout->set[s].layout;

      if (!set_layout ||
          !(set_layout->flags &
            VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR))
         continue;

      if (set_idx)
         *set_idx = s;

      return set_layout;
   }

   return NULL;
}

/* This function returns a bitfield of the descriptors used in the push
 * descriptor set. It can only be called before
 * anv_nir_apply_pipeline_layout(), because the information it relies on is
 * lost once the pipeline layout has been applied.
 */
uint32_t
anv_nir_compute_used_push_descriptors(nir_shader *shader,
                                      const struct anv_pipeline_sets_layout *layout)
{
   uint8_t push_set;
   const struct anv_descriptor_set_layout *push_set_layout =
      anv_pipeline_layout_get_push_set(layout, &push_set);
   if (push_set_layout == NULL)
      return 0;

   uint32_t used_push_bindings = 0;
   nir_foreach_variable_with_modes(var, shader,
                                   nir_var_uniform |
                                   nir_var_image |
                                   nir_var_mem_ubo |
                                   nir_var_mem_ssbo) {
      if (var->data.descriptor_set == push_set) {
         uint32_t desc_idx =
            push_set_layout->binding[var->data.binding].descriptor_index;
         assert(desc_idx < MAX_PUSH_DESCRIPTORS);
         used_push_bindings |= BITFIELD_BIT(desc_idx);
      }
   }

   nir_foreach_function_impl(impl, shader) {
      nir_foreach_block(block, impl) {
         nir_foreach_instr(instr, block) {
            if (instr->type != nir_instr_type_intrinsic)
               continue;

            nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
            if (intrin->intrinsic != nir_intrinsic_vulkan_resource_index)
               continue;

            uint8_t set = nir_intrinsic_desc_set(intrin);
            if (set != push_set)
               continue;

            uint32_t binding = nir_intrinsic_binding(intrin);
            uint32_t desc_idx =
               push_set_layout->binding[binding].descriptor_index;
            assert(desc_idx < MAX_PUSH_DESCRIPTORS);

            used_push_bindings |= BITFIELD_BIT(desc_idx);
         }
      }
   }

   return used_push_bindings;
}
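
/* A minimal usage sketch (hypothetical caller; `shader` and `layout` are
 * assumed to come from the pipeline compilation path, before
 * anv_nir_apply_pipeline_layout() runs):
 *
 *    uint32_t used = anv_nir_compute_used_push_descriptors(shader, layout);
 *    u_foreach_bit(i, used) {
 *       // Descriptor i of the push descriptor set is referenced by the
 *       // shader.
 *    }
 */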

/* This function checks whether the shader accesses the push descriptor
 * buffer. It must be called after anv_nir_compute_push_layout().
 */
bool
anv_nir_loads_push_desc_buffer(nir_shader *nir,
                               const struct anv_pipeline_sets_layout *layout,
                               const struct anv_pipeline_bind_map *bind_map)
{
   uint8_t push_set;
   const struct anv_descriptor_set_layout *push_set_layout =
      anv_pipeline_layout_get_push_set(layout, &push_set);
   if (push_set_layout == NULL)
      return false;

   nir_foreach_function_impl(impl, nir) {
      nir_foreach_block(block, impl) {
         nir_foreach_instr(instr, block) {
            if (instr->type != nir_instr_type_intrinsic)
               continue;

            nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
            if (intrin->intrinsic != nir_intrinsic_load_ubo)
               continue;

            const unsigned bt_idx =
               brw_nir_ubo_surface_index_get_bti(intrin->src[0]);
            if (bt_idx == UINT32_MAX)
               continue;

            const struct anv_pipeline_binding *binding =
               &bind_map->surface_to_descriptor[bt_idx];
            if ((binding->set == ANV_DESCRIPTOR_SET_DESCRIPTORS ||
                 binding->set == ANV_DESCRIPTOR_SET_DESCRIPTORS_BUFFER) &&
                binding->index == push_set) {
               return true;
            }
         }
      }
   }

   return false;
}
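
/* A minimal usage sketch (hypothetical caller; the result could feed a
 * "does this pipeline still need the push descriptor buffer" decision):
 *
 *    if (anv_nir_loads_push_desc_buffer(nir, layout, bind_map)) {
 *       // Keep the push descriptor buffer available: at least one
 *       // load_ubo reads descriptors through it.
 *    }
 */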

/* This function computes a bitfield of all the UBO bindings in the push
 * descriptor set that are fully promoted to push constants. If a binding's
 * bit is set in the field, the corresponding binding table entry will not
 * be accessed by the shader. This function must be called after
 * anv_nir_compute_push_layout().
 */
uint32_t
anv_nir_push_desc_ubo_fully_promoted(nir_shader *nir,
                                     const struct anv_pipeline_sets_layout *layout,
                                     const struct anv_pipeline_bind_map *bind_map)
{
   uint8_t push_set;
   const struct anv_descriptor_set_layout *push_set_layout =
      anv_pipeline_layout_get_push_set(layout, &push_set);
   if (push_set_layout == NULL)
      return 0;

   /* Assume every UBO can be promoted first. */
   uint32_t ubos_fully_promoted = 0;
   for (uint32_t b = 0; b < push_set_layout->binding_count; b++) {
      const struct anv_descriptor_set_binding_layout *bind_layout =
         &push_set_layout->binding[b];
      if (bind_layout->type == -1)
         continue;

      assert(bind_layout->descriptor_index < MAX_PUSH_DESCRIPTORS);
      if (bind_layout->type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER)
         ubos_fully_promoted |= BITFIELD_BIT(bind_layout->descriptor_index);
   }

   /* For each load_ubo intrinsic, if the descriptor index or the offset is
    * not a constant, the load cannot have been promoted to a push constant.
    * Otherwise check the offset + size against the push ranges.
    */
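   /* Illustration of the constant-offset check below: push ranges are
    * expressed in 32-byte units, so a load of `load_bytes` bytes at byte
    * offset `load_offset` is covered by a range iff
    *
    *    range.start * 32 <= load_offset &&
    *    load_offset + load_bytes <= (range.start + range.length) * 32
    *
    * e.g. a 16-byte load at offset 64 fits a range with start=2, length=1
    * (bytes 64..96), but a 16-byte load at offset 88 does not.
    */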
   nir_foreach_function_impl(impl, nir) {
      nir_foreach_block(block, impl) {
         nir_foreach_instr(instr, block) {
            if (instr->type != nir_instr_type_intrinsic)
               continue;

            nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
            if (intrin->intrinsic != nir_intrinsic_load_ubo)
               continue;

            /* Don't check load_ubo intrinsics loading from descriptor
             * buffers.
             */
            nir_intrinsic_instr *resource =
               intrin->src[0].ssa->parent_instr->type == nir_instr_type_intrinsic ?
               nir_instr_as_intrinsic(intrin->src[0].ssa->parent_instr) : NULL;
            if (resource == NULL || resource->intrinsic != nir_intrinsic_resource_intel)
               continue;

            /* Skip load_ubo intrinsics not loading from the push descriptor
             * set.
             */
            if (nir_intrinsic_desc_set(resource) != push_set)
               continue;

            uint32_t binding = nir_intrinsic_binding(resource);
            /* If we have indirect indexing in the binding, no push promotion
             * is possible for the entire binding.
             */
            if (!nir_src_is_const(resource->src[1])) {
               for (uint32_t i = 0; i < push_set_layout->binding[binding].array_size; i++) {
                  ubos_fully_promoted &=
                     ~BITFIELD_BIT(push_set_layout->binding[binding].descriptor_index + i);
               }
               continue;
            }

            const nir_const_value *const_bt_id =
               nir_src_as_const_value(resource->src[1]);
            uint32_t bt_id = const_bt_id[0].u32;

            const struct anv_pipeline_binding *pipe_bind =
               &bind_map->surface_to_descriptor[bt_id];

            const uint32_t desc_idx =
               push_set_layout->binding[binding].descriptor_index;
            /* If the load offset is dynamic, we can't tell whether the load
             * was promoted or not.
             */
            const nir_const_value *const_load_offset =
               nir_src_as_const_value(intrin->src[1]);
            if (const_load_offset == NULL) {
               ubos_fully_promoted &= ~BITFIELD_BIT(desc_idx);
               continue;
            }

            /* Check if the load was promoted to a push constant. */
            const unsigned load_offset = const_load_offset[0].u32;
            const int load_bytes = nir_intrinsic_dest_components(intrin) *
               (intrin->def.bit_size / 8);

            bool promoted = false;
            for (unsigned i = 0; i < ARRAY_SIZE(bind_map->push_ranges); i++) {
               if (bind_map->push_ranges[i].set == pipe_bind->set &&
                   bind_map->push_ranges[i].index == desc_idx &&
                   bind_map->push_ranges[i].start * 32 <= load_offset &&
                   (bind_map->push_ranges[i].start +
                    bind_map->push_ranges[i].length) * 32 >=
                   (load_offset + load_bytes)) {
                  promoted = true;
                  break;
               }
            }

            if (!promoted)
               ubos_fully_promoted &= ~BITFIELD_BIT(desc_idx);
         }
      }
   }

   return ubos_fully_promoted;
}
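
/* A minimal usage sketch (hypothetical caller combining the analyses in this
 * file; `nir`, `layout` and `bind_map` are assumed to come from the pipeline
 * compilation path, after anv_nir_compute_push_layout()):
 *
 *    uint32_t promoted =
 *       anv_nir_push_desc_ubo_fully_promoted(nir, layout, bind_map);
 *    u_foreach_bit(i, promoted) {
 *       // UBO descriptor i is entirely served by push constants; its
 *       // binding table entry will not be read by the shader.
 *    }
 */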