/*
 * Copyright © 2022 Google, Inc.
 * SPDX-License-Identifier: MIT
 */

#include "nir.h"
#include "nir_builder.h"

#include "ir3/ir3_descriptor.h"
11 static bool
lower_intrinsic(nir_builder * b,nir_intrinsic_instr * intr)12 lower_intrinsic(nir_builder *b, nir_intrinsic_instr *intr)
13 {
14 unsigned desc_offset;
15
16 switch (intr->intrinsic) {
17 case nir_intrinsic_load_ssbo:
18 case nir_intrinsic_store_ssbo:
19 case nir_intrinsic_ssbo_atomic:
20 case nir_intrinsic_ssbo_atomic_swap:
21 case nir_intrinsic_get_ssbo_size:
22 desc_offset = IR3_BINDLESS_SSBO_OFFSET;
23 break;
24 case nir_intrinsic_image_load:
25 case nir_intrinsic_image_store:
26 case nir_intrinsic_image_atomic:
27 case nir_intrinsic_image_atomic_swap:
28 case nir_intrinsic_image_size:
29 case nir_intrinsic_image_samples:
30 desc_offset = IR3_BINDLESS_IMAGE_OFFSET;
31 break;
32 default:
33 return false;
34 }
35
36 unsigned buffer_src;
37 if (intr->intrinsic == nir_intrinsic_store_ssbo) {
38 /* store_ssbo has the value first, and ssbo src as 2nd src: */
39 buffer_src = 1;
40 } else {
41 /* the rest have ssbo src as 1st src: */
42 buffer_src = 0;
43 }
44
45 unsigned set = ir3_shader_descriptor_set(b->shader->info.stage);
46 nir_def *src = intr->src[buffer_src].ssa;
47 src = nir_iadd_imm(b, src, desc_offset);
48 /* An out-of-bounds index into an SSBO/image array can cause a GPU fault
49 * on access to the descriptor (I don't see any hw mechanism to bound the
50 * access). We could just allow the resulting iova fault (it is a read
51 * fault, so shouldn't corrupt anything), but at the cost of one extra
52 * instruction (as long as IR3_BINDLESS_DESC_COUNT is a power-of-two) we
53 * can avoid the dmesg spam and users thinking this is a driver bug:
54 */
55 src = nir_umod_imm(b, src, IR3_BINDLESS_DESC_COUNT);
56 nir_def *bindless = nir_bindless_resource_ir3(b, 32, src, set);
57 nir_src_rewrite(&intr->src[buffer_src], bindless);
58
59 return true;
60 }
61
62 static bool
lower_instr(nir_builder * b,nir_instr * instr,void * cb_data)63 lower_instr(nir_builder *b, nir_instr *instr, void *cb_data)
64 {
65 b->cursor = nir_before_instr(instr);
66 switch (instr->type) {
67 case nir_instr_type_intrinsic:
68 return lower_intrinsic(b, nir_instr_as_intrinsic(instr));
69 default:
70 return false;
71 }
72 }
73
74 /**
75 * Lower bindful image/SSBO to bindless
76 */
77 bool
ir3_nir_lower_io_to_bindless(nir_shader * shader)78 ir3_nir_lower_io_to_bindless(nir_shader *shader)
79 {
80 /* Note: We don't currently support API level bindless, as we assume we
81 * can remap bindful images/SSBOs to bindless while controlling the entire
82 * descriptor set space.
83 *
84 * If we needed to support API level bindless, we could probably just remap
85 * bindful ops to a range of the descriptor set space that does not conflict
86 * with what we advertise for bindless descriptors? But I'm not sure that
87 * ARB_bindless_texture is of too much value to care about, especially for
88 * GLES
89 */
90 assert(!shader->info.uses_bindless);
91
92 return nir_shader_instructions_pass(shader, lower_instr, nir_metadata_none, NULL);
93 }
94