/*
 * Copyright © 2013 Rob Clark <[email protected]>
 * SPDX-License-Identifier: MIT
 *
 * Authors:
 *    Rob Clark <[email protected]>
 */

#include "pipe/p_state.h"
#include "util/format/u_format.h"
#include "util/u_inlines.h"
#include "util/u_memory.h"
#include "util/u_string.h"

#include "freedreno_draw.h"
#include "freedreno_resource.h"
#include "freedreno_state.h"

#include "fd3_context.h"
#include "fd3_emit.h"
#include "fd3_format.h"
#include "fd3_gmem.h"
#include "fd3_program.h"
#include "fd3_zsa.h"

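/* set the internal (solid/blit) program to use for a gmem operation,
 * looking up the compiled variants from the shader cache:
 */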
static void
fd3_gmem_emit_set_prog(struct fd_context *ctx, struct fd3_emit *emit,
                       struct fd_program_stateobj *prog)
{
   emit->skip_consts = true;
   emit->key.vs = prog->vs;
   emit->key.fs = prog->fs;
   emit->prog = fd3_program_state(
      ir3_cache_lookup(ctx->shader_cache, &emit->key, &ctx->debug));
   /* reset the fd3_emit_get_*p cache */
   emit->vs = NULL;
   emit->fs = NULL;
}

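/* emit MRT buf-info/base state for the color buffers, pointing either at
 * gmem (when bin_w is non-zero) or directly at the resource in sysmem:
 */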
static void
emit_mrt(struct fd_ringbuffer *ring, unsigned nr_bufs,
         struct pipe_surface **bufs, const uint32_t *bases, uint32_t bin_w,
         bool decode_srgb)
{
   enum a3xx_tile_mode tile_mode;
   unsigned i;

   for (i = 0; i < A3XX_MAX_RENDER_TARGETS; i++) {
      enum pipe_format pformat = 0;
      enum a3xx_color_fmt format = 0;
      enum a3xx_color_swap swap = WZYX;
      bool srgb = false;
      struct fd_resource *rsc = NULL;
      uint32_t stride = 0;
      uint32_t base = 0;
      uint32_t offset = 0;

      if (bin_w) {
         tile_mode = TILE_32X32;
      } else {
         tile_mode = LINEAR;
      }

      if ((i < nr_bufs) && bufs[i]) {
         struct pipe_surface *psurf = bufs[i];

         rsc = fd_resource(psurf->texture);
         pformat = psurf->format;
         /* In case we're drawing to Z32F_S8, the "color" actually goes to
          * the stencil
          */
         if (rsc->stencil) {
            rsc = rsc->stencil;
            pformat = rsc->b.b.format;
            if (bases)
               bases++;
         }
         format = fd3_pipe2color(pformat);
         if (decode_srgb)
            srgb = util_format_is_srgb(pformat);
         else
            pformat = util_format_linear(pformat);

         assert(psurf->u.tex.first_layer == psurf->u.tex.last_layer);

         offset = fd_resource_offset(rsc, psurf->u.tex.level,
                                     psurf->u.tex.first_layer);
         swap = rsc->layout.tile_mode ? WZYX : fd3_pipe2swap(pformat);

         if (bin_w) {
            stride = bin_w << fdl_cpp_shift(&rsc->layout);

            if (bases) {
               base = bases[i];
            }
         } else {
            stride = fd_resource_pitch(rsc, psurf->u.tex.level);
            tile_mode = rsc->layout.tile_mode;
         }
      } else if (i < nr_bufs && bases) {
         base = bases[i];
      }

      OUT_PKT0(ring, REG_A3XX_RB_MRT_BUF_INFO(i), 2);
      OUT_RING(ring, A3XX_RB_MRT_BUF_INFO_COLOR_FORMAT(format) |
                        A3XX_RB_MRT_BUF_INFO_COLOR_TILE_MODE(tile_mode) |
                        A3XX_RB_MRT_BUF_INFO_COLOR_BUF_PITCH(stride) |
                        A3XX_RB_MRT_BUF_INFO_COLOR_SWAP(swap) |
                        COND(srgb, A3XX_RB_MRT_BUF_INFO_COLOR_SRGB));
      if (bin_w || (i >= nr_bufs) || !bufs[i]) {
         OUT_RING(ring, A3XX_RB_MRT_BUF_BASE_COLOR_BUF_BASE(base));
      } else {
         OUT_RELOC(ring, rsc->bo, offset, 0, -1);
      }

      OUT_PKT0(ring, REG_A3XX_SP_FS_IMAGE_OUTPUT_REG(i), 1);
      OUT_RING(ring, COND((i < nr_bufs) && bufs[i],
                          A3XX_SP_FS_IMAGE_OUTPUT_REG_MRTFORMAT(
                             fd3_fs_output_format(pformat))));
   }
}

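/* decide whether the hw binning (visibility stream) pass can be used: */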
static bool
use_hw_binning(struct fd_batch *batch)
{
   const struct fd_gmem_stateobj *gmem = batch->gmem_state;

   /* workaround: combining scissor optimization and hw binning
    * seems problematic.  Seems like we end up with a mismatch
    * between binning pass and rendering pass, wrt. where the hw
    * thinks the vertices belong.  And the blob driver doesn't
    * seem to implement anything like scissor optimization, so
    * not entirely sure what I might be missing.
    *
    * But scissor optimization is mainly for window managers,
    * which don't have many vertices (and therefore don't
    * benefit much from binning pass).
    *
    * So for now just disable binning if scissor optimization is
    * used.
    */
   if (gmem->minx || gmem->miny)
      return false;

   if ((gmem->maxpw * gmem->maxph) > 32)
      return false;

   if ((gmem->maxpw > 15) || (gmem->maxph > 15))
      return false;

   return fd_binning_enabled && ((gmem->nbins_x * gmem->nbins_y) > 2);
}

/* workaround for (hlsq?) lockup with hw binning on a3xx patchlevel 0 */
static void update_vsc_pipe(struct fd_batch *batch);
static void
emit_binning_workaround(struct fd_batch *batch) assert_dt
{
   struct fd_context *ctx = batch->ctx;
   const struct fd_gmem_stateobj *gmem = batch->gmem_state;
   struct fd_ringbuffer *ring = batch->gmem;
   struct fd3_emit emit = {
      .debug = &ctx->debug,
      .vtx = &ctx->solid_vbuf_state,
      .key =
         {
            .vs = ctx->solid_prog.vs,
            .fs = ctx->solid_prog.fs,
         },
   };

   fd3_gmem_emit_set_prog(ctx, &emit, &ctx->solid_prog);

   OUT_PKT0(ring, REG_A3XX_RB_MODE_CONTROL, 2);
   OUT_RING(ring, A3XX_RB_MODE_CONTROL_RENDER_MODE(RB_RESOLVE_PASS) |
                     A3XX_RB_MODE_CONTROL_MARB_CACHE_SPLIT_MODE |
                     A3XX_RB_MODE_CONTROL_MRT(0));
   OUT_RING(ring, A3XX_RB_RENDER_CONTROL_BIN_WIDTH(32) |
                     A3XX_RB_RENDER_CONTROL_DISABLE_COLOR_PIPE |
                     A3XX_RB_RENDER_CONTROL_ALPHA_TEST_FUNC(FUNC_NEVER));

   OUT_PKT0(ring, REG_A3XX_RB_COPY_CONTROL, 4);
   OUT_RING(ring, A3XX_RB_COPY_CONTROL_MSAA_RESOLVE(MSAA_ONE) |
                     A3XX_RB_COPY_CONTROL_MODE(0) |
                     A3XX_RB_COPY_CONTROL_GMEM_BASE(0));
   OUT_RELOC(ring, fd_resource(ctx->solid_vbuf)->bo, 0x20, 0,
             -1); /* RB_COPY_DEST_BASE */
   OUT_RING(ring, A3XX_RB_COPY_DEST_PITCH_PITCH(128));
   OUT_RING(ring, A3XX_RB_COPY_DEST_INFO_TILE(LINEAR) |
                     A3XX_RB_COPY_DEST_INFO_FORMAT(RB_R8G8B8A8_UNORM) |
                     A3XX_RB_COPY_DEST_INFO_SWAP(WZYX) |
                     A3XX_RB_COPY_DEST_INFO_COMPONENT_ENABLE(0xf) |
                     A3XX_RB_COPY_DEST_INFO_ENDIAN(ENDIAN_NONE));

   OUT_PKT0(ring, REG_A3XX_GRAS_SC_CONTROL, 1);
   OUT_RING(ring, A3XX_GRAS_SC_CONTROL_RENDER_MODE(RB_RESOLVE_PASS) |
                     A3XX_GRAS_SC_CONTROL_MSAA_SAMPLES(MSAA_ONE) |
                     A3XX_GRAS_SC_CONTROL_RASTER_MODE(1));

   fd3_program_emit(ring, &emit, 0, NULL);
   fd3_emit_vertex_bufs(ring, &emit);

   OUT_PKT0(ring, REG_A3XX_HLSQ_CONTROL_0_REG, 4);
   OUT_RING(ring, A3XX_HLSQ_CONTROL_0_REG_FSTHREADSIZE(FOUR_QUADS) |
                     A3XX_HLSQ_CONTROL_0_REG_FSSUPERTHREADENABLE |
                     A3XX_HLSQ_CONTROL_0_REG_RESERVED2 |
                     A3XX_HLSQ_CONTROL_0_REG_SPCONSTFULLUPDATE);
   OUT_RING(ring, A3XX_HLSQ_CONTROL_1_REG_VSTHREADSIZE(TWO_QUADS) |
                     A3XX_HLSQ_CONTROL_1_REG_VSSUPERTHREADENABLE);
   OUT_RING(ring, A3XX_HLSQ_CONTROL_2_REG_PRIMALLOCTHRESHOLD(31));
   OUT_RING(ring, 0); /* HLSQ_CONTROL_3_REG */

   OUT_PKT0(ring, REG_A3XX_HLSQ_CONST_FSPRESV_RANGE_REG, 1);
   OUT_RING(ring, A3XX_HLSQ_CONST_FSPRESV_RANGE_REG_STARTENTRY(0x20) |
                     A3XX_HLSQ_CONST_FSPRESV_RANGE_REG_ENDENTRY(0x20));

   OUT_PKT0(ring, REG_A3XX_RB_MSAA_CONTROL, 1);
   OUT_RING(ring, A3XX_RB_MSAA_CONTROL_DISABLE |
                     A3XX_RB_MSAA_CONTROL_SAMPLES(MSAA_ONE) |
                     A3XX_RB_MSAA_CONTROL_SAMPLE_MASK(0xffff));

   OUT_PKT0(ring, REG_A3XX_RB_DEPTH_CONTROL, 1);
   OUT_RING(ring, A3XX_RB_DEPTH_CONTROL_ZFUNC(FUNC_NEVER));

   OUT_PKT0(ring, REG_A3XX_RB_STENCIL_CONTROL, 1);
   OUT_RING(ring, A3XX_RB_STENCIL_CONTROL_FUNC(FUNC_NEVER) |
                     A3XX_RB_STENCIL_CONTROL_FAIL(STENCIL_KEEP) |
                     A3XX_RB_STENCIL_CONTROL_ZPASS(STENCIL_KEEP) |
                     A3XX_RB_STENCIL_CONTROL_ZFAIL(STENCIL_KEEP) |
                     A3XX_RB_STENCIL_CONTROL_FUNC_BF(FUNC_NEVER) |
                     A3XX_RB_STENCIL_CONTROL_FAIL_BF(STENCIL_KEEP) |
                     A3XX_RB_STENCIL_CONTROL_ZPASS_BF(STENCIL_KEEP) |
                     A3XX_RB_STENCIL_CONTROL_ZFAIL_BF(STENCIL_KEEP));

   OUT_PKT0(ring, REG_A3XX_GRAS_SU_MODE_CONTROL, 1);
   OUT_RING(ring, A3XX_GRAS_SU_MODE_CONTROL_LINEHALFWIDTH(0.0f));

   OUT_PKT0(ring, REG_A3XX_VFD_INDEX_MIN, 4);
   OUT_RING(ring, 0); /* VFD_INDEX_MIN */
   OUT_RING(ring, 2); /* VFD_INDEX_MAX */
   OUT_RING(ring, 0); /* VFD_INSTANCEID_OFFSET */
   OUT_RING(ring, 0); /* VFD_INDEX_OFFSET */

   OUT_PKT0(ring, REG_A3XX_PC_PRIM_VTX_CNTL, 1);
   OUT_RING(ring,
            A3XX_PC_PRIM_VTX_CNTL_STRIDE_IN_VPC(0) |
               A3XX_PC_PRIM_VTX_CNTL_POLYMODE_FRONT_PTYPE(PC_DRAW_TRIANGLES) |
               A3XX_PC_PRIM_VTX_CNTL_POLYMODE_BACK_PTYPE(PC_DRAW_TRIANGLES) |
               A3XX_PC_PRIM_VTX_CNTL_PROVOKING_VTX_LAST);

   OUT_PKT0(ring, REG_A3XX_GRAS_SC_WINDOW_SCISSOR_TL, 2);
   OUT_RING(ring, A3XX_GRAS_SC_WINDOW_SCISSOR_TL_X(0) |
                     A3XX_GRAS_SC_WINDOW_SCISSOR_TL_Y(1));
   OUT_RING(ring, A3XX_GRAS_SC_WINDOW_SCISSOR_BR_X(0) |
                     A3XX_GRAS_SC_WINDOW_SCISSOR_BR_Y(1));

   OUT_PKT0(ring, REG_A3XX_GRAS_SC_SCREEN_SCISSOR_TL, 2);
   OUT_RING(ring, A3XX_GRAS_SC_SCREEN_SCISSOR_TL_X(0) |
                     A3XX_GRAS_SC_SCREEN_SCISSOR_TL_Y(0));
   OUT_RING(ring, A3XX_GRAS_SC_SCREEN_SCISSOR_BR_X(31) |
                     A3XX_GRAS_SC_SCREEN_SCISSOR_BR_Y(0));

   fd_wfi(batch, ring);
   OUT_PKT0(ring, REG_A3XX_GRAS_CL_VPORT_XOFFSET, 6);
   OUT_RING(ring, A3XX_GRAS_CL_VPORT_XOFFSET(0.0f));
   OUT_RING(ring, A3XX_GRAS_CL_VPORT_XSCALE(1.0f));
   OUT_RING(ring, A3XX_GRAS_CL_VPORT_YOFFSET(0.0f));
   OUT_RING(ring, A3XX_GRAS_CL_VPORT_YSCALE(1.0f));
   OUT_RING(ring, A3XX_GRAS_CL_VPORT_ZOFFSET(0.0f));
   OUT_RING(ring, A3XX_GRAS_CL_VPORT_ZSCALE(1.0f));

   OUT_PKT0(ring, REG_A3XX_GRAS_CL_CLIP_CNTL, 1);
   OUT_RING(ring, A3XX_GRAS_CL_CLIP_CNTL_CLIP_DISABLE |
                     A3XX_GRAS_CL_CLIP_CNTL_ZFAR_CLIP_DISABLE |
                     A3XX_GRAS_CL_CLIP_CNTL_VP_CLIP_CODE_IGNORE |
                     A3XX_GRAS_CL_CLIP_CNTL_VP_XFORM_DISABLE |
                     A3XX_GRAS_CL_CLIP_CNTL_PERSP_DIVISION_DISABLE);

   OUT_PKT0(ring, REG_A3XX_GRAS_CL_GB_CLIP_ADJ, 1);
   OUT_RING(ring, A3XX_GRAS_CL_GB_CLIP_ADJ_HORZ(0) |
                     A3XX_GRAS_CL_GB_CLIP_ADJ_VERT(0));

   OUT_PKT3(ring, CP_DRAW_INDX_2, 5);
   OUT_RING(ring, 0x00000000); /* viz query info. */
   OUT_RING(ring, DRAW(DI_PT_RECTLIST, DI_SRC_SEL_IMMEDIATE, INDEX_SIZE_32_BIT,
                       IGNORE_VISIBILITY, 0));
   OUT_RING(ring, 2); /* NumIndices */
   OUT_RING(ring, 2);
   OUT_RING(ring, 1);
   fd_reset_wfi(batch);

   OUT_PKT0(ring, REG_A3XX_HLSQ_CONTROL_0_REG, 1);
   OUT_RING(ring, A3XX_HLSQ_CONTROL_0_REG_FSTHREADSIZE(TWO_QUADS));

   OUT_PKT0(ring, REG_A3XX_VFD_PERFCOUNTER0_SELECT, 1);
   OUT_RING(ring, 0x00000000);

   fd_wfi(batch, ring);
   OUT_PKT0(ring, REG_A3XX_VSC_BIN_SIZE, 1);
   OUT_RING(ring, A3XX_VSC_BIN_SIZE_WIDTH(gmem->bin_w) |
                     A3XX_VSC_BIN_SIZE_HEIGHT(gmem->bin_h));

   OUT_PKT0(ring, REG_A3XX_GRAS_SC_CONTROL, 1);
   OUT_RING(ring, A3XX_GRAS_SC_CONTROL_RENDER_MODE(RB_RENDERING_PASS) |
                     A3XX_GRAS_SC_CONTROL_MSAA_SAMPLES(MSAA_ONE) |
                     A3XX_GRAS_SC_CONTROL_RASTER_MODE(0));

   OUT_PKT0(ring, REG_A3XX_GRAS_CL_CLIP_CNTL, 1);
   OUT_RING(ring, 0x00000000);
}

/* transfer from gmem to system memory (ie. normal RAM) */

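/* emit a resolve blit for a single surface (tile -> sysmem): */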
static void
emit_gmem2mem_surf(struct fd_batch *batch,
                   enum adreno_rb_copy_control_mode mode, bool stencil,
                   uint32_t base, struct pipe_surface *psurf)
{
   struct fd_ringbuffer *ring = batch->gmem;
   struct fd_resource *rsc = fd_resource(psurf->texture);
   enum pipe_format format = psurf->format;

   if (!rsc->valid)
      return;

   if (stencil) {
      rsc = rsc->stencil;
      format = rsc->b.b.format;
   }

   uint32_t offset =
      fd_resource_offset(rsc, psurf->u.tex.level, psurf->u.tex.first_layer);
   uint32_t pitch = fd_resource_pitch(rsc, psurf->u.tex.level);

   assert(psurf->u.tex.first_layer == psurf->u.tex.last_layer);

   OUT_PKT0(ring, REG_A3XX_RB_COPY_CONTROL, 4);
   OUT_RING(ring, A3XX_RB_COPY_CONTROL_MSAA_RESOLVE(MSAA_ONE) |
                     A3XX_RB_COPY_CONTROL_MODE(mode) |
                     A3XX_RB_COPY_CONTROL_GMEM_BASE(base) |
                     COND(format == PIPE_FORMAT_Z32_FLOAT ||
                             format == PIPE_FORMAT_Z32_FLOAT_S8X24_UINT,
                          A3XX_RB_COPY_CONTROL_DEPTH32_RESOLVE));

   OUT_RELOC(ring, rsc->bo, offset, 0, -1); /* RB_COPY_DEST_BASE */
   OUT_RING(ring, A3XX_RB_COPY_DEST_PITCH_PITCH(pitch));
   OUT_RING(ring, A3XX_RB_COPY_DEST_INFO_TILE(rsc->layout.tile_mode) |
                     A3XX_RB_COPY_DEST_INFO_FORMAT(fd3_pipe2color(format)) |
                     A3XX_RB_COPY_DEST_INFO_COMPONENT_ENABLE(0xf) |
                     A3XX_RB_COPY_DEST_INFO_ENDIAN(ENDIAN_NONE) |
                     A3XX_RB_COPY_DEST_INFO_SWAP(fd3_pipe2swap(format)));

   fd_draw(batch, ring, DI_PT_RECTLIST, IGNORE_VISIBILITY,
           DI_SRC_SEL_AUTO_INDEX, 2, 0, INDEX_SIZE_IGN, 0, 0, NULL);
}

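/* per-tile resolve, copying depth/stencil and color from gmem back out to
 * their backing resources:
 */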
static void
fd3_emit_tile_gmem2mem(struct fd_batch *batch,
                       const struct fd_tile *tile) assert_dt
{
   struct fd_context *ctx = batch->ctx;
   struct fd_ringbuffer *ring = batch->gmem;
   const struct fd_gmem_stateobj *gmem = batch->gmem_state;
   struct pipe_framebuffer_state *pfb = &batch->framebuffer;
   struct fd3_emit emit = {.debug = &ctx->debug,
                           .vtx = &ctx->solid_vbuf_state,
                           .key = {
                              .vs = ctx->solid_prog.vs,
                              .fs = ctx->solid_prog.fs,
                           }};
   int i;

   emit.prog = fd3_program_state(
      ir3_cache_lookup(ctx->shader_cache, &emit.key, &ctx->debug));

   OUT_PKT0(ring, REG_A3XX_RB_DEPTH_CONTROL, 1);
   OUT_RING(ring, A3XX_RB_DEPTH_CONTROL_ZFUNC(FUNC_NEVER));

   OUT_PKT0(ring, REG_A3XX_RB_STENCIL_CONTROL, 1);
   OUT_RING(ring, A3XX_RB_STENCIL_CONTROL_FUNC(FUNC_NEVER) |
                     A3XX_RB_STENCIL_CONTROL_FAIL(STENCIL_KEEP) |
                     A3XX_RB_STENCIL_CONTROL_ZPASS(STENCIL_KEEP) |
                     A3XX_RB_STENCIL_CONTROL_ZFAIL(STENCIL_KEEP) |
                     A3XX_RB_STENCIL_CONTROL_FUNC_BF(FUNC_NEVER) |
                     A3XX_RB_STENCIL_CONTROL_FAIL_BF(STENCIL_KEEP) |
                     A3XX_RB_STENCIL_CONTROL_ZPASS_BF(STENCIL_KEEP) |
                     A3XX_RB_STENCIL_CONTROL_ZFAIL_BF(STENCIL_KEEP));

   OUT_PKT0(ring, REG_A3XX_RB_STENCILREFMASK, 2);
   OUT_RING(ring, 0xff000000 | A3XX_RB_STENCILREFMASK_STENCILREF(0) |
                     A3XX_RB_STENCILREFMASK_STENCILMASK(0) |
                     A3XX_RB_STENCILREFMASK_STENCILWRITEMASK(0xff));
   OUT_RING(ring, 0xff000000 | A3XX_RB_STENCILREFMASK_STENCILREF(0) |
                     A3XX_RB_STENCILREFMASK_STENCILMASK(0) |
                     A3XX_RB_STENCILREFMASK_STENCILWRITEMASK(0xff));

   OUT_PKT0(ring, REG_A3XX_GRAS_SU_MODE_CONTROL, 1);
   OUT_RING(ring, A3XX_GRAS_SU_MODE_CONTROL_LINEHALFWIDTH(0));

   OUT_PKT0(ring, REG_A3XX_GRAS_CL_CLIP_CNTL, 1);
   OUT_RING(ring, 0x00000000); /* GRAS_CL_CLIP_CNTL */

   fd_wfi(batch, ring);
   OUT_PKT0(ring, REG_A3XX_GRAS_CL_VPORT_XOFFSET, 6);
   OUT_RING(ring, A3XX_GRAS_CL_VPORT_XOFFSET((float)pfb->width / 2.0f - 0.5f));
   OUT_RING(ring, A3XX_GRAS_CL_VPORT_XSCALE((float)pfb->width / 2.0f));
   OUT_RING(ring, A3XX_GRAS_CL_VPORT_YOFFSET((float)pfb->height / 2.0f - 0.5f));
   OUT_RING(ring, A3XX_GRAS_CL_VPORT_YSCALE(-(float)pfb->height / 2.0f));
   OUT_RING(ring, A3XX_GRAS_CL_VPORT_ZOFFSET(0.0f));
   OUT_RING(ring, A3XX_GRAS_CL_VPORT_ZSCALE(1.0f));

   OUT_PKT0(ring, REG_A3XX_RB_MODE_CONTROL, 1);
   OUT_RING(ring, A3XX_RB_MODE_CONTROL_RENDER_MODE(RB_RESOLVE_PASS) |
                     A3XX_RB_MODE_CONTROL_MARB_CACHE_SPLIT_MODE |
                     A3XX_RB_MODE_CONTROL_MRT(0));

   OUT_PKT0(ring, REG_A3XX_RB_RENDER_CONTROL, 1);
   OUT_RING(ring,
            A3XX_RB_RENDER_CONTROL_DISABLE_COLOR_PIPE |
               A3XX_RB_RENDER_CONTROL_ENABLE_GMEM |
               A3XX_RB_RENDER_CONTROL_ALPHA_TEST_FUNC(FUNC_NEVER) |
               A3XX_RB_RENDER_CONTROL_BIN_WIDTH(batch->gmem_state->bin_w));

   OUT_PKT0(ring, REG_A3XX_GRAS_SC_CONTROL, 1);
   OUT_RING(ring, A3XX_GRAS_SC_CONTROL_RENDER_MODE(RB_RESOLVE_PASS) |
                     A3XX_GRAS_SC_CONTROL_MSAA_SAMPLES(MSAA_ONE) |
                     A3XX_GRAS_SC_CONTROL_RASTER_MODE(1));

   OUT_PKT0(ring, REG_A3XX_PC_PRIM_VTX_CNTL, 1);
   OUT_RING(ring,
            A3XX_PC_PRIM_VTX_CNTL_STRIDE_IN_VPC(0) |
               A3XX_PC_PRIM_VTX_CNTL_POLYMODE_FRONT_PTYPE(PC_DRAW_TRIANGLES) |
               A3XX_PC_PRIM_VTX_CNTL_POLYMODE_BACK_PTYPE(PC_DRAW_TRIANGLES) |
               A3XX_PC_PRIM_VTX_CNTL_PROVOKING_VTX_LAST);

   OUT_PKT0(ring, REG_A3XX_GRAS_SC_WINDOW_SCISSOR_TL, 2);
   OUT_RING(ring, A3XX_GRAS_SC_WINDOW_SCISSOR_TL_X(0) |
                     A3XX_GRAS_SC_WINDOW_SCISSOR_TL_Y(0));
   OUT_RING(ring, A3XX_GRAS_SC_WINDOW_SCISSOR_BR_X(pfb->width - 1) |
                     A3XX_GRAS_SC_WINDOW_SCISSOR_BR_Y(pfb->height - 1));

   OUT_PKT0(ring, REG_A3XX_VFD_INDEX_MIN, 4);
   OUT_RING(ring, 0); /* VFD_INDEX_MIN */
   OUT_RING(ring, 2); /* VFD_INDEX_MAX */
   OUT_RING(ring, 0); /* VFD_INSTANCEID_OFFSET */
   OUT_RING(ring, 0); /* VFD_INDEX_OFFSET */

   fd3_program_emit(ring, &emit, 0, NULL);
   fd3_emit_vertex_bufs(ring, &emit);

   if (batch->resolve & (FD_BUFFER_DEPTH | FD_BUFFER_STENCIL)) {
      struct fd_resource *rsc = fd_resource(pfb->zsbuf->texture);
      if (!rsc->stencil || batch->resolve & FD_BUFFER_DEPTH)
         emit_gmem2mem_surf(batch, RB_COPY_DEPTH_STENCIL, false,
                            gmem->zsbuf_base[0], pfb->zsbuf);
      if (rsc->stencil && batch->resolve & FD_BUFFER_STENCIL)
         emit_gmem2mem_surf(batch, RB_COPY_DEPTH_STENCIL, true,
                            gmem->zsbuf_base[1], pfb->zsbuf);
   }

   if (batch->resolve & FD_BUFFER_COLOR) {
      for (i = 0; i < pfb->nr_cbufs; i++) {
         if (!pfb->cbufs[i])
            continue;
         if (!(batch->resolve & (PIPE_CLEAR_COLOR0 << i)))
            continue;
         emit_gmem2mem_surf(batch, RB_COPY_RESOLVE, false, gmem->cbuf_base[i],
                            pfb->cbufs[i]);
      }
   }

   OUT_PKT0(ring, REG_A3XX_RB_MODE_CONTROL, 1);
   OUT_RING(ring, A3XX_RB_MODE_CONTROL_RENDER_MODE(RB_RENDERING_PASS) |
                     A3XX_RB_MODE_CONTROL_MARB_CACHE_SPLIT_MODE |
                     A3XX_RB_MODE_CONTROL_MRT(MAX2(1, pfb->nr_cbufs) - 1));

   OUT_PKT0(ring, REG_A3XX_GRAS_SC_CONTROL, 1);
   OUT_RING(ring, A3XX_GRAS_SC_CONTROL_RENDER_MODE(RB_RENDERING_PASS) |
                     A3XX_GRAS_SC_CONTROL_MSAA_SAMPLES(MSAA_ONE) |
                     A3XX_GRAS_SC_CONTROL_RASTER_MODE(0));
}

/* transfer from system memory to gmem */

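/* emit a blit restoring one or more surfaces from sysmem into gmem: */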
static void
emit_mem2gmem_surf(struct fd_batch *batch, const uint32_t bases[],
                   struct pipe_surface **psurf, uint32_t bufs, uint32_t bin_w)
{
   struct fd_ringbuffer *ring = batch->gmem;
   struct pipe_surface *zsbufs[2];

   assert(bufs > 0);

   OUT_PKT0(ring, REG_A3XX_RB_MODE_CONTROL, 1);
   OUT_RING(ring, A3XX_RB_MODE_CONTROL_RENDER_MODE(RB_RENDERING_PASS) |
                     A3XX_RB_MODE_CONTROL_MARB_CACHE_SPLIT_MODE |
                     A3XX_RB_MODE_CONTROL_MRT(bufs - 1));

   emit_mrt(ring, bufs, psurf, bases, bin_w, false);

   if (psurf[0] && (psurf[0]->format == PIPE_FORMAT_Z32_FLOAT ||
                    psurf[0]->format == PIPE_FORMAT_Z32_FLOAT_S8X24_UINT)) {
      /* Depth is stored as unorm in gmem, so we have to write it in using a
       * special blit shader which writes depth.
       */
      OUT_PKT0(ring, REG_A3XX_RB_DEPTH_CONTROL, 1);
      OUT_RING(ring, (A3XX_RB_DEPTH_CONTROL_FRAG_WRITES_Z |
                      A3XX_RB_DEPTH_CONTROL_Z_WRITE_ENABLE |
                      A3XX_RB_DEPTH_CONTROL_Z_TEST_ENABLE |
                      A3XX_RB_DEPTH_CONTROL_EARLY_Z_DISABLE |
                      A3XX_RB_DEPTH_CONTROL_ZFUNC(FUNC_ALWAYS)));

      OUT_PKT0(ring, REG_A3XX_RB_DEPTH_INFO, 2);
      OUT_RING(ring, A3XX_RB_DEPTH_INFO_DEPTH_BASE(bases[0]) |
                        A3XX_RB_DEPTH_INFO_DEPTH_FORMAT(DEPTHX_32));
      OUT_RING(ring, A3XX_RB_DEPTH_PITCH(4 * batch->gmem_state->bin_w));

      if (psurf[0]->format == PIPE_FORMAT_Z32_FLOAT) {
         OUT_PKT0(ring, REG_A3XX_RB_MRT_CONTROL(0), 1);
         OUT_RING(ring, 0);
      } else {
         /* The gmem_restore_tex logic will put the first buffer's stencil
          * as color. Supply it with the proper information to make that
          * happen.
          */
         zsbufs[0] = zsbufs[1] = psurf[0];
         psurf = zsbufs;
         bufs = 2;
      }
   } else {
      OUT_PKT0(ring, REG_A3XX_SP_FS_OUTPUT_REG, 1);
      OUT_RING(ring, A3XX_SP_FS_OUTPUT_REG_MRT(bufs - 1));
   }

   fd3_emit_gmem_restore_tex(ring, psurf, bufs);

   fd_draw(batch, ring, DI_PT_RECTLIST, IGNORE_VISIBILITY,
           DI_SRC_SEL_AUTO_INDEX, 2, 0, INDEX_SIZE_IGN, 0, 0, NULL);
}

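/* restore the tile's color and depth/stencil contents from sysmem into
 * gmem before rendering:
 */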
static void
fd3_emit_tile_mem2gmem(struct fd_batch *batch,
                       const struct fd_tile *tile) assert_dt
{
   struct fd_context *ctx = batch->ctx;
   const struct fd_gmem_stateobj *gmem = batch->gmem_state;
   struct fd_ringbuffer *ring = batch->gmem;
   struct pipe_framebuffer_state *pfb = &batch->framebuffer;
   struct fd3_emit emit = {
      .debug = &ctx->debug,
      .vtx = &ctx->blit_vbuf_state,
      .sprite_coord_enable = 1,
   };
   /* NOTE: They all use the same VP, this is for vtx bufs. */
   fd3_gmem_emit_set_prog(ctx, &emit, &ctx->blit_prog[0]);

   float x0, y0, x1, y1;
   unsigned bin_w = tile->bin_w;
   unsigned bin_h = tile->bin_h;
   unsigned i;

   /* write texture coordinates to vertexbuf: */
   x0 = ((float)tile->xoff) / ((float)pfb->width);
   x1 = ((float)tile->xoff + bin_w) / ((float)pfb->width);
   y0 = ((float)tile->yoff) / ((float)pfb->height);
   y1 = ((float)tile->yoff + bin_h) / ((float)pfb->height);

   OUT_PKT3(ring, CP_MEM_WRITE, 5);
   OUT_RELOC(ring, fd_resource(ctx->blit_texcoord_vbuf)->bo, 0, 0, 0);
   OUT_RING(ring, fui(x0));
   OUT_RING(ring, fui(y0));
   OUT_RING(ring, fui(x1));
   OUT_RING(ring, fui(y1));

   fd3_emit_cache_flush(batch, ring);

   for (i = 0; i < 4; i++) {
      OUT_PKT0(ring, REG_A3XX_RB_MRT_CONTROL(i), 1);
      OUT_RING(ring, A3XX_RB_MRT_CONTROL_ROP_CODE(ROP_COPY) |
                        A3XX_RB_MRT_CONTROL_DITHER_MODE(DITHER_DISABLE) |
                        A3XX_RB_MRT_CONTROL_COMPONENT_ENABLE(0xf));

      OUT_PKT0(ring, REG_A3XX_RB_MRT_BLEND_CONTROL(i), 1);
      OUT_RING(
         ring,
         A3XX_RB_MRT_BLEND_CONTROL_RGB_SRC_FACTOR(FACTOR_ONE) |
            A3XX_RB_MRT_BLEND_CONTROL_RGB_BLEND_OPCODE(BLEND_DST_PLUS_SRC) |
            A3XX_RB_MRT_BLEND_CONTROL_RGB_DEST_FACTOR(FACTOR_ZERO) |
            A3XX_RB_MRT_BLEND_CONTROL_ALPHA_SRC_FACTOR(FACTOR_ONE) |
            A3XX_RB_MRT_BLEND_CONTROL_ALPHA_BLEND_OPCODE(BLEND_DST_PLUS_SRC) |
            A3XX_RB_MRT_BLEND_CONTROL_ALPHA_DEST_FACTOR(FACTOR_ZERO));
   }

   OUT_PKT0(ring, REG_A3XX_RB_RENDER_CONTROL, 1);
   OUT_RING(ring, A3XX_RB_RENDER_CONTROL_ALPHA_TEST_FUNC(FUNC_ALWAYS) |
                     A3XX_RB_RENDER_CONTROL_BIN_WIDTH(gmem->bin_w));

   fd_wfi(batch, ring);
   OUT_PKT0(ring, REG_A3XX_RB_DEPTH_CONTROL, 1);
   OUT_RING(ring, A3XX_RB_DEPTH_CONTROL_ZFUNC(FUNC_LESS));

   OUT_PKT0(ring, REG_A3XX_RB_DEPTH_INFO, 2);
   OUT_RING(ring, 0);
   OUT_RING(ring, 0);

   OUT_PKT0(ring, REG_A3XX_GRAS_CL_CLIP_CNTL, 1);
   OUT_RING(ring,
            A3XX_GRAS_CL_CLIP_CNTL_IJ_PERSP_CENTER); /* GRAS_CL_CLIP_CNTL */

   fd_wfi(batch, ring);
   OUT_PKT0(ring, REG_A3XX_GRAS_CL_VPORT_XOFFSET, 6);
   OUT_RING(ring, A3XX_GRAS_CL_VPORT_XOFFSET((float)bin_w / 2.0f - 0.5f));
   OUT_RING(ring, A3XX_GRAS_CL_VPORT_XSCALE((float)bin_w / 2.0f));
   OUT_RING(ring, A3XX_GRAS_CL_VPORT_YOFFSET((float)bin_h / 2.0f - 0.5f));
   OUT_RING(ring, A3XX_GRAS_CL_VPORT_YSCALE(-(float)bin_h / 2.0f));
   OUT_RING(ring, A3XX_GRAS_CL_VPORT_ZOFFSET(0.0f));
   OUT_RING(ring, A3XX_GRAS_CL_VPORT_ZSCALE(1.0f));

   OUT_PKT0(ring, REG_A3XX_GRAS_SC_WINDOW_SCISSOR_TL, 2);
   OUT_RING(ring, A3XX_GRAS_SC_WINDOW_SCISSOR_TL_X(0) |
                     A3XX_GRAS_SC_WINDOW_SCISSOR_TL_Y(0));
   OUT_RING(ring, A3XX_GRAS_SC_WINDOW_SCISSOR_BR_X(bin_w - 1) |
                     A3XX_GRAS_SC_WINDOW_SCISSOR_BR_Y(bin_h - 1));

   OUT_PKT0(ring, REG_A3XX_GRAS_SC_SCREEN_SCISSOR_TL, 2);
   OUT_RING(ring, A3XX_GRAS_SC_SCREEN_SCISSOR_TL_X(0) |
                     A3XX_GRAS_SC_SCREEN_SCISSOR_TL_Y(0));
   OUT_RING(ring, A3XX_GRAS_SC_SCREEN_SCISSOR_BR_X(bin_w - 1) |
                     A3XX_GRAS_SC_SCREEN_SCISSOR_BR_Y(bin_h - 1));

   OUT_PKT0(ring, REG_A3XX_RB_STENCIL_CONTROL, 1);
   OUT_RING(ring, 0x2 | A3XX_RB_STENCIL_CONTROL_FUNC(FUNC_ALWAYS) |
                     A3XX_RB_STENCIL_CONTROL_FAIL(STENCIL_KEEP) |
                     A3XX_RB_STENCIL_CONTROL_ZPASS(STENCIL_KEEP) |
                     A3XX_RB_STENCIL_CONTROL_ZFAIL(STENCIL_KEEP) |
                     A3XX_RB_STENCIL_CONTROL_FUNC_BF(FUNC_ALWAYS) |
                     A3XX_RB_STENCIL_CONTROL_FAIL_BF(STENCIL_KEEP) |
                     A3XX_RB_STENCIL_CONTROL_ZPASS_BF(STENCIL_KEEP) |
                     A3XX_RB_STENCIL_CONTROL_ZFAIL_BF(STENCIL_KEEP));

   OUT_PKT0(ring, REG_A3XX_RB_STENCIL_INFO, 2);
   OUT_RING(ring, 0); /* RB_STENCIL_INFO */
   OUT_RING(ring, 0); /* RB_STENCIL_PITCH */

   OUT_PKT0(ring, REG_A3XX_GRAS_SC_CONTROL, 1);
   OUT_RING(ring, A3XX_GRAS_SC_CONTROL_RENDER_MODE(RB_RENDERING_PASS) |
                     A3XX_GRAS_SC_CONTROL_MSAA_SAMPLES(MSAA_ONE) |
                     A3XX_GRAS_SC_CONTROL_RASTER_MODE(1));

   OUT_PKT0(ring, REG_A3XX_PC_PRIM_VTX_CNTL, 1);
   OUT_RING(ring,
            A3XX_PC_PRIM_VTX_CNTL_STRIDE_IN_VPC(2) |
               A3XX_PC_PRIM_VTX_CNTL_POLYMODE_FRONT_PTYPE(PC_DRAW_TRIANGLES) |
               A3XX_PC_PRIM_VTX_CNTL_POLYMODE_BACK_PTYPE(PC_DRAW_TRIANGLES) |
               A3XX_PC_PRIM_VTX_CNTL_PROVOKING_VTX_LAST);

   OUT_PKT0(ring, REG_A3XX_VFD_INDEX_MIN, 4);
   OUT_RING(ring, 0); /* VFD_INDEX_MIN */
   OUT_RING(ring, 2); /* VFD_INDEX_MAX */
   OUT_RING(ring, 0); /* VFD_INSTANCEID_OFFSET */
   OUT_RING(ring, 0); /* VFD_INDEX_OFFSET */

   fd3_emit_vertex_bufs(ring, &emit);

   /* for gmem pitch/base calculations, we need to use the non-
    * truncated tile sizes:
    */
   bin_w = gmem->bin_w;
   bin_h = gmem->bin_h;

   if (fd_gmem_needs_restore(batch, tile, FD_BUFFER_COLOR)) {
      fd3_gmem_emit_set_prog(ctx, &emit, &ctx->blit_prog[pfb->nr_cbufs - 1]);
      fd3_program_emit(ring, &emit, pfb->nr_cbufs, pfb->cbufs);
      emit_mem2gmem_surf(batch, gmem->cbuf_base, pfb->cbufs, pfb->nr_cbufs,
                         bin_w);
   }

   if (fd_gmem_needs_restore(batch, tile,
                             FD_BUFFER_DEPTH | FD_BUFFER_STENCIL)) {
      if (pfb->zsbuf->format != PIPE_FORMAT_Z32_FLOAT_S8X24_UINT &&
          pfb->zsbuf->format != PIPE_FORMAT_Z32_FLOAT) {
         /* Non-float can use a regular color write. It's split over 8-bit
          * components, so half precision is always sufficient.
          */
         fd3_gmem_emit_set_prog(ctx, &emit, &ctx->blit_prog[0]);
      } else {
         /* Float depth needs special blit shader that writes depth */
         if (pfb->zsbuf->format == PIPE_FORMAT_Z32_FLOAT)
            fd3_gmem_emit_set_prog(ctx, &emit, &ctx->blit_z);
         else
            fd3_gmem_emit_set_prog(ctx, &emit, &ctx->blit_zs);
      }
      fd3_program_emit(ring, &emit, 1, &pfb->zsbuf);
      emit_mem2gmem_surf(batch, gmem->zsbuf_base, &pfb->zsbuf, 1, bin_w);
   }

   OUT_PKT0(ring, REG_A3XX_GRAS_SC_CONTROL, 1);
   OUT_RING(ring, A3XX_GRAS_SC_CONTROL_RENDER_MODE(RB_RENDERING_PASS) |
                     A3XX_GRAS_SC_CONTROL_MSAA_SAMPLES(MSAA_ONE) |
                     A3XX_GRAS_SC_CONTROL_RASTER_MODE(0));

   OUT_PKT0(ring, REG_A3XX_RB_MODE_CONTROL, 1);
   OUT_RING(ring, A3XX_RB_MODE_CONTROL_RENDER_MODE(RB_RENDERING_PASS) |
                     A3XX_RB_MODE_CONTROL_MARB_CACHE_SPLIT_MODE |
                     A3XX_RB_MODE_CONTROL_MRT(MAX2(1, pfb->nr_cbufs) - 1));
}

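/* patch previously recorded draw cmds with the final visibility mode, once
 * we know whether the hw binning pass is used:
 */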
static void
patch_draws(struct fd_batch *batch, enum pc_di_vis_cull_mode vismode)
{
   unsigned i;
   for (i = 0; i < fd_patch_num_elements(&batch->draw_patches); i++) {
      struct fd_cs_patch *patch = fd_patch_element(&batch->draw_patches, i);
      *patch->cs = patch->val | DRAW(0, 0, 0, vismode, 0);
   }
   util_dynarray_clear(&batch->draw_patches);
}

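/* patch previously recorded RB_RENDER_CONTROL values with the final
 * bin-width/gmem bits:
 */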
static void
patch_rbrc(struct fd_batch *batch, uint32_t val)
{
   unsigned i;
   for (i = 0; i < fd_patch_num_elements(&batch->rbrc_patches); i++) {
      struct fd_cs_patch *patch = fd_patch_element(&batch->rbrc_patches, i);
      *patch->cs = patch->val | val;
   }
   util_dynarray_clear(&batch->rbrc_patches);
}

/* for rendering directly to system memory: */
static void
fd3_emit_sysmem_prep(struct fd_batch *batch) assert_dt
{
   struct pipe_framebuffer_state *pfb = &batch->framebuffer;
   struct fd_ringbuffer *ring = batch->gmem;
   uint32_t i, pitch = 0;

   for (i = 0; i < pfb->nr_cbufs; i++) {
      struct pipe_surface *psurf = pfb->cbufs[i];
      if (!psurf)
         continue;
      struct fd_resource *rsc = fd_resource(psurf->texture);
      pitch = fd_resource_pitch(rsc, psurf->u.tex.level) / rsc->layout.cpp;
   }

   fd3_emit_restore(batch, ring);

   OUT_PKT0(ring, REG_A3XX_RB_FRAME_BUFFER_DIMENSION, 1);
   OUT_RING(ring, A3XX_RB_FRAME_BUFFER_DIMENSION_WIDTH(pfb->width) |
                     A3XX_RB_FRAME_BUFFER_DIMENSION_HEIGHT(pfb->height));

   emit_mrt(ring, pfb->nr_cbufs, pfb->cbufs, NULL, 0, true);

   /* setup scissor/offset for current tile: */
   OUT_PKT0(ring, REG_A3XX_RB_WINDOW_OFFSET, 1);
   OUT_RING(ring, A3XX_RB_WINDOW_OFFSET_X(0) | A3XX_RB_WINDOW_OFFSET_Y(0));

   OUT_PKT0(ring, REG_A3XX_GRAS_SC_SCREEN_SCISSOR_TL, 2);
   OUT_RING(ring, A3XX_GRAS_SC_SCREEN_SCISSOR_TL_X(0) |
                     A3XX_GRAS_SC_SCREEN_SCISSOR_TL_Y(0));
   OUT_RING(ring, A3XX_GRAS_SC_SCREEN_SCISSOR_BR_X(pfb->width - 1) |
                     A3XX_GRAS_SC_SCREEN_SCISSOR_BR_Y(pfb->height - 1));

   OUT_PKT0(ring, REG_A3XX_RB_MODE_CONTROL, 1);
   OUT_RING(ring, A3XX_RB_MODE_CONTROL_RENDER_MODE(RB_RENDERING_PASS) |
                     A3XX_RB_MODE_CONTROL_GMEM_BYPASS |
                     A3XX_RB_MODE_CONTROL_MARB_CACHE_SPLIT_MODE |
                     A3XX_RB_MODE_CONTROL_MRT(MAX2(1, pfb->nr_cbufs) - 1));

   patch_draws(batch, IGNORE_VISIBILITY);
   patch_rbrc(batch, A3XX_RB_RENDER_CONTROL_BIN_WIDTH(pitch));
}

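/* configure the VSC (visibility stream compressor) pipes used by the hw
 * binning pass:
 */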
static void
update_vsc_pipe(struct fd_batch *batch) assert_dt
{
   struct fd_context *ctx = batch->ctx;
   const struct fd_gmem_stateobj *gmem = batch->gmem_state;
   struct fd3_context *fd3_ctx = fd3_context(ctx);
   struct fd_ringbuffer *ring = batch->gmem;
   int i;

   OUT_PKT0(ring, REG_A3XX_VSC_SIZE_ADDRESS, 1);
   OUT_RELOC(ring, fd3_ctx->vsc_size_mem, 0, 0, 0); /* VSC_SIZE_ADDRESS */

   for (i = 0; i < 8; i++) {
      const struct fd_vsc_pipe *pipe = &gmem->vsc_pipe[i];

      if (!ctx->vsc_pipe_bo[i]) {
         ctx->vsc_pipe_bo[i] = fd_bo_new(
            ctx->dev, 0x40000, 0, "vsc_pipe[%u]", i);
      }

      OUT_PKT0(ring, REG_A3XX_VSC_PIPE(i), 3);
      OUT_RING(ring, A3XX_VSC_PIPE_CONFIG_X(pipe->x) |
                        A3XX_VSC_PIPE_CONFIG_Y(pipe->y) |
                        A3XX_VSC_PIPE_CONFIG_W(pipe->w) |
                        A3XX_VSC_PIPE_CONFIG_H(pipe->h));
      OUT_RELOC(ring, ctx->vsc_pipe_bo[i], 0, 0,
                0); /* VSC_PIPE[i].DATA_ADDRESS */
      OUT_RING(ring, fd_bo_size(ctx->vsc_pipe_bo[i]) -
                        32); /* VSC_PIPE[i].DATA_LENGTH */
   }
}

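/* emit the hw binning pass, replaying the batch's binning cmdstream to
 * generate per-tile visibility streams:
 */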
static void
emit_binning_pass(struct fd_batch *batch) assert_dt
{
   struct fd_context *ctx = batch->ctx;
   const struct fd_gmem_stateobj *gmem = batch->gmem_state;
   struct pipe_framebuffer_state *pfb = &batch->framebuffer;
   struct fd_ringbuffer *ring = batch->gmem;
   int i;

   uint32_t x1 = gmem->minx;
   uint32_t y1 = gmem->miny;
   uint32_t x2 = gmem->minx + gmem->width - 1;
   uint32_t y2 = gmem->miny + gmem->height - 1;

   if (ctx->screen->gpu_id == 320) {
      emit_binning_workaround(batch);
      fd_wfi(batch, ring);
      OUT_PKT3(ring, CP_INVALIDATE_STATE, 1);
      OUT_RING(ring, 0x00007fff);
   }

   OUT_PKT0(ring, REG_A3XX_VSC_BIN_CONTROL, 1);
   OUT_RING(ring, A3XX_VSC_BIN_CONTROL_BINNING_ENABLE);

   OUT_PKT0(ring, REG_A3XX_GRAS_SC_CONTROL, 1);
   OUT_RING(ring, A3XX_GRAS_SC_CONTROL_RENDER_MODE(RB_TILING_PASS) |
                     A3XX_GRAS_SC_CONTROL_MSAA_SAMPLES(MSAA_ONE) |
                     A3XX_GRAS_SC_CONTROL_RASTER_MODE(0));

   OUT_PKT0(ring, REG_A3XX_RB_FRAME_BUFFER_DIMENSION, 1);
   OUT_RING(ring, A3XX_RB_FRAME_BUFFER_DIMENSION_WIDTH(pfb->width) |
                     A3XX_RB_FRAME_BUFFER_DIMENSION_HEIGHT(pfb->height));

   OUT_PKT0(ring, REG_A3XX_RB_RENDER_CONTROL, 1);
   OUT_RING(ring, A3XX_RB_RENDER_CONTROL_ALPHA_TEST_FUNC(FUNC_NEVER) |
                     A3XX_RB_RENDER_CONTROL_DISABLE_COLOR_PIPE |
                     A3XX_RB_RENDER_CONTROL_BIN_WIDTH(gmem->bin_w));

   /* setup scissor/offset for whole screen: */
   OUT_PKT0(ring, REG_A3XX_RB_WINDOW_OFFSET, 1);
   OUT_RING(ring, A3XX_RB_WINDOW_OFFSET_X(x1) | A3XX_RB_WINDOW_OFFSET_Y(y1));

   OUT_PKT0(ring, REG_A3XX_RB_LRZ_VSC_CONTROL, 1);
   OUT_RING(ring, A3XX_RB_LRZ_VSC_CONTROL_BINNING_ENABLE);

   OUT_PKT0(ring, REG_A3XX_GRAS_SC_SCREEN_SCISSOR_TL, 2);
   OUT_RING(ring, A3XX_GRAS_SC_SCREEN_SCISSOR_TL_X(x1) |
                     A3XX_GRAS_SC_SCREEN_SCISSOR_TL_Y(y1));
   OUT_RING(ring, A3XX_GRAS_SC_SCREEN_SCISSOR_BR_X(x2) |
                     A3XX_GRAS_SC_SCREEN_SCISSOR_BR_Y(y2));

   OUT_PKT0(ring, REG_A3XX_RB_MODE_CONTROL, 1);
   OUT_RING(ring, A3XX_RB_MODE_CONTROL_RENDER_MODE(RB_TILING_PASS) |
                     A3XX_RB_MODE_CONTROL_MARB_CACHE_SPLIT_MODE |
                     A3XX_RB_MODE_CONTROL_MRT(0));

   for (i = 0; i < 4; i++) {
      OUT_PKT0(ring, REG_A3XX_RB_MRT_CONTROL(i), 1);
      OUT_RING(ring, A3XX_RB_MRT_CONTROL_ROP_CODE(ROP_CLEAR) |
                        A3XX_RB_MRT_CONTROL_DITHER_MODE(DITHER_DISABLE) |
                        A3XX_RB_MRT_CONTROL_COMPONENT_ENABLE(0));
   }

   OUT_PKT0(ring, REG_A3XX_PC_VSTREAM_CONTROL, 1);
   OUT_RING(ring,
            A3XX_PC_VSTREAM_CONTROL_SIZE(1) | A3XX_PC_VSTREAM_CONTROL_N(0));

   /* emit IB to binning drawcmds: */
   fd3_emit_ib(ring, batch->binning);
   fd_reset_wfi(batch);

   fd_wfi(batch, ring);

   /* and then put stuff back the way it was: */

   OUT_PKT0(ring, REG_A3XX_VSC_BIN_CONTROL, 1);
   OUT_RING(ring, 0x00000000);

   OUT_PKT0(ring, REG_A3XX_SP_SP_CTRL_REG, 1);
   OUT_RING(ring, A3XX_SP_SP_CTRL_REG_RESOLVE |
                     A3XX_SP_SP_CTRL_REG_CONSTMODE(1) |
                     A3XX_SP_SP_CTRL_REG_SLEEPMODE(1) |
                     A3XX_SP_SP_CTRL_REG_L0MODE(0));

   OUT_PKT0(ring, REG_A3XX_RB_LRZ_VSC_CONTROL, 1);
   OUT_RING(ring, 0x00000000);

   OUT_PKT0(ring, REG_A3XX_GRAS_SC_CONTROL, 1);
   OUT_RING(ring, A3XX_GRAS_SC_CONTROL_RENDER_MODE(RB_RENDERING_PASS) |
                     A3XX_GRAS_SC_CONTROL_MSAA_SAMPLES(MSAA_ONE) |
                     A3XX_GRAS_SC_CONTROL_RASTER_MODE(0));

   OUT_PKT0(ring, REG_A3XX_RB_MODE_CONTROL, 2);
   OUT_RING(ring, A3XX_RB_MODE_CONTROL_RENDER_MODE(RB_RENDERING_PASS) |
                     A3XX_RB_MODE_CONTROL_MARB_CACHE_SPLIT_MODE |
                     A3XX_RB_MODE_CONTROL_MRT(pfb->nr_cbufs - 1));
   OUT_RING(ring, A3XX_RB_RENDER_CONTROL_ENABLE_GMEM |
                     A3XX_RB_RENDER_CONTROL_ALPHA_TEST_FUNC(FUNC_NEVER) |
                     A3XX_RB_RENDER_CONTROL_BIN_WIDTH(gmem->bin_w));

   fd_event_write(batch, ring, CACHE_FLUSH);
   fd_wfi(batch, ring);

   if (ctx->screen->gpu_id == 320) {
      /* dummy-draw workaround: */
      OUT_PKT3(ring, CP_DRAW_INDX, 3);
      OUT_RING(ring, 0x00000000);
      OUT_RING(ring, DRAW(1, DI_SRC_SEL_AUTO_INDEX, INDEX_SIZE_IGN,
                          IGNORE_VISIBILITY, 0));
      OUT_RING(ring, 0); /* NumIndices */
      fd_reset_wfi(batch);
   }

   OUT_PKT3(ring, CP_NOP, 4);
   OUT_RING(ring, 0x00000000);
   OUT_RING(ring, 0x00000000);
   OUT_RING(ring, 0x00000000);
   OUT_RING(ring, 0x00000000);

   fd_wfi(batch, ring);

   if (ctx->screen->gpu_id == 320) {
      emit_binning_workaround(batch);
   }
}

/* before first tile */
static void
fd3_emit_tile_init(struct fd_batch *batch) assert_dt
{
   struct fd_ringbuffer *ring = batch->gmem;
   struct pipe_framebuffer_state *pfb = &batch->framebuffer;
   const struct fd_gmem_stateobj *gmem = batch->gmem_state;
   uint32_t rb_render_control;

   fd3_emit_restore(batch, ring);

   /* note: use gmem->bin_w/h, the bin_w/h parameters may be truncated
    * at the right and bottom edge tiles
    */
   OUT_PKT0(ring, REG_A3XX_VSC_BIN_SIZE, 1);
   OUT_RING(ring, A3XX_VSC_BIN_SIZE_WIDTH(gmem->bin_w) |
                     A3XX_VSC_BIN_SIZE_HEIGHT(gmem->bin_h));

   update_vsc_pipe(batch);

   fd_wfi(batch, ring);
   OUT_PKT0(ring, REG_A3XX_RB_FRAME_BUFFER_DIMENSION, 1);
   OUT_RING(ring, A3XX_RB_FRAME_BUFFER_DIMENSION_WIDTH(pfb->width) |
                     A3XX_RB_FRAME_BUFFER_DIMENSION_HEIGHT(pfb->height));

   if (use_hw_binning(batch)) {
      /* emit hw binning pass: */
      emit_binning_pass(batch);

      patch_draws(batch, USE_VISIBILITY);
   } else {
      patch_draws(batch, IGNORE_VISIBILITY);
   }

   rb_render_control = A3XX_RB_RENDER_CONTROL_ENABLE_GMEM |
                       A3XX_RB_RENDER_CONTROL_BIN_WIDTH(gmem->bin_w);

   patch_rbrc(batch, rb_render_control);
}

/* before mem2gmem */
static void
fd3_emit_tile_prep(struct fd_batch *batch, const struct fd_tile *tile)
{
   struct fd_ringbuffer *ring = batch->gmem;
   struct pipe_framebuffer_state *pfb = &batch->framebuffer;

   OUT_PKT0(ring, REG_A3XX_RB_MODE_CONTROL, 1);
   OUT_RING(ring, A3XX_RB_MODE_CONTROL_RENDER_MODE(RB_RENDERING_PASS) |
                     A3XX_RB_MODE_CONTROL_MARB_CACHE_SPLIT_MODE |
                     A3XX_RB_MODE_CONTROL_MRT(MAX2(1, pfb->nr_cbufs) - 1));
}

/* before IB to rendering cmds: */
static void
fd3_emit_tile_renderprep(struct fd_batch *batch,
                         const struct fd_tile *tile) assert_dt
{
   struct fd_context *ctx = batch->ctx;
   struct fd3_context *fd3_ctx = fd3_context(ctx);
   struct fd_ringbuffer *ring = batch->gmem;
   const struct fd_gmem_stateobj *gmem = batch->gmem_state;
   struct pipe_framebuffer_state *pfb = &batch->framebuffer;

   uint32_t x1 = tile->xoff;
   uint32_t y1 = tile->yoff;
   uint32_t x2 = tile->xoff + tile->bin_w - 1;
   uint32_t y2 = tile->yoff + tile->bin_h - 1;

   uint32_t reg;

   OUT_PKT0(ring, REG_A3XX_RB_DEPTH_INFO, 2);
   reg = A3XX_RB_DEPTH_INFO_DEPTH_BASE(gmem->zsbuf_base[0]);
   if (pfb->zsbuf) {
      reg |= A3XX_RB_DEPTH_INFO_DEPTH_FORMAT(fd_pipe2depth(pfb->zsbuf->format));
   }
   OUT_RING(ring, reg);
   if (pfb->zsbuf) {
      struct fd_resource *rsc = fd_resource(pfb->zsbuf->texture);
      OUT_RING(ring,
               A3XX_RB_DEPTH_PITCH(gmem->bin_w << fdl_cpp_shift(&rsc->layout)));
      if (rsc->stencil) {
         OUT_PKT0(ring, REG_A3XX_RB_STENCIL_INFO, 2);
         OUT_RING(ring, A3XX_RB_STENCIL_INFO_STENCIL_BASE(gmem->zsbuf_base[1]));
         OUT_RING(ring, A3XX_RB_STENCIL_PITCH(gmem->bin_w << fdl_cpp_shift(
                                                 &rsc->stencil->layout)));
      }
   } else {
      OUT_RING(ring, 0x00000000);
   }

   if (use_hw_binning(batch)) {
      const struct fd_vsc_pipe *pipe = &gmem->vsc_pipe[tile->p];
      struct fd_bo *pipe_bo = ctx->vsc_pipe_bo[tile->p];

      assert(pipe->w && pipe->h);

      fd_event_write(batch, ring, HLSQ_FLUSH);
      fd_wfi(batch, ring);

      OUT_PKT0(ring, REG_A3XX_PC_VSTREAM_CONTROL, 1);
      OUT_RING(ring, A3XX_PC_VSTREAM_CONTROL_SIZE(pipe->w * pipe->h) |
                        A3XX_PC_VSTREAM_CONTROL_N(tile->n));

      OUT_PKT3(ring, CP_SET_BIN_DATA, 2);
      OUT_RELOC(ring, pipe_bo, 0, 0,
                0); /* BIN_DATA_ADDR <- VSC_PIPE[p].DATA_ADDRESS */
      OUT_RELOC(ring, fd3_ctx->vsc_size_mem, /* BIN_SIZE_ADDR <-
                                                VSC_SIZE_ADDRESS + (p * 4) */
                (tile->p * 4), 0, 0);
   } else {
      OUT_PKT0(ring, REG_A3XX_PC_VSTREAM_CONTROL, 1);
      OUT_RING(ring, 0x00000000);
   }

   OUT_PKT3(ring, CP_SET_BIN, 3);
   OUT_RING(ring, 0x00000000);
   OUT_RING(ring, CP_SET_BIN_1_X1(x1) | CP_SET_BIN_1_Y1(y1));
   OUT_RING(ring, CP_SET_BIN_2_X2(x2) | CP_SET_BIN_2_Y2(y2));

   emit_mrt(ring, pfb->nr_cbufs, pfb->cbufs, gmem->cbuf_base, gmem->bin_w,
            true);

   /* setup scissor/offset for current tile: */
   OUT_PKT0(ring, REG_A3XX_RB_WINDOW_OFFSET, 1);
   OUT_RING(ring, A3XX_RB_WINDOW_OFFSET_X(tile->xoff) |
                     A3XX_RB_WINDOW_OFFSET_Y(tile->yoff));

   OUT_PKT0(ring, REG_A3XX_GRAS_SC_SCREEN_SCISSOR_TL, 2);
   OUT_RING(ring, A3XX_GRAS_SC_SCREEN_SCISSOR_TL_X(x1) |
                     A3XX_GRAS_SC_SCREEN_SCISSOR_TL_Y(y1));
   OUT_RING(ring, A3XX_GRAS_SC_SCREEN_SCISSOR_BR_X(x2) |
                     A3XX_GRAS_SC_SCREEN_SCISSOR_BR_Y(y2));
}

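/* hook up the a3xx gmem/tiling entrypoints: */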
void
fd3_gmem_init(struct pipe_context *pctx) disable_thread_safety_analysis
{
   struct fd_context *ctx = fd_context(pctx);

   ctx->emit_sysmem_prep = fd3_emit_sysmem_prep;
   ctx->emit_tile_init = fd3_emit_tile_init;
   ctx->emit_tile_prep = fd3_emit_tile_prep;
   ctx->emit_tile_mem2gmem = fd3_emit_tile_mem2gmem;
   ctx->emit_tile_renderprep = fd3_emit_tile_renderprep;
   ctx->emit_tile_gmem2mem = fd3_emit_tile_gmem2mem;
}