/*
 * Copyright © 2012-2013 Rob Clark <[email protected]>
 * SPDX-License-Identifier: MIT
 *
 * Authors:
 *    Rob Clark <[email protected]>
 */

#include "pipe/p_state.h"
#include "util/u_helpers.h"
#include "util/u_memory.h"
#include "util/u_string.h"

#include "freedreno_resource.h"

#include "fd2_blend.h"
#include "fd2_context.h"
#include "fd2_emit.h"
#include "fd2_program.h"
#include "fd2_rasterizer.h"
#include "fd2_texture.h"
#include "fd2_util.h"
#include "fd2_zsa.h"

/* NOTE: just define the position for const regs statically.. the blob
 * driver doesn't seem to change these dynamically, and I can't really
 * think of a good reason to do so..
 */
#define VS_CONST_BASE 0x20
#define PS_CONST_BASE 0x120

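/* Emit constant state for one shader stage: user constbufs are packed
 * consecutively starting at 'base' (a dword offset, 4 dwords per vec4
 * const register), followed by the shader's immediate values starting
 * at the const register given by shader->first_immediate.
 */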
static void
emit_constants(struct fd_ringbuffer *ring, uint32_t base,
               struct fd_constbuf_stateobj *constbuf,
               struct fd2_shader_stateobj *shader)
{
   uint32_t enabled_mask = constbuf->enabled_mask;
   uint32_t start_base = base;
   unsigned i;

   /* emit user constants: */
   while (enabled_mask) {
      unsigned index = ffs(enabled_mask) - 1;
      struct pipe_constant_buffer *cb = &constbuf->cb[index];
      unsigned size = align(cb->buffer_size, 4) / 4; /* size in dwords */

      /* size should be a multiple of vec4 (ie. 4 dwords): */
      assert(size == align(size, 4));

      /* hmm, sometimes we still seem to end up with consts bound,
       * even if shader isn't using them, which ends up overwriting
       * const reg's used for immediates.. this is a hack to work
       * around that:
       */
      if (shader && ((base - start_base) >= (shader->first_immediate * 4)))
         break;

      const uint32_t *dwords;

      if (cb->user_buffer) {
         dwords = cb->user_buffer;
      } else {
         struct fd_resource *rsc = fd_resource(cb->buffer);
         dwords = fd_bo_map(rsc->bo);
      }

      dwords = (uint32_t *)(((uint8_t *)dwords) + cb->buffer_offset);

      OUT_PKT3(ring, CP_SET_CONSTANT, size + 1);
      OUT_RING(ring, base);
      for (i = 0; i < size; i++)
         OUT_RING(ring, *(dwords++));

      base += size;
      enabled_mask &= ~(1 << index);
   }

   /* emit shader immediates: */
   if (shader) {
      for (i = 0; i < shader->num_immediates; i++) {
         OUT_PKT3(ring, CP_SET_CONSTANT, 5);
         OUT_RING(ring, start_base + (4 * (shader->first_immediate + i)));
         OUT_RING(ring, shader->immediates[i].val[0]);
         OUT_RING(ring, shader->immediates[i].val[1]);
         OUT_RING(ring, shader->immediates[i].val[2]);
         OUT_RING(ring, shader->immediates[i].val[3]);
         base += 4;
      }
   }
}

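/* Texture state is emitted as fetch constants (6 dwords per texture).  A
 * given const slot can be referenced by both the vertex and fragment
 * stage, so the 'emitted' bitmask tracks which slots have already been
 * written and avoids emitting the same one twice.
 */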
typedef uint32_t texmask;

static texmask
emit_texture(struct fd_ringbuffer *ring, struct fd_context *ctx,
             struct fd_texture_stateobj *tex, unsigned samp_id, texmask emitted)
{
   unsigned const_idx = fd2_get_const_idx(ctx, tex, samp_id);
   static const struct fd2_sampler_stateobj dummy_sampler = {};
   static const struct fd2_pipe_sampler_view dummy_view = {};
   const struct fd2_sampler_stateobj *sampler;
   const struct fd2_pipe_sampler_view *view;
   struct fd_resource *rsc;

   if (emitted & (1 << const_idx))
      return 0;

   sampler = tex->samplers[samp_id]
                ? fd2_sampler_stateobj(tex->samplers[samp_id])
                : &dummy_sampler;
   view = tex->textures[samp_id] ? fd2_pipe_sampler_view(tex->textures[samp_id])
                                 : &dummy_view;

   rsc = view->base.texture ? fd_resource(view->base.texture) : NULL;

   OUT_PKT3(ring, CP_SET_CONSTANT, 7);
   OUT_RING(ring, 0x00010000 + (0x6 * const_idx));

   OUT_RING(ring, sampler->tex0 | view->tex0);
   if (rsc)
      OUT_RELOC(ring, rsc->bo, fd_resource_offset(rsc, 0, 0), view->tex1, 0);
   else
      OUT_RING(ring, 0);

   OUT_RING(ring, view->tex2);
   OUT_RING(ring, sampler->tex3 | view->tex3);
   OUT_RING(ring, sampler->tex4 | view->tex4);

   if (rsc && rsc->b.b.last_level)
      OUT_RELOC(ring, rsc->bo, fd_resource_offset(rsc, 1, 0), view->tex5, 0);
   else
      OUT_RING(ring, view->tex5);

   return (1 << const_idx);
}

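/* emit texture fetch consts for any bound vertex and fragment samplers: */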
static void
emit_textures(struct fd_ringbuffer *ring, struct fd_context *ctx)
{
   struct fd_texture_stateobj *fragtex = &ctx->tex[PIPE_SHADER_FRAGMENT];
   struct fd_texture_stateobj *verttex = &ctx->tex[PIPE_SHADER_VERTEX];
   texmask emitted = 0;
   unsigned i;

   for (i = 0; i < verttex->num_samplers; i++)
      if (verttex->samplers[i])
         emitted |= emit_texture(ring, ctx, verttex, i, emitted);

   for (i = 0; i < fragtex->num_samplers; i++)
      if (fragtex->samplers[i])
         emitted |= emit_texture(ring, ctx, fragtex, i, emitted);
}

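/* Emit 'n' vertex buffers as a run of fetch constants starting at the
 * slot selected by 'val' (one address + size pair per buffer):
 */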
void
fd2_emit_vertex_bufs(struct fd_ringbuffer *ring, uint32_t val,
                     struct fd2_vertex_buf *vbufs, uint32_t n)
{
   unsigned i;

   OUT_PKT3(ring, CP_SET_CONSTANT, 1 + (2 * n));
   OUT_RING(ring, (0x1 << 16) | (val & 0xffff));
   for (i = 0; i < n; i++) {
      struct fd_resource *rsc = fd_resource(vbufs[i].prsc);
      OUT_RELOC(ring, rsc->bo, vbufs[i].offset, 3, 0);
      OUT_RING(ring, vbufs[i].size);
   }
}

void
fd2_emit_state_binning(struct fd_context *ctx,
                       const enum fd_dirty_3d_state dirty)
{
   struct fd2_blend_stateobj *blend = fd2_blend_stateobj(ctx->blend);
   struct fd_ringbuffer *ring = ctx->batch->binning;

   /* subset of fd2_emit_state needed for hw binning on a20x */

   if (dirty & (FD_DIRTY_PROG | FD_DIRTY_VTXSTATE))
      fd2_program_emit(ctx, ring, &ctx->prog);

   if (dirty & (FD_DIRTY_PROG | FD_DIRTY_CONST)) {
      emit_constants(ring, VS_CONST_BASE * 4,
                     &ctx->constbuf[PIPE_SHADER_VERTEX],
                     (dirty & FD_DIRTY_PROG) ? ctx->prog.vs : NULL);
   }

   if (dirty & FD_DIRTY_VIEWPORT) {
      struct pipe_viewport_state *vp = &ctx->viewport[0];

      OUT_PKT3(ring, CP_SET_CONSTANT, 9);
      OUT_RING(ring, 0x00000184);
      OUT_RING(ring, fui(vp->translate[0]));
      OUT_RING(ring, fui(vp->translate[1]));
      OUT_RING(ring, fui(vp->translate[2]));
      OUT_RING(ring, fui(0.0f));
      OUT_RING(ring, fui(vp->scale[0]));
      OUT_RING(ring, fui(vp->scale[1]));
      OUT_RING(ring, fui(vp->scale[2]));
      OUT_RING(ring, fui(0.0f));
   }

   /* not sure why this is needed */
   if (dirty & (FD_DIRTY_BLEND | FD_DIRTY_FRAMEBUFFER)) {
      OUT_PKT3(ring, CP_SET_CONSTANT, 2);
      OUT_RING(ring, CP_REG(REG_A2XX_RB_BLEND_CONTROL));
      OUT_RING(ring, blend->rb_blendcontrol);

      OUT_PKT3(ring, CP_SET_CONSTANT, 2);
      OUT_RING(ring, CP_REG(REG_A2XX_RB_COLOR_MASK));
      OUT_RING(ring, blend->rb_colormask);
   }

   OUT_PKT3(ring, CP_SET_CONSTANT, 2);
   OUT_RING(ring, CP_REG(REG_A2XX_PA_SU_SC_MODE_CNTL));
   OUT_RING(ring, A2XX_PA_SU_SC_MODE_CNTL_FACE_KILL_ENABLE);
}

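/* emit per-draw state, based on which state bits are marked dirty: */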
void
fd2_emit_state(struct fd_context *ctx, const enum fd_dirty_3d_state dirty)
{
   struct fd2_blend_stateobj *blend = fd2_blend_stateobj(ctx->blend);
   struct fd2_zsa_stateobj *zsa = fd2_zsa_stateobj(ctx->zsa);
   struct fd2_shader_stateobj *fs = ctx->prog.fs;
   struct fd_ringbuffer *ring = ctx->batch->draw;

   /* NOTE: we probably want to eventually refactor this so each state
    * object handles emitting its own state..  although the mapping of
    * state to registers is not always orthogonal, sometimes a single
    * register contains bitfields coming from multiple state objects,
    * so not sure the best way to deal with that yet.
    */

   if (dirty & FD_DIRTY_SAMPLE_MASK) {
      OUT_PKT3(ring, CP_SET_CONSTANT, 2);
      OUT_RING(ring, CP_REG(REG_A2XX_PA_SC_AA_MASK));
      OUT_RING(ring, ctx->sample_mask);
   }

   if (dirty & (FD_DIRTY_ZSA | FD_DIRTY_STENCIL_REF | FD_DIRTY_PROG)) {
      struct pipe_stencil_ref *sr = &ctx->stencil_ref;
      uint32_t val = zsa->rb_depthcontrol;

      if (fs->has_kill)
         val &= ~A2XX_RB_DEPTHCONTROL_EARLY_Z_ENABLE;

      OUT_PKT3(ring, CP_SET_CONSTANT, 2);
      OUT_RING(ring, CP_REG(REG_A2XX_RB_DEPTHCONTROL));
      OUT_RING(ring, val);

      OUT_PKT3(ring, CP_SET_CONSTANT, 4);
      OUT_RING(ring, CP_REG(REG_A2XX_RB_STENCILREFMASK_BF));
      OUT_RING(ring, zsa->rb_stencilrefmask_bf |
                        A2XX_RB_STENCILREFMASK_STENCILREF(sr->ref_value[1]));
      OUT_RING(ring, zsa->rb_stencilrefmask |
                        A2XX_RB_STENCILREFMASK_STENCILREF(sr->ref_value[0]));
      OUT_RING(ring, zsa->rb_alpha_ref);
   }

   if (ctx->rasterizer && dirty & FD_DIRTY_RASTERIZER) {
      struct fd2_rasterizer_stateobj *rasterizer =
         fd2_rasterizer_stateobj(ctx->rasterizer);
      OUT_PKT3(ring, CP_SET_CONSTANT, 3);
      OUT_RING(ring, CP_REG(REG_A2XX_PA_CL_CLIP_CNTL));
      OUT_RING(ring, rasterizer->pa_cl_clip_cntl);
      OUT_RING(ring, rasterizer->pa_su_sc_mode_cntl |
                        A2XX_PA_SU_SC_MODE_CNTL_VTX_WINDOW_OFFSET_ENABLE);

      OUT_PKT3(ring, CP_SET_CONSTANT, 5);
      OUT_RING(ring, CP_REG(REG_A2XX_PA_SU_POINT_SIZE));
      OUT_RING(ring, rasterizer->pa_su_point_size);
      OUT_RING(ring, rasterizer->pa_su_point_minmax);
      OUT_RING(ring, rasterizer->pa_su_line_cntl);
      OUT_RING(ring, rasterizer->pa_sc_line_stipple);

      OUT_PKT3(ring, CP_SET_CONSTANT, 6);
      OUT_RING(ring, CP_REG(REG_A2XX_PA_SU_VTX_CNTL));
      OUT_RING(ring, rasterizer->pa_su_vtx_cntl);
      OUT_RING(ring, fui(1.0f)); /* PA_CL_GB_VERT_CLIP_ADJ */
      OUT_RING(ring, fui(1.0f)); /* PA_CL_GB_VERT_DISC_ADJ */
      OUT_RING(ring, fui(1.0f)); /* PA_CL_GB_HORZ_CLIP_ADJ */
      OUT_RING(ring, fui(1.0f)); /* PA_CL_GB_HORZ_DISC_ADJ */

      if (rasterizer->base.offset_tri) {
         /* TODO: why multiply scale by 2?  without it deqp tests fail,
          * but deqp/piglit tests aren't very precise here.
          */
         OUT_PKT3(ring, CP_SET_CONSTANT, 5);
         OUT_RING(ring, CP_REG(REG_A2XX_PA_SU_POLY_OFFSET_FRONT_SCALE));
         OUT_RING(ring,
                  fui(rasterizer->base.offset_scale * 2.0f)); /* FRONT_SCALE */
         OUT_RING(ring, fui(rasterizer->base.offset_units)); /* FRONT_OFFSET */
         OUT_RING(ring,
                  fui(rasterizer->base.offset_scale * 2.0f)); /* BACK_SCALE */
         OUT_RING(ring, fui(rasterizer->base.offset_units)); /* BACK_OFFSET */
      }
   }

   /* NOTE: scissor enabled bit is part of rasterizer state: */
   if (dirty & (FD_DIRTY_SCISSOR | FD_DIRTY_RASTERIZER)) {
      struct pipe_scissor_state *scissor = fd_context_get_scissor(ctx);

      OUT_PKT3(ring, CP_SET_CONSTANT, 3);
      OUT_RING(ring, CP_REG(REG_A2XX_PA_SC_WINDOW_SCISSOR_TL));
      OUT_RING(ring, xy2d(scissor->minx, /* PA_SC_WINDOW_SCISSOR_TL */
                          scissor->miny));
      OUT_RING(ring, xy2d(scissor->maxx, /* PA_SC_WINDOW_SCISSOR_BR */
                          scissor->maxy));

      ctx->batch->max_scissor.minx =
         MIN2(ctx->batch->max_scissor.minx, scissor->minx);
      ctx->batch->max_scissor.miny =
         MIN2(ctx->batch->max_scissor.miny, scissor->miny);
      ctx->batch->max_scissor.maxx =
         MAX2(ctx->batch->max_scissor.maxx, scissor->maxx);
      ctx->batch->max_scissor.maxy =
         MAX2(ctx->batch->max_scissor.maxy, scissor->maxy);
   }

   if (dirty & FD_DIRTY_VIEWPORT) {
      struct pipe_viewport_state *vp = &ctx->viewport[0];

      OUT_PKT3(ring, CP_SET_CONSTANT, 7);
      OUT_RING(ring, CP_REG(REG_A2XX_PA_CL_VPORT_XSCALE));
      OUT_RING(ring, fui(vp->scale[0]));     /* PA_CL_VPORT_XSCALE */
      OUT_RING(ring, fui(vp->translate[0])); /* PA_CL_VPORT_XOFFSET */
      OUT_RING(ring, fui(vp->scale[1]));     /* PA_CL_VPORT_YSCALE */
      OUT_RING(ring, fui(vp->translate[1])); /* PA_CL_VPORT_YOFFSET */
      OUT_RING(ring, fui(vp->scale[2]));     /* PA_CL_VPORT_ZSCALE */
      OUT_RING(ring, fui(vp->translate[2])); /* PA_CL_VPORT_ZOFFSET */

      /* set viewport in C65/C66, for a20x hw binning and fragcoord.z */
      OUT_PKT3(ring, CP_SET_CONSTANT, 9);
      OUT_RING(ring, 0x00000184);

      OUT_RING(ring, fui(vp->translate[0]));
      OUT_RING(ring, fui(vp->translate[1]));
      OUT_RING(ring, fui(vp->translate[2]));
      OUT_RING(ring, fui(0.0f));

      OUT_RING(ring, fui(vp->scale[0]));
      OUT_RING(ring, fui(vp->scale[1]));
      OUT_RING(ring, fui(vp->scale[2]));
      OUT_RING(ring, fui(0.0f));
   }

   if (dirty & (FD_DIRTY_PROG | FD_DIRTY_VTXSTATE | FD_DIRTY_TEXSTATE))
      fd2_program_emit(ctx, ring, &ctx->prog);

   if (dirty & (FD_DIRTY_PROG | FD_DIRTY_CONST)) {
      emit_constants(ring, VS_CONST_BASE * 4,
                     &ctx->constbuf[PIPE_SHADER_VERTEX],
                     (dirty & FD_DIRTY_PROG) ? ctx->prog.vs : NULL);
      emit_constants(ring, PS_CONST_BASE * 4,
                     &ctx->constbuf[PIPE_SHADER_FRAGMENT],
                     (dirty & FD_DIRTY_PROG) ? ctx->prog.fs : NULL);
   }

   if (dirty & (FD_DIRTY_BLEND | FD_DIRTY_ZSA)) {
      OUT_PKT3(ring, CP_SET_CONSTANT, 2);
      OUT_RING(ring, CP_REG(REG_A2XX_RB_COLORCONTROL));
      OUT_RING(ring, zsa->rb_colorcontrol | blend->rb_colorcontrol);
   }

   if (dirty & (FD_DIRTY_BLEND | FD_DIRTY_FRAMEBUFFER)) {
      OUT_PKT3(ring, CP_SET_CONSTANT, 2);
      OUT_RING(ring, CP_REG(REG_A2XX_RB_BLEND_CONTROL));
      OUT_RING(ring, blend->rb_blendcontrol);

      OUT_PKT3(ring, CP_SET_CONSTANT, 2);
      OUT_RING(ring, CP_REG(REG_A2XX_RB_COLOR_MASK));
      OUT_RING(ring, blend->rb_colormask);
   }

   if (dirty & FD_DIRTY_BLEND_COLOR) {
      OUT_PKT3(ring, CP_SET_CONSTANT, 5);
      OUT_RING(ring, CP_REG(REG_A2XX_RB_BLEND_RED));
      OUT_RING(ring, float_to_ubyte(ctx->blend_color.color[0]));
      OUT_RING(ring, float_to_ubyte(ctx->blend_color.color[1]));
      OUT_RING(ring, float_to_ubyte(ctx->blend_color.color[2]));
      OUT_RING(ring, float_to_ubyte(ctx->blend_color.color[3]));
   }

   if (dirty & (FD_DIRTY_TEX | FD_DIRTY_PROG))
      emit_textures(ring, ctx);
}

/* emit per-context initialization:
 */
void
fd2_emit_restore(struct fd_context *ctx, struct fd_ringbuffer *ring)
{
   if (is_a20x(ctx->screen)) {
      OUT_PKT0(ring, REG_A2XX_RB_BC_CONTROL, 1);
      OUT_RING(ring, A2XX_RB_BC_CONTROL_ACCUM_TIMEOUT_SELECT(3) |
                        A2XX_RB_BC_CONTROL_DISABLE_LZ_NULL_ZCMD_DROP |
                        A2XX_RB_BC_CONTROL_ENABLE_CRC_UPDATE |
                        A2XX_RB_BC_CONTROL_ACCUM_DATA_FIFO_LIMIT(8) |
                        A2XX_RB_BC_CONTROL_MEM_EXPORT_TIMEOUT_SELECT(3));

      /* not sure why this is required */
      OUT_PKT3(ring, CP_SET_CONSTANT, 2);
      OUT_RING(ring, CP_REG(REG_A2XX_PA_SC_VIZ_QUERY));
      OUT_RING(ring, A2XX_PA_SC_VIZ_QUERY_VIZ_QUERY_ID(16));

      OUT_PKT3(ring, CP_SET_CONSTANT, 2);
      OUT_RING(ring, CP_REG(REG_A2XX_VGT_VERTEX_REUSE_BLOCK_CNTL));
      OUT_RING(ring, 0x00000002);

      OUT_PKT3(ring, CP_SET_CONSTANT, 2);
      OUT_RING(ring, CP_REG(REG_A2XX_VGT_OUT_DEALLOC_CNTL));
      OUT_RING(ring, 0x00000002);
   } else {
      OUT_PKT3(ring, CP_SET_CONSTANT, 2);
      OUT_RING(ring, CP_REG(REG_A2XX_VGT_VERTEX_REUSE_BLOCK_CNTL));
      OUT_RING(ring, 0x0000003b);
   }

   /* enable perfcntrs */
   OUT_PKT0(ring, REG_A2XX_CP_PERFMON_CNTL, 1);
   OUT_RING(ring, COND(FD_DBG(PERFC), 1));

   /* note: perfcntrs don't work without the PM_OVERRIDE bit */
   OUT_PKT0(ring, REG_A2XX_RBBM_PM_OVERRIDE1, 2);
   OUT_RING(ring, 0xffffffff);
   OUT_RING(ring, 0x00000fff);

   OUT_PKT0(ring, REG_A2XX_TP0_CHICKEN, 1);
   OUT_RING(ring, 0x00000002);

   OUT_PKT3(ring, CP_INVALIDATE_STATE, 1);
   OUT_RING(ring, 0x00007fff);

   OUT_PKT3(ring, CP_SET_CONSTANT, 2);
   OUT_RING(ring, CP_REG(REG_A2XX_SQ_VS_CONST));
   OUT_RING(ring, A2XX_SQ_VS_CONST_BASE(VS_CONST_BASE) |
                     A2XX_SQ_VS_CONST_SIZE(0x100));

   OUT_PKT3(ring, CP_SET_CONSTANT, 2);
   OUT_RING(ring, CP_REG(REG_A2XX_SQ_PS_CONST));
   OUT_RING(ring,
            A2XX_SQ_PS_CONST_BASE(PS_CONST_BASE) | A2XX_SQ_PS_CONST_SIZE(0xe0));

   OUT_PKT3(ring, CP_SET_CONSTANT, 3);
   OUT_RING(ring, CP_REG(REG_A2XX_VGT_MAX_VTX_INDX));
   OUT_RING(ring, 0xffffffff); /* VGT_MAX_VTX_INDX */
   OUT_RING(ring, 0x00000000); /* VGT_MIN_VTX_INDX */

   OUT_PKT3(ring, CP_SET_CONSTANT, 2);
   OUT_RING(ring, CP_REG(REG_A2XX_VGT_INDX_OFFSET));
   OUT_RING(ring, 0x00000000);

   OUT_PKT3(ring, CP_SET_CONSTANT, 2);
   OUT_RING(ring, CP_REG(REG_A2XX_SQ_CONTEXT_MISC));
   OUT_RING(ring, A2XX_SQ_CONTEXT_MISC_SC_SAMPLE_CNTL(CENTERS_ONLY));

   OUT_PKT3(ring, CP_SET_CONSTANT, 2);
   OUT_RING(ring, CP_REG(REG_A2XX_SQ_INTERPOLATOR_CNTL));
   OUT_RING(ring, 0xffffffff);

   OUT_PKT3(ring, CP_SET_CONSTANT, 2);
   OUT_RING(ring, CP_REG(REG_A2XX_PA_SC_AA_CONFIG));
   OUT_RING(ring, 0x00000000);

   OUT_PKT3(ring, CP_SET_CONSTANT, 2);
   OUT_RING(ring, CP_REG(REG_A2XX_PA_SC_LINE_CNTL));
   OUT_RING(ring, 0x00000000);

   OUT_PKT3(ring, CP_SET_CONSTANT, 2);
   OUT_RING(ring, CP_REG(REG_A2XX_PA_SC_WINDOW_OFFSET));
   OUT_RING(ring, 0x00000000);

   // XXX we change this dynamically for draw/clear.. vs gmem<->mem..
   OUT_PKT3(ring, CP_SET_CONSTANT, 2);
   OUT_RING(ring, CP_REG(REG_A2XX_RB_MODECONTROL));
   OUT_RING(ring, A2XX_RB_MODECONTROL_EDRAM_MODE(COLOR_DEPTH));

   OUT_PKT3(ring, CP_SET_CONSTANT, 2);
   OUT_RING(ring, CP_REG(REG_A2XX_RB_SAMPLE_POS));
   OUT_RING(ring, 0x88888888);

   OUT_PKT3(ring, CP_SET_CONSTANT, 2);
   OUT_RING(ring, CP_REG(REG_A2XX_RB_COLOR_DEST_MASK));
   OUT_RING(ring, 0xffffffff);

   OUT_PKT3(ring, CP_SET_CONSTANT, 2);
   OUT_RING(ring, CP_REG(REG_A2XX_RB_COPY_DEST_INFO));
   OUT_RING(ring, A2XX_RB_COPY_DEST_INFO_FORMAT(COLORX_4_4_4_4) |
                     A2XX_RB_COPY_DEST_INFO_WRITE_RED |
                     A2XX_RB_COPY_DEST_INFO_WRITE_GREEN |
                     A2XX_RB_COPY_DEST_INFO_WRITE_BLUE |
                     A2XX_RB_COPY_DEST_INFO_WRITE_ALPHA);

   OUT_PKT3(ring, CP_SET_CONSTANT, 3);
   OUT_RING(ring, CP_REG(REG_A2XX_SQ_WRAPPING_0));
   OUT_RING(ring, 0x00000000); /* SQ_WRAPPING_0 */
   OUT_RING(ring, 0x00000000); /* SQ_WRAPPING_1 */

   OUT_PKT3(ring, CP_SET_DRAW_INIT_FLAGS, 1);
   OUT_RING(ring, 0x00000000);

   OUT_PKT3(ring, CP_WAIT_REG_EQ, 4);
   OUT_RING(ring, 0x000005d0);
   OUT_RING(ring, 0x00000000);
   OUT_RING(ring, 0x5f601000);
   OUT_RING(ring, 0x00000001);

   OUT_PKT0(ring, REG_A2XX_SQ_INST_STORE_MANAGMENT, 1);
   OUT_RING(ring, 0x00000180);

   OUT_PKT3(ring, CP_INVALIDATE_STATE, 1);
   OUT_RING(ring, 0x00000300);

   OUT_PKT3(ring, CP_SET_SHADER_BASES, 1);
   OUT_RING(ring, 0x80000180);

   /* not sure what this form of CP_SET_CONSTANT is.. */
   OUT_PKT3(ring, CP_SET_CONSTANT, 13);
   OUT_RING(ring, 0x00000000);
   OUT_RING(ring, 0x00000000);
   OUT_RING(ring, 0x00000000);
   OUT_RING(ring, 0x00000000);
   OUT_RING(ring, 0x00000000);
   OUT_RING(ring, 0x469c4000);
   OUT_RING(ring, 0x3f800000);
   OUT_RING(ring, 0x3f000000);
   OUT_RING(ring, 0x00000000);
   OUT_RING(ring, 0x40000000);
   OUT_RING(ring, 0x3f400000);
   OUT_RING(ring, 0x3ec00000);
   OUT_RING(ring, 0x3e800000);

   OUT_PKT3(ring, CP_SET_CONSTANT, 2);
   OUT_RING(ring, CP_REG(REG_A2XX_RB_COLOR_MASK));
   OUT_RING(ring,
            A2XX_RB_COLOR_MASK_WRITE_RED | A2XX_RB_COLOR_MASK_WRITE_GREEN |
               A2XX_RB_COLOR_MASK_WRITE_BLUE | A2XX_RB_COLOR_MASK_WRITE_ALPHA);

   OUT_PKT3(ring, CP_SET_CONSTANT, 5);
   OUT_RING(ring, CP_REG(REG_A2XX_RB_BLEND_RED));
   OUT_RING(ring, 0x00000000); /* RB_BLEND_RED */
   OUT_RING(ring, 0x00000000); /* RB_BLEND_GREEN */
   OUT_RING(ring, 0x00000000); /* RB_BLEND_BLUE */
   OUT_RING(ring, 0x000000ff); /* RB_BLEND_ALPHA */

   OUT_PKT3(ring, CP_SET_CONSTANT, 2);
   OUT_RING(ring, CP_REG(REG_A2XX_PA_CL_VTE_CNTL));
   OUT_RING(ring, A2XX_PA_CL_VTE_CNTL_VTX_W0_FMT |
                     A2XX_PA_CL_VTE_CNTL_VPORT_X_SCALE_ENA |
                     A2XX_PA_CL_VTE_CNTL_VPORT_X_OFFSET_ENA |
                     A2XX_PA_CL_VTE_CNTL_VPORT_Y_SCALE_ENA |
                     A2XX_PA_CL_VTE_CNTL_VPORT_Y_OFFSET_ENA |
                     A2XX_PA_CL_VTE_CNTL_VPORT_Z_SCALE_ENA |
                     A2XX_PA_CL_VTE_CNTL_VPORT_Z_OFFSET_ENA);
}

void
fd2_emit_init_screen(struct pipe_screen *pscreen)
{
   struct fd_screen *screen = fd_screen(pscreen);
   screen->emit_ib = fd2_emit_ib;
}

void
fd2_emit_init(struct pipe_context *pctx)
{
}