/*
 * Copyright 2016 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: AMD
 *
 */

#include "dce_mem_input.h"
#include "reg_helper.h"
#include "basics/conversion.h"

#define CTX \
	dce_mi->base.ctx
#define REG(reg)\
	dce_mi->regs->reg

#undef FN
#define FN(reg_name, field_name) \
	dce_mi->shifts->field_name, dce_mi->masks->field_name

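/*
 * Page-table-entry (PTE) prefetch parameters, indexed by tiling mode and
 * bits per pixel.  The trailing param_6..param_8 fields appear to be
 * placeholders and are left at zero in the table below.
 */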
struct pte_setting {
	unsigned int bpp;
	unsigned int page_width;
	unsigned int page_height;
	unsigned char min_pte_before_flip_horiz_scan;
	unsigned char min_pte_before_flip_vert_scan;
	unsigned char pte_req_per_chunk;
	unsigned char param_6;
	unsigned char param_7;
	unsigned char param_8;
};

enum mi_bits_per_pixel {
	mi_bpp_8 = 0,
	mi_bpp_16,
	mi_bpp_32,
	mi_bpp_64,
	mi_bpp_count,
};

enum mi_tiling_format {
	mi_tiling_linear = 0,
	mi_tiling_1D,
	mi_tiling_2D,
	mi_tiling_count,
};

static const struct pte_setting pte_settings[mi_tiling_count][mi_bpp_count] = {
	[mi_tiling_linear] = {
		{  8, 4096, 1, 8, 0, 1, 0, 0, 0},
		{ 16, 2048, 1, 8, 0, 1, 0, 0, 0},
		{ 32, 1024, 1, 8, 0, 1, 0, 0, 0},
		{ 64,  512, 1, 8, 0, 1, 0, 0, 0}, /* new for 64bpp from HW */
	},
	[mi_tiling_1D] = {
		{  8, 512, 8, 1, 0, 1, 0, 0, 0}, /* 0 for invalid */
		{ 16, 256, 8, 2, 0, 1, 0, 0, 0},
		{ 32, 128, 8, 4, 0, 1, 0, 0, 0},
		{ 64,  64, 8, 4, 0, 1, 0, 0, 0}, /* fake */
	},
	[mi_tiling_2D] = {
		{  8, 64, 64,  8,  8, 1, 4, 0, 0},
		{ 16, 64, 32,  8, 16, 1, 8, 0, 0},
		{ 32, 32, 32, 16, 16, 1, 8, 0, 0},
		{ 64,  8, 32, 16, 16, 1, 8, 0, 0}, /* fake */
	},
};

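/*
 * Map a surface pixel format to a bpp bucket.  This relies on the
 * SURFACE_PIXEL_FORMAT_GRPH_* enum being ordered by increasing bpp.
 */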
static enum mi_bits_per_pixel get_mi_bpp(
		enum surface_pixel_format format)
{
	if (format >= SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616)
		return mi_bpp_64;
	else if (format >= SURFACE_PIXEL_FORMAT_GRPH_ARGB8888)
		return mi_bpp_32;
	else if (format >= SURFACE_PIXEL_FORMAT_GRPH_ARGB1555)
		return mi_bpp_16;
	else
		return mi_bpp_8;
}

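/* Collapse the GFX8 array mode into the coarse tiling class used to index
 * pte_settings above. */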
static enum mi_tiling_format get_mi_tiling(
		struct dc_tiling_info *tiling_info)
{
	switch (tiling_info->gfx8.array_mode) {
	case DC_ARRAY_1D_TILED_THIN1:
	case DC_ARRAY_1D_TILED_THICK:
	case DC_ARRAY_PRT_TILED_THIN1:
		return mi_tiling_1D;
	case DC_ARRAY_2D_TILED_THIN1:
	case DC_ARRAY_2D_TILED_THICK:
	case DC_ARRAY_2D_TILED_X_THICK:
	case DC_ARRAY_PRT_2D_TILED_THIN1:
	case DC_ARRAY_PRT_2D_TILED_THICK:
		return mi_tiling_2D;
	case DC_ARRAY_LINEAR_GENERAL:
	case DC_ARRAY_LINEAR_ALLIGNED:
		return mi_tiling_linear;
	default:
		return mi_tiling_2D;
	}
}

static bool is_vert_scan(enum dc_rotation_angle rotation)
{
	switch (rotation) {
	case ROTATION_ANGLE_90:
	case ROTATION_ANGLE_270:
		return true;
	default:
		return false;
	}
}

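/*
 * Program the DVMM PTE prefetch controls for this surface: page dimensions
 * (as log2 values), the minimum number of PTEs fetched before a flip (which
 * depends on scan direction), and the per-chunk/outstanding request limits.
 */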
static void dce_mi_program_pte_vm(
	struct mem_input *mi,
	enum surface_pixel_format format,
	struct dc_tiling_info *tiling_info,
	enum dc_rotation_angle rotation)
{
	struct dce_mem_input *dce_mi = TO_DCE_MEM_INPUT(mi);
	enum mi_bits_per_pixel mi_bpp = get_mi_bpp(format);
	enum mi_tiling_format mi_tiling = get_mi_tiling(tiling_info);
	const struct pte_setting *pte = &pte_settings[mi_tiling][mi_bpp];

	unsigned int page_width = log_2(pte->page_width);
	unsigned int page_height = log_2(pte->page_height);
	unsigned int min_pte_before_flip = is_vert_scan(rotation) ?
			pte->min_pte_before_flip_vert_scan :
			pte->min_pte_before_flip_horiz_scan;

	REG_UPDATE(GRPH_PIPE_OUTSTANDING_REQUEST_LIMIT,
			GRPH_PIPE_OUTSTANDING_REQUEST_LIMIT, 0x7f);

	REG_UPDATE_3(DVMM_PTE_CONTROL,
			DVMM_PAGE_WIDTH, page_width,
			DVMM_PAGE_HEIGHT, page_height,
			DVMM_MIN_PTE_BEFORE_FLIP, min_pte_before_flip);

	REG_UPDATE_2(DVMM_PTE_ARB_CONTROL,
			DVMM_PTE_REQ_PER_CHUNK, pte->pte_req_per_chunk,
			DVMM_MAX_PTE_REQ_OUTSTANDING, 0x7f);
}

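/*
 * Urgency watermark programming: wm_select picks which watermark set the
 * following writes target, then the low/high urgency marks are written.
 */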
static void program_urgency_watermark(
	struct dce_mem_input *dce_mi,
	uint32_t wm_select,
	uint32_t urgency_low_wm,
	uint32_t urgency_high_wm)
{
	REG_UPDATE(DPG_WATERMARK_MASK_CONTROL,
		URGENCY_WATERMARK_MASK, wm_select);

	REG_SET_2(DPG_PIPE_URGENCY_CONTROL, 0,
		URGENCY_LOW_WATERMARK, urgency_low_wm,
		URGENCY_HIGH_WATERMARK, urgency_high_wm);
}

#if defined(CONFIG_DRM_AMD_DC_SI)
static void dce60_program_urgency_watermark(
	struct dce_mem_input *dce_mi,
	uint32_t wm_select,
	uint32_t urgency_low_wm,
	uint32_t urgency_high_wm)
{
	REG_UPDATE(DPG_PIPE_ARBITRATION_CONTROL3,
		URGENCY_WATERMARK_MASK, wm_select);

	REG_SET_2(DPG_PIPE_URGENCY_CONTROL, 0,
		URGENCY_LOW_WATERMARK, urgency_low_wm,
		URGENCY_HIGH_WATERMARK, urgency_high_wm);
}
#endif

static void dce120_program_urgency_watermark(
	struct dce_mem_input *dce_mi,
	uint32_t wm_select,
	uint32_t urgency_low_wm,
	uint32_t urgency_high_wm)
{
	REG_UPDATE(DPG_WATERMARK_MASK_CONTROL,
		URGENCY_WATERMARK_MASK, wm_select);

	REG_SET_2(DPG_PIPE_URGENCY_CONTROL, 0,
		URGENCY_LOW_WATERMARK, urgency_low_wm,
		URGENCY_HIGH_WATERMARK, urgency_high_wm);

	REG_SET_2(DPG_PIPE_URGENT_LEVEL_CONTROL, 0,
		URGENT_LEVEL_LOW_WATERMARK, urgency_low_wm,
		URGENT_LEVEL_HIGH_WATERMARK, urgency_high_wm);

}

#if defined(CONFIG_DRM_AMD_DC_SI)
static void dce60_program_nbp_watermark(
	struct dce_mem_input *dce_mi,
	uint32_t wm_select,
	uint32_t nbp_wm)
{
	REG_UPDATE(DPG_PIPE_NB_PSTATE_CHANGE_CONTROL,
			NB_PSTATE_CHANGE_WATERMARK_MASK, wm_select);

	REG_UPDATE_3(DPG_PIPE_NB_PSTATE_CHANGE_CONTROL,
			NB_PSTATE_CHANGE_ENABLE, 1,
			NB_PSTATE_CHANGE_URGENT_DURING_REQUEST, 1,
			NB_PSTATE_CHANGE_NOT_SELF_REFRESH_DURING_REQUEST, 1);

	REG_UPDATE(DPG_PIPE_NB_PSTATE_CHANGE_CONTROL,
			NB_PSTATE_CHANGE_WATERMARK, nbp_wm);
}
#endif

static void program_nbp_watermark(
	struct dce_mem_input *dce_mi,
	uint32_t wm_select,
	uint32_t nbp_wm)
{
	if (REG(DPG_PIPE_NB_PSTATE_CHANGE_CONTROL)) {
		REG_UPDATE(DPG_WATERMARK_MASK_CONTROL,
				NB_PSTATE_CHANGE_WATERMARK_MASK, wm_select);

		REG_UPDATE_3(DPG_PIPE_NB_PSTATE_CHANGE_CONTROL,
				NB_PSTATE_CHANGE_ENABLE, 1,
				NB_PSTATE_CHANGE_URGENT_DURING_REQUEST, 1,
				NB_PSTATE_CHANGE_NOT_SELF_REFRESH_DURING_REQUEST, 1);

		REG_UPDATE(DPG_PIPE_NB_PSTATE_CHANGE_CONTROL,
				NB_PSTATE_CHANGE_WATERMARK, nbp_wm);
	}

	if (REG(DPG_PIPE_LOW_POWER_CONTROL)) {
		REG_UPDATE(DPG_WATERMARK_MASK_CONTROL,
				PSTATE_CHANGE_WATERMARK_MASK, wm_select);

		REG_UPDATE_3(DPG_PIPE_LOW_POWER_CONTROL,
				PSTATE_CHANGE_ENABLE, 1,
				PSTATE_CHANGE_URGENT_DURING_REQUEST, 1,
				PSTATE_CHANGE_NOT_SELF_REFRESH_DURING_REQUEST, 1);

		REG_UPDATE(DPG_PIPE_LOW_POWER_CONTROL,
				PSTATE_CHANGE_WATERMARK, nbp_wm);
	}
}

#if defined(CONFIG_DRM_AMD_DC_SI)
static void dce60_program_stutter_watermark(
	struct dce_mem_input *dce_mi,
	uint32_t wm_select,
	uint32_t stutter_mark)
{
	REG_UPDATE(DPG_PIPE_STUTTER_CONTROL,
			STUTTER_EXIT_SELF_REFRESH_WATERMARK_MASK, wm_select);

	REG_UPDATE(DPG_PIPE_STUTTER_CONTROL,
			STUTTER_EXIT_SELF_REFRESH_WATERMARK, stutter_mark);
}
#endif

static void dce120_program_stutter_watermark(
	struct dce_mem_input *dce_mi,
	uint32_t wm_select,
	uint32_t stutter_mark,
	uint32_t stutter_entry)
{
	REG_UPDATE(DPG_WATERMARK_MASK_CONTROL,
			STUTTER_EXIT_SELF_REFRESH_WATERMARK_MASK, wm_select);

	if (REG(DPG_PIPE_STUTTER_CONTROL2))
		REG_UPDATE_2(DPG_PIPE_STUTTER_CONTROL2,
				STUTTER_EXIT_SELF_REFRESH_WATERMARK, stutter_mark,
				STUTTER_ENTER_SELF_REFRESH_WATERMARK, stutter_entry);
	else
		REG_UPDATE_2(DPG_PIPE_STUTTER_CONTROL,
				STUTTER_EXIT_SELF_REFRESH_WATERMARK, stutter_mark,
				STUTTER_ENTER_SELF_REFRESH_WATERMARK, stutter_entry);
}

static void program_stutter_watermark(
	struct dce_mem_input *dce_mi,
	uint32_t wm_select,
	uint32_t stutter_mark)
{
	REG_UPDATE(DPG_WATERMARK_MASK_CONTROL,
			STUTTER_EXIT_SELF_REFRESH_WATERMARK_MASK, wm_select);

	if (REG(DPG_PIPE_STUTTER_CONTROL2))
		REG_UPDATE(DPG_PIPE_STUTTER_CONTROL2,
				STUTTER_EXIT_SELF_REFRESH_WATERMARK, stutter_mark);
	else
		REG_UPDATE(DPG_PIPE_STUTTER_CONTROL,
				STUTTER_EXIT_SELF_REFRESH_WATERMARK, stutter_mark);
}

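/*
 * Display watermark programming for the base DCE variant.  Only watermark
 * sets A and D are written here (wm_select 2 and 1 respectively); the
 * DCE 11.2/12 variants further down program all four sets.
 */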
static void dce_mi_program_display_marks(
	struct mem_input *mi,
	struct dce_watermarks nbp,
	struct dce_watermarks stutter_exit,
	struct dce_watermarks stutter_enter,
	struct dce_watermarks urgent,
	uint32_t total_dest_line_time_ns)
{
	struct dce_mem_input *dce_mi = TO_DCE_MEM_INPUT(mi);
	uint32_t stutter_en = mi->ctx->dc->debug.disable_stutter ? 0 : 1;

	program_urgency_watermark(dce_mi, 2, /* set a */
			urgent.a_mark, total_dest_line_time_ns);
	program_urgency_watermark(dce_mi, 1, /* set d */
			urgent.d_mark, total_dest_line_time_ns);

	REG_UPDATE_2(DPG_PIPE_STUTTER_CONTROL,
		STUTTER_ENABLE, stutter_en,
		STUTTER_IGNORE_FBC, 1);
	program_nbp_watermark(dce_mi, 2, nbp.a_mark); /* set a */
	program_nbp_watermark(dce_mi, 1, nbp.d_mark); /* set d */

	program_stutter_watermark(dce_mi, 2, stutter_exit.a_mark); /* set a */
	program_stutter_watermark(dce_mi, 1, stutter_exit.d_mark); /* set d */
}

#if defined(CONFIG_DRM_AMD_DC_SI)
static void dce60_mi_program_display_marks(
	struct mem_input *mi,
	struct dce_watermarks nbp,
	struct dce_watermarks stutter_exit,
	struct dce_watermarks stutter_enter,
	struct dce_watermarks urgent,
	uint32_t total_dest_line_time_ns)
{
	struct dce_mem_input *dce_mi = TO_DCE_MEM_INPUT(mi);
	uint32_t stutter_en = mi->ctx->dc->debug.disable_stutter ? 0 : 1;

	dce60_program_urgency_watermark(dce_mi, 2, /* set a */
			urgent.a_mark, total_dest_line_time_ns);
	dce60_program_urgency_watermark(dce_mi, 1, /* set d */
			urgent.d_mark, total_dest_line_time_ns);

	REG_UPDATE_2(DPG_PIPE_STUTTER_CONTROL,
		STUTTER_ENABLE, stutter_en,
		STUTTER_IGNORE_FBC, 1);
	dce60_program_nbp_watermark(dce_mi, 2, nbp.a_mark); /* set a */
	dce60_program_nbp_watermark(dce_mi, 1, nbp.d_mark); /* set d */

	dce60_program_stutter_watermark(dce_mi, 2, stutter_exit.a_mark); /* set a */
	dce60_program_stutter_watermark(dce_mi, 1, stutter_exit.d_mark); /* set d */
}
#endif

static void dce112_mi_program_display_marks(struct mem_input *mi,
	struct dce_watermarks nbp,
	struct dce_watermarks stutter_exit,
	struct dce_watermarks stutter_entry,
	struct dce_watermarks urgent,
	uint32_t total_dest_line_time_ns)
{
	struct dce_mem_input *dce_mi = TO_DCE_MEM_INPUT(mi);
	uint32_t stutter_en = mi->ctx->dc->debug.disable_stutter ? 0 : 1;

	program_urgency_watermark(dce_mi, 0, /* set a */
			urgent.a_mark, total_dest_line_time_ns);
	program_urgency_watermark(dce_mi, 1, /* set b */
			urgent.b_mark, total_dest_line_time_ns);
	program_urgency_watermark(dce_mi, 2, /* set c */
			urgent.c_mark, total_dest_line_time_ns);
	program_urgency_watermark(dce_mi, 3, /* set d */
			urgent.d_mark, total_dest_line_time_ns);

	REG_UPDATE_2(DPG_PIPE_STUTTER_CONTROL,
		STUTTER_ENABLE, stutter_en,
		STUTTER_IGNORE_FBC, 1);
	program_nbp_watermark(dce_mi, 0, nbp.a_mark); /* set a */
	program_nbp_watermark(dce_mi, 1, nbp.b_mark); /* set b */
	program_nbp_watermark(dce_mi, 2, nbp.c_mark); /* set c */
	program_nbp_watermark(dce_mi, 3, nbp.d_mark); /* set d */

	program_stutter_watermark(dce_mi, 0, stutter_exit.a_mark); /* set a */
	program_stutter_watermark(dce_mi, 1, stutter_exit.b_mark); /* set b */
	program_stutter_watermark(dce_mi, 2, stutter_exit.c_mark); /* set c */
	program_stutter_watermark(dce_mi, 3, stutter_exit.d_mark); /* set d */
}

static void dce120_mi_program_display_marks(struct mem_input *mi,
	struct dce_watermarks nbp,
	struct dce_watermarks stutter_exit,
	struct dce_watermarks stutter_entry,
	struct dce_watermarks urgent,
	uint32_t total_dest_line_time_ns)
{
	struct dce_mem_input *dce_mi = TO_DCE_MEM_INPUT(mi);
	uint32_t stutter_en = mi->ctx->dc->debug.disable_stutter ? 0 : 1;

	dce120_program_urgency_watermark(dce_mi, 0, /* set a */
			urgent.a_mark, total_dest_line_time_ns);
	dce120_program_urgency_watermark(dce_mi, 1, /* set b */
			urgent.b_mark, total_dest_line_time_ns);
	dce120_program_urgency_watermark(dce_mi, 2, /* set c */
			urgent.c_mark, total_dest_line_time_ns);
	dce120_program_urgency_watermark(dce_mi, 3, /* set d */
			urgent.d_mark, total_dest_line_time_ns);

	REG_UPDATE_2(DPG_PIPE_STUTTER_CONTROL,
			STUTTER_ENABLE, stutter_en,
			STUTTER_IGNORE_FBC, 1);
	program_nbp_watermark(dce_mi, 0, nbp.a_mark); /* set a */
	program_nbp_watermark(dce_mi, 1, nbp.b_mark); /* set b */
	program_nbp_watermark(dce_mi, 2, nbp.c_mark); /* set c */
	program_nbp_watermark(dce_mi, 3, nbp.d_mark); /* set d */

	dce120_program_stutter_watermark(dce_mi, 0, stutter_exit.a_mark, stutter_entry.a_mark); /* set a */
	dce120_program_stutter_watermark(dce_mi, 1, stutter_exit.b_mark, stutter_entry.b_mark); /* set b */
	dce120_program_stutter_watermark(dce_mi, 2, stutter_exit.c_mark, stutter_entry.c_mark); /* set c */
	dce120_program_stutter_watermark(dce_mi, 3, stutter_exit.d_mark, stutter_entry.d_mark); /* set d */
}

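/*
 * Tiling programming: the ASIC generation is inferred from which field masks
 * are populated.  GRPH_SW_MODE exists only on GFX9, GRPH_MICRO_TILE_MODE only
 * on GFX8, and the GFX6 path reuses the gfx8 tiling struct but has no micro
 * tile mode field.
 */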
static void program_tiling(
	struct dce_mem_input *dce_mi, const struct dc_tiling_info *info)
{
	if (dce_mi->masks->GRPH_SW_MODE) { /* GFX9 */
		REG_UPDATE_6(GRPH_CONTROL,
				GRPH_SW_MODE, info->gfx9.swizzle,
				GRPH_NUM_BANKS, log_2(info->gfx9.num_banks),
				GRPH_NUM_SHADER_ENGINES, log_2(info->gfx9.num_shader_engines),
				GRPH_NUM_PIPES, log_2(info->gfx9.num_pipes),
				GRPH_COLOR_EXPANSION_MODE, 1,
				GRPH_SE_ENABLE, info->gfx9.shaderEnable);
		/* TODO: DCP0_GRPH_CONTROL__GRPH_SE_ENABLE where to get info
				GRPH_SE_ENABLE, 1,
				GRPH_Z, 0);
		*/
	}

	if (dce_mi->masks->GRPH_MICRO_TILE_MODE) { /* GFX8 */
		REG_UPDATE_9(GRPH_CONTROL,
				GRPH_NUM_BANKS, info->gfx8.num_banks,
				GRPH_BANK_WIDTH, info->gfx8.bank_width,
				GRPH_BANK_HEIGHT, info->gfx8.bank_height,
				GRPH_MACRO_TILE_ASPECT, info->gfx8.tile_aspect,
				GRPH_TILE_SPLIT, info->gfx8.tile_split,
				GRPH_MICRO_TILE_MODE, info->gfx8.tile_mode,
				GRPH_PIPE_CONFIG, info->gfx8.pipe_config,
				GRPH_ARRAY_MODE, info->gfx8.array_mode,
				GRPH_COLOR_EXPANSION_MODE, 1);
		/* 01 - DCP_GRPH_COLOR_EXPANSION_MODE_ZEXP: zero expansion for YCbCr */
		/*
				GRPH_Z, 0);
		*/
	}

	if (dce_mi->masks->GRPH_ARRAY_MODE) { /* GFX6 but reuses gfx8 struct */
		REG_UPDATE_8(GRPH_CONTROL,
				GRPH_NUM_BANKS, info->gfx8.num_banks,
				GRPH_BANK_WIDTH, info->gfx8.bank_width,
				GRPH_BANK_HEIGHT, info->gfx8.bank_height,
				GRPH_MACRO_TILE_ASPECT, info->gfx8.tile_aspect,
				GRPH_TILE_SPLIT, info->gfx8.tile_split,
				/* DCE6 has no GRPH_MICRO_TILE_MODE mask */
				GRPH_PIPE_CONFIG, info->gfx8.pipe_config,
				GRPH_ARRAY_MODE, info->gfx8.array_mode,
				GRPH_COLOR_EXPANSION_MODE, 1);
		/* 01 - DCP_GRPH_COLOR_EXPANSION_MODE_ZEXP: zero expansion for YCbCr */
		/*
				GRPH_Z, 0);
		*/
	}
}

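/*
 * Program surface extents and rotation.  For 90/270 degree rotation the x/y
 * origin and the width/height are swapped before being written to the
 * GRPH_* extent registers.
 */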
static void program_size_and_rotation(
	struct dce_mem_input *dce_mi,
	enum dc_rotation_angle rotation,
	const struct plane_size *plane_size)
{
	const struct rect *in_rect = &plane_size->surface_size;
	struct rect hw_rect = plane_size->surface_size;
	const uint32_t rotation_angles[ROTATION_ANGLE_COUNT] = {
			[ROTATION_ANGLE_0] = 0,
			[ROTATION_ANGLE_90] = 1,
			[ROTATION_ANGLE_180] = 2,
			[ROTATION_ANGLE_270] = 3,
	};

	if (rotation == ROTATION_ANGLE_90 || rotation == ROTATION_ANGLE_270) {
		hw_rect.x = in_rect->y;
		hw_rect.y = in_rect->x;

		hw_rect.height = in_rect->width;
		hw_rect.width = in_rect->height;
	}

	REG_SET(GRPH_X_START, 0,
			GRPH_X_START, hw_rect.x);

	REG_SET(GRPH_Y_START, 0,
			GRPH_Y_START, hw_rect.y);

	REG_SET(GRPH_X_END, 0,
			GRPH_X_END, hw_rect.width);

	REG_SET(GRPH_Y_END, 0,
			GRPH_Y_END, hw_rect.height);

	REG_SET(GRPH_PITCH, 0,
			GRPH_PITCH, plane_size->surface_pitch);

	REG_SET(HW_ROTATION, 0,
			GRPH_ROTATION_ANGLE, rotation_angles[rotation]);
}

#if defined(CONFIG_DRM_AMD_DC_SI)
static void dce60_program_size(
	struct dce_mem_input *dce_mi,
	enum dc_rotation_angle rotation, /* not used in DCE6 */
	const struct plane_size *plane_size)
{
	struct rect hw_rect = plane_size->surface_size;
	/* DCE6 has no HW rotation, skip rotation_angles declaration */

	/* DCE6 has no HW rotation, skip ROTATION_ANGLE_* processing */

	REG_SET(GRPH_X_START, 0,
			GRPH_X_START, hw_rect.x);

	REG_SET(GRPH_Y_START, 0,
			GRPH_Y_START, hw_rect.y);

	REG_SET(GRPH_X_END, 0,
			GRPH_X_END, hw_rect.width);

	REG_SET(GRPH_Y_END, 0,
			GRPH_Y_END, hw_rect.height);

	REG_SET(GRPH_PITCH, 0,
			GRPH_PITCH, plane_size->surface_pitch);

	/* DCE6 has no HW_ROTATION register, skip setting rotation_angles */
}
#endif

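/*
 * Graphics pixel format programming: the red/blue crossbars swap channels for
 * ABGR-ordered formats, GRPH_DEPTH/GRPH_FORMAT encode the bpp and layout, and
 * FP16 additionally enables the signed/float prescale path.
 */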
static void program_grph_pixel_format(
	struct dce_mem_input *dce_mi,
	enum surface_pixel_format format)
{
	uint32_t red_xbar = 0, blue_xbar = 0; /* no swap */
	uint32_t grph_depth = 0, grph_format = 0;
	uint32_t sign = 0, floating = 0;

	if (format == SURFACE_PIXEL_FORMAT_GRPH_ABGR8888 ||
			/*todo: doesn't look like we handle BGRA here,
			 * should probably swap endian*/
			format == SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010 ||
			format == SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010_XR_BIAS ||
			format == SURFACE_PIXEL_FORMAT_GRPH_ABGR16161616 ||
			format == SURFACE_PIXEL_FORMAT_GRPH_ABGR16161616F) {
		/* ABGR formats */
		red_xbar = 2;
		blue_xbar = 2;
	}

	REG_SET_2(GRPH_SWAP_CNTL, 0,
			GRPH_RED_CROSSBAR, red_xbar,
			GRPH_BLUE_CROSSBAR, blue_xbar);

	switch (format) {
	case SURFACE_PIXEL_FORMAT_GRPH_PALETA_256_COLORS:
		grph_depth = 0;
		grph_format = 0;
		break;
	case SURFACE_PIXEL_FORMAT_GRPH_ARGB1555:
		grph_depth = 1;
		grph_format = 0;
		break;
	case SURFACE_PIXEL_FORMAT_GRPH_RGB565:
		grph_depth = 1;
		grph_format = 1;
		break;
	case SURFACE_PIXEL_FORMAT_GRPH_ARGB8888:
	case SURFACE_PIXEL_FORMAT_GRPH_ABGR8888:
		grph_depth = 2;
		grph_format = 0;
		break;
	case SURFACE_PIXEL_FORMAT_GRPH_ARGB2101010:
	case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010:
	case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010_XR_BIAS:
		grph_depth = 2;
		grph_format = 1;
		break;
	case SURFACE_PIXEL_FORMAT_GRPH_ABGR16161616F:
		sign = 1;
		floating = 1;
		fallthrough;
	case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616F: /* shouldn't this get float too? */
	case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616:
	case SURFACE_PIXEL_FORMAT_GRPH_ABGR16161616:
		grph_depth = 3;
		grph_format = 0;
		break;
	default:
		DC_ERR("unsupported grph pixel format");
		break;
	}

	REG_UPDATE_2(GRPH_CONTROL,
			GRPH_DEPTH, grph_depth,
			GRPH_FORMAT, grph_format);

	REG_UPDATE_4(PRESCALE_GRPH_CONTROL,
			GRPH_PRESCALE_SELECT, floating,
			GRPH_PRESCALE_R_SIGN, sign,
			GRPH_PRESCALE_G_SIGN, sign,
			GRPH_PRESCALE_B_SIGN, sign);
}

static void dce_mi_clear_tiling(
	struct mem_input *mi)
{
	struct dce_mem_input *dce_mi = TO_DCE_MEM_INPUT(mi);

	if (dce_mi->masks->GRPH_SW_MODE) { /* GFX9 */
		REG_UPDATE(GRPH_CONTROL,
			   GRPH_SW_MODE, DC_SW_LINEAR);
	}

	if (dce_mi->masks->GRPH_MICRO_TILE_MODE) { /* GFX8 */
		REG_UPDATE(GRPH_CONTROL,
			   GRPH_ARRAY_MODE, DC_SW_LINEAR);
	}

	if (dce_mi->masks->GRPH_ARRAY_MODE) { /* GFX6 but reuses gfx8 struct */
		REG_UPDATE(GRPH_CONTROL,
			   GRPH_ARRAY_MODE, DC_SW_LINEAR);
	}
}

static void dce_mi_program_surface_config(
	struct mem_input *mi,
	enum surface_pixel_format format,
	struct dc_tiling_info *tiling_info,
	struct plane_size *plane_size,
	enum dc_rotation_angle rotation,
	struct dc_plane_dcc_param *dcc,
	bool horizontal_mirror)
{
	struct dce_mem_input *dce_mi = TO_DCE_MEM_INPUT(mi);
	REG_UPDATE(GRPH_ENABLE, GRPH_ENABLE, 1);

	program_tiling(dce_mi, tiling_info);
	program_size_and_rotation(dce_mi, rotation, plane_size);

	if (format < SURFACE_PIXEL_FORMAT_VIDEO_BEGIN)
		program_grph_pixel_format(dce_mi, format);
}

#if defined(CONFIG_DRM_AMD_DC_SI)
static void dce60_mi_program_surface_config(
	struct mem_input *mi,
	enum surface_pixel_format format,
	struct dc_tiling_info *tiling_info,
	struct plane_size *plane_size,
	enum dc_rotation_angle rotation, /* not used in DCE6 */
	struct dc_plane_dcc_param *dcc,
	bool horizontal_mirror)
{
	struct dce_mem_input *dce_mi = TO_DCE_MEM_INPUT(mi);
	REG_UPDATE(GRPH_ENABLE, GRPH_ENABLE, 1);

	program_tiling(dce_mi, tiling_info);
	dce60_program_size(dce_mi, rotation, plane_size);

	if (format < SURFACE_PIXEL_FORMAT_VIDEO_BEGIN)
		program_grph_pixel_format(dce_mi, format);
}
#endif

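/*
 * Return an upper bound, in microseconds, on how long a DMIF buffer switch
 * may take: twice the frame time, with the frame time clamped to at least
 * 30 ms.  The caller divides this by its polling interval to derive a retry
 * count while waiting for the buffer reallocation to complete.
 */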
static uint32_t get_dmif_switch_time_us(
	uint32_t h_total,
	uint32_t v_total,
	uint32_t pix_clk_khz)
{
	uint32_t frame_time;
	uint32_t pixels_per_second;
	uint32_t pixels_per_frame;
	uint32_t refresh_rate;
	const uint32_t us_in_sec = 1000000;
	const uint32_t min_single_frame_time_us = 30000;
	/*return double of frame time*/
	const uint32_t single_frame_time_multiplier = 2;

	if (!h_total || !v_total || !pix_clk_khz)
		return single_frame_time_multiplier * min_single_frame_time_us;

	/*TODO: should we use pixel format normalized pixel clock here?*/
	pixels_per_second = pix_clk_khz * 1000;
	pixels_per_frame = h_total * v_total;

	if (!pixels_per_second || !pixels_per_frame) {
		/* avoid division by zero */
		ASSERT(pixels_per_frame);
		ASSERT(pixels_per_second);
		return single_frame_time_multiplier * min_single_frame_time_us;
	}

	refresh_rate = pixels_per_second / pixels_per_frame;

	if (!refresh_rate) {
		/* avoid division by zero*/
		ASSERT(refresh_rate);
		return single_frame_time_multiplier * min_single_frame_time_us;
	}

	frame_time = us_in_sec / refresh_rate;

	if (frame_time < min_single_frame_time_us)
		frame_time = min_single_frame_time_us;

	frame_time *= single_frame_time_multiplier;

	return frame_time;
}

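/*
 * Request both DMIF buffers for this pipe and wait for the allocation to
 * complete, polling every 10 us for up to the switch time computed above.
 * Also programs the pixel duration used for arbitration and, where the
 * workaround applies, the single-head read request limit.
 */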
static void dce_mi_allocate_dmif(
	struct mem_input *mi,
	uint32_t h_total,
	uint32_t v_total,
	uint32_t pix_clk_khz,
	uint32_t total_stream_num)
{
	struct dce_mem_input *dce_mi = TO_DCE_MEM_INPUT(mi);
	const uint32_t retry_delay = 10;
	uint32_t retry_count = get_dmif_switch_time_us(
			h_total,
			v_total,
			pix_clk_khz) / retry_delay;

	uint32_t pix_dur;
	uint32_t buffers_allocated;
	uint32_t dmif_buffer_control;

	dmif_buffer_control = REG_GET(DMIF_BUFFER_CONTROL,
			DMIF_BUFFERS_ALLOCATED, &buffers_allocated);

	if (buffers_allocated == 2)
		return;

	REG_SET(DMIF_BUFFER_CONTROL, dmif_buffer_control,
			DMIF_BUFFERS_ALLOCATED, 2);

	REG_WAIT(DMIF_BUFFER_CONTROL,
			DMIF_BUFFERS_ALLOCATION_COMPLETED, 1,
			retry_delay, retry_count);

	if (pix_clk_khz != 0) {
		pix_dur = 1000000000ULL / pix_clk_khz;

		REG_UPDATE(DPG_PIPE_ARBITRATION_CONTROL1,
				PIXEL_DURATION, pix_dur);
	}

	if (dce_mi->wa.single_head_rdreq_dmif_limit) {
		uint32_t enable = (total_stream_num > 1) ? 0 :
				dce_mi->wa.single_head_rdreq_dmif_limit;

		REG_UPDATE(MC_HUB_RDREQ_DMIF_LIMIT,
				ENABLE, enable);
	}
}

static void dce_mi_free_dmif(
	struct mem_input *mi,
	uint32_t total_stream_num)
{
	struct dce_mem_input *dce_mi = TO_DCE_MEM_INPUT(mi);
	uint32_t buffers_allocated;
	uint32_t dmif_buffer_control;

	dmif_buffer_control = REG_GET(DMIF_BUFFER_CONTROL,
			DMIF_BUFFERS_ALLOCATED, &buffers_allocated);

	if (buffers_allocated == 0)
		return;

	REG_SET(DMIF_BUFFER_CONTROL, dmif_buffer_control,
			DMIF_BUFFERS_ALLOCATED, 0);

	REG_WAIT(DMIF_BUFFER_CONTROL,
			DMIF_BUFFERS_ALLOCATION_COMPLETED, 1,
			10, 3500);

	if (dce_mi->wa.single_head_rdreq_dmif_limit) {
		uint32_t enable = (total_stream_num > 1) ? 0 :
				dce_mi->wa.single_head_rdreq_dmif_limit;

		REG_UPDATE(MC_HUB_RDREQ_DMIF_LIMIT,
				ENABLE, enable);
	}
}


static void program_sec_addr(
	struct dce_mem_input *dce_mi,
	PHYSICAL_ADDRESS_LOC address)
{
	/*high register MUST be programmed first*/
	REG_SET(GRPH_SECONDARY_SURFACE_ADDRESS_HIGH, 0,
		GRPH_SECONDARY_SURFACE_ADDRESS_HIGH,
		address.high_part);

	REG_SET_2(GRPH_SECONDARY_SURFACE_ADDRESS, 0,
		GRPH_SECONDARY_SURFACE_ADDRESS, address.low_part >> 8,
		GRPH_SECONDARY_DFQ_ENABLE, 0);
}

static void program_pri_addr(
	struct dce_mem_input *dce_mi,
	PHYSICAL_ADDRESS_LOC address)
{
	/*high register MUST be programmed first*/
	REG_SET(GRPH_PRIMARY_SURFACE_ADDRESS_HIGH, 0,
		GRPH_PRIMARY_SURFACE_ADDRESS_HIGH,
		address.high_part);

	REG_SET(GRPH_PRIMARY_SURFACE_ADDRESS, 0,
		GRPH_PRIMARY_SURFACE_ADDRESS,
		address.low_part >> 8);
}

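/*
 * A flip is still pending while GRPH_SURFACE_UPDATE_PENDING is set.  Once it
 * clears, the last requested address is known to have taken effect and is
 * promoted to current_address.
 */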
static bool dce_mi_is_flip_pending(struct mem_input *mem_input)
{
	struct dce_mem_input *dce_mi = TO_DCE_MEM_INPUT(mem_input);
	uint32_t update_pending;

	REG_GET(GRPH_UPDATE, GRPH_SURFACE_UPDATE_PENDING, &update_pending);
	if (update_pending)
		return true;

	mem_input->current_address = mem_input->request_address;
	return false;
}

static bool dce_mi_program_surface_flip_and_addr(
	struct mem_input *mem_input,
	const struct dc_plane_address *address,
	bool flip_immediate)
{
	struct dce_mem_input *dce_mi = TO_DCE_MEM_INPUT(mem_input);

	REG_UPDATE(GRPH_UPDATE, GRPH_UPDATE_LOCK, 1);

	REG_UPDATE(
		GRPH_FLIP_CONTROL,
		GRPH_SURFACE_UPDATE_H_RETRACE_EN, flip_immediate ? 1 : 0);

	switch (address->type) {
	case PLN_ADDR_TYPE_GRAPHICS:
		if (address->grph.addr.quad_part == 0)
			break;
		program_pri_addr(dce_mi, address->grph.addr);
		break;
	case PLN_ADDR_TYPE_GRPH_STEREO:
		if (address->grph_stereo.left_addr.quad_part == 0 ||
		    address->grph_stereo.right_addr.quad_part == 0)
			break;
		program_pri_addr(dce_mi, address->grph_stereo.left_addr);
		program_sec_addr(dce_mi, address->grph_stereo.right_addr);
		break;
	default:
		/* not supported */
		BREAK_TO_DEBUGGER();
		break;
	}

	mem_input->request_address = *address;

	if (flip_immediate)
		mem_input->current_address = *address;

	REG_UPDATE(GRPH_UPDATE, GRPH_UPDATE_LOCK, 0);

	return true;
}

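/*
 * Function tables for the supported DCE generations.  They differ only in how
 * display marks and (for DCE6) the surface config are programmed; everything
 * else is shared.
 */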
static const struct mem_input_funcs dce_mi_funcs = {
	.mem_input_program_display_marks = dce_mi_program_display_marks,
	.allocate_mem_input = dce_mi_allocate_dmif,
	.free_mem_input = dce_mi_free_dmif,
	.mem_input_program_surface_flip_and_addr =
			dce_mi_program_surface_flip_and_addr,
	.mem_input_program_pte_vm = dce_mi_program_pte_vm,
	.mem_input_program_surface_config =
			dce_mi_program_surface_config,
	.mem_input_is_flip_pending = dce_mi_is_flip_pending,
	.mem_input_clear_tiling = dce_mi_clear_tiling,
};

#if defined(CONFIG_DRM_AMD_DC_SI)
static const struct mem_input_funcs dce60_mi_funcs = {
	.mem_input_program_display_marks = dce60_mi_program_display_marks,
	.allocate_mem_input = dce_mi_allocate_dmif,
	.free_mem_input = dce_mi_free_dmif,
	.mem_input_program_surface_flip_and_addr =
			dce_mi_program_surface_flip_and_addr,
	.mem_input_program_pte_vm = dce_mi_program_pte_vm,
	.mem_input_program_surface_config =
			dce60_mi_program_surface_config,
	.mem_input_is_flip_pending = dce_mi_is_flip_pending,
	.mem_input_clear_tiling = dce_mi_clear_tiling,
};
#endif

static const struct mem_input_funcs dce112_mi_funcs = {
	.mem_input_program_display_marks = dce112_mi_program_display_marks,
	.allocate_mem_input = dce_mi_allocate_dmif,
	.free_mem_input = dce_mi_free_dmif,
	.mem_input_program_surface_flip_and_addr =
			dce_mi_program_surface_flip_and_addr,
	.mem_input_program_pte_vm = dce_mi_program_pte_vm,
	.mem_input_program_surface_config =
			dce_mi_program_surface_config,
	.mem_input_is_flip_pending = dce_mi_is_flip_pending,
	.mem_input_clear_tiling = dce_mi_clear_tiling,
};

static const struct mem_input_funcs dce120_mi_funcs = {
	.mem_input_program_display_marks = dce120_mi_program_display_marks,
	.allocate_mem_input = dce_mi_allocate_dmif,
	.free_mem_input = dce_mi_free_dmif,
	.mem_input_program_surface_flip_and_addr =
			dce_mi_program_surface_flip_and_addr,
	.mem_input_program_pte_vm = dce_mi_program_pte_vm,
	.mem_input_program_surface_config =
			dce_mi_program_surface_config,
	.mem_input_is_flip_pending = dce_mi_is_flip_pending,
	.mem_input_clear_tiling = dce_mi_clear_tiling,
};

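/*
 * Constructors.  A typical caller is a resource constructor that wires a
 * dce_mem_input instance to its own register/shift/mask tables, roughly
 * (names below are illustrative only):
 *
 *	dce_mem_input_construct(&dce_mi, ctx, i, &mi_regs[i], &mi_shifts, &mi_masks);
 *
 * The DCE 6/11.2/12 variants further down only swap in their
 * generation-specific function table afterwards.
 */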
void dce_mem_input_construct(
	struct dce_mem_input *dce_mi,
	struct dc_context *ctx,
	int inst,
	const struct dce_mem_input_registers *regs,
	const struct dce_mem_input_shift *mi_shift,
	const struct dce_mem_input_mask *mi_mask)
{
	dce_mi->base.ctx = ctx;

	dce_mi->base.inst = inst;
	dce_mi->base.funcs = &dce_mi_funcs;

	dce_mi->regs = regs;
	dce_mi->shifts = mi_shift;
	dce_mi->masks = mi_mask;
}

#if defined(CONFIG_DRM_AMD_DC_SI)
void dce60_mem_input_construct(
	struct dce_mem_input *dce_mi,
	struct dc_context *ctx,
	int inst,
	const struct dce_mem_input_registers *regs,
	const struct dce_mem_input_shift *mi_shift,
	const struct dce_mem_input_mask *mi_mask)
{
	dce_mem_input_construct(dce_mi, ctx, inst, regs, mi_shift, mi_mask);
	dce_mi->base.funcs = &dce60_mi_funcs;
}
#endif

void dce112_mem_input_construct(
	struct dce_mem_input *dce_mi,
	struct dc_context *ctx,
	int inst,
	const struct dce_mem_input_registers *regs,
	const struct dce_mem_input_shift *mi_shift,
	const struct dce_mem_input_mask *mi_mask)
{
	dce_mem_input_construct(dce_mi, ctx, inst, regs, mi_shift, mi_mask);
	dce_mi->base.funcs = &dce112_mi_funcs;
}

void dce120_mem_input_construct(
	struct dce_mem_input *dce_mi,
	struct dc_context *ctx,
	int inst,
	const struct dce_mem_input_registers *regs,
	const struct dce_mem_input_shift *mi_shift,
	const struct dce_mem_input_mask *mi_mask)
{
	dce_mem_input_construct(dce_mi, ctx, inst, regs, mi_shift, mi_mask);
	dce_mi->base.funcs = &dce120_mi_funcs;
}