/**************************************************************************
 *
 * Copyright 2018 Advanced Micro Devices, Inc.
 *
 * SPDX-License-Identifier: MIT
 *
 **************************************************************************/

#include "pipe/p_video_codec.h"
#include "radeon_uvd_enc.h"
#include "radeon_video.h"
#include "radeonsi/si_pipe.h"
#include "util/u_memory.h"
#include "util/u_video.h"
#include "vl/vl_video_buffer.h"

#include <stdio.h>

#define RADEON_ENC_CS(value) (enc->cs.current.buf[enc->cs.current.cdw++] = (value))
#define RADEON_ENC_BEGIN(cmd) \
   { \
      uint32_t *begin = &enc->cs.current.buf[enc->cs.current.cdw++]; \
      RADEON_ENC_CS(cmd)
#define RADEON_ENC_READ(buf, domain, off) \
   radeon_uvd_enc_add_buffer(enc, (buf), RADEON_USAGE_READ, (domain), (off))
#define RADEON_ENC_WRITE(buf, domain, off) \
   radeon_uvd_enc_add_buffer(enc, (buf), RADEON_USAGE_WRITE, (domain), (off))
#define RADEON_ENC_READWRITE(buf, domain, off) \
   radeon_uvd_enc_add_buffer(enc, (buf), RADEON_USAGE_READWRITE, (domain), (off))
#define RADEON_ENC_END() \
   *begin = (&enc->cs.current.buf[enc->cs.current.cdw] - begin) * 4; \
   enc->total_task_size += *begin; \
   }

static const unsigned index_to_shifts[4] = {24, 16, 8, 0};

static void radeon_uvd_enc_add_buffer(struct radeon_uvd_encoder *enc, struct pb_buffer_lean *buf,
                                      unsigned usage, enum radeon_bo_domain domain,
                                      signed offset)
{
   enc->ws->cs_add_buffer(&enc->cs, buf, usage | RADEON_USAGE_SYNCHRONIZED, domain);
   uint64_t addr;
   addr = enc->ws->buffer_get_virtual_address(buf);
   addr = addr + offset;
   RADEON_ENC_CS(addr >> 32);
   RADEON_ENC_CS(addr);
}

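/* Bitstream writing helpers.  Header bytes are packed straight into the
 * command stream: radeon_uvd_enc_output_one_byte() fills the current dword
 * MSB-first (see index_to_shifts), and radeon_uvd_enc_emulation_prevention()
 * inserts the 0x03 emulation-prevention byte required by the HEVC RBSP
 * syntax after two consecutive zero bytes whenever the next byte is
 * 0x00..0x03. */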
static void radeon_uvd_enc_set_emulation_prevention(struct radeon_uvd_encoder *enc, bool set)
{
   if (set != enc->emulation_prevention) {
      enc->emulation_prevention = set;
      enc->num_zeros = 0;
   }
}

static void radeon_uvd_enc_output_one_byte(struct radeon_uvd_encoder *enc, unsigned char byte)
{
   if (enc->byte_index == 0)
      enc->cs.current.buf[enc->cs.current.cdw] = 0;
   enc->cs.current.buf[enc->cs.current.cdw] |=
      ((unsigned int)(byte) << index_to_shifts[enc->byte_index]);
   enc->byte_index++;

   if (enc->byte_index >= 4) {
      enc->byte_index = 0;
      enc->cs.current.cdw++;
   }
}

static void radeon_uvd_enc_emulation_prevention(struct radeon_uvd_encoder *enc, unsigned char byte)
{
   if (enc->emulation_prevention) {
      if ((enc->num_zeros >= 2) &&
          ((byte == 0x00) || (byte == 0x01) || (byte == 0x02) || (byte == 0x03))) {
         radeon_uvd_enc_output_one_byte(enc, 0x03);
         enc->bits_output += 8;
         enc->num_zeros = 0;
      }
      enc->num_zeros = (byte == 0 ? (enc->num_zeros + 1) : 0);
   }
}

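/* Pack up to 32 bits (MSB first) through a 32-bit shifter; every completed
 * byte is routed through the emulation-prevention and byte-output helpers
 * above. */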
static void radeon_uvd_enc_code_fixed_bits(struct radeon_uvd_encoder *enc, unsigned int value,
                                           unsigned int num_bits)
{
   unsigned int bits_to_pack = 0;

   while (num_bits > 0) {
      unsigned int value_to_pack = value & (0xffffffff >> (32 - num_bits));
      bits_to_pack =
         num_bits > (32 - enc->bits_in_shifter) ? (32 - enc->bits_in_shifter) : num_bits;

      if (bits_to_pack < num_bits)
         value_to_pack = value_to_pack >> (num_bits - bits_to_pack);

      enc->shifter |= value_to_pack << (32 - enc->bits_in_shifter - bits_to_pack);
      num_bits -= bits_to_pack;
      enc->bits_in_shifter += bits_to_pack;

      while (enc->bits_in_shifter >= 8) {
         unsigned char output_byte = (unsigned char)(enc->shifter >> 24);
         enc->shifter <<= 8;
         radeon_uvd_enc_emulation_prevention(enc, output_byte);
         radeon_uvd_enc_output_one_byte(enc, output_byte);
         enc->bits_in_shifter -= 8;
         enc->bits_output += 8;
      }
   }
}

static void radeon_uvd_enc_reset(struct radeon_uvd_encoder *enc)
{
   enc->emulation_prevention = false;
   enc->shifter = 0;
   enc->bits_in_shifter = 0;
   enc->bits_output = 0;
   enc->num_zeros = 0;
   enc->byte_index = 0;
}

static void radeon_uvd_enc_byte_align(struct radeon_uvd_encoder *enc)
{
   unsigned int num_padding_zeros = (32 - enc->bits_in_shifter) % 8;

   if (num_padding_zeros > 0)
      radeon_uvd_enc_code_fixed_bits(enc, 0, num_padding_zeros);
}

static void radeon_uvd_enc_flush_headers(struct radeon_uvd_encoder *enc)
{
   if (enc->bits_in_shifter != 0) {
      unsigned char output_byte = (unsigned char)(enc->shifter >> 24);
      radeon_uvd_enc_emulation_prevention(enc, output_byte);
      radeon_uvd_enc_output_one_byte(enc, output_byte);
      enc->bits_output += enc->bits_in_shifter;
      enc->shifter = 0;
      enc->bits_in_shifter = 0;
      enc->num_zeros = 0;
   }

   if (enc->byte_index > 0) {
      enc->cs.current.cdw++;
      enc->byte_index = 0;
   }
}

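/* Exp-Golomb coders used for the SPS/PPS/VPS and slice headers: ue(v) writes
 * value + 1 with a matching prefix of leading zero bits, se(v) maps a signed
 * value onto the unsigned code in the usual interleaved order. */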
static void radeon_uvd_enc_code_ue(struct radeon_uvd_encoder *enc, unsigned int value)
{
   int x = -1;
   unsigned int ue_code = value + 1;
   value += 1;

   while (value) {
      value = (value >> 1);
      x += 1;
   }

   unsigned int ue_length = (x << 1) + 1;
   radeon_uvd_enc_code_fixed_bits(enc, ue_code, ue_length);
}

static void radeon_uvd_enc_code_se(struct radeon_uvd_encoder *enc, int value)
{
   unsigned int v = 0;

   if (value != 0)
      v = (value < 0 ? ((unsigned int)(0 - value) << 1) : (((unsigned int)(value) << 1) - 1));

   radeon_uvd_enc_code_ue(enc, v);
}

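/* The packages below all follow the same pattern: RADEON_ENC_BEGIN() reserves
 * a dword for the package size and writes the package opcode, the body is
 * emitted with RADEON_ENC_CS()/RADEON_ENC_READ()/..., and RADEON_ENC_END()
 * patches the size in bytes and adds it to the running task size. */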
static void radeon_uvd_enc_session_info(struct radeon_uvd_encoder *enc)
{
   unsigned int interface_version =
      ((RENC_UVD_FW_INTERFACE_MAJOR_VERSION << RENC_UVD_IF_MAJOR_VERSION_SHIFT) |
       (RENC_UVD_FW_INTERFACE_MINOR_VERSION << RENC_UVD_IF_MINOR_VERSION_SHIFT));
   RADEON_ENC_BEGIN(RENC_UVD_IB_PARAM_SESSION_INFO);
   RADEON_ENC_CS(0x00000000); // reserved
   RADEON_ENC_CS(interface_version);
   RADEON_ENC_READWRITE(enc->si->res->buf, enc->si->res->domains, 0x0);
   RADEON_ENC_END();
}

static void radeon_uvd_enc_task_info(struct radeon_uvd_encoder *enc, bool need_feedback)
{
   enc->enc_pic.task_info.task_id++;

   if (need_feedback)
      enc->enc_pic.task_info.allowed_max_num_feedbacks = 1;
   else
      enc->enc_pic.task_info.allowed_max_num_feedbacks = 0;

   RADEON_ENC_BEGIN(RENC_UVD_IB_PARAM_TASK_INFO);
   enc->p_task_size = &enc->cs.current.buf[enc->cs.current.cdw++];
   RADEON_ENC_CS(enc->enc_pic.task_info.task_id);
   RADEON_ENC_CS(enc->enc_pic.task_info.allowed_max_num_feedbacks);
   RADEON_ENC_END();
}

static void radeon_uvd_enc_session_init_hevc(struct radeon_uvd_encoder *enc)
{
   enc->enc_pic.session_init.aligned_picture_width = align(enc->base.width, 64);
   enc->enc_pic.session_init.aligned_picture_height = align(enc->base.height, 16);
   enc->enc_pic.session_init.padding_width =
      (enc->enc_pic.crop_left + enc->enc_pic.crop_right) * 2;
   enc->enc_pic.session_init.padding_height =
      (enc->enc_pic.crop_top + enc->enc_pic.crop_bottom) * 2;
   enc->enc_pic.session_init.pre_encode_mode = RENC_UVD_PREENCODE_MODE_NONE;
   enc->enc_pic.session_init.pre_encode_chroma_enabled = false;

   RADEON_ENC_BEGIN(RENC_UVD_IB_PARAM_SESSION_INIT);
   RADEON_ENC_CS(enc->enc_pic.session_init.aligned_picture_width);
   RADEON_ENC_CS(enc->enc_pic.session_init.aligned_picture_height);
   RADEON_ENC_CS(enc->enc_pic.session_init.padding_width);
   RADEON_ENC_CS(enc->enc_pic.session_init.padding_height);
   RADEON_ENC_CS(enc->enc_pic.session_init.pre_encode_mode);
   RADEON_ENC_CS(enc->enc_pic.session_init.pre_encode_chroma_enabled);
   RADEON_ENC_END();
}

static void radeon_uvd_enc_layer_control(struct radeon_uvd_encoder *enc)
{
   enc->enc_pic.layer_ctrl.max_num_temporal_layers = 1;
   enc->enc_pic.layer_ctrl.num_temporal_layers = 1;

   RADEON_ENC_BEGIN(RENC_UVD_IB_PARAM_LAYER_CONTROL);
   RADEON_ENC_CS(enc->enc_pic.layer_ctrl.max_num_temporal_layers);
   RADEON_ENC_CS(enc->enc_pic.layer_ctrl.num_temporal_layers);
   RADEON_ENC_END();
}

static void radeon_uvd_enc_layer_select(struct radeon_uvd_encoder *enc)
{
   enc->enc_pic.layer_sel.temporal_layer_index = 0;

   RADEON_ENC_BEGIN(RENC_UVD_IB_PARAM_LAYER_SELECT);
   RADEON_ENC_CS(enc->enc_pic.layer_sel.temporal_layer_index);
   RADEON_ENC_END();
}

static void radeon_uvd_enc_slice_control_hevc(struct radeon_uvd_encoder *enc)
{
   enc->enc_pic.hevc_slice_ctrl.slice_control_mode = RENC_UVD_SLICE_CONTROL_MODE_FIXED_CTBS;
   enc->enc_pic.hevc_slice_ctrl.fixed_ctbs_per_slice.num_ctbs_per_slice =
      align(enc->base.width, 64) / 64 * align(enc->base.height, 64) / 64;
   enc->enc_pic.hevc_slice_ctrl.fixed_ctbs_per_slice.num_ctbs_per_slice_segment =
      enc->enc_pic.hevc_slice_ctrl.fixed_ctbs_per_slice.num_ctbs_per_slice;

   RADEON_ENC_BEGIN(RENC_UVD_IB_PARAM_SLICE_CONTROL);
   RADEON_ENC_CS(enc->enc_pic.hevc_slice_ctrl.slice_control_mode);
   RADEON_ENC_CS(enc->enc_pic.hevc_slice_ctrl.fixed_ctbs_per_slice.num_ctbs_per_slice);
   RADEON_ENC_CS(enc->enc_pic.hevc_slice_ctrl.fixed_ctbs_per_slice.num_ctbs_per_slice_segment);
   RADEON_ENC_END();
}

static void radeon_uvd_enc_spec_misc_hevc(struct radeon_uvd_encoder *enc,
                                          struct pipe_picture_desc *picture)
{
   struct pipe_h265_enc_picture_desc *pic = (struct pipe_h265_enc_picture_desc *)picture;
   enc->enc_pic.hevc_spec_misc.log2_min_luma_coding_block_size_minus3 =
      pic->seq.log2_min_luma_coding_block_size_minus3;
   enc->enc_pic.hevc_spec_misc.amp_disabled = !pic->seq.amp_enabled_flag;
   enc->enc_pic.hevc_spec_misc.strong_intra_smoothing_enabled =
      pic->seq.strong_intra_smoothing_enabled_flag;
   enc->enc_pic.hevc_spec_misc.constrained_intra_pred_flag = pic->pic.constrained_intra_pred_flag;
   enc->enc_pic.hevc_spec_misc.cabac_init_flag = pic->slice.cabac_init_flag;
   enc->enc_pic.hevc_spec_misc.half_pel_enabled = 1;
   enc->enc_pic.hevc_spec_misc.quarter_pel_enabled = 1;

   RADEON_ENC_BEGIN(RENC_UVD_IB_PARAM_SPEC_MISC);
   RADEON_ENC_CS(enc->enc_pic.hevc_spec_misc.log2_min_luma_coding_block_size_minus3);
   RADEON_ENC_CS(enc->enc_pic.hevc_spec_misc.amp_disabled);
   RADEON_ENC_CS(enc->enc_pic.hevc_spec_misc.strong_intra_smoothing_enabled);
   RADEON_ENC_CS(enc->enc_pic.hevc_spec_misc.constrained_intra_pred_flag);
   RADEON_ENC_CS(enc->enc_pic.hevc_spec_misc.cabac_init_flag);
   RADEON_ENC_CS(enc->enc_pic.hevc_spec_misc.half_pel_enabled);
   RADEON_ENC_CS(enc->enc_pic.hevc_spec_misc.quarter_pel_enabled);
   RADEON_ENC_END();
}

static void radeon_uvd_enc_rc_session_init(struct radeon_uvd_encoder *enc,
                                           struct pipe_picture_desc *picture)
{
   struct pipe_h265_enc_picture_desc *pic = (struct pipe_h265_enc_picture_desc *)picture;
   enc->enc_pic.rc_session_init.vbv_buffer_level = pic->rc[0].vbv_buf_lv;
   switch (pic->rc[0].rate_ctrl_method) {
   case PIPE_H2645_ENC_RATE_CONTROL_METHOD_DISABLE:
      enc->enc_pic.rc_session_init.rate_control_method = RENC_UVD_RATE_CONTROL_METHOD_NONE;
      break;
   case PIPE_H2645_ENC_RATE_CONTROL_METHOD_CONSTANT_SKIP:
   case PIPE_H2645_ENC_RATE_CONTROL_METHOD_CONSTANT:
      enc->enc_pic.rc_session_init.rate_control_method = RENC_UVD_RATE_CONTROL_METHOD_CBR;
      break;
   case PIPE_H2645_ENC_RATE_CONTROL_METHOD_VARIABLE_SKIP:
   case PIPE_H2645_ENC_RATE_CONTROL_METHOD_VARIABLE:
      enc->enc_pic.rc_session_init.rate_control_method =
         RENC_UVD_RATE_CONTROL_METHOD_PEAK_CONSTRAINED_VBR;
      break;
   default:
      enc->enc_pic.rc_session_init.rate_control_method = RENC_UVD_RATE_CONTROL_METHOD_NONE;
   }

   RADEON_ENC_BEGIN(RENC_UVD_IB_PARAM_RATE_CONTROL_SESSION_INIT);
   RADEON_ENC_CS(enc->enc_pic.rc_session_init.rate_control_method);
   RADEON_ENC_CS(enc->enc_pic.rc_session_init.vbv_buffer_level);
   RADEON_ENC_END();
}

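/* Per-layer rate-control setup: average and peak bits per picture are derived
 * from the target/peak bitrate and the frame rate; the fractional part of the
 * peak bits per picture is expressed in 1/2^32 units. */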
static void radeon_uvd_enc_rc_layer_init(struct radeon_uvd_encoder *enc,
                                         struct pipe_picture_desc *picture)
{
   struct pipe_h265_enc_picture_desc *pic = (struct pipe_h265_enc_picture_desc *)picture;
   enc->enc_pic.rc_layer_init.target_bit_rate = pic->rc[0].target_bitrate;
   enc->enc_pic.rc_layer_init.peak_bit_rate = pic->rc[0].peak_bitrate;
   enc->enc_pic.rc_layer_init.frame_rate_num = pic->rc[0].frame_rate_num;
   enc->enc_pic.rc_layer_init.frame_rate_den = pic->rc[0].frame_rate_den;
   enc->enc_pic.rc_layer_init.vbv_buffer_size = pic->rc[0].vbv_buffer_size;
   enc->enc_pic.rc_layer_init.avg_target_bits_per_picture =
      pic->rc[0].target_bitrate * ((float)pic->rc[0].frame_rate_den / pic->rc[0].frame_rate_num);
   enc->enc_pic.rc_layer_init.peak_bits_per_picture_integer =
      pic->rc[0].peak_bitrate * ((float)pic->rc[0].frame_rate_den / pic->rc[0].frame_rate_num);
   enc->enc_pic.rc_layer_init.peak_bits_per_picture_fractional =
      (((pic->rc[0].peak_bitrate * (uint64_t)pic->rc[0].frame_rate_den) % pic->rc[0].frame_rate_num) << 32) /
      pic->rc[0].frame_rate_num;

   RADEON_ENC_BEGIN(RENC_UVD_IB_PARAM_RATE_CONTROL_LAYER_INIT);
   RADEON_ENC_CS(enc->enc_pic.rc_layer_init.target_bit_rate);
   RADEON_ENC_CS(enc->enc_pic.rc_layer_init.peak_bit_rate);
   RADEON_ENC_CS(enc->enc_pic.rc_layer_init.frame_rate_num);
   RADEON_ENC_CS(enc->enc_pic.rc_layer_init.frame_rate_den);
   RADEON_ENC_CS(enc->enc_pic.rc_layer_init.vbv_buffer_size);
   RADEON_ENC_CS(enc->enc_pic.rc_layer_init.avg_target_bits_per_picture);
   RADEON_ENC_CS(enc->enc_pic.rc_layer_init.peak_bits_per_picture_integer);
   RADEON_ENC_CS(enc->enc_pic.rc_layer_init.peak_bits_per_picture_fractional);
   RADEON_ENC_END();
}

static void radeon_uvd_enc_deblocking_filter_hevc(struct radeon_uvd_encoder *enc,
                                                  struct pipe_picture_desc *picture)
{
   struct pipe_h265_enc_picture_desc *pic = (struct pipe_h265_enc_picture_desc *)picture;
   enc->enc_pic.hevc_deblock.loop_filter_across_slices_enabled =
      pic->slice.slice_loop_filter_across_slices_enabled_flag;
   enc->enc_pic.hevc_deblock.deblocking_filter_disabled =
      pic->slice.slice_deblocking_filter_disabled_flag;
   enc->enc_pic.hevc_deblock.beta_offset_div2 = pic->slice.slice_beta_offset_div2;
   enc->enc_pic.hevc_deblock.tc_offset_div2 = pic->slice.slice_tc_offset_div2;
   enc->enc_pic.hevc_deblock.cb_qp_offset = pic->slice.slice_cb_qp_offset;
   enc->enc_pic.hevc_deblock.cr_qp_offset = pic->slice.slice_cr_qp_offset;

   RADEON_ENC_BEGIN(RENC_UVD_IB_PARAM_DEBLOCKING_FILTER);
   RADEON_ENC_CS(enc->enc_pic.hevc_deblock.loop_filter_across_slices_enabled);
   RADEON_ENC_CS(enc->enc_pic.hevc_deblock.deblocking_filter_disabled);
   RADEON_ENC_CS(enc->enc_pic.hevc_deblock.beta_offset_div2);
   RADEON_ENC_CS(enc->enc_pic.hevc_deblock.tc_offset_div2);
   RADEON_ENC_CS(enc->enc_pic.hevc_deblock.cb_qp_offset);
   RADEON_ENC_CS(enc->enc_pic.hevc_deblock.cr_qp_offset);
   RADEON_ENC_END();
}

static void radeon_uvd_enc_quality_params(struct radeon_uvd_encoder *enc)
{
   enc->enc_pic.quality_params.vbaq_mode = 0;
   enc->enc_pic.quality_params.scene_change_sensitivity = 0;
   enc->enc_pic.quality_params.scene_change_min_idr_interval = 0;

   RADEON_ENC_BEGIN(RENC_UVD_IB_PARAM_QUALITY_PARAMS);
   RADEON_ENC_CS(enc->enc_pic.quality_params.vbaq_mode);
   RADEON_ENC_CS(enc->enc_pic.quality_params.scene_change_sensitivity);
   RADEON_ENC_CS(enc->enc_pic.quality_params.scene_change_min_idr_interval);
   RADEON_ENC_END();
}

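/* HEVC header NALUs (SPS/PPS/VPS/AUD) are generated on the CPU with the
 * bitstream helpers above and handed to the firmware as INSERT_NALU_BUFFER
 * packages; the dword following the NALU type is patched with the header size
 * in bytes once the writer has been flushed. */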
static void radeon_uvd_enc_nalu_sps_hevc(struct radeon_uvd_encoder *enc)
{
   RADEON_ENC_BEGIN(RENC_UVD_IB_PARAM_INSERT_NALU_BUFFER);
   RADEON_ENC_CS(RENC_UVD_NALU_TYPE_SPS);
   uint32_t *size_in_bytes = &enc->cs.current.buf[enc->cs.current.cdw++];
   int i;

   radeon_uvd_enc_reset(enc);
   radeon_uvd_enc_set_emulation_prevention(enc, false);
   radeon_uvd_enc_code_fixed_bits(enc, 0x00000001, 32);
   radeon_uvd_enc_code_fixed_bits(enc, 0x4201, 16);
   radeon_uvd_enc_byte_align(enc);
   radeon_uvd_enc_set_emulation_prevention(enc, true);
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 4);
   radeon_uvd_enc_code_fixed_bits(enc, enc->enc_pic.layer_ctrl.max_num_temporal_layers - 1, 3);
   radeon_uvd_enc_code_fixed_bits(enc, 0x1, 1);
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 2);
   radeon_uvd_enc_code_fixed_bits(enc, enc->enc_pic.general_tier_flag, 1);
   radeon_uvd_enc_code_fixed_bits(enc, enc->enc_pic.general_profile_idc, 5);
   radeon_uvd_enc_code_fixed_bits(enc, 0x60000000, 32);
   radeon_uvd_enc_code_fixed_bits(enc, 0xb0000000, 32);
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 16);
   radeon_uvd_enc_code_fixed_bits(enc, enc->enc_pic.general_level_idc, 8);

   for (i = 0; i < (enc->enc_pic.layer_ctrl.max_num_temporal_layers - 1); i++)
      radeon_uvd_enc_code_fixed_bits(enc, 0x0, 2);

   if ((enc->enc_pic.layer_ctrl.max_num_temporal_layers - 1) > 0) {
      for (i = (enc->enc_pic.layer_ctrl.max_num_temporal_layers - 1); i < 8; i++)
         radeon_uvd_enc_code_fixed_bits(enc, 0x0, 2);
   }

   radeon_uvd_enc_code_ue(enc, 0x0);
   radeon_uvd_enc_code_ue(enc, enc->enc_pic.chroma_format_idc);
   radeon_uvd_enc_code_ue(enc, enc->enc_pic.session_init.aligned_picture_width);
   radeon_uvd_enc_code_ue(enc, enc->enc_pic.session_init.aligned_picture_height);

   int conformance_window_flag = (enc->enc_pic.crop_top > 0) || (enc->enc_pic.crop_bottom > 0) ||
                                       (enc->enc_pic.crop_left > 0) || (enc->enc_pic.crop_right > 0)
                                    ? 0x1
                                    : 0x0;
   radeon_uvd_enc_code_fixed_bits(enc, conformance_window_flag, 1);

   if (conformance_window_flag == 1) {
      radeon_uvd_enc_code_ue(enc, enc->enc_pic.crop_left);
      radeon_uvd_enc_code_ue(enc, enc->enc_pic.crop_right);
      radeon_uvd_enc_code_ue(enc, enc->enc_pic.crop_top);
      radeon_uvd_enc_code_ue(enc, enc->enc_pic.crop_bottom);
   }

   radeon_uvd_enc_code_ue(enc, enc->enc_pic.bit_depth_luma_minus8);
   radeon_uvd_enc_code_ue(enc, enc->enc_pic.bit_depth_chroma_minus8);
   radeon_uvd_enc_code_ue(enc, enc->enc_pic.log2_max_poc - 4);
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1);
   radeon_uvd_enc_code_ue(enc, 1);
   radeon_uvd_enc_code_ue(enc, 0x0);
   radeon_uvd_enc_code_ue(enc, 0x0);
   radeon_uvd_enc_code_ue(enc, enc->enc_pic.hevc_spec_misc.log2_min_luma_coding_block_size_minus3);
   /* Only support CTBSize 64 */
   radeon_uvd_enc_code_ue(
      enc, 6 - (enc->enc_pic.hevc_spec_misc.log2_min_luma_coding_block_size_minus3 + 3));
   radeon_uvd_enc_code_ue(enc, enc->enc_pic.log2_min_transform_block_size_minus2);
   radeon_uvd_enc_code_ue(enc, enc->enc_pic.log2_diff_max_min_transform_block_size);
   radeon_uvd_enc_code_ue(enc, enc->enc_pic.max_transform_hierarchy_depth_inter);
   radeon_uvd_enc_code_ue(enc, enc->enc_pic.max_transform_hierarchy_depth_intra);

   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1);
   radeon_uvd_enc_code_fixed_bits(enc, !enc->enc_pic.hevc_spec_misc.amp_disabled, 1);
   radeon_uvd_enc_code_fixed_bits(enc, enc->enc_pic.sample_adaptive_offset_enabled_flag, 1);
   radeon_uvd_enc_code_fixed_bits(enc, enc->enc_pic.pcm_enabled_flag, 1);

   radeon_uvd_enc_code_ue(enc, 1);
   radeon_uvd_enc_code_ue(enc, 1);
   radeon_uvd_enc_code_ue(enc, 0);
   radeon_uvd_enc_code_ue(enc, 0);
   radeon_uvd_enc_code_fixed_bits(enc, 0x1, 1);

   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1);

   radeon_uvd_enc_code_fixed_bits(enc, 0, 1);
   radeon_uvd_enc_code_fixed_bits(enc, enc->enc_pic.hevc_spec_misc.strong_intra_smoothing_enabled,
                                  1);

   radeon_uvd_enc_code_fixed_bits(enc, (enc->enc_pic.vui_info.vui_parameters_present_flag), 1);
   if (enc->enc_pic.vui_info.vui_parameters_present_flag) {
      /* aspect ratio present flag */
      radeon_uvd_enc_code_fixed_bits(enc, (enc->enc_pic.vui_info.flags.aspect_ratio_info_present_flag), 1);
      if (enc->enc_pic.vui_info.flags.aspect_ratio_info_present_flag) {
         radeon_uvd_enc_code_fixed_bits(enc, (enc->enc_pic.vui_info.aspect_ratio_idc), 8);
         if (enc->enc_pic.vui_info.aspect_ratio_idc == PIPE_H2645_EXTENDED_SAR) {
            radeon_uvd_enc_code_fixed_bits(enc, (enc->enc_pic.vui_info.sar_width), 16);
            radeon_uvd_enc_code_fixed_bits(enc, (enc->enc_pic.vui_info.sar_height), 16);
         }
      }
      radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1); /* overscan info present flag */
      /* video signal type present flag */
      radeon_uvd_enc_code_fixed_bits(enc, enc->enc_pic.vui_info.flags.video_signal_type_present_flag, 1);
      if (enc->enc_pic.vui_info.flags.video_signal_type_present_flag) {
         radeon_uvd_enc_code_fixed_bits(enc, enc->enc_pic.vui_info.video_format, 3);
         radeon_uvd_enc_code_fixed_bits(enc, enc->enc_pic.vui_info.video_full_range_flag, 1);
         radeon_uvd_enc_code_fixed_bits(enc, enc->enc_pic.vui_info.flags.colour_description_present_flag, 1);
         if (enc->enc_pic.vui_info.flags.colour_description_present_flag) {
            radeon_uvd_enc_code_fixed_bits(enc, enc->enc_pic.vui_info.colour_primaries, 8);
            radeon_uvd_enc_code_fixed_bits(enc, enc->enc_pic.vui_info.transfer_characteristics, 8);
            radeon_uvd_enc_code_fixed_bits(enc, enc->enc_pic.vui_info.matrix_coefficients, 8);
         }
      }
      /* chroma loc info present flag */
      radeon_uvd_enc_code_fixed_bits(enc, enc->enc_pic.vui_info.flags.chroma_loc_info_present_flag, 1);
      if (enc->enc_pic.vui_info.flags.chroma_loc_info_present_flag) {
         radeon_uvd_enc_code_ue(enc, enc->enc_pic.vui_info.chroma_sample_loc_type_top_field);
         radeon_uvd_enc_code_ue(enc, enc->enc_pic.vui_info.chroma_sample_loc_type_bottom_field);
      }
      radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1); /* neutral chroma indication flag */
      radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1); /* field seq flag */
      radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1); /* frame field info present flag */
      radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1); /* default display windows flag */
      /* vui timing info present flag */
      radeon_uvd_enc_code_fixed_bits(enc, (enc->enc_pic.vui_info.flags.timing_info_present_flag), 1);
      if (enc->enc_pic.vui_info.flags.timing_info_present_flag) {
         radeon_uvd_enc_code_fixed_bits(enc, (enc->enc_pic.vui_info.num_units_in_tick), 32);
         radeon_uvd_enc_code_fixed_bits(enc, (enc->enc_pic.vui_info.time_scale), 32);
         radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1);
         radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1);
      }
      radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1); /* bitstream restriction flag */
   }

   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1);

   radeon_uvd_enc_code_fixed_bits(enc, 0x1, 1);

   radeon_uvd_enc_byte_align(enc);
   radeon_uvd_enc_flush_headers(enc);
   *size_in_bytes = (enc->bits_output + 7) / 8;
   RADEON_ENC_END();
}

static void radeon_uvd_enc_nalu_pps_hevc(struct radeon_uvd_encoder *enc)
{
   RADEON_ENC_BEGIN(RENC_UVD_IB_PARAM_INSERT_NALU_BUFFER);
   RADEON_ENC_CS(RENC_UVD_NALU_TYPE_PPS);
   uint32_t *size_in_bytes = &enc->cs.current.buf[enc->cs.current.cdw++];
   radeon_uvd_enc_reset(enc);
   radeon_uvd_enc_set_emulation_prevention(enc, false);
   radeon_uvd_enc_code_fixed_bits(enc, 0x00000001, 32);
   radeon_uvd_enc_code_fixed_bits(enc, 0x4401, 16);
   radeon_uvd_enc_byte_align(enc);
   radeon_uvd_enc_set_emulation_prevention(enc, true);
   radeon_uvd_enc_code_ue(enc, 0x0);
   radeon_uvd_enc_code_ue(enc, 0x0);
   radeon_uvd_enc_code_fixed_bits(enc, 0x1, 1);
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1); /* output_flag_present_flag */
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 3); /* num_extra_slice_header_bits */
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1);
   radeon_uvd_enc_code_fixed_bits(enc, 0x1, 1);
   radeon_uvd_enc_code_ue(enc, 0x0);
   radeon_uvd_enc_code_ue(enc, 0x0);
   radeon_uvd_enc_code_se(enc, 0x0);
   radeon_uvd_enc_code_fixed_bits(enc, enc->enc_pic.hevc_spec_misc.constrained_intra_pred_flag, 1);
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1);
   if (enc->enc_pic.rc_session_init.rate_control_method == RENC_UVD_RATE_CONTROL_METHOD_NONE)
      radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1);
   else {
      radeon_uvd_enc_code_fixed_bits(enc, 0x1, 1);
      radeon_uvd_enc_code_ue(enc, 0x0);
   }
   radeon_uvd_enc_code_se(enc, enc->enc_pic.hevc_deblock.cb_qp_offset);
   radeon_uvd_enc_code_se(enc, enc->enc_pic.hevc_deblock.cr_qp_offset);
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1);
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 2);
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1);
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1);
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1);
   radeon_uvd_enc_code_fixed_bits(enc, enc->enc_pic.hevc_deblock.loop_filter_across_slices_enabled,
                                  1);
   radeon_uvd_enc_code_fixed_bits(enc, 0x1, 1);
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1);
   radeon_uvd_enc_code_fixed_bits(enc, enc->enc_pic.hevc_deblock.deblocking_filter_disabled, 1);

   if (!enc->enc_pic.hevc_deblock.deblocking_filter_disabled) {
      radeon_uvd_enc_code_se(enc, enc->enc_pic.hevc_deblock.beta_offset_div2);
      radeon_uvd_enc_code_se(enc, enc->enc_pic.hevc_deblock.tc_offset_div2);
   }

   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1);
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1);
   radeon_uvd_enc_code_ue(enc, enc->enc_pic.log2_parallel_merge_level_minus2);
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 2);

   radeon_uvd_enc_code_fixed_bits(enc, 0x1, 1);

   radeon_uvd_enc_byte_align(enc);
   radeon_uvd_enc_flush_headers(enc);
   *size_in_bytes = (enc->bits_output + 7) / 8;
   RADEON_ENC_END();
}

static void radeon_uvd_enc_nalu_vps_hevc(struct radeon_uvd_encoder *enc)
{
   RADEON_ENC_BEGIN(RENC_UVD_IB_PARAM_INSERT_NALU_BUFFER);
   RADEON_ENC_CS(RENC_UVD_NALU_TYPE_VPS);
   uint32_t *size_in_bytes = &enc->cs.current.buf[enc->cs.current.cdw++];
   int i;

   radeon_uvd_enc_reset(enc);
   radeon_uvd_enc_set_emulation_prevention(enc, false);
   radeon_uvd_enc_code_fixed_bits(enc, 0x00000001, 32);
   radeon_uvd_enc_code_fixed_bits(enc, 0x4001, 16);
   radeon_uvd_enc_byte_align(enc);
   radeon_uvd_enc_set_emulation_prevention(enc, true);

   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 4);
   radeon_uvd_enc_code_fixed_bits(enc, 0x3, 2);
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 6);
   radeon_uvd_enc_code_fixed_bits(enc, enc->enc_pic.layer_ctrl.max_num_temporal_layers - 1, 3);
   radeon_uvd_enc_code_fixed_bits(enc, 0x1, 1);
   radeon_uvd_enc_code_fixed_bits(enc, 0xffff, 16);
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 2);
   radeon_uvd_enc_code_fixed_bits(enc, enc->enc_pic.general_tier_flag, 1);
   radeon_uvd_enc_code_fixed_bits(enc, enc->enc_pic.general_profile_idc, 5);
   radeon_uvd_enc_code_fixed_bits(enc, 0x60000000, 32);
   radeon_uvd_enc_code_fixed_bits(enc, 0xb0000000, 32);
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 16);
   radeon_uvd_enc_code_fixed_bits(enc, enc->enc_pic.general_level_idc, 8);

   for (i = 0; i < (enc->enc_pic.layer_ctrl.max_num_temporal_layers - 1); i++)
      radeon_uvd_enc_code_fixed_bits(enc, 0x0, 2);

   if ((enc->enc_pic.layer_ctrl.max_num_temporal_layers - 1) > 0) {
      for (i = (enc->enc_pic.layer_ctrl.max_num_temporal_layers - 1); i < 8; i++)
         radeon_uvd_enc_code_fixed_bits(enc, 0x0, 2);
   }

   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1);
   radeon_uvd_enc_code_ue(enc, 0x1);
   radeon_uvd_enc_code_ue(enc, 0x0);
   radeon_uvd_enc_code_ue(enc, 0x0);

   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 6);
   radeon_uvd_enc_code_ue(enc, 0x0);
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1);
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1);

   radeon_uvd_enc_code_fixed_bits(enc, 0x1, 1);

   radeon_uvd_enc_byte_align(enc);
   radeon_uvd_enc_flush_headers(enc);
   *size_in_bytes = (enc->bits_output + 7) / 8;
   RADEON_ENC_END();
}

static void radeon_uvd_enc_nalu_aud_hevc(struct radeon_uvd_encoder *enc)
{
   RADEON_ENC_BEGIN(RENC_UVD_IB_PARAM_INSERT_NALU_BUFFER);
   RADEON_ENC_CS(RENC_UVD_NALU_TYPE_AUD);
   uint32_t *size_in_bytes = &enc->cs.current.buf[enc->cs.current.cdw++];
   radeon_uvd_enc_reset(enc);
   radeon_uvd_enc_set_emulation_prevention(enc, false);
   radeon_uvd_enc_code_fixed_bits(enc, 0x00000001, 32);
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1);
   radeon_uvd_enc_code_fixed_bits(enc, 35, 6);
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 6);
   radeon_uvd_enc_code_fixed_bits(enc, 0x1, 3);
   radeon_uvd_enc_byte_align(enc);
   radeon_uvd_enc_set_emulation_prevention(enc, true);
   switch (enc->enc_pic.picture_type) {
   case PIPE_H2645_ENC_PICTURE_TYPE_I:
   case PIPE_H2645_ENC_PICTURE_TYPE_IDR:
      radeon_uvd_enc_code_fixed_bits(enc, 0x00, 3);
      break;
   case PIPE_H2645_ENC_PICTURE_TYPE_P:
      radeon_uvd_enc_code_fixed_bits(enc, 0x01, 3);
      break;
   case PIPE_H2645_ENC_PICTURE_TYPE_B:
      radeon_uvd_enc_code_fixed_bits(enc, 0x02, 3);
      break;
   default:
      assert(0 && "Unsupported picture type!");
   }

   radeon_uvd_enc_code_fixed_bits(enc, 0x1, 1);

   radeon_uvd_enc_byte_align(enc);
   radeon_uvd_enc_flush_headers(enc);
   *size_in_bytes = (enc->bits_output + 7) / 8;
   RADEON_ENC_END();
}

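/* The slice header is passed to the firmware as a template: COPY instructions
 * reference the bit ranges built here, while fields the firmware must fill in
 * itself (first-slice flag, slice segment address, slice QP delta, optional
 * loop-filter flag) are marked with dedicated instructions. */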
static void radeon_uvd_enc_slice_header_hevc(struct radeon_uvd_encoder *enc)
{
   uint32_t instruction[RENC_UVD_SLICE_HEADER_TEMPLATE_MAX_NUM_INSTRUCTIONS] = {0};
   uint32_t num_bits[RENC_UVD_SLICE_HEADER_TEMPLATE_MAX_NUM_INSTRUCTIONS] = {0};
   unsigned int inst_index = 0;
   unsigned int bit_index = 0;
   unsigned int bits_copied = 0;
   RADEON_ENC_BEGIN(RENC_UVD_IB_PARAM_SLICE_HEADER);
   radeon_uvd_enc_reset(enc);
   radeon_uvd_enc_set_emulation_prevention(enc, false);

   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1);
   radeon_uvd_enc_code_fixed_bits(enc, enc->enc_pic.nal_unit_type, 6);
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 6);
   radeon_uvd_enc_code_fixed_bits(enc, 0x1, 3);

   radeon_uvd_enc_flush_headers(enc);
   bit_index++;
   instruction[inst_index] = RENC_UVD_HEADER_INSTRUCTION_COPY;
   num_bits[inst_index] = enc->bits_output - bits_copied;
   bits_copied = enc->bits_output;
   inst_index++;

   instruction[inst_index] = RENC_UVD_HEADER_INSTRUCTION_FIRST_SLICE;
   inst_index++;

   if ((enc->enc_pic.nal_unit_type >= 16) && (enc->enc_pic.nal_unit_type <= 23))
      radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1);

   radeon_uvd_enc_code_ue(enc, 0x0);

   radeon_uvd_enc_flush_headers(enc);
   bit_index++;
   instruction[inst_index] = RENC_UVD_HEADER_INSTRUCTION_COPY;
   num_bits[inst_index] = enc->bits_output - bits_copied;
   bits_copied = enc->bits_output;
   inst_index++;

   instruction[inst_index] = RENC_UVD_HEADER_INSTRUCTION_SLICE_SEGMENT;
   inst_index++;

   instruction[inst_index] = RENC_UVD_HEADER_INSTRUCTION_DEPENDENT_SLICE_END;
   inst_index++;

   switch (enc->enc_pic.picture_type) {
   case PIPE_H2645_ENC_PICTURE_TYPE_I:
   case PIPE_H2645_ENC_PICTURE_TYPE_IDR:
      radeon_uvd_enc_code_ue(enc, 0x2);
      break;
   case PIPE_H2645_ENC_PICTURE_TYPE_P:
   case PIPE_H2645_ENC_PICTURE_TYPE_SKIP:
      radeon_uvd_enc_code_ue(enc, 0x1);
      break;
   case PIPE_H2645_ENC_PICTURE_TYPE_B:
      radeon_uvd_enc_code_ue(enc, 0x0);
      break;
   default:
      radeon_uvd_enc_code_ue(enc, 0x1);
   }

   if ((enc->enc_pic.nal_unit_type != 19) && (enc->enc_pic.nal_unit_type != 20)) {
      radeon_uvd_enc_code_fixed_bits(enc, enc->enc_pic.pic_order_cnt, enc->enc_pic.log2_max_poc);
      if (enc->enc_pic.picture_type == PIPE_H2645_ENC_PICTURE_TYPE_P)
         radeon_uvd_enc_code_fixed_bits(enc, 0x1, 1);
      else {
         radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1);
         radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1);
         radeon_uvd_enc_code_ue(enc, 0x0);
         radeon_uvd_enc_code_ue(enc, 0x0);
      }
   }

   if (enc->enc_pic.sample_adaptive_offset_enabled_flag)
      radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1); /* slice_sao_luma_flag */

   if ((enc->enc_pic.picture_type == PIPE_H2645_ENC_PICTURE_TYPE_P) ||
       (enc->enc_pic.picture_type == PIPE_H2645_ENC_PICTURE_TYPE_B)) {
      radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1);
      radeon_uvd_enc_code_fixed_bits(enc, enc->enc_pic.hevc_spec_misc.cabac_init_flag, 1);
      radeon_uvd_enc_code_ue(enc, 5 - enc->enc_pic.max_num_merge_cand);
   }

   radeon_uvd_enc_flush_headers(enc);
   bit_index++;
   instruction[inst_index] = RENC_UVD_HEADER_INSTRUCTION_COPY;
   num_bits[inst_index] = enc->bits_output - bits_copied;
   bits_copied = enc->bits_output;
   inst_index++;

   instruction[inst_index] = RENC_UVD_HEADER_INSTRUCTION_SLICE_QP_DELTA;
   inst_index++;

   if ((enc->enc_pic.hevc_deblock.loop_filter_across_slices_enabled) &&
       (!enc->enc_pic.hevc_deblock.deblocking_filter_disabled)) {
      radeon_uvd_enc_code_fixed_bits(
         enc, enc->enc_pic.hevc_deblock.loop_filter_across_slices_enabled, 1);

      radeon_uvd_enc_flush_headers(enc);
      bit_index++;
      instruction[inst_index] = RENC_UVD_HEADER_INSTRUCTION_COPY;
      num_bits[inst_index] = enc->bits_output - bits_copied;
      bits_copied = enc->bits_output;
      inst_index++;
   }

   instruction[inst_index] = RENC_UVD_HEADER_INSTRUCTION_END;

   for (int i = bit_index; i < RENC_UVD_SLICE_HEADER_TEMPLATE_MAX_TEMPLATE_SIZE_IN_DWORDS; i++)
      RADEON_ENC_CS(0x00000000);

   for (int j = 0; j < RENC_UVD_SLICE_HEADER_TEMPLATE_MAX_NUM_INSTRUCTIONS; j++) {
      RADEON_ENC_CS(instruction[j]);
      RADEON_ENC_CS(num_bits[j]);
   }

   RADEON_ENC_END();
}

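/* Encode context buffer: two reconstructed pictures live in the CPB buffer;
 * their luma/chroma offsets are computed from the reconstructed pitches and
 * the 16-aligned picture height. */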
static void radeon_uvd_enc_ctx(struct radeon_uvd_encoder *enc)
{
   struct si_screen *sscreen = (struct si_screen *)enc->screen;

   enc->enc_pic.ctx_buf.swizzle_mode = 0;
   if (sscreen->info.gfx_level < GFX9) {
      enc->enc_pic.ctx_buf.rec_luma_pitch = (enc->luma->u.legacy.level[0].nblk_x * enc->luma->bpe);
      enc->enc_pic.ctx_buf.rec_chroma_pitch =
         (enc->chroma->u.legacy.level[0].nblk_x * enc->chroma->bpe);
   } else {
      enc->enc_pic.ctx_buf.rec_luma_pitch = enc->luma->u.gfx9.surf_pitch * enc->luma->bpe;
      enc->enc_pic.ctx_buf.rec_chroma_pitch = enc->chroma->u.gfx9.surf_pitch * enc->chroma->bpe;
   }
   enc->enc_pic.ctx_buf.num_reconstructed_pictures = 2;

   RADEON_ENC_BEGIN(RENC_UVD_IB_PARAM_ENCODE_CONTEXT_BUFFER);
   RADEON_ENC_READWRITE(enc->cpb.res->buf, enc->cpb.res->domains, 0);
   RADEON_ENC_CS(0x00000000); // reserved
   RADEON_ENC_CS(enc->enc_pic.ctx_buf.swizzle_mode);
   RADEON_ENC_CS(enc->enc_pic.ctx_buf.rec_luma_pitch);
   RADEON_ENC_CS(enc->enc_pic.ctx_buf.rec_chroma_pitch);
   RADEON_ENC_CS(enc->enc_pic.ctx_buf.num_reconstructed_pictures);
   /* reconstructed_picture_1_luma_offset */
   RADEON_ENC_CS(0x00000000);
   /* reconstructed_picture_1_chroma_offset */
   RADEON_ENC_CS(enc->enc_pic.ctx_buf.rec_chroma_pitch * align(enc->base.height, 16));
   /* reconstructed_picture_2_luma_offset */
   RADEON_ENC_CS(enc->enc_pic.ctx_buf.rec_luma_pitch * align(enc->base.height, 16) * 3 / 2);
   /* reconstructed_picture_2_chroma_offset */
   RADEON_ENC_CS(enc->enc_pic.ctx_buf.rec_chroma_pitch * align(enc->base.height, 16) * 5 / 2);

   for (int i = 0; i < 136; i++)
      RADEON_ENC_CS(0x00000000);

   RADEON_ENC_END();
}

static void radeon_uvd_enc_bitstream(struct radeon_uvd_encoder *enc)
{
   enc->enc_pic.bit_buf.mode = RENC_UVD_SWIZZLE_MODE_LINEAR;
   enc->enc_pic.bit_buf.video_bitstream_buffer_size = enc->bs_size;
   enc->enc_pic.bit_buf.video_bitstream_data_offset = 0;

   RADEON_ENC_BEGIN(RENC_UVD_IB_PARAM_VIDEO_BITSTREAM_BUFFER);
   RADEON_ENC_CS(enc->enc_pic.bit_buf.mode);
   RADEON_ENC_WRITE(enc->bs_handle, RADEON_DOMAIN_GTT, 0);
   RADEON_ENC_CS(enc->enc_pic.bit_buf.video_bitstream_buffer_size);
   RADEON_ENC_CS(enc->enc_pic.bit_buf.video_bitstream_data_offset);
   RADEON_ENC_END();
}

static void radeon_uvd_enc_feedback(struct radeon_uvd_encoder *enc)
{
   enc->enc_pic.fb_buf.mode = RENC_UVD_FEEDBACK_BUFFER_MODE_LINEAR;
   enc->enc_pic.fb_buf.feedback_buffer_size = 16;
   enc->enc_pic.fb_buf.feedback_data_size = 40;

   RADEON_ENC_BEGIN(RENC_UVD_IB_PARAM_FEEDBACK_BUFFER);
   RADEON_ENC_CS(enc->enc_pic.fb_buf.mode);
   RADEON_ENC_WRITE(enc->fb->res->buf, enc->fb->res->domains, 0x0);
   RADEON_ENC_CS(enc->enc_pic.fb_buf.feedback_buffer_size);
   RADEON_ENC_CS(enc->enc_pic.fb_buf.feedback_data_size);
   RADEON_ENC_END();
}

static void radeon_uvd_enc_intra_refresh(struct radeon_uvd_encoder *enc)
{
   enc->enc_pic.intra_ref.intra_refresh_mode = RENC_UVD_INTRA_REFRESH_MODE_NONE;
   enc->enc_pic.intra_ref.offset = 0;
   enc->enc_pic.intra_ref.region_size = 0;

   RADEON_ENC_BEGIN(RENC_UVD_IB_PARAM_INTRA_REFRESH);
   RADEON_ENC_CS(enc->enc_pic.intra_ref.intra_refresh_mode);
   RADEON_ENC_CS(enc->enc_pic.intra_ref.offset);
   RADEON_ENC_CS(enc->enc_pic.intra_ref.region_size);
   RADEON_ENC_END();
}

static void radeon_uvd_enc_rc_per_pic(struct radeon_uvd_encoder *enc,
                                      struct pipe_picture_desc *picture)
{
   struct pipe_h265_enc_picture_desc *pic = (struct pipe_h265_enc_picture_desc *)picture;
   enc->enc_pic.rc_per_pic.qp = pic->rc[0].quant_i_frames;
   enc->enc_pic.rc_per_pic.min_qp_app = 0;
   enc->enc_pic.rc_per_pic.max_qp_app = 51;
   enc->enc_pic.rc_per_pic.max_au_size = 0;
   enc->enc_pic.rc_per_pic.enabled_filler_data = pic->rc[0].fill_data_enable;
   enc->enc_pic.rc_per_pic.skip_frame_enable = false;
   enc->enc_pic.rc_per_pic.enforce_hrd = pic->rc[0].enforce_hrd;

   RADEON_ENC_BEGIN(RENC_UVD_IB_PARAM_RATE_CONTROL_PER_PICTURE);
   RADEON_ENC_CS(enc->enc_pic.rc_per_pic.qp);
   RADEON_ENC_CS(enc->enc_pic.rc_per_pic.min_qp_app);
   RADEON_ENC_CS(enc->enc_pic.rc_per_pic.max_qp_app);
   RADEON_ENC_CS(enc->enc_pic.rc_per_pic.max_au_size);
   RADEON_ENC_CS(enc->enc_pic.rc_per_pic.enabled_filler_data);
   RADEON_ENC_CS(enc->enc_pic.rc_per_pic.skip_frame_enable);
   RADEON_ENC_CS(enc->enc_pic.rc_per_pic.enforce_hrd);
   RADEON_ENC_END();
}

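/* Per-picture encode parameters: map the gallium picture type onto the UVD
 * picture type, program the input surface pitches/offsets for the current
 * gfx level, and alternate the reference/reconstructed picture slots between
 * the two entries of the context buffer. */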
static void radeon_uvd_enc_encode_params_hevc(struct radeon_uvd_encoder *enc)
{
   struct si_screen *sscreen = (struct si_screen *)enc->screen;
   switch (enc->enc_pic.picture_type) {
   case PIPE_H2645_ENC_PICTURE_TYPE_I:
   case PIPE_H2645_ENC_PICTURE_TYPE_IDR:
      enc->enc_pic.enc_params.pic_type = RENC_UVD_PICTURE_TYPE_I;
      break;
   case PIPE_H2645_ENC_PICTURE_TYPE_P:
      enc->enc_pic.enc_params.pic_type = RENC_UVD_PICTURE_TYPE_P;
      break;
   case PIPE_H2645_ENC_PICTURE_TYPE_SKIP:
      enc->enc_pic.enc_params.pic_type = RENC_UVD_PICTURE_TYPE_P_SKIP;
      break;
   case PIPE_H2645_ENC_PICTURE_TYPE_B:
      enc->enc_pic.enc_params.pic_type = RENC_UVD_PICTURE_TYPE_B;
      break;
   default:
      enc->enc_pic.enc_params.pic_type = RENC_UVD_PICTURE_TYPE_I;
   }

   enc->enc_pic.enc_params.allowed_max_bitstream_size = enc->bs_size;
   if (sscreen->info.gfx_level < GFX9) {
      enc->enc_pic.enc_params.input_pic_luma_pitch =
         (enc->luma->u.legacy.level[0].nblk_x * enc->luma->bpe);
      enc->enc_pic.enc_params.input_pic_chroma_pitch =
         (enc->chroma->u.legacy.level[0].nblk_x * enc->chroma->bpe);
   } else {
      enc->enc_pic.enc_params.input_pic_luma_pitch = enc->luma->u.gfx9.surf_pitch * enc->luma->bpe;
      enc->enc_pic.enc_params.input_pic_chroma_pitch =
         enc->chroma->u.gfx9.surf_pitch * enc->chroma->bpe;
   }
   enc->enc_pic.enc_params.input_pic_swizzle_mode = RENC_UVD_SWIZZLE_MODE_LINEAR;

   if (enc->enc_pic.enc_params.pic_type == RENC_UVD_PICTURE_TYPE_I)
      enc->enc_pic.enc_params.reference_picture_index = 0xFFFFFFFF;
   else
      enc->enc_pic.enc_params.reference_picture_index = (enc->enc_pic.frame_num - 1) % 2;

   enc->enc_pic.enc_params.reconstructed_picture_index = enc->enc_pic.frame_num % 2;

   RADEON_ENC_BEGIN(RENC_UVD_IB_PARAM_ENCODE_PARAMS);
   RADEON_ENC_CS(enc->enc_pic.enc_params.pic_type);
   RADEON_ENC_CS(enc->enc_pic.enc_params.allowed_max_bitstream_size);

   if (sscreen->info.gfx_level < GFX9) {
      RADEON_ENC_READ(enc->handle, RADEON_DOMAIN_VRAM, (uint64_t)enc->luma->u.legacy.level[0].offset_256B * 256);
      RADEON_ENC_READ(enc->handle, RADEON_DOMAIN_VRAM, (uint64_t)enc->chroma->u.legacy.level[0].offset_256B * 256);
   } else {
      RADEON_ENC_READ(enc->handle, RADEON_DOMAIN_VRAM, enc->luma->u.gfx9.surf_offset);
      RADEON_ENC_READ(enc->handle, RADEON_DOMAIN_VRAM, enc->chroma->u.gfx9.surf_offset);
   }
   RADEON_ENC_CS(enc->enc_pic.enc_params.input_pic_luma_pitch);
   RADEON_ENC_CS(enc->enc_pic.enc_params.input_pic_chroma_pitch);
   RADEON_ENC_CS(0x00000000); // reserved
   RADEON_ENC_CS(enc->enc_pic.enc_params.input_pic_swizzle_mode);
   RADEON_ENC_CS(enc->enc_pic.enc_params.reference_picture_index);
   RADEON_ENC_CS(enc->enc_pic.enc_params.reconstructed_picture_index);
   RADEON_ENC_END();
}

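/* The OP_* packages below carry no payload; they only mark the operation the
 * firmware should perform for this task. */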
static void radeon_uvd_enc_op_init(struct radeon_uvd_encoder *enc)
{
   RADEON_ENC_BEGIN(RENC_UVD_IB_OP_INITIALIZE);
   RADEON_ENC_END();
}

static void radeon_uvd_enc_op_close(struct radeon_uvd_encoder *enc)
{
   RADEON_ENC_BEGIN(RENC_UVD_IB_OP_CLOSE_SESSION);
   RADEON_ENC_END();
}

static void radeon_uvd_enc_op_enc(struct radeon_uvd_encoder *enc)
{
   RADEON_ENC_BEGIN(RENC_UVD_IB_OP_ENCODE);
   RADEON_ENC_END();
}

static void radeon_uvd_enc_op_init_rc(struct radeon_uvd_encoder *enc)
{
   RADEON_ENC_BEGIN(RENC_UVD_IB_OP_INIT_RC);
   RADEON_ENC_END();
}

static void radeon_uvd_enc_op_init_rc_vbv(struct radeon_uvd_encoder *enc)
{
   RADEON_ENC_BEGIN(RENC_UVD_IB_OP_INIT_RC_VBV_BUFFER_LEVEL);
   RADEON_ENC_END();
}

static void radeon_uvd_enc_op_speed(struct radeon_uvd_encoder *enc)
{
   RADEON_ENC_BEGIN(RENC_UVD_IB_OP_SET_SPEED_ENCODING_MODE);
   RADEON_ENC_END();
}

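/* Task builders wired up in radeon_uvd_enc_1_1_init(): each one starts with
 * session info and task info, emits the packages for the task, and finally
 * patches the total task size recorded by radeon_uvd_enc_task_info(). */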
static void begin(struct radeon_uvd_encoder *enc, struct pipe_picture_desc *pic)
{
   radeon_uvd_enc_session_info(enc);
   enc->total_task_size = 0;
   radeon_uvd_enc_task_info(enc, enc->need_feedback);
   radeon_uvd_enc_op_init(enc);

   radeon_uvd_enc_session_init_hevc(enc);
   radeon_uvd_enc_slice_control_hevc(enc);
   radeon_uvd_enc_spec_misc_hevc(enc, pic);
   radeon_uvd_enc_deblocking_filter_hevc(enc, pic);

   radeon_uvd_enc_layer_control(enc);
   radeon_uvd_enc_rc_session_init(enc, pic);
   radeon_uvd_enc_quality_params(enc);
   radeon_uvd_enc_layer_select(enc);
   radeon_uvd_enc_rc_layer_init(enc, pic);
   radeon_uvd_enc_layer_select(enc);
   radeon_uvd_enc_rc_per_pic(enc, pic);
   radeon_uvd_enc_op_init_rc(enc);
   radeon_uvd_enc_op_init_rc_vbv(enc);
   *enc->p_task_size = (enc->total_task_size);
}

static void encode(struct radeon_uvd_encoder *enc)
{
   radeon_uvd_enc_session_info(enc);
   enc->total_task_size = 0;
   radeon_uvd_enc_task_info(enc, enc->need_feedback);

   radeon_uvd_enc_nalu_aud_hevc(enc);

   if (enc->enc_pic.is_iframe) {
      radeon_uvd_enc_nalu_vps_hevc(enc);
      radeon_uvd_enc_nalu_pps_hevc(enc);
      radeon_uvd_enc_nalu_sps_hevc(enc);
   }
   radeon_uvd_enc_slice_header_hevc(enc);
   radeon_uvd_enc_encode_params_hevc(enc);

   radeon_uvd_enc_ctx(enc);
   radeon_uvd_enc_bitstream(enc);
   radeon_uvd_enc_feedback(enc);
   radeon_uvd_enc_intra_refresh(enc);

   radeon_uvd_enc_op_speed(enc);
   radeon_uvd_enc_op_enc(enc);
   *enc->p_task_size = (enc->total_task_size);
}

static void destroy(struct radeon_uvd_encoder *enc)
{
   radeon_uvd_enc_session_info(enc);
   enc->total_task_size = 0;
   radeon_uvd_enc_task_info(enc, enc->need_feedback);
   radeon_uvd_enc_op_close(enc);
   *enc->p_task_size = (enc->total_task_size);
}

void radeon_uvd_enc_1_1_init(struct radeon_uvd_encoder *enc)
{
   enc->begin = begin;
   enc->encode = encode;
   enc->destroy = destroy;
}