1 /*------------------------------------------------------------------------
2 * Vulkan Conformance Tests
3 * ------------------------
4 *
5 * Copyright (c) 2024 The Khronos Group Inc.
6 *
7 * Licensed under the Apache License, Version 2.0 (the "License");
8 * you may not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
10 *
11 * http://www.apache.org/licenses/LICENSE-2.0
12 *
13 * Unless required by applicable law or agreed to in writing, software
14 * distributed under the License is distributed on an "AS IS" BASIS,
15 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * See the License for the specific language governing permissions and
17 * limitations under the License.
18 *
19 */
20 /*!
21 * \file
22 * \brief Video Encoding Session tests
23 */
24 /*--------------------------------------------------------------------*/
25
26 #include "vktVideoTestUtils.hpp"
27 #include "vktVideoEncodeTests.hpp"
28 #include "vktVideoTestUtils.hpp"
29 #include "vktTestCase.hpp"
30
31 #ifdef DE_BUILD_VIDEO
32 #include "vktVideoBaseDecodeUtils.hpp"
33 #endif
34
35 #include "tcuTextureUtil.hpp"
36 #include "tcuVectorUtil.hpp"
37 #include "tcuTestLog.hpp"
38 #include "tcuPlatform.hpp"
39 #include "tcuFunctionLibrary.hpp"
40 #include "tcuSurface.hpp"
41
42 #include "tcuTexture.hpp"
43 #include "tcuVector.hpp"
44 #include "tcuPixelFormat.hpp"
45 #include "tcuTextureUtil.hpp"
46 #include "tcuImageCompare.hpp"
47
48 #include "vkDefs.hpp"
49 #include "vkBufferWithMemory.hpp"
50 #include "vkImageWithMemory.hpp"
51 #include "vkImageUtil.hpp"
52 #include "vkBarrierUtil.hpp"
53 #include "vkObjUtil.hpp"
54 #include "vkTypeUtil.hpp"
55
56 #include "vktVideoClipInfo.hpp"
57 #include "ycbcr/vktYCbCrUtil.hpp"
58
59 #include <cstddef>
60 #include <cstdint>
61 #include <fstream>
62 #include <string>
63 #include <algorithm>
64
65 #ifndef VK_MAX_NUM_IMAGE_PLANES_KHR
66 #define VK_MAX_NUM_IMAGE_PLANES_KHR 4
67 #endif
68
69 #ifndef STREAM_DUMP_DEBUG
70 #define STREAM_DUMP_DEBUG 0
71 #endif
72
73 namespace vkt
74 {
75 namespace video
76 {
77 namespace
78 {
79 using namespace vk;
80 using namespace std;
81
82 using de::MovePtr;
83 using vkt::ycbcr::MultiPlaneImageData;
84
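// Convenience helper for building the (L0, L1) pairs used in the test parameters below,
// i.e. per-slice active reference counts and per-frame reference lists.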
85 template <typename T>
86 std::tuple<T, T> refs(T a, T b)
87 {
88 return std::make_tuple(a, b);
89 }
90
91 typedef de::SharedPtr<vk::Unique<vk::VkSemaphore>> SemaphoreSp;
92
93 enum TestType
94 {
95 TEST_TYPE_H264_ENCODE_I, // Encode one I frame
96 TEST_TYPE_H264_ENCODE_RC_VBR, // Encode one I frame with variable bitrate control enabled, maximum QP value of 42
97 TEST_TYPE_H264_ENCODE_RC_CBR, // Encode one I frame with constant bitrate control enabled, maximum QP value of 42
98 TEST_TYPE_H264_ENCODE_RC_DISABLE, // Encode one I frame with rate control disabled, constant QP value of 28
99 TEST_TYPE_H264_ENCODE_QUALITY_LEVEL, // Encode one I frame with quality level set to 0
100 TEST_TYPE_H264_ENCODE_USAGE, // Encode one I frame with non-default encode usage setup
101 TEST_TYPE_H264_ENCODE_I_P, // Encode one I frame and one P frame, recording and submission order match encode order
102 TEST_TYPE_H264_ENCODE_I_P_NOT_MATCHING_ORDER, // Encode one I frame, one P frame, recording and submission order not matching encoding order
103 TEST_TYPE_H264_I_P_B_13, // Encode two 13-frame GOPs containing I, P, and B frames; recording and submission order match encode order
104 TEST_TYPE_H264_ENCODE_QUERY_RESULT_WITH_STATUS, // Encode one I frame, one P frame with status query reported successfully for both frames. Recording and submission order match encode order
105 TEST_TYPE_H264_ENCODE_INLINE_QUERY, // VK_KHR_video_maintenance1 required test: Encode one I frame with an inline query instead of vkCmdBeginQuery/vkCmdEndQuery
106 TEST_TYPE_H264_ENCODE_RESOURCES_WITHOUT_PROFILES, // VK_KHR_video_maintenance1 required test: Encode one I frame with DPB resources defined without passing an encode profile
107 TEST_TYPE_H264_ENCODE_RESOLUTION_CHANGE_DPB, // Encode one I frame and one P frame with the session created at a smaller resolution than the extracted frame
108
109 TEST_TYPE_H265_ENCODE_I,
110 TEST_TYPE_H265_ENCODE_RC_VBR,
111 TEST_TYPE_H265_ENCODE_RC_CBR,
112 TEST_TYPE_H265_ENCODE_RC_DISABLE,
113 TEST_TYPE_H265_ENCODE_QUALITY_LEVEL,
114 TEST_TYPE_H265_ENCODE_USAGE,
115 TEST_TYPE_H265_ENCODE_I_P,
116 TEST_TYPE_H265_ENCODE_I_P_NOT_MATCHING_ORDER,
117 TEST_TYPE_H265_I_P_B_13,
118 TEST_TYPE_H265_ENCODE_QUERY_RESULT_WITH_STATUS,
119 TEST_TYPE_H265_ENCODE_INLINE_QUERY,
120 TEST_TYPE_H265_ENCODE_RESOURCES_WITHOUT_PROFILES,
121 TEST_TYPE_H265_ENCODE_RESOLUTION_CHANGE_DPB,
122
123 TEST_TYPE_LAST
124 };
125
126 const char *getTestName(const TestType testType)
127 {
128 switch (testType)
129 {
130 case TEST_TYPE_H264_ENCODE_I:
131 return "h264_i";
132 case TEST_TYPE_H264_ENCODE_RC_VBR:
133 return "h264_rc_vbr";
134 case TEST_TYPE_H264_ENCODE_RC_CBR:
135 return "h264_rc_cbr";
136 case TEST_TYPE_H264_ENCODE_RC_DISABLE:
137 return "h264_rc_disable";
138 case TEST_TYPE_H264_ENCODE_QUALITY_LEVEL:
139 return "h264_quality_level";
140 case TEST_TYPE_H264_ENCODE_USAGE:
141 return "h264_usage";
142 case TEST_TYPE_H264_ENCODE_I_P:
143 return "h264_i_p";
144 case TEST_TYPE_H264_ENCODE_I_P_NOT_MATCHING_ORDER:
145 return "h264_i_p_not_matching_order";
146 case TEST_TYPE_H264_I_P_B_13:
147 return "h264_i_p_b_13";
148 case TEST_TYPE_H264_ENCODE_RESOLUTION_CHANGE_DPB:
149 return "h264_resolution_change_dpb";
150 case TEST_TYPE_H264_ENCODE_QUERY_RESULT_WITH_STATUS:
151 return "h264_query_with_status";
152 case TEST_TYPE_H264_ENCODE_INLINE_QUERY:
153 return "h264_inline_query";
154 case TEST_TYPE_H264_ENCODE_RESOURCES_WITHOUT_PROFILES:
155 return "h264_resources_without_profiles";
156 case TEST_TYPE_H265_ENCODE_I:
157 return "h265_i";
158 case TEST_TYPE_H265_ENCODE_RC_VBR:
159 return "h265_rc_vbr";
160 case TEST_TYPE_H265_ENCODE_RC_CBR:
161 return "h265_rc_cbr";
162 case TEST_TYPE_H265_ENCODE_RC_DISABLE:
163 return "h265_rc_disable";
164 case TEST_TYPE_H265_ENCODE_QUALITY_LEVEL:
165 return "h265_quality_level";
166 case TEST_TYPE_H265_ENCODE_USAGE:
167 return "h265_usage";
168 case TEST_TYPE_H265_ENCODE_I_P:
169 return "h265_i_p";
170 case TEST_TYPE_H265_ENCODE_I_P_NOT_MATCHING_ORDER:
171 return "h265_i_p_not_matching_order";
172 case TEST_TYPE_H265_I_P_B_13:
173 return "h265_i_p_b_13";
174 case TEST_TYPE_H265_ENCODE_RESOLUTION_CHANGE_DPB:
175 return "h265_resolution_change_dpb";
176 case TEST_TYPE_H265_ENCODE_QUERY_RESULT_WITH_STATUS:
177 return "h265_query_with_status";
178 case TEST_TYPE_H265_ENCODE_INLINE_QUERY:
179 return "h265_inline_query";
180 case TEST_TYPE_H265_ENCODE_RESOURCES_WITHOUT_PROFILES:
181 return "h265_resources_without_profiles";
182 default:
183 TCU_THROW(InternalError, "Unknown TestType");
184 }
185 }
186
187 enum FrameType
188 {
189 IDR_FRAME,
190 I_FRAME,
191 P_FRAME,
192 B_FRAME
193 };
194
195 enum Option : uint32_t
196 {
197 // The default is to do nothing additional to ordinary encode.
198 Default = 0,
199 UseStatusQueries = 1 << 0, // All encode operations will have their status checked for success (Q2 2023: not all vendors support these)
202 UseVariableBitrateControl = 1 << 1,
203 UseConstantBitrateControl = 1 << 2,
204 SwapOrder = 1 << 3,
205 DisableRateControl = 1 << 4, // const QP
206 ResolutionChange = 1 << 5,
207 UseQualityLevel = 1 << 6,
208 UseEncodeUsage = 1 << 7,
209 UseInlineQueries = 1 << 8, // Inline queries from the video_maintenance1 extension.
210 ResourcesWithoutProfiles = 1 << 9, // Test profile-less resources from the video_maintenance1 extension.
211 };
212
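// Parameters for a single encode test case: the GOP layout of the clip plus the per-frame
// reference lists and DPB slot bookkeeping that the encoder is expected to use.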
213 struct EncodeTestParam
214 {
215 TestType type;
216 ClipName clip;
217 uint32_t gops;
218 std::vector<FrameType> encodePattern;
219 std::vector<uint32_t> frameIdx;
220 std::vector<uint32_t> FrameNum;
221 uint8_t spsMaxRefFrames; // Sequence parameter set maximum reference frames.
222 std::tuple<uint8_t, uint8_t> ppsNumActiveRefs; // Picture parameter set number of active references
223 std::vector<std::tuple<uint8_t, uint8_t>> shNumActiveRefs; // Slice header number of active references,
224 std::vector<std::vector<uint8_t>> refSlots; // index of dpbImageVideoReferenceSlots
225 std::vector<int8_t> curSlot; // index of dpbImageVideoReferenceSlots
226 std::vector<std::tuple<std::vector<uint8_t>, std::vector<uint8_t>>>
227 frameReferences; // index of dpbImageVideoReferenceSlots
228 Option encoderOptions;
229 } g_EncodeTests[] = {
230 {TEST_TYPE_H264_ENCODE_I,
231 CLIP_E,
232 1,
233 {IDR_FRAME},
234 /* frameIdx */ {0},
235 /* FrameNum */ {0},
236 /* spsMaxRefFrames */ 1,
237 /* ppsNumActiveRefs */ {0, 0},
238 /* shNumActiveRefs */ {refs(0, 0)},
239 /* refSlots */ {{}},
240 /* curSlot */ {0},
241 /* frameReferences */ {refs<std::vector<uint8_t>>({}, {})},
242 /* encoderOptions */ Option::Default},
243 {TEST_TYPE_H264_ENCODE_RC_VBR,
244 CLIP_E,
245 1,
246 {IDR_FRAME},
247 /* frameIdx */ {0, 1},
248 /* FrameNum */ {0, 1},
249 /* spsMaxRefFrames */ 2,
250 /* ppsNumActiveRefs */ {0, 0},
251 /* shNumActiveRefs */ {refs(0, 0), refs(1, 0)},
252 /* refSlots */ {{}, {0}},
253 /* curSlot */ {0, 1},
254 /* frameReferences */ {refs<std::vector<uint8_t>>({}, {}), refs<std::vector<uint8_t>>({0}, {})},
255 /* encoderOptions */ Option::UseVariableBitrateControl},
256 {TEST_TYPE_H264_ENCODE_RC_CBR,
257 CLIP_E,
258 1,
259 {IDR_FRAME},
260 /* frameIdx */ {0},
261 /* FrameNum */ {0},
262 /* spsMaxRefFrames */ 1,
263 /* ppsNumActiveRefs */ {0, 0},
264 /* shNumActiveRefs */ {refs(0, 0)},
265 /* refSlots */ {{}},
266 /* curSlot */ {0},
267 /* frameReferences */ {refs<std::vector<uint8_t>>({}, {})},
268 /* encoderOptions */ Option::UseConstantBitrateControl},
269 {TEST_TYPE_H264_ENCODE_RC_DISABLE,
270 CLIP_E,
271 1,
272 {IDR_FRAME, P_FRAME},
273 /* frameIdx */ {0, 1},
274 /* FrameNum */ {0, 1},
275 /* spsMaxRefFrames */ 2,
276 /* ppsNumActiveRefs */ {0, 0},
277 /* shNumActiveRefs */ {refs(0, 0), refs(1, 0)},
278 /* refSlots */ {{}, {0}},
279 /* curSlot */ {0, 1},
280 /* frameReferences */ {refs<std::vector<uint8_t>>({}, {}), refs<std::vector<uint8_t>>({0}, {})},
281 /* encoderOptions */ Option::DisableRateControl},
282 {TEST_TYPE_H264_ENCODE_QUALITY_LEVEL,
283 CLIP_E,
284 1,
285 {IDR_FRAME},
286 /* frameIdx */ {0},
287 /* FrameNum */ {0},
288 /* spsMaxRefFrames */ 1,
289 /* ppsNumActiveRefs */ {0, 0},
290 /* shNumActiveRefs */ {refs(0, 0)},
291 /* refSlots */ {{}},
292 /* curSlot */ {0},
293 /* frameReferences */ {refs<std::vector<uint8_t>>({}, {})},
294 /* encoderOptions */ Option::UseQualityLevel},
295 {TEST_TYPE_H264_ENCODE_USAGE,
296 CLIP_E,
297 1,
298 {IDR_FRAME},
299 /* frameIdx */ {0},
300 /* FrameNum */ {0},
301 /* spsMaxRefFrames */ 1,
302 /* ppsNumActiveRefs */ {0, 0},
303 /* shNumActiveRefs */ {refs(0, 0)},
304 /* refSlots */ {{}},
305 /* curSlot */ {0},
306 /* frameReferences */ {refs<std::vector<uint8_t>>({}, {})},
307 /* encoderOptions */ Option::UseEncodeUsage},
308 {TEST_TYPE_H264_ENCODE_I_P,
309 CLIP_E,
310 1,
311 {IDR_FRAME, P_FRAME},
312 /* frameIdx */ {0, 1},
313 /* FrameNum */ {0, 1},
314 /* spsMaxRefFrames */ 2,
315 /* ppsNumActiveRefs */ {0, 0},
316 /* shNumActiveRefs */ {refs(0, 0), refs(1, 0)},
317 /* refSlots */ {{}, {0}},
318 /* curSlot */ {0, 1},
319 /* frameReferences */ {refs<std::vector<uint8_t>>({}, {}), refs<std::vector<uint8_t>>({0}, {})},
320 /* encoderOptions */ Option::Default},
321 {TEST_TYPE_H264_ENCODE_I_P_NOT_MATCHING_ORDER,
322 CLIP_E,
323 1,
324 {IDR_FRAME, P_FRAME},
325 /* frameIdx */ {0, 1},
326 /* FrameNum */ {0, 1},
327 /* spsMaxRefFrames */ 2,
328 /* ppsNumActiveRefs */ {0, 0},
329 /* shNumActiveRefs */ {refs(0, 0), refs(1, 0)},
330 /* refSlots */ {{}, {0}},
331 /* curSlot */ {0, 1},
332 /* frameReferences */ {refs<std::vector<uint8_t>>({}, {}), refs<std::vector<uint8_t>>({0}, {})},
333 /* encoderOptions */ Option::SwapOrder},
334 {TEST_TYPE_H264_ENCODE_QUERY_RESULT_WITH_STATUS,
335 CLIP_E,
336 1,
337 {IDR_FRAME, P_FRAME},
338 /* frameIdx */ {0, 1},
339 /* FrameNum */ {0, 1},
340 /* spsMaxRefFrames */ 2,
341 /* ppsNumActiveRefs */ {0, 0},
342 /* shNumActiveRefs */ {refs(0, 0), refs(1, 0)},
343 /* refSlots */ {{}, {0}},
344 /* curSlot */ {0, 1},
345 /* frameReferences */ {refs<std::vector<uint8_t>>({}, {}), refs<std::vector<uint8_t>>({0}, {})},
346 /* encoderOptions */ Option::UseStatusQueries},
347 {TEST_TYPE_H264_ENCODE_INLINE_QUERY,
348 CLIP_E,
349 1,
350 {IDR_FRAME},
351 /* frameIdx */ {0},
352 /* FrameNum */ {0},
353 /* spsMaxRefFrames */ 1,
354 /* ppsNumActiveRefs */ {0, 0},
355 /* shNumActiveRefs */ {refs(0, 0)},
356 /* refSlots */ {{}},
357 /* curSlot */ {0},
358 /* frameReferences */ {refs<std::vector<uint8_t>>({}, {})},
359 /* encoderOptions */ Option::UseInlineQueries},
360 {TEST_TYPE_H264_ENCODE_RESOURCES_WITHOUT_PROFILES,
361 CLIP_E,
362 1,
363 {IDR_FRAME, P_FRAME},
364 /* frameIdx */ {0, 1},
365 /* FrameNum */ {0, 1},
366 /* spsMaxRefFrames */ 2,
367 /* ppsNumActiveRefs */ {0, 0},
368 /* shNumActiveRefs */ {refs(0, 0), refs(1, 0)},
369 /* refSlots */ {{}, {0}},
370 /* curSlot */ {0, 1},
371 /* frameReferences */ {refs<std::vector<uint8_t>>({}, {}), refs<std::vector<uint8_t>>({0}, {})},
372 /* encoderOptions */ Option::ResourcesWithoutProfiles},
373 {TEST_TYPE_H264_ENCODE_RESOLUTION_CHANGE_DPB,
374 CLIP_G,
375 2,
376 {IDR_FRAME, P_FRAME},
377 /* frameIdx */ {0, 1},
378 /* FrameNum */ {0, 1},
379 /* spsMaxRefFrames */ 2,
380 /* ppsNumActiveRefs */ {0, 0},
381 /* shNumActiveRefs */ {refs(0, 0), refs(1, 0)},
382 /* refSlots */ {{}, {0}},
383 /* curSlot */ {0, 1},
384 /* frameReferences */ {refs<std::vector<uint8_t>>({}, {}), refs<std::vector<uint8_t>>({0}, {})},
385 /* encoderOptions */ Option::ResolutionChange},
386 {TEST_TYPE_H264_I_P_B_13,
387 CLIP_E,
388 2,
389 {IDR_FRAME, P_FRAME, B_FRAME, B_FRAME, P_FRAME, B_FRAME, B_FRAME, P_FRAME, B_FRAME, B_FRAME, P_FRAME, B_FRAME,
390 B_FRAME, P_FRAME},
391 /* frameIdx */ {0, 3, 1, 2, 6, 4, 5, 9, 7, 8, 12, 10, 11, 13},
392 /* frameNum */ {0, 1, 2, 2, 2, 3, 3, 3, 4, 4, 4, 5, 5, 5},
393 /* spsMaxRefFrames */ 4,
394 /* ppsNumActiveRefs */ {2, 2},
395 /* shNumActiveRefs */
396 {refs(0, 0), refs(1, 0), refs(2, 2), refs(2, 2), refs(2, 0), refs(2, 2), refs(2, 2), refs(2, 0), refs(2, 2),
397 refs(2, 2), refs(2, 0), refs(2, 2), refs(2, 2), refs(2, 0)},
398 /* refSlots */
399 {{},
400 {0},
401 {0, 1},
402 {0, 1},
403 {0, 1},
404 {0, 1, 2},
405 {0, 1, 2},
406 {0, 1, 2},
407 {0, 1, 2, 3},
408 {0, 1, 2, 3},
409 {0, 1, 2, 3},
410 {1, 2, 3, 4},
411 {1, 2, 3, 4},
412 {1, 2, 3, 4}},
413 /* curSlot */ {0, 1, -1, -1, 2, -1, -1, 3, -1, -1, 4, -1, -1, 5},
414 /* frameReferences */
415 {refs<std::vector<uint8_t>>({}, {}), refs<std::vector<uint8_t>>({0}, {}),
416 refs<std::vector<uint8_t>>({0, 1}, {1, 0}), refs<std::vector<uint8_t>>({0, 1}, {1, 0}),
417 refs<std::vector<uint8_t>>({1, 0}, {}), refs<std::vector<uint8_t>>({1, 0}, {2, 1}),
418 refs<std::vector<uint8_t>>({1, 0}, {2, 1}), refs<std::vector<uint8_t>>({2, 1}, {}),
419 refs<std::vector<uint8_t>>({2, 1}, {3, 2}), refs<std::vector<uint8_t>>({2, 1}, {3, 2}),
420 refs<std::vector<uint8_t>>({3, 2}, {}), refs<std::vector<uint8_t>>({3, 2}, {4, 3}),
421 refs<std::vector<uint8_t>>({3, 2}, {4, 3}), refs<std::vector<uint8_t>>({4, 3}, {})},
422 /* encoderOptions */ Option::Default},
423 {TEST_TYPE_H265_ENCODE_I,
424 CLIP_F,
425 1,
426 {IDR_FRAME},
427 /* frameIdx */ {0},
428 /* FrameNum */ {0},
429 /* spsMaxRefFrames */ 1,
430 /* ppsNumActiveRefs */ {0, 0},
431 /* shNumActiveRefs */ {refs(0, 0)},
432 /* refSlots */ {{}},
433 /* curSlot */ {0},
434 /* frameReferences */ {refs<std::vector<uint8_t>>({}, {})},
435 /* encoderOptions */ Option::Default},
436 {TEST_TYPE_H265_ENCODE_RC_VBR,
437 CLIP_F,
438 1,
439 {IDR_FRAME},
440 /* frameIdx */ {0, 1},
441 /* FrameNum */ {0, 1},
442 /* spsMaxRefFrames */ 2,
443 /* ppsNumActiveRefs */ {0, 0},
444 /* shNumActiveRefs */ {refs(0, 0), refs(1, 0)},
445 /* refSlots */ {{}, {0}},
446 /* curSlot */ {0, 1},
447 /* frameReferences */ {refs<std::vector<uint8_t>>({}, {}), refs<std::vector<uint8_t>>({0}, {})},
448 /* encoderOptions */ Option::UseVariableBitrateControl},
449 {TEST_TYPE_H265_ENCODE_RC_CBR,
450 CLIP_F,
451 1,
452 {IDR_FRAME},
453 /* frameIdx */ {0},
454 /* FrameNum */ {0},
455 /* spsMaxRefFrames */ 1,
456 /* ppsNumActiveRefs */ {0, 0},
457 /* shNumActiveRefs */ {refs(0, 0)},
458 /* refSlots */ {{}},
459 /* curSlot */ {0},
460 /* frameReferences */ {refs<std::vector<uint8_t>>({}, {})},
461 /* encoderOptions */ Option::UseConstantBitrateControl},
462 {TEST_TYPE_H265_ENCODE_RC_DISABLE,
463 CLIP_F,
464 1,
465 {IDR_FRAME, P_FRAME},
466 /* frameIdx */ {0, 1},
467 /* FrameNum */ {0, 1},
468 /* spsMaxRefFrames */ 2,
469 /* ppsNumActiveRefs */ {0, 0},
470 /* shNumActiveRefs */ {refs(0, 0), refs(1, 0)},
471 /* refSlots */ {{}, {0}},
472 /* curSlot */ {0, 1},
473 /* frameReferences */ {refs<std::vector<uint8_t>>({}, {}), refs<std::vector<uint8_t>>({0}, {})},
474 /* encoderOptions */ Option::DisableRateControl},
475 {TEST_TYPE_H265_ENCODE_QUALITY_LEVEL,
476 CLIP_F,
477 1,
478 {IDR_FRAME},
479 /* frameIdx */ {0},
480 /* FrameNum */ {0},
481 /* spsMaxRefFrames */ 1,
482 /* ppsNumActiveRefs */ {0, 0},
483 /* shNumActiveRefs */ {refs(0, 0)},
484 /* refSlots */ {{}},
485 /* curSlot */ {0},
486 /* frameReferences */ {refs<std::vector<uint8_t>>({}, {})},
487 /* encoderOptions */ Option::UseQualityLevel},
488 {TEST_TYPE_H265_ENCODE_USAGE,
489 CLIP_F,
490 1,
491 {IDR_FRAME},
492 /* frameIdx */ {0},
493 /* FrameNum */ {0},
494 /* spsMaxRefFrames */ 1,
495 /* ppsNumActiveRefs */ {0, 0},
496 /* shNumActiveRefs */ {refs(0, 0)},
497 /* refSlots */ {{}},
498 /* curSlot */ {0},
499 /* frameReferences */ {refs<std::vector<uint8_t>>({}, {})},
500 /* encoderOptions */ Option::UseEncodeUsage},
501 {TEST_TYPE_H265_ENCODE_I_P,
502 CLIP_F,
503 1,
504 {IDR_FRAME, P_FRAME},
505 /* frameIdx */ {0, 1},
506 /* FrameNum */ {0, 1},
507 /* spsMaxRefFrames */ 2,
508 /* ppsNumActiveRefs */ {0, 0},
509 /* shNumActiveRefs */ {refs(0, 0), refs(1, 0)},
510 /* refSlots */ {{}, {0}},
511 /* curSlot */ {0, 1},
512 /* frameReferences */ {refs<std::vector<uint8_t>>({}, {}), refs<std::vector<uint8_t>>({0}, {})},
513 /* encoderOptions */ Option::Default},
514 {TEST_TYPE_H265_ENCODE_I_P_NOT_MATCHING_ORDER,
515 CLIP_F,
516 1,
517 {IDR_FRAME, P_FRAME},
518 /* frameIdx */ {0, 1},
519 /* FrameNum */ {0, 1},
520 /* spsMaxRefFrames */ 2,
521 /* ppsNumActiveRefs */ {0, 0},
522 /* shNumActiveRefs */ {refs(0, 0), refs(1, 0)},
523 /* refSlots */ {{}, {0}},
524 /* curSlot */ {0, 1},
525 /* frameReferences */ {refs<std::vector<uint8_t>>({}, {}), refs<std::vector<uint8_t>>({0}, {})},
526 /* encoderOptions */ Option::SwapOrder},
527 {TEST_TYPE_H265_ENCODE_QUERY_RESULT_WITH_STATUS,
528 CLIP_F,
529 1,
530 {IDR_FRAME, P_FRAME},
531 /* frameIdx */ {0, 1},
532 /* FrameNum */ {0, 1},
533 /* spsMaxRefFrames */ 2,
534 /* ppsNumActiveRefs */ {0, 0},
535 /* shNumActiveRefs */ {refs(0, 0), refs(1, 0)},
536 /* refSlots */ {{}, {0}},
537 /* curSlot */ {0, 1},
538 /* frameReferences */ {refs<std::vector<uint8_t>>({}, {}), refs<std::vector<uint8_t>>({0}, {})},
539 /* encoderOptions */ Option::UseStatusQueries},
540 {TEST_TYPE_H265_ENCODE_INLINE_QUERY,
541 CLIP_F,
542 1,
543 {IDR_FRAME},
544 /* frameIdx */ {0},
545 /* FrameNum */ {0},
546 /* spsMaxRefFrames */ 1,
547 /* ppsNumActiveRefs */ {0, 0},
548 /* shNumActiveRefs */ {refs(0, 0)},
549 /* refSlots */ {{}},
550 /* curSlot */ {0},
551 /* frameReferences */ {refs<std::vector<uint8_t>>({}, {})},
552 /* encoderOptions */ Option::UseInlineQueries},
553 {TEST_TYPE_H265_ENCODE_RESOURCES_WITHOUT_PROFILES,
554 CLIP_F,
555 1,
556 {IDR_FRAME, P_FRAME},
557 /* frameIdx */ {0, 1},
558 /* FrameNum */ {0, 1},
559 /* spsMaxRefFrames */ 2,
560 /* ppsNumActiveRefs */ {0, 0},
561 /* shNumActiveRefs */ {refs(0, 0), refs(1, 0)},
562 /* refSlots */ {{}, {0}},
563 /* curSlot */ {0, 1},
564 /* frameReferences */ {refs<std::vector<uint8_t>>({}, {}), refs<std::vector<uint8_t>>({0}, {})},
565 /* encoderOptions */ Option::ResourcesWithoutProfiles},
566 {TEST_TYPE_H265_ENCODE_RESOLUTION_CHANGE_DPB,
567 CLIP_H,
568 2,
569 {IDR_FRAME, P_FRAME},
570 /* frameIdx */ {0, 1},
571 /* FrameNum */ {0, 1},
572 /* spsMaxRefFrames */ 2,
573 /* ppsNumActiveRefs */ {0, 0},
574 /* shNumActiveRefs */ {refs(0, 0), refs(1, 0)},
575 /* refSlots */ {{}, {0}},
576 /* curSlot */ {0, 1},
577 /* frameReferences */ {refs<std::vector<uint8_t>>({}, {}), refs<std::vector<uint8_t>>({0}, {})},
578 /* encoderOptions */ Option::ResolutionChange},
579 {TEST_TYPE_H265_I_P_B_13,
580 CLIP_F,
581 2,
582 {IDR_FRAME, P_FRAME, B_FRAME, B_FRAME, P_FRAME, B_FRAME, B_FRAME, P_FRAME, B_FRAME, B_FRAME, P_FRAME, B_FRAME,
583 B_FRAME, P_FRAME},
584 /* frameIdx */ {0, 3, 1, 2, 6, 4, 5, 9, 7, 8, 12, 10, 11, 13},
585 /* frameNum */ {0, 1, 2, 2, 2, 3, 3, 3, 4, 4, 4, 5, 5, 5},
586 /* spsMaxRefFrames */ 2,
587 /* ppsNumActiveRefs */ {1, 1},
588 /* shNumActiveRefs */
589 {refs(0, 0), refs(1, 0), refs(1, 1), refs(1, 1), refs(1, 0), refs(1, 1), refs(1, 1), refs(1, 0), refs(1, 1),
590 refs(1, 1), refs(1, 0), refs(1, 1), refs(1, 1), refs(1, 0)},
591 /* refSlots */
592 {{},
593 {0},
594 {0, 1},
595 {0, 1},
596 {0, 1},
597 {0, 1, 2},
598 {0, 1, 2},
599 {0, 1, 2},
600 {0, 1, 2, 3},
601 {0, 1, 2, 3},
602 {0, 1, 2, 3},
603 {1, 2, 3, 4},
604 {1, 2, 3, 4},
605 {1, 2, 3, 4}},
606 /* curSlot */ {0, 1, -1, -1, 2, -1, -1, 3, -1, -1, 4, -1, -1, 5},
607 /* frameReferences */
608 {refs<std::vector<uint8_t>>({}, {}), refs<std::vector<uint8_t>>({0}, {}),
609 refs<std::vector<uint8_t>>({0, 1}, {1, 0}), refs<std::vector<uint8_t>>({0, 1}, {1, 0}),
610 refs<std::vector<uint8_t>>({1, 0}, {}), refs<std::vector<uint8_t>>({1, 0}, {2, 1}),
611 refs<std::vector<uint8_t>>({1, 0}, {2, 1}), refs<std::vector<uint8_t>>({2, 1}, {}),
612 refs<std::vector<uint8_t>>({2, 1}, {3, 2}), refs<std::vector<uint8_t>>({2, 1}, {3, 2}),
613 refs<std::vector<uint8_t>>({3, 2}, {}), refs<std::vector<uint8_t>>({3, 2}, {4, 3}),
614 refs<std::vector<uint8_t>>({3, 2}, {4, 3}), refs<std::vector<uint8_t>>({4, 3}, {})},
615 /* encoderOptions */ Option::Default},
616 };
617
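// Wraps an EncodeTestParam entry together with the clip metadata and exposes the convenience
// accessors used by the test instance.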
618 class TestDefinition
619 {
620 public:
621 static MovePtr<TestDefinition> create(EncodeTestParam params)
622 {
623 return MovePtr<TestDefinition>(new TestDefinition(params));
624 }
625
626 TestDefinition(EncodeTestParam params) : m_params(params), m_info(clipInfo(params.clip))
627 {
628 VideoProfileInfo profile = m_info->sessionProfiles[0];
629 m_profile = VkVideoCoreProfile(profile.codecOperation, profile.subsamplingFlags, profile.lumaBitDepth,
630 profile.chromaBitDepth, profile.profileIDC);
631 }
632
633 TestType getTestType() const
634 {
635 return m_params.type;
636 }
637
638 const char *getClipFilename() const
639 {
640 return m_info->filename;
641 }
642
643 uint32_t getClipWidth() const
644 {
645 return m_info->frameWidth;
646 }
647
648 uint32_t getClipHeight() const
649 {
650 return m_info->frameHeight;
651 }
652
653 uint32_t getClipFrameRate() const
654 {
655 return m_info->frameRate;
656 }
657
658 VkVideoCodecOperationFlagBitsKHR getCodecOperation() const
659 {
660 return m_profile.GetCodecType();
661 }
662
663 void *getDecodeProfileExtension() const
664 {
665 if (m_profile.IsH264())
666 {
667 const VkVideoDecodeH264ProfileInfoKHR *videoProfileExtension = m_profile.GetDecodeH264Profile();
668 return reinterpret_cast<void *>(const_cast<VkVideoDecodeH264ProfileInfoKHR *>(videoProfileExtension));
669 }
670 if (m_profile.IsH265())
671 {
672 const VkVideoDecodeH265ProfileInfoKHR *videoProfileExtension = m_profile.GetDecodeH265Profile();
673 return reinterpret_cast<void *>(const_cast<VkVideoDecodeH265ProfileInfoKHR *>(videoProfileExtension));
674 }
675 TCU_THROW(InternalError, "Unsupported codec");
676 }
677
678 void *getEncodeProfileExtension() const
679 {
680 if (m_profile.IsH264())
681 {
682 const VkVideoEncodeH264ProfileInfoKHR *videoProfileExtension = m_profile.GetEncodeH264Profile();
683 return reinterpret_cast<void *>(const_cast<VkVideoEncodeH264ProfileInfoKHR *>(videoProfileExtension));
684 }
685 if (m_profile.IsH265())
686 {
687 const VkVideoEncodeH265ProfileInfoKHR *videoProfileExtension = m_profile.GetEncodeH265Profile();
688 return reinterpret_cast<void *>(const_cast<VkVideoEncodeH265ProfileInfoKHR *>(videoProfileExtension));
689 }
690 TCU_THROW(InternalError, "Unsupported codec");
691 }
692
693 const VkVideoCoreProfile *getProfile() const
694 {
695 return &m_profile;
696 }
697
698 uint32_t gopCount() const
699 {
700 return m_params.gops;
701 }
702
703 uint32_t gopFrameCount() const
704 {
705 return static_cast<uint32_t>(m_params.encodePattern.size());
706 }
707
708 int gopReferenceFrameCount() const
709 {
710 int count = 0;
711 for (const auto &frame : m_params.encodePattern)
712 {
713 if (frame != B_FRAME)
714 {
715 count++;
716 }
717 }
718 return count;
719 }
720
721 int gopCycles() const
722 {
723 int gopNum = 0;
724
725 for (auto &frame : m_params.encodePattern)
726 if (frame == IDR_FRAME || frame == I_FRAME)
727 gopNum++;
728
729 DE_ASSERT(gopNum);
730
731 return gopNum;
732 }
733
734 bool patternContain(FrameType type) const
735 {
736 return std::find(m_params.encodePattern.begin(), m_params.encodePattern.end(), type) !=
737 m_params.encodePattern.end();
738 }
739
740 uint32_t frameIdx(uint32_t Idx) const
741 {
742 return m_params.frameIdx[Idx];
743 }
744
745 FrameType frameType(uint32_t Idx) const
746 {
747 return m_params.encodePattern[Idx];
748 }
749
750 uint8_t maxNumRefs() const
751 {
752 return m_params.spsMaxRefFrames;
753 }
754
755 uint8_t ppsActiveRefs0() const
756 {
757 return std::get<0>(m_params.ppsNumActiveRefs);
758 }
759
760 uint8_t ppsActiveRefs1() const
761 {
762 return std::get<1>(m_params.ppsNumActiveRefs);
763 }
764
765 uint8_t shActiveRefs0(uint32_t Idx) const
766 {
767 return std::get<0>(m_params.shNumActiveRefs[Idx]);
768 }
769
770 uint8_t shActiveRefs1(uint32_t Idx) const
771 {
772 return std::get<1>(m_params.shNumActiveRefs[Idx]);
773 }
774
775 std::vector<uint8_t> ref0(uint32_t Idx) const
776 {
777 std::tuple<std::vector<uint8_t>, std::vector<uint8_t>> ref = m_params.frameReferences[Idx];
778 return std::get<0>(ref);
779 }
780
781 std::vector<uint8_t> ref1(uint32_t Idx) const
782 {
783 std::tuple<std::vector<uint8_t>, std::vector<uint8_t>> ref = m_params.frameReferences[Idx];
784 return std::get<1>(ref);
785 }
786
787 std::vector<uint8_t> refSlots(uint32_t Idx) const
788 {
789 std::vector<uint8_t> refs = m_params.refSlots[Idx];
790 return refs;
791 }
792
793 uint8_t refsCount(uint32_t Idx) const
794 {
795 return static_cast<uint8_t>(m_params.refSlots[Idx].size());
796 }
797
798 int8_t curSlot(uint32_t Idx) const
799 {
800 return m_params.curSlot[Idx];
801 }
802
803 uint32_t frameNumber(uint32_t Idx) const
804 {
805 return m_params.FrameNum[Idx];
806 }
807
808 uint32_t getConsecutiveBFrameCount(void) const
809 {
810 uint32_t maxConsecutiveBFrameCount = 0;
811 uint32_t currentConsecutiveBFrameCount = 0;
812
813 for (const auto &frame : m_params.encodePattern)
814 {
815 if (frame == B_FRAME)
816 {
817 currentConsecutiveBFrameCount++;
818 }
819 else
820 {
821 if (currentConsecutiveBFrameCount > maxConsecutiveBFrameCount)
822 {
823 maxConsecutiveBFrameCount = currentConsecutiveBFrameCount;
824 }
825 currentConsecutiveBFrameCount = 0;
826 }
827 }
828
829 return maxConsecutiveBFrameCount;
830 }
831
832 size_t framesToCheck() const
833 {
834 return m_params.encodePattern.size() * m_params.gops;
835 }
836
837 bool hasOption(Option o) const
838 {
839 return (m_params.encoderOptions & o) != 0;
840 }
841
842 VideoDevice::VideoDeviceFlags requiredDeviceFlags() const
843 {
844 switch (m_profile.GetCodecType())
845 {
846 case VK_VIDEO_CODEC_OPERATION_ENCODE_H264_BIT_KHR:
847 case VK_VIDEO_CODEC_OPERATION_ENCODE_H265_BIT_KHR:
848 {
849 VideoDevice::VideoDeviceFlags flags = VideoDevice::VIDEO_DEVICE_FLAG_REQUIRE_SYNC2_OR_NOT_SUPPORTED;
850
851 if (hasOption(Option::UseStatusQueries))
852 flags |= VideoDevice::VIDEO_DEVICE_FLAG_QUERY_WITH_STATUS_FOR_ENCODE_SUPPORT;
853
854 if (hasOption(Option::UseInlineQueries) || hasOption(Option::ResourcesWithoutProfiles))
855 flags |= VideoDevice::VIDEO_DEVICE_FLAG_REQUIRE_MAINTENANCE_1;
856
857 return flags;
858 }
859 default:
860 tcu::die("Unsupported video codec %s\n", util::codecToName(m_profile.GetCodecType()));
861 break;
862 }
863
864 TCU_THROW(InternalError, "Unsupported codec");
865 }
866
867 const VkExtensionProperties *extensionProperties() const
868 {
869 static const VkExtensionProperties h264StdExtensionVersion = {
870 VK_STD_VULKAN_VIDEO_CODEC_H264_ENCODE_EXTENSION_NAME, VK_STD_VULKAN_VIDEO_CODEC_H264_ENCODE_SPEC_VERSION};
871 static const VkExtensionProperties h265StdExtensionVersion = {
872 VK_STD_VULKAN_VIDEO_CODEC_H265_ENCODE_EXTENSION_NAME, VK_STD_VULKAN_VIDEO_CODEC_H265_ENCODE_SPEC_VERSION};
873
874 switch (m_profile.GetCodecType())
875 {
876 case VK_VIDEO_CODEC_OPERATION_ENCODE_H264_BIT_KHR:
877 return &h264StdExtensionVersion;
878 case VK_VIDEO_CODEC_OPERATION_ENCODE_H265_BIT_KHR:
879 return &h265StdExtensionVersion;
880 default:
881 tcu::die("Unsupported video codec %s\n", util::codecToName(m_profile.GetCodecType()));
882 break;
883 }
884
885 TCU_THROW(InternalError, "Unsupported codec");
886 };
887
888 private:
889 EncodeTestParam m_params;
890 const ClipInfo *m_info{};
891 VkVideoCoreProfile m_profile;
892 };
893
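// Host-side layout of the encode feedback query result: the requested feedback values
// (bitstream buffer offset, bytes written) followed by the status value appended for
// VK_QUERY_RESULT_WITH_STATUS_BIT_KHR.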
894 struct bytestreamWriteWithStatus
895 {
896 uint32_t bitstreamOffset;
897 uint32_t bitstreamWrite;
898 VkQueryResultStatusKHR status;
899 };
900
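// Read back the encode feedback query, advance the bitstream buffer offset by the number of
// bytes written (re-aligned to the minimum offset alignment), and optionally report failure
// when the query status is not VK_QUERY_RESULT_STATUS_COMPLETE_KHR.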
901 bool processQueryPoolResults(const DeviceInterface &vk, const VkDevice device, VkQueryPool encodeQueryPool,
902 VkDeviceSize &bitstreamBufferOffset, VkDeviceSize &minBitstreamBufferOffsetAlignment,
903 const bool queryStatus)
904 {
905 bytestreamWriteWithStatus queryResultWithStatus;
906 deMemset(&queryResultWithStatus, 0xFF, sizeof(queryResultWithStatus));
907
908 if (vk.getQueryPoolResults(device, encodeQueryPool, 1, 1, sizeof(queryResultWithStatus), &queryResultWithStatus,
909 sizeof(queryResultWithStatus),
910 VK_QUERY_RESULT_WITH_STATUS_BIT_KHR | VK_QUERY_RESULT_WAIT_BIT) == VK_SUCCESS)
911 {
912 bitstreamBufferOffset += queryResultWithStatus.bitstreamWrite;
913
914 // Align buffer offset after adding written data
915 bitstreamBufferOffset = deAlign64(bitstreamBufferOffset, minBitstreamBufferOffsetAlignment);
916
917 if (queryStatus && queryResultWithStatus.status != VK_QUERY_RESULT_STATUS_COMPLETE_KHR)
918 {
919 return false;
920 }
921 }
922 return true;
923 }
924
925 StdVideoH264PictureType getH264PictureType(const FrameType frameType)
926 {
927 switch (frameType)
928 {
929 case IDR_FRAME:
930 return STD_VIDEO_H264_PICTURE_TYPE_IDR;
931 case I_FRAME:
932 return STD_VIDEO_H264_PICTURE_TYPE_I;
933 case P_FRAME:
934 return STD_VIDEO_H264_PICTURE_TYPE_P;
935 case B_FRAME:
936 return STD_VIDEO_H264_PICTURE_TYPE_B;
937 default:
938 return {};
939 }
940 }
941
942 StdVideoH264SliceType getH264SliceType(const FrameType frameType)
943 {
944 switch (frameType)
945 {
946 case IDR_FRAME:
947 case I_FRAME:
948 return STD_VIDEO_H264_SLICE_TYPE_I;
949 case P_FRAME:
950 return STD_VIDEO_H264_SLICE_TYPE_P;
951 case B_FRAME:
952 return STD_VIDEO_H264_SLICE_TYPE_B;
953 default:
954 return {};
955 }
956 }
957
958 StdVideoH265PictureType getH265PictureType(const FrameType frameType)
959 {
960 switch (frameType)
961 {
962 case IDR_FRAME:
963 return STD_VIDEO_H265_PICTURE_TYPE_IDR;
964 case I_FRAME:
965 return STD_VIDEO_H265_PICTURE_TYPE_I;
966 case P_FRAME:
967 return STD_VIDEO_H265_PICTURE_TYPE_P;
968 case B_FRAME:
969 return STD_VIDEO_H265_PICTURE_TYPE_B;
970 default:
971 return {};
972 }
973 }
974
975 StdVideoH265SliceType getH265SliceType(const FrameType frameType)
976 {
977 switch (frameType)
978 {
979 case IDR_FRAME:
980 case I_FRAME:
981 return STD_VIDEO_H265_SLICE_TYPE_I;
982 case P_FRAME:
983 return STD_VIDEO_H265_SLICE_TYPE_P;
984 case B_FRAME:
985 return STD_VIDEO_H265_SLICE_TYPE_B;
986 default:
987 return {};
988 }
989 }
990
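// Map an encode codec operation to the corresponding decode operation, used when the encoded
// bitstream is decoded back for verification.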
991 VkVideoCodecOperationFlagBitsKHR getCodecDecodeOperationFromEncode(VkVideoCodecOperationFlagBitsKHR encodeOperation)
992 {
993 switch (encodeOperation)
994 {
995 case VK_VIDEO_CODEC_OPERATION_ENCODE_H264_BIT_KHR:
996 return VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_KHR;
997 case VK_VIDEO_CODEC_OPERATION_ENCODE_H265_BIT_KHR:
998 return VK_VIDEO_CODEC_OPERATION_DECODE_H265_BIT_KHR;
999 default:
1000 return VK_VIDEO_CODEC_OPERATION_NONE_KHR;
1001 }
1002 }
1003
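// Copy one YUV420p frame from the clip data into the two-plane layout of MultiPlaneImageData,
// interleaving U and V into the chroma plane. When half_size is set, the frame is additionally
// downscaled by two using simple pixel dropping.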
1004 void extractYUV420pFrame(std::vector<uint8_t> &videoDataPtr, uint32_t frameNumber, uint32_t width, uint32_t height,
1005 MultiPlaneImageData *imageData, bool half_size)
1006 {
1007 uint32_t uOffset = width * height;
1008 uint32_t vOffset = uOffset + (uOffset / 4);
1009 uint32_t frameSize = uOffset + (uOffset / 2);
1010
1011 // Ensure the videoDataPtr is large enough for the requested frame
1012 if (videoDataPtr.size() < (frameNumber + 1) * frameSize)
1013 {
1014 TCU_THROW(NotSupportedError, "Video data pointer content is too small for requested frame");
1015 }
1016
1017 const uint8_t *yPlane = videoDataPtr.data() + frameNumber * frameSize;
1018 const uint8_t *uPlane = videoDataPtr.data() + frameNumber * frameSize + uOffset;
1019 const uint8_t *vPlane = videoDataPtr.data() + frameNumber * frameSize + vOffset;
1020
1021 uint8_t *yPlaneData = static_cast<uint8_t *>(imageData->getPlanePtr(0));
1022 uint8_t *uvPlaneData = static_cast<uint8_t *>(imageData->getPlanePtr(1));
1023
1024 // If half_size is true, perform a simple 2x reduction
1025 if (half_size)
1026 {
1027 for (uint32_t j = 0; j < height; j += 2)
1028 {
1029 for (uint32_t i = 0; i < width; i += 2)
1030 {
1031 yPlaneData[(j / 2) * (width / 2) + (i / 2)] = yPlane[j * width + i];
1032 }
1033 }
1034 for (uint32_t j = 0; j < height / 2; j += 2)
1035 {
1036 for (uint32_t i = 0; i < width / 2; i += 2)
1037 {
1038 uint32_t reducedIndex = (j / 2) * (width / 4) + (i / 2);
1039 uint32_t fullIndex = j * (width / 2) + i;
1040
1041 uvPlaneData[2 * reducedIndex] = uPlane[fullIndex];
1042 uvPlaneData[2 * reducedIndex + 1] = vPlane[fullIndex];
1043 }
1044 }
1045 }
1046 else
1047 {
1048 // Writing NV12 frame
1049 uint32_t yPlaneSize = width * height;
1050 memcpy(yPlaneData, yPlane, yPlaneSize);
1051
1052 uint32_t uvPlaneSize = yPlaneSize / 2;
1053 for (uint32_t i = 0; i < uvPlaneSize; i += 2)
1054 {
1055 uvPlaneData[i] = uPlane[i / 2];
1056 uvPlaneData[i + 1] = vPlane[i / 2];
1057 }
1058 }
1059 }
1060
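// Pack the two planes of an NV12 image into a single contiguous buffer: the full Y plane
// followed by the interleaved CbCr plane.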
1061 de::MovePtr<std::vector<uint8_t>> saveNV12FrameAsYUV(MultiPlaneImageData *imageData)
1062 {
1063 uint8_t *yPlaneData = static_cast<uint8_t *>(imageData->getPlanePtr(0));
1064 uint8_t *uvPlaneData = static_cast<uint8_t *>(imageData->getPlanePtr(1));
1065
1066 const uint32_t width = imageData->getSize().x();
1067 const uint32_t height = imageData->getSize().y();
1068
1069 uint32_t uOffset = width * height;
1070 uint32_t frameSize = uOffset + (uOffset / 2);
1071
1072 de::MovePtr<std::vector<uint8_t>> outputDataPtr =
1073 de::MovePtr<std::vector<uint8_t>>(new std::vector<uint8_t>(frameSize));
1074
1075 memcpy(outputDataPtr.get()->data(), yPlaneData, uOffset);
1076 memcpy(&outputDataPtr.get()->data()[uOffset], uvPlaneData, uOffset / 2);
1077
1078 return outputDataPtr;
1079 }
1080
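// Copy host data into a mapped buffer allocation at the given offset and flush the mapped
// memory range so the device sees the update.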
1081 void fillBuffer(const DeviceInterface &vk, const VkDevice device, Allocation &bufferAlloc, const void *data,
1082 const VkDeviceSize dataSize, VkDeviceSize dataOffset = 0)
1083 {
1084 const VkMappedMemoryRange memRange = {
1085 VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE, // VkStructureType sType;
1086 DE_NULL, // const void* pNext;
1087 bufferAlloc.getMemory(), // VkDeviceMemory memory;
1088 bufferAlloc.getOffset(), // VkDeviceSize offset;
1089 VK_WHOLE_SIZE // VkDeviceSize size;
1090 };
1091
1092 uint8_t *hostPtr = static_cast<uint8_t *>(bufferAlloc.getHostPtr());
1093 deMemcpy(hostPtr + dataOffset, data, static_cast<uint32_t>(dataSize));
1094
1095 VK_CHECK(vk.flushMappedMemoryRanges(device, 1u, &memRange));
1096 }
1097
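// Build a VkVideoPictureResourceInfoKHR with a zero coded offset for the given extent, array
// layer and image view.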
1098 VkVideoPictureResourceInfoKHR makeVideoPictureResource(const VkExtent2D &codedExtent, uint32_t baseArrayLayer,
1099 const VkImageView imageView, const void *pNext = DE_NULL)
1100 {
1101 const VkVideoPictureResourceInfoKHR videoPictureResource = {
1102 VK_STRUCTURE_TYPE_VIDEO_PICTURE_RESOURCE_INFO_KHR, // VkStructureType sType;
1103 pNext, // const void* pNext;
1104 {0, 0}, // VkOffset2D codedOffset;
1105 codedExtent, // VkExtent2D codedExtent;
1106 baseArrayLayer, // uint32_t baseArrayLayer;
1107 imageView, // VkImageView imageViewBinding;
1108 };
1109
1110 return videoPictureResource;
1111 }
1112
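// Build a VkVideoReferenceSlotInfoKHR referring to the given DPB slot index and picture resource.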
1113 VkVideoReferenceSlotInfoKHR makeVideoReferenceSlot(int32_t slotIndex,
1114 const VkVideoPictureResourceInfoKHR *pPictureResource,
1115 const void *pNext = DE_NULL)
1116 {
1117 const VkVideoReferenceSlotInfoKHR videoReferenceSlotKHR = {
1118 VK_STRUCTURE_TYPE_VIDEO_REFERENCE_SLOT_INFO_KHR, // VkStructureType sType;
1119 pNext, // const void* pNext;
1120 slotIndex, // int32_t slotIndex;
1121 pPictureResource, // const VkVideoPictureResourceInfoKHR* pPictureResource;
1122 };
1123
1124 return videoReferenceSlotKHR;
1125 }
1126 // Vulkan video is not supported on the Android platform;
1127 // all external libraries, helper functions, and test instances have been excluded.
1128 #ifdef DE_BUILD_VIDEO
1129
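// Create the decoder used to verify the encoded bitstream: a VideoBaseDecoder backed by a new
// VulkanVideoFrameBuffer, recreating the DPB whenever the test changes resolution mid-stream.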
1130 static shared_ptr<VideoBaseDecoder> createBasicDecoder(DeviceContext *deviceContext, const VkVideoCoreProfile *profile,
1131 size_t framesToCheck, bool resolutionChange)
1132 {
1133 VkSharedBaseObj<VulkanVideoFrameBuffer> vkVideoFrameBuffer;
1134
1135 VK_CHECK(VulkanVideoFrameBuffer::Create(deviceContext,
1136 false, // UseResultStatusQueries
1137 false, // ResourcesWithoutProfiles
1138 vkVideoFrameBuffer));
1139
1140 VideoBaseDecoder::Parameters params;
1141
1142 params.profile = profile;
1143 params.context = deviceContext;
1144 params.framebuffer = vkVideoFrameBuffer;
1145 params.framesToCheck = framesToCheck;
1146 params.queryDecodeStatus = false;
1147 params.outOfOrderDecoding = false;
1148 params.alwaysRecreateDPB = resolutionChange;
1149
1150 return std::make_shared<VideoBaseDecoder>(std::move(params));
1151 }
1152
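// Download a decoded frame into host-visible MultiPlaneImageData: transition the image on the
// decode queue, hand queue family ownership over to the transfer queue, copy the planes out,
// and finally return the image to its original layout.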
1153 de::MovePtr<vkt::ycbcr::MultiPlaneImageData> getDecodedImageFromContext(DeviceContext &deviceContext,
1154 VkImageLayout layout, const DecodedFrame *frame)
1155 {
1156 auto &videoDeviceDriver = deviceContext.getDeviceDriver();
1157 auto device = deviceContext.device;
1158 auto queueFamilyIndexDecode = deviceContext.decodeQueueFamilyIdx();
1159 auto queueFamilyIndexTransfer = deviceContext.transferQueueFamilyIdx();
1160 const VkExtent2D imageExtent{(uint32_t)frame->displayWidth, (uint32_t)frame->displayHeight};
1161 const VkImage image = frame->outputImageView->GetImageResource()->GetImage();
1162 const VkFormat format = frame->outputImageView->GetImageResource()->GetImageCreateInfo().format;
1163 uint32_t imageLayerIndex = frame->imageLayerIndex;
1164
1165 MovePtr<vkt::ycbcr::MultiPlaneImageData> multiPlaneImageData(
1166 new vkt::ycbcr::MultiPlaneImageData(format, tcu::UVec2(imageExtent.width, imageExtent.height)));
1167 const VkQueue queueDecode = getDeviceQueue(videoDeviceDriver, device, queueFamilyIndexDecode, 0u);
1168 const VkQueue queueTransfer = getDeviceQueue(videoDeviceDriver, device, queueFamilyIndexTransfer, 0u);
1169 const VkImageSubresourceRange imageSubresourceRange =
1170 makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, imageLayerIndex, 1);
1171
1172 const VkImageMemoryBarrier2KHR imageBarrierDecode =
1173 makeImageMemoryBarrier2(VK_PIPELINE_STAGE_2_VIDEO_DECODE_BIT_KHR, VK_ACCESS_2_VIDEO_DECODE_WRITE_BIT_KHR,
1174 VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT_KHR, VK_ACCESS_NONE_KHR, layout,
1175 VK_IMAGE_LAYOUT_GENERAL, image, imageSubresourceRange);
1176
1177 const VkImageMemoryBarrier2KHR imageBarrierOwnershipDecode = makeImageMemoryBarrier2(
1178 VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT_KHR, VK_ACCESS_NONE_KHR, VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR,
1179 VK_ACCESS_NONE_KHR, VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL, image, imageSubresourceRange,
1180 queueFamilyIndexDecode, queueFamilyIndexTransfer);
1181
1182 const VkImageMemoryBarrier2KHR imageBarrierOwnershipTransfer = makeImageMemoryBarrier2(
1183 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, VK_ACCESS_NONE_KHR, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
1184 VK_ACCESS_NONE_KHR, VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL, image, imageSubresourceRange,
1185 queueFamilyIndexDecode, queueFamilyIndexTransfer);
1186
1187 const VkImageMemoryBarrier2KHR imageBarrierTransfer = makeImageMemoryBarrier2(
1188 VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_ACCESS_MEMORY_WRITE_BIT, VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR,
1189 VK_ACCESS_TRANSFER_READ_BIT, VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, image,
1190 imageSubresourceRange);
1191
1192 const Move<VkCommandPool> cmdDecodePool(makeCommandPool(videoDeviceDriver, device, queueFamilyIndexDecode));
1193 const Move<VkCommandBuffer> cmdDecodeBuffer(
1194 allocateCommandBuffer(videoDeviceDriver, device, *cmdDecodePool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
1195 const Move<VkCommandPool> cmdTransferPool(makeCommandPool(videoDeviceDriver, device, queueFamilyIndexTransfer));
1196 const Move<VkCommandBuffer> cmdTransferBuffer(
1197 allocateCommandBuffer(videoDeviceDriver, device, *cmdTransferPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
1198
1199 Move<VkSemaphore> semaphore = createSemaphore(videoDeviceDriver, device);
1200 Move<VkFence> decodeFence = createFence(videoDeviceDriver, device);
1201 Move<VkFence> transferFence = createFence(videoDeviceDriver, device);
1202 VkFence fences[] = {*decodeFence, *transferFence};
1203 const VkPipelineStageFlags waitDstStageMask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
1204
1205 VkSubmitInfo decodeSubmitInfo = {
1206 VK_STRUCTURE_TYPE_SUBMIT_INFO, // VkStructureType sType;
1207 DE_NULL, // const void* pNext;
1208 0u, // uint32_t waitSemaphoreCount;
1209 DE_NULL, // const VkSemaphore* pWaitSemaphores;
1210 DE_NULL, // const VkPipelineStageFlags* pWaitDstStageMask;
1211 1u, // uint32_t commandBufferCount;
1212 &*cmdDecodeBuffer, // const VkCommandBuffer* pCommandBuffers;
1213 1u, // uint32_t signalSemaphoreCount;
1214 &*semaphore, // const VkSemaphore* pSignalSemaphores;
1215 };
1216 if (frame->frameCompleteSemaphore != VK_NULL_HANDLE)
1217 {
1218 decodeSubmitInfo.waitSemaphoreCount = 1;
1219 decodeSubmitInfo.pWaitSemaphores = &frame->frameCompleteSemaphore;
1220 decodeSubmitInfo.pWaitDstStageMask = &waitDstStageMask;
1221 }
1222 const VkSubmitInfo transferSubmitInfo = {
1223 VK_STRUCTURE_TYPE_SUBMIT_INFO, // VkStructureType sType;
1224 DE_NULL, // const void* pNext;
1225 1u, // uint32_t waitSemaphoreCount;
1226 &*semaphore, // const VkSemaphore* pWaitSemaphores;
1227 &waitDstStageMask, // const VkPipelineStageFlags* pWaitDstStageMask;
1228 1u, // uint32_t commandBufferCount;
1229 &*cmdTransferBuffer, // const VkCommandBuffer* pCommandBuffers;
1230 0u, // uint32_t signalSemaphoreCount;
1231 DE_NULL, // const VkSemaphore* pSignalSemaphores;
1232 };
1233
1234 beginCommandBuffer(videoDeviceDriver, *cmdDecodeBuffer, 0u);
1235 cmdPipelineImageMemoryBarrier2(videoDeviceDriver, *cmdDecodeBuffer, &imageBarrierDecode);
1236 cmdPipelineImageMemoryBarrier2(videoDeviceDriver, *cmdDecodeBuffer, &imageBarrierOwnershipDecode);
1237 endCommandBuffer(videoDeviceDriver, *cmdDecodeBuffer);
1238
1239 beginCommandBuffer(videoDeviceDriver, *cmdTransferBuffer, 0u);
1240 cmdPipelineImageMemoryBarrier2(videoDeviceDriver, *cmdTransferBuffer, &imageBarrierOwnershipTransfer);
1241 cmdPipelineImageMemoryBarrier2(videoDeviceDriver, *cmdTransferBuffer, &imageBarrierTransfer);
1242 endCommandBuffer(videoDeviceDriver, *cmdTransferBuffer);
1243
1244 VK_CHECK(videoDeviceDriver.queueSubmit(queueDecode, 1u, &decodeSubmitInfo, *decodeFence));
1245 VK_CHECK(videoDeviceDriver.queueSubmit(queueTransfer, 1u, &transferSubmitInfo, *transferFence));
1246
1247 VK_CHECK(videoDeviceDriver.waitForFences(device, DE_LENGTH_OF_ARRAY(fences), fences, true, ~0ull));
1248
1249 vkt::ycbcr::downloadImage(videoDeviceDriver, device, queueFamilyIndexTransfer, deviceContext.allocator(), image,
1250 multiPlaneImageData.get(), 0, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, imageLayerIndex);
1251
1252 const VkImageMemoryBarrier2KHR imageBarrierTransfer2 =
1253 makeImageMemoryBarrier2(VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR,
1254 VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT_KHR, VK_ACCESS_NONE_KHR,
1255 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, layout, image, imageSubresourceRange);
1256
1257 videoDeviceDriver.resetCommandBuffer(*cmdTransferBuffer, 0u);
1258 videoDeviceDriver.resetFences(device, 1, &*transferFence);
1259 beginCommandBuffer(videoDeviceDriver, *cmdTransferBuffer, 0u);
1260 cmdPipelineImageMemoryBarrier2(videoDeviceDriver, *cmdTransferBuffer, &imageBarrierTransfer2);
1261 endCommandBuffer(videoDeviceDriver, *cmdTransferBuffer);
1262
1263 const VkSubmitInfo transferSubmitInfo2 = {
1264 VK_STRUCTURE_TYPE_SUBMIT_INFO, // VkStructureType sType;
1265 DE_NULL, // const void* pNext;
1266 0u, // uint32_t waitSemaphoreCount;
1267 DE_NULL, // const VkSemaphore* pWaitSemaphores;
1268 DE_NULL, // const VkPipelineStageFlags* pWaitDstStageMask;
1269 1u, // uint32_t commandBufferCount;
1270 &*cmdTransferBuffer, // const VkCommandBuffer* pCommandBuffers;
1271 0u, // uint32_t signalSemaphoreCount;
1272 DE_NULL, // const VkSemaphore* pSignalSemaphores;
1273 };
1274
1275 VK_CHECK(videoDeviceDriver.queueSubmit(queueTransfer, 1u, &transferSubmitInfo2, *transferFence));
1276 VK_CHECK(videoDeviceDriver.waitForFences(device, 1, &*transferFence, true, ~0ull));
1277
1278 return multiPlaneImageData;
1279 }
1280 #endif // DE_BUILD_VIDEO
1281
1282 class VideoEncodeTestInstance : public VideoBaseTestInstance
1283 {
1284 public:
1285 VideoEncodeTestInstance(Context &context, const TestDefinition *testDefinition);
1286 ~VideoEncodeTestInstance(void);
1287
1288 tcu::TestStatus iterate(void);
1289
1290 protected:
1291 Move<VkQueryPool> createEncodeVideoQueries(const DeviceInterface &videoDeviceDriver, VkDevice device,
1292 uint32_t numQueries, const VkVideoProfileInfoKHR *pVideoProfile);
1293
1294 VkFormat checkImageFormat(VkImageUsageFlags flags, const VkVideoProfileListInfoKHR *videoProfileList,
1295 const VkFormat requiredFormat);
1296
1297 bool checkQueryResultSupport(void);
1298
1299 void printBuffer(const DeviceInterface &videoDeviceDriver, VkDevice device, const BufferWithMemory &buffer,
1300 VkDeviceSize bufferSize);
1301
1302 VkFormat getResultImageFormat(void);
1303
1304 const TestDefinition *m_testDefinition;
1305 };
1306
1307 VideoEncodeTestInstance::VideoEncodeTestInstance(Context &context, const TestDefinition *testDefinition)
1308 : VideoBaseTestInstance(context)
1309 , m_testDefinition(testDefinition)
1310 {
1311 }
1312
1313 VideoEncodeTestInstance::~VideoEncodeTestInstance(void)
1314 {
1315 }
1316
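// Create a VK_QUERY_TYPE_VIDEO_ENCODE_FEEDBACK_KHR query pool for the given profile, requesting
// bitstream buffer offset and bytes-written feedback for each query.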
1317 Move<VkQueryPool> VideoEncodeTestInstance::createEncodeVideoQueries(const DeviceInterface &videoDeviceDriver,
1318 VkDevice device, uint32_t numQueries,
1319 const VkVideoProfileInfoKHR *pVideoProfile)
1320 {
1321
1322 VkQueryPoolVideoEncodeFeedbackCreateInfoKHR encodeFeedbackQueryType = {
1323 VK_STRUCTURE_TYPE_QUERY_POOL_VIDEO_ENCODE_FEEDBACK_CREATE_INFO_KHR, // VkStructureType sType;
1324 pVideoProfile, // const void* pNext;
1325 VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_BUFFER_OFFSET_BIT_KHR |
1326 VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_BYTES_WRITTEN_BIT_KHR, // VkVideoEncodeFeedbackFlagsKHR encodeFeedbackFlags;
1327 };
1328
1329 const VkQueryPoolCreateInfo queryPoolCreateInfo = {
1330 VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO, // VkStructureType sType;
1331 static_cast<const void *>(&encodeFeedbackQueryType), // const void* pNext;
1332 0, // VkQueryPoolCreateFlags flags;
1333 VK_QUERY_TYPE_VIDEO_ENCODE_FEEDBACK_KHR, // VkQueryType queryType;
1334 numQueries, // uint32_t queryCount;
1335 0, // VkQueryPipelineStatisticFlags pipelineStatistics;
1336 };
1337
1338 return createQueryPool(videoDeviceDriver, device, &queryPoolCreateInfo);
1339 }
1340
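// Check that the implementation supports the required picture format for the given usage and
// video profile list; returns the format or throws NotSupportedError.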
1341 VkFormat VideoEncodeTestInstance::checkImageFormat(VkImageUsageFlags flags,
1342 const VkVideoProfileListInfoKHR *videoProfileList,
1343 const VkFormat requiredFormat)
1344 {
1345 const InstanceInterface &vki = m_context.getInstanceInterface();
1346 const VkPhysicalDevice physicalDevice = m_context.getPhysicalDevice();
1347 MovePtr<vector<VkFormat>> supportedFormats = getSupportedFormats(vki, physicalDevice, flags, videoProfileList);
1348
1349 if (supportedFormats == DE_NULL || supportedFormats->empty())
1350 TCU_THROW(NotSupportedError, "No supported picture formats");
1351
1352 for (const auto &supportedFormat : *supportedFormats)
1353 if (supportedFormat == requiredFormat)
1354 return requiredFormat;
1355
1356 TCU_THROW(NotSupportedError, "Failed to find required picture format");
1357 }
1358
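// Check whether any queue family reports queryResultStatusSupport by chaining
// VkQueueFamilyQueryResultStatusPropertiesKHR into the queue family properties query.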
1359 bool VideoEncodeTestInstance::checkQueryResultSupport(void)
1360 {
1361 uint32_t count = 0;
1362 auto &vkif = m_context.getInstanceInterface();
1363 vkif.getPhysicalDeviceQueueFamilyProperties2(m_context.getPhysicalDevice(), &count, nullptr);
1364 std::vector<VkQueueFamilyProperties2> queues(count);
1365 std::vector<VkQueueFamilyVideoPropertiesKHR> videoQueues(count);
1366 std::vector<VkQueueFamilyQueryResultStatusPropertiesKHR> queryResultStatus(count);
1367
1368 for (std::vector<VkQueueFamilyProperties2>::size_type i = 0; i < queues.size(); i++)
1369 {
1370 queues[i].sType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2;
1371 videoQueues[i].sType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_VIDEO_PROPERTIES_KHR;
1372 queues[i].pNext = &videoQueues[i];
1373 queryResultStatus[i].sType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_QUERY_RESULT_STATUS_PROPERTIES_KHR;
1374 videoQueues[i].pNext = &queryResultStatus[i];
1375 }
1376 vkif.getPhysicalDeviceQueueFamilyProperties2(m_context.getPhysicalDevice(), &count, queues.data());
1377
1378 for (auto &property : queryResultStatus)
1379 {
1380 if (property.queryResultStatusSupport)
1381 return true;
1382 }
1383
1384 return false;
1385 }
1386
1387 #if STREAM_DUMP_DEBUG
1388 bool saveBufferAsFile(const BufferWithMemory &buffer, VkDeviceSize bufferSize, const string &outputFileName)
1389 {
1390 auto &bufferAlloc = buffer.getAllocation();
1391 const auto dataPtr = reinterpret_cast<uint8_t *>(bufferAlloc.getHostPtr());
1392 ofstream outFile(outputFileName, ios::binary | ios::out);
1393
1394 if (!outFile.is_open())
1395 {
1396 cerr << "Error: Unable to open output file '" << outputFileName << "'." << endl;
1397 return false;
1398 }
1399
1400 outFile.write(reinterpret_cast<char *>(dataPtr), static_cast<std::streamsize>(bufferSize));
1401 outFile.close();
1402
1403 return true;
1404 }
1405
1406 bool saveYUVfile(const de::MovePtr<std::vector<uint8_t>> &data, const string &outputFileName)
1407 {
1408 ofstream outFile(outputFileName, ios::binary | ios::out);
1409
1410 if (!outFile.is_open())
1411 {
1412 cerr << "Error: Unable to open output file '" << outputFileName << "'." << endl;
1413 return false;
1414 }
1415
1416 if (data.get() == nullptr || data.get()->empty())
1417 {
1418 cerr << "Error: Data is empty or doesn't exist" << endl;
1419 return false;
1420 }
1421
1422 outFile.write(reinterpret_cast<char *>(data.get()->data()), data.get()->size());
1423 outFile.close();
1424
1425 return true;
1426 }
1427 #endif
1428
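// Main test body: set up the encode and decode profiles, select the source and DPB image
// formats, create a video-capable device with encode/decode/transfer queues, and query the
// codec capabilities before encoding the clip.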
1429 tcu::TestStatus VideoEncodeTestInstance::iterate(void)
1430 {
1431 const VkVideoCodecOperationFlagBitsKHR videoCodecEncodeOperation = m_testDefinition->getCodecOperation();
1432 const VkVideoCodecOperationFlagBitsKHR videoCodecDecodeOperation =
1433 getCodecDecodeOperationFromEncode(videoCodecEncodeOperation);
1434
1435 const uint32_t gopCount = m_testDefinition->gopCount();
1436 const uint32_t gopFrameCount = m_testDefinition->gopFrameCount();
1437 const uint32_t dpbSlots = m_testDefinition->gopReferenceFrameCount();
1438
1439 const bool queryStatus = m_testDefinition->hasOption(Option::UseStatusQueries);
1440 const bool useInlineQueries = m_testDefinition->hasOption(Option::UseInlineQueries);
1441 const bool resourcesWithoutProfiles = m_testDefinition->hasOption(Option::ResourcesWithoutProfiles);
1442 const bool resolutionChange = m_testDefinition->hasOption(Option::ResolutionChange);
1443 const bool swapOrder = m_testDefinition->hasOption(Option::SwapOrder);
1444 const bool useVariableBitrate = m_testDefinition->hasOption(Option::UseVariableBitrateControl);
1445 const bool useConstantBitrate = m_testDefinition->hasOption(Option::UseConstantBitrateControl);
1446 const bool disableRateControl = m_testDefinition->hasOption(Option::DisableRateControl);
1447 const bool customEncodeUsage = m_testDefinition->hasOption(Option::UseEncodeUsage);
1448 const bool useQualityLevel = m_testDefinition->hasOption(Option::UseQualityLevel);
1449
1450 const bool rateControl = useVariableBitrate || useConstantBitrate || disableRateControl;
1451 const uint32_t constQp = 28;
1452 const uint32_t maxQpValue = rateControl ? 42 : 51;
1453
1454 const VkExtent2D codedExtent = {m_testDefinition->getClipWidth(), m_testDefinition->getClipHeight()};
1455
1456 const MovePtr<VkVideoEncodeUsageInfoKHR> encodeUsageInfo = getEncodeUsageInfo(
1457 m_testDefinition->getEncodeProfileExtension(),
1458 customEncodeUsage ? VK_VIDEO_ENCODE_USAGE_STREAMING_BIT_KHR : VK_VIDEO_ENCODE_USAGE_DEFAULT_KHR,
1459 customEncodeUsage ? VK_VIDEO_ENCODE_CONTENT_DESKTOP_BIT_KHR : VK_VIDEO_ENCODE_CONTENT_DEFAULT_KHR,
1460 customEncodeUsage ? VK_VIDEO_ENCODE_TUNING_MODE_HIGH_QUALITY_KHR : VK_VIDEO_ENCODE_TUNING_MODE_DEFAULT_KHR);
1461
1462 const MovePtr<VkVideoProfileInfoKHR> videoEncodeProfile =
1463 getVideoProfile(videoCodecEncodeOperation, encodeUsageInfo.get());
1464 const MovePtr<VkVideoProfileInfoKHR> videoDecodeProfile =
1465 getVideoProfile(videoCodecDecodeOperation, m_testDefinition->getDecodeProfileExtension());
1466
1467 const MovePtr<VkVideoProfileListInfoKHR> videoEncodeProfileList = getVideoProfileList(videoEncodeProfile.get(), 1);
1468
1469 const VkFormat imageFormat = checkImageFormat(VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR, videoEncodeProfileList.get(),
1470 VK_FORMAT_G8_B8R8_2PLANE_420_UNORM);
1471 const VkFormat dpbImageFormat = checkImageFormat(VK_IMAGE_USAGE_VIDEO_ENCODE_DPB_BIT_KHR,
1472 videoEncodeProfileList.get(), VK_FORMAT_G8_B8R8_2PLANE_420_UNORM);
1473
1474 const VideoDevice::VideoDeviceFlags videoDeviceFlags = m_testDefinition->requiredDeviceFlags();
1475
1476 if (queryStatus && !checkQueryResultSupport())
1477 TCU_THROW(NotSupportedError, "Implementation does not support query status");
1478
1479 const InstanceInterface &vki = m_context.getInstanceInterface();
1480 const VkPhysicalDevice physicalDevice = m_context.getPhysicalDevice();
1481 const VkDevice videoDevice =
1482 getDeviceSupportingQueue(VK_QUEUE_VIDEO_ENCODE_BIT_KHR | VK_QUEUE_VIDEO_DECODE_BIT_KHR | VK_QUEUE_TRANSFER_BIT,
1483 videoCodecEncodeOperation | videoCodecDecodeOperation, videoDeviceFlags);
1484 const DeviceInterface &videoDeviceDriver = getDeviceDriver();
1485
1486 const uint32_t encodeQueueFamilyIndex = getQueueFamilyIndexEncode();
1487 const uint32_t decodeQueueFamilyIndex = getQueueFamilyIndexDecode();
1488 const uint32_t transferQueueFamilyIndex = getQueueFamilyIndexTransfer();
1489
1490 const VkQueue encodeQueue = getDeviceQueue(videoDeviceDriver, videoDevice, encodeQueueFamilyIndex, 0u);
1491 const VkQueue decodeQueue = getDeviceQueue(videoDeviceDriver, videoDevice, decodeQueueFamilyIndex, 0u);
1492 const VkQueue transferQueue = getDeviceQueue(videoDeviceDriver, videoDevice, transferQueueFamilyIndex, 0u);
1493
1494 const MovePtr<VkVideoEncodeH264CapabilitiesKHR> videoH264CapabilitiesExtension =
1495 getVideoCapabilitiesExtensionH264E();
1496 const MovePtr<VkVideoEncodeH265CapabilitiesKHR> videoH265CapabilitiesExtension =
1497 getVideoCapabilitiesExtensionH265E();
1498
1499 void *videoCapabilitiesExtensionPtr = NULL;
1500
1501 if (m_testDefinition->getProfile()->IsH264())
1502 {
1503 videoCapabilitiesExtensionPtr = static_cast<void *>(videoH264CapabilitiesExtension.get());
1504 }
1505 else if (m_testDefinition->getProfile()->IsH265())
1506 {
1507 videoCapabilitiesExtensionPtr = static_cast<void *>(videoH265CapabilitiesExtension.get());
1508 }
1509 DE_ASSERT(videoCapabilitiesExtensionPtr);
1510
1511 const MovePtr<VkVideoEncodeCapabilitiesKHR> videoEncodeCapabilities =
1512 getVideoEncodeCapabilities(videoCapabilitiesExtensionPtr);
1513 const MovePtr<VkVideoCapabilitiesKHR> videoCapabilities =
1514 getVideoCapabilities(vki, physicalDevice, videoEncodeProfile.get(), videoEncodeCapabilities.get());
1515
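// The test packs all encoded frames into a single bitstream buffer and relies on the
// bitstream-bytes-written feedback reported through the encode queries used below to
// track how much of the buffer each frame consumed, hence the assertion on
// supportedEncodeFeedbackFlags.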
1516 DE_ASSERT(videoEncodeCapabilities->supportedEncodeFeedbackFlags &
1517 VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_BYTES_WRITTEN_BIT_KHR);
1518
1519 // Check support for P and B frames
1520 if (m_testDefinition->getProfile()->IsH264())
1521 {
1522 bool minPReferenceCount = videoH264CapabilitiesExtension->maxPPictureL0ReferenceCount > 0;
1523 bool minBReferenceCount = videoH264CapabilitiesExtension->maxBPictureL0ReferenceCount > 0;
1524 bool minL1ReferenceCount = videoH264CapabilitiesExtension->maxL1ReferenceCount > 0;
1525
1526 if (m_testDefinition->patternContain(P_FRAME) && !minPReferenceCount)
1527 {
1528 TCU_THROW(NotSupportedError, "Implementation does not support H264 P frames encoding");
1529 }
1530 else if (m_testDefinition->patternContain(B_FRAME) && !minBReferenceCount && !minL1ReferenceCount)
1531 {
1532 TCU_THROW(NotSupportedError, "Implementation does not support H264 B frames encoding");
1533 }
1534 }
1535 else if (m_testDefinition->getProfile()->IsH265())
1536 {
1537 bool minPReferenceCount = videoH265CapabilitiesExtension->maxPPictureL0ReferenceCount > 0;
1538 bool minBReferenceCount = videoH265CapabilitiesExtension->maxBPictureL0ReferenceCount > 0;
1539 bool minL1ReferenceCount = videoH265CapabilitiesExtension->maxL1ReferenceCount > 0;
1540
1541 if (m_testDefinition->patternContain(P_FRAME) && !minPReferenceCount)
1542 {
1543 TCU_THROW(NotSupportedError, "Implementation does not support H265 P frames encoding");
1544 }
1545 else if (m_testDefinition->patternContain(B_FRAME) && !minBReferenceCount && !minL1ReferenceCount)
1546 {
1547 TCU_THROW(NotSupportedError, "Implementation does not support H265 B frames encoding");
1548 }
1549 }
1550
1551 // Check support for bitrate control
1552 if (m_testDefinition->hasOption(Option::UseVariableBitrateControl))
1553 {
1554 if ((videoEncodeCapabilities->rateControlModes & VK_VIDEO_ENCODE_RATE_CONTROL_MODE_VBR_BIT_KHR) == 0)
1555 TCU_THROW(NotSupportedError, "Implementation does not support variable bitrate control");
1556
1557 DE_ASSERT(videoEncodeCapabilities->maxBitrate > 0);
1558 }
1559 else if (m_testDefinition->hasOption(Option::UseConstantBitrateControl))
1560 {
1561 if ((videoEncodeCapabilities->rateControlModes & VK_VIDEO_ENCODE_RATE_CONTROL_MODE_CBR_BIT_KHR) == 0)
1562 TCU_THROW(NotSupportedError, "Implementation does not support constant bitrate control");
1563
1564 DE_ASSERT(videoEncodeCapabilities->maxBitrate > 0);
1565 }
1566
1567 VkDeviceSize bitstreamBufferOffset = 0u;
1568 VkDeviceSize minBitstreamBufferOffsetAlignment = videoCapabilities->minBitstreamBufferOffsetAlignment;
1569
1570 Allocator &allocator = getAllocator();
1571
1572 DE_ASSERT(videoCapabilities->maxDpbSlots >= dpbSlots);
1573
1574 const MovePtr<VkVideoSessionCreateInfoKHR> videoEncodeSessionCreateInfo = getVideoSessionCreateInfo(
1575 encodeQueueFamilyIndex, useInlineQueries ? VK_VIDEO_SESSION_CREATE_INLINE_QUERIES_BIT_KHR : 0,
1576 videoEncodeProfile.get(), codedExtent, imageFormat, dpbImageFormat, dpbSlots,
1577 videoCapabilities->maxActiveReferencePictures);
1578
1579 const Move<VkVideoSessionKHR> videoEncodeSession =
1580 createVideoSessionKHR(videoDeviceDriver, videoDevice, videoEncodeSessionCreateInfo.get());
1581 const vector<AllocationPtr> encodeAllocation =
1582 getAndBindVideoSessionMemory(videoDeviceDriver, videoDevice, *videoEncodeSession, allocator);
1583
1584 // Must be smaller than the maxQualityLevels capabilities limit supported by the specified video profile
1585 uint32_t qualityLevel = 0;
1586 DE_ASSERT(qualityLevel < videoEncodeCapabilities->maxQualityLevels);
1587
1588 const MovePtr<VkVideoEncodeQualityLevelInfoKHR> videoEncodeQualityLevelInfo =
1589 getVideoEncodeQualityLevelInfo(qualityLevel, DE_NULL);
1590
1591 std::vector<MovePtr<StdVideoH264SequenceParameterSet>> stdVideoH264SequenceParameterSets;
1592 std::vector<MovePtr<StdVideoH264PictureParameterSet>> stdVideoH264PictureParameterSets;
1593 std::vector<MovePtr<VkVideoEncodeH264SessionParametersAddInfoKHR>> encodeH264SessionParametersAddInfoKHRs;
1594 std::vector<MovePtr<VkVideoEncodeH264SessionParametersCreateInfoKHR>> H264sessionParametersCreateInfos;
1595
1596 std::vector<MovePtr<StdVideoH265ProfileTierLevel>> stdVideoH265ProfileTierLevels;
1597 std::vector<MovePtr<StdVideoH265DecPicBufMgr>> stdVideoH265DecPicBufMgrs;
1598 std::vector<MovePtr<StdVideoH265VideoParameterSet>> stdVideoH265VideoParameterSets;
1599 std::vector<MovePtr<StdVideoH265SequenceParameterSetVui>> stdVideoH265SequenceParameterSetVuis;
1600 std::vector<MovePtr<StdVideoH265SequenceParameterSet>> stdVideoH265SequenceParameterSets;
1601 std::vector<MovePtr<StdVideoH265PictureParameterSet>> stdVideoH265PictureParameterSets;
1602 std::vector<MovePtr<VkVideoEncodeH265SessionParametersAddInfoKHR>> encodeH265SessionParametersAddInfoKHRs;
1603 std::vector<MovePtr<VkVideoEncodeH265SessionParametersCreateInfoKHR>> H265sessionParametersCreateInfos;
1604
1605 std::vector<MovePtr<VkVideoSessionParametersCreateInfoKHR>> videoEncodeSessionParametersCreateInfos;
1606 std::vector<Move<VkVideoSessionParametersKHR>> videoEncodeSessionParameters;
1607
1608 for (int i = 0; i < (resolutionChange ? 2 : 1); ++i)
1609 {
1610 // The second set of videoEncodeSessionParameters is created with half the coded extent (resolution-change case)
1611 uint32_t extentWidth = i == 0 ? codedExtent.width : codedExtent.width / 2;
1612 uint32_t extentHeight = i == 0 ? codedExtent.height : codedExtent.height / 2;
1613
1614 stdVideoH264SequenceParameterSets.push_back(getStdVideoH264EncodeSequenceParameterSet(
1615 extentWidth, extentHeight, m_testDefinition->maxNumRefs(), DE_NULL));
1616 stdVideoH264PictureParameterSets.push_back(getStdVideoH264EncodePictureParameterSet(
1617 m_testDefinition->ppsActiveRefs0(), m_testDefinition->ppsActiveRefs1()));
1618 encodeH264SessionParametersAddInfoKHRs.push_back(createVideoEncodeH264SessionParametersAddInfoKHR(
1619 1u, stdVideoH264SequenceParameterSets.back().get(), 1u, stdVideoH264PictureParameterSets.back().get()));
1620 H264sessionParametersCreateInfos.push_back(createVideoEncodeH264SessionParametersCreateInfoKHR(
1621 useQualityLevel ? videoEncodeQualityLevelInfo.get() : DE_NULL, 1u, 1u,
1622 encodeH264SessionParametersAddInfoKHRs.back().get()));
1623
1624 stdVideoH265ProfileTierLevels.push_back(
1625 getStdVideoH265ProfileTierLevel(STD_VIDEO_H265_PROFILE_IDC_MAIN, STD_VIDEO_H265_LEVEL_IDC_6_2));
1626 stdVideoH265DecPicBufMgrs.push_back(getStdVideoH265DecPicBufMgr());
1627 stdVideoH265VideoParameterSets.push_back(getStdVideoH265VideoParameterSet(
1628 stdVideoH265DecPicBufMgrs.back().get(), stdVideoH265ProfileTierLevels.back().get()));
1629 stdVideoH265SequenceParameterSetVuis.push_back(
1630 getStdVideoH265SequenceParameterSetVui(m_testDefinition->getClipFrameRate()));
1631 stdVideoH265SequenceParameterSets.push_back(getStdVideoH265SequenceParameterSet(
1632 extentWidth, extentHeight, videoH265CapabilitiesExtension->ctbSizes,
1633 videoH265CapabilitiesExtension->transformBlockSizes, stdVideoH265DecPicBufMgrs.back().get(),
1634 stdVideoH265ProfileTierLevels.back().get(), stdVideoH265SequenceParameterSetVuis.back().get()));
1635 stdVideoH265PictureParameterSets.push_back(
1636 getStdVideoH265PictureParameterSet(videoH265CapabilitiesExtension.get()));
1637 encodeH265SessionParametersAddInfoKHRs.push_back(getVideoEncodeH265SessionParametersAddInfoKHR(
1638 1u, stdVideoH265VideoParameterSets.back().get(), 1u, stdVideoH265SequenceParameterSets.back().get(), 1u,
1639 stdVideoH265PictureParameterSets.back().get()));
1640 H265sessionParametersCreateInfos.push_back(getVideoEncodeH265SessionParametersCreateInfoKHR(
1641 useQualityLevel ? videoEncodeQualityLevelInfo.get() : DE_NULL, 1u, 1u, 1u,
1642 encodeH265SessionParametersAddInfoKHRs.back().get()));
1643
1644 const void *sessionParametersCreateInfoPtr = DE_NULL;
1645
1646 if (m_testDefinition->getProfile()->IsH264())
1647 {
1648 sessionParametersCreateInfoPtr = static_cast<const void *>(H264sessionParametersCreateInfos.back().get());
1649 }
1650 else if (m_testDefinition->getProfile()->IsH265())
1651 {
1652 sessionParametersCreateInfoPtr = static_cast<const void *>(H265sessionParametersCreateInfos.back().get());
1653 }
1654 DE_ASSERT(sessionParametersCreateInfoPtr);
1655
1656 videoEncodeSessionParametersCreateInfos.push_back(
1657 getVideoSessionParametersCreateInfoKHR(sessionParametersCreateInfoPtr, *videoEncodeSession));
1658 videoEncodeSessionParameters.push_back(createVideoSessionParametersKHR(
1659 videoDeviceDriver, videoDevice, videoEncodeSessionParametersCreateInfos.back().get()));
1660 }
1661
1662 const VkImageUsageFlags dpbImageUsage = VK_IMAGE_USAGE_VIDEO_ENCODE_DPB_BIT_KHR | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1663 // If the implementation does not support separate reference images for the DPB, a single image with one array layer per slot must be used
1664 const bool separateReferenceImages =
1665 videoCapabilities.get()->flags & VK_VIDEO_CAPABILITY_SEPARATE_REFERENCE_IMAGES_BIT_KHR;
1666 const VkImageCreateInfo dpbImageCreateInfo =
1667 makeImageCreateInfo(imageFormat, codedExtent, 0, &encodeQueueFamilyIndex, dpbImageUsage,
1668 videoEncodeProfileList.get(), separateReferenceImages ? 1 : dpbSlots);
1669 const VkImageViewType dpbImageViewType =
1670 separateReferenceImages ? VK_IMAGE_VIEW_TYPE_2D : VK_IMAGE_VIEW_TYPE_2D_ARRAY;
1671
1672 std::vector<std::unique_ptr<const ImageWithMemory>> dpbImages;
1673
1674 for (uint8_t i = 0; i < (separateReferenceImages ? dpbSlots : 1); ++i)
1675 {
1676 std::unique_ptr<ImageWithMemory> dpbImage(new ImageWithMemory(videoDeviceDriver, videoDevice, getAllocator(),
1677 dpbImageCreateInfo, MemoryRequirement::Any));
1678 dpbImages.push_back(std::move(dpbImage));
1679 }
1680
1681 std::vector<MovePtr<StdVideoEncodeH264ReferenceInfo>> H264refInfos;
1682 std::vector<MovePtr<StdVideoEncodeH265ReferenceInfo>> H265refInfos;
1683
1684 std::vector<MovePtr<VkVideoEncodeH264DpbSlotInfoKHR>> H264dpbSlotInfos;
1685 std::vector<MovePtr<VkVideoEncodeH265DpbSlotInfoKHR>> H265dpbSlotInfos;
1686
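// Build codec reference info and DPB slot info only for frames that are reconstructed
// into the DPB; in these GOP patterns B frames are never used as references, so they
// are skipped here and never claim a DPB slot.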
1687 for (uint8_t i = 0, j = 0; i < gopFrameCount; ++i)
1688 {
1689 if (m_testDefinition->frameType(i) == B_FRAME)
1690 continue;
1691
1692 H264refInfos.push_back(getStdVideoEncodeH264ReferenceInfo(getH264PictureType(m_testDefinition->frameType(i)),
1693 m_testDefinition->frameNumber(i),
1694 m_testDefinition->frameIdx(i) * 2));
1695 H265refInfos.push_back(getStdVideoEncodeH265ReferenceInfo(getH265PictureType(m_testDefinition->frameType(i)),
1696 m_testDefinition->frameIdx(i)));
1697
1698 H264dpbSlotInfos.push_back(getVideoEncodeH264DpbSlotInfo(H264refInfos[j].get()));
1699 H265dpbSlotInfos.push_back(getVideoEncodeH265DpbSlotInfo(H265refInfos[j].get()));
1700
1701 j++;
1702 }
1703
1704 std::vector<std::unique_ptr<const Move<VkImageView>>> dpbImageViews;
1705 std::vector<std::unique_ptr<const VkVideoPictureResourceInfoKHR>> dpbPictureResources;
1706
1707 std::vector<VkVideoReferenceSlotInfoKHR> dpbImageVideoReferenceSlots;
1708
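// One image view, picture resource and reference slot is prepared per DPB slot.
// Unless the SwapOrder variant pre-assigns slot indices, each slot starts with
// slotIndex -1 (not yet active); the real index is written into the setup reference
// slot just before the frame that reconstructs into it is encoded.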
1709 for (uint8_t i = 0, j = 0; i < gopFrameCount; ++i)
1710 {
1711 if (m_testDefinition->frameType(i) == B_FRAME)
1712 continue;
1713
1714 const VkImageSubresourceRange dpbImageSubresourceRange = {
1715 VK_IMAGE_ASPECT_COLOR_BIT, // VkImageAspectFlags aspectMask;
1716 0, // uint32_t baseMipLevel;
1717 1, // uint32_t levelCount;
1718 separateReferenceImages ? static_cast<uint32_t>(0) : static_cast<uint32_t>(j), // uint32_t baseArrayLayer;
1719 1, // uint32_t layerCount;
1720 };
1721
1722 std::unique_ptr<Move<VkImageView>> dpbImageView(new Move<VkImageView>(
1723 makeImageView(videoDeviceDriver, videoDevice, dpbImages[separateReferenceImages ? j : 0]->get(),
1724 dpbImageViewType, imageFormat, dpbImageSubresourceRange)));
1725 std::unique_ptr<VkVideoPictureResourceInfoKHR> dpbPictureResource(
1726 new VkVideoPictureResourceInfoKHR(makeVideoPictureResource(codedExtent, 0, dpbImageView->get())));
1727
1728 dpbImageViews.push_back(std::move(dpbImageView));
1729 dpbPictureResources.push_back(std::move(dpbPictureResource));
1730
1731 const void *dpbSlotInfoPtr = DE_NULL;
1732
1733 if (m_testDefinition->getProfile()->IsH264())
1734 {
1735 dpbSlotInfoPtr = static_cast<const void *>(H264dpbSlotInfos[j].get());
1736 }
1737 else if (m_testDefinition->getProfile()->IsH265())
1738 {
1739 dpbSlotInfoPtr = static_cast<const void *>(H265dpbSlotInfos[j].get());
1740 }
1741 DE_ASSERT(dpbSlotInfoPtr);
1742
1743 dpbImageVideoReferenceSlots.push_back(
1744 makeVideoReferenceSlot(swapOrder ? j : -1, dpbPictureResources[j].get(), dpbSlotInfoPtr));
1745
1746 j++;
1747 }
1748
1749 const VkImageUsageFlags imageUsage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR;
1750
1751 const VkImageSubresourceRange imageSubresourceRange =
1752 makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1);
1753 const VkImageViewType imageViewType = VK_IMAGE_VIEW_TYPE_2D;
1754
1755 std::vector<std::unique_ptr<const ImageWithMemory>> imageVector;
1756 std::vector<std::unique_ptr<const Move<VkImageView>>> imageViewVector;
1757 std::vector<std::unique_ptr<const VkVideoPictureResourceInfoKHR>> imagePictureResourceVector;
1758
1759 for (uint32_t i = 0; i < gopCount; ++i)
1760 {
1761 for (uint32_t j = 0; j < gopFrameCount; ++j)
1762 {
1763 VkExtent2D currentCodedExtent = codedExtent;
1764 if (resolutionChange && i == 1)
1765 {
1766 currentCodedExtent.width /= 2;
1767 currentCodedExtent.height /= 2;
1768 }
1769
1770 const VkImageCreateInfo imageCreateInfo =
1771 makeImageCreateInfo(imageFormat, currentCodedExtent,
1772 resourcesWithoutProfiles ? VK_IMAGE_CREATE_VIDEO_PROFILE_INDEPENDENT_BIT_KHR : 0,
1773 &transferQueueFamilyIndex, imageUsage,
1774 resourcesWithoutProfiles ? DE_NULL : videoEncodeProfileList.get());
1775
1776 std::unique_ptr<const ImageWithMemory> image(new ImageWithMemory(
1777 videoDeviceDriver, videoDevice, getAllocator(), imageCreateInfo, MemoryRequirement::Any));
1778 std::unique_ptr<const Move<VkImageView>> imageView(new Move<VkImageView>(makeImageView(
1779 videoDeviceDriver, videoDevice, image->get(), imageViewType, imageFormat, imageSubresourceRange)));
1780 std::unique_ptr<const VkVideoPictureResourceInfoKHR> imagePictureResource(
1781 new VkVideoPictureResourceInfoKHR(makeVideoPictureResource(currentCodedExtent, 0, **imageView)));
1782
1783 imageVector.push_back(std::move(image));
1784 imageViewVector.push_back(std::move(imageView));
1785 imagePictureResourceVector.push_back(std::move(imagePictureResource));
1786 }
1787 }
1788
1789 const vector<uint32_t> encodeQueueFamilyIndices(1u, encodeQueueFamilyIndex);
1790
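// Destination bitstream buffer: sized very conservatively at one aligned, uncompressed
// YCbCr frame per encoded frame, which in practice leaves ample room for the compressed
// output plus the parameter set headers written at the front.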
1791 const VkBufferUsageFlags encodeBufferUsageFlags =
1792 VK_BUFFER_USAGE_VIDEO_ENCODE_DST_BIT_KHR | VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1793 const VkDeviceSize encodeFrameBufferSize = getBufferSize(imageFormat, codedExtent.width, codedExtent.height);
1794 const VkDeviceSize encodeFrameBufferSizeAligned =
1795 deAlign64(encodeFrameBufferSize, videoCapabilities->minBitstreamBufferSizeAlignment);
1796 const VkDeviceSize encodeBufferSize = encodeFrameBufferSizeAligned * gopFrameCount * gopCount;
1797
1798 const VkBufferCreateInfo encodeBufferCreateInfo = makeBufferCreateInfo(
1799 encodeBufferSize, encodeBufferUsageFlags, encodeQueueFamilyIndices, 0, videoEncodeProfileList.get());
1800
1801 BufferWithMemory encodeBuffer(videoDeviceDriver, videoDevice, getAllocator(), encodeBufferCreateInfo,
1802 MemoryRequirement::Local | MemoryRequirement::HostVisible);
1803
1804 Allocation &encodeBufferAlloc = encodeBuffer.getAllocation();
1805 void *encodeBufferHostPtr = encodeBufferAlloc.getHostPtr();
1806
1807 Move<VkQueryPool> encodeQueryPool =
1808 createEncodeVideoQueries(videoDeviceDriver, videoDevice, 2, videoEncodeProfile.get());
1809
1810 deMemset(encodeBufferHostPtr, 0x00, static_cast<uint32_t>(encodeBufferSize));
1811 flushAlloc(videoDeviceDriver, videoDevice, encodeBufferAlloc);
1812
1813 de::MovePtr<vector<uint8_t>> clip = loadVideoData(m_testDefinition->getClipFilename());
1814
1815 std::vector<MovePtr<MultiPlaneImageData>> multiPlaneImageDataVector;
1816 std::vector<de::MovePtr<std::vector<uint8_t>>> inVector;
1817
1818 for (uint32_t i = 0; i < gopCount; ++i)
1819 {
1820 for (uint32_t j = 0; j < gopFrameCount; ++j)
1821 {
1822 uint32_t index = i * gopFrameCount + j;
1823
1824 uint32_t extentWidth = codedExtent.width;
1825 uint32_t extentHeight = codedExtent.height;
1826
1827 bool half_size = false;
1828
1829 if (resolutionChange && i == 1)
1830 {
1831 extentWidth /= 2;
1832 extentHeight /= 2;
1833 half_size = true;
1834 }
1835
1836 MovePtr<MultiPlaneImageData> multiPlaneImageData(
1837 new MultiPlaneImageData(imageFormat, tcu::UVec2(extentWidth, extentHeight)));
1838 extractYUV420pFrame(*clip, index, codedExtent.width, codedExtent.height, multiPlaneImageData.get(),
1839 half_size);
1840
1841 // Save NV12 frame as YUV
1842 de::MovePtr<std::vector<uint8_t>> in = saveNV12FrameAsYUV(multiPlaneImageData.get());
1843
1844 #if STREAM_DUMP_DEBUG
1845 std::string filename = "in_" + std::to_string(index) + ".yuv";
1846 saveYUVfile(in, filename.c_str());
1847 #endif
1848
1849 vkt::ycbcr::uploadImage(videoDeviceDriver, videoDevice, transferQueueFamilyIndex, allocator,
1850 *(*imageVector[index]), *multiPlaneImageData, 0, VK_IMAGE_LAYOUT_GENERAL);
1851
1852 multiPlaneImageDataVector.push_back(std::move(multiPlaneImageData));
1853 inVector.push_back(std::move(in));
1854 }
1855 }
1856
1857 VkVideoEncodeSessionParametersFeedbackInfoKHR videoEncodeSessionParametersFeedbackInfo = {
1858 VK_STRUCTURE_TYPE_VIDEO_ENCODE_SESSION_PARAMETERS_FEEDBACK_INFO_KHR, // VkStructureType sType;
1859 DE_NULL, // void* pNext;
1860 false, // VkBool32 hasOverrides;
1861 };
1862
1863 const VkVideoEncodeH264SessionParametersGetInfoKHR videoEncodeH264SessionParametersGetInfo = {
1864 VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_GET_INFO_KHR, // VkStructureType sType;
1865 DE_NULL, // const void* pNext;
1866 true, // VkBool32 writeStdSPS;
1867 true, // VkBool32 writeStdPPS;
1868 0, // uint32_t stdSPSId;
1869 0, // uint32_t stdPPSId;
1870 };
1871
1872 const VkVideoEncodeH265SessionParametersGetInfoKHR videoEncodeH265SessionParametersGetInfo = {
1873 VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_GET_INFO_KHR, // VkStructureType sType;
1874 DE_NULL, // const void* pNext;
1875 true, // VkBool32 writeStdVPS;
1876 true, // VkBool32 writeStdSPS;
1877 true, // VkBool32 writeStdPPS;
1878 0, // uint32_t stdVPSId;
1879 0, // uint32_t stdSPSId;
1880 0, // uint32_t stdPPSId;
1881 };
1882
1883 const void *videoEncodeSessionParametersGetInfoPtr = DE_NULL;
1884
1885 if (m_testDefinition->getProfile()->IsH264())
1886 {
1887 videoEncodeSessionParametersGetInfoPtr = static_cast<const void *>(&videoEncodeH264SessionParametersGetInfo);
1888 }
1889 else if (m_testDefinition->getProfile()->IsH265())
1890 {
1891 videoEncodeSessionParametersGetInfoPtr = static_cast<const void *>(&videoEncodeH265SessionParametersGetInfo);
1892 }
1893 DE_ASSERT(videoEncodeSessionParametersGetInfoPtr);
1894
1895 std::vector<std::vector<uint8_t>> headersData;
1896
1897 for (int i = 0; i < (resolutionChange ? 2 : 1); ++i)
1898 {
1899 const VkVideoEncodeSessionParametersGetInfoKHR videoEncodeSessionParametersGetInfo = {
1900 VK_STRUCTURE_TYPE_VIDEO_ENCODE_SESSION_PARAMETERS_GET_INFO_KHR, // VkStructureType sType;
1901 videoEncodeSessionParametersGetInfoPtr, // const void* pNext;
1902 videoEncodeSessionParameters[i].get(), // VkVideoSessionParametersKHR videoSessionParameters;
1903 };
1904
1905 std::vector<uint8_t> headerData;
1906
1907 size_t requiredHeaderSize = 0;
1908 VK_CHECK(videoDeviceDriver.getEncodedVideoSessionParametersKHR(
1909 videoDevice, &videoEncodeSessionParametersGetInfo, &videoEncodeSessionParametersFeedbackInfo,
1910 &requiredHeaderSize, DE_NULL));
1911
1912 DE_ASSERT(requiredHeaderSize != 0);
1913
1914 headerData.resize(requiredHeaderSize);
1915 VK_CHECK(videoDeviceDriver.getEncodedVideoSessionParametersKHR(
1916 videoDevice, &videoEncodeSessionParametersGetInfo, &videoEncodeSessionParametersFeedbackInfo,
1917 &requiredHeaderSize, headerData.data()));
1918
1919 headersData.push_back(std::move(headerData));
1920 }
1921
1922 // Pre-fill the bitstream buffer with the SPS and PPS headers
1923 fillBuffer(videoDeviceDriver, videoDevice, encodeBufferAlloc, headersData[0].data(), headersData[0].size(),
1924 bitstreamBufferOffset);
1925
1926 // Move offset to accommodate header data
1927 bitstreamBufferOffset =
1928 deAlign64(bitstreamBufferOffset + headersData[0].size(), videoCapabilities->minBitstreamBufferSizeAlignment);
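// Assuming the usual round-up semantics of deAlign64 (offset rounded up to the next
// multiple of the power-of-two alignment), e.g. deAlign64(100, 64) == 128, the first
// encoded frame is written at the first correctly aligned byte past the headers.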
1929
1930 const Unique<VkCommandPool> encodeCmdPool(makeCommandPool(videoDeviceDriver, videoDevice, encodeQueueFamilyIndex));
1931 const Unique<VkCommandBuffer> firstEncodeCmdBuffer(
1932 allocateCommandBuffer(videoDeviceDriver, videoDevice, *encodeCmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
1933 const Unique<VkCommandBuffer> secondEncodeCmdBuffer(
1934 allocateCommandBuffer(videoDeviceDriver, videoDevice, *encodeCmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
1935
1936 // Rate control
1937 const de::MovePtr<VkVideoEncodeH264RateControlLayerInfoKHR> videoEncodeH264RateControlLayerInfo =
1938 getVideoEncodeH264RateControlLayerInfo(true, 0, 0, 0, true, maxQpValue, maxQpValue, maxQpValue);
1939 const de::MovePtr<VkVideoEncodeH265RateControlLayerInfoKHR> videoEncodeH265RateControlLayerInfo =
1940 getVideoEncodeH265RateControlLayerInfo(true, maxQpValue, maxQpValue, maxQpValue);
1941
1942 const void *videoEncodeRateControlLayerInfoPtr = DE_NULL;
1943
1944 if (m_testDefinition->getProfile()->IsH264())
1945 {
1946 videoEncodeRateControlLayerInfoPtr = static_cast<const void *>(videoEncodeH264RateControlLayerInfo.get());
1947 }
1948 else if (m_testDefinition->getProfile()->IsH265())
1949 {
1950 videoEncodeRateControlLayerInfoPtr = static_cast<const void *>(videoEncodeH265RateControlLayerInfo.get());
1951 }
1952 DE_ASSERT(videoEncodeRateControlLayerInfoPtr);
1953
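// The rate control mode below maps directly onto the test options:
//   no rate control option     -> DEFAULT  (implementation-chosen behaviour)
//   DisableRateControl         -> DISABLED (constant per-slice QP)
//   UseVariableBitrateControl  -> VBR
//   UseConstantBitrateControl  -> CBR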
1954 const VkVideoEncodeRateControlModeFlagBitsKHR rateControlMode =
1955 !rateControl ? VK_VIDEO_ENCODE_RATE_CONTROL_MODE_DEFAULT_KHR :
1956 (disableRateControl ? VK_VIDEO_ENCODE_RATE_CONTROL_MODE_DISABLED_BIT_KHR :
1957 (useVariableBitrate ? VK_VIDEO_ENCODE_RATE_CONTROL_MODE_VBR_BIT_KHR :
1958 VK_VIDEO_ENCODE_RATE_CONTROL_MODE_CBR_BIT_KHR));
1959
1960 const de::MovePtr<VkVideoEncodeRateControlLayerInfoKHR> videoEncodeRateControlLayerInfo =
1961 getVideoEncodeRateControlLayerInfo(videoEncodeRateControlLayerInfoPtr, rateControlMode,
1962 m_testDefinition->getClipFrameRate());
1963
1964 const VkVideoEncodeH264RateControlInfoKHR videoEncodeH264RateControlInfo = {
1965 VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_INFO_KHR, // VkStructureType sType;
1966 DE_NULL, // const void* pNext;
1967 VK_VIDEO_ENCODE_H264_RATE_CONTROL_REGULAR_GOP_BIT_KHR, // VkVideoEncodeH264RateControlFlagsKHR flags;
1968 m_testDefinition->gopFrameCount(), // uint32_t gopFrameCount;
1969 m_testDefinition->gopFrameCount(), // uint32_t idrPeriod;
1970 m_testDefinition->getConsecutiveBFrameCount(), // uint32_t consecutiveBFrameCount;
1971 1, // uint32_t temporalLayerCount;
1972 };
1973
1974 const VkVideoEncodeH265RateControlInfoKHR videoEncodeH265RateControlInfo = {
1975 VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_INFO_KHR, // VkStructureType sType;
1976 DE_NULL, // const void* pNext;
1977 VK_VIDEO_ENCODE_H265_RATE_CONTROL_REGULAR_GOP_BIT_KHR, // VkVideoEncodeH265RateControlFlagsKHR flags;
1978 m_testDefinition->gopFrameCount(), // uint32_t gopFrameCount;
1979 m_testDefinition->gopFrameCount(), // uint32_t idrPeriod;
1980 m_testDefinition->getConsecutiveBFrameCount(), // uint32_t consecutiveBFrameCount;
1981 (useConstantBitrate || useVariableBitrate) ? 1U : 0, // uint32_t subLayerCount;
1982 };
1983
1984 const void *videoEncodeRateControlInfoPtr = DE_NULL;
1985
1986 if (m_testDefinition->getProfile()->IsH264())
1987 {
1988 videoEncodeRateControlInfoPtr = static_cast<const void *>(&videoEncodeH264RateControlInfo);
1989 }
1990 else if (m_testDefinition->getProfile()->IsH265())
1991 {
1992 videoEncodeRateControlInfoPtr = static_cast<const void *>(&videoEncodeH265RateControlInfo);
1993 }
1994 DE_ASSERT(videoEncodeRateControlInfoPtr);
1995
1996 const de::MovePtr<VkVideoEncodeRateControlInfoKHR> videoEncodeRateControlInfo = getVideoEncodeRateControlInfo(
1997 !disableRateControl ? videoEncodeRateControlInfoPtr : DE_NULL, rateControlMode,
1998 (useConstantBitrate || useVariableBitrate) ? videoEncodeRateControlLayerInfo.get() : DE_NULL);
1999 // End coding
2000 const VkVideoEndCodingInfoKHR videoEndCodingInfo = {
2001 VK_STRUCTURE_TYPE_VIDEO_END_CODING_INFO_KHR, // VkStructureType sType;
2002 DE_NULL, // const void* pNext;
2003 0u, // VkVideoEndCodingFlagsKHR flags;
2004 };
2005
2006 std::vector<de::MovePtr<StdVideoEncodeH264SliceHeader>> stdVideoEncodeH264SliceHeaders;
2007 std::vector<de::MovePtr<VkVideoEncodeH264NaluSliceInfoKHR>> videoEncodeH264NaluSlices;
2008 std::vector<de::MovePtr<StdVideoEncodeH264ReferenceListsInfo>> videoEncodeH264ReferenceListInfos;
2009 std::vector<de::MovePtr<StdVideoEncodeH264PictureInfo>> H264pictureInfos;
2010 std::vector<de::MovePtr<VkVideoEncodeH264PictureInfoKHR>> videoEncodeH264PictureInfo;
2011
2012 std::vector<de::MovePtr<StdVideoEncodeH265SliceSegmentHeader>> stdVideoEncodeH265SliceSegmentHeaders;
2013 std::vector<de::MovePtr<StdVideoH265ShortTermRefPicSet>> stdVideoH265ShortTermRefPicSets;
2014 std::vector<de::MovePtr<VkVideoEncodeH265NaluSliceSegmentInfoKHR>> videoEncodeH265NaluSliceSegments;
2015 std::vector<de::MovePtr<StdVideoEncodeH265ReferenceListsInfo>> videoEncodeH265ReferenceListInfos;
2016 std::vector<de::MovePtr<StdVideoEncodeH265PictureInfo>> H265pictureInfos;
2017 std::vector<de::MovePtr<VkVideoEncodeH265PictureInfoKHR>> videoEncodeH265PictureInfos;
2018
2019 std::vector<de::MovePtr<VkVideoEncodeInfoKHR>> videoEncodeFrameInfos;
2020
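// Per-frame encode loop: for every NAL unit the test begins video coding against the
// session (and the per-GOP session parameters when the resolution changes), issues the
// RESET plus optional rate-control and quality-level control commands when recording
// frame 0 of a GOP, fills in the codec-specific picture, slice and reference-list
// structures, records vkCmdEncodeVideoKHR bracketed by a feedback query unless inline
// queries are used, and submits the work immediately unless SwapOrder defers submission
// until both command buffers are recorded.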
2021 for (uint16_t GOPIdx = 0; GOPIdx < gopCount; ++GOPIdx)
2022 {
2023 uint32_t emptyRefSlotIdx = swapOrder ? 1 : 0;
2024
2025 if (resolutionChange && GOPIdx == 1)
2026 {
2027 // Pre-fill the buffer with the new SPS/PPS/VPS headers
2028 fillBuffer(videoDeviceDriver, videoDevice, encodeBufferAlloc, headersData[1].data(), headersData[1].size(),
2029 bitstreamBufferOffset);
2030 bitstreamBufferOffset =
2031 deAlign64(bitstreamBufferOffset + headersData[1].size(), minBitstreamBufferOffsetAlignment);
2032 }
2033
2034 for (uint32_t NALIdx = emptyRefSlotIdx; NALIdx < gopFrameCount; (swapOrder ? --NALIdx : ++NALIdx))
2035 {
2036 VkCommandBuffer encodeCmdBuffer =
2037 (NALIdx == 1 && swapOrder) ? *secondEncodeCmdBuffer : *firstEncodeCmdBuffer;
2038
2039 beginCommandBuffer(videoDeviceDriver, encodeCmdBuffer, 0u);
2040
2041 videoDeviceDriver.cmdResetQueryPool(encodeCmdBuffer, encodeQueryPool.get(), 0, 2);
2042
2043 de::MovePtr<VkVideoBeginCodingInfoKHR> videoBeginCodingFrameInfoKHR = getVideoBeginCodingInfo(
2044 *videoEncodeSession,
2045 resolutionChange ? videoEncodeSessionParameters[GOPIdx].get() : videoEncodeSessionParameters[0].get(),
2046 dpbSlots, &dpbImageVideoReferenceSlots[0],
2047 (rateControl && NALIdx > 0) ? videoEncodeRateControlInfo.get() : DE_NULL);
2048
2049 videoDeviceDriver.cmdBeginVideoCodingKHR(encodeCmdBuffer, videoBeginCodingFrameInfoKHR.get());
2050
2051 if (NALIdx == 0)
2052 {
2053 de::MovePtr<VkVideoCodingControlInfoKHR> resetVideoEncodingControl =
2054 getVideoCodingControlInfo(VK_VIDEO_CODING_CONTROL_RESET_BIT_KHR);
2055 videoDeviceDriver.cmdControlVideoCodingKHR(encodeCmdBuffer, resetVideoEncodingControl.get());
2056
2057 if (rateControl)
2058 {
2059 de::MovePtr<VkVideoCodingControlInfoKHR> videoRateControlInfo = getVideoCodingControlInfo(
2060 VK_VIDEO_CODING_CONTROL_ENCODE_RATE_CONTROL_BIT_KHR, videoEncodeRateControlInfo.get());
2061 videoDeviceDriver.cmdControlVideoCodingKHR(encodeCmdBuffer, videoRateControlInfo.get());
2062 }
2063 if (useQualityLevel)
2064 {
2065 de::MovePtr<VkVideoCodingControlInfoKHR> videoQualityControlInfo = getVideoCodingControlInfo(
2066 VK_VIDEO_CODING_CONTROL_ENCODE_QUALITY_LEVEL_BIT_KHR, videoEncodeQualityLevelInfo.get());
2067 videoDeviceDriver.cmdControlVideoCodingKHR(encodeCmdBuffer, videoQualityControlInfo.get());
2068 }
2069 }
2070
2071 StdVideoH264PictureType stdVideoH264PictureType = getH264PictureType(m_testDefinition->frameType(NALIdx));
2072 StdVideoH265PictureType stdVideoH265PictureType = getH265PictureType(m_testDefinition->frameType(NALIdx));
2073
2074 StdVideoH264SliceType stdVideoH264SliceType = getH264SliceType(m_testDefinition->frameType(NALIdx));
2075 StdVideoH265SliceType stdVideoH265SliceType = getH265SliceType(m_testDefinition->frameType(NALIdx));
2076
2077 uint32_t refsPool = 0;
2078
2079 uint8_t H264RefPicList0[STD_VIDEO_H264_MAX_NUM_LIST_REF];
2080 uint8_t H265RefPicList0[STD_VIDEO_H265_MAX_NUM_LIST_REF];
2081
2082 std::fill(H264RefPicList0, H264RefPicList0 + STD_VIDEO_H264_MAX_NUM_LIST_REF,
2083 STD_VIDEO_H264_NO_REFERENCE_PICTURE);
2084 std::fill(H265RefPicList0, H265RefPicList0 + STD_VIDEO_H265_MAX_NUM_LIST_REF,
2085 STD_VIDEO_H265_NO_REFERENCE_PICTURE);
2086
2087 uint8_t numL0 = 0;
2088 uint8_t numL1 = 0;
2089
2090 bool pType = stdVideoH264PictureType == STD_VIDEO_H264_PICTURE_TYPE_P ||
2091 stdVideoH265PictureType == STD_VIDEO_H265_PICTURE_TYPE_P;
2092 bool bType = stdVideoH264PictureType == STD_VIDEO_H264_PICTURE_TYPE_B ||
2093 stdVideoH265PictureType == STD_VIDEO_H265_PICTURE_TYPE_B;
2094
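// refsPool selects which reference lists the current frame consumes:
//   0 - intra frame, no reference slots are passed to the encode
//   1 - P frame, list L0 only
//   2 - B frame, lists L0 and L1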
2095 if (pType)
2096 {
2097 refsPool = 1;
2098
2099 std::vector<uint8_t> list0 = m_testDefinition->ref0(NALIdx);
2100 for (auto idx : list0)
2101 {
2102 H264RefPicList0[numL0] = idx;
2103 H265RefPicList0[numL0++] = idx;
2104 }
2105 }
2106
2107 uint8_t H264RefPicList1[STD_VIDEO_H264_MAX_NUM_LIST_REF];
2108 uint8_t H265RefPicList1[STD_VIDEO_H265_MAX_NUM_LIST_REF];
2109
2110 std::fill(H264RefPicList1, H264RefPicList1 + STD_VIDEO_H264_MAX_NUM_LIST_REF,
2111 STD_VIDEO_H264_NO_REFERENCE_PICTURE);
2112 std::fill(H265RefPicList1, H265RefPicList1 + STD_VIDEO_H265_MAX_NUM_LIST_REF,
2113 STD_VIDEO_H265_NO_REFERENCE_PICTURE);
2114
2115 if (bType)
2116 {
2117 refsPool = 2;
2118
2119 std::vector<uint8_t> list0 = m_testDefinition->ref0(NALIdx);
2120 for (auto idx : list0)
2121 {
2122 H264RefPicList0[numL0] = idx;
2123 H265RefPicList0[numL0++] = idx;
2124 }
2125
2126 std::vector<uint8_t> list1 = m_testDefinition->ref1(NALIdx);
2127 for (auto idx : list1)
2128 {
2129 H264RefPicList1[numL1] = idx;
2130 H265RefPicList1[numL1++] = idx;
2131 }
2132 }
2133
2134 bool h264ActiveOverrideFlag =
2135 (stdVideoH264SliceType != STD_VIDEO_H264_SLICE_TYPE_I) &&
2136 ((m_testDefinition->ppsActiveRefs0() != m_testDefinition->shActiveRefs0(NALIdx)) ||
2137 (m_testDefinition->ppsActiveRefs1() != m_testDefinition->shActiveRefs1(NALIdx)));
2138
2139 stdVideoEncodeH264SliceHeaders.push_back(
2140 getStdVideoEncodeH264SliceHeader(stdVideoH264SliceType, h264ActiveOverrideFlag));
2141 videoEncodeH264NaluSlices.push_back(getVideoEncodeH264NaluSlice(
2142 stdVideoEncodeH264SliceHeaders.back().get(),
2143 rateControlMode == VK_VIDEO_ENCODE_RATE_CONTROL_MODE_DISABLED_BIT_KHR ? constQp : 0));
2144 videoEncodeH264ReferenceListInfos.push_back(
2145 getVideoEncodeH264ReferenceListsInfo(H264RefPicList0, H264RefPicList1, numL0, numL1));
2146 H264pictureInfos.push_back(getStdVideoEncodeH264PictureInfo(
2147 getH264PictureType(m_testDefinition->frameType(NALIdx)), m_testDefinition->frameNumber(NALIdx),
2148 m_testDefinition->frameIdx(NALIdx) * 2, GOPIdx,
2149 NALIdx > 0 ? videoEncodeH264ReferenceListInfos.back().get() : DE_NULL));
2150 videoEncodeH264PictureInfo.push_back(
2151 getVideoEncodeH264PictureInfo(H264pictureInfos.back().get(), videoEncodeH264NaluSlices.back().get()));
2152
2153 stdVideoEncodeH265SliceSegmentHeaders.push_back(
2154 getStdVideoEncodeH265SliceSegmentHeader(stdVideoH265SliceType));
2155 videoEncodeH265NaluSliceSegments.push_back(getVideoEncodeH265NaluSliceSegment(
2156 stdVideoEncodeH265SliceSegmentHeaders.back().get(),
2157 rateControlMode == VK_VIDEO_ENCODE_RATE_CONTROL_MODE_DISABLED_BIT_KHR ? constQp : 0));
2158 videoEncodeH265ReferenceListInfos.push_back(
2159 getVideoEncodeH265ReferenceListsInfo(H265RefPicList0, H265RefPicList1));
2160 stdVideoH265ShortTermRefPicSets.push_back(getStdVideoH265ShortTermRefPicSet(
2161 getH265PictureType(m_testDefinition->frameType(NALIdx)), m_testDefinition->frameIdx(NALIdx),
2162 m_testDefinition->getConsecutiveBFrameCount()));
2163 H265pictureInfos.push_back(getStdVideoEncodeH265PictureInfo(
2164 getH265PictureType(m_testDefinition->frameType(NALIdx)), m_testDefinition->frameIdx(NALIdx),
2165 NALIdx > 0 ? videoEncodeH265ReferenceListInfos.back().get() : DE_NULL,
2166 stdVideoH265ShortTermRefPicSets.back().get()));
2167 videoEncodeH265PictureInfos.push_back(getVideoEncodeH265PictureInfo(
2168 H265pictureInfos.back().get(), videoEncodeH265NaluSliceSegments.back().get()));
2169
2170 const void *videoEncodePictureInfoPtr = DE_NULL;
2171
2172 if (m_testDefinition->getProfile()->IsH264())
2173 {
2174 videoEncodePictureInfoPtr = static_cast<const void *>(videoEncodeH264PictureInfo.back().get());
2175 }
2176 else if (m_testDefinition->getProfile()->IsH265())
2177 {
2178 videoEncodePictureInfoPtr = static_cast<const void *>(videoEncodeH265PictureInfos.back().get());
2179 }
2180 DE_ASSERT(videoEncodePictureInfoPtr);
2181
2182 VkVideoReferenceSlotInfoKHR *setupReferenceSlotPtr = DE_NULL;
2183
2184 int8_t curSlotIdx = m_testDefinition->curSlot(NALIdx);
2185 if (!bType)
2186 {
2187 setupReferenceSlotPtr = &dpbImageVideoReferenceSlots[curSlotIdx];
2188 setupReferenceSlotPtr->slotIndex = curSlotIdx;
2189 }
2190
2191 int32_t startRefSlot = refsPool == 0 ? -1 : m_testDefinition->refSlots(NALIdx)[0];
2192 VkVideoReferenceSlotInfoKHR *referenceSlots =
2193 &dpbImageVideoReferenceSlots[separateReferenceImages && startRefSlot > -1 ? startRefSlot : 0];
2194 uint8_t refsCount = m_testDefinition->refsCount(NALIdx);
2195 uint32_t srcPictureResourceIdx = (GOPIdx * gopFrameCount) + m_testDefinition->frameIdx(NALIdx);
2196
2197 VkDeviceSize dstBufferOffset;
2198
2199 // Due to the inverted command order, dstBufferOffset for the P frame is unknown at recording time, so set the offset to a "safe" value
2200 if (swapOrder)
2201 {
2202 if (NALIdx == 0)
2203 {
2204 dstBufferOffset = deAlign64(256, minBitstreamBufferOffsetAlignment);
2205 }
2206 else
2207 {
2208 dstBufferOffset = deAlign64(encodeFrameBufferSizeAligned + 256, minBitstreamBufferOffsetAlignment);
2209 }
2210 }
2211 else
2212 {
2213 dstBufferOffset = bitstreamBufferOffset;
2214 }
2215
2216 const void *pNext = DE_NULL;
2217 de::MovePtr<VkVideoInlineQueryInfoKHR> inlineQueryInfo =
2218 getVideoInlineQueryInfo(encodeQueryPool.get(), 0, 1, videoEncodePictureInfoPtr);
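// With inline queries (VK_KHR_video_maintenance1) the VkVideoInlineQueryInfoKHR is
// chained in front of the codec picture info handed to getVideoInlineQueryInfo above,
// so vkCmdEncodeVideoKHR writes the feedback itself and the explicit cmdBeginQuery /
// cmdEndQuery pair further below is skipped.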
2219
2220 if (useInlineQueries)
2221 {
2222 pNext = inlineQueryInfo.get();
2223 }
2224 else
2225 {
2226 pNext = videoEncodePictureInfoPtr;
2227 }
2228
2229 videoEncodeFrameInfos.push_back(getVideoEncodeInfo(
2230 pNext, *encodeBuffer, dstBufferOffset, (*imagePictureResourceVector[srcPictureResourceIdx]),
2231 setupReferenceSlotPtr, refsCount, (refsPool == 0) ? DE_NULL : referenceSlots));
2232
2233 if (!useInlineQueries)
2234 videoDeviceDriver.cmdBeginQuery(encodeCmdBuffer, encodeQueryPool.get(), 1, 0);
2235
2236 videoDeviceDriver.cmdEncodeVideoKHR(encodeCmdBuffer, videoEncodeFrameInfos.back().get());
2237
2238 if (!useInlineQueries)
2239 videoDeviceDriver.cmdEndQuery(encodeCmdBuffer, encodeQueryPool.get(), 1);
2240 videoDeviceDriver.cmdEndVideoCodingKHR(encodeCmdBuffer, &videoEndCodingInfo);
2241
2242 endCommandBuffer(videoDeviceDriver, encodeCmdBuffer);
2243
2244 if (!swapOrder)
2245 {
2246 submitCommandsAndWait(videoDeviceDriver, videoDevice, encodeQueue, encodeCmdBuffer);
2247
2248 if (!useInlineQueries)
2249 if (!processQueryPoolResults(videoDeviceDriver, videoDevice, encodeQueryPool.get(),
2250 bitstreamBufferOffset, minBitstreamBufferOffsetAlignment, queryStatus))
2251 return tcu::TestStatus::fail("Unexpected query result status");
2252 }
2253
2254 if (!bType)
2255 {
2256 if (swapOrder)
2257 emptyRefSlotIdx--;
2258 else
2259 emptyRefSlotIdx++;
2260 }
2261 }
2262 }
2263
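// SwapOrder submission: the P frame was recorded first (into the second command buffer)
// but must execute after the I frame, so the I-frame submission signals a semaphore the
// P-frame submission waits on; query results are read back after each fence completes.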
2264 if (swapOrder)
2265 {
2266 Move<VkSemaphore> frameEncodedSemaphore = createSemaphore(videoDeviceDriver, videoDevice);
2267 const VkPipelineStageFlags waitDstStageMask = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
2268
2269 const auto firstCommandFence =
2270 submitCommands(videoDeviceDriver, videoDevice, encodeQueue, *firstEncodeCmdBuffer, false, 1U, 0, nullptr,
2271 nullptr, 1, &frameEncodedSemaphore.get());
2272 waitForFence(videoDeviceDriver, videoDevice, *firstCommandFence);
2273
2274 if (!processQueryPoolResults(videoDeviceDriver, videoDevice, encodeQueryPool.get(), bitstreamBufferOffset,
2275 minBitstreamBufferOffsetAlignment, queryStatus))
2276 return tcu::TestStatus::fail("Unexpected query result status");
2277
2278 const auto secondCommandFence =
2279 submitCommands(videoDeviceDriver, videoDevice, encodeQueue, *secondEncodeCmdBuffer, false, 1U, 1,
2280 &frameEncodedSemaphore.get(), &waitDstStageMask);
2281 waitForFence(videoDeviceDriver, videoDevice, *secondCommandFence);
2282
2283 if (!processQueryPoolResults(videoDeviceDriver, videoDevice, encodeQueryPool.get(), bitstreamBufferOffset,
2284 minBitstreamBufferOffsetAlignment, queryStatus))
2285 return tcu::TestStatus::fail("Unexpected query result status");
2286 }
2287
2288 #if STREAM_DUMP_DEBUG
2289 if (m_testDefinition->getProfile()->IsH264())
2290 {
2291 saveBufferAsFile(encodeBuffer, encodeBufferSize, "out.h264");
2292 }
2293 else if (m_testDefinition->getProfile()->IsH265())
2294 {
2295 saveBufferAsFile(encodeBuffer, encodeBufferSize, "out.h265");
2296 }
2297 #endif
2298
2299 // Vulkan video is not supported on the Android platform, so all external
2300 // libraries, helper functions and test instances have been excluded from that build
2301 #ifdef DE_BUILD_VIDEO
2302 DeviceContext deviceContext(&m_context, &m_videoDevice, physicalDevice, videoDevice, decodeQueue, encodeQueue,
2303 transferQueue);
2304
2305 const Unique<VkCommandPool> decodeCmdPool(makeCommandPool(videoDeviceDriver, videoDevice, decodeQueueFamilyIndex));
2306 const Unique<VkCommandBuffer> decodeCmdBuffer(
2307 allocateCommandBuffer(videoDeviceDriver, videoDevice, *decodeCmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
2308
2309 uint32_t H264profileIdc = STD_VIDEO_H264_PROFILE_IDC_MAIN;
2310 uint32_t H265profileIdc = STD_VIDEO_H265_PROFILE_IDC_MAIN;
2311
2312 uint32_t profileIdc = 0;
2313
2314 if (m_testDefinition->getProfile()->IsH264())
2315 {
2316 profileIdc = H264profileIdc;
2317 }
2318 else if (m_testDefinition->getProfile()->IsH265())
2319 {
2320 profileIdc = H265profileIdc;
2321 }
2322 DE_ASSERT(profileIdc);
2323
2324 auto decodeProfile =
2325 VkVideoCoreProfile(videoCodecDecodeOperation, VK_VIDEO_CHROMA_SUBSAMPLING_420_BIT_KHR,
2326 VK_VIDEO_COMPONENT_BIT_DEPTH_8_BIT_KHR, VK_VIDEO_COMPONENT_BIT_DEPTH_8_BIT_KHR, profileIdc);
2327 auto basicDecoder =
2328 createBasicDecoder(&deviceContext, &decodeProfile, m_testDefinition->framesToCheck(), resolutionChange);
2329
2330 Demuxer::Params demuxParams = {};
2331 demuxParams.data = std::make_unique<BufferedReader>(
2332 static_cast<const char *>(encodeBuffer.getAllocation().getHostPtr()), encodeBufferSize);
2333 demuxParams.codecOperation = videoCodecDecodeOperation;
2334 demuxParams.framing = ElementaryStreamFraming::H26X_BYTE_STREAM;
2335 auto demuxer = Demuxer::create(std::move(demuxParams));
2336 VkVideoParser parser;
2337 // TODO: Check for decoder extension support before attempting validation!
2338 createParser(demuxer->codecOperation(), basicDecoder, parser, demuxer->framing());
2339
2340 FrameProcessor processor(std::move(demuxer), basicDecoder);
2341 std::vector<int> incorrectFrames;
2342 std::vector<int> correctFrames;
2343
2344 for (int NALIdx = 0; NALIdx < m_testDefinition->framesToCheck(); NALIdx++)
2345 {
2346 DecodedFrame frame;
2347 TCU_CHECK_AND_THROW(
2348 InternalError, processor.getNextFrame(&frame) > 0,
2349 "Expected more frames from the bitstream. Most likely an internal CTS bug, or maybe an invalid bitstream");
2350
2351 auto resultImage =
2352 getDecodedImageFromContext(deviceContext,
2353 basicDecoder->dpbAndOutputCoincide() ? VK_IMAGE_LAYOUT_VIDEO_DECODE_DPB_KHR :
2354 VK_IMAGE_LAYOUT_VIDEO_DECODE_DST_KHR,
2355 &frame);
2356 de::MovePtr<std::vector<uint8_t>> out = saveNV12FrameAsYUV(resultImage.get());
2357
2358 #if STREAM_DUMP_DEBUG
2359 const string outputFileName = "out_" + std::to_string(NALIdx) + ".yuv";
2360 saveYUVfile(out, outputFileName);
2361 #endif
2362 double psnr = util::PSNR(*inVector[NALIdx], *out);
2363
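// PSNR acceptance criteria: at least 30 dB is expected with rate control active and
// 20 dB with rate control disabled (constant QP); frames between 10 dB and the
// applicable limit only raise a quality warning, while 10 dB or less fails the test.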
2364 double higherPsnrThreshold = 30.0;
2365 double lowerPsnrThreshold = 20.0;
2366 double criticalPsnrThreshold = 10.0;
2367 double psnrThresholdLowerLimit = disableRateControl ? lowerPsnrThreshold : higherPsnrThreshold;
2368 string failMessage;
2369
2370 if (psnr < psnrThresholdLowerLimit)
2371 {
2372 double difference = psnrThresholdLowerLimit - psnr;
2373
2374 if (psnr > criticalPsnrThreshold)
2375 {
2376 failMessage = "Frame " + std::to_string(NALIdx) + " with PSNR " + std::to_string(psnr) + " is " +
2377 std::to_string(difference) + " points below the lower threshold";
2378 return tcu::TestStatus(QP_TEST_RESULT_QUALITY_WARNING, failMessage);
2379 }
2380 else
2381 {
2382 failMessage = "Frame " + std::to_string(NALIdx) + " with PSNR " + std::to_string(psnr) + " is " +
2383 std::to_string(difference) + " points below the critical threshold";
2384 return tcu::TestStatus::fail(failMessage);
2385 }
2386 }
2387 }
2388 const string passMessage = std::to_string(m_testDefinition->framesToCheck()) + " correctly encoded frames";
2389 return tcu::TestStatus::pass(passMessage);
2390
2391 #else
2392 DE_UNREF(transferQueue);
2393 DE_UNREF(decodeQueue);
2394 TCU_THROW(NotSupportedError, "Vulkan video is not supported on android platform");
2395 #endif
2396 }
2397
2398 class VideoEncodeTestCase : public TestCase
2399 {
2400 public:
2401 VideoEncodeTestCase(tcu::TestContext &context, const char *name, MovePtr<TestDefinition> testDefinition);
2402 ~VideoEncodeTestCase(void);
2403
2404 virtual TestInstance *createInstance(Context &context) const;
2405 virtual void checkSupport(Context &context) const;
2406
2407 private:
2408 MovePtr<TestDefinition> m_testDefinition;
2409 };
2410
2411 VideoEncodeTestCase::VideoEncodeTestCase(tcu::TestContext &context, const char *name,
2412 MovePtr<TestDefinition> testDefinition)
2413 : vkt::TestCase(context, name)
2414 , m_testDefinition(testDefinition)
2415 {
2416 }
2417
2418 VideoEncodeTestCase::~VideoEncodeTestCase(void)
2419 {
2420 }
2421
2422 void VideoEncodeTestCase::checkSupport(Context &context) const
2423 {
2424 context.requireDeviceFunctionality("VK_KHR_video_queue");
2425 context.requireDeviceFunctionality("VK_KHR_synchronization2");
2426 context.requireDeviceFunctionality("VK_KHR_video_encode_queue");
2427
2428 switch (m_testDefinition->getTestType())
2429 {
2430 case TEST_TYPE_H264_ENCODE_I:
2431 case TEST_TYPE_H264_ENCODE_RC_VBR:
2432 case TEST_TYPE_H264_ENCODE_RC_CBR:
2433 case TEST_TYPE_H264_ENCODE_RC_DISABLE:
2434 case TEST_TYPE_H264_ENCODE_QUALITY_LEVEL:
2435 case TEST_TYPE_H264_ENCODE_USAGE:
2436 case TEST_TYPE_H264_ENCODE_I_P:
2437 case TEST_TYPE_H264_ENCODE_I_P_NOT_MATCHING_ORDER:
2438 case TEST_TYPE_H264_I_P_B_13:
2439 case TEST_TYPE_H264_ENCODE_RESOLUTION_CHANGE_DPB:
2440 case TEST_TYPE_H264_ENCODE_QUERY_RESULT_WITH_STATUS:
2441 context.requireDeviceFunctionality("VK_KHR_video_encode_h264");
2442 break;
2443 case TEST_TYPE_H264_ENCODE_INLINE_QUERY:
2444 case TEST_TYPE_H264_ENCODE_RESOURCES_WITHOUT_PROFILES:
2445 context.requireDeviceFunctionality("VK_KHR_video_encode_h264");
2446 context.requireDeviceFunctionality("VK_KHR_video_maintenance1");
2447 break;
2448 case TEST_TYPE_H265_ENCODE_I:
2449 case TEST_TYPE_H265_ENCODE_RC_VBR:
2450 case TEST_TYPE_H265_ENCODE_RC_CBR:
2451 case TEST_TYPE_H265_ENCODE_RC_DISABLE:
2452 case TEST_TYPE_H265_ENCODE_QUALITY_LEVEL:
2453 case TEST_TYPE_H265_ENCODE_USAGE:
2454 case TEST_TYPE_H265_ENCODE_I_P:
2455 case TEST_TYPE_H265_ENCODE_I_P_NOT_MATCHING_ORDER:
2456 case TEST_TYPE_H265_I_P_B_13:
2457 case TEST_TYPE_H265_ENCODE_RESOLUTION_CHANGE_DPB:
2458 case TEST_TYPE_H265_ENCODE_QUERY_RESULT_WITH_STATUS:
2459 context.requireDeviceFunctionality("VK_KHR_video_encode_h265");
2460 break;
2461 case TEST_TYPE_H265_ENCODE_INLINE_QUERY:
2462 case TEST_TYPE_H265_ENCODE_RESOURCES_WITHOUT_PROFILES:
2463 context.requireDeviceFunctionality("VK_KHR_video_encode_h265");
2464 context.requireDeviceFunctionality("VK_KHR_video_maintenance1");
2465 break;
2466 default:
2467 TCU_THROW(InternalError, "Unknown TestType");
2468 }
2469 }
2470
2471 TestInstance *VideoEncodeTestCase::createInstance(Context &context) const
2472 {
2473 #ifdef DE_BUILD_VIDEO
2474 return new VideoEncodeTestInstance(context, m_testDefinition.get());
2475 #else
2476 DE_UNREF(context);
2477 return nullptr;
2478 #endif
2479 }
2480
2481 } // namespace
2482
2483 tcu::TestCaseGroup *createVideoEncodeTests(tcu::TestContext &testCtx)
2484 {
2485 MovePtr<tcu::TestCaseGroup> group(new tcu::TestCaseGroup(testCtx, "encode", "Video encoding session tests"));
2486
2487 for (const auto &encodeTest : g_EncodeTests)
2488 {
2489 auto defn = TestDefinition::create(encodeTest);
2490
2491 const char *testName = getTestName(defn->getTestType());
2492 group->addChild(new VideoEncodeTestCase(testCtx, testName, defn));
2493 }
2494
2495 return group.release();
2496 }
2497 } // namespace video
2498 } // namespace vkt
2499