xref: /aosp_15_r20/cts/tests/tests/media/codec/src/android/media/codec/cts/VideoCodecTestBase.java (revision b7c941bb3fa97aba169d73cee0bed2de8ac964bf)
1 /*
2  * Copyright (C) 2013 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 package android.media.codec.cts;
18 
19 import static org.junit.Assert.assertTrue;
20 import static org.junit.Assume.assumeTrue;
21 
22 import android.media.MediaCodec;
23 import android.media.MediaCodec.CodecException;
24 import android.media.MediaCodecInfo;
25 import android.media.MediaCodecInfo.CodecCapabilities;
26 import android.media.MediaCodecList;
27 import android.media.MediaFormat;
28 import android.media.cts.MediaCodecWrapper;
29 import android.media.cts.NdkMediaCodec;
30 import android.media.cts.SdkMediaCodec;
31 import android.os.Bundle;
32 import android.os.Environment;
33 import android.os.Handler;
34 import android.os.Looper;
35 import android.platform.test.annotations.AppModeFull;
36 import android.util.Log;
37 
38 import com.android.compatibility.common.util.MediaUtils;
39 import com.android.compatibility.common.util.Preconditions;
40 
41 import java.io.File;
42 import java.io.FileInputStream;
43 import java.io.FileOutputStream;
44 import java.io.InputStream;
45 import java.nio.ByteBuffer;
46 import java.util.ArrayList;
47 import java.util.Locale;
48 import java.util.concurrent.Callable;
49 import java.util.concurrent.CountDownLatch;
50 
51 /**
52  * Verification test for video encoder and decoder.
53  *
54  * A raw yv12 stream is encoded at various settings and written to an IVF
55  * file. Encoded stream bitrate and key frame interval are checked against target values.
56  * The stream is later decoded by the decoder to verify frames are decodable and to
57  * calculate PSNR values for various bitrates.
58  */
59 @AppModeFull(reason = "Instant apps cannot access the SD card")
60 public class VideoCodecTestBase {
61 
62     protected static final String TAG = "VideoCodecTestBase";
63     protected static final String VP8_MIME = MediaFormat.MIMETYPE_VIDEO_VP8;
64     protected static final String VP9_MIME = MediaFormat.MIMETYPE_VIDEO_VP9;
65     protected static final String AVC_MIME = MediaFormat.MIMETYPE_VIDEO_AVC;
66     protected static final String HEVC_MIME = MediaFormat.MIMETYPE_VIDEO_HEVC;
67     protected static final String AV1_MIME = MediaFormat.MIMETYPE_VIDEO_AV1;
68     protected static final String SDCARD_DIR =
69             Environment.getExternalStorageDirectory().getAbsolutePath();
70     static final String mInpPrefix = WorkDir.getMediaDirString();
71 
72     // Default timeout for MediaCodec buffer dequeue - 200 ms.
73     protected static final long DEFAULT_DEQUEUE_TIMEOUT_US = 200000;
74     // Default timeout for MediaEncoderAsync - 30 sec.
75     protected static final long DEFAULT_ENCODE_TIMEOUT_MS = 30000;
76     // Default sync frame interval in frames
77     private static final int SYNC_FRAME_INTERVAL = 30;
78     // Video bitrate type - should be set to OMX_Video_ControlRateConstant from OMX_Video.h
79     protected static final int VIDEO_ControlRateVariable = 1;
80     protected static final int VIDEO_ControlRateConstant = 2;
81     // NV12 color format supported by QCOM codec, but not declared in MediaCodec -
82     // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
83     private static final int COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
84     // Allowable color formats supported by codec - in order of preference.
85     private static final int[] mSupportedColorList = {
86             CodecCapabilities.COLOR_FormatYUV420Planar,
87             CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
88             CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
89             COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m
90     };
91     // Scaled image cache list - contains scale factors, for which up-scaled frames
92     // were calculated and were written to yuv file.
93     ArrayList<Integer> mScaledImages = new ArrayList<Integer>();
94 
95     /**
96      *  Video codec properties generated by getVideoCodecProperties() function.
97      */
98     private class CodecProperties {
CodecProperties(String codecName, int colorFormat)99         CodecProperties(String codecName, int colorFormat) {
100             this.codecName = codecName;
101             this.colorFormat = colorFormat;
102         }
103         public final String codecName; // OpenMax component name for Video codec.
104         public final int colorFormat;  // Color format supported by codec.
105     }
106 
107     /**
108      * Function to find Video codec.
109      *
110      * Iterates through the list of available codecs and tries to find
111      * Video codec, which can support either YUV420 planar or NV12 color formats.
112      *
113      * @param isEncoder     Flag if encoder is requested.
114      */
getVideoCodecProperties(boolean isEncoder, MediaFormat format)115     private CodecProperties getVideoCodecProperties(boolean isEncoder, MediaFormat format)
116             throws Exception {
117         CodecProperties codecProperties = null;
118         String mime = format.getString(MediaFormat.KEY_MIME);
119 
120         // Loop through the list of codec components in case platform specific codec
121         // is requested.
122         MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
123         for (MediaCodecInfo codecInfo : mcl.getCodecInfos()) {
124             if (isEncoder != codecInfo.isEncoder()) {
125                 continue;
126             }
127             Log.v(TAG, codecInfo.getName());
128 
129             for (String type : codecInfo.getSupportedTypes()) {
130                 if (!type.equalsIgnoreCase(mime)) {
131                     continue;
132                 }
133                 CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(type);
134                 if (!capabilities.isFormatSupported(format)) {
135                     continue;
136                 }
137 
138                 // Get candidate codec properties.
139                 Log.v(TAG, "Found candidate codec " + codecInfo.getName());
140                 for (int colorFormat: capabilities.colorFormats) {
141                     Log.v(TAG, "   Color: 0x" + Integer.toHexString(colorFormat));
142                 }
143 
144                 // Check supported color formats.
145                 for (int supportedColorFormat : mSupportedColorList) {
146                     for (int codecColorFormat : capabilities.colorFormats) {
147                         if (codecColorFormat == supportedColorFormat) {
148                             codecProperties = new CodecProperties(codecInfo.getName(),
149                                     codecColorFormat);
150                             Log.v(TAG, "Found target codec " + codecProperties.codecName +
151                                     ". Color: 0x" + Integer.toHexString(codecColorFormat));
152                             // return first vendor codec (hopefully HW) found
153                             if (codecInfo.isVendor()) {
154                                 return codecProperties;
155                             }
156                         }
157                     }
158                 }
159             }
160         }
161         if (codecProperties == null) {
162             Log.i(TAG, "no suitable " + (isEncoder ? "encoder " : "decoder ") + "found for " +
163                     format);
164         }
165         return codecProperties;
166     }
167 
getEncoderProperties(String codecName, MediaFormat format)168     private CodecProperties getEncoderProperties(String codecName, MediaFormat format)
169             throws Exception {
170         assumeTrue("Media format " + format + " is not supported by " + codecName,
171                 MediaUtils.supports(codecName, format));
172         CodecProperties codecProperties = null;
173         MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
174         for (MediaCodecInfo codecInfo : mcl.getCodecInfos()) {
175             if (!codecInfo.isEncoder() || !codecName.equals(codecInfo.getName())) {
176                 continue;
177             }
178             Log.v(TAG, codecInfo.getName());
179             String mime = format.getString(MediaFormat.KEY_MIME);
180             Log.d(TAG, "Name : " + codecInfo.getName() + " mime: " + mime);
181             CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(mime);
182             for (int supportedColorFormat : mSupportedColorList) {
183                 for (int codecColorFormat : capabilities.colorFormats) {
184                     if (codecColorFormat == supportedColorFormat) {
185                         codecProperties = new CodecProperties(codecInfo.getName(),
186                                 codecColorFormat);
187                         Log.v(TAG, "Found target codec " + codecProperties.codecName +
188                                 ". Color: 0x" + Integer.toHexString(codecColorFormat));
189                         return codecProperties;
190                     }
191                 }
192             }
193         }
194         assumeTrue("Codec " + codecName + " doesn't support color YUV 420 color formats",
195                 codecProperties != null);
196         return codecProperties;
197     }
198 
    /**
     * Parameters for encoded video stream.
     */
    protected class EncoderOutputStreamParameters {
        // Name of raw YUV420 input file. When the value of this parameter
        // is set to null input file descriptor from inputResource parameter
        // is used instead.
        public String inputYuvFilename;
        // Name of scaled YUV420 input file.
        public String scaledYuvFilename;
        // File descriptor for the raw input file (YUV420). Used only if
        // inputYuvFilename parameter is null.
        public String inputResource;
        // Name of the IVF file to write encoded bitstream
        public String outputIvfFilename;
        // Mime Type of the Encoded content.
        public String codecMimeType;
        // Component Name.
        public String codecName;
        // Number of frames to encode.
        int frameCount;
        // Frame rate of input file in frames per second.
        int frameRate;
        // Encoded frame width.
        public int frameWidth;
        // Encoded frame height.
        public int frameHeight;
        // Encoding bitrate array in bits/second for every frame. If array length
        // is shorter than the total number of frames, the last value is re-used for
        // all remaining frames. For constant bitrate encoding single element
        // array can be used with first element set to target bitrate value.
        public int[] bitrateSet;
        // Encoding bitrate type - VBR or CBR (VIDEO_ControlRateVariable/Constant).
        public int bitrateType;
        // Number of temporal layers
        public int temporalLayers;
        // Desired key frame interval - codec is asked to generate key frames
        // at a period defined by this parameter.
        public int syncFrameInterval;
        // Optional parameter - forced key frame interval. Used to
        // explicitly request the codec to generate key frames using
        // MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME parameter.
        public int syncForceFrameInterval;
        // Buffer dequeue timeout in microseconds (0 for asynchronous encoding).
        long timeoutDequeue;
        // Flag if encoder should run in Looper thread.
        boolean runInLooperThread;
        // Flag if use NdkMediaCodec
        boolean useNdk;
        // Encoding Statistics Level
        // 0: None, 1: Average block QP and picture type of a frame
        public int encodingStatisticsLevel;
    }
252 
    /**
     * Encoding statistics aggregated over a whole encoded sequence.
     */
    protected class EncodingStatisticsInfo {
        public float averageSeqQp = 0; // Average qp of a whole sequence,
                                         // i.e. average of 'per-frame average block QP'
        public int encodedFrames = 0; // # of encoded frames,
                                       // i.e. # of average_block_qp is reported
    }
262 
    /**
     * Result of one encoding run: the per-frame output buffer metadata
     * produced by the encoder together with the aggregated encoding
     * statistics for the whole sequence.
     */
    protected class VideoEncodeOutput{
        // Per-frame buffer metadata (size, flags, timestamp) of the encoded output.
        public ArrayList<MediaCodec.BufferInfo> bufferInfo;
        // Aggregated encoding statistics for the sequence.
        public EncodingStatisticsInfo encStat;

        VideoEncodeOutput(
                ArrayList<MediaCodec.BufferInfo> bufferInfo,
                EncodingStatisticsInfo encStat) {
            this.bufferInfo = bufferInfo;
            this.encStat = encStat;
        }
    }
277 
getCodecSuffix(String codecMimeType)278     private String getCodecSuffix(String codecMimeType) {
279         switch(codecMimeType) {
280         case VP8_MIME:
281             return "vp8";
282         case VP9_MIME:
283             return "vp9";
284         case AVC_MIME:
285             return "avc";
286         case HEVC_MIME:
287             return "hevc";
288         default:
289             Log.w(TAG, "getCodecSuffix got an unexpected codecMimeType.");
290         }
291         return "video";
292     }
293 
294     /**
295      * Generates an array of default parameters for encoder output stream based on
296      * upscaling value.
297      */
getDefaultEncodingParameterList( String inputYuvName, String outputIvfBaseName, String codecName, String codecMimeType, int encodeSeconds, int[] resolutionScales, int frameWidth, int frameHeight, int frameRate, int bitrateMode, int[] bitrates, boolean syncEncoding)298     protected ArrayList<EncoderOutputStreamParameters> getDefaultEncodingParameterList(
299             String inputYuvName,
300             String outputIvfBaseName,
301             String codecName,
302             String codecMimeType,
303             int encodeSeconds,
304             int[] resolutionScales,
305             int frameWidth,
306             int frameHeight,
307             int frameRate,
308             int bitrateMode,
309             int[] bitrates,
310             boolean syncEncoding) {
311         assertTrue(resolutionScales.length == bitrates.length);
312         int numCodecs = resolutionScales.length;
313         ArrayList<EncoderOutputStreamParameters> outputParameters =
314                 new ArrayList<EncoderOutputStreamParameters>(numCodecs);
315         for (int i = 0; i < numCodecs; i++) {
316             EncoderOutputStreamParameters params = new EncoderOutputStreamParameters();
317             if (inputYuvName != null) {
318                 params.inputYuvFilename = SDCARD_DIR + File.separator + inputYuvName;
319             } else {
320                 params.inputYuvFilename = null;
321             }
322             params.scaledYuvFilename = SDCARD_DIR + File.separator +
323                     outputIvfBaseName + resolutionScales[i]+ ".yuv";
324             params.inputResource = "football_qvga.yuv";
325             params.codecMimeType = codecMimeType;
326             String codecSuffix = getCodecSuffix(codecMimeType);
327             params.outputIvfFilename = SDCARD_DIR + File.separator +
328                     outputIvfBaseName + resolutionScales[i] + "_" + codecSuffix + ".ivf";
329             params.codecName = codecName;
330             params.frameCount = encodeSeconds * frameRate;
331             params.frameRate = frameRate;
332             params.frameWidth = Math.min(frameWidth * resolutionScales[i], 1280);
333             params.frameHeight = Math.min(frameHeight * resolutionScales[i], 720);
334             params.bitrateSet = new int[1];
335             params.bitrateSet[0] = bitrates[i];
336             params.bitrateType = bitrateMode;
337             params.temporalLayers = 0;
338             params.syncFrameInterval = SYNC_FRAME_INTERVAL;
339             params.syncForceFrameInterval = 0;
340             if (syncEncoding) {
341                 params.timeoutDequeue = DEFAULT_DEQUEUE_TIMEOUT_US;
342                 params.runInLooperThread = false;
343             } else {
344                 params.timeoutDequeue = 0;
345                 params.runInLooperThread = true;
346             }
347             outputParameters.add(params);
348             params.encodingStatisticsLevel = MediaFormat.VIDEO_ENCODING_STATISTICS_LEVEL_NONE;
349         }
350         return outputParameters;
351     }
352 
getDefaultEncodingParameters( String inputYuvName, String outputIvfBaseName, String codecName, String codecMimeType, int encodeSeconds, int frameWidth, int frameHeight, int frameRate, int bitrateMode, int bitrate, boolean syncEncoding)353     protected EncoderOutputStreamParameters getDefaultEncodingParameters(
354             String inputYuvName,
355             String outputIvfBaseName,
356             String codecName,
357             String codecMimeType,
358             int encodeSeconds,
359             int frameWidth,
360             int frameHeight,
361             int frameRate,
362             int bitrateMode,
363             int bitrate,
364             boolean syncEncoding) {
365         int[] scaleValues = { 1 };
366         int[] bitrates = { bitrate };
367         return getDefaultEncodingParameterList(
368                 inputYuvName,
369                 outputIvfBaseName,
370                 codecName,
371                 codecMimeType,
372                 encodeSeconds,
373                 scaleValues,
374                 frameWidth,
375                 frameHeight,
376                 frameRate,
377                 bitrateMode,
378                 bitrates,
379                 syncEncoding).get(0);
380     }
381 
382     /**
383      * Converts (interleaves) YUV420 planar to NV12.
384      * Assumes packed, macroblock-aligned frame with no cropping
385      * (visible/coded row length == stride).
386      */
YUV420ToNV(int width, int height, byte[] yuv)387     private static byte[] YUV420ToNV(int width, int height, byte[] yuv) {
388         byte[] nv = new byte[yuv.length];
389         // Y plane we just copy.
390         System.arraycopy(yuv, 0, nv, 0, width * height);
391 
392         // U & V plane we interleave.
393         int u_offset = width * height;
394         int v_offset = u_offset + u_offset / 4;
395         int nv_offset = width * height;
396         for (int i = 0; i < width * height / 4; i++) {
397             nv[nv_offset++] = yuv[u_offset++];
398             nv[nv_offset++] = yuv[v_offset++];
399         }
400         return nv;
401     }
402 
403     /**
404      * Converts (de-interleaves) NV12 to YUV420 planar.
405      * Stride may be greater than width, slice height may be greater than height.
406      */
NV12ToYUV420(int width, int height, int stride, int sliceHeight, byte[] nv12)407     private static byte[] NV12ToYUV420(int width, int height,
408             int stride, int sliceHeight, byte[] nv12) {
409         byte[] yuv = new byte[width * height * 3 / 2];
410 
411         // Y plane we just copy.
412         for (int i = 0; i < height; i++) {
413             System.arraycopy(nv12, i * stride, yuv, i * width, width);
414         }
415 
416         // U & V plane - de-interleave.
417         int u_offset = width * height;
418         int v_offset = u_offset + u_offset / 4;
419         int nv_offset;
420         for (int i = 0; i < height / 2; i++) {
421             nv_offset = stride * (sliceHeight + i);
422             for (int j = 0; j < width / 2; j++) {
423                 yuv[u_offset++] = nv12[nv_offset++];
424                 yuv[v_offset++] = nv12[nv_offset++];
425             }
426         }
427         return yuv;
428     }
429 
430     /**
431      * Packs YUV420 frame by moving it to a smaller size buffer with stride and slice
432      * height equal to the crop window.
433      */
PackYUV420(int left, int top, int width, int height, int stride, int sliceHeight, byte[] src)434     private static byte[] PackYUV420(int left, int top, int width, int height,
435             int stride, int sliceHeight, byte[] src) {
436         byte[] dst = new byte[width * height * 3 / 2];
437         // Y copy.
438         for (int i = 0; i < height; i++) {
439             System.arraycopy(src, (i + top) * stride + left, dst, i * width, width);
440         }
441         // U and V copy.
442         int u_src_offset = stride * sliceHeight;
443         int v_src_offset = u_src_offset + u_src_offset / 4;
444         int u_dst_offset = width * height;
445         int v_dst_offset = u_dst_offset + u_dst_offset / 4;
446         // Downsample and align to floor-2 for crop origin.
447         left /= 2;
448         top /= 2;
449         for (int i = 0; i < height / 2; i++) {
450             System.arraycopy(src, u_src_offset + (i + top) * (stride / 2) + left,
451                     dst, u_dst_offset + i * (width / 2), width / 2);
452             System.arraycopy(src, v_src_offset + (i + top) * (stride / 2) + left,
453                     dst, v_dst_offset + i * (width / 2), width / 2);
454         }
455         return dst;
456     }
457 
458 
imageUpscale1To2(byte[] src, int srcByteOffset, int srcStride, byte[] dst, int dstByteOffset, int dstWidth, int dstHeight)459     private static void imageUpscale1To2(byte[] src, int srcByteOffset, int srcStride,
460             byte[] dst, int dstByteOffset, int dstWidth, int dstHeight) {
461         for (int i = 0; i < dstHeight/2 - 1; i++) {
462             int dstOffset0 = 2 * i * dstWidth + dstByteOffset;
463             int dstOffset1 = dstOffset0 + dstWidth;
464             int srcOffset0 = i * srcStride + srcByteOffset;
465             int srcOffset1 = srcOffset0 + srcStride;
466             int pixel00 = (int)src[srcOffset0++] & 0xff;
467             int pixel10 = (int)src[srcOffset1++] & 0xff;
468             for (int j = 0; j < dstWidth/2 - 1; j++) {
469                 int pixel01 = (int)src[srcOffset0++] & 0xff;
470                 int pixel11 = (int)src[srcOffset1++] & 0xff;
471                 dst[dstOffset0++] = (byte)pixel00;
472                 dst[dstOffset0++] = (byte)((pixel00 + pixel01 + 1) / 2);
473                 dst[dstOffset1++] = (byte)((pixel00 + pixel10 + 1) / 2);
474                 dst[dstOffset1++] = (byte)((pixel00 + pixel01 + pixel10 + pixel11 + 2) / 4);
475                 pixel00 = pixel01;
476                 pixel10 = pixel11;
477             }
478             // last column
479             dst[dstOffset0++] = (byte)pixel00;
480             dst[dstOffset0++] = (byte)pixel00;
481             dst[dstOffset1++] = (byte)((pixel00 + pixel10 + 1) / 2);
482             dst[dstOffset1++] = (byte)((pixel00 + pixel10 + 1) / 2);
483         }
484 
485         // last row
486         int dstOffset0 = (dstHeight - 2) * dstWidth + dstByteOffset;
487         int dstOffset1 = dstOffset0 + dstWidth;
488         int srcOffset0 = (dstHeight/2 - 1) * srcStride + srcByteOffset;
489         int pixel00 = (int)src[srcOffset0++] & 0xff;
490         for (int j = 0; j < dstWidth/2 - 1; j++) {
491             int pixel01 = (int)src[srcOffset0++] & 0xff;
492             dst[dstOffset0++] = (byte)pixel00;
493             dst[dstOffset0++] = (byte)((pixel00 + pixel01 + 1) / 2);
494             dst[dstOffset1++] = (byte)pixel00;
495             dst[dstOffset1++] = (byte)((pixel00 + pixel01 + 1) / 2);
496             pixel00 = pixel01;
497         }
498         // the very last pixel - bottom right
499         dst[dstOffset0++] = (byte)pixel00;
500         dst[dstOffset0++] = (byte)pixel00;
501         dst[dstOffset1++] = (byte)pixel00;
502         dst[dstOffset1++] = (byte)pixel00;
503     }
504 
505     /**
506     * Up-scale image.
507     * Scale factor is defined by source and destination width ratio.
508     * Only 1:2 and 1:4 up-scaling is supported for now.
509     * For 640x480 -> 1280x720 conversion only top 640x360 part of the original
510     * image is scaled.
511     */
imageScale(byte[] src, int srcWidth, int srcHeight, int dstWidth, int dstHeight)512     private static byte[] imageScale(byte[] src, int srcWidth, int srcHeight,
513             int dstWidth, int dstHeight) throws Exception {
514         int srcYSize = srcWidth * srcHeight;
515         int dstYSize = dstWidth * dstHeight;
516         byte[] dst = null;
517         if (dstWidth == 2 * srcWidth && dstHeight <= 2 * srcHeight) {
518             // 1:2 upscale
519             dst = new byte[dstWidth * dstHeight * 3 / 2];
520             imageUpscale1To2(src, 0, srcWidth,
521                     dst, 0, dstWidth, dstHeight);                                 // Y
522             imageUpscale1To2(src, srcYSize, srcWidth / 2,
523                     dst, dstYSize, dstWidth / 2, dstHeight / 2);                  // U
524             imageUpscale1To2(src, srcYSize * 5 / 4, srcWidth / 2,
525                     dst, dstYSize * 5 / 4, dstWidth / 2, dstHeight / 2);          // V
526         } else if (dstWidth == 4 * srcWidth && dstHeight <= 4 * srcHeight) {
527             // 1:4 upscale - in two steps
528             int midWidth = 2 * srcWidth;
529             int midHeight = 2 * srcHeight;
530             byte[] midBuffer = imageScale(src, srcWidth, srcHeight, midWidth, midHeight);
531             dst = imageScale(midBuffer, midWidth, midHeight, dstWidth, dstHeight);
532 
533         } else {
534             throw new RuntimeException("Can not find proper scaling function");
535         }
536 
537         return dst;
538     }
539 
cacheScaledImage( String srcYuvFilename, String srcResource, int srcFrameWidth, int srcFrameHeight, String dstYuvFilename, int dstFrameWidth, int dstFrameHeight)540     private void cacheScaledImage(
541             String srcYuvFilename, String srcResource, int srcFrameWidth, int srcFrameHeight,
542             String dstYuvFilename, int dstFrameWidth, int dstFrameHeight) throws Exception {
543         InputStream srcStream = OpenFileOrResource(srcYuvFilename, srcResource);
544         FileOutputStream dstFile = new FileOutputStream(dstYuvFilename, false);
545         int srcFrameSize = srcFrameWidth * srcFrameHeight * 3 / 2;
546         byte[] srcFrame = new byte[srcFrameSize];
547         byte[] dstFrame = null;
548         Log.d(TAG, "Scale to " + dstFrameWidth + " x " + dstFrameHeight + ". -> " + dstYuvFilename);
549         while (true) {
550             int bytesRead = srcStream.read(srcFrame);
551             if (bytesRead != srcFrame.length) {
552                 break;
553             }
554             if (dstFrameWidth == srcFrameWidth && dstFrameHeight == srcFrameHeight) {
555                 dstFrame = srcFrame;
556             } else {
557                 dstFrame = imageScale(srcFrame, srcFrameWidth, srcFrameHeight,
558                         dstFrameWidth, dstFrameHeight);
559             }
560             dstFile.write(dstFrame);
561         }
562         srcStream.close();
563         dstFile.close();
564     }
565 
566 
567     /**
568      * A basic check if an encoded stream is decodable.
569      *
570      * The most basic confirmation we can get about a frame
571      * being properly encoded is trying to decode it.
572      * (Especially in realtime mode encode output is non-
573      * deterministic, therefore a more thorough check like
574      * md5 sum comparison wouldn't work.)
575      *
576      * Indeed, MediaCodec will raise an IllegalStateException
577      * whenever video decoder fails to decode a frame, and
578      * this test uses that fact to verify the bitstream.
579      *
580      * @param inputIvfFilename  The name of the IVF file containing encoded bitsream.
581      * @param outputYuvFilename The name of the output YUV file (optional).
582      * @param frameRate         Frame rate of input file in frames per second
583      * @param codecConfigs      Codec config buffers to be added to the format
584      */
decode( String inputIvfFilename, String outputYuvFilename, String codecMimeType, int frameRate, ArrayList<ByteBuffer> codecConfigs)585     protected ArrayList<MediaCodec.BufferInfo> decode(
586             String inputIvfFilename,
587             String outputYuvFilename,
588             String codecMimeType,
589             int frameRate,
590             ArrayList<ByteBuffer> codecConfigs) throws Exception {
591         ArrayList<MediaCodec.BufferInfo> bufferInfos = new ArrayList<MediaCodec.BufferInfo>();
592 
593         // Open input/output.
594         IvfReader ivf = new IvfReader(inputIvfFilename);
595         int frameWidth = ivf.getWidth();
596         int frameHeight = ivf.getHeight();
597         int frameCount = ivf.getFrameCount();
598         int frameStride = frameWidth;
599         int frameSliceHeight = frameHeight;
600         int cropLeft = 0;
601         int cropTop = 0;
602         int cropWidth = frameWidth;
603         int cropHeight = frameHeight;
604         assertTrue(frameWidth > 0);
605         assertTrue(frameHeight > 0);
606         assertTrue(frameCount > 0);
607 
608         // Create decoder.
609         MediaFormat format = MediaFormat.createVideoFormat(
610                 codecMimeType, ivf.getWidth(), ivf.getHeight());
611         CodecProperties properties = getVideoCodecProperties(false /* encoder */, format);
612         if (properties == null) {
613             ivf.close();
614             return null;
615         }
616         int frameColorFormat = properties.colorFormat;
617         format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
618         int csdIndex = 0;
619         for (ByteBuffer config : codecConfigs) {
620             format.setByteBuffer("csd-" + csdIndex, config);
621             ++csdIndex;
622         }
623 
624         FileOutputStream yuv = null;
625         if (outputYuvFilename != null) {
626             yuv = new FileOutputStream(outputYuvFilename, false);
627         }
628 
629         Log.d(TAG, "Creating decoder " + properties.codecName +
630                 ". Color format: 0x" + Integer.toHexString(frameColorFormat) +
631                 ". " + frameWidth + " x " + frameHeight);
632         Log.d(TAG, "  Format: " + format);
633         Log.d(TAG, "  In: " + inputIvfFilename + ". Out:" + outputYuvFilename);
634         MediaCodec decoder = MediaCodec.createByCodecName(properties.codecName);
635         decoder.configure(format,
636                           null,  // surface
637                           null,  // crypto
638                           0);    // flags
639         decoder.start();
640 
641         ByteBuffer[] inputBuffers = decoder.getInputBuffers();
642         ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
643         MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
644 
645         // decode loop
646         int inputFrameIndex = 0;
647         int outputFrameIndex = 0;
648         long inPresentationTimeUs = 0;
649         long outPresentationTimeUs = 0;
650         boolean sawOutputEOS = false;
651         boolean sawInputEOS = false;
652 
653         while (!sawOutputEOS) {
654             if (!sawInputEOS) {
655                 int inputBufIndex = decoder.dequeueInputBuffer(DEFAULT_DEQUEUE_TIMEOUT_US);
656                 if (inputBufIndex >= 0) {
657                     byte[] frame = ivf.readFrame(inputFrameIndex);
658 
659                     if (inputFrameIndex == frameCount - 1) {
660                         Log.d(TAG, "  Input EOS for frame # " + inputFrameIndex);
661                         sawInputEOS = true;
662                     }
663 
664                     inputBuffers[inputBufIndex].clear();
665                     inputBuffers[inputBufIndex].put(frame);
666                     inputBuffers[inputBufIndex].rewind();
667                     inPresentationTimeUs = (inputFrameIndex * 1000000) / frameRate;
668 
669                     decoder.queueInputBuffer(
670                             inputBufIndex,
671                             0,  // offset
672                             frame.length,
673                             inPresentationTimeUs,
674                             sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
675 
676                     inputFrameIndex++;
677                 }
678             }
679 
680             int result = decoder.dequeueOutputBuffer(bufferInfo, DEFAULT_DEQUEUE_TIMEOUT_US);
681             while (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED ||
682                     result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
683                 if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
684                     outputBuffers = decoder.getOutputBuffers();
685                 } else  if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
686                     // Process format change
687                     format = decoder.getOutputFormat();
688                     frameWidth = format.getInteger(MediaFormat.KEY_WIDTH);
689                     frameHeight = format.getInteger(MediaFormat.KEY_HEIGHT);
690                     frameColorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
691                     Log.d(TAG, "Decoder output format change. Color: 0x" +
692                             Integer.toHexString(frameColorFormat));
693                     Log.d(TAG, "Format: " + format.toString());
694 
695                     // Parse frame and slice height from undocumented values
696                     if (format.containsKey("stride")) {
697                         frameStride = format.getInteger("stride");
698                     } else {
699                         frameStride = frameWidth;
700                     }
701                     if (format.containsKey("slice-height")) {
702                         frameSliceHeight = format.getInteger("slice-height");
703                     } else {
704                         frameSliceHeight = frameHeight;
705                     }
706                     Log.d(TAG, "Frame stride and slice height: " + frameStride +
707                             " x " + frameSliceHeight);
708                     frameStride = Math.max(frameWidth, frameStride);
709                     frameSliceHeight = Math.max(frameHeight, frameSliceHeight);
710 
711                     // Parse crop window for the area of recording decoded frame data.
712                     if (format.containsKey("crop-left")) {
713                         cropLeft = format.getInteger("crop-left");
714                     }
715                     if (format.containsKey("crop-top")) {
716                         cropTop = format.getInteger("crop-top");
717                     }
718                     if (format.containsKey("crop-right")) {
719                         cropWidth = format.getInteger("crop-right") - cropLeft + 1;
720                     } else {
721                         cropWidth = frameWidth;
722                     }
723                     if (format.containsKey("crop-bottom")) {
724                         cropHeight = format.getInteger("crop-bottom") - cropTop + 1;
725                     } else {
726                         cropHeight = frameHeight;
727                     }
728                     Log.d(TAG, "Frame crop window origin: " + cropLeft + " x " + cropTop
729                             + ", size: " + cropWidth + " x " + cropHeight);
730                     cropWidth = Math.min(frameWidth - cropLeft, cropWidth);
731                     cropHeight = Math.min(frameHeight - cropTop, cropHeight);
732                 }
733                 result = decoder.dequeueOutputBuffer(bufferInfo, DEFAULT_DEQUEUE_TIMEOUT_US);
734             }
735             if (result >= 0) {
736                 int outputBufIndex = result;
737                 outPresentationTimeUs = bufferInfo.presentationTimeUs;
738                 Log.v(TAG, "Writing buffer # " + outputFrameIndex +
739                         ". Size: " + bufferInfo.size +
740                         ". InTime: " + (inPresentationTimeUs + 500)/1000 +
741                         ". OutTime: " + (outPresentationTimeUs + 500)/1000);
742                 if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
743                     sawOutputEOS = true;
744                     Log.d(TAG, "   Output EOS for frame # " + outputFrameIndex);
745                 }
746 
747                 if (bufferInfo.size > 0) {
748                     // Save decoder output to yuv file.
749                     if (yuv != null) {
750                         byte[] frame = new byte[bufferInfo.size];
751                         outputBuffers[outputBufIndex].position(bufferInfo.offset);
752                         outputBuffers[outputBufIndex].get(frame, 0, bufferInfo.size);
753                         // Convert NV12 to YUV420 if necessary.
754                         if (frameColorFormat != CodecCapabilities.COLOR_FormatYUV420Planar) {
755                             frame = NV12ToYUV420(frameWidth, frameHeight,
756                                     frameStride, frameSliceHeight, frame);
757                         }
758                         int writeLength = Math.min(cropWidth * cropHeight * 3 / 2, frame.length);
759                         // Pack frame if necessary.
760                         if (writeLength < frame.length &&
761                                 (frameStride > cropWidth || frameSliceHeight > cropHeight)) {
762                             frame = PackYUV420(cropLeft, cropTop, cropWidth, cropHeight,
763                                     frameStride, frameSliceHeight, frame);
764                         }
765                         yuv.write(frame, 0, writeLength);
766                     }
767                     outputFrameIndex++;
768 
769                     // Update statistics - store presentation time delay in offset
770                     long presentationTimeUsDelta = inPresentationTimeUs - outPresentationTimeUs;
771                     MediaCodec.BufferInfo bufferInfoCopy = new MediaCodec.BufferInfo();
772                     bufferInfoCopy.set((int)presentationTimeUsDelta, bufferInfo.size,
773                             outPresentationTimeUs, bufferInfo.flags);
774                     bufferInfos.add(bufferInfoCopy);
775                 }
776                 decoder.releaseOutputBuffer(outputBufIndex, false);
777             }
778         }
779         decoder.stop();
780         decoder.release();
781         ivf.close();
782         if (yuv != null) {
783             yuv.close();
784         }
785 
786         return bufferInfos;
787     }
788 
789 
790     /**
791      * Helper function to return InputStream from either fully specified filename (if set)
792      * or resource name within test assets (if filename is not set).
793      */
OpenFileOrResource(String filename, final String resource)794     private InputStream OpenFileOrResource(String filename, final String resource)
795             throws Exception {
796         if (filename != null) {
797             Preconditions.assertTestFileExists(filename);
798             return new FileInputStream(filename);
799         }
800         Preconditions.assertTestFileExists(mInpPrefix + resource);
801         return new FileInputStream(mInpPrefix + resource);
802     }
803 
    /**
     * Results of one frame-encoding step, as returned by getOutput() or passed to
     * MediaEncoderAsyncHelper.saveOutputFrame().
     */
    protected class MediaEncoderOutput {
        // Presentation time of the input frame most recently queued to the encoder.
        public long inPresentationTimeUs;
        // Presentation time the encoder reported for this output buffer.
        public long outPresentationTimeUs;
        // True if the encoder actually produced an output buffer for this step.
        public boolean outputGenerated;
        // MediaCodec.BufferInfo flags of the output buffer (CONFIG/KEY/EOS bits).
        public int flags;
        // Encoded bitstream bytes (may be empty, e.g. after codec-config was diverted).
        public byte[] buffer;
    }
814 
    /**
     * Glue between a raw YUV input stream and an encoder running in async mode:
     * supplies one raw input frame per request and stores the encoder's output —
     * bitstream frames to an IVF file, codec-config buffers and per-frame
     * statistics to the provided collections.
     */
    protected class MediaEncoderAsyncHelper {
        private final EncoderOutputStreamParameters mStreamParams;
        private final CodecProperties mProperties;
        // Per-frame BufferInfo records; the offset field holds the in->out
        // presentation-time delay (see saveOutputFrame()).
        private final ArrayList<MediaCodec.BufferInfo> mBufferInfos;
        // Destination for encoded frames.
        private final IvfWriter mIvf;
        // Collected codec-config (csd) buffers, kept out of the IVF stream.
        private final ArrayList<ByteBuffer> mCodecConfigs;
        // Reusable buffer holding one raw YUV420 frame (width * height * 3/2 bytes).
        private final byte[] mSrcFrame;

        private InputStream mYuvStream;
        private int mInputFrameIndex;
        private final EncodingStatisticsInfo mEncStatInfo;

        MediaEncoderAsyncHelper(
                EncoderOutputStreamParameters streamParams,
                CodecProperties properties,
                ArrayList<MediaCodec.BufferInfo> bufferInfos,
                IvfWriter ivf,
                ArrayList<ByteBuffer> codecConfigs,
                EncodingStatisticsInfo encStatInfo)
                throws Exception {
            mStreamParams = streamParams;
            mProperties = properties;
            mBufferInfos = bufferInfos;
            mIvf = ivf;
            mCodecConfigs = codecConfigs;
            mEncStatInfo = encStatInfo;

            // One raw YUV420 frame: full-size luma plane plus two quarter-size chroma planes.
            int srcFrameSize = streamParams.frameWidth * streamParams.frameHeight * 3 / 2;
            mSrcFrame = new byte[srcFrameSize];

            mYuvStream = OpenFileOrResource(
                    streamParams.inputYuvFilename, streamParams.inputResource);
        }

        /**
         * Returns the next raw input frame, or null to signal end of stream
         * (frameCount reached, or a read failure). The input file is looped —
         * reopened from the start — if it runs out before frameCount frames have
         * been delivered. NOTE: may return the shared mSrcFrame buffer, which is
         * overwritten by the next call; callers must consume it first.
         */
        public byte[] getInputFrame() {
            // Check EOS
            if (mStreamParams.frameCount == 0
                    || (mStreamParams.frameCount > 0
                            && mInputFrameIndex >= mStreamParams.frameCount)) {
                Log.d(TAG, "---Sending EOS empty frame for frame # " + mInputFrameIndex);
                return null;
            }

            try {
                int bytesRead = mYuvStream.read(mSrcFrame);

                if (bytesRead == -1) {
                    // rewind to beginning of file
                    mYuvStream.close();
                    mYuvStream = OpenFileOrResource(
                            mStreamParams.inputYuvFilename, mStreamParams.inputResource);
                    bytesRead = mYuvStream.read(mSrcFrame);
                }
            } catch (Exception e) {
                // Treated like EOS so the encoder is shut down cleanly.
                Log.e(TAG, "Failed to read YUV file.");
                return null;
            }
            mInputFrameIndex++;

            // Convert YUV420 to NV12 if necessary
            if (mProperties.colorFormat != CodecCapabilities.COLOR_FormatYUV420Planar) {
                return YUV420ToNV(mStreamParams.frameWidth, mStreamParams.frameHeight,
                        mSrcFrame);
            } else {
                return mSrcFrame;
            }
        }

        /**
         * Stores one encoder output frame. Codec-config buffers are diverted to
         * mCodecConfigs and excluded from the IVF file and from statistics.
         *
         * @return true when encoding should stop: output EOS was seen, or the
         *         IVF write failed.
         */
        public boolean saveOutputFrame(MediaEncoderOutput out) {
            if (out.outputGenerated) {
                if ((out.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                    Log.d(TAG, "Storing codec config separately");
                    ByteBuffer csdBuffer = ByteBuffer.allocate(out.buffer.length).put(out.buffer);
                    csdBuffer.rewind();
                    mCodecConfigs.add(csdBuffer);
                    // Empty the buffer so the frame is skipped by the size check below.
                    out.buffer = new byte[0];
                }
                if (out.buffer.length > 0) {
                    // Save frame
                    try {
                        mIvf.writeFrame(out.buffer, out.outPresentationTimeUs);
                    } catch (Exception e) {
                        Log.d(TAG, "Failed to write frame");
                        return true;
                    }

                    // Update statistics - store presentation time delay in offset
                    long presentationTimeUsDelta = out.inPresentationTimeUs -
                            out.outPresentationTimeUs;
                    MediaCodec.BufferInfo bufferInfoCopy = new MediaCodec.BufferInfo();
                    bufferInfoCopy.set((int)presentationTimeUsDelta, out.buffer.length,
                            out.outPresentationTimeUs, out.flags);
                    mBufferInfos.add(bufferInfoCopy);
                }
                // Detect output EOS
                if ((out.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    Log.d(TAG, "----Output EOS ");
                    return true;
                }
            }
            return false;
        }

        /**
         * Accumulates the per-frame average QP into sequence-level statistics.
         * averageSeqQp holds a running sum here; presumably it is normalized by
         * encodedFrames later — confirm against the caller before relying on it.
         */
        public void saveAvgQp(int avg_qp) {
            mEncStatInfo.averageSeqQp += (float) avg_qp;
            ++mEncStatInfo.encodedFrames;  // Note: Duplicated info to  mOutputFrameIndex
        }
    }
923 
    /**
     * Video encoder wrapper class.
     * Allows the encoder to run either in the caller's thread or in a looper thread
     * using buffer dequeue ready notification callbacks.
     *
     * Function feedInput() is used to send a raw video frame to the encoder input. When the
     * encoder is configured to run in async mode, the function will run in a looper thread.
     * An encoded frame can be retrieved by calling the getOutput() function.
     */
933     protected class MediaEncoderAsync extends Thread {
        // Numeric id used only to tag log messages when several encoders run at once.
        private int mId;
        // SDK- or NDK-backed codec wrapper, chosen in createCodecInternal().
        private MediaCodecWrapper mCodec;
        // Cached buffer arrays; populated only in sync mode.
        private ByteBuffer[] mInputBuffers;
        private ByteBuffer[] mOutputBuffers;
        private int mInputFrameIndex;
        private int mOutputFrameIndex;
        private int mInputBufIndex;
        // Frame rate (from the configure format) used to derive input timestamps.
        private int mFrameRate;
        // Timeout for dequeueInputBuffer()/dequeueOutputBuffer() in sync mode.
        private long mTimeout;
        // Reused for every dequeueOutputBuffer() call in sync mode.
        private MediaCodec.BufferInfo mBufferInfo;
        // Presentation time of the most recently queued input frame.
        private long mInPresentationTimeUs;
        // Presentation time of the most recently dequeued output buffer.
        private long mOutPresentationTimeUs;
        private boolean mAsync;
        // Flag indicating if input frame was consumed by the encoder in feedInput() call.
        private boolean mConsumedInput;
        // Result of frame encoding returned by getOutput() call.
        private MediaEncoderOutput mOutput;
        // Object used to signal that looper thread has started and Handler instance associated
        // with looper thread has been allocated.
        private final Object mThreadEvent = new Object();
        // Object used to signal that MediaCodec buffer dequeue notification callback
        // was received.
        private final Object mCallbackEvent = new Object();
        // Handler bound to the looper thread; tasks are posted to it in async mode.
        private Handler mHandler;
        private boolean mCallbackReceived;
        // Supplies input frames and stores output in async mode.
        private MediaEncoderAsyncHelper mHelper;
        // Used with mCompleted to wait for output EOS (signalCompletion() sets it).
        private final Object mCompletionEvent = new Object();
        private boolean mCompleted;
        // Set once the first sync (key) frame is seen; frame data before it is an error.
        private boolean mInitialSyncFrameReceived;
963 
        // MediaCodec async-mode callbacks: pull raw frames from mHelper on input
        // availability, push encoded output back to it. Runs on the codec's callback
        // thread; requires mHelper to be installed via setAsyncHelper() first.
        private MediaCodec.Callback mCallback = new MediaCodec.Callback() {
            @Override
            public void onInputBufferAvailable(MediaCodec codec, int index) {
                if (mHelper == null) {
                    Log.e(TAG, "async helper not available");
                    return;
                }

                // A null frame from the helper means end of stream.
                byte[] encFrame = mHelper.getInputFrame();
                boolean inputEOS = (encFrame == null);

                int encFrameLength = 0;
                int flags = 0;
                if (inputEOS) {
                    // EOS is signalled with an empty buffer carrying the EOS flag;
                    // the timestamp of the last real frame is reused.
                    flags = MediaCodec.BUFFER_FLAG_END_OF_STREAM;
                } else {
                    encFrameLength = encFrame.length;

                    ByteBuffer byteBuffer = mCodec.getInputBuffer(index);
                    byteBuffer.put(encFrame);
                    byteBuffer.rewind();

                    // Timestamp derived from frame index and nominal frame rate.
                    mInPresentationTimeUs = (mInputFrameIndex * 1000000) / mFrameRate;

                    Log.v(TAG, "Enc" + mId + ". Frame in # " + mInputFrameIndex +
                            ". InTime: " + (mInPresentationTimeUs + 500)/1000);

                    mInputFrameIndex++;
                }

                mCodec.queueInputBuffer(
                        index,
                        0,  // offset
                        encFrameLength,  // size
                        mInPresentationTimeUs,
                        flags);
            }

            @Override
            public void onOutputBufferAvailable(MediaCodec codec,
                    int index, MediaCodec.BufferInfo info) {
                if (mHelper == null) {
                    Log.e(TAG, "async helper not available");
                    return;
                }

                MediaEncoderOutput out = new MediaEncoderOutput();

                // Copy the bitstream out before releasing the codec buffer.
                out.buffer = new byte[info.size];
                ByteBuffer outputBuffer = mCodec.getOutputBuffer(index);
                outputBuffer.get(out.buffer, 0, info.size);
                mOutPresentationTimeUs = info.presentationTimeUs;

                String logStr = "Enc" + mId + ". Frame # " + mOutputFrameIndex;
                if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                    logStr += " CONFIG. ";
                }
                if ((info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0) {
                    logStr += " KEY. ";
                    if (!mInitialSyncFrameReceived) {
                        mInitialSyncFrameReceived = true;
                    }
                }
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    logStr += " EOS. ";
                }
                logStr += " Size: " + info.size;
                logStr += ". InTime: " + (mInPresentationTimeUs + 500)/1000 +
                        ". OutTime: " + (mOutPresentationTimeUs + 500)/1000;
                Log.v(TAG, logStr);

                // The first non-config output buffer must be a sync (key) frame.
                if (!mInitialSyncFrameReceived
                        && (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
                    throw new RuntimeException("Non codec_config_frame before first sync.");
                }

                if (info.size > 0) {
                    mOutputFrameIndex++;
                    out.inPresentationTimeUs = mInPresentationTimeUs;
                    out.outPresentationTimeUs = mOutPresentationTimeUs;
                }

                MediaFormat format = codec.getOutputFormat(index);
                if (format.containsKey(MediaFormat.KEY_VIDEO_QP_AVERAGE)) {
                    int avgQp = format.getInteger(MediaFormat.KEY_VIDEO_QP_AVERAGE);
                    // Copy per-frame avgQp to sequence level buffer
                    mHelper.saveAvgQp(avgQp);
                }

                mCodec.releaseOutputBuffer(index, false);

                out.flags = info.flags;
                out.outputGenerated = true;

                if (mHelper.saveOutputFrame(out)) {
                    // output EOS
                    signalCompletion();
                }
            }

            @Override
            public void onError(MediaCodec codec, CodecException e) {
                Log.e(TAG, "onError: " + e
                        + ", transient " + e.isTransient()
                        + ", recoverable " + e.isRecoverable()
                        + ", error " + e.getErrorCode());
            }

            @Override
            public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) {
                Log.i(TAG, "onOutputFormatChanged: " + format.toString());
            }
        };
1077 
        /**
         * Starts the looper thread and blocks until run() has created the Handler,
         * so callers can immediately post tasks to it via runCallable().
         */
        private synchronized void requestStart() throws Exception {
            mHandler = null;
            start();
            // Wait for Handler allocation
            synchronized (mThreadEvent) {
                while (mHandler == null) {
                    mThreadEvent.wait();
                }
            }
        }
1088 
        /** Installs the helper that supplies input frames and stores output in async mode. */
        public void setAsyncHelper(MediaEncoderAsyncHelper helper) {
            mHelper = helper;
        }
1092 
1093         @Override
run()1094         public void run() {
1095             Looper.prepare();
1096             setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() {
1097                 @Override
1098                 public void uncaughtException(Thread t, Throwable e) {
1099                     Log.e(TAG, "thread " + t + " exception " + e);
1100                     try {
1101                         deleteCodec();
1102                     } catch (Exception ex) {
1103                         Log.e(TAG, "exception from deleteCodec " + e);
1104                     }
1105                 }
1106             });
1107             synchronized (mThreadEvent) {
1108                 mHandler = new Handler();
1109                 mThreadEvent.notify();
1110             }
1111             Looper.loop();
1112         }
1113 
runCallable(final Callable<?> callable)1114         private void runCallable(final Callable<?> callable) throws Exception {
1115             if (mAsync) {
1116                 final Exception[] exception = new Exception[1];
1117                 final CountDownLatch countDownLatch = new CountDownLatch(1);
1118                 mHandler.post( new Runnable() {
1119                     @Override
1120                     public void run() {
1121                         try {
1122                             callable.call();
1123                         } catch (Exception e) {
1124                             exception[0] = e;
1125                         } finally {
1126                             countDownLatch.countDown();
1127                         }
1128                     }
1129                 } );
1130 
1131                 // Wait for task completion
1132                 countDownLatch.await();
1133                 if (exception[0] != null) {
1134                     throw exception[0];
1135                 }
1136             } else {
1137                 callable.call();
1138             }
1139         }
1140 
requestStop()1141         private synchronized void requestStop() throws Exception {
1142             mHandler.post( new Runnable() {
1143                 @Override
1144                 public void run() {
1145                     // This will run on the Looper thread
1146                     Log.v(TAG, "MediaEncoder looper quitting");
1147                     Looper.myLooper().quitSafely();
1148                 }
1149             } );
1150             // Wait for completion
1151             join();
1152             mHandler = null;
1153         }
1154 
        /**
         * Allocates, configures and starts the encoder on the current thread,
         * resetting all per-stream counters. Must run on the looper thread in
         * async mode (see createCodec()).
         */
        private void createCodecInternal(final String name,
                final MediaFormat format, final long timeout, boolean useNdk) throws Exception {
            mBufferInfo = new MediaCodec.BufferInfo();
            mFrameRate = format.getInteger(MediaFormat.KEY_FRAME_RATE);
            mTimeout = timeout;
            mInputFrameIndex = 0;
            mOutputFrameIndex = 0;
            mInPresentationTimeUs = 0;
            mOutPresentationTimeUs = 0;

            if (useNdk) {
                mCodec = new NdkMediaCodec(name);
            } else {
                mCodec = new SdkMediaCodec(MediaCodec.createByCodecName(name), mAsync);
            }
            if (mAsync) {
                // Callback must be set before configure() for async operation.
                mCodec.setCallback(mCallback);
            }
            mCodec.configure(format, MediaCodec.CONFIGURE_FLAG_ENCODE);
            mCodec.start();

            // get the cached input/output only in sync mode
            if (!mAsync) {
                mInputBuffers = mCodec.getInputBuffers();
                mOutputBuffers = mCodec.getOutputBuffers();
            }
        }
1182 
createCodec(int id, final String name, final MediaFormat format, final long timeout, boolean async, final boolean useNdk)1183         public void createCodec(int id, final String name, final MediaFormat format,
1184                 final long timeout, boolean async, final boolean useNdk)  throws Exception {
1185             mId = id;
1186             mAsync = async;
1187             if (mAsync) {
1188                 requestStart(); // start looper thread
1189             }
1190             runCallable( new Callable<Void>() {
1191                 @Override
1192                 public Void call() throws Exception {
1193                     createCodecInternal(name, format, timeout, useNdk);
1194                     return null;
1195                 }
1196             } );
1197         }
1198 
feedInputInternal(final byte[] encFrame, final boolean inputEOS)1199         private void feedInputInternal(final byte[] encFrame, final boolean inputEOS) {
1200             mConsumedInput = false;
1201             // Feed input
1202             mInputBufIndex = mCodec.dequeueInputBuffer(mTimeout);
1203 
1204             if (mInputBufIndex >= 0) {
1205                 ByteBuffer inputBuffer = mCodec.getInputBuffer(mInputBufIndex);
1206                 inputBuffer.clear();
1207                 inputBuffer.put(encFrame);
1208                 inputBuffer.rewind();
1209                 int encFrameLength = encFrame.length;
1210                 int flags = 0;
1211                 if (inputEOS) {
1212                     encFrameLength = 0;
1213                     flags = MediaCodec.BUFFER_FLAG_END_OF_STREAM;
1214                 }
1215                 if (!inputEOS) {
1216                     Log.v(TAG, "Enc" + mId + ". Frame in # " + mInputFrameIndex +
1217                             ". InTime: " + (mInPresentationTimeUs + 500)/1000);
1218                     mInPresentationTimeUs = (mInputFrameIndex * 1000000) / mFrameRate;
1219                     mInputFrameIndex++;
1220                 }
1221 
1222                 mCodec.queueInputBuffer(
1223                         mInputBufIndex,
1224                         0,  // offset
1225                         encFrameLength,  // size
1226                         mInPresentationTimeUs,
1227                         flags);
1228 
1229                 mConsumedInput = true;
1230             } else {
1231                 Log.v(TAG, "In " + mId + " - TRY_AGAIN_LATER");
1232             }
1233             mCallbackReceived = false;
1234         }
1235 
feedInput(final byte[] encFrame, final boolean inputEOS)1236         public boolean feedInput(final byte[] encFrame, final boolean inputEOS) throws Exception {
1237             runCallable( new Callable<Void>() {
1238                 @Override
1239                 public Void call() throws Exception {
1240                     feedInputInternal(encFrame, inputEOS);
1241                     return null;
1242                 }
1243             } );
1244             return mConsumedInput;
1245         }
1246 
        /**
         * Sync-mode output path: dequeues one output buffer (skipping
         * buffers-changed / format-changed notifications) and fills mOutput with
         * a copy of the bitstream and timing data. mOutput.outputGenerated stays
         * false on timeout.
         */
        private void getOutputInternal() {
            mOutput = new MediaEncoderOutput();
            mOutput.inPresentationTimeUs = mInPresentationTimeUs;
            mOutput.outPresentationTimeUs = mOutPresentationTimeUs;
            mOutput.outputGenerated = false;

            // Get output from the encoder
            int result = mCodec.dequeueOutputBuffer(mBufferInfo, mTimeout);
            // Consume status results until real data (or TRY_AGAIN_LATER) appears.
            while (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED ||
                    result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    mOutputBuffers = mCodec.getOutputBuffers();
                } else if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    Log.d(TAG, "Format changed: " + mCodec.getOutputFormatString());
                }
                result = mCodec.dequeueOutputBuffer(mBufferInfo, mTimeout);
            }
            if (result == MediaCodec.INFO_TRY_AGAIN_LATER) {
                Log.v(TAG, "Out " + mId + " - TRY_AGAIN_LATER");
            }

            if (result >= 0) {
                int outputBufIndex = result;
                // Copy the bitstream out before releasing the codec buffer.
                mOutput.buffer = new byte[mBufferInfo.size];
                ByteBuffer outputBuffer = mCodec.getOutputBuffer(outputBufIndex);
                outputBuffer.position(mBufferInfo.offset);
                outputBuffer.get(mOutput.buffer, 0, mBufferInfo.size);
                mOutPresentationTimeUs = mBufferInfo.presentationTimeUs;

                String logStr = "Enc" + mId + ". Frame # " + mOutputFrameIndex;
                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                    logStr += " CONFIG. ";
                }
                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0) {
                    logStr += " KEY. ";
                    if (!mInitialSyncFrameReceived) {
                        mInitialSyncFrameReceived = true;
                    }
                }
                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    logStr += " EOS. ";
                }
                logStr += " Size: " + mBufferInfo.size;
                logStr += ". InTime: " + (mInPresentationTimeUs + 500)/1000 +
                        ". OutTime: " + (mOutPresentationTimeUs + 500)/1000;
                Log.v(TAG, logStr);

                // The first non-config output buffer must be a sync (key) frame.
                if (!mInitialSyncFrameReceived
                        && (mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
                    throw new RuntimeException("Non codec_config_frame before first sync.");
                }

                if (mBufferInfo.size > 0) {
                    mOutputFrameIndex++;
                    mOutput.outPresentationTimeUs = mOutPresentationTimeUs;
                }
                mCodec.releaseOutputBuffer(outputBufIndex, false);

                mOutput.flags = mBufferInfo.flags;
                mOutput.outputGenerated = true;
            }
            mCallbackReceived = false;
        }
1310 
getOutput()1311         public MediaEncoderOutput getOutput() throws Exception {
1312             runCallable( new Callable<Void>() {
1313                 @Override
1314                 public Void call() throws Exception {
1315                     getOutputInternal();
1316                     return null;
1317                 }
1318             } );
1319             return mOutput;
1320         }
1321 
forceSyncFrame()1322         public void forceSyncFrame() throws Exception {
1323             final Bundle syncFrame = new Bundle();
1324             syncFrame.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
1325             runCallable( new Callable<Void>() {
1326                 @Override
1327                 public Void call() throws Exception {
1328                     mCodec.setParameters(syncFrame);
1329                     return null;
1330                 }
1331             } );
1332         }
1333 
updateBitrate(int bitrate)1334         public void updateBitrate(int bitrate) throws Exception {
1335             final Bundle bitrateUpdate = new Bundle();
1336             bitrateUpdate.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, bitrate);
1337             runCallable( new Callable<Void>() {
1338                 @Override
1339                 public Void call() throws Exception {
1340                     mCodec.setParameters(bitrateUpdate);
1341                     return null;
1342                 }
1343             } );
1344         }
1345 
1346 
waitForBufferEvent()1347         public void waitForBufferEvent() throws Exception {
1348             Log.v(TAG, "----Enc" + mId + " waiting for bufferEvent");
1349             if (mAsync) {
1350                 synchronized (mCallbackEvent) {
1351                     if (!mCallbackReceived) {
1352                         mCallbackEvent.wait(1000); // wait 1 sec for a callback
1353                         // throw an exception if callback was not received
1354                         if (!mCallbackReceived) {
1355                             throw new RuntimeException("MediaCodec callback was not received");
1356                         }
1357                     }
1358                 }
1359             } else {
1360                 Thread.sleep(5);
1361             }
1362             Log.v(TAG, "----Waiting for bufferEvent done");
1363         }
1364 
1365 
waitForCompletion(long timeoutMs)1366         public void waitForCompletion(long timeoutMs) throws Exception {
1367             synchronized (mCompletionEvent) {
1368                 long timeoutExpiredMs = System.currentTimeMillis() + timeoutMs;
1369 
1370                 while (!mCompleted) {
1371                     mCompletionEvent.wait(timeoutExpiredMs - System.currentTimeMillis());
1372                     if (System.currentTimeMillis() >= timeoutExpiredMs) {
1373                         throw new RuntimeException("encoding has timed out!");
1374                     }
1375                 }
1376             }
1377         }
1378 
        /**
         * Marks the encoding session as finished and wakes up any thread
         * blocked in {@link #waitForCompletion(long)}.
         */
        public void signalCompletion() {
            synchronized (mCompletionEvent) {
                // Set the flag inside the lock so the waiter's predicate
                // re-check observes it after wait() returns.
                mCompleted = true;
                mCompletionEvent.notify();
            }
        }
1385 
deleteCodec()1386         public void deleteCodec() throws Exception {
1387             runCallable( new Callable<Void>() {
1388                 @Override
1389                 public Void call() throws Exception {
1390                     mCodec.stop();
1391                     mCodec.release();
1392                     return null;
1393                 }
1394             } );
1395             if (mAsync) {
1396                 requestStop(); // Stop looper thread
1397             }
1398         }
1399     }
1400 
1401     /**
1402      * @see #encode(EncoderOutputStreamParameters, ArrayList<ByteBuffer>)
1403      */
encode( EncoderOutputStreamParameters streamParams)1404     protected VideoEncodeOutput encode(
1405             EncoderOutputStreamParameters streamParams) throws Exception {
1406         return encode(streamParams, new ArrayList<ByteBuffer>());
1407     }
1408 
    /**
     * Video encoding loop supporting encoding single streams with an option
     * to run in a looper thread and use buffer ready notification callbacks.
     *
     * Output stream is described by encodingParams parameters.
     *
     * MediaCodec will raise an IllegalStateException
     * whenever video encoder fails to encode a frame.
     *
     * Color format of input file should be YUV420, and frameWidth,
     * frameHeight should be supplied correctly as raw input file doesn't
     * include any header data.
     *
     * @param streamParams  Structure with encoder parameters
     * @param codecConfigs  List to be filled with codec config buffers
     * @return              Returns VideoEncodeOutput, which consists of
     *                      array of encoded frames information for each frame and Encoding
     *                      Statistics Information.
     */
    protected VideoEncodeOutput encode(
            EncoderOutputStreamParameters streamParams,
            ArrayList<ByteBuffer> codecConfigs) throws Exception {

        ArrayList<MediaCodec.BufferInfo> bufferInfos = new ArrayList<MediaCodec.BufferInfo>();
        EncodingStatisticsInfo encStatInfo = new EncodingStatisticsInfo();
        Log.d(TAG, "Source resolution: "+streamParams.frameWidth + " x " +
                streamParams.frameHeight);
        int bitrate = streamParams.bitrateSet[0];

        // Create minimal media format signifying desired output.
        MediaFormat format = MediaFormat.createVideoFormat(
                streamParams.codecMimeType, streamParams.frameWidth,
                streamParams.frameHeight);
        format.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
        CodecProperties properties = getEncoderProperties(streamParams.codecName, format);

        // Open input/output
        InputStream yuvStream = OpenFileOrResource(
                streamParams.inputYuvFilename, streamParams.inputResource);
        IvfWriter ivf = new IvfWriter(
                streamParams.outputIvfFilename, streamParams.codecMimeType,
                streamParams.frameWidth, streamParams.frameHeight);

        // Create a media format signifying desired output.
        if (streamParams.bitrateType == VIDEO_ControlRateConstant) {
            format.setInteger("bitrate-mode", VIDEO_ControlRateConstant); // set CBR
        }
        if (streamParams.temporalLayers > 0) {
            format.setInteger("ts-layers", streamParams.temporalLayers); // 1 temporal layer
        }
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, streamParams.frameRate);
        // Convert the requested sync-frame interval (in frames) to whole
        // seconds, rounding to nearest, as KEY_I_FRAME_INTERVAL is in seconds.
        int syncFrameInterval = (streamParams.syncFrameInterval + streamParams.frameRate/2) /
                streamParams.frameRate;
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, syncFrameInterval);
        if (streamParams.encodingStatisticsLevel !=
                MediaFormat.VIDEO_ENCODING_STATISTICS_LEVEL_NONE) {
            format.setInteger(MediaFormat.KEY_VIDEO_ENCODING_STATISTICS_LEVEL,
                    streamParams.encodingStatisticsLevel);
        }

        // Create encoder
        Log.d(TAG, "Creating encoder " + properties.codecName +
                ". Color format: 0x" + Integer.toHexString(properties.colorFormat)+ " : " +
                streamParams.frameWidth + " x " + streamParams.frameHeight +
                ". Bitrate: " + bitrate + " Bitrate type: " + streamParams.bitrateType +
                ". Fps:" + streamParams.frameRate + ". TS Layers: " + streamParams.temporalLayers +
                ". Key frame:" + syncFrameInterval * streamParams.frameRate +
                ". Force keyFrame: " + streamParams.syncForceFrameInterval);
        Log.d(TAG, "  Format: " + format);
        Log.d(TAG, "  Output ivf:" + streamParams.outputIvfFilename);
        MediaEncoderAsync codec = new MediaEncoderAsync();
        codec.createCodec(0, properties.codecName, format,
                streamParams.timeoutDequeue, streamParams.runInLooperThread, streamParams.useNdk);

        // encode loop
        boolean sawInputEOS = false;  // no more data
        boolean consumedInputEOS = false; // EOS flag is consumed by encoder
        boolean sawOutputEOS = false;
        boolean inputConsumed = true;
        int inputFrameIndex = 0;
        int lastBitrate = bitrate;
        int srcFrameSize = streamParams.frameWidth * streamParams.frameHeight * 3 / 2; // YUV420
        byte[] srcFrame = new byte[srcFrameSize];

        while (!sawOutputEOS) {

            // Read and feed input frame
            if (!consumedInputEOS) {

                // Read new input buffers - if previous input was consumed and no EOS
                if (inputConsumed && !sawInputEOS) {
                    int bytesRead = yuvStream.read(srcFrame);

                    // Check EOS
                    if (streamParams.frameCount > 0 && inputFrameIndex >= streamParams.frameCount) {
                        sawInputEOS = true;
                        Log.d(TAG, "---Sending EOS empty frame for frame # " + inputFrameIndex);
                    }

                    if (!sawInputEOS && bytesRead == -1) {
                        if (streamParams.frameCount == 0) {
                            // No explicit frame count: raw file exhaustion is EOS.
                            sawInputEOS = true;
                            Log.d(TAG, "---Sending EOS empty frame for frame # " + inputFrameIndex);
                        } else {
                            // Frame count not yet reached: loop the input file.
                            yuvStream.close();
                            yuvStream = OpenFileOrResource(
                                    streamParams.inputYuvFilename, streamParams.inputResource);
                            bytesRead = yuvStream.read(srcFrame);
                        }
                    }

                    // Force sync frame if syncForceFrameinterval is set.
                    if (!sawInputEOS && inputFrameIndex > 0 &&
                            streamParams.syncForceFrameInterval > 0 &&
                            (inputFrameIndex % streamParams.syncForceFrameInterval) == 0) {
                        Log.d(TAG, "---Requesting sync frame # " + inputFrameIndex);
                        codec.forceSyncFrame();
                    }

                    // Dynamic bitrate change: bitrateSet[i] is the target
                    // bitrate starting at frame i.
                    if (!sawInputEOS && streamParams.bitrateSet.length > inputFrameIndex) {
                        int newBitrate = streamParams.bitrateSet[inputFrameIndex];
                        if (newBitrate != lastBitrate) {
                            Log.d(TAG, "--- Requesting new bitrate " + newBitrate +
                                    " for frame " + inputFrameIndex);
                            codec.updateBitrate(newBitrate);
                            lastBitrate = newBitrate;
                        }
                    }

                    // Convert YUV420 to NV12 if necessary
                    if (properties.colorFormat != CodecCapabilities.COLOR_FormatYUV420Planar) {
                        srcFrame = YUV420ToNV(streamParams.frameWidth, streamParams.frameHeight,
                                srcFrame);
                    }
                }

                inputConsumed = codec.feedInput(srcFrame, sawInputEOS);
                if (inputConsumed) {
                    inputFrameIndex++;
                    consumedInputEOS = sawInputEOS;
                }
            }

            // Get output from the encoder
            MediaEncoderOutput out = codec.getOutput();
            if (out.outputGenerated) {
                // Detect output EOS
                if ((out.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    Log.d(TAG, "----Output EOS ");
                    sawOutputEOS = true;
                }
                if ((out.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                    // CSD buffers go to the caller's list, not the IVF file.
                    Log.d(TAG, "Storing codec config separately");
                    ByteBuffer csdBuffer = ByteBuffer.allocate(out.buffer.length).put(out.buffer);
                    csdBuffer.rewind();
                    codecConfigs.add(csdBuffer);
                    out.buffer = new byte[0];
                }

                if (out.buffer.length > 0) {
                    // Save frame
                    ivf.writeFrame(out.buffer, out.outPresentationTimeUs);

                    // Update statistics - store presentation time delay in offset
                    long presentationTimeUsDelta = out.inPresentationTimeUs -
                            out.outPresentationTimeUs;
                    MediaCodec.BufferInfo bufferInfoCopy = new MediaCodec.BufferInfo();
                    bufferInfoCopy.set((int)presentationTimeUsDelta, out.buffer.length,
                            out.outPresentationTimeUs, out.flags);
                    bufferInfos.add(bufferInfoCopy);
                }
            }

            // If codec is not ready to accept input/output - wait for buffer ready callback
            if ((!inputConsumed || consumedInputEOS) && !out.outputGenerated) {
                codec.waitForBufferEvent();
            }
        }

        codec.deleteCodec();
        ivf.close();
        yuvStream.close();

        return new VideoEncodeOutput(bufferInfos, encStatInfo);
    }
1596 
    /**
     * Video encoding run in a looper thread and use buffer ready callbacks.
     *
     * Output stream is described by encodingParams parameters.
     *
     * MediaCodec will raise an IllegalStateException
     * whenever video encoder fails to encode a frame.
     *
     * Color format of input file should be YUV420, and frameWidth,
     * frameHeight should be supplied correctly as raw input file doesn't
     * include any header data.
     *
     * @param streamParams  Structure with encoder parameters; must have
     *                      {@code runInLooperThread} set to true
     * @param codecConfigs  List to be filled with codec config buffers
     * @return              Returns VideoEncodeOutput, which consists of
     *                      array of encoded frames information for each frame and Encoding
     *                      Statistics Information.
     * @throws RuntimeException if streamParams.runInLooperThread is false or
     *                          encoding does not complete within the timeout
     */
    protected VideoEncodeOutput encodeAsync(
            EncoderOutputStreamParameters streamParams,
            ArrayList<ByteBuffer> codecConfigs) throws Exception {
        if (!streamParams.runInLooperThread) {
            throw new RuntimeException("encodeAsync should run with a looper thread!");
        }

        ArrayList<MediaCodec.BufferInfo> bufferInfos = new ArrayList<MediaCodec.BufferInfo>();
        EncodingStatisticsInfo encStatInfo = new EncodingStatisticsInfo();
        Log.d(TAG, "Source resolution: "+streamParams.frameWidth + " x " +
                streamParams.frameHeight);
        int bitrate = streamParams.bitrateSet[0];

        // Create minimal media format signifying desired output.
        MediaFormat format = MediaFormat.createVideoFormat(
                streamParams.codecMimeType, streamParams.frameWidth,
                streamParams.frameHeight);
        format.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
        CodecProperties properties = getEncoderProperties(streamParams.codecName, format);

        // Open input/output
        IvfWriter ivf = new IvfWriter(
                streamParams.outputIvfFilename, streamParams.codecMimeType,
                streamParams.frameWidth, streamParams.frameHeight);

        // Create a media format signifying desired output.
        if (streamParams.bitrateType == VIDEO_ControlRateConstant) {
            format.setInteger("bitrate-mode", VIDEO_ControlRateConstant); // set CBR
        }
        if (streamParams.temporalLayers > 0) {
            format.setInteger("ts-layers", streamParams.temporalLayers); // 1 temporal layer
        }
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, streamParams.frameRate);
        // Convert the requested sync-frame interval (in frames) to whole
        // seconds, rounding to nearest, as KEY_I_FRAME_INTERVAL is in seconds.
        int syncFrameInterval = (streamParams.syncFrameInterval + streamParams.frameRate/2) /
                streamParams.frameRate;
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, syncFrameInterval);
        if (streamParams.encodingStatisticsLevel !=
                MediaFormat.VIDEO_ENCODING_STATISTICS_LEVEL_NONE) {
            // NOTE(review): this hardcodes LEVEL_1 while the synchronous
            // encode() passes streamParams.encodingStatisticsLevel here —
            // confirm the asymmetry is intentional.
            format.setInteger(MediaFormat.KEY_VIDEO_ENCODING_STATISTICS_LEVEL,
                    MediaFormat.VIDEO_ENCODING_STATISTICS_LEVEL_1);
        }
        // Create encoder
        Log.d(TAG, "Creating encoder " + properties.codecName +
                ". Color format: 0x" + Integer.toHexString(properties.colorFormat)+ " : " +
                streamParams.frameWidth + " x " + streamParams.frameHeight +
                ". Bitrate: " + bitrate + " Bitrate type: " + streamParams.bitrateType +
                ". Fps:" + streamParams.frameRate + ". TS Layers: " + streamParams.temporalLayers +
                ". Key frame:" + syncFrameInterval * streamParams.frameRate +
                ". Force keyFrame: " + streamParams.syncForceFrameInterval);
        Log.d(TAG, "  Format: " + format);
        Log.d(TAG, "  Output ivf:" + streamParams.outputIvfFilename);

        MediaEncoderAsync codec = new MediaEncoderAsync();
        // The helper drives input/output entirely from codec callbacks; this
        // thread only blocks until the helper signals completion.
        MediaEncoderAsyncHelper helper = new MediaEncoderAsyncHelper(
                streamParams, properties, bufferInfos, ivf, codecConfigs, encStatInfo);

        codec.setAsyncHelper(helper);
        codec.createCodec(0, properties.codecName, format,
                streamParams.timeoutDequeue, streamParams.runInLooperThread, streamParams.useNdk);
        codec.waitForCompletion(DEFAULT_ENCODE_TIMEOUT_MS);

        codec.deleteCodec();
        ivf.close();

        return new VideoEncodeOutput(bufferInfos, encStatInfo);
    }
1682 
    /**
     * Video encoding loop supporting encoding multiple streams at a time.
     * Each output stream is described by encodingParams parameters allowing
     * simultaneous encoding of various resolutions, bitrates with an option to
     * control key frame and dynamic bitrate for each output stream independently.
     *
     * MediaCodec will raise an IllegalStateException
     * whenever video encoder fails to encode a frame.
     *
     * Color format of input file should be YUV420, and frameWidth,
     * frameHeight should be supplied correctly as raw input file doesn't
     * include any header data.
     *
     * @param srcFrameWidth     Frame width of input yuv file
     * @param srcFrameHeight    Frame height of input yuv file
     * @param encodingParams    Encoder parameters
     * @param codecConfigs      List to be filled with codec config buffers
     * @return                  Returns 2D array of encoded frames information for each stream and
     *                          for each frame, or null if no suitable encoder was found.
     */
    protected ArrayList<ArrayList<MediaCodec.BufferInfo>> encodeSimulcast(
            int srcFrameWidth,
            int srcFrameHeight,
            ArrayList<EncoderOutputStreamParameters> encodingParams,
            ArrayList<ArrayList<ByteBuffer>> codecConfigs) throws Exception {
        int numEncoders = encodingParams.size();

        // Create arrays of input/output, formats, bitrates etc
        ArrayList<ArrayList<MediaCodec.BufferInfo>> bufferInfos =
                new ArrayList<ArrayList<MediaCodec.BufferInfo>>(numEncoders);
        InputStream yuvStream[] = new InputStream[numEncoders];
        IvfWriter[] ivf = new IvfWriter[numEncoders];
        // NOTE(review): yuvScaled entries are never assigned in this method,
        // so the close() calls in the cleanup loop are effectively no-ops.
        FileOutputStream[] yuvScaled = new FileOutputStream[numEncoders];
        MediaFormat[] format = new MediaFormat[numEncoders];
        MediaEncoderAsync[] codec = new MediaEncoderAsync[numEncoders];
        int[] inputFrameIndex = new int[numEncoders];
        boolean[] sawInputEOS = new boolean[numEncoders];
        boolean[] consumedInputEOS = new boolean[numEncoders];
        boolean[] inputConsumed = new boolean[numEncoders];
        boolean[] bufferConsumed = new boolean[numEncoders];
        boolean[] sawOutputEOS = new boolean[numEncoders];
        byte[][] srcFrame = new byte[numEncoders][];
        boolean sawOutputEOSTotal = false;
        boolean bufferConsumedTotal = false;
        CodecProperties[] codecProperties = new CodecProperties[numEncoders];

        // Per-stream setup; numEncoders is recounted as streams are set up.
        numEncoders = 0;
        for (EncoderOutputStreamParameters params : encodingParams) {
            int i = numEncoders;
            Log.d(TAG, "Source resolution: " + params.frameWidth + " x " +
                    params.frameHeight);
            int bitrate = params.bitrateSet[0];

            // Create minimal media format signifying desired output.
            format[i] = MediaFormat.createVideoFormat(
                    params.codecMimeType, params.frameWidth,
                    params.frameHeight);
            format[i].setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
            CodecProperties properties = getEncoderProperties(params.codecName, format[i]);

            // Check if scaled image was created
            int scale = params.frameWidth / srcFrameWidth;
            if (!mScaledImages.contains(scale)) {
                // resize image
                cacheScaledImage(params.inputYuvFilename, params.inputResource,
                        srcFrameWidth, srcFrameHeight,
                        params.scaledYuvFilename, params.frameWidth, params.frameHeight);
                mScaledImages.add(scale);
            }

            // Create buffer info storage
            bufferInfos.add(new ArrayList<MediaCodec.BufferInfo>());

            // Create YUV reader
            yuvStream[i] = new FileInputStream(params.scaledYuvFilename);

            // Create IVF writer
            ivf[i] = new IvfWriter(
                    params.outputIvfFilename, params.codecMimeType,
                    params.frameWidth, params.frameHeight);

            // Frame buffer sized for one YUV420 frame.
            int frameSize = params.frameWidth * params.frameHeight * 3 / 2;
            srcFrame[i] = new byte[frameSize];

            // Create a media format signifying desired output.
            if (params.bitrateType == VIDEO_ControlRateConstant) {
                format[i].setInteger("bitrate-mode", VIDEO_ControlRateConstant); // set CBR
            }
            if (params.temporalLayers > 0) {
                format[i].setInteger("ts-layers", params.temporalLayers); // 1 temporal layer
            }
            format[i].setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
            format[i].setInteger(MediaFormat.KEY_FRAME_RATE, params.frameRate);
            int syncFrameInterval = (params.syncFrameInterval + params.frameRate/2) /
                    params.frameRate; // in sec
            format[i].setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, syncFrameInterval);
            // Create encoder
            Log.d(TAG, "Creating encoder #" + i +" : " + properties.codecName +
                    ". Color format: 0x" + Integer.toHexString(properties.colorFormat)+ " : " +
                    params.frameWidth + " x " + params.frameHeight +
                    ". Bitrate: " + bitrate + " Bitrate type: " + params.bitrateType +
                    ". Fps:" + params.frameRate + ". TS Layers: " + params.temporalLayers +
                    ". Key frame:" + syncFrameInterval * params.frameRate +
                    ". Force keyFrame: " + params.syncForceFrameInterval);
            Log.d(TAG, "  Format: " + format[i]);
            Log.d(TAG, "  Output ivf:" + params.outputIvfFilename);

            // Create encoder
            codec[i] = new MediaEncoderAsync();
            codec[i].createCodec(i, properties.codecName, format[i],
                    params.timeoutDequeue, params.runInLooperThread, params.useNdk);
            codecProperties[i] = new CodecProperties(properties.codecName, properties.colorFormat);

            inputConsumed[i] = true;
            ++numEncoders;
        }
        if (numEncoders == 0) {
            Log.i(TAG, "no suitable encoders found for any of the streams");
            return null;
        }

        // Main loop: round-robin feed input to and drain output from every
        // encoder until all have produced output EOS.
        while (!sawOutputEOSTotal) {
            // Feed input buffer to all encoders
            for (int i = 0; i < numEncoders; i++) {
                bufferConsumed[i] = false;
                if (consumedInputEOS[i]) {
                    continue;
                }

                EncoderOutputStreamParameters params = encodingParams.get(i);
                // Read new input buffers - if previous input was consumed and no EOS
                if (inputConsumed[i] && !sawInputEOS[i]) {
                    int bytesRead = yuvStream[i].read(srcFrame[i]);

                    // Check EOS
                    if (params.frameCount > 0 && inputFrameIndex[i] >= params.frameCount) {
                        sawInputEOS[i] = true;
                        Log.d(TAG, "---Enc" + i +
                                ". Sending EOS empty frame for frame # " + inputFrameIndex[i]);
                    }

                    if (!sawInputEOS[i] && bytesRead == -1) {
                        if (params.frameCount == 0) {
                            // No explicit frame count: file exhaustion is EOS.
                            sawInputEOS[i] = true;
                            Log.d(TAG, "---Enc" + i +
                                    ". Sending EOS empty frame for frame # " + inputFrameIndex[i]);
                        } else {
                            // Frame count not yet reached: loop the input file.
                            yuvStream[i].close();
                            yuvStream[i] = new FileInputStream(params.scaledYuvFilename);
                            bytesRead = yuvStream[i].read(srcFrame[i]);
                        }
                    }

                    // Convert YUV420 to NV12 if necessary
                    if (codecProperties[i].colorFormat !=
                            CodecCapabilities.COLOR_FormatYUV420Planar) {
                        srcFrame[i] =
                            YUV420ToNV(params.frameWidth, params.frameHeight, srcFrame[i]);
                    }
                }

                inputConsumed[i] = codec[i].feedInput(srcFrame[i], sawInputEOS[i]);
                if (inputConsumed[i]) {
                    inputFrameIndex[i]++;
                    consumedInputEOS[i] = sawInputEOS[i];
                    bufferConsumed[i] = true;
                }

            }

            // Get output from all encoders
            for (int i = 0; i < numEncoders; i++) {
                if (sawOutputEOS[i]) {
                    continue;
                }

                MediaEncoderOutput out = codec[i].getOutput();
                if (out.outputGenerated) {
                    bufferConsumed[i] = true;
                    // Detect output EOS
                    if ((out.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        Log.d(TAG, "----Enc" + i + ". Output EOS ");
                        sawOutputEOS[i] = true;
                    }
                    if ((out.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                        // CSD buffers go to the caller's list, not the IVF file.
                        Log.d(TAG, "----Enc" + i + ". Storing codec config separately");
                        ByteBuffer csdBuffer = ByteBuffer.allocate(out.buffer.length).put(out.buffer);
                        csdBuffer.rewind();
                        codecConfigs.get(i).add(csdBuffer);
                        out.buffer = new byte[0];
                    }

                    if (out.buffer.length > 0) {
                        // Save frame
                        ivf[i].writeFrame(out.buffer, out.outPresentationTimeUs);

                        // Update statistics - store presentation time delay in offset
                        long presentationTimeUsDelta = out.inPresentationTimeUs -
                                out.outPresentationTimeUs;
                        MediaCodec.BufferInfo bufferInfoCopy = new MediaCodec.BufferInfo();
                        bufferInfoCopy.set((int)presentationTimeUsDelta, out.buffer.length,
                                out.outPresentationTimeUs, out.flags);
                        bufferInfos.get(i).add(bufferInfoCopy);
                    }
                }
            }

            // If codec is not ready to accept input/output - wait for buffer ready callback
            bufferConsumedTotal = false;
            for (boolean bufferConsumedCurrent : bufferConsumed) {
                bufferConsumedTotal |= bufferConsumedCurrent;
            }
            if (!bufferConsumedTotal) {
                // Pick the encoder to wait for
                for (int i = 0; i < numEncoders; i++) {
                    if (!bufferConsumed[i] && !sawOutputEOS[i]) {
                        codec[i].waitForBufferEvent();
                        break;
                    }
                }
            }

            // Check if EOS happened for all encoders
            sawOutputEOSTotal = true;
            for (boolean sawOutputEOSStream : sawOutputEOS) {
                sawOutputEOSTotal &= sawOutputEOSStream;
            }
        }

        // Cleanup: release codecs and close all streams.
        for (int i = 0; i < numEncoders; i++) {
            codec[i].deleteCodec();
            ivf[i].close();
            yuvStream[i].close();
            if (yuvScaled[i] != null) {
                yuvScaled[i].close();
            }
        }

        return bufferInfos;
    }
1924 
1925     /**
1926      * Some encoding statistics.
1927      */
1928     protected class VideoEncodingStatistics {
VideoEncodingStatistics()1929         VideoEncodingStatistics() {
1930             mBitrates = new ArrayList<Integer>();
1931             mFrames = new ArrayList<Integer>();
1932             mKeyFrames = new ArrayList<Integer>();
1933             mMinimumKeyFrameInterval = Integer.MAX_VALUE;
1934         }
1935 
1936         public ArrayList<Integer> mBitrates;// Bitrate values for each second of the encoded stream.
1937         public ArrayList<Integer> mFrames; // Number of frames in each second of the encoded stream.
1938         public int mAverageBitrate;         // Average stream bitrate.
1939         public ArrayList<Integer> mKeyFrames;// Stores the position of key frames in a stream.
1940         public int mAverageKeyFrameInterval; // Average key frame interval.
1941         public int mMaximumKeyFrameInterval; // Maximum key frame interval.
1942         public int mMinimumKeyFrameInterval; // Minimum key frame interval.
1943     }
1944 
1945     /**
1946      * Calculates average bitrate and key frame interval for the encoded streams.
1947      * Output mBitrates field will contain bitrate values for every second
1948      * of the encoded stream.
1949      * Average stream bitrate will be stored in mAverageBitrate field.
1950      * mKeyFrames array will contain the position of key frames in the encoded stream and
1951      * mKeyFrameInterval - average key frame interval.
1952      */
computeEncodingStatistics(int encoderId, ArrayList<MediaCodec.BufferInfo> bufferInfos )1953     protected VideoEncodingStatistics computeEncodingStatistics(int encoderId,
1954             ArrayList<MediaCodec.BufferInfo> bufferInfos ) {
1955         VideoEncodingStatistics statistics = new VideoEncodingStatistics();
1956 
1957         int totalSize = 0;
1958         int frames = 0;
1959         int framesPerSecond = 0;
1960         int totalFrameSizePerSecond = 0;
1961         int maxFrameSize = 0;
1962         int currentSecond;
1963         int nextSecond = 0;
1964         String keyFrameList = "  IFrame List: ";
1965         String bitrateList = "  Bitrate list: ";
1966         String framesList = "  FPS list: ";
1967 
1968 
1969         for (int j = 0; j < bufferInfos.size(); j++) {
1970             MediaCodec.BufferInfo info = bufferInfos.get(j);
1971             currentSecond = (int)(info.presentationTimeUs / 1000000);
1972             boolean lastFrame = (j == bufferInfos.size() - 1);
1973             if (!lastFrame) {
1974                 nextSecond = (int)(bufferInfos.get(j+1).presentationTimeUs / 1000000);
1975             }
1976 
1977             totalSize += info.size;
1978             totalFrameSizePerSecond += info.size;
1979             maxFrameSize = Math.max(maxFrameSize, info.size);
1980             framesPerSecond++;
1981             frames++;
1982 
1983             // Update the bitrate statistics if the next frame will
1984             // be for the next second
1985             if (lastFrame || nextSecond > currentSecond) {
1986                 int currentBitrate = totalFrameSizePerSecond * 8;
1987                 bitrateList += (currentBitrate + " ");
1988                 framesList += (framesPerSecond + " ");
1989                 statistics.mBitrates.add(currentBitrate);
1990                 statistics.mFrames.add(framesPerSecond);
1991                 totalFrameSizePerSecond = 0;
1992                 framesPerSecond = 0;
1993             }
1994 
1995             // Update key frame statistics.
1996             if ((info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0) {
1997                 statistics.mKeyFrames.add(j);
1998                 keyFrameList += (j + "  ");
1999             }
2000         }
2001         int duration = (int)(bufferInfos.get(bufferInfos.size() - 1).presentationTimeUs / 1000);
2002         duration = (duration + 500) / 1000;
2003         statistics.mAverageBitrate = (int)(((long)totalSize * 8) / duration);
2004         Log.d(TAG, "Statistics for encoder # " + encoderId);
2005         // Calculate average key frame interval in frames.
2006         int keyFrames = statistics.mKeyFrames.size();
2007         if (keyFrames > 1) {
2008             statistics.mAverageKeyFrameInterval =
2009                     statistics.mKeyFrames.get(keyFrames - 1) - statistics.mKeyFrames.get(0);
2010             statistics.mAverageKeyFrameInterval =
2011                     Math.round((float)statistics.mAverageKeyFrameInterval / (keyFrames - 1));
2012             for (int j = 1; j < keyFrames; j++) {
2013                 int keyFrameInterval =
2014                         statistics.mKeyFrames.get(j) - statistics.mKeyFrames.get(j - 1);
2015                 statistics.mMaximumKeyFrameInterval =
2016                         Math.max(statistics.mMaximumKeyFrameInterval, keyFrameInterval);
2017                 statistics.mMinimumKeyFrameInterval =
2018                         Math.min(statistics.mMinimumKeyFrameInterval, keyFrameInterval);
2019             }
2020             Log.d(TAG, "  Key frame intervals: Max: " + statistics.mMaximumKeyFrameInterval +
2021                     ". Min: " + statistics.mMinimumKeyFrameInterval +
2022                     ". Avg: " + statistics.mAverageKeyFrameInterval);
2023         }
2024         Log.d(TAG, "  Frames: " + frames + ". Duration: " + duration +
2025                 ". Total size: " + totalSize + ". Key frames: " + keyFrames);
2026         Log.d(TAG, keyFrameList);
2027         Log.d(TAG, bitrateList);
2028         Log.d(TAG, framesList);
2029         Log.d(TAG, "  Bitrate average: " + statistics.mAverageBitrate);
2030         Log.d(TAG, "  Maximum frame size: " + maxFrameSize);
2031 
2032         return statistics;
2033     }
2034 
computeEncodingStatistics( ArrayList<MediaCodec.BufferInfo> bufferInfos )2035     protected VideoEncodingStatistics computeEncodingStatistics(
2036             ArrayList<MediaCodec.BufferInfo> bufferInfos ) {
2037         return computeEncodingStatistics(0, bufferInfos);
2038     }
2039 
computeSimulcastEncodingStatistics( ArrayList<ArrayList<MediaCodec.BufferInfo>> bufferInfos)2040     protected ArrayList<VideoEncodingStatistics> computeSimulcastEncodingStatistics(
2041             ArrayList<ArrayList<MediaCodec.BufferInfo>> bufferInfos) {
2042         int numCodecs = bufferInfos.size();
2043         ArrayList<VideoEncodingStatistics> statistics = new ArrayList<VideoEncodingStatistics>();
2044 
2045         for (int i = 0; i < numCodecs; i++) {
2046             VideoEncodingStatistics currentStatistics =
2047                     computeEncodingStatistics(i, bufferInfos.get(i));
2048             statistics.add(currentStatistics);
2049         }
2050         return statistics;
2051     }
2052 
2053     /**
2054      * Calculates maximum latency for encoder/decoder based on buffer info array
2055      * generated either by encoder or decoder.
2056      */
maxPresentationTimeDifference(ArrayList<MediaCodec.BufferInfo> bufferInfos)2057     protected int maxPresentationTimeDifference(ArrayList<MediaCodec.BufferInfo> bufferInfos) {
2058         int maxValue = 0;
2059         for (MediaCodec.BufferInfo bufferInfo : bufferInfos) {
2060             maxValue = Math.max(maxValue,  bufferInfo.offset);
2061         }
2062         maxValue = (maxValue + 500) / 1000; // mcs -> ms
2063         return maxValue;
2064     }
2065 
2066     /**
2067      * Decoding PSNR statistics.
2068      */
2069     protected class VideoDecodingStatistics {
VideoDecodingStatistics()2070         VideoDecodingStatistics() {
2071             mMinimumPSNR = Integer.MAX_VALUE;
2072         }
2073         public double mAveragePSNR;
2074         public double mMinimumPSNR;
2075     }
2076 
2077     /**
2078      * Calculates PSNR value between two video frames.
2079      */
computePSNR(byte[] data0, byte[] data1)2080     private double computePSNR(byte[] data0, byte[] data1) {
2081         long squareError = 0;
2082         assertTrue(data0.length == data1.length);
2083         int length = data0.length;
2084         for (int i = 0 ; i < length; i++) {
2085             int diff = ((int)data0[i] & 0xff) - ((int)data1[i] & 0xff);
2086             squareError += diff * diff;
2087         }
2088         double meanSquareError = (double)squareError / length;
2089         double psnr = 10 * Math.log10((double)255 * 255 / meanSquareError);
2090         return psnr;
2091     }
2092 
2093     /**
2094      * Calculates average and minimum PSNR values between
2095      * set of reference and decoded video frames.
2096      * Runs PSNR calculation for the full duration of the decoded data.
2097      */
computeDecodingStatistics( String referenceYuvFilename, String referenceYuvRaw, String decodedYuvFilename, int width, int height)2098     protected VideoDecodingStatistics computeDecodingStatistics(
2099             String referenceYuvFilename,
2100             String referenceYuvRaw,
2101             String decodedYuvFilename,
2102             int width,
2103             int height) throws Exception {
2104         VideoDecodingStatistics statistics = new VideoDecodingStatistics();
2105         InputStream referenceStream =
2106                 OpenFileOrResource(referenceYuvFilename, referenceYuvRaw);
2107         InputStream decodedStream = new FileInputStream(decodedYuvFilename);
2108 
2109         int ySize = width * height;
2110         int uvSize = width * height / 4;
2111         byte[] yRef = new byte[ySize];
2112         byte[] yDec = new byte[ySize];
2113         byte[] uvRef = new byte[uvSize];
2114         byte[] uvDec = new byte[uvSize];
2115 
2116         int frames = 0;
2117         double averageYPSNR = 0;
2118         double averageUPSNR = 0;
2119         double averageVPSNR = 0;
2120         double minimumYPSNR = Integer.MAX_VALUE;
2121         double minimumUPSNR = Integer.MAX_VALUE;
2122         double minimumVPSNR = Integer.MAX_VALUE;
2123         int minimumPSNRFrameIndex = 0;
2124 
2125         while (true) {
2126             // Calculate Y PSNR.
2127             int bytesReadRef = referenceStream.read(yRef);
2128             int bytesReadDec = decodedStream.read(yDec);
2129             if (bytesReadDec == -1) {
2130                 break;
2131             }
2132             if (bytesReadRef == -1) {
2133                 // Reference file wrapping up
2134                 referenceStream.close();
2135                 referenceStream =
2136                         OpenFileOrResource(referenceYuvFilename, referenceYuvRaw);
2137                 bytesReadRef = referenceStream.read(yRef);
2138             }
2139             double curYPSNR = computePSNR(yRef, yDec);
2140             averageYPSNR += curYPSNR;
2141             minimumYPSNR = Math.min(minimumYPSNR, curYPSNR);
2142             double curMinimumPSNR = curYPSNR;
2143 
2144             // Calculate U PSNR.
2145             bytesReadRef = referenceStream.read(uvRef);
2146             bytesReadDec = decodedStream.read(uvDec);
2147             double curUPSNR = computePSNR(uvRef, uvDec);
2148             averageUPSNR += curUPSNR;
2149             minimumUPSNR = Math.min(minimumUPSNR, curUPSNR);
2150             curMinimumPSNR = Math.min(curMinimumPSNR, curUPSNR);
2151 
2152             // Calculate V PSNR.
2153             bytesReadRef = referenceStream.read(uvRef);
2154             bytesReadDec = decodedStream.read(uvDec);
2155             double curVPSNR = computePSNR(uvRef, uvDec);
2156             averageVPSNR += curVPSNR;
2157             minimumVPSNR = Math.min(minimumVPSNR, curVPSNR);
2158             curMinimumPSNR = Math.min(curMinimumPSNR, curVPSNR);
2159 
2160             // Frame index for minimum PSNR value - help to detect possible distortions
2161             if (curMinimumPSNR < statistics.mMinimumPSNR) {
2162                 statistics.mMinimumPSNR = curMinimumPSNR;
2163                 minimumPSNRFrameIndex = frames;
2164             }
2165 
2166             String logStr = String.format(Locale.US, "PSNR #%d: Y: %.2f. U: %.2f. V: %.2f",
2167                     frames, curYPSNR, curUPSNR, curVPSNR);
2168             Log.v(TAG, logStr);
2169 
2170             frames++;
2171         }
2172 
2173         averageYPSNR /= frames;
2174         averageUPSNR /= frames;
2175         averageVPSNR /= frames;
2176         statistics.mAveragePSNR = (4 * averageYPSNR + averageUPSNR + averageVPSNR) / 6;
2177 
2178         Log.d(TAG, "PSNR statistics for " + frames + " frames.");
2179         String logStr = String.format(Locale.US,
2180                 "Average PSNR: Y: %.1f. U: %.1f. V: %.1f. Average: %.1f",
2181                 averageYPSNR, averageUPSNR, averageVPSNR, statistics.mAveragePSNR);
2182         Log.d(TAG, logStr);
2183         logStr = String.format(Locale.US,
2184                 "Minimum PSNR: Y: %.1f. U: %.1f. V: %.1f. Overall: %.1f at frame %d",
2185                 minimumYPSNR, minimumUPSNR, minimumVPSNR,
2186                 statistics.mMinimumPSNR, minimumPSNRFrameIndex);
2187         Log.d(TAG, logStr);
2188 
2189         referenceStream.close();
2190         decodedStream.close();
2191         return statistics;
2192     }
2193 }
2194