1 /* 2 * Copyright 2014 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 package android.hardware.camera2.cts; 18 19 import static android.hardware.camera2.CameraCharacteristics.*; 20 import static android.hardware.camera2.cts.CameraTestUtils.*; 21 22 import android.graphics.Point; 23 import android.graphics.PointF; 24 import android.graphics.Rect; 25 import android.graphics.SurfaceTexture; 26 import android.hardware.camera2.CameraCharacteristics; 27 import android.hardware.camera2.CameraDevice; 28 import android.hardware.camera2.CameraMetadata; 29 import android.hardware.camera2.CaptureRequest; 30 import android.hardware.camera2.CaptureResult; 31 import android.hardware.camera2.TotalCaptureResult; 32 import android.hardware.camera2.cts.CameraTestUtils.SimpleCaptureCallback; 33 import android.hardware.camera2.cts.helpers.StaticMetadata; 34 import android.hardware.camera2.cts.testcases.Camera2SurfaceViewTestCase; 35 import android.hardware.camera2.params.BlackLevelPattern; 36 import android.hardware.camera2.params.Capability; 37 import android.hardware.camera2.params.ColorSpaceTransform; 38 import android.hardware.camera2.params.Face; 39 import android.hardware.camera2.params.LensShadingMap; 40 import android.hardware.camera2.params.MeteringRectangle; 41 import android.hardware.camera2.params.RggbChannelVector; 42 import android.hardware.camera2.params.TonemapCurve; 43 import android.hardware.cts.helpers.CameraUtils; 44 import android.media.Image; 45 import android.os.Build; 46 import android.os.Parcel; 47 import android.platform.test.annotations.AppModeFull; 48 import android.platform.test.annotations.RequiresFlagsEnabled; 49 import android.platform.test.flag.junit.CheckFlagsRule; 50 import android.platform.test.flag.junit.DeviceFlagsValueProvider; 51 import android.text.TextUtils; 52 import android.util.ArraySet; 53 import android.util.Log; 54 import android.util.Pair; 55 import android.util.Range; 56 import android.util.Rational; 57 import android.util.Size; 58 import android.view.Surface; 59 60 import com.android.compatibility.common.util.PropertyUtil; 61 import com.android.internal.camera.flags.Flags; 62 63 import org.junit.Rule; 64 import org.junit.Test; 65 import org.junit.runner.RunWith; 66 import org.junit.runners.Parameterized; 67 68 import java.nio.ByteBuffer; 69 import java.util.ArrayList; 70 import java.util.Arrays; 71 import java.util.List; 72 73 /** 74 * <p> 75 * Basic test for camera CaptureRequest key controls. 76 * </p> 77 * <p> 78 * Several test categories are covered: manual sensor control, 3A control, 79 * manual ISP control and other per-frame control and synchronization. 
 * </p>
 */

@RunWith(Parameterized.class)
public class CaptureRequestTest extends Camera2SurfaceViewTestCase {
    private static final String TAG = "CaptureRequestTest";
    private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
    private static final int NUM_FRAMES_VERIFIED = 15;
    private static final int NUM_FACE_DETECTION_FRAMES_VERIFIED = 60;
    /** 30ms exposure time must be supported by full capability devices. */
    private static final long DEFAULT_EXP_TIME_NS = 30000000L; // 30ms
    private static final int DEFAULT_SENSITIVITY = 100;
    private static final int RGGB_COLOR_CHANNEL_COUNT = 4;
    private static final int MAX_SHADING_MAP_SIZE = 64 * 64 * RGGB_COLOR_CHANNEL_COUNT;
    private static final int MIN_SHADING_MAP_SIZE = 1 * 1 * RGGB_COLOR_CHANNEL_COUNT;
    private static final long IGNORE_REQUESTED_EXPOSURE_TIME_CHECK = -1L;
    private static final long EXPOSURE_TIME_BOUNDARY_50HZ_NS = 10000000L; // 10ms
    private static final long EXPOSURE_TIME_BOUNDARY_60HZ_NS = 8333333L; // 8.3ms, Approximation.
    private static final long EXPOSURE_TIME_ERROR_MARGIN_NS = 100000L; // 100us, Approximation.
    private static final float EXPOSURE_TIME_ERROR_MARGIN_RATE = 0.03f; // 3%, Approximation.
    private static final float SENSITIVITY_ERROR_MARGIN_RATE = 0.06f; // 6%, Approximation.
    private static final int DEFAULT_NUM_EXPOSURE_TIME_STEPS = 3;
    private static final int DEFAULT_NUM_SENSITIVITY_STEPS = 8;
    private static final int DEFAULT_SENSITIVITY_STEP_SIZE = 100;
    private static final int NUM_RESULTS_WAIT_TIMEOUT = 100;
    private static final int NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY = 8;
    private static final int NUM_FRAMES_WAITED_FOR_TORCH = 100;
    private static final int NUM_PARTIAL_FRAMES_PFC = 2;
    private static final int NUM_PARTIAL_FRAMES_NPFC = 6;

    private static final int NUM_TEST_FOCUS_DISTANCES = 10;
    private static final int NUM_FOCUS_DISTANCES_REPEAT = 3;
    // 5 percent error margin for calibrated device
    private static final float FOCUS_DISTANCE_ERROR_PERCENT_CALIBRATED = 0.05f;
    // 25 percent error margin for uncalibrated device
    private static final float FOCUS_DISTANCE_ERROR_PERCENT_UNCALIBRATED = 0.25f;
    // 10 percent error margin for approximate device
    private static final float FOCUS_DISTANCE_ERROR_PERCENT_APPROXIMATE = 0.10f;
    // 1 percent boundary margin for focus range verify
    private static final float FOCUS_RANGE_BOUNDARY_MARGIN_PERCENT = 0.01f;
    private static final int ANTI_FLICKERING_50HZ = 1;
    private static final int ANTI_FLICKERING_60HZ = 2;
    // 5 percent error margin for resulting crop regions
    private static final float CROP_REGION_ERROR_PERCENT_DELTA = 0.05f;
    private static final float ZOOM_RATIO_ERROR_PERCENT_DELTA = 0.05f;

    // 1 percent error margin for centering the crop region
    private static final float CROP_REGION_ERROR_PERCENT_CENTERED = 0.01f;
    private static final float DYNAMIC_VS_FIXED_BLK_WH_LVL_ERROR_MARGIN = 0.25f;
    private static final float DYNAMIC_VS_OPTICAL_BLK_LVL_ERROR_MARGIN = 0.2f;

    // Linear tone mapping curve example.
    private static final float[] TONEMAP_CURVE_LINEAR = {0, 0, 1.0f, 1.0f};
    // Standard sRGB tone mapping, per IEC 61966-2-1:1999, with 16 control points.
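    // These (input, output) pairs sample the sRGB opto-electronic transfer function:
    //   out = 1.055 * in^(1/2.4) - 0.055   for in > 0.0031308
    //   out = 12.92 * in                   otherwise
    // e.g. in = 0.0667 gives 1.055 * 0.0667^(1/2.4) - 0.055 ~= 0.2864, matching the table below.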
    private static final float[] TONEMAP_CURVE_SRGB = {
            0.0000f, 0.0000f, 0.0667f, 0.2864f, 0.1333f, 0.4007f, 0.2000f, 0.4845f,
            0.2667f, 0.5532f, 0.3333f, 0.6125f, 0.4000f, 0.6652f, 0.4667f, 0.7130f,
            0.5333f, 0.7569f, 0.6000f, 0.7977f, 0.6667f, 0.8360f, 0.7333f, 0.8721f,
            0.8000f, 0.9063f, 0.8667f, 0.9389f, 0.9333f, 0.9701f, 1.0000f, 1.0000f
    };
    private final Rational ZERO_R = new Rational(0, 1);
    private final Rational ONE_R = new Rational(1, 1);

    private static final int ZOOM_STEPS = 15;

    private enum TorchSeqState {
        RAMPING_UP,
        FIRED,
        RAMPING_DOWN
    }

    @Rule
    public final CheckFlagsRule mCheckFlagsRule =
            DeviceFlagsValueProvider.createCheckFlagsRule();

    @Override
    public void setUp() throws Exception {
        super.setUp();
    }

    @Override
    public void tearDown() throws Exception {
        super.tearDown();
    }

    /**
     * Test CaptureRequest settings parcelling.
     */
    @Test
    public void testSettingsBinderParcel() throws Exception {
        SurfaceTexture outputTexture = new SurfaceTexture(/* random texture ID */ 5);
        Surface surface = new Surface(outputTexture);
        String[] cameraIdsUnderTest = getCameraIdsUnderTest();
        for (int i = 0; i < cameraIdsUnderTest.length; i++) {
            try {
                openDevice(cameraIdsUnderTest[i]);
                CaptureRequest.Builder requestBuilder =
                        mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
                requestBuilder.addTarget(surface);

                // Check regular/default case
                CaptureRequest captureRequestOriginal = requestBuilder.build();
                Parcel p;
                p = Parcel.obtain();
                captureRequestOriginal.writeToParcel(p, 0);
                p.setDataPosition(0);
                CaptureRequest captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p);
                assertEquals("Parcelled camera settings should match",
                        captureRequestParcelled.get(CaptureRequest.CONTROL_CAPTURE_INTENT),
                        new Integer(CameraMetadata.CONTROL_CAPTURE_INTENT_PREVIEW));
                p.recycle();

                // Check capture request with additional physical camera settings
                String physicalId;
                if (TextUtils.isDigitsOnly(cameraIdsUnderTest[i])) {
                    physicalId = new String(
                            Integer.toString(Integer.valueOf(cameraIdsUnderTest[i]) + 1));
                } else {
                    physicalId = new String(Integer.toString(i + 1));
                }

                ArraySet<String> physicalIds = new ArraySet<String>();
                physicalIds.add(physicalId);

                requestBuilder = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW,
                        physicalIds);
                requestBuilder.addTarget(surface);
                captureRequestOriginal = requestBuilder.build();
                p = Parcel.obtain();
                captureRequestOriginal.writeToParcel(p, 0);
                p.setDataPosition(0);
                captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p);
                assertEquals("Parcelled camera settings should match",
                        captureRequestParcelled.get(CaptureRequest.CONTROL_CAPTURE_INTENT),
                        new Integer(CameraMetadata.CONTROL_CAPTURE_INTENT_PREVIEW));
                p.recycle();

                // Check consistency between parcel write and read by stacking 2
                // CaptureRequest objects when writing and reading.
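                // Writing the same request twice into one Parcel and reading it back twice
                // verifies that each createFromParcel() consumes exactly the bytes its
                // corresponding writeToParcel() produced, i.e. no over- or under-read.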
                p = Parcel.obtain();
                captureRequestOriginal.writeToParcel(p, 0);
                captureRequestOriginal.writeToParcel(p, 0);
                p.setDataPosition(0);
                captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p);
                captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p);
                p.recycle();

                // Check various invalid cases
                p = Parcel.obtain();
                p.writeInt(-1);
                p.setDataPosition(0);
                try {
                    captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p);
                    fail("should get RuntimeException due to invalid number of settings");
                } catch (RuntimeException e) {
                    // Expected
                }
                p.recycle();

                p = Parcel.obtain();
                p.writeInt(0);
                p.setDataPosition(0);
                try {
                    captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p);
                    fail("should get RuntimeException due to invalid number of settings");
                } catch (RuntimeException e) {
                    // Expected
                }
                p.recycle();

                p = Parcel.obtain();
                p.writeInt(1);
                p.setDataPosition(0);
                try {
                    captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p);
                    fail("should get RuntimeException due to absent settings");
                } catch (RuntimeException e) {
                    // Expected
                }
                p.recycle();
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Test black level lock when the exposure value changes.
     * <p>
     * When {@link CaptureRequest#BLACK_LEVEL_LOCK} is true in a request, the
     * camera device should lock the black level. Changes to certain capture
     * parameters (such as exposure time) may require the camera device to reset
     * its black level compensation. However, the black level must remain locked
     * after exposure value changes (as long as requests keep the lock ON).
     * </p>
     */
    @Test
    public void testBlackLevelLock() throws Exception {
        String[] cameraIdsUnderTest = getCameraIdsUnderTest();
        for (int i = 0; i < cameraIdsUnderTest.length; i++) {
            try {
                if (!mAllStaticInfo.get(cameraIdsUnderTest[i]).isCapabilitySupported(
                        CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
                    continue;
                }

                openDevice(cameraIdsUnderTest[i]);
                SimpleCaptureCallback listener = new SimpleCaptureCallback();
                CaptureRequest.Builder requestBuilder =
                        mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);

                // Start with default manual exposure time, with black level being locked.
                requestBuilder.set(CaptureRequest.BLACK_LEVEL_LOCK, true);
                changeExposure(requestBuilder, DEFAULT_EXP_TIME_NS, DEFAULT_SENSITIVITY);

                Size previewSz =
                        getMaxPreviewSize(mCamera.getId(), mCameraManager,
                        getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND));

                startPreview(requestBuilder, previewSz, listener);
                waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
                // No lock OFF state is allowed as the exposure is not changed.
                verifyBlackLevelLockResults(listener, NUM_FRAMES_VERIFIED, /*maxLockOffCnt*/0);

                // Double the exposure time and gain, with black level still being locked.
                changeExposure(requestBuilder, DEFAULT_EXP_TIME_NS * 2, DEFAULT_SENSITIVITY * 2);
                listener = new SimpleCaptureCallback();
                startPreview(requestBuilder, previewSz, listener);
                waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
                // Allow at most one lock OFF state as the exposure is changed once.
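                // (A single result with BLACK_LEVEL_LOCK reported as false may legitimately
                // appear here, since the exposure change above can force a one-time black
                // level recalibration; verifyBlackLevelLockResults presumably counts such
                // frames and fails if more than maxLockOffCnt are seen.)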
                verifyBlackLevelLockResults(listener, NUM_FRAMES_VERIFIED, /*maxLockOffCnt*/1);

                stopPreview();
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Test dynamic black/white levels if they are supported.
     *
     * <p>
     * If the dynamic black and white levels are reported, verify that:
     * 1. The dynamic black and white levels don't deviate too much from the global
     * (static) values across different sensitivities.
     * 2. If RAW_SENSOR output and optical black regions are supported, capture RAW images
     * and calculate the optical black level values. The reported dynamic black level should
     * be close enough to the measured optical black level values.
     * </p>
     */
    @Test
    public void testDynamicBlackWhiteLevel() throws Exception {
        for (String id : getCameraIdsUnderTest()) {
            try {
                if (!mAllStaticInfo.get(id).isDynamicBlackLevelSupported()) {
                    continue;
                }
                openDevice(id);
                dynamicBlackWhiteLevelTestByCamera();
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Basic lens shading map request test.
     * <p>
     * When {@link CaptureRequest#SHADING_MODE} is set to OFF, no lens shading correction will
     * be applied by the camera device, and identity lens shading map data
     * will be provided if {@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE} is ON.
     * </p>
     * <p>
     * When {@link CaptureRequest#SHADING_MODE} is set to other modes, lens shading correction
     * will be applied by the camera device. The lens shading map data can be
     * requested by setting {@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE} to ON.
     * </p>
     */
    @Test
    public void testLensShadingMap() throws Exception {
        String[] cameraIdsUnderTest = getCameraIdsUnderTest();
        for (int i = 0; i < cameraIdsUnderTest.length; i++) {
            try {
                StaticMetadata staticInfo = mAllStaticInfo.get(cameraIdsUnderTest[i]);
                if (!staticInfo.isManualLensShadingMapSupported()) {
                    Log.i(TAG, "Camera " + cameraIdsUnderTest[i] +
                            " doesn't support lens shading controls, skipping test");
                    continue;
                }

                List<Integer> lensShadingMapModes = Arrays.asList(CameraTestUtils.toObject(
                        staticInfo.getAvailableLensShadingMapModesChecked()));

                if (!lensShadingMapModes.contains(STATISTICS_LENS_SHADING_MAP_MODE_ON)) {
                    continue;
                }

                openDevice(cameraIdsUnderTest[i]);
                SimpleCaptureCallback listener = new SimpleCaptureCallback();
                CaptureRequest.Builder requestBuilder =
                        mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
                requestBuilder.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE,
                        STATISTICS_LENS_SHADING_MAP_MODE_ON);

                Size previewSz =
                        getMaxPreviewSize(mCamera.getId(), mCameraManager,
                        getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND));
                List<Integer> lensShadingModes = Arrays.asList(CameraTestUtils.toObject(
                        mStaticInfo.getAvailableLensShadingModesChecked()));

                // Shading map mode OFF, lensShadingMapMode ON, camera device
                // should output unity maps.
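                // "Unity map" here means a LensShadingMap whose RGGB gain factors are all
                // 1.0f. A sketch of the kind of check verifyShadingMap presumably performs
                // (the helper is not shown in this excerpt):
                //   LensShadingMap map =
                //           result.get(CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP);
                //   for each (channel, column, row): map.getGainFactor(channel, column, row)
                //       is expected to equal 1.0f when SHADING_MODE is OFF.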
                if (lensShadingModes.contains(SHADING_MODE_OFF)) {
                    requestBuilder.set(CaptureRequest.SHADING_MODE, SHADING_MODE_OFF);
                    listener = new SimpleCaptureCallback();
                    startPreview(requestBuilder, previewSz, listener);
                    waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
                    verifyShadingMap(listener, NUM_FRAMES_VERIFIED, SHADING_MODE_OFF);
                }

                // Shading map mode FAST, lensShadingMapMode ON, camera device
                // should output valid maps.
                if (lensShadingModes.contains(SHADING_MODE_FAST)) {
                    requestBuilder.set(CaptureRequest.SHADING_MODE, SHADING_MODE_FAST);

                    listener = new SimpleCaptureCallback();
                    startPreview(requestBuilder, previewSz, listener);
                    waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
                    verifyShadingMap(listener, NUM_FRAMES_VERIFIED, SHADING_MODE_FAST);
                }

                // Shading map mode HIGH_QUALITY, lensShadingMapMode ON, camera device
                // should output valid maps.
                if (lensShadingModes.contains(SHADING_MODE_HIGH_QUALITY)) {
                    requestBuilder.set(CaptureRequest.SHADING_MODE, SHADING_MODE_HIGH_QUALITY);

                    listener = new SimpleCaptureCallback();
                    startPreview(requestBuilder, previewSz, listener);
                    waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
                    verifyShadingMap(listener, NUM_FRAMES_VERIFIED, SHADING_MODE_HIGH_QUALITY);
                }

                stopPreview();
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Test {@link CaptureRequest#CONTROL_AE_ANTIBANDING_MODE} control.
     * <p>
     * Test all available anti-banding modes and check that the exposure time adjustment is
     * correct.
     * </p>
     */
    @Test
    public void testAntiBandingModes() throws Exception {
        String[] cameraIdsUnderTest = getCameraIdsUnderTest();
        for (int i = 0; i < cameraIdsUnderTest.length; i++) {
            try {
                // Without manual sensor control, exposure time cannot be verified
                if (!mAllStaticInfo.get(cameraIdsUnderTest[i]).isCapabilitySupported(
                        CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
                    continue;
                }

                openDevice(cameraIdsUnderTest[i]);
                int[] modes = mStaticInfo.getAeAvailableAntiBandingModesChecked();

                Size previewSz =
                        getMaxPreviewSize(mCamera.getId(), mCameraManager,
                        getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND));

                for (int mode : modes) {
                    antiBandingTestByMode(previewSz, mode);
                }
            } finally {
                closeDevice();
            }
        }

    }

    /**
     * Test AE and AE priority modes with AE lock.
     *
     * <p>
     * When AE is locked, the exposure parameters shouldn't change.
     * For AE modes, each mode should satisfy the per-frame controls defined in the
     * API specifications.
     * </p>
     */
    @Test(timeout=60*60*1000) // timeout = 60 mins for long running tests
    public void testAeModeAndLock() throws Exception {
        String[] cameraIdsUnderTest = getCameraIdsUnderTest();
        for (int i = 0; i < cameraIdsUnderTest.length; i++) {
            try {
                if (!mAllStaticInfo.get(cameraIdsUnderTest[i]).isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + cameraIdsUnderTest[i] +
                            " does not support color outputs, skipping");
                    continue;
                }

                openDevice(cameraIdsUnderTest[i]);
                Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size.

                // Update preview surface with given size for all sub-tests.
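                // (Each sub-test below exercises one AE mode via aeModeAndLockTestByMode,
                // which is not shown in this excerpt. Conceptually, once CONTROL_AE_LOCK is
                // set to true, consecutive results should keep reporting the same
                // CaptureResult.SENSOR_EXPOSURE_TIME and CaptureResult.SENSOR_SENSITIVITY
                // values while the lock stays ON.)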
491 updatePreviewSurface(maxPreviewSz); 492 493 // Test aeMode and lock 494 int[] aeModes = mStaticInfo.getAeAvailableModesChecked(); 495 for (int aeMode : aeModes) { 496 // Test ae mode with lock without priority mode enabled 497 aeModeAndLockTestByMode(aeMode, CameraMetadata.CONTROL_AE_PRIORITY_MODE_OFF); 498 499 if (Flags.aePriority()) { 500 int[] aePriorityModes = mStaticInfo.getAeAvailablePriorityModesChecked(); 501 502 // LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY not supported with AE priority mode 503 if (aeMode == 504 CameraMetadata 505 .CONTROL_AE_MODE_ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY || 506 aeMode == CameraMetadata.CONTROL_AE_PRIORITY_MODE_OFF) { 507 continue; 508 } 509 for (int aePriorityMode : aePriorityModes) { 510 // Test ae mode with lock and priority mode enabled 511 aeModeAndLockTestByMode(aeMode, aePriorityMode); 512 } 513 } 514 } 515 516 } finally { 517 closeDevice(); 518 } 519 } 520 } 521 522 /** Test {@link CaptureRequest#FLASH_MODE} control. 523 * <p> 524 * For each {@link CaptureRequest#FLASH_MODE} mode, test the flash control 525 * and {@link CaptureResult#FLASH_STATE} result. 526 * </p> 527 */ 528 @Test testFlashControl()529 public void testFlashControl() throws Exception { 530 String[] cameraIdsUnderTest = getCameraIdsUnderTest(); 531 for (int i = 0; i < cameraIdsUnderTest.length; i++) { 532 try { 533 if (!mAllStaticInfo.get(cameraIdsUnderTest[i]).isColorOutputSupported()) { 534 Log.i(TAG, "Camera " + cameraIdsUnderTest[i] + 535 " does not support color outputs, skipping"); 536 continue; 537 } 538 539 openDevice(cameraIdsUnderTest[i]); 540 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 541 CaptureRequest.Builder requestBuilder = 542 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 543 544 Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size. 545 546 startPreview(requestBuilder, maxPreviewSz, listener); 547 548 // Flash control can only be used when the AE mode is ON or OFF. 549 flashTestByAeMode(listener, CaptureRequest.CONTROL_AE_MODE_ON); 550 551 // LEGACY won't support AE mode OFF 552 boolean aeOffModeSupported = false; 553 for (int aeMode : mStaticInfo.getAeAvailableModesChecked()) { 554 if (aeMode == CaptureRequest.CONTROL_AE_MODE_OFF) { 555 aeOffModeSupported = true; 556 } 557 } 558 if (aeOffModeSupported) { 559 flashTestByAeMode(listener, CaptureRequest.CONTROL_AE_MODE_OFF); 560 } 561 562 stopPreview(); 563 } finally { 564 closeDevice(); 565 } 566 } 567 } 568 569 /** 570 * Test that the flash can be successfully turned off given various initial and final 571 * AE_CONTROL modes for repeating CaptureRequests. 572 */ 573 @Test testFlashTurnOff()574 public void testFlashTurnOff() throws Exception { 575 String[] cameraIdsUnderTest = getCameraIdsUnderTest(); 576 for (int i = 0; i < cameraIdsUnderTest.length; i++) { 577 try { 578 if (!mAllStaticInfo.get(cameraIdsUnderTest[i]).isColorOutputSupported()) { 579 Log.i(TAG, "Camera " + cameraIdsUnderTest[i] + 580 " does not support color outputs, skipping"); 581 continue; 582 } 583 if (!mAllStaticInfo.get(cameraIdsUnderTest[i]).hasFlash()) { 584 Log.i(TAG, "Camera " + cameraIdsUnderTest[i] + 585 " does not support flash, skipping"); 586 continue; 587 } 588 openDevice(cameraIdsUnderTest[i]); 589 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 590 CaptureRequest.Builder requestBuilder = 591 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 592 593 Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size. 
594 595 startPreview(requestBuilder, maxPreviewSz, listener); 596 boolean isLegacy = CameraUtils.isLegacyHAL(mCameraManager, cameraIdsUnderTest[i]); 597 flashTurnOffTest(listener, isLegacy, 598 /* initiaAeControl */CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH, 599 /* offAeControl */CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH); 600 601 flashTurnOffTest(listener, isLegacy, 602 /* initiaAeControl */CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH, 603 /* offAeControl */CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH); 604 605 flashTurnOffTest(listener, isLegacy, 606 /* initiaAeControl */CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH, 607 /* offAeControl */CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE); 608 609 stopPreview(); 610 } finally { 611 closeDevice(); 612 } 613 } 614 615 } 616 617 /** 618 * Test face detection modes and results. 619 */ 620 @Test testFaceDetection()621 public void testFaceDetection() throws Exception { 622 String[] cameraIdsUnderTest = getCameraIdsUnderTest(); 623 for (int i = 0; i < cameraIdsUnderTest.length; i++) { 624 try { 625 if (!mAllStaticInfo.get(cameraIdsUnderTest[i]).isColorOutputSupported()) { 626 Log.i(TAG, "Camera " + cameraIdsUnderTest[i] + 627 " does not support color outputs, skipping"); 628 continue; 629 } 630 openDevice(cameraIdsUnderTest[i]); 631 faceDetectionTestByCamera(); 632 } finally { 633 closeDevice(); 634 } 635 } 636 } 637 638 /** 639 * Test tone map modes and controls. 640 */ 641 @Test testToneMapControl()642 public void testToneMapControl() throws Exception { 643 for (String id : getCameraIdsUnderTest()) { 644 try { 645 if (!mAllStaticInfo.get(id).isManualToneMapSupported()) { 646 Log.i(TAG, "Camera " + id + 647 " doesn't support tone mapping controls, skipping test"); 648 continue; 649 } 650 openDevice(id); 651 toneMapTestByCamera(); 652 } finally { 653 closeDevice(); 654 } 655 } 656 } 657 658 /** 659 * Test CCT color correction mode and color temperature, color tint controls 660 */ 661 @Test 662 @RequiresFlagsEnabled(Flags.FLAG_COLOR_TEMPERATURE) testCctColorCorrectionControl()663 public void testCctColorCorrectionControl() throws Exception { 664 for (String id : getCameraIdsUnderTest()) { 665 try { 666 if (!mAllStaticInfo.get(id).isCctModeSupported()) { 667 Log.i(TAG, "Camera " + id + 668 " doesn't support CCT color correction mode, skipping test"); 669 continue; 670 } 671 openDevice(id); 672 cctColorCorrectionTestByCamera(); 673 } finally { 674 closeDevice(); 675 } 676 } 677 } 678 679 /** 680 * Test color correction modes and controls. 681 */ 682 @Test testColorCorrectionControl()683 public void testColorCorrectionControl() throws Exception { 684 for (String id : getCameraIdsUnderTest()) { 685 try { 686 if (!mAllStaticInfo.get(id).isColorCorrectionSupported()) { 687 Log.i(TAG, "Camera " + id + 688 " doesn't support color correction controls, skipping test"); 689 continue; 690 } 691 openDevice(id); 692 colorCorrectionTestByCamera(); 693 } finally { 694 closeDevice(); 695 } 696 } 697 } 698 699 /** 700 * Test edge mode control for Fps not exceeding 30. 
701 */ 702 @Test testEdgeModeControl()703 public void testEdgeModeControl() throws Exception { 704 for (String id : getCameraIdsUnderTest()) { 705 try { 706 if (!mAllStaticInfo.get(id).isEdgeModeControlSupported()) { 707 Log.i(TAG, "Camera " + id + 708 " doesn't support EDGE_MODE controls, skipping test"); 709 continue; 710 } 711 712 openDevice(id); 713 List<Range<Integer>> fpsRanges = getTargetFpsRangesUpTo30(mStaticInfo); 714 edgeModesTestByCamera(fpsRanges); 715 } finally { 716 closeDevice(); 717 } 718 } 719 } 720 721 /** 722 * Test edge mode control for Fps greater than 30. 723 */ 724 @Test testEdgeModeControlFastFps()725 public void testEdgeModeControlFastFps() throws Exception { 726 for (String id : getCameraIdsUnderTest()) { 727 try { 728 if (!mAllStaticInfo.get(id).isEdgeModeControlSupported()) { 729 Log.i(TAG, "Camera " + id + 730 " doesn't support EDGE_MODE controls, skipping test"); 731 continue; 732 } 733 734 openDevice(id); 735 List<Range<Integer>> fpsRanges = getTargetFpsRangesGreaterThan30(mStaticInfo); 736 edgeModesTestByCamera(fpsRanges); 737 } finally { 738 closeDevice(); 739 } 740 } 741 742 } 743 744 /** 745 * Test focus distance control. 746 */ 747 @Test 748 @AppModeFull(reason = "PropertyUtil methods don't work for instant apps") testFocusDistanceControl()749 public void testFocusDistanceControl() throws Exception { 750 for (String id : getCameraIdsUnderTest()) { 751 try { 752 StaticMetadata staticInfo = mAllStaticInfo.get(id); 753 if (!staticInfo.hasFocuser()) { 754 Log.i(TAG, "Camera " + id + " has no focuser, skipping test"); 755 continue; 756 } 757 758 if (!staticInfo.isCapabilitySupported( 759 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) { 760 Log.i(TAG, "Camera " + id + 761 " does not support MANUAL_SENSOR, skipping test"); 762 continue; 763 } 764 765 openDevice(id); 766 focusDistanceTestByCamera(); 767 } finally { 768 closeDevice(); 769 } 770 } 771 } 772 773 /** 774 * Test noise reduction mode for fps ranges not exceeding 30 775 */ 776 @Test testNoiseReductionModeControl()777 public void testNoiseReductionModeControl() throws Exception { 778 for (String id : getCameraIdsUnderTest()) { 779 try { 780 if (!mAllStaticInfo.get(id).isNoiseReductionModeControlSupported()) { 781 Log.i(TAG, "Camera " + id + 782 " doesn't support noise reduction mode, skipping test"); 783 continue; 784 } 785 786 openDevice(id); 787 List<Range<Integer>> fpsRanges = getTargetFpsRangesUpTo30(mStaticInfo); 788 noiseReductionModeTestByCamera(fpsRanges); 789 } finally { 790 closeDevice(); 791 } 792 } 793 } 794 795 /** 796 * Test noise reduction mode for fps ranges greater than 30 797 */ 798 @Test testNoiseReductionModeControlFastFps()799 public void testNoiseReductionModeControlFastFps() throws Exception { 800 for (String id : getCameraIdsUnderTest()) { 801 try { 802 if (!mAllStaticInfo.get(id).isNoiseReductionModeControlSupported()) { 803 Log.i(TAG, "Camera " + id + 804 " doesn't support noise reduction mode, skipping test"); 805 continue; 806 } 807 808 openDevice(id); 809 List<Range<Integer>> fpsRanges = getTargetFpsRangesGreaterThan30(mStaticInfo); 810 noiseReductionModeTestByCamera(fpsRanges); 811 } finally { 812 closeDevice(); 813 } 814 } 815 } 816 817 /** 818 * Test AWB lock control. 
819 * 820 * <p>The color correction gain and transform shouldn't be changed when AWB is locked.</p> 821 */ 822 @Test testAwbModeAndLock()823 public void testAwbModeAndLock() throws Exception { 824 for (String id : getCameraIdsUnderTest()) { 825 try { 826 if (!mAllStaticInfo.get(id).isColorOutputSupported()) { 827 Log.i(TAG, "Camera " + id + " does not support color outputs, skipping"); 828 continue; 829 } 830 openDevice(id); 831 awbModeAndLockTestByCamera(); 832 } finally { 833 closeDevice(); 834 } 835 } 836 } 837 838 /** 839 * Test different AF modes. 840 */ 841 @Test testAfModes()842 public void testAfModes() throws Exception { 843 for (String id : getCameraIdsUnderTest()) { 844 try { 845 if (!mAllStaticInfo.get(id).isColorOutputSupported()) { 846 Log.i(TAG, "Camera " + id + " does not support color outputs, skipping"); 847 continue; 848 } 849 openDevice(id); 850 afModeTestByCamera(); 851 } finally { 852 closeDevice(); 853 } 854 } 855 } 856 857 /** 858 * Test video and optical stabilizations. 859 */ 860 @Test testCameraStabilizations()861 public void testCameraStabilizations() throws Exception { 862 for (String id : getCameraIdsUnderTest()) { 863 try { 864 StaticMetadata staticInfo = mAllStaticInfo.get(id); 865 List<Key<?>> keys = staticInfo.getCharacteristics().getKeys(); 866 if (!(keys.contains( 867 CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES) || 868 keys.contains( 869 CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION))) { 870 Log.i(TAG, "Camera " + id + " doesn't support any stabilization modes"); 871 continue; 872 } 873 if (!staticInfo.isColorOutputSupported()) { 874 Log.i(TAG, "Camera " + id + " does not support color outputs, skipping"); 875 continue; 876 } 877 openDevice(id); 878 stabilizationTestByCamera(); 879 } finally { 880 closeDevice(); 881 } 882 } 883 } 884 885 /** 886 * Test digitalZoom (center wise and non-center wise), validate the returned crop regions. 887 * The max preview size is used for each camera. 888 */ 889 @Test testDigitalZoom()890 public void testDigitalZoom() throws Exception { 891 for (String id : getCameraIdsUnderTest()) { 892 try { 893 if (!mAllStaticInfo.get(id).isColorOutputSupported()) { 894 Log.i(TAG, "Camera " + id + " does not support color outputs, skipping"); 895 continue; 896 } 897 openDevice(id); 898 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 899 digitalZoomTestByCamera(maxPreviewSize, /*repeating*/false); 900 digitalZoomTestByCamera(maxPreviewSize, /*repeating*/true); 901 } finally { 902 closeDevice(); 903 } 904 } 905 } 906 907 /** 908 * Test zoom using CONTROL_ZOOM_RATIO, validate the returned crop regions and zoom ratio. 909 * The max preview size is used for each camera. 910 */ 911 @Test testZoomRatio()912 public void testZoomRatio() throws Exception { 913 for (String id : getCameraIdsUnderTest()) { 914 try { 915 if (!mAllStaticInfo.get(id).isColorOutputSupported()) { 916 Log.i(TAG, "Camera " + id + " does not support color outputs, skipping"); 917 continue; 918 } 919 openDevice(id); 920 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 921 zoomRatioTestByCamera(maxPreviewSize, /*useZoomRatioMethod*/false); 922 } finally { 923 closeDevice(); 924 } 925 } 926 } 927 928 /** 929 * Test zoom using CONTROL_ZOOM_RATIO with CONTROL_ZOOM_METHOD set explicitly to ZOOM_RATIO, 930 * validate the returned crop regions and zoom ratio. 931 * 932 * The max preview size is used for each camera. 
933 */ 934 @Test 935 @RequiresFlagsEnabled(Flags.FLAG_ZOOM_METHOD) testZoomRatioWithMethod()936 public void testZoomRatioWithMethod() throws Exception { 937 for (String id : getCameraIdsUnderTest()) { 938 try { 939 if (!mAllStaticInfo.get(id).isColorOutputSupported()) { 940 Log.i(TAG, "Camera " + id + " does not support color outputs, skipping"); 941 continue; 942 } 943 openDevice(id); 944 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 945 zoomRatioTestByCamera(maxPreviewSize, /*useZoomRatioMethod*/true); 946 } finally { 947 closeDevice(); 948 } 949 } 950 } 951 952 /** 953 * Test that zoom doesn't incur non-monotonic timestamp sequence 954 * 955 * Camera API requires that camera timestamps monotonically increase. 956 */ 957 @Test 958 @AppModeFull(reason = "PropertyUtil methods don't work for instant apps") testZoomTimestampIncrease()959 public void testZoomTimestampIncrease() throws Exception { 960 if (PropertyUtil.getFirstApiLevel() <= Build.VERSION_CODES.UPSIDE_DOWN_CAKE) { 961 // Only run test for first API level V or higher 962 return; 963 } 964 965 for (String id : getCameraIdsUnderTest()) { 966 try { 967 if (!mAllStaticInfo.get(id).isColorOutputSupported()) { 968 Log.i(TAG, "Camera " + id + " does not support color outputs, skipping"); 969 continue; 970 } 971 openDevice(id); 972 zoomTimestampIncreaseTestByCamera(); 973 } finally { 974 closeDevice(); 975 } 976 } 977 } 978 979 /** 980 * Test digital zoom and all preview size combinations. 981 * TODO: this and above test should all be moved to preview test class. 982 */ 983 @Test testDigitalZoomPreviewCombinations()984 public void testDigitalZoomPreviewCombinations() throws Exception { 985 for (String id : getCameraIdsUnderTest()) { 986 try { 987 if (!mAllStaticInfo.get(id).isColorOutputSupported()) { 988 Log.i(TAG, "Camera " + id + " does not support color outputs, skipping"); 989 continue; 990 } 991 openDevice(id); 992 digitalZoomPreviewCombinationTestByCamera(); 993 } finally { 994 closeDevice(); 995 } 996 } 997 } 998 999 /** 1000 * Test scene mode controls. 1001 */ 1002 @Test testSceneModes()1003 public void testSceneModes() throws Exception { 1004 for (String id : getCameraIdsUnderTest()) { 1005 try { 1006 if (mAllStaticInfo.get(id).isSceneModeSupported()) { 1007 openDevice(id); 1008 sceneModeTestByCamera(); 1009 } 1010 } finally { 1011 closeDevice(); 1012 } 1013 } 1014 } 1015 1016 /** 1017 * Test effect mode controls. 1018 */ 1019 @Test testEffectModes()1020 public void testEffectModes() throws Exception { 1021 for (String id : getCameraIdsUnderTest()) { 1022 try { 1023 if (!mAllStaticInfo.get(id).isColorOutputSupported()) { 1024 Log.i(TAG, "Camera " + id + " does not support color outputs, skipping"); 1025 continue; 1026 } 1027 openDevice(id); 1028 effectModeTestByCamera(); 1029 } finally { 1030 closeDevice(); 1031 } 1032 } 1033 } 1034 1035 /** 1036 * Test extended scene mode controls. 1037 */ 1038 @Test testExtendedSceneModes()1039 public void testExtendedSceneModes() throws Exception { 1040 for (String id : getCameraIdsUnderTest()) { 1041 try { 1042 if (!mAllStaticInfo.get(id).isColorOutputSupported()) { 1043 Log.i(TAG, "Camera " + id + " does not support color outputs, skipping"); 1044 continue; 1045 } 1046 openDevice(id); 1047 List<Range<Integer>> fpsRanges = getTargetFpsRangesUpTo30(mStaticInfo); 1048 extendedSceneModeTestByCamera(fpsRanges); 1049 } finally { 1050 closeDevice(); 1051 } 1052 } 1053 } 1054 1055 /** 1056 * Test basic auto-framing. 
1057 */ 1058 @Test testAutoframing()1059 public void testAutoframing() throws Exception { 1060 for (String id : getCameraIdsUnderTest()) { 1061 try { 1062 if (!mAllStaticInfo.get(id).isAutoframingSupported()) { 1063 Log.i(TAG, "Camera " + id + " does not support auto-framing, skipping"); 1064 continue; 1065 } 1066 openDevice(id); 1067 autoframingTestByCamera(); 1068 } finally { 1069 closeDevice(); 1070 } 1071 } 1072 } 1073 1074 /** 1075 * Test manual flash strength level control. 1076 */ 1077 @Test testManualFlashStrengthLevelControl()1078 public void testManualFlashStrengthLevelControl() throws Exception { 1079 for (String id : getCameraIdsUnderTest()) { 1080 try { 1081 if (!mAllStaticInfo.get(id).isManualFlashStrengthControlSupported()) { 1082 Log.i(TAG, "Camera " + id + " does not support manual flash " 1083 + "strength control, skipping"); 1084 continue; 1085 } 1086 openDevice(id); 1087 manualFlashStrengthControlTestByCamera(); 1088 } finally { 1089 closeDevice(); 1090 } 1091 } 1092 } 1093 1094 /** 1095 * Test AE mode ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY. 1096 */ 1097 @Test 1098 @RequiresFlagsEnabled(Flags.FLAG_CAMERA_AE_MODE_LOW_LIGHT_BOOST) testAeModeOnLowLightBoostBrightnessPriority()1099 public void testAeModeOnLowLightBoostBrightnessPriority() throws Exception { 1100 for (String id : getCameraIdsUnderTest()) { 1101 try { 1102 StaticMetadata staticInfo = mAllStaticInfo.get(id); 1103 if (!staticInfo.isAeModeLowLightBoostSupported()) { 1104 Log.i(TAG, "Camera " + id + " does not have AE mode " 1105 + "ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY, skipping"); 1106 continue; 1107 } 1108 openDevice(id); 1109 testAeModeOnLowLightBoostBrightnessPriorityTestByCamera(); 1110 } finally { 1111 closeDevice(); 1112 } 1113 } 1114 } 1115 1116 /** 1117 * Test AE priority modes 1118 */ 1119 @Test 1120 @RequiresFlagsEnabled(Flags.FLAG_AE_PRIORITY) testAePriorityModes()1121 public void testAePriorityModes() throws Exception { 1122 for (String id : getCameraIdsUnderTest()) { 1123 try { 1124 StaticMetadata staticInfo = mAllStaticInfo.get(id); 1125 int[] aePriorityModes = staticInfo.getAeAvailablePriorityModesChecked(); 1126 1127 openDevice(id); 1128 for (int aePriorityMode : aePriorityModes) { 1129 if (aePriorityMode == CameraMetadata.CONTROL_AE_PRIORITY_MODE_OFF) { 1130 continue; 1131 } 1132 testAePriorityModesByCamera(aePriorityMode); 1133 } 1134 } finally { 1135 closeDevice(); 1136 } 1137 } 1138 } 1139 1140 /** 1141 * Test settings override controls. 1142 */ 1143 @Test testSettingsOverrides()1144 public void testSettingsOverrides() throws Exception { 1145 for (String id : getCameraIdsUnderTest()) { 1146 try { 1147 StaticMetadata staticInfo = mAllStaticInfo.get(id); 1148 if (!staticInfo.isColorOutputSupported()) { 1149 Log.i(TAG, "Camera " + id + " does not support color outputs, skipping"); 1150 continue; 1151 } 1152 if (!staticInfo.isZoomSettingsOverrideSupported()) { 1153 Log.i(TAG, "Camera " + id + " does not support zoom overrides, skipping"); 1154 continue; 1155 } 1156 openDevice(id); 1157 settingsOverrideTestByCamera(); 1158 } finally { 1159 closeDevice(); 1160 } 1161 } 1162 } 1163 1164 // TODO: add 3A state machine test. 1165 1166 /** 1167 * Per camera dynamic black and white level test. 
1168 */ dynamicBlackWhiteLevelTestByCamera()1169 private void dynamicBlackWhiteLevelTestByCamera() throws Exception { 1170 SimpleCaptureCallback resultListener = new SimpleCaptureCallback(); 1171 SimpleImageReaderListener imageListener = null; 1172 CaptureRequest.Builder previewBuilder = 1173 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 1174 CaptureRequest.Builder rawBuilder = null; 1175 Size previewSize = 1176 getMaxPreviewSize(mCamera.getId(), mCameraManager, 1177 getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND)); 1178 Size rawSize = null; 1179 boolean canCaptureBlackRaw = 1180 mStaticInfo.isCapabilitySupported( 1181 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW) && 1182 mStaticInfo.isOpticalBlackRegionSupported(); 1183 if (canCaptureBlackRaw) { 1184 // Capture Raw16, then calculate the optical black, and use it to check with the dynamic 1185 // black level. 1186 rawBuilder = 1187 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE); 1188 rawSize = mStaticInfo.getRawDimensChecked(); 1189 imageListener = new SimpleImageReaderListener(); 1190 prepareRawCaptureAndStartPreview(previewBuilder, rawBuilder, previewSize, rawSize, 1191 resultListener, imageListener); 1192 } else { 1193 startPreview(previewBuilder, previewSize, resultListener); 1194 } 1195 1196 // Capture a sequence of frames with different sensitivities and validate the black/white 1197 // level values 1198 int[] sensitivities = getSensitivityTestValuesSorted(); 1199 float[][] dynamicBlackLevels = new float[sensitivities.length][]; 1200 int[] dynamicWhiteLevels = new int[sensitivities.length]; 1201 float[][] opticalBlackLevels = new float[sensitivities.length][]; 1202 for (int i = 0; i < sensitivities.length; i++) { 1203 CaptureResult result = null; 1204 if (canCaptureBlackRaw) { 1205 changeExposure(rawBuilder, DEFAULT_EXP_TIME_NS, sensitivities[i]); 1206 CaptureRequest rawRequest = rawBuilder.build(); 1207 mSession.capture(rawRequest, resultListener, mHandler); 1208 result = resultListener.getCaptureResultForRequest(rawRequest, 1209 NUM_RESULTS_WAIT_TIMEOUT); 1210 Image rawImage = imageListener.getImage(CAPTURE_IMAGE_TIMEOUT_MS); 1211 1212 // Get max (area-wise) optical black region 1213 Rect[] opticalBlackRegions = mStaticInfo.getCharacteristics().get( 1214 CameraCharacteristics.SENSOR_OPTICAL_BLACK_REGIONS); 1215 Rect maxRegion = opticalBlackRegions[0]; 1216 for (Rect region : opticalBlackRegions) { 1217 if (region.width() * region.height() > maxRegion.width() * maxRegion.height()) { 1218 maxRegion = region; 1219 } 1220 } 1221 1222 // Get average black pixel values in the region (region is multiple of 2x2) 1223 Image.Plane rawPlane = rawImage.getPlanes()[0]; 1224 ByteBuffer rawBuffer = rawPlane.getBuffer(); 1225 float[] avgBlackLevels = {0, 0, 0, 0}; 1226 final int rowSize = rawPlane.getRowStride(); 1227 final int bytePerPixel = rawPlane.getPixelStride(); 1228 if (VERBOSE) { 1229 Log.v(TAG, "maxRegion: " + maxRegion + ", Row stride: " + 1230 rawPlane.getRowStride()); 1231 } 1232 for (int row = maxRegion.top; row < maxRegion.bottom; row += 2) { 1233 for (int col = maxRegion.left; col < maxRegion.right; col += 2) { 1234 int startOffset = row * rowSize + col * bytePerPixel; 1235 avgBlackLevels[0] += rawBuffer.getShort(startOffset); 1236 avgBlackLevels[1] += rawBuffer.getShort(startOffset + bytePerPixel); 1237 startOffset += rowSize; 1238 avgBlackLevels[2] += rawBuffer.getShort(startOffset); 1239 avgBlackLevels[3] += rawBuffer.getShort(startOffset + bytePerPixel); 1240 } 1241 } 1242 
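                // Each 2x2 step of the loop above adds exactly one sample to each of the
                // four channel sums, so the per-channel sample count is the region area
                // divided by 4; dividing each sum by that count below yields the average
                // black level per Bayer channel.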
int numBlackBlocks = maxRegion.width() * maxRegion.height() / (2 * 2); 1243 for (int m = 0; m < avgBlackLevels.length; m++) { 1244 avgBlackLevels[m] /= numBlackBlocks; 1245 } 1246 opticalBlackLevels[i] = avgBlackLevels; 1247 1248 if (VERBOSE) { 1249 Log.v(TAG, String.format("Optical black level results for sensitivity (%d): %s", 1250 sensitivities[i], Arrays.toString(avgBlackLevels))); 1251 } 1252 1253 rawImage.close(); 1254 } else { 1255 changeExposure(previewBuilder, DEFAULT_EXP_TIME_NS, sensitivities[i]); 1256 CaptureRequest previewRequest = previewBuilder.build(); 1257 mSession.capture(previewRequest, resultListener, mHandler); 1258 result = resultListener.getCaptureResultForRequest(previewRequest, 1259 NUM_RESULTS_WAIT_TIMEOUT); 1260 } 1261 1262 dynamicBlackLevels[i] = getValueNotNull(result, 1263 CaptureResult.SENSOR_DYNAMIC_BLACK_LEVEL); 1264 dynamicWhiteLevels[i] = getValueNotNull(result, 1265 CaptureResult.SENSOR_DYNAMIC_WHITE_LEVEL); 1266 } 1267 1268 if (VERBOSE) { 1269 Log.v(TAG, "Different sensitivities tested: " + Arrays.toString(sensitivities)); 1270 Log.v(TAG, "Dynamic black level results: " + Arrays.deepToString(dynamicBlackLevels)); 1271 Log.v(TAG, "Dynamic white level results: " + Arrays.toString(dynamicWhiteLevels)); 1272 if (canCaptureBlackRaw) { 1273 Log.v(TAG, "Optical black level results " + 1274 Arrays.deepToString(opticalBlackLevels)); 1275 } 1276 } 1277 1278 // check the dynamic black level against global black level. 1279 // Implicit guarantee: if the dynamic black level is supported, fixed black level must be 1280 // supported as well (tested in ExtendedCameraCharacteristicsTest#testOpticalBlackRegions). 1281 BlackLevelPattern blackPattern = mStaticInfo.getCharacteristics().get( 1282 CameraCharacteristics.SENSOR_BLACK_LEVEL_PATTERN); 1283 int[] fixedBlackLevels = new int[4]; 1284 int fixedWhiteLevel = mStaticInfo.getCharacteristics().get( 1285 CameraCharacteristics.SENSOR_INFO_WHITE_LEVEL); 1286 blackPattern.copyTo(fixedBlackLevels, 0); 1287 float maxBlackDeviation = 0; 1288 int maxWhiteDeviation = 0; 1289 for (int i = 0; i < dynamicBlackLevels.length; i++) { 1290 for (int j = 0; j < dynamicBlackLevels[i].length; j++) { 1291 if (maxBlackDeviation < Math.abs(fixedBlackLevels[j] - dynamicBlackLevels[i][j])) { 1292 maxBlackDeviation = Math.abs(fixedBlackLevels[j] - dynamicBlackLevels[i][j]); 1293 } 1294 } 1295 if (maxWhiteDeviation < Math.abs(dynamicWhiteLevels[i] - fixedWhiteLevel)) { 1296 maxWhiteDeviation = Math.abs(dynamicWhiteLevels[i] - fixedWhiteLevel); 1297 } 1298 } 1299 mCollector.expectLessOrEqual("Max deviation of the dynamic black level vs fixed black level" 1300 + " exceed threshold." 1301 + " Dynamic black level results: " + Arrays.deepToString(dynamicBlackLevels), 1302 fixedBlackLevels[0] * DYNAMIC_VS_FIXED_BLK_WH_LVL_ERROR_MARGIN, maxBlackDeviation); 1303 mCollector.expectLessOrEqual("Max deviation of the dynamic white level exceed threshold." 
1304 + " Dynamic white level results: " + Arrays.toString(dynamicWhiteLevels), 1305 fixedWhiteLevel * DYNAMIC_VS_FIXED_BLK_WH_LVL_ERROR_MARGIN, 1306 (float)maxWhiteDeviation); 1307 1308 // Validate against optical black levels if it is available 1309 if (canCaptureBlackRaw) { 1310 maxBlackDeviation = 0; 1311 for (int i = 0; i < dynamicBlackLevels.length; i++) { 1312 for (int j = 0; j < dynamicBlackLevels[i].length; j++) { 1313 if (maxBlackDeviation < 1314 Math.abs(opticalBlackLevels[i][j] - dynamicBlackLevels[i][j])) { 1315 maxBlackDeviation = 1316 Math.abs(opticalBlackLevels[i][j] - dynamicBlackLevels[i][j]); 1317 } 1318 } 1319 } 1320 1321 mCollector.expectLessOrEqual("Max deviation of the dynamic black level vs optical black" 1322 + " exceed threshold." 1323 + " Dynamic black level results: " + Arrays.deepToString(dynamicBlackLevels) 1324 + " Optical black level results: " + Arrays.deepToString(opticalBlackLevels), 1325 fixedBlackLevels[0] * DYNAMIC_VS_OPTICAL_BLK_LVL_ERROR_MARGIN, 1326 maxBlackDeviation); 1327 } 1328 } 1329 noiseReductionModeTestByCamera(List<Range<Integer>> fpsRanges)1330 private void noiseReductionModeTestByCamera(List<Range<Integer>> fpsRanges) throws Exception { 1331 Size maxPrevSize = mOrderedPreviewSizes.get(0); 1332 CaptureRequest.Builder requestBuilder = 1333 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 1334 int[] availableModes = mStaticInfo.getAvailableNoiseReductionModesChecked(); 1335 1336 for (int mode : availableModes) { 1337 requestBuilder.set(CaptureRequest.NOISE_REDUCTION_MODE, mode); 1338 1339 // Test that OFF and FAST mode should not slow down the frame rate. 1340 if (mode == CaptureRequest.NOISE_REDUCTION_MODE_OFF || 1341 mode == CaptureRequest.NOISE_REDUCTION_MODE_FAST) { 1342 verifyFpsNotSlowDown(requestBuilder, NUM_FRAMES_VERIFIED, fpsRanges); 1343 } 1344 1345 SimpleCaptureCallback resultListener = new SimpleCaptureCallback(); 1346 startPreview(requestBuilder, maxPrevSize, resultListener); 1347 mSession.setRepeatingRequest(requestBuilder.build(), resultListener, mHandler); 1348 waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 1349 1350 verifyCaptureResultForKey(CaptureResult.NOISE_REDUCTION_MODE, mode, 1351 resultListener, NUM_FRAMES_VERIFIED); 1352 } 1353 1354 stopPreview(); 1355 } 1356 focusDistanceTestByCamera()1357 private void focusDistanceTestByCamera() throws Exception { 1358 CaptureRequest.Builder requestBuilder = 1359 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 1360 requestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_OFF); 1361 int calibrationStatus = mStaticInfo.getFocusDistanceCalibrationChecked(); 1362 float errorMargin = FOCUS_DISTANCE_ERROR_PERCENT_UNCALIBRATED; 1363 if (calibrationStatus == 1364 CameraMetadata.LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED) { 1365 errorMargin = FOCUS_DISTANCE_ERROR_PERCENT_CALIBRATED; 1366 } else if (calibrationStatus == 1367 CameraMetadata.LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE) { 1368 errorMargin = FOCUS_DISTANCE_ERROR_PERCENT_APPROXIMATE; 1369 } 1370 1371 // Test changing focus distance with repeating request 1372 focusDistanceTestRepeating(requestBuilder, errorMargin); 1373 1374 if (calibrationStatus == 1375 CameraMetadata.LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED) { 1376 // Test changing focus distance with burst request 1377 focusDistanceTestBurst(requestBuilder, errorMargin); 1378 } 1379 } 1380 verifyFocusRange(CaptureResult result, float focusDistance)1381 private void 
verifyFocusRange(CaptureResult result, float focusDistance) { 1382 if (PropertyUtil.getVendorApiLevel() < 33) { 1383 // Skip, as this only applies to UDC and above 1384 if (VERBOSE) { 1385 Log.v(TAG, "Skipping FOCUS_RANGE verification due to API level"); 1386 } 1387 return; 1388 } 1389 1390 Pair<Float, Float> focusRange = result.get(CaptureResult.LENS_FOCUS_RANGE); 1391 if (focusRange != null) { 1392 // Prevent differences in floating point precision between manual request and HAL 1393 // result, some margin need to be considered for focusRange.near and far check 1394 float focusRangeNear = focusRange.first * (1.0f + FOCUS_RANGE_BOUNDARY_MARGIN_PERCENT); 1395 float focusRangeFar = focusRange.second * (1.0f - FOCUS_RANGE_BOUNDARY_MARGIN_PERCENT); 1396 1397 mCollector.expectLessOrEqual("Focus distance should be less than or equal to " 1398 + "FOCUS_RANGE.near (with margin)", focusRangeNear, focusDistance); 1399 mCollector.expectGreaterOrEqual("Focus distance should be greater than or equal to " 1400 + "FOCUS_RANGE.far (with margin)", focusRangeFar, focusDistance); 1401 } else if (VERBOSE) { 1402 Log.v(TAG, "FOCUS_RANGE undefined, skipping verification"); 1403 } 1404 } 1405 focusDistanceTestRepeating(CaptureRequest.Builder requestBuilder, float errorMargin)1406 private void focusDistanceTestRepeating(CaptureRequest.Builder requestBuilder, 1407 float errorMargin) throws Exception { 1408 CaptureRequest request; 1409 float[] testDistances = getFocusDistanceTestValuesInOrder(0, 0); 1410 Size maxPrevSize = mOrderedPreviewSizes.get(0); 1411 SimpleCaptureCallback resultListener = new SimpleCaptureCallback(); 1412 startPreview(requestBuilder, maxPrevSize, resultListener); 1413 1414 float[] resultDistances = new float[testDistances.length]; 1415 int[] resultLensStates = new int[testDistances.length]; 1416 1417 // Collect results 1418 for (int i = 0; i < testDistances.length; i++) { 1419 requestBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, testDistances[i]); 1420 request = requestBuilder.build(); 1421 resultListener = new SimpleCaptureCallback(); 1422 mSession.setRepeatingRequest(request, resultListener, mHandler); 1423 waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 1424 waitForResultValue(resultListener, CaptureResult.LENS_STATE, 1425 CaptureResult.LENS_STATE_STATIONARY, NUM_RESULTS_WAIT_TIMEOUT); 1426 CaptureResult result = resultListener.getCaptureResultForRequest(request, 1427 NUM_RESULTS_WAIT_TIMEOUT); 1428 1429 resultDistances[i] = getValueNotNull(result, CaptureResult.LENS_FOCUS_DISTANCE); 1430 resultLensStates[i] = getValueNotNull(result, CaptureResult.LENS_STATE); 1431 1432 verifyFocusRange(result, resultDistances[i]); 1433 1434 if (VERBOSE) { 1435 Log.v(TAG, "Capture repeating request focus distance: " + testDistances[i] 1436 + " result: " + resultDistances[i] + " lens state " + resultLensStates[i]); 1437 } 1438 } 1439 1440 verifyFocusDistance(testDistances, resultDistances, resultLensStates, 1441 /*ascendingOrder*/true, /*noOvershoot*/false, /*repeatStart*/0, /*repeatEnd*/0, 1442 errorMargin); 1443 1444 if (mStaticInfo.areKeysAvailable(CameraCharacteristics.LENS_INFO_HYPERFOCAL_DISTANCE)) { 1445 1446 // Test hyperfocal distance optionally 1447 float hyperFocalDistance = mStaticInfo.getHyperfocalDistanceChecked(); 1448 if (hyperFocalDistance > 0) { 1449 requestBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, hyperFocalDistance); 1450 request = requestBuilder.build(); 1451 resultListener = new SimpleCaptureCallback(); 1452 mSession.setRepeatingRequest(request, 
resultListener, mHandler); 1453 waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 1454 1455 // Then wait for the lens.state to be stationary. 1456 waitForResultValue(resultListener, CaptureResult.LENS_STATE, 1457 CaptureResult.LENS_STATE_STATIONARY, NUM_RESULTS_WAIT_TIMEOUT); 1458 CaptureResult result = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 1459 Float focusDistance = getValueNotNull(result, CaptureResult.LENS_FOCUS_DISTANCE); 1460 mCollector.expectInRange("Focus distance for hyper focal should be close enough to" + 1461 " requested value", focusDistance, 1462 hyperFocalDistance * (1.0f - errorMargin), 1463 hyperFocalDistance * (1.0f + errorMargin)); 1464 } 1465 } 1466 } 1467 focusDistanceTestBurst(CaptureRequest.Builder requestBuilder, float errorMargin)1468 private void focusDistanceTestBurst(CaptureRequest.Builder requestBuilder, 1469 float errorMargin) throws Exception { 1470 1471 Size maxPrevSize = mOrderedPreviewSizes.get(0); 1472 float[] testDistances = getFocusDistanceTestValuesInOrder(NUM_FOCUS_DISTANCES_REPEAT, 1473 NUM_FOCUS_DISTANCES_REPEAT); 1474 SimpleCaptureCallback resultListener = new SimpleCaptureCallback(); 1475 startPreview(requestBuilder, maxPrevSize, resultListener); 1476 1477 float[] resultDistances = new float[testDistances.length]; 1478 int[] resultLensStates = new int[testDistances.length]; 1479 1480 final int maxPipelineDepth = mStaticInfo.getCharacteristics().get( 1481 CameraCharacteristics.REQUEST_PIPELINE_MAX_DEPTH); 1482 1483 // Move lens to starting position, and wait for the lens.state to be stationary. 1484 CaptureRequest request; 1485 requestBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, testDistances[0]); 1486 request = requestBuilder.build(); 1487 mSession.setRepeatingRequest(request, resultListener, mHandler); 1488 waitForResultValue(resultListener, CaptureResult.LENS_STATE, 1489 CaptureResult.LENS_STATE_STATIONARY, NUM_RESULTS_WAIT_TIMEOUT); 1490 1491 // Submit burst of requests with different focus distances 1492 List<CaptureRequest> burst = new ArrayList<>(); 1493 for (int i = 0; i < testDistances.length; i ++) { 1494 requestBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, testDistances[i]); 1495 burst.add(requestBuilder.build()); 1496 } 1497 mSession.captureBurst(burst, resultListener, mHandler); 1498 1499 for (int i = 0; i < testDistances.length; i++) { 1500 CaptureResult result = resultListener.getCaptureResultForRequest( 1501 burst.get(i), maxPipelineDepth+1); 1502 1503 resultDistances[i] = getValueNotNull(result, CaptureResult.LENS_FOCUS_DISTANCE); 1504 resultLensStates[i] = getValueNotNull(result, CaptureResult.LENS_STATE); 1505 1506 verifyFocusRange(result, resultDistances[i]); 1507 1508 if (VERBOSE) { 1509 Log.v(TAG, "Capture burst request focus distance: " + testDistances[i] 1510 + " result: " + resultDistances[i] + " lens state " + resultLensStates[i]); 1511 } 1512 } 1513 1514 verifyFocusDistance(testDistances, resultDistances, resultLensStates, 1515 /*ascendingOrder*/true, /*noOvershoot*/true, 1516 /*repeatStart*/NUM_FOCUS_DISTANCES_REPEAT, /*repeatEnd*/NUM_FOCUS_DISTANCES_REPEAT, 1517 errorMargin); 1518 1519 } 1520 1521 /** 1522 * Verify focus distance control. 1523 * 1524 * Assumption: 1525 * - First repeatStart+1 elements of requestedDistances share the same value 1526 * - Last repeatEnd+1 elements of requestedDistances share the same value 1527 * - All elements in between are monotonically increasing/decreasing depending on ascendingOrder. 
1528 * - Focuser is at requestedDistances[0] at the beginning of the test. 1529 * 1530 * @param requestedDistances The requested focus distances 1531 * @param resultDistances The result focus distances 1532 * @param lensStates The result lens states 1533 * @param ascendingOrder The order of the expected focus distance request/output 1534 * @param noOvershoot Assert that focus control doesn't overshoot the requested value 1535 * @param repeatStart The number of times the starting focus distance is repeated 1536 * @param repeatEnd The number of times the ending focus distance is repeated 1537 * @param errorMargin The error margin between request and result 1538 */ verifyFocusDistance(float[] requestedDistances, float[] resultDistances, int[] lensStates, boolean ascendingOrder, boolean noOvershoot, int repeatStart, int repeatEnd, float errorMargin)1539 private void verifyFocusDistance(float[] requestedDistances, float[] resultDistances, 1540 int[] lensStates, boolean ascendingOrder, boolean noOvershoot, int repeatStart, 1541 int repeatEnd, float errorMargin) { 1542 1543 float minValue = 0; 1544 float maxValue = mStaticInfo.getMinimumFocusDistanceChecked(); 1545 float hyperfocalDistance = 0; 1546 if (mStaticInfo.areKeysAvailable(CameraCharacteristics.LENS_INFO_HYPERFOCAL_DISTANCE)) { 1547 hyperfocalDistance = mStaticInfo.getHyperfocalDistanceChecked(); 1548 } 1549 1550 // Verify lens and focus distance do not change for first repeatStart 1551 // results. 1552 for (int i = 0; i < repeatStart; i ++) { 1553 float marginMin = requestedDistances[i] * (1.0f - errorMargin); 1554 // HAL may choose to use hyperfocal distance for all distances between [0, hyperfocal]. 1555 float marginMax = 1556 Math.max(requestedDistances[i], hyperfocalDistance) * (1.0f + errorMargin); 1557 1558 mCollector.expectEquals("Lens moves even though focus_distance didn't change", 1559 lensStates[i], CaptureResult.LENS_STATE_STATIONARY); 1560 if (noOvershoot) { 1561 mCollector.expectInRange("Focus distance in result should be close enough to " + 1562 "requested value", resultDistances[i], marginMin, marginMax); 1563 } 1564 mCollector.expectInRange("Result focus distance is out of range", 1565 resultDistances[i], minValue, maxValue); 1566 } 1567 1568 for (int i = repeatStart; i < resultDistances.length-1; i ++) { 1569 float marginMin = requestedDistances[i] * (1.0f - errorMargin); 1570 // HAL may choose to use hyperfocal distance for all distances between [0, hyperfocal]. 1571 float marginMax = 1572 Math.max(requestedDistances[i], hyperfocalDistance) * (1.0f + errorMargin); 1573 if (noOvershoot) { 1574 // Result focus distance shouldn't overshoot the request 1575 boolean condition; 1576 if (ascendingOrder) { 1577 condition = resultDistances[i] <= marginMax; 1578 } else { 1579 condition = resultDistances[i] >= marginMin; 1580 } 1581 mCollector.expectTrue(String.format( 1582 "Lens shouldn't move past request focus distance. result " + 1583 resultDistances[i] + " vs target of " + 1584 (ascendingOrder ? marginMax : marginMin)), condition); 1585 } 1586 1587 // Verify monotonically increased focus distance setting 1588 boolean condition; 1589 float compareDistance = resultDistances[i+1] - resultDistances[i]; 1590 if (i < resultDistances.length-1-repeatEnd) { 1591 condition = (ascendingOrder ? compareDistance > 0 : compareDistance < 0); 1592 } else { 1593 condition = (ascendingOrder ? 
                        compareDistance >= 0 : compareDistance <= 0);
            }
            mCollector.expectTrue(String.format("Adjacent [resultDistances, lens_state] results ["
                    + resultDistances[i] + "," + lensStates[i] + "], [" + resultDistances[i+1] + ","
                    + lensStates[i+1] + "] monotonicity is broken"), condition);
        }

        mCollector.expectTrue(String.format("All values of this array are equal: " +
                resultDistances[0] + " " + resultDistances[resultDistances.length-1]),
                resultDistances[0] != resultDistances[resultDistances.length-1]);

        // Verify lens moved to destination location.
        mCollector.expectInRange("Focus distance " + resultDistances[resultDistances.length-1] +
                " for minFocusDistance should be close enough to requested value " +
                requestedDistances[requestedDistances.length-1],
                resultDistances[resultDistances.length-1],
                requestedDistances[requestedDistances.length-1] * (1.0f - errorMargin),
                requestedDistances[requestedDistances.length-1] * (1.0f + errorMargin));
    }

    /**
     * Verify edge mode control results for fpsRanges
     */
    private void edgeModesTestByCamera(List<Range<Integer>> fpsRanges) throws Exception {
        Size maxPrevSize = mOrderedPreviewSizes.get(0);
        int[] edgeModes = mStaticInfo.getAvailableEdgeModesChecked();
        CaptureRequest.Builder requestBuilder =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);

        for (int mode : edgeModes) {
            requestBuilder.set(CaptureRequest.EDGE_MODE, mode);

            // Test that OFF and FAST modes do not slow down the frame rate.
            if (mode == CaptureRequest.EDGE_MODE_OFF ||
                    mode == CaptureRequest.EDGE_MODE_FAST) {
                verifyFpsNotSlowDown(requestBuilder, NUM_FRAMES_VERIFIED, fpsRanges);
            }

            SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
            startPreview(requestBuilder, maxPrevSize, resultListener);
            mSession.setRepeatingRequest(requestBuilder.build(), resultListener, mHandler);
            waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);

            verifyCaptureResultForKey(CaptureResult.EDGE_MODE, mode, resultListener,
                    NUM_FRAMES_VERIFIED);
        }

        stopPreview();
    }

    /**
     * Test CCT color correction controls.
     *
     * <p>Test CCT color correction mode and control keys for color temperature
     * and color tint.</p>
     */
    private void cctColorCorrectionTestByCamera() throws Exception {
        CaptureRequest request;
        CaptureResult result;
        Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size.
        updatePreviewSurface(maxPreviewSz);
        CaptureRequest.Builder manualRequestBuilder = createRequestForPreview();
        CaptureRequest.Builder previewRequestBuilder = createRequestForPreview();
        SimpleCaptureCallback listener = new SimpleCaptureCallback();

        startPreview(previewRequestBuilder, maxPreviewSz, listener);

        // Default preview result should give valid color correction metadata.
        result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
        validateColorCorrectionResult(result,
                previewRequestBuilder.get(CaptureRequest.COLOR_CORRECTION_MODE));
        int colorCorrectionMode = CaptureRequest.COLOR_CORRECTION_MODE_CCT;

        // Check if the color temperature range is advertised and
        // supports the minimum required range.
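        // Illustrative note (not executed by this test): because the advertised range must
        // contain [2856, 6500] and itself lie within [1000, 40000], an arbitrary requested CCT
        // can be clamped into the range retrieved below, e.g. (hypothetical snippet):
        //     int cct = colorTemperatureRange.clamp(7000);
        //     manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_COLOR_TEMPERATURE, cct);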
1668 Range<Integer> colorTemperatureRange = 1669 mStaticInfo.getCharacteristics().get(CameraCharacteristics. 1670 COLOR_CORRECTION_COLOR_TEMPERATURE_RANGE); 1671 assertNotNull("CCT mode is supported but color temperature range is null", 1672 colorTemperatureRange); 1673 assertTrue("Color temperature range should advertise at least [2856, 6500]", 1674 colorTemperatureRange.getLower() <= 2856 1675 && colorTemperatureRange.getUpper() >= 6500); 1676 assertTrue("Color temperature range should advertise between [1000, 40000]", 1677 colorTemperatureRange.getLower() >= 1000 1678 && colorTemperatureRange.getUpper() <= 40000); 1679 1680 List<Integer> availableControlModes = Arrays.asList( 1681 CameraTestUtils.toObject(mStaticInfo.getAvailableControlModesChecked())); 1682 List<Integer> availableAwbModes = Arrays.asList( 1683 CameraTestUtils.toObject(mStaticInfo.getAwbAvailableModesChecked())); 1684 boolean isManualCCSupported = 1685 availableControlModes.contains(CaptureRequest.CONTROL_MODE_OFF) || 1686 availableAwbModes.contains(CaptureRequest.CONTROL_AWB_MODE_OFF); 1687 if (isManualCCSupported) { 1688 // Turn off AWB through either CONTROL_AWB_MODE_OFF or CONTROL_MODE_OFF 1689 if (!availableControlModes.contains(CaptureRequest.CONTROL_MODE_OFF)) { 1690 // Only manual AWB mode is supported 1691 manualRequestBuilder.set(CaptureRequest.CONTROL_MODE, 1692 CaptureRequest.CONTROL_MODE_AUTO); 1693 manualRequestBuilder.set(CaptureRequest.CONTROL_AWB_MODE, 1694 CaptureRequest.CONTROL_AWB_MODE_OFF); 1695 } else { 1696 // All 3A manual controls are supported, it doesn't matter what we set for AWB mode 1697 manualRequestBuilder.set(CaptureRequest.CONTROL_MODE, 1698 CaptureRequest.CONTROL_MODE_OFF); 1699 } 1700 1701 int[] TEST_COLOR_TEMPERATURE_VALUES = {2500, 4500, 6500}; 1702 int[] TEST_COLOR_TINT_VALUES = {-25, 0, 25}; 1703 1704 for (int i = 0; i < TEST_COLOR_TEMPERATURE_VALUES.length; i++) { 1705 manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_MODE, 1706 colorCorrectionMode); 1707 manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_COLOR_TEMPERATURE, 1708 TEST_COLOR_TEMPERATURE_VALUES[i]); 1709 manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_COLOR_TINT, 1710 TEST_COLOR_TINT_VALUES[i]); 1711 request = manualRequestBuilder.build(); 1712 mSession.capture(request, listener, mHandler); 1713 result = listener.getCaptureResultForRequest(request, NUM_RESULTS_WAIT_TIMEOUT); 1714 validateColorCorrectionResult(result, colorCorrectionMode); 1715 int colorTemperatureResult = 1716 result.get(CaptureResult.COLOR_CORRECTION_COLOR_TEMPERATURE); 1717 int colorTintResult = result.get(CaptureResult.COLOR_CORRECTION_COLOR_TINT); 1718 mCollector.expectEquals("Control mode result/request mismatch", 1719 CaptureResult.CONTROL_MODE_OFF, result.get(CaptureResult.CONTROL_MODE)); 1720 mCollector.expectEquals("Color temperature result/request mismatch", 1721 TEST_COLOR_TEMPERATURE_VALUES[i], colorTemperatureResult); 1722 // The actual color tint applied may be clamped so the result 1723 // may differ from the request, so we just check if it is null 1724 mCollector.expectNotNull("Color tint result null", colorTintResult); 1725 } 1726 } 1727 } 1728 1729 /** 1730 * Test color correction controls. 1731 * 1732 * <p>Test different color correction modes. 
For TRANSFORM_MATRIX, only test 1733 * the unit gain and identity transform.</p> 1734 */ colorCorrectionTestByCamera()1735 private void colorCorrectionTestByCamera() throws Exception { 1736 CaptureRequest request; 1737 CaptureResult result; 1738 Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size. 1739 updatePreviewSurface(maxPreviewSz); 1740 CaptureRequest.Builder manualRequestBuilder = createRequestForPreview(); 1741 CaptureRequest.Builder previewRequestBuilder = createRequestForPreview(); 1742 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 1743 1744 startPreview(previewRequestBuilder, maxPreviewSz, listener); 1745 1746 // Default preview result should give valid color correction metadata. 1747 result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 1748 validateColorCorrectionResult(result, 1749 previewRequestBuilder.get(CaptureRequest.COLOR_CORRECTION_MODE)); 1750 int colorCorrectionMode = CaptureRequest.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX; 1751 // TRANSFORM_MATRIX mode 1752 // Only test unit gain and identity transform 1753 List<Integer> availableControlModes = Arrays.asList( 1754 CameraTestUtils.toObject(mStaticInfo.getAvailableControlModesChecked())); 1755 List<Integer> availableAwbModes = Arrays.asList( 1756 CameraTestUtils.toObject(mStaticInfo.getAwbAvailableModesChecked())); 1757 boolean isManualCCSupported = 1758 availableControlModes.contains(CaptureRequest.CONTROL_MODE_OFF) || 1759 availableAwbModes.contains(CaptureRequest.CONTROL_AWB_MODE_OFF); 1760 if (isManualCCSupported) { 1761 if (!availableControlModes.contains(CaptureRequest.CONTROL_MODE_OFF)) { 1762 // Only manual AWB mode is supported 1763 manualRequestBuilder.set(CaptureRequest.CONTROL_MODE, 1764 CaptureRequest.CONTROL_MODE_AUTO); 1765 manualRequestBuilder.set(CaptureRequest.CONTROL_AWB_MODE, 1766 CaptureRequest.CONTROL_AWB_MODE_OFF); 1767 } else { 1768 // All 3A manual controls are supported, it doesn't matter what we set for AWB mode. 
                manualRequestBuilder.set(CaptureRequest.CONTROL_MODE,
                        CaptureRequest.CONTROL_MODE_OFF);
            }

            RggbChannelVector UNIT_GAIN = new RggbChannelVector(1.0f, 1.0f, 1.0f, 1.0f);

            ColorSpaceTransform IDENTITY_TRANSFORM = new ColorSpaceTransform(
                    new Rational[] {
                        ONE_R, ZERO_R, ZERO_R,
                        ZERO_R, ONE_R, ZERO_R,
                        ZERO_R, ZERO_R, ONE_R
                    });

            manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_MODE, colorCorrectionMode);
            manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_GAINS, UNIT_GAIN);
            manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM, IDENTITY_TRANSFORM);
            request = manualRequestBuilder.build();
            mSession.capture(request, listener, mHandler);
            result = listener.getCaptureResultForRequest(request, NUM_RESULTS_WAIT_TIMEOUT);
            RggbChannelVector gains = result.get(CaptureResult.COLOR_CORRECTION_GAINS);
            ColorSpaceTransform transform = result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM);
            validateColorCorrectionResult(result, colorCorrectionMode);
            mCollector.expectEquals("control mode result/request mismatch",
                    CaptureResult.CONTROL_MODE_OFF, result.get(CaptureResult.CONTROL_MODE));
            mCollector.expectEquals("Color correction gain result/request mismatch",
                    UNIT_GAIN, gains);
            mCollector.expectEquals("Color correction transform result/request mismatch",
                    IDENTITY_TRANSFORM, transform);

        }

        // FAST mode
        colorCorrectionMode = CaptureRequest.COLOR_CORRECTION_MODE_FAST;
        manualRequestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
        manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_MODE, colorCorrectionMode);
        request = manualRequestBuilder.build();
        mSession.capture(request, listener, mHandler);
        result = listener.getCaptureResultForRequest(request, NUM_RESULTS_WAIT_TIMEOUT);
        validateColorCorrectionResult(result, colorCorrectionMode);
        mCollector.expectEquals("control mode result/request mismatch",
                CaptureResult.CONTROL_MODE_AUTO, result.get(CaptureResult.CONTROL_MODE));

        // HIGH_QUALITY mode
        colorCorrectionMode = CaptureRequest.COLOR_CORRECTION_MODE_HIGH_QUALITY;
        manualRequestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
        manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_MODE, colorCorrectionMode);
        request = manualRequestBuilder.build();
        mSession.capture(request, listener, mHandler);
        result = listener.getCaptureResultForRequest(request, NUM_RESULTS_WAIT_TIMEOUT);
        validateColorCorrectionResult(result, colorCorrectionMode);
        mCollector.expectEquals("control mode result/request mismatch",
                CaptureResult.CONTROL_MODE_AUTO, result.get(CaptureResult.CONTROL_MODE));
    }

    private void validateColorCorrectionResult(CaptureResult result, int colorCorrectionMode) {
        final RggbChannelVector ZERO_GAINS = new RggbChannelVector(0, 0, 0, 0);
        final int TRANSFORM_SIZE = 9;
        Rational[] zeroTransform = new Rational[TRANSFORM_SIZE];
        Arrays.fill(zeroTransform, ZERO_R);
        final ColorSpaceTransform ZERO_TRANSFORM = new ColorSpaceTransform(zeroTransform);

        RggbChannelVector resultGain;
        if ((resultGain = mCollector.expectKeyValueNotNull(result,
                CaptureResult.COLOR_CORRECTION_GAINS)) != null) {
            mCollector.expectKeyValueNotEquals(result,
                    CaptureResult.COLOR_CORRECTION_GAINS,
ZERO_GAINS); 1835 } 1836 1837 ColorSpaceTransform resultTransform; 1838 if ((resultTransform = mCollector.expectKeyValueNotNull(result, 1839 CaptureResult.COLOR_CORRECTION_TRANSFORM)) != null) { 1840 mCollector.expectKeyValueNotEquals(result, 1841 CaptureResult.COLOR_CORRECTION_TRANSFORM, ZERO_TRANSFORM); 1842 } 1843 1844 mCollector.expectEquals("color correction mode result/request mismatch", 1845 colorCorrectionMode, result.get(CaptureResult.COLOR_CORRECTION_MODE)); 1846 } 1847 1848 /** 1849 * Test that flash can be turned off successfully with a given initial and final AE_CONTROL 1850 * states. 1851 * 1852 * This function expects that initialAeControl and flashOffAeControl will not be either 1853 * CaptureRequest.CONTROL_AE_MODE_ON or CaptureRequest.CONTROL_AE_MODE_OFF 1854 * 1855 * @param listener The Capture listener that is used to wait for capture result 1856 * @param initialAeControl The initial AE_CONTROL mode to start repeating requests with. 1857 * @param flashOffAeControl The final AE_CONTROL mode which is expected to turn flash off for 1858 * TEMPLATE_PREVIEW repeating requests. 1859 */ flashTurnOffTest(SimpleCaptureCallback listener, boolean isLegacy, int initialAeControl, int flashOffAeControl)1860 private void flashTurnOffTest(SimpleCaptureCallback listener, boolean isLegacy, 1861 int initialAeControl, int flashOffAeControl) throws Exception { 1862 CaptureResult result; 1863 final int NUM_FLASH_REQUESTS_TESTED = 10; 1864 CaptureRequest.Builder requestBuilder = createRequestForPreview(); 1865 requestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO); 1866 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, initialAeControl); 1867 1868 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler); 1869 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 1870 1871 // Turn on torch using FLASH_MODE_TORCH 1872 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON); 1873 requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH); 1874 CaptureRequest torchOnRequest = requestBuilder.build(); 1875 mSession.setRepeatingRequest(torchOnRequest, listener, mHandler); 1876 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_TORCH); 1877 result = listener.getCaptureResultForRequest(torchOnRequest, NUM_RESULTS_WAIT_TIMEOUT); 1878 // Test that the flash actually turned on continuously. 1879 mCollector.expectEquals("Flash state result must be FIRED", CaptureResult.FLASH_STATE_FIRED, 1880 result.get(CaptureResult.FLASH_STATE)); 1881 mSession.stopRepeating(); 1882 // Turn off the torch 1883 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, flashOffAeControl); 1884 // TODO: jchowdhary@, b/130323585, this line can be removed. 1885 requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF); 1886 int numAllowedTransitionStates = NUM_PARTIAL_FRAMES_NPFC; 1887 if (mStaticInfo.isPerFrameControlSupported()) { 1888 numAllowedTransitionStates = NUM_PARTIAL_FRAMES_PFC; 1889 1890 } 1891 // We submit 2 * numAllowedTransitionStates + 1 requests since we have two torch mode 1892 // transitions. The additional request is to check for at least 1 expected (FIRED / READY) 1893 // state. 1894 int numTorchTestSamples = 2 * numAllowedTransitionStates + 1; 1895 CaptureRequest flashOffRequest = requestBuilder.build(); 1896 int flashModeOffRequests = captureRequestsSynchronizedBurst(flashOffRequest, 1897 numTorchTestSamples, listener, mHandler); 1898 // Turn it on again. 
        requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH);
        // We need to have CONTROL_AE_MODE be either CONTROL_AE_MODE_ON or CONTROL_AE_MODE_OFF to
        // turn the torch on again.
        requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
        CaptureRequest flashModeTorchRequest = requestBuilder.build();
        int flashModeTorchRequests = captureRequestsSynchronizedBurst(flashModeTorchRequest,
                numTorchTestSamples, listener, mHandler);

        CaptureResult[] torchStateResults =
                new CaptureResult[flashModeTorchRequests + flashModeOffRequests];
        Arrays.fill(torchStateResults, null);
        int i = 0;
        for (; i < flashModeOffRequests; i++) {
            torchStateResults[i] =
                    listener.getCaptureResultForRequest(flashOffRequest, NUM_RESULTS_WAIT_TIMEOUT);
            mCollector.expectNotEquals("Result for flashModeOff request null",
                    torchStateResults[i], null);
        }
        for (int j = i; j < torchStateResults.length; j++) {
            torchStateResults[j] =
                    listener.getCaptureResultForRequest(flashModeTorchRequest,
                            NUM_RESULTS_WAIT_TIMEOUT);
            mCollector.expectNotEquals("Result for flashModeTorch request null",
                    torchStateResults[j], null);
        }
        if (isLegacy) {
            // For LEGACY devices, flash state is null for all situations except:
            // android.control.aeMode == ON_ALWAYS_FLASH, where flash.state will be FIRED
            // android.flash.mode == TORCH, where flash.state will be FIRED
            testLegacyTorchStates(torchStateResults, 0, flashModeOffRequests - 1, flashOffRequest);
            testLegacyTorchStates(torchStateResults, flashModeOffRequests,
                    torchStateResults.length - 1,
                    flashModeTorchRequest);
        } else {
            checkTorchStates(torchStateResults, numAllowedTransitionStates, flashModeOffRequests,
                    flashModeTorchRequests);
        }
    }

    private void testLegacyTorchStates(CaptureResult []torchStateResults, int beg, int end,
            CaptureRequest request) {
        for (int i = beg; i <= end; i++) {
            Integer requestControlAeMode = request.get(CaptureRequest.CONTROL_AE_MODE);
            Integer requestFlashMode = request.get(CaptureRequest.FLASH_MODE);
            Integer resultFlashState = torchStateResults[i].get(CaptureResult.FLASH_STATE);
            if (requestControlAeMode == CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
                    requestFlashMode == CaptureRequest.FLASH_MODE_TORCH) {
                mCollector.expectEquals("For LEGACY devices, flash state must be FIRED when " +
                        "CONTROL_AE_MODE == CONTROL_AE_MODE_ON_ALWAYS_FLASH or FLASH_MODE == " +
                        "TORCH, CONTROL_AE_MODE = " + requestControlAeMode + " FLASH_MODE = " +
                        requestFlashMode, CaptureResult.FLASH_STATE_FIRED, resultFlashState);
                continue;
            }
            mCollector.expectTrue("For LEGACY devices, flash state must be null when " +
                    "CONTROL_AE_MODE != CONTROL_AE_MODE_ON_ALWAYS_FLASH or FLASH_MODE != " +
                    "TORCH, CONTROL_AE_MODE = " + requestControlAeMode + " FLASH_MODE = " +
                    requestFlashMode, resultFlashState == null);
        }
    }

    // We check that torch states appear in the order expected. We don't necessarily know how many
    // times each state might appear; however, we make sure that the states do not appear out of
    // order.
    private void checkTorchTransitionStates(CaptureResult []torchStateResults, int beg, int end,
            List<Integer> stateOrder, boolean isTurningOff) {
        Integer flashState;
        Integer curIndex = 0;
        for (int i = beg; i <= end; i++) {
            flashState = torchStateResults[i].get(CaptureResult.FLASH_STATE);
            int index = stateOrder.indexOf(flashState);
            mCollector.expectNotEquals("Invalid state " + flashState + " not in expected list " +
                    stateOrder, index, -1);
            mCollector.expectGreaterOrEqual("state " + flashState + " index " + index +
                    " is expected to be >= " + curIndex,
                    curIndex, index);
            curIndex = index;
        }
    }

    private void checkTorchStates(CaptureResult []torchResults, int numAllowedTransitionStates,
            int numTorchOffSamples, int numTorchOnSamples) {
        // We test for flash states from request:
        // Request:      O(0) O(1) O(2) O(n)....O(nOFF)    T(0) T(1) T(2) ....T(n) .... T(nON)
        // Valid Result : P/R  P/R  P/R  R  R  R...P/R P/R  P/F  P/F  P/F        F        F
        // For the FLASH_STATE_OFF requests, once FLASH_STATE READY has been seen, for the
        // transition states while switching the torch off, it must not transition to
        // FLASH_STATE_PARTIAL again till the next transition period which turns the torch on.
        // P - FLASH_STATE_PARTIAL
        // R - FLASH_STATE_READY
        // F - FLASH_STATE_FIRED
        // O(k) - kth FLASH_MODE_OFF request
        // T(k) - kth FLASH_MODE_TORCH request
        // nOFF - number of torch off samples
        // nON - number of torch on samples
        Integer flashState;
        // Check on -> off transition states
        List<Integer> onToOffStateOrderList = new ArrayList<Integer>();
        onToOffStateOrderList.add(CaptureRequest.FLASH_STATE_PARTIAL);
        onToOffStateOrderList.add(CaptureRequest.FLASH_STATE_READY);
        checkTorchTransitionStates(torchResults, 0, numAllowedTransitionStates,
                onToOffStateOrderList, true);
        // The next frames (before transition) must have their flash state as FLASH_STATE_READY
        for (int i = numAllowedTransitionStates + 1;
                i < numTorchOffSamples - numAllowedTransitionStates; i++) {
            flashState = torchResults[i].get(CaptureResult.FLASH_STATE);
            mCollector.expectEquals("flash state result must be READY",
                    CaptureResult.FLASH_STATE_READY, flashState);
        }
        // check off -> on transition states, before the FLASH_MODE_TORCH request was sent
        List<Integer> offToOnPreStateOrderList = new ArrayList<Integer>();
        offToOnPreStateOrderList.add(CaptureRequest.FLASH_STATE_READY);
        offToOnPreStateOrderList.add(CaptureRequest.FLASH_STATE_PARTIAL);
        checkTorchTransitionStates(torchResults,
                numTorchOffSamples - numAllowedTransitionStates, numTorchOffSamples - 1,
                offToOnPreStateOrderList, false);
        // check off -> on transition states
        List<Integer> offToOnPostStateOrderList = new ArrayList<Integer>();
        offToOnPostStateOrderList.add(CaptureRequest.FLASH_STATE_PARTIAL);
        offToOnPostStateOrderList.add(CaptureRequest.FLASH_STATE_FIRED);
        checkTorchTransitionStates(torchResults,
                numTorchOffSamples, numTorchOffSamples + numAllowedTransitionStates,
                offToOnPostStateOrderList, false);
        // check on states after off -> on transition
        // The next frames must have their flash state as FLASH_STATE_FIRED
        for (int i = numTorchOffSamples + numAllowedTransitionStates + 1;
                i < torchResults.length - 1; i++) {
            flashState = torchResults[i].get(CaptureResult.FLASH_STATE);
            mCollector.expectEquals("flash state result must be FIRED for frame " + i,
                    CaptureRequest.FLASH_STATE_FIRED, flashState);
        }
    }

    /**
     * Test flash mode control by AE mode.
     * <p>
     * Only allow AE mode ON or OFF, because other AE modes could conflict with
     * flash manual control. This function expects the camera to already have an active
     * repeating request and be sending results to the listener.
     * </p>
     *
     * @param listener The Capture listener that is used to wait for capture result
     * @param aeMode The AE mode for flash to test with
     */
    private void flashTestByAeMode(SimpleCaptureCallback listener, int aeMode) throws Exception {
        CaptureResult result;
        final int NUM_FLASH_REQUESTS_TESTED = 10;
        CaptureRequest.Builder requestBuilder = createRequestForPreview();

        if (aeMode == CaptureRequest.CONTROL_AE_MODE_ON) {
            requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, aeMode);
        } else if (aeMode == CaptureRequest.CONTROL_AE_MODE_OFF) {
            changeExposure(requestBuilder, DEFAULT_EXP_TIME_NS, DEFAULT_SENSITIVITY);
        } else {
            throw new IllegalArgumentException("This test only works when AE mode is ON or OFF");
        }

        mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler);
        waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);

        // For a camera that doesn't have a flash unit, flash state should always be UNAVAILABLE.
        if (mStaticInfo.getFlashInfoChecked() == false) {
            for (int i = 0; i < NUM_FLASH_REQUESTS_TESTED; i++) {
                result = listener.getCaptureResult(CAPTURE_RESULT_TIMEOUT_MS);
                mCollector.expectEquals("No flash unit available, flash state must be UNAVAILABLE "
                        + "for AE mode " + aeMode, CaptureResult.FLASH_STATE_UNAVAILABLE,
                        result.get(CaptureResult.FLASH_STATE));
            }

            return;
        }

        // Test flash SINGLE mode control. Wait for flash state to be READY first.
        if (mStaticInfo.isHardwareLevelAtLeastLimited()) {
            waitForResultValue(listener, CaptureResult.FLASH_STATE, CaptureResult.FLASH_STATE_READY,
                    NUM_RESULTS_WAIT_TIMEOUT);
        } // else the settings were already waited on earlier

        requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_SINGLE);
        CaptureRequest flashSinglerequest = requestBuilder.build();

        int flashModeSingleRequests = captureRequestsSynchronized(
                flashSinglerequest, listener, mHandler);
        waitForNumResults(listener, flashModeSingleRequests - 1);
        result = listener.getCaptureResultForRequest(flashSinglerequest, NUM_RESULTS_WAIT_TIMEOUT);
        // Result mode must be SINGLE, state must be FIRED.
        mCollector.expectEquals("Flash mode result must be SINGLE",
                CaptureResult.FLASH_MODE_SINGLE, result.get(CaptureResult.FLASH_MODE));
        mCollector.expectEquals("Flash state result must be FIRED",
                CaptureResult.FLASH_STATE_FIRED, result.get(CaptureResult.FLASH_STATE));

        // Test flash TORCH mode control.
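        // The TORCH verification below models the flash state sequence as a small state machine:
        // results may report PARTIAL while the torch ramps up, must reach FIRED, and may drop
        // back to PARTIAL only at the tail while ramping down. For example, the sequence
        // P P F F F P is accepted, while F P F is rejected (FIRED after ramp-down has started).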
2089 requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH); 2090 CaptureRequest torchRequest = requestBuilder.build(); 2091 2092 int flashModeTorchRequests = captureRequestsSynchronized(torchRequest, 2093 NUM_FLASH_REQUESTS_TESTED, listener, mHandler); 2094 waitForNumResults(listener, flashModeTorchRequests - NUM_FLASH_REQUESTS_TESTED); 2095 2096 // Verify the results 2097 TorchSeqState state = TorchSeqState.RAMPING_UP; 2098 for (int i = 0; i < NUM_FLASH_REQUESTS_TESTED; i++) { 2099 result = listener.getCaptureResultForRequest(torchRequest, 2100 NUM_RESULTS_WAIT_TIMEOUT); 2101 int flashMode = result.get(CaptureResult.FLASH_MODE); 2102 int flashState = result.get(CaptureResult.FLASH_STATE); 2103 // Result mode must be TORCH 2104 mCollector.expectEquals("Flash mode result " + i + " must be TORCH", 2105 CaptureResult.FLASH_MODE_TORCH, result.get(CaptureResult.FLASH_MODE)); 2106 if (state == TorchSeqState.RAMPING_UP && 2107 flashState == CaptureResult.FLASH_STATE_FIRED) { 2108 state = TorchSeqState.FIRED; 2109 } else if (state == TorchSeqState.FIRED && 2110 flashState == CaptureResult.FLASH_STATE_PARTIAL) { 2111 state = TorchSeqState.RAMPING_DOWN; 2112 } 2113 2114 if (i == 0 && mStaticInfo.isPerFrameControlSupported()) { 2115 mCollector.expectTrue( 2116 "Per frame control device must enter FIRED state on first torch request", 2117 state == TorchSeqState.FIRED); 2118 } 2119 2120 if (state == TorchSeqState.FIRED) { 2121 mCollector.expectEquals("Flash state result " + i + " must be FIRED", 2122 CaptureResult.FLASH_STATE_FIRED, result.get(CaptureResult.FLASH_STATE)); 2123 } else { 2124 mCollector.expectEquals("Flash state result " + i + " must be PARTIAL", 2125 CaptureResult.FLASH_STATE_PARTIAL, result.get(CaptureResult.FLASH_STATE)); 2126 } 2127 } 2128 mCollector.expectTrue("Torch state FIRED never seen", 2129 state == TorchSeqState.FIRED || state == TorchSeqState.RAMPING_DOWN); 2130 2131 // Test flash OFF mode control 2132 requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF); 2133 CaptureRequest flashOffrequest = requestBuilder.build(); 2134 2135 int flashModeOffRequests = captureRequestsSynchronized(flashOffrequest, listener, mHandler); 2136 waitForNumResults(listener, flashModeOffRequests - 1); 2137 result = listener.getCaptureResultForRequest(flashOffrequest, NUM_RESULTS_WAIT_TIMEOUT); 2138 mCollector.expectEquals("Flash mode result must be OFF", CaptureResult.FLASH_MODE_OFF, 2139 result.get(CaptureResult.FLASH_MODE)); 2140 } 2141 verifyAntiBandingMode(SimpleCaptureCallback listener, int numFramesVerified, int mode, boolean isAeManual, long requestExpTime)2142 private void verifyAntiBandingMode(SimpleCaptureCallback listener, int numFramesVerified, 2143 int mode, boolean isAeManual, long requestExpTime) throws Exception { 2144 // Skip the first a couple of frames as antibanding may not be fully up yet. 2145 final int NUM_FRAMES_SKIPPED = 5; 2146 for (int i = 0; i < NUM_FRAMES_SKIPPED; i++) { 2147 listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 2148 } 2149 2150 for (int i = 0; i < numFramesVerified; i++) { 2151 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 2152 Long resultExpTime = result.get(CaptureResult.SENSOR_EXPOSURE_TIME); 2153 assertNotNull("Exposure time shouldn't be null", resultExpTime); 2154 Integer flicker = result.get(CaptureResult.STATISTICS_SCENE_FLICKER); 2155 // Scene flicker result should be always available. 
2156 assertNotNull("Scene flicker must not be null", flicker); 2157 assertTrue("Scene flicker is invalid", flicker >= STATISTICS_SCENE_FLICKER_NONE && 2158 flicker <= STATISTICS_SCENE_FLICKER_60HZ); 2159 2160 Integer antiBandMode = result.get(CaptureResult.CONTROL_AE_ANTIBANDING_MODE); 2161 assertNotNull("antiBanding mode shouldn't be null", antiBandMode); 2162 assertTrue("antiBanding Mode invalid, should be == " + mode + ", is: " + antiBandMode, 2163 antiBandMode == mode); 2164 if (isAeManual) { 2165 // First, round down not up, second, need close enough. 2166 validateExposureTime(requestExpTime, resultExpTime); 2167 return; 2168 } 2169 2170 long expectedExpTime = resultExpTime; // Default, no exposure adjustment. 2171 if (mode == CONTROL_AE_ANTIBANDING_MODE_50HZ) { 2172 // result exposure time must be adjusted by 50Hz illuminant source. 2173 expectedExpTime = 2174 getAntiFlickeringExposureTime(ANTI_FLICKERING_50HZ, resultExpTime); 2175 } else if (mode == CONTROL_AE_ANTIBANDING_MODE_60HZ) { 2176 // result exposure time must be adjusted by 60Hz illuminant source. 2177 expectedExpTime = 2178 getAntiFlickeringExposureTime(ANTI_FLICKERING_60HZ, resultExpTime); 2179 } else if (mode == CONTROL_AE_ANTIBANDING_MODE_AUTO){ 2180 /** 2181 * Use STATISTICS_SCENE_FLICKER to tell the illuminant source 2182 * and do the exposure adjustment. 2183 */ 2184 expectedExpTime = resultExpTime; 2185 if (flicker == STATISTICS_SCENE_FLICKER_60HZ) { 2186 expectedExpTime = 2187 getAntiFlickeringExposureTime(ANTI_FLICKERING_60HZ, resultExpTime); 2188 } else if (flicker == STATISTICS_SCENE_FLICKER_50HZ) { 2189 expectedExpTime = 2190 getAntiFlickeringExposureTime(ANTI_FLICKERING_50HZ, resultExpTime); 2191 } 2192 } 2193 2194 if (Math.abs(resultExpTime - expectedExpTime) > EXPOSURE_TIME_ERROR_MARGIN_NS) { 2195 mCollector.addMessage(String.format("Result exposure time %dns diverges too much" 2196 + " from expected exposure time %dns for mode %d when AE is auto", 2197 resultExpTime, expectedExpTime, mode)); 2198 } 2199 } 2200 } 2201 antiBandingTestByMode(Size size, int mode)2202 private void antiBandingTestByMode(Size size, int mode) 2203 throws Exception { 2204 if(VERBOSE) { 2205 Log.v(TAG, "Anti-banding test for mode " + mode + " for camera " + mCamera.getId()); 2206 } 2207 CaptureRequest.Builder requestBuilder = 2208 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 2209 2210 requestBuilder.set(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE, mode); 2211 2212 // Test auto AE mode anti-banding behavior 2213 SimpleCaptureCallback resultListener = new SimpleCaptureCallback(); 2214 startPreview(requestBuilder, size, resultListener); 2215 waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 2216 verifyAntiBandingMode(resultListener, NUM_FRAMES_VERIFIED, mode, /*isAeManual*/false, 2217 IGNORE_REQUESTED_EXPOSURE_TIME_CHECK); 2218 2219 // Test manual AE mode anti-banding behavior 2220 // 65ms, must be supported by full capability devices. 
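        // Note on the two passes: the auto-AE pass above checks that each result exposure is
        // already consistent with the flicker period (assuming getAntiFlickeringExposureTime()
        // rounds an exposure down to a whole number of 10 ms / ~8.33 ms periods once it exceeds
        // the 50Hz/60Hz boundary; e.g. a 25 ms auto exposure under 50 Hz lighting would be
        // flagged, while 30 ms would pass). The manual-AE pass below only requires the result
        // exposure to match the request (see the isAeManual branch in verifyAntiBandingMode()).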
2221 final long TEST_MANUAL_EXP_TIME_NS = 65000000L; 2222 long manualExpTime = mStaticInfo.getExposureClampToRange(TEST_MANUAL_EXP_TIME_NS); 2223 changeExposure(requestBuilder, manualExpTime); 2224 resultListener = new SimpleCaptureCallback(); 2225 startPreview(requestBuilder, size, resultListener); 2226 waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 2227 verifyAntiBandingMode(resultListener, NUM_FRAMES_VERIFIED, mode, /*isAeManual*/true, 2228 manualExpTime); 2229 2230 stopPreview(); 2231 } 2232 2233 /** 2234 * Test the all available AE modes and AE lock. 2235 * <p> 2236 * For manual AE mode, test iterates through different sensitivities and 2237 * exposure times, validate the result exposure time correctness. For 2238 * CONTROL_AE_MODE_ON_ALWAYS_FLASH mode, the AE lock and flash are tested. 2239 * For the rest of the AUTO mode, AE lock is tested. 2240 * </p> 2241 * 2242 * @param mode corresponding to AE_MODE_* 2243 * @param priorityMode corresponding to AE_PRIORITY_MODE_* 2244 */ aeModeAndLockTestByMode(int mode, int priorityMode)2245 private void aeModeAndLockTestByMode(int mode, int priorityMode) 2246 throws Exception { 2247 switch (mode) { 2248 case CONTROL_AE_MODE_OFF: 2249 if (mStaticInfo.isCapabilitySupported( 2250 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) { 2251 // Test manual exposure control. 2252 aeManualControlTest(); 2253 } else { 2254 Log.w(TAG, 2255 "aeModeAndLockTestByMode - can't test AE mode OFF without " + 2256 "manual sensor control"); 2257 } 2258 break; 2259 case CONTROL_AE_MODE_ON: 2260 case CONTROL_AE_MODE_ON_AUTO_FLASH: 2261 case CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE: 2262 case CONTROL_AE_MODE_ON_ALWAYS_FLASH: 2263 case CONTROL_AE_MODE_ON_EXTERNAL_FLASH: 2264 // Test AE lock for above AUTO modes. 2265 aeAutoModeTestLock(mode, priorityMode); 2266 break; 2267 default: 2268 throw new UnsupportedOperationException("Unhandled AE mode " + mode); 2269 } 2270 } 2271 2272 /** 2273 * Test AE auto modes. 2274 * <p> 2275 * Use single request rather than repeating request to test AE lock per frame control. 2276 * </p> 2277 */ aeAutoModeTestLock(int mode, int priorityMode)2278 private void aeAutoModeTestLock(int mode, int priorityMode) throws Exception { 2279 CaptureRequest.Builder requestBuilder = 2280 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 2281 if (mStaticInfo.isAeLockSupported()) { 2282 requestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false); 2283 } 2284 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, mode); 2285 2286 if (Flags.aePriority()) { 2287 requestBuilder.set(CaptureRequest.CONTROL_AE_PRIORITY_MODE, priorityMode); 2288 } 2289 2290 configurePreviewOutput(requestBuilder); 2291 2292 final int MAX_NUM_CAPTURES_DURING_LOCK = 5; 2293 for (int i = 1; i <= MAX_NUM_CAPTURES_DURING_LOCK; i++) { 2294 autoAeMultipleCapturesThenTestLock(requestBuilder, mode, i, priorityMode); 2295 } 2296 } 2297 2298 /** 2299 * Issue multiple auto AE captures, then lock AE, validate the AE lock vs. 2300 * the first capture result after the AE lock. The right AE lock behavior is: 2301 * When it is locked, it locks to the current exposure value, and all subsequent 2302 * request with lock ON will have the same exposure value locked. 
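     * For example, if the first locked result reports a 30 ms exposure at ISO 100, every later
     * result captured with the lock still ON is expected to report the same 30 ms / ISO 100 pair
     * (with an AE priority mode enabled, only the prioritized value is required to stay fixed).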
     */
    private void autoAeMultipleCapturesThenTestLock(
            CaptureRequest.Builder requestBuilder, int aeMode, int numCapturesDuringLock,
            int priorityMode)
            throws Exception {
        if (numCapturesDuringLock < 1) {
            throw new IllegalArgumentException("numCapturesDuringLock must be no less than 1");
        }
        if (VERBOSE) {
            Log.v(TAG, "Camera " + mCamera.getId() + ": Testing auto AE mode and lock for mode "
                    + aeMode + " with " + numCapturesDuringLock + " captures during lock");
        }

        final int NUM_CAPTURES_BEFORE_LOCK = 2;
        SimpleCaptureCallback listener = new SimpleCaptureCallback();

        CaptureResult[] resultsDuringLock = new CaptureResult[numCapturesDuringLock];
        boolean canSetAeLock = mStaticInfo.isAeLockSupported();

        // Reset the AE lock to OFF, since we are reusing this builder many times
        if (canSetAeLock) {
            requestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false);
        }

        // Just send several captures with auto AE, lock off.
        CaptureRequest request = requestBuilder.build();
        for (int i = 0; i < NUM_CAPTURES_BEFORE_LOCK; i++) {
            mSession.capture(request, listener, mHandler);
        }
        waitForNumResults(listener, NUM_CAPTURES_BEFORE_LOCK);

        if (!canSetAeLock) {
            // Without AE lock, the remaining test items won't work
            return;
        }

        // Then fire several captures to lock the AE.
        requestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, true);

        int requestCount = captureRequestsSynchronized(
                requestBuilder.build(), numCapturesDuringLock, listener, mHandler);

        int[] sensitivities = new int[numCapturesDuringLock];
        long[] expTimes = new long[numCapturesDuringLock];
        Arrays.fill(sensitivities, -1);
        Arrays.fill(expTimes, -1L);

        // Get the results with AE lock ON and validate the exposure values.
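        // captureRequestsSynchronized() returns the total number of requests it submitted (it
        // presumably repeats the locked request to guarantee synchronization), so drain all but
        // the last numCapturesDuringLock results before reading the locked ones below.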
2351 waitForNumResults(listener, requestCount - numCapturesDuringLock); 2352 for (int i = 0; i < resultsDuringLock.length; i++) { 2353 resultsDuringLock[i] = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 2354 } 2355 2356 for (int i = 0; i < numCapturesDuringLock; i++) { 2357 mCollector.expectKeyValueEquals( 2358 resultsDuringLock[i], CaptureResult.CONTROL_AE_LOCK, true); 2359 } 2360 2361 // Can't read manual sensor/exposure settings without manual sensor 2362 if (mStaticInfo.isCapabilitySupported( 2363 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS)) { 2364 int sensitivityLocked = 2365 getValueNotNull(resultsDuringLock[0], CaptureResult.SENSOR_SENSITIVITY); 2366 long expTimeLocked = 2367 getValueNotNull(resultsDuringLock[0], CaptureResult.SENSOR_EXPOSURE_TIME); 2368 for (int i = 1; i < resultsDuringLock.length; i++) { 2369 if (Flags.aePriority()) { 2370 switch (priorityMode) { 2371 case CONTROL_AE_PRIORITY_MODE_OFF: 2372 mCollector.expectKeyValueEquals( 2373 resultsDuringLock[i], 2374 CaptureResult.SENSOR_EXPOSURE_TIME, expTimeLocked); 2375 mCollector.expectKeyValueEquals( 2376 resultsDuringLock[i], 2377 CaptureResult.SENSOR_SENSITIVITY, sensitivityLocked); 2378 break; 2379 case CONTROL_AE_PRIORITY_MODE_SENSOR_EXPOSURE_TIME_PRIORITY: 2380 mCollector.expectKeyValueEquals( 2381 resultsDuringLock[i], 2382 CaptureResult.SENSOR_EXPOSURE_TIME, expTimeLocked); 2383 break; 2384 case CONTROL_AE_PRIORITY_MODE_SENSOR_SENSITIVITY_PRIORITY: 2385 mCollector.expectKeyValueEquals( 2386 resultsDuringLock[i], 2387 CaptureResult.SENSOR_SENSITIVITY, sensitivityLocked); 2388 break; 2389 default: 2390 throw new UnsupportedOperationException("Unhandled AE priority mode " 2391 + priorityMode); 2392 } 2393 } else { 2394 mCollector.expectKeyValueEquals( 2395 resultsDuringLock[i], CaptureResult.SENSOR_EXPOSURE_TIME, 2396 expTimeLocked); 2397 mCollector.expectKeyValueEquals( 2398 resultsDuringLock[i], CaptureResult.SENSOR_SENSITIVITY, 2399 sensitivityLocked); 2400 } 2401 } 2402 } 2403 } 2404 2405 /** 2406 * Iterate through exposure times and sensitivities for manual AE control. 2407 * <p> 2408 * Use single request rather than repeating request to test manual exposure 2409 * value change per frame control. 2410 * </p> 2411 */ aeManualControlTest()2412 private void aeManualControlTest() 2413 throws Exception { 2414 CaptureRequest.Builder requestBuilder = 2415 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 2416 configurePreviewOutput(requestBuilder); 2417 2418 // Warm up pipeline for more accurate timing 2419 SimpleCaptureCallback warmupListener = new SimpleCaptureCallback(); 2420 mSession.setRepeatingRequest(requestBuilder.build(), warmupListener, mHandler); 2421 warmupListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 2422 2423 // Do manual captures 2424 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CONTROL_AE_MODE_OFF); 2425 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 2426 2427 long[] expTimesNs = getExposureTimeTestValuesSorted(); 2428 int[] sensitivities = getSensitivityTestValuesSorted(); 2429 2430 assertTrue(expTimesNs.length > 0); 2431 assertTrue(sensitivities.length > 0); 2432 2433 // For multiple exposure times, make smart combinations of exposure and sensitivity to 2434 // reduce test time and still have exhaustive coverage. 2435 List<Pair<Long, Integer>> exposureSensitivityTestValues = 2436 new ArrayList<Pair<Long, Integer>>(); 2437 2438 // Min exposure should be tested with all sensitivity values. 
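        // For example, with exposure times {E0 < E1 < E2} and sensitivities {S0 < S1 < S2}, this
        // loop adds (E0,S0), (E0,S1), (E0,S2) and the loop below adds (E1,S0), (E1,S2), (E2,S0),
        // (E2,S2).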
2439 for (int i = 0; i < sensitivities.length; i++) { 2440 exposureSensitivityTestValues.add( 2441 new Pair<Long, Integer>(expTimesNs[0], sensitivities[i])); 2442 } 2443 2444 // All other exposure values should be tested only with min and max sensitivity. 2445 for (int i = 1; i < expTimesNs.length; i++) { 2446 exposureSensitivityTestValues.add( 2447 new Pair<Long, Integer>(expTimesNs[i], sensitivities[0])); 2448 2449 if (sensitivities.length > 1) { 2450 exposureSensitivityTestValues.add( 2451 new Pair<Long, Integer>(expTimesNs[i], 2452 sensitivities[sensitivities.length - 1])); 2453 } 2454 } 2455 2456 // Submit single request at a time, then verify the result. 2457 for (int i = 0; i < exposureSensitivityTestValues.size(); i++) { 2458 long exposure = exposureSensitivityTestValues.get(i).first; 2459 int sensitivity = exposureSensitivityTestValues.get(i).second; 2460 2461 if (VERBOSE) { 2462 Log.v(TAG, "Camera " + mCamera.getId() + ": Testing sensitivity " 2463 + sensitivity + ", exposure time " + exposure + "ns"); 2464 } 2465 2466 changeExposure(requestBuilder, exposure, sensitivity); 2467 mSession.capture(requestBuilder.build(), listener, mHandler); 2468 2469 // make sure timeout is long enough for long exposure time - add a 2x safety margin 2470 // to exposure time 2471 long timeoutMs = WAIT_FOR_RESULT_TIMEOUT_MS + 2 * exposure / 1000000; 2472 CaptureResult result = listener.getCaptureResult(timeoutMs); 2473 long resultExpTimeNs = getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME); 2474 int resultSensitivity = getValueNotNull(result, CaptureResult.SENSOR_SENSITIVITY); 2475 validateExposureTime(exposure, resultExpTimeNs); 2476 validateSensitivity(sensitivity, resultSensitivity); 2477 validateFrameDurationForCapture(result); 2478 } 2479 mSession.stopRepeating(); 2480 2481 // TODO: Add another case to test where we can submit all requests, then wait for 2482 // results, which will hide the pipeline latency. this is not only faster, but also 2483 // test high speed per frame control and synchronization. 2484 } 2485 2486 2487 /** 2488 * Verify black level lock control. 2489 */ verifyBlackLevelLockResults(SimpleCaptureCallback listener, int numFramesVerified, int maxLockOffCnt)2490 private void verifyBlackLevelLockResults(SimpleCaptureCallback listener, int numFramesVerified, 2491 int maxLockOffCnt) throws Exception { 2492 int noLockCnt = 0; 2493 for (int i = 0; i < numFramesVerified; i++) { 2494 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 2495 Boolean blackLevelLock = result.get(CaptureResult.BLACK_LEVEL_LOCK); 2496 assertNotNull("Black level lock result shouldn't be null", blackLevelLock); 2497 2498 // Count the lock == false result, which could possibly occur at most once. 2499 if (blackLevelLock == false) { 2500 noLockCnt++; 2501 } 2502 2503 if(VERBOSE) { 2504 Log.v(TAG, "Black level lock result: " + blackLevelLock); 2505 } 2506 } 2507 assertTrue("Black level lock OFF occurs " + noLockCnt + " times, expect at most " 2508 + maxLockOffCnt + " for camera " + mCamera.getId(), noLockCnt <= maxLockOffCnt); 2509 } 2510 2511 /** 2512 * Verify shading map for different shading modes. 
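     *
     * <p>For SHADING_MODE_OFF the result is expected to be a unity map; for FAST and
     * HIGH_QUALITY every gain factor is expected to be at least 1.0. In all modes the map size
     * must fall within [MIN_SHADING_MAP_SIZE, MAX_SHADING_MAP_SIZE).</p>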
     */
    private void verifyShadingMap(SimpleCaptureCallback listener, int numFramesVerified,
            int shadingMode) throws Exception {

        for (int i = 0; i < numFramesVerified; i++) {
            CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
            mCollector.expectEquals("Shading mode result doesn't match request",
                    shadingMode, result.get(CaptureResult.SHADING_MODE));
            LensShadingMap mapObj = result.get(
                    CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP);
            assertNotNull("Map object must not be null", mapObj);
            int numElementsInMap = mapObj.getGainFactorCount();
            float[] map = new float[numElementsInMap];
            mapObj.copyGainFactors(map, /*offset*/0);
            assertNotNull("Map must not be null", map);
            assertFalse(String.format(
                    "Map size %d should be less than %d", numElementsInMap, MAX_SHADING_MAP_SIZE),
                    numElementsInMap >= MAX_SHADING_MAP_SIZE);
            assertFalse(String.format("Map size %d should be no less than %d", numElementsInMap,
                    MIN_SHADING_MAP_SIZE), numElementsInMap < MIN_SHADING_MAP_SIZE);

            if (shadingMode == CaptureRequest.SHADING_MODE_FAST ||
                    shadingMode == CaptureRequest.SHADING_MODE_HIGH_QUALITY) {
                // shading mode is FAST or HIGH_QUALITY, expect to receive a map with all
                // elements >= 1.0f

                int badValueCnt = 0;
                // Detect the bad values of the map data.
                for (int j = 0; j < numElementsInMap; j++) {
                    if (Float.isNaN(map[j]) || map[j] < 1.0f) {
                        badValueCnt++;
                    }
                }
                assertEquals("Number of bad values in the map is " + badValueCnt + " out of "
                        + numElementsInMap, /*expected*/0, /*actual*/badValueCnt);
            } else if (shadingMode == CaptureRequest.SHADING_MODE_OFF) {
                float[] unityMap = new float[numElementsInMap];
                Arrays.fill(unityMap, 1.0f);
                // shading mode is OFF, expect to receive a unity map.
                assertTrue("Result map " + Arrays.toString(map) + " must be a unity map",
                        Arrays.equals(unityMap, map));
            }
        }
    }

    /**
     * Test face detection for a camera.
     */
    private void faceDetectionTestByCamera() throws Exception {
        int[] faceDetectModes = mStaticInfo.getAvailableFaceDetectModesChecked();

        SimpleCaptureCallback listener;
        CaptureRequest.Builder requestBuilder =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);

        Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size.
        for (int mode : faceDetectModes) {
            requestBuilder.set(CaptureRequest.STATISTICS_FACE_DETECT_MODE, mode);
            if (VERBOSE) {
                Log.v(TAG, "Start testing face detection mode " + mode);
            }

            // Create a new listener for each run to avoid the results from one run spilling
            // into another run.
            listener = new SimpleCaptureCallback();
            startPreview(requestBuilder, maxPreviewSz, listener);
            waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
            verifyFaceDetectionResults(listener, NUM_FACE_DETECTION_FRAMES_VERIFIED, mode);
        }

        stopPreview();
    }
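    /**
     * Minimal illustrative sketch (not called by the tests above): the FULL-mode checks in
     * verifyFaceDetectionResults() boil down to this kind of containment test for each reported
     * landmark. The helper name and its use are assumptions for illustration only.
     */
    private static boolean isLandmarkInsideFace(Face face, Point landmark) {
        // A landmark is only meaningful if both it and the face bounds were reported.
        if (landmark == null || face.getBounds() == null) {
            return false;
        }
        // Rect.contains() is inclusive of left/top and exclusive of right/bottom.
        return face.getBounds().contains(landmark.x, landmark.y);
    }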
    /**
     * Verify face detection results for different face detection modes.
     *
     * @param listener The listener to get capture result
     * @param numFramesVerified Number of results to be verified
     * @param faceDetectionMode Face detection mode to be verified against
     */
    private void verifyFaceDetectionResults(SimpleCaptureCallback listener, int numFramesVerified,
            int faceDetectionMode) {
        for (int i = 0; i < numFramesVerified; i++) {
            CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
            mCollector.expectEquals("Result face detection mode should match the request",
                    faceDetectionMode, result.get(CaptureResult.STATISTICS_FACE_DETECT_MODE));

            Face[] faces = result.get(CaptureResult.STATISTICS_FACES);
            List<Integer> faceIds = new ArrayList<Integer>(faces.length);
            List<Integer> faceScores = new ArrayList<Integer>(faces.length);
            if (faceDetectionMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_OFF) {
                mCollector.expectEquals("Number of detected faces should always be 0 for OFF mode",
                        0, faces.length);
            } else if (faceDetectionMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE) {
                for (Face face : faces) {
                    mCollector.expectNotNull("Face rectangle shouldn't be null", face.getBounds());
                    faceScores.add(face.getScore());
                    mCollector.expectTrue("Face id is expected to be -1 for SIMPLE mode",
                            face.getId() == Face.ID_UNSUPPORTED);
                }
            } else if (faceDetectionMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL) {
                if (VERBOSE) {
                    Log.v(TAG, "Number of faces detected: " + faces.length);
                }

                for (Face face : faces) {
                    Rect faceBound;
                    boolean faceRectAvailable = mCollector.expectTrue("Face rectangle "
                            + "shouldn't be null", face.getBounds() != null);
                    if (!faceRectAvailable) {
                        continue;
                    }
                    faceBound = face.getBounds();

                    faceScores.add(face.getScore());
                    faceIds.add(face.getId());

                    mCollector.expectTrue("Face id shouldn't be -1 for FULL mode",
                            face.getId() != Face.ID_UNSUPPORTED);
                    boolean leftEyeAvailable =
                            mCollector.expectTrue("Left eye position shouldn't be null",
                                    face.getLeftEyePosition() != null);
                    boolean rightEyeAvailable =
                            mCollector.expectTrue("Right eye position shouldn't be null",
                                    face.getRightEyePosition() != null);
                    boolean mouthAvailable =
                            mCollector.expectTrue("Mouth position shouldn't be null",
                                    face.getMouthPosition() != null);
                    // Eyes/mouth position should be inside of the face rect.
                    if (leftEyeAvailable) {
                        Point leftEye = face.getLeftEyePosition();
                        mCollector.expectTrue("Left eye " + leftEye + " should be "
                                + "inside of face rect " + faceBound,
                                faceBound.contains(leftEye.x, leftEye.y));
                    }
                    if (rightEyeAvailable) {
                        Point rightEye = face.getRightEyePosition();
                        mCollector.expectTrue("Right eye " + rightEye + " should be "
                                + "inside of face rect " + faceBound,
                                faceBound.contains(rightEye.x, rightEye.y));
                    }
                    if (mouthAvailable) {
                        Point mouth = face.getMouthPosition();
                        mCollector.expectTrue("Mouth " + mouth + " should be inside of"
                                + " face rect " + faceBound,
                                faceBound.contains(mouth.x, mouth.y));
                    }
                }
            }
            mCollector.expectValuesInRange("Face scores are invalid", faceScores,
                    Face.SCORE_MIN, Face.SCORE_MAX);
            mCollector.expectValuesUnique("Face ids are invalid", faceIds);
        }
    }

    /**
     * Test tone map mode and result by camera
     */
    private void toneMapTestByCamera() throws Exception {
        if (!mStaticInfo.isManualToneMapSupported()) {
            return;
        }

        CaptureRequest.Builder requestBuilder =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        int[] toneMapModes = mStaticInfo.getAvailableToneMapModesChecked();
        // Test AUTO modes first. Note that FAST/HQ must either both be present or both be absent.
        for (int i = 0; i < toneMapModes.length; i++) {
            if (toneMapModes[i] == CaptureRequest.TONEMAP_MODE_FAST && i > 0) {
                int tmpMode = toneMapModes[0];
                toneMapModes[0] = CaptureRequest.TONEMAP_MODE_FAST;
                toneMapModes[i] = tmpMode;
            }
            if (toneMapModes[i] == CaptureRequest.TONEMAP_MODE_HIGH_QUALITY && i > 1) {
                int tmpMode = toneMapModes[1];
                toneMapModes[1] = CaptureRequest.TONEMAP_MODE_HIGH_QUALITY;
                toneMapModes[i] = tmpMode;
            }
        }
        for (int mode : toneMapModes) {
            if (VERBOSE) {
                Log.v(TAG, "Testing tonemap mode " + mode);
            }

            requestBuilder.set(CaptureRequest.TONEMAP_MODE, mode);
            switch (mode) {
                case CaptureRequest.TONEMAP_MODE_CONTRAST_CURVE:
                    TonemapCurve toneCurve = new TonemapCurve(TONEMAP_CURVE_LINEAR,
                            TONEMAP_CURVE_LINEAR, TONEMAP_CURVE_LINEAR);
                    requestBuilder.set(CaptureRequest.TONEMAP_CURVE, toneCurve);
                    testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);

                    toneCurve = new TonemapCurve(TONEMAP_CURVE_SRGB,
                            TONEMAP_CURVE_SRGB, TONEMAP_CURVE_SRGB);
                    requestBuilder.set(CaptureRequest.TONEMAP_CURVE, toneCurve);
                    testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
                    break;
                case CaptureRequest.TONEMAP_MODE_GAMMA_VALUE:
                    requestBuilder.set(CaptureRequest.TONEMAP_GAMMA, 1.0f);
                    testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
                    requestBuilder.set(CaptureRequest.TONEMAP_GAMMA, 2.2f);
                    testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
                    requestBuilder.set(CaptureRequest.TONEMAP_GAMMA, 5.0f);
                    testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
                    break;
                case CaptureRequest.TONEMAP_MODE_PRESET_CURVE:
                    requestBuilder.set(CaptureRequest.TONEMAP_PRESET_CURVE,
                            CaptureRequest.TONEMAP_PRESET_CURVE_REC709);
                    testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
                    requestBuilder.set(CaptureRequest.TONEMAP_PRESET_CURVE,
                            CaptureRequest.TONEMAP_PRESET_CURVE_SRGB);
                    testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
                    break;
                default:
                    testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder);
                    break;
            }
        }

    }
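    /**
     * Minimal illustrative sketch (not used by the test flow above): TONEMAP_MODE_GAMMA_VALUE is
     * documented to behave like the curve out = pow(in, 1.0f / gamma). This helper builds an
     * equivalent (Pin, Pout) point array that could be fed to a TonemapCurve for
     * TONEMAP_MODE_CONTRAST_CURVE, e.g. new TonemapCurve(gammaCurve(2.2f, 16), ...). The point
     * count and the helper itself are assumptions for illustration only.
     */
    private static float[] gammaCurve(float gamma, int pointCount) {
        float[] curve = new float[pointCount * 2];
        for (int i = 0; i < pointCount; i++) {
            float in = (float) i / (pointCount - 1);
            curve[2 * i] = in;                                     // Pin
            curve[2 * i + 1] = (float) Math.pow(in, 1.0f / gamma); // Pout
        }
        return curve;
    }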
2734 2735 /** 2736 * Test tonemap mode with specified request settings 2737 * 2738 * @param numFramesVerified Number of results to be verified 2739 * @param requestBuilder the request builder of settings to be tested 2740 */ 2741 private void testToneMapMode (int numFramesVerified, 2742 CaptureRequest.Builder requestBuilder) throws Exception { 2743 final int MIN_TONEMAP_CURVE_POINTS = 2; 2744 final Float ZERO = new Float(0); 2745 final Float ONE = new Float(1.0f); 2746 2747 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 2748 int tonemapMode = requestBuilder.get(CaptureRequest.TONEMAP_MODE); 2749 Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size. 2750 startPreview(requestBuilder, maxPreviewSz, listener); 2751 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 2752 2753 int maxCurvePoints = mStaticInfo.getMaxTonemapCurvePointChecked(); 2754 for (int i = 0; i < numFramesVerified; i++) { 2755 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 2756 mCollector.expectEquals("Capture result tonemap mode should match request", tonemapMode, 2757 result.get(CaptureResult.TONEMAP_MODE)); 2758 TonemapCurve tc = getValueNotNull(result, CaptureResult.TONEMAP_CURVE); 2759 int pointCount = tc.getPointCount(TonemapCurve.CHANNEL_RED); 2760 float[] mapRed = new float[pointCount * TonemapCurve.POINT_SIZE]; 2761 pointCount = tc.getPointCount(TonemapCurve.CHANNEL_GREEN); 2762 float[] mapGreen = new float[pointCount * TonemapCurve.POINT_SIZE]; 2763 pointCount = tc.getPointCount(TonemapCurve.CHANNEL_BLUE); 2764 float[] mapBlue = new float[pointCount * TonemapCurve.POINT_SIZE]; 2765 tc.copyColorCurve(TonemapCurve.CHANNEL_RED, mapRed, 0); 2766 tc.copyColorCurve(TonemapCurve.CHANNEL_GREEN, mapGreen, 0); 2767 tc.copyColorCurve(TonemapCurve.CHANNEL_BLUE, mapBlue, 0); 2768 if (tonemapMode == CaptureResult.TONEMAP_MODE_CONTRAST_CURVE) { 2769 /** 2770 * TODO: need to figure out a good way to measure the difference 2771 * between request and result, as they may have different array 2772 * sizes. 2773 */ 2774 } else if (tonemapMode == CaptureResult.TONEMAP_MODE_GAMMA_VALUE) { 2775 mCollector.expectEquals("Capture result gamma value should match request", 2776 requestBuilder.get(CaptureRequest.TONEMAP_GAMMA), 2777 result.get(CaptureResult.TONEMAP_GAMMA)); 2778 } else if (tonemapMode == CaptureResult.TONEMAP_MODE_PRESET_CURVE) { 2779 mCollector.expectEquals("Capture result preset curve should match request", 2780 requestBuilder.get(CaptureRequest.TONEMAP_PRESET_CURVE), 2781 result.get(CaptureResult.TONEMAP_PRESET_CURVE)); 2782 } 2783 2784 // Tonemap curve result availability and basic validity check for all modes.
2785 mCollector.expectValuesInRange("Tonemap curve red values are out of range", 2786 CameraTestUtils.toObject(mapRed), /*min*/ZERO, /*max*/ONE); 2787 mCollector.expectInRange("Tonemap curve red length is out of range", 2788 mapRed.length, MIN_TONEMAP_CURVE_POINTS, maxCurvePoints * 2); 2789 mCollector.expectValuesInRange("Tonemap curve green values are out of range", 2790 CameraTestUtils.toObject(mapGreen), /*min*/ZERO, /*max*/ONE); 2791 mCollector.expectInRange("Tonemap curve green length is out of range", 2792 mapGreen.length, MIN_TONEMAP_CURVE_POINTS, maxCurvePoints * 2); 2793 mCollector.expectValuesInRange("Tonemap curve blue values are out of range", 2794 CameraTestUtils.toObject(mapBlue), /*min*/ZERO, /*max*/ONE); 2795 mCollector.expectInRange("Tonemap curve blue length is out of range", 2796 mapBlue.length, MIN_TONEMAP_CURVE_POINTS, maxCurvePoints * 2); 2797 2798 // Make sure capture result tonemap has identical channels. 2799 if (mStaticInfo.isMonochromeCamera()) { 2800 mCollector.expectEquals("Capture result tonemap of monochrome camera should " + 2801 "have same dimension for all channels", mapRed.length, mapGreen.length); 2802 mCollector.expectEquals("Capture result tonemap of monochrome camera should " + 2803 "have same dimension for all channels", mapRed.length, mapBlue.length); 2804 2805 if (mapRed.length == mapGreen.length && mapRed.length == mapBlue.length) { 2806 boolean isIdentical = true; 2807 for (int j = 0; j < mapRed.length; j++) { 2808 isIdentical = (mapRed[j] == mapGreen[j] && mapRed[j] == mapBlue[j]); 2809 if (!isIdentical) 2810 break; 2811 } 2812 mCollector.expectTrue("Capture result tonemap of monochrome camera should " + 2813 "be identical between all channels", isIdentical); 2814 } 2815 } 2816 } 2817 stopPreview(); 2818 } 2819 2820 /** 2821 * Test awb mode control. 2822 * <p> 2823 * Test each supported AWB mode, verify the AWB mode in capture result 2824 * matches request. When AWB is locked, the color correction gains and 2825 * transform should remain unchanged. 2826 * </p> 2827 */ 2828 private void awbModeAndLockTestByCamera() throws Exception { 2829 int[] awbModes = mStaticInfo.getAwbAvailableModesChecked(); 2830 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 2831 boolean canSetAwbLock = mStaticInfo.isAwbLockSupported(); 2832 CaptureRequest.Builder requestBuilder = 2833 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 2834 startPreview(requestBuilder, maxPreviewSize, /*listener*/null); 2835 2836 for (int mode : awbModes) { 2837 SimpleCaptureCallback listener; 2838 requestBuilder.set(CaptureRequest.CONTROL_AWB_MODE, mode); 2839 listener = new SimpleCaptureCallback(); 2840 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler); 2841 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 2842 2843 // Verify AWB mode in capture result. 2844 verifyCaptureResultForKey(CaptureResult.CONTROL_AWB_MODE, mode, listener, 2845 NUM_FRAMES_VERIFIED); 2846 2847 if (mode == CameraMetadata.CONTROL_AWB_MODE_AUTO && canSetAwbLock) { 2848 // Verify color correction transform and gains stay unchanged after a lock. 
2849 requestBuilder.set(CaptureRequest.CONTROL_AWB_LOCK, true); 2850 listener = new SimpleCaptureCallback(); 2851 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler); 2852 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 2853 2854 if (mStaticInfo.areKeysAvailable(CaptureResult.CONTROL_AWB_STATE)) { 2855 waitForResultValue(listener, CaptureResult.CONTROL_AWB_STATE, 2856 CaptureResult.CONTROL_AWB_STATE_LOCKED, NUM_RESULTS_WAIT_TIMEOUT); 2857 } 2858 2859 } 2860 // Don't verify auto mode result if AWB lock is not supported 2861 if (mode != CameraMetadata.CONTROL_AWB_MODE_AUTO || canSetAwbLock) { 2862 verifyAwbCaptureResultUnchanged(listener, NUM_FRAMES_VERIFIED); 2863 } 2864 } 2865 } 2866 2867 private void verifyAwbCaptureResultUnchanged(SimpleCaptureCallback listener, 2868 int numFramesVerified) { 2869 // Skip check if cc gains/transform/mode are not available 2870 if (!mStaticInfo.areKeysAvailable( 2871 CaptureResult.COLOR_CORRECTION_GAINS, 2872 CaptureResult.COLOR_CORRECTION_TRANSFORM, 2873 CaptureResult.COLOR_CORRECTION_MODE)) { 2874 return; 2875 } 2876 2877 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 2878 RggbChannelVector lockedGains = 2879 getValueNotNull(result, CaptureResult.COLOR_CORRECTION_GAINS); 2880 ColorSpaceTransform lockedTransform = 2881 getValueNotNull(result, CaptureResult.COLOR_CORRECTION_TRANSFORM); 2882 2883 for (int i = 0; i < numFramesVerified; i++) { 2884 result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 2885 // Color correction mode check is skipped here, as it is checked in colorCorrectionTest. 2886 validateColorCorrectionResult(result, result.get(CaptureResult.COLOR_CORRECTION_MODE)); 2887 2888 RggbChannelVector gains = getValueNotNull(result, CaptureResult.COLOR_CORRECTION_GAINS); 2889 ColorSpaceTransform transform = 2890 getValueNotNull(result, CaptureResult.COLOR_CORRECTION_TRANSFORM); 2891 mCollector.expectEquals("Color correction gains should remain unchanged after awb lock", 2892 lockedGains, gains); 2893 mCollector.expectEquals("Color correction transform should remain unchanged after" 2894 + " awb lock", lockedTransform, transform); 2895 } 2896 } 2897 2898 /** 2899 * Test AF mode control. 2900 * <p> 2901 * Test all supported AF modes, verify the AF mode in capture result matches 2902 * request. When AF mode is one of the CONTROL_AF_MODE_CONTINUOUS_* mode, 2903 * verify if the AF can converge to PASSIVE_FOCUSED or PASSIVE_UNFOCUSED 2904 * state within certain amount of frames. 2905 * </p> 2906 */ 2907 private void afModeTestByCamera() throws Exception { 2908 int[] afModes = mStaticInfo.getAfAvailableModesChecked(); 2909 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 2910 CaptureRequest.Builder requestBuilder = 2911 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 2912 startPreview(requestBuilder, maxPreviewSize, /*listener*/null); 2913 2914 for (int mode : afModes) { 2915 SimpleCaptureCallback listener; 2916 requestBuilder.set(CaptureRequest.CONTROL_AF_MODE, mode); 2917 listener = new SimpleCaptureCallback(); 2918 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler); 2919 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 2920 2921 // Verify AF mode in capture result. 2922 verifyCaptureResultForKey(CaptureResult.CONTROL_AF_MODE, mode, listener, 2923 NUM_FRAMES_VERIFIED); 2924 2925 // Verify AF can finish a scan for CONTROL_AF_MODE_CONTINUOUS_* modes. 
2926 // In LEGACY mode, a transition to one of the continuous AF modes does not necessarily 2927 // result in a passive AF call if the camera has already been focused, and the scene has 2928 // not changed enough to trigger an AF pass. Skip this constraint for LEGACY. 2929 if (mStaticInfo.isHardwareLevelAtLeastLimited() && 2930 (mode == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE || 2931 mode == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO)) { 2932 List<Integer> afStateList = new ArrayList<Integer>(); 2933 afStateList.add(CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED); 2934 afStateList.add(CaptureResult.CONTROL_AF_STATE_PASSIVE_UNFOCUSED); 2935 waitForAnyResultValue(listener, CaptureResult.CONTROL_AF_STATE, afStateList, 2936 NUM_RESULTS_WAIT_TIMEOUT); 2937 } 2938 } 2939 } 2940 2941 /** 2942 * Test video and optical stabilizations if they are supported by a given camera. 2943 */ 2944 private void stabilizationTestByCamera() throws Exception { 2945 // video stabilization test. 2946 List<Key<?>> keys = mStaticInfo.getCharacteristics().getKeys(); 2947 2948 Integer[] videoStabModes = (keys.contains(CameraCharacteristics. 2949 CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES)) ? 2950 CameraTestUtils.toObject(mStaticInfo.getAvailableVideoStabilizationModesChecked()) : 2951 new Integer[0]; 2952 int[] opticalStabModes = (keys.contains( 2953 CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION)) ? 2954 mStaticInfo.getAvailableOpticalStabilizationChecked() : new int[0]; 2955 2956 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 2957 CaptureRequest.Builder requestBuilder = 2958 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 2959 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 2960 startPreview(requestBuilder, maxPreviewSize, listener); 2961 2962 for (Integer mode : videoStabModes) { 2963 listener = new SimpleCaptureCallback(); 2964 requestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE, mode); 2965 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler); 2966 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 2967 // Video stabilization could return any modes. 2968 verifyAnyCaptureResultForKey(CaptureResult.CONTROL_VIDEO_STABILIZATION_MODE, 2969 videoStabModes, listener, NUM_FRAMES_VERIFIED); 2970 } 2971 2972 for (int mode : opticalStabModes) { 2973 listener = new SimpleCaptureCallback(); 2974 requestBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE, mode); 2975 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler); 2976 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 2977 verifyCaptureResultForKey(CaptureResult.LENS_OPTICAL_STABILIZATION_MODE, mode, 2978 listener, NUM_FRAMES_VERIFIED); 2979 } 2980 2981 stopPreview(); 2982 } 2983 2984 private void digitalZoomTestByCamera(Size previewSize, boolean repeating) throws Exception { 2985 final PointF[] TEST_ZOOM_CENTERS; 2986 final float maxZoom = mStaticInfo.getAvailableMaxDigitalZoomChecked(); 2987 final float ZOOM_ERROR_MARGIN = 0.01f; 2988 if (Math.abs(maxZoom - 1.0f) < ZOOM_ERROR_MARGIN) { 2989 // It doesn't make much sense to test the zoom if the device effectively supports 2990 // no zoom. 2991 return; 2992 } 2993 2994 final int croppingType = mStaticInfo.getScalerCroppingTypeChecked(); 2995 if (croppingType == CameraCharacteristics.SCALER_CROPPING_TYPE_FREEFORM) { 2996 // Set the four corners in a way that the minimally allowed zoom factor is 2x. 
2997 float normalizedLeft = 0.25f; 2998 float normalizedTop = 0.25f; 2999 float normalizedRight = 0.75f; 3000 float normalizedBottom = 0.75f; 3001 // If the max supported zoom is too small, make sure we at least test the max 3002 // Zoom is tested for the four corners. 3003 if (maxZoom < 2.0f) { 3004 normalizedLeft = 0.5f / maxZoom; 3005 normalizedTop = 0.5f / maxZoom; 3006 normalizedRight = 1.0f - normalizedLeft; 3007 normalizedBottom = 1.0f - normalizedTop; 3008 } 3009 TEST_ZOOM_CENTERS = new PointF[] { 3010 new PointF(0.5f, 0.5f), // Center point 3011 new PointF(normalizedLeft, normalizedTop), // top left corner zoom 3012 new PointF(normalizedRight, normalizedTop), // top right corner zoom 3013 new PointF(normalizedLeft, normalizedBottom), // bottom left corner zoom 3014 new PointF(normalizedRight, normalizedBottom), // bottom right corner zoom 3015 }; 3016 3017 if (VERBOSE) { 3018 Log.v(TAG, "Testing zoom with CROPPING_TYPE = FREEFORM"); 3019 } 3020 } else { 3021 // CENTER_ONLY 3022 TEST_ZOOM_CENTERS = new PointF[] { 3023 new PointF(0.5f, 0.5f), // Center point 3024 }; 3025 3026 if (VERBOSE) { 3027 Log.v(TAG, "Testing zoom with CROPPING_TYPE = CENTER_ONLY"); 3028 } 3029 } 3030 3031 final Rect activeArraySize = mStaticInfo.getActiveArraySizeChecked(); 3032 final Rect defaultCropRegion = new Rect(0, 0, 3033 activeArraySize.width(), activeArraySize.height()); 3034 Rect[] cropRegions = new Rect[ZOOM_STEPS]; 3035 MeteringRectangle[][] expectRegions = new MeteringRectangle[ZOOM_STEPS][]; 3036 CaptureRequest.Builder requestBuilder = 3037 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 3038 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 3039 3040 updatePreviewSurface(previewSize); 3041 configurePreviewOutput(requestBuilder); 3042 3043 CaptureRequest[] requests = new CaptureRequest[ZOOM_STEPS]; 3044 3045 // Set algorithm regions 3046 final int METERING_RECT_RATIO = 10; 3047 final MeteringRectangle[][] defaultMeteringRects = new MeteringRectangle[][] { 3048 { 3049 new MeteringRectangle ( 3050 /*x*/0, /*y*/0, activeArraySize.width(), activeArraySize.height(), 3051 /*meteringWeight*/1), /* full active region */ 3052 }, 3053 { 3054 new MeteringRectangle ( 3055 /*x*/0, /*y*/0, activeArraySize.width()/METERING_RECT_RATIO, 3056 activeArraySize.height()/METERING_RECT_RATIO, 3057 /*meteringWeight*/1), 3058 }, 3059 { 3060 new MeteringRectangle ( 3061 /*x*/(int)(activeArraySize.width() * (0.5f - 0.5f/METERING_RECT_RATIO)), 3062 /*y*/(int)(activeArraySize.height() * (0.5f - 0.5f/METERING_RECT_RATIO)), 3063 activeArraySize.width()/METERING_RECT_RATIO, 3064 activeArraySize.height()/METERING_RECT_RATIO, 3065 /*meteringWeight*/1), 3066 }, 3067 }; 3068 3069 final int CAPTURE_SUBMIT_REPEAT; 3070 final int NUM_RESULTS_TO_SKIP; 3071 { 3072 int maxLatency = mStaticInfo.getSyncMaxLatency(); 3073 if (maxLatency == CameraMetadata.SYNC_MAX_LATENCY_UNKNOWN) { 3074 CAPTURE_SUBMIT_REPEAT = NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY + 1; 3075 } else { 3076 CAPTURE_SUBMIT_REPEAT = maxLatency + 1; 3077 } 3078 if (repeating) { 3079 NUM_RESULTS_TO_SKIP = NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY + 1; 3080 } else { 3081 NUM_RESULTS_TO_SKIP = CAPTURE_SUBMIT_REPEAT - 1; 3082 } 3083 } 3084 3085 if (VERBOSE) { 3086 Log.v(TAG, "Testing zoom with CAPTURE_SUBMIT_REPEAT = " + CAPTURE_SUBMIT_REPEAT); 3087 } 3088 3089 for (MeteringRectangle[] meteringRect : defaultMeteringRects) { 3090 for (int algo = 0; algo < NUM_ALGORITHMS; algo++) { 3091 update3aRegion(requestBuilder, algo, meteringRect, mStaticInfo); 3092 } 3093 
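            // Worked example (illustration only, not an extra check): for a centered zoom of
            // factor z, the expected crop is roughly (W / z) x (H / z) centered in the active
            // array; e.g. a hypothetical 4000x3000 array at z = 2.0 gives approximately
            // Rect(1000, 750, 3000, 2250). getCropRegionForZoom() below computes the actual
            // request crop regions, including the off-center cases used for FREEFORM cropping.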
3094 for (PointF center : TEST_ZOOM_CENTERS) { 3095 Rect previousCrop = null; 3096 3097 for (int i = 0; i < ZOOM_STEPS; i++) { 3098 /* 3099 * Submit capture request 3100 */ 3101 float zoomFactor = (float) (1.0f + (maxZoom - 1.0) * i / ZOOM_STEPS); 3102 cropRegions[i] = getCropRegionForZoom(zoomFactor, center, 3103 maxZoom, defaultCropRegion); 3104 if (VERBOSE) { 3105 Log.v(TAG, "Testing Zoom for factor " + zoomFactor + " and center " + 3106 center + " The cropRegion is " + cropRegions[i] + 3107 " Preview size is " + previewSize + ", repeating is " + repeating); 3108 } 3109 requestBuilder.set(CaptureRequest.SCALER_CROP_REGION, cropRegions[i]); 3110 requests[i] = requestBuilder.build(); 3111 if (VERBOSE) { 3112 Log.v(TAG, "submit crop region " + cropRegions[i]); 3113 } 3114 if (repeating) { 3115 mSession.setRepeatingRequest(requests[i], listener, mHandler); 3116 // Drop first few frames 3117 waitForNumResults(listener, NUM_RESULTS_TO_SKIP); 3118 // Interleave a regular capture 3119 mSession.capture(requests[0], listener, mHandler); 3120 } else { 3121 for (int j = 0; j < CAPTURE_SUBMIT_REPEAT; ++j) { 3122 mSession.capture(requests[i], listener, mHandler); 3123 } 3124 } 3125 3126 /* 3127 * Validate capture result 3128 */ 3129 waitForNumResults(listener, NUM_RESULTS_TO_SKIP); // Drop first few frames 3130 TotalCaptureResult result = listener.getTotalCaptureResultForRequest( 3131 requests[i], NUM_RESULTS_WAIT_TIMEOUT); 3132 List<CaptureResult> partialResults = result.getPartialResults(); 3133 3134 Rect cropRegion = getValueNotNull(result, CaptureResult.SCALER_CROP_REGION); 3135 for (CaptureResult partialResult : partialResults) { 3136 Rect cropRegionInPartial = 3137 partialResult.get(CaptureResult.SCALER_CROP_REGION); 3138 if (cropRegionInPartial != null) { 3139 mCollector.expectEquals("SCALER_CROP_REGION in partial result must " 3140 + "match in final result", cropRegionInPartial, cropRegion); 3141 } 3142 } 3143 3144 if (CameraTestUtils.isStabilizationOff(requests[i])) { 3145 /* 3146 * Validate resulting crop regions 3147 */ 3148 if (previousCrop != null) { 3149 Rect currentCrop = cropRegion; 3150 mCollector.expectTrue(String.format( 3151 "Crop region should shrink or stay the same " 3152 + "(previous = %s, current = %s)", 3153 previousCrop, currentCrop), 3154 previousCrop.equals(currentCrop) 3155 || (previousCrop.width() > currentCrop.width() 3156 && previousCrop.height() > currentCrop.height())); 3157 } 3158 3159 if (mStaticInfo.isHardwareLevelAtLeastLimited()) { 3160 mCollector.expectRectsAreSimilar( 3161 "Request and result crop region should be similar", 3162 cropRegions[i], cropRegion, CROP_REGION_ERROR_PERCENT_DELTA); 3163 } 3164 3165 if (croppingType == SCALER_CROPPING_TYPE_CENTER_ONLY) { 3166 mCollector.expectRectCentered( 3167 "Result crop region should be centered inside the active array", 3168 new Size(activeArraySize.width(), activeArraySize.height()), 3169 cropRegion, CROP_REGION_ERROR_PERCENT_CENTERED); 3170 } 3171 3172 /* 3173 * Validate resulting metering regions 3174 */ 3175 3176 // Use the actual reported crop region to calculate the resulting 3177 // metering region 3178 expectRegions[i] = getExpectedOutputRegion( 3179 /*requestRegion*/meteringRect, 3180 /*cropRect*/ cropRegion); 3181 3182 // Verify Output 3A region is intersection of input 3A region and 3183 // crop region 3184 for (int algo = 0; algo < NUM_ALGORITHMS; algo++) { 3185 validate3aRegion(result, partialResults, algo, expectRegions[i], 3186 false/*scaleByZoomRatio*/, mStaticInfo); 3187 } 3188 } 3189 3190 
previousCrop = cropRegion; 3191 } 3192 3193 if (maxZoom > 1.0f) { 3194 mCollector.expectTrue( 3195 String.format("Most zoomed-in crop region should be smaller " + 3196 "than active array w/h " + 3197 "(last crop = %s, active array = %s)", 3198 previousCrop, activeArraySize), 3199 (previousCrop.width() < activeArraySize.width() && 3200 previousCrop.height() < activeArraySize.height())); 3201 } 3202 } 3203 } 3204 } 3205 3206 private void zoomRatioTestByCamera(Size previewSize, boolean useZoomRatioMethod) 3207 throws Exception { 3208 final Range<Float> zoomRatioRange = mStaticInfo.getZoomRatioRangeChecked(); 3209 // The error margin is derived from a VGA-size camera zoomed all the way to 10x, in which 3210 // case the cropping error can be as large as 480/46 - 480/48 = 0.435. 3211 final float ZOOM_ERROR_MARGIN = 0.05f; 3212 3213 final Rect activeArraySize = mStaticInfo.getActiveArraySizeChecked(); 3214 final Rect defaultCropRegion = 3215 new Rect(0, 0, activeArraySize.width(), activeArraySize.height()); 3216 final Rect zoom2xCropRegion = 3217 new Rect(activeArraySize.width()/4, activeArraySize.height()/4, 3218 activeArraySize.width()*3/4, activeArraySize.height()*3/4); 3219 MeteringRectangle[][] expectRegions = new MeteringRectangle[ZOOM_STEPS][]; 3220 CaptureRequest.Builder requestBuilder = 3221 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 3222 requestBuilder.set(CaptureRequest.SCALER_CROP_REGION, defaultCropRegion); 3223 if (Flags.zoomMethod() && useZoomRatioMethod) { 3224 requestBuilder.set(CaptureRequest.CONTROL_ZOOM_METHOD, 3225 CameraMetadata.CONTROL_ZOOM_METHOD_ZOOM_RATIO); 3226 } 3227 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 3228 3229 updatePreviewSurface(previewSize); 3230 configurePreviewOutput(requestBuilder); 3231 3232 // Set algorithm regions to full active region 3233 final MeteringRectangle[] defaultMeteringRect = new MeteringRectangle[] { 3234 new MeteringRectangle ( 3235 /*x*/0, /*y*/0, activeArraySize.width(), activeArraySize.height(), 3236 /*meteringWeight*/1) 3237 }; 3238 3239 for (int algo = 0; algo < NUM_ALGORITHMS; algo++) { 3240 update3aRegion(requestBuilder, algo, defaultMeteringRect, mStaticInfo); 3241 } 3242 3243 final int captureSubmitRepeat; 3244 { 3245 int maxLatency = mStaticInfo.getSyncMaxLatency(); 3246 if (maxLatency == CameraMetadata.SYNC_MAX_LATENCY_UNKNOWN) { 3247 captureSubmitRepeat = NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY + 1; 3248 } else { 3249 captureSubmitRepeat = maxLatency + 1; 3250 } 3251 } 3252 3253 float previousRatio = zoomRatioRange.getLower(); 3254 for (int i = 0; i < ZOOM_STEPS; i++) { 3255 /* 3256 * Submit capture request 3257 */ 3258 float zoomFactor = zoomRatioRange.getLower() + (zoomRatioRange.getUpper() - 3259 zoomRatioRange.getLower()) * i / ZOOM_STEPS; 3260 if (VERBOSE) { 3261 Log.v(TAG, "Testing Zoom ratio " + zoomFactor + " Preview size is " + previewSize); 3262 } 3263 requestBuilder.set(CaptureRequest.CONTROL_ZOOM_RATIO, zoomFactor); 3264 requestBuilder.set(CaptureRequest.SCALER_CROP_REGION, defaultCropRegion); 3265 CaptureRequest request = requestBuilder.build(); 3266 for (int j = 0; j < captureSubmitRepeat; ++j) { 3267 mSession.capture(request, listener, mHandler); 3268 } 3269 3270 /* 3271 * Validate capture result 3272 */ 3273 waitForNumResults(listener, captureSubmitRepeat - 1); // Drop first few frames 3274 TotalCaptureResult result = listener.getTotalCaptureResultForRequest( 3275 request, NUM_RESULTS_WAIT_TIMEOUT); 3276 List<CaptureResult> partialResults = result.getPartialResults(); 3277
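            // Illustration (not an additional check): when zoom is driven by CONTROL_ZOOM_RATIO,
            // the result SCALER_CROP_REGION is expected to stay close to the full active array
            // even at high ratios, unlike the crop-region path where e.g. a 2x zoom corresponds
            // to the quarter-inset zoom2xCropRegion defined above. The validations below scale
            // their error margin with the zoom factor to absorb HAL-side conversion error.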
float resultZoomRatio = getValueNotNull(result, CaptureResult.CONTROL_ZOOM_RATIO); 3278 Rect cropRegion = getValueNotNull(result, CaptureResult.SCALER_CROP_REGION); 3279 3280 for (CaptureResult partialResult : partialResults) { 3281 Rect cropRegionInPartial = 3282 partialResult.get(CaptureResult.SCALER_CROP_REGION); 3283 if (cropRegionInPartial != null) { 3284 mCollector.expectEquals("SCALER_CROP_REGION in partial result must " 3285 + "match in final result", cropRegionInPartial, cropRegion); 3286 } 3287 3288 Float zoomRatioInPartial = partialResult.get(CaptureResult.CONTROL_ZOOM_RATIO); 3289 if (zoomRatioInPartial != null) { 3290 mCollector.expectEquals("CONTROL_ZOOM_RATIO in partial result must match" 3291 + " that in final result", resultZoomRatio, zoomRatioInPartial); 3292 } 3293 } 3294 3295 /* 3296 * Validate resulting crop regions and zoom ratio 3297 */ 3298 mCollector.expectTrue(String.format( 3299 "Zoom ratio should increase or stay the same " + 3300 "(previous = %f, current = %f)", 3301 previousRatio, resultZoomRatio), 3302 Math.abs(previousRatio - resultZoomRatio) < ZOOM_ERROR_MARGIN || 3303 (previousRatio < resultZoomRatio)); 3304 3305 if (CameraTestUtils.isStabilizationOff(request)) { 3306 mCollector.expectTrue(String.format( 3307 "Request and result zoom ratio should be similar " 3308 + "(requested = %f, result = %f)", zoomFactor, resultZoomRatio), 3309 Math.abs(zoomFactor - resultZoomRatio) / zoomFactor <= ZOOM_ERROR_MARGIN); 3310 3311 // In case the zoom ratio is converted to a crop region in the HAL, due to error 3312 // magnification when converting to the post-zoom crop region, scale the error 3313 // threshold for the crop region check. 3314 float errorMultiplier = Math.max(1.0f, zoomFactor); 3315 if (mStaticInfo.isHardwareLevelAtLeastLimited()) { 3316 mCollector.expectRectsAreSimilar( 3317 "Request and result crop region should be similar", 3318 defaultCropRegion, cropRegion, 3319 CROP_REGION_ERROR_PERCENT_DELTA * errorMultiplier); 3320 } 3321 3322 mCollector.expectRectCentered( 3323 "Result crop region should be centered inside the active array", 3324 new Size(activeArraySize.width(), activeArraySize.height()), 3325 cropRegion, CROP_REGION_ERROR_PERCENT_CENTERED * errorMultiplier); 3326 3327 /* 3328 * Validate resulting metering regions 3329 */ 3330 // Use the actual reported crop region to calculate the resulting metering region 3331 expectRegions[i] = getExpectedOutputRegion( 3332 /*requestRegion*/defaultMeteringRect, 3333 /*cropRect*/ cropRegion); 3334 3335 // Verify Output 3A region is intersection of input 3A region and crop region 3336 boolean scaleByZoomRatio = zoomFactor > 1.0f; 3337 for (int algo = 0; algo < NUM_ALGORITHMS; algo++) { 3338 validate3aRegion(result, partialResults, algo, expectRegions[i], 3339 scaleByZoomRatio, mStaticInfo); 3340 } 3341 } 3342 3343 previousRatio = resultZoomRatio; 3344 3345 /* 3346 * Set a windowboxing cropRegion while zoomRatio is not 1.0x or the zoomRatio method 3347 * is used, and make sure the crop region is overridden.
3348 */ 3349 if (zoomFactor != 1.0f || useZoomRatioMethod) { 3350 requestBuilder.set(CaptureRequest.SCALER_CROP_REGION, zoom2xCropRegion); 3351 CaptureRequest requestWithCrop = requestBuilder.build(); 3352 for (int j = 0; j < captureSubmitRepeat; ++j) { 3353 mSession.capture(requestWithCrop, listener, mHandler); 3354 } 3355 3356 waitForNumResults(listener, captureSubmitRepeat - 1); // Drop first few frames 3357 CaptureResult resultWithCrop = listener.getCaptureResultForRequest( 3358 requestWithCrop, NUM_RESULTS_WAIT_TIMEOUT); 3359 float resultZoomRatioWithCrop = getValueNotNull(resultWithCrop, 3360 CaptureResult.CONTROL_ZOOM_RATIO); 3361 Rect cropRegionWithCrop = getValueNotNull(resultWithCrop, 3362 CaptureResult.SCALER_CROP_REGION); 3363 3364 mCollector.expectTrue(String.format( 3365 "Result zoom ratio should remain the same (activeArrayCrop: %f, " + 3366 "zoomedCrop: %f)", resultZoomRatio, resultZoomRatioWithCrop), 3367 Math.abs(resultZoomRatio - resultZoomRatioWithCrop) < ZOOM_ERROR_MARGIN); 3368 3369 if (mStaticInfo.isHardwareLevelAtLeastLimited()) { 3370 mCollector.expectRectsAreSimilar( 3371 "Result crop region should remain the same with or without crop", 3372 cropRegion, cropRegionWithCrop, CROP_REGION_ERROR_PERCENT_DELTA); 3373 } 3374 } 3375 } 3376 } 3377 3378 private void zoomTimestampIncreaseTestByCamera() throws Exception { 3379 final Range<Float> zoomRatioRange = mStaticInfo.getZoomRatioRangeChecked(); 3380 3381 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 3382 updatePreviewSurface(maxPreviewSize); 3383 CaptureRequest.Builder requestBuilder = 3384 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 3385 configurePreviewOutput(requestBuilder); 3386 3387 // Submit a sequence of requests first zooming in then zooming out. 3388 List<CaptureRequest> requests = new ArrayList<CaptureRequest>(); 3389 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 3390 float zoomRange = zoomRatioRange.getUpper() - zoomRatioRange.getLower(); 3391 for (int i = 0; i <= ZOOM_STEPS; i++) { 3392 float zoomFactor = zoomRatioRange.getUpper() - (zoomRange * i / ZOOM_STEPS); 3393 requestBuilder.set(CaptureRequest.CONTROL_ZOOM_RATIO, zoomFactor); 3394 // Add each ratio to both the beginning and end of the list. 3395 requests.add(requestBuilder.build()); 3396 requests.add(0, requestBuilder.build()); 3397 } 3398 int seqId = mSession.captureBurst(requests, listener, mHandler); 3399 3400 // onCaptureSequenceCompleted() trails all capture results. Upon its return, 3401 // we make sure we've received all results/errors. 
3402 listener.getCaptureSequenceLastFrameNumber( 3403 seqId, WAIT_FOR_RESULT_TIMEOUT_MS * ZOOM_STEPS); 3404 // Check timestamp monotonically increase for the whole sequence 3405 long prevTimestamp = 0; 3406 while (listener.hasMoreResults()) { 3407 TotalCaptureResult result = listener.getTotalCaptureResult( 3408 WAIT_FOR_RESULT_TIMEOUT_MS); 3409 long timestamp = getValueNotNull(result, CaptureResult.SENSOR_TIMESTAMP); 3410 mCollector.expectGreater("Sensor timestamp must monotonically increase, " 3411 + "but changed from " + prevTimestamp + " to " + timestamp, 3412 prevTimestamp, timestamp); 3413 prevTimestamp = timestamp; 3414 } 3415 } 3416 3417 private void digitalZoomPreviewCombinationTestByCamera() throws Exception { 3418 final double ASPECT_RATIO_THRESHOLD = 0.001; 3419 List<Double> aspectRatiosTested = new ArrayList<Double>(); 3420 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 3421 aspectRatiosTested.add((double)(maxPreviewSize.getWidth()) / maxPreviewSize.getHeight()); 3422 3423 for (Size size : mOrderedPreviewSizes) { 3424 // Max preview size was already tested in testDigitalZoom test. skip it. 3425 if (size.equals(maxPreviewSize)) { 3426 continue; 3427 } 3428 3429 // Only test the largest size for each aspect ratio. 3430 double aspectRatio = (double)(size.getWidth()) / size.getHeight(); 3431 if (isAspectRatioContained(aspectRatiosTested, aspectRatio, ASPECT_RATIO_THRESHOLD)) { 3432 continue; 3433 } 3434 3435 if (VERBOSE) { 3436 Log.v(TAG, "Test preview size " + size.toString() + " digital zoom"); 3437 } 3438 3439 aspectRatiosTested.add(aspectRatio); 3440 digitalZoomTestByCamera(size, /*repeating*/false); 3441 } 3442 } 3443 3444 private static boolean isAspectRatioContained(List<Double> aspectRatioList, 3445 double aspectRatio, double delta) { 3446 for (Double ratio : aspectRatioList) { 3447 if (Math.abs(ratio - aspectRatio) < delta) { 3448 return true; 3449 } 3450 } 3451 3452 return false; 3453 } 3454 3455 private void sceneModeTestByCamera() throws Exception { 3456 int[] sceneModes = mStaticInfo.getAvailableSceneModesChecked(); 3457 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 3458 CaptureRequest.Builder requestBuilder = 3459 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 3460 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 3461 requestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_USE_SCENE_MODE); 3462 startPreview(requestBuilder, maxPreviewSize, listener); 3463 3464 for(int mode : sceneModes) { 3465 requestBuilder.set(CaptureRequest.CONTROL_SCENE_MODE, mode); 3466 listener = new SimpleCaptureCallback(); 3467 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler); 3468 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 3469 3470 verifyCaptureResultForKey(CaptureResult.CONTROL_SCENE_MODE, 3471 mode, listener, NUM_FRAMES_VERIFIED); 3472 // This also serves as purpose of showing preview for NUM_FRAMES_VERIFIED 3473 verifyCaptureResultForKey(CaptureResult.CONTROL_MODE, 3474 CaptureRequest.CONTROL_MODE_USE_SCENE_MODE, listener, NUM_FRAMES_VERIFIED); 3475 } 3476 } 3477 3478 private void effectModeTestByCamera() throws Exception { 3479 int[] effectModes = mStaticInfo.getAvailableEffectModesChecked(); 3480 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 3481 CaptureRequest.Builder requestBuilder = 3482 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 3483 requestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO); 3484 SimpleCaptureCallback listener = 
new SimpleCaptureCallback(); 3485 startPreview(requestBuilder, maxPreviewSize, listener); 3486 3487 for(int mode : effectModes) { 3488 requestBuilder.set(CaptureRequest.CONTROL_EFFECT_MODE, mode); 3489 listener = new SimpleCaptureCallback(); 3490 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler); 3491 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 3492 3493 verifyCaptureResultForKey(CaptureResult.CONTROL_EFFECT_MODE, 3494 mode, listener, NUM_FRAMES_VERIFIED); 3495 // This also serves as purpose of showing preview for NUM_FRAMES_VERIFIED 3496 verifyCaptureResultForKey(CaptureResult.CONTROL_MODE, 3497 CaptureRequest.CONTROL_MODE_AUTO, listener, NUM_FRAMES_VERIFIED); 3498 } 3499 } 3500 3501 private void extendedSceneModeTestByCamera(List<Range<Integer>> fpsRanges) throws Exception { 3502 Capability[] extendedSceneModeCaps = mStaticInfo.getAvailableExtendedSceneModeCapsChecked(); 3503 if (extendedSceneModeCaps.length == 0) { 3504 return; 3505 } 3506 3507 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 3508 CaptureRequest.Builder requestBuilder = 3509 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 3510 3511 for (Capability cap : extendedSceneModeCaps) { 3512 int mode = cap.getMode(); 3513 requestBuilder.set(CaptureRequest.CONTROL_EXTENDED_SCENE_MODE, mode); 3514 3515 // Test that DISABLED and BOKEH_CONTINUOUS mode doesn't slow down the frame rate 3516 if (mode == CaptureRequest.CONTROL_EXTENDED_SCENE_MODE_DISABLED || 3517 mode == CaptureRequest.CONTROL_EXTENDED_SCENE_MODE_BOKEH_CONTINUOUS) { 3518 verifyFpsNotSlowDown(requestBuilder, NUM_FRAMES_VERIFIED, fpsRanges); 3519 } 3520 3521 Range<Float> zoomRange = cap.getZoomRatioRange(); 3522 float[] zoomRatios = new float[]{zoomRange.getLower(), zoomRange.getUpper()}; 3523 for (float ratio : zoomRatios) { 3524 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 3525 requestBuilder.set(CaptureRequest.CONTROL_ZOOM_RATIO, ratio); 3526 startPreview(requestBuilder, maxPreviewSize, listener); 3527 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 3528 3529 verifyCaptureResultForKey(CaptureResult.CONTROL_EXTENDED_SCENE_MODE, 3530 mode, listener, NUM_FRAMES_VERIFIED); 3531 float zoomRatioDelta = ZOOM_RATIO_ERROR_PERCENT_DELTA * ratio; 3532 if (CameraTestUtils.isStabilizationOff(requestBuilder.build())) { 3533 verifyCaptureResultForKey(CaptureResult.CONTROL_ZOOM_RATIO, 3534 ratio, listener, NUM_FRAMES_VERIFIED, zoomRatioDelta); 3535 } 3536 } 3537 } 3538 } 3539 3540 private void manualFlashStrengthControlTestByCamera() throws Exception { 3541 Size maxPrevSize = mOrderedPreviewSizes.get(0); 3542 int singleMaxLevel = mStaticInfo.getCharacteristics().get( 3543 CameraCharacteristics.FLASH_SINGLE_STRENGTH_MAX_LEVEL); 3544 int torchMaxLevel = mStaticInfo.getCharacteristics().get( 3545 CameraCharacteristics.FLASH_TORCH_STRENGTH_MAX_LEVEL); 3546 int strengthLevel = singleMaxLevel - 1; 3547 3548 SimpleCaptureCallback resultListener = new SimpleCaptureCallback(); 3549 CaptureRequest.Builder requestBuilder = 3550 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 3551 3552 // Single mode 3553 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON); 3554 requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_SINGLE); 3555 requestBuilder.set(CaptureRequest.FLASH_STRENGTH_LEVEL, strengthLevel); 3556 3557 CaptureRequest request; 3558 updatePreviewSurface(maxPrevSize); 3559 configurePreviewOutput(requestBuilder); 3560 
request = requestBuilder.build(); 3561 mSession.capture(request, resultListener, mHandler); 3562 waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 3563 CaptureResult result = 3564 resultListener.getCaptureResultForRequest(request, NUM_RESULTS_WAIT_TIMEOUT); 3565 int resultStrengthLevel = getValueNotNull(result, CaptureResult.FLASH_STRENGTH_LEVEL); 3566 assertTrue(resultStrengthLevel == strengthLevel); 3567 assertTrue(resultStrengthLevel <= singleMaxLevel); 3568 3569 // Torch mode 3570 strengthLevel = torchMaxLevel - 1; 3571 requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH); 3572 requestBuilder.set(CaptureRequest.FLASH_STRENGTH_LEVEL, strengthLevel); 3573 CaptureRequest torchRequest = requestBuilder.build(); 3574 mSession.setRepeatingRequest(torchRequest, resultListener, mHandler); 3575 waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 3576 result = resultListener.getCaptureResultForRequest( 3577 torchRequest, NUM_RESULTS_WAIT_TIMEOUT); 3578 resultStrengthLevel = getValueNotNull(result, CaptureResult.FLASH_STRENGTH_LEVEL); 3579 assertTrue(resultStrengthLevel == strengthLevel); 3580 assertTrue(resultStrengthLevel <= torchMaxLevel); 3581 } 3582 3583 private void autoframingTestByCamera() throws Exception { 3584 // Verify autoframing state, zoom ratio and video stabilizations controls for autoframing 3585 // modes ON and OFF 3586 int[] autoframingModes = {CameraMetadata.CONTROL_AUTOFRAMING_OFF, 3587 CameraMetadata.CONTROL_AUTOFRAMING_ON}; 3588 final int zoomSteps = 5; 3589 final float zoomErrorMargin = 0.05f; 3590 final int kMaxNumFrames = 200; 3591 Size maxPreviewSize = mOrderedPreviewSizes.get(0); // Max preview size. 3592 CaptureRequest.Builder requestBuilder = 3593 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 3594 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 3595 startPreview(requestBuilder, maxPreviewSize, listener); 3596 3597 for (int mode : autoframingModes) { 3598 float expectedZoomRatio = 0.0f; 3599 final Range<Float> zoomRatioRange = mStaticInfo.getZoomRatioRangeChecked(); 3600 for (int i = 0; i < zoomSteps; i++) { 3601 float testZoomRatio = zoomRatioRange.getLower() + (zoomRatioRange.getUpper() 3602 - zoomRatioRange.getLower()) * i / zoomSteps; 3603 // Zoom ratio 1.0f is a special case. 
The ZoomRatioMapper in the framework maintains the 3604 // 1.0f ratio in the CaptureResult 3605 if (testZoomRatio == 1.0f) { 3606 continue; 3607 } 3608 requestBuilder.set(CaptureRequest.CONTROL_AUTOFRAMING, mode); 3609 requestBuilder.set(CaptureRequest.CONTROL_ZOOM_RATIO, testZoomRatio); 3610 listener = new SimpleCaptureCallback(); 3611 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler); 3612 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 3613 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 3614 Float resultZoomRatio = getValueNotNull(result, CaptureResult.CONTROL_ZOOM_RATIO); 3615 int autoframingState = getValueNotNull(result, 3616 CaptureResult.CONTROL_AUTOFRAMING_STATE); 3617 int videoStabilizationMode = getValueNotNull(result, 3618 CaptureResult.CONTROL_VIDEO_STABILIZATION_MODE); 3619 3620 if (mode == CameraMetadata.CONTROL_AUTOFRAMING_ON) { 3621 int numFrames = 0; 3622 while (numFrames < kMaxNumFrames) { 3623 result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 3624 autoframingState = getValueNotNull(result, 3625 CaptureResult.CONTROL_AUTOFRAMING_STATE); 3626 assertTrue("Autoframing state should be FRAMING or CONVERGED when " 3627 + "AUTOFRAMING is ON", 3628 autoframingState == CameraMetadata.CONTROL_AUTOFRAMING_STATE_FRAMING 3629 || autoframingState 3630 == CameraMetadata.CONTROL_AUTOFRAMING_STATE_CONVERGED); 3631 3632 assertTrue("Video Stabilization should be OFF when AUTOFRAMING is ON", 3633 videoStabilizationMode 3634 == CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_OFF); 3635 3636 resultZoomRatio = getValueNotNull(result, CaptureResult.CONTROL_ZOOM_RATIO); 3637 if (autoframingState == 3638 CameraMetadata.CONTROL_AUTOFRAMING_STATE_CONVERGED) { 3639 break; 3640 } 3641 numFrames++; 3642 } 3643 3644 if (autoframingState == CameraMetadata.CONTROL_AUTOFRAMING_STATE_CONVERGED 3645 && expectedZoomRatio == 0.0f) { 3646 expectedZoomRatio = resultZoomRatio; 3647 } 3648 } else { 3649 expectedZoomRatio = testZoomRatio; 3650 assertTrue("Autoframing state should be INACTIVE when AUTOFRAMING is OFF", 3651 autoframingState == CameraMetadata.CONTROL_AUTOFRAMING_STATE_INACTIVE); 3652 } 3653 3654 verifyCaptureResultForKey(CaptureResult.CONTROL_AUTOFRAMING, mode, listener, 3655 NUM_FRAMES_VERIFIED); 3656 3657 // If autoframing was OFF, or the framing state CONVERGED, the zoom ratio in result 3658 // should be within the margin of error.
3659 if (autoframingState != CameraMetadata.CONTROL_AUTOFRAMING_STATE_FRAMING) { 3660 mCollector.expectTrue(String.format( 3661 "Zoom Ratio in Capture Request does not match the expected zoom" 3662 + " ratio in Capture Result (expected = %f, actual = %f)", 3663 expectedZoomRatio, resultZoomRatio), 3664 Math.abs(expectedZoomRatio - resultZoomRatio) / expectedZoomRatio 3665 <= zoomErrorMargin); 3666 } 3667 } 3668 } 3669 } 3670 3671 private void settingsOverrideTestByCamera() throws Exception { 3672 // Verify that settings override is OFF by default 3673 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 3674 CaptureRequest.Builder requestBuilder = 3675 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 3676 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 3677 startPreview(requestBuilder, maxPreviewSize, listener); 3678 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 3679 verifyCaptureResultForKey(CaptureResult.CONTROL_SETTINGS_OVERRIDE, 3680 CameraMetadata.CONTROL_SETTINGS_OVERRIDE_OFF, listener, NUM_FRAMES_VERIFIED); 3681 3682 // Turn settings override to ZOOM, and make sure it's reflected in result 3683 requestBuilder.set(CaptureRequest.CONTROL_SETTINGS_OVERRIDE, 3684 CameraMetadata.CONTROL_SETTINGS_OVERRIDE_ZOOM); 3685 SimpleCaptureCallback listenerZoom = new SimpleCaptureCallback(); 3686 mSession.setRepeatingRequest(requestBuilder.build(), listenerZoom, mHandler); 3687 waitForSettingsApplied(listenerZoom, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 3688 verifyCaptureResultForKey(CaptureResult.CONTROL_SETTINGS_OVERRIDE, 3689 CameraMetadata.CONTROL_SETTINGS_OVERRIDE_ZOOM, listenerZoom, NUM_FRAMES_VERIFIED); 3690 3691 // Verify that settings override result is ON if turned on from the beginning 3692 listenerZoom = new SimpleCaptureCallback(); 3693 stopPreviewAndDrain(); 3694 startPreview(requestBuilder, maxPreviewSize, listenerZoom); 3695 waitForSettingsApplied(listenerZoom, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 3696 // Wait an additional 2 frames to allow non-overridden 3697 // results during startup.
3698 final int ZOOM_SOME_FRAMES = 2; 3699 waitForNumResults(listenerZoom, ZOOM_SOME_FRAMES); 3700 verifyCaptureResultForKey(CaptureResult.CONTROL_SETTINGS_OVERRIDE, 3701 CameraMetadata.CONTROL_SETTINGS_OVERRIDE_ZOOM, listenerZoom, NUM_FRAMES_VERIFIED); 3702 } 3703 3704 private void testAeModeOnLowLightBoostBrightnessPriorityTestByCamera() throws Exception { 3705 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 3706 CaptureRequest.Builder requestBuilder = 3707 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 3708 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, 3709 CameraMetadata.CONTROL_AE_MODE_ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY); 3710 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 3711 startPreview(requestBuilder, maxPreviewSize, listener); 3712 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 3713 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 3714 // Expect that AE_MODE is ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY 3715 int resultAeMode = getValueNotNull(result, CaptureResult.CONTROL_AE_MODE); 3716 assertTrue("AE Mode should be ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY", resultAeMode 3717 == CameraMetadata.CONTROL_AE_MODE_ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY); 3718 3719 // Expect that CaptureResult.CONTROL_LOW_LIGHT_BOOST_STATE is present 3720 int resultLowLightBoostState = 3721 getValueNotNull(result, CaptureResult.CONTROL_LOW_LIGHT_BOOST_STATE); 3722 assertTrue("Low Light Boost State should be ACTIVE or INACTIVE", 3723 resultLowLightBoostState == CameraMetadata.CONTROL_LOW_LIGHT_BOOST_STATE_INACTIVE 3724 || resultLowLightBoostState == CameraMetadata.CONTROL_LOW_LIGHT_BOOST_STATE_ACTIVE); 3725 } 3726 3727 3728 private void testAePriorityModesByCamera(int aePriorityMode) throws Exception { 3729 final int TEST_SENSITIVITY_VALUE = mStaticInfo.getSensitivityClampToRange(204); 3730 final long TEST_EXPOSURE_TIME_NS = mStaticInfo.getExposureClampToRange(28000000); 3731 final long EXPOSURE_TIME_ERROR_MARGIN_NS = 100000; 3732 3733 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 3734 CaptureRequest.Builder requestBuilder = 3735 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 3736 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_ON); 3737 requestBuilder.set(CaptureRequest.CONTROL_AE_PRIORITY_MODE, aePriorityMode); 3738 3739 switch (aePriorityMode) { 3740 case CONTROL_AE_PRIORITY_MODE_SENSOR_EXPOSURE_TIME_PRIORITY: 3741 requestBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, TEST_EXPOSURE_TIME_NS); 3742 break; 3743 case CONTROL_AE_PRIORITY_MODE_SENSOR_SENSITIVITY_PRIORITY: 3744 requestBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, TEST_SENSITIVITY_VALUE); 3745 break; 3746 default: 3747 throw new UnsupportedOperationException("Unhandled AE priority mode " 3748 + aePriorityMode); 3749 } 3750 3751 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 3752 startPreview(requestBuilder, maxPreviewSize, listener); 3753 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 3754 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 3755 3756 // Expect that AE Priority mode result matches request 3757 int resultAePriorityMode = getValueNotNull(result, CaptureResult.CONTROL_AE_PRIORITY_MODE); 3758 assertTrue("AE Mode should be " + aePriorityMode, resultAePriorityMode 3759 == aePriorityMode); 3760 3761 long exposureTimeDiff = TEST_EXPOSURE_TIME_NS - 3762 getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME); 3763 int 
sensitivityDiff = TEST_SENSITIVITY_VALUE - 3764 getValueNotNull(result, CaptureResult.SENSOR_SENSITIVITY); 3765 3766 switch (aePriorityMode) { 3767 case CONTROL_AE_PRIORITY_MODE_SENSOR_EXPOSURE_TIME_PRIORITY: 3768 validateExposureTime(TEST_EXPOSURE_TIME_NS, 3769 getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME)); 3770 break; 3771 case CONTROL_AE_PRIORITY_MODE_SENSOR_SENSITIVITY_PRIORITY: 3772 validateSensitivity(TEST_SENSITIVITY_VALUE, 3773 getValueNotNull(result, CaptureResult.SENSOR_SENSITIVITY)); 3774 break; 3775 default: 3776 throw new UnsupportedOperationException("Unhandled AE priority mode " 3777 + aePriorityMode); 3778 } 3779 3780 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_OFF); 3781 listener = new SimpleCaptureCallback(); 3782 startPreview(requestBuilder, maxPreviewSize, listener); 3783 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 3784 result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 3785 3786 // Expect that AE priority mode is off when AE mode if off 3787 resultAePriorityMode = 3788 getValueNotNull(result, CaptureResult.CONTROL_AE_PRIORITY_MODE); 3789 assertTrue("AE Priority mode should be off when AE mode is turned off", 3790 resultAePriorityMode == CameraMetadata.CONTROL_AE_PRIORITY_MODE_OFF); 3791 } 3792 3793 //---------------------------------------------------------------- 3794 //---------Below are common functions for all tests.-------------- 3795 //---------------------------------------------------------------- 3796 3797 /** 3798 * Enable exposure manual control and change exposure and sensitivity and 3799 * clamp the value into the supported range. 3800 */ 3801 private void changeExposure(CaptureRequest.Builder requestBuilder, 3802 long expTime, int sensitivity) { 3803 // Check if the max analog sensitivity is available and no larger than max sensitivity. The 3804 // max analog sensitivity is not actually used here. This is only an extra correctness 3805 // check. 3806 mStaticInfo.getMaxAnalogSensitivityChecked(); 3807 3808 expTime = mStaticInfo.getExposureClampToRange(expTime); 3809 sensitivity = mStaticInfo.getSensitivityClampToRange(sensitivity); 3810 3811 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CONTROL_AE_MODE_OFF); 3812 requestBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, expTime); 3813 requestBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, sensitivity); 3814 } 3815 /** 3816 * Enable exposure manual control and change exposure time and 3817 * clamp the value into the supported range. 3818 * 3819 * <p>The sensitivity is set to default value.</p> 3820 */ 3821 private void changeExposure(CaptureRequest.Builder requestBuilder, long expTime) { 3822 changeExposure(requestBuilder, expTime, DEFAULT_SENSITIVITY); 3823 } 3824 3825 /** 3826 * Get the exposure time array that contains multiple exposure time steps in 3827 * the exposure time range, in nanoseconds. 3828 */ 3829 private long[] getExposureTimeTestValuesSorted() { 3830 long[] testValues = new long[DEFAULT_NUM_EXPOSURE_TIME_STEPS + 1]; 3831 long maxExpTime = mStaticInfo.getExposureMaximumOrDefault(DEFAULT_EXP_TIME_NS); 3832 long minExpTime = mStaticInfo.getExposureMinimumOrDefault(DEFAULT_EXP_TIME_NS); 3833 3834 long range = maxExpTime - minExpTime; 3835 double stepSize = range / (double)DEFAULT_NUM_EXPOSURE_TIME_STEPS; 3836 for (int i = 0; i < testValues.length; i++) { 3837 // Multiply stepSize by largest to smallest so that the final array is sorted. 
3838 testValues[i] = maxExpTime - (long) (stepSize * (testValues.length - 1 - i)); 3839 testValues[i] = mStaticInfo.getExposureClampToRange(testValues[i]); 3840 } 3841 3842 return testValues; 3843 } 3844 3845 /** 3846 * Generate test focus distances in the range of [0, minFocusDistance] in increasing order. 3847 * 3848 * @param repeatMin number of times minValue will be repeated. 3849 * @param repeatMax number of times maxValue will be repeated. 3850 */ 3851 private float[] getFocusDistanceTestValuesInOrder(int repeatMin, int repeatMax) { 3852 int totalCount = NUM_TEST_FOCUS_DISTANCES + 1 + repeatMin + repeatMax; 3853 float[] testValues = new float[totalCount]; 3854 float minValue = 0; 3855 float maxValue = mStaticInfo.getMinimumFocusDistanceChecked(); 3856 3857 float range = maxValue - minValue; 3858 float stepSize = range / NUM_TEST_FOCUS_DISTANCES; 3859 3860 for (int i = 0; i < repeatMin; i++) { 3861 testValues[i] = minValue; 3862 } 3863 for (int i = 0; i <= NUM_TEST_FOCUS_DISTANCES; i++) { 3864 testValues[repeatMin+i] = minValue + stepSize * i; 3865 } 3866 for (int i = 0; i < repeatMax; i++) { 3867 testValues[repeatMin+NUM_TEST_FOCUS_DISTANCES+1+i] = 3868 maxValue; 3869 } 3870 3871 return testValues; 3872 } 3873 3874 /** 3875 * Get the sensitivity array that contains multiple sensitivity steps in the 3876 * sensitivity range. 3877 * <p> 3878 * The number of test values is determined by 3879 * {@value #DEFAULT_SENSITIVITY_STEP_SIZE} and the sensitivity range, and is 3880 * bounded by {@value #DEFAULT_NUM_SENSITIVITY_STEPS}. 3881 * </p> 3882 */ 3883 private int[] getSensitivityTestValuesSorted() { 3884 int maxSensitivity = mStaticInfo.getSensitivityMaximumOrDefault( 3885 DEFAULT_SENSITIVITY); 3886 int minSensitivity = mStaticInfo.getSensitivityMinimumOrDefault( 3887 DEFAULT_SENSITIVITY); 3888 3889 int range = maxSensitivity - minSensitivity; 3890 int stepSize = DEFAULT_SENSITIVITY_STEP_SIZE; 3891 int numSteps = range / stepSize; 3892 // Bound the number of test steps to avoid an overly long test. 3893 if (numSteps > DEFAULT_NUM_SENSITIVITY_STEPS) { 3894 numSteps = DEFAULT_NUM_SENSITIVITY_STEPS; 3895 stepSize = range / numSteps; 3896 } 3897 int[] testValues = new int[numSteps + 1]; 3898 for (int i = 0; i < testValues.length; i++) { 3899 // Multiply stepSize by largest to smallest so that the final array is sorted. 3900 testValues[i] = maxSensitivity - stepSize * (testValues.length - 1 - i); 3901 testValues[i] = mStaticInfo.getSensitivityClampToRange(testValues[i]); 3902 } 3903 3904 return testValues; 3905 } 3906 3907 /** 3908 * Validate the AE control exposure time. 3909 * 3910 * <p>The result exposure should be close enough to the request, and should only round down if they are not equal.</p> 3911 * 3912 * @param request Request exposure time 3913 * @param result Result exposure time 3914 */ 3915 private void validateExposureTime(long request, long result) { 3916 long expTimeDelta = request - result; 3917 long expTimeErrorMargin = (long)(Math.max(EXPOSURE_TIME_ERROR_MARGIN_NS, request 3918 * EXPOSURE_TIME_ERROR_MARGIN_RATE)); 3919 // First, it must round down, not up; second, it must be close enough. 3920 mCollector.expectTrue("Exposure time is invalid, request: " 3921 + request + " result: " + result, 3922 expTimeDelta < expTimeErrorMargin && expTimeDelta >= 0); 3923 } 3924 3925 /** 3926 * Validate AE control sensitivity.
3927 * 3928 * @param request Request sensitivity 3929 * @param result Result sensitivity 3930 */ 3931 private void validateSensitivity(int request, int result) { 3932 float sensitivityDelta = request - result; 3933 float sensitivityErrorMargin = request * SENSITIVITY_ERROR_MARGIN_RATE; 3934 // First, round down not up, second, need close enough. 3935 mCollector.expectTrue("Sensitivity is invalid, request: " + request + " result: " + result, 3936 sensitivityDelta < sensitivityErrorMargin && sensitivityDelta >= 0); 3937 } 3938 3939 /** 3940 * Validate frame duration for a given capture. 3941 * 3942 * <p>Frame duration should be longer than exposure time.</p> 3943 * 3944 * @param result The capture result for a given capture 3945 */ 3946 private void validateFrameDurationForCapture(CaptureResult result) { 3947 long expTime = getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME); 3948 long frameDuration = getValueNotNull(result, CaptureResult.SENSOR_FRAME_DURATION); 3949 if (VERBOSE) { 3950 Log.v(TAG, "frame duration: " + frameDuration + " Exposure time: " + expTime); 3951 } 3952 3953 mCollector.expectTrue(String.format("Frame duration (%d) should be longer than exposure" 3954 + " time (%d) for a given capture", frameDuration, expTime), 3955 frameDuration >= expTime); 3956 3957 validatePipelineDepth(result); 3958 } 3959 3960 /** 3961 * Basic verification for the control mode capture result. 3962 * 3963 * @param key The capture result key to be verified against 3964 * @param requestMode The request mode for this result 3965 * @param listener The capture listener to get capture results 3966 * @param numFramesVerified The number of capture results to be verified 3967 * @param threshold The threshold by which the request and result keys can differ 3968 */ 3969 private void verifyCaptureResultForKey(CaptureResult.Key<Float> key, float requestMode, 3970 SimpleCaptureCallback listener, int numFramesVerified, float threshold) { 3971 for (int i = 0; i < numFramesVerified; i++) { 3972 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 3973 validatePipelineDepth(result); 3974 float resultMode = getValueNotNull(result, key); 3975 if (VERBOSE) { 3976 Log.v(TAG, "Expect value: " + requestMode + " result value: " 3977 + resultMode + " threshold " + threshold); 3978 } 3979 // Check that the request and result are within the given threshold of each other. 3980 // (expectEquals isn't the most intuitive function name.) 3981 mCollector.expectEquals("Key " + key.getName() + " request: " + requestMode + 3982 " result: " + resultMode + " not within threshold " + threshold + 3983 " of each other", requestMode, resultMode, threshold); 3984 } 3985 } 3986 3987 /** 3988 * Basic verification for the control mode capture result. 
* 3990 * @param key The capture result key to be verified against 3991 * @param requestMode The request mode for this result 3992 * @param listener The capture listener to get capture results 3993 * @param numFramesVerified The number of capture results to be verified 3994 */ 3995 private <T> void verifyCaptureResultForKey(CaptureResult.Key<T> key, T requestMode, 3996 SimpleCaptureCallback listener, int numFramesVerified) { 3997 for (int i = 0; i < numFramesVerified; i++) { 3998 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 3999 validatePipelineDepth(result); 4000 T resultMode = getValueNotNull(result, key); 4001 if (VERBOSE) { 4002 Log.v(TAG, "Expect value: " + requestMode.toString() + " result value: " 4003 + resultMode.toString()); 4004 } 4005 mCollector.expectEquals("Key " + key.getName() + " result should match request", 4006 requestMode, resultMode); 4007 } 4008 } 4009 4010 /** 4011 * Basic verification that the value of a capture result key should be one of the expected 4012 * values. 4013 * 4014 * @param key The capture result key to be verified against 4015 * @param expectedModes The list of any possible expected modes for this result 4016 * @param listener The capture listener to get capture results 4017 * @param numFramesVerified The number of capture results to be verified 4018 */ 4019 private <T> void verifyAnyCaptureResultForKey(CaptureResult.Key<T> key, T[] expectedModes, 4020 SimpleCaptureCallback listener, int numFramesVerified) { 4021 for (int i = 0; i < numFramesVerified; i++) { 4022 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 4023 validatePipelineDepth(result); 4024 T resultMode = getValueNotNull(result, key); 4025 if (VERBOSE) { 4026 Log.v(TAG, "Expect values: " + Arrays.toString(expectedModes) + " result value: " 4027 + resultMode.toString()); 4028 } 4029 // Capture result should be one of the expected values. 4030 mCollector.expectContains(expectedModes, resultMode); 4031 } 4032 } 4033 4034 /** 4035 * Verify that the fps does not slow down for a given input request with certain 4036 * controls inside. 4037 * <p> 4038 * This method selects a max preview size for each fps range, and then 4039 * configures the preview stream. Preview is started with the max preview 4040 * size, and the result frame duration is then verified to be within the 4041 * expected frame duration range. 4042 * </p> 4043 * 4044 * @param requestBuilder The request builder that contains post-processing 4045 * controls that could impact the output frame rate, such as 4046 * {@link CaptureRequest.NOISE_REDUCTION_MODE}. The value of 4047 * these controls must be set to values that do not slow 4048 * down the frame rate.
4049 * @param numFramesVerified The number of frames to be verified
4050 * @param fpsRanges The fps ranges to be verified
4051 */
4052 private void verifyFpsNotSlowDown(CaptureRequest.Builder requestBuilder,
4053 int numFramesVerified, List<Range<Integer>> fpsRanges) throws Exception {
4054 boolean frameDurationAvailable = true;
4055 // Allow a few frames for AE to settle on target FPS range
4056 final int NUM_FRAME_TO_SKIP = 6;
4057 float frameDurationErrorMargin = FRAME_DURATION_ERROR_MARGIN;
4058 if (!mStaticInfo.areKeysAvailable(CaptureResult.SENSOR_FRAME_DURATION)) {
4059 frameDurationAvailable = false;
4060 // Allow a larger error margin (1.5%) for timestamps
4061 frameDurationErrorMargin = 0.015f;
4062 }
4063 if (mStaticInfo.isExternalCamera()) {
4064 // Allow an even larger error margin (15%) for external camera timestamps
4065 frameDurationErrorMargin = 0.15f;
4066 }
4067
4068 boolean antiBandingOffIsSupported = mStaticInfo.isAntiBandingOffModeSupported();
4069 Range<Integer> fpsRange;
4070 SimpleCaptureCallback resultListener;
4071
4072 for (int i = 0; i < fpsRanges.size(); i += 1) {
4073 fpsRange = fpsRanges.get(i);
4074 Size previewSz = getMaxPreviewSizeForFpsRange(fpsRange);
4075 // If unable to find a preview size, then log the failure, and skip this run.
4076 if (previewSz == null) {
4077 if (mStaticInfo.isCapabilitySupported(
4078 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
4079 mCollector.addMessage(String.format(
4080 "Unable to find a preview size supporting given fps range %s",
4081 fpsRange));
4082 }
4083 continue;
4084 }
4085
4086 if (VERBOSE) {
4087 Log.v(TAG, String.format("Test fps range %s for preview size %s",
4088 fpsRange, previewSz.toString()));
4089 }
4090 requestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange);
4091 // Turn off auto antibanding to avoid exposure time and frame duration interference
4092 // from the antibanding algorithm.
4093 if (antiBandingOffIsSupported) {
4094 requestBuilder.set(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE,
4095 CaptureRequest.CONTROL_AE_ANTIBANDING_MODE_OFF);
4096 } else {
4097 // The device doesn't implement the OFF mode; the test continues, but it needs to make sure
4098 // that the antibanding algorithm doesn't slow down the fps.
4099 Log.i(TAG, "OFF antibanding mode is not supported, the camera device output must" +
4100 " not slow down the frame rate regardless of its current antibanding" +
4101 " mode");
4102 }
4103
4104 resultListener = new SimpleCaptureCallback();
4105 startPreview(requestBuilder, previewSz, resultListener);
4106 waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
4107 // Wait several more frames for AE to settle on target FPS range
4108 waitForNumResults(resultListener, NUM_FRAME_TO_SKIP);
4109
4110 long[] frameDurationRange = new long[]{
4111 (long) (1e9 / fpsRange.getUpper()), (long) (1e9 / fpsRange.getLower())};
4112 long captureTime = 0, prevCaptureTime = 0;
4113 long frameDurationSum = 0;
4114 for (int j = 0; j < numFramesVerified; j++) {
4115 long frameDuration = frameDurationRange[0];
4116 CaptureResult result =
4117 resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
4118 validatePipelineDepth(result);
4119 if (frameDurationAvailable) {
4120 frameDuration = getValueNotNull(result, CaptureResult.SENSOR_FRAME_DURATION);
4121 } else {
4122 // If frame duration is not available, check the sensor timestamp deltas instead.
4123 captureTime = getValueNotNull(result, CaptureResult.SENSOR_TIMESTAMP);
4124 if (j > 0) {
4125 frameDuration = captureTime - prevCaptureTime;
4126 }
4127 prevCaptureTime = captureTime;
4128 }
4129 frameDurationSum += frameDuration;
4130 }
4131 long frameDurationAvg = frameDurationSum / numFramesVerified;
4132 mCollector.expectInRange(
4133 "Frame duration must be in the range of " +
4134 Arrays.toString(frameDurationRange),
4135 frameDurationAvg,
4136 (long) (frameDurationRange[0] * (1 - frameDurationErrorMargin)),
4137 (long) (frameDurationRange[1] * (1 + frameDurationErrorMargin)));
4138
4139 }
4140
4141 stopPreview();
4142 }
4143
4144 /**
4145 * Validate the pipeline depth result.
4146 *
4147 * @param result The capture result to get pipeline depth data
4148 */
4149 private void validatePipelineDepth(CaptureResult result) {
4150 final byte MIN_PIPELINE_DEPTH = 1;
4151 byte maxPipelineDepth = mStaticInfo.getPipelineMaxDepthChecked();
4152 Byte pipelineDepth = getValueNotNull(result, CaptureResult.REQUEST_PIPELINE_DEPTH);
4153 mCollector.expectInRange(String.format("Pipeline depth must be in the range of [%d, %d]",
4154 MIN_PIPELINE_DEPTH, maxPipelineDepth), pipelineDepth, MIN_PIPELINE_DEPTH,
4155 maxPipelineDepth);
4156 }
4157
4158 /**
4159 * Calculate the anti-flickering corrected exposure time.
4160 * <p>
4161 * If the input exposure time is very short (shorter than the flickering
4162 * boundary), which indicates the scene is bright and very likely outdoors,
4163 * skip the correction, as it would not be meaningful in that case.
4164 * </p>
4165 * <p>
4166 * For a long exposure time (larger than the flickering boundary), find the
4167 * multiple of the flickering boundary that is closest to the input exposure time (see the worked example appended after this class).
4168 * </p>
4169 *
4170 * @param flickeringMode The anti-flickering mode, either ANTI_FLICKERING_50HZ or ANTI_FLICKERING_60HZ
4171 * @param exposureTime The input exposure time to be corrected
4172 * @return The anti-flickering corrected exposure time
4173 */
4174 private long getAntiFlickeringExposureTime(int flickeringMode, long exposureTime) {
4175 if (flickeringMode != ANTI_FLICKERING_50HZ && flickeringMode != ANTI_FLICKERING_60HZ) {
4176 throw new IllegalArgumentException("Input anti-flickering mode must be 50 or 60Hz");
4177 }
4178 long flickeringBoundary = EXPOSURE_TIME_BOUNDARY_50HZ_NS;
4179 if (flickeringMode == ANTI_FLICKERING_60HZ) {
4180 flickeringBoundary = EXPOSURE_TIME_BOUNDARY_60HZ_NS;
4181 }
4182
4183 if (exposureTime <= flickeringBoundary) {
4184 return exposureTime;
4185 }
4186
4187 // Round to the nearest multiple of the flickering boundary.
4188 long correctedExpTime = exposureTime + (flickeringBoundary / 2);
4189 correctedExpTime = correctedExpTime - (correctedExpTime % flickeringBoundary);
4190 return correctedExpTime;
4191 }
4192 }
4193
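/*
 * Editor-added illustrative sketch, not part of the original CTS source. It demonstrates the
 * one-sided sensitivity tolerance described in validateSensitivity() above: the result may
 * round the requested ISO down by less than SENSITIVITY_ERROR_MARGIN_RATE, but never up.
 * The class name and the sample values below are hypothetical and exist only for illustration.
 */
class SensitivityToleranceDemo {
    // Same 6% rate as the test constant SENSITIVITY_ERROR_MARGIN_RATE.
    private static final float MARGIN_RATE = 0.06f;

    // Mirrors the check in validateSensitivity(): the delta must be non-negative (no rounding
    // up) and strictly smaller than the allowed margin.
    static boolean isSensitivityValid(int requested, int reported) {
        float delta = requested - reported;
        float allowed = requested * MARGIN_RATE;
        return delta >= 0 && delta < allowed;
    }

    public static void main(String[] args) {
        int requested = 400; // hypothetical requested ISO; allowed downward slack is 24
        int[] reported = {400, 380, 377, 376, 410};
        for (int iso : reported) {
            // 400, 380, 377 pass (deltas 0, 20, 23 are within the margin); 376 fails because
            // a delta of 24 is not strictly below 24; 410 fails because the result exceeds
            // the request.
            System.out.println("request=" + requested + " result=" + iso
                    + " valid=" + isSensitivityValid(requested, iso));
        }
    }
}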
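/*
 * Editor-added illustrative sketch, not part of the original CTS source. It shows the frame
 * duration bounds that verifyFpsNotSlowDown() derives from a target AE FPS range, and the
 * margin-expanded window that the averaged frame duration is checked against. The class name,
 * the sample FPS range, and the 0.5% margin are hypothetical placeholders (the real test uses
 * FRAME_DURATION_ERROR_MARGIN and the device's advertised ranges).
 */
class FpsFrameDurationDemo {
    public static void main(String[] args) {
        int fpsLower = 15;            // hypothetical CONTROL_AE_TARGET_FPS_RANGE lower bound
        int fpsUpper = 30;            // hypothetical upper bound
        float errorMargin = 0.005f;   // placeholder error margin

        // Frame duration range in ns: [1e9 / upper, 1e9 / lower], i.e. the shortest duration
        // at the highest fps and the longest duration at the lowest fps.
        long minDurationNs = (long) (1e9 / fpsUpper);   // ~33.3ms at 30fps
        long maxDurationNs = (long) (1e9 / fpsLower);   // ~66.7ms at 15fps

        // Acceptance window after applying the error margin, matching the expectInRange()
        // bounds used on the averaged frame duration in the test.
        long lowBound = (long) (minDurationNs * (1 - errorMargin));
        long highBound = (long) (maxDurationNs * (1 + errorMargin));

        System.out.println("Frame duration range (ns): [" + minDurationNs + ", " + maxDurationNs + "]");
        System.out.println("Accepted average (ns):     [" + lowBound + ", " + highBound + "]");
    }
}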
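/*
 * Editor-added illustrative sketch, not part of the original CTS source. It reproduces the
 * rounding arithmetic of getAntiFlickeringExposureTime() for a sample 21ms exposure under 60Hz
 * anti-flickering, showing that the exposure snaps to the nearest multiple of the flicker
 * period. The class and method names below are hypothetical.
 */
class AntiFlickeringRoundingDemo {
    // Same 60Hz flicker period approximation as EXPOSURE_TIME_BOUNDARY_60HZ_NS (1/120s in ns).
    private static final long BOUNDARY_60HZ_NS = 8333333L;

    // Round an exposure time to the nearest multiple of the flicker boundary; exposures at or
    // below one period are returned unchanged, mirroring the test method above.
    static long roundToFlickerBoundary(long exposureTimeNs, long boundaryNs) {
        if (exposureTimeNs <= boundaryNs) {
            return exposureTimeNs;
        }
        long corrected = exposureTimeNs + (boundaryNs / 2);
        return corrected - (corrected % boundaryNs);
    }

    public static void main(String[] args) {
        long exposureNs = 21000000L; // hypothetical 21ms requested exposure
        long correctedNs = roundToFlickerBoundary(exposureNs, BOUNDARY_60HZ_NS);
        // 21ms plus half a period is 25166666ns; truncating to the period multiple gives
        // 3 * 8333333 = 24999999ns, the multiple of the flicker period closest to 21ms
        // (4.0ms away, versus 4.3ms for the 2-period multiple at 16666666ns).
        System.out.println("Corrected exposure: " + correctedNs + " ns");
    }
}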