xref: /aosp_15_r20/external/webrtc/sdk/objc/unittests/RTCCameraVideoCapturerTests.mm (revision d9f758449e529ab9291ac668be2861e7a55c2422)
1/*
2 *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
3 *
4 *  Use of this source code is governed by a BSD-style license
5 *  that can be found in the LICENSE file in the root of the source
6 *  tree. An additional intellectual property rights grant can be found
7 *  in the file PATENTS.  All contributing project authors may
8 *  be found in the AUTHORS file in the root of the source tree.
9 */
10
11#import <OCMock/OCMock.h>
12#import <XCTest/XCTest.h>
13
14#if TARGET_OS_IPHONE
15#import <UIKit/UIKit.h>
16#endif
17
18#include "rtc_base/gunit.h"
19
20#import "base/RTCVideoFrame.h"
21#import "components/capturer/RTCCameraVideoCapturer.h"
22#import "helpers/AVCaptureSession+DevicePosition.h"
23#import "helpers/RTCDispatcher.h"
24#import "helpers/scoped_cftyperef.h"
25
#if TARGET_OS_IPHONE
// Helper method.
// Creates a CMSampleBuffer backed by a 32ARGB CVPixelBuffer sized to the
// bundled "Default.png" test image. The tests only inspect the buffer's
// dimensions and format, not its pixel contents. The caller owns the
// returned buffer (+1) and must CFRelease() it.
CMSampleBufferRef createTestSampleBufferRef() {
  // This image is already in the testing bundle.
  UIImage *image = [UIImage imageNamed:@"Default.png"];
  CGSize size = image.size;
  CGImageRef imageRef = [image CGImage];

  CVPixelBufferRef pixelBuffer = nullptr;
  CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height, kCVPixelFormatType_32ARGB, nil,
                      &pixelBuffer);

  CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
  // We don't care about bitsPerComponent and bytesPerRow so arbitrary value of 8 for both.
  // NOTE(review): this bitmap context is not backed by `pixelBuffer` (its data
  // pointer is nil), so the drawn image never reaches the sample buffer. That
  // is fine while only dimensions/format matter — confirm before relying on
  // pixel data in a test.
  CGContextRef context = CGBitmapContextCreate(nil, size.width, size.height, 8, 8 * size.width,
                                               rgbColorSpace, kCGImageAlphaPremultipliedFirst);

  CGContextDrawImage(
      context, CGRectMake(0, 0, CGImageGetWidth(imageRef), CGImageGetHeight(imageRef)), imageRef);

  CGColorSpaceRelease(rgbColorSpace);
  CGContextRelease(context);

  // We don't really care about the timing.
  CMSampleTimingInfo timing = {kCMTimeInvalid, kCMTimeInvalid, kCMTimeInvalid};
  CMVideoFormatDescriptionRef description = nullptr;
  CMVideoFormatDescriptionCreateForImageBuffer(NULL, pixelBuffer, &description);

  CMSampleBufferRef sampleBuffer = nullptr;
  CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, YES, NULL, NULL, description,
                                     &timing, &sampleBuffer);
  // The sample buffer retains what it needs; drop our +1 references so neither
  // the format description (previously leaked) nor the pixel buffer leaks.
  CFRelease(description);
  CFRelease(pixelBuffer);

  return sampleBuffer;
}
#endif
// Test-only category: surfaces the capturer's conformance to
// AVCaptureVideoDataOutputSampleBufferDelegate and its private initializer
// taking an injected AVCaptureSession, so tests can pass a mocked session.
@interface RTC_OBJC_TYPE (RTCCameraVideoCapturer)
(Tests)<AVCaptureVideoDataOutputSampleBufferDelegate> -
    (instancetype)initWithDelegate
    : (__weak id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate captureSession
    : (AVCaptureSession *)captureSession;
@end
70
// Unit tests for RTC_OBJC_TYPE(RTCCameraVideoCapturer).
@interface RTCCameraVideoCapturerTests : XCTestCase
// Protocol mock of the capturer's RTCVideoCapturerDelegate.
@property(nonatomic, strong) id delegateMock;
// AVCaptureDevice class mock created per fixture.
@property(nonatomic, strong) id deviceMock;
// AVCaptureConnection class mock passed into the sample-buffer callback.
@property(nonatomic, strong) id captureConnectionMock;
// Strict AVCaptureSession mock; only set by -setupWithMockedCaptureSession.
@property(nonatomic, strong) id captureSessionMock;
// Object under test.
@property(nonatomic, strong) RTC_OBJC_TYPE(RTCCameraVideoCapturer) * capturer;
@end
78
@implementation RTCCameraVideoCapturerTests
// NOTE(review): these @synthesize lines are redundant — auto-synthesis
// already generates identical underscore-prefixed ivars for each property.
@synthesize delegateMock = _delegateMock;
@synthesize deviceMock = _deviceMock;
@synthesize captureConnectionMock = _captureConnectionMock;
@synthesize captureSessionMock = _captureSessionMock;
@synthesize capturer = _capturer;
85
// Shared fixture initialization: protocol-mocks the delegate, class-mocks an
// AVCaptureConnection and an AVCaptureDevice, and builds the capturer with
// its default (real) capture session.
// NOTE(review): XCTest auto-invokes `-setUp` (capital U), not `-setup`; this
// method is presumably called explicitly by an external test harness —
// confirm before renaming.
- (void)setup {
  self.delegateMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoCapturerDelegate)));
  self.captureConnectionMock = OCMClassMock([AVCaptureConnection class]);
  self.capturer =
      [[RTC_OBJC_TYPE(RTCCameraVideoCapturer) alloc] initWithDelegate:self.delegateMock];
  self.deviceMock = [self createDeviceMock];
}
93
// Alternate fixture initialization for tests that must observe session calls:
// builds the capturer around a *strict* AVCaptureSession mock, so any session
// message that is neither stubbed below nor expected in the test fails it.
- (void)setupWithMockedCaptureSession {
  self.captureSessionMock = OCMStrictClassMock([AVCaptureSession class]);
  // Stub every session message the capturer's initialization path sends.
  OCMStub([self.captureSessionMock setSessionPreset:[OCMArg any]]);
  OCMStub([self.captureSessionMock setUsesApplicationAudioSession:NO]);
  OCMStub([self.captureSessionMock canAddOutput:[OCMArg any]]).andReturn(YES);
  OCMStub([self.captureSessionMock addOutput:[OCMArg any]]);
  OCMStub([self.captureSessionMock beginConfiguration]);
  OCMStub([self.captureSessionMock commitConfiguration]);
  self.delegateMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoCapturerDelegate)));
  self.captureConnectionMock = OCMClassMock([AVCaptureConnection class]);
  self.capturer =
      [[RTC_OBJC_TYPE(RTCCameraVideoCapturer) alloc] initWithDelegate:self.delegateMock
                                                       captureSession:self.captureSessionMock];
  self.deviceMock = [self createDeviceMock];
}
109
// Tears down the shared fixture. Also stops the capture-connection and
// capture-session mocks so strict-mock state cannot leak into the next test
// (the original only stopped the delegate and device mocks). Messaging nil
// is a no-op, so this is safe when a mock was never created.
- (void)tearDown {
  [self.delegateMock stopMocking];
  [self.deviceMock stopMocking];
  [self.captureConnectionMock stopMocking];
  [self.captureSessionMock stopMocking];
  self.delegateMock = nil;
  self.deviceMock = nil;
  self.captureConnectionMock = nil;
  self.captureSessionMock = nil;
  self.capturer = nil;
  [super tearDown];
}
117
118#pragma mark - utils
119
// Returns a fresh AVCaptureDevice class mock; individual tests add stubs.
- (id)createDeviceMock {
  return OCMClassMock([AVCaptureDevice class]);
}
123
124#pragma mark - test cases
125
// Verifies the capturer configures its default capture session: non-nil,
// input-priority preset and no app audio session (iOS only), and exactly one
// output (the video data output).
- (void)testSetupSession {
  AVCaptureSession *session = self.capturer.captureSession;
  EXPECT_TRUE(session != nil);

#if TARGET_OS_IPHONE
  EXPECT_EQ(session.sessionPreset, AVCaptureSessionPresetInputPriority);
  EXPECT_EQ(session.usesApplicationAudioSession, NO);
#endif
  EXPECT_EQ(session.outputs.count, 1u);
}
136
// Verifies the session's video output keeps late frames (no discarding) and
// delivers sample buffers to the capturer itself.
- (void)testSetupSessionOutput {
  AVCaptureVideoDataOutput *videoOutput = self.capturer.captureSession.outputs[0];
  EXPECT_EQ(videoOutput.alwaysDiscardsLateVideoFrames, NO);
  EXPECT_EQ(videoOutput.sampleBufferDelegate, self.capturer);
}
142
// Verifies +supportedFormatsForDevice: returns every format the device
// reports, regardless of pixel format.
- (void)testSupportedFormatsForDevice {
  // given
  // We don't care about width and height so arbitrary 123 and 456 values.
  int width = 123;
  int height = 456;

  // Use one CMVideoFormatDescriptionRef per mocked format and release each at
  // the end. The original reused a single variable, leaking the first two
  // descriptions, and never released any of the three.
  id validFormat1 = OCMClassMock([AVCaptureDeviceFormat class]);
  CMVideoFormatDescriptionRef format1 = nullptr;
  CMVideoFormatDescriptionCreate(nil, kCVPixelFormatType_420YpCbCr8PlanarFullRange, width, height,
                                 nil, &format1);
  OCMStub([validFormat1 formatDescription]).andReturn(format1);

  id validFormat2 = OCMClassMock([AVCaptureDeviceFormat class]);
  CMVideoFormatDescriptionRef format2 = nullptr;
  CMVideoFormatDescriptionCreate(nil, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, width,
                                 height, nil, &format2);
  OCMStub([validFormat2 formatDescription]).andReturn(format2);

  id invalidFormat = OCMClassMock([AVCaptureDeviceFormat class]);
  CMVideoFormatDescriptionRef format3 = nullptr;
  CMVideoFormatDescriptionCreate(nil, kCVPixelFormatType_422YpCbCr8_yuvs, width, height, nil,
                                 &format3);
  OCMStub([invalidFormat formatDescription]).andReturn(format3);

  NSArray *formats = @[ validFormat1, validFormat2, invalidFormat ];
  OCMStub([self.deviceMock formats]).andReturn(formats);

  // when
  NSArray *supportedFormats =
      [RTC_OBJC_TYPE(RTCCameraVideoCapturer) supportedFormatsForDevice:self.deviceMock];

  // then
  EXPECT_EQ(supportedFormats.count, 3u);
  EXPECT_TRUE([supportedFormats containsObject:validFormat1]);
  EXPECT_TRUE([supportedFormats containsObject:validFormat2]);
  EXPECT_TRUE([supportedFormats containsObject:invalidFormat]);

  // cleanup
  [validFormat1 stopMocking];
  [validFormat2 stopMocking];
  [invalidFormat stopMocking];
  validFormat1 = nil;
  validFormat2 = nil;
  invalidFormat = nil;
  CFRelease(format1);
  CFRelease(format2);
  CFRelease(format3);
}
186
// A NULL sample buffer arriving at the capture callback must not be forwarded
// to the capturer delegate.
- (void)testDelegateCallbackNotCalledWhenInvalidBuffer {
  // given
  CMSampleBufferRef sampleBuffer = nullptr;
  // Any frame delivery fails the test.
  [[self.delegateMock reject] capturer:[OCMArg any] didCaptureVideoFrame:[OCMArg any]];

  // when
  [self.capturer captureOutput:self.capturer.captureSession.outputs[0]
         didOutputSampleBuffer:sampleBuffer
                fromConnection:self.captureConnectionMock];

  // then
  [self.delegateMock verify];
}
200
201
// Rotating the device to portrait-upside-down and posting the orientation
// notification should make the next delivered frame carry a 270° rotation.
- (void)testDelegateCallbackWithValidBufferAndOrientationUpdate {
#if TARGET_OS_IPHONE
  // KVC write forces UIDevice to report the desired orientation.
  [UIDevice.currentDevice setValue:@(UIDeviceOrientationPortraitUpsideDown) forKey:@"orientation"];
  CMSampleBufferRef sampleBuffer = createTestSampleBufferRef();

  // then
  [[self.delegateMock expect] capturer:self.capturer
                  didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTC_OBJC_TYPE(RTCVideoFrame) *
                                                                    expectedFrame) {
                    EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_270);
                    return YES;
                  }]];

  // when
  NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
  [center postNotificationName:UIDeviceOrientationDidChangeNotification object:nil];

  // We need to wait for the dispatch to finish.
  WAIT(0, 1000);

  [self.capturer captureOutput:self.capturer.captureSession.outputs[0]
         didOutputSampleBuffer:sampleBuffer
                fromConnection:self.captureConnectionMock];

  [self.delegateMock verify];
  CFRelease(sampleBuffer);
#endif
}
230
// Parameterized helper: delivers a frame while the device reports
// `deviceOrientation` and the connection's input reports `camera` position,
// then checks the frame rotation (front and back cameras map the two
// landscape orientations to opposite rotations).
// NOTE(review): this takes arguments, so XCTest will not auto-run it —
// presumably invoked by an external harness; confirm. Also note
// UIDeviceOrientation appears in the signature while the UIKit import and
// this body are guarded by TARGET_OS_IPHONE — confirm this file is iOS-only.
- (void)testRotationCamera:(AVCaptureDevicePosition)camera
           withOrientation:(UIDeviceOrientation)deviceOrientation {
#if TARGET_OS_IPHONE
  // Mock the AVCaptureConnection as we will get the camera position from the connection's
  // input ports.
  AVCaptureDeviceInput *inputPortMock = OCMClassMock([AVCaptureDeviceInput class]);
  AVCaptureInputPort *captureInputPort = OCMClassMock([AVCaptureInputPort class]);
  NSArray *inputPortsArrayMock = @[captureInputPort];
  AVCaptureDevice *captureDeviceMock = OCMClassMock([AVCaptureDevice class]);
  OCMStub(((AVCaptureConnection *)self.captureConnectionMock).inputPorts).
      andReturn(inputPortsArrayMock);
  OCMStub(captureInputPort.input).andReturn(inputPortMock);
  OCMStub(inputPortMock.device).andReturn(captureDeviceMock);
  OCMStub(captureDeviceMock.position).andReturn(camera);

  // KVC write forces UIDevice to report the desired orientation.
  [UIDevice.currentDevice setValue:@(deviceOrientation) forKey:@"orientation"];

  CMSampleBufferRef sampleBuffer = createTestSampleBufferRef();

  [[self.delegateMock expect] capturer:self.capturer
                  didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTC_OBJC_TYPE(RTCVideoFrame) *
                                                                    expectedFrame) {
                    if (camera == AVCaptureDevicePositionFront) {
                      if (deviceOrientation == UIDeviceOrientationLandscapeLeft) {
                        EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_180);
                      } else if (deviceOrientation == UIDeviceOrientationLandscapeRight) {
                        EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_0);
                      }
                    } else if (camera == AVCaptureDevicePositionBack) {
                      if (deviceOrientation == UIDeviceOrientationLandscapeLeft) {
                        EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_0);
                      } else if (deviceOrientation == UIDeviceOrientationLandscapeRight) {
                        EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_180);
                      }
                    }
                    return YES;
                  }]];

  NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
  [center postNotificationName:UIDeviceOrientationDidChangeNotification object:nil];

  // We need to wait for the dispatch to finish.
  WAIT(0, 1000);

  [self.capturer captureOutput:self.capturer.captureSession.outputs[0]
         didOutputSampleBuffer:sampleBuffer
                fromConnection:self.captureConnectionMock];

  [self.delegateMock verify];

  CFRelease(sampleBuffer);
#endif
}
284
// Attaches a fake Exif dictionary to `sampleBuffer` whose "LensModel" entry
// names an iPhone SE *back* camera; tests use it to override the mocked
// camera position when the frame rotation is computed.
- (void)setExif:(CMSampleBufferRef)sampleBuffer {
  rtc::ScopedCFTypeRef<CFMutableDictionaryRef> exifAttachment(
      CFDictionaryCreateMutable(kCFAllocatorDefault,
                                0,
                                &kCFTypeDictionaryKeyCallBacks,
                                &kCFTypeDictionaryValueCallBacks));
  CFDictionarySetValue(
      exifAttachment.get(), CFSTR("LensModel"), CFSTR("iPhone SE back camera 4.15mm f/2.2"));
  CMSetAttachment(
      sampleBuffer, CFSTR("{Exif}"), exifAttachment.get(), kCMAttachmentMode_ShouldPropagate);
}
291
// The Exif attachment on the buffer must win over the connection's reported
// camera position: front camera + landscape-left would give 180°, but the
// back-camera Exif added below should yield 0°.
- (void)testRotationFrame {
#if TARGET_OS_IPHONE
  // Mock the AVCaptureConnection as we will get the camera position from the connection's
  // input ports.
  AVCaptureDeviceInput *inputPortMock = OCMClassMock([AVCaptureDeviceInput class]);
  AVCaptureInputPort *captureInputPort = OCMClassMock([AVCaptureInputPort class]);
  NSArray *inputPortsArrayMock = @[captureInputPort];
  AVCaptureDevice *captureDeviceMock = OCMClassMock([AVCaptureDevice class]);
  OCMStub(((AVCaptureConnection *)self.captureConnectionMock).inputPorts).
      andReturn(inputPortsArrayMock);
  OCMStub(captureInputPort.input).andReturn(inputPortMock);
  OCMStub(inputPortMock.device).andReturn(captureDeviceMock);
  OCMStub(captureDeviceMock.position).andReturn(AVCaptureDevicePositionFront);

  // KVC write forces UIDevice to report landscape-left.
  [UIDevice.currentDevice setValue:@(UIDeviceOrientationLandscapeLeft) forKey:@"orientation"];

  CMSampleBufferRef sampleBuffer = createTestSampleBufferRef();

  [[self.delegateMock expect] capturer:self.capturer
                  didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTC_OBJC_TYPE(RTCVideoFrame) *
                                                                    expectedFrame) {
                    // Front camera and landscape left should return 180. But the frame's exif
                    // we add below says its from the back camera, so rotation should be 0.
                    EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_0);
                    return YES;
                  }]];

  NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
  [center postNotificationName:UIDeviceOrientationDidChangeNotification object:nil];

  // We need to wait for the dispatch to finish.
  WAIT(0, 1000);

  [self setExif:sampleBuffer];

  [self.capturer captureOutput:self.capturer.captureSession.outputs[0]
         didOutputSampleBuffer:sampleBuffer
                fromConnection:self.captureConnectionMock];

  [self.delegateMock verify];
  CFRelease(sampleBuffer);
#endif
}
335
// Verifies +[AVCaptureSession devicePositionForSampleBuffer:] reads the
// back-camera position out of the Exif attachment set by -setExif:.
- (void)testImageExif {
#if TARGET_OS_IPHONE
  CMSampleBufferRef sampleBuffer = createTestSampleBufferRef();
  [self setExif:sampleBuffer];

  AVCaptureDevicePosition cameraPosition = [AVCaptureSession
                                            devicePositionForSampleBuffer:sampleBuffer];
  EXPECT_EQ(cameraPosition, AVCaptureDevicePositionBack);
  // Balance the +1 from createTestSampleBufferRef(); the original test leaked
  // the buffer (every sibling test releases it).
  CFRelease(sampleBuffer);
#endif
}
346
// Starting capture must add the mocked device input to the session and start
// it running; stopping must stop the session again. Requires the strict
// session mock from -setupWithMockedCaptureSession.
// Uses the property accessor throughout (the original mixed
// `self.captureSessionMock` with direct `_captureSessionMock` ivar access).
- (void)testStartingAndStoppingCapture {
  id expectedDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]);
  id captureDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]);
  OCMStub([captureDeviceInputMock deviceInputWithDevice:self.deviceMock error:[OCMArg setTo:nil]])
      .andReturn(expectedDeviceInputMock);

  OCMStub([self.deviceMock lockForConfiguration:[OCMArg setTo:nil]]).andReturn(YES);
  OCMStub([self.deviceMock unlockForConfiguration]);
  OCMStub([self.captureSessionMock canAddInput:expectedDeviceInputMock]).andReturn(YES);
  OCMStub([self.captureSessionMock inputs]).andReturn(@[ expectedDeviceInputMock ]);
  OCMStub([self.captureSessionMock removeInput:expectedDeviceInputMock]);

  // Set expectation that the capture session should be started with correct device.
  OCMExpect([self.captureSessionMock addInput:expectedDeviceInputMock]);
  OCMExpect([self.captureSessionMock startRunning]);
  OCMExpect([self.captureSessionMock stopRunning]);

  id format = OCMClassMock([AVCaptureDeviceFormat class]);
  [self.capturer startCaptureWithDevice:self.deviceMock format:format fps:30];
  [self.capturer stopCapture];

  // Start capture code is dispatched async.
  OCMVerifyAllWithDelay(self.captureSessionMock, 15);
}
371
// When the device refuses lockForConfiguration:, startCapture must bail out
// without touching the session.
- (void)testStartCaptureFailingToLockForConfiguration {
  // The captureSessionMock is a strict mock, so this test will crash if the startCapture
  // method does not return when failing to lock for configuration.
  OCMExpect([self.deviceMock lockForConfiguration:[OCMArg setTo:nil]]).andReturn(NO);

  id format = OCMClassMock([AVCaptureDeviceFormat class]);
  [self.capturer startCaptureWithDevice:self.deviceMock format:format fps:30];

  // Start capture code is dispatched async.
  OCMVerifyAllWithDelay(self.deviceMock, 15);
}
383
// Same start/stop flow as testStartingAndStoppingCapture, but via the
// completion-handler variants; both callbacks must fire with no error.
// Uses the property accessor throughout (the original mixed
// `self.captureSessionMock` with direct `_captureSessionMock` ivar access).
- (void)testStartingAndStoppingCaptureWithCallbacks {
  id expectedDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]);
  id captureDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]);
  OCMStub([captureDeviceInputMock deviceInputWithDevice:self.deviceMock error:[OCMArg setTo:nil]])
      .andReturn(expectedDeviceInputMock);

  OCMStub([self.deviceMock lockForConfiguration:[OCMArg setTo:nil]]).andReturn(YES);
  OCMStub([self.deviceMock unlockForConfiguration]);
  OCMStub([self.captureSessionMock canAddInput:expectedDeviceInputMock]).andReturn(YES);
  OCMStub([self.captureSessionMock inputs]).andReturn(@[ expectedDeviceInputMock ]);
  OCMStub([self.captureSessionMock removeInput:expectedDeviceInputMock]);

  // Set expectation that the capture session should be started with correct device.
  OCMExpect([self.captureSessionMock addInput:expectedDeviceInputMock]);
  OCMExpect([self.captureSessionMock startRunning]);
  OCMExpect([self.captureSessionMock stopRunning]);

  dispatch_semaphore_t completedStopSemaphore = dispatch_semaphore_create(0);

  __block BOOL completedStart = NO;
  id format = OCMClassMock([AVCaptureDeviceFormat class]);
  [self.capturer startCaptureWithDevice:self.deviceMock
                                 format:format
                                    fps:30
                      completionHandler:^(NSError *error) {
                        EXPECT_EQ(error, nil);
                        completedStart = YES;
                      }];

  __block BOOL completedStop = NO;
  [self.capturer stopCaptureWithCompletionHandler:^{
    completedStop = YES;
    dispatch_semaphore_signal(completedStopSemaphore);
  }];

  // Report a timed-out wait explicitly instead of discarding the result (as
  // the sibling lock-failure test already does).
  long ret = dispatch_semaphore_wait(completedStopSemaphore,
                                     dispatch_time(DISPATCH_TIME_NOW, 15.0 * NSEC_PER_SEC));
  EXPECT_EQ(ret, 0);
  OCMVerifyAllWithDelay(self.captureSessionMock, 15);
  EXPECT_TRUE(completedStart);
  EXPECT_TRUE(completedStop);
}
425
// When lockForConfiguration: fails, the start completion handler must be
// invoked with the error the device reported.
// Uses the property accessor throughout (the original mixed
// `self.captureSessionMock` with direct `_captureSessionMock` ivar access).
- (void)testStartCaptureFailingToLockForConfigurationWithCallback {
  id expectedDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]);
  id captureDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]);
  OCMStub([captureDeviceInputMock deviceInputWithDevice:self.deviceMock error:[OCMArg setTo:nil]])
      .andReturn(expectedDeviceInputMock);

  id errorMock = OCMClassMock([NSError class]);

  OCMStub([self.deviceMock lockForConfiguration:[OCMArg setTo:errorMock]]).andReturn(NO);
  OCMStub([self.captureSessionMock canAddInput:expectedDeviceInputMock]).andReturn(YES);
  OCMStub([self.deviceMock unlockForConfiguration]);

  OCMExpect([self.captureSessionMock addInput:expectedDeviceInputMock]);

  dispatch_semaphore_t completedStartSemaphore = dispatch_semaphore_create(0);
  __block NSError *callbackError = nil;

  id format = OCMClassMock([AVCaptureDeviceFormat class]);
  [self.capturer startCaptureWithDevice:self.deviceMock
                                 format:format
                                    fps:30
                      completionHandler:^(NSError *error) {
                        callbackError = error;
                        dispatch_semaphore_signal(completedStartSemaphore);
                      }];

  long ret = dispatch_semaphore_wait(completedStartSemaphore,
                                     dispatch_time(DISPATCH_TIME_NOW, 15.0 * NSEC_PER_SEC));
  EXPECT_EQ(ret, 0);
  EXPECT_EQ(callbackError, errorMock);
}
457
458@end
459