Updating XTBs based on .GRDs from branch master
[chromium-blink-merge.git] / media / capture / video / mac / video_capture_device_avfoundation_mac.mm
blob54ed686de9e2fb7e2b3ddebce4bff5abb4fa8800
1 // Copyright 2013 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #import "media/capture/video/mac/video_capture_device_avfoundation_mac.h"
7 #import <CoreVideo/CoreVideo.h>
9 #include "base/logging.h"
10 #include "base/mac/foundation_util.h"
11 #include "media/capture/video/mac/video_capture_device_mac.h"
12 #include "ui/gfx/geometry/size.h"
// Prefer MJPEG if frame width or height is larger than this (i.e. above
// VGA-sized frames). Used by -setCaptureHeight:width:frameRate: when picking
// the capture FourCC.
static const int kMjpegWidthThreshold = 640;
static const int kMjpegHeightThreshold = 480;
// Translates a Mac Core Video/Core Media FourCC pixel format into the
// corresponding Chromium pixel format. Chromium pixel formats are sorted in
// order of preference, so the enum values of two translated codes can be
// compared directly. Unrecognized codes map to
// VIDEO_CAPTURE_PIXEL_FORMAT_UNKNOWN.
media::VideoCapturePixelFormat FourCCToChromiumPixelFormat(FourCharCode code) {
  if (code == kCVPixelFormatType_422YpCbCr8)
    return media::VIDEO_CAPTURE_PIXEL_FORMAT_UYVY;
  if (code == CoreMediaGlue::kCMPixelFormat_422YpCbCr8_yuvs)
    return media::VIDEO_CAPTURE_PIXEL_FORMAT_YUY2;
  if (code == CoreMediaGlue::kCMVideoCodecType_JPEG_OpenDML)
    return media::VIDEO_CAPTURE_PIXEL_FORMAT_MJPEG;
  return media::VIDEO_CAPTURE_PIXEL_FORMAT_UNKNOWN;
}
33 @implementation VideoCaptureDeviceAVFoundation
35 #pragma mark Class methods
// Fills |deviceNames| with an entry per usable capture device, keyed by the
// device's unique ID, valued with an autoreleased DeviceNameAndTransportType.
+ (void)getDeviceNames:(NSMutableDictionary*)deviceNames {
  // At this stage we already know that AVFoundation is supported and the whole
  // library is loaded and initialised, by the device monitoring.
  for (CrAVCaptureDevice* device in [AVCaptureDeviceGlue devices]) {
    const BOOL hasVideoOrMuxedMedia =
        [device hasMediaType:AVFoundationGlue::AVMediaTypeVideo()] ||
        [device hasMediaType:AVFoundationGlue::AVMediaTypeMuxed()];
    // Skip non-video devices and devices currently suspended (e.g. a clamshell
    // laptop's built-in camera with the lid closed).
    if (!hasVideoOrMuxedMedia || [device isSuspended])
      continue;
    DeviceNameAndTransportType* nameAndTransportType =
        [[[DeviceNameAndTransportType alloc]
             initWithName:[device localizedName]
            transportType:[device transportType]] autorelease];
    [deviceNames setObject:nameAndTransportType forKey:[device uniqueID]];
  }
}
// Returns an autoreleased dictionary of usable capture devices, keyed by
// unique ID (see +getDeviceNames:).
+ (NSDictionary*)deviceNames {
  // The device name retrieval is not going to happen in the main thread, and
  // this might cause instabilities (it did in QTKit), so keep an eye here.
  NSMutableDictionary* deviceNames = [NSMutableDictionary dictionary];
  [self getDeviceNames:deviceNames];
  return deviceNames;
}
// Appends to |formats| every (resolution, max frame rate, pixel format)
// combination supported by the device whose unique ID matches |name|. Does
// nothing if no such device is found.
+ (void)getDevice:(const media::VideoCaptureDevice::Name&)name
    supportedFormats:(media::VideoCaptureFormats*)formats {
  NSArray* devices = [AVCaptureDeviceGlue devices];
  CrAVCaptureDevice* device = nil;
  for (device in devices) {
    if ([[device uniqueID] UTF8String] == name.id())
      break;
  }
  if (device == nil)
    return;
  // NOTE: the device format loop variable is named |deviceFormat| (not
  // |format|) so the media::VideoCaptureFormat local below no longer shadows
  // it, which previously made the inner loop confusing to read.
  for (CrAVCaptureDeviceFormat* deviceFormat in device.formats) {
    // MediaSubType is a CMPixelFormatType but can be used as CVPixelFormatType
    // as well according to CMFormatDescription.h
    const media::VideoCapturePixelFormat pixelFormat =
        FourCCToChromiumPixelFormat(
            CoreMediaGlue::CMFormatDescriptionGetMediaSubType(
                [deviceFormat formatDescription]));

    CoreMediaGlue::CMVideoDimensions dimensions =
        CoreMediaGlue::CMVideoFormatDescriptionGetDimensions(
            [deviceFormat formatDescription]);

    // One entry per supported frame rate range, using the range's maximum.
    for (CrAVFrameRateRange* frameRateRange in
         [deviceFormat videoSupportedFrameRateRanges]) {
      media::VideoCaptureFormat captureFormat(
          gfx::Size(dimensions.width, dimensions.height),
          frameRateRange.maxFrameRate, pixelFormat);
      formats->push_back(captureFormat);
      DVLOG(2) << name.name() << " "
               << media::VideoCaptureFormat::ToString(captureFormat);
    }
  }
}
97 #pragma mark Public methods
// Initializes the capturer with |frameReceiver|, the (unowned) sink for
// decoded frames and errors. |frameReceiver| must be non-null; it can later
// be swapped or cleared via -setFrameReceiver:. Must run on the main thread.
- (id)initWithFrameReceiver:(media::VideoCaptureDeviceMac*)frameReceiver {
  if ((self = [super init])) {
    DCHECK(main_thread_checker_.CalledOnValidThread());
    DCHECK(frameReceiver);
    [self setFrameReceiver:frameReceiver];
    // The capture session is created once here and reused across device
    // (re)configurations in -setCaptureDevice:.
    captureSession_.reset(
        [[AVFoundationGlue::AVCaptureSessionClass() alloc] init]);
  }
  return self;
// MRC dealloc: stop the session (synchronously) and unregister notification
// observers before destruction so no frames or errors reach a dying object.
- (void)dealloc {
  [self stopCapture];
  [super dealloc];
// Sets (or clears, with nullptr) the frame/error receiver. Guarded by |lock_|
// so it is safe against the concurrent frame-delivery callback.
- (void)setFrameReceiver:(media::VideoCaptureDeviceMac*)frameReceiver {
  base::AutoLock lock(lock_);
  frameReceiver_ = frameReceiver;
// Selects the capture device identified by |deviceId| and wires it into the
// session (input + video data output). Passing nil tears down the current
// input/output instead. Returns YES on success; on failure an error string is
// sent to the frame receiver. Must run on the main thread.
- (BOOL)setCaptureDevice:(NSString*)deviceId {
  DCHECK(captureSession_);
  DCHECK(main_thread_checker_.CalledOnValidThread());

  if (!deviceId) {
    // First stop the capture session, if it's running.
    [self stopCapture];
    // Now remove the input and output from the capture session.
    [captureSession_ removeOutput:captureVideoDataOutput_];
    if (captureDeviceInput_) {
      [captureSession_ removeInput:captureDeviceInput_];
      // No need to release |captureDeviceInput_|, is owned by the session.
      captureDeviceInput_ = nil;
    }
    return YES;
  }

  // Look for input device with requested name.
  captureDevice_ = [AVCaptureDeviceGlue deviceWithUniqueID:deviceId];
  if (!captureDevice_) {
    [self
        sendErrorString:[NSString stringWithUTF8String:
                                      "Could not open video capture device."]];
    return NO;
  }

  // Create the capture input associated with the device. Easy peasy.
  NSError* error = nil;
  captureDeviceInput_ =
      [AVCaptureDeviceInputGlue deviceInputWithDevice:captureDevice_
                                                error:&error];
  if (!captureDeviceInput_) {
    captureDevice_ = nil;
    [self sendErrorString:
              [NSString stringWithFormat:
                            @"Could not create video capture input (%@): %@",
                            [error localizedDescription],
                            [error localizedFailureReason]]];
    return NO;
  }
  [captureSession_ addInput:captureDeviceInput_];

  // Create a new data output for video. The data output is configured to
  // discard late frames by default.
  captureVideoDataOutput_.reset(
      [[AVFoundationGlue::AVCaptureVideoDataOutputClass() alloc] init]);
  if (!captureVideoDataOutput_) {
    // Roll back the input added above so the session is left untouched.
    [captureSession_ removeInput:captureDeviceInput_];
    [self sendErrorString:[NSString stringWithUTF8String:
                                        "Could not create video data output."]];
    return NO;
  }
  [captureVideoDataOutput_ setAlwaysDiscardsLateVideoFrames:true];
  // Frames are delivered on a global dispatch queue, i.e. NOT the main
  // thread; the delegate callback re-samples its thread checker accordingly.
  [captureVideoDataOutput_
      setSampleBufferDelegate:self
                        queue:dispatch_get_global_queue(
                                  DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)];
  [captureSession_ addOutput:captureVideoDataOutput_];
  return YES;
// Configures the video data output for |width|x|height| at |frameRate|.
// Picks the capture FourCC: MJPEG is taken as soon as found for
// larger-than-VGA requests; otherwise the device format that maps to the
// most preferred Chromium pixel format wins. Always returns YES.
- (BOOL)setCaptureHeight:(int)height
                   width:(int)width
               frameRate:(float)frameRate {
  // Check if either of VideoCaptureDeviceMac::AllocateAndStart() or
  // VideoCaptureDeviceMac::ReceiveFrame() is calling here, depending on the
  // running state. VCDM::ReceiveFrame() calls here to change aspect ratio.
  DCHECK((![captureSession_ isRunning] &&
          main_thread_checker_.CalledOnValidThread()) ||
         callback_thread_checker_.CalledOnValidThread());

  frameWidth_ = width;
  frameHeight_ = height;
  frameRate_ = frameRate;

  // Default to 2vuy (422YpCbCr8) unless a better device format is found.
  FourCharCode best_fourcc = kCVPixelFormatType_422YpCbCr8;
  const bool prefer_mjpeg =
      width > kMjpegWidthThreshold || height > kMjpegHeightThreshold;
  for (CrAVCaptureDeviceFormat* format in captureDevice_.formats) {
    const FourCharCode fourcc =
        CoreMediaGlue::CMFormatDescriptionGetMediaSubType(
            [format formatDescription]);
    if (prefer_mjpeg &&
        fourcc == CoreMediaGlue::kCMVideoCodecType_JPEG_OpenDML) {
      best_fourcc = fourcc;
      break;
    }
    // Compare according to Chromium preference.
    if (FourCCToChromiumPixelFormat(fourcc) <
        FourCCToChromiumPixelFormat(best_fourcc)) {
      best_fourcc = fourcc;
    }
  }

  // The capture output has to be configured, despite Mac documentation
  // detailing that setting the sessionPreset would be enough. The reason for
  // this mismatch is probably because most of the AVFoundation docs are written
  // for iOS and not for MacOsX. AVVideoScalingModeKey() refers to letterboxing
  // yes/no and preserve aspect ratio yes/no when scaling. Currently we set
  // cropping and preservation.
  NSDictionary* videoSettingsDictionary = @{
    (id)kCVPixelBufferWidthKey : @(width), (id)
    kCVPixelBufferHeightKey : @(height), (id)
    kCVPixelBufferPixelFormatTypeKey : @(best_fourcc),
    AVFoundationGlue::AVVideoScalingModeKey() :
        AVFoundationGlue::AVVideoScalingModeResizeAspectFill()
  };
  [captureVideoDataOutput_ setVideoSettings:videoSettingsDictionary];

  CrAVCaptureConnection* captureConnection = [captureVideoDataOutput_
      connectionWithMediaType:AVFoundationGlue::AVMediaTypeVideo()];
  // Check selector existence, related to bugs http://crbug.com/327532 and
  // http://crbug.com/328096.
  // CMTimeMake accepts integer argumenst but |frameRate| is float, round it.
  if ([captureConnection
          respondsToSelector:@selector(isVideoMinFrameDurationSupported)] &&
      [captureConnection isVideoMinFrameDurationSupported]) {
    [captureConnection
        setVideoMinFrameDuration:CoreMediaGlue::CMTimeMake(
                                     media::kFrameRatePrecision,
                                     (int)(frameRate *
                                           media::kFrameRatePrecision))];
  }
  // Same frame duration for min and max pins the connection to |frameRate|.
  if ([captureConnection
          respondsToSelector:@selector(isVideoMaxFrameDurationSupported)] &&
      [captureConnection isVideoMaxFrameDurationSupported]) {
    [captureConnection
        setVideoMaxFrameDuration:CoreMediaGlue::CMTimeMake(
                                     media::kFrameRatePrecision,
                                     (int)(frameRate *
                                           media::kFrameRatePrecision))];
  }
  return YES;
// Starts the capture session. Returns NO if the session was never created.
// Must run on the main thread.
- (BOOL)startCapture {
  DCHECK(main_thread_checker_.CalledOnValidThread());
  if (!captureSession_) {
    DLOG(ERROR) << "Video capture session not initialized.";
    return NO;
  }
  // Subscribe to session runtime errors before starting, so that any failure
  // is reported through -onVideoError:.
  [[NSNotificationCenter defaultCenter]
      addObserver:self
         selector:@selector(onVideoError:)
             name:AVFoundationGlue::AVCaptureSessionRuntimeErrorNotification()
           object:captureSession_];
  [captureSession_ startRunning];
  return YES;
}
// Stops the capture session (synchronously, if it is running) and removes
// this object from all notification observation. Must run on the main thread.
- (void)stopCapture {
  DCHECK(main_thread_checker_.CalledOnValidThread());
  if ([captureSession_ isRunning])
    [captureSession_ stopRunning];  // Synchronous.
  [[NSNotificationCenter defaultCenter] removeObserver:self];
278 #pragma mark Private methods
// |captureOutput| is called by the capture device to deliver a new frame.
// Runs on the delegate dispatch queue, never the main thread. Extracts the
// raw frame bytes (block buffer for MJPEG, locked pixel buffer otherwise)
// and forwards them to |frameReceiver_| under |lock_|.
- (void)captureOutput:(CrAVCaptureOutput*)captureOutput
    didOutputSampleBuffer:(CoreMediaGlue::CMSampleBufferRef)sampleBuffer
           fromConnection:(CrAVCaptureConnection*)connection {
  // AVFoundation calls from a number of threads, depending on, at least, if
  // Chrome is on foreground or background. Sample the actual thread here.
  callback_thread_checker_.DetachFromThread();
  CHECK(callback_thread_checker_.CalledOnValidThread());

  // Describe the incoming frame: FourCC and dimensions come from the sample
  // buffer itself; the frame rate is the one last configured.
  const CoreMediaGlue::CMFormatDescriptionRef formatDescription =
      CoreMediaGlue::CMSampleBufferGetFormatDescription(sampleBuffer);
  const FourCharCode fourcc =
      CoreMediaGlue::CMFormatDescriptionGetMediaSubType(formatDescription);
  const CoreMediaGlue::CMVideoDimensions dimensions =
      CoreMediaGlue::CMVideoFormatDescriptionGetDimensions(formatDescription);
  const media::VideoCaptureFormat captureFormat(
      gfx::Size(dimensions.width, dimensions.height), frameRate_,
      FourCCToChromiumPixelFormat(fourcc));

  char* baseAddress = 0;
  size_t frameSize = 0;
  // Non-nil only while a pixel buffer is locked; used to unlock at the end.
  CVImageBufferRef videoFrame = nil;
  if (fourcc == CoreMediaGlue::kCMVideoCodecType_JPEG_OpenDML) {
    // If MJPEG, use block buffer instead of pixel buffer.
    CoreMediaGlue::CMBlockBufferRef blockBuffer =
        CoreMediaGlue::CMSampleBufferGetDataBuffer(sampleBuffer);
    if (blockBuffer) {
      size_t lengthAtOffset;
      CoreMediaGlue::CMBlockBufferGetDataPointer(
          blockBuffer, 0, &lengthAtOffset, &frameSize, &baseAddress);
      // Expect the MJPEG data to be available as a contiguous reference, i.e.
      // not covered by multiple memory blocks.
      CHECK_EQ(lengthAtOffset, frameSize);
    }
  } else {
    videoFrame = CoreMediaGlue::CMSampleBufferGetImageBuffer(sampleBuffer);
    // Lock the frame and calculate frame size.
    if (CVPixelBufferLockBaseAddress(videoFrame, kCVPixelBufferLock_ReadOnly) ==
        kCVReturnSuccess) {
      baseAddress = static_cast<char*>(CVPixelBufferGetBaseAddress(videoFrame));
      frameSize = CVPixelBufferGetHeight(videoFrame) *
                  CVPixelBufferGetBytesPerRow(videoFrame);
    } else {
      // Lock failed: clear |videoFrame| so we neither deliver this frame nor
      // attempt to unlock an unlocked buffer below.
      videoFrame = nil;
    }
  }

  // Deliver under |lock_| so -setFrameReceiver: cannot clear the receiver
  // mid-delivery. |baseAddress| is null when extraction failed above.
  {
    base::AutoLock lock(lock_);
    if (frameReceiver_ && baseAddress) {
      frameReceiver_->ReceiveFrame(reinterpret_cast<uint8_t*>(baseAddress),
                                   frameSize, captureFormat, 0, 0);
    }
  }

  if (videoFrame)
    CVPixelBufferUnlockBaseAddress(videoFrame, kCVPixelBufferLock_ReadOnly);
// Handler for AVCaptureSessionRuntimeErrorNotification: formats the NSError
// carried in the notification and forwards it via -sendErrorString:.
- (void)onVideoError:(NSNotification*)errorNotification {
  NSDictionary* userInfo = [errorNotification userInfo];
  // ObjCCast yields nil if the value is missing or not an NSError; messaging
  // nil below then produces "(null)" placeholders in the formatted string.
  NSError* error = base::mac::ObjCCast<NSError>(
      [userInfo objectForKey:AVFoundationGlue::AVCaptureSessionErrorKey()]);
  NSString* errorMessage =
      [NSString stringWithFormat:@"%@: %@", [error localizedDescription],
                                 [error localizedFailureReason]];
  [self sendErrorString:errorMessage];
}
// Logs |error| and forwards its UTF-8 form to |frameReceiver_| (if any),
// holding |lock_| against concurrent -setFrameReceiver: calls.
- (void)sendErrorString:(NSString*)error {
  DLOG(ERROR) << [error UTF8String];
  base::AutoLock lock(lock_);
  if (frameReceiver_)
    frameReceiver_->ReceiveError([error UTF8String]);
354 @end