Merge Chromium + Blink git repositories
[chromium-blink-merge.git] / media / capture / video / mac / video_capture_device_avfoundation_mac.mm
blobdeb14080cfd2f2cb16709c9dd473df7cd809d2fd
1 // Copyright 2013 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #import "media/capture/video/mac/video_capture_device_avfoundation_mac.h"
7 #import <CoreVideo/CoreVideo.h>
9 #include "base/logging.h"
10 #include "base/mac/foundation_util.h"
11 #include "media/base/video_capture_types.h"
12 #include "media/capture/video/mac/video_capture_device_mac.h"
13 #include "ui/gfx/geometry/size.h"
// Prefer MJPEG if frame width or height is larger than this (i.e. larger than
// VGA in either dimension). Used by -setCaptureHeight:width:frameRate: when
// choosing the best FourCC for the requested resolution.
static const int kMjpegWidthThreshold = 640;
static const int kMjpegHeightThreshold = 480;
// Translates a Mac Core Video/Core Media FourCC pixel format into the
// equivalent Chromium media::VideoPixelFormat. Codes with no Chromium
// equivalent map to PIXEL_FORMAT_UNKNOWN.
media::VideoPixelFormat FourCCToChromiumPixelFormat(FourCharCode code) {
  if (code == kCVPixelFormatType_422YpCbCr8)
    return media::PIXEL_FORMAT_UYVY;
  if (code == CoreMediaGlue::kCMPixelFormat_422YpCbCr8_yuvs)
    return media::PIXEL_FORMAT_YUY2;
  if (code == CoreMediaGlue::kCMVideoCodecType_JPEG_OpenDML)
    return media::PIXEL_FORMAT_MJPEG;
  return media::PIXEL_FORMAT_UNKNOWN;
}
34 @implementation VideoCaptureDeviceAVFoundation
36 #pragma mark Class methods
// Fills |deviceNames| with an entry per eligible capture device, keyed by the
// device's unique ID. A device is eligible if it provides video or muxed media
// and is not suspended.
+ (void)getDeviceNames:(NSMutableDictionary*)deviceNames {
  // At this stage we already know that AVFoundation is supported and the whole
  // library is loaded and initialised, by the device monitoring.
  for (CrAVCaptureDevice* device in [AVCaptureDeviceGlue devices]) {
    const BOOL providesVideo =
        [device hasMediaType:AVFoundationGlue::AVMediaTypeVideo()] ||
        [device hasMediaType:AVFoundationGlue::AVMediaTypeMuxed()];
    if (!providesVideo || [device isSuspended])
      continue;
    DeviceNameAndTransportType* nameAndTransportType =
        [[[DeviceNameAndTransportType alloc]
             initWithName:[device localizedName]
            transportType:[device transportType]] autorelease];
    [deviceNames setObject:nameAndTransportType forKey:[device uniqueID]];
  }
}
// Returns an autoreleased dictionary of eligible capture devices, keyed by
// unique ID (see +getDeviceNames:).
+ (NSDictionary*)deviceNames {
  // The device name retrieval is not going to happen in the main thread, and
  // this might cause instabilities (it did in QTKit), so keep an eye here.
  NSMutableDictionary* names = [NSMutableDictionary dictionary];
  [self getDeviceNames:names];
  return names;
}
// Appends to |formats| every capture format supported by the device whose
// unique ID matches |name|, one entry per (resolution, max frame rate) pair.
// Does nothing if no such device exists.
+ (void)getDevice:(const media::VideoCaptureDevice::Name&)name
 supportedFormats:(media::VideoCaptureFormats*)formats {
  NSArray* devices = [AVCaptureDeviceGlue devices];
  CrAVCaptureDevice* device = nil;
  for (device in devices) {
    if ([[device uniqueID] UTF8String] == name.id())
      break;
  }
  if (device == nil)
    return;
  for (CrAVCaptureDeviceFormat* format in device.formats) {
    // MediaSubType is a CMPixelFormatType but can be used as CVPixelFormatType
    // as well according to CMFormatDescription.h
    const media::VideoPixelFormat pixelFormat = FourCCToChromiumPixelFormat(
        CoreMediaGlue::CMFormatDescriptionGetMediaSubType(
            [format formatDescription]));

    CoreMediaGlue::CMVideoDimensions dimensions =
        CoreMediaGlue::CMVideoFormatDescriptionGetDimensions(
            [format formatDescription]);

    for (CrAVFrameRateRange* frameRate in
         [format videoSupportedFrameRateRanges]) {
      // Named |captureFormat| (not |format|) to avoid shadowing the
      // CrAVCaptureDeviceFormat* loop variable above.
      media::VideoCaptureFormat captureFormat(
          gfx::Size(dimensions.width, dimensions.height),
          frameRate.maxFrameRate, pixelFormat);
      formats->push_back(captureFormat);
      DVLOG(2) << name.name() << " "
               << media::VideoCaptureFormat::ToString(captureFormat);
    }
  }
}
97 #pragma mark Public methods
// Designated initializer. |frameReceiver| receives decoded frames and errors;
// it is stored as a raw pointer guarded by |lock_| (see -setFrameReceiver:).
// Must be called on the main thread.
- (id)initWithFrameReceiver:(media::VideoCaptureDeviceMac*)frameReceiver {
  if ((self = [super init])) {
    DCHECK(main_thread_checker_.CalledOnValidThread());
    DCHECK(frameReceiver);
    [self setFrameReceiver:frameReceiver];
    // |captureSession_| is a scoped (owning) reference; this file uses manual
    // retain/release, not ARC.
    captureSession_.reset(
        [[AVFoundationGlue::AVCaptureSessionClass() alloc] init]);
  }
  return self;
}
- (void)dealloc {
  // Stops the session and removes the notification observers registered in
  // -startCapture before the object goes away.
  [self stopCapture];
  [super dealloc];  // Manual retain/release file; [super dealloc] is required.
}
// Sets (or clears, with nullptr) the frame receiver. |lock_| serializes access
// against the capture callback thread, which reads |frameReceiver_| in
// -captureOutput:didOutputSampleBuffer:fromConnection: and -sendErrorString:.
- (void)setFrameReceiver:(media::VideoCaptureDeviceMac*)frameReceiver {
  base::AutoLock lock(lock_);
  frameReceiver_ = frameReceiver;
}
// Attaches the capture device with unique ID |deviceId| to the session,
// creating its device input and a video data output; passing nil tears the
// session's input/output down instead. Returns YES on success, NO on failure
// (after reporting the error through -sendErrorString:). Must be called on the
// main thread.
- (BOOL)setCaptureDevice:(NSString*)deviceId {
  DCHECK(captureSession_);
  DCHECK(main_thread_checker_.CalledOnValidThread());

  if (!deviceId) {
    // First stop the capture session, if it's running.
    [self stopCapture];
    // Now remove the input and output from the capture session.
    [captureSession_ removeOutput:captureVideoDataOutput_];
    if (captureDeviceInput_) {
      [captureSession_ removeInput:captureDeviceInput_];
      // No need to release |captureDeviceInput_|, is owned by the session.
      captureDeviceInput_ = nil;
    }
    return YES;
  }

  // Look for input device with requested name.
  captureDevice_ = [AVCaptureDeviceGlue deviceWithUniqueID:deviceId];
  if (!captureDevice_) {
    [self
        sendErrorString:[NSString stringWithUTF8String:
                                      "Could not open video capture device."]];
    return NO;
  }

  // Create the capture input associated with the device. Easy peasy.
  NSError* error = nil;
  captureDeviceInput_ =
      [AVCaptureDeviceInputGlue deviceInputWithDevice:captureDevice_
                                                error:&error];
  if (!captureDeviceInput_) {
    captureDevice_ = nil;
    [self sendErrorString:
              [NSString stringWithFormat:
                            @"Could not create video capture input (%@): %@",
                            [error localizedDescription],
                            [error localizedFailureReason]]];
    return NO;
  }
  [captureSession_ addInput:captureDeviceInput_];

  // Create a new data output for video. The data output is configured to
  // discard late frames by default.
  captureVideoDataOutput_.reset(
      [[AVFoundationGlue::AVCaptureVideoDataOutputClass() alloc] init]);
  if (!captureVideoDataOutput_) {
    // Roll back the input added above so the session is left untouched.
    [captureSession_ removeInput:captureDeviceInput_];
    [self sendErrorString:[NSString stringWithUTF8String:
                                        "Could not create video data output."]];
    return NO;
  }
  [captureVideoDataOutput_ setAlwaysDiscardsLateVideoFrames:true];
  // Frames are delivered on a global concurrent queue; the delegate callback
  // samples its actual thread on each call (see -captureOutput:...).
  [captureVideoDataOutput_
      setSampleBufferDelegate:self
                        queue:dispatch_get_global_queue(
                                  DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)];
  [captureSession_ addOutput:captureVideoDataOutput_];
  return YES;
}
// Configures the video data output for |width| x |height| at |frameRate|:
// chooses the best FourCC for the resolution (MJPEG above VGA, otherwise
// Chromium's pixel-format preference order), applies it via the output's
// videoSettings, and clamps the connection's min/max frame durations where
// supported. Always returns YES.
- (BOOL)setCaptureHeight:(int)height
                   width:(int)width
               frameRate:(float)frameRate {
  // Check if either of VideoCaptureDeviceMac::AllocateAndStart() or
  // VideoCaptureDeviceMac::ReceiveFrame() is calling here, depending on the
  // running state. VCDM::ReceiveFrame() calls here to change aspect ratio.
  DCHECK((![captureSession_ isRunning] &&
          main_thread_checker_.CalledOnValidThread()) ||
         callback_thread_checker_.CalledOnValidThread());

  frameWidth_ = width;
  frameHeight_ = height;
  frameRate_ = frameRate;

  // Default to 2vuy; overridden below if a preferable format is found.
  FourCharCode best_fourcc = kCVPixelFormatType_422YpCbCr8;
  const bool prefer_mjpeg =
      width > kMjpegWidthThreshold || height > kMjpegHeightThreshold;
  for (CrAVCaptureDeviceFormat* format in captureDevice_.formats) {
    const FourCharCode fourcc =
        CoreMediaGlue::CMFormatDescriptionGetMediaSubType(
            [format formatDescription]);
    if (prefer_mjpeg &&
        fourcc == CoreMediaGlue::kCMVideoCodecType_JPEG_OpenDML) {
      best_fourcc = fourcc;
      break;
    }

    // Compare according to Chromium preference.
    if (media::VideoCaptureFormat::ComparePixelFormatPreference(
            FourCCToChromiumPixelFormat(fourcc),
            FourCCToChromiumPixelFormat(best_fourcc))) {
      best_fourcc = fourcc;
    }
  }

  // The capture output has to be configured, despite Mac documentation
  // detailing that setting the sessionPreset would be enough. The reason for
  // this mismatch is probably because most of the AVFoundation docs are written
  // for iOS and not for MacOsX. AVVideoScalingModeKey() refers to letterboxing
  // yes/no and preserve aspect ratio yes/no when scaling. Currently we set
  // cropping and preservation.
  NSDictionary* videoSettingsDictionary = @{
    (id) kCVPixelBufferWidthKey : @(width), (id)
    kCVPixelBufferHeightKey : @(height), (id)
    kCVPixelBufferPixelFormatTypeKey : @(best_fourcc),
    AVFoundationGlue::AVVideoScalingModeKey() :
        AVFoundationGlue::AVVideoScalingModeResizeAspectFill()
  };
  [captureVideoDataOutput_ setVideoSettings:videoSettingsDictionary];

  CrAVCaptureConnection* captureConnection = [captureVideoDataOutput_
      connectionWithMediaType:AVFoundationGlue::AVMediaTypeVideo()];
  // Check selector existence, related to bugs http://crbug.com/327532 and
  // http://crbug.com/328096.
  // CMTimeMake accepts integer arguments but |frameRate| is float, round it.
  if ([captureConnection
          respondsToSelector:@selector(isVideoMinFrameDurationSupported)] &&
      [captureConnection isVideoMinFrameDurationSupported]) {
    [captureConnection
        setVideoMinFrameDuration:CoreMediaGlue::CMTimeMake(
                                     media::kFrameRatePrecision,
                                     (int)(frameRate *
                                           media::kFrameRatePrecision))];
  }
  if ([captureConnection
          respondsToSelector:@selector(isVideoMaxFrameDurationSupported)] &&
      [captureConnection isVideoMaxFrameDurationSupported]) {
    [captureConnection
        setVideoMaxFrameDuration:CoreMediaGlue::CMTimeMake(
                                     media::kFrameRatePrecision,
                                     (int)(frameRate *
                                           media::kFrameRatePrecision))];
  }
  return YES;
}
// Registers for session runtime-error notifications and starts the session
// running. Returns NO only if the session was never initialized. Must be
// called on the main thread.
- (BOOL)startCapture {
  DCHECK(main_thread_checker_.CalledOnValidThread());
  if (!captureSession_) {
    DLOG(ERROR) << "Video capture session not initialized.";
    return NO;
  }
  // Connect the notifications.
  NSNotificationCenter* nc = [NSNotificationCenter defaultCenter];
  [nc addObserver:self
         selector:@selector(onVideoError:)
             name:AVFoundationGlue::AVCaptureSessionRuntimeErrorNotification()
           object:captureSession_];
  [captureSession_ startRunning];
  return YES;
}
// Stops the session if it is running and unregisters all notification
// observers (the runtime-error observer added in -startCapture). Must be
// called on the main thread. Safe to call repeatedly.
- (void)stopCapture {
  DCHECK(main_thread_checker_.CalledOnValidThread());
  if ([captureSession_ isRunning])
    [captureSession_ stopRunning];  // Synchronous.
  [[NSNotificationCenter defaultCenter] removeObserver:self];
}
280 #pragma mark Private methods
// |captureOutput| is called by the capture device to deliver a new frame.
// Extracts the frame bytes from |sampleBuffer| (block buffer for MJPEG, locked
// pixel buffer otherwise) and forwards them to |frameReceiver_| under |lock_|.
- (void)captureOutput:(CrAVCaptureOutput*)captureOutput
didOutputSampleBuffer:(CoreMediaGlue::CMSampleBufferRef)sampleBuffer
       fromConnection:(CrAVCaptureConnection*)connection {
  // AVFoundation calls from a number of threads, depending on, at least, if
  // Chrome is on foreground or background. Sample the actual thread here.
  callback_thread_checker_.DetachFromThread();
  CHECK(callback_thread_checker_.CalledOnValidThread());

  // Build the Chromium capture format from the buffer's own description (the
  // delivered format can differ from what was requested).
  const CoreMediaGlue::CMFormatDescriptionRef formatDescription =
      CoreMediaGlue::CMSampleBufferGetFormatDescription(sampleBuffer);
  const FourCharCode fourcc =
      CoreMediaGlue::CMFormatDescriptionGetMediaSubType(formatDescription);
  const CoreMediaGlue::CMVideoDimensions dimensions =
      CoreMediaGlue::CMVideoFormatDescriptionGetDimensions(formatDescription);
  const media::VideoCaptureFormat captureFormat(
      gfx::Size(dimensions.width, dimensions.height), frameRate_,
      FourCCToChromiumPixelFormat(fourcc));

  char* baseAddress = 0;
  size_t frameSize = 0;
  CVImageBufferRef videoFrame = nil;
  if (fourcc == CoreMediaGlue::kCMVideoCodecType_JPEG_OpenDML) {
    // If MJPEG, use block buffer instead of pixel buffer.
    CoreMediaGlue::CMBlockBufferRef blockBuffer =
        CoreMediaGlue::CMSampleBufferGetDataBuffer(sampleBuffer);
    if (blockBuffer) {
      size_t lengthAtOffset;
      CoreMediaGlue::CMBlockBufferGetDataPointer(
          blockBuffer, 0, &lengthAtOffset, &frameSize, &baseAddress);
      // Expect the MJPEG data to be available as a contiguous reference, i.e.
      // not covered by multiple memory blocks.
      CHECK_EQ(lengthAtOffset, frameSize);
    }
  } else {
    videoFrame = CoreMediaGlue::CMSampleBufferGetImageBuffer(sampleBuffer);
    // Lock the frame and calculate frame size.
    if (CVPixelBufferLockBaseAddress(videoFrame, kCVPixelBufferLock_ReadOnly) ==
        kCVReturnSuccess) {
      baseAddress = static_cast<char*>(CVPixelBufferGetBaseAddress(videoFrame));
      frameSize = CVPixelBufferGetHeight(videoFrame) *
                  CVPixelBufferGetBytesPerRow(videoFrame);
    } else {
      // A non-nil |videoFrame| below means "buffer is locked"; clear it so the
      // unlock at the end is skipped.
      videoFrame = nil;
    }
  }

  {
    // Hold |lock_| only for the delivery so -setFrameReceiver: cannot race.
    base::AutoLock lock(lock_);
    if (frameReceiver_ && baseAddress) {
      frameReceiver_->ReceiveFrame(reinterpret_cast<uint8_t*>(baseAddress),
                                   frameSize, captureFormat, 0, 0);
    }
  }

  // Unlock outside |lock_| to keep the critical section minimal.
  if (videoFrame)
    CVPixelBufferUnlockBaseAddress(videoFrame, kCVPixelBufferLock_ReadOnly);
}
// Handler for AVCaptureSessionRuntimeError notifications registered in
// -startCapture; formats the embedded NSError and reports it.
- (void)onVideoError:(NSNotification*)errorNotification {
  NSError* error = base::mac::ObjCCast<NSError>([[errorNotification userInfo]
      objectForKey:AVFoundationGlue::AVCaptureSessionErrorKey()]);
  NSString* errorMessage =
      [NSString stringWithFormat:@"%@: %@", [error localizedDescription],
                                 [error localizedFailureReason]];
  [self sendErrorString:errorMessage];
}
// Logs |error| and forwards it to |frameReceiver_|, if one is still attached.
// Takes |lock_| because it may be called from the capture callback thread as
// well as the main thread.
- (void)sendErrorString:(NSString*)error {
  DLOG(ERROR) << [error UTF8String];
  base::AutoLock lock(lock_);
  if (frameReceiver_)
    frameReceiver_->ReceiveError([error UTF8String]);
}
357 @end