// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#import "media/video/capture/mac/video_capture_device_avfoundation_mac.h"

#import <CoreVideo/CoreVideo.h>

#include "base/logging.h"
#include "base/mac/foundation_util.h"
#include "media/video/capture/mac/video_capture_device_mac.h"
#include "ui/gfx/size.h"

@implementation VideoCaptureDeviceAVFoundation

#pragma mark Class methods

+ (void)getDeviceNames:(NSMutableDictionary*)deviceNames {
  // At this stage we already know that AVFoundation is supported and that the
  // whole library has been loaded and initialised by the device monitoring.
  NSArray* devices = [AVCaptureDeviceGlue devices];
  for (CrAVCaptureDevice* device in devices) {
    if (([device hasMediaType:AVFoundationGlue::AVMediaTypeVideo()] ||
         [device hasMediaType:AVFoundationGlue::AVMediaTypeMuxed()]) &&
        ![device isSuspended]) {
      [deviceNames setObject:[device localizedName]
                      forKey:[device uniqueID]];
    }
  }
}

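// Returns an autoreleased dictionary mapping device unique IDs to localized
// device names, as collected by +getDeviceNames:.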
+ (NSDictionary*)deviceNames {
  NSMutableDictionary* deviceNames =
      [[[NSMutableDictionary alloc] init] autorelease];
  // Device name retrieval is not going to happen on the main thread, and this
  // might cause instabilities (it did in QTKit), so keep an eye here.
  [self getDeviceNames:deviceNames];
  return deviceNames;
}

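// Enumerates the formats of the device whose unique ID matches |name| and
// appends one media::VideoCaptureFormat per supported frame rate range, using
// the maximum frame rate of each range.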
+ (void)getDevice:(const media::VideoCaptureDevice::Name&)name
 supportedFormats:(media::VideoCaptureFormats*)formats {
  NSArray* devices = [AVCaptureDeviceGlue devices];
  CrAVCaptureDevice* device = nil;
  for (device in devices) {
    if ([[device uniqueID] UTF8String] == name.id())
      break;
  }
  if (device == nil)
    return;
  for (CrAVCaptureDeviceFormat* format in device.formats) {
    // MediaSubType is a CMPixelFormatType but can be used as a
    // CVPixelFormatType as well, according to CMFormatDescription.h.
    media::VideoPixelFormat pixelFormat = media::PIXEL_FORMAT_UNKNOWN;
    switch (CoreMediaGlue::CMFormatDescriptionGetMediaSubType(
        [format formatDescription])) {
      case kCVPixelFormatType_422YpCbCr8:  // Typical.
        pixelFormat = media::PIXEL_FORMAT_UYVY;
        break;
      case CoreMediaGlue::kCMPixelFormat_422YpCbCr8_yuvs:
        pixelFormat = media::PIXEL_FORMAT_YUY2;
        break;
      case CoreMediaGlue::kCMVideoCodecType_JPEG_OpenDML:
        pixelFormat = media::PIXEL_FORMAT_MJPEG;
        break;
      default:
        break;
    }

    CoreMediaGlue::CMVideoDimensions dimensions =
        CoreMediaGlue::CMVideoFormatDescriptionGetDimensions(
            [format formatDescription]);

    for (CrAVFrameRateRange* frameRate in
             [format videoSupportedFrameRateRanges]) {
      media::VideoCaptureFormat format(
          gfx::Size(dimensions.width, dimensions.height),
          static_cast<int>(frameRate.maxFrameRate),
          pixelFormat);
      formats->push_back(format);
      DVLOG(2) << name.name() << " resolution: "
               << format.frame_size.ToString() << ", fps: "
               << format.frame_rate << ", pixel format: "
               << format.pixel_format;
    }
  }
}

#pragma mark Public methods

- (id)initWithFrameReceiver:(media::VideoCaptureDeviceMac*)frameReceiver {
  if ((self = [super init])) {
    DCHECK(main_thread_checker_.CalledOnValidThread());
    DCHECK(frameReceiver);
    [self setFrameReceiver:frameReceiver];
    captureSession_.reset(
        [[AVFoundationGlue::AVCaptureSessionClass() alloc] init]);
  }
  return self;
}

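// Sets or clears the frame receiver under |lock_|, since frames and errors
// are forwarded to it from the capture callback queue.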
- (void)setFrameReceiver:(media::VideoCaptureDeviceMac*)frameReceiver {
  base::AutoLock lock(lock_);
  frameReceiver_ = frameReceiver;
}

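// Plugs the device with unique ID |deviceId| into |captureSession_| as input,
// together with a fresh video data output. A nil |deviceId| instead removes
// the current input and output from the session.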
- (BOOL)setCaptureDevice:(NSString*)deviceId {
  DCHECK(captureSession_);
  DCHECK(main_thread_checker_.CalledOnValidThread());

  if (!deviceId) {
    // First stop the capture session, if it's running.
    [self stopCapture];
    // Now remove the input and output from the capture session.
    [captureSession_ removeOutput:captureVideoDataOutput_];
    if (captureDeviceInput_) {
      [captureSession_ removeInput:captureDeviceInput_];
      // No need to release |captureDeviceInput_|; it is owned by the session.
      captureDeviceInput_ = nil;
    }
    return YES;
  }

  // Look for the input device with the requested unique ID.
  captureDevice_ = [AVCaptureDeviceGlue deviceWithUniqueID:deviceId];
  if (!captureDevice_) {
    [self sendErrorString:[NSString
        stringWithUTF8String:"Could not open video capture device."]];
    return NO;
  }

  // Create the capture input associated with the device.
  NSError* error = nil;
  captureDeviceInput_ = [AVCaptureDeviceInputGlue
      deviceInputWithDevice:captureDevice_
                      error:&error];
  if (!captureDeviceInput_) {
    captureDevice_ = nil;
    [self sendErrorString:[NSString
        stringWithFormat:@"Could not create video capture input (%@): %@",
                         [error localizedDescription],
                         [error localizedFailureReason]]];
    return NO;
  }
  [captureSession_ addInput:captureDeviceInput_];

  // Create a new data output for video. The data output is configured to
  // discard late frames by default.
  captureVideoDataOutput_.reset(
      [[AVFoundationGlue::AVCaptureVideoDataOutputClass() alloc] init]);
  if (!captureVideoDataOutput_) {
    [captureSession_ removeInput:captureDeviceInput_];
    [self sendErrorString:[NSString
        stringWithUTF8String:"Could not create video data output."]];
    return NO;
  }
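  // Frames are delivered on a default-priority global dispatch queue, i.e.
  // -captureOutput:didOutputSampleBuffer:fromConnection: runs off the main
  // thread.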
  [captureVideoDataOutput_
      setSampleBufferDelegate:self
                        queue:dispatch_get_global_queue(
                            DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)];
  [captureSession_ addOutput:captureVideoDataOutput_];
  return YES;
}

- (BOOL)setCaptureHeight:(int)height width:(int)width frameRate:(int)frameRate {
  // Either VideoCaptureDeviceMac::AllocateAndStart() or
  // VideoCaptureDeviceMac::ReceiveFrame() can call here, depending on the
  // running state; VCDM::ReceiveFrame() calls here to change the aspect ratio.
  DCHECK((![captureSession_ isRunning] &&
          main_thread_checker_.CalledOnValidThread()) ||
         callback_thread_checker_.CalledOnValidThread());

  frameWidth_ = width;
  frameHeight_ = height;
  frameRate_ = frameRate;

  // The capture output has to be configured, despite the Mac documentation
  // stating that setting the sessionPreset should be enough. The reason for
  // this mismatch is probably that most of the AVFoundation docs are written
  // for iOS and not for Mac OS X. AVVideoScalingModeKey() selects whether to
  // letterbox and whether to preserve the aspect ratio when scaling; currently
  // we crop and preserve the aspect ratio.
  NSDictionary* videoSettingsDictionary = @{
    (id)kCVPixelBufferWidthKey : @(width),
    (id)kCVPixelBufferHeightKey : @(height),
    (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_422YpCbCr8),
    AVFoundationGlue::AVVideoScalingModeKey() :
        AVFoundationGlue::AVVideoScalingModeResizeAspectFill()
  };
  [captureVideoDataOutput_ setVideoSettings:videoSettingsDictionary];

  CrAVCaptureConnection* captureConnection = [captureVideoDataOutput_
      connectionWithMediaType:AVFoundationGlue::AVMediaTypeVideo()];
  // Check selector existence, related to bugs http://crbug.com/327532 and
  // http://crbug.com/328096.
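  // Setting both the minimum and the maximum frame duration to 1/frameRate
  // pins the connection to a fixed frame rate, where the connection supports
  // configuring them.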
  if ([captureConnection
           respondsToSelector:@selector(isVideoMinFrameDurationSupported)] &&
      [captureConnection isVideoMinFrameDurationSupported]) {
    [captureConnection setVideoMinFrameDuration:
        CoreMediaGlue::CMTimeMake(1, frameRate)];
  }
  if ([captureConnection
           respondsToSelector:@selector(isVideoMaxFrameDurationSupported)] &&
      [captureConnection isVideoMaxFrameDurationSupported]) {
    [captureConnection setVideoMaxFrameDuration:
        CoreMediaGlue::CMTimeMake(1, frameRate)];
  }
  return YES;
}

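// Registers for runtime-error notifications and starts |captureSession_|.
// Returns NO if the session was never initialized.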
- (BOOL)startCapture {
  DCHECK(main_thread_checker_.CalledOnValidThread());
  if (!captureSession_) {
    DLOG(ERROR) << "Video capture session not initialized.";
    return NO;
  }
  // Connect the notifications.
  NSNotificationCenter* nc = [NSNotificationCenter defaultCenter];
  [nc addObserver:self
         selector:@selector(onVideoError:)
             name:AVFoundationGlue::AVCaptureSessionRuntimeErrorNotification()
           object:captureSession_];
  [captureSession_ startRunning];
  return YES;
}

- (void)stopCapture {
  DCHECK(main_thread_checker_.CalledOnValidThread());
  if ([captureSession_ isRunning])
    [captureSession_ stopRunning];  // Synchronous.
  [[NSNotificationCenter defaultCenter] removeObserver:self];
}

#pragma mark Private methods

// |captureOutput| is called by the capture device to deliver a new frame.
- (void)captureOutput:(CrAVCaptureOutput*)captureOutput
    didOutputSampleBuffer:(CoreMediaGlue::CMSampleBufferRef)sampleBuffer
           fromConnection:(CrAVCaptureConnection*)connection {
  // AVFoundation calls this method from a number of threads, depending on, at
  // least, whether Chrome is in the foreground or the background. Sample the
  // actual thread here.
  callback_thread_checker_.DetachFromThread();
  callback_thread_checker_.CalledOnValidThread();
  CVImageBufferRef videoFrame =
      CoreMediaGlue::CMSampleBufferGetImageBuffer(sampleBuffer);
  // Lock the frame and calculate its size.
  const int kLockFlags = 0;
  if (CVPixelBufferLockBaseAddress(videoFrame, kLockFlags) ==
      kCVReturnSuccess) {
    void* baseAddress = CVPixelBufferGetBaseAddress(videoFrame);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(videoFrame);
    size_t frameWidth = CVPixelBufferGetWidth(videoFrame);
    size_t frameHeight = CVPixelBufferGetHeight(videoFrame);
    size_t frameSize = bytesPerRow * frameHeight;
    UInt8* addressToPass = reinterpret_cast<UInt8*>(baseAddress);

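    // The buffer is assumed to be packed UYVY, matching the pixel format
    // requested from |captureVideoDataOutput_| in
    // -setCaptureHeight:width:frameRate:, so bytesPerRow * frameHeight covers
    // the whole frame.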
    media::VideoCaptureFormat captureFormat(
        gfx::Size(frameWidth, frameHeight),
        frameRate_,
        media::PIXEL_FORMAT_UYVY);
    base::AutoLock lock(lock_);
    if (frameReceiver_) {
      frameReceiver_->ReceiveFrame(addressToPass, frameSize, captureFormat,
                                   0, 0);
    }
    CVPixelBufferUnlockBaseAddress(videoFrame, kLockFlags);
  }
}

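// Handles AVCaptureSessionRuntimeErrorNotification, registered in
// -startCapture, by forwarding the error description to |frameReceiver_|.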
- (void)onVideoError:(NSNotification*)errorNotification {
  NSError* error = base::mac::ObjCCast<NSError>([[errorNotification userInfo]
      objectForKey:AVFoundationGlue::AVCaptureSessionErrorKey()]);
  [self sendErrorString:[NSString
      stringWithFormat:@"%@: %@",
                       [error localizedDescription],
                       [error localizedFailureReason]]];
}

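// Logs |error| and relays it to |frameReceiver_| under |lock_|, if a receiver
// is currently set.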
- (void)sendErrorString:(NSString*)error {
  DLOG(ERROR) << [error UTF8String];
  base::AutoLock lock(lock_);
  if (frameReceiver_)
    frameReceiver_->ReceiveError([error UTF8String]);
}

@end