// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#import "media/video/capture/mac/video_capture_device_avfoundation_mac.h"

#import <CoreVideo/CoreVideo.h>

#include "base/logging.h"
#include "base/mac/foundation_util.h"
#include "media/video/capture/mac/video_capture_device_mac.h"
#include "ui/gfx/geometry/size.h"

// Prefer MJPEG if frame width or height is larger than this.
static const int kMjpegWidthThreshold = 640;
static const int kMjpegHeightThreshold = 480;

// This function translates Mac Core Video pixel formats to Chromium pixel
// formats. Chromium pixel formats are sorted in order of preference.
media::VideoPixelFormat FourCCToChromiumPixelFormat(FourCharCode code) {
  switch (code) {
    case kCVPixelFormatType_422YpCbCr8:
      return media::PIXEL_FORMAT_UYVY;
    case CoreMediaGlue::kCMPixelFormat_422YpCbCr8_yuvs:
      return media::PIXEL_FORMAT_YUY2;
    case CoreMediaGlue::kCMVideoCodecType_JPEG_OpenDML:
      return media::PIXEL_FORMAT_MJPEG;
    default:
      return media::PIXEL_FORMAT_UNKNOWN;
  }
}

@implementation VideoCaptureDeviceAVFoundation

#pragma mark Class methods

+ (void)getDeviceNames:(NSMutableDictionary*)deviceNames {
  // At this stage we already know that AVFoundation is supported and that the
  // whole library is loaded and initialised, by the device monitoring.
  NSArray* devices = [AVCaptureDeviceGlue devices];
  for (CrAVCaptureDevice* device in devices) {
    if (([device hasMediaType:AVFoundationGlue::AVMediaTypeVideo()] ||
         [device hasMediaType:AVFoundationGlue::AVMediaTypeMuxed()]) &&
        ![device isSuspended]) {
      DeviceNameAndTransportType* nameAndTransportType =
          [[[DeviceNameAndTransportType alloc]
               initWithName:[device localizedName]
              transportType:[device transportType]] autorelease];
      [deviceNames setObject:nameAndTransportType
                      forKey:[device uniqueID]];
    }
  }
}

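// Returns a dictionary that maps each eligible device's unique ID to its
// DeviceNameAndTransportType, as gathered by +getDeviceNames:.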
+ (NSDictionary*)deviceNames {
  NSMutableDictionary* deviceNames =
      [[[NSMutableDictionary alloc] init] autorelease];
  // The device name retrieval is not going to happen on the main thread, and
  // this might cause instabilities (it did in QTKit), so keep an eye here.
  [self getDeviceNames:deviceNames];
  return deviceNames;
}

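// Appends to |formats| every capture format (frame size, maximum frame rate
// and pixel format) advertised by the device whose unique ID matches |name|.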
+ (void)getDevice:(const media::VideoCaptureDevice::Name&)name
 supportedFormats:(media::VideoCaptureFormats*)formats {
  NSArray* devices = [AVCaptureDeviceGlue devices];
  CrAVCaptureDevice* device = nil;
  for (device in devices) {
    if ([[device uniqueID] UTF8String] == name.id())
      break;
  }
  if (device == nil)
    return;
  for (CrAVCaptureDeviceFormat* format in device.formats) {
    // MediaSubType is a CMPixelFormatType but can be used as a
    // CVPixelFormatType as well, according to CMFormatDescription.h.
    const media::VideoPixelFormat pixelFormat = FourCCToChromiumPixelFormat(
        CoreMediaGlue::CMFormatDescriptionGetMediaSubType(
            [format formatDescription]));

    CoreMediaGlue::CMVideoDimensions dimensions =
        CoreMediaGlue::CMVideoFormatDescriptionGetDimensions(
            [format formatDescription]);

    for (CrAVFrameRateRange* frameRate in
             [format videoSupportedFrameRateRanges]) {
      media::VideoCaptureFormat format(
          gfx::Size(dimensions.width, dimensions.height),
          frameRate.maxFrameRate,
          pixelFormat);
      formats->push_back(format);
      DVLOG(2) << name.name() << " " << format.ToString();
    }
  }
}

#pragma mark Public methods

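// Initializes the receiver with |frameReceiver| and creates an empty
// AVCaptureSession. Must be called on the main thread.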
- (id)initWithFrameReceiver:(media::VideoCaptureDeviceMac*)frameReceiver {
  if ((self = [super init])) {
    DCHECK(main_thread_checker_.CalledOnValidThread());
    DCHECK(frameReceiver);
    [self setFrameReceiver:frameReceiver];
    captureSession_.reset(
        [[AVFoundationGlue::AVCaptureSessionClass() alloc] init]);
  }
  return self;
}

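// Sets (or replaces) the frame receiver under |lock_|, since frames and
// errors are delivered on a different thread than the one configuring it.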
- (void)setFrameReceiver:(media::VideoCaptureDeviceMac*)frameReceiver {
  base::AutoLock lock(lock_);
  frameReceiver_ = frameReceiver;
}

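// Plugs the device identified by |deviceId| into the capture session, creating
// the corresponding device input and video data output. A nil |deviceId| tears
// down the current input and output instead. Returns YES on success.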
- (BOOL)setCaptureDevice:(NSString*)deviceId {
  DCHECK(captureSession_);
  DCHECK(main_thread_checker_.CalledOnValidThread());

  if (!deviceId) {
    // First stop the capture session, if it's running.
    [self stopCapture];
    // Now remove the input and output from the capture session.
    [captureSession_ removeOutput:captureVideoDataOutput_];
    if (captureDeviceInput_) {
      [captureSession_ removeInput:captureDeviceInput_];
      // No need to release |captureDeviceInput_|; it is owned by the session.
      captureDeviceInput_ = nil;
      captureDevice_ = nil;
    }
    return YES;
  }

  // Look for an input device with the requested name.
  captureDevice_ = [AVCaptureDeviceGlue deviceWithUniqueID:deviceId];
  if (!captureDevice_) {
    [self sendErrorString:[NSString
        stringWithUTF8String:"Could not open video capture device."]];
    return NO;
  }

  // Create the capture input associated with the device. Easy peasy.
  NSError* error = nil;
  captureDeviceInput_ = [AVCaptureDeviceInputGlue
      deviceInputWithDevice:captureDevice_
                      error:&error];
  if (!captureDeviceInput_) {
    captureDevice_ = nil;
    [self sendErrorString:[NSString
        stringWithFormat:@"Could not create video capture input (%@): %@",
                         [error localizedDescription],
                         [error localizedFailureReason]]];
    return NO;
  }
  [captureSession_ addInput:captureDeviceInput_];

  // Create a new data output for video. The data output is configured to
  // discard late frames by default.
  captureVideoDataOutput_.reset(
      [[AVFoundationGlue::AVCaptureVideoDataOutputClass() alloc] init]);
  if (!captureVideoDataOutput_) {
    [captureSession_ removeInput:captureDeviceInput_];
    [self sendErrorString:[NSString
        stringWithUTF8String:"Could not create video data output."]];
    return NO;
  }
  [captureVideoDataOutput_ setAlwaysDiscardsLateVideoFrames:true];
  [captureVideoDataOutput_
      setSampleBufferDelegate:self
                        queue:dispatch_get_global_queue(
                                  DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)];
  [captureSession_ addOutput:captureVideoDataOutput_];
  return YES;
}

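// Configures the video data output for the requested size and frame rate,
// choosing the best pixel format for the device (MJPEG is preferred above
// roughly VGA sizes) and clamping the connection's frame duration where the
// connection supports it.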
- (BOOL)setCaptureHeight:(int)height
                   width:(int)width
               frameRate:(float)frameRate {
  // Check that this is being called from either
  // VideoCaptureDeviceMac::AllocateAndStart() or
  // VideoCaptureDeviceMac::ReceiveFrame(), depending on the running state.
  // VCDM::ReceiveFrame() calls here to change the aspect ratio.
  DCHECK((![captureSession_ isRunning] &&
          main_thread_checker_.CalledOnValidThread()) ||
         callback_thread_checker_.CalledOnValidThread());

  frameWidth_ = width;
  frameHeight_ = height;
  frameRate_ = frameRate;

  FourCharCode best_fourcc = kCVPixelFormatType_422YpCbCr8;
  const bool prefer_mjpeg =
      width > kMjpegWidthThreshold || height > kMjpegHeightThreshold;
  for (CrAVCaptureDeviceFormat* format in captureDevice_.formats) {
    const FourCharCode fourcc =
        CoreMediaGlue::CMFormatDescriptionGetMediaSubType(
            [format formatDescription]);
    if (prefer_mjpeg &&
        fourcc == CoreMediaGlue::kCMVideoCodecType_JPEG_OpenDML) {
      best_fourcc = fourcc;
      break;
    }
    // Compare according to Chromium preference.
    if (FourCCToChromiumPixelFormat(fourcc) <
        FourCCToChromiumPixelFormat(best_fourcc)) {
      best_fourcc = fourcc;
    }
  }

  // The capture output has to be configured, despite the Mac documentation
  // stating that setting the sessionPreset would be enough. The mismatch is
  // probably because most of the AVFoundation docs are written for iOS and not
  // for Mac OS X. AVVideoScalingModeKey() controls letterboxing and aspect
  // ratio preservation when scaling; currently we crop and preserve the aspect
  // ratio.
  NSDictionary* videoSettingsDictionary = @{
    (id)kCVPixelBufferWidthKey : @(width),
    (id)kCVPixelBufferHeightKey : @(height),
    (id)kCVPixelBufferPixelFormatTypeKey : @(best_fourcc),
    AVFoundationGlue::AVVideoScalingModeKey() :
        AVFoundationGlue::AVVideoScalingModeResizeAspectFill()
  };
  [captureVideoDataOutput_ setVideoSettings:videoSettingsDictionary];

  CrAVCaptureConnection* captureConnection = [captureVideoDataOutput_
      connectionWithMediaType:AVFoundationGlue::AVMediaTypeVideo()];
  // Check selector existence, related to bugs http://crbug.com/327532 and
  // http://crbug.com/328096.
  // CMTimeMake accepts integer arguments but |frameRate| is float; round it.
  if ([captureConnection
          respondsToSelector:@selector(isVideoMinFrameDurationSupported)] &&
      [captureConnection isVideoMinFrameDurationSupported]) {
    [captureConnection setVideoMinFrameDuration:
        CoreMediaGlue::CMTimeMake(media::kFrameRatePrecision,
            (int)(frameRate * media::kFrameRatePrecision))];
  }
  if ([captureConnection
          respondsToSelector:@selector(isVideoMaxFrameDurationSupported)] &&
      [captureConnection isVideoMaxFrameDurationSupported]) {
    [captureConnection setVideoMaxFrameDuration:
        CoreMediaGlue::CMTimeMake(media::kFrameRatePrecision,
            (int)(frameRate * media::kFrameRatePrecision))];
  }
  return YES;
}

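// Registers for session runtime-error notifications and starts the capture
// session. Returns NO if the session was never initialized.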
- (BOOL)startCapture {
  DCHECK(main_thread_checker_.CalledOnValidThread());
  if (!captureSession_) {
    DLOG(ERROR) << "Video capture session not initialized.";
    return NO;
  }
  // Connect the notifications.
  NSNotificationCenter* nc = [NSNotificationCenter defaultCenter];
  [nc addObserver:self
         selector:@selector(onVideoError:)
             name:AVFoundationGlue::AVCaptureSessionRuntimeErrorNotification()
           object:captureSession_];
  [captureSession_ startRunning];
  return YES;
}

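// Synchronously stops the capture session, if running, and unregisters from
// the runtime-error notifications.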
- (void)stopCapture {
  DCHECK(main_thread_checker_.CalledOnValidThread());
  if ([captureSession_ isRunning])
    [captureSession_ stopRunning];  // Synchronous.
  [[NSNotificationCenter defaultCenter] removeObserver:self];
}

#pragma mark Private methods

// |captureOutput| is called by the capture device to deliver a new frame.
- (void)captureOutput:(CrAVCaptureOutput*)captureOutput
    didOutputSampleBuffer:(CoreMediaGlue::CMSampleBufferRef)sampleBuffer
           fromConnection:(CrAVCaptureConnection*)connection {
  // AVFoundation calls this delegate from a number of threads, depending, at
  // least, on whether Chrome is in the foreground or the background. Sample
  // the actual thread here.
  callback_thread_checker_.DetachFromThread();
  CHECK(callback_thread_checker_.CalledOnValidThread());

  const CoreMediaGlue::CMFormatDescriptionRef formatDescription =
      CoreMediaGlue::CMSampleBufferGetFormatDescription(sampleBuffer);
  const FourCharCode fourcc =
      CoreMediaGlue::CMFormatDescriptionGetMediaSubType(formatDescription);
  const CoreMediaGlue::CMVideoDimensions dimensions =
      CoreMediaGlue::CMVideoFormatDescriptionGetDimensions(formatDescription);
  const media::VideoCaptureFormat captureFormat(
      gfx::Size(dimensions.width, dimensions.height),
      frameRate_,
      FourCCToChromiumPixelFormat(fourcc));

  char* baseAddress = 0;
  size_t frameSize = 0;
  CVImageBufferRef videoFrame = nil;
  if (fourcc == CoreMediaGlue::kCMVideoCodecType_JPEG_OpenDML) {
    // If MJPEG, use the block buffer instead of the pixel buffer.
    CoreMediaGlue::CMBlockBufferRef blockBuffer =
        CoreMediaGlue::CMSampleBufferGetDataBuffer(sampleBuffer);
    if (blockBuffer) {
      size_t lengthAtOffset;
      CoreMediaGlue::CMBlockBufferGetDataPointer(
          blockBuffer, 0, &lengthAtOffset, &frameSize, &baseAddress);
      // Expect the MJPEG data to be available as a contiguous reference, i.e.
      // not covered by multiple memory blocks.
      CHECK_EQ(lengthAtOffset, frameSize);
    }
  } else {
    videoFrame = CoreMediaGlue::CMSampleBufferGetImageBuffer(sampleBuffer);
    // Lock the frame and calculate the frame size.
    if (CVPixelBufferLockBaseAddress(videoFrame, kCVPixelBufferLock_ReadOnly) ==
        kCVReturnSuccess) {
      baseAddress = static_cast<char*>(CVPixelBufferGetBaseAddress(videoFrame));
      frameSize = CVPixelBufferGetHeight(videoFrame) *
                  CVPixelBufferGetBytesPerRow(videoFrame);
    } else {
      videoFrame = nil;
    }
  }

  {
    base::AutoLock lock(lock_);
    if (frameReceiver_ && baseAddress) {
      frameReceiver_->ReceiveFrame(reinterpret_cast<uint8_t*>(baseAddress),
                                   frameSize, captureFormat, 0, 0);
    }
  }

  if (videoFrame)
    CVPixelBufferUnlockBaseAddress(videoFrame, kCVPixelBufferLock_ReadOnly);
}

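// Handles AVCaptureSessionRuntimeErrorNotification by forwarding the error
// description and failure reason to the frame receiver.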
- (void)onVideoError:(NSNotification*)errorNotification {
  NSError* error = base::mac::ObjCCast<NSError>([[errorNotification userInfo]
      objectForKey:AVFoundationGlue::AVCaptureSessionErrorKey()]);
  [self sendErrorString:[NSString
      stringWithFormat:@"%@: %@",
                       [error localizedDescription],
                       [error localizedFailureReason]]];
}

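// Logs |error| and reports it to |frameReceiver_| under |lock_|.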
- (void)sendErrorString:(NSString*)error {
  DLOG(ERROR) << [error UTF8String];
  base::AutoLock lock(lock_);
  if (frameReceiver_)
    frameReceiver_->ReceiveError([error UTF8String]);
}

@end