// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#import "media/video/capture/mac/video_capture_device_qtkit_mac.h"

#import <QTKit/QTKit.h>

#include "base/debug/crash_logging.h"
#include "base/logging.h"
#include "base/mac/scoped_nsexception_enabler.h"
#include "media/base/video_capture_types.h"
#include "media/video/capture/mac/video_capture_device_mac.h"
#include "media/video/capture/video_capture_device.h"
#include "ui/gfx/geometry/size.h"

@implementation VideoCaptureDeviceQTKit

#pragma mark Class methods

+ (void)getDeviceNames:(NSMutableDictionary*)deviceNames {
  // Third-party drivers often throw exceptions, which are fatal in
  // Chromium (see comments in scoped_nsexception_enabler.h). The
  // following catches any exceptions and continues in an orderly
  // fashion with no devices detected.
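  // If the block throws, RunBlockIgnoringExceptions() returns nil, and fast
  // enumeration over nil below simply yields no devices.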
  NSArray* captureDevices =
      base::mac::RunBlockIgnoringExceptions(^{
        return [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo];
      });

  for (QTCaptureDevice* device in captureDevices) {
    if ([[device attributeForKey:QTCaptureDeviceSuspendedAttribute] boolValue])
      continue;
    DeviceNameAndTransportType* nameAndTransportType =
        [[[DeviceNameAndTransportType alloc]
             initWithName:[device localizedDisplayName]
            transportType:media::kIOAudioDeviceTransportTypeUnknown]
            autorelease];
    [deviceNames setObject:nameAndTransportType
                    forKey:[device uniqueID]];
  }
}

+ (NSDictionary*)deviceNames {
  NSMutableDictionary* deviceNames =
      [[[NSMutableDictionary alloc] init] autorelease];

  // TODO(shess): Post to the main thread to see if that helps
  // http://crbug.com/139164
  [self performSelectorOnMainThread:@selector(getDeviceNames:)
                         withObject:deviceNames
                      waitUntilDone:YES];
  return deviceNames;
}

#pragma mark Public methods

- (id)initWithFrameReceiver:(media::VideoCaptureDeviceMac*)frameReceiver {
  self = [super init];
  if (self) {
    frameReceiver_ = frameReceiver;
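    // |lock_| guards |frameReceiver_|, which may be cleared by the client via
    // -setFrameReceiver: while the QTKit delegate callback is reading it.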
    lock_ = [[NSLock alloc] init];
  }
  return self;
}

- (void)dealloc {
  [captureSession_ release];
  [captureDeviceInput_ release];
  [lock_ release];
  [super dealloc];
}

- (void)setFrameReceiver:(media::VideoCaptureDeviceMac*)frameReceiver {
  [lock_ lock];
  frameReceiver_ = frameReceiver;
  [lock_ unlock];
}

- (BOOL)setCaptureDevice:(NSString*)deviceId {
  if (deviceId) {
    // Set the capture device.
    if (captureDeviceInput_) {
      DLOG(ERROR) << "Video capture device already set.";
      return NO;
    }

    // TODO(mcasas): Consider using [QTCaptureDevice deviceWithUniqueID:]
    // instead of explicitly forcing reenumeration of devices.
    NSArray* captureDevices =
        [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo];
    NSArray* captureDevicesNames =
        [captureDevices valueForKey:@"uniqueID"];
    NSUInteger index = [captureDevicesNames indexOfObject:deviceId];
    if (index == NSNotFound) {
      [self sendErrorString:[NSString
          stringWithUTF8String:"Video capture device not found."]];
      return NO;
    }
    QTCaptureDevice* device = [captureDevices objectAtIndex:index];
    if ([[device attributeForKey:QTCaptureDeviceSuspendedAttribute]
            boolValue]) {
      [self sendErrorString:[NSString
          stringWithUTF8String:"Cannot open suspended video capture device."]];
      return NO;
    }
    NSError* error;
    if (![device open:&error]) {
      [self sendErrorString:[NSString
          stringWithFormat:@"Could not open video capture device (%@): %@",
                           [error localizedDescription],
                           [error localizedFailureReason]]];
      return NO;
    }
    captureDeviceInput_ = [[QTCaptureDeviceInput alloc] initWithDevice:device];
    captureSession_ = [[QTCaptureSession alloc] init];

    QTCaptureDecompressedVideoOutput* captureDecompressedOutput =
        [[[QTCaptureDecompressedVideoOutput alloc] init] autorelease];
    [captureDecompressedOutput setDelegate:self];
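    // Late frames are dropped rather than queued, so the delegate only sees
    // current frames and stale buffers do not accumulate.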
    [captureDecompressedOutput setAutomaticallyDropsLateVideoFrames:YES];
    if (![captureSession_ addOutput:captureDecompressedOutput error:&error]) {
      [self sendErrorString:[NSString
          stringWithFormat:@"Could not connect video capture output (%@): %@",
                           [error localizedDescription],
                           [error localizedFailureReason]]];
      return NO;
    }

    // This key can be used to check if video capture code was related to a
    // crash.
    base::debug::SetCrashKeyValue("VideoCaptureDeviceQTKit", "OpenedDevice");

    // Set the video pixel format to 2VUY (a.k.a. UYVY, packed 4:2:2).
    NSDictionary* captureDictionary = [NSDictionary
        dictionaryWithObject:
            [NSNumber numberWithUnsignedInt:kCVPixelFormatType_422YpCbCr8]
                      forKey:(id)kCVPixelBufferPixelFormatTypeKey];
    [captureDecompressedOutput setPixelBufferAttributes:captureDictionary];

    return YES;
  } else {
    // Remove the previously set capture device.
    if (!captureDeviceInput_) {
      // Being here means stopping a device that never started OK in the first
      // place; log it.
      [self sendLogString:[NSString
          stringWithUTF8String:"No video capture device set, on removal."]];
      return YES;
    }
    // Tear down input and output, stop the capture and deregister observers.
    [self stopCapture];
    [captureSession_ release];
    captureSession_ = nil;
    [captureDeviceInput_ release];
    captureDeviceInput_ = nil;
    return YES;
  }
}

- (BOOL)setCaptureHeight:(int)height
                   width:(int)width
               frameRate:(float)frameRate {
  if (!captureDeviceInput_) {
    [self sendErrorString:[NSString
        stringWithUTF8String:"No video capture device set."]];
    return NO;
  }
  if ([[captureSession_ outputs] count] != 1) {
    [self sendErrorString:[NSString
        stringWithUTF8String:"Video capture capabilities already set."]];
    return NO;
  }
  if (frameRate <= 0.0f) {
    [self sendErrorString:[NSString stringWithUTF8String:"Wrong frame rate."]];
    return NO;
  }

  frameRate_ = frameRate;

  QTCaptureDecompressedVideoOutput* output =
      [[captureSession_ outputs] objectAtIndex:0];

  // Set up desired output properties. The old capture dictionary is used to
  // retrieve the initial pixel format, which must be maintained.
  NSDictionary* videoSettingsDictionary = @{
    (id)kCVPixelBufferWidthKey : @(width),
    (id)kCVPixelBufferHeightKey : @(height),
    (id)kCVPixelBufferPixelFormatTypeKey : [[output pixelBufferAttributes]
        valueForKey:(id)kCVPixelBufferPixelFormatTypeKey]
  };
  [output setPixelBufferAttributes:videoSettingsDictionary];

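  // QTKit only exposes a minimum inter-frame interval, so 1/frameRate is an
  // upper bound on the delivery rate rather than an exact setting.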
  [output setMinimumVideoFrameInterval:(NSTimeInterval)1/frameRate];
  return YES;
}

- (BOOL)startCapture {
  if ([[captureSession_ outputs] count] == 0) {
    // Capture properties not set.
    [self sendErrorString:[NSString
        stringWithUTF8String:"Video capture device not initialized."]];
    return NO;
  }
  if ([[captureSession_ inputs] count] == 0) {
    NSError* error;
    if (![captureSession_ addInput:captureDeviceInput_ error:&error]) {
      [self sendErrorString:[NSString
          stringWithFormat:@"Could not connect video capture device (%@): %@",
                           [error localizedDescription],
                           [error localizedFailureReason]]];
      return NO;
    }
    NSNotificationCenter* notificationCenter =
        [NSNotificationCenter defaultCenter];
    [notificationCenter addObserver:self
                           selector:@selector(handleNotification:)
                               name:QTCaptureSessionRuntimeErrorNotification
                             object:captureSession_];
    [captureSession_ startRunning];
  }
  return YES;
}

- (void)stopCapture {
  // QTKit achieves thread safety and asynchronous execution by posting
  // messages to the main thread, e.g. -addOutput:. Both -removeOutput: and
  // -removeInput: post a message to the main thread while holding a lock that
  // the notification handler might need. To avoid a deadlock, we perform
  // those tasks on the main thread. See bugs http://crbug.com/152757 and
  // http://crbug.com/399792.
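  // The selector is performed with waitUntilDone:YES so that capture is fully
  // torn down before this method returns.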
  [self performSelectorOnMainThread:@selector(stopCaptureOnUIThread:)
                         withObject:nil
                      waitUntilDone:YES];
  [[NSNotificationCenter defaultCenter] removeObserver:self];
}

- (void)stopCaptureOnUIThread:(id)dummy {
  if ([[captureSession_ inputs] count] > 0) {
    DCHECK_EQ([[captureSession_ inputs] count], 1u);
    [captureSession_ removeInput:captureDeviceInput_];
    [captureSession_ stopRunning];
  }
  if ([[captureSession_ outputs] count] > 0) {
    DCHECK_EQ([[captureSession_ outputs] count], 1u);
    id output = [[captureSession_ outputs] objectAtIndex:0];
    [output setDelegate:nil];
    [captureSession_ removeOutput:output];
  }
}

// |captureOutput| is called by the capture device to deliver a new frame.
- (void)captureOutput:(QTCaptureOutput*)captureOutput
    didOutputVideoFrame:(CVImageBufferRef)videoFrame
       withSampleBuffer:(QTSampleBuffer*)sampleBuffer
         fromConnection:(QTCaptureConnection*)connection {
  [lock_ lock];
  if (!frameReceiver_) {
    [lock_ unlock];
    return;
  }

  // Lock the frame and calculate frame size.
  const int kLockFlags = 0;
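  // Note: a lock-flags value of 0 (no kCVPixelBufferLock_ReadOnly) requests
  // read/write access to the pixel data; only reads are performed here.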
  if (CVPixelBufferLockBaseAddress(videoFrame, kLockFlags)
      == kCVReturnSuccess) {
    void* baseAddress = CVPixelBufferGetBaseAddress(videoFrame);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(videoFrame);
    size_t frameWidth = CVPixelBufferGetWidth(videoFrame);
    size_t frameHeight = CVPixelBufferGetHeight(videoFrame);
    size_t frameSize = bytesPerRow * frameHeight;

    // TODO(shess): bytesPerRow may not correspond to frameWidth_*2,
    // but VideoCaptureController::OnIncomingCapturedData() requires
    // it to do so. Plumbing things through is intrusive; for now,
    // just deliver an adjusted buffer.
    // TODO(nick): This workaround could probably be eliminated by using
    // VideoCaptureController::OnIncomingCapturedVideoFrame, which supports
    // pitches.
    UInt8* addressToPass = static_cast<UInt8*>(baseAddress);
    // UYVY is 2 bytes per pixel.
    size_t expectedBytesPerRow = frameWidth * 2;
    if (bytesPerRow > expectedBytesPerRow) {
      // TODO(shess): frameHeight and frameHeight_ are not the same,
      // try to do what the surrounding code seems to assume.
      // Ironically, captureCapability and frameSize are ignored
      // anyhow.
      adjustedFrame_.resize(expectedBytesPerRow * frameHeight);
      // std::vector is contiguous according to the standard.
      UInt8* adjustedAddress = &adjustedFrame_[0];

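      // Copy row by row, discarding any per-row padding so the delivered
      // buffer is tightly packed at expectedBytesPerRow.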
      for (size_t y = 0; y < frameHeight; ++y) {
        memcpy(adjustedAddress + y * expectedBytesPerRow,
               addressToPass + y * bytesPerRow,
               expectedBytesPerRow);
      }

      addressToPass = adjustedAddress;
      frameSize = frameHeight * expectedBytesPerRow;
    }

    media::VideoCaptureFormat captureFormat(gfx::Size(frameWidth, frameHeight),
                                            frameRate_,
                                            media::PIXEL_FORMAT_UYVY);

    // The aspect ratio dictionary is often missing, in which case we report
    // a pixel aspect ratio of 0:0.
    int aspectNumerator = 0, aspectDenominator = 0;
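    // CVBufferGetAttachment() follows the Core Foundation Get rule: the
    // returned dictionary is not owned here and must not be released.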
    CFDictionaryRef aspectRatioDict = (CFDictionaryRef)CVBufferGetAttachment(
        videoFrame, kCVImageBufferPixelAspectRatioKey, NULL);
    if (aspectRatioDict) {
      CFNumberRef aspectNumeratorRef = (CFNumberRef)CFDictionaryGetValue(
          aspectRatioDict, kCVImageBufferPixelAspectRatioHorizontalSpacingKey);
      CFNumberRef aspectDenominatorRef = (CFNumberRef)CFDictionaryGetValue(
          aspectRatioDict, kCVImageBufferPixelAspectRatioVerticalSpacingKey);
      DCHECK(aspectNumeratorRef && aspectDenominatorRef) <<
          "Aspect ratio dictionary is missing its entries.";
      CFNumberGetValue(aspectNumeratorRef, kCFNumberIntType, &aspectNumerator);
      CFNumberGetValue(
          aspectDenominatorRef, kCFNumberIntType, &aspectDenominator);
    }

    // Deliver the captured video frame.
    frameReceiver_->ReceiveFrame(addressToPass, frameSize, captureFormat,
                                 aspectNumerator, aspectDenominator);

    CVPixelBufferUnlockBaseAddress(videoFrame, kLockFlags);
  }
  [lock_ unlock];
}

- (void)handleNotification:(NSNotification*)errorNotification {
  NSError* error = (NSError*)[[errorNotification userInfo]
      objectForKey:QTCaptureSessionErrorKey];
  [self sendErrorString:[NSString
      stringWithFormat:@"%@: %@",
                       [error localizedDescription],
                       [error localizedFailureReason]]];
}

- (void)sendErrorString:(NSString*)error {
  DLOG(ERROR) << [error UTF8String];
  [lock_ lock];
  if (frameReceiver_)
    frameReceiver_->ReceiveError([error UTF8String]);
  [lock_ unlock];
}

- (void)sendLogString:(NSString*)message {
  DVLOG(1) << [message UTF8String];
  [lock_ lock];
  if (frameReceiver_)
    frameReceiver_->LogMessage([message UTF8String]);
  [lock_ unlock];
}

@end