// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#import "media/capture/video/mac/video_capture_device_qtkit_mac.h"

#import <QTKit/QTKit.h>

#include "base/debug/crash_logging.h"
#include "base/logging.h"
#include "media/base/video_capture_types.h"
#include "media/capture/video/mac/video_capture_device_mac.h"
#include "media/capture/video/video_capture_device.h"
#include "ui/gfx/geometry/size.h"

@implementation VideoCaptureDeviceQTKit

#pragma mark Class methods

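// Fills |deviceNames| with one entry per available video capture device,
// keyed by the device's unique ID. Suspended devices are skipped.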
+ (void)getDeviceNames:(NSMutableDictionary*)deviceNames {
  // Third-party drivers often throw exceptions. The following catches any
  // exceptions and continues in an orderly fashion with no devices detected.
  NSArray* captureDevices = nil;
  @try {
    captureDevices =
        [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo];
  } @catch (id exception) {
  }

  for (QTCaptureDevice* device in captureDevices) {
    if ([[device attributeForKey:QTCaptureDeviceSuspendedAttribute] boolValue])
      continue;
    DeviceNameAndTransportType* nameAndTransportType =
        [[[DeviceNameAndTransportType alloc]
             initWithName:[device localizedDisplayName]
            transportType:media::kIOAudioDeviceTransportTypeUnknown]
            autorelease];
    [deviceNames setObject:nameAndTransportType forKey:[device uniqueID]];
  }
}

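// Returns the device map built by +getDeviceNames:, bouncing the enumeration
// to the main thread.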
+ (NSDictionary*)deviceNames {
  NSMutableDictionary* deviceNames =
      [[[NSMutableDictionary alloc] init] autorelease];

  // TODO(shess): Post to the main thread to see if that helps
  // http://crbug.com/139164
  [self performSelectorOnMainThread:@selector(getDeviceNames:)
                         withObject:deviceNames
                      waitUntilDone:YES];
  return deviceNames;
}

#pragma mark Public methods

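// Initializes the capture delegate with the C++ object that will receive
// captured frames, errors and log messages.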
- (id)initWithFrameReceiver:(media::VideoCaptureDeviceMac*)frameReceiver {
  self = [super init];
  if (self) {
    frameReceiver_ = frameReceiver;
    lock_ = [[NSLock alloc] init];
  }
  return self;
}

- (void)dealloc {
  [captureSession_ release];
  [captureDeviceInput_ release];
  [super dealloc];
}

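// Swaps the frame receiver under |lock_|, since frames may be delivered
// concurrently on a QTKit callback thread.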
- (void)setFrameReceiver:(media::VideoCaptureDeviceMac*)frameReceiver {
  [lock_ lock];
  frameReceiver_ = frameReceiver;
  [lock_ unlock];
}

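// Opens the device matching |deviceId| and attaches it to a new
// QTCaptureSession with a decompressed-video output. Passing nil tears down
// the current session instead. Returns NO and reports through
// -sendErrorString: on failure.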
- (BOOL)setCaptureDevice:(NSString*)deviceId {
  if (deviceId) {
    // Set the capture device.
    if (captureDeviceInput_) {
      DLOG(ERROR) << "Video capture device already set.";
      return NO;
    }

    // TODO(mcasas): Consider using [QTCaptureDevice deviceWithUniqueID]
    // instead of explicitly forcing reenumeration of devices.
    NSArray* captureDevices =
        [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo];
    NSArray* captureDevicesNames = [captureDevices valueForKey:@"uniqueID"];
    NSUInteger index = [captureDevicesNames indexOfObject:deviceId];
    if (index == NSNotFound) {
      [self sendErrorString:
                [NSString stringWithUTF8String:
                              "Video capture device not found."]];
      return NO;
    }

    QTCaptureDevice* device = [captureDevices objectAtIndex:index];
    if ([[device attributeForKey:QTCaptureDeviceSuspendedAttribute]
            boolValue]) {
      [self sendErrorString:
                [NSString stringWithUTF8String:
                              "Cannot open suspended video capture device."]];
      return NO;
    }

    NSError* error;
    if (![device open:&error]) {
      [self sendErrorString:
                [NSString stringWithFormat:
                              @"Could not open video capture device (%@): %@",
                              [error localizedDescription],
                              [error localizedFailureReason]]];
      return NO;
    }

    captureDeviceInput_ = [[QTCaptureDeviceInput alloc] initWithDevice:device];
    captureSession_ = [[QTCaptureSession alloc] init];

    QTCaptureDecompressedVideoOutput* captureDecompressedOutput =
        [[[QTCaptureDecompressedVideoOutput alloc] init] autorelease];
    [captureDecompressedOutput setDelegate:self];
    [captureDecompressedOutput setAutomaticallyDropsLateVideoFrames:YES];
    if (![captureSession_ addOutput:captureDecompressedOutput error:&error]) {
      [self sendErrorString:
                [NSString stringWithFormat:
                              @"Could not connect video capture output (%@): %@",
                              [error localizedDescription],
                              [error localizedFailureReason]]];
      return NO;
    }

    // This key can be used to check if video capture code was related to a
    // particular crash.
    base::debug::SetCrashKeyValue("VideoCaptureDeviceQTKit", "OpenedDevice");

    // Set the video pixel format to 2VUY (a.k.a. UYVY, packed 4:2:2).
    NSDictionary* captureDictionary = [NSDictionary
        dictionaryWithObject:
            [NSNumber numberWithUnsignedInt:kCVPixelFormatType_422YpCbCr8]
                      forKey:(id)kCVPixelBufferPixelFormatTypeKey];
    [captureDecompressedOutput setPixelBufferAttributes:captureDictionary];

    return YES;
  } else {
    // Remove the previously set capture device.
    if (!captureDeviceInput_) {
      // Being here means stopping a device that never started OK in the first
      // place; log it.
      [self sendLogString:[NSString
          stringWithUTF8String:"No video capture device set, on removal."]];
      return YES;
    }
    // Tear down input and output, stop the capture and deregister observers.
    [self stopCapture];
    [captureSession_ release];
    captureSession_ = nil;
    [captureDeviceInput_ release];
    captureDeviceInput_ = nil;
    return YES;
  }
}

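// Configures the requested resolution and frame rate on the session's single
// video output. A capture device must have been set beforehand.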
- (BOOL)setCaptureHeight:(int)height
                   width:(int)width
               frameRate:(float)frameRate {
  if (!captureDeviceInput_) {
    [self sendErrorString:
              [NSString stringWithUTF8String:"No video capture device set."]];
    return NO;
  }
  if ([[captureSession_ outputs] count] != 1) {
    [self sendErrorString:
              [NSString stringWithUTF8String:
                            "Video capture capabilities already set."]];
    return NO;
  }
  if (frameRate <= 0.0f) {
    [self sendErrorString:[NSString stringWithUTF8String:"Wrong frame rate."]];
    return NO;
  }

  frameRate_ = frameRate;

  QTCaptureDecompressedVideoOutput* output =
      [[captureSession_ outputs] objectAtIndex:0];

  // Set up desired output properties. The old capture dictionary is used to
  // retrieve the initial pixel format, which must be maintained.
  NSDictionary* videoSettingsDictionary = @{
    (id)kCVPixelBufferWidthKey : @(width),
    (id)kCVPixelBufferHeightKey : @(height),
    (id)kCVPixelBufferPixelFormatTypeKey : [[output pixelBufferAttributes]
        valueForKey:(id)kCVPixelBufferPixelFormatTypeKey]
  };
  [output setPixelBufferAttributes:videoSettingsDictionary];

  [output setMinimumVideoFrameInterval:(NSTimeInterval)1 / frameRate];
  return YES;
}

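// Adds the device input to the session, registers for runtime-error
// notifications and starts capturing. Fails if capture properties were never
// configured.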
- (BOOL)startCapture {
  if ([[captureSession_ outputs] count] == 0) {
    // Capture properties not set.
    [self sendErrorString:
              [NSString stringWithUTF8String:
                            "Video capture device not initialized."]];
    return NO;
  }
  if ([[captureSession_ inputs] count] == 0) {
    NSError* error;
    if (![captureSession_ addInput:captureDeviceInput_ error:&error]) {
      [self sendErrorString:
                [NSString stringWithFormat:
                              @"Could not connect video capture device (%@): %@",
                              [error localizedDescription],
                              [error localizedFailureReason]]];
      return NO;
    }
    NSNotificationCenter* notificationCenter =
        [NSNotificationCenter defaultCenter];
    [notificationCenter addObserver:self
                           selector:@selector(handleNotification:)
                               name:QTCaptureSessionRuntimeErrorNotification
                             object:captureSession_];
    [captureSession_ startRunning];
  }
  return YES;
}

- (void)stopCapture {
  // QTKit achieves thread safety and asynchronous execution by posting
  // messages to the main thread, e.g. -addOutput:. Both -removeOutput: and
  // -removeInput: post a message to the main thread while holding a lock that
  // the notification handler might need. To avoid a deadlock, we perform those
  // tasks on the main thread. See bugs http://crbug.com/152757 and
  // http://crbug.com/399792.
  [self performSelectorOnMainThread:@selector(stopCaptureOnUIThread:)
                         withObject:nil
                      waitUntilDone:YES];

  [[NSNotificationCenter defaultCenter] removeObserver:self];
}

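// Performs the actual session teardown. Must run on the main thread; see the
// deadlock note in -stopCapture above.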
- (void)stopCaptureOnUIThread:(id)dummy {
  if ([[captureSession_ inputs] count] > 0) {
    DCHECK_EQ([[captureSession_ inputs] count], 1u);
    [captureSession_ removeInput:captureDeviceInput_];
    [captureSession_ stopRunning];
  }

  if ([[captureSession_ outputs] count] > 0) {
    DCHECK_EQ([[captureSession_ outputs] count], 1u);
    id output = [[captureSession_ outputs] objectAtIndex:0];
    [output setDelegate:nil];
    [captureSession_ removeOutput:output];
  }
}

// |captureOutput| is called by the capture device to deliver a new frame.
- (void)captureOutput:(QTCaptureOutput*)captureOutput
    didOutputVideoFrame:(CVImageBufferRef)videoFrame
       withSampleBuffer:(QTSampleBuffer*)sampleBuffer
         fromConnection:(QTCaptureConnection*)connection {
  [lock_ lock];
  if (!frameReceiver_) {
    [lock_ unlock];
    return;
  }

  // Lock the frame and calculate frame size.
  const int kLockFlags = 0;
  if (CVPixelBufferLockBaseAddress(videoFrame, kLockFlags) ==
      kCVReturnSuccess) {
    void* baseAddress = CVPixelBufferGetBaseAddress(videoFrame);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(videoFrame);
    size_t frameWidth = CVPixelBufferGetWidth(videoFrame);
    size_t frameHeight = CVPixelBufferGetHeight(videoFrame);
    size_t frameSize = bytesPerRow * frameHeight;

    // TODO(shess): bytesPerRow may not correspond to frameWidth_*2,
    // but VideoCaptureController::OnIncomingCapturedData() requires
    // it to do so. Plumbing things through is intrusive; for now
    // just deliver an adjusted buffer.
    // TODO(nick): This workaround could probably be eliminated by using
    // VideoCaptureController::OnIncomingCapturedVideoFrame, which supports
    // pitches.
    UInt8* addressToPass = static_cast<UInt8*>(baseAddress);
    // UYVY is 2 bytes per pixel.
    size_t expectedBytesPerRow = frameWidth * 2;
    if (bytesPerRow > expectedBytesPerRow) {
      // TODO(shess): frameHeight and frameHeight_ are not the same,
      // try to do what the surrounding code seems to assume.
      // Ironically, captureCapability and frameSize are ignored
      // anyhow.
      adjustedFrame_.resize(expectedBytesPerRow * frameHeight);
      // std::vector is contiguous according to the standard.
      UInt8* adjustedAddress = &adjustedFrame_[0];

      // Copy each row, dropping the padding bytes at the end of every source
      // row.
      for (size_t y = 0; y < frameHeight; ++y) {
        memcpy(adjustedAddress + y * expectedBytesPerRow,
               addressToPass + y * bytesPerRow, expectedBytesPerRow);
      }

      addressToPass = adjustedAddress;
      frameSize = frameHeight * expectedBytesPerRow;
    }

    media::VideoCaptureFormat captureFormat(gfx::Size(frameWidth, frameHeight),
                                            frameRate_,
                                            media::PIXEL_FORMAT_UYVY);

    // The aspect ratio dictionary is often missing, in which case we report
    // a pixel aspect ratio of 0:0.
    int aspectNumerator = 0, aspectDenominator = 0;
    CFDictionaryRef aspectRatioDict = (CFDictionaryRef)CVBufferGetAttachment(
        videoFrame, kCVImageBufferPixelAspectRatioKey, NULL);
    if (aspectRatioDict) {
      CFNumberRef aspectNumeratorRef = (CFNumberRef)CFDictionaryGetValue(
          aspectRatioDict, kCVImageBufferPixelAspectRatioHorizontalSpacingKey);
      CFNumberRef aspectDenominatorRef = (CFNumberRef)CFDictionaryGetValue(
          aspectRatioDict, kCVImageBufferPixelAspectRatioVerticalSpacingKey);
      DCHECK(aspectNumeratorRef && aspectDenominatorRef)
          << "Aspect Ratio dictionary missing its entries.";
      CFNumberGetValue(aspectNumeratorRef, kCFNumberIntType, &aspectNumerator);
      CFNumberGetValue(aspectDenominatorRef, kCFNumberIntType,
                       &aspectDenominator);
    }

    // Deliver the captured video frame.
    frameReceiver_->ReceiveFrame(addressToPass, frameSize, captureFormat,
                                 aspectNumerator, aspectDenominator);

    CVPixelBufferUnlockBaseAddress(videoFrame, kLockFlags);
  }
  [lock_ unlock];
}

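// Handles QTCaptureSessionRuntimeErrorNotification by forwarding the error
// description to the frame receiver.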
- (void)handleNotification:(NSNotification*)errorNotification {
  NSError* error = (NSError*)[[errorNotification userInfo]
      objectForKey:QTCaptureSessionErrorKey];
  [self sendErrorString:[NSString
                            stringWithFormat:@"%@: %@",
                                             [error localizedDescription],
                                             [error localizedFailureReason]]];
}

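// Logs |error| and, if a frame receiver is still attached, reports it under
// |lock_|.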
- (void)sendErrorString:(NSString*)error {
  DLOG(ERROR) << [error UTF8String];
  [lock_ lock];
  if (frameReceiver_)
    frameReceiver_->ReceiveError([error UTF8String]);
  [lock_ unlock];
}

- (void)sendLogString:(NSString*)message {
  DVLOG(1) << [message UTF8String];
  [lock_ lock];
  if (frameReceiver_)
    frameReceiver_->LogMessage([message UTF8String]);
  [lock_ unlock];
}

@end