[chromium-blink-merge.git] / media / video / capture / mac / video_capture_device_qtkit_mac.mm
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#import "media/video/capture/mac/video_capture_device_qtkit_mac.h"

#import <QTKit/QTKit.h>

#include "base/debug/crash_logging.h"
#include "base/logging.h"
#include "base/mac/scoped_nsexception_enabler.h"
#include "media/base/video_capture_types.h"
#include "media/video/capture/mac/video_capture_device_mac.h"
#include "media/video/capture/video_capture_device.h"
#include "ui/gfx/geometry/size.h"

@implementation VideoCaptureDeviceQTKit

#pragma mark Class methods

+ (void)getDeviceNames:(NSMutableDictionary*)deviceNames {
  // Third-party drivers often throw exceptions, which are fatal in
  // Chromium (see comments in scoped_nsexception_enabler.h).  The
  // following catches any exceptions and continues in an orderly
  // fashion with no devices detected.
  NSArray* captureDevices =
      base::mac::RunBlockIgnoringExceptions(^{
          return [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo];
      });
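
  // Skip devices that QTKit reports as suspended (typically the built-in
  // camera while a laptop lid is closed); they cannot deliver frames.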
  for (QTCaptureDevice* device in captureDevices) {
    if ([[device attributeForKey:QTCaptureDeviceSuspendedAttribute] boolValue])
      continue;
    DeviceNameAndTransportType* nameAndTransportType =
        [[[DeviceNameAndTransportType alloc]
             initWithName:[device localizedDisplayName]
            transportType:media::kIOAudioDeviceTransportTypeUnknown]
            autorelease];
    [deviceNames setObject:nameAndTransportType
                    forKey:[device uniqueID]];
  }
}

+ (NSDictionary*)deviceNames {
  NSMutableDictionary* deviceNames =
      [[[NSMutableDictionary alloc] init] autorelease];

  // TODO(shess): Post to the main thread to see if that helps
  // http://crbug.com/139164
  [self performSelectorOnMainThread:@selector(getDeviceNames:)
                         withObject:deviceNames
                      waitUntilDone:YES];
  return deviceNames;
}
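
// Illustrative sketch of how the enumeration result above can be consumed
// (the actual caller lives in the platform capture code, not in this file;
// the local names here are for illustration only):
//
//   NSDictionary* names = [VideoCaptureDeviceQTKit deviceNames];
//   for (NSString* uniqueId in names) {
//     DeviceNameAndTransportType* entry = [names objectForKey:uniqueId];
//     // |uniqueId| is the QTCaptureDevice uniqueID; |entry| wraps the
//     // localized display name and the (unknown) transport type.
//   }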

#pragma mark Public methods
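
// Typical call sequence for this class, sketched from the preconditions of
// the public methods below (illustrative only; the driving code is the
// media::VideoCaptureDeviceMac frame receiver passed in at init, and the
// parameter values are examples, not requirements):
//
//   VideoCaptureDeviceQTKit* capture =
//       [[VideoCaptureDeviceQTKit alloc] initWithFrameReceiver:frameReceiver];
//   [capture setCaptureDevice:deviceUniqueId];   // Open device, add output.
//   [capture setCaptureHeight:720 width:1280 frameRate:30.0f];
//   [capture startCapture];                      // Frames start flowing.
//   ...
//   [capture stopCapture];                       // Remove input and output.
//   [capture setCaptureDevice:nil];              // Release session and input.
//   [capture setFrameReceiver:nil];              // Stop frame delivery.
//   [capture release];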

- (id)initWithFrameReceiver:(media::VideoCaptureDeviceMac*)frameReceiver {
  self = [super init];
  if (self) {
    frameReceiver_ = frameReceiver;
    lock_ = [[NSLock alloc] init];
  }
  return self;
}

- (void)dealloc {
  [captureSession_ release];
  [captureDeviceInput_ release];
  // Balance the alloc in -initWithFrameReceiver: so the lock is not leaked.
  [lock_ release];
  [super dealloc];
}

- (void)setFrameReceiver:(media::VideoCaptureDeviceMac*)frameReceiver {
  [lock_ lock];
  frameReceiver_ = frameReceiver;
  [lock_ unlock];
}
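
// Note on locking: |frameReceiver_| is written in -initWithFrameReceiver: and
// in -setFrameReceiver: above, and is read on QTKit's frame-delivery thread in
// -captureOutput:didOutputVideoFrame:withSampleBuffer:fromConnection: as well
// as in -sendErrorString: and -sendLogString:. Every access after init is
// guarded by |lock_|.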

- (BOOL)setCaptureDevice:(NSString*)deviceId {
  if (deviceId) {
    // Set the capture device.
    if (captureDeviceInput_) {
      DLOG(ERROR) << "Video capture device already set.";
      return NO;
    }

    // TODO(mcasas): Consider using [QTCaptureDevice deviceWithUniqueID] instead
    // of explicitly forcing reenumeration of devices.
    NSArray *captureDevices =
        [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo];
    NSArray *captureDevicesNames =
        [captureDevices valueForKey:@"uniqueID"];
    NSUInteger index = [captureDevicesNames indexOfObject:deviceId];
    if (index == NSNotFound) {
      [self sendErrorString:[NSString
        stringWithUTF8String:"Video capture device not found."]];
      return NO;
    }
    QTCaptureDevice *device = [captureDevices objectAtIndex:index];
    if ([[device attributeForKey:QTCaptureDeviceSuspendedAttribute]
            boolValue]) {
      [self sendErrorString:[NSString
        stringWithUTF8String:"Cannot open suspended video capture device."]];
      return NO;
    }
    NSError *error;
    if (![device open:&error]) {
      [self sendErrorString:[NSString
          stringWithFormat:@"Could not open video capture device (%@): %@",
                           [error localizedDescription],
                           [error localizedFailureReason]]];
      return NO;
    }
    captureDeviceInput_ = [[QTCaptureDeviceInput alloc] initWithDevice:device];
    captureSession_ = [[QTCaptureSession alloc] init];

    QTCaptureDecompressedVideoOutput *captureDecompressedOutput =
        [[[QTCaptureDecompressedVideoOutput alloc] init] autorelease];
    [captureDecompressedOutput setDelegate:self];
    [captureDecompressedOutput setAutomaticallyDropsLateVideoFrames:YES];
    if (![captureSession_ addOutput:captureDecompressedOutput error:&error]) {
      [self sendErrorString:[NSString
          stringWithFormat:@"Could not connect video capture output (%@): %@",
                           [error localizedDescription],
                           [error localizedFailureReason]]];
      return NO;
    }

    // This key can be used to check if video capture code was related to a
    // particular crash.
    base::debug::SetCrashKeyValue("VideoCaptureDeviceQTKit", "OpenedDevice");

    // Set the video pixel format to 2VUY (a.k.a UYVY, packed 4:2:2).
    NSDictionary *captureDictionary = [NSDictionary
        dictionaryWithObject:
            [NSNumber numberWithUnsignedInt:kCVPixelFormatType_422YpCbCr8]
                      forKey:(id)kCVPixelBufferPixelFormatTypeKey];
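    // 2VUY/UYVY packs two bytes per pixel, which is what
    // -captureOutput:didOutputVideoFrame:withSampleBuffer:fromConnection:
    // below relies on when it computes expectedBytesPerRow = frameWidth * 2.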
    [captureDecompressedOutput setPixelBufferAttributes:captureDictionary];

    return YES;
  } else {
    // Remove the previously set capture device.
    if (!captureDeviceInput_) {
      // Getting here means we are stopping a device that never started
      // correctly in the first place; just log it.
      [self sendLogString:[NSString
          stringWithUTF8String:"No video capture device set, on removal."]];
      return YES;
    }
    // Tear down input and output, stop the capture and deregister observers.
    [self stopCapture];
    [captureSession_ release];
    captureSession_ = nil;
    [captureDeviceInput_ release];
    captureDeviceInput_ = nil;
    return YES;
  }
}

- (BOOL)setCaptureHeight:(int)height
                   width:(int)width
               frameRate:(float)frameRate {
  if (!captureDeviceInput_) {
    [self sendErrorString:[NSString
        stringWithUTF8String:"No video capture device set."]];
    return NO;
  }
  if ([[captureSession_ outputs] count] != 1) {
    [self sendErrorString:[NSString
        stringWithUTF8String:"Video capture capabilities already set."]];
    return NO;
  }
  if (frameRate <= 0.0f) {
    [self sendErrorString:[NSString stringWithUTF8String:"Wrong frame rate."]];
    return NO;
  }

  frameRate_ = frameRate;

  QTCaptureDecompressedVideoOutput *output =
      [[captureSession_ outputs] objectAtIndex:0];

  // Set up desired output properties. The old capture dictionary is used to
  // retrieve the initial pixel format, which must be maintained.
  NSDictionary* videoSettingsDictionary = @{
    (id)kCVPixelBufferWidthKey : @(width),
    (id)kCVPixelBufferHeightKey : @(height),
    (id)kCVPixelBufferPixelFormatTypeKey : [[output pixelBufferAttributes]
        valueForKey:(id)kCVPixelBufferPixelFormatTypeKey]
  };
  [output setPixelBufferAttributes:videoSettingsDictionary];
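
  // QTKit drops frames so that at most |frameRate| frames per second are
  // delivered: the minimum interval between consecutive frames is
  // 1 / frameRate seconds (roughly 33 ms at 30 fps).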
  [output setMinimumVideoFrameInterval:(NSTimeInterval)1/frameRate];
  return YES;
}

- (BOOL)startCapture {
  if ([[captureSession_ outputs] count] == 0) {
    // Capture properties not set.
    [self sendErrorString:[NSString
        stringWithUTF8String:"Video capture device not initialized."]];
    return NO;
  }
  if ([[captureSession_ inputs] count] == 0) {
    NSError *error;
    if (![captureSession_ addInput:captureDeviceInput_ error:&error]) {
      [self sendErrorString:[NSString
          stringWithFormat:@"Could not connect video capture device (%@): %@",
                           [error localizedDescription],
                           [error localizedFailureReason]]];
      return NO;
    }
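    // Runtime failures reported by QTKit after the session starts (e.g. the
    // device going away) arrive as QTCaptureSessionRuntimeErrorNotification
    // and are forwarded to the frame receiver by -handleNotification: below.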
    NSNotificationCenter * notificationCenter =
        [NSNotificationCenter defaultCenter];
    [notificationCenter addObserver:self
                           selector:@selector(handleNotification:)
                               name:QTCaptureSessionRuntimeErrorNotification
                             object:captureSession_];
    [captureSession_ startRunning];
  }
  return YES;
}

- (void)stopCapture {
  // QTKit achieves thread safety and asynchronous execution by posting messages
  // to the main thread, e.g. -addOutput:. Both -removeOutput: and -removeInput:
  // post a message to the main thread while holding a lock that the
  // notification handler might need. To avoid a deadlock, we perform those
  // tasks on the main thread. See bugs http://crbug.com/152757 and
  // http://crbug.com/399792.
  [self performSelectorOnMainThread:@selector(stopCaptureOnUIThread:)
                         withObject:nil
                      waitUntilDone:YES];
  [[NSNotificationCenter defaultCenter] removeObserver:self];
}

- (void)stopCaptureOnUIThread:(id)dummy {
  if ([[captureSession_ inputs] count] > 0) {
    DCHECK_EQ([[captureSession_ inputs] count], 1u);
    [captureSession_ removeInput:captureDeviceInput_];
    [captureSession_ stopRunning];
  }
  if ([[captureSession_ outputs] count] > 0) {
    DCHECK_EQ([[captureSession_ outputs] count], 1u);
    id output = [[captureSession_ outputs] objectAtIndex:0];
    [output setDelegate:nil];
    [captureSession_ removeOutput:output];
  }
}

// |captureOutput| is called by the capture device to deliver a new frame.
- (void)captureOutput:(QTCaptureOutput*)captureOutput
  didOutputVideoFrame:(CVImageBufferRef)videoFrame
     withSampleBuffer:(QTSampleBuffer*)sampleBuffer
       fromConnection:(QTCaptureConnection*)connection {
  [lock_ lock];
  if (!frameReceiver_) {
    [lock_ unlock];
    return;
  }

  // Lock the frame and calculate frame size.
  const int kLockFlags = 0;
  if (CVPixelBufferLockBaseAddress(videoFrame, kLockFlags)
      == kCVReturnSuccess) {
    void *baseAddress = CVPixelBufferGetBaseAddress(videoFrame);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(videoFrame);
    size_t frameWidth = CVPixelBufferGetWidth(videoFrame);
    size_t frameHeight = CVPixelBufferGetHeight(videoFrame);
    size_t frameSize = bytesPerRow * frameHeight;

    // TODO(shess): bytesPerRow may not correspond to frameWidth_*2,
    // but VideoCaptureController::OnIncomingCapturedData() requires
    // it to do so.  Plumbing things through is intrusive, for now
    // just deliver an adjusted buffer.
    // TODO(nick): This workaround could probably be eliminated by using
    // VideoCaptureController::OnIncomingCapturedVideoFrame, which supports
    // pitches.
    UInt8* addressToPass = static_cast<UInt8*>(baseAddress);
    // UYVY is 2 bytes per pixel.
    size_t expectedBytesPerRow = frameWidth * 2;
    if (bytesPerRow > expectedBytesPerRow) {
      // TODO(shess): frameHeight and frameHeight_ are not the same,
      // try to do what the surrounding code seems to assume.
      // Ironically, captureCapability and frameSize are ignored
      // anyhow.
      adjustedFrame_.resize(expectedBytesPerRow * frameHeight);
      // std::vector storage is contiguous per the C++ standard.
      UInt8* adjustedAddress = &adjustedFrame_[0];

      for (size_t y = 0; y < frameHeight; ++y) {
        memcpy(adjustedAddress + y * expectedBytesPerRow,
               addressToPass + y * bytesPerRow,
               expectedBytesPerRow);
      }

      addressToPass = adjustedAddress;
      frameSize = frameHeight * expectedBytesPerRow;
    }

    media::VideoCaptureFormat captureFormat(gfx::Size(frameWidth, frameHeight),
                                            frameRate_,
                                            media::PIXEL_FORMAT_UYVY);

    // The aspect ratio dictionary is often missing, in which case we report
    // a pixel aspect ratio of 0:0.
    int aspectNumerator = 0, aspectDenominator = 0;
    CFDictionaryRef aspectRatioDict = (CFDictionaryRef)CVBufferGetAttachment(
        videoFrame, kCVImageBufferPixelAspectRatioKey, NULL);
    if (aspectRatioDict) {
      CFNumberRef aspectNumeratorRef = (CFNumberRef)CFDictionaryGetValue(
          aspectRatioDict, kCVImageBufferPixelAspectRatioHorizontalSpacingKey);
      CFNumberRef aspectDenominatorRef = (CFNumberRef)CFDictionaryGetValue(
          aspectRatioDict, kCVImageBufferPixelAspectRatioVerticalSpacingKey);
      DCHECK(aspectNumeratorRef && aspectDenominatorRef) <<
          "Aspect Ratio dictionary missing its entries.";
      CFNumberGetValue(aspectNumeratorRef, kCFNumberIntType, &aspectNumerator);
      CFNumberGetValue(
          aspectDenominatorRef, kCFNumberIntType, &aspectDenominator);
    }

    // Deliver the captured video frame.
    frameReceiver_->ReceiveFrame(addressToPass, frameSize, captureFormat,
        aspectNumerator, aspectDenominator);

    CVPixelBufferUnlockBaseAddress(videoFrame, kLockFlags);
  }
  [lock_ unlock];
}

- (void)handleNotification:(NSNotification*)errorNotification {
  NSError * error = (NSError*)[[errorNotification userInfo]
      objectForKey:QTCaptureSessionErrorKey];
  [self sendErrorString:[NSString
      stringWithFormat:@"%@: %@",
                       [error localizedDescription],
                       [error localizedFailureReason]]];
}

- (void)sendErrorString:(NSString*)error {
  DLOG(ERROR) << [error UTF8String];
  [lock_ lock];
  if (frameReceiver_)
    frameReceiver_->ReceiveError([error UTF8String]);
  [lock_ unlock];
}

- (void)sendLogString:(NSString*)message {
  DVLOG(1) << [message UTF8String];
  [lock_ lock];
  if (frameReceiver_)
    frameReceiver_->LogMessage([message UTF8String]);
  [lock_ unlock];
}

@end