// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#import "media/capture/video/mac/video_capture_device_qtkit_mac.h"

#import <QTKit/QTKit.h>

#include "base/debug/crash_logging.h"
#include "base/logging.h"
#include "media/base/video_capture_types.h"
#include "media/capture/video/mac/video_capture_device_mac.h"
#include "media/capture/video/video_capture_device.h"
#include "ui/gfx/geometry/size.h"

@implementation VideoCaptureDeviceQTKit

#pragma mark Class methods

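// Fills |deviceNames| with an entry for every non-suspended video capture
// device that QTKit reports, keyed by the device's unique ID.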
+ (void)getDeviceNames:(NSMutableDictionary*)deviceNames {
  // Third-party drivers often throw exceptions. The following catches any
  // exceptions and continues in an orderly fashion with no devices detected.
  NSArray* captureDevices = nil;
  @try {
    captureDevices =
        [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo];
  } @catch (id exception) {
  }

  for (QTCaptureDevice* device in captureDevices) {
    if ([[device attributeForKey:QTCaptureDeviceSuspendedAttribute] boolValue])
      continue;
    DeviceNameAndTransportType* nameAndTransportType =
        [[[DeviceNameAndTransportType alloc]
             initWithName:[device localizedDisplayName]
            transportType:media::kIOAudioDeviceTransportTypeUnknown]
            autorelease];
    [deviceNames setObject:nameAndTransportType forKey:[device uniqueID]];
  }
}

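// Returns a dictionary mapping device unique IDs to DeviceNameAndTransportType
// objects. The enumeration itself runs on the main thread via
// +getDeviceNames:.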
+ (NSDictionary*)deviceNames {
  NSMutableDictionary* deviceNames =
      [[[NSMutableDictionary alloc] init] autorelease];

  // TODO(shess): Post to the main thread to see if that helps
  // http://crbug.com/139164
  [self performSelectorOnMainThread:@selector(getDeviceNames:)
                         withObject:deviceNames
                      waitUntilDone:YES];
  return deviceNames;
}

#pragma mark Public methods

- (id)initWithFrameReceiver:(media::VideoCaptureDeviceMac*)frameReceiver {
  self = [super init];
  if (self) {
    frameReceiver_ = frameReceiver;
    lock_ = [[NSLock alloc] init];
  }
  return self;
}

- (void)dealloc {
  [captureSession_ release];
  [captureDeviceInput_ release];
  [super dealloc];
}

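// Swaps in a new frame receiver under |lock_| so that concurrent frame
// delivery and error callbacks never observe a stale pointer.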
- (void)setFrameReceiver:(media::VideoCaptureDeviceMac*)frameReceiver {
  [lock_ lock];
  frameReceiver_ = frameReceiver;
  [lock_ unlock];
}

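// Opens the device whose unique ID is |deviceId| and attaches it to a newly
// created capture session; passing nil instead tears down the current device
// and session.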
- (BOOL)setCaptureDevice:(NSString*)deviceId {
  if (deviceId) {
    // Set the capture device.
    if (captureDeviceInput_) {
      DLOG(ERROR) << "Video capture device already set.";
      return NO;
    }

    // TODO(mcasas): Consider using [QTCaptureDevice deviceWithUniqueID] instead
    // of explicitly forcing reenumeration of devices.
    NSArray* captureDevices =
        [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo];
    NSArray* captureDevicesNames = [captureDevices valueForKey:@"uniqueID"];
    NSUInteger index = [captureDevicesNames indexOfObject:deviceId];
    if (index == NSNotFound) {
      [self sendErrorString:[NSString stringWithUTF8String:
                                          "Video capture device not found."]];
      return NO;
    }
    QTCaptureDevice* device = [captureDevices objectAtIndex:index];
    if ([[device
            attributeForKey:QTCaptureDeviceSuspendedAttribute] boolValue]) {
      [self sendErrorString:
                [NSString stringWithUTF8String:
                              "Cannot open suspended video capture device."]];
      return NO;
    }
    NSError* error;
    if (![device open:&error]) {
      [self sendErrorString:
                [NSString stringWithFormat:
                              @"Could not open video capture device (%@): %@",
                              [error localizedDescription],
                              [error localizedFailureReason]]];
      return NO;
    }
    captureDeviceInput_ = [[QTCaptureDeviceInput alloc] initWithDevice:device];
    captureSession_ = [[QTCaptureSession alloc] init];

    QTCaptureDecompressedVideoOutput* captureDecompressedOutput =
        [[[QTCaptureDecompressedVideoOutput alloc] init] autorelease];
    [captureDecompressedOutput setDelegate:self];
    [captureDecompressedOutput setAutomaticallyDropsLateVideoFrames:YES];
    if (![captureSession_ addOutput:captureDecompressedOutput error:&error]) {
      [self
          sendErrorString:
              [NSString stringWithFormat:
                            @"Could not connect video capture output (%@): %@",
                            [error localizedDescription],
                            [error localizedFailureReason]]];
      return NO;
    }

    // This key can be used to check if video capture code was related to a
    // particular crash.
    base::debug::SetCrashKeyValue("VideoCaptureDeviceQTKit", "OpenedDevice");

    // Set the video pixel format to 2VUY (a.k.a UYVY, packed 4:2:2).
    NSDictionary* captureDictionary = [NSDictionary
        dictionaryWithObject:
            [NSNumber numberWithUnsignedInt:kCVPixelFormatType_422YpCbCr8]
                      forKey:(id)kCVPixelBufferPixelFormatTypeKey];
    [captureDecompressedOutput setPixelBufferAttributes:captureDictionary];

    return YES;
  } else {
    // Remove the previously set capture device.
    if (!captureDeviceInput_) {
      // Reaching this point means we are stopping a device that never started
      // correctly in the first place; log it.
      [self sendLogString:[NSString
                              stringWithUTF8String:
                                  "No video capture device set, on removal."]];
      return YES;
    }
    // Tear down input and output, stop the capture and deregister observers.
    [self stopCapture];
    [captureSession_ release];
    captureSession_ = nil;
    [captureDeviceInput_ release];
    captureDeviceInput_ = nil;
    return YES;
  }
}

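// Applies the requested dimensions and frame rate to the session's single
// decompressed video output, preserving the pixel format chosen in
// -setCaptureDevice:.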
- (BOOL)setCaptureHeight:(int)height
                   width:(int)width
               frameRate:(float)frameRate {
  if (!captureDeviceInput_) {
    [self sendErrorString:
              [NSString stringWithUTF8String:"No video capture device set."]];
    return NO;
  }
  if ([[captureSession_ outputs] count] != 1) {
    [self sendErrorString:[NSString
                              stringWithUTF8String:
                                  "Video capture capabilities already set."]];
    return NO;
  }
  if (frameRate <= 0.0f) {
    [self sendErrorString:[NSString stringWithUTF8String:"Wrong frame rate."]];
    return NO;
  }

  frameRate_ = frameRate;

  QTCaptureDecompressedVideoOutput* output =
      [[captureSession_ outputs] objectAtIndex:0];

  // Set up desired output properties. The old capture dictionary is used to
  // retrieve the initial pixel format, which must be maintained.
  NSDictionary* videoSettingsDictionary = @{
    (id)kCVPixelBufferWidthKey : @(width),
    (id)kCVPixelBufferHeightKey : @(height),
    (id)kCVPixelBufferPixelFormatTypeKey : [[output pixelBufferAttributes]
        valueForKey:(id)kCVPixelBufferPixelFormatTypeKey]
  };
  [output setPixelBufferAttributes:videoSettingsDictionary];

  [output setMinimumVideoFrameInterval:(NSTimeInterval)1 / frameRate];
  return YES;
}

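// Adds the device input to the session if it is not already attached,
// registers for runtime-error notifications and starts the session running.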
- (BOOL)startCapture {
  if ([[captureSession_ outputs] count] == 0) {
    // Capture properties not set.
    [self
        sendErrorString:[NSString stringWithUTF8String:
                                      "Video capture device not initialized."]];
    return NO;
  }
  if ([[captureSession_ inputs] count] == 0) {
    NSError* error;
    if (![captureSession_ addInput:captureDeviceInput_ error:&error]) {
      [self
          sendErrorString:
              [NSString stringWithFormat:
                            @"Could not connect video capture device (%@): %@",
                            [error localizedDescription],
                            [error localizedFailureReason]]];
      return NO;
    }
    NSNotificationCenter* notificationCenter =
        [NSNotificationCenter defaultCenter];
    [notificationCenter addObserver:self
                           selector:@selector(handleNotification:)
                               name:QTCaptureSessionRuntimeErrorNotification
                             object:captureSession_];
    [captureSession_ startRunning];
  }
  return YES;
}

- (void)stopCapture {
  // QTKit achieves thread safety and asynchronous execution by posting messages
  // to the main thread, e.g. -addOutput:. Both -removeOutput: and -removeInput:
  // post a message to the main thread while holding a lock that the
  // notification handler might need. To avoid a deadlock, we perform those
  // tasks on the main thread. See bugs http://crbug.com/152757 and
  // http://crbug.com/399792.
  [self performSelectorOnMainThread:@selector(stopCaptureOnUIThread:)
                         withObject:nil
                      waitUntilDone:YES];
  [[NSNotificationCenter defaultCenter] removeObserver:self];
}

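// Runs on the main thread (dispatched from -stopCapture). Detaches the input
// and output from the session and stops it.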
- (void)stopCaptureOnUIThread:(id)dummy {
  if ([[captureSession_ inputs] count] > 0) {
    DCHECK_EQ([[captureSession_ inputs] count], 1u);
    [captureSession_ removeInput:captureDeviceInput_];
    [captureSession_ stopRunning];
  }
  if ([[captureSession_ outputs] count] > 0) {
    DCHECK_EQ([[captureSession_ outputs] count], 1u);
    id output = [[captureSession_ outputs] objectAtIndex:0];
    [output setDelegate:nil];
    [captureSession_ removeOutput:output];
  }
}

// |captureOutput| is called by the capture device to deliver a new frame.
- (void)captureOutput:(QTCaptureOutput*)captureOutput
    didOutputVideoFrame:(CVImageBufferRef)videoFrame
       withSampleBuffer:(QTSampleBuffer*)sampleBuffer
         fromConnection:(QTCaptureConnection*)connection {
  [lock_ lock];
  if (!frameReceiver_) {
    [lock_ unlock];
    return;
  }

  // Lock the frame and calculate frame size.
  const int kLockFlags = 0;
  if (CVPixelBufferLockBaseAddress(videoFrame, kLockFlags) ==
      kCVReturnSuccess) {
    void* baseAddress = CVPixelBufferGetBaseAddress(videoFrame);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(videoFrame);
    size_t frameWidth = CVPixelBufferGetWidth(videoFrame);
    size_t frameHeight = CVPixelBufferGetHeight(videoFrame);
    size_t frameSize = bytesPerRow * frameHeight;

    // TODO(shess): bytesPerRow may not correspond to frameWidth_*2,
    // but VideoCaptureController::OnIncomingCapturedData() requires
    // it to do so.  Plumbing things through is intrusive, for now
    // just deliver an adjusted buffer.
    // TODO(nick): This workaround could probably be eliminated by using
    // VideoCaptureController::OnIncomingCapturedVideoFrame, which supports
    // pitches.
    UInt8* addressToPass = static_cast<UInt8*>(baseAddress);
    // UYVY is 2 bytes per pixel.
    size_t expectedBytesPerRow = frameWidth * 2;
    if (bytesPerRow > expectedBytesPerRow) {
      // TODO(shess): frameHeight and frameHeight_ are not the same,
      // try to do what the surrounding code seems to assume.
      // Ironically, captureCapability and frameSize are ignored
      // anyhow.
      adjustedFrame_.resize(expectedBytesPerRow * frameHeight);
      // std::vector is guaranteed to be contiguous by the standard.
      UInt8* adjustedAddress = &adjustedFrame_[0];

      // Copy only the visible portion of each row, dropping any row padding.
      for (size_t y = 0; y < frameHeight; ++y) {
        memcpy(adjustedAddress + y * expectedBytesPerRow,
               addressToPass + y * bytesPerRow, expectedBytesPerRow);
      }

      addressToPass = adjustedAddress;
      frameSize = frameHeight * expectedBytesPerRow;
    }

    media::VideoCaptureFormat captureFormat(
        gfx::Size(frameWidth, frameHeight), frameRate_,
        media::PIXEL_FORMAT_UYVY);

    // The aspect ratio dictionary is often missing, in which case we report
    // a pixel aspect ratio of 0:0.
    int aspectNumerator = 0, aspectDenominator = 0;
    CFDictionaryRef aspectRatioDict = (CFDictionaryRef)CVBufferGetAttachment(
        videoFrame, kCVImageBufferPixelAspectRatioKey, NULL);
    if (aspectRatioDict) {
      CFNumberRef aspectNumeratorRef = (CFNumberRef)CFDictionaryGetValue(
          aspectRatioDict, kCVImageBufferPixelAspectRatioHorizontalSpacingKey);
      CFNumberRef aspectDenominatorRef = (CFNumberRef)CFDictionaryGetValue(
          aspectRatioDict, kCVImageBufferPixelAspectRatioVerticalSpacingKey);
      DCHECK(aspectNumeratorRef && aspectDenominatorRef)
          << "Aspect Ratio dictionary missing its entries.";
      CFNumberGetValue(aspectNumeratorRef, kCFNumberIntType, &aspectNumerator);
      CFNumberGetValue(aspectDenominatorRef, kCFNumberIntType,
                       &aspectDenominator);
    }

    // Deliver the captured video frame.
    frameReceiver_->ReceiveFrame(addressToPass, frameSize, captureFormat,
                                 aspectNumerator, aspectDenominator);

    CVPixelBufferUnlockBaseAddress(videoFrame, kLockFlags);
  }
  [lock_ unlock];
}

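// Invoked for QTCaptureSessionRuntimeErrorNotification (registered in
// -startCapture); forwards the session error to the frame receiver.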
- (void)handleNotification:(NSNotification*)errorNotification {
  NSError* error = (NSError*)
      [[errorNotification userInfo] objectForKey:QTCaptureSessionErrorKey];
  [self sendErrorString:
            [NSString stringWithFormat:@"%@: %@", [error localizedDescription],
                                       [error localizedFailureReason]]];
}

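// Logs |error| and, under |lock_|, reports it to the frame receiver if one is
// still attached.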
- (void)sendErrorString:(NSString*)error {
  DLOG(ERROR) << [error UTF8String];
  [lock_ lock];
  if (frameReceiver_)
    frameReceiver_->ReceiveError([error UTF8String]);
  [lock_ unlock];
}

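// Like -sendErrorString:, but forwards |message| to the frame receiver as an
// informational log message.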
- (void)sendLogString:(NSString*)message {
  DVLOG(1) << [message UTF8String];
  [lock_ lock];
  if (frameReceiver_)
    frameReceiver_->LogMessage([message UTF8String]);
  [lock_ unlock];
}

@end