// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "media/video/capture/win/video_capture_device_win.h"

#include <algorithm>
#include <list>

#include "base/string_util.h"
#include "base/sys_string_conversions.h"
#include "base/win/scoped_variant.h"

using base::win::ScopedComPtr;
using base::win::ScopedVariant;
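
// The helpers in the anonymous namespace below wrap the DirectShow
// boilerplate used by this file: enumerating capture devices, locating pins
// on a filter, releasing AM_MEDIA_TYPE allocations, and ranking capture
// capabilities against a requested resolution.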

namespace {

// Finds and creates a DirectShow Video Capture filter matching the
// device_name.
HRESULT GetDeviceFilter(const media::VideoCaptureDevice::Name& device_name,
                        IBaseFilter** filter) {
  DCHECK(filter);

  ScopedComPtr<ICreateDevEnum> dev_enum;
  HRESULT hr = dev_enum.CreateInstance(CLSID_SystemDeviceEnum, NULL,
                                       CLSCTX_INPROC);
  if (FAILED(hr))
    return hr;

  ScopedComPtr<IEnumMoniker> enum_moniker;
  hr = dev_enum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
                                       enum_moniker.Receive(), 0);
  // CreateClassEnumerator returns S_FALSE on some Windows OS versions when no
  // camera exists. Therefore the FAILED macro can't be used.
  if (hr != S_OK)
    return NULL;

  ScopedComPtr<IMoniker> moniker;
  ScopedComPtr<IBaseFilter> capture_filter;
  DWORD fetched = 0;
  while (enum_moniker->Next(1, moniker.Receive(), &fetched) == S_OK) {
    ScopedComPtr<IPropertyBag> prop_bag;
    hr = moniker->BindToStorage(0, 0, IID_IPropertyBag, prop_bag.ReceiveVoid());
    if (FAILED(hr)) {
      moniker.Release();
      continue;
    }

    // Find the device via the device path, description or friendly name.
    static const wchar_t* kPropertyNames[] = {
      L"DevicePath", L"Description", L"FriendlyName"
    };
    ScopedVariant name;
    for (size_t i = 0;
         i < arraysize(kPropertyNames) && name.type() != VT_BSTR; ++i) {
      prop_bag->Read(kPropertyNames[i], name.Receive(), 0);
    }
    if (name.type() == VT_BSTR) {
      std::string device_path(base::SysWideToUTF8(V_BSTR(&name)));
      if (device_path.compare(device_name.unique_id) == 0) {
        // We have found the requested device.
        hr = moniker->BindToObject(0, 0, IID_IBaseFilter,
                                   capture_filter.ReceiveVoid());
        DVPLOG_IF(2, FAILED(hr)) << "Failed to bind camera filter.";
        break;
      }
    }
    moniker.Release();
  }

  *filter = capture_filter.Detach();
  if (!*filter && SUCCEEDED(hr))
    hr = HRESULT_FROM_WIN32(ERROR_NOT_FOUND);

  return hr;
}

// Check if a Pin matches a category.
bool PinMatchesCategory(IPin* pin, REFGUID category) {
  DCHECK(pin);
  bool found = false;
  ScopedComPtr<IKsPropertySet> ks_property;
  HRESULT hr = ks_property.QueryFrom(pin);
  if (SUCCEEDED(hr)) {
    GUID pin_category;
    DWORD return_value;
    hr = ks_property->Get(AMPROPSETID_Pin, AMPROPERTY_PIN_CATEGORY, NULL, 0,
                          &pin_category, sizeof(pin_category), &return_value);
    if (SUCCEEDED(hr) && (return_value == sizeof(pin_category))) {
      found = (pin_category == category);
    }
  }
  return found;
}

// Finds an IPin on an IBaseFilter given the direction and category.
HRESULT GetPin(IBaseFilter* filter, PIN_DIRECTION pin_dir, REFGUID category,
               IPin** pin) {
  DCHECK(pin);
  ScopedComPtr<IEnumPins> pin_enum;
  HRESULT hr = filter->EnumPins(pin_enum.Receive());
  if (pin_enum == NULL)
    return hr;

  // Get first unconnected pin.
  hr = pin_enum->Reset();  // Set to first pin.
  while ((hr = pin_enum->Next(1, pin, NULL)) == S_OK) {
    PIN_DIRECTION this_pin_dir = static_cast<PIN_DIRECTION>(-1);
    hr = (*pin)->QueryDirection(&this_pin_dir);
    if (pin_dir == this_pin_dir) {
      if (category == GUID_NULL || PinMatchesCategory(*pin, category))
        return S_OK;
    }
    (*pin)->Release();
  }

  return E_FAIL;
}

// Release the format block for a media type.
// http://msdn.microsoft.com/en-us/library/dd375432(VS.85).aspx
void FreeMediaType(AM_MEDIA_TYPE* mt) {
  if (mt->cbFormat != 0) {
    CoTaskMemFree(mt->pbFormat);
    mt->cbFormat = 0;
    mt->pbFormat = NULL;
  }
  if (mt->pUnk != NULL) {
    NOTREACHED();
    // pUnk should not be used.
    mt->pUnk->Release();
    mt->pUnk = NULL;
  }
}

// Delete a media type structure that was allocated on the heap.
// http://msdn.microsoft.com/en-us/library/dd375432(VS.85).aspx
void DeleteMediaType(AM_MEDIA_TYPE* mt) {
  if (mt != NULL) {
    FreeMediaType(mt);
    CoTaskMemFree(mt);
  }
}

// Helper structure used for comparing video capture capabilities.
struct ResolutionDiff {
  int capability_index;
  int diff_height;
  int diff_width;
  int diff_frame_rate;
  media::VideoCaptureDevice::Format color;
};

bool CompareHeight(const ResolutionDiff& item1, const ResolutionDiff& item2) {
  return abs(item1.diff_height) < abs(item2.diff_height);
}

bool CompareWidth(const ResolutionDiff& item1, const ResolutionDiff& item2) {
  return abs(item1.diff_width) < abs(item2.diff_width);
}

bool CompareFrameRate(const ResolutionDiff& item1,
                      const ResolutionDiff& item2) {
  return abs(item1.diff_frame_rate) < abs(item2.diff_frame_rate);
}

bool CompareColor(const ResolutionDiff& item1, const ResolutionDiff& item2) {
  return (item1.color < item2.color);
}

}  // namespace

namespace media {

// Name of a fake DirectShow filter that exists on computers with
// GTalk installed.
static const char kGoogleCameraAdapter[] = "google camera adapter";

// Gets the names of all video capture devices connected to this computer.
void VideoCaptureDevice::GetDeviceNames(Names* device_names) {
  DCHECK(device_names);

  base::win::ScopedCOMInitializer coinit;
  ScopedComPtr<ICreateDevEnum> dev_enum;
  HRESULT hr = dev_enum.CreateInstance(CLSID_SystemDeviceEnum, NULL,
                                       CLSCTX_INPROC);
  if (FAILED(hr))
    return;

  ScopedComPtr<IEnumMoniker> enum_moniker;
  hr = dev_enum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
                                       enum_moniker.Receive(), 0);
  // CreateClassEnumerator returns S_FALSE on some Windows OS versions when no
  // camera exists. Therefore the FAILED macro can't be used.
  if (hr != S_OK)
    return;

  device_names->clear();
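
  // When the driver exposes a DevicePath property it is preferred as
  // |unique_id|, since it identifies the device more reliably than the
  // display name; otherwise the Description/FriendlyName read below doubles
  // as the id.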

  // Enumerate all video capture devices.
  ScopedComPtr<IMoniker> moniker;
  int index = 0;
  while (enum_moniker->Next(1, moniker.Receive(), NULL) == S_OK) {
    Name device;
    ScopedComPtr<IPropertyBag> prop_bag;
    hr = moniker->BindToStorage(0, 0, IID_IPropertyBag, prop_bag.ReceiveVoid());
    if (FAILED(hr)) {
      moniker.Release();
      continue;
    }

    // Find the description or friendly name.
    ScopedVariant name;
    hr = prop_bag->Read(L"Description", name.Receive(), 0);
    if (FAILED(hr))
      hr = prop_bag->Read(L"FriendlyName", name.Receive(), 0);

    if (SUCCEEDED(hr) && name.type() == VT_BSTR) {
      // Ignore all VFW drivers and the special Google Camera Adapter.
      // Google Camera Adapter is not a real DirectShow camera device.
      // VFW drivers are legacy Video for Windows drivers that cannot be used.
      const wchar_t* str_ptr = V_BSTR(&name);
      const int name_length = arraysize(kGoogleCameraAdapter) - 1;

      if ((wcsstr(str_ptr, L"(VFW)") == NULL) &&
          lstrlenW(str_ptr) < name_length ||
          (!(LowerCaseEqualsASCII(str_ptr, str_ptr + name_length,
                                  kGoogleCameraAdapter)))) {
        device.device_name = base::SysWideToUTF8(str_ptr);
        name.Reset();
        hr = prop_bag->Read(L"DevicePath", name.Receive(), 0);
        if (FAILED(hr)) {
          device.unique_id = device.device_name;
        } else if (name.type() == VT_BSTR) {
          device.unique_id = base::SysWideToUTF8(V_BSTR(&name));
        }

        device_names->push_back(device);
      }
    }
    moniker.Release();
  }
}

VideoCaptureDevice* VideoCaptureDevice::Create(const Name& device_name) {
  VideoCaptureDeviceWin* self = new VideoCaptureDeviceWin(device_name);
  if (self && self->Init())
    return self;

  delete self;
  return NULL;
}

VideoCaptureDeviceWin::VideoCaptureDeviceWin(const Name& device_name)
    : device_name_(device_name),
      state_(kIdle),
      observer_(NULL) {
}

VideoCaptureDeviceWin::~VideoCaptureDeviceWin() {
  if (media_control_)
    media_control_->Stop();

  if (graph_builder_) {
    if (sink_filter_) {
      graph_builder_->RemoveFilter(sink_filter_);
      sink_filter_ = NULL;
    }

    if (capture_filter_)
      graph_builder_->RemoveFilter(capture_filter_);

    if (mjpg_filter_)
      graph_builder_->RemoveFilter(mjpg_filter_);
  }
}
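
// Builds the DirectShow graph used for capturing: binds the capture filter
// for |device_name_|, looks up its capture output pin, creates the sink
// filter that will receive the samples, and adds both filters to a new
// filter graph before enumerating the formats the device supports.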
bool VideoCaptureDeviceWin::Init() {
  HRESULT hr = GetDeviceFilter(device_name_, capture_filter_.Receive());
  if (!capture_filter_) {
    DVLOG(2) << "Failed to create capture filter.";
    return false;
  }

  hr = GetPin(capture_filter_, PINDIR_OUTPUT, PIN_CATEGORY_CAPTURE,
              output_capture_pin_.Receive());
  if (!output_capture_pin_) {
    DVLOG(2) << "Failed to get capture output pin";
    return false;
  }

  // Create the sink filter used for receiving captured frames.
  sink_filter_ = new SinkFilter(this);
  if (sink_filter_ == NULL) {
    DVLOG(2) << "Failed to create sink filter";
    return false;
  }

  input_sink_pin_ = sink_filter_->GetPin(0);

  hr = graph_builder_.CreateInstance(CLSID_FilterGraph, NULL,
                                     CLSCTX_INPROC_SERVER);
  if (FAILED(hr)) {
    DVLOG(2) << "Failed to create graph builder.";
    return false;
  }

  hr = graph_builder_.QueryInterface(media_control_.Receive());
  if (FAILED(hr)) {
    DVLOG(2) << "Failed to create media control builder.";
    return false;
  }

  hr = graph_builder_->AddFilter(capture_filter_, NULL);
  if (FAILED(hr)) {
    DVLOG(2) << "Failed to add the capture device to the graph.";
    return false;
  }

  hr = graph_builder_->AddFilter(sink_filter_, NULL);
  if (FAILED(hr)) {
    DVLOG(2) << "Failed to add the sink filter to the graph.";
    return false;
  }

  return CreateCapabilityMap();
}

void VideoCaptureDeviceWin::Allocate(
    int width,
    int height,
    int frame_rate,
    VideoCaptureDevice::EventHandler* observer) {
  if (state_ != kIdle)
    return;
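
  // The remainder of Allocate() picks the capability closest to the request,
  // asks the capture pin for that format, optionally inserts an MJPEG decoder
  // between the camera and the sink, connects the pins and pauses the graph,
  // leaving Start() to simply run it.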

  observer_ = observer;
  // Get the camera capability that best matches the requested resolution.
  const int capability_index = GetBestMatchedCapability(width, height,
                                                        frame_rate);
  Capability capability = capabilities_[capability_index];

  // Reduce the frame rate if the requested frame rate is lower
  // than the capability.
  if (capability.frame_rate > frame_rate)
    capability.frame_rate = frame_rate;
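
  // DirectShow expresses frame timing as a REFERENCE_TIME in 100 ns units, so
  // a frame rate maps to an AvgTimePerFrame of kSecondsToReferenceTime / rate;
  // e.g. 30 fps corresponds to 10,000,000 / 30 = 333,333 (~33.3 ms per frame).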

  AM_MEDIA_TYPE* pmt = NULL;
  VIDEO_STREAM_CONFIG_CAPS caps;

  ScopedComPtr<IAMStreamConfig> stream_config;
  HRESULT hr = output_capture_pin_.QueryInterface(stream_config.Receive());
  if (FAILED(hr)) {
    SetErrorState("Can't get the Capture format settings");
    return;
  }

  // Get the Windows capability from the capture device.
  hr = stream_config->GetStreamCaps(capability_index, &pmt,
                                    reinterpret_cast<BYTE*>(&caps));
  if (SUCCEEDED(hr)) {
    if (pmt->formattype == FORMAT_VideoInfo) {
      VIDEOINFOHEADER* h = reinterpret_cast<VIDEOINFOHEADER*>(pmt->pbFormat);
      if (capability.frame_rate > 0)
        h->AvgTimePerFrame = kSecondsToReferenceTime / capability.frame_rate;
    }
    // Set the sink filter to request this capability.
    sink_filter_->SetRequestedMediaCapability(capability);
    // Order the capture device to use this capability.
    hr = stream_config->SetFormat(pmt);
  }

  if (FAILED(hr))
    SetErrorState("Failed to set capture device output format");

  if (capability.color == VideoCaptureDevice::kMJPEG && !mjpg_filter_.get()) {
    // Create MJPG filter if we need it.
    hr = mjpg_filter_.CreateInstance(CLSID_MjpegDec, NULL, CLSCTX_INPROC);

    if (SUCCEEDED(hr)) {
      GetPin(mjpg_filter_, PINDIR_INPUT, GUID_NULL, input_mjpg_pin_.Receive());
      GetPin(mjpg_filter_, PINDIR_OUTPUT, GUID_NULL,
             output_mjpg_pin_.Receive());
      hr = graph_builder_->AddFilter(mjpg_filter_, NULL);
    }

    if (FAILED(hr)) {
      mjpg_filter_.Release();
      input_mjpg_pin_.Release();
      output_mjpg_pin_.Release();
    }
  }
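
  // Two topologies are possible from here: for MJPEG the chain is
  // capture pin -> MJPEG decoder -> sink filter; otherwise the capture pin is
  // connected directly to the sink filter.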

  if (capability.color == VideoCaptureDevice::kMJPEG && mjpg_filter_.get()) {
    // Connect the camera to the MJPEG decoder.
    hr = graph_builder_->ConnectDirect(output_capture_pin_, input_mjpg_pin_,
                                       NULL);
    // Connect the MJPEG decoder to the sink filter.
    hr += graph_builder_->ConnectDirect(output_mjpg_pin_, input_sink_pin_,
                                        NULL);
  } else {
    hr = graph_builder_->ConnectDirect(output_capture_pin_, input_sink_pin_,
                                       NULL);
  }

  if (FAILED(hr)) {
    SetErrorState("Failed to connect the Capture graph.");
    return;
  }

  hr = media_control_->Pause();
  if (FAILED(hr)) {
    SetErrorState("Failed to Pause the Capture device. "
                  "Is it already occupied?");
    return;
  }

  // Get the capability back from the sink filter after the filters have been
  // connected.
  const Capability& used_capability = sink_filter_->ResultingCapability();
  observer_->OnFrameInfo(used_capability);

  state_ = kAllocated;
}

void VideoCaptureDeviceWin::Start() {
  if (state_ != kAllocated)
    return;

  HRESULT hr = media_control_->Run();
  if (FAILED(hr)) {
    SetErrorState("Failed to start the Capture device.");
    return;
  }

  state_ = kCapturing;
}

void VideoCaptureDeviceWin::Stop() {
  if (state_ != kCapturing)
    return;

  HRESULT hr = media_control_->Stop();
  if (FAILED(hr)) {
    SetErrorState("Failed to stop the capture graph.");
    return;
  }

  state_ = kAllocated;
}

void VideoCaptureDeviceWin::DeAllocate() {
  if (state_ == kIdle)
    return;
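
  // Stopping the graph and disconnecting the pins below returns the device to
  // kIdle, so Allocate() can be called again, possibly with another format.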

  HRESULT hr = media_control_->Stop();
  graph_builder_->Disconnect(output_capture_pin_);
  graph_builder_->Disconnect(input_sink_pin_);

  // If the MJPEG filter exists, disconnect it even if it has not been used.
  if (mjpg_filter_) {
    graph_builder_->Disconnect(input_mjpg_pin_);
    graph_builder_->Disconnect(output_mjpg_pin_);
  }

  if (FAILED(hr)) {
    SetErrorState("Failed to Stop the Capture device");
    return;
  }

  state_ = kIdle;
}

const VideoCaptureDevice::Name& VideoCaptureDeviceWin::device_name() {
  return device_name_;
}

// Implements SinkFilterObserver.
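// The sink filter calls this for every media sample it delivers; the raw
// buffer is forwarded to the observer together with a capture timestamp taken
// here.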
void VideoCaptureDeviceWin::FrameReceived(const uint8* buffer,
                                          int length) {
  observer_->OnIncomingCapturedFrame(buffer, length, base::Time::Now());
}

bool VideoCaptureDeviceWin::CreateCapabilityMap() {
  ScopedComPtr<IAMStreamConfig> stream_config;
  HRESULT hr = output_capture_pin_.QueryInterface(stream_config.Receive());
  if (FAILED(hr)) {
    DVLOG(2) << "Failed to get IAMStreamConfig interface from "
                "capture device";
    return false;
  }

  // Get interface used for getting the frame rate.
  ScopedComPtr<IAMVideoControl> video_control;
  hr = capture_filter_.QueryInterface(video_control.Receive());
  DVLOG_IF(2, FAILED(hr)) << "IAMVideoControl Interface NOT SUPPORTED";

  AM_MEDIA_TYPE* media_type = NULL;
  VIDEO_STREAM_CONFIG_CAPS caps;
  int count, size;

  hr = stream_config->GetNumberOfCapabilities(&count, &size);
  if (FAILED(hr)) {
    DVLOG(2) << "Failed to GetNumberOfCapabilities";
    return false;
  }
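
  // Each stream capability reported by the driver is translated below into a
  // Capability entry keyed by its index: width, height, a frame rate (taken
  // from IAMVideoControl when supported, otherwise derived from
  // AvgTimePerFrame) and the color format. The same index is later passed
  // back to GetStreamCaps() and SetFormat() in Allocate().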

  for (int i = 0; i < count; ++i) {
    hr = stream_config->GetStreamCaps(i, &media_type,
                                      reinterpret_cast<BYTE*>(&caps));
    if (FAILED(hr)) {
      DVLOG(2) << "Failed to GetStreamCaps";
      return false;
    }

    if (media_type->majortype == MEDIATYPE_Video &&
        media_type->formattype == FORMAT_VideoInfo) {
      Capability capability;
      REFERENCE_TIME time_per_frame = 0;

      VIDEOINFOHEADER* h =
          reinterpret_cast<VIDEOINFOHEADER*>(media_type->pbFormat);
      capability.width = h->bmiHeader.biWidth;
      capability.height = h->bmiHeader.biHeight;
      time_per_frame = h->AvgTimePerFrame;

      // Try to get the max frame rate from IAMVideoControl.
      if (video_control.get()) {
        LONGLONG* max_fps_ptr;
        LONG list_size;
        SIZE size;
        size.cx = capability.width;
        size.cy = capability.height;

        // GetFrameRateList does not always return the maximum frame rate
        // first (e.g. on the Logitech Notebook camera the list is reversed),
        // which may be a bug in that API. So instead of assuming the first
        // value is the max fps, pick the smallest frame interval in the list.
        hr = video_control->GetFrameRateList(output_capture_pin_, i, size,
                                             &list_size, &max_fps_ptr);

        if (SUCCEEDED(hr) && list_size > 0) {
          int min_time = *std::min_element(max_fps_ptr,
                                           max_fps_ptr + list_size);
          capability.frame_rate = (min_time > 0) ?
              kSecondsToReferenceTime / min_time : 0;
        } else {
          // Get frame rate from VIDEOINFOHEADER.
          capability.frame_rate = (time_per_frame > 0) ?
              static_cast<int>(kSecondsToReferenceTime / time_per_frame) : 0;
        }
      } else {
        // Get frame rate from VIDEOINFOHEADER since IAMVideoControl is
        // not supported.
        capability.frame_rate = (time_per_frame > 0) ?
            static_cast<int>(kSecondsToReferenceTime / time_per_frame) : 0;
      }

      // We can't switch MEDIATYPE :~(.
      if (media_type->subtype == kMediaSubTypeI420) {
        capability.color = VideoCaptureDevice::kI420;
      } else if (media_type->subtype == MEDIASUBTYPE_IYUV) {
        // This is identical to kI420.
        capability.color = VideoCaptureDevice::kI420;
      } else if (media_type->subtype == MEDIASUBTYPE_RGB24) {
        capability.color = VideoCaptureDevice::kRGB24;
      } else if (media_type->subtype == MEDIASUBTYPE_YUY2) {
        capability.color = VideoCaptureDevice::kYUY2;
      } else if (media_type->subtype == MEDIASUBTYPE_MJPG) {
        capability.color = VideoCaptureDevice::kMJPEG;
      } else {
        WCHAR guid_str[128];
        StringFromGUID2(media_type->subtype, guid_str, arraysize(guid_str));
        DVLOG(2) << "Device supports unknown media type " << guid_str;
        continue;
      }
      capabilities_[i] = capability;
    }
    DeleteMediaType(media_type);
    media_type = NULL;
  }

  return capabilities_.size() > 0;
}

// Loops through the list of capabilities and returns an index of the best
// matching capability.
// The algorithm prioritizes height, width, frame rate and color format, in
// that order.
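// For example (illustrative numbers only): if 640x480 at 30 fps is requested
// and the device offers 320x240@30, 640x480@15 and 640x480@30, the height and
// width passes discard the 320x240 mode, and the frame rate pass then picks
// 640x480@30 because its frame rate differential is zero.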
int VideoCaptureDeviceWin::GetBestMatchedCapability(int requested_width,
                                                    int requested_height,
                                                    int requested_frame_rate) {
  std::list<ResolutionDiff> diff_list;

  // Loop through the candidates to create a list of differentials between the
  // requested resolution and the camera capability.
  for (CapabilityMap::iterator iterator = capabilities_.begin();
       iterator != capabilities_.end();
       ++iterator) {
    Capability capability = iterator->second;

    ResolutionDiff diff;
    diff.capability_index = iterator->first;
    diff.diff_width = capability.width - requested_width;
    diff.diff_height = capability.height - requested_height;
    diff.diff_frame_rate = capability.frame_rate - requested_frame_rate;
    diff.color = capability.color;
    diff_list.push_back(diff);
  }

  // Sort the best height candidates.
  diff_list.sort(&CompareHeight);
  int best_diff = diff_list.front().diff_height;
  for (std::list<ResolutionDiff>::iterator it = diff_list.begin();
       it != diff_list.end(); ++it) {
    if (it->diff_height != best_diff) {
      // Remove all candidates but the best.
      diff_list.erase(it, diff_list.end());
      break;
    }
  }

  // Sort the best width candidates.
  diff_list.sort(&CompareWidth);
  best_diff = diff_list.front().diff_width;
  for (std::list<ResolutionDiff>::iterator it = diff_list.begin();
       it != diff_list.end(); ++it) {
    if (it->diff_width != best_diff) {
      // Remove all candidates but the best.
      diff_list.erase(it, diff_list.end());
      break;
    }
  }

  // Sort the best frame rate candidates.
  diff_list.sort(&CompareFrameRate);
  best_diff = diff_list.front().diff_frame_rate;
  for (std::list<ResolutionDiff>::iterator it = diff_list.begin();
       it != diff_list.end(); ++it) {
    if (it->diff_frame_rate != best_diff) {
      diff_list.erase(it, diff_list.end());
      break;
    }
  }

  // Decide the best color format.
  diff_list.sort(&CompareColor);
  return diff_list.front().capability_index;
}

void VideoCaptureDeviceWin::SetErrorState(const char* reason) {
  DLOG(ERROR) << reason;
  state_ = kError;
  observer_->OnError();
}

}  // namespace media