Pin Chrome's shortcut to the Win10 Start menu on install and OS upgrade.
[chromium-blink-merge.git] / content / common / gpu / media / dxva_video_decode_accelerator.cc
blob0a791b0dedd0de32195d9598f72007a801870d5c
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #include "content/common/gpu/media/dxva_video_decode_accelerator.h"
7 #if !defined(OS_WIN)
8 #error This file should only be built on Windows.
9 #endif // !defined(OS_WIN)
11 #include <ks.h>
12 #include <codecapi.h>
13 #include <dxgi1_2.h>
14 #include <mfapi.h>
15 #include <mferror.h>
16 #include <ntverp.h>
17 #include <wmcodecdsp.h>
19 #include "base/base_paths_win.h"
20 #include "base/bind.h"
21 #include "base/callback.h"
22 #include "base/command_line.h"
23 #include "base/debug/alias.h"
24 #include "base/file_version_info.h"
25 #include "base/files/file_path.h"
26 #include "base/logging.h"
27 #include "base/memory/scoped_ptr.h"
28 #include "base/memory/shared_memory.h"
29 #include "base/message_loop/message_loop.h"
30 #include "base/path_service.h"
31 #include "base/trace_event/trace_event.h"
32 #include "base/win/windows_version.h"
33 #include "content/public/common/content_switches.h"
34 #include "media/base/win/mf_initializer.h"
35 #include "media/video/video_decode_accelerator.h"
36 #include "ui/gl/gl_bindings.h"
37 #include "ui/gl/gl_context.h"
38 #include "ui/gl/gl_surface_egl.h"
39 #include "ui/gl/gl_switches.h"
41 namespace {
43 // Path is appended on to the PROGRAM_FILES base path.
44 const wchar_t kVPXDecoderDLLPath[] = L"Intel\\Media SDK\\";
46 const wchar_t kVP8DecoderDLLName[] =
47 #if defined(ARCH_CPU_X86)
48 L"mfx_mft_vp8vd_32.dll";
49 #elif defined(ARCH_CPU_X86_64)
50 L"mfx_mft_vp8vd_64.dll";
51 #else
52 #error Unsupported Windows CPU Architecture
53 #endif
55 const wchar_t kVP9DecoderDLLName[] =
56 #if defined(ARCH_CPU_X86)
57 L"mfx_mft_vp9vd_32.dll";
58 #elif defined(ARCH_CPU_X86_64)
59 L"mfx_mft_vp9vd_64.dll";
60 #else
61 #error Unsupported Windows CPU Architecture
62 #endif
64 const CLSID CLSID_WebmMfVp8Dec = {
65 0x451e3cb7,
66 0x2622,
67 0x4ba5,
68 { 0x8e, 0x1d, 0x44, 0xb3, 0xc4, 0x1d, 0x09, 0x24 }
71 const CLSID CLSID_WebmMfVp9Dec = {
72 0x07ab4bd2,
73 0x1979,
74 0x4fcd,
75 { 0xa6, 0x97, 0xdf, 0x9a, 0xd1, 0x5b, 0x34, 0xfe }
78 const CLSID MEDIASUBTYPE_VP80 = {
79 0x30385056,
80 0x0000,
81 0x0010,
82 { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }
85 const CLSID MEDIASUBTYPE_VP90 = {
86 0x30395056,
87 0x0000,
88 0x0010,
89 { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }
92 // The CLSID of the video processor media foundation transform which we use for
93 // texture color conversion in DX11.
94 // Defined in mfidl.h in the Windows 10 SDK. ntverp.h provides VER_PRODUCTBUILD
95 // to detect which SDK we are compiling with.
96 #if VER_PRODUCTBUILD < 10011 // VER_PRODUCTBUILD for 10.0.10158.0 SDK.
97 DEFINE_GUID(CLSID_VideoProcessorMFT,
98 0x88753b26, 0x5b24, 0x49bd, 0xb2, 0xe7, 0xc, 0x44, 0x5c, 0x78,
99 0xc9, 0x82);
100 #endif
102 // MF_XVP_PLAYBACK_MODE
103 // Data type: UINT32 (treat as BOOL)
104 // If this attribute is TRUE, the video processor will run in playback mode
105 // where it allows callers to allocate output samples and allows last frame
106 // regeneration (repaint).
107 DEFINE_GUID(MF_XVP_PLAYBACK_MODE, 0x3c5d293f, 0xad67, 0x4e29, 0xaf, 0x12,
108 0xcf, 0x3e, 0x23, 0x8a, 0xcc, 0xe9);
110 } // namespace
112 namespace content {
114 static const media::VideoCodecProfile kSupportedProfiles[] = {
115 media::H264PROFILE_BASELINE,
116 media::H264PROFILE_MAIN,
117 media::H264PROFILE_HIGH,
118 media::VP8PROFILE_ANY,
119 media::VP9PROFILE_ANY
122 CreateDXGIDeviceManager DXVAVideoDecodeAccelerator::create_dxgi_device_manager_
123 = NULL;
// Logs |log| and returns |ret| from the current function if |result| is false.
#define RETURN_ON_FAILURE(result, log, ret)  \
  do {                                       \
    if (!(result)) {                         \
      DLOG(ERROR) << log;                    \
      return ret;                            \
    }                                        \
  } while (0)

// HRESULT flavor: logs the failure code in hex. NOTE: |result| is evaluated
// more than once, so pass a variable rather than a function call.
#define RETURN_ON_HR_FAILURE(result, log, ret)                    \
  RETURN_ON_FAILURE(SUCCEEDED(result),                            \
                    log << ", HRESULT: 0x" << std::hex << result, \
                    ret);

// As RETURN_ON_FAILURE, but also reports |error_code| to the client via
// StopOnError() before returning.
#define RETURN_AND_NOTIFY_ON_FAILURE(result, log, error_code, ret)  \
  do {                                                              \
    if (!(result)) {                                                \
      DVLOG(1) << log;                                              \
      StopOnError(error_code);                                      \
      return ret;                                                   \
    }                                                               \
  } while (0)

// HRESULT flavor of RETURN_AND_NOTIFY_ON_FAILURE; |result| is evaluated twice.
#define RETURN_AND_NOTIFY_ON_HR_FAILURE(result, log, error_code, ret)        \
  RETURN_AND_NOTIFY_ON_FAILURE(SUCCEEDED(result),                            \
                               log << ", HRESULT: 0x" << std::hex << result, \
                               error_code, ret);
enum {
  // Maximum number of iterations we allow before aborting the attempt to flush
  // the batched queries to the driver and allow torn/corrupt frames to be
  // rendered.
  kFlushDecoderSurfaceTimeoutMs = 1,
  // Maximum iterations where we try to flush the d3d device.
  kMaxIterationsForD3DFlush = 4,
  // We only request 5 picture buffers from the client which are used to hold
  // the decoded samples. These buffers are then reused when the client tells
  // us that it is done with the buffer.
  kNumPictureBuffers = 5,
};
165 static IMFSample* CreateEmptySample() {
166 base::win::ScopedComPtr<IMFSample> sample;
167 HRESULT hr = MFCreateSample(sample.Receive());
168 RETURN_ON_HR_FAILURE(hr, "MFCreateSample failed", NULL);
169 return sample.Detach();
172 // Creates a Media Foundation sample with one buffer of length |buffer_length|
173 // on a |align|-byte boundary. Alignment must be a perfect power of 2 or 0.
174 static IMFSample* CreateEmptySampleWithBuffer(int buffer_length, int align) {
175 CHECK_GT(buffer_length, 0);
177 base::win::ScopedComPtr<IMFSample> sample;
178 sample.Attach(CreateEmptySample());
180 base::win::ScopedComPtr<IMFMediaBuffer> buffer;
181 HRESULT hr = E_FAIL;
182 if (align == 0) {
183 // Note that MFCreateMemoryBuffer is same as MFCreateAlignedMemoryBuffer
184 // with the align argument being 0.
185 hr = MFCreateMemoryBuffer(buffer_length, buffer.Receive());
186 } else {
187 hr = MFCreateAlignedMemoryBuffer(buffer_length,
188 align - 1,
189 buffer.Receive());
191 RETURN_ON_HR_FAILURE(hr, "Failed to create memory buffer for sample", NULL);
193 hr = sample->AddBuffer(buffer.get());
194 RETURN_ON_HR_FAILURE(hr, "Failed to add buffer to sample", NULL);
196 buffer->SetCurrentLength(0);
197 return sample.Detach();
200 // Creates a Media Foundation sample with one buffer containing a copy of the
201 // given Annex B stream data.
202 // If duration and sample time are not known, provide 0.
203 // |min_size| specifies the minimum size of the buffer (might be required by
204 // the decoder for input). If no alignment is required, provide 0.
205 static IMFSample* CreateInputSample(const uint8* stream, int size,
206 int min_size, int alignment) {
207 CHECK(stream);
208 CHECK_GT(size, 0);
209 base::win::ScopedComPtr<IMFSample> sample;
210 sample.Attach(CreateEmptySampleWithBuffer(std::max(min_size, size),
211 alignment));
212 RETURN_ON_FAILURE(sample.get(), "Failed to create empty sample", NULL);
214 base::win::ScopedComPtr<IMFMediaBuffer> buffer;
215 HRESULT hr = sample->GetBufferByIndex(0, buffer.Receive());
216 RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from sample", NULL);
218 DWORD max_length = 0;
219 DWORD current_length = 0;
220 uint8* destination = NULL;
221 hr = buffer->Lock(&destination, &max_length, &current_length);
222 RETURN_ON_HR_FAILURE(hr, "Failed to lock buffer", NULL);
224 CHECK_EQ(current_length, 0u);
225 CHECK_GE(static_cast<int>(max_length), size);
226 memcpy(destination, stream, size);
228 hr = buffer->Unlock();
229 RETURN_ON_HR_FAILURE(hr, "Failed to unlock buffer", NULL);
231 hr = buffer->SetCurrentLength(size);
232 RETURN_ON_HR_FAILURE(hr, "Failed to set buffer length", NULL);
234 return sample.Detach();
237 static IMFSample* CreateSampleFromInputBuffer(
238 const media::BitstreamBuffer& bitstream_buffer,
239 DWORD stream_size,
240 DWORD alignment) {
241 base::SharedMemory shm(bitstream_buffer.handle(), true);
242 RETURN_ON_FAILURE(shm.Map(bitstream_buffer.size()),
243 "Failed in base::SharedMemory::Map", NULL);
245 return CreateInputSample(reinterpret_cast<const uint8*>(shm.memory()),
246 bitstream_buffer.size(),
247 stream_size,
248 alignment);
251 // Helper function to create a COM object instance from a DLL. The alternative
252 // is to use the CoCreateInstance API which requires the COM apartment to be
253 // initialized which is not the case on the GPU main thread. We want to avoid
254 // initializing COM as it may have sideeffects.
255 HRESULT CreateCOMObjectFromDll(HMODULE dll, const CLSID& clsid, const IID& iid,
256 void** object) {
257 if (!dll || !object)
258 return E_INVALIDARG;
260 using GetClassObject = HRESULT (WINAPI*)(
261 const CLSID& clsid, const IID& iid, void** object);
263 GetClassObject get_class_object = reinterpret_cast<GetClassObject>(
264 GetProcAddress(dll, "DllGetClassObject"));
265 RETURN_ON_FAILURE(
266 get_class_object, "Failed to get DllGetClassObject pointer", E_FAIL);
268 base::win::ScopedComPtr<IClassFactory> factory;
269 HRESULT hr = get_class_object(
270 clsid,
271 __uuidof(IClassFactory),
272 factory.ReceiveVoid());
273 RETURN_ON_HR_FAILURE(hr, "DllGetClassObject failed", hr);
275 hr = factory->CreateInstance(NULL, iid, object);
276 return hr;
279 // Maintains information about a DXVA picture buffer, i.e. whether it is
280 // available for rendering, the texture information, etc.
281 struct DXVAVideoDecodeAccelerator::DXVAPictureBuffer {
282 public:
283 static linked_ptr<DXVAPictureBuffer> Create(
284 const DXVAVideoDecodeAccelerator& decoder,
285 const media::PictureBuffer& buffer,
286 EGLConfig egl_config);
287 ~DXVAPictureBuffer();
289 void ReusePictureBuffer();
290 // Copies the output sample data to the picture buffer provided by the
291 // client.
292 // The dest_surface parameter contains the decoded bits.
293 bool CopyOutputSampleDataToPictureBuffer(
294 DXVAVideoDecodeAccelerator* decoder,
295 IDirect3DSurface9* dest_surface,
296 ID3D11Texture2D* dx11_texture,
297 int input_buffer_id);
299 bool available() const {
300 return available_;
303 void set_available(bool available) {
304 available_ = available;
307 int id() const {
308 return picture_buffer_.id();
311 gfx::Size size() const {
312 return picture_buffer_.size();
315 // Called when the source surface |src_surface| is copied to the destination
316 // |dest_surface|
317 void CopySurfaceComplete(IDirect3DSurface9* src_surface,
318 IDirect3DSurface9* dest_surface);
320 private:
321 explicit DXVAPictureBuffer(const media::PictureBuffer& buffer);
323 bool available_;
324 media::PictureBuffer picture_buffer_;
325 EGLSurface decoding_surface_;
326 base::win::ScopedComPtr<IDirect3DTexture9> decoding_texture_;
327 base::win::ScopedComPtr<ID3D11Texture2D> dx11_decoding_texture_;
329 // The following |IDirect3DSurface9| interface pointers are used to hold
330 // references on the surfaces during the course of a StretchRect operation
331 // to copy the source surface to the target. The references are released
332 // when the StretchRect operation i.e. the copy completes.
333 base::win::ScopedComPtr<IDirect3DSurface9> decoder_surface_;
334 base::win::ScopedComPtr<IDirect3DSurface9> target_surface_;
336 // This ID3D11Texture2D interface pointer is used to hold a reference to the
337 // decoder texture during the course of a copy operation. This reference is
338 // released when the copy completes.
339 base::win::ScopedComPtr<ID3D11Texture2D> decoder_dx11_texture_;
341 // Set to true if RGB is supported by the texture.
342 // Defaults to true.
343 bool use_rgb_;
345 DISALLOW_COPY_AND_ASSIGN(DXVAPictureBuffer);
348 // static
349 linked_ptr<DXVAVideoDecodeAccelerator::DXVAPictureBuffer>
350 DXVAVideoDecodeAccelerator::DXVAPictureBuffer::Create(
351 const DXVAVideoDecodeAccelerator& decoder,
352 const media::PictureBuffer& buffer,
353 EGLConfig egl_config) {
354 linked_ptr<DXVAPictureBuffer> picture_buffer(new DXVAPictureBuffer(buffer));
356 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay();
358 EGLint use_rgb = 1;
359 eglGetConfigAttrib(egl_display, egl_config, EGL_BIND_TO_TEXTURE_RGB,
360 &use_rgb);
362 EGLint attrib_list[] = {
363 EGL_WIDTH, buffer.size().width(),
364 EGL_HEIGHT, buffer.size().height(),
365 EGL_TEXTURE_FORMAT, use_rgb ? EGL_TEXTURE_RGB : EGL_TEXTURE_RGBA,
366 EGL_TEXTURE_TARGET, EGL_TEXTURE_2D,
367 EGL_NONE
370 picture_buffer->decoding_surface_ = eglCreatePbufferSurface(
371 egl_display,
372 egl_config,
373 attrib_list);
374 RETURN_ON_FAILURE(picture_buffer->decoding_surface_,
375 "Failed to create surface",
376 linked_ptr<DXVAPictureBuffer>(NULL));
378 HANDLE share_handle = NULL;
379 EGLBoolean ret = eglQuerySurfacePointerANGLE(
380 egl_display,
381 picture_buffer->decoding_surface_,
382 EGL_D3D_TEXTURE_2D_SHARE_HANDLE_ANGLE,
383 &share_handle);
385 RETURN_ON_FAILURE(share_handle && ret == EGL_TRUE,
386 "Failed to query ANGLE surface pointer",
387 linked_ptr<DXVAPictureBuffer>(NULL));
389 HRESULT hr = E_FAIL;
390 if (decoder.d3d11_device_) {
391 base::win::ScopedComPtr<ID3D11Resource> resource;
392 hr = decoder.d3d11_device_->OpenSharedResource(
393 share_handle,
394 __uuidof(ID3D11Resource),
395 reinterpret_cast<void**>(resource.Receive()));
396 RETURN_ON_HR_FAILURE(hr, "Failed to open shared resource",
397 linked_ptr<DXVAPictureBuffer>(NULL));
398 hr = picture_buffer->dx11_decoding_texture_.QueryFrom(resource.get());
399 } else {
400 hr = decoder.d3d9_device_ex_->CreateTexture(
401 buffer.size().width(),
402 buffer.size().height(),
404 D3DUSAGE_RENDERTARGET,
405 use_rgb ? D3DFMT_X8R8G8B8 : D3DFMT_A8R8G8B8,
406 D3DPOOL_DEFAULT,
407 picture_buffer->decoding_texture_.Receive(),
408 &share_handle);
410 RETURN_ON_HR_FAILURE(hr, "Failed to create texture",
411 linked_ptr<DXVAPictureBuffer>(NULL));
412 picture_buffer->use_rgb_ = !!use_rgb;
413 return picture_buffer;
416 DXVAVideoDecodeAccelerator::DXVAPictureBuffer::DXVAPictureBuffer(
417 const media::PictureBuffer& buffer)
418 : available_(true),
419 picture_buffer_(buffer),
420 decoding_surface_(NULL),
421 use_rgb_(true) {
424 DXVAVideoDecodeAccelerator::DXVAPictureBuffer::~DXVAPictureBuffer() {
425 if (decoding_surface_) {
426 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay();
428 eglReleaseTexImage(
429 egl_display,
430 decoding_surface_,
431 EGL_BACK_BUFFER);
433 eglDestroySurface(
434 egl_display,
435 decoding_surface_);
436 decoding_surface_ = NULL;
440 void DXVAVideoDecodeAccelerator::DXVAPictureBuffer::ReusePictureBuffer() {
441 DCHECK(decoding_surface_);
442 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay();
443 eglReleaseTexImage(
444 egl_display,
445 decoding_surface_,
446 EGL_BACK_BUFFER);
447 decoder_surface_.Release();
448 target_surface_.Release();
449 decoder_dx11_texture_.Release();
450 set_available(true);
453 bool DXVAVideoDecodeAccelerator::DXVAPictureBuffer::
454 CopyOutputSampleDataToPictureBuffer(
455 DXVAVideoDecodeAccelerator* decoder,
456 IDirect3DSurface9* dest_surface,
457 ID3D11Texture2D* dx11_texture,
458 int input_buffer_id) {
459 DCHECK(dest_surface || dx11_texture);
460 if (dx11_texture) {
461 // Grab a reference on the decoder texture. This reference will be released
462 // when we receive a notification that the copy was completed or when the
463 // DXVAPictureBuffer instance is destroyed.
464 decoder_dx11_texture_ = dx11_texture;
465 decoder->CopyTexture(dx11_texture, dx11_decoding_texture_.get(), NULL,
466 id(), input_buffer_id);
467 return true;
469 D3DSURFACE_DESC surface_desc;
470 HRESULT hr = dest_surface->GetDesc(&surface_desc);
471 RETURN_ON_HR_FAILURE(hr, "Failed to get surface description", false);
473 D3DSURFACE_DESC texture_desc;
474 decoding_texture_->GetLevelDesc(0, &texture_desc);
476 if (texture_desc.Width != surface_desc.Width ||
477 texture_desc.Height != surface_desc.Height) {
478 NOTREACHED() << "Decode surface of different dimension than texture";
479 return false;
482 hr = decoder->d3d9_->CheckDeviceFormatConversion(
483 D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, surface_desc.Format,
484 use_rgb_ ? D3DFMT_X8R8G8B8 : D3DFMT_A8R8G8B8);
485 RETURN_ON_HR_FAILURE(hr, "Device does not support format converision", false);
487 // The same picture buffer can be reused for a different frame. Release the
488 // target surface and the decoder references here.
489 target_surface_.Release();
490 decoder_surface_.Release();
492 // Grab a reference on the decoder surface and the target surface. These
493 // references will be released when we receive a notification that the
494 // copy was completed or when the DXVAPictureBuffer instance is destroyed.
495 // We hold references here as it is easier to manage their lifetimes.
496 hr = decoding_texture_->GetSurfaceLevel(0, target_surface_.Receive());
497 RETURN_ON_HR_FAILURE(hr, "Failed to get surface from texture", false);
499 decoder_surface_ = dest_surface;
501 decoder->CopySurface(decoder_surface_.get(), target_surface_.get(), id(),
502 input_buffer_id);
503 return true;
506 void DXVAVideoDecodeAccelerator::DXVAPictureBuffer::CopySurfaceComplete(
507 IDirect3DSurface9* src_surface,
508 IDirect3DSurface9* dest_surface) {
509 DCHECK(!available());
511 GLint current_texture = 0;
512 glGetIntegerv(GL_TEXTURE_BINDING_2D, &current_texture);
514 glBindTexture(GL_TEXTURE_2D, picture_buffer_.texture_id());
516 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
518 if (src_surface && dest_surface) {
519 DCHECK_EQ(src_surface, decoder_surface_.get());
520 DCHECK_EQ(dest_surface, target_surface_.get());
521 decoder_surface_.Release();
522 target_surface_.Release();
523 } else {
524 DCHECK(decoder_dx11_texture_.get());
525 decoder_dx11_texture_.Release();
528 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay();
529 eglBindTexImage(
530 egl_display,
531 decoding_surface_,
532 EGL_BACK_BUFFER);
534 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
535 glBindTexture(GL_TEXTURE_2D, current_texture);
538 DXVAVideoDecodeAccelerator::PendingSampleInfo::PendingSampleInfo(
539 int32 buffer_id, IMFSample* sample)
540 : input_buffer_id(buffer_id),
541 picture_buffer_id(-1) {
542 output_sample.Attach(sample);
545 DXVAVideoDecodeAccelerator::PendingSampleInfo::~PendingSampleInfo() {}
547 DXVAVideoDecodeAccelerator::DXVAVideoDecodeAccelerator(
548 const base::Callback<bool(void)>& make_context_current,
549 gfx::GLContext* gl_context)
550 : client_(NULL),
551 dev_manager_reset_token_(0),
552 dx11_dev_manager_reset_token_(0),
553 egl_config_(NULL),
554 state_(kUninitialized),
555 pictures_requested_(false),
556 inputs_before_decode_(0),
557 sent_drain_message_(false),
558 make_context_current_(make_context_current),
559 codec_(media::kUnknownVideoCodec),
560 decoder_thread_("DXVAVideoDecoderThread"),
561 pending_flush_(false),
562 use_dx11_(false),
563 dx11_video_format_converter_media_type_needs_init_(true),
564 gl_context_(gl_context),
565 weak_this_factory_(this) {
566 weak_ptr_ = weak_this_factory_.GetWeakPtr();
567 memset(&input_stream_info_, 0, sizeof(input_stream_info_));
568 memset(&output_stream_info_, 0, sizeof(output_stream_info_));
571 DXVAVideoDecodeAccelerator::~DXVAVideoDecodeAccelerator() {
572 client_ = NULL;
575 bool DXVAVideoDecodeAccelerator::Initialize(media::VideoCodecProfile profile,
576 Client* client) {
577 client_ = client;
579 main_thread_task_runner_ = base::MessageLoop::current()->task_runner();
581 bool profile_supported = false;
582 for (const auto& supported_profile : kSupportedProfiles) {
583 if (profile == supported_profile) {
584 profile_supported = true;
585 break;
588 if (!profile_supported) {
589 RETURN_AND_NOTIFY_ON_FAILURE(false,
590 "Unsupported h.264, vp8, or vp9 profile", PLATFORM_FAILURE, false);
593 // Not all versions of Windows 7 and later include Media Foundation DLLs.
594 // Instead of crashing while delay loading the DLL when calling MFStartup()
595 // below, probe whether we can successfully load the DLL now.
596 // See http://crbug.com/339678 for details.
597 HMODULE dxgi_manager_dll = NULL;
598 if ((dxgi_manager_dll = ::GetModuleHandle(L"MFPlat.dll")) == NULL) {
599 HMODULE mfplat_dll = ::LoadLibrary(L"MFPlat.dll");
600 RETURN_ON_FAILURE(mfplat_dll, "MFPlat.dll is required for decoding",
601 false);
602 // On Windows 8+ mfplat.dll provides the MFCreateDXGIDeviceManager API.
603 // On Windows 7 mshtmlmedia.dll provides it.
604 dxgi_manager_dll = mfplat_dll;
607 // TODO(ananta)
608 // The code below works, as in we can create the DX11 device manager for
609 // Windows 7. However the IMFTransform we use for texture conversion and
610 // copy does not exist on Windows 7. Look into an alternate approach
611 // and enable the code below.
612 #if defined ENABLE_DX11_FOR_WIN7
613 if ((base::win::GetVersion() == base::win::VERSION_WIN7) &&
614 ((dxgi_manager_dll = ::GetModuleHandle(L"mshtmlmedia.dll")) == NULL)) {
615 HMODULE mshtml_media_dll = ::LoadLibrary(L"mshtmlmedia.dll");
616 if (mshtml_media_dll)
617 dxgi_manager_dll = mshtml_media_dll;
619 #endif
620 // If we don't find the MFCreateDXGIDeviceManager API we fallback to D3D9
621 // decoding.
622 if (dxgi_manager_dll && !create_dxgi_device_manager_) {
623 create_dxgi_device_manager_ = reinterpret_cast<CreateDXGIDeviceManager>(
624 ::GetProcAddress(dxgi_manager_dll, "MFCreateDXGIDeviceManager"));
627 RETURN_AND_NOTIFY_ON_FAILURE(
628 gfx::g_driver_egl.ext.b_EGL_ANGLE_surface_d3d_texture_2d_share_handle,
629 "EGL_ANGLE_surface_d3d_texture_2d_share_handle unavailable",
630 PLATFORM_FAILURE,
631 false);
633 State state = GetState();
634 RETURN_AND_NOTIFY_ON_FAILURE((state == kUninitialized),
635 "Initialize: invalid state: " << state, ILLEGAL_STATE, false);
637 media::InitializeMediaFoundation();
639 RETURN_AND_NOTIFY_ON_FAILURE(InitDecoder(profile),
640 "Failed to initialize decoder", PLATFORM_FAILURE, false);
642 RETURN_AND_NOTIFY_ON_FAILURE(GetStreamsInfoAndBufferReqs(),
643 "Failed to get input/output stream info.", PLATFORM_FAILURE, false);
645 RETURN_AND_NOTIFY_ON_FAILURE(
646 SendMFTMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0),
647 "Send MFT_MESSAGE_NOTIFY_BEGIN_STREAMING notification failed",
648 PLATFORM_FAILURE, false);
650 RETURN_AND_NOTIFY_ON_FAILURE(
651 SendMFTMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, 0),
652 "Send MFT_MESSAGE_NOTIFY_START_OF_STREAM notification failed",
653 PLATFORM_FAILURE, false);
655 SetState(kNormal);
657 StartDecoderThread();
658 return true;
661 bool DXVAVideoDecodeAccelerator::CreateD3DDevManager() {
662 TRACE_EVENT0("gpu", "DXVAVideoDecodeAccelerator_CreateD3DDevManager");
664 HRESULT hr = Direct3DCreate9Ex(D3D_SDK_VERSION, d3d9_.Receive());
665 RETURN_ON_HR_FAILURE(hr, "Direct3DCreate9Ex failed", false);
667 D3DPRESENT_PARAMETERS present_params = {0};
668 present_params.BackBufferWidth = 1;
669 present_params.BackBufferHeight = 1;
670 present_params.BackBufferFormat = D3DFMT_UNKNOWN;
671 present_params.BackBufferCount = 1;
672 present_params.SwapEffect = D3DSWAPEFFECT_DISCARD;
673 present_params.hDeviceWindow = ::GetShellWindow();
674 present_params.Windowed = TRUE;
675 present_params.Flags = D3DPRESENTFLAG_VIDEO;
676 present_params.FullScreen_RefreshRateInHz = 0;
677 present_params.PresentationInterval = 0;
679 hr = d3d9_->CreateDeviceEx(D3DADAPTER_DEFAULT,
680 D3DDEVTYPE_HAL,
681 ::GetShellWindow(),
682 D3DCREATE_FPU_PRESERVE |
683 D3DCREATE_SOFTWARE_VERTEXPROCESSING |
684 D3DCREATE_DISABLE_PSGP_THREADING |
685 D3DCREATE_MULTITHREADED,
686 &present_params,
687 NULL,
688 d3d9_device_ex_.Receive());
689 RETURN_ON_HR_FAILURE(hr, "Failed to create D3D device", false);
691 hr = DXVA2CreateDirect3DDeviceManager9(&dev_manager_reset_token_,
692 device_manager_.Receive());
693 RETURN_ON_HR_FAILURE(hr, "DXVA2CreateDirect3DDeviceManager9 failed", false);
695 hr = device_manager_->ResetDevice(d3d9_device_ex_.get(),
696 dev_manager_reset_token_);
697 RETURN_ON_HR_FAILURE(hr, "Failed to reset device", false);
699 hr = d3d9_device_ex_->CreateQuery(D3DQUERYTYPE_EVENT, query_.Receive());
700 RETURN_ON_HR_FAILURE(hr, "Failed to create D3D device query", false);
701 // Ensure query_ API works (to avoid an infinite loop later in
702 // CopyOutputSampleDataToPictureBuffer).
703 hr = query_->Issue(D3DISSUE_END);
704 RETURN_ON_HR_FAILURE(hr, "Failed to issue END test query", false);
705 return true;
708 bool DXVAVideoDecodeAccelerator::CreateDX11DevManager() {
709 HRESULT hr = create_dxgi_device_manager_(&dx11_dev_manager_reset_token_,
710 d3d11_device_manager_.Receive());
711 RETURN_ON_HR_FAILURE(hr, "MFCreateDXGIDeviceManager failed", false);
713 // This array defines the set of DirectX hardware feature levels we support.
714 // The ordering MUST be preserved. All applications are assumed to support
715 // 9.1 unless otherwise stated by the application, which is not our case.
716 D3D_FEATURE_LEVEL feature_levels[] = {
717 D3D_FEATURE_LEVEL_11_1,
718 D3D_FEATURE_LEVEL_11_0,
719 D3D_FEATURE_LEVEL_10_1,
720 D3D_FEATURE_LEVEL_10_0,
721 D3D_FEATURE_LEVEL_9_3,
722 D3D_FEATURE_LEVEL_9_2,
723 D3D_FEATURE_LEVEL_9_1 };
725 UINT flags = D3D11_CREATE_DEVICE_VIDEO_SUPPORT;
727 #if defined _DEBUG
728 flags |= D3D11_CREATE_DEVICE_DEBUG;
729 #endif
731 D3D_FEATURE_LEVEL feature_level_out = D3D_FEATURE_LEVEL_11_0;
732 hr = D3D11CreateDevice(NULL,
733 D3D_DRIVER_TYPE_HARDWARE,
734 NULL,
735 flags,
736 feature_levels,
737 arraysize(feature_levels),
738 D3D11_SDK_VERSION,
739 d3d11_device_.Receive(),
740 &feature_level_out,
741 d3d11_device_context_.Receive());
742 RETURN_ON_HR_FAILURE(hr, "Failed to create DX11 device", false);
744 // Enable multithreaded mode on the context. This ensures that accesses to
745 // context are synchronized across threads. We have multiple threads
746 // accessing the context, the media foundation decoder threads and the
747 // decoder thread via the video format conversion transform.
748 base::win::ScopedComPtr<ID3D10Multithread> multi_threaded;
749 hr = multi_threaded.QueryFrom(d3d11_device_context_.get());
750 RETURN_ON_HR_FAILURE(hr, "Failed to query ID3D10Multithread", false);
751 multi_threaded->SetMultithreadProtected(TRUE);
753 hr = d3d11_device_manager_->ResetDevice(d3d11_device_.get(),
754 dx11_dev_manager_reset_token_);
755 RETURN_ON_HR_FAILURE(hr, "Failed to reset device", false);
757 D3D11_QUERY_DESC query_desc;
758 query_desc.Query = D3D11_QUERY_EVENT;
759 query_desc.MiscFlags = 0;
760 hr = d3d11_device_->CreateQuery(
761 &query_desc,
762 d3d11_query_.Receive());
763 RETURN_ON_HR_FAILURE(hr, "Failed to create DX11 device query", false);
765 HMODULE video_processor_dll = ::LoadLibrary(L"msvproc.dll");
766 RETURN_ON_FAILURE(video_processor_dll, "Failed to load video processor",
767 false);
769 hr = CreateCOMObjectFromDll(
770 video_processor_dll,
771 CLSID_VideoProcessorMFT,
772 __uuidof(IMFTransform),
773 video_format_converter_mft_.ReceiveVoid());
774 if (FAILED(hr)) {
775 base::debug::Alias(&hr);
776 // TODO(ananta)
777 // Remove this CHECK when the change to use DX11 for H/W decoding
778 // stablizes.
779 CHECK(false);
782 RETURN_ON_HR_FAILURE(hr, "Failed to create video format converter", false);
783 return true;
786 void DXVAVideoDecodeAccelerator::Decode(
787 const media::BitstreamBuffer& bitstream_buffer) {
788 DCHECK(main_thread_task_runner_->BelongsToCurrentThread());
790 State state = GetState();
791 RETURN_AND_NOTIFY_ON_FAILURE((state == kNormal || state == kStopped ||
792 state == kFlushing),
793 "Invalid state: " << state, ILLEGAL_STATE,);
795 base::win::ScopedComPtr<IMFSample> sample;
796 sample.Attach(CreateSampleFromInputBuffer(bitstream_buffer,
797 input_stream_info_.cbSize,
798 input_stream_info_.cbAlignment));
799 RETURN_AND_NOTIFY_ON_FAILURE(sample.get(), "Failed to create input sample",
800 PLATFORM_FAILURE, );
802 RETURN_AND_NOTIFY_ON_HR_FAILURE(sample->SetSampleTime(bitstream_buffer.id()),
803 "Failed to associate input buffer id with sample", PLATFORM_FAILURE,);
805 decoder_thread_task_runner_->PostTask(
806 FROM_HERE,
807 base::Bind(&DXVAVideoDecodeAccelerator::DecodeInternal,
808 base::Unretained(this), sample));
811 void DXVAVideoDecodeAccelerator::AssignPictureBuffers(
812 const std::vector<media::PictureBuffer>& buffers) {
813 DCHECK(main_thread_task_runner_->BelongsToCurrentThread());
815 State state = GetState();
816 RETURN_AND_NOTIFY_ON_FAILURE((state != kUninitialized),
817 "Invalid state: " << state, ILLEGAL_STATE,);
818 RETURN_AND_NOTIFY_ON_FAILURE((kNumPictureBuffers == buffers.size()),
819 "Failed to provide requested picture buffers. (Got " << buffers.size() <<
820 ", requested " << kNumPictureBuffers << ")", INVALID_ARGUMENT,);
822 // Copy the picture buffers provided by the client to the available list,
823 // and mark these buffers as available for use.
824 for (size_t buffer_index = 0; buffer_index < buffers.size();
825 ++buffer_index) {
826 linked_ptr<DXVAPictureBuffer> picture_buffer =
827 DXVAPictureBuffer::Create(*this, buffers[buffer_index], egl_config_);
828 RETURN_AND_NOTIFY_ON_FAILURE(picture_buffer.get(),
829 "Failed to allocate picture buffer", PLATFORM_FAILURE,);
831 bool inserted = output_picture_buffers_.insert(std::make_pair(
832 buffers[buffer_index].id(), picture_buffer)).second;
833 DCHECK(inserted);
836 ProcessPendingSamples();
837 if (pending_flush_) {
838 decoder_thread_task_runner_->PostTask(
839 FROM_HERE,
840 base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal,
841 base::Unretained(this)));
845 void DXVAVideoDecodeAccelerator::ReusePictureBuffer(
846 int32 picture_buffer_id) {
847 DCHECK(main_thread_task_runner_->BelongsToCurrentThread());
849 State state = GetState();
850 RETURN_AND_NOTIFY_ON_FAILURE((state != kUninitialized),
851 "Invalid state: " << state, ILLEGAL_STATE,);
853 if (output_picture_buffers_.empty() && stale_output_picture_buffers_.empty())
854 return;
856 OutputBuffers::iterator it = output_picture_buffers_.find(picture_buffer_id);
857 // If we didn't find the picture id in the |output_picture_buffers_| map we
858 // try the |stale_output_picture_buffers_| map, as this may have been an
859 // output picture buffer from before a resolution change, that at resolution
860 // change time had yet to be displayed. The client is calling us back to tell
861 // us that we can now recycle this picture buffer, so if we were waiting to
862 // dispose of it we now can.
863 if (it == output_picture_buffers_.end()) {
864 it = stale_output_picture_buffers_.find(picture_buffer_id);
865 RETURN_AND_NOTIFY_ON_FAILURE(it != stale_output_picture_buffers_.end(),
866 "Invalid picture id: " << picture_buffer_id, INVALID_ARGUMENT,);
867 main_thread_task_runner_->PostTask(
868 FROM_HERE,
869 base::Bind(&DXVAVideoDecodeAccelerator::DeferredDismissStaleBuffer,
870 weak_this_factory_.GetWeakPtr(), picture_buffer_id));
871 return;
874 it->second->ReusePictureBuffer();
875 ProcessPendingSamples();
876 if (pending_flush_) {
877 decoder_thread_task_runner_->PostTask(
878 FROM_HERE,
879 base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal,
880 base::Unretained(this)));
884 void DXVAVideoDecodeAccelerator::Flush() {
885 DCHECK(main_thread_task_runner_->BelongsToCurrentThread());
887 DVLOG(1) << "DXVAVideoDecodeAccelerator::Flush";
889 State state = GetState();
890 RETURN_AND_NOTIFY_ON_FAILURE((state == kNormal || state == kStopped),
891 "Unexpected decoder state: " << state, ILLEGAL_STATE,);
893 SetState(kFlushing);
895 pending_flush_ = true;
897 decoder_thread_task_runner_->PostTask(
898 FROM_HERE,
899 base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal,
900 base::Unretained(this)));
// Client-initiated reset: drops queued input and any decoded frames awaiting
// display, flushes the MFT, and reports NotifyResetDone() on the main thread.
void DXVAVideoDecodeAccelerator::Reset() {
  DCHECK(main_thread_task_runner_->BelongsToCurrentThread());

  DVLOG(1) << "DXVAVideoDecodeAccelerator::Reset";

  State state = GetState();
  RETURN_AND_NOTIFY_ON_FAILURE((state == kNormal || state == kStopped),
      "Reset: invalid state: " << state, ILLEGAL_STATE,);

  // Stop the decoder thread so no decode work races with the reset.
  decoder_thread_.Stop();

  SetState(kResetting);

  // If we have pending output frames waiting for display then we drop those
  // frames and set the corresponding picture buffer as available.
  PendingOutputSamples::iterator index;
  for (index = pending_output_samples_.begin();
       index != pending_output_samples_.end();
       ++index) {
    if (index->picture_buffer_id != -1) {
      OutputBuffers::iterator it = output_picture_buffers_.find(
          index->picture_buffer_id);
      if (it != output_picture_buffers_.end()) {
        DXVAPictureBuffer* picture_buffer = it->second.get();
        picture_buffer->ReusePictureBuffer();
      }
    }
  }

  pending_output_samples_.clear();

  // Tell the client its queued bitstream buffers will not produce output.
  NotifyInputBuffersDropped();

  RETURN_AND_NOTIFY_ON_FAILURE(SendMFTMessage(MFT_MESSAGE_COMMAND_FLUSH, 0),
      "Reset: Failed to send message.", PLATFORM_FAILURE,);

  main_thread_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::NotifyResetDone,
                 weak_this_factory_.GetWeakPtr()));

  // Restart the (stopped) decoder thread and resume normal operation.
  StartDecoderThread();
  SetState(kNormal);
}
// Tears down all decoder state and deletes |this|. Must be the last call the
// client makes on this object.
void DXVAVideoDecodeAccelerator::Destroy() {
  DCHECK(main_thread_task_runner_->BelongsToCurrentThread());
  Invalidate();
  delete this;
}
// Decode() must always be invoked on the main thread; this accelerator does
// not support IO-thread decoding.
bool DXVAVideoDecodeAccelerator::CanDecodeOnIOThread() {
  return false;
}
// Output textures handed to the client are BGRA (post colorspace conversion).
GLenum DXVAVideoDecodeAccelerator::GetSurfaceInternalFormat() const {
  return GL_BGRA_EXT;
}
962 // static
963 media::VideoDecodeAccelerator::SupportedProfiles
964 DXVAVideoDecodeAccelerator::GetSupportedProfiles() {
965 // TODO(henryhsu): Need to ensure the profiles are actually supported.
966 SupportedProfiles profiles;
967 for (const auto& supported_profile : kSupportedProfiles) {
968 SupportedProfile profile;
969 profile.profile = supported_profile;
970 // Windows Media Foundation H.264 decoding does not support decoding videos
971 // with any dimension smaller than 48 pixels:
972 // http://msdn.microsoft.com/en-us/library/windows/desktop/dd797815
973 profile.min_resolution.SetSize(48, 48);
974 // Use 1088 to account for 16x16 macroblocks.
975 profile.max_resolution.SetSize(1920, 1088);
976 profiles.push_back(profile);
978 return profiles;
981 bool DXVAVideoDecodeAccelerator::InitDecoder(media::VideoCodecProfile profile) {
982 HMODULE decoder_dll = NULL;
984 CLSID clsid = {};
986 // Profile must fall within the valid range for one of the supported codecs.
987 if (profile >= media::H264PROFILE_MIN && profile <= media::H264PROFILE_MAX) {
988 // We mimic the steps CoCreateInstance uses to instantiate the object. This
989 // was previously done because it failed inside the sandbox, and now is done
990 // as a more minimal approach to avoid other side-effects CCI might have (as
991 // we are still in a reduced sandbox).
992 decoder_dll = ::LoadLibrary(L"msmpeg2vdec.dll");
993 RETURN_ON_FAILURE(decoder_dll,
994 "msmpeg2vdec.dll required for decoding is not loaded",
995 false);
997 // Check version of DLL, version 6.7.7140 is blacklisted due to high crash
998 // rates in browsers loading that DLL. If that is the version installed we
999 // fall back to software decoding. See crbug/403440.
1000 FileVersionInfo* version_info =
1001 FileVersionInfo::CreateFileVersionInfoForModule(decoder_dll);
1002 RETURN_ON_FAILURE(version_info,
1003 "unable to get version of msmpeg2vdec.dll",
1004 false);
1005 base::string16 file_version = version_info->file_version();
1006 RETURN_ON_FAILURE(file_version.find(L"6.1.7140") == base::string16::npos,
1007 "blacklisted version of msmpeg2vdec.dll 6.7.7140",
1008 false);
1009 codec_ = media::kCodecH264;
1010 clsid = __uuidof(CMSH264DecoderMFT);
1011 } else if ((profile == media::VP8PROFILE_ANY ||
1012 profile == media::VP9PROFILE_ANY) &&
1013 base::CommandLine::ForCurrentProcess()->HasSwitch(
1014 switches::kEnableAcceleratedVpxDecode)) {
1015 int program_files_key = base::DIR_PROGRAM_FILES;
1016 if (base::win::OSInfo::GetInstance()->wow64_status() ==
1017 base::win::OSInfo::WOW64_ENABLED) {
1018 program_files_key = base::DIR_PROGRAM_FILES6432;
1021 base::FilePath dll_path;
1022 RETURN_ON_FAILURE(PathService::Get(program_files_key, &dll_path),
1023 "failed to get path for Program Files", false);
1025 dll_path = dll_path.Append(kVPXDecoderDLLPath);
1026 if (profile == media::VP8PROFILE_ANY) {
1027 codec_ = media::kCodecVP8;
1028 dll_path = dll_path.Append(kVP8DecoderDLLName);
1029 clsid = CLSID_WebmMfVp8Dec;
1030 } else {
1031 codec_ = media::kCodecVP9;
1032 dll_path = dll_path.Append(kVP9DecoderDLLName);
1033 clsid = CLSID_WebmMfVp9Dec;
1035 decoder_dll = ::LoadLibraryEx(dll_path.value().data(), NULL,
1036 LOAD_WITH_ALTERED_SEARCH_PATH);
1037 RETURN_ON_FAILURE(decoder_dll, "vpx decoder dll is not loaded", false);
1038 } else {
1039 RETURN_ON_FAILURE(false, "Unsupported codec.", false);
1042 HRESULT hr = CreateCOMObjectFromDll(decoder_dll,
1043 clsid,
1044 __uuidof(IMFTransform),
1045 decoder_.ReceiveVoid());
1046 RETURN_ON_HR_FAILURE(hr, "Failed to create decoder instance", false);
1048 RETURN_ON_FAILURE(CheckDecoderDxvaSupport(),
1049 "Failed to check decoder DXVA support", false);
1051 ULONG_PTR device_manager_to_use = NULL;
1052 if (use_dx11_) {
1053 CHECK(create_dxgi_device_manager_);
1054 RETURN_AND_NOTIFY_ON_FAILURE(CreateDX11DevManager(),
1055 "Failed to initialize DX11 device and manager",
1056 PLATFORM_FAILURE,
1057 false);
1058 device_manager_to_use = reinterpret_cast<ULONG_PTR>(
1059 d3d11_device_manager_.get());
1060 } else {
1061 RETURN_AND_NOTIFY_ON_FAILURE(CreateD3DDevManager(),
1062 "Failed to initialize D3D device and manager",
1063 PLATFORM_FAILURE,
1064 false);
1065 device_manager_to_use = reinterpret_cast<ULONG_PTR>(device_manager_.get());
1068 hr = decoder_->ProcessMessage(
1069 MFT_MESSAGE_SET_D3D_MANAGER,
1070 device_manager_to_use);
1071 if (use_dx11_) {
1072 RETURN_ON_HR_FAILURE(hr, "Failed to pass DX11 manager to decoder", false);
1073 } else {
1074 RETURN_ON_HR_FAILURE(hr, "Failed to pass D3D manager to decoder", false);
1077 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay();
1079 EGLint config_attribs[] = {
1080 EGL_BUFFER_SIZE, 32,
1081 EGL_RED_SIZE, 8,
1082 EGL_GREEN_SIZE, 8,
1083 EGL_BLUE_SIZE, 8,
1084 EGL_SURFACE_TYPE, EGL_PBUFFER_BIT,
1085 EGL_ALPHA_SIZE, 0,
1086 EGL_NONE
1089 EGLint num_configs;
1091 if (!eglChooseConfig(
1092 egl_display,
1093 config_attribs,
1094 &egl_config_,
1096 &num_configs))
1097 return false;
1099 return SetDecoderMediaTypes();
// Verifies the instantiated MFT supports hardware (DXVA) decoding and
// configures it. Also decides, via |use_dx11_|, whether output should go
// through DX11 rather than D3D9.
bool DXVAVideoDecodeAccelerator::CheckDecoderDxvaSupport() {
  base::win::ScopedComPtr<IMFAttributes> attributes;
  HRESULT hr = decoder_->GetAttributes(attributes.Receive());
  RETURN_ON_HR_FAILURE(hr, "Failed to get decoder attributes", false);

  UINT32 dxva = 0;
  hr = attributes->GetUINT32(MF_SA_D3D_AWARE, &dxva);
  RETURN_ON_HR_FAILURE(hr, "Failed to check if decoder supports DXVA", false);

  if (codec_ == media::kCodecH264) {
    hr = attributes->SetUINT32(CODECAPI_AVDecVideoAcceleration_H264, TRUE);
    RETURN_ON_HR_FAILURE(hr, "Failed to enable DXVA H/W decoding", false);
  }

  // Low latency mode is best-effort: failure is only logged, not fatal.
  hr = attributes->SetUINT32(CODECAPI_AVLowLatencyMode, TRUE);
  if (SUCCEEDED(hr)) {
    DVLOG(1) << "Successfully set Low latency mode on decoder.";
  } else {
    DVLOG(1) << "Failed to set Low latency mode on decoder. Error: " << hr;
  }

  // The decoder should use DX11 iff
  // 1. The underlying H/W decoder supports it.
  // 2. We have a pointer to the MFCreateDXGIDeviceManager function needed for
  //    this. This should always be true for Windows 8+.
  // 3. ANGLE is using DX11.
  DCHECK(gl_context_);
  if (create_dxgi_device_manager_ &&
      (gl_context_->GetGLRenderer().find("Direct3D11") !=
          std::string::npos)) {
    // Failure to read MF_SA_D3D11_AWARE simply leaves |dx11_aware| at 0.
    UINT32 dx11_aware = 0;
    attributes->GetUINT32(MF_SA_D3D11_AWARE, &dx11_aware);
    use_dx11_ = !!dx11_aware;
  }
  return true;
}
// Configures the MFT's input (codec bitstream) type and NV12 output type.
bool DXVAVideoDecodeAccelerator::SetDecoderMediaTypes() {
  RETURN_ON_FAILURE(SetDecoderInputMediaType(),
                    "Failed to set decoder input media type", false);
  return SetDecoderOutputMediaType(MFVideoFormat_NV12);
}
1145 bool DXVAVideoDecodeAccelerator::SetDecoderInputMediaType() {
1146 base::win::ScopedComPtr<IMFMediaType> media_type;
1147 HRESULT hr = MFCreateMediaType(media_type.Receive());
1148 RETURN_ON_HR_FAILURE(hr, "MFCreateMediaType failed", false);
1150 hr = media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
1151 RETURN_ON_HR_FAILURE(hr, "Failed to set major input type", false);
1153 if (codec_ == media::kCodecH264) {
1154 hr = media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264);
1155 } else if (codec_ == media::kCodecVP8) {
1156 hr = media_type->SetGUID(MF_MT_SUBTYPE, MEDIASUBTYPE_VP80);
1157 } else if (codec_ == media::kCodecVP9) {
1158 hr = media_type->SetGUID(MF_MT_SUBTYPE, MEDIASUBTYPE_VP90);
1159 } else {
1160 NOTREACHED();
1161 RETURN_ON_FAILURE(false, "Unsupported codec on input media type.", false);
1163 RETURN_ON_HR_FAILURE(hr, "Failed to set subtype", false);
1165 // Not sure about this. msdn recommends setting this value on the input
1166 // media type.
1167 hr = media_type->SetUINT32(MF_MT_INTERLACE_MODE,
1168 MFVideoInterlace_MixedInterlaceOrProgressive);
1169 RETURN_ON_HR_FAILURE(hr, "Failed to set interlace mode", false);
1171 hr = decoder_->SetInputType(0, media_type.get(), 0); // No flags
1172 RETURN_ON_HR_FAILURE(hr, "Failed to set decoder input type", false);
1173 return true;
// Walks the decoder's available output types and selects the first one whose
// subtype matches |subtype| (e.g. NV12). Returns false if none matches.
bool DXVAVideoDecodeAccelerator::SetDecoderOutputMediaType(
    const GUID& subtype) {
  base::win::ScopedComPtr<IMFMediaType> out_media_type;

  for (uint32 i = 0;
       SUCCEEDED(decoder_->GetOutputAvailableType(0, i,
                                                  out_media_type.Receive()));
       ++i) {
    GUID out_subtype = {0};
    HRESULT hr = out_media_type->GetGUID(MF_MT_SUBTYPE, &out_subtype);
    RETURN_ON_HR_FAILURE(hr, "Failed to get output major type", false);

    if (out_subtype == subtype) {
      hr = decoder_->SetOutputType(0, out_media_type.get(), 0);  // No flags
      RETURN_ON_HR_FAILURE(hr, "Failed to set decoder output type", false);
      return true;
    }
    // Release so the next GetOutputAvailableType() call can re-populate it.
    out_media_type.Release();
  }
  return false;
}
1198 bool DXVAVideoDecodeAccelerator::SendMFTMessage(MFT_MESSAGE_TYPE msg,
1199 int32 param) {
1200 HRESULT hr = decoder_->ProcessMessage(msg, param);
1201 return SUCCEEDED(hr);
// Gets the minimum buffer sizes for input and output samples. The MFT will not
// allocate buffer for input nor output, so we have to do it ourselves and make
// sure they're the correct size. We only provide decoding if DXVA is enabled.
bool DXVAVideoDecodeAccelerator::GetStreamsInfoAndBufferReqs() {
  HRESULT hr = decoder_->GetInputStreamInfo(0, &input_stream_info_);
  RETURN_ON_HR_FAILURE(hr, "Failed to get input stream info", false);

  hr = decoder_->GetOutputStreamInfo(0, &output_stream_info_);
  RETURN_ON_HR_FAILURE(hr, "Failed to get decoder output stream info", false);

  DVLOG(1) << "Input stream info: ";
  DVLOG(1) << "Max latency: " << input_stream_info_.hnsMaxLatency;
  if (codec_ == media::kCodecH264) {
    // There should be three flags, one for requiring a whole frame be in a
    // single sample, one for requiring there be one buffer only in a single
    // sample, and one that specifies a fixed sample size. (as in cbSize)
    CHECK_EQ(input_stream_info_.dwFlags, 0x7u);
  }

  DVLOG(1) << "Min buffer size: " << input_stream_info_.cbSize;
  DVLOG(1) << "Max lookahead: " << input_stream_info_.cbMaxLookahead;
  DVLOG(1) << "Alignment: " << input_stream_info_.cbAlignment;

  DVLOG(1) << "Output stream info: ";
  // The flags here should be the same and mean the same thing, except when
  // DXVA is enabled, there is an extra 0x100 flag meaning decoder will
  // allocate its own sample.
  DVLOG(1) << "Flags: "
           << std::hex << std::showbase << output_stream_info_.dwFlags;
  if (codec_ == media::kCodecH264) {
    CHECK_EQ(output_stream_info_.dwFlags, 0x107u);
  }
  DVLOG(1) << "Min buffer size: " << output_stream_info_.cbSize;
  DVLOG(1) << "Alignment: " << output_stream_info_.cbAlignment;
  return true;
}
1241 void DXVAVideoDecodeAccelerator::DoDecode() {
1242 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
1243 // This function is also called from FlushInternal in a loop which could
1244 // result in the state transitioning to kStopped due to no decoded output.
1245 State state = GetState();
1246 RETURN_AND_NOTIFY_ON_FAILURE(
1247 (state == kNormal || state == kFlushing || state == kStopped),
1248 "DoDecode: not in normal/flushing/stopped state", ILLEGAL_STATE,);
1250 MFT_OUTPUT_DATA_BUFFER output_data_buffer = {0};
1251 DWORD status = 0;
1253 HRESULT hr = decoder_->ProcessOutput(0, // No flags
1254 1, // # of out streams to pull from
1255 &output_data_buffer,
1256 &status);
1257 IMFCollection* events = output_data_buffer.pEvents;
1258 if (events != NULL) {
1259 DVLOG(1) << "Got events from ProcessOuput, but discarding";
1260 events->Release();
1262 if (FAILED(hr)) {
1263 // A stream change needs further ProcessInput calls to get back decoder
1264 // output which is why we need to set the state to stopped.
1265 if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
1266 if (!SetDecoderOutputMediaType(MFVideoFormat_NV12)) {
1267 // Decoder didn't let us set NV12 output format. Not sure as to why
1268 // this can happen. Give up in disgust.
1269 NOTREACHED() << "Failed to set decoder output media type to NV12";
1270 SetState(kStopped);
1271 } else {
1272 DVLOG(1) << "Received output format change from the decoder."
1273 " Recursively invoking DoDecode";
1274 DoDecode();
1276 return;
1277 } else if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
1278 // No more output from the decoder. Stop playback.
1279 SetState(kStopped);
1280 return;
1281 } else {
1282 NOTREACHED() << "Unhandled error in DoDecode()";
1283 return;
1286 TRACE_EVENT_END_ETW("DXVAVideoDecodeAccelerator.Decoding", this, "");
1288 TRACE_COUNTER1("DXVA Decoding", "TotalPacketsBeforeDecode",
1289 inputs_before_decode_);
1291 inputs_before_decode_ = 0;
1293 RETURN_AND_NOTIFY_ON_FAILURE(ProcessOutputSample(output_data_buffer.pSample),
1294 "Failed to process output sample.", PLATFORM_FAILURE,);
// Queues the freshly decoded |sample| for delivery to the client. The first
// sample triggers a RequestPictureBuffers() round trip; after that, samples
// are matched to available picture buffers via ProcessPendingSamples().
bool DXVAVideoDecodeAccelerator::ProcessOutputSample(IMFSample* sample) {
  RETURN_ON_FAILURE(sample, "Decode succeeded with NULL output sample", false);

  // The sample time carries the id of the input bitstream buffer that
  // produced this output (presumably set when the sample was submitted —
  // DecodeInternal reads it the same way).
  LONGLONG input_buffer_id = 0;
  RETURN_ON_HR_FAILURE(sample->GetSampleTime(&input_buffer_id),
                       "Failed to get input buffer id associated with sample",
                       false);

  {
    base::AutoLock lock(decoder_lock_);
    // Only one output sample may be outstanding at a time.
    DCHECK(pending_output_samples_.empty());
    pending_output_samples_.push_back(
        PendingSampleInfo(input_buffer_id, sample));
  }

  if (pictures_requested_) {
    DVLOG(1) << "Waiting for picture slots from the client.";
    main_thread_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&DXVAVideoDecodeAccelerator::ProcessPendingSamples,
                   weak_this_factory_.GetWeakPtr()));
    return true;
  }

  int width = 0;
  int height = 0;
  if (!GetVideoFrameDimensions(sample, &width, &height)) {
    RETURN_ON_FAILURE(false, "Failed to get D3D surface from output sample",
                      false);
  }

  // Go ahead and request picture buffers.
  main_thread_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::RequestPictureBuffers,
                 weak_this_factory_.GetWeakPtr(),
                 width,
                 height));

  pictures_requested_ = true;
  return true;
}
1340 void DXVAVideoDecodeAccelerator::ProcessPendingSamples() {
1341 DCHECK(main_thread_task_runner_->BelongsToCurrentThread());
1343 if (!output_picture_buffers_.size())
1344 return;
1346 RETURN_AND_NOTIFY_ON_FAILURE(make_context_current_.Run(),
1347 "Failed to make context current", PLATFORM_FAILURE,);
1349 OutputBuffers::iterator index;
1351 for (index = output_picture_buffers_.begin();
1352 index != output_picture_buffers_.end() &&
1353 OutputSamplesPresent();
1354 ++index) {
1355 if (index->second->available()) {
1356 PendingSampleInfo* pending_sample = NULL;
1358 base::AutoLock lock(decoder_lock_);
1360 PendingSampleInfo& sample_info = pending_output_samples_.front();
1361 if (sample_info.picture_buffer_id != -1)
1362 continue;
1363 pending_sample = &sample_info;
1366 int width = 0;
1367 int height = 0;
1368 if (!GetVideoFrameDimensions(pending_sample->output_sample.get(),
1369 &width, &height)) {
1370 RETURN_AND_NOTIFY_ON_FAILURE(false,
1371 "Failed to get D3D surface from output sample", PLATFORM_FAILURE,);
1374 if (width != index->second->size().width() ||
1375 height != index->second->size().height()) {
1376 HandleResolutionChanged(width, height);
1377 return;
1380 base::win::ScopedComPtr<IMFMediaBuffer> output_buffer;
1381 HRESULT hr = pending_sample->output_sample->GetBufferByIndex(
1382 0, output_buffer.Receive());
1383 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
1384 "Failed to get buffer from output sample", PLATFORM_FAILURE,);
1386 base::win::ScopedComPtr<IDirect3DSurface9> surface;
1387 base::win::ScopedComPtr<ID3D11Texture2D> d3d11_texture;
1389 if (use_dx11_) {
1390 base::win::ScopedComPtr<IMFDXGIBuffer> dxgi_buffer;
1391 hr = dxgi_buffer.QueryFrom(output_buffer.get());
1392 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
1393 "Failed to get DXGIBuffer from output sample", PLATFORM_FAILURE,);
1394 hr = dxgi_buffer->GetResource(
1395 __uuidof(ID3D11Texture2D),
1396 reinterpret_cast<void**>(d3d11_texture.Receive()));
1397 } else {
1398 hr = MFGetService(output_buffer.get(), MR_BUFFER_SERVICE,
1399 IID_PPV_ARGS(surface.Receive()));
1401 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
1402 "Failed to get surface from output sample", PLATFORM_FAILURE,);
1404 pending_sample->picture_buffer_id = index->second->id();
1406 RETURN_AND_NOTIFY_ON_FAILURE(
1407 index->second->CopyOutputSampleDataToPictureBuffer(
1408 this,
1409 surface.get(),
1410 d3d11_texture.get(),
1411 pending_sample->input_buffer_id),
1412 "Failed to copy output sample", PLATFORM_FAILURE,);
1414 index->second->set_available(false);
// Reports |error| to the client — always from the main thread, bouncing via
// PostTask when called elsewhere — and tears the decoder down unless it was
// never initialized.
void DXVAVideoDecodeAccelerator::StopOnError(
    media::VideoDecodeAccelerator::Error error) {
  if (!main_thread_task_runner_->BelongsToCurrentThread()) {
    main_thread_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&DXVAVideoDecodeAccelerator::StopOnError,
                   weak_this_factory_.GetWeakPtr(),
                   error));
    return;
  }

  if (client_)
    client_->NotifyError(error);
  // Clear the client so no further callbacks are issued after the error.
  client_ = NULL;

  if (GetState() != kUninitialized) {
    Invalidate();
  }
}
// Releases every decoder/device resource and returns to kUninitialized.
// No-op when already uninitialized, so it is safe to call repeatedly.
void DXVAVideoDecodeAccelerator::Invalidate() {
  if (GetState() == kUninitialized)
    return;
  decoder_thread_.Stop();
  // Cancels all posted-but-unrun main-thread callbacks bound to this object.
  weak_this_factory_.InvalidateWeakPtrs();
  output_picture_buffers_.clear();
  stale_output_picture_buffers_.clear();
  pending_output_samples_.clear();
  pending_input_buffers_.clear();
  decoder_.Release();

  if (use_dx11_) {
    if (video_format_converter_mft_.get()) {
      // Tell the converter MFT streaming is over before releasing it.
      video_format_converter_mft_->ProcessMessage(
          MFT_MESSAGE_NOTIFY_END_STREAMING, 0);
      video_format_converter_mft_.Release();
    }
    d3d11_device_context_.Release();
    d3d11_device_.Release();
    d3d11_device_manager_.Release();
    d3d11_query_.Release();
    dx11_video_format_converter_media_type_needs_init_ = true;
  } else {
    d3d9_.Release();
    d3d9_device_ex_.Release();
    device_manager_.Release();
    query_.Release();
  }

  SetState(kUninitialized);
}
// Tells the client the given bitstream buffer has been consumed, so it can
// submit more input.
void DXVAVideoDecodeAccelerator::NotifyInputBufferRead(int input_buffer_id) {
  DCHECK(main_thread_task_runner_->BelongsToCurrentThread());
  if (client_)
    client_->NotifyEndOfBitstreamBuffer(input_buffer_id);
}
// Completes a client Flush(): clears the pending flush/drain bookkeeping and
// invokes the client's callback. No-op when no flush is pending.
void DXVAVideoDecodeAccelerator::NotifyFlushDone() {
  DCHECK(main_thread_task_runner_->BelongsToCurrentThread());
  if (client_ && pending_flush_) {
    pending_flush_ = false;
    {
      base::AutoLock lock(decoder_lock_);
      // Allow the next flush to send a fresh DRAIN message.
      sent_drain_message_ = false;
    }

    client_->NotifyFlushDone();
  }
}
// Signals completion of a client Reset().
void DXVAVideoDecodeAccelerator::NotifyResetDone() {
  DCHECK(main_thread_task_runner_->BelongsToCurrentThread());
  if (client_)
    client_->NotifyResetDone();
}
1496 void DXVAVideoDecodeAccelerator::RequestPictureBuffers(int width, int height) {
1497 DCHECK(main_thread_task_runner_->BelongsToCurrentThread());
1498 // This task could execute after the decoder has been torn down.
1499 if (GetState() != kUninitialized && client_) {
1500 client_->ProvidePictureBuffers(
1501 kNumPictureBuffers,
1502 gfx::Size(width, height),
1503 GL_TEXTURE_2D);
1507 void DXVAVideoDecodeAccelerator::NotifyPictureReady(
1508 int picture_buffer_id,
1509 int input_buffer_id,
1510 const gfx::Rect& picture_buffer_size) {
1511 DCHECK(main_thread_task_runner_->BelongsToCurrentThread());
1512 // This task could execute after the decoder has been torn down.
1513 if (GetState() != kUninitialized && client_) {
1514 media::Picture picture(picture_buffer_id, input_buffer_id,
1515 picture_buffer_size, false);
1516 client_->PictureReady(picture);
1520 void DXVAVideoDecodeAccelerator::NotifyInputBuffersDropped() {
1521 DCHECK(main_thread_task_runner_->BelongsToCurrentThread());
1522 if (!client_)
1523 return;
1525 for (PendingInputs::iterator it = pending_input_buffers_.begin();
1526 it != pending_input_buffers_.end(); ++it) {
1527 LONGLONG input_buffer_id = 0;
1528 RETURN_ON_HR_FAILURE((*it)->GetSampleTime(&input_buffer_id),
1529 "Failed to get buffer id associated with sample",);
1530 client_->NotifyEndOfBitstreamBuffer(input_buffer_id);
1532 pending_input_buffers_.clear();
1535 void DXVAVideoDecodeAccelerator::DecodePendingInputBuffers() {
1536 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
1537 State state = GetState();
1538 RETURN_AND_NOTIFY_ON_FAILURE((state != kUninitialized),
1539 "Invalid state: " << state, ILLEGAL_STATE,);
1541 if (pending_input_buffers_.empty() || OutputSamplesPresent())
1542 return;
1544 PendingInputs pending_input_buffers_copy;
1545 std::swap(pending_input_buffers_, pending_input_buffers_copy);
1547 for (PendingInputs::iterator it = pending_input_buffers_copy.begin();
1548 it != pending_input_buffers_copy.end(); ++it) {
1549 DecodeInternal(*it);
// Decoder-thread half of Flush(): waits for the in-flight output to clear,
// sends a single DRAIN message, then pulls remaining frames with DoDecode()
// until the decoder is empty, finally notifying the client on the main thread.
void DXVAVideoDecodeAccelerator::FlushInternal() {
  DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());

  // We allow only one output frame to be present at any given time. If we have
  // an output frame, then we cannot complete the flush at this time.
  if (OutputSamplesPresent())
    return;

  // First drain the pending input because once the drain message is sent below,
  // the decoder will ignore further input until it's drained.
  if (!pending_input_buffers_.empty()) {
    decoder_thread_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&DXVAVideoDecodeAccelerator::DecodePendingInputBuffers,
                   base::Unretained(this)));
    // Retry the flush after the pending input has been submitted.
    decoder_thread_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal,
                   base::Unretained(this)));
    return;
  }

  {
    base::AutoLock lock(decoder_lock_);
    // Send the drain message only once per flush; reset in NotifyFlushDone.
    if (!sent_drain_message_) {
      RETURN_AND_NOTIFY_ON_FAILURE(SendMFTMessage(MFT_MESSAGE_COMMAND_DRAIN, 0),
                                   "Failed to send drain message",
                                   PLATFORM_FAILURE,);
      sent_drain_message_ = true;
    }
  }

  // Attempt to retrieve an output frame from the decoder. If we have one,
  // return and proceed when the output frame is processed. If we don't have a
  // frame then we are done.
  DoDecode();
  if (OutputSamplesPresent())
    return;

  SetState(kFlushing);

  main_thread_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::NotifyFlushDone,
                 weak_this_factory_.GetWeakPtr()));

  SetState(kNormal);
}
// Decoder-thread submission of one input sample. Queues the sample if output
// is still outstanding, otherwise feeds it to the MFT — handling the
// MF_E_NOTACCEPTING back-pressure protocol — and finally reports the input
// buffer as consumed to the client.
void DXVAVideoDecodeAccelerator::DecodeInternal(
    const base::win::ScopedComPtr<IMFSample>& sample) {
  DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());

  if (GetState() == kUninitialized)
    return;

  // Only one output sample may be outstanding; queue further input until the
  // current output has been copied out.
  if (OutputSamplesPresent() || !pending_input_buffers_.empty()) {
    pending_input_buffers_.push_back(sample);
    return;
  }

  if (!inputs_before_decode_) {
    TRACE_EVENT_BEGIN_ETW("DXVAVideoDecodeAccelerator.Decoding", this, "");
  }
  inputs_before_decode_++;

  HRESULT hr = decoder_->ProcessInput(0, sample.get(), 0);
  // As per msdn if the decoder returns MF_E_NOTACCEPTING then it means that it
  // has enough data to produce one or more output samples. In this case the
  // recommended options are to
  // 1. Generate new output by calling IMFTransform::ProcessOutput until it
  //    returns MF_E_TRANSFORM_NEED_MORE_INPUT.
  // 2. Flush the input data
  // We implement the first option, i.e to retrieve the output sample and then
  // process the input again. Failure in either of these steps is treated as a
  // decoder failure.
  if (hr == MF_E_NOTACCEPTING) {
    DoDecode();
    // If the DoDecode call resulted in an output frame then we should not
    // process any more input until that frame is copied to the target surface.
    if (!OutputSamplesPresent()) {
      State state = GetState();
      RETURN_AND_NOTIFY_ON_FAILURE((state == kStopped || state == kNormal ||
                                    state == kFlushing),
          "Failed to process output. Unexpected decoder state: " << state,
          PLATFORM_FAILURE,);
      hr = decoder_->ProcessInput(0, sample.get(), 0);
    }
    // If we continue to get the MF_E_NOTACCEPTING error we do the following:-
    // 1. Add the input sample to the pending queue.
    // 2. If we don't have any output samples we post the
    //    DecodePendingInputBuffers task to process the pending input samples.
    //    If we have an output sample then the above task is posted when the
    //    output samples are sent to the client.
    // This is because we only support 1 pending output sample at any
    // given time due to the limitation with the Microsoft media foundation
    // decoder where it recycles the output Decoder surfaces.
    if (hr == MF_E_NOTACCEPTING) {
      pending_input_buffers_.push_back(sample);
      decoder_thread_task_runner_->PostTask(
          FROM_HERE,
          base::Bind(&DXVAVideoDecodeAccelerator::DecodePendingInputBuffers,
                     base::Unretained(this)));
      return;
    }
  }
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to process input sample",
                                  PLATFORM_FAILURE,);

  DoDecode();

  State state = GetState();
  RETURN_AND_NOTIFY_ON_FAILURE((state == kStopped || state == kNormal ||
                                state == kFlushing),
      "Failed to process output. Unexpected decoder state: " << state,
      ILLEGAL_STATE,);

  LONGLONG input_buffer_id = 0;
  RETURN_ON_HR_FAILURE(sample->GetSampleTime(&input_buffer_id),
                       "Failed to get input buffer id associated with sample",);
  // The Microsoft Media foundation decoder internally buffers up to 30 frames
  // before returning a decoded frame. We need to inform the client that this
  // input buffer is processed as it may stop sending us further input.
  // Note: This may break clients which expect every input buffer to be
  // associated with a decoded output buffer.
  // TODO(ananta)
  // Do some more investigation into whether it is possible to get the MFT
  // decoder to emit an output packet for every input packet.
  // http://code.google.com/p/chromium/issues/detail?id=108121
  // http://code.google.com/p/chromium/issues/detail?id=150925
  main_thread_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::NotifyInputBufferRead,
                 weak_this_factory_.GetWeakPtr(),
                 input_buffer_id));
}
// Responds to a mid-stream resolution change: dismisses the current picture
// buffers on the main thread, then requests a fresh set at |width|x|height|.
void DXVAVideoDecodeAccelerator::HandleResolutionChanged(int width,
                                                         int height) {
  // The DX11 format converter media type must be re-initialized for the new
  // dimensions.
  dx11_video_format_converter_media_type_needs_init_ = true;

  main_thread_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::DismissStaleBuffers,
                 weak_this_factory_.GetWeakPtr()));

  main_thread_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::RequestPictureBuffers,
                 weak_this_factory_.GetWeakPtr(),
                 width,
                 height));
}
// After a resolution change: picture buffers not in use are dismissed
// immediately; in-use ones are parked in |stale_output_picture_buffers_| and
// dismissed later via DeferredDismissStaleBuffer when the client recycles
// them.
void DXVAVideoDecodeAccelerator::DismissStaleBuffers() {
  OutputBuffers::iterator index;

  for (index = output_picture_buffers_.begin();
       index != output_picture_buffers_.end();
       ++index) {
    if (index->second->available()) {
      DVLOG(1) << "Dismissing picture id: " << index->second->id();
      // NOTE(review): unlike the other client callbacks, |client_| is not
      // null-checked here; this task is bound through a weak pointer, which
      // presumably keeps it from running after teardown — confirm.
      client_->DismissPictureBuffer(index->second->id());
    } else {
      // Move to |stale_output_picture_buffers_| for deferred deletion.
      stale_output_picture_buffers_.insert(
          std::make_pair(index->first, index->second));
    }
  }
  output_picture_buffers_.clear();
}
// Dismisses a picture buffer that survived a resolution change because the
// client was still displaying it; invoked once the client recycles it.
void DXVAVideoDecodeAccelerator::DeferredDismissStaleBuffer(
    int32 picture_buffer_id) {
  OutputBuffers::iterator it = stale_output_picture_buffers_.find(
      picture_buffer_id);
  DCHECK(it != stale_output_picture_buffers_.end());
  DVLOG(1) << "Dismissing picture id: " << it->second->id();
  client_->DismissPictureBuffer(it->second->id());
  stale_output_picture_buffers_.erase(it);
}
// Thread-safe read of |state_|: InterlockedAdd with 0 serves as an atomic
// load (the static_assert guarantees State fits the long it is punned to).
DXVAVideoDecodeAccelerator::State
DXVAVideoDecodeAccelerator::GetState() {
  static_assert(sizeof(State) == sizeof(long), "mismatched type sizes");
  State state = static_cast<State>(
      InterlockedAdd(reinterpret_cast<volatile long*>(&state_), 0));
  return state;
}
// Thread-safe write of |state_|. State is only mutated on the main thread;
// calls from other threads are bounced there via PostTask.
void DXVAVideoDecodeAccelerator::SetState(State new_state) {
  if (!main_thread_task_runner_->BelongsToCurrentThread()) {
    main_thread_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&DXVAVideoDecodeAccelerator::SetState,
                   weak_this_factory_.GetWeakPtr(),
                   new_state));
    return;
  }

  static_assert(sizeof(State) == sizeof(long), "mismatched type sizes");
  // Atomic store so GetState() readers on other threads see a whole value.
  ::InterlockedExchange(reinterpret_cast<volatile long*>(&state_),
                        new_state);
  DCHECK_EQ(state_, new_state);
}
// (Re)starts the decoder thread — COM initialized per init_com_with_mta(false)
// — and caches its task runner.
void DXVAVideoDecodeAccelerator::StartDecoderThread() {
  decoder_thread_.init_com_with_mta(false);
  decoder_thread_.Start();
  decoder_thread_task_runner_ = decoder_thread_.task_runner();
}
// Returns true while a decoded frame is awaiting copy/display. Lock-guarded
// because the list is touched from both the main and decoder threads.
bool DXVAVideoDecodeAccelerator::OutputSamplesPresent() {
  base::AutoLock lock(decoder_lock_);
  return !pending_output_samples_.empty();
}
// D3D9 path: copies (and colorspace-converts via StretchRect) the decoded
// |src_surface| into |dest_surface|. Always runs on the decoder thread,
// re-posting itself there if called from another thread; completion is polled
// via FlushDecoder().
void DXVAVideoDecodeAccelerator::CopySurface(IDirect3DSurface9* src_surface,
                                             IDirect3DSurface9* dest_surface,
                                             int picture_buffer_id,
                                             int input_buffer_id) {
  if (!decoder_thread_task_runner_->BelongsToCurrentThread()) {
    decoder_thread_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&DXVAVideoDecodeAccelerator::CopySurface,
                   base::Unretained(this),
                   src_surface,
                   dest_surface,
                   picture_buffer_id,
                   input_buffer_id));
    return;
  }

  // StretchRect performs the colorspace conversion on the GPU.
  HRESULT hr = d3d9_device_ex_->StretchRect(src_surface, NULL, dest_surface,
                                            NULL, D3DTEXF_NONE);
  RETURN_ON_HR_FAILURE(hr, "Colorspace conversion via StretchRect failed",);

  // Ideally, this should be done immediately before the draw call that uses
  // the texture. Flush it once here though.
  hr = query_->Issue(D3DISSUE_END);
  RETURN_ON_HR_FAILURE(hr, "Failed to issue END",);

  // Flush the decoder device to ensure that the decoded frame is copied to the
  // target surface.
  decoder_thread_task_runner_->PostDelayedTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::FlushDecoder,
                 base::Unretained(this), 0, src_surface, dest_surface,
                 picture_buffer_id, input_buffer_id),
      base::TimeDelta::FromMilliseconds(kFlushDecoderSurfaceTimeoutMs));
}
// Main-thread completion of a surface/texture copy: hands the filled picture
// buffer to the client, pops the pending output sample, and unblocks the
// decoder thread (continuing a flush if one is pending).
void DXVAVideoDecodeAccelerator::CopySurfaceComplete(
    IDirect3DSurface9* src_surface,
    IDirect3DSurface9* dest_surface,
    int picture_buffer_id,
    int input_buffer_id) {
  DCHECK(main_thread_task_runner_->BelongsToCurrentThread());

  // The output buffers may have changed in the following scenarios:-
  // 1. A resolution change.
  // 2. Decoder instance was destroyed.
  // Ignore copy surface notifications for such buffers.
  OutputBuffers::iterator it = output_picture_buffers_.find(picture_buffer_id);
  if (it == output_picture_buffers_.end())
    return;

  // If the picture buffer is marked as available it probably means that there
  // was a Reset operation which dropped the output frame.
  DXVAPictureBuffer* picture_buffer = it->second.get();
  if (picture_buffer->available())
    return;

  RETURN_AND_NOTIFY_ON_FAILURE(make_context_current_.Run(),
      "Failed to make context current", PLATFORM_FAILURE,);

  DCHECK(!output_picture_buffers_.empty());

  picture_buffer->CopySurfaceComplete(src_surface,
                                      dest_surface);

  NotifyPictureReady(picture_buffer->id(),
                     input_buffer_id,
                     gfx::Rect(picture_buffer->size()));

  {
    base::AutoLock lock(decoder_lock_);
    // The output sample is fully delivered; allow the next one.
    if (!pending_output_samples_.empty())
      pending_output_samples_.pop_front();
  }

  if (pending_flush_) {
    // A flush was waiting on this output; resume it on the decoder thread.
    decoder_thread_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal,
                   base::Unretained(this)));
    return;
  }
  decoder_thread_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::DecodePendingInputBuffers,
                 base::Unretained(this)));
}
// DX11-only counterpart of CopySurface: converts the decoder's NV12 output
// texture to ARGB and copies it into the ANGLE-owned |dest_texture| via the
// video processor MFT. Must run on the decoder thread; when called on any
// other thread it grabs the front pending output sample (under the lock) and
// re-posts itself. Completion is detected asynchronously via FlushDecoder
// polling the D3D11 event query issued at the end of this function.
1859 void DXVAVideoDecodeAccelerator::CopyTexture(ID3D11Texture2D* src_texture,
1860 ID3D11Texture2D* dest_texture,
1861 IMFSample* video_frame,
1862 int picture_buffer_id,
1863 int input_buffer_id) {
1864 HRESULT hr = E_FAIL;
1866 DCHECK(use_dx11_);
1868 if (!decoder_thread_task_runner_->BelongsToCurrentThread()) {
1869 // The media foundation H.264 decoder outputs YUV12 textures which we
1870 // cannot copy into ANGLE as they expect ARGB textures. In D3D land
1871 // the StretchRect API in the IDirect3DDevice9Ex interface did the color
1872 // space conversion for us. Sadly in DX11 land the API does not provide
1873 // a straightforward way to do this.
1874 // We use the video processor MFT.
1875 // https://msdn.microsoft.com/en-us/library/hh162913(v=vs.85).aspx
1876 // This object implements a media foundation transform (IMFTransform)
1877 // which follows the same contract as the decoder. The color space
1878 // conversion as per msdn is done in the GPU.
1880 D3D11_TEXTURE2D_DESC source_desc;
1881 src_texture->GetDesc(&source_desc);
1883 // Set up the input and output types for the video processor MFT.
1884 if (!InitializeDX11VideoFormatConverterMediaType(source_desc.Width,
1885 source_desc.Height)) {
1886 RETURN_AND_NOTIFY_ON_FAILURE(
1887 false, "Failed to initialize media types for convesion.",
1888 PLATFORM_FAILURE,);
1891 // The input to the video processor is the output sample.
1892 base::win::ScopedComPtr<IMFSample> input_sample_for_conversion;
// Scoped lock: only the front-of-queue read needs protection; the sample
// is handed to the re-posted task as a detached (raw, owned) pointer.
1894 base::AutoLock lock(decoder_lock_);
1895 PendingSampleInfo& sample_info = pending_output_samples_.front();
1896 input_sample_for_conversion = sample_info.output_sample;
1899 decoder_thread_task_runner_->PostTask(
1900 FROM_HERE,
1901 base::Bind(&DXVAVideoDecodeAccelerator::CopyTexture,
1902 base::Unretained(this),
1903 src_texture,
1904 dest_texture,
1905 input_sample_for_conversion.Detach(),
1906 picture_buffer_id,
1907 input_buffer_id));
1908 return;
// Decoder-thread path: |video_frame| is the detached sample from above;
// re-attach it so its reference is released when we return.
1911 DCHECK(video_frame);
1913 base::win::ScopedComPtr<IMFSample> input_sample;
1914 input_sample.Attach(video_frame);
1916 DCHECK(video_format_converter_mft_.get());
1918 // d3d11_device_context_->Begin(d3d11_query_.get());
1920 hr = video_format_converter_mft_->ProcessInput(0, video_frame, 0);
1921 if (FAILED(hr)) {
1922 DCHECK(false);
1923 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
1924 "Failed to convert output sample format.", PLATFORM_FAILURE,);
1927 // The video processor MFT requires output samples to be allocated by the
1928 // caller. We create a sample with a buffer backed with the ID3D11Texture2D
1929 // interface exposed by ANGLE. This works nicely as this ensures that the
1930 // video processor coverts the color space of the output frame and copies
1931 // the result into the ANGLE texture.
1932 base::win::ScopedComPtr<IMFSample> output_sample;
1933 hr = MFCreateSample(output_sample.Receive());
1934 if (FAILED(hr)) {
1935 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
1936 "Failed to create output sample.", PLATFORM_FAILURE,);
1939 base::win::ScopedComPtr<IMFMediaBuffer> output_buffer;
1940 hr = MFCreateDXGISurfaceBuffer(
1941 __uuidof(ID3D11Texture2D), dest_texture, 0, FALSE,
1942 output_buffer.Receive());
1943 if (FAILED(hr)) {
1944 base::debug::Alias(&hr);
1945 // TODO(ananta)
1946 // Remove this CHECK when the change to use DX11 for H/W decoding
1947 // stabilizes.
1948 CHECK(false);
1949 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
1950 "Failed to create output sample.", PLATFORM_FAILURE,);
1953 output_sample->AddBuffer(output_buffer.get());
1955 DWORD status = 0;
1956 MFT_OUTPUT_DATA_BUFFER format_converter_output = {};
1957 format_converter_output.pSample = output_sample.get();
1958 hr = video_format_converter_mft_->ProcessOutput(
1959 0, // No flags
1960 1, // # of out streams to pull from
1961 &format_converter_output,
1962 &status);
// Flush the conversion work and issue an event query; FlushDecoder below
// polls the query (via GetData) to learn when the GPU copy has finished.
1964 d3d11_device_context_->Flush();
1965 d3d11_device_context_->End(d3d11_query_.get());
1967 if (FAILED(hr)) {
1968 base::debug::Alias(&hr);
1969 // TODO(ananta)
1970 // Remove this CHECK when the change to use DX11 for H/W decoding
1971 // stabilizes.
1972 CHECK(false);
1973 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
1974 "Failed to convert output sample format.", PLATFORM_FAILURE,);
// No D3D9 surfaces are involved on this path, hence the NULL surface args.
1977 decoder_thread_task_runner_->PostDelayedTask(
1978 FROM_HERE,
1979 base::Bind(&DXVAVideoDecodeAccelerator::FlushDecoder,
1980 base::Unretained(this), 0,
1981 reinterpret_cast<IDirect3DSurface9*>(NULL),
1982 reinterpret_cast<IDirect3DSurface9*>(NULL),
1983 picture_buffer_id, input_buffer_id),
1984 base::TimeDelta::FromMilliseconds(
1985 kFlushDecoderSurfaceTimeoutMs));
// Polls the GPU (decoder-thread only) until the surface/texture copy issued
// by CopySurface/CopyTexture is observed complete, then notifies the main
// thread via CopySurfaceComplete. |iterations| counts the retries so far;
// on the D3D9 path the function re-posts itself (with a delay) while the
// flush query still returns S_FALSE, capped at kMaxIterationsForD3DFlush.
1988 void DXVAVideoDecodeAccelerator::FlushDecoder(
1989 int iterations,
1990 IDirect3DSurface9* src_surface,
1991 IDirect3DSurface9* dest_surface,
1992 int picture_buffer_id,
1993 int input_buffer_id) {
1994 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
1996 // The DXVA decoder has its own device which it uses for decoding. ANGLE
1997 // has its own device which we don't have access to.
1998 // The above code attempts to copy the decoded picture into a surface
1999 // which is owned by ANGLE. As there are multiple devices involved in
2000 // this, the StretchRect call above is not synchronous.
2001 // We attempt to flush the batched operations to ensure that the picture is
2002 // copied to the surface owned by ANGLE.
2003 // We need to do this in a loop and call flush multiple times.
2004 // We have seen the GetData call for flushing the command buffer fail to
2005 // return success occasionally on multi core machines, leading to an
2006 // infinite loop.
2007 // Workaround is to have an upper limit of 4 on the number of iterations to
2008 // wait for the Flush to finish.
2009 HRESULT hr = E_FAIL;
2011 if (use_dx11_) {
2012 BOOL query_data = 0;
2013 hr = d3d11_device_context_->GetData(d3d11_query_.get(), &query_data,
2014 sizeof(BOOL), 0);
2015 if (FAILED(hr)) {
2016 base::debug::Alias(&hr);
2017 // TODO(ananta)
2018 // Remove this CHECK when the change to use DX11 for H/W decoding
2019 // stabilizes.
2020 CHECK(false);
2022 } else {
2023 hr = query_->GetData(NULL, 0, D3DGETDATA_FLUSH);
// S_FALSE means the GPU has not finished yet: retry after a delay until
// the iteration cap is hit, after which we proceed regardless.
2025 if ((hr == S_FALSE) && (++iterations < kMaxIterationsForD3DFlush)) {
2026 decoder_thread_task_runner_->PostDelayedTask(
2027 FROM_HERE,
2028 base::Bind(&DXVAVideoDecodeAccelerator::FlushDecoder,
2029 base::Unretained(this), iterations, src_surface,
2030 dest_surface, picture_buffer_id, input_buffer_id),
2031 base::TimeDelta::FromMilliseconds(kFlushDecoderSurfaceTimeoutMs));
2032 return;
// Weak pointer: the accelerator may be destroyed on the main thread before
// this task runs, in which case the completion callback is simply dropped.
2035 main_thread_task_runner_->PostTask(
2036 FROM_HERE,
2037 base::Bind(&DXVAVideoDecodeAccelerator::CopySurfaceComplete,
2038 weak_this_factory_.GetWeakPtr(),
2039 src_surface,
2040 dest_surface,
2041 picture_buffer_id,
2042 input_buffer_id));
// Configures the video processor MFT used by CopyTexture for NV12 -> ARGB32
// conversion: attaches the DX11 device manager, sets the NV12 input type for
// |width| x |height|, then walks the MFT's available output types looking for
// ARGB32 and starts streaming once it is set. Idempotent: guarded by
// dx11_video_format_converter_media_type_needs_init_, which is cleared on
// success (and presumably re-set elsewhere on a resolution change -- the
// setter is outside this chunk). Returns true on success.
2045 bool DXVAVideoDecodeAccelerator::InitializeDX11VideoFormatConverterMediaType(
2046 int width, int height) {
2047 if (!dx11_video_format_converter_media_type_needs_init_)
2048 return true;
2050 CHECK(video_format_converter_mft_.get());
2052 HRESULT hr = video_format_converter_mft_->ProcessMessage(
2053 MFT_MESSAGE_SET_D3D_MANAGER,
2054 reinterpret_cast<ULONG_PTR>(
2055 d3d11_device_manager_.get()));
2057 if (FAILED(hr)) {
2058 base::debug::Alias(&hr);
2059 // TODO(ananta)
2060 // Remove this CHECK when the change to use DX11 for H/W decoding
2061 // stabilizes.
2062 CHECK(false);
2064 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
2065 "Failed to initialize video format converter", PLATFORM_FAILURE, false);
// End any previous streaming session before renegotiating media types
// (e.g. after a resolution change). Return value intentionally ignored.
2067 video_format_converter_mft_->ProcessMessage(
2068 MFT_MESSAGE_NOTIFY_END_STREAMING, 0);
2070 base::win::ScopedComPtr<IMFMediaType> media_type;
2071 hr = MFCreateMediaType(media_type.Receive());
2072 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "MFCreateMediaType failed",
2073 PLATFORM_FAILURE, false);
2075 hr = media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
2076 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set major input type",
2077 PLATFORM_FAILURE, false);
2079 hr = media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_NV12);
2080 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set input sub type",
2081 PLATFORM_FAILURE, false);
2083 hr = media_type->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);
2084 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
2085 "Failed to set attributes on media type", PLATFORM_FAILURE, false);
2087 hr = media_type->SetUINT32(MF_MT_INTERLACE_MODE,
2088 MFVideoInterlace_Progressive);
2089 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
2090 "Failed to set attributes on media type", PLATFORM_FAILURE, false);
2092 base::win::ScopedComPtr<IMFAttributes> converter_attributes;
2093 hr = video_format_converter_mft_->GetAttributes(
2094 converter_attributes.Receive());
2095 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to get converter attributes",
2096 PLATFORM_FAILURE, false);
2098 hr = converter_attributes->SetUINT32(MF_XVP_PLAYBACK_MODE, TRUE);
2099 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set converter attributes",
2100 PLATFORM_FAILURE, false);
2102 hr = converter_attributes->SetUINT32(MF_LOW_LATENCY, FALSE);
2103 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set converter attributes",
2104 PLATFORM_FAILURE, false);
2106 hr = MFSetAttributeSize(media_type.get(), MF_MT_FRAME_SIZE, width, height);
2107 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set media type attributes",
2108 PLATFORM_FAILURE, false);
2110 hr = video_format_converter_mft_->SetInputType(0, media_type.get(), 0);
2111 if (FAILED(hr)) {
2112 base::debug::Alias(&hr);
2113 // TODO(ananta)
2114 // Remove this CHECK when the change to use DX11 for H/W decoding
2115 // stabilizes.
2116 CHECK(false);
2118 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set converter input type",
2119 PLATFORM_FAILURE, false);
// Enumerate the MFT's supported output types until ARGB32 is found; the
// loop ends when GetOutputAvailableType runs out of types.
2121 base::win::ScopedComPtr<IMFMediaType> out_media_type;
2123 for (uint32 i = 0;
2124 SUCCEEDED(video_format_converter_mft_->GetOutputAvailableType(0, i,
2125 out_media_type.Receive()));
2126 ++i) {
2127 GUID out_subtype = {0};
2128 hr = out_media_type->GetGUID(MF_MT_SUBTYPE, &out_subtype);
2129 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to get output major type",
2130 PLATFORM_FAILURE, false);
2132 if (out_subtype == MFVideoFormat_ARGB32) {
2133 hr = out_media_type->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);
2134 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
2135 "Failed to set attributes on media type", PLATFORM_FAILURE, false);
2137 hr = out_media_type->SetUINT32(MF_MT_INTERLACE_MODE,
2138 MFVideoInterlace_Progressive);
2139 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
2140 "Failed to set attributes on media type", PLATFORM_FAILURE, false);
2142 hr = MFSetAttributeSize(out_media_type.get(), MF_MT_FRAME_SIZE, width,
2143 height);
2144 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
2145 "Failed to set media type attributes", PLATFORM_FAILURE, false);
2147 hr = video_format_converter_mft_->SetOutputType(
2148 0, out_media_type.get(), 0); // No flags
2149 if (FAILED(hr)) {
2150 base::debug::Alias(&hr);
2151 // TODO(ananta)
2152 // Remove this CHECK when the change to use DX11 for H/W decoding
2153 // stabilizes.
2154 CHECK(false);
2156 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
2157 "Failed to set converter output type", PLATFORM_FAILURE, false);
2159 hr = video_format_converter_mft_->ProcessMessage(
2160 MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0);
2161 if (FAILED(hr)) {
2162 // TODO(ananta)
2163 // Remove this CHECK when the change to use DX11 for H/W decoding
2164 // stabilizes.
2165 RETURN_AND_NOTIFY_ON_FAILURE(
2166 false, "Failed to initialize video converter.", PLATFORM_FAILURE,
2167 false);
2169 dx11_video_format_converter_media_type_needs_init_ = false;
2170 return true;
// Release before the next GetOutputAvailableType call so Receive() does
// not overwrite a still-held reference.
2172 out_media_type.Release();
// No ARGB32 output type was offered by the converter.
2174 return false;
// Extracts the pixel dimensions of a decoded output |sample| into |width| and
// |height|. On the DX11 path the sample's first buffer is queried for its
// IMFDXGIBuffer / ID3D11Texture2D and the texture description is read; on the
// D3D9 path the underlying IDirect3DSurface9 description is used instead.
// Returns false (without notifying the client) if any COM query fails.
2177 bool DXVAVideoDecodeAccelerator::GetVideoFrameDimensions(
2178 IMFSample* sample,
2179 int* width,
2180 int* height) {
2181 base::win::ScopedComPtr<IMFMediaBuffer> output_buffer;
2182 HRESULT hr = sample->GetBufferByIndex(0, output_buffer.Receive());
2183 RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from output sample", false);
2185 if (use_dx11_) {
2186 base::win::ScopedComPtr<IMFDXGIBuffer> dxgi_buffer;
2187 base::win::ScopedComPtr<ID3D11Texture2D> d3d11_texture;
2188 hr = dxgi_buffer.QueryFrom(output_buffer.get());
2189 RETURN_ON_HR_FAILURE(hr, "Failed to get DXGIBuffer from output sample",
2190 false);
2191 hr = dxgi_buffer->GetResource(
2192 __uuidof(ID3D11Texture2D),
2193 reinterpret_cast<void**>(d3d11_texture.Receive()));
2194 RETURN_ON_HR_FAILURE(hr, "Failed to get D3D11Texture from output buffer",
2195 false);
2196 D3D11_TEXTURE2D_DESC d3d11_texture_desc;
2197 d3d11_texture->GetDesc(&d3d11_texture_desc);
2198 *width = d3d11_texture_desc.Width;
2199 *height = d3d11_texture_desc.Height;
2200 } else {
2201 base::win::ScopedComPtr<IDirect3DSurface9> surface;
2202 hr = MFGetService(output_buffer.get(), MR_BUFFER_SERVICE,
2203 IID_PPV_ARGS(surface.Receive()));
2204 RETURN_ON_HR_FAILURE(hr, "Failed to get D3D surface from output sample",
2205 false);
2206 D3DSURFACE_DESC surface_desc;
2207 hr = surface->GetDesc(&surface_desc);
2208 RETURN_ON_HR_FAILURE(hr, "Failed to get surface description", false);
2209 *width = surface_desc.Width;
2210 *height = surface_desc.Height;
2212 return true;
2215 } // namespace content