Fix incorrect usage of the RETURN_ON_FAILURE and RETURN_ON_HR_FAILURE macros in the...
[chromium-blink-merge.git] / content / common / gpu / media / dxva_video_decode_accelerator.cc
blob0609b51a814bb3c9108fd63baf9b4b1b9b6b6b4c
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #include "content/common/gpu/media/dxva_video_decode_accelerator.h"
7 #if !defined(OS_WIN)
8 #error This file should only be built on Windows.
9 #endif // !defined(OS_WIN)
11 #include <ks.h>
12 #include <codecapi.h>
13 #include <dxgi1_2.h>
14 #include <mfapi.h>
15 #include <mferror.h>
16 #include <wmcodecdsp.h>
18 #include "base/base_paths_win.h"
19 #include "base/bind.h"
20 #include "base/callback.h"
21 #include "base/command_line.h"
22 #include "base/debug/alias.h"
23 #include "base/file_version_info.h"
24 #include "base/files/file_path.h"
25 #include "base/logging.h"
26 #include "base/memory/scoped_ptr.h"
27 #include "base/memory/shared_memory.h"
28 #include "base/message_loop/message_loop.h"
29 #include "base/path_service.h"
30 #include "base/trace_event/trace_event.h"
31 #include "base/win/windows_version.h"
32 #include "content/public/common/content_switches.h"
33 #include "media/base/win/mf_initializer.h"
34 #include "media/video/video_decode_accelerator.h"
35 #include "ui/gl/gl_bindings.h"
36 #include "ui/gl/gl_context.h"
37 #include "ui/gl/gl_surface_egl.h"
38 #include "ui/gl/gl_switches.h"
namespace {

// Path is appended on to the PROGRAM_FILES base path.
const wchar_t kVPXDecoderDLLPath[] = L"Intel\\Media SDK\\";

// Architecture-specific DLL names for the Intel Media SDK VP8/VP9 decoder
// MFTs, loaded from kVPXDecoderDLLPath.
const wchar_t kVP8DecoderDLLName[] =
#if defined(ARCH_CPU_X86)
    L"mfx_mft_vp8vd_32.dll";
#elif defined(ARCH_CPU_X86_64)
    L"mfx_mft_vp8vd_64.dll";
#else
#error Unsupported Windows CPU Architecture
#endif

const wchar_t kVP9DecoderDLLName[] =
#if defined(ARCH_CPU_X86)
    L"mfx_mft_vp9vd_32.dll";
#elif defined(ARCH_CPU_X86_64)
    L"mfx_mft_vp9vd_64.dll";
#else
#error Unsupported Windows CPU Architecture
#endif

// Class IDs of the VP8/VP9 decoder transforms, instantiated via
// CreateCOMObjectFromDll() in InitDecoder().
const CLSID CLSID_WebmMfVp8Dec = {
  0x451e3cb7,
  0x2622,
  0x4ba5,
  { 0x8e, 0x1d, 0x44, 0xb3, 0xc4, 0x1d, 0x09, 0x24 }
};

const CLSID CLSID_WebmMfVp9Dec = {
  0x07ab4bd2,
  0x1979,
  0x4fcd,
  { 0xa6, 0x97, 0xdf, 0x9a, 0xd1, 0x5b, 0x34, 0xfe }
};

// Media subtypes for the VP8/VP9 bitstreams (FOURCC 'VP80'/'VP90' in the
// first DWORD, remaining fields the standard FOURCC GUID suffix).
const CLSID MEDIASUBTYPE_VP80 = {
  0x30385056,
  0x0000,
  0x0010,
  { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }
};

const CLSID MEDIASUBTYPE_VP90 = {
  0x30395056,
  0x0000,
  0x0010,
  { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }
};

// The CLSID of the video processor media foundation transform which we use for
// texture color conversion in DX11.
DEFINE_GUID(CLSID_VideoProcessorMFT,
            0x88753b26, 0x5b24, 0x49bd, 0xb2, 0xe7, 0xc, 0x44, 0x5c, 0x78,
            0xc9, 0x82);

// MF_XVP_PLAYBACK_MODE
// Data type: UINT32 (treat as BOOL)
// If this attribute is TRUE, the video processor will run in playback mode
// where it allows callers to allocate output samples and allows last frame
// regeneration (repaint).
DEFINE_GUID(MF_XVP_PLAYBACK_MODE, 0x3c5d293f, 0xad67, 0x4e29, 0xaf, 0x12,
            0xcf, 0x3e, 0x23, 0x8a, 0xcc, 0xe9);

}  // namespace
namespace content {

// Codec profiles this accelerator accepts; checked in Initialize() and
// reported by GetSupportedProfiles().
static const media::VideoCodecProfile kSupportedProfiles[] = {
  media::H264PROFILE_BASELINE,
  media::H264PROFILE_MAIN,
  media::H264PROFILE_HIGH,
  media::VP8PROFILE_ANY,
  media::VP9PROFILE_ANY
};

// Function pointer for MFCreateDXGIDeviceManager, resolved at runtime in
// Initialize() because the API is not present on all Windows versions.
CreateDXGIDeviceManager DXVAVideoDecodeAccelerator::create_dxgi_device_manager_
    = NULL;
// Logs |log| via DLOG(ERROR) and returns |ret| from the enclosing function
// when |result| evaluates to false.
#define RETURN_ON_FAILURE(result, log, ret)  \
  do {                                       \
    if (!(result)) {                         \
      DLOG(ERROR) << log;                    \
      return ret;                            \
    }                                        \
  } while (0)

// HRESULT flavor of RETURN_ON_FAILURE. NOTE: |result| is expanded twice
// (once inside SUCCEEDED() and once in the failure log), so callers must
// pass a plain HRESULT variable; passing a function call would re-invoke
// it on the failure path.
#define RETURN_ON_HR_FAILURE(result, log, ret) \
  RETURN_ON_FAILURE(SUCCEEDED(result), \
                    log << ", HRESULT: 0x" << std::hex << result, \
                    ret);

// Same as RETURN_ON_FAILURE, but additionally reports |error_code| to the
// client via StopOnError() before returning.
#define RETURN_AND_NOTIFY_ON_FAILURE(result, log, error_code, ret)  \
  do {                                                              \
    if (!(result)) {                                                \
      DVLOG(1) << log;                                              \
      StopOnError(error_code);                                      \
      return ret;                                                   \
    }                                                               \
  } while (0)

// HRESULT flavor of RETURN_AND_NOTIFY_ON_FAILURE. The same double-expansion
// caveat as RETURN_ON_HR_FAILURE applies to |result|.
#define RETURN_AND_NOTIFY_ON_HR_FAILURE(result, log, error_code, ret)  \
  RETURN_AND_NOTIFY_ON_FAILURE(SUCCEEDED(result), \
                               log << ", HRESULT: 0x" << std::hex << result, \
                               error_code, ret);
// Miscellaneous tuning constants for the decoder.
enum {
  // Maximum number of iterations we allow before aborting the attempt to flush
  // the batched queries to the driver and allow torn/corrupt frames to be
  // rendered.
  kFlushDecoderSurfaceTimeoutMs = 1,
  // Maximum iterations where we try to flush the d3d device.
  kMaxIterationsForD3DFlush = 4,
  // We only request 5 picture buffers from the client which are used to hold
  // the decoded samples. These buffers are then reused when the client tells
  // us that it is done with the buffer.
  kNumPictureBuffers = 5,
};
// Creates an empty Media Foundation sample (no attached buffers). Returns
// NULL on failure; the caller owns the returned reference.
static IMFSample* CreateEmptySample() {
  base::win::ScopedComPtr<IMFSample> sample;
  HRESULT hr = MFCreateSample(sample.Receive());
  RETURN_ON_HR_FAILURE(hr, "MFCreateSample failed", NULL);
  return sample.Detach();
}
// Creates a Media Foundation sample with one buffer of length |buffer_length|
// on a |align|-byte boundary. Alignment must be a perfect power of 2 or 0.
// Returns NULL on failure; the caller owns the returned reference.
static IMFSample* CreateEmptySampleWithBuffer(int buffer_length, int align) {
  CHECK_GT(buffer_length, 0);

  base::win::ScopedComPtr<IMFSample> sample;
  sample.Attach(CreateEmptySample());

  base::win::ScopedComPtr<IMFMediaBuffer> buffer;
  HRESULT hr = E_FAIL;
  if (align == 0) {
    // Note that MFCreateMemoryBuffer is same as MFCreateAlignedMemoryBuffer
    // with the align argument being 0.
    hr = MFCreateMemoryBuffer(buffer_length, buffer.Receive());
  } else {
    // MFCreateAlignedMemoryBuffer takes the alignment as (align - 1).
    hr = MFCreateAlignedMemoryBuffer(buffer_length,
                                     align - 1,
                                     buffer.Receive());
  }
  RETURN_ON_HR_FAILURE(hr, "Failed to create memory buffer for sample", NULL);

  hr = sample->AddBuffer(buffer.get());
  RETURN_ON_HR_FAILURE(hr, "Failed to add buffer to sample", NULL);

  buffer->SetCurrentLength(0);
  return sample.Detach();
}
// Creates a Media Foundation sample with one buffer containing a copy of the
// given Annex B stream data.
// If duration and sample time are not known, provide 0.
// |min_size| specifies the minimum size of the buffer (might be required by
// the decoder for input). If no alignment is required, provide 0.
// Returns NULL on failure; the caller owns the returned reference.
static IMFSample* CreateInputSample(const uint8* stream, int size,
                                    int min_size, int alignment) {
  CHECK(stream);
  CHECK_GT(size, 0);
  base::win::ScopedComPtr<IMFSample> sample;
  sample.Attach(CreateEmptySampleWithBuffer(std::max(min_size, size),
                                            alignment));
  RETURN_ON_FAILURE(sample.get(), "Failed to create empty sample", NULL);

  base::win::ScopedComPtr<IMFMediaBuffer> buffer;
  HRESULT hr = sample->GetBufferByIndex(0, buffer.Receive());
  RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from sample", NULL);

  DWORD max_length = 0;
  DWORD current_length = 0;
  uint8* destination = NULL;
  hr = buffer->Lock(&destination, &max_length, &current_length);
  RETURN_ON_HR_FAILURE(hr, "Failed to lock buffer", NULL);

  // A freshly created buffer must be empty and large enough for |size|.
  CHECK_EQ(current_length, 0u);
  CHECK_GE(static_cast<int>(max_length), size);
  memcpy(destination, stream, size);

  hr = buffer->Unlock();
  RETURN_ON_HR_FAILURE(hr, "Failed to unlock buffer", NULL);

  hr = buffer->SetCurrentLength(size);
  RETURN_ON_HR_FAILURE(hr, "Failed to set buffer length", NULL);

  return sample.Detach();
}
// Maps |bitstream_buffer|'s shared memory (read-only) and wraps a copy of its
// contents in an IMFSample. |stream_size| is passed to CreateInputSample() as
// the minimum buffer size; |alignment| as the required buffer alignment.
// Returns NULL on failure; the caller owns the returned reference.
static IMFSample* CreateSampleFromInputBuffer(
    const media::BitstreamBuffer& bitstream_buffer,
    DWORD stream_size,
    DWORD alignment) {
  base::SharedMemory shm(bitstream_buffer.handle(), true);
  RETURN_ON_FAILURE(shm.Map(bitstream_buffer.size()),
                    "Failed in base::SharedMemory::Map", NULL);

  return CreateInputSample(reinterpret_cast<const uint8*>(shm.memory()),
                           bitstream_buffer.size(),
                           stream_size,
                           alignment);
}
// Helper function to create a COM object instance from a DLL. The alternative
// is to use the CoCreateInstance API which requires the COM apartment to be
// initialized which is not the case on the GPU main thread. We want to avoid
// initializing COM as it may have sideeffects.
// Looks up DllGetClassObject in |dll|, obtains the IClassFactory for |clsid|
// and creates an instance exposing |iid| into |*object|.
HRESULT CreateCOMObjectFromDll(HMODULE dll, const CLSID& clsid, const IID& iid,
                               void** object) {
  if (!dll || !object)
    return E_INVALIDARG;

  using GetClassObject = HRESULT (WINAPI*)(
      const CLSID& clsid, const IID& iid, void** object);

  GetClassObject get_class_object = reinterpret_cast<GetClassObject>(
      GetProcAddress(dll, "DllGetClassObject"));
  RETURN_ON_FAILURE(
      get_class_object, "Failed to get DllGetClassObject pointer", E_FAIL);

  base::win::ScopedComPtr<IClassFactory> factory;
  HRESULT hr = get_class_object(
      clsid,
      __uuidof(IClassFactory),
      factory.ReceiveVoid());
  RETURN_ON_HR_FAILURE(hr, "DllGetClassObject failed", hr);

  hr = factory->CreateInstance(NULL, iid, object);
  return hr;
}
// Maintains information about a DXVA picture buffer, i.e. whether it is
// available for rendering, the texture information, etc.
struct DXVAVideoDecodeAccelerator::DXVAPictureBuffer {
 public:
  // Allocates the EGL pbuffer surface and the D3D9/DX11 texture backing
  // |buffer|. Returns NULL (empty linked_ptr) on failure.
  static linked_ptr<DXVAPictureBuffer> Create(
      const DXVAVideoDecodeAccelerator& decoder,
      const media::PictureBuffer& buffer,
      EGLConfig egl_config);
  ~DXVAPictureBuffer();

  // Releases pending surface/texture references and marks the buffer
  // available again.
  void ReusePictureBuffer();
  // Copies the output sample data to the picture buffer provided by the
  // client.
  // The dest_surface parameter contains the decoded bits.
  bool CopyOutputSampleDataToPictureBuffer(
      DXVAVideoDecodeAccelerator* decoder,
      IDirect3DSurface9* dest_surface,
      ID3D11Texture2D* dx11_texture,
      int input_buffer_id);

  bool available() const {
    return available_;
  }

  void set_available(bool available) {
    available_ = available;
  }

  int id() const {
    return picture_buffer_.id();
  }

  gfx::Size size() const {
    return picture_buffer_.size();
  }

  // Called when the source surface |src_surface| is copied to the destination
  // |dest_surface|
  void CopySurfaceComplete(IDirect3DSurface9* src_surface,
                           IDirect3DSurface9* dest_surface);

 private:
  explicit DXVAPictureBuffer(const media::PictureBuffer& buffer);

  // True while the buffer may be handed a new decoded frame.
  bool available_;
  media::PictureBuffer picture_buffer_;
  EGLSurface decoding_surface_;
  base::win::ScopedComPtr<IDirect3DTexture9> decoding_texture_;
  base::win::ScopedComPtr<ID3D11Texture2D> dx11_decoding_texture_;

  // The following |IDirect3DSurface9| interface pointers are used to hold
  // references on the surfaces during the course of a StretchRect operation
  // to copy the source surface to the target. The references are released
  // when the StretchRect operation i.e. the copy completes.
  base::win::ScopedComPtr<IDirect3DSurface9> decoder_surface_;
  base::win::ScopedComPtr<IDirect3DSurface9> target_surface_;

  // This ID3D11Texture2D interface pointer is used to hold a reference to the
  // decoder texture during the course of a copy operation. This reference is
  // released when the copy completes.
  base::win::ScopedComPtr<ID3D11Texture2D> decoder_dx11_texture_;

  // Set to true if RGB is supported by the texture.
  // Defaults to true.
  bool use_rgb_;

  DISALLOW_COPY_AND_ASSIGN(DXVAPictureBuffer);
};
// static
// Creates the EGL pbuffer surface for |buffer| and binds it to either a DX11
// texture (opened from the ANGLE share handle) or a newly created D3D9
// texture, depending on which device the decoder uses.
linked_ptr<DXVAVideoDecodeAccelerator::DXVAPictureBuffer>
DXVAVideoDecodeAccelerator::DXVAPictureBuffer::Create(
    const DXVAVideoDecodeAccelerator& decoder,
    const media::PictureBuffer& buffer,
    EGLConfig egl_config) {
  linked_ptr<DXVAPictureBuffer> picture_buffer(new DXVAPictureBuffer(buffer));

  EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay();

  // Prefer an RGB texture when the config supports it.
  EGLint use_rgb = 1;
  eglGetConfigAttrib(egl_display, egl_config, EGL_BIND_TO_TEXTURE_RGB,
                     &use_rgb);

  EGLint attrib_list[] = {
    EGL_WIDTH, buffer.size().width(),
    EGL_HEIGHT, buffer.size().height(),
    EGL_TEXTURE_FORMAT, use_rgb ? EGL_TEXTURE_RGB : EGL_TEXTURE_RGBA,
    EGL_TEXTURE_TARGET, EGL_TEXTURE_2D,
    EGL_NONE
  };

  picture_buffer->decoding_surface_ = eglCreatePbufferSurface(
      egl_display,
      egl_config,
      attrib_list);
  RETURN_ON_FAILURE(picture_buffer->decoding_surface_,
                    "Failed to create surface",
                    linked_ptr<DXVAPictureBuffer>(NULL));

  HANDLE share_handle = NULL;
  EGLBoolean ret = eglQuerySurfacePointerANGLE(
      egl_display,
      picture_buffer->decoding_surface_,
      EGL_D3D_TEXTURE_2D_SHARE_HANDLE_ANGLE,
      &share_handle);

  RETURN_ON_FAILURE(share_handle && ret == EGL_TRUE,
                    "Failed to query ANGLE surface pointer",
                    linked_ptr<DXVAPictureBuffer>(NULL));

  HRESULT hr = E_FAIL;
  if (decoder.d3d11_device_) {
    base::win::ScopedComPtr<ID3D11Resource> resource;
    hr = decoder.d3d11_device_->OpenSharedResource(
        share_handle,
        __uuidof(ID3D11Resource),
        reinterpret_cast<void**>(resource.Receive()));
    RETURN_ON_HR_FAILURE(hr, "Failed to open shared resource",
                         linked_ptr<DXVAPictureBuffer>(NULL));
    hr = picture_buffer->dx11_decoding_texture_.QueryFrom(resource.get());
  } else {
    hr = decoder.d3d9_device_ex_->CreateTexture(
        buffer.size().width(),
        buffer.size().height(),
        1,  // NOTE(review): single mip level — dropped by the scrape; confirm.
        D3DUSAGE_RENDERTARGET,
        use_rgb ? D3DFMT_X8R8G8B8 : D3DFMT_A8R8G8B8,
        D3DPOOL_DEFAULT,
        picture_buffer->decoding_texture_.Receive(),
        &share_handle);
  }
  RETURN_ON_HR_FAILURE(hr, "Failed to create texture",
                       linked_ptr<DXVAPictureBuffer>(NULL));
  picture_buffer->use_rgb_ = !!use_rgb;
  return picture_buffer;
}
// Buffers start out available; the EGL surface is created later in Create().
DXVAVideoDecodeAccelerator::DXVAPictureBuffer::DXVAPictureBuffer(
    const media::PictureBuffer& buffer)
    : available_(true),
      picture_buffer_(buffer),
      decoding_surface_(NULL),
      use_rgb_(true) {
}
// Unbinds and destroys the EGL pbuffer surface, if one was created.
DXVAVideoDecodeAccelerator::DXVAPictureBuffer::~DXVAPictureBuffer() {
  if (decoding_surface_) {
    EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay();

    eglReleaseTexImage(
        egl_display,
        decoding_surface_,
        EGL_BACK_BUFFER);

    eglDestroySurface(
        egl_display,
        decoding_surface_);
    decoding_surface_ = NULL;
  }
}
// Releases the texture binding and any surface/texture references held for an
// in-flight copy, then marks the buffer available for the next frame.
void DXVAVideoDecodeAccelerator::DXVAPictureBuffer::ReusePictureBuffer() {
  DCHECK(decoding_surface_);
  EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay();
  eglReleaseTexImage(
      egl_display,
      decoding_surface_,
      EGL_BACK_BUFFER);
  decoder_surface_.Release();
  target_surface_.Release();
  decoder_dx11_texture_.Release();
  set_available(true);
}
448 bool DXVAVideoDecodeAccelerator::DXVAPictureBuffer::
449 CopyOutputSampleDataToPictureBuffer(
450 DXVAVideoDecodeAccelerator* decoder,
451 IDirect3DSurface9* dest_surface,
452 ID3D11Texture2D* dx11_texture,
453 int input_buffer_id) {
454 DCHECK(dest_surface || dx11_texture);
455 if (dx11_texture) {
456 // Grab a reference on the decoder texture. This reference will be released
457 // when we receive a notification that the copy was completed or when the
458 // DXVAPictureBuffer instance is destroyed.
459 decoder_dx11_texture_ = dx11_texture;
460 decoder->CopyTexture(dx11_texture, dx11_decoding_texture_.get(), NULL,
461 id(), input_buffer_id);
462 return true;
464 D3DSURFACE_DESC surface_desc;
465 HRESULT hr = dest_surface->GetDesc(&surface_desc);
466 RETURN_ON_HR_FAILURE(hr, "Failed to get surface description", false);
468 D3DSURFACE_DESC texture_desc;
469 decoding_texture_->GetLevelDesc(0, &texture_desc);
471 if (texture_desc.Width != surface_desc.Width ||
472 texture_desc.Height != surface_desc.Height) {
473 NOTREACHED() << "Decode surface of different dimension than texture";
474 return false;
477 hr = decoder->d3d9_->CheckDeviceFormatConversion(
478 D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, surface_desc.Format,
479 use_rgb_ ? D3DFMT_X8R8G8B8 : D3DFMT_A8R8G8B8);
480 RETURN_ON_HR_FAILURE(hr, "Device does not support format converision", false);
482 // The same picture buffer can be reused for a different frame. Release the
483 // target surface and the decoder references here.
484 target_surface_.Release();
485 decoder_surface_.Release();
487 // Grab a reference on the decoder surface and the target surface. These
488 // references will be released when we receive a notification that the
489 // copy was completed or when the DXVAPictureBuffer instance is destroyed.
490 // We hold references here as it is easier to manage their lifetimes.
491 hr = decoding_texture_->GetSurfaceLevel(0, target_surface_.Receive());
492 RETURN_ON_HR_FAILURE(hr, "Failed to get surface from texture", false);
494 decoder_surface_ = dest_surface;
496 decoder->CopySurface(decoder_surface_.get(), target_surface_.get(), id(),
497 input_buffer_id);
498 return true;
// Completion callback for a surface/texture copy: drops the references taken
// in CopyOutputSampleDataToPictureBuffer() and binds the pbuffer to the
// client's GL texture. Both surface arguments are NULL in the DX11 path.
void DXVAVideoDecodeAccelerator::DXVAPictureBuffer::CopySurfaceComplete(
    IDirect3DSurface9* src_surface,
    IDirect3DSurface9* dest_surface) {
  DCHECK(!available());

  // Save the caller's texture binding so it can be restored afterwards.
  GLint current_texture = 0;
  glGetIntegerv(GL_TEXTURE_BINDING_2D, &current_texture);

  glBindTexture(GL_TEXTURE_2D, picture_buffer_.texture_id());

  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);

  if (src_surface && dest_surface) {
    DCHECK_EQ(src_surface, decoder_surface_.get());
    DCHECK_EQ(dest_surface, target_surface_.get());
    decoder_surface_.Release();
    target_surface_.Release();
  } else {
    DCHECK(decoder_dx11_texture_.get());
    decoder_dx11_texture_.Release();
  }

  EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay();
  eglBindTexImage(
      egl_display,
      decoding_surface_,
      EGL_BACK_BUFFER);

  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
  glBindTexture(GL_TEXTURE_2D, current_texture);
}
// Associates a decoded output sample with its input bitstream buffer id.
// picture_buffer_id stays -1 until a picture buffer is assigned.
DXVAVideoDecodeAccelerator::PendingSampleInfo::PendingSampleInfo(
    int32 buffer_id, IMFSample* sample)
    : input_buffer_id(buffer_id),
      picture_buffer_id(-1) {
  output_sample.Attach(sample);
}

DXVAVideoDecodeAccelerator::PendingSampleInfo::~PendingSampleInfo() {}
// Constructs the accelerator in the uninitialized state; real setup happens
// in Initialize(). |make_context_current| is invoked before GL operations.
DXVAVideoDecodeAccelerator::DXVAVideoDecodeAccelerator(
    const base::Callback<bool(void)>& make_context_current,
    gfx::GLContext* gl_context)
    : client_(NULL),
      dev_manager_reset_token_(0),
      dx11_dev_manager_reset_token_(0),
      egl_config_(NULL),
      state_(kUninitialized),
      pictures_requested_(false),
      inputs_before_decode_(0),
      sent_drain_message_(false),
      make_context_current_(make_context_current),
      codec_(media::kUnknownVideoCodec),
      decoder_thread_("DXVAVideoDecoderThread"),
      pending_flush_(false),
      use_dx11_(false),
      dx11_video_format_converter_media_type_needs_init_(true),
      gl_context_(gl_context),
      weak_this_factory_(this) {
  weak_ptr_ = weak_this_factory_.GetWeakPtr();
  memset(&input_stream_info_, 0, sizeof(input_stream_info_));
  memset(&output_stream_info_, 0, sizeof(output_stream_info_));
}
DXVAVideoDecodeAccelerator::~DXVAVideoDecodeAccelerator() {
  client_ = NULL;
}
// Validates |profile|, probes the required Media Foundation DLLs, resolves
// MFCreateDXGIDeviceManager if available, initializes the MF decoder and
// starts the decoder thread. Returns false (after notifying the client via
// the RETURN_AND_NOTIFY_* macros) on any failure.
bool DXVAVideoDecodeAccelerator::Initialize(media::VideoCodecProfile profile,
                                            Client* client) {
  client_ = client;

  main_thread_task_runner_ = base::MessageLoop::current()->task_runner();

  bool profile_supported = false;
  for (const auto& supported_profile : kSupportedProfiles) {
    if (profile == supported_profile) {
      profile_supported = true;
      break;
    }
  }
  if (!profile_supported) {
    RETURN_AND_NOTIFY_ON_FAILURE(false,
        "Unsupported h.264, vp8, or vp9 profile", PLATFORM_FAILURE, false);
  }

  // Not all versions of Windows 7 and later include Media Foundation DLLs.
  // Instead of crashing while delay loading the DLL when calling MFStartup()
  // below, probe whether we can successfully load the DLL now.
  // See http://crbug.com/339678 for details.
  HMODULE dxgi_manager_dll = NULL;
  if ((dxgi_manager_dll = ::GetModuleHandle(L"MFPlat.dll")) == NULL) {
    HMODULE mfplat_dll = ::LoadLibrary(L"MFPlat.dll");
    RETURN_ON_FAILURE(mfplat_dll, "MFPlat.dll is required for decoding",
                      false);
    // On Windows 8+ mfplat.dll provides the MFCreateDXGIDeviceManager API.
    // On Windows 7 mshtmlmedia.dll provides it.
    dxgi_manager_dll = mfplat_dll;
  }

  // TODO(ananta)
  // The code below works, as in we can create the DX11 device manager for
  // Windows 7. However the IMFTransform we use for texture conversion and
  // copy does not exist on Windows 7. Look into an alternate approach
  // and enable the code below.
#if defined ENABLE_DX11_FOR_WIN7
  if ((base::win::GetVersion() == base::win::VERSION_WIN7) &&
      ((dxgi_manager_dll = ::GetModuleHandle(L"mshtmlmedia.dll")) == NULL)) {
    HMODULE mshtml_media_dll = ::LoadLibrary(L"mshtmlmedia.dll");
    if (mshtml_media_dll)
      dxgi_manager_dll = mshtml_media_dll;
  }
#endif
  // If we don't find the MFCreateDXGIDeviceManager API we fallback to D3D9
  // decoding.
  if (dxgi_manager_dll && !create_dxgi_device_manager_) {
    create_dxgi_device_manager_ = reinterpret_cast<CreateDXGIDeviceManager>(
        ::GetProcAddress(dxgi_manager_dll, "MFCreateDXGIDeviceManager"));
  }

  RETURN_AND_NOTIFY_ON_FAILURE(
      gfx::g_driver_egl.ext.b_EGL_ANGLE_surface_d3d_texture_2d_share_handle,
      "EGL_ANGLE_surface_d3d_texture_2d_share_handle unavailable",
      PLATFORM_FAILURE,
      false);

  State state = GetState();
  RETURN_AND_NOTIFY_ON_FAILURE((state == kUninitialized),
      "Initialize: invalid state: " << state, ILLEGAL_STATE, false);

  media::InitializeMediaFoundation();

  RETURN_AND_NOTIFY_ON_FAILURE(InitDecoder(profile),
      "Failed to initialize decoder", PLATFORM_FAILURE, false);

  RETURN_AND_NOTIFY_ON_FAILURE(GetStreamsInfoAndBufferReqs(),
      "Failed to get input/output stream info.", PLATFORM_FAILURE, false);

  RETURN_AND_NOTIFY_ON_FAILURE(
      SendMFTMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0),
      "Send MFT_MESSAGE_NOTIFY_BEGIN_STREAMING notification failed",
      PLATFORM_FAILURE, false);

  RETURN_AND_NOTIFY_ON_FAILURE(
      SendMFTMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, 0),
      "Send MFT_MESSAGE_NOTIFY_START_OF_STREAM notification failed",
      PLATFORM_FAILURE, false);

  SetState(kNormal);

  StartDecoderThread();
  return true;
}
// Creates the D3D9Ex device, the DXVA2 device manager and an event query used
// to flush batched driver work. Returns false on any failure.
bool DXVAVideoDecodeAccelerator::CreateD3DDevManager() {
  TRACE_EVENT0("gpu", "DXVAVideoDecodeAccelerator_CreateD3DDevManager");

  HRESULT hr = Direct3DCreate9Ex(D3D_SDK_VERSION, d3d9_.Receive());
  RETURN_ON_HR_FAILURE(hr, "Direct3DCreate9Ex failed", false);

  // Minimal windowed present parameters: no on-screen presentation happens,
  // the device only backs decode/copy surfaces.
  D3DPRESENT_PARAMETERS present_params = {0};
  present_params.BackBufferWidth = 1;
  present_params.BackBufferHeight = 1;
  present_params.BackBufferFormat = D3DFMT_UNKNOWN;
  present_params.BackBufferCount = 1;
  present_params.SwapEffect = D3DSWAPEFFECT_DISCARD;
  present_params.hDeviceWindow = ::GetShellWindow();
  present_params.Windowed = TRUE;
  present_params.Flags = D3DPRESENTFLAG_VIDEO;
  present_params.FullScreen_RefreshRateInHz = 0;
  present_params.PresentationInterval = 0;

  hr = d3d9_->CreateDeviceEx(D3DADAPTER_DEFAULT,
                             D3DDEVTYPE_HAL,
                             ::GetShellWindow(),
                             D3DCREATE_FPU_PRESERVE |
                             D3DCREATE_SOFTWARE_VERTEXPROCESSING |
                             D3DCREATE_DISABLE_PSGP_THREADING |
                             D3DCREATE_MULTITHREADED,
                             &present_params,
                             NULL,
                             d3d9_device_ex_.Receive());
  RETURN_ON_HR_FAILURE(hr, "Failed to create D3D device", false);

  hr = DXVA2CreateDirect3DDeviceManager9(&dev_manager_reset_token_,
                                         device_manager_.Receive());
  RETURN_ON_HR_FAILURE(hr, "DXVA2CreateDirect3DDeviceManager9 failed", false);

  hr = device_manager_->ResetDevice(d3d9_device_ex_.get(),
                                    dev_manager_reset_token_);
  RETURN_ON_HR_FAILURE(hr, "Failed to reset device", false);

  hr = d3d9_device_ex_->CreateQuery(D3DQUERYTYPE_EVENT, query_.Receive());
  RETURN_ON_HR_FAILURE(hr, "Failed to create D3D device query", false);
  // Ensure query_ API works (to avoid an infinite loop later in
  // CopyOutputSampleDataToPictureBuffer).
  hr = query_->Issue(D3DISSUE_END);
  RETURN_ON_HR_FAILURE(hr, "Failed to issue END test query", false);
  return true;
}
// Creates the DXGI device manager, a DX11 device with video support, an event
// query, and the video processor MFT used for texture format conversion.
// Returns false on any failure.
bool DXVAVideoDecodeAccelerator::CreateDX11DevManager() {
  HRESULT hr = create_dxgi_device_manager_(&dx11_dev_manager_reset_token_,
                                           d3d11_device_manager_.Receive());
  RETURN_ON_HR_FAILURE(hr, "MFCreateDXGIDeviceManager failed", false);

  // This array defines the set of DirectX hardware feature levels we support.
  // The ordering MUST be preserved. All applications are assumed to support
  // 9.1 unless otherwise stated by the application, which is not our case.
  D3D_FEATURE_LEVEL feature_levels[] = {
      D3D_FEATURE_LEVEL_11_1,
      D3D_FEATURE_LEVEL_11_0,
      D3D_FEATURE_LEVEL_10_1,
      D3D_FEATURE_LEVEL_10_0,
      D3D_FEATURE_LEVEL_9_3,
      D3D_FEATURE_LEVEL_9_2,
      D3D_FEATURE_LEVEL_9_1 };

  UINT flags = D3D11_CREATE_DEVICE_VIDEO_SUPPORT;

#if defined _DEBUG
  flags |= D3D11_CREATE_DEVICE_DEBUG;
#endif

  D3D_FEATURE_LEVEL feature_level_out = D3D_FEATURE_LEVEL_11_0;
  hr = D3D11CreateDevice(NULL,
                         D3D_DRIVER_TYPE_HARDWARE,
                         NULL,
                         flags,
                         feature_levels,
                         arraysize(feature_levels),
                         D3D11_SDK_VERSION,
                         d3d11_device_.Receive(),
                         &feature_level_out,
                         d3d11_device_context_.Receive());
  RETURN_ON_HR_FAILURE(hr, "Failed to create DX11 device", false);

  // Enable multithreaded mode on the context. This ensures that accesses to
  // context are synchronized across threads. We have multiple threads
  // accessing the context, the media foundation decoder threads and the
  // decoder thread via the video format conversion transform.
  base::win::ScopedComPtr<ID3D10Multithread> multi_threaded;
  hr = multi_threaded.QueryFrom(d3d11_device_context_.get());
  RETURN_ON_HR_FAILURE(hr, "Failed to query ID3D10Multithread", false);
  multi_threaded->SetMultithreadProtected(TRUE);

  hr = d3d11_device_manager_->ResetDevice(d3d11_device_.get(),
                                          dx11_dev_manager_reset_token_);
  RETURN_ON_HR_FAILURE(hr, "Failed to reset device", false);

  D3D11_QUERY_DESC query_desc;
  query_desc.Query = D3D11_QUERY_EVENT;
  query_desc.MiscFlags = 0;
  hr = d3d11_device_->CreateQuery(
      &query_desc,
      d3d11_query_.Receive());
  RETURN_ON_HR_FAILURE(hr, "Failed to create DX11 device query", false);

  HMODULE video_processor_dll = ::LoadLibrary(L"msvproc.dll");
  RETURN_ON_FAILURE(video_processor_dll, "Failed to load video processor",
                    false);

  hr = CreateCOMObjectFromDll(
      video_processor_dll,
      CLSID_VideoProcessorMFT,
      __uuidof(IMFTransform),
      video_format_converter_mft_.ReceiveVoid());
  if (FAILED(hr)) {
    base::debug::Alias(&hr);
    // TODO(ananta)
    // Remove this CHECK when the change to use DX11 for H/W decoding
    // stablizes.
    CHECK(false);
  }
  RETURN_ON_HR_FAILURE(hr, "Failed to create video format converter", false);
  return true;
}
781 void DXVAVideoDecodeAccelerator::Decode(
782 const media::BitstreamBuffer& bitstream_buffer) {
783 DCHECK(main_thread_task_runner_->BelongsToCurrentThread());
785 State state = GetState();
786 RETURN_AND_NOTIFY_ON_FAILURE((state == kNormal || state == kStopped ||
787 state == kFlushing),
788 "Invalid state: " << state, ILLEGAL_STATE,);
790 base::win::ScopedComPtr<IMFSample> sample;
791 sample.Attach(CreateSampleFromInputBuffer(bitstream_buffer,
792 input_stream_info_.cbSize,
793 input_stream_info_.cbAlignment));
794 RETURN_AND_NOTIFY_ON_FAILURE(sample.get(), "Failed to create input sample",
795 PLATFORM_FAILURE, );
797 RETURN_AND_NOTIFY_ON_HR_FAILURE(sample->SetSampleTime(bitstream_buffer.id()),
798 "Failed to associate input buffer id with sample", PLATFORM_FAILURE,);
800 decoder_thread_task_runner_->PostTask(
801 FROM_HERE,
802 base::Bind(&DXVAVideoDecodeAccelerator::DecodeInternal,
803 base::Unretained(this), sample));
// Client callback delivering the requested picture buffers. Wraps each one in
// a DXVAPictureBuffer, then resumes pending output (and a pending flush, if
// any). Expects exactly kNumPictureBuffers buffers.
void DXVAVideoDecodeAccelerator::AssignPictureBuffers(
    const std::vector<media::PictureBuffer>& buffers) {
  DCHECK(main_thread_task_runner_->BelongsToCurrentThread());

  State state = GetState();
  RETURN_AND_NOTIFY_ON_FAILURE((state != kUninitialized),
      "Invalid state: " << state, ILLEGAL_STATE,);
  RETURN_AND_NOTIFY_ON_FAILURE((kNumPictureBuffers == buffers.size()),
      "Failed to provide requested picture buffers. (Got " << buffers.size() <<
      ", requested " << kNumPictureBuffers << ")", INVALID_ARGUMENT,);

  // Copy the picture buffers provided by the client to the available list,
  // and mark these buffers as available for use.
  for (size_t buffer_index = 0; buffer_index < buffers.size();
       ++buffer_index) {
    linked_ptr<DXVAPictureBuffer> picture_buffer =
        DXVAPictureBuffer::Create(*this, buffers[buffer_index], egl_config_);
    RETURN_AND_NOTIFY_ON_FAILURE(picture_buffer.get(),
        "Failed to allocate picture buffer", PLATFORM_FAILURE,);

    bool inserted = output_picture_buffers_.insert(std::make_pair(
        buffers[buffer_index].id(), picture_buffer)).second;
    DCHECK(inserted);
  }

  ProcessPendingSamples();
  if (pending_flush_) {
    decoder_thread_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal,
                   base::Unretained(this)));
  }
}
// Client callback recycling a picture buffer. Stale buffers (from before a
// resolution change) are dismissed instead of reused.
void DXVAVideoDecodeAccelerator::ReusePictureBuffer(
    int32 picture_buffer_id) {
  DCHECK(main_thread_task_runner_->BelongsToCurrentThread());

  State state = GetState();
  RETURN_AND_NOTIFY_ON_FAILURE((state != kUninitialized),
      "Invalid state: " << state, ILLEGAL_STATE,);

  if (output_picture_buffers_.empty() && stale_output_picture_buffers_.empty())
    return;

  OutputBuffers::iterator it = output_picture_buffers_.find(picture_buffer_id);
  // If we didn't find the picture id in the |output_picture_buffers_| map we
  // try the |stale_output_picture_buffers_| map, as this may have been an
  // output picture buffer from before a resolution change, that at resolution
  // change time had yet to be displayed. The client is calling us back to tell
  // us that we can now recycle this picture buffer, so if we were waiting to
  // dispose of it we now can.
  if (it == output_picture_buffers_.end()) {
    it = stale_output_picture_buffers_.find(picture_buffer_id);
    RETURN_AND_NOTIFY_ON_FAILURE(it != stale_output_picture_buffers_.end(),
        "Invalid picture id: " << picture_buffer_id, INVALID_ARGUMENT,);
    main_thread_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&DXVAVideoDecodeAccelerator::DeferredDismissStaleBuffer,
                   weak_this_factory_.GetWeakPtr(), picture_buffer_id));
    return;
  }

  it->second->ReusePictureBuffer();
  ProcessPendingSamples();
  if (pending_flush_) {
    decoder_thread_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal,
                   base::Unretained(this)));
  }
}
// Transitions to kFlushing and hands the actual flush work to the decoder
// thread via FlushInternal().
void DXVAVideoDecodeAccelerator::Flush() {
  DCHECK(main_thread_task_runner_->BelongsToCurrentThread());

  DVLOG(1) << "DXVAVideoDecodeAccelerator::Flush";

  State state = GetState();
  RETURN_AND_NOTIFY_ON_FAILURE((state == kNormal || state == kStopped),
      "Unexpected decoder state: " << state, ILLEGAL_STATE,);

  SetState(kFlushing);

  pending_flush_ = true;

  decoder_thread_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal,
                 base::Unretained(this)));
}
// Stops the decoder thread, drops undisplayed output frames, flushes the MFT
// and restarts decoding. NotifyResetDone is posted back to the client.
void DXVAVideoDecodeAccelerator::Reset() {
  DCHECK(main_thread_task_runner_->BelongsToCurrentThread());

  DVLOG(1) << "DXVAVideoDecodeAccelerator::Reset";

  State state = GetState();
  RETURN_AND_NOTIFY_ON_FAILURE((state == kNormal || state == kStopped),
      "Reset: invalid state: " << state, ILLEGAL_STATE,);

  decoder_thread_.Stop();

  SetState(kResetting);

  // If we have pending output frames waiting for display then we drop those
  // frames and set the corresponding picture buffer as available.
  PendingOutputSamples::iterator index;
  for (index = pending_output_samples_.begin();
       index != pending_output_samples_.end();
       ++index) {
    if (index->picture_buffer_id != -1) {
      OutputBuffers::iterator it = output_picture_buffers_.find(
          index->picture_buffer_id);
      if (it != output_picture_buffers_.end()) {
        DXVAPictureBuffer* picture_buffer = it->second.get();
        picture_buffer->ReusePictureBuffer();
      }
    }
  }

  pending_output_samples_.clear();

  NotifyInputBuffersDropped();

  RETURN_AND_NOTIFY_ON_FAILURE(SendMFTMessage(MFT_MESSAGE_COMMAND_FLUSH, 0),
      "Reset: Failed to send message.", PLATFORM_FAILURE,);

  main_thread_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::NotifyResetDone,
                 weak_this_factory_.GetWeakPtr()));

  StartDecoderThread();
  SetState(kNormal);
}
// Tears down the decoder and deletes this object. Callers must not touch the
// instance after this returns (self-deleting, per the VDA contract).
void DXVAVideoDecodeAccelerator::Destroy() {
  DCHECK(main_thread_task_runner_->BelongsToCurrentThread());
  Invalidate();
  delete this;
}
// Decode() must be called on the main thread; IO-thread decoding is not
// supported by this implementation.
bool DXVAVideoDecodeAccelerator::CanDecodeOnIOThread() {
  return false;
}
// The decoded frames are copied into BGRA textures, so report GL_BGRA_EXT as
// the internal format of the surfaces handed to the client.
GLenum DXVAVideoDecodeAccelerator::GetSurfaceInternalFormat() const {
  return GL_BGRA_EXT;
}
957 // static
958 media::VideoDecodeAccelerator::SupportedProfiles
959 DXVAVideoDecodeAccelerator::GetSupportedProfiles() {
960 // TODO(henryhsu): Need to ensure the profiles are actually supported.
961 SupportedProfiles profiles;
962 for (const auto& supported_profile : kSupportedProfiles) {
963 SupportedProfile profile;
964 profile.profile = supported_profile;
965 // Windows Media Foundation H.264 decoding does not support decoding videos
966 // with any dimension smaller than 48 pixels:
967 // http://msdn.microsoft.com/en-us/library/windows/desktop/dd797815
968 profile.min_resolution.SetSize(48, 48);
969 // Use 1088 to account for 16x16 macroblocks.
970 profile.max_resolution.SetSize(1920, 1088);
971 profiles.push_back(profile);
973 return profiles;
976 bool DXVAVideoDecodeAccelerator::InitDecoder(media::VideoCodecProfile profile) {
977 HMODULE decoder_dll = NULL;
979 CLSID clsid = {};
981 // Profile must fall within the valid range for one of the supported codecs.
982 if (profile >= media::H264PROFILE_MIN && profile <= media::H264PROFILE_MAX) {
983 // We mimic the steps CoCreateInstance uses to instantiate the object. This
984 // was previously done because it failed inside the sandbox, and now is done
985 // as a more minimal approach to avoid other side-effects CCI might have (as
986 // we are still in a reduced sandbox).
987 decoder_dll = ::LoadLibrary(L"msmpeg2vdec.dll");
988 RETURN_ON_FAILURE(decoder_dll,
989 "msmpeg2vdec.dll required for decoding is not loaded",
990 false);
992 // Check version of DLL, version 6.7.7140 is blacklisted due to high crash
993 // rates in browsers loading that DLL. If that is the version installed we
994 // fall back to software decoding. See crbug/403440.
995 FileVersionInfo* version_info =
996 FileVersionInfo::CreateFileVersionInfoForModule(decoder_dll);
997 RETURN_ON_FAILURE(version_info,
998 "unable to get version of msmpeg2vdec.dll",
999 false);
1000 base::string16 file_version = version_info->file_version();
1001 RETURN_ON_FAILURE(file_version.find(L"6.1.7140") == base::string16::npos,
1002 "blacklisted version of msmpeg2vdec.dll 6.7.7140",
1003 false);
1004 codec_ = media::kCodecH264;
1005 clsid = __uuidof(CMSH264DecoderMFT);
1006 } else if ((profile == media::VP8PROFILE_ANY ||
1007 profile == media::VP9PROFILE_ANY) &&
1008 base::CommandLine::ForCurrentProcess()->HasSwitch(
1009 switches::kEnableAcceleratedVpxDecode)) {
1010 int program_files_key = base::DIR_PROGRAM_FILES;
1011 if (base::win::OSInfo::GetInstance()->wow64_status() ==
1012 base::win::OSInfo::WOW64_ENABLED) {
1013 program_files_key = base::DIR_PROGRAM_FILES6432;
1016 base::FilePath dll_path;
1017 RETURN_ON_FAILURE(PathService::Get(program_files_key, &dll_path),
1018 "failed to get path for Program Files", false);
1020 dll_path = dll_path.Append(kVPXDecoderDLLPath);
1021 if (profile == media::VP8PROFILE_ANY) {
1022 codec_ = media::kCodecVP8;
1023 dll_path = dll_path.Append(kVP8DecoderDLLName);
1024 clsid = CLSID_WebmMfVp8Dec;
1025 } else {
1026 codec_ = media::kCodecVP9;
1027 dll_path = dll_path.Append(kVP9DecoderDLLName);
1028 clsid = CLSID_WebmMfVp9Dec;
1030 decoder_dll = ::LoadLibraryEx(dll_path.value().data(), NULL,
1031 LOAD_WITH_ALTERED_SEARCH_PATH);
1032 RETURN_ON_FAILURE(decoder_dll, "vpx decoder dll is not loaded", false);
1033 } else {
1034 RETURN_ON_FAILURE(false, "Unsupported codec.", false);
1037 HRESULT hr = CreateCOMObjectFromDll(decoder_dll,
1038 clsid,
1039 __uuidof(IMFTransform),
1040 decoder_.ReceiveVoid());
1041 RETURN_ON_HR_FAILURE(hr, "Failed to create decoder instance", false);
1043 RETURN_ON_FAILURE(CheckDecoderDxvaSupport(),
1044 "Failed to check decoder DXVA support", false);
1046 ULONG_PTR device_manager_to_use = NULL;
1047 if (use_dx11_) {
1048 CHECK(create_dxgi_device_manager_);
1049 RETURN_AND_NOTIFY_ON_FAILURE(CreateDX11DevManager(),
1050 "Failed to initialize DX11 device and manager",
1051 PLATFORM_FAILURE,
1052 false);
1053 device_manager_to_use = reinterpret_cast<ULONG_PTR>(
1054 d3d11_device_manager_.get());
1055 } else {
1056 RETURN_AND_NOTIFY_ON_FAILURE(CreateD3DDevManager(),
1057 "Failed to initialize D3D device and manager",
1058 PLATFORM_FAILURE,
1059 false);
1060 device_manager_to_use = reinterpret_cast<ULONG_PTR>(device_manager_.get());
1063 hr = decoder_->ProcessMessage(
1064 MFT_MESSAGE_SET_D3D_MANAGER,
1065 device_manager_to_use);
1066 if (use_dx11_) {
1067 RETURN_ON_HR_FAILURE(hr, "Failed to pass DX11 manager to decoder", false);
1068 } else {
1069 RETURN_ON_HR_FAILURE(hr, "Failed to pass D3D manager to decoder", false);
1072 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay();
1074 EGLint config_attribs[] = {
1075 EGL_BUFFER_SIZE, 32,
1076 EGL_RED_SIZE, 8,
1077 EGL_GREEN_SIZE, 8,
1078 EGL_BLUE_SIZE, 8,
1079 EGL_SURFACE_TYPE, EGL_PBUFFER_BIT,
1080 EGL_ALPHA_SIZE, 0,
1081 EGL_NONE
1084 EGLint num_configs;
1086 if (!eglChooseConfig(
1087 egl_display,
1088 config_attribs,
1089 &egl_config_,
1091 &num_configs))
1092 return false;
1094 return SetDecoderMediaTypes();
// Verifies the MFT is D3D-aware (a hard requirement), opts into DXVA H/W
// H.264 decoding and low-latency mode where available, and decides whether
// the DX11 path should be used. Returns false only if the attribute queries
// fail or the MFT is not D3D-aware.
bool DXVAVideoDecodeAccelerator::CheckDecoderDxvaSupport() {
  base::win::ScopedComPtr<IMFAttributes> attributes;
  HRESULT hr = decoder_->GetAttributes(attributes.Receive());
  RETURN_ON_HR_FAILURE(hr, "Failed to get decoder attributes", false);

  UINT32 dxva = 0;
  hr = attributes->GetUINT32(MF_SA_D3D_AWARE, &dxva);
  RETURN_ON_HR_FAILURE(hr, "Failed to check if decoder supports DXVA", false);

  if (codec_ == media::kCodecH264) {
    hr = attributes->SetUINT32(CODECAPI_AVDecVideoAcceleration_H264, TRUE);
    RETURN_ON_HR_FAILURE(hr, "Failed to enable DXVA H/W decoding", false);
  }

  // Low latency mode is best-effort: failure is only logged, not fatal.
  hr = attributes->SetUINT32(CODECAPI_AVLowLatencyMode, TRUE);
  if (SUCCEEDED(hr)) {
    DVLOG(1) << "Successfully set Low latency mode on decoder.";
  } else {
    DVLOG(1) << "Failed to set Low latency mode on decoder. Error: " << hr;
  }

  // The decoder should use DX11 iff
  // 1. The underlying H/W decoder supports it.
  // 2. We have a pointer to the MFCreateDXGIDeviceManager function needed for
  //    this. This should always be true for Windows 8+.
  // 3. ANGLE is using DX11.
  DCHECK(gl_context_);
  if (create_dxgi_device_manager_ &&
      (gl_context_->GetGLRenderer().find("Direct3D11") !=
       std::string::npos)) {
    UINT32 dx11_aware = 0;
    attributes->GetUINT32(MF_SA_D3D11_AWARE, &dx11_aware);
    use_dx11_ = !!dx11_aware;
  }
  return true;
}
1134 bool DXVAVideoDecodeAccelerator::SetDecoderMediaTypes() {
1135 RETURN_ON_FAILURE(SetDecoderInputMediaType(),
1136 "Failed to set decoder input media type", false);
1137 return SetDecoderOutputMediaType(MFVideoFormat_NV12);
1140 bool DXVAVideoDecodeAccelerator::SetDecoderInputMediaType() {
1141 base::win::ScopedComPtr<IMFMediaType> media_type;
1142 HRESULT hr = MFCreateMediaType(media_type.Receive());
1143 RETURN_ON_HR_FAILURE(hr, "MFCreateMediaType failed", false);
1145 hr = media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
1146 RETURN_ON_HR_FAILURE(hr, "Failed to set major input type", false);
1148 if (codec_ == media::kCodecH264) {
1149 hr = media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264);
1150 } else if (codec_ == media::kCodecVP8) {
1151 hr = media_type->SetGUID(MF_MT_SUBTYPE, MEDIASUBTYPE_VP80);
1152 } else if (codec_ == media::kCodecVP9) {
1153 hr = media_type->SetGUID(MF_MT_SUBTYPE, MEDIASUBTYPE_VP90);
1154 } else {
1155 NOTREACHED();
1156 RETURN_ON_FAILURE(false, "Unsupported codec on input media type.", false);
1158 RETURN_ON_HR_FAILURE(hr, "Failed to set subtype", false);
1160 // Not sure about this. msdn recommends setting this value on the input
1161 // media type.
1162 hr = media_type->SetUINT32(MF_MT_INTERLACE_MODE,
1163 MFVideoInterlace_MixedInterlaceOrProgressive);
1164 RETURN_ON_HR_FAILURE(hr, "Failed to set interlace mode", false);
1166 hr = decoder_->SetInputType(0, media_type.get(), 0); // No flags
1167 RETURN_ON_HR_FAILURE(hr, "Failed to set decoder input type", false);
1168 return true;
// Walks the MFT's available output types and selects the first one whose
// subtype matches |subtype|. Returns false if no matching type is offered.
bool DXVAVideoDecodeAccelerator::SetDecoderOutputMediaType(
    const GUID& subtype) {
  base::win::ScopedComPtr<IMFMediaType> out_media_type;

  for (uint32 i = 0;
       SUCCEEDED(decoder_->GetOutputAvailableType(0, i,
                                                  out_media_type.Receive()));
       ++i) {
    GUID out_subtype = {0};
    HRESULT hr = out_media_type->GetGUID(MF_MT_SUBTYPE, &out_subtype);
    RETURN_ON_HR_FAILURE(hr, "Failed to get output major type", false);

    if (out_subtype == subtype) {
      hr = decoder_->SetOutputType(0, out_media_type.get(), 0);  // No flags
      RETURN_ON_HR_FAILURE(hr, "Failed to set decoder output type", false);
      return true;
    }
    // Release before the next Receive() so the smart pointer can be reused.
    out_media_type.Release();
  }
  return false;
}
1193 bool DXVAVideoDecodeAccelerator::SendMFTMessage(MFT_MESSAGE_TYPE msg,
1194 int32 param) {
1195 HRESULT hr = decoder_->ProcessMessage(msg, param);
1196 return SUCCEEDED(hr);
// Gets the minimum buffer sizes for input and output samples. The MFT will not
// allocate buffer for input nor output, so we have to do it ourselves and make
// sure they're the correct size. We only provide decoding if DXVA is enabled.
bool DXVAVideoDecodeAccelerator::GetStreamsInfoAndBufferReqs() {
  HRESULT hr = decoder_->GetInputStreamInfo(0, &input_stream_info_);
  RETURN_ON_HR_FAILURE(hr, "Failed to get input stream info", false);

  hr = decoder_->GetOutputStreamInfo(0, &output_stream_info_);
  RETURN_ON_HR_FAILURE(hr, "Failed to get decoder output stream info", false);

  DVLOG(1) << "Input stream info: ";
  DVLOG(1) << "Max latency: " << input_stream_info_.hnsMaxLatency;
  if (codec_ == media::kCodecH264) {
    // There should be three flags, one for requiring a whole frame be in a
    // single sample, one for requiring there be one buffer only in a single
    // sample, and one that specifies a fixed sample size. (as in cbSize)
    CHECK_EQ(input_stream_info_.dwFlags, 0x7u);
  }

  DVLOG(1) << "Min buffer size: " << input_stream_info_.cbSize;
  DVLOG(1) << "Max lookahead: " << input_stream_info_.cbMaxLookahead;
  DVLOG(1) << "Alignment: " << input_stream_info_.cbAlignment;

  DVLOG(1) << "Output stream info: ";
  // The flags here should be the same and mean the same thing, except when
  // DXVA is enabled, there is an extra 0x100 flag meaning decoder will
  // allocate its own sample.
  DVLOG(1) << "Flags: "
           << std::hex << std::showbase << output_stream_info_.dwFlags;
  if (codec_ == media::kCodecH264) {
    CHECK_EQ(output_stream_info_.dwFlags, 0x107u);
  }
  DVLOG(1) << "Min buffer size: " << output_stream_info_.cbSize;
  DVLOG(1) << "Alignment: " << output_stream_info_.cbAlignment;
  return true;
}
// Pulls one decoded sample out of the MFT via ProcessOutput. Handles the two
// expected failure modes (stream/format change, need-more-input); any decoded
// sample is queued through ProcessOutputSample. Runs on the decoder thread.
void DXVAVideoDecodeAccelerator::DoDecode() {
  DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
  // This function is also called from FlushInternal in a loop which could
  // result in the state transitioning to kStopped due to no decoded output.
  State state = GetState();
  RETURN_AND_NOTIFY_ON_FAILURE(
      (state == kNormal || state == kFlushing || state == kStopped),
      "DoDecode: not in normal/flushing/stopped state", ILLEGAL_STATE,);

  MFT_OUTPUT_DATA_BUFFER output_data_buffer = {0};
  DWORD status = 0;

  HRESULT hr = decoder_->ProcessOutput(0,  // No flags
                                       1,  // # of out streams to pull from
                                       &output_data_buffer,
                                       &status);
  // ProcessOutput can hand back an event collection; it must be released even
  // though we ignore its contents.
  IMFCollection* events = output_data_buffer.pEvents;
  if (events != NULL) {
    DVLOG(1) << "Got events from ProcessOuput, but discarding";
    events->Release();
  }
  if (FAILED(hr)) {
    // A stream change needs further ProcessInput calls to get back decoder
    // output which is why we need to set the state to stopped.
    if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
      if (!SetDecoderOutputMediaType(MFVideoFormat_NV12)) {
        // Decoder didn't let us set NV12 output format. Not sure as to why
        // this can happen. Give up in disgust.
        NOTREACHED() << "Failed to set decoder output media type to NV12";
        SetState(kStopped);
      } else {
        DVLOG(1) << "Received output format change from the decoder."
                    " Recursively invoking DoDecode";
        DoDecode();
      }
      return;
    } else if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
      // No more output from the decoder. Stop playback.
      SetState(kStopped);
      return;
    } else {
      NOTREACHED() << "Unhandled error in DoDecode()";
      return;
    }
  }
  TRACE_EVENT_END_ETW("DXVAVideoDecodeAccelerator.Decoding", this, "");

  TRACE_COUNTER1("DXVA Decoding", "TotalPacketsBeforeDecode",
                 inputs_before_decode_);

  inputs_before_decode_ = 0;

  RETURN_AND_NOTIFY_ON_FAILURE(ProcessOutputSample(output_data_buffer.pSample),
      "Failed to process output sample.", PLATFORM_FAILURE,);
}
// Queues a decoded |sample| for delivery. The sample's timestamp carries the
// bitstream buffer id it was decoded from. If the client has already been
// asked for picture buffers we schedule ProcessPendingSamples; otherwise we
// derive the frame dimensions and request buffers first. Decoder thread.
bool DXVAVideoDecodeAccelerator::ProcessOutputSample(IMFSample* sample) {
  RETURN_ON_FAILURE(sample, "Decode succeeded with NULL output sample", false);

  LONGLONG input_buffer_id = 0;
  RETURN_ON_HR_FAILURE(sample->GetSampleTime(&input_buffer_id),
                       "Failed to get input buffer id associated with sample",
                       false);

  {
    // |pending_output_samples_| is shared with the main thread.
    base::AutoLock lock(decoder_lock_);
    DCHECK(pending_output_samples_.empty());
    pending_output_samples_.push_back(
        PendingSampleInfo(input_buffer_id, sample));
  }

  if (pictures_requested_) {
    DVLOG(1) << "Waiting for picture slots from the client.";
    main_thread_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&DXVAVideoDecodeAccelerator::ProcessPendingSamples,
                   weak_this_factory_.GetWeakPtr()));
    return true;
  }

  int width = 0;
  int height = 0;
  if (!GetVideoFrameDimensions(sample, &width, &height)) {
    RETURN_ON_FAILURE(false, "Failed to get D3D surface from output sample",
                      false);
  }

  // Go ahead and request picture buffers.
  main_thread_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::RequestPictureBuffers,
                 weak_this_factory_.GetWeakPtr(),
                 width,
                 height));

  pictures_requested_ = true;
  return true;
}
// Copies queued decoded samples into available client picture buffers. Runs
// on the main thread so it can touch the GL context. Bails out (and later
// re-runs) on resolution change.
void DXVAVideoDecodeAccelerator::ProcessPendingSamples() {
  DCHECK(main_thread_task_runner_->BelongsToCurrentThread());

  // Nothing to do until the client has provided picture buffers.
  if (!output_picture_buffers_.size())
    return;

  RETURN_AND_NOTIFY_ON_FAILURE(make_context_current_.Run(),
      "Failed to make context current", PLATFORM_FAILURE,);

  OutputBuffers::iterator index;

  for (index = output_picture_buffers_.begin();
       index != output_picture_buffers_.end() &&
       OutputSamplesPresent();
       ++index) {
    if (index->second->available()) {
      PendingSampleInfo* pending_sample = NULL;
      {
        // Only peek at the head sample under the lock; the pointer stays
        // valid because samples are only popped on this thread.
        base::AutoLock lock(decoder_lock_);

        PendingSampleInfo& sample_info = pending_output_samples_.front();
        // Already assigned to a picture buffer; skip to the next buffer.
        if (sample_info.picture_buffer_id != -1)
          continue;
        pending_sample = &sample_info;
      }

      int width = 0;
      int height = 0;
      if (!GetVideoFrameDimensions(pending_sample->output_sample.get(),
          &width, &height)) {
        RETURN_AND_NOTIFY_ON_FAILURE(false,
            "Failed to get D3D surface from output sample", PLATFORM_FAILURE,);
      }

      // Buffer size mismatch means the stream resolution changed; re-request
      // buffers and retry later.
      if (width != index->second->size().width() ||
          height != index->second->size().height()) {
        HandleResolutionChanged(width, height);
        return;
      }

      base::win::ScopedComPtr<IMFMediaBuffer> output_buffer;
      HRESULT hr = pending_sample->output_sample->GetBufferByIndex(
          0, output_buffer.Receive());
      RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
          "Failed to get buffer from output sample", PLATFORM_FAILURE,);

      base::win::ScopedComPtr<IDirect3DSurface9> surface;
      base::win::ScopedComPtr<ID3D11Texture2D> d3d11_texture;

      // Extract the underlying D3D resource; the interface used depends on
      // whether the DX11 or the D3D9 path is active.
      if (use_dx11_) {
        base::win::ScopedComPtr<IMFDXGIBuffer> dxgi_buffer;
        hr = dxgi_buffer.QueryFrom(output_buffer.get());
        RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
            "Failed to get DXGIBuffer from output sample", PLATFORM_FAILURE,);
        hr = dxgi_buffer->GetResource(
            __uuidof(ID3D11Texture2D),
            reinterpret_cast<void**>(d3d11_texture.Receive()));
      } else {
        hr = MFGetService(output_buffer.get(), MR_BUFFER_SERVICE,
                          IID_PPV_ARGS(surface.Receive()));
      }
      RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
          "Failed to get surface from output sample", PLATFORM_FAILURE,);

      pending_sample->picture_buffer_id = index->second->id();

      RETURN_AND_NOTIFY_ON_FAILURE(
          index->second->CopyOutputSampleDataToPictureBuffer(
              this,
              surface.get(),
              d3d11_texture.get(),
              pending_sample->input_buffer_id),
          "Failed to copy output sample", PLATFORM_FAILURE,);

      index->second->set_available(false);
    }
  }
}
// Reports |error| to the client and tears the decoder down. Safe to call
// from any thread; re-posts itself to the main thread if needed.
void DXVAVideoDecodeAccelerator::StopOnError(
    media::VideoDecodeAccelerator::Error error) {
  if (!main_thread_task_runner_->BelongsToCurrentThread()) {
    main_thread_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&DXVAVideoDecodeAccelerator::StopOnError,
                   weak_this_factory_.GetWeakPtr(),
                   error));
    return;
  }

  if (client_)
    client_->NotifyError(error);
  // Clear the client so no further notifications are attempted.
  client_ = NULL;

  if (GetState() != kUninitialized) {
    Invalidate();
  }
}
// Releases all decoder state: stops the decoder thread, invalidates weak
// pointers (cancelling posted tasks), drops buffers/samples and releases the
// COM objects for whichever (DX11 or D3D9) path was active. Ends in
// kUninitialized. Idempotent.
void DXVAVideoDecodeAccelerator::Invalidate() {
  if (GetState() == kUninitialized)
    return;
  decoder_thread_.Stop();
  weak_this_factory_.InvalidateWeakPtrs();
  output_picture_buffers_.clear();
  stale_output_picture_buffers_.clear();
  pending_output_samples_.clear();
  pending_input_buffers_.clear();
  decoder_.Release();

  if (use_dx11_) {
    if (video_format_converter_mft_.get()) {
      // Tell the converter MFT streaming is over before releasing it.
      video_format_converter_mft_->ProcessMessage(
          MFT_MESSAGE_NOTIFY_END_STREAMING, 0);
      video_format_converter_mft_.Release();
    }
    d3d11_device_context_.Release();
    d3d11_device_.Release();
    d3d11_device_manager_.Release();
    d3d11_query_.Release();
    dx11_video_format_converter_media_type_needs_init_ = true;
  } else {
    d3d9_.Release();
    d3d9_device_ex_.Release();
    device_manager_.Release();
    query_.Release();
  }

  SetState(kUninitialized);
}
1466 void DXVAVideoDecodeAccelerator::NotifyInputBufferRead(int input_buffer_id) {
1467 DCHECK(main_thread_task_runner_->BelongsToCurrentThread());
1468 if (client_)
1469 client_->NotifyEndOfBitstreamBuffer(input_buffer_id);
// Completes a pending flush: clears the flush/drain bookkeeping and tells
// the client. A no-op if no flush is outstanding or the client is gone.
void DXVAVideoDecodeAccelerator::NotifyFlushDone() {
  DCHECK(main_thread_task_runner_->BelongsToCurrentThread());
  if (client_ && pending_flush_) {
    pending_flush_ = false;
    {
      // |sent_drain_message_| is shared with the decoder thread.
      base::AutoLock lock(decoder_lock_);
      sent_drain_message_ = false;
    }

    client_->NotifyFlushDone();
  }
}
1485 void DXVAVideoDecodeAccelerator::NotifyResetDone() {
1486 DCHECK(main_thread_task_runner_->BelongsToCurrentThread());
1487 if (client_)
1488 client_->NotifyResetDone();
1491 void DXVAVideoDecodeAccelerator::RequestPictureBuffers(int width, int height) {
1492 DCHECK(main_thread_task_runner_->BelongsToCurrentThread());
1493 // This task could execute after the decoder has been torn down.
1494 if (GetState() != kUninitialized && client_) {
1495 client_->ProvidePictureBuffers(
1496 kNumPictureBuffers,
1497 gfx::Size(width, height),
1498 GL_TEXTURE_2D);
1502 void DXVAVideoDecodeAccelerator::NotifyPictureReady(
1503 int picture_buffer_id,
1504 int input_buffer_id,
1505 const gfx::Rect& picture_buffer_size) {
1506 DCHECK(main_thread_task_runner_->BelongsToCurrentThread());
1507 // This task could execute after the decoder has been torn down.
1508 if (GetState() != kUninitialized && client_) {
1509 media::Picture picture(picture_buffer_id, input_buffer_id,
1510 picture_buffer_size, false);
1511 client_->PictureReady(picture);
// Tells the client that every queued (undecoded) input buffer has been
// consumed, then drops them. Used during Reset so the client does not wait
// for outputs that will never arrive.
void DXVAVideoDecodeAccelerator::NotifyInputBuffersDropped() {
  DCHECK(main_thread_task_runner_->BelongsToCurrentThread());
  if (!client_)
    return;

  for (PendingInputs::iterator it = pending_input_buffers_.begin();
       it != pending_input_buffers_.end(); ++it) {
    // The sample timestamp carries the bitstream buffer id.
    LONGLONG input_buffer_id = 0;
    RETURN_ON_HR_FAILURE((*it)->GetSampleTime(&input_buffer_id),
                         "Failed to get buffer id associated with sample",);
    client_->NotifyEndOfBitstreamBuffer(input_buffer_id);
  }
  pending_input_buffers_.clear();
}
1530 void DXVAVideoDecodeAccelerator::DecodePendingInputBuffers() {
1531 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
1532 State state = GetState();
1533 RETURN_AND_NOTIFY_ON_FAILURE((state != kUninitialized),
1534 "Invalid state: " << state, ILLEGAL_STATE,);
1536 if (pending_input_buffers_.empty() || OutputSamplesPresent())
1537 return;
1539 PendingInputs pending_input_buffers_copy;
1540 std::swap(pending_input_buffers_, pending_input_buffers_copy);
1542 for (PendingInputs::iterator it = pending_input_buffers_copy.begin();
1543 it != pending_input_buffers_copy.end(); ++it) {
1544 DecodeInternal(*it);
// Decoder-thread half of Flush(): first drains queued input, then sends the
// MFT drain message once, then pumps DoDecode until no output remains before
// signalling NotifyFlushDone. Re-posts itself while work is outstanding.
void DXVAVideoDecodeAccelerator::FlushInternal() {
  DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());

  // We allow only one output frame to be present at any given time. If we have
  // an output frame, then we cannot complete the flush at this time.
  if (OutputSamplesPresent())
    return;

  // First drain the pending input because once the drain message is sent below,
  // the decoder will ignore further input until it's drained.
  if (!pending_input_buffers_.empty()) {
    decoder_thread_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&DXVAVideoDecodeAccelerator::DecodePendingInputBuffers,
                   base::Unretained(this)));
    decoder_thread_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal,
                   base::Unretained(this)));
    return;
  }

  {
    // |sent_drain_message_| is shared with the main thread (NotifyFlushDone).
    base::AutoLock lock(decoder_lock_);
    if (!sent_drain_message_) {
      RETURN_AND_NOTIFY_ON_FAILURE(SendMFTMessage(MFT_MESSAGE_COMMAND_DRAIN, 0),
                                   "Failed to send drain message",
                                   PLATFORM_FAILURE,);
      sent_drain_message_ = true;
    }
  }

  // Attempt to retrieve an output frame from the decoder. If we have one,
  // return and proceed when the output frame is processed. If we don't have a
  // frame then we are done.
  DoDecode();
  if (OutputSamplesPresent())
    return;

  SetState(kFlushing);

  main_thread_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::NotifyFlushDone,
                 weak_this_factory_.GetWeakPtr()));

  SetState(kNormal);
}
// Feeds one input |sample| to the MFT on the decoder thread, handling the
// MF_E_NOTACCEPTING backpressure protocol, then pumps output and notifies
// the client the input was consumed.
void DXVAVideoDecodeAccelerator::DecodeInternal(
    const base::win::ScopedComPtr<IMFSample>& sample) {
  DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());

  if (GetState() == kUninitialized)
    return;

  // Preserve ordering: if output is pending or older input is queued, this
  // sample must wait its turn.
  if (OutputSamplesPresent() || !pending_input_buffers_.empty()) {
    pending_input_buffers_.push_back(sample);
    return;
  }

  if (!inputs_before_decode_) {
    TRACE_EVENT_BEGIN_ETW("DXVAVideoDecodeAccelerator.Decoding", this, "");
  }
  inputs_before_decode_++;

  HRESULT hr = decoder_->ProcessInput(0, sample.get(), 0);
  // As per msdn if the decoder returns MF_E_NOTACCEPTING then it means that it
  // has enough data to produce one or more output samples. In this case the
  // recommended options are to
  // 1. Generate new output by calling IMFTransform::ProcessOutput until it
  //    returns MF_E_TRANSFORM_NEED_MORE_INPUT.
  // 2. Flush the input data
  // We implement the first option, i.e to retrieve the output sample and then
  // process the input again. Failure in either of these steps is treated as a
  // decoder failure.
  if (hr == MF_E_NOTACCEPTING) {
    DoDecode();
    // If the DoDecode call resulted in an output frame then we should not
    // process any more input until that frame is copied to the target surface.
    if (!OutputSamplesPresent()) {
      State state = GetState();
      RETURN_AND_NOTIFY_ON_FAILURE((state == kStopped || state == kNormal ||
                                    state == kFlushing),
          "Failed to process output. Unexpected decoder state: " << state,
          PLATFORM_FAILURE,);
      hr = decoder_->ProcessInput(0, sample.get(), 0);
    }
    // If we continue to get the MF_E_NOTACCEPTING error we do the following:-
    // 1. Add the input sample to the pending queue.
    // 2. If we don't have any output samples we post the
    //    DecodePendingInputBuffers task to process the pending input samples.
    //    If we have an output sample then the above task is posted when the
    //    output samples are sent to the client.
    // This is because we only support 1 pending output sample at any
    // given time due to the limitation with the Microsoft media foundation
    // decoder where it recycles the output Decoder surfaces.
    if (hr == MF_E_NOTACCEPTING) {
      pending_input_buffers_.push_back(sample);
      decoder_thread_task_runner_->PostTask(
          FROM_HERE,
          base::Bind(&DXVAVideoDecodeAccelerator::DecodePendingInputBuffers,
                     base::Unretained(this)));
      return;
    }
  }
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to process input sample",
      PLATFORM_FAILURE,);

  DoDecode();

  State state = GetState();
  RETURN_AND_NOTIFY_ON_FAILURE((state == kStopped || state == kNormal ||
                                state == kFlushing),
      "Failed to process output. Unexpected decoder state: " << state,
      ILLEGAL_STATE,);

  LONGLONG input_buffer_id = 0;
  RETURN_ON_HR_FAILURE(sample->GetSampleTime(&input_buffer_id),
                       "Failed to get input buffer id associated with sample",);
  // The Microsoft Media foundation decoder internally buffers up to 30 frames
  // before returning a decoded frame. We need to inform the client that this
  // input buffer is processed as it may stop sending us further input.
  // Note: This may break clients which expect every input buffer to be
  // associated with a decoded output buffer.
  // TODO(ananta)
  // Do some more investigation into whether it is possible to get the MFT
  // decoder to emit an output packet for every input packet.
  // http://code.google.com/p/chromium/issues/detail?id=108121
  // http://code.google.com/p/chromium/issues/detail?id=150925
  main_thread_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::NotifyInputBufferRead,
                 weak_this_factory_.GetWeakPtr(),
                 input_buffer_id));
}
1685 void DXVAVideoDecodeAccelerator::HandleResolutionChanged(int width,
1686 int height) {
1687 dx11_video_format_converter_media_type_needs_init_ = true;
1689 main_thread_task_runner_->PostTask(
1690 FROM_HERE,
1691 base::Bind(&DXVAVideoDecodeAccelerator::DismissStaleBuffers,
1692 weak_this_factory_.GetWeakPtr()));
1694 main_thread_task_runner_->PostTask(
1695 FROM_HERE,
1696 base::Bind(&DXVAVideoDecodeAccelerator::RequestPictureBuffers,
1697 weak_this_factory_.GetWeakPtr(),
1698 width,
1699 height));
1702 void DXVAVideoDecodeAccelerator::DismissStaleBuffers() {
1703 OutputBuffers::iterator index;
1705 for (index = output_picture_buffers_.begin();
1706 index != output_picture_buffers_.end();
1707 ++index) {
1708 if (index->second->available()) {
1709 DVLOG(1) << "Dismissing picture id: " << index->second->id();
1710 client_->DismissPictureBuffer(index->second->id());
1711 } else {
1712 // Move to |stale_output_picture_buffers_| for deferred deletion.
1713 stale_output_picture_buffers_.insert(
1714 std::make_pair(index->first, index->second));
1718 output_picture_buffers_.clear();
1721 void DXVAVideoDecodeAccelerator::DeferredDismissStaleBuffer(
1722 int32 picture_buffer_id) {
1723 OutputBuffers::iterator it = stale_output_picture_buffers_.find(
1724 picture_buffer_id);
1725 DCHECK(it != stale_output_picture_buffers_.end());
1726 DVLOG(1) << "Dismissing picture id: " << it->second->id();
1727 client_->DismissPictureBuffer(it->second->id());
1728 stale_output_picture_buffers_.erase(it);
1731 DXVAVideoDecodeAccelerator::State
1732 DXVAVideoDecodeAccelerator::GetState() {
1733 static_assert(sizeof(State) == sizeof(long), "mismatched type sizes");
1734 State state = static_cast<State>(
1735 InterlockedAdd(reinterpret_cast<volatile long*>(&state_), 0));
1736 return state;
// Atomically updates |state_|. State transitions are serialized onto the
// main thread; calls from other threads re-post themselves there.
void DXVAVideoDecodeAccelerator::SetState(State new_state) {
  if (!main_thread_task_runner_->BelongsToCurrentThread()) {
    main_thread_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&DXVAVideoDecodeAccelerator::SetState,
                   weak_this_factory_.GetWeakPtr(),
                   new_state));
    return;
  }

  static_assert(sizeof(State) == sizeof(long), "mismatched type sizes");
  ::InterlockedExchange(reinterpret_cast<volatile long*>(&state_),
                        new_state);
  DCHECK_EQ(state_, new_state);
}
// Starts the dedicated decode thread (COM STA, since init_com_with_mta is
// passed false) and caches its task runner.
void DXVAVideoDecodeAccelerator::StartDecoderThread() {
  decoder_thread_.init_com_with_mta(false);
  decoder_thread_.Start();
  decoder_thread_task_runner_ = decoder_thread_.task_runner();
}
// Returns true while a decoded sample is awaiting delivery to the client.
// Thread-safe: guarded by |decoder_lock_|.
bool DXVAVideoDecodeAccelerator::OutputSamplesPresent() {
  base::AutoLock lock(decoder_lock_);
  return !pending_output_samples_.empty();
}
// D3D9 path: color-converts/copies |src_surface| into |dest_surface| via
// StretchRect, then issues a GPU query and schedules FlushDecoder to poll
// for completion. Re-posts itself to the decoder thread if called elsewhere.
void DXVAVideoDecodeAccelerator::CopySurface(IDirect3DSurface9* src_surface,
                                             IDirect3DSurface9* dest_surface,
                                             int picture_buffer_id,
                                             int input_buffer_id) {
  if (!decoder_thread_task_runner_->BelongsToCurrentThread()) {
    decoder_thread_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&DXVAVideoDecodeAccelerator::CopySurface,
                   base::Unretained(this),
                   src_surface,
                   dest_surface,
                   picture_buffer_id,
                   input_buffer_id));
    return;
  }

  HRESULT hr = d3d9_device_ex_->StretchRect(src_surface, NULL, dest_surface,
                                            NULL, D3DTEXF_NONE);
  RETURN_ON_HR_FAILURE(hr, "Colorspace conversion via StretchRect failed",);

  // Ideally, this should be done immediately before the draw call that uses
  // the texture. Flush it once here though.
  hr = query_->Issue(D3DISSUE_END);
  RETURN_ON_HR_FAILURE(hr, "Failed to issue END",);

  // Flush the decoder device to ensure that the decoded frame is copied to the
  // target surface.
  decoder_thread_task_runner_->PostDelayedTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::FlushDecoder,
                 base::Unretained(this), 0, src_surface, dest_surface,
                 picture_buffer_id, input_buffer_id),
      base::TimeDelta::FromMilliseconds(kFlushDecoderSurfaceTimeoutMs));
}
// Main-thread completion of a surface copy: hands the finished picture to the
// client, pops the pending output sample and resumes either the flush or the
// queued-input pipeline on the decoder thread.
void DXVAVideoDecodeAccelerator::CopySurfaceComplete(
    IDirect3DSurface9* src_surface,
    IDirect3DSurface9* dest_surface,
    int picture_buffer_id,
    int input_buffer_id) {
  DCHECK(main_thread_task_runner_->BelongsToCurrentThread());

  // The output buffers may have changed in the following scenarios:-
  // 1. A resolution change.
  // 2. Decoder instance was destroyed.
  // Ignore copy surface notifications for such buffers.
  OutputBuffers::iterator it = output_picture_buffers_.find(picture_buffer_id);
  if (it == output_picture_buffers_.end())
    return;

  // If the picture buffer is marked as available it probably means that there
  // was a Reset operation which dropped the output frame.
  DXVAPictureBuffer* picture_buffer = it->second.get();
  if (picture_buffer->available())
    return;

  RETURN_AND_NOTIFY_ON_FAILURE(make_context_current_.Run(),
      "Failed to make context current", PLATFORM_FAILURE,);

  DCHECK(!output_picture_buffers_.empty());

  picture_buffer->CopySurfaceComplete(src_surface,
                                      dest_surface);

  NotifyPictureReady(picture_buffer->id(),
                     input_buffer_id,
                     gfx::Rect(picture_buffer->size()));

  {
    // The delivered sample is no longer pending.
    base::AutoLock lock(decoder_lock_);
    if (!pending_output_samples_.empty())
      pending_output_samples_.pop_front();
  }

  if (pending_flush_) {
    decoder_thread_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal,
                   base::Unretained(this)));
    return;
  }
  decoder_thread_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::DecodePendingInputBuffers,
                 base::Unretained(this)));
}
1854 void DXVAVideoDecodeAccelerator::CopyTexture(ID3D11Texture2D* src_texture,
1855 ID3D11Texture2D* dest_texture,
1856 IMFSample* video_frame,
1857 int picture_buffer_id,
1858 int input_buffer_id) {
1859 HRESULT hr = E_FAIL;
1861 DCHECK(use_dx11_);
1863 if (!decoder_thread_task_runner_->BelongsToCurrentThread()) {
1864 // The media foundation H.264 decoder outputs YUV12 textures which we
1865 // cannot copy into ANGLE as they expect ARGB textures. In D3D land
1866 // the StretchRect API in the IDirect3DDevice9Ex interface did the color
1867 // space conversion for us. Sadly in DX11 land the API does not provide
1868 // a straightforward way to do this.
1869 // We use the video processor MFT.
1870 // https://msdn.microsoft.com/en-us/library/hh162913(v=vs.85).aspx
1871 // This object implements a media foundation transform (IMFTransform)
1872 // which follows the same contract as the decoder. The color space
1873 // conversion as per msdn is done in the GPU.
1875 D3D11_TEXTURE2D_DESC source_desc;
1876 src_texture->GetDesc(&source_desc);
1878 // Set up the input and output types for the video processor MFT.
1879 if (!InitializeDX11VideoFormatConverterMediaType(source_desc.Width,
1880 source_desc.Height)) {
1881 RETURN_AND_NOTIFY_ON_FAILURE(
1882 false, "Failed to initialize media types for convesion.",
1883 PLATFORM_FAILURE,);
1886 // The input to the video processor is the output sample.
1887 base::win::ScopedComPtr<IMFSample> input_sample_for_conversion;
1889 base::AutoLock lock(decoder_lock_);
1890 PendingSampleInfo& sample_info = pending_output_samples_.front();
1891 input_sample_for_conversion = sample_info.output_sample;
1894 decoder_thread_task_runner_->PostTask(
1895 FROM_HERE,
1896 base::Bind(&DXVAVideoDecodeAccelerator::CopyTexture,
1897 base::Unretained(this),
1898 src_texture,
1899 dest_texture,
1900 input_sample_for_conversion.Detach(),
1901 picture_buffer_id,
1902 input_buffer_id));
1903 return;
1906 DCHECK(video_frame);
1908 base::win::ScopedComPtr<IMFSample> input_sample;
1909 input_sample.Attach(video_frame);
1911 DCHECK(video_format_converter_mft_.get());
1913 // d3d11_device_context_->Begin(d3d11_query_.get());
1915 hr = video_format_converter_mft_->ProcessInput(0, video_frame, 0);
1916 if (FAILED(hr)) {
1917 DCHECK(false);
1918 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
1919 "Failed to convert output sample format.", PLATFORM_FAILURE,);
1922 // The video processor MFT requires output samples to be allocated by the
1923 // caller. We create a sample with a buffer backed with the ID3D11Texture2D
1924 // interface exposed by ANGLE. This works nicely as this ensures that the
1925 // video processor coverts the color space of the output frame and copies
1926 // the result into the ANGLE texture.
1927 base::win::ScopedComPtr<IMFSample> output_sample;
1928 hr = MFCreateSample(output_sample.Receive());
1929 if (FAILED(hr)) {
1930 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
1931 "Failed to create output sample.", PLATFORM_FAILURE,);
1934 base::win::ScopedComPtr<IMFMediaBuffer> output_buffer;
1935 hr = MFCreateDXGISurfaceBuffer(
1936 __uuidof(ID3D11Texture2D), dest_texture, 0, FALSE,
1937 output_buffer.Receive());
1938 if (FAILED(hr)) {
1939 base::debug::Alias(&hr);
1940 // TODO(ananta)
1941 // Remove this CHECK when the change to use DX11 for H/W decoding
1942 // stablizes.
1943 CHECK(false);
1944 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
1945 "Failed to create output sample.", PLATFORM_FAILURE,);
1948 output_sample->AddBuffer(output_buffer.get());
1950 DWORD status = 0;
1951 MFT_OUTPUT_DATA_BUFFER format_converter_output = {};
1952 format_converter_output.pSample = output_sample.get();
1953 hr = video_format_converter_mft_->ProcessOutput(
1954 0, // No flags
1955 1, // # of out streams to pull from
1956 &format_converter_output,
1957 &status);
1959 d3d11_device_context_->Flush();
1960 d3d11_device_context_->End(d3d11_query_.get());
1962 if (FAILED(hr)) {
1963 base::debug::Alias(&hr);
1964 // TODO(ananta)
1965 // Remove this CHECK when the change to use DX11 for H/W decoding
1966 // stablizes.
1967 CHECK(false);
1968 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
1969 "Failed to convert output sample format.", PLATFORM_FAILURE,);
1972 decoder_thread_task_runner_->PostDelayedTask(
1973 FROM_HERE,
1974 base::Bind(&DXVAVideoDecodeAccelerator::FlushDecoder,
1975 base::Unretained(this), 0,
1976 reinterpret_cast<IDirect3DSurface9*>(NULL),
1977 reinterpret_cast<IDirect3DSurface9*>(NULL),
1978 picture_buffer_id, input_buffer_id),
1979 base::TimeDelta::FromMilliseconds(
1980 kFlushDecoderSurfaceTimeoutMs));
// Polls (on the decoder thread) whether the GPU has finished the surface copy
// issued by CopySurface/CopyTexture; retries with a delay up to
// kMaxIterationsForD3DFlush times, then notifies the main thread via
// CopySurfaceComplete.
void DXVAVideoDecodeAccelerator::FlushDecoder(
    int iterations,
    IDirect3DSurface9* src_surface,
    IDirect3DSurface9* dest_surface,
    int picture_buffer_id,
    int input_buffer_id) {
  DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());

  // The DXVA decoder has its own device which it uses for decoding. ANGLE
  // has its own device which we don't have access to.
  // The above code attempts to copy the decoded picture into a surface
  // which is owned by ANGLE. As there are multiple devices involved in
  // this, the StretchRect call above is not synchronous.
  // We attempt to flush the batched operations to ensure that the picture is
  // copied to the surface owned by ANGLE.
  // We need to do this in a loop and call flush multiple times.
  // We have seen the GetData call for flushing the command buffer fail to
  // return success occasionally on multi core machines, leading to an
  // infinite loop.
  // Workaround is to have an upper limit of 4 on the number of iterations to
  // wait for the Flush to finish.
  HRESULT hr = E_FAIL;

  if (use_dx11_) {
    // DX11 path: poll the event query issued by CopyTexture after the format
    // conversion.
    BOOL query_data = 0;
    hr = d3d11_device_context_->GetData(d3d11_query_.get(), &query_data,
                                        sizeof(BOOL), 0);
    if (FAILED(hr)) {
      base::debug::Alias(&hr);
      // TODO(ananta)
      // Remove this CHECK when the change to use DX11 for H/W decoding
      // stabilizes.
      CHECK(false);
    }
  } else {
    // D3D9 path: D3DGETDATA_FLUSH also flushes the command batch while
    // polling the query issued after StretchRect.
    hr = query_->GetData(NULL, 0, D3DGETDATA_FLUSH);
  }

  // S_FALSE means the GPU has not finished yet; retry after a delay, bounded
  // by kMaxIterationsForD3DFlush (see workaround note above).
  if ((hr == S_FALSE) && (++iterations < kMaxIterationsForD3DFlush)) {
    decoder_thread_task_runner_->PostDelayedTask(
        FROM_HERE,
        base::Bind(&DXVAVideoDecodeAccelerator::FlushDecoder,
                   base::Unretained(this), iterations, src_surface,
                   dest_surface, picture_buffer_id, input_buffer_id),
        base::TimeDelta::FromMilliseconds(kFlushDecoderSurfaceTimeoutMs));
    return;
  }

  // Done (or gave up waiting): tell the main thread the copy completed. Uses
  // a weak pointer since |this| may be destroyed on the main thread first.
  main_thread_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::CopySurfaceComplete,
                 weak_this_factory_.GetWeakPtr(),
                 src_surface,
                 dest_surface,
                 picture_buffer_id,
                 input_buffer_id));
}
// Negotiates the input (NV12, |width| x |height|, progressive) and output
// (ARGB32) media types on the video processor MFT used for color conversion.
// Returns true on success; a no-op returning true if the types are already
// initialized. Returns false if no ARGB32 output type is offered by the MFT.
bool DXVAVideoDecodeAccelerator::InitializeDX11VideoFormatConverterMediaType(
    int width, int height) {
  if (!dx11_video_format_converter_media_type_needs_init_)
    return true;

  CHECK(video_format_converter_mft_.get());

  // Hand the converter our D3D11 device manager so the conversion runs on
  // the GPU.
  HRESULT hr = video_format_converter_mft_->ProcessMessage(
      MFT_MESSAGE_SET_D3D_MANAGER,
      reinterpret_cast<ULONG_PTR>(
          d3d11_device_manager_.get()));

  if (FAILED(hr)) {
    base::debug::Alias(&hr);
    // TODO(ananta)
    // Remove this CHECK when the change to use DX11 for H/W decoding
    // stabilizes.
    CHECK(false);
  }
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
      "Failed to initialize video format converter", PLATFORM_FAILURE, false);

  // End any previous streaming session before (re)negotiating media types,
  // e.g. after a resolution change.
  video_format_converter_mft_->ProcessMessage(
      MFT_MESSAGE_NOTIFY_END_STREAMING, 0);

  base::win::ScopedComPtr<IMFMediaType> media_type;
  hr = MFCreateMediaType(media_type.Receive());
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "MFCreateMediaType failed",
      PLATFORM_FAILURE, false);

  hr = media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set major input type",
      PLATFORM_FAILURE, false);

  // The decoder produces NV12 frames; that is the converter's input format.
  hr = media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_NV12);
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set input sub type",
      PLATFORM_FAILURE, false);

  hr = media_type->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
      "Failed to set attributes on media type", PLATFORM_FAILURE, false);

  hr = media_type->SetUINT32(MF_MT_INTERLACE_MODE,
                             MFVideoInterlace_Progressive);
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
      "Failed to set attributes on media type", PLATFORM_FAILURE, false);

  base::win::ScopedComPtr<IMFAttributes> converter_attributes;
  hr = video_format_converter_mft_->GetAttributes(
      converter_attributes.Receive());
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to get converter attributes",
      PLATFORM_FAILURE, false);

  hr = converter_attributes->SetUINT32(MF_XVP_PLAYBACK_MODE, TRUE);
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set converter attributes",
      PLATFORM_FAILURE, false);

  hr = converter_attributes->SetUINT32(MF_LOW_LATENCY, FALSE);
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set converter attributes",
      PLATFORM_FAILURE, false);

  hr = MFSetAttributeSize(media_type.get(), MF_MT_FRAME_SIZE, width, height);
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set media type attributes",
      PLATFORM_FAILURE, false);

  hr = video_format_converter_mft_->SetInputType(0, media_type.get(), 0);
  if (FAILED(hr)) {
    base::debug::Alias(&hr);
    // TODO(ananta)
    // Remove this CHECK when the change to use DX11 for H/W decoding
    // stabilizes.
    CHECK(false);
  }
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set converter input type",
      PLATFORM_FAILURE, false);

  // Walk the converter's available output types looking for ARGB32, which is
  // what the ANGLE texture expects.
  base::win::ScopedComPtr<IMFMediaType> out_media_type;

  for (uint32 i = 0;
       SUCCEEDED(video_format_converter_mft_->GetOutputAvailableType(0, i,
           out_media_type.Receive()));
       ++i) {
    GUID out_subtype = {0};
    hr = out_media_type->GetGUID(MF_MT_SUBTYPE, &out_subtype);
    RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to get output major type",
        PLATFORM_FAILURE, false);

    if (out_subtype == MFVideoFormat_ARGB32) {
      hr = out_media_type->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);
      RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
          "Failed to set attributes on media type", PLATFORM_FAILURE, false);

      hr = out_media_type->SetUINT32(MF_MT_INTERLACE_MODE,
                                     MFVideoInterlace_Progressive);
      RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
          "Failed to set attributes on media type", PLATFORM_FAILURE, false);

      hr = MFSetAttributeSize(out_media_type.get(), MF_MT_FRAME_SIZE, width,
                              height);
      RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
          "Failed to set media type attributes", PLATFORM_FAILURE, false);

      hr = video_format_converter_mft_->SetOutputType(
          0, out_media_type.get(), 0);  // No flags
      if (FAILED(hr)) {
        base::debug::Alias(&hr);
        // TODO(ananta)
        // Remove this CHECK when the change to use DX11 for H/W decoding
        // stabilizes.
        CHECK(false);
      }
      RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
          "Failed to set converter output type", PLATFORM_FAILURE, false);

      hr = video_format_converter_mft_->ProcessMessage(
          MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0);
      if (FAILED(hr)) {
        // TODO(ananta)
        // Remove this CHECK when the change to use DX11 for H/W decoding
        // stabilizes.
        RETURN_AND_NOTIFY_ON_FAILURE(
            false, "Failed to initialize video converter.", PLATFORM_FAILURE,
            false);
      }
      dx11_video_format_converter_media_type_needs_init_ = false;
      return true;
    }
    // Not ARGB32: release so Receive() can be reused on the next iteration.
    out_media_type.Release();
  }
  return false;
}
2172 bool DXVAVideoDecodeAccelerator::GetVideoFrameDimensions(
2173 IMFSample* sample,
2174 int* width,
2175 int* height) {
2176 base::win::ScopedComPtr<IMFMediaBuffer> output_buffer;
2177 HRESULT hr = sample->GetBufferByIndex(0, output_buffer.Receive());
2178 RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from output sample", false);
2180 if (use_dx11_) {
2181 base::win::ScopedComPtr<IMFDXGIBuffer> dxgi_buffer;
2182 base::win::ScopedComPtr<ID3D11Texture2D> d3d11_texture;
2183 hr = dxgi_buffer.QueryFrom(output_buffer.get());
2184 RETURN_ON_HR_FAILURE(hr, "Failed to get DXGIBuffer from output sample",
2185 false);
2186 hr = dxgi_buffer->GetResource(
2187 __uuidof(ID3D11Texture2D),
2188 reinterpret_cast<void**>(d3d11_texture.Receive()));
2189 RETURN_ON_HR_FAILURE(hr, "Failed to get D3D11Texture from output buffer",
2190 false);
2191 D3D11_TEXTURE2D_DESC d3d11_texture_desc;
2192 d3d11_texture->GetDesc(&d3d11_texture_desc);
2193 *width = d3d11_texture_desc.Width;
2194 *height = d3d11_texture_desc.Height;
2195 } else {
2196 base::win::ScopedComPtr<IDirect3DSurface9> surface;
2197 hr = MFGetService(output_buffer.get(), MR_BUFFER_SERVICE,
2198 IID_PPV_ARGS(surface.Receive()));
2199 RETURN_ON_HR_FAILURE(hr, "Failed to get D3D surface from output sample",
2200 false);
2201 D3DSURFACE_DESC surface_desc;
2202 hr = surface->GetDesc(&surface_desc);
2203 RETURN_ON_HR_FAILURE(hr, "Failed to get surface description", false);
2204 *width = surface_desc.Width;
2205 *height = surface_desc.Height;
2207 return true;
2210 } // namespace content