[Android WebView] Fix webview perf bot switchover to use org.chromium.webview_shell...
[chromium-blink-merge.git] / content / common / gpu / media / dxva_video_decode_accelerator.cc
blob568b128e1d1bdea5e0d98bed04fda5db6b54b733
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #include "content/common/gpu/media/dxva_video_decode_accelerator.h"
7 #if !defined(OS_WIN)
8 #error This file should only be built on Windows.
9 #endif // !defined(OS_WIN)
11 #include <ks.h>
12 #include <codecapi.h>
13 #include <dxgi1_2.h>
14 #include <mfapi.h>
15 #include <mferror.h>
16 #include <wmcodecdsp.h>
18 #include "base/base_paths_win.h"
19 #include "base/bind.h"
20 #include "base/callback.h"
21 #include "base/command_line.h"
22 #include "base/debug/alias.h"
23 #include "base/file_version_info.h"
24 #include "base/files/file_path.h"
25 #include "base/logging.h"
26 #include "base/memory/scoped_ptr.h"
27 #include "base/memory/shared_memory.h"
28 #include "base/message_loop/message_loop.h"
29 #include "base/path_service.h"
30 #include "base/trace_event/trace_event.h"
31 #include "base/win/windows_version.h"
32 #include "media/base/win/mf_initializer.h"
33 #include "media/video/video_decode_accelerator.h"
34 #include "ui/gl/gl_bindings.h"
35 #include "ui/gl/gl_context.h"
36 #include "ui/gl/gl_surface_egl.h"
37 #include "ui/gl/gl_switches.h"
39 namespace {
41 // Path is appended on to the PROGRAM_FILES base path.
42 const wchar_t kVPXDecoderDLLPath[] = L"Intel\\Media SDK\\";
44 const wchar_t kVP8DecoderDLLName[] =
45 #if defined(ARCH_CPU_X86)
46 L"mfx_mft_vp8vd_32.dll";
47 #elif defined(ARCH_CPU_X86_64)
48 L"mfx_mft_vp8vd_64.dll";
49 #else
50 #error Unsupported Windows CPU Architecture
51 #endif
53 const wchar_t kVP9DecoderDLLName[] =
54 #if defined(ARCH_CPU_X86)
55 L"mfx_mft_vp9vd_32.dll";
56 #elif defined(ARCH_CPU_X86_64)
57 L"mfx_mft_vp9vd_64.dll";
58 #else
59 #error Unsupported Windows CPU Architecture
60 #endif
62 const CLSID CLSID_WebmMfVp8Dec = {
63 0x451e3cb7,
64 0x2622,
65 0x4ba5,
66 { 0x8e, 0x1d, 0x44, 0xb3, 0xc4, 0x1d, 0x09, 0x24 }
69 const CLSID CLSID_WebmMfVp9Dec = {
70 0x07ab4bd2,
71 0x1979,
72 0x4fcd,
73 { 0xa6, 0x97, 0xdf, 0x9a, 0xd1, 0x5b, 0x34, 0xfe }
76 const CLSID MEDIASUBTYPE_VP80 = {
77 0x30385056,
78 0x0000,
79 0x0010,
80 { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }
83 const CLSID MEDIASUBTYPE_VP90 = {
84 0x30395056,
85 0x0000,
86 0x0010,
87 { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }
90 // The CLSID of the video processor media foundation transform which we use for
91 // texture color conversion in DX11.
92 DEFINE_GUID(CLSID_VideoProcessorMFT,
93 0x88753b26, 0x5b24, 0x49bd, 0xb2, 0xe7, 0xc, 0x44, 0x5c, 0x78,
94 0xc9, 0x82);
96 // MF_XVP_PLAYBACK_MODE
97 // Data type: UINT32 (treat as BOOL)
98 // If this attribute is TRUE, the video processor will run in playback mode
99 // where it allows callers to allocate output samples and allows last frame
100 // regeneration (repaint).
101 DEFINE_GUID(MF_XVP_PLAYBACK_MODE, 0x3c5d293f, 0xad67, 0x4e29, 0xaf, 0x12,
102 0xcf, 0x3e, 0x23, 0x8a, 0xcc, 0xe9);
105 namespace content {
107 static const media::VideoCodecProfile kSupportedProfiles[] = {
108 media::H264PROFILE_BASELINE,
109 media::H264PROFILE_MAIN,
110 media::H264PROFILE_HIGH,
111 media::VP8PROFILE_ANY,
112 media::VP9PROFILE_ANY
115 CreateDXGIDeviceManager DXVAVideoDecodeAccelerator::create_dxgi_device_manager_
116 = NULL;
// Logs |log| and returns |ret| from the enclosing function when |result|
// is falsy.
#define RETURN_ON_FAILURE(result, log, ret)  \
  do {                                       \
    if (!(result)) {                         \
      DLOG(ERROR) << log;                    \
      return ret;                            \
    }                                        \
  } while (0)

// Same as RETURN_ON_FAILURE but for HRESULTs; appends the HRESULT in hex.
#define RETURN_ON_HR_FAILURE(result, log, ret)  \
  RETURN_ON_FAILURE(SUCCEEDED(result),          \
                    log << ", HRESULT: 0x" << std::hex << result, \
                    ret);

// Same as RETURN_ON_FAILURE, but also notifies the client of the error via
// StopOnError(|error_code|) before returning.
#define RETURN_AND_NOTIFY_ON_FAILURE(result, log, error_code, ret)  \
  do {                                                              \
    if (!(result)) {                                                \
      DVLOG(1) << log;                                              \
      StopOnError(error_code);                                      \
      return ret;                                                   \
    }                                                               \
  } while (0)

// HRESULT variant of RETURN_AND_NOTIFY_ON_FAILURE.
#define RETURN_AND_NOTIFY_ON_HR_FAILURE(result, log, error_code, ret)  \
  RETURN_AND_NOTIFY_ON_FAILURE(SUCCEEDED(result),                      \
                               log << ", HRESULT: 0x" << std::hex << result, \
                               error_code, ret);
enum {
  // Maximum number of iterations we allow before aborting the attempt to flush
  // the batched queries to the driver and allow torn/corrupt frames to be
  // rendered.
  kFlushDecoderSurfaceTimeoutMs = 1,
  // Maximum iterations where we try to flush the d3d device.
  kMaxIterationsForD3DFlush = 4,
  // We only request 5 picture buffers from the client which are used to hold
  // the decoded samples. These buffers are then reused when the client tells
  // us that it is done with the buffer.
  kNumPictureBuffers = 5,
};
158 static IMFSample* CreateEmptySample() {
159 base::win::ScopedComPtr<IMFSample> sample;
160 HRESULT hr = MFCreateSample(sample.Receive());
161 RETURN_ON_HR_FAILURE(hr, "MFCreateSample failed", NULL);
162 return sample.Detach();
165 // Creates a Media Foundation sample with one buffer of length |buffer_length|
166 // on a |align|-byte boundary. Alignment must be a perfect power of 2 or 0.
167 static IMFSample* CreateEmptySampleWithBuffer(int buffer_length, int align) {
168 CHECK_GT(buffer_length, 0);
170 base::win::ScopedComPtr<IMFSample> sample;
171 sample.Attach(CreateEmptySample());
173 base::win::ScopedComPtr<IMFMediaBuffer> buffer;
174 HRESULT hr = E_FAIL;
175 if (align == 0) {
176 // Note that MFCreateMemoryBuffer is same as MFCreateAlignedMemoryBuffer
177 // with the align argument being 0.
178 hr = MFCreateMemoryBuffer(buffer_length, buffer.Receive());
179 } else {
180 hr = MFCreateAlignedMemoryBuffer(buffer_length,
181 align - 1,
182 buffer.Receive());
184 RETURN_ON_HR_FAILURE(hr, "Failed to create memory buffer for sample", NULL);
186 hr = sample->AddBuffer(buffer.get());
187 RETURN_ON_HR_FAILURE(hr, "Failed to add buffer to sample", NULL);
189 buffer->SetCurrentLength(0);
190 return sample.Detach();
193 // Creates a Media Foundation sample with one buffer containing a copy of the
194 // given Annex B stream data.
195 // If duration and sample time are not known, provide 0.
196 // |min_size| specifies the minimum size of the buffer (might be required by
197 // the decoder for input). If no alignment is required, provide 0.
198 static IMFSample* CreateInputSample(const uint8* stream, int size,
199 int min_size, int alignment) {
200 CHECK(stream);
201 CHECK_GT(size, 0);
202 base::win::ScopedComPtr<IMFSample> sample;
203 sample.Attach(CreateEmptySampleWithBuffer(std::max(min_size, size),
204 alignment));
205 RETURN_ON_FAILURE(sample.get(), "Failed to create empty sample", NULL);
207 base::win::ScopedComPtr<IMFMediaBuffer> buffer;
208 HRESULT hr = sample->GetBufferByIndex(0, buffer.Receive());
209 RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from sample", NULL);
211 DWORD max_length = 0;
212 DWORD current_length = 0;
213 uint8* destination = NULL;
214 hr = buffer->Lock(&destination, &max_length, &current_length);
215 RETURN_ON_HR_FAILURE(hr, "Failed to lock buffer", NULL);
217 CHECK_EQ(current_length, 0u);
218 CHECK_GE(static_cast<int>(max_length), size);
219 memcpy(destination, stream, size);
221 hr = buffer->Unlock();
222 RETURN_ON_HR_FAILURE(hr, "Failed to unlock buffer", NULL);
224 hr = buffer->SetCurrentLength(size);
225 RETURN_ON_HR_FAILURE(hr, "Failed to set buffer length", NULL);
227 return sample.Detach();
230 static IMFSample* CreateSampleFromInputBuffer(
231 const media::BitstreamBuffer& bitstream_buffer,
232 DWORD stream_size,
233 DWORD alignment) {
234 base::SharedMemory shm(bitstream_buffer.handle(), true);
235 RETURN_ON_FAILURE(shm.Map(bitstream_buffer.size()),
236 "Failed in base::SharedMemory::Map", NULL);
238 return CreateInputSample(reinterpret_cast<const uint8*>(shm.memory()),
239 bitstream_buffer.size(),
240 stream_size,
241 alignment);
244 // Maintains information about a DXVA picture buffer, i.e. whether it is
245 // available for rendering, the texture information, etc.
246 struct DXVAVideoDecodeAccelerator::DXVAPictureBuffer {
247 public:
248 static linked_ptr<DXVAPictureBuffer> Create(
249 const DXVAVideoDecodeAccelerator& decoder,
250 const media::PictureBuffer& buffer,
251 EGLConfig egl_config);
252 ~DXVAPictureBuffer();
254 void ReusePictureBuffer();
255 // Copies the output sample data to the picture buffer provided by the
256 // client.
257 // The dest_surface parameter contains the decoded bits.
258 bool CopyOutputSampleDataToPictureBuffer(
259 DXVAVideoDecodeAccelerator* decoder,
260 IDirect3DSurface9* dest_surface,
261 ID3D11Texture2D* dx11_texture,
262 int input_buffer_id);
264 bool available() const {
265 return available_;
268 void set_available(bool available) {
269 available_ = available;
272 int id() const {
273 return picture_buffer_.id();
276 gfx::Size size() const {
277 return picture_buffer_.size();
280 // Called when the source surface |src_surface| is copied to the destination
281 // |dest_surface|
282 void CopySurfaceComplete(IDirect3DSurface9* src_surface,
283 IDirect3DSurface9* dest_surface);
285 private:
286 explicit DXVAPictureBuffer(const media::PictureBuffer& buffer);
288 bool available_;
289 media::PictureBuffer picture_buffer_;
290 EGLSurface decoding_surface_;
291 base::win::ScopedComPtr<IDirect3DTexture9> decoding_texture_;
292 base::win::ScopedComPtr<ID3D11Texture2D> dx11_decoding_texture_;
294 // The following |IDirect3DSurface9| interface pointers are used to hold
295 // references on the surfaces during the course of a StretchRect operation
296 // to copy the source surface to the target. The references are released
297 // when the StretchRect operation i.e. the copy completes.
298 base::win::ScopedComPtr<IDirect3DSurface9> decoder_surface_;
299 base::win::ScopedComPtr<IDirect3DSurface9> target_surface_;
301 // This ID3D11Texture2D interface pointer is used to hold a reference to the
302 // decoder texture during the course of a copy operation. This reference is
303 // released when the copy completes.
304 base::win::ScopedComPtr<ID3D11Texture2D> decoder_dx11_texture_;
306 // Set to true if RGB is supported by the texture.
307 // Defaults to true.
308 bool use_rgb_;
310 DISALLOW_COPY_AND_ASSIGN(DXVAPictureBuffer);
313 // static
314 linked_ptr<DXVAVideoDecodeAccelerator::DXVAPictureBuffer>
315 DXVAVideoDecodeAccelerator::DXVAPictureBuffer::Create(
316 const DXVAVideoDecodeAccelerator& decoder,
317 const media::PictureBuffer& buffer,
318 EGLConfig egl_config) {
319 linked_ptr<DXVAPictureBuffer> picture_buffer(new DXVAPictureBuffer(buffer));
321 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay();
323 EGLint use_rgb = 1;
324 eglGetConfigAttrib(egl_display, egl_config, EGL_BIND_TO_TEXTURE_RGB,
325 &use_rgb);
327 EGLint attrib_list[] = {
328 EGL_WIDTH, buffer.size().width(),
329 EGL_HEIGHT, buffer.size().height(),
330 EGL_TEXTURE_FORMAT, use_rgb ? EGL_TEXTURE_RGB : EGL_TEXTURE_RGBA,
331 EGL_TEXTURE_TARGET, EGL_TEXTURE_2D,
332 EGL_NONE
335 picture_buffer->decoding_surface_ = eglCreatePbufferSurface(
336 egl_display,
337 egl_config,
338 attrib_list);
339 RETURN_ON_FAILURE(picture_buffer->decoding_surface_,
340 "Failed to create surface",
341 linked_ptr<DXVAPictureBuffer>(NULL));
343 HANDLE share_handle = NULL;
344 EGLBoolean ret = eglQuerySurfacePointerANGLE(
345 egl_display,
346 picture_buffer->decoding_surface_,
347 EGL_D3D_TEXTURE_2D_SHARE_HANDLE_ANGLE,
348 &share_handle);
350 RETURN_ON_FAILURE(share_handle && ret == EGL_TRUE,
351 "Failed to query ANGLE surface pointer",
352 linked_ptr<DXVAPictureBuffer>(NULL));
354 HRESULT hr = E_FAIL;
355 if (decoder.d3d11_device_) {
356 base::win::ScopedComPtr<ID3D11Resource> resource;
357 hr = decoder.d3d11_device_->OpenSharedResource(
358 share_handle,
359 __uuidof(ID3D11Resource),
360 reinterpret_cast<void**>(resource.Receive()));
361 RETURN_ON_HR_FAILURE(hr, "Failed to open shared resource",
362 linked_ptr<DXVAPictureBuffer>(NULL));
363 hr = picture_buffer->dx11_decoding_texture_.QueryFrom(resource.get());
364 } else {
365 hr = decoder.d3d9_device_ex_->CreateTexture(
366 buffer.size().width(),
367 buffer.size().height(),
369 D3DUSAGE_RENDERTARGET,
370 use_rgb ? D3DFMT_X8R8G8B8 : D3DFMT_A8R8G8B8,
371 D3DPOOL_DEFAULT,
372 picture_buffer->decoding_texture_.Receive(),
373 &share_handle);
375 RETURN_ON_HR_FAILURE(hr, "Failed to create texture",
376 linked_ptr<DXVAPictureBuffer>(NULL));
377 picture_buffer->use_rgb_ = !!use_rgb;
378 return picture_buffer;
381 DXVAVideoDecodeAccelerator::DXVAPictureBuffer::DXVAPictureBuffer(
382 const media::PictureBuffer& buffer)
383 : available_(true),
384 picture_buffer_(buffer),
385 decoding_surface_(NULL),
386 use_rgb_(true) {
389 DXVAVideoDecodeAccelerator::DXVAPictureBuffer::~DXVAPictureBuffer() {
390 if (decoding_surface_) {
391 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay();
393 eglReleaseTexImage(
394 egl_display,
395 decoding_surface_,
396 EGL_BACK_BUFFER);
398 eglDestroySurface(
399 egl_display,
400 decoding_surface_);
401 decoding_surface_ = NULL;
405 void DXVAVideoDecodeAccelerator::DXVAPictureBuffer::ReusePictureBuffer() {
406 DCHECK(decoding_surface_);
407 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay();
408 eglReleaseTexImage(
409 egl_display,
410 decoding_surface_,
411 EGL_BACK_BUFFER);
412 decoder_surface_.Release();
413 target_surface_.Release();
414 decoder_dx11_texture_.Release();
415 set_available(true);
418 bool DXVAVideoDecodeAccelerator::DXVAPictureBuffer::
419 CopyOutputSampleDataToPictureBuffer(
420 DXVAVideoDecodeAccelerator* decoder,
421 IDirect3DSurface9* dest_surface,
422 ID3D11Texture2D* dx11_texture,
423 int input_buffer_id) {
424 DCHECK(dest_surface || dx11_texture);
425 if (dx11_texture) {
426 // Grab a reference on the decoder texture. This reference will be released
427 // when we receive a notification that the copy was completed or when the
428 // DXVAPictureBuffer instance is destroyed.
429 decoder_dx11_texture_ = dx11_texture;
430 decoder->CopyTexture(dx11_texture, dx11_decoding_texture_.get(), NULL,
431 id(), input_buffer_id);
432 return true;
434 D3DSURFACE_DESC surface_desc;
435 HRESULT hr = dest_surface->GetDesc(&surface_desc);
436 RETURN_ON_HR_FAILURE(hr, "Failed to get surface description", false);
438 D3DSURFACE_DESC texture_desc;
439 decoding_texture_->GetLevelDesc(0, &texture_desc);
441 if (texture_desc.Width != surface_desc.Width ||
442 texture_desc.Height != surface_desc.Height) {
443 NOTREACHED() << "Decode surface of different dimension than texture";
444 return false;
447 hr = decoder->d3d9_->CheckDeviceFormatConversion(
448 D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, surface_desc.Format,
449 use_rgb_ ? D3DFMT_X8R8G8B8 : D3DFMT_A8R8G8B8);
450 RETURN_ON_HR_FAILURE(hr, "Device does not support format converision", false);
452 // The same picture buffer can be reused for a different frame. Release the
453 // target surface and the decoder references here.
454 target_surface_.Release();
455 decoder_surface_.Release();
457 // Grab a reference on the decoder surface and the target surface. These
458 // references will be released when we receive a notification that the
459 // copy was completed or when the DXVAPictureBuffer instance is destroyed.
460 // We hold references here as it is easier to manage their lifetimes.
461 hr = decoding_texture_->GetSurfaceLevel(0, target_surface_.Receive());
462 RETURN_ON_HR_FAILURE(hr, "Failed to get surface from texture", false);
464 decoder_surface_ = dest_surface;
466 decoder->CopySurface(decoder_surface_.get(), target_surface_.get(), id(),
467 input_buffer_id);
468 return true;
471 void DXVAVideoDecodeAccelerator::DXVAPictureBuffer::CopySurfaceComplete(
472 IDirect3DSurface9* src_surface,
473 IDirect3DSurface9* dest_surface) {
474 DCHECK(!available());
476 GLint current_texture = 0;
477 glGetIntegerv(GL_TEXTURE_BINDING_2D, &current_texture);
479 glBindTexture(GL_TEXTURE_2D, picture_buffer_.texture_id());
481 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
483 if (src_surface && dest_surface) {
484 DCHECK_EQ(src_surface, decoder_surface_.get());
485 DCHECK_EQ(dest_surface, target_surface_.get());
486 decoder_surface_.Release();
487 target_surface_.Release();
488 } else {
489 DCHECK(decoder_dx11_texture_.get());
490 decoder_dx11_texture_.Release();
493 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay();
494 eglBindTexImage(
495 egl_display,
496 decoding_surface_,
497 EGL_BACK_BUFFER);
499 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
500 glBindTexture(GL_TEXTURE_2D, current_texture);
503 DXVAVideoDecodeAccelerator::PendingSampleInfo::PendingSampleInfo(
504 int32 buffer_id, IMFSample* sample)
505 : input_buffer_id(buffer_id),
506 picture_buffer_id(-1) {
507 output_sample.Attach(sample);
510 DXVAVideoDecodeAccelerator::PendingSampleInfo::~PendingSampleInfo() {}
512 DXVAVideoDecodeAccelerator::DXVAVideoDecodeAccelerator(
513 const base::Callback<bool(void)>& make_context_current,
514 gfx::GLContext* gl_context)
515 : client_(NULL),
516 dev_manager_reset_token_(0),
517 dx11_dev_manager_reset_token_(0),
518 egl_config_(NULL),
519 state_(kUninitialized),
520 pictures_requested_(false),
521 inputs_before_decode_(0),
522 sent_drain_message_(false),
523 make_context_current_(make_context_current),
524 codec_(media::kUnknownVideoCodec),
525 decoder_thread_("DXVAVideoDecoderThread"),
526 pending_flush_(false),
527 use_dx11_(false),
528 dx11_video_format_converter_media_type_needs_init_(true),
529 gl_context_(gl_context),
530 weak_this_factory_(this) {
531 weak_ptr_ = weak_this_factory_.GetWeakPtr();
532 memset(&input_stream_info_, 0, sizeof(input_stream_info_));
533 memset(&output_stream_info_, 0, sizeof(output_stream_info_));
536 DXVAVideoDecodeAccelerator::~DXVAVideoDecodeAccelerator() {
537 client_ = NULL;
540 bool DXVAVideoDecodeAccelerator::Initialize(media::VideoCodecProfile profile,
541 Client* client) {
542 client_ = client;
544 main_thread_task_runner_ = base::MessageLoop::current()->task_runner();
546 bool profile_supported = false;
547 for (const auto& supported_profile : kSupportedProfiles) {
548 if (profile == supported_profile) {
549 profile_supported = true;
550 break;
553 if (!profile_supported) {
554 RETURN_AND_NOTIFY_ON_FAILURE(false,
555 "Unsupported h.264, vp8, or vp9 profile", PLATFORM_FAILURE, false);
558 // Not all versions of Windows 7 and later include Media Foundation DLLs.
559 // Instead of crashing while delay loading the DLL when calling MFStartup()
560 // below, probe whether we can successfully load the DLL now.
561 // See http://crbug.com/339678 for details.
562 HMODULE dxgi_manager_dll = NULL;
563 if ((dxgi_manager_dll = ::GetModuleHandle(L"MFPlat.dll")) == NULL) {
564 HMODULE mfplat_dll = ::LoadLibrary(L"MFPlat.dll");
565 RETURN_ON_FAILURE(mfplat_dll, "MFPlat.dll is required for decoding",
566 false);
567 // On Windows 8+ mfplat.dll provides the MFCreateDXGIDeviceManager API.
568 // On Windows 7 mshtmlmedia.dll provides it.
569 dxgi_manager_dll = mfplat_dll;
572 // TODO(ananta)
573 // The code below works, as in we can create the DX11 device manager for
574 // Windows 7. However the IMFTransform we use for texture conversion and
575 // copy does not exist on Windows 7. Look into an alternate approach
576 // and enable the code below.
577 #if defined ENABLE_DX11_FOR_WIN7
578 if ((base::win::GetVersion() == base::win::VERSION_WIN7) &&
579 ((dxgi_manager_dll = ::GetModuleHandle(L"mshtmlmedia.dll")) == NULL)) {
580 HMODULE mshtml_media_dll = ::LoadLibrary(L"mshtmlmedia.dll");
581 if (mshtml_media_dll)
582 dxgi_manager_dll = mshtml_media_dll;
584 #endif
585 // If we don't find the MFCreateDXGIDeviceManager API we fallback to D3D9
586 // decoding.
587 if (dxgi_manager_dll && !create_dxgi_device_manager_) {
588 create_dxgi_device_manager_ = reinterpret_cast<CreateDXGIDeviceManager>(
589 ::GetProcAddress(dxgi_manager_dll, "MFCreateDXGIDeviceManager"));
592 RETURN_AND_NOTIFY_ON_FAILURE(
593 gfx::g_driver_egl.ext.b_EGL_ANGLE_surface_d3d_texture_2d_share_handle,
594 "EGL_ANGLE_surface_d3d_texture_2d_share_handle unavailable",
595 PLATFORM_FAILURE,
596 false);
598 State state = GetState();
599 RETURN_AND_NOTIFY_ON_FAILURE((state == kUninitialized),
600 "Initialize: invalid state: " << state, ILLEGAL_STATE, false);
602 media::InitializeMediaFoundation();
604 RETURN_AND_NOTIFY_ON_FAILURE(InitDecoder(profile),
605 "Failed to initialize decoder", PLATFORM_FAILURE, false);
607 RETURN_AND_NOTIFY_ON_FAILURE(GetStreamsInfoAndBufferReqs(),
608 "Failed to get input/output stream info.", PLATFORM_FAILURE, false);
610 RETURN_AND_NOTIFY_ON_FAILURE(
611 SendMFTMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0),
612 "Send MFT_MESSAGE_NOTIFY_BEGIN_STREAMING notification failed",
613 PLATFORM_FAILURE, false);
615 RETURN_AND_NOTIFY_ON_FAILURE(
616 SendMFTMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, 0),
617 "Send MFT_MESSAGE_NOTIFY_START_OF_STREAM notification failed",
618 PLATFORM_FAILURE, false);
620 SetState(kNormal);
622 StartDecoderThread();
623 return true;
626 bool DXVAVideoDecodeAccelerator::CreateD3DDevManager() {
627 TRACE_EVENT0("gpu", "DXVAVideoDecodeAccelerator_CreateD3DDevManager");
629 HRESULT hr = Direct3DCreate9Ex(D3D_SDK_VERSION, d3d9_.Receive());
630 RETURN_ON_HR_FAILURE(hr, "Direct3DCreate9Ex failed", false);
632 D3DPRESENT_PARAMETERS present_params = {0};
633 present_params.BackBufferWidth = 1;
634 present_params.BackBufferHeight = 1;
635 present_params.BackBufferFormat = D3DFMT_UNKNOWN;
636 present_params.BackBufferCount = 1;
637 present_params.SwapEffect = D3DSWAPEFFECT_DISCARD;
638 present_params.hDeviceWindow = ::GetShellWindow();
639 present_params.Windowed = TRUE;
640 present_params.Flags = D3DPRESENTFLAG_VIDEO;
641 present_params.FullScreen_RefreshRateInHz = 0;
642 present_params.PresentationInterval = 0;
644 hr = d3d9_->CreateDeviceEx(D3DADAPTER_DEFAULT,
645 D3DDEVTYPE_HAL,
646 ::GetShellWindow(),
647 D3DCREATE_FPU_PRESERVE |
648 D3DCREATE_SOFTWARE_VERTEXPROCESSING |
649 D3DCREATE_DISABLE_PSGP_THREADING |
650 D3DCREATE_MULTITHREADED,
651 &present_params,
652 NULL,
653 d3d9_device_ex_.Receive());
654 RETURN_ON_HR_FAILURE(hr, "Failed to create D3D device", false);
656 hr = DXVA2CreateDirect3DDeviceManager9(&dev_manager_reset_token_,
657 device_manager_.Receive());
658 RETURN_ON_HR_FAILURE(hr, "DXVA2CreateDirect3DDeviceManager9 failed", false);
660 hr = device_manager_->ResetDevice(d3d9_device_ex_.get(),
661 dev_manager_reset_token_);
662 RETURN_ON_HR_FAILURE(hr, "Failed to reset device", false);
664 hr = d3d9_device_ex_->CreateQuery(D3DQUERYTYPE_EVENT, query_.Receive());
665 RETURN_ON_HR_FAILURE(hr, "Failed to create D3D device query", false);
666 // Ensure query_ API works (to avoid an infinite loop later in
667 // CopyOutputSampleDataToPictureBuffer).
668 hr = query_->Issue(D3DISSUE_END);
669 RETURN_ON_HR_FAILURE(hr, "Failed to issue END test query", false);
670 return true;
673 bool DXVAVideoDecodeAccelerator::CreateDX11DevManager() {
674 HRESULT hr = create_dxgi_device_manager_(&dx11_dev_manager_reset_token_,
675 d3d11_device_manager_.Receive());
676 RETURN_ON_HR_FAILURE(hr, "MFCreateDXGIDeviceManager failed", false);
678 // This array defines the set of DirectX hardware feature levels we support.
679 // The ordering MUST be preserved. All applications are assumed to support
680 // 9.1 unless otherwise stated by the application, which is not our case.
681 D3D_FEATURE_LEVEL feature_levels[] = {
682 D3D_FEATURE_LEVEL_11_1,
683 D3D_FEATURE_LEVEL_11_0,
684 D3D_FEATURE_LEVEL_10_1,
685 D3D_FEATURE_LEVEL_10_0,
686 D3D_FEATURE_LEVEL_9_3,
687 D3D_FEATURE_LEVEL_9_2,
688 D3D_FEATURE_LEVEL_9_1 };
690 UINT flags = D3D11_CREATE_DEVICE_VIDEO_SUPPORT;
692 #if defined _DEBUG
693 flags |= D3D11_CREATE_DEVICE_DEBUG;
694 #endif
696 D3D_FEATURE_LEVEL feature_level_out = D3D_FEATURE_LEVEL_11_0;
697 hr = D3D11CreateDevice(NULL,
698 D3D_DRIVER_TYPE_HARDWARE,
699 NULL,
700 flags,
701 feature_levels,
702 arraysize(feature_levels),
703 D3D11_SDK_VERSION,
704 d3d11_device_.Receive(),
705 &feature_level_out,
706 d3d11_device_context_.Receive());
707 RETURN_ON_HR_FAILURE(hr, "Failed to create DX11 device", false);
709 // Enable multithreaded mode on the context. This ensures that accesses to
710 // context are synchronized across threads. We have multiple threads
711 // accessing the context, the media foundation decoder threads and the
712 // decoder thread via the video format conversion transform.
713 base::win::ScopedComPtr<ID3D10Multithread> multi_threaded;
714 hr = multi_threaded.QueryFrom(d3d11_device_context_.get());
715 RETURN_ON_HR_FAILURE(hr, "Failed to query ID3D10Multithread", false);
716 multi_threaded->SetMultithreadProtected(TRUE);
718 hr = d3d11_device_manager_->ResetDevice(d3d11_device_.get(),
719 dx11_dev_manager_reset_token_);
720 RETURN_ON_HR_FAILURE(hr, "Failed to reset device", false);
722 D3D11_QUERY_DESC query_desc;
723 query_desc.Query = D3D11_QUERY_EVENT;
724 query_desc.MiscFlags = 0;
725 hr = d3d11_device_->CreateQuery(
726 &query_desc,
727 d3d11_query_.Receive());
728 RETURN_ON_HR_FAILURE(hr, "Failed to create DX11 device query", false);
730 hr = ::CoCreateInstance(
731 CLSID_VideoProcessorMFT,
732 NULL,
733 CLSCTX_INPROC_SERVER,
734 IID_IMFTransform,
735 reinterpret_cast<void**>(video_format_converter_mft_.Receive()));
737 if (FAILED(hr)) {
738 base::debug::Alias(&hr);
739 // TODO(ananta)
740 // Remove this CHECK when the change to use DX11 for H/W decoding
741 // stablizes.
742 CHECK(false);
744 RETURN_ON_HR_FAILURE(hr, "Failed to create video format converter", false);
745 return true;
748 void DXVAVideoDecodeAccelerator::Decode(
749 const media::BitstreamBuffer& bitstream_buffer) {
750 DCHECK(main_thread_task_runner_->BelongsToCurrentThread());
752 State state = GetState();
753 RETURN_AND_NOTIFY_ON_FAILURE((state == kNormal || state == kStopped ||
754 state == kFlushing),
755 "Invalid state: " << state, ILLEGAL_STATE,);
757 base::win::ScopedComPtr<IMFSample> sample;
758 sample.Attach(CreateSampleFromInputBuffer(bitstream_buffer,
759 input_stream_info_.cbSize,
760 input_stream_info_.cbAlignment));
761 RETURN_AND_NOTIFY_ON_FAILURE(sample.get(), "Failed to create input sample",
762 PLATFORM_FAILURE, );
764 RETURN_AND_NOTIFY_ON_HR_FAILURE(sample->SetSampleTime(bitstream_buffer.id()),
765 "Failed to associate input buffer id with sample", PLATFORM_FAILURE,);
767 decoder_thread_task_runner_->PostTask(
768 FROM_HERE,
769 base::Bind(&DXVAVideoDecodeAccelerator::DecodeInternal,
770 base::Unretained(this), sample));
773 void DXVAVideoDecodeAccelerator::AssignPictureBuffers(
774 const std::vector<media::PictureBuffer>& buffers) {
775 DCHECK(main_thread_task_runner_->BelongsToCurrentThread());
777 State state = GetState();
778 RETURN_AND_NOTIFY_ON_FAILURE((state != kUninitialized),
779 "Invalid state: " << state, ILLEGAL_STATE,);
780 RETURN_AND_NOTIFY_ON_FAILURE((kNumPictureBuffers == buffers.size()),
781 "Failed to provide requested picture buffers. (Got " << buffers.size() <<
782 ", requested " << kNumPictureBuffers << ")", INVALID_ARGUMENT,);
784 // Copy the picture buffers provided by the client to the available list,
785 // and mark these buffers as available for use.
786 for (size_t buffer_index = 0; buffer_index < buffers.size();
787 ++buffer_index) {
788 linked_ptr<DXVAPictureBuffer> picture_buffer =
789 DXVAPictureBuffer::Create(*this, buffers[buffer_index], egl_config_);
790 RETURN_AND_NOTIFY_ON_FAILURE(picture_buffer.get(),
791 "Failed to allocate picture buffer", PLATFORM_FAILURE,);
793 bool inserted = output_picture_buffers_.insert(std::make_pair(
794 buffers[buffer_index].id(), picture_buffer)).second;
795 DCHECK(inserted);
798 ProcessPendingSamples();
799 if (pending_flush_) {
800 decoder_thread_task_runner_->PostTask(
801 FROM_HERE,
802 base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal,
803 base::Unretained(this)));
807 void DXVAVideoDecodeAccelerator::ReusePictureBuffer(
808 int32 picture_buffer_id) {
809 DCHECK(main_thread_task_runner_->BelongsToCurrentThread());
811 State state = GetState();
812 RETURN_AND_NOTIFY_ON_FAILURE((state != kUninitialized),
813 "Invalid state: " << state, ILLEGAL_STATE,);
815 if (output_picture_buffers_.empty() && stale_output_picture_buffers_.empty())
816 return;
818 OutputBuffers::iterator it = output_picture_buffers_.find(picture_buffer_id);
819 // If we didn't find the picture id in the |output_picture_buffers_| map we
820 // try the |stale_output_picture_buffers_| map, as this may have been an
821 // output picture buffer from before a resolution change, that at resolution
822 // change time had yet to be displayed. The client is calling us back to tell
823 // us that we can now recycle this picture buffer, so if we were waiting to
824 // dispose of it we now can.
825 if (it == output_picture_buffers_.end()) {
826 it = stale_output_picture_buffers_.find(picture_buffer_id);
827 RETURN_AND_NOTIFY_ON_FAILURE(it != stale_output_picture_buffers_.end(),
828 "Invalid picture id: " << picture_buffer_id, INVALID_ARGUMENT,);
829 main_thread_task_runner_->PostTask(
830 FROM_HERE,
831 base::Bind(&DXVAVideoDecodeAccelerator::DeferredDismissStaleBuffer,
832 weak_this_factory_.GetWeakPtr(), picture_buffer_id));
833 return;
836 it->second->ReusePictureBuffer();
837 ProcessPendingSamples();
838 if (pending_flush_) {
839 decoder_thread_task_runner_->PostTask(
840 FROM_HERE,
841 base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal,
842 base::Unretained(this)));
846 void DXVAVideoDecodeAccelerator::Flush() {
847 DCHECK(main_thread_task_runner_->BelongsToCurrentThread());
849 DVLOG(1) << "DXVAVideoDecodeAccelerator::Flush";
851 State state = GetState();
852 RETURN_AND_NOTIFY_ON_FAILURE((state == kNormal || state == kStopped),
853 "Unexpected decoder state: " << state, ILLEGAL_STATE,);
855 SetState(kFlushing);
857 pending_flush_ = true;
859 decoder_thread_task_runner_->PostTask(
860 FROM_HERE,
861 base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal,
862 base::Unretained(this)));
865 void DXVAVideoDecodeAccelerator::Reset() {
866 DCHECK(main_thread_task_runner_->BelongsToCurrentThread());
868 DVLOG(1) << "DXVAVideoDecodeAccelerator::Reset";
870 State state = GetState();
871 RETURN_AND_NOTIFY_ON_FAILURE((state == kNormal || state == kStopped),
872 "Reset: invalid state: " << state, ILLEGAL_STATE,);
874 decoder_thread_.Stop();
876 SetState(kResetting);
878 // If we have pending output frames waiting for display then we drop those
879 // frames and set the corresponding picture buffer as available.
880 PendingOutputSamples::iterator index;
881 for (index = pending_output_samples_.begin();
882 index != pending_output_samples_.end();
883 ++index) {
884 if (index->picture_buffer_id != -1) {
885 OutputBuffers::iterator it = output_picture_buffers_.find(
886 index->picture_buffer_id);
887 if (it != output_picture_buffers_.end()) {
888 DXVAPictureBuffer* picture_buffer = it->second.get();
889 picture_buffer->ReusePictureBuffer();
894 pending_output_samples_.clear();
896 NotifyInputBuffersDropped();
898 RETURN_AND_NOTIFY_ON_FAILURE(SendMFTMessage(MFT_MESSAGE_COMMAND_FLUSH, 0),
899 "Reset: Failed to send message.", PLATFORM_FAILURE,);
901 main_thread_task_runner_->PostTask(
902 FROM_HERE,
903 base::Bind(&DXVAVideoDecodeAccelerator::NotifyResetDone,
904 weak_this_factory_.GetWeakPtr()));
906 StartDecoderThread();
907 SetState(kNormal);
910 void DXVAVideoDecodeAccelerator::Destroy() {
911 DCHECK(main_thread_task_runner_->BelongsToCurrentThread());
912 Invalidate();
913 delete this;
916 bool DXVAVideoDecodeAccelerator::CanDecodeOnIOThread() {
917 return false;
920 GLenum DXVAVideoDecodeAccelerator::GetSurfaceInternalFormat() const {
921 return GL_BGRA_EXT;
924 // static
925 media::VideoDecodeAccelerator::SupportedProfiles
926 DXVAVideoDecodeAccelerator::GetSupportedProfiles() {
927 // TODO(henryhsu): Need to ensure the profiles are actually supported.
928 SupportedProfiles profiles;
929 for (const auto& supported_profile : kSupportedProfiles) {
930 SupportedProfile profile;
931 profile.profile = supported_profile;
932 // Windows Media Foundation H.264 decoding does not support decoding videos
933 // with any dimension smaller than 48 pixels:
934 // http://msdn.microsoft.com/en-us/library/windows/desktop/dd797815
935 profile.min_resolution.SetSize(48, 48);
936 // Use 1088 to account for 16x16 macroblocks.
937 profile.max_resolution.SetSize(1920, 1088);
938 profiles.push_back(profile);
940 return profiles;
// InitDecoder: loads the decoder DLL appropriate for |profile| (the system
// msmpeg2vdec.dll for H.264; Intel Media SDK MFTs for VP8/VP9), instantiates
// the IMFTransform via DllGetClassObject/IClassFactory, verifies DXVA
// support, hands the MFT a D3D9 or DX11 device manager, chooses an EGL
// config, and finally sets the input/output media types.
// Returns false on any failure.
943 bool DXVAVideoDecodeAccelerator::InitDecoder(media::VideoCodecProfile profile) {
944 HMODULE decoder_dll = NULL;
946 // Profile must fall within the valid range for one of the supported codecs.
947 if (profile >= media::H264PROFILE_MIN && profile <= media::H264PROFILE_MAX) {
948 // We mimic the steps CoCreateInstance uses to instantiate the object. This
949 // was previously done because it failed inside the sandbox, and now is done
950 // as a more minimal approach to avoid other side-effects CCI might have (as
951 // we are still in a reduced sandbox).
952 decoder_dll = ::LoadLibrary(L"msmpeg2vdec.dll");
953 RETURN_ON_FAILURE(decoder_dll,
954 "msmpeg2vdec.dll required for decoding is not loaded",
955 false);
957 // Check version of DLL, version 6.7.7140 is blacklisted due to high crash
958 // rates in browsers loading that DLL. If that is the version installed we
959 // fall back to software decoding. See crbug/403440.
960 FileVersionInfo* version_info =
961 FileVersionInfo::CreateFileVersionInfoForModule(decoder_dll);
962 RETURN_ON_FAILURE(version_info,
963 "unable to get version of msmpeg2vdec.dll",
964 false);
965 base::string16 file_version = version_info->file_version();
// NOTE(review): the comment above and the failure message below both say
// 6.7.7140, but the substring actually matched here is "6.1.7140" — the
// code blacklists 6.1.7140. Confirm which version crbug/403440 intended
// and make the comment/message agree with the check.
966 RETURN_ON_FAILURE(file_version.find(L"6.1.7140") == base::string16::npos,
967 "blacklisted version of msmpeg2vdec.dll 6.7.7140",
968 false);
969 codec_ = media::kCodecH264;
970 } else if (profile == media::VP8PROFILE_ANY ||
971 profile == media::VP9PROFILE_ANY) {
// On WOW64 (32-bit browser on 64-bit Windows) look in the native Program
// Files directory for the Intel Media SDK DLLs.
972 int program_files_key = base::DIR_PROGRAM_FILES;
973 if (base::win::OSInfo::GetInstance()->wow64_status() ==
974 base::win::OSInfo::WOW64_ENABLED) {
975 program_files_key = base::DIR_PROGRAM_FILES6432;
978 base::FilePath dll_path;
979 RETURN_ON_FAILURE(PathService::Get(program_files_key, &dll_path),
980 "failed to get path for Program Files", false);
982 dll_path = dll_path.Append(kVPXDecoderDLLPath);
983 if (profile == media::VP8PROFILE_ANY) {
984 codec_ = media::kCodecVP8;
985 dll_path = dll_path.Append(kVP8DecoderDLLName);
986 } else {
987 codec_ = media::kCodecVP9;
988 dll_path = dll_path.Append(kVP9DecoderDLLName);
// LOAD_WITH_ALTERED_SEARCH_PATH lets the DLL resolve its own dependencies
// relative to its install directory rather than the process directory.
990 decoder_dll = ::LoadLibraryEx(dll_path.value().data(), NULL,
991 LOAD_WITH_ALTERED_SEARCH_PATH);
992 RETURN_ON_FAILURE(decoder_dll, "vpx decoder dll is not loaded", false);
993 } else {
994 RETURN_ON_FAILURE(false, "Unsupported codec.", false);
// Manually drive DllGetClassObject -> IClassFactory -> IMFTransform, the
// same sequence CoCreateInstance would perform.
997 typedef HRESULT(WINAPI * GetClassObject)(
998 const CLSID & clsid, const IID & iid, void * *object);
1000 GetClassObject get_class_object = reinterpret_cast<GetClassObject>(
1001 GetProcAddress(decoder_dll, "DllGetClassObject"));
1002 RETURN_ON_FAILURE(
1003 get_class_object, "Failed to get DllGetClassObject pointer", false);
1005 base::win::ScopedComPtr<IClassFactory> factory;
1006 HRESULT hr;
1007 if (codec_ == media::kCodecH264) {
1008 hr = get_class_object(__uuidof(CMSH264DecoderMFT),
1009 __uuidof(IClassFactory),
1010 reinterpret_cast<void**>(factory.Receive()));
1011 } else if (codec_ == media::kCodecVP8) {
1012 hr = get_class_object(CLSID_WebmMfVp8Dec,
1013 __uuidof(IClassFactory),
1014 reinterpret_cast<void**>(factory.Receive()));
1015 } else if (codec_ == media::kCodecVP9) {
1016 hr = get_class_object(CLSID_WebmMfVp9Dec,
1017 __uuidof(IClassFactory),
1018 reinterpret_cast<void**>(factory.Receive()));
1019 } else {
1020 RETURN_ON_FAILURE(false, "Unsupported codec.", false);
1022 RETURN_ON_HR_FAILURE(hr, "DllGetClassObject for decoder failed", false);
1024 hr = factory->CreateInstance(NULL,
1025 __uuidof(IMFTransform),
1026 reinterpret_cast<void**>(decoder_.Receive()));
1027 RETURN_ON_HR_FAILURE(hr, "Failed to create decoder instance", false);
// CheckDecoderDxvaSupport also decides use_dx11_ below.
1029 RETURN_ON_FAILURE(CheckDecoderDxvaSupport(),
1030 "Failed to check decoder DXVA support", false);
1032 ULONG_PTR device_manager_to_use = NULL;
1033 if (use_dx11_) {
1034 CHECK(create_dxgi_device_manager_);
1035 RETURN_AND_NOTIFY_ON_FAILURE(CreateDX11DevManager(),
1036 "Failed to initialize DX11 device and manager",
1037 PLATFORM_FAILURE,
1038 false);
1039 device_manager_to_use = reinterpret_cast<ULONG_PTR>(
1040 d3d11_device_manager_.get());
1041 } else {
1042 RETURN_AND_NOTIFY_ON_FAILURE(CreateD3DDevManager(),
1043 "Failed to initialize D3D device and manager",
1044 PLATFORM_FAILURE,
1045 false);
1046 device_manager_to_use = reinterpret_cast<ULONG_PTR>(device_manager_.get());
// Hand the (D3D9 or DXGI) device manager to the MFT so it can create DXVA
// surfaces on our device.
1049 hr = decoder_->ProcessMessage(
1050 MFT_MESSAGE_SET_D3D_MANAGER,
1051 device_manager_to_use);
1052 if (use_dx11_) {
1053 RETURN_ON_HR_FAILURE(hr, "Failed to pass DX11 manager to decoder", false);
1054 } else {
1055 RETURN_ON_HR_FAILURE(hr, "Failed to pass D3D manager to decoder", false);
1058 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay();
// Pick a 32-bit BGRA-capable pbuffer config for the shared EGL surfaces.
1060 EGLint config_attribs[] = {
1061 EGL_BUFFER_SIZE, 32,
1062 EGL_RED_SIZE, 8,
1063 EGL_GREEN_SIZE, 8,
1064 EGL_BLUE_SIZE, 8,
1065 EGL_SURFACE_TYPE, EGL_PBUFFER_BIT,
1066 EGL_ALPHA_SIZE, 0,
1067 EGL_NONE
1070 EGLint num_configs;
1072 if (!eglChooseConfig(
1073 egl_display,
1074 config_attribs,
1075 &egl_config_,
1077 &num_configs))
1078 return false;
1080 return SetDecoderMediaTypes();
// CheckDecoderDxvaSupport: queries the MFT's attributes to confirm it is
// D3D-aware, enables H/W acceleration and low-latency mode where possible,
// and decides whether the DX11 path should be used (sets use_dx11_).
// Returns false only when a required attribute query/set fails.
1083 bool DXVAVideoDecodeAccelerator::CheckDecoderDxvaSupport() {
1084 base::win::ScopedComPtr<IMFAttributes> attributes;
1085 HRESULT hr = decoder_->GetAttributes(attributes.Receive());
1086 RETURN_ON_HR_FAILURE(hr, "Failed to get decoder attributes", false);
1088 UINT32 dxva = 0;
1089 hr = attributes->GetUINT32(MF_SA_D3D_AWARE, &dxva);
1090 RETURN_ON_HR_FAILURE(hr, "Failed to check if decoder supports DXVA", false);
1092 if (codec_ == media::kCodecH264) {
1093 hr = attributes->SetUINT32(CODECAPI_AVDecVideoAcceleration_H264, TRUE);
1094 RETURN_ON_HR_FAILURE(hr, "Failed to enable DXVA H/W decoding", false);
// Low latency is best-effort: failure is only logged, not fatal.
1097 hr = attributes->SetUINT32(CODECAPI_AVLowLatencyMode, TRUE);
1098 if (SUCCEEDED(hr)) {
1099 DVLOG(1) << "Successfully set Low latency mode on decoder.";
1100 } else {
1101 DVLOG(1) << "Failed to set Low latency mode on decoder. Error: " << hr;
1104 // The decoder should use DX11 iff
1105 // 1. The underlying H/W decoder supports it.
1106 // 2. We have a pointer to the MFCreateDXGIDeviceManager function needed for
1107 // this. This should always be true for Windows 8+.
1108 // 3. ANGLE is using DX11.
1109 DCHECK(gl_context_);
1110 if (create_dxgi_device_manager_ &&
1111 (gl_context_->GetGLRenderer().find("Direct3D11") !=
1112 std::string::npos)) {
1113 UINT32 dx11_aware = 0;
// The GetUINT32 result is deliberately ignored; dx11_aware stays 0 if the
// attribute is absent, which disables the DX11 path.
1114 attributes->GetUINT32(MF_SA_D3D11_AWARE, &dx11_aware);
1115 use_dx11_ = !!dx11_aware;
1117 return true;
1120 bool DXVAVideoDecodeAccelerator::SetDecoderMediaTypes() {
1121 RETURN_ON_FAILURE(SetDecoderInputMediaType(),
1122 "Failed to set decoder input media type", false);
1123 return SetDecoderOutputMediaType(MFVideoFormat_NV12);
// SetDecoderInputMediaType: builds an IMFMediaType describing the compressed
// input stream (H.264/VP8/VP9 depending on codec_) and hands it to the MFT.
// Returns false if any Media Foundation call fails.
1126 bool DXVAVideoDecodeAccelerator::SetDecoderInputMediaType() {
1127 base::win::ScopedComPtr<IMFMediaType> media_type;
1128 HRESULT hr = MFCreateMediaType(media_type.Receive());
1129 RETURN_ON_HR_FAILURE(hr, "MFCreateMediaType failed", false);
1131 hr = media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
1132 RETURN_ON_HR_FAILURE(hr, "Failed to set major input type", false);
// Subtype is selected from codec_, which InitDecoder set earlier.
1134 if (codec_ == media::kCodecH264) {
1135 hr = media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264);
1136 } else if (codec_ == media::kCodecVP8) {
1137 hr = media_type->SetGUID(MF_MT_SUBTYPE, MEDIASUBTYPE_VP80);
1138 } else if (codec_ == media::kCodecVP9) {
1139 hr = media_type->SetGUID(MF_MT_SUBTYPE, MEDIASUBTYPE_VP90);
1140 } else {
1141 NOTREACHED();
1142 RETURN_ON_FAILURE(false, "Unsupported codec on input media type.", false);
1144 RETURN_ON_HR_FAILURE(hr, "Failed to set subtype", false);
1146 // Not sure about this. msdn recommends setting this value on the input
1147 // media type.
1148 hr = media_type->SetUINT32(MF_MT_INTERLACE_MODE,
1149 MFVideoInterlace_MixedInterlaceOrProgressive);
1150 RETURN_ON_HR_FAILURE(hr, "Failed to set interlace mode", false);
1152 hr = decoder_->SetInputType(0, media_type.get(), 0); // No flags
1153 RETURN_ON_HR_FAILURE(hr, "Failed to set decoder input type", false);
1154 return true;
// SetDecoderOutputMediaType: walks the MFT's available output types and
// selects the first whose subtype matches |subtype| (normally NV12).
// Returns false if no matching type is offered or SetOutputType fails.
1157 bool DXVAVideoDecodeAccelerator::SetDecoderOutputMediaType(
1158 const GUID& subtype) {
1159 base::win::ScopedComPtr<IMFMediaType> out_media_type;
// GetOutputAvailableType enumerates until it fails (MF_E_NO_MORE_TYPES ends
// the loop via the SUCCEEDED check).
1161 for (uint32 i = 0;
1162 SUCCEEDED(decoder_->GetOutputAvailableType(0, i,
1163 out_media_type.Receive()));
1164 ++i) {
1165 GUID out_subtype = {0};
1166 HRESULT hr = out_media_type->GetGUID(MF_MT_SUBTYPE, &out_subtype);
1167 RETURN_ON_HR_FAILURE(hr, "Failed to get output major type", false);
1169 if (out_subtype == subtype) {
1170 hr = decoder_->SetOutputType(0, out_media_type.get(), 0); // No flags
1171 RETURN_ON_HR_FAILURE(hr, "Failed to set decoder output type", false);
1172 return true;
// Release before the next iteration so Receive() gets a clean pointer.
1174 out_media_type.Release();
1176 return false;
1179 bool DXVAVideoDecodeAccelerator::SendMFTMessage(MFT_MESSAGE_TYPE msg,
1180 int32 param) {
1181 HRESULT hr = decoder_->ProcessMessage(msg, param);
1182 return SUCCEEDED(hr);
1185 // Gets the minimum buffer sizes for input and output samples. The MFT will not
1186 // allocate buffer for input nor output, so we have to do it ourselves and make
1187 // sure they're the correct size. We only provide decoding if DXVA is enabled.
// Caches the results in input_stream_info_ / output_stream_info_ and logs
// them; the flag CHECKs document the exact contract expected from the
// H.264 MFT.
1188 bool DXVAVideoDecodeAccelerator::GetStreamsInfoAndBufferReqs() {
1189 HRESULT hr = decoder_->GetInputStreamInfo(0, &input_stream_info_);
1190 RETURN_ON_HR_FAILURE(hr, "Failed to get input stream info", false);
1192 hr = decoder_->GetOutputStreamInfo(0, &output_stream_info_);
1193 RETURN_ON_HR_FAILURE(hr, "Failed to get decoder output stream info", false);
1195 DVLOG(1) << "Input stream info: ";
1196 DVLOG(1) << "Max latency: " << input_stream_info_.hnsMaxLatency;
1197 if (codec_ == media::kCodecH264) {
1198 // There should be three flags, one for requiring a whole frame be in a
1199 // single sample, one for requiring there be one buffer only in a single
1200 // sample, and one that specifies a fixed sample size. (as in cbSize)
1201 CHECK_EQ(input_stream_info_.dwFlags, 0x7u);
1204 DVLOG(1) << "Min buffer size: " << input_stream_info_.cbSize;
1205 DVLOG(1) << "Max lookahead: " << input_stream_info_.cbMaxLookahead;
1206 DVLOG(1) << "Alignment: " << input_stream_info_.cbAlignment;
1208 DVLOG(1) << "Output stream info: ";
1209 // The flags here should be the same and mean the same thing, except when
1210 // DXVA is enabled, there is an extra 0x100 flag meaning decoder will
1211 // allocate its own sample.
1212 DVLOG(1) << "Flags: "
1213 << std::hex << std::showbase << output_stream_info_.dwFlags;
1214 if (codec_ == media::kCodecH264) {
1215 CHECK_EQ(output_stream_info_.dwFlags, 0x107u);
1217 DVLOG(1) << "Min buffer size: " << output_stream_info_.cbSize;
1218 DVLOG(1) << "Alignment: " << output_stream_info_.cbAlignment;
1219 return true;
// DoDecode: pulls one decoded sample out of the MFT on the decoder thread.
// Handles the three ProcessOutput outcomes: a stream/format change (re-set
// NV12 and recurse), need-more-input (transition to kStopped), or a decoded
// sample (forwarded to ProcessOutputSample).
1222 void DXVAVideoDecodeAccelerator::DoDecode() {
1223 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
1224 // This function is also called from FlushInternal in a loop which could
1225 // result in the state transitioning to kStopped due to no decoded output.
1226 State state = GetState();
1227 RETURN_AND_NOTIFY_ON_FAILURE(
1228 (state == kNormal || state == kFlushing || state == kStopped),
1229 "DoDecode: not in normal/flushing/stopped state", ILLEGAL_STATE,);
1231 MFT_OUTPUT_DATA_BUFFER output_data_buffer = {0};
1232 DWORD status = 0;
1234 HRESULT hr = decoder_->ProcessOutput(0, // No flags
1235 1, // # of out streams to pull from
1236 &output_data_buffer,
1237 &status);
// ProcessOutput may hand back an event collection even on failure; it must
// be released to avoid leaking the COM reference.
1238 IMFCollection* events = output_data_buffer.pEvents;
1239 if (events != NULL) {
1240 DVLOG(1) << "Got events from ProcessOuput, but discarding";
1241 events->Release();
1243 if (FAILED(hr)) {
1244 // A stream change needs further ProcessInput calls to get back decoder
1245 // output which is why we need to set the state to stopped.
1246 if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
1247 if (!SetDecoderOutputMediaType(MFVideoFormat_NV12)) {
1248 // Decoder didn't let us set NV12 output format. Not sure as to why
1249 // this can happen. Give up in disgust.
1250 NOTREACHED() << "Failed to set decoder output media type to NV12";
1251 SetState(kStopped);
1252 } else {
1253 DVLOG(1) << "Received output format change from the decoder."
1254 " Recursively invoking DoDecode";
1255 DoDecode();
1257 return;
1258 } else if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
1259 // No more output from the decoder. Stop playback.
1260 SetState(kStopped);
1261 return;
1262 } else {
1263 NOTREACHED() << "Unhandled error in DoDecode()";
1264 return;
// Success path: we have a decoded sample in output_data_buffer.pSample.
1267 TRACE_EVENT_END_ETW("DXVAVideoDecodeAccelerator.Decoding", this, "");
1269 TRACE_COUNTER1("DXVA Decoding", "TotalPacketsBeforeDecode",
1270 inputs_before_decode_);
1272 inputs_before_decode_ = 0;
1274 RETURN_AND_NOTIFY_ON_FAILURE(ProcessOutputSample(output_data_buffer.pSample),
1275 "Failed to process output sample.", PLATFORM_FAILURE,);
// ProcessOutputSample: queues a decoded |sample| for delivery to the client.
// The sample's MF timestamp carries the originating input buffer id. If the
// client has already provided picture buffers, ProcessPendingSamples is
// posted; otherwise picture buffers are requested first.
1278 bool DXVAVideoDecodeAccelerator::ProcessOutputSample(IMFSample* sample) {
1279 RETURN_ON_FAILURE(sample, "Decode succeeded with NULL output sample", false);
// The sample time was set to the input bitstream buffer id in the decode
// path, so it round-trips through the MFT as an identifier, not a time.
1281 LONGLONG input_buffer_id = 0;
1282 RETURN_ON_HR_FAILURE(sample->GetSampleTime(&input_buffer_id),
1283 "Failed to get input buffer id associated with sample",
1284 false);
// Scoped lock: only one pending output sample is supported at a time.
1287 base::AutoLock lock(decoder_lock_);
1288 DCHECK(pending_output_samples_.empty());
1289 pending_output_samples_.push_back(
1290 PendingSampleInfo(input_buffer_id, sample));
1293 if (pictures_requested_) {
1294 DVLOG(1) << "Waiting for picture slots from the client.";
1295 main_thread_task_runner_->PostTask(
1296 FROM_HERE,
1297 base::Bind(&DXVAVideoDecodeAccelerator::ProcessPendingSamples,
1298 weak_this_factory_.GetWeakPtr()));
1299 return true;
// First decoded frame: derive the picture size from the sample so the
// client can allocate correctly sized buffers.
1302 int width = 0;
1303 int height = 0;
1304 if (!GetVideoFrameDimensions(sample, &width, &height)) {
1305 RETURN_ON_FAILURE(false, "Failed to get D3D surface from output sample",
1306 false);
1309 // Go ahead and request picture buffers.
1310 main_thread_task_runner_->PostTask(
1311 FROM_HERE,
1312 base::Bind(&DXVAVideoDecodeAccelerator::RequestPictureBuffers,
1313 weak_this_factory_.GetWeakPtr(),
1314 width,
1315 height));
1317 pictures_requested_ = true;
1318 return true;
// ProcessPendingSamples: on the main thread, pairs queued decoded samples
// with available picture buffers and kicks off the surface/texture copy for
// each pairing. Bails out to HandleResolutionChanged if the sample size no
// longer matches the allocated buffers.
1321 void DXVAVideoDecodeAccelerator::ProcessPendingSamples() {
1322 DCHECK(main_thread_task_runner_->BelongsToCurrentThread());
// Nothing to do until the client has provided picture buffers.
1324 if (!output_picture_buffers_.size())
1325 return;
1327 RETURN_AND_NOTIFY_ON_FAILURE(make_context_current_.Run(),
1328 "Failed to make context current", PLATFORM_FAILURE,);
1330 OutputBuffers::iterator index;
1332 for (index = output_picture_buffers_.begin();
1333 index != output_picture_buffers_.end() &&
1334 OutputSamplesPresent();
1335 ++index) {
1336 if (index->second->available()) {
1337 PendingSampleInfo* pending_sample = NULL;
// Lock scope: peek at the front sample; skip it if it is already bound to
// a picture buffer (copy in flight).
1339 base::AutoLock lock(decoder_lock_);
1341 PendingSampleInfo& sample_info = pending_output_samples_.front();
1342 if (sample_info.picture_buffer_id != -1)
1343 continue;
1344 pending_sample = &sample_info;
1347 int width = 0;
1348 int height = 0;
1349 if (!GetVideoFrameDimensions(pending_sample->output_sample.get(),
1350 &width, &height)) {
1351 RETURN_AND_NOTIFY_ON_FAILURE(false,
1352 "Failed to get D3D surface from output sample", PLATFORM_FAILURE,);
// A size mismatch means the stream resolution changed; re-request buffers.
1355 if (width != index->second->size().width() ||
1356 height != index->second->size().height()) {
1357 HandleResolutionChanged(width, height);
1358 return;
1361 base::win::ScopedComPtr<IMFMediaBuffer> output_buffer;
1362 HRESULT hr = pending_sample->output_sample->GetBufferByIndex(
1363 0, output_buffer.Receive());
1364 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
1365 "Failed to get buffer from output sample", PLATFORM_FAILURE,);
1367 base::win::ScopedComPtr<IDirect3DSurface9> surface;
1368 base::win::ScopedComPtr<ID3D11Texture2D> d3d11_texture;
// The backing resource type depends on the device path: a DXGI texture for
// DX11, a D3D9 surface otherwise. Exactly one of the two is populated.
1370 if (use_dx11_) {
1371 base::win::ScopedComPtr<IMFDXGIBuffer> dxgi_buffer;
1372 hr = dxgi_buffer.QueryFrom(output_buffer.get());
1373 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
1374 "Failed to get DXGIBuffer from output sample", PLATFORM_FAILURE,);
1375 hr = dxgi_buffer->GetResource(
1376 __uuidof(ID3D11Texture2D),
1377 reinterpret_cast<void**>(d3d11_texture.Receive()));
1378 } else {
1379 hr = MFGetService(output_buffer.get(), MR_BUFFER_SERVICE,
1380 IID_PPV_ARGS(surface.Receive()));
1382 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
1383 "Failed to get surface from output sample", PLATFORM_FAILURE,);
1385 pending_sample->picture_buffer_id = index->second->id();
1387 RETURN_AND_NOTIFY_ON_FAILURE(
1388 index->second->CopyOutputSampleDataToPictureBuffer(
1389 this,
1390 surface.get(),
1391 d3d11_texture.get(),
1392 pending_sample->input_buffer_id),
1393 "Failed to copy output sample", PLATFORM_FAILURE,);
1395 index->second->set_available(false);
// StopOnError: reports |error| to the client and tears the decoder down.
// Safe to call from any thread; re-posts itself to the main thread first.
// client_ is cleared so no further callbacks are delivered after the error.
1400 void DXVAVideoDecodeAccelerator::StopOnError(
1401 media::VideoDecodeAccelerator::Error error) {
1402 if (!main_thread_task_runner_->BelongsToCurrentThread()) {
1403 main_thread_task_runner_->PostTask(
1404 FROM_HERE,
1405 base::Bind(&DXVAVideoDecodeAccelerator::StopOnError,
1406 weak_this_factory_.GetWeakPtr(),
1407 error));
1408 return;
1411 if (client_)
1412 client_->NotifyError(error);
1413 client_ = NULL;
1415 if (GetState() != kUninitialized) {
1416 Invalidate();
// Invalidate: releases every decoder/device resource and returns the object
// to kUninitialized. Idempotent — a second call is a no-op. Resources are
// released on the DX11 or D3D9 path depending on which one was in use.
1420 void DXVAVideoDecodeAccelerator::Invalidate() {
1421 if (GetState() == kUninitialized)
1422 return;
1423 decoder_thread_.Stop();
// Invalidating weak pointers cancels any posted-but-unrun callbacks.
1424 weak_this_factory_.InvalidateWeakPtrs();
1425 output_picture_buffers_.clear();
1426 stale_output_picture_buffers_.clear();
1427 pending_output_samples_.clear();
1428 pending_input_buffers_.clear();
1429 decoder_.Release();
1431 if (use_dx11_) {
// Tell the format-converter MFT streaming has ended before releasing it.
1432 if (video_format_converter_mft_.get()) {
1433 video_format_converter_mft_->ProcessMessage(
1434 MFT_MESSAGE_NOTIFY_END_STREAMING, 0);
1435 video_format_converter_mft_.Release();
1437 d3d11_device_context_.Release();
1438 d3d11_device_.Release();
1439 d3d11_device_manager_.Release();
1440 d3d11_query_.Release();
1441 dx11_video_format_converter_media_type_needs_init_ = true;
1442 } else {
1443 d3d9_.Release();
1444 d3d9_device_ex_.Release();
1445 device_manager_.Release();
1446 query_.Release();
1449 SetState(kUninitialized);
1452 void DXVAVideoDecodeAccelerator::NotifyInputBufferRead(int input_buffer_id) {
1453 DCHECK(main_thread_task_runner_->BelongsToCurrentThread());
1454 if (client_)
1455 client_->NotifyEndOfBitstreamBuffer(input_buffer_id);
// NotifyFlushDone: completes a pending Flush on the main thread. Clears the
// drain-sent flag (under the decoder lock) so a future flush can issue a new
// drain message, then notifies the client.
1458 void DXVAVideoDecodeAccelerator::NotifyFlushDone() {
1459 DCHECK(main_thread_task_runner_->BelongsToCurrentThread());
// Only fires when a flush is actually outstanding and a client remains.
1460 if (client_ && pending_flush_) {
1461 pending_flush_ = false;
1463 base::AutoLock lock(decoder_lock_);
1464 sent_drain_message_ = false;
1467 client_->NotifyFlushDone();
1471 void DXVAVideoDecodeAccelerator::NotifyResetDone() {
1472 DCHECK(main_thread_task_runner_->BelongsToCurrentThread());
1473 if (client_)
1474 client_->NotifyResetDone();
1477 void DXVAVideoDecodeAccelerator::RequestPictureBuffers(int width, int height) {
1478 DCHECK(main_thread_task_runner_->BelongsToCurrentThread());
1479 // This task could execute after the decoder has been torn down.
1480 if (GetState() != kUninitialized && client_) {
1481 client_->ProvidePictureBuffers(
1482 kNumPictureBuffers,
1483 gfx::Size(width, height),
1484 GL_TEXTURE_2D);
// NotifyPictureReady: delivers a decoded picture to the client on the main
// thread, pairing the picture buffer with the input buffer that produced it.
1488 void DXVAVideoDecodeAccelerator::NotifyPictureReady(
1489 int picture_buffer_id,
1490 int input_buffer_id,
1491 const gfx::Rect& picture_buffer_size) {
1492 DCHECK(main_thread_task_runner_->BelongsToCurrentThread());
1493 // This task could execute after the decoder has been torn down.
1494 if (GetState() != kUninitialized && client_) {
// The final 'false' argument disables the allow_overlay path for this
// Picture.
1495 media::Picture picture(picture_buffer_id, input_buffer_id,
1496 picture_buffer_size, false);
1497 client_->PictureReady(picture);
// NotifyInputBuffersDropped: during Reset, tells the client that every
// queued-but-undecoded input buffer has been consumed (dropped), then clears
// the queue. Runs on the main thread.
1501 void DXVAVideoDecodeAccelerator::NotifyInputBuffersDropped() {
1502 DCHECK(main_thread_task_runner_->BelongsToCurrentThread());
1503 if (!client_)
1504 return;
1506 for (PendingInputs::iterator it = pending_input_buffers_.begin();
1507 it != pending_input_buffers_.end(); ++it) {
// The MF sample time holds the bitstream buffer id (see DecodeInternal).
// NOTE(review): input_buffer_id is a LONGLONG narrowed implicitly to the
// int parameter of NotifyEndOfBitstreamBuffer — fine for the 32-bit ids
// used here, but worth confirming.
1508 LONGLONG input_buffer_id = 0;
1509 RETURN_ON_HR_FAILURE((*it)->GetSampleTime(&input_buffer_id),
1510 "Failed to get buffer id associated with sample",);
1511 client_->NotifyEndOfBitstreamBuffer(input_buffer_id);
1513 pending_input_buffers_.clear();
// DecodePendingInputBuffers: on the decoder thread, replays input samples
// queued while the decoder was busy. Deferred again if an output sample is
// still awaiting delivery. The queue is swapped into a local copy first so
// DecodeInternal can safely re-queue samples.
1516 void DXVAVideoDecodeAccelerator::DecodePendingInputBuffers() {
1517 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
1518 State state = GetState();
1519 RETURN_AND_NOTIFY_ON_FAILURE((state != kUninitialized),
1520 "Invalid state: " << state, ILLEGAL_STATE,);
1522 if (pending_input_buffers_.empty() || OutputSamplesPresent())
1523 return;
1525 PendingInputs pending_input_buffers_copy;
1526 std::swap(pending_input_buffers_, pending_input_buffers_copy);
1528 for (PendingInputs::iterator it = pending_input_buffers_copy.begin();
1529 it != pending_input_buffers_copy.end(); ++it) {
1530 DecodeInternal(*it);
// FlushInternal: decoder-thread half of Flush(). Drains queued input, sends
// the MFT drain message once, then repeatedly pulls output via DoDecode until
// the decoder reports it has no more frames, at which point NotifyFlushDone
// is posted to the main thread. Re-posts itself whenever it must wait.
1534 void DXVAVideoDecodeAccelerator::FlushInternal() {
1535 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
1537 // We allow only one output frame to be present at any given time. If we have
1538 // an output frame, then we cannot complete the flush at this time.
1539 if (OutputSamplesPresent())
1540 return;
1542 // First drain the pending input because once the drain message is sent below,
1543 // the decoder will ignore further input until it's drained.
1544 if (!pending_input_buffers_.empty()) {
1545 decoder_thread_task_runner_->PostTask(
1546 FROM_HERE,
1547 base::Bind(&DXVAVideoDecodeAccelerator::DecodePendingInputBuffers,
1548 base::Unretained(this)));
1549 decoder_thread_task_runner_->PostTask(
1550 FROM_HERE,
1551 base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal,
1552 base::Unretained(this)));
1553 return;
// Send the drain message exactly once per flush; the flag is reset in
// NotifyFlushDone.
1557 base::AutoLock lock(decoder_lock_);
1558 if (!sent_drain_message_) {
1559 RETURN_AND_NOTIFY_ON_FAILURE(SendMFTMessage(MFT_MESSAGE_COMMAND_DRAIN, 0),
1560 "Failed to send drain message",
1561 PLATFORM_FAILURE,);
1562 sent_drain_message_ = true;
1566 // Attempt to retrieve an output frame from the decoder. If we have one,
1567 // return and proceed when the output frame is processed. If we don't have a
1568 // frame then we are done.
1569 DoDecode();
1570 if (OutputSamplesPresent())
1571 return;
1573 SetState(kFlushing);
1575 main_thread_task_runner_->PostTask(
1576 FROM_HERE,
1577 base::Bind(&DXVAVideoDecodeAccelerator::NotifyFlushDone,
1578 weak_this_factory_.GetWeakPtr()));
1580 SetState(kNormal);
// DecodeInternal: decoder-thread half of Decode(). Feeds |sample| to the
// MFT via ProcessInput, handling MF_E_NOTACCEPTING by first pulling output
// (DoDecode) and retrying, and queuing the sample if the decoder still will
// not accept it. On success the originating bitstream buffer is acknowledged
// to the client.
1583 void DXVAVideoDecodeAccelerator::DecodeInternal(
1584 const base::win::ScopedComPtr<IMFSample>& sample) {
1585 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
1587 if (GetState() == kUninitialized)
1588 return;
// Preserve input ordering: while output is pending or older input is
// queued, this sample must wait its turn.
1590 if (OutputSamplesPresent() || !pending_input_buffers_.empty()) {
1591 pending_input_buffers_.push_back(sample);
1592 return;
1595 if (!inputs_before_decode_) {
1596 TRACE_EVENT_BEGIN_ETW("DXVAVideoDecodeAccelerator.Decoding", this, "");
1598 inputs_before_decode_++;
1600 HRESULT hr = decoder_->ProcessInput(0, sample.get(), 0);
1601 // As per msdn if the decoder returns MF_E_NOTACCEPTING then it means that it
1602 // has enough data to produce one or more output samples. In this case the
1603 // recommended options are to
1604 // 1. Generate new output by calling IMFTransform::ProcessOutput until it
1605 // returns MF_E_TRANSFORM_NEED_MORE_INPUT.
1606 // 2. Flush the input data
1607 // We implement the first option, i.e to retrieve the output sample and then
1608 // process the input again. Failure in either of these steps is treated as a
1609 // decoder failure.
1610 if (hr == MF_E_NOTACCEPTING) {
1611 DoDecode();
1612 // If the DoDecode call resulted in an output frame then we should not
1613 // process any more input until that frame is copied to the target surface.
1614 if (!OutputSamplesPresent()) {
1615 State state = GetState();
1616 RETURN_AND_NOTIFY_ON_FAILURE((state == kStopped || state == kNormal ||
1617 state == kFlushing),
1618 "Failed to process output. Unexpected decoder state: " << state,
1619 PLATFORM_FAILURE,);
1620 hr = decoder_->ProcessInput(0, sample.get(), 0);
1622 // If we continue to get the MF_E_NOTACCEPTING error we do the following:-
1623 // 1. Add the input sample to the pending queue.
1624 // 2. If we don't have any output samples we post the
1625 // DecodePendingInputBuffers task to process the pending input samples.
1626 // If we have an output sample then the above task is posted when the
1627 // output samples are sent to the client.
1628 // This is because we only support 1 pending output sample at any
1629 // given time due to the limitation with the Microsoft media foundation
1630 // decoder where it recycles the output Decoder surfaces.
1631 if (hr == MF_E_NOTACCEPTING) {
1632 pending_input_buffers_.push_back(sample);
1633 decoder_thread_task_runner_->PostTask(
1634 FROM_HERE,
1635 base::Bind(&DXVAVideoDecodeAccelerator::DecodePendingInputBuffers,
1636 base::Unretained(this)));
1637 return;
1640 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to process input sample",
1641 PLATFORM_FAILURE,);
// Eagerly pull any output this input produced.
1643 DoDecode();
1645 State state = GetState();
1646 RETURN_AND_NOTIFY_ON_FAILURE((state == kStopped || state == kNormal ||
1647 state == kFlushing),
1648 "Failed to process output. Unexpected decoder state: " << state,
1649 ILLEGAL_STATE,);
1651 LONGLONG input_buffer_id = 0;
1652 RETURN_ON_HR_FAILURE(sample->GetSampleTime(&input_buffer_id),
1653 "Failed to get input buffer id associated with sample",);
1654 // The Microsoft Media foundation decoder internally buffers up to 30 frames
1655 // before returning a decoded frame. We need to inform the client that this
1656 // input buffer is processed as it may stop sending us further input.
1657 // Note: This may break clients which expect every input buffer to be
1658 // associated with a decoded output buffer.
1659 // TODO(ananta)
1660 // Do some more investigation into whether it is possible to get the MFT
1661 // decoder to emit an output packet for every input packet.
1662 // http://code.google.com/p/chromium/issues/detail?id=108121
1663 // http://code.google.com/p/chromium/issues/detail?id=150925
1664 main_thread_task_runner_->PostTask(
1665 FROM_HERE,
1666 base::Bind(&DXVAVideoDecodeAccelerator::NotifyInputBufferRead,
1667 weak_this_factory_.GetWeakPtr(),
1668 input_buffer_id));
// HandleResolutionChanged: reacts to a mid-stream size change by dismissing
// the now-wrong picture buffers and requesting new |width| x |height| ones.
// Both steps are posted to the main thread; the DX11 converter media type is
// flagged for re-initialization at the new size.
1671 void DXVAVideoDecodeAccelerator::HandleResolutionChanged(int width,
1672 int height) {
1673 dx11_video_format_converter_media_type_needs_init_ = true;
1675 main_thread_task_runner_->PostTask(
1676 FROM_HERE,
1677 base::Bind(&DXVAVideoDecodeAccelerator::DismissStaleBuffers,
1678 weak_this_factory_.GetWeakPtr()));
1680 main_thread_task_runner_->PostTask(
1681 FROM_HERE,
1682 base::Bind(&DXVAVideoDecodeAccelerator::RequestPictureBuffers,
1683 weak_this_factory_.GetWeakPtr(),
1684 width,
1685 height));
// DismissStaleBuffers: after a resolution change, dismisses every available
// picture buffer immediately and parks in-use ones in
// stale_output_picture_buffers_ for deferred dismissal once the client
// returns them.
// NOTE(review): unlike the other client callbacks, client_ is dereferenced
// here without a NULL check — confirm this cannot run after StopOnError has
// cleared client_.
1688 void DXVAVideoDecodeAccelerator::DismissStaleBuffers() {
1689 OutputBuffers::iterator index;
1691 for (index = output_picture_buffers_.begin();
1692 index != output_picture_buffers_.end();
1693 ++index) {
1694 if (index->second->available()) {
1695 DVLOG(1) << "Dismissing picture id: " << index->second->id();
1696 client_->DismissPictureBuffer(index->second->id());
1697 } else {
1698 // Move to |stale_output_picture_buffers_| for deferred deletion.
1699 stale_output_picture_buffers_.insert(
1700 std::make_pair(index->first, index->second));
1704 output_picture_buffers_.clear();
// DeferredDismissStaleBuffer: dismisses a stale picture buffer once the
// client has returned it (via ReusePictureBuffer after a resolution change).
1707 void DXVAVideoDecodeAccelerator::DeferredDismissStaleBuffer(
1708 int32 picture_buffer_id) {
1709 OutputBuffers::iterator it = stale_output_picture_buffers_.find(
1710 picture_buffer_id);
1711 DCHECK(it != stale_output_picture_buffers_.end());
1712 DVLOG(1) << "Dismissing picture id: " << it->second->id();
1713 client_->DismissPictureBuffer(it->second->id());
1714 stale_output_picture_buffers_.erase(it);
// GetState: reads state_ atomically (InterlockedAdd of 0 acts as an atomic
// load with a full barrier) so any thread can safely observe the state.
1717 DXVAVideoDecodeAccelerator::State
1718 DXVAVideoDecodeAccelerator::GetState() {
1719 static_assert(sizeof(State) == sizeof(long), "mismatched type sizes");
1720 State state = static_cast<State>(
1721 InterlockedAdd(reinterpret_cast<volatile long*>(&state_), 0));
1722 return state;
// SetState: transitions state_ to |new_state|. Always performed on the main
// thread (other threads re-post here), with an interlocked exchange so
// readers on other threads see the update atomically.
1725 void DXVAVideoDecodeAccelerator::SetState(State new_state) {
1726 if (!main_thread_task_runner_->BelongsToCurrentThread()) {
1727 main_thread_task_runner_->PostTask(
1728 FROM_HERE,
1729 base::Bind(&DXVAVideoDecodeAccelerator::SetState,
1730 weak_this_factory_.GetWeakPtr(),
1731 new_state));
1732 return;
1735 static_assert(sizeof(State) == sizeof(long), "mismatched type sizes");
1736 ::InterlockedExchange(reinterpret_cast<volatile long*>(&state_),
1737 new_state);
1738 DCHECK_EQ(state_, new_state);
1741 void DXVAVideoDecodeAccelerator::StartDecoderThread() {
1742 decoder_thread_.init_com_with_mta(false);
1743 decoder_thread_.Start();
1744 decoder_thread_task_runner_ = decoder_thread_.task_runner();
1747 bool DXVAVideoDecodeAccelerator::OutputSamplesPresent() {
1748 base::AutoLock lock(decoder_lock_);
1749 return !pending_output_samples_.empty();
// CopySurface: D3D9 path. Copies (and colorspace-converts) the decoded
// |src_surface| into |dest_surface| with StretchRect on the decoder thread,
// then issues a D3D event query and polls it via FlushDecoder so
// CopySurfaceComplete only fires once the GPU copy has finished.
1752 void DXVAVideoDecodeAccelerator::CopySurface(IDirect3DSurface9* src_surface,
1753 IDirect3DSurface9* dest_surface,
1754 int picture_buffer_id,
1755 int input_buffer_id) {
// Hop to the decoder thread if called from elsewhere.
1756 if (!decoder_thread_task_runner_->BelongsToCurrentThread()) {
1757 decoder_thread_task_runner_->PostTask(
1758 FROM_HERE,
1759 base::Bind(&DXVAVideoDecodeAccelerator::CopySurface,
1760 base::Unretained(this),
1761 src_surface,
1762 dest_surface,
1763 picture_buffer_id,
1764 input_buffer_id));
1765 return;
1768 HRESULT hr = d3d9_device_ex_->StretchRect(src_surface, NULL, dest_surface,
1769 NULL, D3DTEXF_NONE);
1770 RETURN_ON_HR_FAILURE(hr, "Colorspace conversion via StretchRect failed",);
1772 // Ideally, this should be done immediately before the draw call that uses
1773 // the texture. Flush it once here though.
1774 hr = query_->Issue(D3DISSUE_END);
1775 RETURN_ON_HR_FAILURE(hr, "Failed to issue END",);
1777 // Flush the decoder device to ensure that the decoded frame is copied to the
1778 // target surface.
// The first argument (0) is the poll iteration count used by FlushDecoder.
1779 decoder_thread_task_runner_->PostDelayedTask(
1780 FROM_HERE,
1781 base::Bind(&DXVAVideoDecodeAccelerator::FlushDecoder,
1782 base::Unretained(this), 0, src_surface, dest_surface,
1783 picture_buffer_id, input_buffer_id),
1784 base::TimeDelta::FromMilliseconds(kFlushDecoderSurfaceTimeoutMs));
// CopySurfaceComplete: main-thread completion of a surface copy. Hands the
// finished picture to the client (NotifyPictureReady), retires the pending
// output sample, and resumes either the flush or the pending-input pipeline
// on the decoder thread.
1787 void DXVAVideoDecodeAccelerator::CopySurfaceComplete(
1788 IDirect3DSurface9* src_surface,
1789 IDirect3DSurface9* dest_surface,
1790 int picture_buffer_id,
1791 int input_buffer_id) {
1792 DCHECK(main_thread_task_runner_->BelongsToCurrentThread());
1794 // The output buffers may have changed in the following scenarios:-
1795 // 1. A resolution change.
1796 // 2. Decoder instance was destroyed.
1797 // Ignore copy surface notifications for such buffers.
1799 OutputBuffers::iterator it = output_picture_buffers_.find(picture_buffer_id);
1800 if (it == output_picture_buffers_.end())
1801 return;
1803 // If the picture buffer is marked as available it probably means that there
1804 // was a Reset operation which dropped the output frame.
1805 DXVAPictureBuffer* picture_buffer = it->second.get();
1806 if (picture_buffer->available())
1807 return;
1809 RETURN_AND_NOTIFY_ON_FAILURE(make_context_current_.Run(),
1810 "Failed to make context current", PLATFORM_FAILURE,);
1812 DCHECK(!output_picture_buffers_.empty());
1814 picture_buffer->CopySurfaceComplete(src_surface,
1815 dest_surface);
1817 NotifyPictureReady(picture_buffer->id(),
1818 input_buffer_id,
1819 gfx::Rect(picture_buffer->size()));
// Retire the sample under the lock; the queue may already be empty if a
// Reset intervened.
1822 base::AutoLock lock(decoder_lock_);
1823 if (!pending_output_samples_.empty())
1824 pending_output_samples_.pop_front();
// A pending Flush takes priority over replaying queued input.
1827 if (pending_flush_) {
1828 decoder_thread_task_runner_->PostTask(
1829 FROM_HERE,
1830 base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal,
1831 base::Unretained(this)));
1832 return;
1834 decoder_thread_task_runner_->PostTask(
1835 FROM_HERE,
1836 base::Bind(&DXVAVideoDecodeAccelerator::DecodePendingInputBuffers,
1837 base::Unretained(this)));
// Converts the decoded NV12 frame in |video_frame| into |dest_texture| (an
// ARGB texture shared with ANGLE) using the DX11 video processor MFT, then
// schedules FlushDecoder() to poll for GPU completion. May be invoked first
// on the main thread, in which case it re-posts itself to the decoder thread.
void DXVAVideoDecodeAccelerator::CopyTexture(ID3D11Texture2D* src_texture,
                                             ID3D11Texture2D* dest_texture,
                                             IMFSample* video_frame,
                                             int picture_buffer_id,
                                             int input_buffer_id) {
  HRESULT hr = E_FAIL;

  DCHECK(use_dx11_);

  if (!decoder_thread_task_runner_->BelongsToCurrentThread()) {
    // The media foundation H.264 decoder outputs YUV12 textures which we
    // cannot copy into ANGLE as they expect ARGB textures. In D3D land
    // the StretchRect API in the IDirect3DDevice9Ex interface did the color
    // space conversion for us. Sadly in DX11 land the API does not provide
    // a straightforward way to do this.
    // We use the video processor MFT.
    // https://msdn.microsoft.com/en-us/library/hh162913(v=vs.85).aspx
    // This object implements a media foundation transform (IMFTransform)
    // which follows the same contract as the decoder. The color space
    // conversion as per msdn is done in the GPU.

    D3D11_TEXTURE2D_DESC source_desc;
    src_texture->GetDesc(&source_desc);

    // Set up the input and output types for the video processor MFT.
    if (!InitializeDX11VideoFormatConverterMediaType(source_desc.Width,
                                                     source_desc.Height)) {
      RETURN_AND_NOTIFY_ON_FAILURE(
          false, "Failed to initialize media types for convesion.",
          PLATFORM_FAILURE,);
    }

    // The input to the video processor is the output sample.
    base::win::ScopedComPtr<IMFSample> input_sample_for_conversion;
    {
      // Take the front sample under the lock shared with the decoder thread.
      base::AutoLock lock(decoder_lock_);
      PendingSampleInfo& sample_info = pending_output_samples_.front();
      input_sample_for_conversion = sample_info.output_sample;
    }

    // Re-post onto the decoder thread. The sample reference is transferred
    // via Detach(); the decoder-thread invocation below reclaims it with
    // input_sample.Attach(), so the refcount stays balanced.
    decoder_thread_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&DXVAVideoDecodeAccelerator::CopyTexture,
                   base::Unretained(this),
                   src_texture,
                   dest_texture,
                   input_sample_for_conversion.Detach(),
                   picture_buffer_id,
                   input_buffer_id));
    return;
  }

  DCHECK(video_frame);

  // Take ownership of the reference detached by the main-thread invocation
  // above, so it is released when this function returns.
  base::win::ScopedComPtr<IMFSample> input_sample;
  input_sample.Attach(video_frame);

  DCHECK(video_format_converter_mft_.get());

  // d3d11_device_context_->Begin(d3d11_query_.get());

  hr = video_format_converter_mft_->ProcessInput(0, video_frame, 0);
  if (FAILED(hr)) {
    DCHECK(false);
    RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
        "Failed to convert output sample format.", PLATFORM_FAILURE,);
  }

  // The video processor MFT requires output samples to be allocated by the
  // caller. We create a sample with a buffer backed with the ID3D11Texture2D
  // interface exposed by ANGLE. This works nicely as this ensures that the
  // video processor converts the color space of the output frame and copies
  // the result into the ANGLE texture.
  base::win::ScopedComPtr<IMFSample> output_sample;
  hr = MFCreateSample(output_sample.Receive());
  if (FAILED(hr)) {
    RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
        "Failed to create output sample.", PLATFORM_FAILURE,);
  }

  base::win::ScopedComPtr<IMFMediaBuffer> output_buffer;
  hr = MFCreateDXGISurfaceBuffer(
      __uuidof(ID3D11Texture2D), dest_texture, 0, FALSE,
      output_buffer.Receive());
  if (FAILED(hr)) {
    base::debug::Alias(&hr);
    // TODO(ananta)
    // Remove this CHECK when the change to use DX11 for H/W decoding
    // stabilizes.
    CHECK(false);
    RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
        "Failed to create output sample.", PLATFORM_FAILURE,);
  }

  output_sample->AddBuffer(output_buffer.get());

  DWORD status = 0;
  MFT_OUTPUT_DATA_BUFFER format_converter_output = {};
  format_converter_output.pSample = output_sample.get();
  hr = video_format_converter_mft_->ProcessOutput(
      0,  // No flags
      1,  // # of out streams to pull from
      &format_converter_output,
      &status);

  // Submit the queued GPU work and issue the event query that FlushDecoder()
  // polls (via GetData) to detect when the conversion/copy has completed.
  d3d11_device_context_->Flush();
  d3d11_device_context_->End(d3d11_query_.get());

  if (FAILED(hr)) {
    base::debug::Alias(&hr);
    // TODO(ananta)
    // Remove this CHECK when the change to use DX11 for H/W decoding
    // stabilizes.
    CHECK(false);
    RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
        "Failed to convert output sample format.", PLATFORM_FAILURE,);
  }

  // DX11 path: no D3D9 surfaces to hand over; FlushDecoder keys off use_dx11_.
  decoder_thread_task_runner_->PostDelayedTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::FlushDecoder,
                 base::Unretained(this), 0,
                 reinterpret_cast<IDirect3DSurface9*>(NULL),
                 reinterpret_cast<IDirect3DSurface9*>(NULL),
                 picture_buffer_id, input_buffer_id),
      base::TimeDelta::FromMilliseconds(
          kFlushDecoderSurfaceTimeoutMs));
}
// Polls (on the decoder thread) for completion of the GPU copy of the decoded
// frame, re-posting itself with a delay until the device reports completion
// or the iteration cap is hit, then notifies the main thread via
// CopySurfaceComplete(). |iterations| counts the polls performed so far.
void DXVAVideoDecodeAccelerator::FlushDecoder(
    int iterations,
    IDirect3DSurface9* src_surface,
    IDirect3DSurface9* dest_surface,
    int picture_buffer_id,
    int input_buffer_id) {
  DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());

  // The DXVA decoder has its own device which it uses for decoding. ANGLE
  // has its own device which we don't have access to.
  // The above code attempts to copy the decoded picture into a surface
  // which is owned by ANGLE. As there are multiple devices involved in
  // this, the StretchRect call above is not synchronous.
  // We attempt to flush the batched operations to ensure that the picture is
  // copied to the surface owned by ANGLE.
  // We need to do this in a loop and call flush multiple times.
  // We have seen the GetData call for flushing the command buffer fail to
  // return success occasionally on multi core machines, leading to an
  // infinite loop.
  // Workaround is to have an upper limit of 4 on the number of iterations to
  // wait for the Flush to finish.
  HRESULT hr = E_FAIL;

  if (use_dx11_) {
    // Poll the event query issued by CopyTexture(). S_FALSE means the GPU
    // has not finished yet.
    BOOL query_data = 0;
    hr = d3d11_device_context_->GetData(d3d11_query_.get(), &query_data,
                                        sizeof(BOOL), 0);
    if (FAILED(hr)) {
      base::debug::Alias(&hr);
      // TODO(ananta)
      // Remove this CHECK when the change to use DX11 for H/W decoding
      // stabilizes.
      CHECK(false);
    }
  } else {
    // D3D9 path: non-blocking check of the END query issued after StretchRect.
    hr = query_->GetData(NULL, 0, D3DGETDATA_FLUSH);
  }

  // Not done yet: re-post this poll with a delay, capped at
  // kMaxIterationsForD3DFlush attempts to avoid an infinite loop.
  if ((hr == S_FALSE) && (++iterations < kMaxIterationsForD3DFlush)) {
    decoder_thread_task_runner_->PostDelayedTask(
        FROM_HERE,
        base::Bind(&DXVAVideoDecodeAccelerator::FlushDecoder,
                   base::Unretained(this), iterations, src_surface,
                   dest_surface, picture_buffer_id, input_buffer_id),
        base::TimeDelta::FromMilliseconds(kFlushDecoderSurfaceTimeoutMs));
    return;
  }

  // Done (or gave up): hand the result back to the main thread. A WeakPtr is
  // used so the task is dropped if this object is destroyed first.
  main_thread_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::CopySurfaceComplete,
                 weak_this_factory_.GetWeakPtr(),
                 src_surface,
                 dest_surface,
                 picture_buffer_id,
                 input_buffer_id));
}
// Configures the DX11 video processor MFT for an NV12 (width x height) input
// and an ARGB32 output of the same size. No-op (returns true) if the media
// types are already initialized. Returns false if no ARGB32 output type is
// offered by the converter or any setup step fails.
bool DXVAVideoDecodeAccelerator::InitializeDX11VideoFormatConverterMediaType(
    int width, int height) {
  if (!dx11_video_format_converter_media_type_needs_init_)
    return true;

  CHECK(video_format_converter_mft_.get());

  // Hand the converter our DX11 device manager so the conversion runs on the
  // GPU against our device.
  HRESULT hr = video_format_converter_mft_->ProcessMessage(
      MFT_MESSAGE_SET_D3D_MANAGER,
      reinterpret_cast<ULONG_PTR>(
          d3d11_device_manager_.get()));

  if (FAILED(hr)) {
    base::debug::Alias(&hr);
    // TODO(ananta)
    // Remove this CHECK when the change to use DX11 for H/W decoding
    // stabilizes.
    CHECK(false);
  }
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
      "Failed to initialize video format converter", PLATFORM_FAILURE, false);

  // End any previous streaming session before (re)negotiating types.
  video_format_converter_mft_->ProcessMessage(
      MFT_MESSAGE_NOTIFY_END_STREAMING, 0);

  // Build the input media type: NV12 video, progressive, frame size
  // width x height.
  base::win::ScopedComPtr<IMFMediaType> media_type;
  hr = MFCreateMediaType(media_type.Receive());
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "MFCreateMediaType failed",
      PLATFORM_FAILURE, false);

  hr = media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set major input type",
      PLATFORM_FAILURE, false);

  hr = media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_NV12);
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set input sub type",
      PLATFORM_FAILURE, false);

  hr = media_type->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
      "Failed to set attributes on media type", PLATFORM_FAILURE, false);

  hr = media_type->SetUINT32(MF_MT_INTERLACE_MODE,
      MFVideoInterlace_Progressive);
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
      "Failed to set attributes on media type", PLATFORM_FAILURE, false);

  // Converter-wide attributes: playback mode on, low-latency off.
  base::win::ScopedComPtr<IMFAttributes> converter_attributes;
  hr = video_format_converter_mft_->GetAttributes(
      converter_attributes.Receive());
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to get converter attributes",
      PLATFORM_FAILURE, false);

  hr = converter_attributes->SetUINT32(MF_XVP_PLAYBACK_MODE, TRUE);
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set converter attributes",
      PLATFORM_FAILURE, false);

  hr = converter_attributes->SetUINT32(MF_LOW_LATENCY, FALSE);
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set converter attributes",
      PLATFORM_FAILURE, false);

  hr = MFSetAttributeSize(media_type.get(), MF_MT_FRAME_SIZE, width, height);
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set media type attributes",
      PLATFORM_FAILURE, false);

  hr = video_format_converter_mft_->SetInputType(0, media_type.get(), 0);
  if (FAILED(hr)) {
    base::debug::Alias(&hr);
    // TODO(ananta)
    // Remove this CHECK when the change to use DX11 for H/W decoding
    // stabilizes.
    CHECK(false);
  }
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set converter input type",
      PLATFORM_FAILURE, false);

  // Enumerate the converter's available output types and pick the ARGB32
  // one, configuring it to match the input dimensions.
  base::win::ScopedComPtr<IMFMediaType> out_media_type;

  for (uint32 i = 0;
       SUCCEEDED(video_format_converter_mft_->GetOutputAvailableType(0, i,
                     out_media_type.Receive()));
       ++i) {
    GUID out_subtype = {0};
    hr = out_media_type->GetGUID(MF_MT_SUBTYPE, &out_subtype);
    RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to get output major type",
        PLATFORM_FAILURE, false);

    if (out_subtype == MFVideoFormat_ARGB32) {
      hr = out_media_type->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);
      RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
          "Failed to set attributes on media type", PLATFORM_FAILURE, false);

      hr = out_media_type->SetUINT32(MF_MT_INTERLACE_MODE,
          MFVideoInterlace_Progressive);
      RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
          "Failed to set attributes on media type", PLATFORM_FAILURE, false);

      hr = MFSetAttributeSize(out_media_type.get(), MF_MT_FRAME_SIZE, width,
                              height);
      RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
          "Failed to set media type attributes", PLATFORM_FAILURE, false);

      hr = video_format_converter_mft_->SetOutputType(
          0, out_media_type.get(), 0);  // No flags
      if (FAILED(hr)) {
        base::debug::Alias(&hr);
        // TODO(ananta)
        // Remove this CHECK when the change to use DX11 for H/W decoding
        // stabilizes.
        CHECK(false);
      }
      RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
          "Failed to set converter output type", PLATFORM_FAILURE, false);

      hr = video_format_converter_mft_->ProcessMessage(
          MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0);
      if (FAILED(hr)) {
        // TODO(ananta)
        // Remove this CHECK when the change to use DX11 for H/W decoding
        // stabilizes.
        RETURN_AND_NOTIFY_ON_FAILURE(
            false, "Failed to initialize video converter.", PLATFORM_FAILURE,
            false);
      }
      dx11_video_format_converter_media_type_needs_init_ = false;
      return true;
    }
    out_media_type.Release();
  }
  // No ARGB32 output type was offered by the converter.
  return false;
}
2158 bool DXVAVideoDecodeAccelerator::GetVideoFrameDimensions(
2159 IMFSample* sample,
2160 int* width,
2161 int* height) {
2162 base::win::ScopedComPtr<IMFMediaBuffer> output_buffer;
2163 HRESULT hr = sample->GetBufferByIndex(0, output_buffer.Receive());
2164 RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from output sample", false);
2166 if (use_dx11_) {
2167 base::win::ScopedComPtr<IMFDXGIBuffer> dxgi_buffer;
2168 base::win::ScopedComPtr<ID3D11Texture2D> d3d11_texture;
2169 hr = dxgi_buffer.QueryFrom(output_buffer.get());
2170 RETURN_ON_HR_FAILURE(hr, "Failed to get DXGIBuffer from output sample",
2171 false);
2172 hr = dxgi_buffer->GetResource(
2173 __uuidof(ID3D11Texture2D),
2174 reinterpret_cast<void**>(d3d11_texture.Receive()));
2175 RETURN_ON_HR_FAILURE(hr, "Failed to get D3D11Texture from output buffer",
2176 false);
2177 D3D11_TEXTURE2D_DESC d3d11_texture_desc;
2178 d3d11_texture->GetDesc(&d3d11_texture_desc);
2179 *width = d3d11_texture_desc.Width;
2180 *height = d3d11_texture_desc.Height;
2181 } else {
2182 base::win::ScopedComPtr<IDirect3DSurface9> surface;
2183 hr = MFGetService(output_buffer.get(), MR_BUFFER_SERVICE,
2184 IID_PPV_ARGS(surface.Receive()));
2185 RETURN_ON_HR_FAILURE(hr, "Failed to get D3D surface from output sample",
2186 false);
2187 D3DSURFACE_DESC surface_desc;
2188 hr = surface->GetDesc(&surface_desc);
2189 RETURN_ON_HR_FAILURE(hr, "Failed to get surface description", false);
2190 *width = surface_desc.Width;
2191 *height = surface_desc.Height;
2193 return true;
2196 } // namespace content