// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/common/gpu/media/dxva_video_decode_accelerator.h"

#if !defined(OS_WIN)
#error This file should only be built on Windows.
#endif  // !defined(OS_WIN)

#include <wmcodecdsp.h>

#include "base/bind.h"
#include "base/callback.h"
#include "base/command_line.h"
#include "base/debug/trace_event.h"
#include "base/file_version_info.h"
#include "base/logging.h"
#include "base/memory/scoped_ptr.h"
#include "base/memory/shared_memory.h"
#include "base/message_loop/message_loop.h"
#include "base/win/windows_version.h"
#include "media/video/video_decode_accelerator.h"
#include "ui/gl/gl_bindings.h"
#include "ui/gl/gl_surface_egl.h"
#include "ui/gl/gl_switches.h"

namespace content {

// We only request 5 picture buffers from the client, which are used to hold
// the decoded samples. These buffers are then reused when the client tells us
// that it is done with the buffer.
static const int kNumPictureBuffers = 5;

// Returns |ret| and logs |log| if |result| evaluates to false.
#define RETURN_ON_FAILURE(result, log, ret)  \
  do {                                       \
    if (!(result)) {                         \
      DLOG(ERROR) << log;                    \
      return ret;                            \
    }                                        \
  } while (0)

#define RETURN_ON_HR_FAILURE(result, log, ret)                    \
  RETURN_ON_FAILURE(SUCCEEDED(result),                            \
                    log << ", HRESULT: 0x" << std::hex << result, \
                    ret);

// Same as RETURN_ON_FAILURE, but also reports |error_code| to the client via
// StopOnError() before returning.
#define RETURN_AND_NOTIFY_ON_FAILURE(result, log, error_code, ret) \
  do {                                                              \
    if (!(result)) {                                                \
      DLOG(ERROR) << log;                                           \
      StopOnError(error_code);                                      \
      return ret;                                                   \
    }                                                               \
  } while (0)

#define RETURN_AND_NOTIFY_ON_HR_FAILURE(result, log, error_code, ret)        \
  RETURN_AND_NOTIFY_ON_FAILURE(SUCCEEDED(result),                            \
                               log << ", HRESULT: 0x" << std::hex << result, \
                               error_code, ret);

// Maximum number of iterations we allow before aborting the attempt to flush
// the batched queries to the driver and allow torn/corrupt frames to be
// rendered.
enum { kMaxIterationsForD3DFlush = 10 };

static IMFSample* CreateEmptySample() {
  base::win::ScopedComPtr<IMFSample> sample;
  HRESULT hr = MFCreateSample(sample.Receive());
  RETURN_ON_HR_FAILURE(hr, "MFCreateSample failed", NULL);
  return sample.Detach();
}

// Creates a Media Foundation sample with one buffer of length |buffer_length|
// on a |align|-byte boundary. Alignment must be a perfect power of 2 or 0.
static IMFSample* CreateEmptySampleWithBuffer(int buffer_length, int align) {
  CHECK_GT(buffer_length, 0);

  base::win::ScopedComPtr<IMFSample> sample;
  sample.Attach(CreateEmptySample());

  base::win::ScopedComPtr<IMFMediaBuffer> buffer;
  HRESULT hr = E_FAIL;
  if (align == 0) {
    // Note that MFCreateMemoryBuffer is the same as
    // MFCreateAlignedMemoryBuffer with the align argument being 0.
    hr = MFCreateMemoryBuffer(buffer_length, buffer.Receive());
  } else {
    hr = MFCreateAlignedMemoryBuffer(buffer_length,
                                     align - 1,
                                     buffer.Receive());
  }
  RETURN_ON_HR_FAILURE(hr, "Failed to create memory buffer for sample", NULL);

  hr = sample->AddBuffer(buffer);
  RETURN_ON_HR_FAILURE(hr, "Failed to add buffer to sample", NULL);

  return sample.Detach();
}

// Creates a Media Foundation sample with one buffer containing a copy of the
// given Annex B stream data.
// If duration and sample time are not known, provide 0.
// |min_size| specifies the minimum size of the buffer (might be required by
// the decoder for input). If no alignment is required, provide 0.
static IMFSample* CreateInputSample(const uint8* stream, int size,
                                    int min_size, int alignment) {
  base::win::ScopedComPtr<IMFSample> sample;
  sample.Attach(CreateEmptySampleWithBuffer(std::max(min_size, size),
                                            alignment));
  RETURN_ON_FAILURE(sample, "Failed to create empty sample", NULL);

  base::win::ScopedComPtr<IMFMediaBuffer> buffer;
  HRESULT hr = sample->GetBufferByIndex(0, buffer.Receive());
  RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from sample", NULL);

  DWORD max_length = 0;
  DWORD current_length = 0;
  uint8* destination = NULL;
  hr = buffer->Lock(&destination, &max_length, &current_length);
  RETURN_ON_HR_FAILURE(hr, "Failed to lock buffer", NULL);

  CHECK_EQ(current_length, 0u);
  CHECK_GE(static_cast<int>(max_length), size);
  memcpy(destination, stream, size);

  hr = buffer->Unlock();
  RETURN_ON_HR_FAILURE(hr, "Failed to unlock buffer", NULL);

  hr = buffer->SetCurrentLength(size);
  RETURN_ON_HR_FAILURE(hr, "Failed to set buffer length", NULL);

  return sample.Detach();
}

static IMFSample* CreateSampleFromInputBuffer(
    const media::BitstreamBuffer& bitstream_buffer,
    int min_size,
    int alignment) {
  base::SharedMemory shm(bitstream_buffer.handle(), true);
  RETURN_ON_FAILURE(shm.Map(bitstream_buffer.size()),
                    "Failed in base::SharedMemory::Map", NULL);

  return CreateInputSample(reinterpret_cast<const uint8*>(shm.memory()),
                           bitstream_buffer.size(),
                           min_size,
                           alignment);
}

// Maintains information about a DXVA picture buffer, i.e. whether it is
// available for rendering, the texture information, etc.
struct DXVAVideoDecodeAccelerator::DXVAPictureBuffer {
  static linked_ptr<DXVAPictureBuffer> Create(
      const DXVAVideoDecodeAccelerator& decoder,
      const media::PictureBuffer& buffer,
      EGLConfig egl_config);
  ~DXVAPictureBuffer();

  void ReusePictureBuffer();
  // Copies the output sample data to the picture buffer provided by the
  // client.
  // The dest_surface parameter contains the decoded bits.
  bool CopyOutputSampleDataToPictureBuffer(
      const DXVAVideoDecodeAccelerator& decoder,
      IDirect3DSurface9* dest_surface);

  bool available() const {
    return available_;
  }

  void set_available(bool available) {
    available_ = available;
  }

  int id() const {
    return picture_buffer_.id();
  }

  gfx::Size size() const {
    return picture_buffer_.size();
  }

  explicit DXVAPictureBuffer(const media::PictureBuffer& buffer);

  bool available_;
  media::PictureBuffer picture_buffer_;
  EGLSurface decoding_surface_;
  base::win::ScopedComPtr<IDirect3DTexture9> decoding_texture_;
  // Set to true if RGB is supported by the texture.
  bool use_rgb_;

  DISALLOW_COPY_AND_ASSIGN(DXVAPictureBuffer);
};

linked_ptr<DXVAVideoDecodeAccelerator::DXVAPictureBuffer>
DXVAVideoDecodeAccelerator::DXVAPictureBuffer::Create(
    const DXVAVideoDecodeAccelerator& decoder,
    const media::PictureBuffer& buffer,
    EGLConfig egl_config) {
  linked_ptr<DXVAPictureBuffer> picture_buffer(new DXVAPictureBuffer(buffer));

  EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay();

  EGLint use_rgb = 1;
  eglGetConfigAttrib(egl_display, egl_config, EGL_BIND_TO_TEXTURE_RGB,
                     &use_rgb);

  EGLint attrib_list[] = {
      EGL_WIDTH, buffer.size().width(),
      EGL_HEIGHT, buffer.size().height(),
      EGL_TEXTURE_FORMAT, use_rgb ? EGL_TEXTURE_RGB : EGL_TEXTURE_RGBA,
      EGL_TEXTURE_TARGET, EGL_TEXTURE_2D,
      EGL_NONE};

  picture_buffer->decoding_surface_ = eglCreatePbufferSurface(
      egl_display, egl_config, attrib_list);
  RETURN_ON_FAILURE(picture_buffer->decoding_surface_,
                    "Failed to create surface",
                    linked_ptr<DXVAPictureBuffer>(NULL));

  HANDLE share_handle = NULL;
  EGLBoolean ret = eglQuerySurfacePointerANGLE(
      egl_display,
      picture_buffer->decoding_surface_,
      EGL_D3D_TEXTURE_2D_SHARE_HANDLE_ANGLE,
      reinterpret_cast<void**>(&share_handle));

  RETURN_ON_FAILURE(share_handle && ret == EGL_TRUE,
                    "Failed to query ANGLE surface pointer",
                    linked_ptr<DXVAPictureBuffer>(NULL));

  HRESULT hr = decoder.device_->CreateTexture(
      buffer.size().width(),
      buffer.size().height(),
      1,
      D3DUSAGE_RENDERTARGET,
      use_rgb ? D3DFMT_X8R8G8B8 : D3DFMT_A8R8G8B8,
      D3DPOOL_DEFAULT,
      picture_buffer->decoding_texture_.Receive(),
      &share_handle);
  RETURN_ON_HR_FAILURE(hr, "Failed to create texture",
                       linked_ptr<DXVAPictureBuffer>(NULL));
  picture_buffer->use_rgb_ = !!use_rgb;
  return picture_buffer;
}

DXVAVideoDecodeAccelerator::DXVAPictureBuffer::DXVAPictureBuffer(
    const media::PictureBuffer& buffer)
    : picture_buffer_(buffer),
      decoding_surface_(NULL),

DXVAVideoDecodeAccelerator::DXVAPictureBuffer::~DXVAPictureBuffer() {
  if (decoding_surface_) {
    EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay();
    decoding_surface_ = NULL;
  }
}

void DXVAVideoDecodeAccelerator::DXVAPictureBuffer::ReusePictureBuffer() {
  DCHECK(decoding_surface_);
  EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay();
}

bool DXVAVideoDecodeAccelerator::DXVAPictureBuffer::
    CopyOutputSampleDataToPictureBuffer(
        const DXVAVideoDecodeAccelerator& decoder,
        IDirect3DSurface9* dest_surface) {
  DCHECK(dest_surface);

  D3DSURFACE_DESC surface_desc;
  HRESULT hr = dest_surface->GetDesc(&surface_desc);
  RETURN_ON_HR_FAILURE(hr, "Failed to get surface description", false);

  D3DSURFACE_DESC texture_desc;
  decoding_texture_->GetLevelDesc(0, &texture_desc);

  if (texture_desc.Width != surface_desc.Width ||
      texture_desc.Height != surface_desc.Height) {
    NOTREACHED() << "Decode surface of different dimension than texture";
    return false;
  }

  hr = decoder.d3d9_->CheckDeviceFormatConversion(
      D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, surface_desc.Format,
      use_rgb_ ? D3DFMT_X8R8G8B8 : D3DFMT_A8R8G8B8);
  RETURN_ON_HR_FAILURE(hr, "Device does not support format conversion", false);

  // This function currently executes in the context of IPC handlers in the
  // GPU process which ensures that there is always an OpenGL context.
  GLint current_texture = 0;
  glGetIntegerv(GL_TEXTURE_BINDING_2D, &current_texture);

  glBindTexture(GL_TEXTURE_2D, picture_buffer_.texture_id());

  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);

  base::win::ScopedComPtr<IDirect3DSurface9> d3d_surface;
  hr = decoding_texture_->GetSurfaceLevel(0, d3d_surface.Receive());
  RETURN_ON_HR_FAILURE(hr, "Failed to get surface from texture", false);

  hr = decoder.device_->StretchRect(
      dest_surface, NULL, d3d_surface, NULL, D3DTEXF_NONE);
  RETURN_ON_HR_FAILURE(hr, "Colorspace conversion via StretchRect failed",
                       false);

  // Ideally, this should be done immediately before the draw call that uses
  // the texture. Flush it once here though.
  hr = decoder.query_->Issue(D3DISSUE_END);
  RETURN_ON_HR_FAILURE(hr, "Failed to issue END", false);

  // The DXVA decoder has its own device which it uses for decoding. ANGLE
  // has its own device which we don't have access to.
  // The above code attempts to copy the decoded picture into a surface
  // which is owned by ANGLE. As there are multiple devices involved in
  // this, the StretchRect call above is not synchronous.
  // We attempt to flush the batched operations to ensure that the picture is
  // copied to the surface owned by ANGLE.
  // We need to do this in a loop and call flush multiple times.
  // We have seen the GetData call for flushing the command buffer fail to
  // return success occasionally on multi core machines, leading to an
  // infinite loop.
  // The workaround is to have an upper limit of 10 on the number of
  // iterations to wait for the Flush to finish.
  int iterations = 0;
  while ((decoder.query_->GetData(NULL, 0, D3DGETDATA_FLUSH) == S_FALSE) &&
         ++iterations < kMaxIterationsForD3DFlush) {
    Sleep(1);  // Poor-man's Yield().
  }

  EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay();
  eglBindTexImage(egl_display, decoding_surface_, EGL_BACK_BUFFER);

  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
  glBindTexture(GL_TEXTURE_2D, current_texture);
  return true;
}

DXVAVideoDecodeAccelerator::PendingSampleInfo::PendingSampleInfo(
    int32 buffer_id, IMFSample* sample)
    : input_buffer_id(buffer_id) {
  output_sample.Attach(sample);
}

DXVAVideoDecodeAccelerator::PendingSampleInfo::~PendingSampleInfo() {}
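
// Creates the Direct3D9Ex device used for decoding, wraps it in a DXVA2
// device manager for the MFT, and creates the D3D event query that is later
// used to flush the driver's command queue.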
bool DXVAVideoDecodeAccelerator::CreateD3DDevManager() {
  TRACE_EVENT0("gpu", "DXVAVideoDecodeAccelerator_CreateD3DDevManager");

  HRESULT hr = Direct3DCreate9Ex(D3D_SDK_VERSION, d3d9_.Receive());
  RETURN_ON_HR_FAILURE(hr, "Direct3DCreate9Ex failed", false);

  D3DPRESENT_PARAMETERS present_params = {0};
  present_params.BackBufferWidth = 1;
  present_params.BackBufferHeight = 1;
  present_params.BackBufferFormat = D3DFMT_UNKNOWN;
  present_params.BackBufferCount = 1;
  present_params.SwapEffect = D3DSWAPEFFECT_DISCARD;
  present_params.hDeviceWindow = ::GetShellWindow();
  present_params.Windowed = TRUE;
  present_params.Flags = D3DPRESENTFLAG_VIDEO;
  present_params.FullScreen_RefreshRateInHz = 0;
  present_params.PresentationInterval = 0;

  hr = d3d9_->CreateDeviceEx(D3DADAPTER_DEFAULT,
                             D3DDEVTYPE_HAL,
                             ::GetShellWindow(),
                             D3DCREATE_FPU_PRESERVE |
                                 D3DCREATE_SOFTWARE_VERTEXPROCESSING |
                                 D3DCREATE_DISABLE_PSGP_THREADING |
                                 D3DCREATE_MULTITHREADED,
                             &present_params,
                             NULL,
                             device_.Receive());
  RETURN_ON_HR_FAILURE(hr, "Failed to create D3D device", false);

  hr = DXVA2CreateDirect3DDeviceManager9(&dev_manager_reset_token_,
                                         device_manager_.Receive());
  RETURN_ON_HR_FAILURE(hr, "DXVA2CreateDirect3DDeviceManager9 failed", false);

  hr = device_manager_->ResetDevice(device_, dev_manager_reset_token_);
  RETURN_ON_HR_FAILURE(hr, "Failed to reset device", false);

  hr = device_->CreateQuery(D3DQUERYTYPE_EVENT, query_.Receive());
  RETURN_ON_HR_FAILURE(hr, "Failed to create D3D device query", false);
  // Ensure query_ API works (to avoid an infinite loop later in
  // CopyOutputSampleDataToPictureBuffer).
  hr = query_->Issue(D3DISSUE_END);
  RETURN_ON_HR_FAILURE(hr, "Failed to issue END test query", false);
  return true;
}

DXVAVideoDecodeAccelerator::DXVAVideoDecodeAccelerator(
    const base::Callback<bool(void)>& make_context_current)
    : dev_manager_reset_token_(0),
      state_(kUninitialized),
      pictures_requested_(false),
      inputs_before_decode_(0),
      make_context_current_(make_context_current),
      weak_this_factory_(this) {
  memset(&input_stream_info_, 0, sizeof(input_stream_info_));
  memset(&output_stream_info_, 0, sizeof(output_stream_info_));
}

DXVAVideoDecodeAccelerator::~DXVAVideoDecodeAccelerator() {
}

bool DXVAVideoDecodeAccelerator::Initialize(media::VideoCodecProfile profile,
                                            Client* client) {
  DCHECK(CalledOnValidThread());

  // Not all versions of Windows 7 and later include Media Foundation DLLs.
  // Instead of crashing while delay loading the DLL when calling MFStartup()
  // below, probe whether we can successfully load the DLL now.
  // See http://crbug.com/339678 for details.
  HMODULE mfplat_dll = ::LoadLibrary(L"MFPlat.dll");
  RETURN_ON_FAILURE(mfplat_dll, "MFPlat.dll is required for decoding", false);

  // H264PROFILE_HIGH video decoding is janky at times. Needs more
  // investigation.
  if (profile != media::H264PROFILE_BASELINE &&
      profile != media::H264PROFILE_MAIN &&
      profile != media::H264PROFILE_HIGH) {
    RETURN_AND_NOTIFY_ON_FAILURE(false,
        "Unsupported h264 profile", PLATFORM_FAILURE, false);
  }

  RETURN_AND_NOTIFY_ON_FAILURE(
      gfx::g_driver_egl.ext.b_EGL_ANGLE_surface_d3d_texture_2d_share_handle,
      "EGL_ANGLE_surface_d3d_texture_2d_share_handle unavailable",
      PLATFORM_FAILURE, false);

  RETURN_AND_NOTIFY_ON_FAILURE((state_ == kUninitialized),
      "Initialize: invalid state: " << state_, ILLEGAL_STATE, false);

  HRESULT hr = MFStartup(MF_VERSION, MFSTARTUP_FULL);
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "MFStartup failed.", PLATFORM_FAILURE,
                                  false);

  RETURN_AND_NOTIFY_ON_FAILURE(CreateD3DDevManager(),
                               "Failed to initialize D3D device and manager",
                               PLATFORM_FAILURE, false);

  RETURN_AND_NOTIFY_ON_FAILURE(InitDecoder(profile),
      "Failed to initialize decoder", PLATFORM_FAILURE, false);

  RETURN_AND_NOTIFY_ON_FAILURE(GetStreamsInfoAndBufferReqs(),
      "Failed to get input/output stream info.", PLATFORM_FAILURE, false);

  RETURN_AND_NOTIFY_ON_FAILURE(
      SendMFTMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0),
      "Send MFT_MESSAGE_NOTIFY_BEGIN_STREAMING notification failed",
      PLATFORM_FAILURE, false);

  RETURN_AND_NOTIFY_ON_FAILURE(
      SendMFTMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, 0),
      "Send MFT_MESSAGE_NOTIFY_START_OF_STREAM notification failed",
      PLATFORM_FAILURE, false);

  state_ = kNormal;
  return true;
}
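
// Decode entry point: wraps |bitstream_buffer| in an IMFSample (tagging the
// buffer id as the sample time so it can be recovered later) and hands it to
// DecodeInternal().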
void DXVAVideoDecodeAccelerator::Decode(
    const media::BitstreamBuffer& bitstream_buffer) {
  DCHECK(CalledOnValidThread());

  RETURN_AND_NOTIFY_ON_FAILURE((state_ == kNormal || state_ == kStopped ||
                                state_ == kFlushing),
      "Invalid state: " << state_, ILLEGAL_STATE,);

  base::win::ScopedComPtr<IMFSample> sample;
  sample.Attach(CreateSampleFromInputBuffer(bitstream_buffer,
                                            input_stream_info_.cbSize,
                                            input_stream_info_.cbAlignment));
  RETURN_AND_NOTIFY_ON_FAILURE(sample, "Failed to create input sample",
                               PLATFORM_FAILURE,);

  RETURN_AND_NOTIFY_ON_HR_FAILURE(sample->SetSampleTime(bitstream_buffer.id()),
      "Failed to associate input buffer id with sample", PLATFORM_FAILURE,);

  DecodeInternal(sample);
}

void DXVAVideoDecodeAccelerator::AssignPictureBuffers(
    const std::vector<media::PictureBuffer>& buffers) {
  DCHECK(CalledOnValidThread());

  RETURN_AND_NOTIFY_ON_FAILURE((state_ != kUninitialized),
      "Invalid state: " << state_, ILLEGAL_STATE,);
  RETURN_AND_NOTIFY_ON_FAILURE((kNumPictureBuffers == buffers.size()),
      "Failed to provide requested picture buffers. (Got " << buffers.size() <<
      ", requested " << kNumPictureBuffers << ")", INVALID_ARGUMENT,);

  // Copy the picture buffers provided by the client to the available list,
  // and mark these buffers as available for use.
  for (size_t buffer_index = 0; buffer_index < buffers.size();
       ++buffer_index) {
    linked_ptr<DXVAPictureBuffer> picture_buffer =
        DXVAPictureBuffer::Create(*this, buffers[buffer_index], egl_config_);
    RETURN_AND_NOTIFY_ON_FAILURE(picture_buffer.get(),
        "Failed to allocate picture buffer", PLATFORM_FAILURE,);

    bool inserted = output_picture_buffers_.insert(std::make_pair(
        buffers[buffer_index].id(), picture_buffer)).second;
    DCHECK(inserted);
  }

  ProcessPendingSamples();
  if (state_ == kFlushing && pending_output_samples_.empty())
    FlushInternal();
}

void DXVAVideoDecodeAccelerator::ReusePictureBuffer(
    int32 picture_buffer_id) {
  DCHECK(CalledOnValidThread());

  RETURN_AND_NOTIFY_ON_FAILURE((state_ != kUninitialized),
      "Invalid state: " << state_, ILLEGAL_STATE,);

  if (output_picture_buffers_.empty())
    return;

  OutputBuffers::iterator it = output_picture_buffers_.find(picture_buffer_id);
  RETURN_AND_NOTIFY_ON_FAILURE(it != output_picture_buffers_.end(),
      "Invalid picture id: " << picture_buffer_id, INVALID_ARGUMENT,);

  it->second->ReusePictureBuffer();
  ProcessPendingSamples();

  if (state_ == kFlushing && pending_output_samples_.empty())
    FlushInternal();
}

void DXVAVideoDecodeAccelerator::Flush() {
  DCHECK(CalledOnValidThread());

  DVLOG(1) << "DXVAVideoDecodeAccelerator::Flush";

  RETURN_AND_NOTIFY_ON_FAILURE((state_ == kNormal || state_ == kStopped),
      "Unexpected decoder state: " << state_, ILLEGAL_STATE,);

  state_ = kFlushing;

  RETURN_AND_NOTIFY_ON_FAILURE(SendMFTMessage(MFT_MESSAGE_COMMAND_DRAIN, 0),
      "Failed to send drain message", PLATFORM_FAILURE,);

  if (!pending_output_samples_.empty())
    return;

  FlushInternal();
}

void DXVAVideoDecodeAccelerator::Reset() {
  DCHECK(CalledOnValidThread());

  DVLOG(1) << "DXVAVideoDecodeAccelerator::Reset";

  RETURN_AND_NOTIFY_ON_FAILURE((state_ == kNormal || state_ == kStopped),
      "Reset: invalid state: " << state_, ILLEGAL_STATE,);

  pending_output_samples_.clear();

  NotifyInputBuffersDropped();

  RETURN_AND_NOTIFY_ON_FAILURE(SendMFTMessage(MFT_MESSAGE_COMMAND_FLUSH, 0),
      "Reset: Failed to send message.", PLATFORM_FAILURE,);

  base::MessageLoop::current()->PostTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::NotifyResetDone,
                 weak_this_factory_.GetWeakPtr()));

  state_ = DXVAVideoDecodeAccelerator::kNormal;
}

void DXVAVideoDecodeAccelerator::Destroy() {
  DCHECK(CalledOnValidThread());
  Invalidate();
  delete this;
}

bool DXVAVideoDecodeAccelerator::CanDecodeOnIOThread() {
  return false;
}

bool DXVAVideoDecodeAccelerator::InitDecoder(media::VideoCodecProfile profile) {
  if (profile < media::H264PROFILE_MIN || profile > media::H264PROFILE_MAX)
    return false;

  // We mimic the steps CoCreateInstance uses to instantiate the object. This
  // was previously done because it failed inside the sandbox, and now is done
  // as a more minimal approach to avoid other side-effects CCI might have (as
  // we are still in a reduced sandbox).
  HMODULE decoder_dll = ::LoadLibrary(L"msmpeg2vdec.dll");
  RETURN_ON_FAILURE(decoder_dll,
                    "msmpeg2vdec.dll required for decoding is not loaded",
                    false);

  // Check version of DLL, version 6.7.7140 is blacklisted due to high crash
  // rates in browsers loading that DLL. If that is the version installed we
  // fall back to software decoding. See crbug/403440.
  FileVersionInfo* version_info =
      FileVersionInfo::CreateFileVersionInfoForModule(decoder_dll);
  RETURN_ON_FAILURE(version_info,
                    "unable to get version of msmpeg2vdec.dll",
                    false);
  base::string16 file_version = version_info->file_version();
  RETURN_ON_FAILURE(file_version.find(L"6.1.7140") == base::string16::npos,
                    "blacklisted version of msmpeg2vdec.dll 6.7.7140",
                    false);

  typedef HRESULT(WINAPI* GetClassObject)(
      const CLSID& clsid, const IID& iid, void** object);

  GetClassObject get_class_object = reinterpret_cast<GetClassObject>(
      GetProcAddress(decoder_dll, "DllGetClassObject"));
  RETURN_ON_FAILURE(
      get_class_object, "Failed to get DllGetClassObject pointer", false);

  base::win::ScopedComPtr<IClassFactory> factory;
  HRESULT hr = get_class_object(__uuidof(CMSH264DecoderMFT),
                                __uuidof(IClassFactory),
                                reinterpret_cast<void**>(factory.Receive()));
  RETURN_ON_HR_FAILURE(hr, "DllGetClassObject for decoder failed", false);

  hr = factory->CreateInstance(NULL,
                               __uuidof(IMFTransform),
                               reinterpret_cast<void**>(decoder_.Receive()));
  RETURN_ON_HR_FAILURE(hr, "Failed to create decoder instance", false);

  RETURN_ON_FAILURE(CheckDecoderDxvaSupport(),
                    "Failed to check decoder DXVA support", false);

  hr = decoder_->ProcessMessage(
      MFT_MESSAGE_SET_D3D_MANAGER,
      reinterpret_cast<ULONG_PTR>(device_manager_.get()));
  RETURN_ON_HR_FAILURE(hr, "Failed to pass D3D manager to decoder", false);

  EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay();

  EGLint config_attribs[] = {
      EGL_SURFACE_TYPE, EGL_PBUFFER_BIT,

  if (!eglChooseConfig(

  return SetDecoderMediaTypes();
}
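
// Queries the decoder's attributes to confirm it is D3D-aware
// (MF_SA_D3D_AWARE) and then enables DXVA H.264 hardware acceleration on it.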
bool DXVAVideoDecodeAccelerator::CheckDecoderDxvaSupport() {
  base::win::ScopedComPtr<IMFAttributes> attributes;
  HRESULT hr = decoder_->GetAttributes(attributes.Receive());
  RETURN_ON_HR_FAILURE(hr, "Failed to get decoder attributes", false);

  UINT32 dxva = 0;
  hr = attributes->GetUINT32(MF_SA_D3D_AWARE, &dxva);
  RETURN_ON_HR_FAILURE(hr, "Failed to check if decoder supports DXVA", false);

  hr = attributes->SetUINT32(CODECAPI_AVDecVideoAcceleration_H264, TRUE);
  RETURN_ON_HR_FAILURE(hr, "Failed to enable DXVA H/W decoding", false);
  return true;
}

bool DXVAVideoDecodeAccelerator::SetDecoderMediaTypes() {
  RETURN_ON_FAILURE(SetDecoderInputMediaType(),
                    "Failed to set decoder input media type", false);
  return SetDecoderOutputMediaType(MFVideoFormat_NV12);
}
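
// Configures the MFT input type: H.264 video that may be a mix of interlaced
// and progressive content.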
bool DXVAVideoDecodeAccelerator::SetDecoderInputMediaType() {
  base::win::ScopedComPtr<IMFMediaType> media_type;
  HRESULT hr = MFCreateMediaType(media_type.Receive());
  RETURN_ON_HR_FAILURE(hr, "MFCreateMediaType failed", false);

  hr = media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
  RETURN_ON_HR_FAILURE(hr, "Failed to set major input type", false);

  hr = media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264);
  RETURN_ON_HR_FAILURE(hr, "Failed to set subtype", false);

  // Not sure about this. MSDN recommends setting this value on the input
  // media type.
  hr = media_type->SetUINT32(MF_MT_INTERLACE_MODE,
                             MFVideoInterlace_MixedInterlaceOrProgressive);
  RETURN_ON_HR_FAILURE(hr, "Failed to set interlace mode", false);

  hr = decoder_->SetInputType(0, media_type, 0);  // No flags
  RETURN_ON_HR_FAILURE(hr, "Failed to set decoder input type", false);
  return true;
}
bool DXVAVideoDecodeAccelerator::SetDecoderOutputMediaType(
    const GUID& subtype) {
  base::win::ScopedComPtr<IMFMediaType> out_media_type;

  for (uint32 i = 0;
       SUCCEEDED(decoder_->GetOutputAvailableType(0, i,
                                                  out_media_type.Receive()));
       ++i) {
    GUID out_subtype = {0};
    HRESULT hr = out_media_type->GetGUID(MF_MT_SUBTYPE, &out_subtype);
    RETURN_ON_HR_FAILURE(hr, "Failed to get output major type", false);

    if (out_subtype == subtype) {
      hr = decoder_->SetOutputType(0, out_media_type, 0);  // No flags
      RETURN_ON_HR_FAILURE(hr, "Failed to set decoder output type", false);
      return true;
    }
    out_media_type.Release();
  }
  return false;
}
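
// Thin wrapper around IMFTransform::ProcessMessage() that folds the HRESULT
// into a bool.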
bool DXVAVideoDecodeAccelerator::SendMFTMessage(MFT_MESSAGE_TYPE msg,
                                                int32 param) {
  HRESULT hr = decoder_->ProcessMessage(msg, param);
  return SUCCEEDED(hr);
}

// Gets the minimum buffer sizes for input and output samples. The MFT will
// not allocate buffers for input or output, so we have to do it ourselves and
// make sure they're the correct size. We only provide decoding if DXVA is
// enabled.
bool DXVAVideoDecodeAccelerator::GetStreamsInfoAndBufferReqs() {
  HRESULT hr = decoder_->GetInputStreamInfo(0, &input_stream_info_);
  RETURN_ON_HR_FAILURE(hr, "Failed to get input stream info", false);

  hr = decoder_->GetOutputStreamInfo(0, &output_stream_info_);
  RETURN_ON_HR_FAILURE(hr, "Failed to get decoder output stream info", false);

  DVLOG(1) << "Input stream info: ";
  DVLOG(1) << "Max latency: " << input_stream_info_.hnsMaxLatency;
  // There should be three flags, one for requiring a whole frame be in a
  // single sample, one for requiring there be one buffer only in a single
  // sample, and one that specifies a fixed sample size. (as in cbSize)
  CHECK_EQ(input_stream_info_.dwFlags, 0x7u);

  DVLOG(1) << "Min buffer size: " << input_stream_info_.cbSize;
  DVLOG(1) << "Max lookahead: " << input_stream_info_.cbMaxLookahead;
  DVLOG(1) << "Alignment: " << input_stream_info_.cbAlignment;

  DVLOG(1) << "Output stream info: ";
  // The flags here should be the same and mean the same thing, except when
  // DXVA is enabled, there is an extra 0x100 flag meaning the decoder will
  // allocate its own sample.
  DVLOG(1) << "Flags: "
           << std::hex << std::showbase << output_stream_info_.dwFlags;
  CHECK_EQ(output_stream_info_.dwFlags, 0x107u);
  DVLOG(1) << "Min buffer size: " << output_stream_info_.cbSize;
  DVLOG(1) << "Alignment: " << output_stream_info_.cbAlignment;
  return true;
}
void DXVAVideoDecodeAccelerator::DoDecode() {
  // This function is also called from FlushInternal in a loop which could
  // result in the state transitioning to kStopped due to no decoded output.
  RETURN_AND_NOTIFY_ON_FAILURE((state_ == kNormal || state_ == kFlushing ||
                                state_ == kStopped),
      "DoDecode: not in normal/flushing/stopped state", ILLEGAL_STATE,);

  MFT_OUTPUT_DATA_BUFFER output_data_buffer = {0};
  DWORD status = 0;

  HRESULT hr = decoder_->ProcessOutput(0,  // No flags
                                       1,  // # of out streams to pull from
                                       &output_data_buffer,
                                       &status);
  IMFCollection* events = output_data_buffer.pEvents;
  if (events != NULL) {
    VLOG(1) << "Got events from ProcessOutput, but discarding";
    events->Release();
  }

  if (FAILED(hr)) {
    // A stream change needs further ProcessInput calls to get back decoder
    // output which is why we need to set the state to stopped.
    if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
      if (!SetDecoderOutputMediaType(MFVideoFormat_NV12)) {
        // Decoder didn't let us set NV12 output format. Not sure as to why
        // this can happen. Give up in disgust.
        NOTREACHED() << "Failed to set decoder output media type to NV12";
        state_ = kStopped;
        return;
      }
      DVLOG(1) << "Received output format change from the decoder."
                  " Recursively invoking DoDecode";
      DoDecode();
      return;
    } else if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
      // No more output from the decoder. Stop playback.
      state_ = kStopped;
      return;
    }
    NOTREACHED() << "Unhandled error in DoDecode()";
    return;
  }

  TRACE_EVENT_END_ETW("DXVAVideoDecodeAccelerator.Decoding", this, "");

  TRACE_COUNTER1("DXVA Decoding", "TotalPacketsBeforeDecode",
                 inputs_before_decode_);

  inputs_before_decode_ = 0;

  RETURN_AND_NOTIFY_ON_FAILURE(ProcessOutputSample(output_data_buffer.pSample),
      "Failed to process output sample.", PLATFORM_FAILURE,);
}
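
// Takes ownership of the decoder's output |sample|, extracts the D3D surface
// backing it, and queues it as a pending output. If the client has already
// provided picture buffers the copy happens right away; otherwise picture
// buffers are requested based on the surface dimensions.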
bool DXVAVideoDecodeAccelerator::ProcessOutputSample(IMFSample* sample) {
  RETURN_ON_FAILURE(sample, "Decode succeeded with NULL output sample", false);

  base::win::ScopedComPtr<IMFMediaBuffer> output_buffer;
  HRESULT hr = sample->GetBufferByIndex(0, output_buffer.Receive());
  RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from output sample", false);

  base::win::ScopedComPtr<IDirect3DSurface9> surface;
  hr = MFGetService(output_buffer, MR_BUFFER_SERVICE,
                    IID_PPV_ARGS(surface.Receive()));
  RETURN_ON_HR_FAILURE(hr, "Failed to get D3D surface from output sample",
                       false);

  LONGLONG input_buffer_id = 0;
  RETURN_ON_HR_FAILURE(sample->GetSampleTime(&input_buffer_id),
                       "Failed to get input buffer id associated with sample",
                       false);

  pending_output_samples_.push_back(
      PendingSampleInfo(input_buffer_id, sample));

  // If we have available picture buffers to copy the output data then use the
  // first one and then flag it as not being available for use.
  if (output_picture_buffers_.size()) {
    ProcessPendingSamples();
    return true;
  }

  if (pictures_requested_) {
    DVLOG(1) << "Waiting for picture slots from the client.";
    return true;
  }

  // We only read the surface description, which contains its width/height
  // when we need the picture buffers from the client. Once we have those,
  // then they are reused.
  D3DSURFACE_DESC surface_desc;
  hr = surface->GetDesc(&surface_desc);
  RETURN_ON_HR_FAILURE(hr, "Failed to get surface description", false);

  // Go ahead and request picture buffers.
  base::MessageLoop::current()->PostTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::RequestPictureBuffers,
                 weak_this_factory_.GetWeakPtr(),
                 surface_desc.Width,
                 surface_desc.Height));

  pictures_requested_ = true;
  return true;
}
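
// For each pending decoded sample, finds an available picture buffer, copies
// the decoded surface into it, and posts a PictureReady notification. Once
// all pending outputs have been delivered, any queued input buffers are
// resubmitted for decoding.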
void DXVAVideoDecodeAccelerator::ProcessPendingSamples() {
  RETURN_AND_NOTIFY_ON_FAILURE(make_context_current_.Run(),
      "Failed to make context current", PLATFORM_FAILURE,);

  OutputBuffers::iterator index;

  for (index = output_picture_buffers_.begin();
       index != output_picture_buffers_.end() &&
       !pending_output_samples_.empty();
       ++index) {
    if (index->second->available()) {
      PendingSampleInfo sample_info = pending_output_samples_.front();

      base::win::ScopedComPtr<IMFMediaBuffer> output_buffer;
      HRESULT hr = sample_info.output_sample->GetBufferByIndex(
          0, output_buffer.Receive());
      RETURN_AND_NOTIFY_ON_HR_FAILURE(
          hr, "Failed to get buffer from output sample", PLATFORM_FAILURE,);

      base::win::ScopedComPtr<IDirect3DSurface9> surface;
      hr = MFGetService(output_buffer, MR_BUFFER_SERVICE,
                        IID_PPV_ARGS(surface.Receive()));
      RETURN_AND_NOTIFY_ON_HR_FAILURE(
          hr, "Failed to get D3D surface from output sample",
          PLATFORM_FAILURE,);

      D3DSURFACE_DESC surface_desc;
      hr = surface->GetDesc(&surface_desc);
      RETURN_AND_NOTIFY_ON_HR_FAILURE(
          hr, "Failed to get surface description", PLATFORM_FAILURE,);

      if (surface_desc.Width !=
              static_cast<uint32>(index->second->size().width()) ||
          surface_desc.Height !=
              static_cast<uint32>(index->second->size().height())) {
        HandleResolutionChanged(surface_desc.Width, surface_desc.Height);
        return;
      }

      RETURN_AND_NOTIFY_ON_FAILURE(
          index->second->CopyOutputSampleDataToPictureBuffer(*this, surface),
          "Failed to copy output sample",
          PLATFORM_FAILURE,);

      media::Picture output_picture(index->second->id(),
                                    sample_info.input_buffer_id,
                                    gfx::Rect(index->second->size()));
      base::MessageLoop::current()->PostTask(
          FROM_HERE,
          base::Bind(&DXVAVideoDecodeAccelerator::NotifyPictureReady,
                     weak_this_factory_.GetWeakPtr(),
                     output_picture));

      index->second->set_available(false);
      pending_output_samples_.pop_front();
    }
  }

  if (!pending_input_buffers_.empty() && pending_output_samples_.empty()) {
    base::MessageLoop::current()->PostTask(
        FROM_HERE,
        base::Bind(&DXVAVideoDecodeAccelerator::DecodePendingInputBuffers,
                   weak_this_factory_.GetWeakPtr()));
  }
}

void DXVAVideoDecodeAccelerator::StopOnError(
    media::VideoDecodeAccelerator::Error error) {
  DCHECK(CalledOnValidThread());

  if (client_)
    client_->NotifyError(error);

  if (state_ != kUninitialized) {
    Invalidate();
  }
}
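
// Drops all weak references and queued work (picture buffers, pending output
// samples, pending input buffers) and returns the decoder to kUninitialized.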
void DXVAVideoDecodeAccelerator::Invalidate() {
  if (state_ == kUninitialized)
    return;
  weak_this_factory_.InvalidateWeakPtrs();
  output_picture_buffers_.clear();
  pending_output_samples_.clear();
  pending_input_buffers_.clear();
  state_ = kUninitialized;
}

void DXVAVideoDecodeAccelerator::NotifyInputBufferRead(int input_buffer_id) {
  if (client_)
    client_->NotifyEndOfBitstreamBuffer(input_buffer_id);
}

void DXVAVideoDecodeAccelerator::NotifyFlushDone() {
  if (client_)
    client_->NotifyFlushDone();
}

void DXVAVideoDecodeAccelerator::NotifyResetDone() {
  if (client_)
    client_->NotifyResetDone();
}

void DXVAVideoDecodeAccelerator::RequestPictureBuffers(int width, int height) {
  // This task could execute after the decoder has been torn down.
  if (state_ != kUninitialized && client_) {
    client_->ProvidePictureBuffers(
        kNumPictureBuffers,
        gfx::Size(width, height),
        GL_TEXTURE_2D);
  }
}

void DXVAVideoDecodeAccelerator::NotifyPictureReady(
    const media::Picture& picture) {
  // This task could execute after the decoder has been torn down.
  if (state_ != kUninitialized && client_)
    client_->PictureReady(picture);
}

void DXVAVideoDecodeAccelerator::NotifyInputBuffersDropped() {
  if (!client_ || !pending_output_samples_.empty())
    return;

  for (PendingInputs::iterator it = pending_input_buffers_.begin();
       it != pending_input_buffers_.end(); ++it) {
    LONGLONG input_buffer_id = 0;
    RETURN_ON_HR_FAILURE((*it)->GetSampleTime(&input_buffer_id),
                         "Failed to get buffer id associated with sample",);
    client_->NotifyEndOfBitstreamBuffer(input_buffer_id);
  }
  pending_input_buffers_.clear();
}
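
// Re-submits input samples that were queued while an output sample was still
// pending delivery to the client.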
void DXVAVideoDecodeAccelerator::DecodePendingInputBuffers() {
  RETURN_AND_NOTIFY_ON_FAILURE((state_ != kUninitialized),
      "Invalid state: " << state_, ILLEGAL_STATE,);

  if (pending_input_buffers_.empty() || !pending_output_samples_.empty())
    return;

  PendingInputs pending_input_buffers_copy;
  std::swap(pending_input_buffers_, pending_input_buffers_copy);

  for (PendingInputs::iterator it = pending_input_buffers_copy.begin();
       it != pending_input_buffers_copy.end(); ++it) {
    DecodeInternal(*it);
  }
}

void DXVAVideoDecodeAccelerator::FlushInternal() {
  // The DoDecode function sets the state to kStopped when the decoder returns
  // MF_E_TRANSFORM_NEED_MORE_INPUT.
  // The MFT decoder can buffer up to 30 frames worth of input before returning
  // an output frame. This loop here attempts to retrieve as many output frames
  // as possible from the buffered set.
  while (state_ != kStopped) {
    DoDecode();
    if (!pending_output_samples_.empty())
      return;
  }

  base::MessageLoop::current()->PostTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::NotifyFlushDone,
                 weak_this_factory_.GetWeakPtr()));

  state_ = kNormal;
}
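
// Feeds one input sample to the MFT. If the decoder reports MF_E_NOTACCEPTING,
// buffered output is drained first and the input is retried; if it is still
// not accepted, the sample is queued and retried once the pending output has
// been delivered.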
void DXVAVideoDecodeAccelerator::DecodeInternal(
    const base::win::ScopedComPtr<IMFSample>& sample) {
  DCHECK(CalledOnValidThread());

  if (state_ == kUninitialized)
    return;

  if (!pending_output_samples_.empty() || !pending_input_buffers_.empty()) {
    pending_input_buffers_.push_back(sample);
    return;
  }

  if (!inputs_before_decode_) {
    TRACE_EVENT_BEGIN_ETW("DXVAVideoDecodeAccelerator.Decoding", this, "");
  }
  inputs_before_decode_++;

  HRESULT hr = decoder_->ProcessInput(0, sample, 0);
  // As per MSDN if the decoder returns MF_E_NOTACCEPTING then it means that it
  // has enough data to produce one or more output samples. In this case the
  // recommended options are to
  // 1. Generate new output by calling IMFTransform::ProcessOutput until it
  //    returns MF_E_TRANSFORM_NEED_MORE_INPUT.
  // 2. Flush the input data
  // We implement the first option, i.e. to retrieve the output sample and then
  // process the input again. Failure in either of these steps is treated as a
  // fatal error.
  if (hr == MF_E_NOTACCEPTING) {
    DoDecode();
    RETURN_AND_NOTIFY_ON_FAILURE((state_ == kStopped || state_ == kNormal),
        "Failed to process output. Unexpected decoder state: " << state_,
        PLATFORM_FAILURE,);
    hr = decoder_->ProcessInput(0, sample, 0);
    // If we continue to get the MF_E_NOTACCEPTING error we do the following:
    // 1. Add the input sample to the pending queue.
    // 2. If we don't have any output samples we post the
    //    DecodePendingInputBuffers task to process the pending input samples.
    //    If we have an output sample then the above task is posted when the
    //    output samples are sent to the client.
    // This is because we only support 1 pending output sample at any
    // given time due to the limitation with the Microsoft media foundation
    // decoder where it recycles the output Decoder surfaces.
    if (hr == MF_E_NOTACCEPTING) {
      pending_input_buffers_.push_back(sample);
      if (pending_output_samples_.empty()) {
        base::MessageLoop::current()->PostTask(
            FROM_HERE,
            base::Bind(&DXVAVideoDecodeAccelerator::DecodePendingInputBuffers,
                       weak_this_factory_.GetWeakPtr()));
      }
      return;
    }
  }
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to process input sample",
                                  PLATFORM_FAILURE,);

  DoDecode();

  RETURN_AND_NOTIFY_ON_FAILURE((state_ == kStopped || state_ == kNormal),
      "Failed to process output. Unexpected decoder state: " << state_,
      PLATFORM_FAILURE,);

  LONGLONG input_buffer_id = 0;
  RETURN_ON_HR_FAILURE(sample->GetSampleTime(&input_buffer_id),
      "Failed to get input buffer id associated with sample",);
  // The Microsoft Media foundation decoder internally buffers up to 30 frames
  // before returning a decoded frame. We need to inform the client that this
  // input buffer is processed as it may stop sending us further input.
  // Note: This may break clients which expect every input buffer to be
  // associated with a decoded output buffer.
  // Do some more investigation into whether it is possible to get the MFT
  // decoder to emit an output packet for every input packet.
  // http://code.google.com/p/chromium/issues/detail?id=108121
  // http://code.google.com/p/chromium/issues/detail?id=150925
  base::MessageLoop::current()->PostTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::NotifyInputBufferRead,
                 weak_this_factory_.GetWeakPtr(),
                 input_buffer_id));
}
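
// Called when the decoded surface size no longer matches the current picture
// buffers: dismisses the stale buffers and asks the client for new ones at
// the new dimensions.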
void DXVAVideoDecodeAccelerator::HandleResolutionChanged(int width,
                                                         int height) {
  base::MessageLoop::current()->PostTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::DismissStaleBuffers,
                 weak_this_factory_.GetWeakPtr(),
                 output_picture_buffers_));

  base::MessageLoop::current()->PostTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::RequestPictureBuffers,
                 weak_this_factory_.GetWeakPtr(),
                 width,
                 height));

  output_picture_buffers_.clear();
}

void DXVAVideoDecodeAccelerator::DismissStaleBuffers(
    const OutputBuffers& picture_buffers) {
  OutputBuffers::const_iterator index;

  for (index = picture_buffers.begin();
       index != picture_buffers.end();
       ++index) {
    DVLOG(1) << "Dismissing picture id: " << index->second->id();
    client_->DismissPictureBuffer(index->second->id());
  }
}

}  // namespace content