// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// The bulk of this file is support code; sorry about that. Here's an overview
// to hopefully help readers of this code:
// - RenderingHelper is charged with interacting with X11/{EGL/GLES2,GLX/GL} or
//   Win/EGL.
// - ClientState is an enum for the state of the decode client used by the test.
// - ClientStateNotification is a barrier abstraction that allows the test code
//   to be written sequentially and wait for the decode client to see certain
//   state transitions.
// - GLRenderingVDAClient is a VideoDecodeAccelerator::Client implementation
// - Finally actual TEST cases are at the bottom of this file, using the above
//   infrastructure.

#include <sys/types.h>

// Include gtest.h out of order because <X11/X.h> #define's Bool & None, which
// gtest uses as struct names (inside a namespace). This means that
// #include'ing gtest after anything that pulls in X.h fails to compile.
// This is http://code.google.com/p/googletest/issues/detail?id=371
#include "testing/gtest/include/gtest/gtest.h"

30 #include "base/at_exit.h"
31 #include "base/bind.h"
32 #include "base/callback_helpers.h"
33 #include "base/command_line.h"
34 #include "base/files/file.h"
35 #include "base/files/file_util.h"
36 #include "base/format_macros.h"
38 #include "base/message_loop/message_loop_proxy.h"
39 #include "base/process/process_handle.h"
40 #include "base/stl_util.h"
41 #include "base/strings/string_number_conversions.h"
42 #include "base/strings/string_split.h"
43 #include "base/strings/stringize_macros.h"
44 #include "base/strings/stringprintf.h"
45 #include "base/strings/utf_string_conversions.h"
46 #include "base/synchronization/condition_variable.h"
47 #include "base/synchronization/lock.h"
48 #include "base/synchronization/waitable_event.h"
49 #include "base/thread_task_runner_handle.h"
50 #include "base/threading/thread.h"
51 #include "content/common/gpu/media/fake_video_decode_accelerator.h"
52 #include "content/common/gpu/media/rendering_helper.h"
53 #include "content/common/gpu/media/video_accelerator_unittest_helpers.h"
54 #include "content/public/common/content_switches.h"
55 #include "media/filters/h264_parser.h"
56 #include "ui/gfx/codec/png_codec.h"
57 #include "ui/gl/gl_image.h"
60 #include "base/win/windows_version.h"
61 #include "content/common/gpu/media/dxva_video_decode_accelerator.h"
62 #elif defined(OS_CHROMEOS)
63 #if defined(USE_V4L2_CODEC)
64 #include "content/common/gpu/media/v4l2_device.h"
65 #include "content/common/gpu/media/v4l2_slice_video_decode_accelerator.h"
66 #include "content/common/gpu/media/v4l2_video_decode_accelerator.h"
68 #if defined(ARCH_CPU_X86_FAMILY)
69 #include "content/common/gpu/media/vaapi_video_decode_accelerator.h"
70 #include "content/common/gpu/media/vaapi_wrapper.h"
71 #endif // defined(ARCH_CPU_X86_FAMILY)
73 #error The VideoAccelerator tests are not supported on this platform.
76 #if defined(USE_OZONE)
77 #include "ui/ozone/public/ozone_gpu_test_helper.h"
78 #include "ui/ozone/public/ozone_platform.h"
79 #endif // defined(USE_OZONE)
using media::VideoDecodeAccelerator;

namespace content {

// Values optionally filled in from flags; see main() below.
// The syntax of multiple test videos is:
//   test-video1;test-video2;test-video3
// where only the first video is required and other optional videos would be
// decoded by concurrent decoders.
// The syntax of each test-video is:
//   filename:width:height:numframes:numfragments:minFPSwithRender:minFPSnoRender
// where only the first field is required. Value details:
// - |filename| must be an h264 Annex B (NAL) stream or an IVF VP8/9 stream.
// - |width| and |height| are in pixels.
// - |numframes| is the number of picture frames in the file.
// - |numfragments| is the NALU (h264) or frame (VP8/9) count in the stream.
// - |minFPSwithRender| and |minFPSnoRender| are minimum frames/second speeds
//   expected to be achieved with and without rendering to the screen, resp.
//   (the latter tests just decode speed).
// - |profile| is the media::VideoCodecProfile set during Initialization.
// An empty value for a numeric field means "ignore".
const base::FilePath::CharType* g_test_video_data =
    // FILE_PATH_LITERAL("test-25fps.vp8:320:240:250:250:50:175:11");
    FILE_PATH_LITERAL("test-25fps.h264:320:240:250:258:50:175:1");

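// For example, a hypothetical invocation decoding two streams concurrently
// (the second filename below is illustrative only) could pass:
//   --test_video_data="test-25fps.h264:320:240:250:258:50:175:1;another.vp8"
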
// The file path of the test output log. This is used to communicate the test
// results to CrOS autotests. We can enable the log and specify the filename by
// the "--output_log" switch.
const base::FilePath::CharType* g_output_log = NULL;

// The value is set by the switch "--rendering_fps".
double g_rendering_fps = 60;

// The value is set by the switch "--rendering_warm_up".
int g_rendering_warm_up = 0;

// The value is set by the switch "--num_play_throughs". The video will play
// the specified number of times. Different test cases use different values for
// |num_play_throughs|; a non-zero value of this switch overrides them, and the
// special value "0" means no override.
int g_num_play_throughs = 0;

// Set to 1 by the "--fake_decoder" switch.
int g_fake_decoder = 0;

// Environment to store rendering thread.
class VideoDecodeAcceleratorTestEnvironment;
VideoDecodeAcceleratorTestEnvironment* g_env;

// Magic constants for differentiating the reasons for NotifyResetDone being
// called.
enum ResetPoint {
  // Reset() just after calling Decode() with a fragment containing config info.
  RESET_AFTER_FIRST_CONFIG_INFO = -4,
  START_OF_STREAM_RESET = -3,
  MID_STREAM_RESET = -2,
  END_OF_STREAM_RESET = -1
};

const int kMaxResetAfterFrameNum = 100;
const int kMaxFramesToDelayReuse = 64;
const base::TimeDelta kReuseDelay = base::TimeDelta::FromSeconds(1);
// Simulate WebRTC and call VDA::Decode 30 times per second.
const int kWebRtcDecodeCallsPerSecond = 30;

struct TestVideoFile {
  explicit TestVideoFile(base::FilePath::StringType file_name)
      : file_name(file_name),
        width(-1),
        height(-1),
        num_frames(-1),
        num_fragments(-1),
        min_fps_render(-1),
        min_fps_no_render(-1),
        profile(media::VIDEO_CODEC_PROFILE_UNKNOWN),
        reset_after_frame_num(END_OF_STREAM_RESET) {
  }

  base::FilePath::StringType file_name;
  int width;
  int height;
  int num_frames;
  int num_fragments;
  int min_fps_render;
  int min_fps_no_render;
  media::VideoCodecProfile profile;
  int reset_after_frame_num;
  std::string data_str;
};

const gfx::Size kThumbnailsPageSize(1600, 1200);
const gfx::Size kThumbnailSize(160, 120);
const int kMD5StringLength = 32;

// Read in golden MD5s for the thumbnailed rendering of this video.
void ReadGoldenThumbnailMD5s(const TestVideoFile* video_file,
                             std::vector<std::string>* md5_strings) {
  base::FilePath filepath(video_file->file_name);
  filepath = filepath.AddExtension(FILE_PATH_LITERAL(".md5"));
  std::string all_md5s;
  base::ReadFileToString(filepath, &all_md5s);
  base::SplitString(all_md5s, '\n', md5_strings);
  // Check these are legitimate MD5s.
  for (std::vector<std::string>::iterator md5_string = md5_strings->begin();
       md5_string != md5_strings->end(); ++md5_string) {
    // Ignore the empty string added by SplitString.
    if (!md5_string->length())
      continue;
    // Ignore comment lines.
    if (md5_string->at(0) == '#')
      continue;
    CHECK_EQ(static_cast<int>(md5_string->length()),
             kMD5StringLength) << *md5_string;
    bool hex_only = std::count_if(md5_string->begin(),
                                  md5_string->end(), isxdigit) ==
                    kMD5StringLength;
    CHECK(hex_only) << *md5_string;
  }
  CHECK_GE(md5_strings->size(), 1U) << all_md5s;
}

// State of the GLRenderingVDAClient below. Order matters here as the test
// makes assumptions about it.
enum ClientState {
  CS_CREATED,
  CS_DECODER_SET,
  CS_INITIALIZED,
  CS_FLUSHING,
  CS_FLUSHED,
  CS_RESETTING,
  CS_RESET,
  CS_ERROR,
  CS_DESTROYED,
  CS_MAX,  // Must be last entry.
};

// Initialize the GPU thread for rendering. We only need to setup once
// for all test cases.
class VideoDecodeAcceleratorTestEnvironment : public ::testing::Environment {
 public:
  VideoDecodeAcceleratorTestEnvironment()
      : rendering_thread_("GLRenderingVDAClientThread") {}

  void SetUp() override {
    rendering_thread_.Start();

    base::WaitableEvent done(false, false);
    rendering_thread_.task_runner()->PostTask(
        FROM_HERE, base::Bind(&RenderingHelper::InitializeOneOff, &done));
    done.Wait();

#if defined(USE_OZONE)
    // Need to initialize after the rendering side since the rendering side
    // initializes the "GPU" parts of Ozone.
    // This also needs to be done in the test environment since this shouldn't
    // be initialized multiple times for the same Ozone platform.
    gpu_helper_.Initialize(base::ThreadTaskRunnerHandle::Get(),
                           GetRenderingTaskRunner());
#endif
  }

  void TearDown() override { rendering_thread_.Stop(); }

  scoped_refptr<base::SingleThreadTaskRunner> GetRenderingTaskRunner() const {
    return rendering_thread_.task_runner();
  }

 private:
  base::Thread rendering_thread_;
#if defined(USE_OZONE)
  ui::OzoneGpuTestHelper gpu_helper_;
#endif

  DISALLOW_COPY_AND_ASSIGN(VideoDecodeAcceleratorTestEnvironment);
};

// A helper class used to manage the lifetime of a Texture.
class TextureRef : public base::RefCounted<TextureRef> {
 public:
  TextureRef(uint32 texture_id, const base::Closure& no_longer_needed_cb)
      : texture_id_(texture_id), no_longer_needed_cb_(no_longer_needed_cb) {}

  int32 texture_id() const { return texture_id_; }

 private:
  friend class base::RefCounted<TextureRef>;
  ~TextureRef();

  uint32 texture_id_;
  base::Closure no_longer_needed_cb_;
};

TextureRef::~TextureRef() {
  base::ResetAndReturn(&no_longer_needed_cb_).Run();
}

// Client that can accept callbacks from a VideoDecodeAccelerator and is used
// by the TESTs below.
class GLRenderingVDAClient
    : public VideoDecodeAccelerator::Client,
      public base::SupportsWeakPtr<GLRenderingVDAClient> {
 public:
  // |window_id| is the window ID of the client, used to identify the
  // rendering area in the |rendering_helper|.
  // Doesn't take ownership of |rendering_helper| or |note|, which must outlive
  // this object.
  // |num_play_throughs| indicates how many times to play through the video.
  // |reset_after_frame_num| can be a frame number >=0 indicating a mid-stream
  // Reset() should be done after that frame number is delivered, or
  // END_OF_STREAM_RESET to indicate no mid-stream Reset().
  // |delete_decoder_state| indicates when the underlying decoder should be
  // Destroy()'d and deleted and can take values: N<0: delete after -N Decode()
  // calls have been made, N>=0 means interpret as ClientState.
  // Both |reset_after_frame_num| & |delete_decoder_state| apply only to the
  // last play-through (governed by |num_play_throughs|).
  // |suppress_rendering| indicates whether GL rendering is suppressed or not.
  // After |delay_reuse_after_frame_num| frame has been delivered, the client
  // will start delaying the call to ReusePictureBuffer() for kReuseDelay.
  // |decode_calls_per_second| is the number of VDA::Decode calls per second.
  // If |decode_calls_per_second| > 0, |num_in_flight_decodes| must be 1.
  GLRenderingVDAClient(size_t window_id,
                       RenderingHelper* rendering_helper,
                       ClientStateNotification<ClientState>* note,
                       const std::string& encoded_data,
                       int num_in_flight_decodes,
                       int num_play_throughs,
                       int reset_after_frame_num,
                       int delete_decoder_state,
                       int frame_width,
                       int frame_height,
                       media::VideoCodecProfile profile,
                       bool fake_decoder,
                       bool suppress_rendering,
                       int delay_reuse_after_frame_num,
                       int decode_calls_per_second,
                       bool render_as_thumbnails);
  ~GLRenderingVDAClient() override;
  void CreateAndStartDecoder();

  // VideoDecodeAccelerator::Client implementation.
  // The heart of the Client.
  void ProvidePictureBuffers(uint32 requested_num_of_buffers,
                             const gfx::Size& dimensions,
                             uint32 texture_target) override;
  void DismissPictureBuffer(int32 picture_buffer_id) override;
  void PictureReady(const media::Picture& picture) override;
  // Simple state changes.
  void NotifyEndOfBitstreamBuffer(int32 bitstream_buffer_id) override;
  void NotifyFlushDone() override;
  void NotifyResetDone() override;
  void NotifyError(VideoDecodeAccelerator::Error error) override;

  void OutputFrameDeliveryTimes(base::File* output);

  // Simple getters for inspecting the state of the Client.
  int num_done_bitstream_buffers() { return num_done_bitstream_buffers_; }
  int num_skipped_fragments() { return num_skipped_fragments_; }
  int num_queued_fragments() { return num_queued_fragments_; }
  int num_decoded_frames() { return num_decoded_frames_; }
  double frames_per_second();
  // Return the median of the decode time of all decoded frames.
  base::TimeDelta decode_time_median();
  bool decoder_deleted() { return !decoder_.get(); }

 private:
  typedef std::map<int32, scoped_refptr<TextureRef>> TextureRefMap;

  scoped_ptr<media::VideoDecodeAccelerator> CreateFakeVDA();
  scoped_ptr<media::VideoDecodeAccelerator> CreateDXVAVDA();
  scoped_ptr<media::VideoDecodeAccelerator> CreateV4L2VDA();
  scoped_ptr<media::VideoDecodeAccelerator> CreateV4L2SliceVDA();
  scoped_ptr<media::VideoDecodeAccelerator> CreateVaapiVDA();

  void BindImage(uint32 client_texture_id,
                 uint32 texture_target,
                 scoped_refptr<gfx::GLImage> image);

  void SetState(ClientState new_state);
  void FinishInitialization();
  void ReturnPicture(int32 picture_buffer_id);

  // Delete the associated decoder helper.
  void DeleteDecoder();

  // Compute & return the first encoded bytes (including a start frame) to send
  // to the decoder, starting at |start_pos| and returning one fragment. Skips
  // to the first decodable position.
  std::string GetBytesForFirstFragment(size_t start_pos, size_t* end_pos);
  // Compute & return the encoded bytes of next fragment to send to the decoder
  // (based on |start_pos|).
  std::string GetBytesForNextFragment(size_t start_pos, size_t* end_pos);
  // Helpers for GetBytesForNextFragment above.
  void GetBytesForNextNALU(size_t start_pos, size_t* end_pos);  // For h.264.
  std::string GetBytesForNextFrame(
      size_t start_pos, size_t* end_pos);  // For VP8/9.

  // Request decode of the next fragment in the encoded data.
  void DecodeNextFragment();

  size_t window_id_;
  RenderingHelper* rendering_helper_;
  gfx::Size frame_size_;
  std::string encoded_data_;
  const int num_in_flight_decodes_;
  int outstanding_decodes_;
  size_t encoded_data_next_pos_to_decode_;
  int next_bitstream_buffer_id_;
  ClientStateNotification<ClientState>* note_;
  scoped_ptr<VideoDecodeAccelerator> decoder_;
  scoped_ptr<base::WeakPtrFactory<VideoDecodeAccelerator> >
      weak_decoder_factory_;
  int remaining_play_throughs_;
  int reset_after_frame_num_;
  int delete_decoder_state_;
  ClientState state_;
  int num_skipped_fragments_;
  int num_queued_fragments_;
  int num_decoded_frames_;
  int num_done_bitstream_buffers_;
  base::TimeTicks initialize_done_ticks_;
  media::VideoCodecProfile profile_;
  bool fake_decoder_;
  GLenum texture_target_;
  bool suppress_rendering_;
  std::vector<base::TimeTicks> frame_delivery_times_;
  int delay_reuse_after_frame_num_;
  // A map from bitstream buffer id to the decode start time of the buffer.
  std::map<int, base::TimeTicks> decode_start_time_;
  // The decode time of all decoded frames.
  std::vector<base::TimeDelta> decode_time_;
  // The number of VDA::Decode calls per second. This is to simulate webrtc.
  int decode_calls_per_second_;
  bool render_as_thumbnails_;

  // A map of the textures that are currently active for the decoder, i.e.,
  // have been created via AssignPictureBuffers() and not dismissed via
  // DismissPictureBuffer(). The keys in the map are the IDs of the
  // corresponding picture buffers, and the values are TextureRefs to the
  // textures.
  TextureRefMap active_textures_;

  // A map of the textures that are still pending in the renderer.
  // We check this to ensure all frames are rendered before entering the
  // CS_RESET state.
  TextureRefMap pending_textures_;

  int32 next_picture_buffer_id_;

  DISALLOW_IMPLICIT_CONSTRUCTORS(GLRenderingVDAClient);
};

GLRenderingVDAClient::GLRenderingVDAClient(
    size_t window_id,
    RenderingHelper* rendering_helper,
    ClientStateNotification<ClientState>* note,
    const std::string& encoded_data,
    int num_in_flight_decodes,
    int num_play_throughs,
    int reset_after_frame_num,
    int delete_decoder_state,
    int frame_width,
    int frame_height,
    media::VideoCodecProfile profile,
    bool fake_decoder,
    bool suppress_rendering,
    int delay_reuse_after_frame_num,
    int decode_calls_per_second,
    bool render_as_thumbnails)
    : window_id_(window_id),
      rendering_helper_(rendering_helper),
      frame_size_(frame_width, frame_height),
      encoded_data_(encoded_data),
      num_in_flight_decodes_(num_in_flight_decodes),
      outstanding_decodes_(0),
      encoded_data_next_pos_to_decode_(0),
      next_bitstream_buffer_id_(0),
      note_(note),
      remaining_play_throughs_(num_play_throughs),
      reset_after_frame_num_(reset_after_frame_num),
      delete_decoder_state_(delete_decoder_state),
      state_(CS_CREATED),
      num_skipped_fragments_(0),
      num_queued_fragments_(0),
      num_decoded_frames_(0),
      num_done_bitstream_buffers_(0),
      fake_decoder_(fake_decoder),
      suppress_rendering_(suppress_rendering),
      delay_reuse_after_frame_num_(delay_reuse_after_frame_num),
      decode_calls_per_second_(decode_calls_per_second),
      render_as_thumbnails_(render_as_thumbnails),
      next_picture_buffer_id_(1) {
  CHECK_GT(num_in_flight_decodes, 0);
  CHECK_GT(num_play_throughs, 0);
  // |num_in_flight_decodes_| is unsupported if |decode_calls_per_second_| > 0.
  if (decode_calls_per_second_ > 0)
    CHECK_EQ(1, num_in_flight_decodes_);

  // Default to H264 baseline if no profile provided.
  profile_ = (profile != media::VIDEO_CODEC_PROFILE_UNKNOWN
                  ? profile
                  : media::H264PROFILE_BASELINE);
}

GLRenderingVDAClient::~GLRenderingVDAClient() {
  DeleteDecoder();  // Clean up in case of expected error.
  CHECK(decoder_deleted());
  SetState(CS_DESTROYED);
}

static bool DoNothingReturnTrue() { return true; }

scoped_ptr<media::VideoDecodeAccelerator>
GLRenderingVDAClient::CreateFakeVDA() {
  scoped_ptr<media::VideoDecodeAccelerator> decoder;
  if (fake_decoder_) {
    decoder.reset(new FakeVideoDecodeAccelerator(
        static_cast<gfx::GLContext*>(rendering_helper_->GetGLContextHandle()),
        base::Bind(&DoNothingReturnTrue)));
  }
  return decoder.Pass();
}

scoped_ptr<media::VideoDecodeAccelerator>
GLRenderingVDAClient::CreateDXVAVDA() {
  scoped_ptr<media::VideoDecodeAccelerator> decoder;
#if defined(OS_WIN)
  if (base::win::GetVersion() >= base::win::VERSION_WIN7)
    decoder.reset(
        new DXVAVideoDecodeAccelerator(
            base::Bind(&DoNothingReturnTrue),
            rendering_helper_->GetGLContext().get()));
#endif
  return decoder.Pass();
}

scoped_ptr<media::VideoDecodeAccelerator>
GLRenderingVDAClient::CreateV4L2VDA() {
  scoped_ptr<media::VideoDecodeAccelerator> decoder;
#if defined(OS_CHROMEOS) && defined(USE_V4L2_CODEC)
  scoped_refptr<V4L2Device> device = V4L2Device::Create(V4L2Device::kDecoder);
  if (device.get()) {
    base::WeakPtr<VideoDecodeAccelerator::Client> weak_client = AsWeakPtr();
    decoder.reset(new V4L2VideoDecodeAccelerator(
        static_cast<EGLDisplay>(rendering_helper_->GetGLDisplay()),
        static_cast<EGLContext>(rendering_helper_->GetGLContextHandle()),
        weak_client,
        base::Bind(&DoNothingReturnTrue),
        device,
        base::MessageLoopProxy::current()));
  }
#endif
  return decoder.Pass();
}

scoped_ptr<media::VideoDecodeAccelerator>
GLRenderingVDAClient::CreateV4L2SliceVDA() {
  scoped_ptr<media::VideoDecodeAccelerator> decoder;
#if defined(OS_CHROMEOS) && defined(USE_V4L2_CODEC)
  scoped_refptr<V4L2Device> device = V4L2Device::Create(V4L2Device::kDecoder);
  if (device.get()) {
    base::WeakPtr<VideoDecodeAccelerator::Client> weak_client = AsWeakPtr();
    decoder.reset(new V4L2SliceVideoDecodeAccelerator(
        device,
        static_cast<EGLDisplay>(rendering_helper_->GetGLDisplay()),
        static_cast<EGLContext>(rendering_helper_->GetGLContextHandle()),
        weak_client,
        base::Bind(&DoNothingReturnTrue),
        base::MessageLoopProxy::current()));
  }
#endif
  return decoder.Pass();
}

scoped_ptr<media::VideoDecodeAccelerator>
GLRenderingVDAClient::CreateVaapiVDA() {
  scoped_ptr<media::VideoDecodeAccelerator> decoder;
#if defined(OS_CHROMEOS) && defined(ARCH_CPU_X86_FAMILY)
  decoder.reset(new VaapiVideoDecodeAccelerator(
      base::Bind(&DoNothingReturnTrue),
      base::Bind(&GLRenderingVDAClient::BindImage, base::Unretained(this))));
#endif
  return decoder.Pass();
}

void GLRenderingVDAClient::BindImage(uint32 client_texture_id,
                                     uint32 texture_target,
                                     scoped_refptr<gfx::GLImage> image) {
}

void GLRenderingVDAClient::CreateAndStartDecoder() {
  CHECK(decoder_deleted());
  CHECK(!decoder_.get());

  VideoDecodeAccelerator::Client* client = this;

  scoped_ptr<media::VideoDecodeAccelerator> decoders[] = {
      CreateFakeVDA(),
      CreateDXVAVDA(),
      CreateV4L2VDA(),
      CreateV4L2SliceVDA(),
      CreateVaapiVDA()};

  for (size_t i = 0; i < arraysize(decoders); ++i) {
    if (!decoders[i].get())
      continue;
    decoder_ = decoders[i].Pass();
    weak_decoder_factory_.reset(
        new base::WeakPtrFactory<VideoDecodeAccelerator>(decoder_.get()));
    if (decoder_->Initialize(profile_, client)) {
      SetState(CS_DECODER_SET);
      FinishInitialization();
      return;
    }
  }
  // All decoders failed to initialize.
  LOG(ERROR) << "VideoDecodeAccelerator::Initialize() failed";
  SetState(CS_ERROR);
}

void GLRenderingVDAClient::ProvidePictureBuffers(
    uint32 requested_num_of_buffers,
    const gfx::Size& dimensions,
    uint32 texture_target) {
  if (decoder_deleted())
    return;
  std::vector<media::PictureBuffer> buffers;

  texture_target_ = texture_target;
  for (uint32 i = 0; i < requested_num_of_buffers; ++i) {
    uint32 texture_id;
    base::WaitableEvent done(false, false);
    rendering_helper_->CreateTexture(
        texture_target_, &texture_id, dimensions, &done);
    done.Wait();

    int32 picture_buffer_id = next_picture_buffer_id_++;
    CHECK(active_textures_
              .insert(std::make_pair(
                  picture_buffer_id,
                  new TextureRef(texture_id,
                                 base::Bind(&RenderingHelper::DeleteTexture,
                                            base::Unretained(rendering_helper_),
                                            texture_id))))
              .second);

    buffers.push_back(
        media::PictureBuffer(picture_buffer_id, dimensions, texture_id));
  }
  decoder_->AssignPictureBuffers(buffers);
}

void GLRenderingVDAClient::DismissPictureBuffer(int32 picture_buffer_id) {
  CHECK_EQ(1U, active_textures_.erase(picture_buffer_id));
}

void GLRenderingVDAClient::PictureReady(const media::Picture& picture) {
  // We shouldn't be getting pictures delivered after Reset has completed.
  CHECK_LT(state_, CS_RESET);

  if (decoder_deleted())
    return;

  base::TimeTicks now = base::TimeTicks::Now();

  frame_delivery_times_.push_back(now);

  // Save the decode time of this picture.
  std::map<int, base::TimeTicks>::iterator it =
      decode_start_time_.find(picture.bitstream_buffer_id());
  ASSERT_NE(decode_start_time_.end(), it);
  decode_time_.push_back(now - it->second);
  decode_start_time_.erase(it);

  CHECK_LE(picture.bitstream_buffer_id(), next_bitstream_buffer_id_);
  ++num_decoded_frames_;

  // Mid-stream reset applies only to the last play-through per constructor
  // comment.
  if (remaining_play_throughs_ == 1 &&
      reset_after_frame_num_ == num_decoded_frames_) {
    reset_after_frame_num_ = MID_STREAM_RESET;
    decoder_->Reset();
    // Re-start decoding from the beginning of the stream to avoid needing to
    // know how to find I-frames and so on in this test.
    encoded_data_next_pos_to_decode_ = 0;
  }

  TextureRefMap::iterator texture_it =
      active_textures_.find(picture.picture_buffer_id());
  ASSERT_NE(active_textures_.end(), texture_it);

  scoped_refptr<VideoFrameTexture> video_frame = new VideoFrameTexture(
      texture_target_, texture_it->second->texture_id(),
      base::Bind(&GLRenderingVDAClient::ReturnPicture, AsWeakPtr(),
                 picture.picture_buffer_id()));
  ASSERT_TRUE(pending_textures_.insert(*texture_it).second);

  if (render_as_thumbnails_) {
    rendering_helper_->RenderThumbnail(video_frame->texture_target(),
                                       video_frame->texture_id());
  } else if (!suppress_rendering_) {
    rendering_helper_->QueueVideoFrame(window_id_, video_frame);
  }
}

void GLRenderingVDAClient::ReturnPicture(int32 picture_buffer_id) {
  if (decoder_deleted())
    return;
  CHECK_EQ(1U, pending_textures_.erase(picture_buffer_id));

  if (pending_textures_.empty() && state_ == CS_RESETTING) {
    SetState(CS_RESET);
    DeleteDecoder();
    return;
  }

  if (num_decoded_frames_ > delay_reuse_after_frame_num_) {
    base::MessageLoop::current()->PostDelayedTask(
        FROM_HERE,
        base::Bind(&VideoDecodeAccelerator::ReusePictureBuffer,
                   weak_decoder_factory_->GetWeakPtr(),
                   picture_buffer_id),
        kReuseDelay);
  } else {
    decoder_->ReusePictureBuffer(picture_buffer_id);
  }
}

void GLRenderingVDAClient::NotifyEndOfBitstreamBuffer(
    int32 bitstream_buffer_id) {
  // TODO(fischman): this test currently relies on this notification to make
  // forward progress during a Reset(). But the VDA::Reset() API doesn't
  // guarantee this, so stop relying on it (and remove the notifications from
  // VaapiVideoDecodeAccelerator::FinishReset()).
  ++num_done_bitstream_buffers_;
  --outstanding_decodes_;
  if (decode_calls_per_second_ == 0)
    DecodeNextFragment();
}

void GLRenderingVDAClient::NotifyFlushDone() {
  if (decoder_deleted())
    return;
  SetState(CS_FLUSHED);
  --remaining_play_throughs_;
  DCHECK_GE(remaining_play_throughs_, 0);
  if (decoder_deleted())
    return;
  decoder_->Reset();
  SetState(CS_RESETTING);
}

void GLRenderingVDAClient::NotifyResetDone() {
  if (decoder_deleted())
    return;

  if (reset_after_frame_num_ == MID_STREAM_RESET) {
    reset_after_frame_num_ = END_OF_STREAM_RESET;
    DecodeNextFragment();
    return;
  } else if (reset_after_frame_num_ == START_OF_STREAM_RESET) {
    reset_after_frame_num_ = END_OF_STREAM_RESET;
    for (int i = 0; i < num_in_flight_decodes_; ++i)
      DecodeNextFragment();
    return;
  }

  if (remaining_play_throughs_) {
    encoded_data_next_pos_to_decode_ = 0;
    FinishInitialization();
    return;
  }

  rendering_helper_->Flush(window_id_);

  if (pending_textures_.empty()) {
    SetState(CS_RESET);
    DeleteDecoder();
  }
}

void GLRenderingVDAClient::NotifyError(VideoDecodeAccelerator::Error error) {
  SetState(CS_ERROR);
}

void GLRenderingVDAClient::OutputFrameDeliveryTimes(base::File* output) {
  std::string s = base::StringPrintf("frame count: %" PRIuS "\n",
                                     frame_delivery_times_.size());
  output->WriteAtCurrentPos(s.data(), s.length());
  base::TimeTicks t0 = initialize_done_ticks_;
  for (size_t i = 0; i < frame_delivery_times_.size(); ++i) {
    s = base::StringPrintf("frame %04" PRIuS ": %" PRId64 " us\n",
                           i,
                           (frame_delivery_times_[i] - t0).InMicroseconds());
    t0 = frame_delivery_times_[i];
    output->WriteAtCurrentPos(s.data(), s.length());
  }
}

static bool LookingAtNAL(const std::string& encoded, size_t pos) {
  return encoded[pos] == 0 && encoded[pos + 1] == 0 &&
         encoded[pos + 2] == 0 && encoded[pos + 3] == 1;
}

void GLRenderingVDAClient::SetState(ClientState new_state) {
  note_->Notify(new_state);
  state_ = new_state;
  if (!remaining_play_throughs_ && new_state == delete_decoder_state_) {
    CHECK(!decoder_deleted());
    DeleteDecoder();
  }
}

void GLRenderingVDAClient::FinishInitialization() {
  SetState(CS_INITIALIZED);
  initialize_done_ticks_ = base::TimeTicks::Now();

  if (reset_after_frame_num_ == START_OF_STREAM_RESET) {
    reset_after_frame_num_ = MID_STREAM_RESET;
    decoder_->Reset();
    return;
  }

  for (int i = 0; i < num_in_flight_decodes_; ++i)
    DecodeNextFragment();
  DCHECK_EQ(outstanding_decodes_, num_in_flight_decodes_);
}

void GLRenderingVDAClient::DeleteDecoder() {
  if (decoder_deleted())
    return;
  weak_decoder_factory_.reset();
  decoder_.reset();
  STLClearObject(&encoded_data_);
  active_textures_.clear();

  // Cascade through the rest of the states to simplify test code below.
  for (int i = state_ + 1; i < CS_MAX; ++i)
    SetState(static_cast<ClientState>(i));
}

std::string GLRenderingVDAClient::GetBytesForFirstFragment(
    size_t start_pos, size_t* end_pos) {
  if (profile_ < media::H264PROFILE_MAX) {
    *end_pos = start_pos;
    while (*end_pos + 4 < encoded_data_.size()) {
      if ((encoded_data_[*end_pos + 4] & 0x1f) == 0x7)  // SPS start frame
        return GetBytesForNextFragment(*end_pos, end_pos);
      GetBytesForNextNALU(*end_pos, end_pos);
      num_skipped_fragments_++;
    }
    *end_pos = start_pos;
    return std::string();
  }
  DCHECK_LE(profile_, media::VP9PROFILE_MAX);
  return GetBytesForNextFragment(start_pos, end_pos);
}

std::string GLRenderingVDAClient::GetBytesForNextFragment(
    size_t start_pos, size_t* end_pos) {
  if (profile_ < media::H264PROFILE_MAX) {
    *end_pos = start_pos;
    GetBytesForNextNALU(*end_pos, end_pos);
    if (start_pos != *end_pos) {
      num_queued_fragments_++;
    }
    return encoded_data_.substr(start_pos, *end_pos - start_pos);
  }
  DCHECK_LE(profile_, media::VP9PROFILE_MAX);
  return GetBytesForNextFrame(start_pos, end_pos);
}

void GLRenderingVDAClient::GetBytesForNextNALU(
    size_t start_pos, size_t* end_pos) {
  *end_pos = start_pos;
  if (*end_pos + 4 > encoded_data_.size())
    return;
  CHECK(LookingAtNAL(encoded_data_, start_pos));
  *end_pos += 4;
  while (*end_pos + 4 <= encoded_data_.size() &&
         !LookingAtNAL(encoded_data_, *end_pos)) {
    ++*end_pos;
  }
  if (*end_pos + 3 >= encoded_data_.size())
    *end_pos = encoded_data_.size();
}

std::string GLRenderingVDAClient::GetBytesForNextFrame(
    size_t start_pos, size_t* end_pos) {
  // Helpful description: http://wiki.multimedia.cx/index.php?title=IVF
  std::string bytes;
  if (start_pos == 0)
    start_pos = 32;  // Skip IVF header.
  *end_pos = start_pos;
  uint32 frame_size = *reinterpret_cast<uint32*>(&encoded_data_[*end_pos]);
  *end_pos += 12;  // Skip frame header.
  bytes.append(encoded_data_.substr(*end_pos, frame_size));
  *end_pos += frame_size;
  num_queued_fragments_++;
  return bytes;
}

static bool FragmentHasConfigInfo(const uint8* data, size_t size,
                                  media::VideoCodecProfile profile) {
  if (profile >= media::H264PROFILE_MIN &&
      profile <= media::H264PROFILE_MAX) {
    media::H264Parser parser;
    parser.SetStream(data, size);
    media::H264NALU nalu;
    media::H264Parser::Result result = parser.AdvanceToNextNALU(&nalu);
    if (result != media::H264Parser::kOk) {
      // Let the VDA figure out there's something wrong with the stream.
      return false;
    }
    return nalu.nal_unit_type == media::H264NALU::kSPS;
  } else if (profile >= media::VP8PROFILE_MIN &&
             profile <= media::VP9PROFILE_MAX) {
    return (size > 0 && !(data[0] & 0x01));
  }
  // Shouldn't happen at this point.
  LOG(FATAL) << "Invalid profile: " << profile;
  return false;
}

void GLRenderingVDAClient::DecodeNextFragment() {
  if (decoder_deleted())
    return;
  if (encoded_data_next_pos_to_decode_ == encoded_data_.size()) {
    if (outstanding_decodes_ == 0) {
      decoder_->Flush();
      SetState(CS_FLUSHING);
    }
    return;
  }

  size_t end_pos;
  std::string next_fragment_bytes;
  if (encoded_data_next_pos_to_decode_ == 0) {
    next_fragment_bytes = GetBytesForFirstFragment(0, &end_pos);
  } else {
    next_fragment_bytes =
        GetBytesForNextFragment(encoded_data_next_pos_to_decode_, &end_pos);
  }
  size_t next_fragment_size = next_fragment_bytes.size();

  // Call Reset() just after Decode() if the fragment contains config info.
  // This tests how the VDA behaves when it gets a reset request before it has
  // a chance to ProvidePictureBuffers().
  bool reset_here = false;
  if (reset_after_frame_num_ == RESET_AFTER_FIRST_CONFIG_INFO) {
    reset_here = FragmentHasConfigInfo(
        reinterpret_cast<const uint8*>(next_fragment_bytes.data()),
        next_fragment_size,
        profile_);
    if (reset_here)
      reset_after_frame_num_ = END_OF_STREAM_RESET;
  }

  // Populate the shared memory buffer w/ the fragment, duplicate its handle,
  // and hand it off to the decoder.
  base::SharedMemory shm;
  CHECK(shm.CreateAndMapAnonymous(next_fragment_size));
  memcpy(shm.memory(), next_fragment_bytes.data(), next_fragment_size);
  base::SharedMemoryHandle dup_handle;
  CHECK(shm.ShareToProcess(base::GetCurrentProcessHandle(), &dup_handle));
  media::BitstreamBuffer bitstream_buffer(
      next_bitstream_buffer_id_, dup_handle, next_fragment_size);
  decode_start_time_[next_bitstream_buffer_id_] = base::TimeTicks::Now();
  // Mask against 30 bits, to avoid (undefined) wraparound on signed integer.
  next_bitstream_buffer_id_ = (next_bitstream_buffer_id_ + 1) & 0x3FFFFFFF;
  decoder_->Decode(bitstream_buffer);
  ++outstanding_decodes_;
  if (!remaining_play_throughs_ &&
      -delete_decoder_state_ == next_bitstream_buffer_id_) {
    DeleteDecoder();
  }

  if (reset_here) {
    reset_after_frame_num_ = MID_STREAM_RESET;
    decoder_->Reset();
    // Restart from the beginning to re-Decode() the SPS we just sent.
    encoded_data_next_pos_to_decode_ = 0;
  } else {
    encoded_data_next_pos_to_decode_ = end_pos;
  }

  if (decode_calls_per_second_ > 0) {
    base::MessageLoop::current()->PostDelayedTask(
        FROM_HERE,
        base::Bind(&GLRenderingVDAClient::DecodeNextFragment, AsWeakPtr()),
        base::TimeDelta::FromSeconds(1) / decode_calls_per_second_);
  }
}

double GLRenderingVDAClient::frames_per_second() {
  base::TimeDelta delta = frame_delivery_times_.back() - initialize_done_ticks_;
  return num_decoded_frames_ / delta.InSecondsF();
}

base::TimeDelta GLRenderingVDAClient::decode_time_median() {
  if (decode_time_.size() == 0)
    return base::TimeDelta();
  std::sort(decode_time_.begin(), decode_time_.end());
  int index = decode_time_.size() / 2;
  if (decode_time_.size() % 2 != 0)
    return decode_time_[index];

  return (decode_time_[index] + decode_time_[index - 1]) / 2;
}

class VideoDecodeAcceleratorTest : public ::testing::Test {
 protected:
  VideoDecodeAcceleratorTest();
  void SetUp() override;
  void TearDown() override;

  // Parse |data| into its constituent parts, set the various output fields
  // accordingly, and read in video stream. CHECK-fails on unexpected or
  // missing required data. Unspecified optional fields are set to -1.
  void ParseAndReadTestVideoData(base::FilePath::StringType data,
                                 std::vector<TestVideoFile*>* test_video_files);

  // Update the parameters of |test_video_files| according to
  // |num_concurrent_decoders| and |reset_point|. Ex: the expected number of
  // frames should be adjusted if decoder is reset in the middle of the stream.
  void UpdateTestVideoFileParams(
      size_t num_concurrent_decoders,
      int reset_point,
      std::vector<TestVideoFile*>* test_video_files);

  void InitializeRenderingHelper(const RenderingHelperParams& helper_params);
  void CreateAndStartDecoder(GLRenderingVDAClient* client,
                             ClientStateNotification<ClientState>* note);
  void WaitUntilDecodeFinish(ClientStateNotification<ClientState>* note);
  void WaitUntilIdle();
  void OutputLogFile(const base::FilePath::CharType* log_path,
                     const std::string& content);

  std::vector<TestVideoFile*> test_video_files_;
  RenderingHelper rendering_helper_;

 private:
  // Required for Thread to work. Not used otherwise.
  base::ShadowingAtExitManager at_exit_manager_;

  DISALLOW_COPY_AND_ASSIGN(VideoDecodeAcceleratorTest);
};

VideoDecodeAcceleratorTest::VideoDecodeAcceleratorTest() {
}

void VideoDecodeAcceleratorTest::SetUp() {
  ParseAndReadTestVideoData(g_test_video_data, &test_video_files_);
}

void VideoDecodeAcceleratorTest::TearDown() {
  g_env->GetRenderingTaskRunner()->PostTask(
      FROM_HERE, base::Bind(&STLDeleteElements<std::vector<TestVideoFile*>>,
                            &test_video_files_));

  base::WaitableEvent done(false, false);
  g_env->GetRenderingTaskRunner()->PostTask(
      FROM_HERE, base::Bind(&RenderingHelper::UnInitialize,
                            base::Unretained(&rendering_helper_), &done));
  done.Wait();

  rendering_helper_.TearDown();
}

void VideoDecodeAcceleratorTest::ParseAndReadTestVideoData(
    base::FilePath::StringType data,
    std::vector<TestVideoFile*>* test_video_files) {
  std::vector<base::FilePath::StringType> entries;
  base::SplitString(data, ';', &entries);
  CHECK_GE(entries.size(), 1U) << data;
  for (size_t index = 0; index < entries.size(); ++index) {
    std::vector<base::FilePath::StringType> fields;
    base::SplitString(entries[index], ':', &fields);
    CHECK_GE(fields.size(), 1U) << entries[index];
    CHECK_LE(fields.size(), 8U) << entries[index];
    TestVideoFile* video_file = new TestVideoFile(fields[0]);
    if (!fields[1].empty())
      CHECK(base::StringToInt(fields[1], &video_file->width));
    if (!fields[2].empty())
      CHECK(base::StringToInt(fields[2], &video_file->height));
    if (!fields[3].empty())
      CHECK(base::StringToInt(fields[3], &video_file->num_frames));
    if (!fields[4].empty())
      CHECK(base::StringToInt(fields[4], &video_file->num_fragments));
    if (!fields[5].empty())
      CHECK(base::StringToInt(fields[5], &video_file->min_fps_render));
    if (!fields[6].empty())
      CHECK(base::StringToInt(fields[6], &video_file->min_fps_no_render));
    int profile = -1;
    if (!fields[7].empty())
      CHECK(base::StringToInt(fields[7], &profile));
    video_file->profile = static_cast<media::VideoCodecProfile>(profile);

    // Read in the video data.
    base::FilePath filepath(video_file->file_name);
    CHECK(base::ReadFileToString(filepath, &video_file->data_str))
        << "test_video_file: " << filepath.MaybeAsASCII();

    test_video_files->push_back(video_file);
  }
}

void VideoDecodeAcceleratorTest::UpdateTestVideoFileParams(
    size_t num_concurrent_decoders,
    int reset_point,
    std::vector<TestVideoFile*>* test_video_files) {
  for (size_t i = 0; i < test_video_files->size(); i++) {
    TestVideoFile* video_file = (*test_video_files)[i];
    if (reset_point == MID_STREAM_RESET) {
      // Reset should not go beyond the last frame;
      // reset in the middle of the stream for short videos.
      video_file->reset_after_frame_num = kMaxResetAfterFrameNum;
      if (video_file->num_frames <= video_file->reset_after_frame_num)
        video_file->reset_after_frame_num = video_file->num_frames / 2;

      video_file->num_frames += video_file->reset_after_frame_num;
    } else {
      video_file->reset_after_frame_num = reset_point;
    }

    if (video_file->min_fps_render != -1)
      video_file->min_fps_render /= num_concurrent_decoders;
    if (video_file->min_fps_no_render != -1)
      video_file->min_fps_no_render /= num_concurrent_decoders;
  }
}

void VideoDecodeAcceleratorTest::InitializeRenderingHelper(
    const RenderingHelperParams& helper_params) {
  rendering_helper_.Setup();

  base::WaitableEvent done(false, false);
  g_env->GetRenderingTaskRunner()->PostTask(
      FROM_HERE,
      base::Bind(&RenderingHelper::Initialize,
                 base::Unretained(&rendering_helper_), helper_params, &done));
  done.Wait();
}

void VideoDecodeAcceleratorTest::CreateAndStartDecoder(
    GLRenderingVDAClient* client,
    ClientStateNotification<ClientState>* note) {
  g_env->GetRenderingTaskRunner()->PostTask(
      FROM_HERE, base::Bind(&GLRenderingVDAClient::CreateAndStartDecoder,
                            base::Unretained(client)));
  ASSERT_EQ(note->Wait(), CS_DECODER_SET);
}

void VideoDecodeAcceleratorTest::WaitUntilDecodeFinish(
    ClientStateNotification<ClientState>* note) {
  for (int i = 0; i < CS_MAX; i++) {
    if (note->Wait() == CS_DESTROYED)
      break;
  }
}

void VideoDecodeAcceleratorTest::WaitUntilIdle() {
  base::WaitableEvent done(false, false);
  g_env->GetRenderingTaskRunner()->PostTask(
      FROM_HERE,
      base::Bind(&base::WaitableEvent::Signal, base::Unretained(&done)));
  done.Wait();
}

void VideoDecodeAcceleratorTest::OutputLogFile(
    const base::FilePath::CharType* log_path,
    const std::string& content) {
  base::File file(base::FilePath(log_path),
                  base::File::FLAG_CREATE_ALWAYS | base::File::FLAG_WRITE);
  file.WriteAtCurrentPos(content.data(), content.length());
}

// Test parameters:
// - Number of concurrent decoders. The value takes effect when there is only
//   one input stream; otherwise, one decoder per input stream will be
//   instantiated.
// - Number of concurrent in-flight Decode() calls per decoder.
// - Number of play-throughs.
// - reset_after_frame_num: see GLRenderingVDAClient ctor.
// - delete_decoder_phase: see GLRenderingVDAClient ctor.
// - whether to test slow rendering by delaying ReusePictureBuffer().
// - whether the video frames are rendered as thumbnails.
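// For example, the ReplayAfterEOS instantiation below passes
// MakeTuple(1, 1, 4, END_OF_STREAM_RESET, CS_RESET, false, false): one
// decoder, one in-flight Decode(), four play-throughs, reset only at end of
// stream, delete the decoder at CS_RESET, no delayed ReusePictureBuffer(),
// and no thumbnail rendering.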
class VideoDecodeAcceleratorParamTest
    : public VideoDecodeAcceleratorTest,
      public ::testing::WithParamInterface<
          Tuple<int, int, int, ResetPoint, ClientState, bool, bool> > {
};

// Helper so that gtest failures emit a more readable version of the tuple than
// its byte representation.
::std::ostream& operator<<(
    ::std::ostream& os,
    const Tuple<int, int, int, ResetPoint, ClientState, bool, bool>& t) {
  return os << get<0>(t) << ", " << get<1>(t) << ", " << get<2>(t) << ", "
            << get<3>(t) << ", " << get<4>(t) << ", " << get<5>(t) << ", "
            << get<6>(t);
}

// Wait for |note| to report a state and if it's not |expected_state| then
// assert |client| has deleted its decoder.
static void AssertWaitForStateOrDeleted(
    ClientStateNotification<ClientState>* note,
    GLRenderingVDAClient* client,
    ClientState expected_state) {
  ClientState state = note->Wait();
  if (state == expected_state) return;
  ASSERT_TRUE(client->decoder_deleted())
      << "Decoder not deleted but Wait() returned " << state
      << ", instead of " << expected_state;
}

// We assert a minimal number of concurrent decoders we expect to succeed.
// Different platforms can support more concurrent decoders, so we don't assert
// failure above this.
enum { kMinSupportedNumConcurrentDecoders = 3 };

// Test the most straightforward case possible: data is decoded from a single
// chunk and rendered to the screen.
TEST_P(VideoDecodeAcceleratorParamTest, TestSimpleDecode) {
  size_t num_concurrent_decoders = get<0>(GetParam());
  const size_t num_in_flight_decodes = get<1>(GetParam());
  int num_play_throughs = get<2>(GetParam());
  const int reset_point = get<3>(GetParam());
  const int delete_decoder_state = get<4>(GetParam());
  bool test_reuse_delay = get<5>(GetParam());
  const bool render_as_thumbnails = get<6>(GetParam());

  if (test_video_files_.size() > 1)
    num_concurrent_decoders = test_video_files_.size();

  if (g_num_play_throughs > 0)
    num_play_throughs = g_num_play_throughs;

  UpdateTestVideoFileParams(
      num_concurrent_decoders, reset_point, &test_video_files_);

  // Suppress GL rendering for all tests when the "--rendering_fps" is 0.
  const bool suppress_rendering = g_rendering_fps == 0;

  std::vector<ClientStateNotification<ClientState>*>
      notes(num_concurrent_decoders, NULL);
  std::vector<GLRenderingVDAClient*> clients(num_concurrent_decoders, NULL);

  RenderingHelperParams helper_params;
  helper_params.rendering_fps = g_rendering_fps;
  helper_params.warm_up_iterations = g_rendering_warm_up;
  helper_params.render_as_thumbnails = render_as_thumbnails;
  if (render_as_thumbnails) {
    // Only one decoder is supported with thumbnail rendering.
    CHECK_EQ(num_concurrent_decoders, 1U);
    helper_params.thumbnails_page_size = kThumbnailsPageSize;
    helper_params.thumbnail_size = kThumbnailSize;
  }

  // First kick off all the decoders.
  for (size_t index = 0; index < num_concurrent_decoders; ++index) {
    TestVideoFile* video_file =
        test_video_files_[index % test_video_files_.size()];
    ClientStateNotification<ClientState>* note =
        new ClientStateNotification<ClientState>();
    notes[index] = note;

    int delay_after_frame_num = std::numeric_limits<int>::max();
    if (test_reuse_delay &&
        kMaxFramesToDelayReuse * 2 < video_file->num_frames) {
      delay_after_frame_num = video_file->num_frames - kMaxFramesToDelayReuse;
    }

    GLRenderingVDAClient* client =
        new GLRenderingVDAClient(index,
                                 &rendering_helper_,
                                 note,
                                 video_file->data_str,
                                 num_in_flight_decodes,
                                 num_play_throughs,
                                 video_file->reset_after_frame_num,
                                 delete_decoder_state,
                                 video_file->width,
                                 video_file->height,
                                 video_file->profile,
                                 g_fake_decoder,
                                 suppress_rendering,
                                 delay_after_frame_num,
                                 0 /* decode_calls_per_second */,
                                 render_as_thumbnails);

    clients[index] = client;
    helper_params.window_sizes.push_back(
        render_as_thumbnails
            ? kThumbnailsPageSize
            : gfx::Size(video_file->width, video_file->height));
  }

  InitializeRenderingHelper(helper_params);

  for (size_t index = 0; index < num_concurrent_decoders; ++index) {
    CreateAndStartDecoder(clients[index], notes[index]);
  }

  // Then wait for all the decodes to finish.
  // Only check performance & correctness later if we play through only once.
  bool skip_performance_and_correctness_checks = num_play_throughs > 1;
  for (size_t i = 0; i < num_concurrent_decoders; ++i) {
    ClientStateNotification<ClientState>* note = notes[i];
    ClientState state = note->Wait();
    if (state != CS_INITIALIZED) {
      skip_performance_and_correctness_checks = true;
      // We expect initialization to fail only when more than the supported
      // number of decoders is instantiated. Assert here that something else
      // didn't trigger failure.
      ASSERT_GT(num_concurrent_decoders,
                static_cast<size_t>(kMinSupportedNumConcurrentDecoders));
      continue;
    }
    ASSERT_EQ(state, CS_INITIALIZED);
    for (int n = 0; n < num_play_throughs; ++n) {
      // For play-throughs other than the first, we expect initialization to
      // succeed unconditionally.
      if (n > 0)
        ASSERT_NO_FATAL_FAILURE(
            AssertWaitForStateOrDeleted(note, clients[i], CS_INITIALIZED));
      // InitializeDone kicks off decoding inside the client, so we just need to
      // wait for Flush.
      ASSERT_NO_FATAL_FAILURE(
          AssertWaitForStateOrDeleted(note, clients[i], CS_FLUSHING));
      ASSERT_NO_FATAL_FAILURE(
          AssertWaitForStateOrDeleted(note, clients[i], CS_FLUSHED));
      // FlushDone requests Reset().
      ASSERT_NO_FATAL_FAILURE(
          AssertWaitForStateOrDeleted(note, clients[i], CS_RESETTING));
    }
    ASSERT_NO_FATAL_FAILURE(
        AssertWaitForStateOrDeleted(note, clients[i], CS_RESET));
    // ResetDone requests Destroy().
    ASSERT_NO_FATAL_FAILURE(
        AssertWaitForStateOrDeleted(note, clients[i], CS_DESTROYED));
  }

  // Finally assert that decoding went as expected.
  for (size_t i = 0; i < num_concurrent_decoders &&
           !skip_performance_and_correctness_checks; ++i) {
    // We can only make performance/correctness assertions if the decoder was
    // allowed to finish.
    if (delete_decoder_state < CS_FLUSHED)
      continue;
    GLRenderingVDAClient* client = clients[i];
    TestVideoFile* video_file = test_video_files_[i % test_video_files_.size()];
    if (video_file->num_frames > 0) {
      // Expect the decoded frames may be more than the video frames as frames
      // could still be returned until resetting done.
      if (video_file->reset_after_frame_num > 0)
        EXPECT_GE(client->num_decoded_frames(), video_file->num_frames);
      else
        EXPECT_EQ(client->num_decoded_frames(), video_file->num_frames);
    }
    if (reset_point == END_OF_STREAM_RESET) {
      EXPECT_EQ(video_file->num_fragments, client->num_skipped_fragments() +
                client->num_queued_fragments());
      EXPECT_EQ(client->num_done_bitstream_buffers(),
                client->num_queued_fragments());
    }
    LOG(INFO) << "Decoder " << i << " fps: " << client->frames_per_second();
    if (!render_as_thumbnails) {
      int min_fps = suppress_rendering ?
          video_file->min_fps_no_render : video_file->min_fps_render;
      if (min_fps > 0 && !test_reuse_delay)
        EXPECT_GT(client->frames_per_second(), min_fps);
    }
  }

  if (render_as_thumbnails) {
    std::vector<unsigned char> rgb;
    bool alpha_solid;
    base::WaitableEvent done(false, false);
    g_env->GetRenderingTaskRunner()->PostTask(
        FROM_HERE, base::Bind(&RenderingHelper::GetThumbnailsAsRGB,
                              base::Unretained(&rendering_helper_), &rgb,
                              &alpha_solid, &done));
    done.Wait();

    std::vector<std::string> golden_md5s;
    std::string md5_string = base::MD5String(
        base::StringPiece(reinterpret_cast<char*>(&rgb[0]), rgb.size()));
    ReadGoldenThumbnailMD5s(test_video_files_[0], &golden_md5s);
    std::vector<std::string>::iterator match =
        find(golden_md5s.begin(), golden_md5s.end(), md5_string);
    if (match == golden_md5s.end()) {
      // Convert raw RGB into PNG for export.
      std::vector<unsigned char> png;
      gfx::PNGCodec::Encode(&rgb[0],
                            gfx::PNGCodec::FORMAT_RGB,
                            kThumbnailsPageSize,
                            kThumbnailsPageSize.width() * 3,
                            true,
                            std::vector<gfx::PNGCodec::Comment>(),
                            &png);

      LOG(ERROR) << "Unknown thumbnails MD5: " << md5_string;

      base::FilePath filepath(test_video_files_[0]->file_name);
      filepath = filepath.AddExtension(FILE_PATH_LITERAL(".bad_thumbnails"));
      filepath = filepath.AddExtension(FILE_PATH_LITERAL(".png"));
      int num_bytes = base::WriteFile(filepath,
                                      reinterpret_cast<char*>(&png[0]),
                                      png.size());
      ASSERT_EQ(num_bytes, static_cast<int>(png.size()));
    }
    ASSERT_NE(match, golden_md5s.end());
    EXPECT_EQ(alpha_solid, true) << "RGBA frame had incorrect alpha";
  }

  // Output the frame delivery time to file.
  // We can only make performance/correctness assertions if the decoder was
  // allowed to finish.
  if (g_output_log != NULL && delete_decoder_state >= CS_FLUSHED) {
    base::File output_file(
        base::FilePath(g_output_log),
        base::File::FLAG_CREATE_ALWAYS | base::File::FLAG_WRITE);
    for (size_t i = 0; i < num_concurrent_decoders; ++i) {
      clients[i]->OutputFrameDeliveryTimes(&output_file);
    }
  }

  g_env->GetRenderingTaskRunner()->PostTask(
      FROM_HERE,
      base::Bind(&STLDeleteElements<std::vector<GLRenderingVDAClient*>>,
                 &clients));
  g_env->GetRenderingTaskRunner()->PostTask(
      FROM_HERE,
      base::Bind(&STLDeleteElements<
                     std::vector<ClientStateNotification<ClientState>*>>,
                 &notes));
  WaitUntilIdle();
}

// Test that replay after EOS works fine.
INSTANTIATE_TEST_CASE_P(
    ReplayAfterEOS, VideoDecodeAcceleratorParamTest,
    ::testing::Values(
        MakeTuple(1, 1, 4, END_OF_STREAM_RESET, CS_RESET, false, false)));

// Test that Reset() before the first Decode() works fine.
INSTANTIATE_TEST_CASE_P(
    ResetBeforeDecode, VideoDecodeAcceleratorParamTest,
    ::testing::Values(
        MakeTuple(1, 1, 1, START_OF_STREAM_RESET, CS_RESET, false, false)));

// Test Reset() immediately after Decode() containing config info.
INSTANTIATE_TEST_CASE_P(
    ResetAfterFirstConfigInfo, VideoDecodeAcceleratorParamTest,
    ::testing::Values(MakeTuple(
        1, 1, 1, RESET_AFTER_FIRST_CONFIG_INFO, CS_RESET, false, false)));

// Test that Reset() mid-stream works fine and doesn't affect decoding even when
// Decode() calls are made during the reset.
INSTANTIATE_TEST_CASE_P(
    MidStreamReset, VideoDecodeAcceleratorParamTest,
    ::testing::Values(
        MakeTuple(1, 1, 1, MID_STREAM_RESET, CS_RESET, false, false)));

INSTANTIATE_TEST_CASE_P(
    SlowRendering, VideoDecodeAcceleratorParamTest,
    ::testing::Values(
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESET, true, false)));

// Test that Destroy() mid-stream works fine (primarily this is testing that no
// crashes occur).
INSTANTIATE_TEST_CASE_P(
    TearDownTiming, VideoDecodeAcceleratorParamTest,
    ::testing::Values(
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_DECODER_SET, false, false),
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_INITIALIZED, false, false),
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_FLUSHING, false, false),
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_FLUSHED, false, false),
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESETTING, false, false),
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESET, false, false),
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET,
                  static_cast<ClientState>(-1), false, false),
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET,
                  static_cast<ClientState>(-10), false, false),
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET,
                  static_cast<ClientState>(-100), false, false)));

// Test that decoding various variations works with multiple in-flight decodes.
INSTANTIATE_TEST_CASE_P(
    DecodeVariations, VideoDecodeAcceleratorParamTest,
    ::testing::Values(
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESET, false, false),
        MakeTuple(1, 10, 1, END_OF_STREAM_RESET, CS_RESET, false, false),
        MakeTuple(1, 15, 1, END_OF_STREAM_RESET, CS_RESET, false, false)));

// Find out how many concurrent decoders can go before we exhaust system
// resources.
INSTANTIATE_TEST_CASE_P(
    ResourceExhaustion, VideoDecodeAcceleratorParamTest,
    ::testing::Values(
        // +0 hack below to promote enum to int.
        MakeTuple(kMinSupportedNumConcurrentDecoders + 0, 1, 1,
                  END_OF_STREAM_RESET, CS_RESET, false, false),
        MakeTuple(kMinSupportedNumConcurrentDecoders + 1, 1, 1,
                  END_OF_STREAM_RESET, CS_RESET, false, false)));

// Thumbnailing test.
INSTANTIATE_TEST_CASE_P(
    Thumbnail, VideoDecodeAcceleratorParamTest,
    ::testing::Values(
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESET, false, true)));

// Measure the median of the decode time when VDA::Decode is called 30 times
// per second.
TEST_F(VideoDecodeAcceleratorTest, TestDecodeTimeMedian) {
  RenderingHelperParams helper_params;

  // Disable rendering by setting the rendering_fps = 0.
  helper_params.rendering_fps = 0;
  helper_params.warm_up_iterations = 0;
  helper_params.render_as_thumbnails = false;

  ClientStateNotification<ClientState>* note =
      new ClientStateNotification<ClientState>();
  GLRenderingVDAClient* client =
      new GLRenderingVDAClient(0,
                               &rendering_helper_,
                               note,
                               test_video_files_[0]->data_str,
                               1,
                               1,
                               test_video_files_[0]->reset_after_frame_num,
                               CS_RESET,
                               test_video_files_[0]->width,
                               test_video_files_[0]->height,
                               test_video_files_[0]->profile,
                               g_fake_decoder,
                               true,
                               std::numeric_limits<int>::max(),
                               kWebRtcDecodeCallsPerSecond,
                               false /* render_as_thumbnail */);
  helper_params.window_sizes.push_back(
      gfx::Size(test_video_files_[0]->width, test_video_files_[0]->height));
  InitializeRenderingHelper(helper_params);
  CreateAndStartDecoder(client, note);
  WaitUntilDecodeFinish(note);

  base::TimeDelta decode_time_median = client->decode_time_median();
  std::string output_string =
      base::StringPrintf("Decode time median: %" PRId64 " us",
                         decode_time_median.InMicroseconds());
  LOG(INFO) << output_string;

  if (g_output_log != NULL)
    OutputLogFile(g_output_log, output_string);

  g_env->GetRenderingTaskRunner()->DeleteSoon(FROM_HERE, client);
  g_env->GetRenderingTaskRunner()->DeleteSoon(FROM_HERE, note);
}

// TODO(fischman, vrk): add more tests! In particular:
// - Test life-cycle: Seek/Stop/Pause/Play for a single decoder.
// - Test alternate configurations.
// - Test failure conditions.
// - Test frame size changes mid-stream.

}  // namespace content

int main(int argc, char **argv) {
  testing::InitGoogleTest(&argc, argv);  // Removes gtest-specific args.
  base::CommandLine::Init(argc, argv);

  // Needed to enable DVLOG through --vmodule.
  logging::LoggingSettings settings;
  settings.logging_dest = logging::LOG_TO_SYSTEM_DEBUG_LOG;
  CHECK(logging::InitLogging(settings));

  const base::CommandLine* cmd_line = base::CommandLine::ForCurrentProcess();

  base::CommandLine::SwitchMap switches = cmd_line->GetSwitches();
  for (base::CommandLine::SwitchMap::const_iterator it = switches.begin();
       it != switches.end(); ++it) {
    if (it->first == "test_video_data") {
      content::g_test_video_data = it->second.c_str();
      continue;
    }
    // The output log for VDA performance test.
    if (it->first == "output_log") {
      content::g_output_log = it->second.c_str();
      continue;
    }
    if (it->first == "rendering_fps") {
      // On Windows, CommandLine::StringType is wstring. We need to convert
      // it to std::string first.
      std::string input(it->second.begin(), it->second.end());
      CHECK(base::StringToDouble(input, &content::g_rendering_fps));
      continue;
    }
    if (it->first == "rendering_warm_up") {
      std::string input(it->second.begin(), it->second.end());
      CHECK(base::StringToInt(input, &content::g_rendering_warm_up));
      continue;
    }
    // TODO(owenlin): Remove this flag once it is not used in autotest.
    if (it->first == "disable_rendering") {
      content::g_rendering_fps = 0;
      continue;
    }
    if (it->first == "num_play_throughs") {
      std::string input(it->second.begin(), it->second.end());
      CHECK(base::StringToInt(input, &content::g_num_play_throughs));
      continue;
    }
    if (it->first == "fake_decoder") {
      content::g_fake_decoder = 1;
      continue;
    }
    if (it->first == "v" || it->first == "vmodule")
      continue;
    if (it->first == "ozone-platform" || it->first == "ozone-use-surfaceless")
      continue;
    LOG(FATAL) << "Unexpected switch: " << it->first << ":" << it->second;
  }

  base::ShadowingAtExitManager at_exit_manager;
#if defined(OS_WIN) || defined(USE_OZONE)
  // For windows the decoding thread initializes the media foundation decoder
  // which uses COM. We need the thread to be a UI thread.
  // On Ozone, the backend initializes the event system using a UI
  // message loop.
  base::MessageLoopForUI main_loop;
#else
  base::MessageLoop main_loop;
#endif  // OS_WIN || USE_OZONE

#if defined(USE_OZONE)
  ui::OzonePlatform::InitializeForUI();
#endif

  content::g_env =
      reinterpret_cast<content::VideoDecodeAcceleratorTestEnvironment*>(
          testing::AddGlobalTestEnvironment(
              new content::VideoDecodeAcceleratorTestEnvironment()));

  return RUN_ALL_TESTS();
}