[Android WebView] Fix webview perf bot switchover to use org.chromium.webview_shell...
[chromium-blink-merge.git] / content / common / gpu / media / video_decode_accelerator_unittest.cc
blob04978bc61837ff06367d3a15e2894cbb817dbab4
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 //
5 // The bulk of this file is support code; sorry about that. Here's an overview
6 // to hopefully help readers of this code:
7 // - RenderingHelper is charged with interacting with X11/{EGL/GLES2,GLX/GL} or
8 // Win/EGL.
9 // - ClientState is an enum for the state of the decode client used by the test.
10 // - ClientStateNotification is a barrier abstraction that allows the test code
11 // to be written sequentially and wait for the decode client to see certain
12 // state transitions.
13 // - GLRenderingVDAClient is a VideoDecodeAccelerator::Client implementation
14 // - Finally actual TEST cases are at the bottom of this file, using the above
15 // infrastructure.
17 #include <fcntl.h>
18 #include <sys/stat.h>
19 #include <sys/types.h>
20 #include <algorithm>
21 #include <deque>
22 #include <map>
24 // Include gtest.h out of order because <X11/X.h> #define's Bool & None, which
25 // gtest uses as struct names (inside a namespace). This means that
26 // #include'ing gtest after anything that pulls in X.h fails to compile.
27 // This is http://code.google.com/p/googletest/issues/detail?id=371
28 #include "testing/gtest/include/gtest/gtest.h"
30 #include "base/at_exit.h"
31 #include "base/bind.h"
32 #include "base/callback_helpers.h"
33 #include "base/command_line.h"
34 #include "base/files/file.h"
35 #include "base/files/file_util.h"
36 #include "base/format_macros.h"
37 #include "base/md5.h"
38 #include "base/process/process_handle.h"
39 #include "base/stl_util.h"
40 #include "base/strings/string_number_conversions.h"
41 #include "base/strings/string_split.h"
42 #include "base/strings/stringize_macros.h"
43 #include "base/strings/stringprintf.h"
44 #include "base/strings/utf_string_conversions.h"
45 #include "base/synchronization/condition_variable.h"
46 #include "base/synchronization/lock.h"
47 #include "base/synchronization/waitable_event.h"
48 #include "base/thread_task_runner_handle.h"
49 #include "base/threading/thread.h"
50 #include "content/common/gpu/media/fake_video_decode_accelerator.h"
51 #include "content/common/gpu/media/rendering_helper.h"
52 #include "content/common/gpu/media/video_accelerator_unittest_helpers.h"
53 #include "content/public/common/content_switches.h"
54 #include "media/filters/h264_parser.h"
55 #include "ui/gfx/codec/png_codec.h"
56 #include "ui/gl/gl_image.h"
58 #if defined(OS_WIN)
59 #include "base/win/windows_version.h"
60 #include "content/common/gpu/media/dxva_video_decode_accelerator.h"
61 #elif defined(OS_CHROMEOS)
62 #if defined(USE_V4L2_CODEC)
63 #include "content/common/gpu/media/v4l2_device.h"
64 #include "content/common/gpu/media/v4l2_slice_video_decode_accelerator.h"
65 #include "content/common/gpu/media/v4l2_video_decode_accelerator.h"
66 #endif
67 #if defined(ARCH_CPU_X86_FAMILY)
68 #include "content/common/gpu/media/vaapi_video_decode_accelerator.h"
69 #include "content/common/gpu/media/vaapi_wrapper.h"
70 #endif // defined(ARCH_CPU_X86_FAMILY)
71 #else
72 #error The VideoAccelerator tests are not supported on this platform.
73 #endif // OS_WIN
75 #if defined(USE_OZONE)
76 #include "ui/ozone/public/ozone_gpu_test_helper.h"
77 #include "ui/ozone/public/ozone_platform.h"
78 #endif // defined(USE_OZONE)
80 using media::VideoDecodeAccelerator;
82 namespace content {
83 namespace {
85 using base::MakeTuple;
87 // Values optionally filled in from flags; see main() below.
88 // The syntax of multiple test videos is:
89 // test-video1;test-video2;test-video3
90 // where only the first video is required and other optional videos would be
91 // decoded by concurrent decoders.
92 // The syntax of each test-video is:
93 // filename:width:height:numframes:numfragments:minFPSwithRender:minFPSnoRender
94 // where only the first field is required. Value details:
95 // - |filename| must be an h264 Annex B (NAL) stream or an IVF VP8/9 stream.
96 // - |width| and |height| are in pixels.
97 // - |numframes| is the number of picture frames in the file.
98 // - |numfragments| NALU (h264) or frame (VP8/9) count in the stream.
99 // - |minFPSwithRender| and |minFPSnoRender| are minimum frames/second speeds
100 // expected to be achieved with and without rendering to the screen, resp.
101 // (the latter tests just decode speed).
102 // - |profile| is the media::VideoCodecProfile set during Initialization.
103 // An empty value for a numeric field means "ignore".
104 const base::FilePath::CharType* g_test_video_data =
105 // FILE_PATH_LITERAL("test-25fps.vp8:320:240:250:250:50:175:11");
106 FILE_PATH_LITERAL("test-25fps.h264:320:240:250:258:50:175:1");
108 // The file path of the test output log. This is used to communicate the test
109 // results to CrOS autotests. We can enable the log and specify the filename by
110 // the "--output_log" switch.
111 const base::FilePath::CharType* g_output_log = NULL;
113 // The value is set by the switch "--rendering_fps".
114 double g_rendering_fps = 60;
116 // The value is set by the switch "--rendering_warm_up".
117 int g_rendering_warm_up = 0;
119 // The value is set by the switch "--num_play_throughs". The video will play
120 // the specified number of times. In different test cases, we have different
121 // values for |num_play_throughs|. This setting will override the value. A
122 // special value "0" means no override.
123 int g_num_play_throughs = 0;
124 // Fake decode
125 int g_fake_decoder = 0;
127 // Environment to store rendering thread.
128 class VideoDecodeAcceleratorTestEnvironment;
129 VideoDecodeAcceleratorTestEnvironment* g_env;
131 // Magic constants for differentiating the reasons for NotifyResetDone being
132 // called.
133 enum ResetPoint {
134 // Reset() just after calling Decode() with a fragment containing config info.
135 RESET_AFTER_FIRST_CONFIG_INFO = -4,
136 START_OF_STREAM_RESET = -3,
137 MID_STREAM_RESET = -2,
138 END_OF_STREAM_RESET = -1
141 const int kMaxResetAfterFrameNum = 100;
142 const int kMaxFramesToDelayReuse = 64;
143 const base::TimeDelta kReuseDelay = base::TimeDelta::FromSeconds(1);
144 // Simulate WebRTC and call VDA::Decode 30 times per second.
145 const int kWebRtcDecodeCallsPerSecond = 30;
147 struct TestVideoFile {
148 explicit TestVideoFile(base::FilePath::StringType file_name)
149 : file_name(file_name),
150 width(-1),
151 height(-1),
152 num_frames(-1),
153 num_fragments(-1),
154 min_fps_render(-1),
155 min_fps_no_render(-1),
156 profile(media::VIDEO_CODEC_PROFILE_UNKNOWN),
157 reset_after_frame_num(END_OF_STREAM_RESET) {
160 base::FilePath::StringType file_name;
161 int width;
162 int height;
163 int num_frames;
164 int num_fragments;
165 int min_fps_render;
166 int min_fps_no_render;
167 media::VideoCodecProfile profile;
168 int reset_after_frame_num;
169 std::string data_str;
172 const gfx::Size kThumbnailsPageSize(1600, 1200);
173 const gfx::Size kThumbnailSize(160, 120);
174 const int kMD5StringLength = 32;
176 // Read in golden MD5s for the thumbnailed rendering of this video
177 void ReadGoldenThumbnailMD5s(const TestVideoFile* video_file,
178 std::vector<std::string>* md5_strings) {
179 base::FilePath filepath(video_file->file_name);
180 filepath = filepath.AddExtension(FILE_PATH_LITERAL(".md5"));
181 std::string all_md5s;
182 base::ReadFileToString(filepath, &all_md5s);
183 base::SplitString(all_md5s, '\n', md5_strings);
184 // Check these are legitimate MD5s.
185 for (std::vector<std::string>::iterator md5_string = md5_strings->begin();
186 md5_string != md5_strings->end(); ++md5_string) {
187 // Ignore the empty string added by SplitString
188 if (!md5_string->length())
189 continue;
190 // Ignore comments
191 if (md5_string->at(0) == '#')
192 continue;
194 CHECK_EQ(static_cast<int>(md5_string->length()),
195 kMD5StringLength) << *md5_string;
196 bool hex_only = std::count_if(md5_string->begin(),
197 md5_string->end(), isxdigit) ==
198 kMD5StringLength;
199 CHECK(hex_only) << *md5_string;
201 CHECK_GE(md5_strings->size(), 1U) << " MD5 checksum file ("
202 << filepath.MaybeAsASCII()
203 << ") missing or empty.";
206 // State of the GLRenderingVDAClient below. Order matters here as the test
207 // makes assumptions about it.
208 enum ClientState {
209 CS_CREATED = 0,
210 CS_DECODER_SET = 1,
211 CS_INITIALIZED = 2,
212 CS_FLUSHING = 3,
213 CS_FLUSHED = 4,
214 CS_RESETTING = 5,
215 CS_RESET = 6,
216 CS_ERROR = 7,
217 CS_DESTROYED = 8,
218 CS_MAX, // Must be last entry.
221 // Initialize the GPU thread for rendering. We only need to setup once
222 // for all test cases.
223 class VideoDecodeAcceleratorTestEnvironment : public ::testing::Environment {
224 public:
225 VideoDecodeAcceleratorTestEnvironment()
226 : rendering_thread_("GLRenderingVDAClientThread") {}
228 void SetUp() override {
229 rendering_thread_.Start();
231 base::WaitableEvent done(false, false);
232 rendering_thread_.task_runner()->PostTask(
233 FROM_HERE, base::Bind(&RenderingHelper::InitializeOneOff, &done));
234 done.Wait();
236 #if defined(USE_OZONE)
237 gpu_helper_.reset(new ui::OzoneGpuTestHelper());
238 // Need to initialize after the rendering side since the rendering side
239 // initializes the "GPU" parts of Ozone.
241 // This also needs to be done in the test environment since this shouldn't
242 // be initialized multiple times for the same Ozone platform.
243 gpu_helper_->Initialize(base::ThreadTaskRunnerHandle::Get(),
244 GetRenderingTaskRunner());
245 #endif
248 void TearDown() override {
249 #if defined(USE_OZONE)
250 gpu_helper_.reset();
251 #endif
252 rendering_thread_.Stop();
255 scoped_refptr<base::SingleThreadTaskRunner> GetRenderingTaskRunner() const {
256 return rendering_thread_.task_runner();
259 private:
260 base::Thread rendering_thread_;
261 #if defined(USE_OZONE)
262 scoped_ptr<ui::OzoneGpuTestHelper> gpu_helper_;
263 #endif
265 DISALLOW_COPY_AND_ASSIGN(VideoDecodeAcceleratorTestEnvironment);
268 // A helper class used to manage the lifetime of a Texture.
269 class TextureRef : public base::RefCounted<TextureRef> {
270 public:
271 TextureRef(uint32 texture_id, const base::Closure& no_longer_needed_cb)
272 : texture_id_(texture_id), no_longer_needed_cb_(no_longer_needed_cb) {}
274 int32 texture_id() const { return texture_id_; }
276 private:
277 friend class base::RefCounted<TextureRef>;
278 ~TextureRef();
280 uint32 texture_id_;
281 base::Closure no_longer_needed_cb_;
284 TextureRef::~TextureRef() {
285 base::ResetAndReturn(&no_longer_needed_cb_).Run();
288 // Client that can accept callbacks from a VideoDecodeAccelerator and is used by
289 // the TESTs below.
290 class GLRenderingVDAClient
291 : public VideoDecodeAccelerator::Client,
292 public base::SupportsWeakPtr<GLRenderingVDAClient> {
293 public:
294 // |window_id| the window_id of the client, which is used to identify the
295 // rendering area in the |rendering_helper|.
296 // Doesn't take ownership of |rendering_helper| or |note|, which must outlive
297 // |*this|.
298 // |num_play_throughs| indicates how many times to play through the video.
299 // |reset_after_frame_num| can be a frame number >=0 indicating a mid-stream
300 // Reset() should be done after that frame number is delivered, or
301 // END_OF_STREAM_RESET to indicate no mid-stream Reset().
302 // |delete_decoder_state| indicates when the underlying decoder should be
303 // Destroy()'d and deleted and can take values: N<0: delete after -N Decode()
304 // calls have been made, N>=0 means interpret as ClientState.
305 // Both |reset_after_frame_num| & |delete_decoder_state| apply only to the
306 // last play-through (governed by |num_play_throughs|).
307 // |suppress_rendering| indicates GL rendering is supressed or not.
308 // After |delay_reuse_after_frame_num| frame has been delivered, the client
309 // will start delaying the call to ReusePictureBuffer() for kReuseDelay.
310 // |decode_calls_per_second| is the number of VDA::Decode calls per second.
311 // If |decode_calls_per_second| > 0, |num_in_flight_decodes| must be 1.
312 GLRenderingVDAClient(size_t window_id,
313 RenderingHelper* rendering_helper,
314 ClientStateNotification<ClientState>* note,
315 const std::string& encoded_data,
316 int num_in_flight_decodes,
317 int num_play_throughs,
318 int reset_after_frame_num,
319 int delete_decoder_state,
320 int frame_width,
321 int frame_height,
322 media::VideoCodecProfile profile,
323 int fake_decoder,
324 bool suppress_rendering,
325 int delay_reuse_after_frame_num,
326 int decode_calls_per_second,
327 bool render_as_thumbnails);
328 ~GLRenderingVDAClient() override;
329 void CreateAndStartDecoder();
331 // VideoDecodeAccelerator::Client implementation.
332 // The heart of the Client.
333 void ProvidePictureBuffers(uint32 requested_num_of_buffers,
334 const gfx::Size& dimensions,
335 uint32 texture_target) override;
336 void DismissPictureBuffer(int32 picture_buffer_id) override;
337 void PictureReady(const media::Picture& picture) override;
338 // Simple state changes.
339 void NotifyEndOfBitstreamBuffer(int32 bitstream_buffer_id) override;
340 void NotifyFlushDone() override;
341 void NotifyResetDone() override;
342 void NotifyError(VideoDecodeAccelerator::Error error) override;
344 void OutputFrameDeliveryTimes(base::File* output);
346 // Simple getters for inspecting the state of the Client.
347 int num_done_bitstream_buffers() { return num_done_bitstream_buffers_; }
348 int num_skipped_fragments() { return num_skipped_fragments_; }
349 int num_queued_fragments() { return num_queued_fragments_; }
350 int num_decoded_frames() { return num_decoded_frames_; }
351 double frames_per_second();
352 // Return the median of the decode time of all decoded frames.
353 base::TimeDelta decode_time_median();
354 bool decoder_deleted() { return !decoder_.get(); }
356 private:
357 typedef std::map<int32, scoped_refptr<TextureRef>> TextureRefMap;
359 scoped_ptr<media::VideoDecodeAccelerator> CreateFakeVDA();
360 scoped_ptr<media::VideoDecodeAccelerator> CreateDXVAVDA();
361 scoped_ptr<media::VideoDecodeAccelerator> CreateV4L2VDA();
362 scoped_ptr<media::VideoDecodeAccelerator> CreateV4L2SliceVDA();
363 scoped_ptr<media::VideoDecodeAccelerator> CreateVaapiVDA();
365 void BindImage(uint32 client_texture_id,
366 uint32 texture_target,
367 scoped_refptr<gfx::GLImage> image);
369 void SetState(ClientState new_state);
370 void FinishInitialization();
371 void ReturnPicture(int32 picture_buffer_id);
373 // Delete the associated decoder helper.
374 void DeleteDecoder();
376 // Compute & return the first encoded bytes (including a start frame) to send
377 // to the decoder, starting at |start_pos| and returning one fragment. Skips
378 // to the first decodable position.
379 std::string GetBytesForFirstFragment(size_t start_pos, size_t* end_pos);
380 // Compute & return the encoded bytes of next fragment to send to the decoder
381 // (based on |start_pos|).
382 std::string GetBytesForNextFragment(size_t start_pos, size_t* end_pos);
383 // Helpers for GetBytesForNextFragment above.
384 void GetBytesForNextNALU(size_t start_pos, size_t* end_pos); // For h.264.
385 std::string GetBytesForNextFrame(
386 size_t start_pos, size_t* end_pos); // For VP8/9.
388 // Request decode of the next fragment in the encoded data.
389 void DecodeNextFragment();
391 size_t window_id_;
392 RenderingHelper* rendering_helper_;
393 gfx::Size frame_size_;
394 std::string encoded_data_;
395 const int num_in_flight_decodes_;
396 int outstanding_decodes_;
397 size_t encoded_data_next_pos_to_decode_;
398 int next_bitstream_buffer_id_;
399 ClientStateNotification<ClientState>* note_;
400 scoped_ptr<VideoDecodeAccelerator> decoder_;
401 scoped_ptr<base::WeakPtrFactory<VideoDecodeAccelerator> >
402 weak_decoder_factory_;
403 int remaining_play_throughs_;
404 int reset_after_frame_num_;
405 int delete_decoder_state_;
406 ClientState state_;
407 int num_skipped_fragments_;
408 int num_queued_fragments_;
409 int num_decoded_frames_;
410 int num_done_bitstream_buffers_;
411 base::TimeTicks initialize_done_ticks_;
412 media::VideoCodecProfile profile_;
413 int fake_decoder_;
414 GLenum texture_target_;
415 bool suppress_rendering_;
416 std::vector<base::TimeTicks> frame_delivery_times_;
417 int delay_reuse_after_frame_num_;
418 // A map from bitstream buffer id to the decode start time of the buffer.
419 std::map<int, base::TimeTicks> decode_start_time_;
420 // The decode time of all decoded frames.
421 std::vector<base::TimeDelta> decode_time_;
422 // The number of VDA::Decode calls per second. This is to simulate webrtc.
423 int decode_calls_per_second_;
424 bool render_as_thumbnails_;
426 // A map of the textures that are currently active for the decoder, i.e.,
427 // have been created via AssignPictureBuffers() and not dismissed via
428 // DismissPictureBuffer(). The keys in the map are the IDs of the
429 // corresponding picture buffers, and the values are TextureRefs to the
430 // textures.
431 TextureRefMap active_textures_;
433 // A map of the textures that are still pending in the renderer.
434 // We check this to ensure all frames are rendered before entering the
435 // CS_RESET_State.
436 TextureRefMap pending_textures_;
438 int32 next_picture_buffer_id_;
440 DISALLOW_IMPLICIT_CONSTRUCTORS(GLRenderingVDAClient);
443 GLRenderingVDAClient::GLRenderingVDAClient(
444 size_t window_id,
445 RenderingHelper* rendering_helper,
446 ClientStateNotification<ClientState>* note,
447 const std::string& encoded_data,
448 int num_in_flight_decodes,
449 int num_play_throughs,
450 int reset_after_frame_num,
451 int delete_decoder_state,
452 int frame_width,
453 int frame_height,
454 media::VideoCodecProfile profile,
455 int fake_decoder,
456 bool suppress_rendering,
457 int delay_reuse_after_frame_num,
458 int decode_calls_per_second,
459 bool render_as_thumbnails)
460 : window_id_(window_id),
461 rendering_helper_(rendering_helper),
462 frame_size_(frame_width, frame_height),
463 encoded_data_(encoded_data),
464 num_in_flight_decodes_(num_in_flight_decodes),
465 outstanding_decodes_(0),
466 encoded_data_next_pos_to_decode_(0),
467 next_bitstream_buffer_id_(0),
468 note_(note),
469 remaining_play_throughs_(num_play_throughs),
470 reset_after_frame_num_(reset_after_frame_num),
471 delete_decoder_state_(delete_decoder_state),
472 state_(CS_CREATED),
473 num_skipped_fragments_(0),
474 num_queued_fragments_(0),
475 num_decoded_frames_(0),
476 num_done_bitstream_buffers_(0),
477 fake_decoder_(fake_decoder),
478 texture_target_(0),
479 suppress_rendering_(suppress_rendering),
480 delay_reuse_after_frame_num_(delay_reuse_after_frame_num),
481 decode_calls_per_second_(decode_calls_per_second),
482 render_as_thumbnails_(render_as_thumbnails),
483 next_picture_buffer_id_(1) {
484 CHECK_GT(num_in_flight_decodes, 0);
485 CHECK_GT(num_play_throughs, 0);
486 // |num_in_flight_decodes_| is unsupported if |decode_calls_per_second_| > 0.
487 if (decode_calls_per_second_ > 0)
488 CHECK_EQ(1, num_in_flight_decodes_);
490 // Default to H264 baseline if no profile provided.
491 profile_ = (profile != media::VIDEO_CODEC_PROFILE_UNKNOWN
492 ? profile
493 : media::H264PROFILE_BASELINE);
496 GLRenderingVDAClient::~GLRenderingVDAClient() {
497 DeleteDecoder(); // Clean up in case of expected error.
498 CHECK(decoder_deleted());
499 SetState(CS_DESTROYED);
502 static bool DoNothingReturnTrue() { return true; }
504 scoped_ptr<media::VideoDecodeAccelerator>
505 GLRenderingVDAClient::CreateFakeVDA() {
506 scoped_ptr<media::VideoDecodeAccelerator> decoder;
507 if (fake_decoder_) {
508 decoder.reset(new FakeVideoDecodeAccelerator(
509 static_cast<gfx::GLContext*> (rendering_helper_->GetGLContextHandle()),
510 frame_size_,
511 base::Bind(&DoNothingReturnTrue)));
513 return decoder.Pass();
516 scoped_ptr<media::VideoDecodeAccelerator>
517 GLRenderingVDAClient::CreateDXVAVDA() {
518 scoped_ptr<media::VideoDecodeAccelerator> decoder;
519 #if defined(OS_WIN)
520 if (base::win::GetVersion() >= base::win::VERSION_WIN7)
521 decoder.reset(
522 new DXVAVideoDecodeAccelerator(
523 base::Bind(&DoNothingReturnTrue),
524 rendering_helper_->GetGLContext().get()));
525 #endif
526 return decoder.Pass();
529 scoped_ptr<media::VideoDecodeAccelerator>
530 GLRenderingVDAClient::CreateV4L2VDA() {
531 scoped_ptr<media::VideoDecodeAccelerator> decoder;
532 #if defined(OS_CHROMEOS) && defined(USE_V4L2_CODEC)
533 scoped_refptr<V4L2Device> device = V4L2Device::Create(V4L2Device::kDecoder);
534 if (device.get()) {
535 base::WeakPtr<VideoDecodeAccelerator::Client> weak_client = AsWeakPtr();
536 decoder.reset(new V4L2VideoDecodeAccelerator(
537 static_cast<EGLDisplay>(rendering_helper_->GetGLDisplay()),
538 static_cast<EGLContext>(rendering_helper_->GetGLContextHandle()),
539 weak_client, base::Bind(&DoNothingReturnTrue), device,
540 base::ThreadTaskRunnerHandle::Get()));
542 #endif
543 return decoder.Pass();
546 scoped_ptr<media::VideoDecodeAccelerator>
547 GLRenderingVDAClient::CreateV4L2SliceVDA() {
548 scoped_ptr<media::VideoDecodeAccelerator> decoder;
549 #if defined(OS_CHROMEOS) && defined(USE_V4L2_CODEC)
550 scoped_refptr<V4L2Device> device = V4L2Device::Create(V4L2Device::kDecoder);
551 if (device.get()) {
552 base::WeakPtr<VideoDecodeAccelerator::Client> weak_client = AsWeakPtr();
553 decoder.reset(new V4L2SliceVideoDecodeAccelerator(
554 device, static_cast<EGLDisplay>(rendering_helper_->GetGLDisplay()),
555 static_cast<EGLContext>(rendering_helper_->GetGLContextHandle()),
556 weak_client, base::Bind(&DoNothingReturnTrue),
557 base::ThreadTaskRunnerHandle::Get()));
559 #endif
560 return decoder.Pass();
563 scoped_ptr<media::VideoDecodeAccelerator>
564 GLRenderingVDAClient::CreateVaapiVDA() {
565 scoped_ptr<media::VideoDecodeAccelerator> decoder;
566 #if defined(OS_CHROMEOS) && defined(ARCH_CPU_X86_FAMILY)
567 decoder.reset(new VaapiVideoDecodeAccelerator(
568 base::Bind(&DoNothingReturnTrue),
569 base::Bind(&GLRenderingVDAClient::BindImage, base::Unretained(this))));
570 #endif
571 return decoder.Pass();
574 void GLRenderingVDAClient::BindImage(uint32 client_texture_id,
575 uint32 texture_target,
576 scoped_refptr<gfx::GLImage> image) {
579 void GLRenderingVDAClient::CreateAndStartDecoder() {
580 CHECK(decoder_deleted());
581 CHECK(!decoder_.get());
583 VideoDecodeAccelerator::Client* client = this;
585 scoped_ptr<media::VideoDecodeAccelerator> decoders[] = {
586 CreateFakeVDA(),
587 CreateDXVAVDA(),
588 CreateV4L2VDA(),
589 CreateV4L2SliceVDA(),
590 CreateVaapiVDA(),
593 for (size_t i = 0; i < arraysize(decoders); ++i) {
594 if (!decoders[i])
595 continue;
596 decoder_ = decoders[i].Pass();
597 weak_decoder_factory_.reset(
598 new base::WeakPtrFactory<VideoDecodeAccelerator>(decoder_.get()));
599 if (decoder_->Initialize(profile_, client)) {
600 SetState(CS_DECODER_SET);
601 FinishInitialization();
602 return;
605 // Decoders are all initialize failed.
606 LOG(ERROR) << "VideoDecodeAccelerator::Initialize() failed";
607 CHECK(false);
610 void GLRenderingVDAClient::ProvidePictureBuffers(
611 uint32 requested_num_of_buffers,
612 const gfx::Size& dimensions,
613 uint32 texture_target) {
614 if (decoder_deleted())
615 return;
616 std::vector<media::PictureBuffer> buffers;
618 texture_target_ = texture_target;
619 for (uint32 i = 0; i < requested_num_of_buffers; ++i) {
620 uint32 texture_id;
621 base::WaitableEvent done(false, false);
622 rendering_helper_->CreateTexture(
623 texture_target_, &texture_id, dimensions, &done);
624 done.Wait();
626 int32 picture_buffer_id = next_picture_buffer_id_++;
627 CHECK(active_textures_
628 .insert(std::make_pair(
629 picture_buffer_id,
630 new TextureRef(texture_id,
631 base::Bind(&RenderingHelper::DeleteTexture,
632 base::Unretained(rendering_helper_),
633 texture_id))))
634 .second);
636 buffers.push_back(
637 media::PictureBuffer(picture_buffer_id, dimensions, texture_id));
639 decoder_->AssignPictureBuffers(buffers);
642 void GLRenderingVDAClient::DismissPictureBuffer(int32 picture_buffer_id) {
643 CHECK_EQ(1U, active_textures_.erase(picture_buffer_id));
646 void GLRenderingVDAClient::PictureReady(const media::Picture& picture) {
647 // We shouldn't be getting pictures delivered after Reset has completed.
648 CHECK_LT(state_, CS_RESET);
650 if (decoder_deleted())
651 return;
653 base::TimeTicks now = base::TimeTicks::Now();
655 frame_delivery_times_.push_back(now);
657 // Save the decode time of this picture.
658 std::map<int, base::TimeTicks>::iterator it =
659 decode_start_time_.find(picture.bitstream_buffer_id());
660 ASSERT_NE(decode_start_time_.end(), it);
661 decode_time_.push_back(now - it->second);
662 decode_start_time_.erase(it);
664 CHECK_LE(picture.bitstream_buffer_id(), next_bitstream_buffer_id_);
665 ++num_decoded_frames_;
667 // Mid-stream reset applies only to the last play-through per constructor
668 // comment.
669 if (remaining_play_throughs_ == 1 &&
670 reset_after_frame_num_ == num_decoded_frames_) {
671 reset_after_frame_num_ = MID_STREAM_RESET;
672 decoder_->Reset();
673 // Re-start decoding from the beginning of the stream to avoid needing to
674 // know how to find I-frames and so on in this test.
675 encoded_data_next_pos_to_decode_ = 0;
678 TextureRefMap::iterator texture_it =
679 active_textures_.find(picture.picture_buffer_id());
680 ASSERT_NE(active_textures_.end(), texture_it);
682 scoped_refptr<VideoFrameTexture> video_frame = new VideoFrameTexture(
683 texture_target_, texture_it->second->texture_id(),
684 base::Bind(&GLRenderingVDAClient::ReturnPicture, AsWeakPtr(),
685 picture.picture_buffer_id()));
686 ASSERT_TRUE(pending_textures_.insert(*texture_it).second);
688 if (render_as_thumbnails_) {
689 rendering_helper_->RenderThumbnail(video_frame->texture_target(),
690 video_frame->texture_id());
691 } else if (!suppress_rendering_) {
692 rendering_helper_->QueueVideoFrame(window_id_, video_frame);
696 void GLRenderingVDAClient::ReturnPicture(int32 picture_buffer_id) {
697 if (decoder_deleted())
698 return;
699 CHECK_EQ(1U, pending_textures_.erase(picture_buffer_id));
701 if (pending_textures_.empty() && state_ == CS_RESETTING) {
702 SetState(CS_RESET);
703 DeleteDecoder();
704 return;
707 if (num_decoded_frames_ > delay_reuse_after_frame_num_) {
708 base::MessageLoop::current()->PostDelayedTask(
709 FROM_HERE,
710 base::Bind(&VideoDecodeAccelerator::ReusePictureBuffer,
711 weak_decoder_factory_->GetWeakPtr(),
712 picture_buffer_id),
713 kReuseDelay);
714 } else {
715 decoder_->ReusePictureBuffer(picture_buffer_id);
719 void GLRenderingVDAClient::NotifyEndOfBitstreamBuffer(
720 int32 bitstream_buffer_id) {
721 // TODO(fischman): this test currently relies on this notification to make
722 // forward progress during a Reset(). But the VDA::Reset() API doesn't
723 // guarantee this, so stop relying on it (and remove the notifications from
724 // VaapiVideoDecodeAccelerator::FinishReset()).
725 ++num_done_bitstream_buffers_;
726 --outstanding_decodes_;
727 if (decode_calls_per_second_ == 0)
728 DecodeNextFragment();
731 void GLRenderingVDAClient::NotifyFlushDone() {
732 if (decoder_deleted())
733 return;
735 SetState(CS_FLUSHED);
736 --remaining_play_throughs_;
737 DCHECK_GE(remaining_play_throughs_, 0);
738 if (decoder_deleted())
739 return;
740 decoder_->Reset();
741 SetState(CS_RESETTING);
744 void GLRenderingVDAClient::NotifyResetDone() {
745 if (decoder_deleted())
746 return;
748 if (reset_after_frame_num_ == MID_STREAM_RESET) {
749 reset_after_frame_num_ = END_OF_STREAM_RESET;
750 DecodeNextFragment();
751 return;
752 } else if (reset_after_frame_num_ == START_OF_STREAM_RESET) {
753 reset_after_frame_num_ = END_OF_STREAM_RESET;
754 for (int i = 0; i < num_in_flight_decodes_; ++i)
755 DecodeNextFragment();
756 return;
759 if (remaining_play_throughs_) {
760 encoded_data_next_pos_to_decode_ = 0;
761 FinishInitialization();
762 return;
765 rendering_helper_->Flush(window_id_);
767 if (pending_textures_.empty()) {
768 SetState(CS_RESET);
769 DeleteDecoder();
773 void GLRenderingVDAClient::NotifyError(VideoDecodeAccelerator::Error error) {
774 SetState(CS_ERROR);
777 void GLRenderingVDAClient::OutputFrameDeliveryTimes(base::File* output) {
778 std::string s = base::StringPrintf("frame count: %" PRIuS "\n",
779 frame_delivery_times_.size());
780 output->WriteAtCurrentPos(s.data(), s.length());
781 base::TimeTicks t0 = initialize_done_ticks_;
782 for (size_t i = 0; i < frame_delivery_times_.size(); ++i) {
783 s = base::StringPrintf("frame %04" PRIuS ": %" PRId64 " us\n",
785 (frame_delivery_times_[i] - t0).InMicroseconds());
786 t0 = frame_delivery_times_[i];
787 output->WriteAtCurrentPos(s.data(), s.length());
// Returns true iff |encoded| holds a 4-byte Annex B start code (00 00 00 01)
// beginning at |pos|. The caller must guarantee pos+3 is in range.
static bool LookingAtNAL(const std::string& encoded, size_t pos) {
  static const char kStartCode[] = {0, 0, 0, 1};
  for (size_t i = 0; i < 4; ++i) {
    if (encoded[pos + i] != kStartCode[i])
      return false;
  }
  return true;
}
796 void GLRenderingVDAClient::SetState(ClientState new_state) {
797 note_->Notify(new_state);
798 state_ = new_state;
799 if (!remaining_play_throughs_ && new_state == delete_decoder_state_) {
800 CHECK(!decoder_deleted());
801 DeleteDecoder();
805 void GLRenderingVDAClient::FinishInitialization() {
806 SetState(CS_INITIALIZED);
807 initialize_done_ticks_ = base::TimeTicks::Now();
809 if (reset_after_frame_num_ == START_OF_STREAM_RESET) {
810 reset_after_frame_num_ = MID_STREAM_RESET;
811 decoder_->Reset();
812 return;
815 for (int i = 0; i < num_in_flight_decodes_; ++i)
816 DecodeNextFragment();
817 DCHECK_EQ(outstanding_decodes_, num_in_flight_decodes_);
820 void GLRenderingVDAClient::DeleteDecoder() {
821 if (decoder_deleted())
822 return;
823 weak_decoder_factory_.reset();
824 decoder_.reset();
825 STLClearObject(&encoded_data_);
826 active_textures_.clear();
828 // Cascade through the rest of the states to simplify test code below.
829 for (int i = state_ + 1; i < CS_MAX; ++i)
830 SetState(static_cast<ClientState>(i));
833 std::string GLRenderingVDAClient::GetBytesForFirstFragment(
834 size_t start_pos, size_t* end_pos) {
835 if (profile_ < media::H264PROFILE_MAX) {
836 *end_pos = start_pos;
837 while (*end_pos + 4 < encoded_data_.size()) {
838 if ((encoded_data_[*end_pos + 4] & 0x1f) == 0x7) // SPS start frame
839 return GetBytesForNextFragment(*end_pos, end_pos);
840 GetBytesForNextNALU(*end_pos, end_pos);
841 num_skipped_fragments_++;
843 *end_pos = start_pos;
844 return std::string();
846 DCHECK_LE(profile_, media::VP9PROFILE_MAX);
847 return GetBytesForNextFragment(start_pos, end_pos);
850 std::string GLRenderingVDAClient::GetBytesForNextFragment(
851 size_t start_pos, size_t* end_pos) {
852 if (profile_ < media::H264PROFILE_MAX) {
853 *end_pos = start_pos;
854 GetBytesForNextNALU(*end_pos, end_pos);
855 if (start_pos != *end_pos) {
856 num_queued_fragments_++;
858 return encoded_data_.substr(start_pos, *end_pos - start_pos);
860 DCHECK_LE(profile_, media::VP9PROFILE_MAX);
861 return GetBytesForNextFrame(start_pos, end_pos);
864 void GLRenderingVDAClient::GetBytesForNextNALU(
865 size_t start_pos, size_t* end_pos) {
866 *end_pos = start_pos;
867 if (*end_pos + 4 > encoded_data_.size())
868 return;
869 CHECK(LookingAtNAL(encoded_data_, start_pos));
870 *end_pos += 4;
871 while (*end_pos + 4 <= encoded_data_.size() &&
872 !LookingAtNAL(encoded_data_, *end_pos)) {
873 ++*end_pos;
875 if (*end_pos + 3 >= encoded_data_.size())
876 *end_pos = encoded_data_.size();
879 std::string GLRenderingVDAClient::GetBytesForNextFrame(
880 size_t start_pos, size_t* end_pos) {
881 // Helpful description: http://wiki.multimedia.cx/index.php?title=IVF
882 std::string bytes;
883 if (start_pos == 0)
884 start_pos = 32; // Skip IVF header.
885 *end_pos = start_pos;
886 uint32 frame_size = *reinterpret_cast<uint32*>(&encoded_data_[*end_pos]);
887 *end_pos += 12; // Skip frame header.
888 bytes.append(encoded_data_.substr(*end_pos, frame_size));
889 *end_pos += frame_size;
890 num_queued_fragments_++;
891 return bytes;
894 static bool FragmentHasConfigInfo(const uint8* data, size_t size,
895 media::VideoCodecProfile profile) {
896 if (profile >= media::H264PROFILE_MIN &&
897 profile <= media::H264PROFILE_MAX) {
898 media::H264Parser parser;
899 parser.SetStream(data, size);
900 media::H264NALU nalu;
901 media::H264Parser::Result result = parser.AdvanceToNextNALU(&nalu);
902 if (result != media::H264Parser::kOk) {
903 // Let the VDA figure out there's something wrong with the stream.
904 return false;
907 return nalu.nal_unit_type == media::H264NALU::kSPS;
908 } else if (profile >= media::VP8PROFILE_MIN &&
909 profile <= media::VP9PROFILE_MAX) {
910 return (size > 0 && !(data[0] & 0x01));
912 // Shouldn't happen at this point.
913 LOG(FATAL) << "Invalid profile: " << profile;
914 return false;
// Queues the next fragment of |encoded_data_| to the decoder, or Flush()es
// once the whole stream has been queued and all Decode()s completed.  Also
// drives the RESET_AFTER_FIRST_CONFIG_INFO and delete-mid-decode test
// scenarios, and reschedules itself when pacing via
// |decode_calls_per_second_|.
void GLRenderingVDAClient::DecodeNextFragment() {
  if (decoder_deleted())
    return;
  if (encoded_data_next_pos_to_decode_ == encoded_data_.size()) {
    // Whole stream queued: Flush() once no Decode()s are outstanding.
    if (outstanding_decodes_ == 0) {
      decoder_->Flush();
      SetState(CS_FLUSHING);
    }
    return;
  }
  size_t end_pos;
  std::string next_fragment_bytes;
  if (encoded_data_next_pos_to_decode_ == 0) {
    next_fragment_bytes = GetBytesForFirstFragment(0, &end_pos);
  } else {
    next_fragment_bytes =
        GetBytesForNextFragment(encoded_data_next_pos_to_decode_, &end_pos);
  }
  size_t next_fragment_size = next_fragment_bytes.size();

  // Call Reset() just after Decode() if the fragment contains config info.
  // This tests how the VDA behaves when it gets a reset request before it has
  // a chance to ProvidePictureBuffers().
  bool reset_here = false;
  if (reset_after_frame_num_ == RESET_AFTER_FIRST_CONFIG_INFO) {
    reset_here = FragmentHasConfigInfo(
        reinterpret_cast<const uint8*>(next_fragment_bytes.data()),
        next_fragment_size,
        profile_);
    if (reset_here)
      reset_after_frame_num_ = END_OF_STREAM_RESET;
  }

  // Populate the shared memory buffer w/ the fragment, duplicate its handle,
  // and hand it off to the decoder.
  base::SharedMemory shm;
  CHECK(shm.CreateAndMapAnonymous(next_fragment_size));
  memcpy(shm.memory(), next_fragment_bytes.data(), next_fragment_size);
  base::SharedMemoryHandle dup_handle;
  CHECK(shm.ShareToProcess(base::GetCurrentProcessHandle(), &dup_handle));
  media::BitstreamBuffer bitstream_buffer(
      next_bitstream_buffer_id_, dup_handle, next_fragment_size);
  decode_start_time_[next_bitstream_buffer_id_] = base::TimeTicks::Now();
  // Mask against 30 bits, to avoid (undefined) wraparound on signed integer.
  next_bitstream_buffer_id_ = (next_bitstream_buffer_id_ + 1) & 0x3FFFFFFF;
  decoder_->Decode(bitstream_buffer);
  ++outstanding_decodes_;
  // A negative |delete_decoder_state_| presumably encodes "delete the
  // decoder after that many Decode() calls" — see the TearDownTiming test
  // instantiations that pass ClientState(-1/-10/-100).
  if (!remaining_play_throughs_ &&
      -delete_decoder_state_ == next_bitstream_buffer_id_) {
    DeleteDecoder();
  }

  if (reset_here) {
    reset_after_frame_num_ = MID_STREAM_RESET;
    decoder_->Reset();
    // Restart from the beginning to re-Decode() the SPS we just sent.
    encoded_data_next_pos_to_decode_ = 0;
  } else {
    encoded_data_next_pos_to_decode_ = end_pos;
  }

  if (decode_calls_per_second_ > 0) {
    // Pace decodes instead of running flat-out (used by the WebRTC-style
    // decode-time test).
    base::MessageLoop::current()->PostDelayedTask(
        FROM_HERE,
        base::Bind(&GLRenderingVDAClient::DecodeNextFragment, AsWeakPtr()),
        base::TimeDelta::FromSeconds(1) / decode_calls_per_second_);
  }
}
986 double GLRenderingVDAClient::frames_per_second() {
987 base::TimeDelta delta = frame_delivery_times_.back() - initialize_done_ticks_;
988 return num_decoded_frames_ / delta.InSecondsF();
991 base::TimeDelta GLRenderingVDAClient::decode_time_median() {
992 if (decode_time_.size() == 0)
993 return base::TimeDelta();
994 std::sort(decode_time_.begin(), decode_time_.end());
995 int index = decode_time_.size() / 2;
996 if (decode_time_.size() % 2 != 0)
997 return decode_time_[index];
999 return (decode_time_[index] + decode_time_[index - 1]) / 2;
// Fixture shared by the VDA tests: owns the parsed test video files and the
// RenderingHelper, and provides helpers to create and drive decoders on the
// rendering thread.
class VideoDecodeAcceleratorTest : public ::testing::Test {
 protected:
  VideoDecodeAcceleratorTest();
  void SetUp() override;
  void TearDown() override;

  // Parse |data| into its constituent parts, set the various output fields
  // accordingly, and read in video stream. CHECK-fails on unexpected or
  // missing required data. Unspecified optional fields are set to -1.
  void ParseAndReadTestVideoData(base::FilePath::StringType data,
                                 std::vector<TestVideoFile*>* test_video_files);

  // Update the parameters of |test_video_files| according to
  // |num_concurrent_decoders| and |reset_point|. Ex: the expected number of
  // frames should be adjusted if decoder is reset in the middle of the stream.
  void UpdateTestVideoFileParams(
      size_t num_concurrent_decoders,
      int reset_point,
      std::vector<TestVideoFile*>* test_video_files);

  // Sets up |rendering_helper_| and initializes it on the rendering thread,
  // blocking until initialization completes.
  void InitializeRenderingHelper(const RenderingHelperParams& helper_params);
  // Creates |client|'s decoder on the rendering thread and waits until the
  // client reports CS_DECODER_SET through |note|.
  void CreateAndStartDecoder(GLRenderingVDAClient* client,
                             ClientStateNotification<ClientState>* note);
  // Drains |note| until the client reports CS_DESTROYED (bounded number of
  // waits).
  void WaitUntilDecodeFinish(ClientStateNotification<ClientState>* note);
  // Blocks until all tasks already posted to the rendering thread have run.
  void WaitUntilIdle();
  // Overwrites the file at |log_path| with |content|.
  void OutputLogFile(const base::FilePath::CharType* log_path,
                     const std::string& content);

  std::vector<TestVideoFile*> test_video_files_;
  RenderingHelper rendering_helper_;

 private:
  // Required for Thread to work.  Not used otherwise.
  base::ShadowingAtExitManager at_exit_manager_;

  DISALLOW_COPY_AND_ASSIGN(VideoDecodeAcceleratorTest);
};
// Members are initialized lazily in SetUp(); nothing to do at construction.
VideoDecodeAcceleratorTest::VideoDecodeAcceleratorTest() {
}
// Parses the --test_video_data switch value and reads each referenced
// stream into memory.
void VideoDecodeAcceleratorTest::SetUp() {
  ParseAndReadTestVideoData(g_test_video_data, &test_video_files_);
}
void VideoDecodeAcceleratorTest::TearDown() {
  // Delete the video files on the rendering thread, where they were used.
  g_env->GetRenderingTaskRunner()->PostTask(
      FROM_HERE, base::Bind(&STLDeleteElements<std::vector<TestVideoFile*>>,
                            &test_video_files_));

  // Uninitialize the rendering helper on the rendering thread and block
  // until it finishes before tearing it down on this thread.
  base::WaitableEvent done(false, false);
  g_env->GetRenderingTaskRunner()->PostTask(
      FROM_HERE, base::Bind(&RenderingHelper::UnInitialize,
                            base::Unretained(&rendering_helper_), &done));
  done.Wait();

  rendering_helper_.TearDown();
}
1061 void VideoDecodeAcceleratorTest::ParseAndReadTestVideoData(
1062 base::FilePath::StringType data,
1063 std::vector<TestVideoFile*>* test_video_files) {
1064 std::vector<base::FilePath::StringType> entries;
1065 base::SplitString(data, ';', &entries);
1066 CHECK_GE(entries.size(), 1U) << data;
1067 for (size_t index = 0; index < entries.size(); ++index) {
1068 std::vector<base::FilePath::StringType> fields;
1069 base::SplitString(entries[index], ':', &fields);
1070 CHECK_GE(fields.size(), 1U) << entries[index];
1071 CHECK_LE(fields.size(), 8U) << entries[index];
1072 TestVideoFile* video_file = new TestVideoFile(fields[0]);
1073 if (!fields[1].empty())
1074 CHECK(base::StringToInt(fields[1], &video_file->width));
1075 if (!fields[2].empty())
1076 CHECK(base::StringToInt(fields[2], &video_file->height));
1077 if (!fields[3].empty())
1078 CHECK(base::StringToInt(fields[3], &video_file->num_frames));
1079 if (!fields[4].empty())
1080 CHECK(base::StringToInt(fields[4], &video_file->num_fragments));
1081 if (!fields[5].empty())
1082 CHECK(base::StringToInt(fields[5], &video_file->min_fps_render));
1083 if (!fields[6].empty())
1084 CHECK(base::StringToInt(fields[6], &video_file->min_fps_no_render));
1085 int profile = -1;
1086 if (!fields[7].empty())
1087 CHECK(base::StringToInt(fields[7], &profile));
1088 video_file->profile = static_cast<media::VideoCodecProfile>(profile);
1090 // Read in the video data.
1091 base::FilePath filepath(video_file->file_name);
1092 CHECK(base::ReadFileToString(filepath, &video_file->data_str))
1093 << "test_video_file: " << filepath.MaybeAsASCII();
1095 test_video_files->push_back(video_file);
1099 void VideoDecodeAcceleratorTest::UpdateTestVideoFileParams(
1100 size_t num_concurrent_decoders,
1101 int reset_point,
1102 std::vector<TestVideoFile*>* test_video_files) {
1103 for (size_t i = 0; i < test_video_files->size(); i++) {
1104 TestVideoFile* video_file = (*test_video_files)[i];
1105 if (reset_point == MID_STREAM_RESET) {
1106 // Reset should not go beyond the last frame;
1107 // reset in the middle of the stream for short videos.
1108 video_file->reset_after_frame_num = kMaxResetAfterFrameNum;
1109 if (video_file->num_frames <= video_file->reset_after_frame_num)
1110 video_file->reset_after_frame_num = video_file->num_frames / 2;
1112 video_file->num_frames += video_file->reset_after_frame_num;
1113 } else {
1114 video_file->reset_after_frame_num = reset_point;
1117 if (video_file->min_fps_render != -1)
1118 video_file->min_fps_render /= num_concurrent_decoders;
1119 if (video_file->min_fps_no_render != -1)
1120 video_file->min_fps_no_render /= num_concurrent_decoders;
void VideoDecodeAcceleratorTest::InitializeRenderingHelper(
    const RenderingHelperParams& helper_params) {
  rendering_helper_.Setup();

  // RenderingHelper::Initialize must run on the rendering thread; block
  // here until it signals completion.
  base::WaitableEvent done(false, false);
  g_env->GetRenderingTaskRunner()->PostTask(
      FROM_HERE,
      base::Bind(&RenderingHelper::Initialize,
                 base::Unretained(&rendering_helper_), helper_params, &done));
  done.Wait();
}
// Creates |client|'s decoder on the rendering thread, then blocks until the
// client reports CS_DECODER_SET through |note|.
void VideoDecodeAcceleratorTest::CreateAndStartDecoder(
    GLRenderingVDAClient* client,
    ClientStateNotification<ClientState>* note) {
  g_env->GetRenderingTaskRunner()->PostTask(
      FROM_HERE, base::Bind(&GLRenderingVDAClient::CreateAndStartDecoder,
                            base::Unretained(client)));
  ASSERT_EQ(note->Wait(), CS_DECODER_SET);
}
1145 void VideoDecodeAcceleratorTest::WaitUntilDecodeFinish(
1146 ClientStateNotification<ClientState>* note) {
1147 for (int i = 0; i < CS_MAX; i++) {
1148 if (note->Wait() == CS_DESTROYED)
1149 break;
// Posts a signal task to the rendering thread and waits for it; once it has
// run, every task queued before it has finished, i.e. the thread is idle
// with respect to earlier work.
void VideoDecodeAcceleratorTest::WaitUntilIdle() {
  base::WaitableEvent done(false, false);
  g_env->GetRenderingTaskRunner()->PostTask(
      FROM_HERE,
      base::Bind(&base::WaitableEvent::Signal, base::Unretained(&done)));
  done.Wait();
}
1161 void VideoDecodeAcceleratorTest::OutputLogFile(
1162 const base::FilePath::CharType* log_path,
1163 const std::string& content) {
1164 base::File file(base::FilePath(log_path),
1165 base::File::FLAG_CREATE_ALWAYS | base::File::FLAG_WRITE);
1166 file.WriteAtCurrentPos(content.data(), content.length());
1169 // Test parameters:
1170 // - Number of concurrent decoders. The value takes effect when there is only
1171 // one input stream; otherwise, one decoder per input stream will be
1172 // instantiated.
1173 // - Number of concurrent in-flight Decode() calls per decoder.
1174 // - Number of play-throughs.
1175 // - reset_after_frame_num: see GLRenderingVDAClient ctor.
1176 // - delete_decoder_phase: see GLRenderingVDAClient ctor.
1177 // - whether to test slow rendering by delaying ReusePictureBuffer().
1178 // - whether the video frames are rendered as thumbnails.
// Value-parameterized fixture; the meaning of each tuple element is
// documented in the comment immediately above.
class VideoDecodeAcceleratorParamTest
    : public VideoDecodeAcceleratorTest,
      public ::testing::WithParamInterface<
          base::Tuple<int, int, int, ResetPoint, ClientState, bool, bool> > {
};
1185 // Helper so that gtest failures emit a more readable version of the tuple than
1186 // its byte representation.
1187 ::std::ostream& operator<<(
1188 ::std::ostream& os,
1189 const base::Tuple<int, int, int, ResetPoint, ClientState, bool, bool>& t) {
1190 return os << base::get<0>(t) << ", " << base::get<1>(t) << ", "
1191 << base::get<2>(t) << ", " << base::get<3>(t) << ", "
1192 << base::get<4>(t) << ", " << base::get<5>(t) << ", "
1193 << base::get<6>(t);
1196 // Wait for |note| to report a state and if it's not |expected_state| then
1197 // assert |client| has deleted its decoder.
static void AssertWaitForStateOrDeleted(
    ClientStateNotification<ClientState>* note,
    GLRenderingVDAClient* client,
    ClientState expected_state) {
  ClientState state = note->Wait();
  if (state == expected_state) return;
  // A different state is only legitimate when the test has already deleted
  // the decoder (e.g. the mid-stream Destroy() scenarios).
  ASSERT_TRUE(client->decoder_deleted())
      << "Decoder not deleted but Wait() returned " << state
      << ", instead of " << expected_state;
}
// We assert a minimal number of concurrent decoders we expect to succeed.
// Different platforms can support more concurrent decoders, so we don't assert
// failure above this.
// (Anonymous enum so the value is a compile-time constant usable in test
// parameters; the "+0" at use sites promotes it to int.)
enum { kMinSupportedNumConcurrentDecoders = 3 };
1214 // Test the most straightforward case possible: data is decoded from a single
1215 // chunk and rendered to the screen.
TEST_P(VideoDecodeAcceleratorParamTest, TestSimpleDecode) {
  // Unpack the test parameters (see the tuple documentation above the
  // fixture class).
  size_t num_concurrent_decoders = base::get<0>(GetParam());
  const size_t num_in_flight_decodes = base::get<1>(GetParam());
  int num_play_throughs = base::get<2>(GetParam());
  const int reset_point = base::get<3>(GetParam());
  const int delete_decoder_state = base::get<4>(GetParam());
  bool test_reuse_delay = base::get<5>(GetParam());
  const bool render_as_thumbnails = base::get<6>(GetParam());

  // With multiple input streams, use one decoder per stream regardless of
  // the requested concurrency.
  if (test_video_files_.size() > 1)
    num_concurrent_decoders = test_video_files_.size();

  // --num_play_throughs overrides the test parameter when given.
  if (g_num_play_throughs > 0)
    num_play_throughs = g_num_play_throughs;

  UpdateTestVideoFileParams(
      num_concurrent_decoders, reset_point, &test_video_files_);

  // Suppress GL rendering for all tests when the "--rendering_fps" is 0.
  const bool suppress_rendering = g_rendering_fps == 0;

  std::vector<ClientStateNotification<ClientState>*>
      notes(num_concurrent_decoders, NULL);
  std::vector<GLRenderingVDAClient*> clients(num_concurrent_decoders, NULL);

  RenderingHelperParams helper_params;
  helper_params.rendering_fps = g_rendering_fps;
  helper_params.warm_up_iterations = g_rendering_warm_up;
  helper_params.render_as_thumbnails = render_as_thumbnails;
  if (render_as_thumbnails) {
    // Only one decoder is supported with thumbnail rendering
    CHECK_EQ(num_concurrent_decoders, 1U);
    helper_params.thumbnails_page_size = kThumbnailsPageSize;
    helper_params.thumbnail_size = kThumbnailSize;
  }

  // First kick off all the decoders.
  for (size_t index = 0; index < num_concurrent_decoders; ++index) {
    // Round-robin the input streams across the decoders.
    TestVideoFile* video_file =
        test_video_files_[index % test_video_files_.size()];
    ClientStateNotification<ClientState>* note =
        new ClientStateNotification<ClientState>();
    notes[index] = note;

    // When testing slow rendering, delay ReusePictureBuffer() near the end
    // of the stream (only if the stream is long enough for the delay to be
    // observable).
    int delay_after_frame_num = std::numeric_limits<int>::max();
    if (test_reuse_delay &&
        kMaxFramesToDelayReuse * 2 < video_file->num_frames) {
      delay_after_frame_num = video_file->num_frames - kMaxFramesToDelayReuse;
    }

    GLRenderingVDAClient* client =
        new GLRenderingVDAClient(index,
                                 &rendering_helper_,
                                 note,
                                 video_file->data_str,
                                 num_in_flight_decodes,
                                 num_play_throughs,
                                 video_file->reset_after_frame_num,
                                 delete_decoder_state,
                                 video_file->width,
                                 video_file->height,
                                 video_file->profile,
                                 g_fake_decoder,
                                 suppress_rendering,
                                 delay_after_frame_num,
                                 0,  // decode_calls_per_second: no pacing.
                                 render_as_thumbnails);

    clients[index] = client;
    helper_params.window_sizes.push_back(
        render_as_thumbnails
            ? kThumbnailsPageSize
            : gfx::Size(video_file->width, video_file->height));
  }

  InitializeRenderingHelper(helper_params);

  for (size_t index = 0; index < num_concurrent_decoders; ++index) {
    CreateAndStartDecoder(clients[index], notes[index]);
  }

  // Then wait for all the decodes to finish.
  // Only check performance & correctness later if we play through only once.
  bool skip_performance_and_correctness_checks = num_play_throughs > 1;
  for (size_t i = 0; i < num_concurrent_decoders; ++i) {
    ClientStateNotification<ClientState>* note = notes[i];
    ClientState state = note->Wait();
    if (state != CS_INITIALIZED) {
      skip_performance_and_correctness_checks = true;
      // We expect initialization to fail only when more than the supported
      // number of decoders is instantiated.  Assert here that something else
      // didn't trigger failure.
      ASSERT_GT(num_concurrent_decoders,
                static_cast<size_t>(kMinSupportedNumConcurrentDecoders));
      continue;
    }
    ASSERT_EQ(state, CS_INITIALIZED);
    for (int n = 0; n < num_play_throughs; ++n) {
      // For play-throughs other than the first, we expect initialization to
      // succeed unconditionally.
      if (n > 0) {
        ASSERT_NO_FATAL_FAILURE(
            AssertWaitForStateOrDeleted(note, clients[i], CS_INITIALIZED));
      }
      // InitializeDone kicks off decoding inside the client, so we just need
      // to wait for Flush.
      ASSERT_NO_FATAL_FAILURE(
          AssertWaitForStateOrDeleted(note, clients[i], CS_FLUSHING));
      ASSERT_NO_FATAL_FAILURE(
          AssertWaitForStateOrDeleted(note, clients[i], CS_FLUSHED));
      // FlushDone requests Reset().
      ASSERT_NO_FATAL_FAILURE(
          AssertWaitForStateOrDeleted(note, clients[i], CS_RESETTING));
    }
    ASSERT_NO_FATAL_FAILURE(
        AssertWaitForStateOrDeleted(note, clients[i], CS_RESET));
    // ResetDone requests Destroy().
    ASSERT_NO_FATAL_FAILURE(
        AssertWaitForStateOrDeleted(note, clients[i], CS_DESTROYED));
  }
  // Finally assert that decoding went as expected.
  for (size_t i = 0; i < num_concurrent_decoders &&
           !skip_performance_and_correctness_checks; ++i) {
    // We can only make performance/correctness assertions if the decoder was
    // allowed to finish.
    if (delete_decoder_state < CS_FLUSHED)
      continue;
    GLRenderingVDAClient* client = clients[i];
    TestVideoFile* video_file = test_video_files_[i % test_video_files_.size()];
    if (video_file->num_frames > 0) {
      // Expect the decoded frames may be more than the video frames as frames
      // could still be returned until resetting done.
      if (video_file->reset_after_frame_num > 0)
        EXPECT_GE(client->num_decoded_frames(), video_file->num_frames);
      else
        EXPECT_EQ(client->num_decoded_frames(), video_file->num_frames);
    }
    if (reset_point == END_OF_STREAM_RESET) {
      // Every fragment in the stream was either skipped or queued, and each
      // queued fragment produced exactly one NotifyEndOfBitstreamBuffer.
      EXPECT_EQ(video_file->num_fragments, client->num_skipped_fragments() +
                client->num_queued_fragments());
      EXPECT_EQ(client->num_done_bitstream_buffers(),
                client->num_queued_fragments());
    }
    LOG(INFO) << "Decoder " << i << " fps: " << client->frames_per_second();
    if (!render_as_thumbnails) {
      int min_fps = suppress_rendering ?
                    video_file->min_fps_no_render : video_file->min_fps_render;
      if (min_fps > 0 && !test_reuse_delay)
        EXPECT_GT(client->frames_per_second(), min_fps);
    }
  }

  if (render_as_thumbnails) {
    // Compare the rendered thumbnail page against the golden MD5 list; on
    // mismatch, dump the page as a PNG next to the input file for debugging.
    std::vector<unsigned char> rgb;
    bool alpha_solid;
    base::WaitableEvent done(false, false);
    g_env->GetRenderingTaskRunner()->PostTask(
        FROM_HERE, base::Bind(&RenderingHelper::GetThumbnailsAsRGB,
                              base::Unretained(&rendering_helper_), &rgb,
                              &alpha_solid, &done));
    done.Wait();

    std::vector<std::string> golden_md5s;
    std::string md5_string = base::MD5String(
        base::StringPiece(reinterpret_cast<char*>(&rgb[0]), rgb.size()));
    ReadGoldenThumbnailMD5s(test_video_files_[0], &golden_md5s);
    std::vector<std::string>::iterator match =
        find(golden_md5s.begin(), golden_md5s.end(), md5_string);
    if (match == golden_md5s.end()) {
      // Convert raw RGB into PNG for export.
      std::vector<unsigned char> png;
      gfx::PNGCodec::Encode(&rgb[0],
                            gfx::PNGCodec::FORMAT_RGB,
                            kThumbnailsPageSize,
                            kThumbnailsPageSize.width() * 3,
                            true,
                            std::vector<gfx::PNGCodec::Comment>(),
                            &png);

      LOG(ERROR) << "Unknown thumbnails MD5: " << md5_string;

      base::FilePath filepath(test_video_files_[0]->file_name);
      filepath = filepath.AddExtension(FILE_PATH_LITERAL(".bad_thumbnails"));
      filepath = filepath.AddExtension(FILE_PATH_LITERAL(".png"));
      int num_bytes = base::WriteFile(filepath,
                                      reinterpret_cast<char*>(&png[0]),
                                      png.size());
      ASSERT_EQ(num_bytes, static_cast<int>(png.size()));
    }
    ASSERT_NE(match, golden_md5s.end());
    EXPECT_EQ(alpha_solid, true) << "RGBA frame had incorrect alpha";
  }

  // Output the frame delivery time to file
  // We can only make performance/correctness assertions if the decoder was
  // allowed to finish.
  if (g_output_log != NULL && delete_decoder_state >= CS_FLUSHED) {
    base::File output_file(
        base::FilePath(g_output_log),
        base::File::FLAG_CREATE_ALWAYS | base::File::FLAG_WRITE);
    for (size_t i = 0; i < num_concurrent_decoders; ++i) {
      clients[i]->OutputFrameDeliveryTimes(&output_file);
    }
  }

  // Clients and notes were used on the rendering thread; delete them there.
  g_env->GetRenderingTaskRunner()->PostTask(
      FROM_HERE,
      base::Bind(&STLDeleteElements<std::vector<GLRenderingVDAClient*>>,
                 &clients));
  g_env->GetRenderingTaskRunner()->PostTask(
      FROM_HERE,
      base::Bind(&STLDeleteElements<
                     std::vector<ClientStateNotification<ClientState>*>>,
                 &notes));
  WaitUntilIdle();
}
// Test that replay after EOS works fine.
INSTANTIATE_TEST_CASE_P(
    ReplayAfterEOS, VideoDecodeAcceleratorParamTest,
    ::testing::Values(
        MakeTuple(1, 1, 4, END_OF_STREAM_RESET, CS_RESET, false, false)));

// Test that Reset() before the first Decode() works fine.
INSTANTIATE_TEST_CASE_P(
    ResetBeforeDecode, VideoDecodeAcceleratorParamTest,
    ::testing::Values(
        MakeTuple(1, 1, 1, START_OF_STREAM_RESET, CS_RESET, false, false)));

// Test Reset() immediately after Decode() containing config info.
INSTANTIATE_TEST_CASE_P(
    ResetAfterFirstConfigInfo, VideoDecodeAcceleratorParamTest,
    ::testing::Values(
        MakeTuple(
            1, 1, 1, RESET_AFTER_FIRST_CONFIG_INFO, CS_RESET, false, false)));

// Test that Reset() mid-stream works fine and doesn't affect decoding even
// when Decode() calls are made during the reset.
INSTANTIATE_TEST_CASE_P(
    MidStreamReset, VideoDecodeAcceleratorParamTest,
    ::testing::Values(
        MakeTuple(1, 1, 1, MID_STREAM_RESET, CS_RESET, false, false)));

// Test slow rendering: the |test_reuse_delay| element delays
// ReusePictureBuffer() calls near the end of the stream.
INSTANTIATE_TEST_CASE_P(
    SlowRendering, VideoDecodeAcceleratorParamTest,
    ::testing::Values(
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESET, true, false)));

// Test that Destroy() mid-stream works fine (primarily this is testing that no
// crashes occur).
INSTANTIATE_TEST_CASE_P(
    TearDownTiming, VideoDecodeAcceleratorParamTest,
    ::testing::Values(
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_DECODER_SET, false, false),
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_INITIALIZED, false, false),
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_FLUSHING, false, false),
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_FLUSHED, false, false),
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESETTING, false, false),
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESET, false, false),
        // Negative ClientState values appear to request decoder deletion
        // after that many Decode() calls (see the -delete_decoder_state_
        // check in DecodeNextFragment).
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET,
                  static_cast<ClientState>(-1), false, false),
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET,
                  static_cast<ClientState>(-10), false, false),
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET,
                  static_cast<ClientState>(-100), false, false)));

// Test that decoding works with various numbers of in-flight Decode() calls.
INSTANTIATE_TEST_CASE_P(
    DecodeVariations, VideoDecodeAcceleratorParamTest,
    ::testing::Values(
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESET, false, false),
        MakeTuple(1, 10, 1, END_OF_STREAM_RESET, CS_RESET, false, false),
        // Tests queuing.
        MakeTuple(1, 15, 1, END_OF_STREAM_RESET, CS_RESET, false, false)));

// Find out how many concurrent decoders can go before we exhaust system
// resources.
INSTANTIATE_TEST_CASE_P(
    ResourceExhaustion, VideoDecodeAcceleratorParamTest,
    ::testing::Values(
        // +0 hack below to promote enum to int.
        MakeTuple(kMinSupportedNumConcurrentDecoders + 0, 1, 1,
                  END_OF_STREAM_RESET, CS_RESET, false, false),
        MakeTuple(kMinSupportedNumConcurrentDecoders + 1, 1, 1,
                  END_OF_STREAM_RESET, CS_RESET, false, false)));

// Thumbnailing test
INSTANTIATE_TEST_CASE_P(
    Thumbnail, VideoDecodeAcceleratorParamTest,
    ::testing::Values(
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESET, false, true)));
1508 // Measure the median of the decode time when VDA::Decode is called 30 times per
1509 // second.
TEST_F(VideoDecodeAcceleratorTest, TestDecodeTimeMedian) {
  RenderingHelperParams helper_params;

  // Disable rendering by setting the rendering_fps = 0.
  helper_params.rendering_fps = 0;
  helper_params.warm_up_iterations = 0;
  helper_params.render_as_thumbnails = false;

  ClientStateNotification<ClientState>* note =
      new ClientStateNotification<ClientState>();
  GLRenderingVDAClient* client =
      new GLRenderingVDAClient(0,
                               &rendering_helper_,
                               note,
                               test_video_files_[0]->data_str,
                               1,  // num_in_flight_decodes
                               1,  // num_play_throughs
                               test_video_files_[0]->reset_after_frame_num,
                               CS_RESET,
                               test_video_files_[0]->width,
                               test_video_files_[0]->height,
                               test_video_files_[0]->profile,
                               g_fake_decoder,
                               true,  // suppress_rendering
                               std::numeric_limits<int>::max(),
                               kWebRtcDecodeCallsPerSecond,
                               false /* render_as_thumbnail */);
  helper_params.window_sizes.push_back(
      gfx::Size(test_video_files_[0]->width, test_video_files_[0]->height));
  InitializeRenderingHelper(helper_params);
  CreateAndStartDecoder(client, note);
  WaitUntilDecodeFinish(note);

  base::TimeDelta decode_time_median = client->decode_time_median();
  std::string output_string =
      base::StringPrintf("Decode time median: %" PRId64 " us",
                         decode_time_median.InMicroseconds());
  LOG(INFO) << output_string;

  if (g_output_log != NULL)
    OutputLogFile(g_output_log, output_string);

  // Delete the client and note on the rendering thread, where they were
  // used, then wait for the deletions to run.
  g_env->GetRenderingTaskRunner()->DeleteSoon(FROM_HERE, client);
  g_env->GetRenderingTaskRunner()->DeleteSoon(FROM_HERE, note);
  WaitUntilIdle();
}
1557 // TODO(fischman, vrk): add more tests! In particular:
1558 // - Test life-cycle: Seek/Stop/Pause/Play for a single decoder.
1559 // - Test alternate configurations
1560 // - Test failure conditions.
1561 // - Test frame size changes mid-stream
1563 } // namespace
1564 } // namespace content
1566 int main(int argc, char **argv) {
1567 testing::InitGoogleTest(&argc, argv); // Removes gtest-specific args.
1568 base::CommandLine::Init(argc, argv);
1570 // Needed to enable DVLOG through --vmodule.
1571 logging::LoggingSettings settings;
1572 settings.logging_dest = logging::LOG_TO_SYSTEM_DEBUG_LOG;
1573 CHECK(logging::InitLogging(settings));
1575 const base::CommandLine* cmd_line = base::CommandLine::ForCurrentProcess();
1576 DCHECK(cmd_line);
1578 base::CommandLine::SwitchMap switches = cmd_line->GetSwitches();
1579 for (base::CommandLine::SwitchMap::const_iterator it = switches.begin();
1580 it != switches.end(); ++it) {
1581 if (it->first == "test_video_data") {
1582 content::g_test_video_data = it->second.c_str();
1583 continue;
1585 // The output log for VDA performance test.
1586 if (it->first == "output_log") {
1587 content::g_output_log = it->second.c_str();
1588 continue;
1590 if (it->first == "rendering_fps") {
1591 // On Windows, CommandLine::StringType is wstring. We need to convert
1592 // it to std::string first
1593 std::string input(it->second.begin(), it->second.end());
1594 CHECK(base::StringToDouble(input, &content::g_rendering_fps));
1595 continue;
1597 if (it->first == "rendering_warm_up") {
1598 std::string input(it->second.begin(), it->second.end());
1599 CHECK(base::StringToInt(input, &content::g_rendering_warm_up));
1600 continue;
1602 // TODO(owenlin): Remove this flag once it is not used in autotest.
1603 if (it->first == "disable_rendering") {
1604 content::g_rendering_fps = 0;
1605 continue;
1608 if (it->first == "num_play_throughs") {
1609 std::string input(it->second.begin(), it->second.end());
1610 CHECK(base::StringToInt(input, &content::g_num_play_throughs));
1611 continue;
1613 if (it->first == "fake_decoder") {
1614 content::g_fake_decoder = 1;
1615 continue;
1617 if (it->first == "v" || it->first == "vmodule")
1618 continue;
1619 if (it->first == "ozone-platform" || it->first == "ozone-use-surfaceless")
1620 continue;
1621 LOG(FATAL) << "Unexpected switch: " << it->first << ":" << it->second;
1624 base::ShadowingAtExitManager at_exit_manager;
1625 #if defined(OS_WIN) || defined(USE_OZONE)
1626 // For windows the decoding thread initializes the media foundation decoder
1627 // which uses COM. We need the thread to be a UI thread.
1628 // On Ozone, the backend initializes the event system using a UI
1629 // thread.
1630 base::MessageLoopForUI main_loop;
1631 #else
1632 base::MessageLoop main_loop;
1633 #endif // OS_WIN || USE_OZONE
1635 #if defined(USE_OZONE)
1636 ui::OzonePlatform::InitializeForUI();
1637 #endif
1639 #if defined(OS_CHROMEOS) && defined(ARCH_CPU_X86_FAMILY)
1640 content::VaapiWrapper::PreSandboxInitialization();
1641 #endif
1643 content::g_env =
1644 reinterpret_cast<content::VideoDecodeAcceleratorTestEnvironment*>(
1645 testing::AddGlobalTestEnvironment(
1646 new content::VideoDecodeAcceleratorTestEnvironment()));
1648 return RUN_ALL_TESTS();