1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 //
5 // The bulk of this file is support code; sorry about that. Here's an overview
6 // to hopefully help readers of this code:
7 // - RenderingHelper is charged with interacting with X11/{EGL/GLES2,GLX/GL} or
8 // Win/EGL.
9 // - ClientState is an enum for the state of the decode client used by the test.
10 // - ClientStateNotification is a barrier abstraction that allows the test code
11 // to be written sequentially and wait for the decode client to see certain
12 // state transitions.
13 // - GLRenderingVDAClient is a VideoDecodeAccelerator::Client implementation
14 // - Finally actual TEST cases are at the bottom of this file, using the above
15 // infrastructure.
17 #include <fcntl.h>
18 #include <sys/stat.h>
19 #include <sys/types.h>
20 #include <algorithm>
21 #include <deque>
22 #include <map>
24 // Include gtest.h out of order because <X11/X.h> #define's Bool & None, which
25 // gtest uses as struct names (inside a namespace). This means that
26 // #include'ing gtest after anything that pulls in X.h fails to compile.
27 // This is http://code.google.com/p/googletest/issues/detail?id=371
28 #include "testing/gtest/include/gtest/gtest.h"
30 #include "base/at_exit.h"
31 #include "base/bind.h"
32 #include "base/command_line.h"
33 #include "base/file_util.h"
34 #include "base/files/file.h"
35 #include "base/format_macros.h"
36 #include "base/md5.h"
37 #include "base/message_loop/message_loop_proxy.h"
38 #include "base/process/process.h"
39 #include "base/stl_util.h"
40 #include "base/strings/string_number_conversions.h"
41 #include "base/strings/string_split.h"
42 #include "base/strings/stringize_macros.h"
43 #include "base/strings/stringprintf.h"
44 #include "base/strings/utf_string_conversions.h"
45 #include "base/synchronization/condition_variable.h"
46 #include "base/synchronization/lock.h"
47 #include "base/synchronization/waitable_event.h"
48 #include "base/threading/thread.h"
49 #include "content/common/gpu/media/rendering_helper.h"
50 #include "content/common/gpu/media/video_accelerator_unittest_helpers.h"
51 #include "content/public/common/content_switches.h"
52 #include "media/filters/h264_parser.h"
53 #include "ui/gfx/codec/png_codec.h"
55 #if defined(OS_WIN)
56 #include "content/common/gpu/media/dxva_video_decode_accelerator.h"
57 #elif defined(OS_CHROMEOS) && defined(ARCH_CPU_ARMEL)
58 #include "content/common/gpu/media/v4l2_video_decode_accelerator.h"
59 #include "content/common/gpu/media/v4l2_video_device.h"
60 #elif defined(OS_CHROMEOS) && defined(ARCH_CPU_X86_FAMILY)
61 #include "content/common/gpu/media/vaapi_video_decode_accelerator.h"
62 #include "content/common/gpu/media/vaapi_wrapper.h"
63 #if defined(USE_X11)
64 #include "ui/gl/gl_implementation.h"
65 #endif // USE_X11
66 #else
67 #error The VideoAccelerator tests are not supported on this platform.
68 #endif // OS_WIN
70 using media::VideoDecodeAccelerator;
72 namespace content {
73 namespace {
75 // Values optionally filled in from flags; see main() below.
76 // The syntax of multiple test videos is:
77 // test-video1;test-video2;test-video3
78 // where only the first video is required; any additional videos are decoded
79 // by concurrent decoders.
80 // The syntax of each test-video is:
81 // filename:width:height:numframes:numfragments:minFPSwithRender:minFPSnoRender:profile
82 // where only the first field is required. Value details:
83 // - |filename| must be an h264 Annex B (NAL) stream or an IVF VP8 stream.
84 // - |width| and |height| are in pixels.
85 // - |numframes| is the number of picture frames in the file.
86 // - |numfragments| is the NALU (h264) or frame (VP8) count in the stream.
87 // - |minFPSwithRender| and |minFPSnoRender| are minimum frames/second speeds
88 // expected to be achieved with and without rendering to the screen, resp.
89 // (the latter tests just decode speed).
90 // - |profile| is the media::VideoCodecProfile set during Initialization.
91 // An empty value for a numeric field means "ignore".
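// Example (the values here are illustrative, not measured expectations):
//   --test_video_data="test-25fps.h264:320:240:250:258:50:175:1;test-25fps.vp8:320:240"
// runs an H.264 decoder with full expectations plus a second, concurrent VP8
// decoder for which only the filename, width and height are given.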
92 const base::FilePath::CharType* g_test_video_data =
93 // FILE_PATH_LITERAL("test-25fps.vp8:320:240:250:250:50:175:11");
94 FILE_PATH_LITERAL("test-25fps.h264:320:240:250:258:50:175:1");
96 // The file path of the test output log. This is used to communicate the test
97 // results to CrOS autotests. We can enable the log and specify the filename by
98 // the "--output_log" switch.
99 const base::FilePath::CharType* g_output_log = NULL;
101 // The value is set by the switch "--rendering_fps".
102 double g_rendering_fps = 60;
104 // Magic constants for differentiating the reasons for NotifyResetDone being
105 // called.
106 enum ResetPoint {
107 // Reset() just after calling Decode() with a fragment containing config info.
108 RESET_AFTER_FIRST_CONFIG_INFO = -4,
109 START_OF_STREAM_RESET = -3,
110 MID_STREAM_RESET = -2,
111 END_OF_STREAM_RESET = -1
114 const int kMaxResetAfterFrameNum = 100;
115 const int kMaxFramesToDelayReuse = 64;
116 const base::TimeDelta kReuseDelay = base::TimeDelta::FromSeconds(1);
117 // Simulate WebRTC and call VDA::Decode 30 times per second.
118 const int kWebRtcDecodeCallsPerSecond = 30;
120 struct TestVideoFile {
121 explicit TestVideoFile(base::FilePath::StringType file_name)
122 : file_name(file_name),
123 width(-1),
124 height(-1),
125 num_frames(-1),
126 num_fragments(-1),
127 min_fps_render(-1),
128 min_fps_no_render(-1),
129 profile(media::VIDEO_CODEC_PROFILE_UNKNOWN),
130 reset_after_frame_num(END_OF_STREAM_RESET) {
133 base::FilePath::StringType file_name;
134 int width;
135 int height;
136 int num_frames;
137 int num_fragments;
138 int min_fps_render;
139 int min_fps_no_render;
140 media::VideoCodecProfile profile;
141 int reset_after_frame_num;
142 std::string data_str;
145 const gfx::Size kThumbnailsPageSize(1600, 1200);
146 const gfx::Size kThumbnailSize(160, 120);
147 const int kMD5StringLength = 32;
149 // Read in golden MD5s for the thumbnailed rendering of this video
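// The golden file lives next to the video as |file_name| + ".md5": one
// 32-character hex MD5 per line; blank lines and lines starting with '#' are
// ignored.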
150 void ReadGoldenThumbnailMD5s(const TestVideoFile* video_file,
151 std::vector<std::string>* md5_strings) {
152 base::FilePath filepath(video_file->file_name);
153 filepath = filepath.AddExtension(FILE_PATH_LITERAL(".md5"));
154 std::string all_md5s;
155 base::ReadFileToString(filepath, &all_md5s);
156 base::SplitString(all_md5s, '\n', md5_strings);
157 // Check these are legitimate MD5s.
158 for (std::vector<std::string>::iterator md5_string = md5_strings->begin();
159 md5_string != md5_strings->end(); ++md5_string) {
160 // Ignore the empty string added by SplitString
161 if (!md5_string->length())
162 continue;
163 // Ignore comments
164 if (md5_string->at(0) == '#')
165 continue;
167 CHECK_EQ(static_cast<int>(md5_string->length()),
168 kMD5StringLength) << *md5_string;
169 bool hex_only = std::count_if(md5_string->begin(),
170 md5_string->end(), isxdigit) ==
171 kMD5StringLength;
172 CHECK(hex_only) << *md5_string;
174 CHECK_GE(md5_strings->size(), 1U) << all_md5s;
177 // State of the GLRenderingVDAClient below. Order matters here as the test
178 // makes assumptions about it.
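// A successful play-through advances CS_CREATED -> CS_DECODER_SET ->
// CS_INITIALIZED -> CS_FLUSHING -> CS_FLUSHED -> CS_RESETTING -> CS_RESET;
// DeleteDecoder() then cascades through any remaining states up to CS_MAX.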
179 enum ClientState {
180 CS_CREATED = 0,
181 CS_DECODER_SET = 1,
182 CS_INITIALIZED = 2,
183 CS_FLUSHING = 3,
184 CS_FLUSHED = 4,
185 CS_RESETTING = 5,
186 CS_RESET = 6,
187 CS_ERROR = 7,
188 CS_DESTROYED = 8,
189 CS_MAX, // Must be last entry.
192 // Client that can accept callbacks from a VideoDecodeAccelerator and is used by
193 // the TESTs below.
194 class GLRenderingVDAClient
195 : public VideoDecodeAccelerator::Client,
196 public base::SupportsWeakPtr<GLRenderingVDAClient> {
197 public:
198 // |window_id| is the id of the client's window, used to identify its
199 // rendering area in the |rendering_helper|.
200 // Doesn't take ownership of |rendering_helper| or |note|, which must outlive
201 // |*this|.
202 // |num_play_throughs| indicates how many times to play through the video.
203 // |reset_after_frame_num| can be a frame number >=0 indicating a mid-stream
204 // Reset() should be done after that frame number is delivered, or
205 // END_OF_STREAM_RESET to indicate no mid-stream Reset().
206 // |delete_decoder_state| indicates when the underlying decoder should be
207 // Destroy()'d and deleted and can take values: N<0: delete after -N Decode()
208 // calls have been made, N>=0 means interpret as ClientState.
209 // Both |reset_after_frame_num| & |delete_decoder_state| apply only to the
210 // last play-through (governed by |num_play_throughs|).
211 // |suppress_rendering| indicates whether GL rendering is suppressed.
212 // After the |delay_reuse_after_frame_num|'th frame has been delivered, the
213 // client starts delaying each ReusePictureBuffer() call by kReuseDelay.
214 // |decode_calls_per_second| is the number of VDA::Decode calls per second.
215 // If |decode_calls_per_second| > 0, |num_in_flight_decodes| must be 1.
216 GLRenderingVDAClient(size_t window_id,
217 RenderingHelper* rendering_helper,
218 ClientStateNotification<ClientState>* note,
219 const std::string& encoded_data,
220 int num_in_flight_decodes,
221 int num_play_throughs,
222 int reset_after_frame_num,
223 int delete_decoder_state,
224 int frame_width,
225 int frame_height,
226 media::VideoCodecProfile profile,
227 bool suppress_rendering,
228 int delay_reuse_after_frame_num,
229 int decode_calls_per_second,
230 bool render_as_thumbnails);
231 virtual ~GLRenderingVDAClient();
232 void CreateAndStartDecoder();
234 // VideoDecodeAccelerator::Client implementation.
235 // The heart of the Client.
236 virtual void ProvidePictureBuffers(uint32 requested_num_of_buffers,
237 const gfx::Size& dimensions,
238 uint32 texture_target) OVERRIDE;
239 virtual void DismissPictureBuffer(int32 picture_buffer_id) OVERRIDE;
240 virtual void PictureReady(const media::Picture& picture) OVERRIDE;
241 // Simple state changes.
242 virtual void NotifyEndOfBitstreamBuffer(int32 bitstream_buffer_id) OVERRIDE;
243 virtual void NotifyFlushDone() OVERRIDE;
244 virtual void NotifyResetDone() OVERRIDE;
245 virtual void NotifyError(VideoDecodeAccelerator::Error error) OVERRIDE;
247 void OutputFrameDeliveryTimes(base::File* output);
249 // Simple getters for inspecting the state of the Client.
250 int num_done_bitstream_buffers() { return num_done_bitstream_buffers_; }
251 int num_skipped_fragments() { return num_skipped_fragments_; }
252 int num_queued_fragments() { return num_queued_fragments_; }
253 int num_decoded_frames() { return num_decoded_frames_; }
254 double frames_per_second();
255 // Return the median of the decode time of all decoded frames.
256 base::TimeDelta decode_time_median();
257 bool decoder_deleted() { return !decoder_.get(); }
259 private:
260 typedef std::map<int, media::PictureBuffer*> PictureBufferById;
262 void SetState(ClientState new_state);
263 void FinishInitialization();
264 void ReturnPicture(int32 picture_buffer_id);
266 // Delete the associated decoder helper.
267 void DeleteDecoder();
269 // Compute & return the first encoded bytes (including a start frame) to send
270 // to the decoder, starting at |start_pos| and returning one fragment. Skips
271 // to the first decodable position.
272 std::string GetBytesForFirstFragment(size_t start_pos, size_t* end_pos);
273 // Compute & return the encoded bytes of next fragment to send to the decoder
274 // (based on |start_pos|).
275 std::string GetBytesForNextFragment(size_t start_pos, size_t* end_pos);
276 // Helpers for GetBytesForNextFragment above.
277 void GetBytesForNextNALU(size_t start_pos, size_t* end_pos); // For h.264.
278 std::string GetBytesForNextFrame(
279 size_t start_pos, size_t* end_pos); // For VP8.
281 // Request decode of the next fragment in the encoded data.
282 void DecodeNextFragment();
284 size_t window_id_;
285 RenderingHelper* rendering_helper_;
286 gfx::Size frame_size_;
287 std::string encoded_data_;
288 const int num_in_flight_decodes_;
289 int outstanding_decodes_;
290 size_t encoded_data_next_pos_to_decode_;
291 int next_bitstream_buffer_id_;
292 ClientStateNotification<ClientState>* note_;
293 scoped_ptr<VideoDecodeAccelerator> decoder_;
294 scoped_ptr<base::WeakPtrFactory<VideoDecodeAccelerator> >
295 weak_decoder_factory_;
296 std::set<int> outstanding_texture_ids_;
297 int remaining_play_throughs_;
298 int reset_after_frame_num_;
299 int delete_decoder_state_;
300 ClientState state_;
301 int num_skipped_fragments_;
302 int num_queued_fragments_;
303 int num_decoded_frames_;
304 int num_done_bitstream_buffers_;
305 PictureBufferById picture_buffers_by_id_;
306 base::TimeTicks initialize_done_ticks_;
307 media::VideoCodecProfile profile_;
308 GLenum texture_target_;
309 bool suppress_rendering_;
310 std::vector<base::TimeTicks> frame_delivery_times_;
311 int delay_reuse_after_frame_num_;
312 // A map from bitstream buffer id to the decode start time of the buffer.
313 std::map<int, base::TimeTicks> decode_start_time_;
314 // The decode time of all decoded frames.
315 std::vector<base::TimeDelta> decode_time_;
316 // The number of VDA::Decode calls per second. This is to simulate WebRTC.
317 int decode_calls_per_second_;
318 bool render_as_thumbnails_;
319 // The number of frames that have not yet been returned by
320 // rendering_helper_. We check this count to ensure all frames are rendered
321 // before entering the CS_RESET state.
322 int frames_at_render_;
324 DISALLOW_IMPLICIT_CONSTRUCTORS(GLRenderingVDAClient);
327 GLRenderingVDAClient::GLRenderingVDAClient(
328 size_t window_id,
329 RenderingHelper* rendering_helper,
330 ClientStateNotification<ClientState>* note,
331 const std::string& encoded_data,
332 int num_in_flight_decodes,
333 int num_play_throughs,
334 int reset_after_frame_num,
335 int delete_decoder_state,
336 int frame_width,
337 int frame_height,
338 media::VideoCodecProfile profile,
339 bool suppress_rendering,
340 int delay_reuse_after_frame_num,
341 int decode_calls_per_second,
342 bool render_as_thumbnails)
343 : window_id_(window_id),
344 rendering_helper_(rendering_helper),
345 frame_size_(frame_width, frame_height),
346 encoded_data_(encoded_data),
347 num_in_flight_decodes_(num_in_flight_decodes),
348 outstanding_decodes_(0),
349 encoded_data_next_pos_to_decode_(0),
350 next_bitstream_buffer_id_(0),
351 note_(note),
352 remaining_play_throughs_(num_play_throughs),
353 reset_after_frame_num_(reset_after_frame_num),
354 delete_decoder_state_(delete_decoder_state),
355 state_(CS_CREATED),
356 num_skipped_fragments_(0),
357 num_queued_fragments_(0),
358 num_decoded_frames_(0),
359 num_done_bitstream_buffers_(0),
360 texture_target_(0),
361 suppress_rendering_(suppress_rendering),
362 delay_reuse_after_frame_num_(delay_reuse_after_frame_num),
363 decode_calls_per_second_(decode_calls_per_second),
364 render_as_thumbnails_(render_as_thumbnails),
365 frames_at_render_(0) {
366 CHECK_GT(num_in_flight_decodes, 0);
367 CHECK_GT(num_play_throughs, 0);
368 // |num_in_flight_decodes_| is unsupported if |decode_calls_per_second_| > 0.
369 if (decode_calls_per_second_ > 0)
370 CHECK_EQ(1, num_in_flight_decodes_);
372 // Default to H264 baseline if no profile provided.
373 profile_ = (profile != media::VIDEO_CODEC_PROFILE_UNKNOWN
374 ? profile
375 : media::H264PROFILE_BASELINE);
378 GLRenderingVDAClient::~GLRenderingVDAClient() {
379 DeleteDecoder(); // Clean up in case of expected error.
380 CHECK(decoder_deleted());
381 STLDeleteValues(&picture_buffers_by_id_);
382 SetState(CS_DESTROYED);
385 static bool DoNothingReturnTrue() { return true; }
387 void GLRenderingVDAClient::CreateAndStartDecoder() {
388 CHECK(decoder_deleted());
389 CHECK(!decoder_.get());
391 VideoDecodeAccelerator::Client* client = this;
392 base::WeakPtr<VideoDecodeAccelerator::Client> weak_client = AsWeakPtr();
393 #if defined(OS_WIN)
394 decoder_.reset(
395 new DXVAVideoDecodeAccelerator(base::Bind(&DoNothingReturnTrue)));
396 #elif defined(OS_CHROMEOS) && defined(ARCH_CPU_ARMEL)
398 scoped_ptr<V4L2Device> device = V4L2Device::Create(V4L2Device::kDecoder);
399 if (!device.get()) {
400 NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
401 return;
403 decoder_.reset(new V4L2VideoDecodeAccelerator(
404 static_cast<EGLDisplay>(rendering_helper_->GetGLDisplay()),
405 static_cast<EGLContext>(rendering_helper_->GetGLContext()),
406 weak_client,
407 base::Bind(&DoNothingReturnTrue),
408 device.Pass(),
409 base::MessageLoopProxy::current()));
410 #elif defined(OS_CHROMEOS) && defined(ARCH_CPU_X86_FAMILY)
411 CHECK_EQ(gfx::kGLImplementationDesktopGL, gfx::GetGLImplementation())
412 << "Hardware video decode does not work with OSMesa";
413 decoder_.reset(new VaapiVideoDecodeAccelerator(
414 static_cast<Display*>(rendering_helper_->GetGLDisplay()),
415 base::Bind(&DoNothingReturnTrue)));
416 #endif // OS_WIN
417 CHECK(decoder_.get());
418 weak_decoder_factory_.reset(
419 new base::WeakPtrFactory<VideoDecodeAccelerator>(decoder_.get()));
420 SetState(CS_DECODER_SET);
421 if (decoder_deleted())
422 return;
424 CHECK(decoder_->Initialize(profile_, client));
425 FinishInitialization();
428 void GLRenderingVDAClient::ProvidePictureBuffers(
429 uint32 requested_num_of_buffers,
430 const gfx::Size& dimensions,
431 uint32 texture_target) {
432 if (decoder_deleted())
433 return;
434 std::vector<media::PictureBuffer> buffers;
436 texture_target_ = texture_target;
437 for (uint32 i = 0; i < requested_num_of_buffers; ++i) {
438 uint32 id = picture_buffers_by_id_.size();
439 uint32 texture_id;
440 base::WaitableEvent done(false, false);
441 rendering_helper_->CreateTexture(
442 texture_target_, &texture_id, dimensions, &done);
443 done.Wait();
444 CHECK(outstanding_texture_ids_.insert(texture_id).second);
445 media::PictureBuffer* buffer =
446 new media::PictureBuffer(id, dimensions, texture_id);
447 CHECK(picture_buffers_by_id_.insert(std::make_pair(id, buffer)).second);
448 buffers.push_back(*buffer);
450 decoder_->AssignPictureBuffers(buffers);
453 void GLRenderingVDAClient::DismissPictureBuffer(int32 picture_buffer_id) {
454 PictureBufferById::iterator it =
455 picture_buffers_by_id_.find(picture_buffer_id);
456 CHECK(it != picture_buffers_by_id_.end());
457 CHECK_EQ(outstanding_texture_ids_.erase(it->second->texture_id()), 1U);
458 rendering_helper_->DeleteTexture(it->second->texture_id());
459 delete it->second;
460 picture_buffers_by_id_.erase(it);
463 void GLRenderingVDAClient::PictureReady(const media::Picture& picture) {
464 // We shouldn't be getting pictures delivered after Reset has completed.
465 CHECK_LT(state_, CS_RESET);
467 if (decoder_deleted())
468 return;
470 base::TimeTicks now = base::TimeTicks::Now();
472 frame_delivery_times_.push_back(now);
474 // Save the decode time of this picture.
475 std::map<int, base::TimeTicks>::iterator it =
476 decode_start_time_.find(picture.bitstream_buffer_id());
477 ASSERT_NE(decode_start_time_.end(), it);
478 decode_time_.push_back(now - it->second);
479 decode_start_time_.erase(it);
481 CHECK_LE(picture.bitstream_buffer_id(), next_bitstream_buffer_id_);
482 ++num_decoded_frames_;
484 // Mid-stream reset applies only to the last play-through per constructor
485 // comment.
486 if (remaining_play_throughs_ == 1 &&
487 reset_after_frame_num_ == num_decoded_frames_) {
488 reset_after_frame_num_ = MID_STREAM_RESET;
489 decoder_->Reset();
490 // Re-start decoding from the beginning of the stream to avoid needing to
491 // know how to find I-frames and so on in this test.
492 encoded_data_next_pos_to_decode_ = 0;
495 media::PictureBuffer* picture_buffer =
496 picture_buffers_by_id_[picture.picture_buffer_id()];
497 CHECK(picture_buffer);
499 scoped_refptr<VideoFrameTexture> video_frame =
500 new VideoFrameTexture(texture_target_,
501 picture_buffer->texture_id(),
502 base::Bind(&GLRenderingVDAClient::ReturnPicture,
503 AsWeakPtr(),
504 picture.picture_buffer_id()));
505 ++frames_at_render_;
507 if (render_as_thumbnails_) {
508 rendering_helper_->RenderThumbnail(video_frame->texture_target(),
509 video_frame->texture_id());
510 } else if (!suppress_rendering_) {
511 rendering_helper_->QueueVideoFrame(window_id_, video_frame);
515 void GLRenderingVDAClient::ReturnPicture(int32 picture_buffer_id) {
516 if (decoder_deleted())
517 return;
519 --frames_at_render_;
520 if (frames_at_render_ == 0 && state_ == CS_RESETTING) {
521 SetState(CS_RESET);
522 DeleteDecoder();
523 return;
526 if (num_decoded_frames_ > delay_reuse_after_frame_num_) {
527 base::MessageLoop::current()->PostDelayedTask(
528 FROM_HERE,
529 base::Bind(&VideoDecodeAccelerator::ReusePictureBuffer,
530 weak_decoder_factory_->GetWeakPtr(),
531 picture_buffer_id),
532 kReuseDelay);
533 } else {
534 decoder_->ReusePictureBuffer(picture_buffer_id);
538 void GLRenderingVDAClient::NotifyEndOfBitstreamBuffer(
539 int32 bitstream_buffer_id) {
540 // TODO(fischman): this test currently relies on this notification to make
541 // forward progress during a Reset(). But the VDA::Reset() API doesn't
542 // guarantee this, so stop relying on it (and remove the notifications from
543 // VaapiVideoDecodeAccelerator::FinishReset()).
544 ++num_done_bitstream_buffers_;
545 --outstanding_decodes_;
546 if (decode_calls_per_second_ == 0)
547 DecodeNextFragment();
550 void GLRenderingVDAClient::NotifyFlushDone() {
551 if (decoder_deleted())
552 return;
554 SetState(CS_FLUSHED);
555 --remaining_play_throughs_;
556 DCHECK_GE(remaining_play_throughs_, 0);
557 if (decoder_deleted())
558 return;
559 decoder_->Reset();
560 SetState(CS_RESETTING);
563 void GLRenderingVDAClient::NotifyResetDone() {
564 if (decoder_deleted())
565 return;
567 if (reset_after_frame_num_ == MID_STREAM_RESET) {
568 reset_after_frame_num_ = END_OF_STREAM_RESET;
569 DecodeNextFragment();
570 return;
571 } else if (reset_after_frame_num_ == START_OF_STREAM_RESET) {
572 reset_after_frame_num_ = END_OF_STREAM_RESET;
573 for (int i = 0; i < num_in_flight_decodes_; ++i)
574 DecodeNextFragment();
575 return;
578 if (remaining_play_throughs_) {
579 encoded_data_next_pos_to_decode_ = 0;
580 FinishInitialization();
581 return;
584 rendering_helper_->Flush(window_id_);
586 if (frames_at_render_ == 0) {
587 SetState(CS_RESET);
588 DeleteDecoder();
592 void GLRenderingVDAClient::NotifyError(VideoDecodeAccelerator::Error error) {
593 SetState(CS_ERROR);
596 void GLRenderingVDAClient::OutputFrameDeliveryTimes(base::File* output) {
597 std::string s = base::StringPrintf("frame count: %" PRIuS "\n",
598 frame_delivery_times_.size());
599 output->WriteAtCurrentPos(s.data(), s.length());
600 base::TimeTicks t0 = initialize_done_ticks_;
601 for (size_t i = 0; i < frame_delivery_times_.size(); ++i) {
602 s = base::StringPrintf("frame %04" PRIuS ": %" PRId64 " us\n",
603 i,
604 (frame_delivery_times_[i] - t0).InMicroseconds());
605 t0 = frame_delivery_times_[i];
606 output->WriteAtCurrentPos(s.data(), s.length());
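// Returns true iff |encoded| contains an H.264 Annex B start code
// (00 00 00 01) at |pos|.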
610 static bool LookingAtNAL(const std::string& encoded, size_t pos) {
611 return encoded[pos] == 0 && encoded[pos + 1] == 0 &&
612 encoded[pos + 2] == 0 && encoded[pos + 3] == 1;
615 void GLRenderingVDAClient::SetState(ClientState new_state) {
616 note_->Notify(new_state);
617 state_ = new_state;
618 if (!remaining_play_throughs_ && new_state == delete_decoder_state_) {
619 CHECK(!decoder_deleted());
620 DeleteDecoder();
624 void GLRenderingVDAClient::FinishInitialization() {
625 SetState(CS_INITIALIZED);
626 initialize_done_ticks_ = base::TimeTicks::Now();
628 if (reset_after_frame_num_ == START_OF_STREAM_RESET) {
629 reset_after_frame_num_ = MID_STREAM_RESET;
630 decoder_->Reset();
631 return;
634 for (int i = 0; i < num_in_flight_decodes_; ++i)
635 DecodeNextFragment();
636 DCHECK_EQ(outstanding_decodes_, num_in_flight_decodes_);
639 void GLRenderingVDAClient::DeleteDecoder() {
640 if (decoder_deleted())
641 return;
642 weak_decoder_factory_.reset();
643 decoder_.reset();
644 STLClearObject(&encoded_data_);
645 for (std::set<int>::iterator it = outstanding_texture_ids_.begin();
646 it != outstanding_texture_ids_.end(); ++it) {
647 rendering_helper_->DeleteTexture(*it);
649 outstanding_texture_ids_.clear();
650 // Cascade through the rest of the states to simplify test code below.
651 for (int i = state_ + 1; i < CS_MAX; ++i)
652 SetState(static_cast<ClientState>(i));
655 std::string GLRenderingVDAClient::GetBytesForFirstFragment(
656 size_t start_pos, size_t* end_pos) {
657 if (profile_ < media::H264PROFILE_MAX) {
658 *end_pos = start_pos;
659 while (*end_pos + 4 < encoded_data_.size()) {
660 if ((encoded_data_[*end_pos + 4] & 0x1f) == 0x7) // SPS (NAL unit type 7)
661 return GetBytesForNextFragment(*end_pos, end_pos);
662 GetBytesForNextNALU(*end_pos, end_pos);
663 num_skipped_fragments_++;
665 *end_pos = start_pos;
666 return std::string();
668 DCHECK_LE(profile_, media::VP8PROFILE_MAX);
669 return GetBytesForNextFragment(start_pos, end_pos);
672 std::string GLRenderingVDAClient::GetBytesForNextFragment(
673 size_t start_pos, size_t* end_pos) {
674 if (profile_ < media::H264PROFILE_MAX) {
675 *end_pos = start_pos;
676 GetBytesForNextNALU(*end_pos, end_pos);
677 if (start_pos != *end_pos) {
678 num_queued_fragments_++;
680 return encoded_data_.substr(start_pos, *end_pos - start_pos);
682 DCHECK_LE(profile_, media::VP8PROFILE_MAX);
683 return GetBytesForNextFrame(start_pos, end_pos);
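// Sets |*end_pos| so that [start_pos, *end_pos) spans exactly one NALU,
// including its start code; |start_pos| must itself point at a start code.
// If fewer than four bytes remain, the range is left empty.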
686 void GLRenderingVDAClient::GetBytesForNextNALU(
687 size_t start_pos, size_t* end_pos) {
688 *end_pos = start_pos;
689 if (*end_pos + 4 > encoded_data_.size())
690 return;
691 CHECK(LookingAtNAL(encoded_data_, start_pos));
692 *end_pos += 4;
693 while (*end_pos + 4 <= encoded_data_.size() &&
694 !LookingAtNAL(encoded_data_, *end_pos)) {
695 ++*end_pos;
697 if (*end_pos + 3 >= encoded_data_.size())
698 *end_pos = encoded_data_.size();
701 std::string GLRenderingVDAClient::GetBytesForNextFrame(
702 size_t start_pos, size_t* end_pos) {
703 // Helpful description: http://wiki.multimedia.cx/index.php?title=IVF
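// IVF layout: a 32-byte file header, then for each frame a 12-byte frame
// header whose first 4 bytes are the little-endian payload size, followed by
// the payload itself.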
704 std::string bytes;
705 if (start_pos == 0)
706 start_pos = 32; // Skip IVF header.
707 *end_pos = start_pos;
708 uint32 frame_size = *reinterpret_cast<uint32*>(&encoded_data_[*end_pos]);
709 *end_pos += 12; // Skip frame header.
710 bytes.append(encoded_data_.substr(*end_pos, frame_size));
711 *end_pos += frame_size;
712 num_queued_fragments_++;
713 return bytes;
716 static bool FragmentHasConfigInfo(const uint8* data, size_t size,
717 media::VideoCodecProfile profile) {
718 if (profile >= media::H264PROFILE_MIN &&
719 profile <= media::H264PROFILE_MAX) {
720 media::H264Parser parser;
721 parser.SetStream(data, size);
722 media::H264NALU nalu;
723 media::H264Parser::Result result = parser.AdvanceToNextNALU(&nalu);
724 if (result != media::H264Parser::kOk) {
725 // Let the VDA figure out there's something wrong with the stream.
726 return false;
729 return nalu.nal_unit_type == media::H264NALU::kSPS;
730 } else if (profile >= media::VP8PROFILE_MIN &&
731 profile <= media::VP8PROFILE_MAX) {
732 return (size > 0 && !(data[0] & 0x01));
734 // Shouldn't happen at this point.
735 LOG(FATAL) << "Invalid profile: " << profile;
736 return false;
739 void GLRenderingVDAClient::DecodeNextFragment() {
740 if (decoder_deleted())
741 return;
742 if (encoded_data_next_pos_to_decode_ == encoded_data_.size()) {
743 if (outstanding_decodes_ == 0) {
744 decoder_->Flush();
745 SetState(CS_FLUSHING);
747 return;
749 size_t end_pos;
750 std::string next_fragment_bytes;
751 if (encoded_data_next_pos_to_decode_ == 0) {
752 next_fragment_bytes = GetBytesForFirstFragment(0, &end_pos);
753 } else {
754 next_fragment_bytes =
755 GetBytesForNextFragment(encoded_data_next_pos_to_decode_, &end_pos);
757 size_t next_fragment_size = next_fragment_bytes.size();
759 // Call Reset() just after Decode() if the fragment contains config info.
760 // This tests how the VDA behaves when it gets a reset request before it has
761 // a chance to ProvidePictureBuffers().
762 bool reset_here = false;
763 if (reset_after_frame_num_ == RESET_AFTER_FIRST_CONFIG_INFO) {
764 reset_here = FragmentHasConfigInfo(
765 reinterpret_cast<const uint8*>(next_fragment_bytes.data()),
766 next_fragment_size,
767 profile_);
768 if (reset_here)
769 reset_after_frame_num_ = END_OF_STREAM_RESET;
772 // Populate the shared memory buffer w/ the fragment, duplicate its handle,
773 // and hand it off to the decoder.
774 base::SharedMemory shm;
775 CHECK(shm.CreateAndMapAnonymous(next_fragment_size));
776 memcpy(shm.memory(), next_fragment_bytes.data(), next_fragment_size);
777 base::SharedMemoryHandle dup_handle;
778 CHECK(shm.ShareToProcess(base::Process::Current().handle(), &dup_handle));
779 media::BitstreamBuffer bitstream_buffer(
780 next_bitstream_buffer_id_, dup_handle, next_fragment_size);
781 decode_start_time_[next_bitstream_buffer_id_] = base::TimeTicks::Now();
782 // Mask against 30 bits, to avoid (undefined) wraparound on signed integer.
783 next_bitstream_buffer_id_ = (next_bitstream_buffer_id_ + 1) & 0x3FFFFFFF;
784 decoder_->Decode(bitstream_buffer);
785 ++outstanding_decodes_;
786 if (!remaining_play_throughs_ &&
787 -delete_decoder_state_ == next_bitstream_buffer_id_) {
788 DeleteDecoder();
791 if (reset_here) {
792 reset_after_frame_num_ = MID_STREAM_RESET;
793 decoder_->Reset();
794 // Restart from the beginning to re-Decode() the SPS we just sent.
795 encoded_data_next_pos_to_decode_ = 0;
796 } else {
797 encoded_data_next_pos_to_decode_ = end_pos;
800 if (decode_calls_per_second_ > 0) {
801 base::MessageLoop::current()->PostDelayedTask(
802 FROM_HERE,
803 base::Bind(&GLRenderingVDAClient::DecodeNextFragment, AsWeakPtr()),
804 base::TimeDelta::FromSeconds(1) / decode_calls_per_second_);
808 double GLRenderingVDAClient::frames_per_second() {
809 base::TimeDelta delta = frame_delivery_times_.back() - initialize_done_ticks_;
810 return num_decoded_frames_ / delta.InSecondsF();
813 base::TimeDelta GLRenderingVDAClient::decode_time_median() {
814 if (decode_time_.size() == 0)
815 return base::TimeDelta();
816 std::sort(decode_time_.begin(), decode_time_.end());
817 int index = decode_time_.size() / 2;
818 if (decode_time_.size() % 2 != 0)
819 return decode_time_[index];
821 return (decode_time_[index] + decode_time_[index - 1]) / 2;
824 class VideoDecodeAcceleratorTest : public ::testing::Test {
825 protected:
826 VideoDecodeAcceleratorTest();
827 virtual void SetUp();
828 virtual void TearDown();
830 // Parse |data| into its constituent parts, set the various output fields
831 // accordingly, and read in the video stream. CHECK-fails on unexpected or
832 // missing required data. Unspecified optional fields are set to -1.
833 void ParseAndReadTestVideoData(base::FilePath::StringType data,
834 std::vector<TestVideoFile*>* test_video_files);
836 // Update the parameters of |test_video_files| according to
837 // |num_concurrent_decoders| and |reset_point|. E.g. the expected number of
838 // frames should be adjusted if the decoder is reset in the middle of the stream.
839 void UpdateTestVideoFileParams(
840 size_t num_concurrent_decoders,
841 int reset_point,
842 std::vector<TestVideoFile*>* test_video_files);
844 void InitializeRenderingHelper(const RenderingHelperParams& helper_params);
845 void CreateAndStartDecoder(GLRenderingVDAClient* client,
846 ClientStateNotification<ClientState>* note);
847 void WaitUntilDecodeFinish(ClientStateNotification<ClientState>* note);
848 void WaitUntilIdle();
849 void OutputLogFile(const base::FilePath::CharType* log_path,
850 const std::string& content);
852 std::vector<TestVideoFile*> test_video_files_;
853 RenderingHelper rendering_helper_;
854 scoped_refptr<base::MessageLoopProxy> rendering_loop_proxy_;
856 private:
857 base::Thread rendering_thread_;
858 // Required for Thread to work. Not used otherwise.
859 base::ShadowingAtExitManager at_exit_manager_;
861 DISALLOW_COPY_AND_ASSIGN(VideoDecodeAcceleratorTest);
864 VideoDecodeAcceleratorTest::VideoDecodeAcceleratorTest()
865 : rendering_thread_("GLRenderingVDAClientThread") {}
867 void VideoDecodeAcceleratorTest::SetUp() {
868 ParseAndReadTestVideoData(g_test_video_data, &test_video_files_);
870 // Initialize the rendering thread.
871 base::Thread::Options options;
872 options.message_loop_type = base::MessageLoop::TYPE_DEFAULT;
873 #if defined(OS_WIN)
874 // On Windows the decoding thread initializes the Media Foundation decoder,
875 // which uses COM, so the thread must be a UI thread.
876 options.message_loop_type = base::MessageLoop::TYPE_UI;
877 #endif // OS_WIN
879 rendering_thread_.StartWithOptions(options);
880 rendering_loop_proxy_ = rendering_thread_.message_loop_proxy();
883 void VideoDecodeAcceleratorTest::TearDown() {
884 rendering_loop_proxy_->PostTask(
885 FROM_HERE,
886 base::Bind(&STLDeleteElements<std::vector<TestVideoFile*> >,
887 &test_video_files_));
889 base::WaitableEvent done(false, false);
890 rendering_loop_proxy_->PostTask(
891 FROM_HERE,
892 base::Bind(&RenderingHelper::UnInitialize,
893 base::Unretained(&rendering_helper_),
894 &done));
895 done.Wait();
897 rendering_thread_.Stop();
900 void VideoDecodeAcceleratorTest::ParseAndReadTestVideoData(
901 base::FilePath::StringType data,
902 std::vector<TestVideoFile*>* test_video_files) {
903 std::vector<base::FilePath::StringType> entries;
904 base::SplitString(data, ';', &entries);
905 CHECK_GE(entries.size(), 1U) << data;
906 for (size_t index = 0; index < entries.size(); ++index) {
907 std::vector<base::FilePath::StringType> fields;
908 base::SplitString(entries[index], ':', &fields);
909 CHECK_GE(fields.size(), 1U) << entries[index];
910 CHECK_LE(fields.size(), 8U) << entries[index];
911 TestVideoFile* video_file = new TestVideoFile(fields[0]);
912 if (!fields[1].empty())
913 CHECK(base::StringToInt(fields[1], &video_file->width));
914 if (!fields[2].empty())
915 CHECK(base::StringToInt(fields[2], &video_file->height));
916 if (!fields[3].empty())
917 CHECK(base::StringToInt(fields[3], &video_file->num_frames));
918 if (!fields[4].empty())
919 CHECK(base::StringToInt(fields[4], &video_file->num_fragments));
920 if (!fields[5].empty())
921 CHECK(base::StringToInt(fields[5], &video_file->min_fps_render));
922 if (!fields[6].empty())
923 CHECK(base::StringToInt(fields[6], &video_file->min_fps_no_render));
924 int profile = -1;
925 if (!fields[7].empty())
926 CHECK(base::StringToInt(fields[7], &profile));
927 video_file->profile = static_cast<media::VideoCodecProfile>(profile);
929 // Read in the video data.
930 base::FilePath filepath(video_file->file_name);
931 CHECK(base::ReadFileToString(filepath, &video_file->data_str))
932 << "test_video_file: " << filepath.MaybeAsASCII();
934 test_video_files->push_back(video_file);
938 void VideoDecodeAcceleratorTest::UpdateTestVideoFileParams(
939 size_t num_concurrent_decoders,
940 int reset_point,
941 std::vector<TestVideoFile*>* test_video_files) {
942 for (size_t i = 0; i < test_video_files->size(); i++) {
943 TestVideoFile* video_file = (*test_video_files)[i];
944 if (reset_point == MID_STREAM_RESET) {
945 // Reset should not go beyond the last frame;
946 // reset in the middle of the stream for short videos.
947 video_file->reset_after_frame_num = kMaxResetAfterFrameNum;
948 if (video_file->num_frames <= video_file->reset_after_frame_num)
949 video_file->reset_after_frame_num = video_file->num_frames / 2;
951 video_file->num_frames += video_file->reset_after_frame_num;
952 } else {
953 video_file->reset_after_frame_num = reset_point;
956 if (video_file->min_fps_render != -1)
957 video_file->min_fps_render /= num_concurrent_decoders;
958 if (video_file->min_fps_no_render != -1)
959 video_file->min_fps_no_render /= num_concurrent_decoders;
963 void VideoDecodeAcceleratorTest::InitializeRenderingHelper(
964 const RenderingHelperParams& helper_params) {
965 base::WaitableEvent done(false, false);
966 rendering_loop_proxy_->PostTask(
967 FROM_HERE,
968 base::Bind(&RenderingHelper::Initialize,
969 base::Unretained(&rendering_helper_),
970 helper_params,
971 &done));
972 done.Wait();
975 void VideoDecodeAcceleratorTest::CreateAndStartDecoder(
976 GLRenderingVDAClient* client,
977 ClientStateNotification<ClientState>* note) {
978 rendering_loop_proxy_->PostTask(
979 FROM_HERE,
980 base::Bind(&GLRenderingVDAClient::CreateAndStartDecoder,
981 base::Unretained(client)));
982 ASSERT_EQ(note->Wait(), CS_DECODER_SET);
985 void VideoDecodeAcceleratorTest::WaitUntilDecodeFinish(
986 ClientStateNotification<ClientState>* note) {
987 for (int i = 0; i < CS_MAX; i++) {
988 if (note->Wait() == CS_DESTROYED)
989 break;
993 void VideoDecodeAcceleratorTest::WaitUntilIdle() {
994 base::WaitableEvent done(false, false);
995 rendering_loop_proxy_->PostTask(
996 FROM_HERE,
997 base::Bind(&base::WaitableEvent::Signal, base::Unretained(&done)));
998 done.Wait();
1001 void VideoDecodeAcceleratorTest::OutputLogFile(
1002 const base::FilePath::CharType* log_path,
1003 const std::string& content) {
1004 base::File file(base::FilePath(log_path),
1005 base::File::FLAG_CREATE_ALWAYS | base::File::FLAG_WRITE);
1006 file.WriteAtCurrentPos(content.data(), content.length());
1009 // Test parameters:
1010 // - Number of concurrent decoders.
1011 // - Number of concurrent in-flight Decode() calls per decoder.
1012 // - Number of play-throughs.
1013 // - reset_after_frame_num: see GLRenderingVDAClient ctor.
1014 // - delete_decoder_state: see GLRenderingVDAClient ctor.
1015 // - whether to test slow rendering by delaying ReusePictureBuffer().
1016 // - whether the video frames are rendered as thumbnails.
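// For example, the ReplayAfterEOS instantiation below uses
// MakeTuple(1, 1, 4, END_OF_STREAM_RESET, CS_RESET, false, false): one
// decoder, one in-flight Decode(), four play-throughs, reset only at end of
// stream, delete the decoder once it reaches CS_RESET, no reuse delay and no
// thumbnail rendering.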
1017 class VideoDecodeAcceleratorParamTest
1018 : public VideoDecodeAcceleratorTest,
1019 public ::testing::WithParamInterface<
1020 Tuple7<int, int, int, ResetPoint, ClientState, bool, bool> > {
1023 // Helper so that gtest failures emit a more readable version of the tuple than
1024 // its byte representation.
1025 ::std::ostream& operator<<(
1026 ::std::ostream& os,
1027 const Tuple7<int, int, int, ResetPoint, ClientState, bool, bool>& t) {
1028 return os << t.a << ", " << t.b << ", " << t.c << ", " << t.d << ", " << t.e
1029 << ", " << t.f << ", " << t.g;
1032 // Wait for |note| to report a state and if it's not |expected_state| then
1033 // assert |client| has deleted its decoder.
1034 static void AssertWaitForStateOrDeleted(
1035 ClientStateNotification<ClientState>* note,
1036 GLRenderingVDAClient* client,
1037 ClientState expected_state) {
1038 ClientState state = note->Wait();
1039 if (state == expected_state) return;
1040 ASSERT_TRUE(client->decoder_deleted())
1041 << "Decoder not deleted but Wait() returned " << state
1042 << ", instead of " << expected_state;
1045 // We assert a minimal number of concurrent decoders we expect to succeed.
1046 // Different platforms can support more concurrent decoders, so we don't assert
1047 // failure above this.
1048 enum { kMinSupportedNumConcurrentDecoders = 3 };
1050 // Test the most straightforward case possible: data is decoded from a single
1051 // chunk and rendered to the screen.
1052 TEST_P(VideoDecodeAcceleratorParamTest, TestSimpleDecode) {
1053 const size_t num_concurrent_decoders = GetParam().a;
1054 const size_t num_in_flight_decodes = GetParam().b;
1055 const int num_play_throughs = GetParam().c;
1056 const int reset_point = GetParam().d;
1057 const int delete_decoder_state = GetParam().e;
1058 bool test_reuse_delay = GetParam().f;
1059 const bool render_as_thumbnails = GetParam().g;
1061 UpdateTestVideoFileParams(
1062 num_concurrent_decoders, reset_point, &test_video_files_);
1064 // Suppress GL rendering for all tests when "--rendering_fps" is 0.
1065 const bool suppress_rendering = g_rendering_fps == 0;
1067 std::vector<ClientStateNotification<ClientState>*>
1068 notes(num_concurrent_decoders, NULL);
1069 std::vector<GLRenderingVDAClient*> clients(num_concurrent_decoders, NULL);
1071 RenderingHelperParams helper_params;
1072 helper_params.rendering_fps = g_rendering_fps;
1073 helper_params.render_as_thumbnails = render_as_thumbnails;
1074 if (render_as_thumbnails) {
1075 // Only one decoder is supported with thumbnail rendering
1076 CHECK_EQ(num_concurrent_decoders, 1U);
1077 helper_params.thumbnails_page_size = kThumbnailsPageSize;
1078 helper_params.thumbnail_size = kThumbnailSize;
1081 // First kick off all the decoders.
1082 for (size_t index = 0; index < num_concurrent_decoders; ++index) {
1083 TestVideoFile* video_file =
1084 test_video_files_[index % test_video_files_.size()];
1085 ClientStateNotification<ClientState>* note =
1086 new ClientStateNotification<ClientState>();
1087 notes[index] = note;
1089 int delay_after_frame_num = std::numeric_limits<int>::max();
1090 if (test_reuse_delay &&
1091 kMaxFramesToDelayReuse * 2 < video_file->num_frames) {
1092 delay_after_frame_num = video_file->num_frames - kMaxFramesToDelayReuse;
1095 GLRenderingVDAClient* client =
1096 new GLRenderingVDAClient(index,
1097 &rendering_helper_,
1098 note,
1099 video_file->data_str,
1100 num_in_flight_decodes,
1101 num_play_throughs,
1102 video_file->reset_after_frame_num,
1103 delete_decoder_state,
1104 video_file->width,
1105 video_file->height,
1106 video_file->profile,
1107 suppress_rendering,
1108 delay_after_frame_num,
1109 0,  // decode_calls_per_second: no WebRTC-style pacing in this test.
1110 render_as_thumbnails);
1112 clients[index] = client;
1113 helper_params.window_sizes.push_back(
1114 render_as_thumbnails
1115 ? kThumbnailsPageSize
1116 : gfx::Size(video_file->width, video_file->height));
1119 InitializeRenderingHelper(helper_params);
1121 for (size_t index = 0; index < num_concurrent_decoders; ++index) {
1122 CreateAndStartDecoder(clients[index], notes[index]);
1125 // Then wait for all the decodes to finish.
1126 // Only check performance & correctness later if we play through only once.
1127 bool skip_performance_and_correctness_checks = num_play_throughs > 1;
1128 for (size_t i = 0; i < num_concurrent_decoders; ++i) {
1129 ClientStateNotification<ClientState>* note = notes[i];
1130 ClientState state = note->Wait();
1131 if (state != CS_INITIALIZED) {
1132 skip_performance_and_correctness_checks = true;
1133 // We expect initialization to fail only when more than the supported
1134 // number of decoders is instantiated. Assert here that something else
1135 // didn't trigger failure.
1136 ASSERT_GT(num_concurrent_decoders,
1137 static_cast<size_t>(kMinSupportedNumConcurrentDecoders));
1138 continue;
1140 ASSERT_EQ(state, CS_INITIALIZED);
1141 for (int n = 0; n < num_play_throughs; ++n) {
1142 // For play-throughs other than the first, we expect initialization to
1143 // succeed unconditionally.
1144 if (n > 0) {
1145 ASSERT_NO_FATAL_FAILURE(
1146 AssertWaitForStateOrDeleted(note, clients[i], CS_INITIALIZED));
1148 // InitializeDone kicks off decoding inside the client, so we just need to
1149 // wait for Flush.
1150 ASSERT_NO_FATAL_FAILURE(
1151 AssertWaitForStateOrDeleted(note, clients[i], CS_FLUSHING));
1152 ASSERT_NO_FATAL_FAILURE(
1153 AssertWaitForStateOrDeleted(note, clients[i], CS_FLUSHED));
1154 // FlushDone requests Reset().
1155 ASSERT_NO_FATAL_FAILURE(
1156 AssertWaitForStateOrDeleted(note, clients[i], CS_RESETTING));
1158 ASSERT_NO_FATAL_FAILURE(
1159 AssertWaitForStateOrDeleted(note, clients[i], CS_RESET));
1160 // ResetDone requests Destroy().
1161 ASSERT_NO_FATAL_FAILURE(
1162 AssertWaitForStateOrDeleted(note, clients[i], CS_DESTROYED));
1164 // Finally assert that decoding went as expected.
1165 for (size_t i = 0; i < num_concurrent_decoders &&
1166 !skip_performance_and_correctness_checks; ++i) {
1167 // We can only make performance/correctness assertions if the decoder was
1168 // allowed to finish.
1169 if (delete_decoder_state < CS_FLUSHED)
1170 continue;
1171 GLRenderingVDAClient* client = clients[i];
1172 TestVideoFile* video_file = test_video_files_[i % test_video_files_.size()];
1173 if (video_file->num_frames > 0) {
1174 // The number of decoded frames may exceed the number of frames in the
1175 // video, since frames can still be returned until the reset completes.
1176 if (video_file->reset_after_frame_num > 0)
1177 EXPECT_GE(client->num_decoded_frames(), video_file->num_frames);
1178 else
1179 EXPECT_EQ(client->num_decoded_frames(), video_file->num_frames);
1181 if (reset_point == END_OF_STREAM_RESET) {
1182 EXPECT_EQ(video_file->num_fragments, client->num_skipped_fragments() +
1183 client->num_queued_fragments());
1184 EXPECT_EQ(client->num_done_bitstream_buffers(),
1185 client->num_queued_fragments());
1187 LOG(INFO) << "Decoder " << i << " fps: " << client->frames_per_second();
1188 if (!render_as_thumbnails) {
1189 int min_fps = suppress_rendering ?
1190 video_file->min_fps_no_render : video_file->min_fps_render;
1191 if (min_fps > 0 && !test_reuse_delay)
1192 EXPECT_GT(client->frames_per_second(), min_fps);
1196 if (render_as_thumbnails) {
1197 std::vector<unsigned char> rgb;
1198 bool alpha_solid;
1199 base::WaitableEvent done(false, false);
1200 rendering_loop_proxy_->PostTask(
1201 FROM_HERE,
1202 base::Bind(&RenderingHelper::GetThumbnailsAsRGB,
1203 base::Unretained(&rendering_helper_),
1204 &rgb, &alpha_solid, &done));
1205 done.Wait();
1207 std::vector<std::string> golden_md5s;
1208 std::string md5_string = base::MD5String(
1209 base::StringPiece(reinterpret_cast<char*>(&rgb[0]), rgb.size()));
1210 ReadGoldenThumbnailMD5s(test_video_files_[0], &golden_md5s);
1211 std::vector<std::string>::iterator match =
1212 find(golden_md5s.begin(), golden_md5s.end(), md5_string);
1213 if (match == golden_md5s.end()) {
1214 // Convert raw RGB into PNG for export.
1215 std::vector<unsigned char> png;
1216 gfx::PNGCodec::Encode(&rgb[0],
1217 gfx::PNGCodec::FORMAT_RGB,
1218 kThumbnailsPageSize,
1219 kThumbnailsPageSize.width() * 3,
1220 true,
1221 std::vector<gfx::PNGCodec::Comment>(),
1222 &png);
1224 LOG(ERROR) << "Unknown thumbnails MD5: " << md5_string;
1226 base::FilePath filepath(test_video_files_[0]->file_name);
1227 filepath = filepath.AddExtension(FILE_PATH_LITERAL(".bad_thumbnails"));
1228 filepath = filepath.AddExtension(FILE_PATH_LITERAL(".png"));
1229 int num_bytes = base::WriteFile(filepath,
1230 reinterpret_cast<char*>(&png[0]),
1231 png.size());
1232 ASSERT_EQ(num_bytes, static_cast<int>(png.size()));
1234 ASSERT_NE(match, golden_md5s.end());
1235 EXPECT_EQ(alpha_solid, true) << "RGBA frame had incorrect alpha";
1238 // Output the frame delivery times to a file.
1239 // We can only make performance/correctness assertions if the decoder was
1240 // allowed to finish.
1241 if (g_output_log != NULL && delete_decoder_state >= CS_FLUSHED) {
1242 base::File output_file(
1243 base::FilePath(g_output_log),
1244 base::File::FLAG_CREATE_ALWAYS | base::File::FLAG_WRITE);
1245 for (size_t i = 0; i < num_concurrent_decoders; ++i) {
1246 clients[i]->OutputFrameDeliveryTimes(&output_file);
1250 rendering_loop_proxy_->PostTask(
1251 FROM_HERE,
1252 base::Bind(&STLDeleteElements<std::vector<GLRenderingVDAClient*> >,
1253 &clients));
1254 rendering_loop_proxy_->PostTask(
1255 FROM_HERE,
1256 base::Bind(&STLDeleteElements<
1257 std::vector<ClientStateNotification<ClientState>*> >,
1258 &notes));
1259 WaitUntilIdle();
1262 // Test that replay after EOS works fine.
1263 INSTANTIATE_TEST_CASE_P(
1264 ReplayAfterEOS, VideoDecodeAcceleratorParamTest,
1265 ::testing::Values(
1266 MakeTuple(1, 1, 4, END_OF_STREAM_RESET, CS_RESET, false, false)));
1268 // Test that Reset() before the first Decode() works fine.
1269 INSTANTIATE_TEST_CASE_P(
1270 ResetBeforeDecode, VideoDecodeAcceleratorParamTest,
1271 ::testing::Values(
1272 MakeTuple(1, 1, 1, START_OF_STREAM_RESET, CS_RESET, false, false)));
1274 // Test Reset() immediately after Decode() containing config info.
1275 INSTANTIATE_TEST_CASE_P(
1276 ResetAfterFirstConfigInfo, VideoDecodeAcceleratorParamTest,
1277 ::testing::Values(
1278 MakeTuple(
1279 1, 1, 1, RESET_AFTER_FIRST_CONFIG_INFO, CS_RESET, false, false)));
1281 // Test that Reset() mid-stream works fine and doesn't affect decoding even when
1282 // Decode() calls are made during the reset.
1283 INSTANTIATE_TEST_CASE_P(
1284 MidStreamReset, VideoDecodeAcceleratorParamTest,
1285 ::testing::Values(
1286 MakeTuple(1, 1, 1, MID_STREAM_RESET, CS_RESET, false, false)));
1288 INSTANTIATE_TEST_CASE_P(
1289 SlowRendering, VideoDecodeAcceleratorParamTest,
1290 ::testing::Values(
1291 MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESET, true, false)));
1293 // Test that Destroy() mid-stream works fine (primarily this is testing that no
1294 // crashes occur).
1295 INSTANTIATE_TEST_CASE_P(
1296 TearDownTiming, VideoDecodeAcceleratorParamTest,
1297 ::testing::Values(
1298 MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_DECODER_SET, false, false),
1299 MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_INITIALIZED, false, false),
1300 MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_FLUSHING, false, false),
1301 MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_FLUSHED, false, false),
1302 MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESETTING, false, false),
1303 MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESET, false, false),
1304 MakeTuple(1, 1, 1, END_OF_STREAM_RESET,
1305 static_cast<ClientState>(-1), false, false),
1306 MakeTuple(1, 1, 1, END_OF_STREAM_RESET,
1307 static_cast<ClientState>(-10), false, false),
1308 MakeTuple(1, 1, 1, END_OF_STREAM_RESET,
1309 static_cast<ClientState>(-100), false, false)));
1311 // Test that decoding works with various numbers of in-flight Decode() calls.
1312 INSTANTIATE_TEST_CASE_P(
1313 DecodeVariations, VideoDecodeAcceleratorParamTest,
1314 ::testing::Values(
1315 MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESET, false, false),
1316 MakeTuple(1, 10, 1, END_OF_STREAM_RESET, CS_RESET, false, false),
1317 // Tests queuing.
1318 MakeTuple(1, 15, 1, END_OF_STREAM_RESET, CS_RESET, false, false)));
1320 // Find out how many concurrent decoders can go before we exhaust system
1321 // resources.
1322 INSTANTIATE_TEST_CASE_P(
1323 ResourceExhaustion, VideoDecodeAcceleratorParamTest,
1324 ::testing::Values(
1325 // +0 hack below to promote enum to int.
1326 MakeTuple(kMinSupportedNumConcurrentDecoders + 0, 1, 1,
1327 END_OF_STREAM_RESET, CS_RESET, false, false),
1328 MakeTuple(kMinSupportedNumConcurrentDecoders + 1, 1, 1,
1329 END_OF_STREAM_RESET, CS_RESET, false, false)));
1331 // Thumbnailing test
1332 INSTANTIATE_TEST_CASE_P(
1333 Thumbnail, VideoDecodeAcceleratorParamTest,
1334 ::testing::Values(
1335 MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESET, false, true)));
1337 // Measure the median decode time when VDA::Decode() is called 30 times per
1338 // second.
1339 TEST_F(VideoDecodeAcceleratorTest, TestDecodeTimeMedian) {
1340 RenderingHelperParams helper_params;
1342 // Disable rendering by setting rendering_fps to 0.
1343 helper_params.rendering_fps = 0;
1344 helper_params.render_as_thumbnails = false;
1346 ClientStateNotification<ClientState>* note =
1347 new ClientStateNotification<ClientState>();
1348 GLRenderingVDAClient* client =
1349 new GLRenderingVDAClient(0,
1350 &rendering_helper_,
1351 note,
1352 test_video_files_[0]->data_str,
1353 1,  // num_in_flight_decodes (must be 1 when pacing Decode() calls)
1354 1,  // num_play_throughs
1355 test_video_files_[0]->reset_after_frame_num,
1356 CS_RESET,
1357 test_video_files_[0]->width,
1358 test_video_files_[0]->height,
1359 test_video_files_[0]->profile,
1360 true,
1361 std::numeric_limits<int>::max(),
1362 kWebRtcDecodeCallsPerSecond,
1363 false /* render_as_thumbnails */);
1364 helper_params.window_sizes.push_back(
1365 gfx::Size(test_video_files_[0]->width, test_video_files_[0]->height));
1366 InitializeRenderingHelper(helper_params);
1367 CreateAndStartDecoder(client, note);
1368 WaitUntilDecodeFinish(note);
1370 base::TimeDelta decode_time_median = client->decode_time_median();
1371 std::string output_string =
1372 base::StringPrintf("Decode time median: %" PRId64 " us",
1373 decode_time_median.InMicroseconds());
1374 LOG(INFO) << output_string;
1376 if (g_output_log != NULL)
1377 OutputLogFile(g_output_log, output_string);
1379 rendering_loop_proxy_->DeleteSoon(FROM_HERE, client);
1380 rendering_loop_proxy_->DeleteSoon(FROM_HERE, note);
1381 WaitUntilIdle();
1384 // TODO(fischman, vrk): add more tests! In particular:
1385 // - Test life-cycle: Seek/Stop/Pause/Play for a single decoder.
1386 // - Test alternate configurations
1387 // - Test failure conditions.
1388 // - Test frame size changes mid-stream
1390 } // namespace
1391 } // namespace content
1393 int main(int argc, char **argv) {
1394 testing::InitGoogleTest(&argc, argv); // Removes gtest-specific args.
1395 base::CommandLine::Init(argc, argv);
1397 // Needed to enable DVLOG through --vmodule.
1398 logging::LoggingSettings settings;
1399 settings.logging_dest = logging::LOG_TO_SYSTEM_DEBUG_LOG;
1400 CHECK(logging::InitLogging(settings));
1402 const base::CommandLine* cmd_line = base::CommandLine::ForCurrentProcess();
1403 DCHECK(cmd_line);
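// Recognized switches (handled in the loop below): --test_video_data,
// --output_log, --rendering_fps, --disable_rendering, and the logging
// switches -v / --vmodule; anything else is fatal.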
1405 base::CommandLine::SwitchMap switches = cmd_line->GetSwitches();
1406 for (CommandLine::SwitchMap::const_iterator it = switches.begin();
1407 it != switches.end(); ++it) {
1408 if (it->first == "test_video_data") {
1409 content::g_test_video_data = it->second.c_str();
1410 continue;
1412 // The output log for VDA performance test.
1413 if (it->first == "output_log") {
1414 content::g_output_log = it->second.c_str();
1415 continue;
1417 if (it->first == "rendering_fps") {
1418 // On Windows, CommandLine::StringType is wstring. We need to convert
1419 // it to std::string first.
1420 std::string input(it->second.begin(), it->second.end());
1421 CHECK(base::StringToDouble(input, &content::g_rendering_fps));
1422 continue;
1424 // TODO(owenlin): Remove this flag once it is not used in autotest.
1425 if (it->first == "disable_rendering") {
1426 content::g_rendering_fps = 0;
1427 continue;
1429 if (it->first == "v" || it->first == "vmodule")
1430 continue;
1431 LOG(FATAL) << "Unexpected switch: " << it->first << ":" << it->second;
1434 base::ShadowingAtExitManager at_exit_manager;
1435 content::RenderingHelper::InitializeOneOff();
1437 return RUN_ALL_TESTS();