// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <vector>

#include "base/basictypes.h"
#include "base/bind.h"
#include "base/command_line.h"
#include "base/files/file_path.h"
#include "base/files/memory_mapped_file.h"
#include "base/logging.h"
#include "base/memory/ref_counted.h"
#include "base/memory/scoped_ptr.h"
#include "base/memory/scoped_vector.h"
#include "base/message_loop/message_loop.h"
#include "base/path_service.h"
#include "base/single_thread_task_runner.h"
#include "base/thread_task_runner_handle.h"
#include "base/threading/thread.h"
#include "base/time/time.h"
#include "chromecast/base/task_runner_impl.h"
#include "chromecast/media/cma/base/decoder_buffer_adapter.h"
#include "chromecast/media/cma/base/decoder_config_adapter.h"
#include "chromecast/media/cma/test/frame_segmenter_for_test.h"
#include "chromecast/media/cma/test/media_component_device_feeder_for_test.h"
#include "chromecast/public/cast_media_shlib.h"
#include "chromecast/public/media/audio_pipeline_device.h"
#include "chromecast/public/media/cast_decoder_buffer.h"
#include "chromecast/public/media/decoder_config.h"
#include "chromecast/public/media/media_clock_device.h"
#include "chromecast/public/media/media_pipeline_backend.h"
#include "chromecast/public/media/media_pipeline_device_params.h"
#include "chromecast/public/media/video_pipeline_device.h"
#include "media/base/audio_decoder_config.h"
#include "media/base/decoder_buffer.h"
#include "media/base/video_decoder_config.h"
#include "testing/gtest/include/gtest/gtest.h"

namespace chromecast {
namespace media {

namespace {

typedef ScopedVector<MediaComponentDeviceFeederForTest>::iterator
    ComponentDeviceIterator;

const base::TimeDelta kMonitorLoopDelay = base::TimeDelta::FromMilliseconds(20);

base::FilePath GetTestDataFilePath(const std::string& name) {
  base::FilePath file_path;
  CHECK(PathService::Get(base::DIR_SOURCE_ROOT, &file_path));

  file_path = file_path.Append(FILE_PATH_LITERAL("media"))
      .Append(FILE_PATH_LITERAL("test")).Append(FILE_PATH_LITERAL("data"))
      .AppendASCII(name);
  return file_path;
}

}  // namespace

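// Fixture that initializes the Cast media shared library, creates a media
// pipeline backend, feeds it demuxed audio and/or video frames from test
// files, and optionally pauses playback according to a pause pattern while
// monitoring the media clock.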
class AudioVideoPipelineDeviceTest : public testing::Test {
 public:
  struct PauseInfo {
    PauseInfo() {}
    PauseInfo(base::TimeDelta d, base::TimeDelta l) : delay(d), length(l) {}
    ~PauseInfo() {}

    base::TimeDelta delay;
    base::TimeDelta length;
  };

  AudioVideoPipelineDeviceTest();
  ~AudioVideoPipelineDeviceTest() override;

  void SetUp() override {
    CastMediaShlib::Initialize(
        base::CommandLine::ForCurrentProcess()->argv());
  }

  void TearDown() override {
    CastMediaShlib::Finalize();
  }

  void ConfigureForFile(std::string filename);
  void ConfigureForAudioOnly(std::string filename);
  void ConfigureForVideoOnly(std::string filename, bool raw_h264);

  // Sets the pause pattern. The pattern loops: playback runs for at least
  // pattern[i].delay of media-clock time between pauses, then pauses for at
  // least pattern[i].length of MessageLoop (wall-clock) time. A pause with a
  // negative delay ends the sequence instead of looping.
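  //
  // For example, a pattern equivalent to what WebmPlaybackWithPause builds
  // below (pause for 100ms after every 500ms of playback) would be:
  //
  //   std::vector<PauseInfo> pattern;
  //   pattern.push_back(PauseInfo(base::TimeDelta::FromMilliseconds(500),
  //                               base::TimeDelta::FromMilliseconds(100)));
  //   SetPausePattern(pattern);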
  void SetPausePattern(const std::vector<PauseInfo> pattern);

  // Adds a pause to the end of the pause pattern.
  void AddPause(base::TimeDelta delay, base::TimeDelta length);

  void Start();

 private:
  void Initialize();

  void LoadAudioStream(std::string filename);
  void LoadVideoStream(std::string filename, bool raw_h264);

  void MonitorLoop();

  void OnPauseCompleted();

  void OnEos(MediaComponentDeviceFeederForTest* device_feeder);

  scoped_ptr<TaskRunnerImpl> task_runner_;
  scoped_ptr<MediaPipelineBackend> backend_;
  MediaClockDevice* media_clock_device_;

  // Devices to feed.
  ScopedVector<MediaComponentDeviceFeederForTest>
      component_device_feeders_;

  // Current media time.
  base::TimeDelta pause_time_;

  // Pause settings.
  std::vector<PauseInfo> pause_pattern_;
  int pause_pattern_idx_;

  DISALLOW_COPY_AND_ASSIGN(AudioVideoPipelineDeviceTest);
};

AudioVideoPipelineDeviceTest::AudioVideoPipelineDeviceTest()
    : pause_pattern_() {
}

AudioVideoPipelineDeviceTest::~AudioVideoPipelineDeviceTest() {
}

void AudioVideoPipelineDeviceTest::AddPause(base::TimeDelta delay,
                                            base::TimeDelta length) {
  pause_pattern_.push_back(PauseInfo(delay, length));
}

void AudioVideoPipelineDeviceTest::SetPausePattern(
    const std::vector<PauseInfo> pattern) {
  pause_pattern_ = pattern;
}

void AudioVideoPipelineDeviceTest::ConfigureForAudioOnly(std::string filename) {
  Initialize();
  LoadAudioStream(filename);
}

void AudioVideoPipelineDeviceTest::ConfigureForVideoOnly(std::string filename,
                                                         bool raw_h264) {
  Initialize();
  LoadVideoStream(filename, raw_h264);
}

void AudioVideoPipelineDeviceTest::ConfigureForFile(std::string filename) {
  Initialize();
  LoadVideoStream(filename, false /* raw_h264 */);
  LoadAudioStream(filename);
}

void AudioVideoPipelineDeviceTest::LoadAudioStream(std::string filename) {
  base::FilePath file_path = GetTestDataFilePath(filename);
  DemuxResult demux_result = FFmpegDemuxForTest(file_path, true /* audio */);
  BufferList frames = demux_result.frames;

  AudioPipelineDevice* audio_pipeline_device = backend_->GetAudio();

  bool success = audio_pipeline_device->SetConfig(
      DecoderConfigAdapter::ToCastAudioConfig(kPrimary,
                                              demux_result.audio_config));
  ASSERT_TRUE(success);

  VLOG(2) << "Got " << frames.size() << " audio input frames";

  // Append an end-of-stream buffer so the feeder can signal completion.
  frames.push_back(
      scoped_refptr<DecoderBufferBase>(
          new DecoderBufferAdapter(::media::DecoderBuffer::CreateEOSBuffer())));

  MediaComponentDeviceFeederForTest* device_feeder =
      new MediaComponentDeviceFeederForTest(audio_pipeline_device, frames);
  device_feeder->Initialize(base::Bind(&AudioVideoPipelineDeviceTest::OnEos,
                                       base::Unretained(this),
                                       device_feeder));
  component_device_feeders_.push_back(device_feeder);
}

void AudioVideoPipelineDeviceTest::LoadVideoStream(std::string filename,
                                                   bool raw_h264) {
  BufferList frames;
  VideoConfig video_config;

  if (raw_h264) {
    base::FilePath file_path = GetTestDataFilePath(filename);
    base::MemoryMappedFile video_stream;
    ASSERT_TRUE(video_stream.Initialize(file_path))
        << "Couldn't open stream file: " << file_path.MaybeAsASCII();
    frames = H264SegmenterForTest(video_stream.data(), video_stream.length());

    // TODO(erickung): Either pull data from stream or make caller specify value
    video_config.codec = kCodecH264;
    video_config.profile = kH264Main;
    video_config.additional_config = NULL;
    video_config.is_encrypted = false;
  } else {
    base::FilePath file_path = GetTestDataFilePath(filename);
    DemuxResult demux_result = FFmpegDemuxForTest(file_path,
                                                  false /* audio */);
    frames = demux_result.frames;
    video_config = DecoderConfigAdapter::ToCastVideoConfig(
        kPrimary, demux_result.video_config);
  }

  VideoPipelineDevice* video_pipeline_device = backend_->GetVideo();

  // Set configuration.
  bool success = video_pipeline_device->SetConfig(video_config);
  ASSERT_TRUE(success);

  VLOG(2) << "Got " << frames.size() << " video input frames";

  frames.push_back(
      scoped_refptr<DecoderBufferBase>(new DecoderBufferAdapter(
          ::media::DecoderBuffer::CreateEOSBuffer())));

  MediaComponentDeviceFeederForTest* device_feeder =
      new MediaComponentDeviceFeederForTest(video_pipeline_device, frames);
  device_feeder->Initialize(base::Bind(&AudioVideoPipelineDeviceTest::OnEos,
                                       base::Unretained(this),
                                       device_feeder));
  component_device_feeders_.push_back(device_feeder);
}

void AudioVideoPipelineDeviceTest::Start() {
  pause_time_ = base::TimeDelta();
  pause_pattern_idx_ = 0;

  for (size_t i = 0; i < component_device_feeders_.size(); i++) {
    base::ThreadTaskRunnerHandle::Get()->PostTask(
        FROM_HERE, base::Bind(&MediaComponentDeviceFeederForTest::Feed,
                              base::Unretained(component_device_feeders_[i])));
  }

  media_clock_device_->SetState(MediaClockDevice::kStateRunning);

  base::ThreadTaskRunnerHandle::Get()->PostTask(
      FROM_HERE, base::Bind(&AudioVideoPipelineDeviceTest::MonitorLoop,
                            base::Unretained(this)));
}

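// MonitorLoop polls the media clock every kMonitorLoopDelay. Once the current
// pause-pattern entry's delay has elapsed since the last pause, it stops the
// clock (SetRate(0.0)), records the pause time, and schedules
// OnPauseCompleted() after the entry's length of wall-clock time; otherwise it
// simply reschedules itself.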
void AudioVideoPipelineDeviceTest::MonitorLoop() {
  base::TimeDelta media_time = base::TimeDelta::FromMicroseconds(
      media_clock_device_->GetTimeMicroseconds());

  if (!pause_pattern_.empty() &&
      pause_pattern_[pause_pattern_idx_].delay >= base::TimeDelta() &&
      media_time >= pause_time_ + pause_pattern_[pause_pattern_idx_].delay) {
    // Pause playback by stopping the media clock.
    media_clock_device_->SetRate(0.0);
    pause_time_ = base::TimeDelta::FromMicroseconds(
        media_clock_device_->GetTimeMicroseconds());

    VLOG(2) << "Pausing at " << pause_time_.InMilliseconds() << "ms for " <<
        pause_pattern_[pause_pattern_idx_].length.InMilliseconds() << "ms";

    // Wait for the pause to finish.
    base::ThreadTaskRunnerHandle::Get()->PostDelayedTask(
        FROM_HERE, base::Bind(&AudioVideoPipelineDeviceTest::OnPauseCompleted,
                              base::Unretained(this)),
        pause_pattern_[pause_pattern_idx_].length);
    return;
  }

  // Check the state again in a little while.
  base::ThreadTaskRunnerHandle::Get()->PostDelayedTask(
      FROM_HERE, base::Bind(&AudioVideoPipelineDeviceTest::MonitorLoop,
                            base::Unretained(this)),
      kMonitorLoopDelay);
}

void AudioVideoPipelineDeviceTest::OnPauseCompleted() {
  // Make sure the media time didn't move while paused.
  base::TimeDelta media_time = base::TimeDelta::FromMicroseconds(
      media_clock_device_->GetTimeMicroseconds());

  // TODO(damienv): This should be:
  //   EXPECT_EQ(media_time, pause_time_);
  // However, some backends move the media time forward when rendering the
  // first frame while paused. This behaviour is not intended.
  EXPECT_GE(media_time, pause_time_);
  EXPECT_LE(media_time, pause_time_ + base::TimeDelta::FromMilliseconds(50));

  pause_time_ = media_time;
  pause_pattern_idx_ = (pause_pattern_idx_ + 1) % pause_pattern_.size();

  VLOG(2) << "Pause complete, restarting media clock";

  // Resume playback and frame feeding.
  media_clock_device_->SetRate(1.0);

  MonitorLoop();
}

void AudioVideoPipelineDeviceTest::OnEos(
    MediaComponentDeviceFeederForTest* device_feeder) {
  for (ComponentDeviceIterator it = component_device_feeders_.begin();
       it != component_device_feeders_.end();
       ++it) {
    if (*it == device_feeder) {
      component_device_feeders_.erase(it);
      break;
    }
  }

  // Check if all streams have finished.
  if (component_device_feeders_.empty())
    base::MessageLoop::current()->QuitWhenIdle();
}

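// Initialize() creates the backend through the media shared library and puts
// the media clock into a known state: idle, timeline reset to 0, rate 1.0.
// Start() later switches the clock to kStateRunning once the feeders have
// been posted.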
void AudioVideoPipelineDeviceTest::Initialize() {
  // Create the media device.
  task_runner_.reset(new TaskRunnerImpl());
  MediaPipelineDeviceParams params(task_runner_.get());
  backend_.reset(CastMediaShlib::CreateMediaPipelineBackend(params));
  media_clock_device_ = backend_->GetClock();

  // Clock initialization and configuration.
  bool success =
      media_clock_device_->SetState(MediaClockDevice::kStateIdle);
  ASSERT_TRUE(success);
  success = media_clock_device_->ResetTimeline(0);
  ASSERT_TRUE(success);
  media_clock_device_->SetRate(1.0);
}

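// Each test below creates a MessageLoop, configures the pipeline for a test
// media file, starts playback, and runs the loop until OnEos() has removed
// every feeder and quits the loop.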
TEST_F(AudioVideoPipelineDeviceTest, Mp3Playback) {
  scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());

  ConfigureForAudioOnly("sfx.mp3");
  Start();
  message_loop->Run();
}

TEST_F(AudioVideoPipelineDeviceTest, VorbisPlayback) {
  scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());

  ConfigureForAudioOnly("sfx.ogg");
  Start();
  message_loop->Run();
}

TEST_F(AudioVideoPipelineDeviceTest, H264Playback) {
  scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());

  ConfigureForVideoOnly("bear.h264", true /* raw_h264 */);
  Start();
  message_loop->Run();
}

TEST_F(AudioVideoPipelineDeviceTest, WebmPlaybackWithPause) {
  scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());

  // Set up to pause for 100ms every 500ms of playback.
  AddPause(base::TimeDelta::FromMilliseconds(500),
           base::TimeDelta::FromMilliseconds(100));

  ConfigureForVideoOnly("bear-640x360.webm", false /* raw_h264 */);
  Start();
  message_loop->Run();
}

TEST_F(AudioVideoPipelineDeviceTest, Vp8Playback) {
  scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());

  ConfigureForVideoOnly("bear-vp8a.webm", false /* raw_h264 */);
  Start();
  message_loop->Run();
}

TEST_F(AudioVideoPipelineDeviceTest, WebmPlayback) {
  scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());

  ConfigureForFile("bear-640x360.webm");
  Start();
  message_loop->Run();
}

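// None of the tests above exercise SetPausePattern() directly. The following
// is a minimal sketch of such a test, assuming the same sfx.mp3 asset used by
// Mp3Playback; the 200ms/50ms values are illustrative and not taken from the
// original suite.
TEST_F(AudioVideoPipelineDeviceTest, Mp3PlaybackWithPausePattern) {
  scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());

  // Build a one-entry pattern: pause for 50ms after every 200ms of playback.
  std::vector<PauseInfo> pattern;
  pattern.push_back(PauseInfo(base::TimeDelta::FromMilliseconds(200),
                              base::TimeDelta::FromMilliseconds(50)));
  SetPausePattern(pattern);

  ConfigureForAudioOnly("sfx.mp3");
  Start();
  message_loop->Run();
}
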
}  // namespace media
}  // namespace chromecast