chromecast/media/cma/backend/audio_video_pipeline_device_unittest.cc
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <vector>

#include "base/basictypes.h"
#include "base/bind.h"
#include "base/command_line.h"
#include "base/files/file_path.h"
#include "base/files/memory_mapped_file.h"
#include "base/logging.h"
#include "base/memory/ref_counted.h"
#include "base/memory/scoped_ptr.h"
#include "base/memory/scoped_vector.h"
#include "base/message_loop/message_loop.h"
#include "base/path_service.h"
#include "base/single_thread_task_runner.h"
#include "base/thread_task_runner_handle.h"
#include "base/threading/thread.h"
#include "base/time/time.h"
#include "chromecast/base/task_runner_impl.h"
#include "chromecast/media/cma/base/decoder_buffer_adapter.h"
#include "chromecast/media/cma/base/decoder_config_adapter.h"
#include "chromecast/media/cma/test/frame_segmenter_for_test.h"
#include "chromecast/media/cma/test/media_component_device_feeder_for_test.h"
#include "chromecast/public/cast_media_shlib.h"
#include "chromecast/public/media/audio_pipeline_device.h"
#include "chromecast/public/media/cast_decoder_buffer.h"
#include "chromecast/public/media/decoder_config.h"
#include "chromecast/public/media/media_clock_device.h"
#include "chromecast/public/media/media_pipeline_backend.h"
#include "chromecast/public/media/media_pipeline_device_params.h"
#include "chromecast/public/media/video_pipeline_device.h"
#include "media/base/audio_decoder_config.h"
#include "media/base/decoder_buffer.h"
#include "media/base/video_decoder_config.h"
#include "testing/gtest/include/gtest/gtest.h"

namespace chromecast {
namespace media {

namespace {

typedef ScopedVector<MediaComponentDeviceFeederForTest>::iterator
    ComponentDeviceIterator;

const base::TimeDelta kMonitorLoopDelay = base::TimeDelta::FromMilliseconds(20);

base::FilePath GetTestDataFilePath(const std::string& name) {
  base::FilePath file_path;
  CHECK(PathService::Get(base::DIR_SOURCE_ROOT, &file_path));

  file_path = file_path.Append(FILE_PATH_LITERAL("media"))
      .Append(FILE_PATH_LITERAL("test")).Append(FILE_PATH_LITERAL("data"))
      .AppendASCII(name);
  return file_path;
}

}  // namespace
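
// Exercises the platform media pipeline backend end to end: demuxes (or
// segments) test media files, feeds the resulting frames to the audio/video
// pipeline devices, and drives the media clock, optionally pausing playback
// according to a configurable pause pattern.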
class AudioVideoPipelineDeviceTest : public testing::Test {
 public:
  struct PauseInfo {
    PauseInfo() {}
    PauseInfo(base::TimeDelta d, base::TimeDelta l) : delay(d), length(l) {}
    ~PauseInfo() {}

    base::TimeDelta delay;
    base::TimeDelta length;
  };

  AudioVideoPipelineDeviceTest();
  ~AudioVideoPipelineDeviceTest() override;

  void SetUp() override {
    CastMediaShlib::Initialize(
        base::CommandLine::ForCurrentProcess()->argv());
  }

  void TearDown() override {
    CastMediaShlib::Finalize();
  }
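
  // Configures the backend from a test media file: audio only, video only
  // (optionally a raw H.264 elementary stream), or audio and video together.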
  void ConfigureForFile(const std::string& filename);
  void ConfigureForAudioOnly(const std::string& filename);
  void ConfigureForVideoOnly(const std::string& filename, bool raw_h264);

  // The pattern loops: wait at least pattern[i].delay of media clock time
  // between pauses, then pause for at least pattern[i].length of MessageLoop
  // time. A pause with a negative delay stops the sequence without looping.
  void SetPausePattern(const std::vector<PauseInfo> pattern);

  // Adds a pause to the end of the pause pattern.
  void AddPause(base::TimeDelta delay, base::TimeDelta length);

  void Start();

 private:
  void Initialize();

  void LoadAudioStream(const std::string& filename);
  void LoadVideoStream(const std::string& filename, bool raw_h264);

  void MonitorLoop();

  void OnPauseCompleted();

  void OnEos(MediaComponentDeviceFeederForTest* device_feeder);

  scoped_ptr<TaskRunnerImpl> task_runner_;
  scoped_ptr<MediaPipelineBackend> backend_;
  MediaClockDevice* media_clock_device_;

  // Devices to feed.
  ScopedVector<MediaComponentDeviceFeederForTest>
      component_device_feeders_;

  // Media time at the last pause (zero before the first pause).
  base::TimeDelta pause_time_;

  // Pause settings.
  std::vector<PauseInfo> pause_pattern_;
  int pause_pattern_idx_;

  DISALLOW_COPY_AND_ASSIGN(AudioVideoPipelineDeviceTest);
};

AudioVideoPipelineDeviceTest::AudioVideoPipelineDeviceTest()
    : pause_pattern_() {
}

AudioVideoPipelineDeviceTest::~AudioVideoPipelineDeviceTest() {
}

void AudioVideoPipelineDeviceTest::AddPause(base::TimeDelta delay,
                                            base::TimeDelta length) {
  pause_pattern_.push_back(PauseInfo(delay, length));
}

void AudioVideoPipelineDeviceTest::SetPausePattern(
    const std::vector<PauseInfo> pattern) {
  pause_pattern_ = pattern;
}

void AudioVideoPipelineDeviceTest::ConfigureForAudioOnly(
    const std::string& filename) {
  Initialize();
  LoadAudioStream(filename);
}

void AudioVideoPipelineDeviceTest::ConfigureForVideoOnly(
    const std::string& filename,
    bool raw_h264) {
  Initialize();
  LoadVideoStream(filename, raw_h264);
}

void AudioVideoPipelineDeviceTest::ConfigureForFile(
    const std::string& filename) {
  Initialize();
  LoadVideoStream(filename, false /* raw_h264 */);
  LoadAudioStream(filename);
}
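
// Demuxes the audio track from |filename|, configures the audio pipeline
// device, appends an end-of-stream buffer, and registers a feeder for the
// resulting frames.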
void AudioVideoPipelineDeviceTest::LoadAudioStream(
    const std::string& filename) {
  base::FilePath file_path = GetTestDataFilePath(filename);
  DemuxResult demux_result = FFmpegDemuxForTest(file_path, true /* audio */);
  BufferList frames = demux_result.frames;

  AudioPipelineDevice* audio_pipeline_device = backend_->GetAudio();

  bool success = audio_pipeline_device->SetConfig(
      DecoderConfigAdapter::ToCastAudioConfig(kPrimary,
                                              demux_result.audio_config));
  ASSERT_TRUE(success);

  VLOG(2) << "Got " << frames.size() << " audio input frames";

  frames.push_back(
      scoped_refptr<DecoderBufferBase>(
          new DecoderBufferAdapter(::media::DecoderBuffer::CreateEOSBuffer())));

  MediaComponentDeviceFeederForTest* device_feeder =
      new MediaComponentDeviceFeederForTest(audio_pipeline_device, frames);
  device_feeder->Initialize(base::Bind(&AudioVideoPipelineDeviceTest::OnEos,
                                       base::Unretained(this),
                                       device_feeder));
  component_device_feeders_.push_back(device_feeder);
}
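
// Loads video frames either from a raw H.264 elementary stream (segmented by
// H264SegmenterForTest) or by demuxing |filename| with FFmpeg, then configures
// the video pipeline device and registers a feeder for the frames.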
void AudioVideoPipelineDeviceTest::LoadVideoStream(const std::string& filename,
                                                   bool raw_h264) {
  BufferList frames;
  VideoConfig video_config;

  if (raw_h264) {
    base::FilePath file_path = GetTestDataFilePath(filename);
    base::MemoryMappedFile video_stream;
    ASSERT_TRUE(video_stream.Initialize(file_path))
        << "Couldn't open stream file: " << file_path.MaybeAsASCII();
    frames = H264SegmenterForTest(video_stream.data(), video_stream.length());

    // TODO(erickung): Either pull the configuration from the stream or make
    // the caller specify it.
    video_config.codec = kCodecH264;
    video_config.profile = kH264Main;
    video_config.additional_config = NULL;
    video_config.is_encrypted = false;
  } else {
    base::FilePath file_path = GetTestDataFilePath(filename);
    DemuxResult demux_result = FFmpegDemuxForTest(file_path,
                                                  false /* audio */);
    frames = demux_result.frames;
    video_config = DecoderConfigAdapter::ToCastVideoConfig(
        kPrimary, demux_result.video_config);
  }

  VideoPipelineDevice* video_pipeline_device = backend_->GetVideo();

  // Set the configuration.
  bool success = video_pipeline_device->SetConfig(video_config);
  ASSERT_TRUE(success);

  VLOG(2) << "Got " << frames.size() << " video input frames";

  frames.push_back(
      scoped_refptr<DecoderBufferBase>(new DecoderBufferAdapter(
          ::media::DecoderBuffer::CreateEOSBuffer())));

  MediaComponentDeviceFeederForTest* device_feeder =
      new MediaComponentDeviceFeederForTest(video_pipeline_device, frames);
  device_feeder->Initialize(base::Bind(&AudioVideoPipelineDeviceTest::OnEos,
                                       base::Unretained(this),
                                       device_feeder));
  component_device_feeders_.push_back(device_feeder);
}
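
// Posts a Feed task for every configured stream feeder, starts the media
// clock, and kicks off the monitor loop.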
void AudioVideoPipelineDeviceTest::Start() {
  pause_time_ = base::TimeDelta();
  pause_pattern_idx_ = 0;

  for (size_t i = 0; i < component_device_feeders_.size(); i++) {
    base::ThreadTaskRunnerHandle::Get()->PostTask(
        FROM_HERE, base::Bind(&MediaComponentDeviceFeederForTest::Feed,
                              base::Unretained(component_device_feeders_[i])));
  }

  media_clock_device_->SetState(MediaClockDevice::kStateRunning);

  base::ThreadTaskRunnerHandle::Get()->PostTask(
      FROM_HERE, base::Bind(&AudioVideoPipelineDeviceTest::MonitorLoop,
                            base::Unretained(this)));
}
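
// Polls the media clock every kMonitorLoopDelay. When the next pause in the
// pattern is due, stops the clock and schedules OnPauseCompleted after the
// requested pause length.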
void AudioVideoPipelineDeviceTest::MonitorLoop() {
  base::TimeDelta media_time = base::TimeDelta::FromMicroseconds(
      media_clock_device_->GetTimeMicroseconds());

  if (!pause_pattern_.empty() &&
      pause_pattern_[pause_pattern_idx_].delay >= base::TimeDelta() &&
      media_time >= pause_time_ + pause_pattern_[pause_pattern_idx_].delay) {
    // Pause the media clock.
    media_clock_device_->SetRate(0.0);
    pause_time_ = base::TimeDelta::FromMicroseconds(
        media_clock_device_->GetTimeMicroseconds());

    VLOG(2) << "Pausing at " << pause_time_.InMilliseconds() << "ms for " <<
        pause_pattern_[pause_pattern_idx_].length.InMilliseconds() << "ms";

    // Wait for the pause to finish.
    base::ThreadTaskRunnerHandle::Get()->PostDelayedTask(
        FROM_HERE, base::Bind(&AudioVideoPipelineDeviceTest::OnPauseCompleted,
                              base::Unretained(this)),
        pause_pattern_[pause_pattern_idx_].length);
    return;
  }

  // Check the state again in a little while.
  base::ThreadTaskRunnerHandle::Get()->PostDelayedTask(
      FROM_HERE, base::Bind(&AudioVideoPipelineDeviceTest::MonitorLoop,
                            base::Unretained(this)),
      kMonitorLoopDelay);
}

void AudioVideoPipelineDeviceTest::OnPauseCompleted() {
  // Make sure the media time didn't move during the pause.
  base::TimeDelta media_time = base::TimeDelta::FromMicroseconds(
      media_clock_device_->GetTimeMicroseconds());

  // TODO(damienv): This should be:
  //   EXPECT_EQ(media_time, pause_time_);
  // However, some backends move the time forward when rendering the first
  // frame while paused. This behaviour is not intended.
  EXPECT_GE(media_time, pause_time_);
  EXPECT_LE(media_time, pause_time_ + base::TimeDelta::FromMilliseconds(50));

  pause_time_ = media_time;
  pause_pattern_idx_ = (pause_pattern_idx_ + 1) % pause_pattern_.size();

  VLOG(2) << "Pause complete, restarting media clock";

  // Resume playback and frame feeding.
  media_clock_device_->SetRate(1.0);

  MonitorLoop();
}
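
// Called when a stream reaches end-of-stream: removes its feeder and quits
// the message loop once all streams have finished.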
void AudioVideoPipelineDeviceTest::OnEos(
    MediaComponentDeviceFeederForTest* device_feeder) {
  for (ComponentDeviceIterator it = component_device_feeders_.begin();
       it != component_device_feeders_.end();
       ++it) {
    if (*it == device_feeder) {
      component_device_feeders_.erase(it);
      break;
    }
  }

  // Check if all streams finished.
  if (component_device_feeders_.empty())
    base::MessageLoop::current()->QuitWhenIdle();
}
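
// Creates the media pipeline backend and puts the media clock into a known
// idle state at time 0, ready for playback to start.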
void AudioVideoPipelineDeviceTest::Initialize() {
  // Create the media device.
  task_runner_.reset(new TaskRunnerImpl());
  MediaPipelineDeviceParams params(task_runner_.get());
  backend_.reset(CastMediaShlib::CreateMediaPipelineBackend(params));
  media_clock_device_ = backend_->GetClock();

  // Clock initialization and configuration.
  bool success =
      media_clock_device_->SetState(MediaClockDevice::kStateIdle);
  ASSERT_TRUE(success);
  success = media_clock_device_->ResetTimeline(0);
  ASSERT_TRUE(success);
  media_clock_device_->SetRate(1.0);
}
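
// Each test runs a single media file through the pipeline on a fresh message
// loop; the loop quits once every stream signals end-of-stream.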
TEST_F(AudioVideoPipelineDeviceTest, Mp3Playback) {
  scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());

  ConfigureForAudioOnly("sfx.mp3");
  Start();
  message_loop->Run();
}

TEST_F(AudioVideoPipelineDeviceTest, VorbisPlayback) {
  scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());

  ConfigureForAudioOnly("sfx.ogg");
  Start();
  message_loop->Run();
}

TEST_F(AudioVideoPipelineDeviceTest, H264Playback) {
  scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());

  ConfigureForVideoOnly("bear.h264", true /* raw_h264 */);
  Start();
  message_loop->Run();
}

TEST_F(AudioVideoPipelineDeviceTest, WebmPlaybackWithPause) {
  scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());

  // Set up to pause for 100ms every 500ms.
  AddPause(base::TimeDelta::FromMilliseconds(500),
           base::TimeDelta::FromMilliseconds(100));

  ConfigureForVideoOnly("bear-640x360.webm", false /* raw_h264 */);
  Start();
  message_loop->Run();
}

TEST_F(AudioVideoPipelineDeviceTest, Vp8Playback) {
  scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());

  ConfigureForVideoOnly("bear-vp8a.webm", false /* raw_h264 */);
  Start();
  message_loop->Run();
}

TEST_F(AudioVideoPipelineDeviceTest, WebmPlayback) {
  scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());

  ConfigureForFile("bear-640x360.webm");
  Start();
  message_loop->Run();
}

}  // namespace media
}  // namespace chromecast