Refactored not to expose raw pointers on ProxyList class.
[chromium-blink-merge.git] / chromecast / media / cma / backend / audio_video_pipeline_device_unittest.cc
blobd112ab28feb61a0c94853a7bf84683fac758c894
1 // Copyright 2014 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
#include <algorithm>
#include <vector>

#include "base/basictypes.h"
#include "base/bind.h"
#include "base/command_line.h"
#include "base/files/file_path.h"
#include "base/files/memory_mapped_file.h"
#include "base/logging.h"
#include "base/memory/ref_counted.h"
#include "base/memory/scoped_ptr.h"
#include "base/memory/scoped_vector.h"
#include "base/message_loop/message_loop.h"
#include "base/path_service.h"
#include "base/single_thread_task_runner.h"
#include "base/thread_task_runner_handle.h"
#include "base/threading/thread.h"
#include "base/time/time.h"
#include "chromecast/base/task_runner_impl.h"
#include "chromecast/media/cma/base/decoder_buffer_adapter.h"
#include "chromecast/media/cma/base/decoder_config_adapter.h"
#include "chromecast/media/cma/test/frame_segmenter_for_test.h"
#include "chromecast/media/cma/test/media_component_device_feeder_for_test.h"
#include "chromecast/public/cast_media_shlib.h"
#include "chromecast/public/media/audio_pipeline_device.h"
#include "chromecast/public/media/cast_decoder_buffer.h"
#include "chromecast/public/media/decoder_config.h"
#include "chromecast/public/media/media_clock_device.h"
#include "chromecast/public/media/media_pipeline_backend.h"
#include "chromecast/public/media/media_pipeline_device_params.h"
#include "chromecast/public/media/video_pipeline_device.h"
#include "media/base/audio_decoder_config.h"
#include "media/base/buffers.h"
#include "media/base/decoder_buffer.h"
#include "media/base/video_decoder_config.h"
#include "testing/gtest/include/gtest/gtest.h"
41 namespace chromecast {
42 namespace media {
44 namespace {
46 typedef ScopedVector<MediaComponentDeviceFeederForTest>::iterator
47 ComponentDeviceIterator;
49 const base::TimeDelta kMonitorLoopDelay = base::TimeDelta::FromMilliseconds(20);
51 base::FilePath GetTestDataFilePath(const std::string& name) {
52 base::FilePath file_path;
53 CHECK(PathService::Get(base::DIR_SOURCE_ROOT, &file_path));
55 file_path = file_path.Append(FILE_PATH_LITERAL("media"))
56 .Append(FILE_PATH_LITERAL("test")).Append(FILE_PATH_LITERAL("data"))
57 .AppendASCII(name);
58 return file_path;
61 } // namespace
// Exercises a real media pipeline backend (created through CastMediaShlib)
// by feeding demuxed audio/video frames and optionally pausing/resuming the
// media clock according to a configurable pause pattern.
class AudioVideoPipelineDeviceTest : public testing::Test {
 public:
  // One entry of a pause pattern: wait at least |delay| of media time since
  // the previous pause, then pause playback for at least |length| of
  // message-loop (wall-clock) time.
  struct PauseInfo {
    PauseInfo() {}
    PauseInfo(base::TimeDelta d, base::TimeDelta l) : delay(d), length(l) {}
    ~PauseInfo() {}

    base::TimeDelta delay;
    base::TimeDelta length;
  };

  AudioVideoPipelineDeviceTest();
  ~AudioVideoPipelineDeviceTest() override;

  void SetUp() override {
    CastMediaShlib::Initialize(
        base::CommandLine::ForCurrentProcess()->argv());
  }

  void TearDown() override {
    CastMediaShlib::Finalize();
  }

  // Initializes the backend and loads audio+video, audio-only, or
  // video-only streams from the named test data file.
  void ConfigureForFile(std::string filename);
  void ConfigureForAudioOnly(std::string filename);
  void ConfigureForVideoOnly(std::string filename, bool raw_h264);

  // Pattern loops, waiting >= pattern[i].delay against media clock between
  // pauses, then pausing for >= pattern[i].length against MessageLoop
  // A pause with delay <0 signals to stop sequence and do not loop
  void SetPausePattern(const std::vector<PauseInfo> pattern);

  // Adds a pause to the end of pause pattern
  void AddPause(base::TimeDelta delay, base::TimeDelta length);

  // Starts feeding frames, runs the media clock, and begins monitoring.
  void Start();

 private:
  void Initialize();

  void LoadAudioStream(std::string filename);
  void LoadVideoStream(std::string filename, bool raw_h264);

  // Periodic media-clock poll that applies the pause pattern.
  void MonitorLoop();

  void OnPauseCompleted();

  // Invoked by a feeder after it pushed its end-of-stream buffer.
  void OnEos(MediaComponentDeviceFeederForTest* device_feeder);

  scoped_ptr<TaskRunnerImpl> task_runner_;
  scoped_ptr<MediaPipelineBackend> backend_;
  // Owned by |backend_|; valid after Initialize().
  MediaClockDevice* media_clock_device_;

  // Devices to feed
  ScopedVector<MediaComponentDeviceFeederForTest>
      component_device_feeders_;

  // Current media time.
  base::TimeDelta pause_time_;

  // Pause settings
  std::vector<PauseInfo> pause_pattern_;
  int pause_pattern_idx_;

  DISALLOW_COPY_AND_ASSIGN(AudioVideoPipelineDeviceTest);
};
130 AudioVideoPipelineDeviceTest::AudioVideoPipelineDeviceTest()
131 : pause_pattern_() {
134 AudioVideoPipelineDeviceTest::~AudioVideoPipelineDeviceTest() {
137 void AudioVideoPipelineDeviceTest::AddPause(base::TimeDelta delay,
138 base::TimeDelta length) {
139 pause_pattern_.push_back(PauseInfo(delay, length));
142 void AudioVideoPipelineDeviceTest::SetPausePattern(
143 const std::vector<PauseInfo> pattern) {
144 pause_pattern_ = pattern;
// Sets up an audio-only pipeline fed from the test data file |filename|.
void AudioVideoPipelineDeviceTest::ConfigureForAudioOnly(std::string filename) {
  Initialize();
  LoadAudioStream(filename);
}
// Sets up a video-only pipeline fed from |filename|. |raw_h264| selects the
// raw H.264 elementary-stream path instead of FFmpeg demuxing.
void AudioVideoPipelineDeviceTest::ConfigureForVideoOnly(std::string filename,
                                                         bool raw_h264) {
  Initialize();
  LoadVideoStream(filename, raw_h264);
}
// Sets up a combined audio+video pipeline, demuxing both tracks from the
// same test data file |filename|.
void AudioVideoPipelineDeviceTest::ConfigureForFile(std::string filename) {
  Initialize();
  LoadVideoStream(filename, false /* raw_h264 */);
  LoadAudioStream(filename);
}
164 void AudioVideoPipelineDeviceTest::LoadAudioStream(std::string filename) {
165 base::FilePath file_path = GetTestDataFilePath(filename);
166 DemuxResult demux_result = FFmpegDemuxForTest(file_path, true /* audio */);
167 BufferList frames = demux_result.frames;
169 AudioPipelineDevice* audio_pipeline_device = backend_->GetAudio();
171 bool success = audio_pipeline_device->SetConfig(
172 DecoderConfigAdapter::ToCastAudioConfig(kPrimary,
173 demux_result.audio_config));
174 ASSERT_TRUE(success);
176 VLOG(2) << "Got " << frames.size() << " audio input frames";
178 frames.push_back(
179 scoped_refptr<DecoderBufferBase>(
180 new DecoderBufferAdapter(::media::DecoderBuffer::CreateEOSBuffer())));
182 MediaComponentDeviceFeederForTest* device_feeder =
183 new MediaComponentDeviceFeederForTest(audio_pipeline_device, frames);
184 device_feeder->Initialize(base::Bind(&AudioVideoPipelineDeviceTest::OnEos,
185 base::Unretained(this),
186 device_feeder));
187 component_device_feeders_.push_back(device_feeder);
190 void AudioVideoPipelineDeviceTest::LoadVideoStream(std::string filename,
191 bool raw_h264) {
192 BufferList frames;
193 VideoConfig video_config;
195 if (raw_h264) {
196 base::FilePath file_path = GetTestDataFilePath(filename);
197 base::MemoryMappedFile video_stream;
198 ASSERT_TRUE(video_stream.Initialize(file_path))
199 << "Couldn't open stream file: " << file_path.MaybeAsASCII();
200 frames = H264SegmenterForTest(video_stream.data(), video_stream.length());
202 // TODO(erickung): Either pull data from stream or make caller specify value
203 video_config.codec = kCodecH264;
204 video_config.profile = kH264Main;
205 video_config.additional_config = NULL;
206 video_config.is_encrypted = false;
207 } else {
208 base::FilePath file_path = GetTestDataFilePath(filename);
209 DemuxResult demux_result = FFmpegDemuxForTest(file_path,
210 /*audio*/ false);
211 frames = demux_result.frames;
212 video_config = DecoderConfigAdapter::ToCastVideoConfig(
213 kPrimary, demux_result.video_config);
216 VideoPipelineDevice* video_pipeline_device = backend_->GetVideo();
218 // Set configuration.
219 bool success = video_pipeline_device->SetConfig(video_config);
220 ASSERT_TRUE(success);
222 VLOG(2) << "Got " << frames.size() << " video input frames";
224 frames.push_back(
225 scoped_refptr<DecoderBufferBase>(new DecoderBufferAdapter(
226 ::media::DecoderBuffer::CreateEOSBuffer())));
228 MediaComponentDeviceFeederForTest* device_feeder =
229 new MediaComponentDeviceFeederForTest(video_pipeline_device, frames);
230 device_feeder->Initialize(base::Bind(&AudioVideoPipelineDeviceTest::OnEos,
231 base::Unretained(this),
232 device_feeder));
233 component_device_feeders_.push_back(device_feeder);
236 void AudioVideoPipelineDeviceTest::Start() {
237 pause_time_ = base::TimeDelta();
238 pause_pattern_idx_ = 0;
240 for (size_t i = 0; i < component_device_feeders_.size(); i++) {
241 base::ThreadTaskRunnerHandle::Get()->PostTask(
242 FROM_HERE, base::Bind(&MediaComponentDeviceFeederForTest::Feed,
243 base::Unretained(component_device_feeders_[i])));
246 media_clock_device_->SetState(MediaClockDevice::kStateRunning);
248 base::ThreadTaskRunnerHandle::Get()->PostTask(
249 FROM_HERE, base::Bind(&AudioVideoPipelineDeviceTest::MonitorLoop,
250 base::Unretained(this)));
253 void AudioVideoPipelineDeviceTest::MonitorLoop() {
254 base::TimeDelta media_time = base::TimeDelta::FromMicroseconds(
255 media_clock_device_->GetTimeMicroseconds());
257 if (!pause_pattern_.empty() &&
258 pause_pattern_[pause_pattern_idx_].delay >= base::TimeDelta() &&
259 media_time >= pause_time_ + pause_pattern_[pause_pattern_idx_].delay) {
260 // Do Pause
261 media_clock_device_->SetRate(0.0);
262 pause_time_ = base::TimeDelta::FromMicroseconds(
263 media_clock_device_->GetTimeMicroseconds());
265 VLOG(2) << "Pausing at " << pause_time_.InMilliseconds() << "ms for " <<
266 pause_pattern_[pause_pattern_idx_].length.InMilliseconds() << "ms";
268 // Wait for pause finish
269 base::ThreadTaskRunnerHandle::Get()->PostDelayedTask(
270 FROM_HERE, base::Bind(&AudioVideoPipelineDeviceTest::OnPauseCompleted,
271 base::Unretained(this)),
272 pause_pattern_[pause_pattern_idx_].length);
273 return;
276 // Check state again in a little while
277 base::ThreadTaskRunnerHandle::Get()->PostDelayedTask(
278 FROM_HERE, base::Bind(&AudioVideoPipelineDeviceTest::MonitorLoop,
279 base::Unretained(this)),
280 kMonitorLoopDelay);
283 void AudioVideoPipelineDeviceTest::OnPauseCompleted() {
284 // Make sure the media time didn't move during that time.
285 base::TimeDelta media_time = base::TimeDelta::FromMicroseconds(
286 media_clock_device_->GetTimeMicroseconds());
288 // TODO(damienv):
289 // Should be:
290 // EXPECT_EQ(media_time, media_time_);
291 // However, some backends, when rendering the first frame while in paused
292 // mode moves the time forward.
293 // This behaviour is not intended.
294 EXPECT_GE(media_time, pause_time_);
295 EXPECT_LE(media_time, pause_time_ + base::TimeDelta::FromMilliseconds(50));
297 pause_time_ = media_time;
298 pause_pattern_idx_ = (pause_pattern_idx_ + 1) % pause_pattern_.size();
300 VLOG(2) << "Pause complete, restarting media clock";
302 // Resume playback and frame feeding.
303 media_clock_device_->SetRate(1.0);
305 MonitorLoop();
308 void AudioVideoPipelineDeviceTest::OnEos(
309 MediaComponentDeviceFeederForTest* device_feeder) {
310 for (ComponentDeviceIterator it = component_device_feeders_.begin();
311 it != component_device_feeders_.end();
312 ++it) {
313 if (*it == device_feeder) {
314 component_device_feeders_.erase(it);
315 break;
319 // Check if all streams finished
320 if (component_device_feeders_.empty())
321 base::MessageLoop::current()->QuitWhenIdle();
324 void AudioVideoPipelineDeviceTest::Initialize() {
325 // Create the media device.
326 task_runner_.reset(new TaskRunnerImpl());
327 MediaPipelineDeviceParams params(task_runner_.get());
328 backend_.reset(CastMediaShlib::CreateMediaPipelineBackend(params));
329 media_clock_device_ = backend_->GetClock();
331 // Clock initialization and configuration.
332 bool success =
333 media_clock_device_->SetState(MediaClockDevice::kStateIdle);
334 ASSERT_TRUE(success);
335 success = media_clock_device_->ResetTimeline(0);
336 ASSERT_TRUE(success);
337 media_clock_device_->SetRate(1.0);
340 TEST_F(AudioVideoPipelineDeviceTest, Mp3Playback) {
341 scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());
343 ConfigureForAudioOnly("sfx.mp3");
344 Start();
345 message_loop->Run();
348 TEST_F(AudioVideoPipelineDeviceTest, VorbisPlayback) {
349 scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());
351 ConfigureForAudioOnly("sfx.ogg");
352 Start();
353 message_loop->Run();
356 TEST_F(AudioVideoPipelineDeviceTest, H264Playback) {
357 scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());
359 ConfigureForVideoOnly("bear.h264", true /* raw_h264 */);
360 Start();
361 message_loop->Run();
364 TEST_F(AudioVideoPipelineDeviceTest, WebmPlaybackWithPause) {
365 scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());
367 // Setup to pause for 100ms every 500ms
368 AddPause(base::TimeDelta::FromMilliseconds(500),
369 base::TimeDelta::FromMilliseconds(100));
371 ConfigureForVideoOnly("bear-640x360.webm", false /* raw_h264 */);
372 Start();
373 message_loop->Run();
376 TEST_F(AudioVideoPipelineDeviceTest, Vp8Playback) {
377 scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());
379 ConfigureForVideoOnly("bear-vp8a.webm", false /* raw_h264 */);
380 Start();
381 message_loop->Run();
384 TEST_F(AudioVideoPipelineDeviceTest, WebmPlayback) {
385 scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());
387 ConfigureForFile("bear-640x360.webm");
388 Start();
389 message_loop->Run();
392 } // namespace media
393 } // namespace chromecast