chromecast/media/cma/backend/audio_video_pipeline_device_unittest.cc

// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <vector>

#include "base/basictypes.h"
#include "base/bind.h"
#include "base/command_line.h"
#include "base/files/file_path.h"
#include "base/files/memory_mapped_file.h"
#include "base/logging.h"
#include "base/memory/ref_counted.h"
#include "base/memory/scoped_ptr.h"
#include "base/memory/scoped_vector.h"
#include "base/message_loop/message_loop.h"
#include "base/path_service.h"
#include "base/single_thread_task_runner.h"
#include "base/thread_task_runner_handle.h"
#include "base/threading/thread.h"
#include "base/time/time.h"
#include "chromecast/media/base/decrypt_context.h"
#include "chromecast/media/cma/backend/audio_pipeline_device.h"
#include "chromecast/media/cma/backend/media_clock_device.h"
#include "chromecast/media/cma/backend/media_pipeline_device.h"
#include "chromecast/media/cma/backend/media_pipeline_device_factory.h"
#include "chromecast/media/cma/backend/media_pipeline_device_params.h"
#include "chromecast/media/cma/backend/video_pipeline_device.h"
#include "chromecast/media/cma/base/decoder_buffer_adapter.h"
#include "chromecast/media/cma/base/decoder_buffer_base.h"
#include "chromecast/media/cma/base/decoder_config_adapter.h"
#include "chromecast/media/cma/test/frame_segmenter_for_test.h"
#include "chromecast/media/cma/test/media_component_device_feeder_for_test.h"
#include "chromecast/public/cast_media_shlib.h"
#include "chromecast/public/media/decoder_config.h"
#include "media/base/audio_decoder_config.h"
#include "media/base/buffers.h"
#include "media/base/decoder_buffer.h"
#include "media/base/video_decoder_config.h"
#include "testing/gtest/include/gtest/gtest.h"

namespace chromecast {
namespace media {

namespace {

typedef ScopedVector<MediaComponentDeviceFeederForTest>::iterator
    ComponentDeviceIterator;

const base::TimeDelta kMonitorLoopDelay = base::TimeDelta::FromMilliseconds(20);

base::FilePath GetTestDataFilePath(const std::string& name) {
  base::FilePath file_path;
  CHECK(PathService::Get(base::DIR_SOURCE_ROOT, &file_path));

  file_path = file_path.Append(FILE_PATH_LITERAL("media"))
      .Append(FILE_PATH_LITERAL("test")).Append(FILE_PATH_LITERAL("data"))
      .AppendASCII(name);
  return file_path;
}

}  // namespace

class AudioVideoPipelineDeviceTest : public testing::Test {
 public:
  struct PauseInfo {
    PauseInfo() {}
    PauseInfo(base::TimeDelta d, base::TimeDelta l) : delay(d), length(l) {}
    ~PauseInfo() {}

    base::TimeDelta delay;
    base::TimeDelta length;
  };

  AudioVideoPipelineDeviceTest();
  ~AudioVideoPipelineDeviceTest() override;

  void SetUp() override {
    CastMediaShlib::Initialize(
        base::CommandLine::ForCurrentProcess()->argv());
  }

  void TearDown() override {
    CastMediaShlib::Finalize();
  }

  void ConfigureForFile(std::string filename);
  void ConfigureForAudioOnly(std::string filename);
  void ConfigureForVideoOnly(std::string filename, bool raw_h264);

  // The pattern loops: wait >= pattern[i].delay against the media clock
  // between pauses, then pause for >= pattern[i].length against the
  // MessageLoop. A pause with a negative delay stops the sequence and does
  // not loop. (An illustrative use of SetPausePattern() is sketched after the
  // WebmPlaybackWithPause test below.)
  void SetPausePattern(const std::vector<PauseInfo> pattern);

  // Adds a pause to the end of the pause pattern.
  void AddPause(base::TimeDelta delay, base::TimeDelta length);

  void Start();

 private:
  void Initialize();

  void LoadAudioStream(std::string filename);
  void LoadVideoStream(std::string filename, bool raw_h264);

  void MonitorLoop();

  void OnPauseCompleted();

  void OnEos(MediaComponentDeviceFeederForTest* device_feeder);

  scoped_ptr<MediaPipelineDevice> media_pipeline_device_;
  MediaClockDevice* media_clock_device_;

  // Devices to feed.
  ScopedVector<MediaComponentDeviceFeederForTest>
      component_device_feeders_;

  // Current media time.
  base::TimeDelta pause_time_;

  // Pause settings.
  std::vector<PauseInfo> pause_pattern_;
  int pause_pattern_idx_;

  DISALLOW_COPY_AND_ASSIGN(AudioVideoPipelineDeviceTest);
};

AudioVideoPipelineDeviceTest::AudioVideoPipelineDeviceTest()
    : pause_pattern_() {
}

AudioVideoPipelineDeviceTest::~AudioVideoPipelineDeviceTest() {
}

void AudioVideoPipelineDeviceTest::AddPause(base::TimeDelta delay,
                                            base::TimeDelta length) {
  pause_pattern_.push_back(PauseInfo(delay, length));
}

void AudioVideoPipelineDeviceTest::SetPausePattern(
    const std::vector<PauseInfo> pattern) {
  pause_pattern_ = pattern;
}

void AudioVideoPipelineDeviceTest::ConfigureForAudioOnly(std::string filename) {
  Initialize();
  LoadAudioStream(filename);
}

void AudioVideoPipelineDeviceTest::ConfigureForVideoOnly(std::string filename,
                                                         bool raw_h264) {
  Initialize();
  LoadVideoStream(filename, raw_h264);
}

void AudioVideoPipelineDeviceTest::ConfigureForFile(std::string filename) {
  Initialize();
  LoadVideoStream(filename, false /* raw_h264 */);
  LoadAudioStream(filename);
}
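
// Demuxes the audio track of |filename|, configures the audio pipeline device
// and queues the demuxed frames (followed by an EOS buffer) into a feeder.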
void AudioVideoPipelineDeviceTest::LoadAudioStream(std::string filename) {
  base::FilePath file_path = GetTestDataFilePath(filename);
  DemuxResult demux_result = FFmpegDemuxForTest(file_path, true /* audio */);
  BufferList frames = demux_result.frames;

  AudioPipelineDevice* audio_pipeline_device =
      media_pipeline_device_->GetAudioPipelineDevice();

  bool success = audio_pipeline_device->SetConfig(
      DecoderConfigAdapter::ToCastAudioConfig(kPrimary,
                                              demux_result.audio_config));
  ASSERT_TRUE(success);

  VLOG(2) << "Got " << frames.size() << " audio input frames";

  frames.push_back(
      scoped_refptr<DecoderBufferBase>(
          new DecoderBufferAdapter(::media::DecoderBuffer::CreateEOSBuffer())));

  MediaComponentDeviceFeederForTest* device_feeder =
      new MediaComponentDeviceFeederForTest(audio_pipeline_device, frames);
  device_feeder->Initialize(base::Bind(&AudioVideoPipelineDeviceTest::OnEos,
                                       base::Unretained(this),
                                       device_feeder));
  component_device_feeders_.push_back(device_feeder);
}
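
// Loads video frames either from a raw H264 elementary stream or by demuxing
// |filename|, configures the video pipeline device and queues the frames
// (followed by an EOS buffer) into a feeder.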
void AudioVideoPipelineDeviceTest::LoadVideoStream(std::string filename,
                                                   bool raw_h264) {
  BufferList frames;
  VideoConfig video_config;

  if (raw_h264) {
    base::FilePath file_path = GetTestDataFilePath(filename);
    base::MemoryMappedFile video_stream;
    ASSERT_TRUE(video_stream.Initialize(file_path))
        << "Couldn't open stream file: " << file_path.MaybeAsASCII();
    frames = H264SegmenterForTest(video_stream.data(), video_stream.length());

    // TODO(erickung): Either pull data from the stream or make the caller
    // specify the value.
    video_config.codec = kCodecH264;
    video_config.profile = kH264Main;
    video_config.additional_config = NULL;
    video_config.is_encrypted = false;
  } else {
    base::FilePath file_path = GetTestDataFilePath(filename);
    DemuxResult demux_result = FFmpegDemuxForTest(file_path,
                                                  false /* audio */);
    frames = demux_result.frames;
    video_config = DecoderConfigAdapter::ToCastVideoConfig(
        kPrimary, demux_result.video_config);
  }

  VideoPipelineDevice* video_pipeline_device =
      media_pipeline_device_->GetVideoPipelineDevice();

  // Set configuration.
  bool success = video_pipeline_device->SetConfig(video_config);
  ASSERT_TRUE(success);

  VLOG(2) << "Got " << frames.size() << " video input frames";

  frames.push_back(
      scoped_refptr<DecoderBufferBase>(new DecoderBufferAdapter(
          ::media::DecoderBuffer::CreateEOSBuffer())));

  MediaComponentDeviceFeederForTest* device_feeder =
      new MediaComponentDeviceFeederForTest(video_pipeline_device, frames);
  device_feeder->Initialize(base::Bind(&AudioVideoPipelineDeviceTest::OnEos,
                                       base::Unretained(this),
                                       device_feeder));
  component_device_feeders_.push_back(device_feeder);
}
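
// Starts feeding all component device feeders, sets the media clock running
// and kicks off the monitor loop.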
void AudioVideoPipelineDeviceTest::Start() {
  pause_time_ = base::TimeDelta();
  pause_pattern_idx_ = 0;

  for (size_t i = 0; i < component_device_feeders_.size(); i++) {
    base::ThreadTaskRunnerHandle::Get()->PostTask(
        FROM_HERE, base::Bind(&MediaComponentDeviceFeederForTest::Feed,
                              base::Unretained(component_device_feeders_[i])));
  }

  media_clock_device_->SetState(MediaClockDevice::kStateRunning);

  base::ThreadTaskRunnerHandle::Get()->PostTask(
      FROM_HERE, base::Bind(&AudioVideoPipelineDeviceTest::MonitorLoop,
                            base::Unretained(this)));
}
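
// Polls the media clock; when the next pause in the pattern is due, stops the
// clock and schedules OnPauseCompleted() after the requested pause length.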
void AudioVideoPipelineDeviceTest::MonitorLoop() {
  base::TimeDelta media_time = media_clock_device_->GetTime();

  if (!pause_pattern_.empty() &&
      pause_pattern_[pause_pattern_idx_].delay >= base::TimeDelta() &&
      media_time >= pause_time_ + pause_pattern_[pause_pattern_idx_].delay) {
    // Do the pause.
    media_clock_device_->SetRate(0.0);
    pause_time_ = media_clock_device_->GetTime();

    VLOG(2) << "Pausing at " << pause_time_.InMilliseconds() << "ms for "
            << pause_pattern_[pause_pattern_idx_].length.InMilliseconds()
            << "ms";

    // Wait for the pause to finish.
    base::ThreadTaskRunnerHandle::Get()->PostDelayedTask(
        FROM_HERE, base::Bind(&AudioVideoPipelineDeviceTest::OnPauseCompleted,
                              base::Unretained(this)),
        pause_pattern_[pause_pattern_idx_].length);
    return;
  }

  // Check the state again in a little while.
  base::ThreadTaskRunnerHandle::Get()->PostDelayedTask(
      FROM_HERE, base::Bind(&AudioVideoPipelineDeviceTest::MonitorLoop,
                            base::Unretained(this)),
      kMonitorLoopDelay);
}
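
// Called when the scheduled pause has elapsed; verifies the media time stayed
// (nearly) constant during the pause, then resumes playback and monitoring.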
void AudioVideoPipelineDeviceTest::OnPauseCompleted() {
  // Make sure the media time didn't move during the pause.
  base::TimeDelta media_time = media_clock_device_->GetTime();

  // TODO(damienv):
  // This should be:
  //   EXPECT_EQ(media_time, media_time_);
  // However, some backends move the time forward when rendering the first
  // frame while paused. This behaviour is not intended.
  EXPECT_GE(media_time, pause_time_);
  EXPECT_LE(media_time, pause_time_ + base::TimeDelta::FromMilliseconds(50));

  pause_time_ = media_time;
  pause_pattern_idx_ = (pause_pattern_idx_ + 1) % pause_pattern_.size();

  VLOG(2) << "Pause complete, restarting media clock";

  // Resume playback and frame feeding.
  media_clock_device_->SetRate(1.0);

  MonitorLoop();
}
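
// Removes the feeder that reached end-of-stream; quits the message loop once
// all feeders are done.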
void AudioVideoPipelineDeviceTest::OnEos(
    MediaComponentDeviceFeederForTest* device_feeder) {
  for (ComponentDeviceIterator it = component_device_feeders_.begin();
       it != component_device_feeders_.end();
       ++it) {
    if (*it == device_feeder) {
      component_device_feeders_.erase(it);
      break;
    }
  }

  // Check whether all streams have finished.
  if (component_device_feeders_.empty())
    base::MessageLoop::current()->QuitWhenIdle();
}
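
// Creates the media pipeline device via the platform factory and puts the
// media clock into a known state (idle, time zero, rate 1.0).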
void AudioVideoPipelineDeviceTest::Initialize() {
  // Create the media device.
  MediaPipelineDeviceParams params;
  scoped_ptr<MediaPipelineDeviceFactory> device_factory =
      GetMediaPipelineDeviceFactory(params);
  media_pipeline_device_.reset(new MediaPipelineDevice(device_factory.Pass()));
  media_clock_device_ = media_pipeline_device_->GetMediaClockDevice();

  // Clock initialization and configuration.
  bool success =
      media_clock_device_->SetState(MediaClockDevice::kStateIdle);
  ASSERT_TRUE(success);
  success = media_clock_device_->ResetTimeline(base::TimeDelta());
  ASSERT_TRUE(success);
  media_clock_device_->SetRate(1.0);
}

TEST_F(AudioVideoPipelineDeviceTest, Mp3Playback) {
  scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());

  ConfigureForAudioOnly("sfx.mp3");
  Start();
  message_loop->Run();
}

TEST_F(AudioVideoPipelineDeviceTest, VorbisPlayback) {
  scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());

  ConfigureForAudioOnly("sfx.ogg");
  Start();
  message_loop->Run();
}

TEST_F(AudioVideoPipelineDeviceTest, H264Playback) {
  scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());

  ConfigureForVideoOnly("bear.h264", true /* raw_h264 */);
  Start();
  message_loop->Run();
}

TEST_F(AudioVideoPipelineDeviceTest, WebmPlaybackWithPause) {
  scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());

  // Set up to pause for 100ms every 500ms.
  AddPause(base::TimeDelta::FromMilliseconds(500),
           base::TimeDelta::FromMilliseconds(100));

  ConfigureForVideoOnly("bear-640x360.webm", false /* raw_h264 */);
  Start();
  message_loop->Run();
}
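
// The pause tests above only exercise AddPause(). As an illustrative sketch
// (not part of the original file), SetPausePattern() could be driven directly;
// the test name below is hypothetical and the media file is reused from the
// existing tests.
TEST_F(AudioVideoPipelineDeviceTest, WebmPlaybackWithPausePattern) {
  scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());

  // Pause for 100ms every 250ms; a negative delay ends the pause sequence.
  std::vector<PauseInfo> pattern;
  pattern.push_back(PauseInfo(base::TimeDelta::FromMilliseconds(250),
                              base::TimeDelta::FromMilliseconds(100)));
  pattern.push_back(PauseInfo(base::TimeDelta::FromMilliseconds(-1),
                              base::TimeDelta()));
  SetPausePattern(pattern);

  ConfigureForFile("bear-640x360.webm");
  Start();
  message_loop->Run();
}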

TEST_F(AudioVideoPipelineDeviceTest, Vp8Playback) {
  scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());

  ConfigureForVideoOnly("bear-vp8a.webm", false /* raw_h264 */);
  Start();
  message_loop->Run();
}

TEST_F(AudioVideoPipelineDeviceTest, WebmPlayback) {
  scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop());

  ConfigureForFile("bear-640x360.webm");
  Start();
  message_loop->Run();
}

}  // namespace media
}  // namespace chromecast