// Source: media/cast/test/receiver.cc (chromium-blink-merge repository,
// blob 30d4f20d8772492e21f949662f0c376e51bb4a5c).
1 // Copyright 2013 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #include <algorithm>
6 #include <climits>
7 #include <cstdarg>
8 #include <cstdio>
9 #include <deque>
10 #include <map>
11 #include <string>
12 #include <utility>
14 #include "base/at_exit.h"
15 #include "base/command_line.h"
16 #include "base/logging.h"
17 #include "base/memory/ref_counted.h"
18 #include "base/memory/scoped_ptr.h"
19 #include "base/message_loop/message_loop.h"
20 #include "base/synchronization/lock.h"
21 #include "base/synchronization/waitable_event.h"
22 #include "base/threading/thread.h"
23 #include "base/time/default_tick_clock.h"
24 #include "base/timer/timer.h"
25 #include "media/audio/audio_io.h"
26 #include "media/audio/audio_manager.h"
27 #include "media/audio/audio_parameters.h"
28 #include "media/audio/fake_audio_log_factory.h"
29 #include "media/base/audio_bus.h"
30 #include "media/base/channel_layout.h"
31 #include "media/base/video_frame.h"
32 #include "media/cast/cast_config.h"
33 #include "media/cast/cast_environment.h"
34 #include "media/cast/cast_receiver.h"
35 #include "media/cast/logging/logging_defines.h"
36 #include "media/cast/net/udp_transport.h"
37 #include "media/cast/test/utility/audio_utility.h"
38 #include "media/cast/test/utility/barcode.h"
39 #include "media/cast/test/utility/default_config.h"
40 #include "media/cast/test/utility/in_process_receiver.h"
41 #include "media/cast/test/utility/input_builder.h"
42 #include "media/cast/test/utility/standalone_cast_environment.h"
43 #include "net/base/net_util.h"
45 #if defined(USE_X11)
46 #include "media/cast/test/linux_output_window.h"
47 #endif // defined(USE_X11)
49 namespace media {
50 namespace cast {
// Settings chosen to match default sender settings.
// All values are strings because test::InputBuilder takes its defaults in
// text form (see GetPorts() et al. below).
#define DEFAULT_SEND_PORT "0"
#define DEFAULT_RECEIVE_PORT "2344"
#define DEFAULT_SEND_IP "0.0.0.0"
#define DEFAULT_AUDIO_FEEDBACK_SSRC "2"
#define DEFAULT_AUDIO_INCOMING_SSRC "1"
#define DEFAULT_AUDIO_PAYLOAD_TYPE "127"
#define DEFAULT_VIDEO_FEEDBACK_SSRC "12"
#define DEFAULT_VIDEO_INCOMING_SSRC "11"
#define DEFAULT_VIDEO_PAYLOAD_TYPE "96"

#if defined(USE_X11)
// Default render window dimensions (text defaults for test::InputBuilder).
const char* kVideoWindowWidth = "1280";
const char* kVideoWindowHeight = "720";
#endif  // defined(USE_X11)
68 void GetPorts(uint16* tx_port, uint16* rx_port) {
69 test::InputBuilder tx_input(
70 "Enter send port.", DEFAULT_SEND_PORT, 1, 65535);
71 *tx_port = static_cast<uint16>(tx_input.GetIntInput());
73 test::InputBuilder rx_input(
74 "Enter receive port.", DEFAULT_RECEIVE_PORT, 1, 65535);
75 *rx_port = static_cast<uint16>(rx_input.GetIntInput());
78 std::string GetIpAddress(const std::string display_text) {
79 test::InputBuilder input(display_text, DEFAULT_SEND_IP, INT_MIN, INT_MAX);
80 std::string ip_address = input.GetStringInput();
81 // Ensure IP address is either the default value or in correct form.
82 while (ip_address != DEFAULT_SEND_IP &&
83 std::count(ip_address.begin(), ip_address.end(), '.') != 3) {
84 ip_address = input.GetStringInput();
86 return ip_address;
89 void GetAudioSsrcs(FrameReceiverConfig* audio_config) {
90 test::InputBuilder input_tx(
91 "Choose audio sender SSRC.", DEFAULT_AUDIO_FEEDBACK_SSRC, 1, INT_MAX);
92 audio_config->receiver_ssrc = input_tx.GetIntInput();
94 test::InputBuilder input_rx(
95 "Choose audio receiver SSRC.", DEFAULT_AUDIO_INCOMING_SSRC, 1, INT_MAX);
96 audio_config->sender_ssrc = input_rx.GetIntInput();
99 void GetVideoSsrcs(FrameReceiverConfig* video_config) {
100 test::InputBuilder input_tx(
101 "Choose video sender SSRC.", DEFAULT_VIDEO_FEEDBACK_SSRC, 1, INT_MAX);
102 video_config->receiver_ssrc = input_tx.GetIntInput();
104 test::InputBuilder input_rx(
105 "Choose video receiver SSRC.", DEFAULT_VIDEO_INCOMING_SSRC, 1, INT_MAX);
106 video_config->sender_ssrc = input_rx.GetIntInput();
#if defined(USE_X11)
// Interactively prompts for the X11 render window dimensions.
// The allowed ranges (144x176 minimum, 1920x1080 maximum) are resolution
// values based on sender settings.
void GetWindowSize(int* width, int* height) {
  test::InputBuilder input_w(
      "Choose window width.", kVideoWindowWidth, 144, 1920);
  *width = input_w.GetIntInput();

  test::InputBuilder input_h(
      "Choose window height.", kVideoWindowHeight, 176, 1080);
  *height = input_h.GetIntInput();
}
#endif  // defined(USE_X11)
122 void GetAudioPayloadtype(FrameReceiverConfig* audio_config) {
123 test::InputBuilder input("Choose audio receiver payload type.",
124 DEFAULT_AUDIO_PAYLOAD_TYPE,
126 127);
127 audio_config->rtp_payload_type = input.GetIntInput();
130 FrameReceiverConfig GetAudioReceiverConfig() {
131 FrameReceiverConfig audio_config = GetDefaultAudioReceiverConfig();
132 GetAudioSsrcs(&audio_config);
133 GetAudioPayloadtype(&audio_config);
134 audio_config.rtp_max_delay_ms = 300;
135 return audio_config;
138 void GetVideoPayloadtype(FrameReceiverConfig* video_config) {
139 test::InputBuilder input("Choose video receiver payload type.",
140 DEFAULT_VIDEO_PAYLOAD_TYPE,
142 127);
143 video_config->rtp_payload_type = input.GetIntInput();
146 FrameReceiverConfig GetVideoReceiverConfig() {
147 FrameReceiverConfig video_config = GetDefaultVideoReceiverConfig();
148 GetVideoSsrcs(&video_config);
149 GetVideoPayloadtype(&video_config);
150 video_config.rtp_max_delay_ms = 300;
151 return video_config;
154 AudioParameters ToAudioParameters(const FrameReceiverConfig& config) {
155 const int samples_in_10ms = config.rtp_timebase / 100;
156 return AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY,
157 GuessChannelLayout(config.channels),
158 config.rtp_timebase, 32, samples_in_10ms);
161 // An InProcessReceiver that renders video frames to a LinuxOutputWindow and
162 // audio frames via Chromium's audio stack.
164 // InProcessReceiver pushes audio and video frames to this subclass, and these
165 // frames are pushed into a queue. Then, for audio, the Chromium audio stack
166 // will make polling calls on a separate, unknown thread whereby audio frames
167 // are pulled out of the audio queue as needed. For video, however, NaivePlayer
168 // is responsible for scheduling updates to the screen itself. For both, the
169 // queues are pruned (i.e., received frames are skipped) when the system is not
170 // able to play back as fast as frames are entering the queue.
172 // This is NOT a good reference implementation for a Cast receiver player since:
173 // 1. It only skips frames to handle slower-than-expected playout, or halts
174 // playback to handle frame underruns.
175 // 2. It makes no attempt to synchronize the timing of playout of the video
176 // frames with the audio frames.
177 // 3. It does nothing to smooth or hide discontinuities in playback due to
178 // timing issues or missing frames.
179 class NaivePlayer : public InProcessReceiver,
180 public AudioOutputStream::AudioSourceCallback {
181 public:
182 NaivePlayer(const scoped_refptr<CastEnvironment>& cast_environment,
183 const net::IPEndPoint& local_end_point,
184 const net::IPEndPoint& remote_end_point,
185 const FrameReceiverConfig& audio_config,
186 const FrameReceiverConfig& video_config,
187 int window_width,
188 int window_height)
189 : InProcessReceiver(cast_environment,
190 local_end_point,
191 remote_end_point,
192 audio_config,
193 video_config),
194 // Maximum age is the duration of 3 video frames. 3 was chosen
195 // arbitrarily, but seems to work well.
196 max_frame_age_(base::TimeDelta::FromSeconds(1) * 3 /
197 video_config.target_frame_rate),
198 #if defined(USE_X11)
199 render_(0, 0, window_width, window_height, "Cast_receiver"),
200 #endif // defined(USE_X11)
201 num_video_frames_processed_(0),
202 num_audio_frames_processed_(0),
203 currently_playing_audio_frame_start_(-1) {}
205 ~NaivePlayer() final {}
207 void Start() final {
208 AudioManager::Get()->GetTaskRunner()->PostTask(
209 FROM_HERE,
210 base::Bind(&NaivePlayer::StartAudioOutputOnAudioManagerThread,
211 base::Unretained(this)));
212 // Note: No need to wait for audio polling to start since the push-and-pull
213 // mechanism is synchronized via the |audio_playout_queue_|.
214 InProcessReceiver::Start();
217 void Stop() final {
218 // First, stop audio output to the Chromium audio stack.
219 base::WaitableEvent done(false, false);
220 DCHECK(!AudioManager::Get()->GetTaskRunner()->BelongsToCurrentThread());
221 AudioManager::Get()->GetTaskRunner()->PostTask(
222 FROM_HERE,
223 base::Bind(&NaivePlayer::StopAudioOutputOnAudioManagerThread,
224 base::Unretained(this),
225 &done));
226 done.Wait();
228 // Now, stop receiving new frames.
229 InProcessReceiver::Stop();
231 // Finally, clear out any frames remaining in the queues.
232 while (!audio_playout_queue_.empty()) {
233 const scoped_ptr<AudioBus> to_be_deleted(
234 audio_playout_queue_.front().second);
235 audio_playout_queue_.pop_front();
237 video_playout_queue_.clear();
240 private:
241 void StartAudioOutputOnAudioManagerThread() {
242 DCHECK(AudioManager::Get()->GetTaskRunner()->BelongsToCurrentThread());
243 DCHECK(!audio_output_stream_);
244 audio_output_stream_.reset(AudioManager::Get()->MakeAudioOutputStreamProxy(
245 ToAudioParameters(audio_config()), ""));
246 if (audio_output_stream_.get() && audio_output_stream_->Open()) {
247 audio_output_stream_->Start(this);
248 } else {
249 LOG(ERROR) << "Failed to open an audio output stream. "
250 << "Audio playback disabled.";
251 audio_output_stream_.reset();
255 void StopAudioOutputOnAudioManagerThread(base::WaitableEvent* done) {
256 DCHECK(AudioManager::Get()->GetTaskRunner()->BelongsToCurrentThread());
257 if (audio_output_stream_.get()) {
258 audio_output_stream_->Stop();
259 audio_output_stream_->Close();
260 audio_output_stream_.reset();
262 done->Signal();
265 ////////////////////////////////////////////////////////////////////
266 // InProcessReceiver finals.
268 void OnVideoFrame(const scoped_refptr<VideoFrame>& video_frame,
269 const base::TimeTicks& playout_time,
270 bool is_continuous) final {
271 DCHECK(cast_env()->CurrentlyOn(CastEnvironment::MAIN));
272 LOG_IF(WARNING, !is_continuous)
273 << "Video: Discontinuity in received frames.";
274 video_playout_queue_.push_back(std::make_pair(playout_time, video_frame));
275 ScheduleVideoPlayout();
276 uint16 frame_no;
277 if (media::cast::test::DecodeBarcode(video_frame, &frame_no)) {
278 video_play_times_.insert(
279 std::pair<uint16, base::TimeTicks>(frame_no, playout_time));
280 } else {
281 VLOG(2) << "Barcode decode failed!";
285 void OnAudioFrame(scoped_ptr<AudioBus> audio_frame,
286 const base::TimeTicks& playout_time,
287 bool is_continuous) final {
288 DCHECK(cast_env()->CurrentlyOn(CastEnvironment::MAIN));
289 LOG_IF(WARNING, !is_continuous)
290 << "Audio: Discontinuity in received frames.";
291 base::AutoLock auto_lock(audio_lock_);
292 uint16 frame_no;
293 if (media::cast::DecodeTimestamp(audio_frame->channel(0),
294 audio_frame->frames(),
295 &frame_no)) {
296 // Since there are lots of audio packets with the same frame_no,
297 // we really want to make sure that we get the playout_time from
298 // the first one. If is_continous is true, then it's possible
299 // that we already missed the first one.
300 if (is_continuous && frame_no == last_audio_frame_no_ + 1) {
301 audio_play_times_.insert(
302 std::pair<uint16, base::TimeTicks>(frame_no, playout_time));
304 last_audio_frame_no_ = frame_no;
305 } else {
306 VLOG(2) << "Audio decode failed!";
307 last_audio_frame_no_ = -2;
309 audio_playout_queue_.push_back(
310 std::make_pair(playout_time, audio_frame.release()));
313 // End of InProcessReceiver finals.
314 ////////////////////////////////////////////////////////////////////
316 ////////////////////////////////////////////////////////////////////
317 // AudioSourceCallback implementation.
319 int OnMoreData(AudioBus* dest, uint32 total_bytes_delay) final {
320 // Note: This method is being invoked by a separate thread unknown to us
321 // (i.e., outside of CastEnvironment).
323 int samples_remaining = dest->frames();
325 while (samples_remaining > 0) {
326 // Get next audio frame ready for playout.
327 if (!currently_playing_audio_frame_.get()) {
328 base::AutoLock auto_lock(audio_lock_);
330 // Prune the queue, skipping entries that are too old.
331 // TODO(miu): Use |total_bytes_delay| to account for audio buffering
332 // delays upstream.
333 const base::TimeTicks earliest_time_to_play =
334 cast_env()->Clock()->NowTicks() - max_frame_age_;
335 while (!audio_playout_queue_.empty() &&
336 audio_playout_queue_.front().first < earliest_time_to_play) {
337 PopOneAudioFrame(true);
339 if (audio_playout_queue_.empty())
340 break;
342 currently_playing_audio_frame_ = PopOneAudioFrame(false).Pass();
343 currently_playing_audio_frame_start_ = 0;
346 // Copy some or all of the samples in |currently_playing_audio_frame_| to
347 // |dest|. Once all samples in |currently_playing_audio_frame_| have been
348 // consumed, release it.
349 const int num_samples_to_copy =
350 std::min(samples_remaining,
351 currently_playing_audio_frame_->frames() -
352 currently_playing_audio_frame_start_);
353 currently_playing_audio_frame_->CopyPartialFramesTo(
354 currently_playing_audio_frame_start_,
355 num_samples_to_copy,
357 dest);
358 samples_remaining -= num_samples_to_copy;
359 currently_playing_audio_frame_start_ += num_samples_to_copy;
360 if (currently_playing_audio_frame_start_ ==
361 currently_playing_audio_frame_->frames()) {
362 currently_playing_audio_frame_.reset();
366 // If |dest| has not been fully filled, then an underrun has occurred; and
367 // fill the remainder of |dest| with zeros.
368 if (samples_remaining > 0) {
369 // Note: Only logging underruns after the first frame has been received.
370 LOG_IF(WARNING, currently_playing_audio_frame_start_ != -1)
371 << "Audio: Playback underrun of " << samples_remaining << " samples!";
372 dest->ZeroFramesPartial(dest->frames() - samples_remaining,
373 samples_remaining);
376 return dest->frames();
379 void OnError(AudioOutputStream* stream) final {
380 LOG(ERROR) << "AudioOutputStream reports an error. "
381 << "Playback is unlikely to continue.";
384 // End of AudioSourceCallback implementation.
385 ////////////////////////////////////////////////////////////////////
387 void ScheduleVideoPlayout() {
388 DCHECK(cast_env()->CurrentlyOn(CastEnvironment::MAIN));
390 // Prune the queue, skipping entries that are too old.
391 const base::TimeTicks now = cast_env()->Clock()->NowTicks();
392 const base::TimeTicks earliest_time_to_play = now - max_frame_age_;
393 while (!video_playout_queue_.empty() &&
394 video_playout_queue_.front().first < earliest_time_to_play) {
395 PopOneVideoFrame(true);
398 // If the queue is not empty, schedule playout of its first frame.
399 if (video_playout_queue_.empty()) {
400 video_playout_timer_.Stop();
401 } else {
402 video_playout_timer_.Start(
403 FROM_HERE,
404 video_playout_queue_.front().first - now,
405 base::Bind(&NaivePlayer::PlayNextVideoFrame,
406 base::Unretained(this)));
410 void PlayNextVideoFrame() {
411 DCHECK(cast_env()->CurrentlyOn(CastEnvironment::MAIN));
412 if (!video_playout_queue_.empty()) {
413 const scoped_refptr<VideoFrame> video_frame = PopOneVideoFrame(false);
414 #if defined(USE_X11)
415 render_.RenderFrame(video_frame);
416 #endif // defined(USE_X11)
418 ScheduleVideoPlayout();
419 CheckAVSync();
422 scoped_refptr<VideoFrame> PopOneVideoFrame(bool is_being_skipped) {
423 DCHECK(cast_env()->CurrentlyOn(CastEnvironment::MAIN));
425 if (is_being_skipped) {
426 VLOG(1) << "VideoFrame[" << num_video_frames_processed_
427 << " (dt=" << (video_playout_queue_.front().first -
428 last_popped_video_playout_time_).InMicroseconds()
429 << " usec)]: Skipped.";
430 } else {
431 VLOG(1) << "VideoFrame[" << num_video_frames_processed_
432 << " (dt=" << (video_playout_queue_.front().first -
433 last_popped_video_playout_time_).InMicroseconds()
434 << " usec)]: Playing "
435 << (cast_env()->Clock()->NowTicks() -
436 video_playout_queue_.front().first).InMicroseconds()
437 << " usec later than intended.";
440 last_popped_video_playout_time_ = video_playout_queue_.front().first;
441 const scoped_refptr<VideoFrame> ret = video_playout_queue_.front().second;
442 video_playout_queue_.pop_front();
443 ++num_video_frames_processed_;
444 return ret;
447 scoped_ptr<AudioBus> PopOneAudioFrame(bool was_skipped) {
448 audio_lock_.AssertAcquired();
450 if (was_skipped) {
451 VLOG(1) << "AudioFrame[" << num_audio_frames_processed_
452 << " (dt=" << (audio_playout_queue_.front().first -
453 last_popped_audio_playout_time_).InMicroseconds()
454 << " usec)]: Skipped.";
455 } else {
456 VLOG(1) << "AudioFrame[" << num_audio_frames_processed_
457 << " (dt=" << (audio_playout_queue_.front().first -
458 last_popped_audio_playout_time_).InMicroseconds()
459 << " usec)]: Playing "
460 << (cast_env()->Clock()->NowTicks() -
461 audio_playout_queue_.front().first).InMicroseconds()
462 << " usec later than intended.";
465 last_popped_audio_playout_time_ = audio_playout_queue_.front().first;
466 scoped_ptr<AudioBus> ret(audio_playout_queue_.front().second);
467 audio_playout_queue_.pop_front();
468 ++num_audio_frames_processed_;
469 return ret.Pass();
472 void CheckAVSync() {
473 if (video_play_times_.size() > 30 &&
474 audio_play_times_.size() > 30) {
475 size_t num_events = 0;
476 base::TimeDelta delta;
477 std::map<uint16, base::TimeTicks>::iterator audio_iter, video_iter;
478 for (video_iter = video_play_times_.begin();
479 video_iter != video_play_times_.end();
480 ++video_iter) {
481 audio_iter = audio_play_times_.find(video_iter->first);
482 if (audio_iter != audio_play_times_.end()) {
483 num_events++;
484 // Positive values means audio is running behind video.
485 delta += audio_iter->second - video_iter->second;
489 if (num_events > 30) {
490 VLOG(0) << "Audio behind by: "
491 << (delta / num_events).InMilliseconds()
492 << "ms";
493 video_play_times_.clear();
494 audio_play_times_.clear();
496 } else if (video_play_times_.size() + audio_play_times_.size() > 500) {
497 // We are decoding audio or video timestamps, but not both, clear it out.
498 video_play_times_.clear();
499 audio_play_times_.clear();
503 // Frames in the queue older than this (relative to NowTicks()) will be
504 // dropped (i.e., playback is falling behind).
505 const base::TimeDelta max_frame_age_;
507 // Outputs created, started, and destroyed by this NaivePlayer.
508 #if defined(USE_X11)
509 test::LinuxOutputWindow render_;
510 #endif // defined(USE_X11)
511 scoped_ptr<AudioOutputStream> audio_output_stream_;
513 // Video playout queue.
514 typedef std::pair<base::TimeTicks, scoped_refptr<VideoFrame> >
515 VideoQueueEntry;
516 std::deque<VideoQueueEntry> video_playout_queue_;
517 base::TimeTicks last_popped_video_playout_time_;
518 int64 num_video_frames_processed_;
520 base::OneShotTimer<NaivePlayer> video_playout_timer_;
522 // Audio playout queue, synchronized by |audio_lock_|.
523 base::Lock audio_lock_;
524 typedef std::pair<base::TimeTicks, AudioBus*> AudioQueueEntry;
525 std::deque<AudioQueueEntry> audio_playout_queue_;
526 base::TimeTicks last_popped_audio_playout_time_;
527 int64 num_audio_frames_processed_;
529 // These must only be used on the audio thread calling OnMoreData().
530 scoped_ptr<AudioBus> currently_playing_audio_frame_;
531 int currently_playing_audio_frame_start_;
533 std::map<uint16, base::TimeTicks> audio_play_times_;
534 std::map<uint16, base::TimeTicks> video_play_times_;
535 int32 last_audio_frame_no_;
538 } // namespace cast
539 } // namespace media
541 int main(int argc, char** argv) {
542 base::AtExitManager at_exit;
543 base::CommandLine::Init(argc, argv);
544 InitLogging(logging::LoggingSettings());
546 scoped_refptr<media::cast::CastEnvironment> cast_environment(
547 new media::cast::StandaloneCastEnvironment);
549 // Start up Chromium audio system.
550 media::FakeAudioLogFactory fake_audio_log_factory_;
551 const scoped_ptr<media::AudioManager> audio_manager(
552 media::AudioManager::Create(&fake_audio_log_factory_));
553 CHECK(media::AudioManager::Get());
555 media::cast::FrameReceiverConfig audio_config =
556 media::cast::GetAudioReceiverConfig();
557 media::cast::FrameReceiverConfig video_config =
558 media::cast::GetVideoReceiverConfig();
560 // Determine local and remote endpoints.
561 uint16 remote_port, local_port;
562 media::cast::GetPorts(&remote_port, &local_port);
563 if (!local_port) {
564 LOG(ERROR) << "Invalid local port.";
565 return 1;
567 std::string remote_ip_address = media::cast::GetIpAddress("Enter remote IP.");
568 std::string local_ip_address = media::cast::GetIpAddress("Enter local IP.");
569 net::IPAddressNumber remote_ip_number;
570 net::IPAddressNumber local_ip_number;
571 if (!net::ParseIPLiteralToNumber(remote_ip_address, &remote_ip_number)) {
572 LOG(ERROR) << "Invalid remote IP address.";
573 return 1;
575 if (!net::ParseIPLiteralToNumber(local_ip_address, &local_ip_number)) {
576 LOG(ERROR) << "Invalid local IP address.";
577 return 1;
579 net::IPEndPoint remote_end_point(remote_ip_number, remote_port);
580 net::IPEndPoint local_end_point(local_ip_number, local_port);
582 // Create and start the player.
583 int window_width = 0;
584 int window_height = 0;
585 #if defined(USE_X11)
586 media::cast::GetWindowSize(&window_width, &window_height);
587 #endif // defined(USE_X11)
588 media::cast::NaivePlayer player(cast_environment,
589 local_end_point,
590 remote_end_point,
591 audio_config,
592 video_config,
593 window_width,
594 window_height);
595 player.Start();
597 base::MessageLoop().Run(); // Run forever (i.e., until SIGTERM).
598 NOTREACHED();
599 return 0;