// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// Test application that simulates a cast sender - Data can be either generated
// or read from a file.
#include "base/at_exit.h"
#include "base/base_paths.h"
#include "base/command_line.h"
#include "base/file_util.h"
#include "base/files/file_path.h"
#include "base/files/memory_mapped_file.h"
#include "base/files/scoped_file.h"
#include "base/json/json_writer.h"
#include "base/logging.h"
#include "base/memory/scoped_ptr.h"
#include "base/path_service.h"
#include "base/strings/string_number_conversions.h"
#include "base/threading/thread.h"
#include "base/time/default_tick_clock.h"
#include "base/values.h"
#include "media/audio/audio_parameters.h"
#include "media/base/audio_buffer.h"
#include "media/base/audio_bus.h"
#include "media/base/audio_fifo.h"
#include "media/base/audio_timestamp_helper.h"
#include "media/base/media.h"
#include "media/base/multi_channel_resampler.h"
#include "media/base/video_frame.h"
#include "media/base/video_util.h"
#include "media/cast/cast_config.h"
#include "media/cast/cast_environment.h"
#include "media/cast/cast_sender.h"
#include "media/cast/logging/encoding_event_subscriber.h"
#include "media/cast/logging/log_serializer.h"
#include "media/cast/logging/logging_defines.h"
#include "media/cast/logging/proto/raw_events.pb.h"
#include "media/cast/logging/receiver_time_offset_estimator_impl.h"
#include "media/cast/logging/stats_event_subscriber.h"
#include "media/cast/test/utility/audio_utility.h"
#include "media/cast/test/utility/default_config.h"
#include "media/cast/test/utility/input_builder.h"
#include "media/cast/test/utility/video_utility.h"
#include "media/cast/transport/cast_transport_defines.h"
#include "media/cast/transport/cast_transport_sender.h"
#include "media/cast/transport/transport/udp_transport.h"
#include "media/ffmpeg/ffmpeg_common.h"
#include "media/ffmpeg/ffmpeg_deleters.h"
#include "media/filters/audio_renderer_algorithm.h"
#include "media/filters/ffmpeg_demuxer.h"
#include "media/filters/ffmpeg_glue.h"
#include "media/filters/in_memory_url_protocol.h"
#include "ui/gfx/size.h"
static const int kAudioChannels = 2;
static const int kAudioSamplingFrequency = 48000;
static const int kSoundFrequency = 1234;  // Frequency of sinusoid wave.
static const float kSoundVolume = 0.5f;
static const int kAudioFrameMs = 10;  // Each audio frame is exactly 10ms.
static const int kAudioPacketsPerSecond = 1000 / kAudioFrameMs;

// The max allowed size of serialized log.
const int kMaxSerializedLogBytes = 10 * 1000 * 1000;
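
// Note: with kAudioFrameMs = 10, kAudioPacketsPerSecond works out to
// 1000 / 10 = 100 buses per second; at 48 kHz that is 48000 / 100 = 480
// samples per bus per channel.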
// Flags for this program:
//
// --address=xx.xx.xx.xx
//   IP address of receiver.
//
// --port=xxxx
//   Port number of receiver.
//
// --source-file=xxx.webm
//   WebM file as source of video frames.
//
// --fps=xx
//   Override framerate of the video stream.

const char kSwitchAddress[] = "address";
const char kSwitchPort[] = "port";
const char kSwitchSourceFile[] = "source-file";
const char kSwitchFps[] = "fps";
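
// Example invocation (the binary name depends on the local build target and
// is shown here only for illustration):
//   ./cast_sender_app --address=192.168.1.10 --port=2344 \
//       --source-file=video.webm --fps=30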
namespace media {
namespace cast {

AudioSenderConfig GetAudioSenderConfig() {
  AudioSenderConfig audio_config;

  audio_config.rtcp_c_name = "audio_sender@a.b.c.d";

  audio_config.use_external_encoder = false;
  audio_config.frequency = kAudioSamplingFrequency;
  audio_config.channels = kAudioChannels;
  audio_config.bitrate = 64000;
  audio_config.codec = transport::kOpus;
  audio_config.rtp_config.ssrc = 1;
  audio_config.incoming_feedback_ssrc = 2;
  audio_config.rtp_config.payload_type = 127;
  audio_config.rtp_config.max_delay_ms = 300;
  return audio_config;
}
VideoSenderConfig GetVideoSenderConfig() {
  VideoSenderConfig video_config;

  video_config.rtcp_c_name = "video_sender@a.b.c.d";
  video_config.use_external_encoder = false;

  video_config.width = 1280;
  video_config.height = 720;
  video_config.max_frame_rate = 30;

  video_config.max_bitrate = 2500000;
  video_config.min_bitrate = 100000;
  video_config.start_bitrate = video_config.min_bitrate;

  video_config.codec = transport::kVp8;
  video_config.max_number_of_video_buffers_used = 1;
  video_config.number_of_encode_threads = 2;

  video_config.min_qp = 4;
  video_config.max_qp = 40;

  // SSRCs and payload type. Don't change them.
  video_config.rtp_config.ssrc = 11;
  video_config.incoming_feedback_ssrc = 12;
  video_config.rtp_config.payload_type = 96;
  video_config.rtp_config.max_delay_ms = 300;
  return video_config;
}
void AVFreeFrame(AVFrame* frame) { avcodec_free_frame(&frame); }
class SendProcess {
 public:
  SendProcess(scoped_refptr<base::SingleThreadTaskRunner> thread_proxy,
              base::TickClock* clock,
              const VideoSenderConfig& video_config)
      : test_app_thread_proxy_(thread_proxy),
        video_config_(video_config),
        synthetic_count_(0),
        clock_(clock),
        audio_frame_count_(0),
        video_frame_count_(0),
        weak_factory_(this),
        av_format_context_(NULL),
        audio_stream_index_(-1),
        playback_rate_(1.0),
        video_stream_index_(-1),
        video_frame_rate_numerator_(video_config.max_frame_rate),
        video_frame_rate_denominator_(1),
        video_first_pts_set_(false) {
    audio_bus_factory_.reset(new TestAudioBusFactory(kAudioChannels,
                                                     kAudioSamplingFrequency,
                                                     kSoundFrequency,
                                                     kSoundVolume));
    const CommandLine* cmd = CommandLine::ForCurrentProcess();
    int override_fps = 0;
    if (base::StringToInt(cmd->GetSwitchValueASCII(kSwitchFps),
                          &override_fps)) {
      video_config_.max_frame_rate = override_fps;
      video_frame_rate_numerator_ = override_fps;
    }

    // Load source file and prepare FFmpeg demuxer.
    base::FilePath source_path = cmd->GetSwitchValuePath(kSwitchSourceFile);
    if (source_path.empty())
      return;

    LOG(INFO) << "Source: " << source_path.value();
    if (!file_data_.Initialize(source_path)) {
      LOG(ERROR) << "Cannot load file.";
      return;
    }
    protocol_.reset(
        new InMemoryUrlProtocol(file_data_.data(), file_data_.length(), false));
    glue_.reset(new FFmpegGlue(protocol_.get()));

    if (!glue_->OpenContext()) {
      LOG(ERROR) << "Cannot open file.";
      return;
    }

    // AVFormatContext is owned by the glue.
    av_format_context_ = glue_->format_context();
    if (avformat_find_stream_info(av_format_context_, NULL) < 0) {
      LOG(ERROR) << "Cannot find stream information.";
      return;
    }

    // Prepare FFmpeg decoders.
    for (unsigned int i = 0; i < av_format_context_->nb_streams; ++i) {
      AVStream* av_stream = av_format_context_->streams[i];
      AVCodecContext* av_codec_context = av_stream->codec;
      AVCodec* av_codec = avcodec_find_decoder(av_codec_context->codec_id);
      if (!av_codec) {
        LOG(ERROR) << "Cannot find decoder for the codec: "
                   << av_codec_context->codec_id;
        return;
      }

      // Number of threads for decoding.
      av_codec_context->thread_count = 2;
      av_codec_context->error_concealment = FF_EC_GUESS_MVS | FF_EC_DEBLOCK;
      av_codec_context->request_sample_fmt = AV_SAMPLE_FMT_S16;

      if (avcodec_open2(av_codec_context, av_codec, NULL) < 0) {
        LOG(ERROR) << "Cannot open AVCodecContext for the codec: "
                   << av_codec_context->codec_id;
        return;
      }

      if (av_codec->type == AVMEDIA_TYPE_AUDIO) {
        if (av_codec_context->sample_fmt == AV_SAMPLE_FMT_S16P) {
          LOG(ERROR) << "Audio format not supported.";
          return;
        }
        ChannelLayout layout = ChannelLayoutToChromeChannelLayout(
            av_codec_context->channel_layout,
            av_codec_context->channels);
        if (layout == CHANNEL_LAYOUT_UNSUPPORTED) {
          LOG(ERROR) << "Unsupported audio channels layout.";
          return;
        }
        if (audio_stream_index_ != -1) {
          LOG(WARNING) << "Found multiple audio streams.";
        }
        audio_stream_index_ = static_cast<int>(i);
        audio_params_.Reset(
            AudioParameters::AUDIO_PCM_LINEAR,
            layout,
            av_codec_context->channels,
            av_codec_context->channels,
            av_codec_context->sample_rate,
            8 * av_get_bytes_per_sample(av_codec_context->sample_fmt),
            av_codec_context->sample_rate / kAudioPacketsPerSecond);
        LOG(INFO) << "Source file has audio.";
      } else if (av_codec->type == AVMEDIA_TYPE_VIDEO) {
        VideoFrame::Format format =
            PixelFormatToVideoFormat(av_codec_context->pix_fmt);
        if (format != VideoFrame::YV12) {
          LOG(ERROR) << "Cannot handle non YV12 video format: " << format;
          return;
        }
        if (video_stream_index_ != -1) {
          LOG(WARNING) << "Found multiple video streams.";
        }
        video_stream_index_ = static_cast<int>(i);
        video_frame_rate_numerator_ = av_stream->r_frame_rate.num;
        video_frame_rate_denominator_ = av_stream->r_frame_rate.den;
        // Max frame rate is rounded up.
        video_config_.max_frame_rate =
            video_frame_rate_denominator_ +
            video_frame_rate_numerator_ - 1;
        video_config_.max_frame_rate /= video_frame_rate_denominator_;
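        // The two statements above perform a ceiling division:
        // max_frame_rate = ceil(numerator / denominator). For example, an
        // NTSC-style 30000/1001 stream gives (1001 + 30000 - 1) / 1001 = 30.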

        if (override_fps) {
          // If video is played at a manual speed audio needs to match.
          playback_rate_ = 1.0 * override_fps *
              av_stream->r_frame_rate.den / av_stream->r_frame_rate.num;
        }
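        // Example: a 24 fps source overridden with --fps=30 gives a playback
        // rate of 30 * (1 / 24) = 1.25, i.e. audio has to be time-stretched
        // to play 25% faster to stay in sync with the faster video.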
        LOG(INFO) << "Source file has video.";
      } else {
        LOG(ERROR) << "Unknown stream type; ignore.";
      }
    }
  }
  void Start(scoped_refptr<AudioFrameInput> audio_frame_input,
             scoped_refptr<VideoFrameInput> video_frame_input) {
    audio_frame_input_ = audio_frame_input;
    video_frame_input_ = video_frame_input;

    LOG(INFO) << "Max Frame rate: " << video_config_.max_frame_rate;
    LOG(INFO) << "Real Frame rate: "
              << video_frame_rate_numerator_ << "/"
              << video_frame_rate_denominator_ << " fps.";
    LOG(INFO) << "Audio playback rate: " << playback_rate_;

    if (!is_transcoding_audio() && !is_transcoding_video()) {
      // Send fake patterns.
      test_app_thread_proxy_->PostTask(
          FROM_HERE,
          base::Bind(
              &SendProcess::SendNextFakeFrame,
              base::Unretained(this)));
      return;
    }

    // Send transcoding streams.
    audio_algo_.Initialize(playback_rate_, audio_params_);
    audio_algo_.FlushBuffers();
    audio_fifo_input_bus_ =
        media::AudioBus::Create(
            audio_params_.channels(), audio_params_.frames_per_buffer());
    // Audio FIFO can carry all data from AudioRendererAlgorithm.
    audio_fifo_.reset(
        new AudioFifo(audio_params_.channels(),
                      audio_algo_.QueueCapacity()));
    audio_resampler_.reset(new media::MultiChannelResampler(
        audio_params_.channels(),
        static_cast<double>(audio_params_.sample_rate()) /
            kAudioSamplingFrequency,
        audio_params_.frames_per_buffer(),
        base::Bind(&SendProcess::ProvideData, base::Unretained(this))));
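    // The transcoding audio path set up above is:
    //   decoded AudioBuffer -> audio_algo_ (time-stretch by playback_rate_)
    //   -> audio_fifo_ -> audio_resampler_ -> fixed-size 48 kHz buses.
    // The resampler ratio is source sample rate / 48000, e.g. a 44.1 kHz
    // source is resampled up to the 48 kHz cast audio configuration.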
    test_app_thread_proxy_->PostTask(
        FROM_HERE,
        base::Bind(
            &SendProcess::SendNextFrame,
            base::Unretained(this)));
  }
  void SendNextFakeFrame() {
    gfx::Size size(video_config_.width, video_config_.height);
    scoped_refptr<VideoFrame> video_frame =
        VideoFrame::CreateBlackFrame(size);
    PopulateVideoFrame(video_frame, synthetic_count_);
    ++synthetic_count_;

    base::TimeTicks now = clock_->NowTicks();
    if (start_time_.is_null())
      start_time_ = now;

    base::TimeDelta video_time = VideoFrameTime(video_frame_count_);
    video_frame->set_timestamp(video_time);
    video_frame_input_->InsertRawVideoFrame(video_frame,
                                            start_time_ + video_time);

    // Send just enough audio data to match next video frame's time.
    base::TimeDelta audio_time = AudioFrameTime(audio_frame_count_);
    while (audio_time < video_time) {
      if (is_transcoding_audio()) {
        Decode(true);
        CHECK(!audio_bus_queue_.empty()) << "No audio decoded.";
        scoped_ptr<AudioBus> bus(audio_bus_queue_.front());
        audio_bus_queue_.pop();
        audio_frame_input_->InsertAudio(
            bus.Pass(), start_time_ + audio_time);
      } else {
        audio_frame_input_->InsertAudio(
            audio_bus_factory_->NextAudioBus(
                base::TimeDelta::FromMilliseconds(kAudioFrameMs)),
            start_time_ + audio_time);
      }
      audio_time = AudioFrameTime(++audio_frame_count_);
    }
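    // The loop above emits audio in fixed 10 ms buses until its running
    // timestamp catches up with the video frame just sent, keeping the two
    // synthetic streams in lockstep.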

    // This is the time since the stream started.
    const base::TimeDelta elapsed_time = now - start_time_;

    // Handle the case when frame generation cannot keep up.
    // Move the time ahead to match the next frame.
    while (video_time < elapsed_time) {
      LOG(WARNING) << "Skipping one frame.";
      video_time = VideoFrameTime(++video_frame_count_);
    }

    test_app_thread_proxy_->PostDelayedTask(
        FROM_HERE,
        base::Bind(&SendProcess::SendNextFakeFrame,
                   weak_factory_.GetWeakPtr()),
        video_time - elapsed_time);
  }
  // Return true if a frame was sent.
  bool SendNextTranscodedVideo(base::TimeDelta elapsed_time) {
    if (!is_transcoding_video())
      return false;

    Decode(false);
    if (video_frame_queue_.empty())
      return false;

    scoped_refptr<VideoFrame> decoded_frame =
        video_frame_queue_.front();
    if (elapsed_time < decoded_frame->timestamp())
      return false;

    gfx::Size size(video_config_.width, video_config_.height);
    scoped_refptr<VideoFrame> video_frame =
        VideoFrame::CreateBlackFrame(size);
    video_frame_queue_.pop();
    media::CopyPlane(VideoFrame::kYPlane,
                     decoded_frame->data(VideoFrame::kYPlane),
                     decoded_frame->stride(VideoFrame::kYPlane),
                     decoded_frame->rows(VideoFrame::kYPlane),
                     video_frame);
    media::CopyPlane(VideoFrame::kUPlane,
                     decoded_frame->data(VideoFrame::kUPlane),
                     decoded_frame->stride(VideoFrame::kUPlane),
                     decoded_frame->rows(VideoFrame::kUPlane),
                     video_frame);
    media::CopyPlane(VideoFrame::kVPlane,
                     decoded_frame->data(VideoFrame::kVPlane),
                     decoded_frame->stride(VideoFrame::kVPlane),
                     decoded_frame->rows(VideoFrame::kVPlane),
                     video_frame);

    base::TimeDelta video_time;
    // Use the timestamp from the file if we're transcoding.
    video_time = ScaleTimestamp(decoded_frame->timestamp());
    video_frame_input_->InsertRawVideoFrame(
        video_frame, start_time_ + video_time);

    // Make sure queue is not empty.
    Decode(false);
    return true;
  }
  // Return true if a frame was sent.
  bool SendNextTranscodedAudio(base::TimeDelta elapsed_time) {
    if (!is_transcoding_audio())
      return false;

    Decode(true);
    if (audio_bus_queue_.empty())
      return false;

    base::TimeDelta audio_time = audio_sent_ts_->GetTimestamp();
    if (elapsed_time < audio_time)
      return false;

    scoped_ptr<AudioBus> bus(audio_bus_queue_.front());
    audio_bus_queue_.pop();
    audio_sent_ts_->AddFrames(bus->frames());
    audio_frame_input_->InsertAudio(
        bus.Pass(), start_time_ + audio_time);

    // Make sure queue is not empty.
    Decode(true);
    return true;
  }
  void SendNextFrame() {
    if (start_time_.is_null())
      start_time_ = clock_->NowTicks();

    // Send as much as possible. Audio is sent according to
    // system time.
    while (SendNextTranscodedAudio(clock_->NowTicks() - start_time_));

    // Video is sync'ed to audio.
    while (SendNextTranscodedVideo(audio_sent_ts_->GetTimestamp()));
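    // Pacing: the audio loop above is driven by elapsed wall-clock time,
    // and the video loop is then driven by the audio timestamp already
    // sent, so video never runs ahead of the audio stream.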

    if (audio_bus_queue_.empty() && video_frame_queue_.empty()) {
      // Both queues being empty can only mean that we have reached
      // the end of the stream.
      LOG(INFO) << "Rewind.";
      Rewind();
      start_time_ = base::TimeTicks();
      audio_sent_ts_.reset();
      video_first_pts_set_ = false;
    }

    test_app_thread_proxy_->PostDelayedTask(
        FROM_HERE,
        base::Bind(
            &SendProcess::SendNextFrame,
            base::Unretained(this)),
        base::TimeDelta::FromMilliseconds(kAudioFrameMs));
  }
  const VideoSenderConfig& get_video_config() const { return video_config_; }

 private:
  bool is_transcoding_audio() { return audio_stream_index_ >= 0; }
  bool is_transcoding_video() { return video_stream_index_ >= 0; }

  // Helper methods to compute timestamps for the frame number specified.
  base::TimeDelta VideoFrameTime(int frame_number) {
    return frame_number * base::TimeDelta::FromSeconds(1) *
        video_frame_rate_denominator_ / video_frame_rate_numerator_;
  }

  base::TimeDelta ScaleTimestamp(base::TimeDelta timestamp) {
    return base::TimeDelta::FromMicroseconds(
        timestamp.InMicroseconds() / playback_rate_);
  }
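  // Example: with playback_rate_ = 1.25, ScaleTimestamp() maps a source
  // timestamp of 1,000,000 us to 1,000,000 / 1.25 = 800,000 us of send
  // time, matching the sped-up audio.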

  base::TimeDelta AudioFrameTime(int frame_number) {
    return frame_number * base::TimeDelta::FromMilliseconds(kAudioFrameMs);
  }
  // Go to the beginning of the stream.
  void Rewind() {
    CHECK(av_seek_frame(av_format_context_, -1, 0, AVSEEK_FLAG_BACKWARD) >= 0)
        << "Failed to rewind to the beginning.";
  }
  // Call FFmpeg to fetch one packet.
  ScopedAVPacket DemuxOnePacket(bool* audio) {
    ScopedAVPacket packet(new AVPacket());
    if (av_read_frame(av_format_context_, packet.get()) < 0) {
      LOG(ERROR) << "Failed to read one AVPacket.";
      packet.reset();
      return packet.Pass();
    }

    int stream_index = static_cast<int>(packet->stream_index);
    if (stream_index == audio_stream_index_) {
      *audio = true;
    } else if (stream_index == video_stream_index_) {
      *audio = false;
    } else {
      // Ignore unknown packet.
      LOG(INFO) << "Unknown packet.";
      packet.reset();
    }
    return packet.Pass();
  }
  void DecodeAudio(ScopedAVPacket packet) {
    AVFrame* avframe = av_frame_alloc();

    // Shallow copy of the packet.
    AVPacket packet_temp = *packet.get();
    do {
      avcodec_get_frame_defaults(avframe);
      int frame_decoded = 0;
      int result = avcodec_decode_audio4(
          av_audio_context(), avframe, &frame_decoded, &packet_temp);
      CHECK(result >= 0) << "Failed to decode audio.";
      packet_temp.size -= result;
      packet_temp.data += result;
      if (!frame_decoded)
        continue;

      int frames_read = avframe->nb_samples;
      if (frames_read < 0)
        break;

      if (!audio_sent_ts_) {
        // Initialize the base time to the first packet in the file. This is
        // set to the frequency we send to the receiver, not the frequency
        // of the source file, because we increment the frame count by the
        // samples we have sent.
        audio_sent_ts_.reset(
            new AudioTimestampHelper(kAudioSamplingFrequency));
        // For some files this is an invalid value.
        base::TimeDelta base_ts;
        audio_sent_ts_->SetBaseTimestamp(base_ts);
      }

      scoped_refptr<AudioBuffer> buffer =
          AudioBuffer::CopyFrom(
              AVSampleFormatToSampleFormat(
                  av_audio_context()->sample_fmt),
              ChannelLayoutToChromeChannelLayout(
                  av_audio_context()->channel_layout,
                  av_audio_context()->channels),
              av_audio_context()->channels,
              av_audio_context()->sample_rate,
              frames_read,
              &avframe->data[0],
              // Note: Not all files have correct values for pkt_pts.
              base::TimeDelta::FromMilliseconds(avframe->pkt_pts));
      audio_algo_.EnqueueBuffer(buffer);
    } while (packet_temp.size > 0);
    avcodec_free_frame(&avframe);

    const int frames_needed_to_scale =
        playback_rate_ * av_audio_context()->sample_rate /
        kAudioPacketsPerSecond;
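    // For example, at playback_rate_ = 1.0 with a 48000 Hz source this is
    // 48000 / 100 = 480 frames, i.e. 10 ms of source audio must be buffered
    // before the algorithm below is asked to produce a bus.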
    while (frames_needed_to_scale <= audio_algo_.frames_buffered()) {
      if (!audio_algo_.FillBuffer(audio_fifo_input_bus_.get(),
                                  audio_fifo_input_bus_->frames())) {
        // Nothing can be scaled. Decode some more.
        return;
      }

      // Prevent overflow of audio data in the FIFO.
      if (audio_fifo_input_bus_->frames() + audio_fifo_->frames()
          <= audio_fifo_->max_frames()) {
        audio_fifo_->Push(audio_fifo_input_bus_.get());
      } else {
        LOG(WARNING) << "Audio FIFO full; dropping samples.";
      }

      // Make sure there's enough data to resample audio.
      if (audio_fifo_->frames() <
          2 * audio_params_.sample_rate() / kAudioPacketsPerSecond) {
        continue;
      }

      scoped_ptr<media::AudioBus> resampled_bus(
          media::AudioBus::Create(
              audio_params_.channels(),
              kAudioSamplingFrequency / kAudioPacketsPerSecond));
      audio_resampler_->Resample(resampled_bus->frames(),
                                 resampled_bus.get());
      audio_bus_queue_.push(resampled_bus.release());
    }
  }
  void DecodeVideo(ScopedAVPacket packet) {
    int got_picture;
    AVFrame* avframe = av_frame_alloc();
    avcodec_get_frame_defaults(avframe);
    // Tell the decoder to reorder for us.
    avframe->reordered_opaque =
        av_video_context()->reordered_opaque = packet->pts;
    CHECK(avcodec_decode_video2(
        av_video_context(), avframe, &got_picture, packet.get()) >= 0)
        << "Video decode error.";
    if (!got_picture)
      return;

    gfx::Size size(av_video_context()->width, av_video_context()->height);
    if (!video_first_pts_set_ ||
        avframe->reordered_opaque < video_first_pts_) {
      video_first_pts_set_ = true;
      video_first_pts_ = avframe->reordered_opaque;
    }
    int64 pts = avframe->reordered_opaque - video_first_pts_;
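    // reordered_opaque carries the container PTS through the decoder's
    // frame reordering; subtracting the smallest PTS seen so far normalizes
    // the queued timestamps to start at zero.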
    video_frame_queue_.push(
        VideoFrame::WrapExternalYuvData(
            media::VideoFrame::YV12,
            size,
            gfx::Rect(size),
            size,
            avframe->linesize[0],
            avframe->linesize[1],
            avframe->linesize[2],
            avframe->data[0],
            avframe->data[1],
            avframe->data[2],
            base::TimeDelta::FromMilliseconds(pts),
            base::Bind(&AVFreeFrame, avframe)));
  }
  void Decode(bool decode_audio) {
    // Read the stream until one video frame can be decoded.
    while (true) {
      if (decode_audio && !audio_bus_queue_.empty())
        return;
      if (!decode_audio && !video_frame_queue_.empty())
        return;

      bool audio_packet = false;
      ScopedAVPacket packet = DemuxOnePacket(&audio_packet);
      if (!packet) {
        LOG(INFO) << "End of stream.";
        return;
      }

      if (audio_packet)
        DecodeAudio(packet.Pass());
      else
        DecodeVideo(packet.Pass());
    }
  }
  void ProvideData(int frame_delay, media::AudioBus* output_bus) {
    if (audio_fifo_->frames() >= output_bus->frames()) {
      audio_fifo_->Consume(output_bus, 0, output_bus->frames());
    } else {
      LOG(WARNING) << "Not enough audio data for resampling.";
      output_bus->Zero();
    }
  }
  AVStream* av_audio_stream() {
    return av_format_context_->streams[audio_stream_index_];
  }

  AVStream* av_video_stream() {
    return av_format_context_->streams[video_stream_index_];
  }

  AVCodecContext* av_audio_context() { return av_audio_stream()->codec; }
  AVCodecContext* av_video_context() { return av_video_stream()->codec; }
  scoped_refptr<base::SingleThreadTaskRunner> test_app_thread_proxy_;
  VideoSenderConfig video_config_;
  scoped_refptr<AudioFrameInput> audio_frame_input_;
  scoped_refptr<VideoFrameInput> video_frame_input_;
  uint8 synthetic_count_;
  base::TickClock* const clock_;  // Not owned by this class.

  // Time when the stream starts.
  base::TimeTicks start_time_;

  // The following three members are used only for fake frames.
  int audio_frame_count_;  // Each audio frame is exactly 10ms.
  int video_frame_count_;
  scoped_ptr<TestAudioBusFactory> audio_bus_factory_;

  // NOTE: Weak pointers must be invalidated before all other member
  // variables.
  base::WeakPtrFactory<SendProcess> weak_factory_;

  base::MemoryMappedFile file_data_;
  scoped_ptr<InMemoryUrlProtocol> protocol_;
  scoped_ptr<FFmpegGlue> glue_;
  AVFormatContext* av_format_context_;

  int audio_stream_index_;
  AudioParameters audio_params_;
  double playback_rate_;

  int video_stream_index_;
  int video_frame_rate_numerator_;
  int video_frame_rate_denominator_;

  // These are used for audio resampling.
  scoped_ptr<media::MultiChannelResampler> audio_resampler_;
  scoped_ptr<media::AudioFifo> audio_fifo_;
  scoped_ptr<media::AudioBus> audio_fifo_input_bus_;
  media::AudioRendererAlgorithm audio_algo_;

  // Track the timestamp of audio sent to the receiver.
  scoped_ptr<media::AudioTimestampHelper> audio_sent_ts_;

  std::queue<scoped_refptr<VideoFrame> > video_frame_queue_;
  int64 video_first_pts_;
  bool video_first_pts_set_;

  std::queue<AudioBus*> audio_bus_queue_;
  DISALLOW_COPY_AND_ASSIGN(SendProcess);
};
}  // namespace cast
}  // namespace media

void UpdateCastTransportStatus(
    media::cast::transport::CastTransportStatus status) {
  VLOG(21) << "Transport status: " << status;
}

void LogRawEvents(
    const scoped_refptr<media::cast::CastEnvironment>& cast_environment,
    const std::vector<media::cast::PacketEvent>& packet_events) {
  VLOG(1) << "Got packet events from transport, size: "
          << packet_events.size();
  for (std::vector<media::cast::PacketEvent>::const_iterator it =
           packet_events.begin();
       it != packet_events.end();
       ++it) {
    cast_environment->Logging()->InsertPacketEvent(it->timestamp,
                                                   it->type,
                                                   it->media_type,
                                                   it->rtp_timestamp,
                                                   it->frame_id,
                                                   it->packet_id,
                                                   it->max_packet_id,
                                                   it->size);
  }
}
void InitializationResult(media::cast::CastInitializationStatus result) {
  bool end_result = result == media::cast::STATUS_AUDIO_INITIALIZED ||
                    result == media::cast::STATUS_VIDEO_INITIALIZED;
  CHECK(end_result) << "Cast sender uninitialized";
}
net::IPEndPoint CreateUDPAddress(std::string ip_str, int port) {
  net::IPAddressNumber ip_number;
  CHECK(net::ParseIPLiteralToNumber(ip_str, &ip_number));
  return net::IPEndPoint(ip_number, port);
}
void DumpLoggingData(const media::cast::proto::LogMetadata& log_metadata,
                     const media::cast::FrameEventList& frame_events,
                     const media::cast::PacketEventList& packet_events,
                     base::ScopedFILE log_file) {
  VLOG(0) << "Frame map size: " << frame_events.size();
  VLOG(0) << "Packet map size: " << packet_events.size();

  scoped_ptr<char[]> event_log(new char[kMaxSerializedLogBytes]);
  int event_log_bytes;
  if (!media::cast::SerializeEvents(log_metadata,
                                    frame_events,
                                    packet_events,
                                    true,
                                    kMaxSerializedLogBytes,
                                    event_log.get(),
                                    &event_log_bytes)) {
    VLOG(0) << "Failed to serialize events.";
    return;
  }

  VLOG(0) << "Events serialized length: " << event_log_bytes;

  int ret = fwrite(event_log.get(), 1, event_log_bytes, log_file.get());
  if (ret != event_log_bytes)
    VLOG(0) << "Failed to write logs to file.";
}
void WriteLogsToFileAndDestroySubscribers(
    const scoped_refptr<media::cast::CastEnvironment>& cast_environment,
    scoped_ptr<media::cast::EncodingEventSubscriber> video_event_subscriber,
    scoped_ptr<media::cast::EncodingEventSubscriber> audio_event_subscriber,
    base::ScopedFILE video_log_file,
    base::ScopedFILE audio_log_file) {
  cast_environment->Logging()->RemoveRawEventSubscriber(
      video_event_subscriber.get());
  cast_environment->Logging()->RemoveRawEventSubscriber(
      audio_event_subscriber.get());

  VLOG(0) << "Dumping logging data for video stream.";
  media::cast::proto::LogMetadata log_metadata;
  media::cast::FrameEventList frame_events;
  media::cast::PacketEventList packet_events;
  video_event_subscriber->GetEventsAndReset(
      &log_metadata, &frame_events, &packet_events);
  DumpLoggingData(log_metadata,
                  frame_events,
                  packet_events,
                  video_log_file.Pass());

  VLOG(0) << "Dumping logging data for audio stream.";
  audio_event_subscriber->GetEventsAndReset(
      &log_metadata, &frame_events, &packet_events);
  DumpLoggingData(log_metadata,
                  frame_events,
                  packet_events,
                  audio_log_file.Pass());
}
void WriteStatsAndDestroySubscribers(
    const scoped_refptr<media::cast::CastEnvironment>& cast_environment,
    scoped_ptr<media::cast::StatsEventSubscriber> video_event_subscriber,
    scoped_ptr<media::cast::StatsEventSubscriber> audio_event_subscriber,
    scoped_ptr<media::cast::ReceiverTimeOffsetEstimatorImpl> estimator) {
  cast_environment->Logging()->RemoveRawEventSubscriber(
      video_event_subscriber.get());
  cast_environment->Logging()->RemoveRawEventSubscriber(
      audio_event_subscriber.get());
  cast_environment->Logging()->RemoveRawEventSubscriber(estimator.get());

  scoped_ptr<base::DictionaryValue> stats = video_event_subscriber->GetStats();
  std::string json;
  base::JSONWriter::WriteWithOptions(
      stats.get(), base::JSONWriter::OPTIONS_PRETTY_PRINT, &json);
  VLOG(0) << "Video stats: " << json;

  stats = audio_event_subscriber->GetStats();
  json.clear();
  base::JSONWriter::WriteWithOptions(
      stats.get(), base::JSONWriter::OPTIONS_PRETTY_PRINT, &json);
  VLOG(0) << "Audio stats: " << json;
}
int main(int argc, char** argv) {
  base::AtExitManager at_exit;
  CommandLine::Init(argc, argv);
  InitLogging(logging::LoggingSettings());

  // Load the media module for FFmpeg decoding.
  base::FilePath path;
  PathService::Get(base::DIR_MODULE, &path);
  if (!media::InitializeMediaLibrary(path)) {
    LOG(ERROR) << "Could not initialize media library.";
    return 1;
  }
  base::Thread test_thread("Cast sender test app thread");
  base::Thread audio_thread("Cast audio encoder thread");
  base::Thread video_thread("Cast video encoder thread");
  test_thread.Start();
  audio_thread.Start();
  video_thread.Start();

  base::MessageLoopForIO io_message_loop;
  // Default parameters.
  CommandLine* cmd = CommandLine::ForCurrentProcess();
  std::string remote_ip_address = cmd->GetSwitchValueASCII(kSwitchAddress);
  if (remote_ip_address.empty())
    remote_ip_address = "127.0.0.1";
  int remote_port = 0;
  if (!base::StringToInt(cmd->GetSwitchValueASCII(kSwitchPort),
                         &remote_port)) {
    LOG(WARNING) << "Missing or invalid --port switch.";
  }
  LOG(INFO) << "Sending to " << remote_ip_address << ":" << remote_port;

  media::cast::AudioSenderConfig audio_config =
      media::cast::GetAudioSenderConfig();
  media::cast::VideoSenderConfig video_config =
      media::cast::GetVideoSenderConfig();

  // Running transport on the main thread.
  // Setting up transport config.
  net::IPEndPoint remote_endpoint =
      CreateUDPAddress(remote_ip_address, remote_port);

  // Enable raw event and stats logging.
  // Running transport on the main thread.
  scoped_refptr<media::cast::CastEnvironment> cast_environment(
      new media::cast::CastEnvironment(
          make_scoped_ptr<base::TickClock>(new base::DefaultTickClock()),
          io_message_loop.message_loop_proxy(),
          audio_thread.message_loop_proxy(),
          video_thread.message_loop_proxy()));
  // SendProcess initialization.
  scoped_ptr<media::cast::SendProcess> send_process(
      new media::cast::SendProcess(test_thread.message_loop_proxy(),
                                   cast_environment->Clock(),
                                   video_config));
  // CastTransportSender initialization.
  scoped_ptr<media::cast::transport::CastTransportSender> transport_sender =
      media::cast::transport::CastTransportSender::Create(
          NULL,  // net log.
          cast_environment->Clock(),
          remote_endpoint,
          base::Bind(&UpdateCastTransportStatus),
          base::Bind(&LogRawEvents, cast_environment),
          base::TimeDelta::FromSeconds(1),
          io_message_loop.message_loop_proxy());
  // CastSender initialization.
  scoped_ptr<media::cast::CastSender> cast_sender =
      media::cast::CastSender::Create(cast_environment, transport_sender.get());
  cast_sender->InitializeVideo(
      send_process->get_video_config(),
      base::Bind(&InitializationResult),
      media::cast::CreateDefaultVideoEncodeAcceleratorCallback(),
      media::cast::CreateDefaultVideoEncodeMemoryCallback());
  cast_sender->InitializeAudio(audio_config, base::Bind(&InitializationResult));
  transport_sender->SetPacketReceiver(cast_sender->packet_receiver());

  // Set up event subscribers.
  scoped_ptr<media::cast::EncodingEventSubscriber> video_event_subscriber;
  scoped_ptr<media::cast::EncodingEventSubscriber> audio_event_subscriber;
  std::string video_log_file_name("/tmp/video_events.log.gz");
  std::string audio_log_file_name("/tmp/audio_events.log.gz");
  LOG(INFO) << "Logging audio events to: " << audio_log_file_name;
  LOG(INFO) << "Logging video events to: " << video_log_file_name;
  video_event_subscriber.reset(new media::cast::EncodingEventSubscriber(
      media::cast::VIDEO_EVENT, 10000));
  audio_event_subscriber.reset(new media::cast::EncodingEventSubscriber(
      media::cast::AUDIO_EVENT, 10000));
  cast_environment->Logging()->AddRawEventSubscriber(
      video_event_subscriber.get());
  cast_environment->Logging()->AddRawEventSubscriber(
      audio_event_subscriber.get());

  // Subscribers for stats.
  scoped_ptr<media::cast::ReceiverTimeOffsetEstimatorImpl> offset_estimator(
      new media::cast::ReceiverTimeOffsetEstimatorImpl);
  cast_environment->Logging()->AddRawEventSubscriber(offset_estimator.get());
  scoped_ptr<media::cast::StatsEventSubscriber> video_stats_subscriber(
      new media::cast::StatsEventSubscriber(media::cast::VIDEO_EVENT,
                                            cast_environment->Clock(),
                                            offset_estimator.get()));
  scoped_ptr<media::cast::StatsEventSubscriber> audio_stats_subscriber(
      new media::cast::StatsEventSubscriber(media::cast::AUDIO_EVENT,
                                            cast_environment->Clock(),
                                            offset_estimator.get()));
  cast_environment->Logging()->AddRawEventSubscriber(
      video_stats_subscriber.get());
  cast_environment->Logging()->AddRawEventSubscriber(
      audio_stats_subscriber.get());

  base::ScopedFILE video_log_file(fopen(video_log_file_name.c_str(), "w"));
  if (!video_log_file) {
    VLOG(1) << "Failed to open video log file for writing.";
    return 1;
  }

  base::ScopedFILE audio_log_file(fopen(audio_log_file_name.c_str(), "w"));
  if (!audio_log_file) {
    VLOG(1) << "Failed to open audio log file for writing.";
    return 1;
  }
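
  // After logging_duration_seconds (defined just below) the raw-event
  // subscribers are detached and their serialized logs are written to the
  // two .gz files opened above; the stats subscribers are dumped via VLOG.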
  const int logging_duration_seconds = 10;
  io_message_loop.message_loop_proxy()->PostDelayedTask(
      FROM_HERE,
      base::Bind(&WriteLogsToFileAndDestroySubscribers,
                 cast_environment,
                 base::Passed(&video_event_subscriber),
                 base::Passed(&audio_event_subscriber),
                 base::Passed(&video_log_file),
                 base::Passed(&audio_log_file)),
      base::TimeDelta::FromSeconds(logging_duration_seconds));

  io_message_loop.message_loop_proxy()->PostDelayedTask(
      FROM_HERE,
      base::Bind(&WriteStatsAndDestroySubscribers,
                 cast_environment,
                 base::Passed(&video_stats_subscriber),
                 base::Passed(&audio_stats_subscriber),
                 base::Passed(&offset_estimator)),
      base::TimeDelta::FromSeconds(logging_duration_seconds));

  send_process->Start(cast_sender->audio_frame_input(),
                      cast_sender->video_frame_input());

  io_message_loop.Run();
  return 0;
}