// chrome/renderer/media/cast_rtp_stream.cc
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
5 #include "chrome/renderer/media/cast_rtp_stream.h"
7 #include "base/bind.h"
8 #include "base/command_line.h"
9 #include "base/logging.h"
10 #include "base/memory/weak_ptr.h"
11 #include "base/strings/stringprintf.h"
12 #include "base/sys_info.h"
13 #include "base/trace_event/trace_event.h"
14 #include "chrome/common/chrome_switches.h"
15 #include "chrome/renderer/media/cast_session.h"
16 #include "chrome/renderer/media/cast_udp_transport.h"
17 #include "content/public/renderer/media_stream_audio_sink.h"
18 #include "content/public/renderer/media_stream_video_sink.h"
19 #include "content/public/renderer/render_thread.h"
20 #include "content/public/renderer/video_encode_accelerator.h"
21 #include "media/audio/audio_parameters.h"
22 #include "media/base/audio_bus.h"
23 #include "media/base/audio_converter.h"
24 #include "media/base/audio_fifo.h"
25 #include "media/base/bind_to_current_loop.h"
26 #include "media/base/video_frame.h"
27 #include "media/cast/cast_config.h"
28 #include "media/cast/cast_defines.h"
29 #include "media/cast/cast_sender.h"
30 #include "media/cast/net/cast_transport_config.h"
31 #include "third_party/WebKit/public/platform/WebMediaStreamSource.h"
32 #include "ui/gfx/geometry/size.h"
34 using media::cast::AudioSenderConfig;
35 using media::cast::VideoSenderConfig;
37 namespace {
// Codec identifiers as they appear in CastRtpPayloadParams::codec_name.
const char kCodecNameOpus[] = "OPUS";
const char kCodecNameVp8[] = "VP8";
const char kCodecNameH264[] = "H264";

// To convert from kilobits per second to bits per second.
const int kBitrateMultiplier = 1000;
46 CastRtpPayloadParams DefaultOpusPayload() {
47 CastRtpPayloadParams payload;
48 payload.payload_type = 127;
49 payload.max_latency_ms = media::cast::kDefaultRtpMaxDelayMs;
50 payload.ssrc = 1;
51 payload.feedback_ssrc = 2;
52 payload.clock_rate = media::cast::kDefaultAudioSamplingRate;
53 // The value is 0 which means VBR.
54 payload.min_bitrate = payload.max_bitrate =
55 media::cast::kDefaultAudioEncoderBitrate;
56 payload.channels = 2;
57 payload.max_frame_rate = 100; // 10 ms audio frames
58 payload.codec_name = kCodecNameOpus;
59 return payload;
62 CastRtpPayloadParams DefaultVp8Payload() {
63 CastRtpPayloadParams payload;
64 payload.payload_type = 96;
65 payload.max_latency_ms = media::cast::kDefaultRtpMaxDelayMs;
66 payload.ssrc = 11;
67 payload.feedback_ssrc = 12;
68 payload.clock_rate = media::cast::kVideoFrequency;
69 payload.max_bitrate = 2000;
70 payload.min_bitrate = 50;
71 payload.channels = 1;
72 payload.max_frame_rate = media::cast::kDefaultMaxFrameRate;
73 payload.width = 1280;
74 payload.height = 720;
75 payload.codec_name = kCodecNameVp8;
76 return payload;
79 CastRtpPayloadParams DefaultH264Payload() {
80 CastRtpPayloadParams payload;
81 // TODO(hshi): set different ssrc/rtpPayloadType values for H264 and VP8
82 // once b/13696137 is fixed.
83 payload.payload_type = 96;
84 payload.max_latency_ms = media::cast::kDefaultRtpMaxDelayMs;
85 payload.ssrc = 11;
86 payload.feedback_ssrc = 12;
87 payload.clock_rate = media::cast::kVideoFrequency;
88 payload.max_bitrate = 2000;
89 payload.min_bitrate = 50;
90 payload.channels = 1;
91 payload.max_frame_rate = media::cast::kDefaultMaxFrameRate;
92 payload.width = 1280;
93 payload.height = 720;
94 payload.codec_name = kCodecNameH264;
95 return payload;
98 bool IsHardwareVP8EncodingSupported() {
99 const base::CommandLine* cmd_line = base::CommandLine::ForCurrentProcess();
100 if (cmd_line->HasSwitch(switches::kDisableCastStreamingHWEncoding)) {
101 DVLOG(1) << "Disabled hardware VP8 support for Cast Streaming.";
102 return false;
105 // Query for hardware VP8 encoder support.
106 std::vector<media::VideoEncodeAccelerator::SupportedProfile> vea_profiles =
107 content::GetSupportedVideoEncodeAcceleratorProfiles();
108 for (size_t i = 0; i < vea_profiles.size(); ++i) {
109 if (vea_profiles[i].profile >= media::VP8PROFILE_MIN &&
110 vea_profiles[i].profile <= media::VP8PROFILE_MAX) {
111 return true;
114 return false;
117 bool IsHardwareH264EncodingSupported() {
118 const base::CommandLine* cmd_line = base::CommandLine::ForCurrentProcess();
119 if (cmd_line->HasSwitch(switches::kDisableCastStreamingHWEncoding)) {
120 DVLOG(1) << "Disabled hardware h264 support for Cast Streaming.";
121 return false;
124 // Query for hardware H.264 encoder support.
125 std::vector<media::VideoEncodeAccelerator::SupportedProfile> vea_profiles =
126 content::GetSupportedVideoEncodeAcceleratorProfiles();
127 for (size_t i = 0; i < vea_profiles.size(); ++i) {
128 if (vea_profiles[i].profile >= media::H264PROFILE_MIN &&
129 vea_profiles[i].profile <= media::H264PROFILE_MAX) {
130 return true;
133 return false;
136 int NumberOfEncodeThreads() {
137 // We want to give CPU cycles for capturing and not to saturate the system
138 // just for encoding. So on a lower end system with only 1 or 2 cores we
139 // use only one thread for encoding.
140 if (base::SysInfo::NumberOfProcessors() <= 2)
141 return 1;
143 // On higher end we want to use 2 threads for encoding to reduce latency.
144 // In theory a physical CPU core has maximum 2 hyperthreads. Having 3 or
145 // more logical processors means the system has at least 2 physical cores.
146 return 2;
149 std::vector<CastRtpParams> SupportedAudioParams() {
150 // TODO(hclam): Fill in more codecs here.
151 std::vector<CastRtpParams> supported_params;
152 supported_params.push_back(CastRtpParams(DefaultOpusPayload()));
153 return supported_params;
156 std::vector<CastRtpParams> SupportedVideoParams() {
157 std::vector<CastRtpParams> supported_params;
158 if (IsHardwareH264EncodingSupported())
159 supported_params.push_back(CastRtpParams(DefaultH264Payload()));
160 supported_params.push_back(CastRtpParams(DefaultVp8Payload()));
161 return supported_params;
164 bool ToAudioSenderConfig(const CastRtpParams& params,
165 AudioSenderConfig* config) {
166 config->ssrc = params.payload.ssrc;
167 config->receiver_ssrc = params.payload.feedback_ssrc;
168 if (config->ssrc == config->receiver_ssrc)
169 return false;
170 config->min_playout_delay =
171 base::TimeDelta::FromMilliseconds(
172 params.payload.min_latency_ms ?
173 params.payload.min_latency_ms :
174 params.payload.max_latency_ms);
175 config->max_playout_delay =
176 base::TimeDelta::FromMilliseconds(params.payload.max_latency_ms);
177 if (config->min_playout_delay <= base::TimeDelta())
178 return false;
179 if (config->min_playout_delay > config->max_playout_delay)
180 return false;
181 config->rtp_payload_type = params.payload.payload_type;
182 config->use_external_encoder = false;
183 config->frequency = params.payload.clock_rate;
184 if (config->frequency < 8000)
185 return false;
186 config->channels = params.payload.channels;
187 if (config->channels < 1)
188 return false;
189 config->bitrate = params.payload.max_bitrate * kBitrateMultiplier;
190 if (params.payload.codec_name == kCodecNameOpus)
191 config->codec = media::cast::CODEC_AUDIO_OPUS;
192 else
193 return false;
194 config->aes_key = params.payload.aes_key;
195 config->aes_iv_mask = params.payload.aes_iv_mask;
196 return true;
199 bool ToVideoSenderConfig(const CastRtpParams& params,
200 VideoSenderConfig* config) {
201 config->ssrc = params.payload.ssrc;
202 config->receiver_ssrc = params.payload.feedback_ssrc;
203 if (config->ssrc == config->receiver_ssrc)
204 return false;
205 config->min_playout_delay =
206 base::TimeDelta::FromMilliseconds(
207 params.payload.min_latency_ms ?
208 params.payload.min_latency_ms :
209 params.payload.max_latency_ms);
210 config->max_playout_delay =
211 base::TimeDelta::FromMilliseconds(params.payload.max_latency_ms);
212 if (config->min_playout_delay <= base::TimeDelta())
213 return false;
214 if (config->min_playout_delay > config->max_playout_delay)
215 return false;
216 config->rtp_payload_type = params.payload.payload_type;
217 config->min_bitrate = config->start_bitrate =
218 params.payload.min_bitrate * kBitrateMultiplier;
219 config->max_bitrate = params.payload.max_bitrate * kBitrateMultiplier;
220 if (config->min_bitrate > config->max_bitrate)
221 return false;
222 config->start_bitrate = config->min_bitrate;
223 config->max_frame_rate = static_cast<int>(
224 std::max(1.0, params.payload.max_frame_rate) + 0.5);
225 if (config->max_frame_rate > 120)
226 return false;
227 if (params.payload.codec_name == kCodecNameVp8) {
228 config->use_external_encoder = IsHardwareVP8EncodingSupported();
229 config->codec = media::cast::CODEC_VIDEO_VP8;
230 } else if (params.payload.codec_name == kCodecNameH264) {
231 config->use_external_encoder = IsHardwareH264EncodingSupported();
232 config->codec = media::cast::CODEC_VIDEO_H264;
233 } else {
234 return false;
236 if (!config->use_external_encoder) {
237 config->number_of_encode_threads = NumberOfEncodeThreads();
239 config->aes_key = params.payload.aes_key;
240 config->aes_iv_mask = params.payload.aes_iv_mask;
241 return true;
244 } // namespace
246 // This class receives MediaStreamTrack events and video frames from a
247 // MediaStreamTrack.
249 // Threading: Video frames are received on the IO thread and then
250 // forwarded to media::cast::VideoFrameInput through a static method.
251 // Member variables of this class are only accessed on the render thread.
252 class CastVideoSink : public base::SupportsWeakPtr<CastVideoSink>,
253 public content::MediaStreamVideoSink {
254 public:
255 // |track| provides data for this sink.
256 // |error_callback| is called if video formats don't match.
257 CastVideoSink(const blink::WebMediaStreamTrack& track,
258 const CastRtpStream::ErrorCallback& error_callback)
259 : track_(track),
260 sink_added_(false),
261 error_callback_(error_callback) {}
263 ~CastVideoSink() override {
264 if (sink_added_)
265 RemoveFromVideoTrack(this, track_);
268 // This static method is used to forward video frames to |frame_input|.
269 static void OnVideoFrame(
270 // These parameters are already bound when callback is created.
271 const CastRtpStream::ErrorCallback& error_callback,
272 const scoped_refptr<media::cast::VideoFrameInput> frame_input,
273 // These parameters are passed for each frame.
274 const scoped_refptr<media::VideoFrame>& frame,
275 const media::VideoCaptureFormat& format,
276 const base::TimeTicks& estimated_capture_time) {
277 base::TimeTicks timestamp;
278 if (estimated_capture_time.is_null())
279 timestamp = base::TimeTicks::Now();
280 else
281 timestamp = estimated_capture_time;
283 // Used by chrome/browser/extension/api/cast_streaming/performance_test.cc
284 TRACE_EVENT_INSTANT2(
285 "cast_perf_test", "MediaStreamVideoSink::OnVideoFrame",
286 TRACE_EVENT_SCOPE_THREAD,
287 "timestamp", timestamp.ToInternalValue(),
288 "time_delta", frame->timestamp().ToInternalValue());
289 frame_input->InsertRawVideoFrame(frame, timestamp);
292 // Attach this sink to a video track represented by |track_|.
293 // Data received from the track will be submitted to |frame_input|.
294 void AddToTrack(
295 const scoped_refptr<media::cast::VideoFrameInput>& frame_input) {
296 DCHECK(!sink_added_);
297 sink_added_ = true;
298 AddToVideoTrack(
299 this,
300 base::Bind(
301 &CastVideoSink::OnVideoFrame,
302 error_callback_,
303 frame_input),
304 track_);
307 private:
308 blink::WebMediaStreamTrack track_;
309 bool sink_added_;
310 CastRtpStream::ErrorCallback error_callback_;
312 DISALLOW_COPY_AND_ASSIGN(CastVideoSink);
315 // Receives audio data from a MediaStreamTrack. Data is submitted to
316 // media::cast::FrameInput.
318 // Threading: Audio frames are received on the real-time audio thread.
319 // Note that RemoveFromAudioTrack() is synchronous and we have
320 // gurantee that there will be no more audio data after calling it.
321 class CastAudioSink : public base::SupportsWeakPtr<CastAudioSink>,
322 public content::MediaStreamAudioSink,
323 public media::AudioConverter::InputCallback {
324 public:
325 // |track| provides data for this sink.
326 CastAudioSink(const blink::WebMediaStreamTrack& track,
327 int output_channels,
328 int output_sample_rate)
329 : track_(track),
330 output_channels_(output_channels),
331 output_sample_rate_(output_sample_rate),
332 current_input_bus_(nullptr),
333 sample_frames_in_(0),
334 sample_frames_out_(0) {}
336 ~CastAudioSink() override {
337 if (frame_input_.get())
338 RemoveFromAudioTrack(this, track_);
341 // Add this sink to the track. Data received from the track will be
342 // submitted to |frame_input|.
343 void AddToTrack(
344 const scoped_refptr<media::cast::AudioFrameInput>& frame_input) {
345 DCHECK(frame_input.get());
346 DCHECK(!frame_input_.get());
347 // This member is written here and then accessed on the IO thread
348 // We will not get data until AddToAudioTrack is called so it is
349 // safe to access this member now.
350 frame_input_ = frame_input;
351 AddToAudioTrack(this, track_);
354 protected:
355 // Called on real-time audio thread.
356 void OnData(const media::AudioBus& input_bus,
357 base::TimeTicks estimated_capture_time) override {
358 DCHECK(input_params_.IsValid());
359 DCHECK_EQ(input_bus.channels(), input_params_.channels());
360 DCHECK_EQ(input_bus.frames(), input_params_.frames_per_buffer());
361 DCHECK(!estimated_capture_time.is_null());
362 DCHECK(converter_.get());
364 // Determine the duration of the audio signal enqueued within |converter_|.
365 const base::TimeDelta signal_duration_already_buffered =
366 (sample_frames_in_ * base::TimeDelta::FromSeconds(1) /
367 input_params_.sample_rate()) -
368 (sample_frames_out_ * base::TimeDelta::FromSeconds(1) /
369 output_sample_rate_);
370 DVLOG(2) << "Audio reference time adjustment: -("
371 << signal_duration_already_buffered.InMicroseconds() << " us)";
372 const base::TimeTicks capture_time_of_first_converted_sample =
373 estimated_capture_time - signal_duration_already_buffered;
375 // Convert the entire input signal. AudioConverter is efficient in that no
376 // additional copying or conversion will occur if the input signal is in the
377 // same format as the output. Note that, while the number of sample frames
378 // provided as input is always the same, the chunk size (and the size of the
379 // |audio_bus| here) can be variable. This is not an issue since
380 // media::cast::AudioFrameInput can handle variable-sized AudioBuses.
381 scoped_ptr<media::AudioBus> audio_bus =
382 media::AudioBus::Create(output_channels_, converter_->ChunkSize());
383 // AudioConverter will call ProvideInput() to fetch from |current_data_|.
384 current_input_bus_ = &input_bus;
385 converter_->Convert(audio_bus.get());
386 DCHECK(!current_input_bus_); // ProvideInput() called exactly once?
388 sample_frames_in_ += input_params_.frames_per_buffer();
389 sample_frames_out_ += audio_bus->frames();
391 frame_input_->InsertAudio(audio_bus.Pass(),
392 capture_time_of_first_converted_sample);
395 // Called on real-time audio thread.
396 void OnSetFormat(const media::AudioParameters& params) override {
397 if (input_params_.Equals(params))
398 return;
399 input_params_ = params;
401 DVLOG(1) << "Setting up audio resampling: {"
402 << input_params_.channels() << " channels, "
403 << input_params_.sample_rate() << " Hz} --> {"
404 << output_channels_ << " channels, "
405 << output_sample_rate_ << " Hz}";
406 const media::AudioParameters output_params(
407 media::AudioParameters::AUDIO_PCM_LOW_LATENCY,
408 media::GuessChannelLayout(output_channels_),
409 output_sample_rate_, 32,
410 output_sample_rate_ * input_params_.frames_per_buffer() /
411 input_params_.sample_rate());
412 converter_.reset(
413 new media::AudioConverter(input_params_, output_params, false));
414 converter_->AddInput(this);
415 sample_frames_in_ = 0;
416 sample_frames_out_ = 0;
419 // Called on real-time audio thread.
420 double ProvideInput(media::AudioBus* audio_bus,
421 base::TimeDelta buffer_delay) override {
422 DCHECK(current_input_bus_);
423 current_input_bus_->CopyTo(audio_bus);
424 current_input_bus_ = nullptr;
425 return 1.0;
428 private:
429 const blink::WebMediaStreamTrack track_;
430 const int output_channels_;
431 const int output_sample_rate_;
433 // This must be set before the real-time audio thread starts calling OnData(),
434 // and remain unchanged until after the thread will stop calling OnData().
435 scoped_refptr<media::cast::AudioFrameInput> frame_input_;
437 // These members are accessed on the real-time audio time only.
438 media::AudioParameters input_params_;
439 scoped_ptr<media::AudioConverter> converter_;
440 const media::AudioBus* current_input_bus_;
441 int64 sample_frames_in_;
442 int64 sample_frames_out_;
444 DISALLOW_COPY_AND_ASSIGN(CastAudioSink);
447 CastRtpParams::CastRtpParams(const CastRtpPayloadParams& payload_params)
448 : payload(payload_params) {}
450 CastCodecSpecificParams::CastCodecSpecificParams() {}
452 CastCodecSpecificParams::~CastCodecSpecificParams() {}
454 CastRtpPayloadParams::CastRtpPayloadParams()
455 : payload_type(0),
456 max_latency_ms(0),
457 min_latency_ms(0),
458 ssrc(0),
459 feedback_ssrc(0),
460 clock_rate(0),
461 max_bitrate(0),
462 min_bitrate(0),
463 channels(0),
464 max_frame_rate(0.0),
465 width(0),
466 height(0) {}
468 CastRtpPayloadParams::~CastRtpPayloadParams() {}
470 CastRtpParams::CastRtpParams() {}
472 CastRtpParams::~CastRtpParams() {}
474 CastRtpStream::CastRtpStream(const blink::WebMediaStreamTrack& track,
475 const scoped_refptr<CastSession>& session)
476 : track_(track), cast_session_(session), weak_factory_(this) {}
478 CastRtpStream::~CastRtpStream() {}
480 std::vector<CastRtpParams> CastRtpStream::GetSupportedParams() {
481 if (IsAudio())
482 return SupportedAudioParams();
483 else
484 return SupportedVideoParams();
487 CastRtpParams CastRtpStream::GetParams() { return params_; }
489 void CastRtpStream::Start(const CastRtpParams& params,
490 const base::Closure& start_callback,
491 const base::Closure& stop_callback,
492 const ErrorCallback& error_callback) {
493 DVLOG(1) << "CastRtpStream::Start = " << (IsAudio() ? "audio" : "video");
494 stop_callback_ = stop_callback;
495 error_callback_ = error_callback;
497 if (IsAudio()) {
498 AudioSenderConfig config;
499 if (!ToAudioSenderConfig(params, &config)) {
500 DidEncounterError("Invalid parameters for audio.");
501 return;
504 // In case of error we have to go through DidEncounterError() to stop
505 // the streaming after reporting the error.
506 audio_sink_.reset(new CastAudioSink(
507 track_,
508 params.payload.channels,
509 params.payload.clock_rate));
510 cast_session_->StartAudio(
511 config,
512 base::Bind(&CastAudioSink::AddToTrack, audio_sink_->AsWeakPtr()),
513 base::Bind(&CastRtpStream::DidEncounterError,
514 weak_factory_.GetWeakPtr()));
515 start_callback.Run();
516 } else {
517 VideoSenderConfig config;
518 if (!ToVideoSenderConfig(params, &config)) {
519 DidEncounterError("Invalid parameters for video.");
520 return;
522 // See the code for audio above for explanation of callbacks.
523 video_sink_.reset(new CastVideoSink(
524 track_,
525 media::BindToCurrentLoop(base::Bind(&CastRtpStream::DidEncounterError,
526 weak_factory_.GetWeakPtr()))));
527 cast_session_->StartVideo(
528 config,
529 base::Bind(&CastVideoSink::AddToTrack, video_sink_->AsWeakPtr()),
530 base::Bind(&CastRtpStream::DidEncounterError,
531 weak_factory_.GetWeakPtr()));
532 start_callback.Run();
536 void CastRtpStream::Stop() {
537 DVLOG(1) << "CastRtpStream::Stop = " << (IsAudio() ? "audio" : "video");
538 audio_sink_.reset();
539 video_sink_.reset();
540 if (!stop_callback_.is_null())
541 stop_callback_.Run();
544 void CastRtpStream::ToggleLogging(bool enable) {
545 DVLOG(1) << "CastRtpStream::ToggleLogging(" << enable << ") = "
546 << (IsAudio() ? "audio" : "video");
547 cast_session_->ToggleLogging(IsAudio(), enable);
550 void CastRtpStream::GetRawEvents(
551 const base::Callback<void(scoped_ptr<base::BinaryValue>)>& callback,
552 const std::string& extra_data) {
553 DVLOG(1) << "CastRtpStream::GetRawEvents = "
554 << (IsAudio() ? "audio" : "video");
555 cast_session_->GetEventLogsAndReset(IsAudio(), extra_data, callback);
558 void CastRtpStream::GetStats(
559 const base::Callback<void(scoped_ptr<base::DictionaryValue>)>& callback) {
560 DVLOG(1) << "CastRtpStream::GetStats = "
561 << (IsAudio() ? "audio" : "video");
562 cast_session_->GetStatsAndReset(IsAudio(), callback);
565 bool CastRtpStream::IsAudio() const {
566 return track_.source().type() == blink::WebMediaStreamSource::TypeAudio;
569 void CastRtpStream::DidEncounterError(const std::string& message) {
570 DVLOG(1) << "CastRtpStream::DidEncounterError(" << message << ") = "
571 << (IsAudio() ? "audio" : "video");
572 // Save the WeakPtr first because the error callback might delete this object.
573 base::WeakPtr<CastRtpStream> ptr = weak_factory_.GetWeakPtr();
574 error_callback_.Run(message);
575 content::RenderThread::Get()->GetTaskRunner()->PostTask(
576 FROM_HERE,
577 base::Bind(&CastRtpStream::Stop, ptr));