Only grant permissions to new extensions from sync if they have the expected version
[chromium-blink-merge.git] / chrome / renderer / media / cast_rtp_stream.cc
blob1b9d2da7b055d32dfe99a87b7fd11399fa83e1c7
1 // Copyright 2013 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #include "chrome/renderer/media/cast_rtp_stream.h"
7 #include <algorithm>
9 #include "base/bind.h"
10 #include "base/callback_helpers.h"
11 #include "base/command_line.h"
12 #include "base/logging.h"
13 #include "base/memory/weak_ptr.h"
14 #include "base/strings/stringprintf.h"
15 #include "base/sys_info.h"
16 #include "base/trace_event/trace_event.h"
17 #include "chrome/common/chrome_switches.h"
18 #include "chrome/renderer/media/cast_session.h"
19 #include "chrome/renderer/media/cast_udp_transport.h"
20 #include "content/public/renderer/media_stream_audio_sink.h"
21 #include "content/public/renderer/media_stream_video_sink.h"
22 #include "content/public/renderer/render_thread.h"
23 #include "content/public/renderer/video_encode_accelerator.h"
24 #include "media/audio/audio_parameters.h"
25 #include "media/base/audio_bus.h"
26 #include "media/base/audio_converter.h"
27 #include "media/base/audio_fifo.h"
28 #include "media/base/bind_to_current_loop.h"
29 #include "media/base/video_frame.h"
30 #include "media/cast/cast_config.h"
31 #include "media/cast/cast_defines.h"
32 #include "media/cast/cast_sender.h"
33 #include "media/cast/net/cast_transport_config.h"
34 #include "third_party/WebKit/public/platform/WebMediaStreamSource.h"
35 #include "ui/gfx/geometry/size.h"
37 using media::cast::AudioSenderConfig;
38 using media::cast::VideoSenderConfig;
40 namespace {
// Codec names as they appear in CastRtpPayloadParams::codec_name.
const char kCodecNameOpus[] = "OPUS";
const char kCodecNameVp8[] = "VP8";
const char kCodecNameH264[] = "H264";

// To convert from kilobits per second to bits per second.
const int kBitrateMultiplier = 1000;
49 CastRtpPayloadParams DefaultOpusPayload() {
50 CastRtpPayloadParams payload;
51 payload.payload_type = 127;
52 payload.max_latency_ms = media::cast::kDefaultRtpMaxDelayMs;
53 payload.ssrc = 1;
54 payload.feedback_ssrc = 2;
55 payload.clock_rate = media::cast::kDefaultAudioSamplingRate;
56 // The value is 0 which means VBR.
57 payload.min_bitrate = payload.max_bitrate =
58 media::cast::kDefaultAudioEncoderBitrate;
59 payload.channels = 2;
60 payload.max_frame_rate = 100; // 10 ms audio frames
61 payload.codec_name = kCodecNameOpus;
62 return payload;
65 CastRtpPayloadParams DefaultVp8Payload() {
66 CastRtpPayloadParams payload;
67 payload.payload_type = 96;
68 payload.max_latency_ms = media::cast::kDefaultRtpMaxDelayMs;
69 payload.ssrc = 11;
70 payload.feedback_ssrc = 12;
71 payload.clock_rate = media::cast::kVideoFrequency;
72 payload.max_bitrate = 2000;
73 payload.min_bitrate = 50;
74 payload.channels = 1;
75 payload.max_frame_rate = media::cast::kDefaultMaxFrameRate;
76 payload.codec_name = kCodecNameVp8;
77 return payload;
80 CastRtpPayloadParams DefaultH264Payload() {
81 CastRtpPayloadParams payload;
82 // TODO(hshi): set different ssrc/rtpPayloadType values for H264 and VP8
83 // once b/13696137 is fixed.
84 payload.payload_type = 96;
85 payload.max_latency_ms = media::cast::kDefaultRtpMaxDelayMs;
86 payload.ssrc = 11;
87 payload.feedback_ssrc = 12;
88 payload.clock_rate = media::cast::kVideoFrequency;
89 payload.max_bitrate = 2000;
90 payload.min_bitrate = 50;
91 payload.channels = 1;
92 payload.max_frame_rate = media::cast::kDefaultMaxFrameRate;
93 payload.codec_name = kCodecNameH264;
94 return payload;
97 bool IsHardwareVP8EncodingSupported() {
98 const base::CommandLine* cmd_line = base::CommandLine::ForCurrentProcess();
99 if (cmd_line->HasSwitch(switches::kDisableCastStreamingHWEncoding)) {
100 DVLOG(1) << "Disabled hardware VP8 support for Cast Streaming.";
101 return false;
104 // Query for hardware VP8 encoder support.
105 const std::vector<media::VideoEncodeAccelerator::SupportedProfile>
106 vea_profiles = content::GetSupportedVideoEncodeAcceleratorProfiles();
107 for (const auto& vea_profile : vea_profiles) {
108 if (vea_profile.profile >= media::VP8PROFILE_MIN &&
109 vea_profile.profile <= media::VP8PROFILE_MAX) {
110 return true;
113 return false;
116 bool IsHardwareH264EncodingSupported() {
117 const base::CommandLine* cmd_line = base::CommandLine::ForCurrentProcess();
118 if (cmd_line->HasSwitch(switches::kDisableCastStreamingHWEncoding)) {
119 DVLOG(1) << "Disabled hardware h264 support for Cast Streaming.";
120 return false;
123 // Query for hardware H.264 encoder support.
124 const std::vector<media::VideoEncodeAccelerator::SupportedProfile>
125 vea_profiles = content::GetSupportedVideoEncodeAcceleratorProfiles();
126 for (const auto& vea_profile : vea_profiles) {
127 if (vea_profile.profile >= media::H264PROFILE_MIN &&
128 vea_profile.profile <= media::H264PROFILE_MAX) {
129 return true;
132 return false;
135 int NumberOfEncodeThreads() {
136 // Do not saturate CPU utilization just for encoding. On a lower-end system
137 // with only 1 or 2 cores, use only one thread for encoding. On systems with
138 // more cores, allow half of the cores to be used for encoding.
139 return std::min(8, (base::SysInfo::NumberOfProcessors() + 1) / 2);
142 std::vector<CastRtpParams> SupportedAudioParams() {
143 // TODO(hclam): Fill in more codecs here.
144 return std::vector<CastRtpParams>(1, CastRtpParams(DefaultOpusPayload()));
147 std::vector<CastRtpParams> SupportedVideoParams() {
148 std::vector<CastRtpParams> supported_params;
150 // Prefer VP8 over H.264 for hardware encoder.
151 if (IsHardwareVP8EncodingSupported())
152 supported_params.push_back(CastRtpParams(DefaultVp8Payload()));
153 if (IsHardwareH264EncodingSupported())
154 supported_params.push_back(CastRtpParams(DefaultH264Payload()));
156 // Propose the default software VP8 encoder, if no hardware encoders are
157 // available.
158 if (supported_params.empty())
159 supported_params.push_back(CastRtpParams(DefaultVp8Payload()));
161 return supported_params;
164 bool ToAudioSenderConfig(const CastRtpParams& params,
165 AudioSenderConfig* config) {
166 config->ssrc = params.payload.ssrc;
167 config->receiver_ssrc = params.payload.feedback_ssrc;
168 if (config->ssrc == config->receiver_ssrc)
169 return false;
170 config->min_playout_delay =
171 base::TimeDelta::FromMilliseconds(
172 params.payload.min_latency_ms ?
173 params.payload.min_latency_ms :
174 params.payload.max_latency_ms);
175 config->max_playout_delay =
176 base::TimeDelta::FromMilliseconds(params.payload.max_latency_ms);
177 if (config->min_playout_delay <= base::TimeDelta())
178 return false;
179 if (config->min_playout_delay > config->max_playout_delay)
180 return false;
181 config->rtp_payload_type = params.payload.payload_type;
182 config->use_external_encoder = false;
183 config->frequency = params.payload.clock_rate;
184 if (config->frequency < 8000)
185 return false;
186 config->channels = params.payload.channels;
187 if (config->channels < 1)
188 return false;
189 config->bitrate = params.payload.max_bitrate * kBitrateMultiplier;
190 if (params.payload.codec_name == kCodecNameOpus)
191 config->codec = media::cast::CODEC_AUDIO_OPUS;
192 else
193 return false;
194 config->aes_key = params.payload.aes_key;
195 config->aes_iv_mask = params.payload.aes_iv_mask;
196 return true;
199 bool ToVideoSenderConfig(const CastRtpParams& params,
200 VideoSenderConfig* config) {
201 config->ssrc = params.payload.ssrc;
202 config->receiver_ssrc = params.payload.feedback_ssrc;
203 if (config->ssrc == config->receiver_ssrc)
204 return false;
205 config->min_playout_delay =
206 base::TimeDelta::FromMilliseconds(
207 params.payload.min_latency_ms ?
208 params.payload.min_latency_ms :
209 params.payload.max_latency_ms);
210 config->max_playout_delay =
211 base::TimeDelta::FromMilliseconds(params.payload.max_latency_ms);
212 if (config->min_playout_delay <= base::TimeDelta())
213 return false;
214 if (config->min_playout_delay > config->max_playout_delay)
215 return false;
216 config->rtp_payload_type = params.payload.payload_type;
217 config->min_bitrate = config->start_bitrate =
218 params.payload.min_bitrate * kBitrateMultiplier;
219 config->max_bitrate = params.payload.max_bitrate * kBitrateMultiplier;
220 if (config->min_bitrate > config->max_bitrate)
221 return false;
222 config->start_bitrate = config->min_bitrate;
223 config->max_frame_rate = static_cast<int>(
224 std::max(1.0, params.payload.max_frame_rate) + 0.5);
225 if (config->max_frame_rate > 120)
226 return false;
227 if (params.payload.codec_name == kCodecNameVp8) {
228 config->use_external_encoder = IsHardwareVP8EncodingSupported();
229 config->codec = media::cast::CODEC_VIDEO_VP8;
230 } else if (params.payload.codec_name == kCodecNameH264) {
231 config->use_external_encoder = IsHardwareH264EncodingSupported();
232 config->codec = media::cast::CODEC_VIDEO_H264;
233 } else {
234 return false;
236 if (!config->use_external_encoder) {
237 config->number_of_encode_threads = NumberOfEncodeThreads();
239 config->aes_key = params.payload.aes_key;
240 config->aes_iv_mask = params.payload.aes_iv_mask;
241 return true;
244 } // namespace
// This class receives MediaStreamTrack events and video frames from a
// MediaStreamTrack.
//
// Threading: Video frames are received on the IO thread and then
// forwarded to media::cast::VideoFrameInput through a static method.
// Member variables of this class are only accessed on the render thread.
class CastVideoSink : public base::SupportsWeakPtr<CastVideoSink>,
                      public content::MediaStreamVideoSink {
 public:
  // |track| provides data for this sink.
  // |error_callback| is called if video formats don't match.
  CastVideoSink(const blink::WebMediaStreamTrack& track,
                const CastRtpStream::ErrorCallback& error_callback)
      : track_(track),
        sink_added_(false),
        error_callback_(error_callback) {}

  ~CastVideoSink() override {
    // Only detach from the track if AddToTrack() was actually called.
    if (sink_added_)
      RemoveFromVideoTrack(this, track_);
  }

  // This static method is used to forward video frames to |frame_input|.
  // It is static so it can run on the IO thread without touching members of
  // this render-thread-owned object.
  static void OnVideoFrame(
      // These parameters are already bound when callback is created.
      const CastRtpStream::ErrorCallback& error_callback,
      const scoped_refptr<media::cast::VideoFrameInput> frame_input,
      // These parameters are passed for each frame.
      const scoped_refptr<media::VideoFrame>& frame,
      base::TimeTicks estimated_capture_time) {
    // Fall back to "now" when the capturer did not supply a capture time.
    const base::TimeTicks timestamp = estimated_capture_time.is_null()
        ? base::TimeTicks::Now()
        : estimated_capture_time;

    // Used by chrome/browser/extension/api/cast_streaming/performance_test.cc
    TRACE_EVENT_INSTANT2(
        "cast_perf_test", "MediaStreamVideoSink::OnVideoFrame",
        TRACE_EVENT_SCOPE_THREAD,
        "timestamp", timestamp.ToInternalValue(),
        "time_delta", frame->timestamp().ToInternalValue());
    frame_input->InsertRawVideoFrame(frame, timestamp);
  }

  // Attach this sink to a video track represented by |track_|.
  // Data received from the track will be submitted to |frame_input|.
  void AddToTrack(
      const scoped_refptr<media::cast::VideoFrameInput>& frame_input) {
    DCHECK(!sink_added_);
    sink_added_ = true;
    AddToVideoTrack(
        this,
        base::Bind(
            &CastVideoSink::OnVideoFrame,
            error_callback_,
            frame_input),
        track_);
  }

 private:
  blink::WebMediaStreamTrack track_;
  // True once AddToTrack() has attached this sink; gates detach in the dtor.
  bool sink_added_;
  // NOTE(review): bound into OnVideoFrame() but not used in its visible body;
  // confirm whether it is still needed in the bound signature.
  CastRtpStream::ErrorCallback error_callback_;

  DISALLOW_COPY_AND_ASSIGN(CastVideoSink);
};
// Receives audio data from a MediaStreamTrack. Data is submitted to
// media::cast::FrameInput.
//
// Threading: Audio frames are received on the real-time audio thread.
// Note that RemoveFromAudioTrack() is synchronous and we have a
// guarantee that there will be no more audio data after calling it.
class CastAudioSink : public base::SupportsWeakPtr<CastAudioSink>,
                      public content::MediaStreamAudioSink,
                      public media::AudioConverter::InputCallback {
 public:
  // |track| provides data for this sink.
  // |output_channels| / |output_sample_rate| describe the format incoming
  // audio is converted to before being submitted.
  CastAudioSink(const blink::WebMediaStreamTrack& track,
                int output_channels,
                int output_sample_rate)
      : track_(track),
        output_channels_(output_channels),
        output_sample_rate_(output_sample_rate),
        current_input_bus_(nullptr),
        sample_frames_in_(0),
        sample_frames_out_(0) {}

  ~CastAudioSink() override {
    // |frame_input_| is only set by AddToTrack(), so it doubles as the
    // "was added to the track" flag.
    if (frame_input_.get())
      RemoveFromAudioTrack(this, track_);
  }

  // Add this sink to the track. Data received from the track will be
  // submitted to |frame_input|.
  void AddToTrack(
      const scoped_refptr<media::cast::AudioFrameInput>& frame_input) {
    DCHECK(frame_input.get());
    DCHECK(!frame_input_.get());
    // This member is written here and then accessed on the IO thread.
    // We will not get data until AddToAudioTrack is called so it is
    // safe to access this member now.
    frame_input_ = frame_input;
    AddToAudioTrack(this, track_);
  }

 protected:
  // Called on real-time audio thread.
  void OnData(const media::AudioBus& input_bus,
              base::TimeTicks estimated_capture_time) override {
    DCHECK(input_params_.IsValid());
    DCHECK_EQ(input_bus.channels(), input_params_.channels());
    DCHECK_EQ(input_bus.frames(), input_params_.frames_per_buffer());
    DCHECK(!estimated_capture_time.is_null());
    DCHECK(converter_.get());

    // Determine the duration of the audio signal enqueued within |converter_|.
    const base::TimeDelta signal_duration_already_buffered =
        (sample_frames_in_ * base::TimeDelta::FromSeconds(1) /
             input_params_.sample_rate()) -
        (sample_frames_out_ * base::TimeDelta::FromSeconds(1) /
             output_sample_rate_);
    DVLOG(2) << "Audio reference time adjustment: -("
             << signal_duration_already_buffered.InMicroseconds() << " us)";
    // Back-date the reference time to the capture time of the first sample
    // still buffered in the converter.
    const base::TimeTicks capture_time_of_first_converted_sample =
        estimated_capture_time - signal_duration_already_buffered;

    // Convert the entire input signal. AudioConverter is efficient in that no
    // additional copying or conversion will occur if the input signal is in
    // the same format as the output. Note that, while the number of sample
    // frames provided as input is always the same, the chunk size (and the
    // size of the |audio_bus| here) can be variable. This is not an issue
    // since media::cast::AudioFrameInput can handle variable-sized
    // AudioBuses.
    scoped_ptr<media::AudioBus> audio_bus =
        media::AudioBus::Create(output_channels_, converter_->ChunkSize());
    // AudioConverter will call ProvideInput() to fetch from
    // |current_input_bus_|.
    current_input_bus_ = &input_bus;
    converter_->Convert(audio_bus.get());
    DCHECK(!current_input_bus_);  // ProvideInput() called exactly once?

    sample_frames_in_ += input_params_.frames_per_buffer();
    sample_frames_out_ += audio_bus->frames();

    frame_input_->InsertAudio(audio_bus.Pass(),
                              capture_time_of_first_converted_sample);
  }

  // Called on real-time audio thread.
  void OnSetFormat(const media::AudioParameters& params) override {
    if (input_params_.Equals(params))
      return;  // No change; keep the existing converter and bookkeeping.
    input_params_ = params;

    DVLOG(1) << "Setting up audio resampling: {"
             << input_params_.channels() << " channels, "
             << input_params_.sample_rate() << " Hz} --> {"
             << output_channels_ << " channels, "
             << output_sample_rate_ << " Hz}";
    // The output buffer size is scaled so it spans roughly the same duration
    // as one input buffer at the new sample rate; 32 is bits-per-sample.
    const media::AudioParameters output_params(
        media::AudioParameters::AUDIO_PCM_LOW_LATENCY,
        media::GuessChannelLayout(output_channels_),
        output_sample_rate_, 32,
        output_sample_rate_ * input_params_.frames_per_buffer() /
            input_params_.sample_rate());
    converter_.reset(
        new media::AudioConverter(input_params_, output_params, false));
    converter_->AddInput(this);
    // Reset resampling bookkeeping for the new format.
    sample_frames_in_ = 0;
    sample_frames_out_ = 0;
  }

  // Called on real-time audio thread.
  double ProvideInput(media::AudioBus* audio_bus,
                      base::TimeDelta buffer_delay) override {
    DCHECK(current_input_bus_);
    current_input_bus_->CopyTo(audio_bus);
    current_input_bus_ = nullptr;  // Mark consumed; checked in OnData().
    return 1.0;  // Full volume, no attenuation.
  }

 private:
  const blink::WebMediaStreamTrack track_;
  const int output_channels_;
  const int output_sample_rate_;

  // This must be set before the real-time audio thread starts calling
  // OnData(), and remain unchanged until after the thread will stop calling
  // OnData().
  scoped_refptr<media::cast::AudioFrameInput> frame_input_;

  // These members are accessed on the real-time audio thread only.
  media::AudioParameters input_params_;
  scoped_ptr<media::AudioConverter> converter_;
  // Non-owning pointer to the bus being converted; valid only during
  // OnData()'s call into Convert().
  const media::AudioBus* current_input_bus_;
  int64 sample_frames_in_;   // Total frames received from the track.
  int64 sample_frames_out_;  // Total frames emitted to |frame_input_|.

  DISALLOW_COPY_AND_ASSIGN(CastAudioSink);
};
CastRtpParams::CastRtpParams(const CastRtpPayloadParams& payload_params)
    : payload(payload_params) {}

CastCodecSpecificParams::CastCodecSpecificParams() {}

CastCodecSpecificParams::~CastCodecSpecificParams() {}

// Zero-initialize every numeric field so unset parameters have defined
// values (0 / 0.0) before validation in To*SenderConfig().
CastRtpPayloadParams::CastRtpPayloadParams()
    : payload_type(0),
      max_latency_ms(0),
      min_latency_ms(0),
      ssrc(0),
      feedback_ssrc(0),
      clock_rate(0),
      max_bitrate(0),
      min_bitrate(0),
      channels(0),
      max_frame_rate(0.0) {
}

CastRtpPayloadParams::~CastRtpPayloadParams() {}

CastRtpParams::CastRtpParams() {}

CastRtpParams::~CastRtpParams() {}
CastRtpStream::CastRtpStream(const blink::WebMediaStreamTrack& track,
                             const scoped_refptr<CastSession>& session)
    : track_(track), cast_session_(session), weak_factory_(this) {}

CastRtpStream::~CastRtpStream() {
  // Ensure the sinks are detached and the stop callback (if any) fires.
  Stop();
}
478 std::vector<CastRtpParams> CastRtpStream::GetSupportedParams() {
479 if (IsAudio())
480 return SupportedAudioParams();
481 else
482 return SupportedVideoParams();
// Returns the currently-stored RTP parameters for this stream.
CastRtpParams CastRtpStream::GetParams() { return params_; }
// Starts streaming (audio or video, chosen by the track type) with |params|.
// |start_callback| runs on success. |error_callback| is invoked on invalid
// parameters or later session errors, after which Stop() runs
// |stop_callback|.
// NOTE(review): |params| is never assigned to |params_| in the visible code,
// so GetParams() would return default-constructed parameters — confirm
// whether the assignment was dropped or is intentionally absent.
void CastRtpStream::Start(const CastRtpParams& params,
                          const base::Closure& start_callback,
                          const base::Closure& stop_callback,
                          const ErrorCallback& error_callback) {
  DCHECK(!start_callback.is_null());
  DCHECK(!stop_callback.is_null());
  DCHECK(!error_callback.is_null());

  DVLOG(1) << "CastRtpStream::Start = " << (IsAudio() ? "audio" : "video");
  stop_callback_ = stop_callback;
  error_callback_ = error_callback;

  if (IsAudio()) {
    AudioSenderConfig config;
    if (!ToAudioSenderConfig(params, &config)) {
      DidEncounterError("Invalid parameters for audio.");
      return;
    }

    // In case of error we have to go through DidEncounterError() to stop
    // the streaming after reporting the error.
    audio_sink_.reset(new CastAudioSink(
        track_,
        params.payload.channels,
        params.payload.clock_rate));
    cast_session_->StartAudio(
        config,
        base::Bind(&CastAudioSink::AddToTrack, audio_sink_->AsWeakPtr()),
        base::Bind(&CastRtpStream::DidEncounterError,
                   weak_factory_.GetWeakPtr()));
    start_callback.Run();
  } else {
    VideoSenderConfig config;
    if (!ToVideoSenderConfig(params, &config)) {
      DidEncounterError("Invalid parameters for video.");
      return;
    }
    // See the code for audio above for explanation of callbacks.
    // BindToCurrentLoop ensures the sink's error report is delivered on this
    // (render) thread.
    video_sink_.reset(new CastVideoSink(
        track_,
        media::BindToCurrentLoop(base::Bind(&CastRtpStream::DidEncounterError,
                                            weak_factory_.GetWeakPtr()))));
    cast_session_->StartVideo(
        config,
        base::Bind(&CastVideoSink::AddToTrack, video_sink_->AsWeakPtr()),
        base::Bind(&CastRtpStream::DidEncounterError,
                   weak_factory_.GetWeakPtr()));
    start_callback.Run();
  }
}
// Stops streaming and runs |stop_callback_|. Idempotent: subsequent calls
// are no-ops because |stop_callback_| is consumed on the first call.
void CastRtpStream::Stop() {
  DVLOG(1) << "CastRtpStream::Stop = " << (IsAudio() ? "audio" : "video");
  if (stop_callback_.is_null())
    return;  // Already stopped.
  // Invalidate weak pointers first so no further error callbacks can arrive
  // while or after the sinks are torn down.
  weak_factory_.InvalidateWeakPtrs();
  error_callback_.Reset();
  audio_sink_.reset();
  video_sink_.reset();
  // ResetAndReturn clears |stop_callback_| before running it, so a
  // re-entrant Stop() from the callback is a no-op.
  base::ResetAndReturn(&stop_callback_).Run();
}
// Enables or disables Cast session event logging for this stream's media
// type.
void CastRtpStream::ToggleLogging(bool enable) {
  DVLOG(1) << "CastRtpStream::ToggleLogging(" << enable << ") = "
           << (IsAudio() ? "audio" : "video");
  cast_session_->ToggleLogging(IsAudio(), enable);
}
// Fetches (and resets) the accumulated raw event logs for this stream's
// media type; the serialized result is delivered to |callback|.
void CastRtpStream::GetRawEvents(
    const base::Callback<void(scoped_ptr<base::BinaryValue>)>& callback,
    const std::string& extra_data) {
  DVLOG(1) << "CastRtpStream::GetRawEvents = "
           << (IsAudio() ? "audio" : "video");
  cast_session_->GetEventLogsAndReset(IsAudio(), extra_data, callback);
}
// Fetches (and resets) streaming statistics for this stream's media type;
// the result dictionary is delivered to |callback|.
void CastRtpStream::GetStats(
    const base::Callback<void(scoped_ptr<base::DictionaryValue>)>& callback) {
  DVLOG(1) << "CastRtpStream::GetStats = "
           << (IsAudio() ? "audio" : "video");
  cast_session_->GetStatsAndReset(IsAudio(), callback);
}
570 bool CastRtpStream::IsAudio() const {
571 return track_.source().type() == blink::WebMediaStreamSource::TypeAudio;
// Reports |message| through |error_callback_| and schedules Stop() on the
// current task runner. Must be called on the render thread (DCHECK'd).
// NOTE(review): base::ThreadTaskRunnerHandle is used below but its header is
// not in the visible include list — confirm the include exists (IWYU).
void CastRtpStream::DidEncounterError(const std::string& message) {
  DCHECK(content::RenderThread::Get());
  DVLOG(1) << "CastRtpStream::DidEncounterError(" << message << ") = "
           << (IsAudio() ? "audio" : "video");
  // Save the WeakPtr first because the error callback might delete this
  // object.
  base::WeakPtr<CastRtpStream> ptr = weak_factory_.GetWeakPtr();
  error_callback_.Run(message);
  // Stop() is posted rather than called directly so the error callback
  // finishes before teardown; the WeakPtr makes the task a no-op if |this|
  // was deleted in the meantime.
  base::ThreadTaskRunnerHandle::Get()->PostTask(
      FROM_HERE,
      base::Bind(&CastRtpStream::Stop, ptr));
}