Supervised user import: Listen for profile creation/deletion
[chromium-blink-merge.git] / chrome / renderer / media / cast_rtp_stream.cc
blobbbe675a076e57d4ea628f90d325df0a6d99e4117
1 // Copyright 2013 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #include "chrome/renderer/media/cast_rtp_stream.h"
7 #include "base/bind.h"
8 #include "base/command_line.h"
9 #include "base/logging.h"
10 #include "base/memory/weak_ptr.h"
11 #include "base/strings/stringprintf.h"
12 #include "base/sys_info.h"
13 #include "base/trace_event/trace_event.h"
14 #include "chrome/common/chrome_switches.h"
15 #include "chrome/renderer/media/cast_session.h"
16 #include "chrome/renderer/media/cast_udp_transport.h"
17 #include "content/public/renderer/media_stream_audio_sink.h"
18 #include "content/public/renderer/media_stream_video_sink.h"
19 #include "content/public/renderer/render_thread.h"
20 #include "content/public/renderer/video_encode_accelerator.h"
21 #include "media/audio/audio_parameters.h"
22 #include "media/base/audio_bus.h"
23 #include "media/base/audio_converter.h"
24 #include "media/base/audio_fifo.h"
25 #include "media/base/bind_to_current_loop.h"
26 #include "media/base/video_frame.h"
27 #include "media/cast/cast_config.h"
28 #include "media/cast/cast_defines.h"
29 #include "media/cast/cast_sender.h"
30 #include "media/cast/net/cast_transport_config.h"
31 #include "third_party/WebKit/public/platform/WebMediaStreamSource.h"
32 #include "ui/gfx/geometry/size.h"
34 using media::cast::AudioSenderConfig;
35 using media::cast::VideoSenderConfig;
37 namespace {
// Codec names as they appear in CastRtpPayloadParams::codec_name.
const char kCodecNameOpus[] = "OPUS";
const char kCodecNameVp8[] = "VP8";
const char kCodecNameH264[] = "H264";

// To convert from kilobits per second to bits per second.
const int kBitrateMultiplier = 1000;
46 CastRtpPayloadParams DefaultOpusPayload() {
47 CastRtpPayloadParams payload;
48 payload.payload_type = 127;
49 payload.max_latency_ms = media::cast::kDefaultRtpMaxDelayMs;
50 payload.ssrc = 1;
51 payload.feedback_ssrc = 2;
52 payload.clock_rate = media::cast::kDefaultAudioSamplingRate;
53 // The value is 0 which means VBR.
54 payload.min_bitrate = payload.max_bitrate =
55 media::cast::kDefaultAudioEncoderBitrate;
56 payload.channels = 2;
57 payload.max_frame_rate = 100; // 10 ms audio frames
58 payload.codec_name = kCodecNameOpus;
59 return payload;
62 CastRtpPayloadParams DefaultVp8Payload() {
63 CastRtpPayloadParams payload;
64 payload.payload_type = 96;
65 payload.max_latency_ms = media::cast::kDefaultRtpMaxDelayMs;
66 payload.ssrc = 11;
67 payload.feedback_ssrc = 12;
68 payload.clock_rate = media::cast::kVideoFrequency;
69 payload.max_bitrate = 2000;
70 payload.min_bitrate = 50;
71 payload.channels = 1;
72 payload.max_frame_rate = media::cast::kDefaultMaxFrameRate;
73 payload.codec_name = kCodecNameVp8;
74 return payload;
77 CastRtpPayloadParams DefaultH264Payload() {
78 CastRtpPayloadParams payload;
79 // TODO(hshi): set different ssrc/rtpPayloadType values for H264 and VP8
80 // once b/13696137 is fixed.
81 payload.payload_type = 96;
82 payload.max_latency_ms = media::cast::kDefaultRtpMaxDelayMs;
83 payload.ssrc = 11;
84 payload.feedback_ssrc = 12;
85 payload.clock_rate = media::cast::kVideoFrequency;
86 payload.max_bitrate = 2000;
87 payload.min_bitrate = 50;
88 payload.channels = 1;
89 payload.max_frame_rate = media::cast::kDefaultMaxFrameRate;
90 payload.codec_name = kCodecNameH264;
91 return payload;
94 bool IsHardwareVP8EncodingSupported() {
95 const base::CommandLine* cmd_line = base::CommandLine::ForCurrentProcess();
96 if (cmd_line->HasSwitch(switches::kDisableCastStreamingHWEncoding)) {
97 DVLOG(1) << "Disabled hardware VP8 support for Cast Streaming.";
98 return false;
101 // Query for hardware VP8 encoder support.
102 std::vector<media::VideoEncodeAccelerator::SupportedProfile> vea_profiles =
103 content::GetSupportedVideoEncodeAcceleratorProfiles();
104 for (size_t i = 0; i < vea_profiles.size(); ++i) {
105 if (vea_profiles[i].profile >= media::VP8PROFILE_MIN &&
106 vea_profiles[i].profile <= media::VP8PROFILE_MAX) {
107 return true;
110 return false;
113 bool IsHardwareH264EncodingSupported() {
114 const base::CommandLine* cmd_line = base::CommandLine::ForCurrentProcess();
115 if (cmd_line->HasSwitch(switches::kDisableCastStreamingHWEncoding)) {
116 DVLOG(1) << "Disabled hardware h264 support for Cast Streaming.";
117 return false;
120 // Query for hardware H.264 encoder support.
121 std::vector<media::VideoEncodeAccelerator::SupportedProfile> vea_profiles =
122 content::GetSupportedVideoEncodeAcceleratorProfiles();
123 for (size_t i = 0; i < vea_profiles.size(); ++i) {
124 if (vea_profiles[i].profile >= media::H264PROFILE_MIN &&
125 vea_profiles[i].profile <= media::H264PROFILE_MAX) {
126 return true;
129 return false;
132 int NumberOfEncodeThreads() {
133 // We want to give CPU cycles for capturing and not to saturate the system
134 // just for encoding. So on a lower end system with only 1 or 2 cores we
135 // use only one thread for encoding.
136 if (base::SysInfo::NumberOfProcessors() <= 2)
137 return 1;
139 // On higher end we want to use 2 threads for encoding to reduce latency.
140 // In theory a physical CPU core has maximum 2 hyperthreads. Having 3 or
141 // more logical processors means the system has at least 2 physical cores.
142 return 2;
145 std::vector<CastRtpParams> SupportedAudioParams() {
146 // TODO(hclam): Fill in more codecs here.
147 std::vector<CastRtpParams> supported_params;
148 supported_params.push_back(CastRtpParams(DefaultOpusPayload()));
149 return supported_params;
152 std::vector<CastRtpParams> SupportedVideoParams() {
153 std::vector<CastRtpParams> supported_params;
155 // Prefer VP8 over H.264 for hardware encoder.
156 if (IsHardwareVP8EncodingSupported())
157 supported_params.push_back(CastRtpParams(DefaultVp8Payload()));
158 if (IsHardwareH264EncodingSupported())
159 supported_params.push_back(CastRtpParams(DefaultH264Payload()));
161 // Propose the default software VP8 encoder, if no hardware encoders are
162 // available.
163 if (supported_params.empty())
164 supported_params.push_back(CastRtpParams(DefaultVp8Payload()));
166 return supported_params;
// Translates |params| into an AudioSenderConfig, validating each field as it
// goes. Returns false on the first invalid value; |config| may be partially
// filled in that case.
bool ToAudioSenderConfig(const CastRtpParams& params,
                         AudioSenderConfig* config) {
  config->ssrc = params.payload.ssrc;
  config->receiver_ssrc = params.payload.feedback_ssrc;
  // Sender and receiver SSRCs must differ.
  if (config->ssrc == config->receiver_ssrc)
    return false;
  // When no explicit minimum latency is given, use a fixed playout delay
  // equal to the maximum.
  config->min_playout_delay =
      base::TimeDelta::FromMilliseconds(
          params.payload.min_latency_ms ?
          params.payload.min_latency_ms :
          params.payload.max_latency_ms);
  config->max_playout_delay =
      base::TimeDelta::FromMilliseconds(params.payload.max_latency_ms);
  if (config->min_playout_delay <= base::TimeDelta())
    return false;
  if (config->min_playout_delay > config->max_playout_delay)
    return false;
  config->rtp_payload_type = params.payload.payload_type;
  config->use_external_encoder = false;
  // Reject sampling rates below 8 kHz.
  config->frequency = params.payload.clock_rate;
  if (config->frequency < 8000)
    return false;
  config->channels = params.payload.channels;
  if (config->channels < 1)
    return false;
  // |max_bitrate| is expressed in kbps by the caller.
  config->bitrate = params.payload.max_bitrate * kBitrateMultiplier;
  // OPUS is the only supported audio codec.
  if (params.payload.codec_name == kCodecNameOpus)
    config->codec = media::cast::CODEC_AUDIO_OPUS;
  else
    return false;
  config->aes_key = params.payload.aes_key;
  config->aes_iv_mask = params.payload.aes_iv_mask;
  return true;
}
204 bool ToVideoSenderConfig(const CastRtpParams& params,
205 VideoSenderConfig* config) {
206 config->ssrc = params.payload.ssrc;
207 config->receiver_ssrc = params.payload.feedback_ssrc;
208 if (config->ssrc == config->receiver_ssrc)
209 return false;
210 config->min_playout_delay =
211 base::TimeDelta::FromMilliseconds(
212 params.payload.min_latency_ms ?
213 params.payload.min_latency_ms :
214 params.payload.max_latency_ms);
215 config->max_playout_delay =
216 base::TimeDelta::FromMilliseconds(params.payload.max_latency_ms);
217 if (config->min_playout_delay <= base::TimeDelta())
218 return false;
219 if (config->min_playout_delay > config->max_playout_delay)
220 return false;
221 config->rtp_payload_type = params.payload.payload_type;
222 config->min_bitrate = config->start_bitrate =
223 params.payload.min_bitrate * kBitrateMultiplier;
224 config->max_bitrate = params.payload.max_bitrate * kBitrateMultiplier;
225 if (config->min_bitrate > config->max_bitrate)
226 return false;
227 config->start_bitrate = config->min_bitrate;
228 config->max_frame_rate = static_cast<int>(
229 std::max(1.0, params.payload.max_frame_rate) + 0.5);
230 if (config->max_frame_rate > 120)
231 return false;
232 if (params.payload.codec_name == kCodecNameVp8) {
233 config->use_external_encoder = IsHardwareVP8EncodingSupported();
234 config->codec = media::cast::CODEC_VIDEO_VP8;
235 } else if (params.payload.codec_name == kCodecNameH264) {
236 config->use_external_encoder = IsHardwareH264EncodingSupported();
237 config->codec = media::cast::CODEC_VIDEO_H264;
238 } else {
239 return false;
241 if (!config->use_external_encoder) {
242 config->number_of_encode_threads = NumberOfEncodeThreads();
244 config->aes_key = params.payload.aes_key;
245 config->aes_iv_mask = params.payload.aes_iv_mask;
246 return true;
249 } // namespace
// This class receives MediaStreamTrack events and video frames from a
// MediaStreamTrack.
//
// Threading: Video frames are received on the IO thread and then
// forwarded to media::cast::VideoFrameInput through a static method.
// Member variables of this class are only accessed on the render thread.
class CastVideoSink : public base::SupportsWeakPtr<CastVideoSink>,
                      public content::MediaStreamVideoSink {
 public:
  // |track| provides data for this sink.
  // |error_callback| is called if video formats don't match.
  CastVideoSink(const blink::WebMediaStreamTrack& track,
                const CastRtpStream::ErrorCallback& error_callback)
      : track_(track),
        sink_added_(false),
        error_callback_(error_callback) {}

  ~CastVideoSink() override {
    // Only detach if AddToTrack() actually registered this sink.
    if (sink_added_)
      RemoveFromVideoTrack(this, track_);
  }

  // This static method is used to forward video frames to |frame_input|.
  // NOTE(review): |error_callback| is bound into the callback but never used
  // inside this method — confirm whether it is still needed.
  static void OnVideoFrame(
      // These parameters are already bound when callback is created.
      const CastRtpStream::ErrorCallback& error_callback,
      const scoped_refptr<media::cast::VideoFrameInput> frame_input,
      // These parameters are passed for each frame.
      const scoped_refptr<media::VideoFrame>& frame,
      const base::TimeTicks& estimated_capture_time) {
    // Fall back to "now" when the capturer supplied no timestamp.
    base::TimeTicks timestamp;
    if (estimated_capture_time.is_null())
      timestamp = base::TimeTicks::Now();
    else
      timestamp = estimated_capture_time;

    // Used by chrome/browser/extension/api/cast_streaming/performance_test.cc
    TRACE_EVENT_INSTANT2(
        "cast_perf_test", "MediaStreamVideoSink::OnVideoFrame",
        TRACE_EVENT_SCOPE_THREAD,
        "timestamp", timestamp.ToInternalValue(),
        "time_delta", frame->timestamp().ToInternalValue());
    frame_input->InsertRawVideoFrame(frame, timestamp);
  }

  // Attach this sink to a video track represented by |track_|.
  // Data received from the track will be submitted to |frame_input|.
  void AddToTrack(
      const scoped_refptr<media::cast::VideoFrameInput>& frame_input) {
    DCHECK(!sink_added_);
    sink_added_ = true;
    AddToVideoTrack(
        this,
        base::Bind(
            &CastVideoSink::OnVideoFrame,
            error_callback_,
            frame_input),
        track_);
  }

 private:
  blink::WebMediaStreamTrack track_;
  // True once AddToTrack() has registered this sink with |track_|.
  bool sink_added_;
  CastRtpStream::ErrorCallback error_callback_;

  DISALLOW_COPY_AND_ASSIGN(CastVideoSink);
};
// Receives audio data from a MediaStreamTrack. Data is submitted to
// media::cast::FrameInput.
//
// Threading: Audio frames are received on the real-time audio thread.
// Note that RemoveFromAudioTrack() is synchronous and we have a
// guarantee that there will be no more audio data after calling it.
class CastAudioSink : public base::SupportsWeakPtr<CastAudioSink>,
                      public content::MediaStreamAudioSink,
                      public media::AudioConverter::InputCallback {
 public:
  // |track| provides data for this sink.
  // |output_channels| / |output_sample_rate| describe the format expected by
  // the Cast audio encoder; input audio is resampled/remixed to match.
  CastAudioSink(const blink::WebMediaStreamTrack& track,
                int output_channels,
                int output_sample_rate)
      : track_(track),
        output_channels_(output_channels),
        output_sample_rate_(output_sample_rate),
        current_input_bus_(nullptr),
        sample_frames_in_(0),
        sample_frames_out_(0) {}

  ~CastAudioSink() override {
    // |frame_input_| is only set by AddToTrack(), so it doubles as the
    // "was this sink ever attached?" flag.
    if (frame_input_.get())
      RemoveFromAudioTrack(this, track_);
  }

  // Add this sink to the track. Data received from the track will be
  // submitted to |frame_input|.
  void AddToTrack(
      const scoped_refptr<media::cast::AudioFrameInput>& frame_input) {
    DCHECK(frame_input.get());
    DCHECK(!frame_input_.get());
    // This member is written here and then accessed on the IO thread
    // We will not get data until AddToAudioTrack is called so it is
    // safe to access this member now.
    frame_input_ = frame_input;
    AddToAudioTrack(this, track_);
  }

 protected:
  // Called on real-time audio thread.
  void OnData(const media::AudioBus& input_bus,
              base::TimeTicks estimated_capture_time) override {
    DCHECK(input_params_.IsValid());
    DCHECK_EQ(input_bus.channels(), input_params_.channels());
    DCHECK_EQ(input_bus.frames(), input_params_.frames_per_buffer());
    DCHECK(!estimated_capture_time.is_null());
    DCHECK(converter_.get());

    // Determine the duration of the audio signal enqueued within |converter_|.
    const base::TimeDelta signal_duration_already_buffered =
        (sample_frames_in_ * base::TimeDelta::FromSeconds(1) /
             input_params_.sample_rate()) -
        (sample_frames_out_ * base::TimeDelta::FromSeconds(1) /
             output_sample_rate_);
    DVLOG(2) << "Audio reference time adjustment: -("
             << signal_duration_already_buffered.InMicroseconds() << " us)";
    const base::TimeTicks capture_time_of_first_converted_sample =
        estimated_capture_time - signal_duration_already_buffered;

    // Convert the entire input signal. AudioConverter is efficient in that no
    // additional copying or conversion will occur if the input signal is in the
    // same format as the output. Note that, while the number of sample frames
    // provided as input is always the same, the chunk size (and the size of the
    // |audio_bus| here) can be variable. This is not an issue since
    // media::cast::AudioFrameInput can handle variable-sized AudioBuses.
    scoped_ptr<media::AudioBus> audio_bus =
        media::AudioBus::Create(output_channels_, converter_->ChunkSize());
    // AudioConverter will call ProvideInput() to fetch from |current_data_|.
    current_input_bus_ = &input_bus;
    converter_->Convert(audio_bus.get());
    DCHECK(!current_input_bus_);  // ProvideInput() called exactly once?

    sample_frames_in_ += input_params_.frames_per_buffer();
    sample_frames_out_ += audio_bus->frames();

    frame_input_->InsertAudio(audio_bus.Pass(),
                              capture_time_of_first_converted_sample);
  }

  // Called on real-time audio thread.
  void OnSetFormat(const media::AudioParameters& params) override {
    // Rebuild the converter only when the input format actually changes.
    if (input_params_.Equals(params))
      return;
    input_params_ = params;

    DVLOG(1) << "Setting up audio resampling: {"
             << input_params_.channels() << " channels, "
             << input_params_.sample_rate() << " Hz} --> {"
             << output_channels_ << " channels, "
             << output_sample_rate_ << " Hz}";
    const media::AudioParameters output_params(
        media::AudioParameters::AUDIO_PCM_LOW_LATENCY,
        media::GuessChannelLayout(output_channels_),
        output_sample_rate_, 32,
        output_sample_rate_ * input_params_.frames_per_buffer() /
            input_params_.sample_rate());
    converter_.reset(
        new media::AudioConverter(input_params_, output_params, false));
    converter_->AddInput(this);
    // Reset the resampling bookkeeping used for capture-time adjustment.
    sample_frames_in_ = 0;
    sample_frames_out_ = 0;
  }

  // Called on real-time audio thread.
  double ProvideInput(media::AudioBus* audio_bus,
                      base::TimeDelta buffer_delay) override {
    DCHECK(current_input_bus_);
    current_input_bus_->CopyTo(audio_bus);
    // Clear the pointer so OnData() can verify it was consumed exactly once.
    current_input_bus_ = nullptr;
    return 1.0;
  }

 private:
  const blink::WebMediaStreamTrack track_;
  const int output_channels_;
  const int output_sample_rate_;

  // This must be set before the real-time audio thread starts calling OnData(),
  // and remain unchanged until after the thread will stop calling OnData().
  scoped_refptr<media::cast::AudioFrameInput> frame_input_;

  // These members are accessed on the real-time audio time only.
  media::AudioParameters input_params_;
  scoped_ptr<media::AudioConverter> converter_;
  const media::AudioBus* current_input_bus_;
  int64 sample_frames_in_;
  int64 sample_frames_out_;

  DISALLOW_COPY_AND_ASSIGN(CastAudioSink);
};
CastRtpParams::CastRtpParams(const CastRtpPayloadParams& payload_params)
    : payload(payload_params) {}

CastCodecSpecificParams::CastCodecSpecificParams() {}

CastCodecSpecificParams::~CastCodecSpecificParams() {}

// Zero-initializes every numeric field; strings default to empty.
CastRtpPayloadParams::CastRtpPayloadParams()
    : payload_type(0),
      max_latency_ms(0),
      min_latency_ms(0),
      ssrc(0),
      feedback_ssrc(0),
      clock_rate(0),
      max_bitrate(0),
      min_bitrate(0),
      channels(0),
      max_frame_rate(0.0) {
}

CastRtpPayloadParams::~CastRtpPayloadParams() {}

CastRtpParams::CastRtpParams() {}

CastRtpParams::~CastRtpParams() {}
// |track| is the media stream track this RTP stream sends; |session| owns the
// underlying Cast sender.
CastRtpStream::CastRtpStream(const blink::WebMediaStreamTrack& track,
                             const scoped_refptr<CastSession>& session)
    : track_(track), cast_session_(session), weak_factory_(this) {}

CastRtpStream::~CastRtpStream() {}
483 std::vector<CastRtpParams> CastRtpStream::GetSupportedParams() {
484 if (IsAudio())
485 return SupportedAudioParams();
486 else
487 return SupportedVideoParams();
490 CastRtpParams CastRtpStream::GetParams() { return params_; }
// Validates |params|, creates the appropriate sink (audio or video), and asks
// the CastSession to start sending. |start_callback| runs synchronously on
// success; errors are reported through DidEncounterError(), which also stops
// the stream.
// NOTE(review): |params| is not stored into |params_| here, so GetParams()
// appears to return default-constructed parameters — confirm against the
// header/callers.
void CastRtpStream::Start(const CastRtpParams& params,
                          const base::Closure& start_callback,
                          const base::Closure& stop_callback,
                          const ErrorCallback& error_callback) {
  DVLOG(1) << "CastRtpStream::Start = " << (IsAudio() ? "audio" : "video");
  stop_callback_ = stop_callback;
  error_callback_ = error_callback;

  if (IsAudio()) {
    AudioSenderConfig config;
    if (!ToAudioSenderConfig(params, &config)) {
      DidEncounterError("Invalid parameters for audio.");
      return;
    }

    // In case of error we have to go through DidEncounterError() to stop
    // the streaming after reporting the error.
    audio_sink_.reset(new CastAudioSink(
        track_,
        params.payload.channels,
        params.payload.clock_rate));
    cast_session_->StartAudio(
        config,
        base::Bind(&CastAudioSink::AddToTrack, audio_sink_->AsWeakPtr()),
        base::Bind(&CastRtpStream::DidEncounterError,
                   weak_factory_.GetWeakPtr()));
    start_callback.Run();
  } else {
    VideoSenderConfig config;
    if (!ToVideoSenderConfig(params, &config)) {
      DidEncounterError("Invalid parameters for video.");
      return;
    }
    // See the code for audio above for explanation of callbacks.
    video_sink_.reset(new CastVideoSink(
        track_,
        media::BindToCurrentLoop(base::Bind(&CastRtpStream::DidEncounterError,
                                            weak_factory_.GetWeakPtr()))));
    cast_session_->StartVideo(
        config,
        base::Bind(&CastVideoSink::AddToTrack, video_sink_->AsWeakPtr()),
        base::Bind(&CastRtpStream::DidEncounterError,
                   weak_factory_.GetWeakPtr()));
    start_callback.Run();
  }
}
539 void CastRtpStream::Stop() {
540 DVLOG(1) << "CastRtpStream::Stop = " << (IsAudio() ? "audio" : "video");
541 audio_sink_.reset();
542 video_sink_.reset();
543 if (!stop_callback_.is_null())
544 stop_callback_.Run();
547 void CastRtpStream::ToggleLogging(bool enable) {
548 DVLOG(1) << "CastRtpStream::ToggleLogging(" << enable << ") = "
549 << (IsAudio() ? "audio" : "video");
550 cast_session_->ToggleLogging(IsAudio(), enable);
553 void CastRtpStream::GetRawEvents(
554 const base::Callback<void(scoped_ptr<base::BinaryValue>)>& callback,
555 const std::string& extra_data) {
556 DVLOG(1) << "CastRtpStream::GetRawEvents = "
557 << (IsAudio() ? "audio" : "video");
558 cast_session_->GetEventLogsAndReset(IsAudio(), extra_data, callback);
561 void CastRtpStream::GetStats(
562 const base::Callback<void(scoped_ptr<base::DictionaryValue>)>& callback) {
563 DVLOG(1) << "CastRtpStream::GetStats = "
564 << (IsAudio() ? "audio" : "video");
565 cast_session_->GetStatsAndReset(IsAudio(), callback);
568 bool CastRtpStream::IsAudio() const {
569 return track_.source().type() == blink::WebMediaStreamSource::TypeAudio;
// Reports |message| through the registered error callback and then stops the
// stream asynchronously. The statement order below is lifetime-critical.
void CastRtpStream::DidEncounterError(const std::string& message) {
  DVLOG(1) << "CastRtpStream::DidEncounterError(" << message << ") = "
           << (IsAudio() ? "audio" : "video");
  // Save the WeakPtr first because the error callback might delete this object.
  base::WeakPtr<CastRtpStream> ptr = weak_factory_.GetWeakPtr();
  error_callback_.Run(message);
  // Stop() is posted rather than called directly; the weak pointer makes the
  // posted task a no-op if |this| was deleted by the callback above.
  content::RenderThread::Get()->GetTaskRunner()->PostTask(
      FROM_HERE,
      base::Bind(&CastRtpStream::Stop, ptr));
}