Return Windows error code when create-process fails.
[chromium-blink-merge.git] / chrome / renderer / media / cast_rtp_stream.cc
bloba0e954c9cfb8caf5ff0156711ae4b170c734a8b3
1 // Copyright 2013 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #include "chrome/renderer/media/cast_rtp_stream.h"
7 #include <algorithm>
9 #include "base/bind.h"
10 #include "base/callback_helpers.h"
11 #include "base/command_line.h"
12 #include "base/logging.h"
13 #include "base/memory/weak_ptr.h"
14 #include "base/strings/stringprintf.h"
15 #include "base/sys_info.h"
16 #include "base/trace_event/trace_event.h"
17 #include "chrome/common/chrome_switches.h"
18 #include "chrome/renderer/media/cast_session.h"
19 #include "chrome/renderer/media/cast_udp_transport.h"
20 #include "content/public/renderer/media_stream_audio_sink.h"
21 #include "content/public/renderer/media_stream_video_sink.h"
22 #include "content/public/renderer/render_thread.h"
23 #include "content/public/renderer/video_encode_accelerator.h"
24 #include "media/audio/audio_parameters.h"
25 #include "media/base/audio_bus.h"
26 #include "media/base/audio_converter.h"
27 #include "media/base/audio_fifo.h"
28 #include "media/base/bind_to_current_loop.h"
29 #include "media/base/video_frame.h"
30 #include "media/cast/cast_config.h"
31 #include "media/cast/cast_defines.h"
32 #include "media/cast/cast_sender.h"
33 #include "media/cast/net/cast_transport_config.h"
34 #include "third_party/WebKit/public/platform/WebMediaStreamSource.h"
35 #include "ui/gfx/geometry/size.h"
37 using media::cast::AudioSenderConfig;
38 using media::cast::VideoSenderConfig;
40 namespace {
42 const char kCodecNameOpus[] = "OPUS";
43 const char kCodecNameVp8[] = "VP8";
44 const char kCodecNameH264[] = "H264";
46 // To convert from kilobits per second to bits per second.
47 const int kBitrateMultiplier = 1000;
49 CastRtpPayloadParams DefaultOpusPayload() {
50 CastRtpPayloadParams payload;
51 payload.payload_type = 127;
52 payload.max_latency_ms = media::cast::kDefaultRtpMaxDelayMs;
53 payload.ssrc = 1;
54 payload.feedback_ssrc = 2;
55 payload.clock_rate = media::cast::kDefaultAudioSamplingRate;
56 // The value is 0 which means VBR.
57 payload.min_bitrate = payload.max_bitrate =
58 media::cast::kDefaultAudioEncoderBitrate;
59 payload.channels = 2;
60 payload.max_frame_rate = 100; // 10 ms audio frames
61 payload.codec_name = kCodecNameOpus;
62 return payload;
65 CastRtpPayloadParams DefaultVp8Payload() {
66 CastRtpPayloadParams payload;
67 payload.payload_type = 96;
68 payload.max_latency_ms = media::cast::kDefaultRtpMaxDelayMs;
69 payload.ssrc = 11;
70 payload.feedback_ssrc = 12;
71 payload.clock_rate = media::cast::kVideoFrequency;
72 payload.max_bitrate = 2000;
73 payload.min_bitrate = 50;
74 payload.channels = 1;
75 payload.max_frame_rate = media::cast::kDefaultMaxFrameRate;
76 payload.codec_name = kCodecNameVp8;
77 return payload;
80 CastRtpPayloadParams DefaultH264Payload() {
81 CastRtpPayloadParams payload;
82 // TODO(hshi): set different ssrc/rtpPayloadType values for H264 and VP8
83 // once b/13696137 is fixed.
84 payload.payload_type = 96;
85 payload.max_latency_ms = media::cast::kDefaultRtpMaxDelayMs;
86 payload.ssrc = 11;
87 payload.feedback_ssrc = 12;
88 payload.clock_rate = media::cast::kVideoFrequency;
89 payload.max_bitrate = 2000;
90 payload.min_bitrate = 50;
91 payload.channels = 1;
92 payload.max_frame_rate = media::cast::kDefaultMaxFrameRate;
93 payload.codec_name = kCodecNameH264;
94 return payload;
97 bool IsHardwareVP8EncodingSupported() {
98 const base::CommandLine* cmd_line = base::CommandLine::ForCurrentProcess();
99 if (cmd_line->HasSwitch(switches::kDisableCastStreamingHWEncoding)) {
100 DVLOG(1) << "Disabled hardware VP8 support for Cast Streaming.";
101 return false;
104 // Query for hardware VP8 encoder support.
105 std::vector<media::VideoEncodeAccelerator::SupportedProfile> vea_profiles =
106 content::GetSupportedVideoEncodeAcceleratorProfiles();
107 for (size_t i = 0; i < vea_profiles.size(); ++i) {
108 if (vea_profiles[i].profile >= media::VP8PROFILE_MIN &&
109 vea_profiles[i].profile <= media::VP8PROFILE_MAX) {
110 return true;
113 return false;
116 bool IsHardwareH264EncodingSupported() {
117 const base::CommandLine* cmd_line = base::CommandLine::ForCurrentProcess();
118 if (cmd_line->HasSwitch(switches::kDisableCastStreamingHWEncoding)) {
119 DVLOG(1) << "Disabled hardware h264 support for Cast Streaming.";
120 return false;
123 // Query for hardware H.264 encoder support.
124 std::vector<media::VideoEncodeAccelerator::SupportedProfile> vea_profiles =
125 content::GetSupportedVideoEncodeAcceleratorProfiles();
126 for (size_t i = 0; i < vea_profiles.size(); ++i) {
127 if (vea_profiles[i].profile >= media::H264PROFILE_MIN &&
128 vea_profiles[i].profile <= media::H264PROFILE_MAX) {
129 return true;
132 return false;
135 int NumberOfEncodeThreads() {
136 // Do not saturate CPU utilization just for encoding. On a lower-end system
137 // with only 1 or 2 cores, use only one thread for encoding. On systems with
138 // more cores, allow half of the cores to be used for encoding.
139 return std::min(8, (base::SysInfo::NumberOfProcessors() + 1) / 2);
142 std::vector<CastRtpParams> SupportedAudioParams() {
143 // TODO(hclam): Fill in more codecs here.
144 std::vector<CastRtpParams> supported_params;
145 supported_params.push_back(CastRtpParams(DefaultOpusPayload()));
146 return supported_params;
149 std::vector<CastRtpParams> SupportedVideoParams() {
150 std::vector<CastRtpParams> supported_params;
152 // Prefer VP8 over H.264 for hardware encoder.
153 if (IsHardwareVP8EncodingSupported())
154 supported_params.push_back(CastRtpParams(DefaultVp8Payload()));
155 if (IsHardwareH264EncodingSupported())
156 supported_params.push_back(CastRtpParams(DefaultH264Payload()));
158 // Propose the default software VP8 encoder, if no hardware encoders are
159 // available.
160 if (supported_params.empty())
161 supported_params.push_back(CastRtpParams(DefaultVp8Payload()));
163 return supported_params;
166 bool ToAudioSenderConfig(const CastRtpParams& params,
167 AudioSenderConfig* config) {
168 config->ssrc = params.payload.ssrc;
169 config->receiver_ssrc = params.payload.feedback_ssrc;
170 if (config->ssrc == config->receiver_ssrc)
171 return false;
172 config->min_playout_delay =
173 base::TimeDelta::FromMilliseconds(
174 params.payload.min_latency_ms ?
175 params.payload.min_latency_ms :
176 params.payload.max_latency_ms);
177 config->max_playout_delay =
178 base::TimeDelta::FromMilliseconds(params.payload.max_latency_ms);
179 if (config->min_playout_delay <= base::TimeDelta())
180 return false;
181 if (config->min_playout_delay > config->max_playout_delay)
182 return false;
183 config->rtp_payload_type = params.payload.payload_type;
184 config->use_external_encoder = false;
185 config->frequency = params.payload.clock_rate;
186 if (config->frequency < 8000)
187 return false;
188 config->channels = params.payload.channels;
189 if (config->channels < 1)
190 return false;
191 config->bitrate = params.payload.max_bitrate * kBitrateMultiplier;
192 if (params.payload.codec_name == kCodecNameOpus)
193 config->codec = media::cast::CODEC_AUDIO_OPUS;
194 else
195 return false;
196 config->aes_key = params.payload.aes_key;
197 config->aes_iv_mask = params.payload.aes_iv_mask;
198 return true;
201 bool ToVideoSenderConfig(const CastRtpParams& params,
202 VideoSenderConfig* config) {
203 config->ssrc = params.payload.ssrc;
204 config->receiver_ssrc = params.payload.feedback_ssrc;
205 if (config->ssrc == config->receiver_ssrc)
206 return false;
207 config->min_playout_delay =
208 base::TimeDelta::FromMilliseconds(
209 params.payload.min_latency_ms ?
210 params.payload.min_latency_ms :
211 params.payload.max_latency_ms);
212 config->max_playout_delay =
213 base::TimeDelta::FromMilliseconds(params.payload.max_latency_ms);
214 if (config->min_playout_delay <= base::TimeDelta())
215 return false;
216 if (config->min_playout_delay > config->max_playout_delay)
217 return false;
218 config->rtp_payload_type = params.payload.payload_type;
219 config->min_bitrate = config->start_bitrate =
220 params.payload.min_bitrate * kBitrateMultiplier;
221 config->max_bitrate = params.payload.max_bitrate * kBitrateMultiplier;
222 if (config->min_bitrate > config->max_bitrate)
223 return false;
224 config->start_bitrate = config->min_bitrate;
225 config->max_frame_rate = static_cast<int>(
226 std::max(1.0, params.payload.max_frame_rate) + 0.5);
227 if (config->max_frame_rate > 120)
228 return false;
229 if (params.payload.codec_name == kCodecNameVp8) {
230 config->use_external_encoder = IsHardwareVP8EncodingSupported();
231 config->codec = media::cast::CODEC_VIDEO_VP8;
232 } else if (params.payload.codec_name == kCodecNameH264) {
233 config->use_external_encoder = IsHardwareH264EncodingSupported();
234 config->codec = media::cast::CODEC_VIDEO_H264;
235 } else {
236 return false;
238 if (!config->use_external_encoder) {
239 config->number_of_encode_threads = NumberOfEncodeThreads();
241 config->aes_key = params.payload.aes_key;
242 config->aes_iv_mask = params.payload.aes_iv_mask;
243 return true;
246 } // namespace
248 // This class receives MediaStreamTrack events and video frames from a
249 // MediaStreamTrack.
251 // Threading: Video frames are received on the IO thread and then
252 // forwarded to media::cast::VideoFrameInput through a static method.
253 // Member variables of this class are only accessed on the render thread.
254 class CastVideoSink : public base::SupportsWeakPtr<CastVideoSink>,
255 public content::MediaStreamVideoSink {
256 public:
257 // |track| provides data for this sink.
258 // |error_callback| is called if video formats don't match.
259 CastVideoSink(const blink::WebMediaStreamTrack& track,
260 const CastRtpStream::ErrorCallback& error_callback)
261 : track_(track),
262 sink_added_(false),
263 error_callback_(error_callback) {}
265 ~CastVideoSink() override {
266 if (sink_added_)
267 RemoveFromVideoTrack(this, track_);
270 // This static method is used to forward video frames to |frame_input|.
271 static void OnVideoFrame(
272 // These parameters are already bound when callback is created.
273 const CastRtpStream::ErrorCallback& error_callback,
274 const scoped_refptr<media::cast::VideoFrameInput> frame_input,
275 // These parameters are passed for each frame.
276 const scoped_refptr<media::VideoFrame>& frame,
277 const base::TimeTicks& estimated_capture_time) {
278 base::TimeTicks timestamp;
279 if (estimated_capture_time.is_null())
280 timestamp = base::TimeTicks::Now();
281 else
282 timestamp = estimated_capture_time;
284 // Used by chrome/browser/extension/api/cast_streaming/performance_test.cc
285 TRACE_EVENT_INSTANT2(
286 "cast_perf_test", "MediaStreamVideoSink::OnVideoFrame",
287 TRACE_EVENT_SCOPE_THREAD,
288 "timestamp", timestamp.ToInternalValue(),
289 "time_delta", frame->timestamp().ToInternalValue());
290 frame_input->InsertRawVideoFrame(frame, timestamp);
293 // Attach this sink to a video track represented by |track_|.
294 // Data received from the track will be submitted to |frame_input|.
295 void AddToTrack(
296 const scoped_refptr<media::cast::VideoFrameInput>& frame_input) {
297 DCHECK(!sink_added_);
298 sink_added_ = true;
299 AddToVideoTrack(
300 this,
301 base::Bind(
302 &CastVideoSink::OnVideoFrame,
303 error_callback_,
304 frame_input),
305 track_);
308 private:
309 blink::WebMediaStreamTrack track_;
310 bool sink_added_;
311 CastRtpStream::ErrorCallback error_callback_;
313 DISALLOW_COPY_AND_ASSIGN(CastVideoSink);
316 // Receives audio data from a MediaStreamTrack. Data is submitted to
317 // media::cast::FrameInput.
319 // Threading: Audio frames are received on the real-time audio thread.
320 // Note that RemoveFromAudioTrack() is synchronous and we have
321 // gurantee that there will be no more audio data after calling it.
322 class CastAudioSink : public base::SupportsWeakPtr<CastAudioSink>,
323 public content::MediaStreamAudioSink,
324 public media::AudioConverter::InputCallback {
325 public:
326 // |track| provides data for this sink.
327 CastAudioSink(const blink::WebMediaStreamTrack& track,
328 int output_channels,
329 int output_sample_rate)
330 : track_(track),
331 output_channels_(output_channels),
332 output_sample_rate_(output_sample_rate),
333 current_input_bus_(nullptr),
334 sample_frames_in_(0),
335 sample_frames_out_(0) {}
337 ~CastAudioSink() override {
338 if (frame_input_.get())
339 RemoveFromAudioTrack(this, track_);
342 // Add this sink to the track. Data received from the track will be
343 // submitted to |frame_input|.
344 void AddToTrack(
345 const scoped_refptr<media::cast::AudioFrameInput>& frame_input) {
346 DCHECK(frame_input.get());
347 DCHECK(!frame_input_.get());
348 // This member is written here and then accessed on the IO thread
349 // We will not get data until AddToAudioTrack is called so it is
350 // safe to access this member now.
351 frame_input_ = frame_input;
352 AddToAudioTrack(this, track_);
355 protected:
356 // Called on real-time audio thread.
357 void OnData(const media::AudioBus& input_bus,
358 base::TimeTicks estimated_capture_time) override {
359 DCHECK(input_params_.IsValid());
360 DCHECK_EQ(input_bus.channels(), input_params_.channels());
361 DCHECK_EQ(input_bus.frames(), input_params_.frames_per_buffer());
362 DCHECK(!estimated_capture_time.is_null());
363 DCHECK(converter_.get());
365 // Determine the duration of the audio signal enqueued within |converter_|.
366 const base::TimeDelta signal_duration_already_buffered =
367 (sample_frames_in_ * base::TimeDelta::FromSeconds(1) /
368 input_params_.sample_rate()) -
369 (sample_frames_out_ * base::TimeDelta::FromSeconds(1) /
370 output_sample_rate_);
371 DVLOG(2) << "Audio reference time adjustment: -("
372 << signal_duration_already_buffered.InMicroseconds() << " us)";
373 const base::TimeTicks capture_time_of_first_converted_sample =
374 estimated_capture_time - signal_duration_already_buffered;
376 // Convert the entire input signal. AudioConverter is efficient in that no
377 // additional copying or conversion will occur if the input signal is in the
378 // same format as the output. Note that, while the number of sample frames
379 // provided as input is always the same, the chunk size (and the size of the
380 // |audio_bus| here) can be variable. This is not an issue since
381 // media::cast::AudioFrameInput can handle variable-sized AudioBuses.
382 scoped_ptr<media::AudioBus> audio_bus =
383 media::AudioBus::Create(output_channels_, converter_->ChunkSize());
384 // AudioConverter will call ProvideInput() to fetch from |current_data_|.
385 current_input_bus_ = &input_bus;
386 converter_->Convert(audio_bus.get());
387 DCHECK(!current_input_bus_); // ProvideInput() called exactly once?
389 sample_frames_in_ += input_params_.frames_per_buffer();
390 sample_frames_out_ += audio_bus->frames();
392 frame_input_->InsertAudio(audio_bus.Pass(),
393 capture_time_of_first_converted_sample);
396 // Called on real-time audio thread.
397 void OnSetFormat(const media::AudioParameters& params) override {
398 if (input_params_.Equals(params))
399 return;
400 input_params_ = params;
402 DVLOG(1) << "Setting up audio resampling: {"
403 << input_params_.channels() << " channels, "
404 << input_params_.sample_rate() << " Hz} --> {"
405 << output_channels_ << " channels, "
406 << output_sample_rate_ << " Hz}";
407 const media::AudioParameters output_params(
408 media::AudioParameters::AUDIO_PCM_LOW_LATENCY,
409 media::GuessChannelLayout(output_channels_),
410 output_sample_rate_, 32,
411 output_sample_rate_ * input_params_.frames_per_buffer() /
412 input_params_.sample_rate());
413 converter_.reset(
414 new media::AudioConverter(input_params_, output_params, false));
415 converter_->AddInput(this);
416 sample_frames_in_ = 0;
417 sample_frames_out_ = 0;
420 // Called on real-time audio thread.
421 double ProvideInput(media::AudioBus* audio_bus,
422 base::TimeDelta buffer_delay) override {
423 DCHECK(current_input_bus_);
424 current_input_bus_->CopyTo(audio_bus);
425 current_input_bus_ = nullptr;
426 return 1.0;
429 private:
430 const blink::WebMediaStreamTrack track_;
431 const int output_channels_;
432 const int output_sample_rate_;
434 // This must be set before the real-time audio thread starts calling OnData(),
435 // and remain unchanged until after the thread will stop calling OnData().
436 scoped_refptr<media::cast::AudioFrameInput> frame_input_;
438 // These members are accessed on the real-time audio time only.
439 media::AudioParameters input_params_;
440 scoped_ptr<media::AudioConverter> converter_;
441 const media::AudioBus* current_input_bus_;
442 int64 sample_frames_in_;
443 int64 sample_frames_out_;
445 DISALLOW_COPY_AND_ASSIGN(CastAudioSink);
448 CastRtpParams::CastRtpParams(const CastRtpPayloadParams& payload_params)
449 : payload(payload_params) {}
451 CastCodecSpecificParams::CastCodecSpecificParams() {}
453 CastCodecSpecificParams::~CastCodecSpecificParams() {}
455 CastRtpPayloadParams::CastRtpPayloadParams()
456 : payload_type(0),
457 max_latency_ms(0),
458 min_latency_ms(0),
459 ssrc(0),
460 feedback_ssrc(0),
461 clock_rate(0),
462 max_bitrate(0),
463 min_bitrate(0),
464 channels(0),
465 max_frame_rate(0.0) {
468 CastRtpPayloadParams::~CastRtpPayloadParams() {}
470 CastRtpParams::CastRtpParams() {}
472 CastRtpParams::~CastRtpParams() {}
474 CastRtpStream::CastRtpStream(const blink::WebMediaStreamTrack& track,
475 const scoped_refptr<CastSession>& session)
476 : track_(track), cast_session_(session), weak_factory_(this) {}
478 CastRtpStream::~CastRtpStream() {
479 Stop();
482 std::vector<CastRtpParams> CastRtpStream::GetSupportedParams() {
483 if (IsAudio())
484 return SupportedAudioParams();
485 else
486 return SupportedVideoParams();
489 CastRtpParams CastRtpStream::GetParams() { return params_; }
491 void CastRtpStream::Start(const CastRtpParams& params,
492 const base::Closure& start_callback,
493 const base::Closure& stop_callback,
494 const ErrorCallback& error_callback) {
495 DCHECK(!start_callback.is_null());
496 DCHECK(!stop_callback.is_null());
497 DCHECK(!error_callback.is_null());
499 DVLOG(1) << "CastRtpStream::Start = " << (IsAudio() ? "audio" : "video");
500 stop_callback_ = stop_callback;
501 error_callback_ = error_callback;
503 if (IsAudio()) {
504 AudioSenderConfig config;
505 if (!ToAudioSenderConfig(params, &config)) {
506 DidEncounterError("Invalid parameters for audio.");
507 return;
510 // In case of error we have to go through DidEncounterError() to stop
511 // the streaming after reporting the error.
512 audio_sink_.reset(new CastAudioSink(
513 track_,
514 params.payload.channels,
515 params.payload.clock_rate));
516 cast_session_->StartAudio(
517 config,
518 base::Bind(&CastAudioSink::AddToTrack, audio_sink_->AsWeakPtr()),
519 base::Bind(&CastRtpStream::DidEncounterError,
520 weak_factory_.GetWeakPtr()));
521 start_callback.Run();
522 } else {
523 VideoSenderConfig config;
524 if (!ToVideoSenderConfig(params, &config)) {
525 DidEncounterError("Invalid parameters for video.");
526 return;
528 // See the code for audio above for explanation of callbacks.
529 video_sink_.reset(new CastVideoSink(
530 track_,
531 media::BindToCurrentLoop(base::Bind(&CastRtpStream::DidEncounterError,
532 weak_factory_.GetWeakPtr()))));
533 cast_session_->StartVideo(
534 config,
535 base::Bind(&CastVideoSink::AddToTrack, video_sink_->AsWeakPtr()),
536 base::Bind(&CastRtpStream::DidEncounterError,
537 weak_factory_.GetWeakPtr()));
538 start_callback.Run();
542 void CastRtpStream::Stop() {
543 DVLOG(1) << "CastRtpStream::Stop = " << (IsAudio() ? "audio" : "video");
544 if (stop_callback_.is_null())
545 return; // Already stopped.
546 weak_factory_.InvalidateWeakPtrs();
547 error_callback_.Reset();
548 audio_sink_.reset();
549 video_sink_.reset();
550 base::ResetAndReturn(&stop_callback_).Run();
553 void CastRtpStream::ToggleLogging(bool enable) {
554 DVLOG(1) << "CastRtpStream::ToggleLogging(" << enable << ") = "
555 << (IsAudio() ? "audio" : "video");
556 cast_session_->ToggleLogging(IsAudio(), enable);
559 void CastRtpStream::GetRawEvents(
560 const base::Callback<void(scoped_ptr<base::BinaryValue>)>& callback,
561 const std::string& extra_data) {
562 DVLOG(1) << "CastRtpStream::GetRawEvents = "
563 << (IsAudio() ? "audio" : "video");
564 cast_session_->GetEventLogsAndReset(IsAudio(), extra_data, callback);
567 void CastRtpStream::GetStats(
568 const base::Callback<void(scoped_ptr<base::DictionaryValue>)>& callback) {
569 DVLOG(1) << "CastRtpStream::GetStats = "
570 << (IsAudio() ? "audio" : "video");
571 cast_session_->GetStatsAndReset(IsAudio(), callback);
574 bool CastRtpStream::IsAudio() const {
575 return track_.source().type() == blink::WebMediaStreamSource::TypeAudio;
578 void CastRtpStream::DidEncounterError(const std::string& message) {
579 DCHECK(content::RenderThread::Get());
580 DVLOG(1) << "CastRtpStream::DidEncounterError(" << message << ") = "
581 << (IsAudio() ? "audio" : "video");
582 // Save the WeakPtr first because the error callback might delete this object.
583 base::WeakPtr<CastRtpStream> ptr = weak_factory_.GetWeakPtr();
584 error_callback_.Run(message);
585 base::ThreadTaskRunnerHandle::Get()->PostTask(
586 FROM_HERE,
587 base::Bind(&CastRtpStream::Stop, ptr));