// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/renderer/media/webrtc_audio_renderer.h"

#include "base/logging.h"
#include "base/metrics/histogram.h"
#include "base/strings/string_util.h"
#include "base/strings/stringprintf.h"
#include "content/renderer/media/audio_device_factory.h"
#include "content/renderer/media/media_stream_dispatcher.h"
#include "content/renderer/media/webrtc_audio_device_impl.h"
#include "content/renderer/media/webrtc_logging.h"
#include "content/renderer/render_frame_impl.h"
#include "media/audio/audio_output_device.h"
#include "media/audio/audio_parameters.h"
#include "media/audio/sample_rates.h"
#include "third_party/libjingle/source/talk/app/webrtc/mediastreaminterface.h"
#include "third_party/libjingle/source/talk/media/base/audiorenderer.h"

#if defined(OS_WIN)
#include "base/win/windows_version.h"
#include "media/audio/win/core_audio_util_win.h"
#endif

namespace content {

namespace {

// We add a UMA histogram measuring the execution time of the Render() method
// every |kNumCallbacksBetweenRenderTimeHistograms| callbacks. Assuming 10ms
// between each callback leads to one UMA update every 100ms.
const int kNumCallbacksBetweenRenderTimeHistograms = 10;

// This is a simple wrapper class that's handed out to users of a shared
// WebRtcAudioRenderer instance. This class maintains the per-user 'playing'
// and 'started' states to avoid problems related to incorrect usage which
// might violate the implementation assumptions inside WebRtcAudioRenderer
// (see the play reference count).
class SharedAudioRenderer : public MediaStreamAudioRenderer {
 public:
  // Callback definition for a callback that is called when Play(), Pause()
  // or SetVolume() are called (whenever the internal |playing_state_|
  // changes).
  typedef base::Callback<
      void(const scoped_refptr<webrtc::MediaStreamInterface>&,
           WebRtcAudioRenderer::PlayingState*)> OnPlayStateChanged;

  SharedAudioRenderer(
      const scoped_refptr<MediaStreamAudioRenderer>& delegate,
      const scoped_refptr<webrtc::MediaStreamInterface>& media_stream,
      const OnPlayStateChanged& on_play_state_changed)
      : delegate_(delegate),
        media_stream_(media_stream),
        started_(false),
        on_play_state_changed_(on_play_state_changed) {
    DCHECK(!on_play_state_changed_.is_null());
    DCHECK(media_stream_.get());
  }

 protected:
  ~SharedAudioRenderer() override {
    DCHECK(thread_checker_.CalledOnValidThread());
    DVLOG(1) << __FUNCTION__;
    Stop();
  }

 public:
  void Start() override {
    DCHECK(thread_checker_.CalledOnValidThread());
    if (started_)
      return;
    started_ = true;
    delegate_->Start();
  }

  void Play() override {
    DCHECK(thread_checker_.CalledOnValidThread());
    DCHECK(started_);
    if (playing_state_.playing())
      return;
    playing_state_.set_playing(true);
    on_play_state_changed_.Run(media_stream_, &playing_state_);
  }

  void Pause() override {
    DCHECK(thread_checker_.CalledOnValidThread());
    DCHECK(started_);
    if (!playing_state_.playing())
      return;
    playing_state_.set_playing(false);
    on_play_state_changed_.Run(media_stream_, &playing_state_);
  }

  void Stop() override {
    DCHECK(thread_checker_.CalledOnValidThread());
    if (!started_)
      return;
    Pause();
    started_ = false;
    delegate_->Stop();
  }

  void SetVolume(float volume) override {
    DCHECK(thread_checker_.CalledOnValidThread());
    DCHECK(volume >= 0.0f && volume <= 1.0f);
    playing_state_.set_volume(volume);
    on_play_state_changed_.Run(media_stream_, &playing_state_);
  }

  media::OutputDevice* GetOutputDevice() override {
    DVLOG(1) << __FUNCTION__;
    DCHECK(thread_checker_.CalledOnValidThread());
    return delegate_->GetOutputDevice();
  }

  base::TimeDelta GetCurrentRenderTime() const override {
    DCHECK(thread_checker_.CalledOnValidThread());
    return delegate_->GetCurrentRenderTime();
  }

  bool IsLocalRenderer() const override {
    DCHECK(thread_checker_.CalledOnValidThread());
    return delegate_->IsLocalRenderer();
  }

 private:
  base::ThreadChecker thread_checker_;
  const scoped_refptr<MediaStreamAudioRenderer> delegate_;
  const scoped_refptr<webrtc::MediaStreamInterface> media_stream_;
  bool started_;
  WebRtcAudioRenderer::PlayingState playing_state_;
  OnPlayStateChanged on_play_state_changed_;
};
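
// Note: each call to WebRtcAudioRenderer::CreateSharedAudioRendererProxy()
// (below) hands out one SharedAudioRenderer instance, so several proxies can
// share a single WebRtcAudioRenderer. A proxy's Play()/Pause()/SetVolume()
// only toggles its own |playing_state_| and forwards the change through the
// OnPlayStateChanged callback; the shared renderer aggregates these states
// via its play reference count.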

// Returns either AudioParameters::NO_EFFECTS or AudioParameters::DUCKING
// depending on whether or not an input element is currently open with
// ducking enabled.
int GetCurrentDuckingFlag(int render_frame_id) {
  RenderFrameImpl* const frame =
      RenderFrameImpl::FromRoutingID(render_frame_id);
  MediaStreamDispatcher* const dispatcher =
      frame ? frame->GetMediaStreamDispatcher() : NULL;
  if (dispatcher && dispatcher->IsAudioDuckingActive()) {
    return media::AudioParameters::DUCKING;
  }
  return media::AudioParameters::NO_EFFECTS;
}

}  // namespace

int WebRtcAudioRenderer::GetOptimalBufferSize(int sample_rate,
                                              int hardware_buffer_size) {
  // Use the native hardware buffer size as default. On Windows, we strive to
  // open up using this native hardware buffer size to achieve best possible
  // performance and to ensure that no FIFO is needed on the browser side to
  // match the client request. That is why there is no #if case for Windows
  // below.
  int frames_per_buffer = hardware_buffer_size;

#if defined(OS_LINUX) || defined(OS_MACOSX)
  // On Linux and Mac OS X, the low-level IO implementations on the browser
  // side support all the buffer sizes the clients want. We use the native
  // peer connection buffer size (10ms) to achieve best possible performance.
  frames_per_buffer = sample_rate / 100;
#elif defined(OS_ANDROID)
  // TODO(henrika): Keep tuning this scheme, especially for low-latency cases.
  // It might not be possible to come up with the perfect solution using
  // the render side only.
  int frames_per_10ms = sample_rate / 100;
  if (frames_per_buffer < 2 * frames_per_10ms) {
    // Examples of low-latency frame sizes and the resulting |buffer_size|:
    // Nexus 7     : 240 audio frames => 2*480 = 960
    // Nexus 10    : 256              => 2*441 = 882
    // Galaxy Nexus: 144              => 2*441 = 882
    frames_per_buffer = 2 * frames_per_10ms;
    DVLOG(1) << "Low-latency output detected on Android";
  }
#endif

  DVLOG(1) << "Using sink output buffer size: " << frames_per_buffer;
  return frames_per_buffer;
}
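
// Summary of the scheme above: Windows keeps the native
// |hardware_buffer_size| unchanged; Linux/Mac always use one 10 ms block
// (e.g. 480 frames at 48 kHz); Android rounds small native sizes up to two
// 10 ms blocks (e.g. a 240-frame native size at 48 kHz becomes 960 frames).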

WebRtcAudioRenderer::WebRtcAudioRenderer(
    const scoped_refptr<base::SingleThreadTaskRunner>& signaling_thread,
    const scoped_refptr<webrtc::MediaStreamInterface>& media_stream,
    int source_render_frame_id,
    int session_id,
    int sample_rate,
    int frames_per_buffer)
    : state_(UNINITIALIZED),
      source_render_frame_id_(source_render_frame_id),
      session_id_(session_id),
      signaling_thread_(signaling_thread),
      media_stream_(media_stream),
      source_(NULL),
      play_ref_count_(0),
      start_ref_count_(0),
      audio_delay_milliseconds_(0),
      fifo_delay_milliseconds_(0),
      sink_params_(media::AudioParameters::AUDIO_PCM_LOW_LATENCY,
                   media::CHANNEL_LAYOUT_STEREO, sample_rate, 16,
                   frames_per_buffer,
                   GetCurrentDuckingFlag(source_render_frame_id)),
      render_callback_count_(0) {
  WebRtcLogMessage(base::StringPrintf(
      "WAR::WAR. source_render_frame_id=%d"
      ", session_id=%d, sample_rate=%d, frames_per_buffer=%d, effects=%i",
      source_render_frame_id, session_id, sample_rate, frames_per_buffer,
      sink_params_.effects()));
}

WebRtcAudioRenderer::~WebRtcAudioRenderer() {
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK_EQ(state_, UNINITIALIZED);
}

bool WebRtcAudioRenderer::Initialize(WebRtcAudioRendererSource* source) {
  DVLOG(1) << "WebRtcAudioRenderer::Initialize()";
  DCHECK(thread_checker_.CalledOnValidThread());
  base::AutoLock auto_lock(lock_);
  DCHECK_EQ(state_, UNINITIALIZED);
  DCHECK(source);
  DCHECK(!sink_.get());
  DCHECK(!source_);

  // WebRTC does not yet support higher rates than 96000 on the client side
  // and 48000 is the preferred sample rate. Therefore, if 192000 is detected,
  // we change the rate to 48000 instead. The consequence is that the native
  // layer will be opened up at 192kHz but WebRTC will provide data at 48kHz
  // which will then be resampled by the audio converter on the browser side
  // to match the native audio layer.
  int sample_rate = sink_params_.sample_rate();
  DVLOG(1) << "Audio output hardware sample rate: " << sample_rate;
  if (sample_rate == 192000) {
    DVLOG(1) << "Resampling from 48000 to 192000 is required";
    sample_rate = 48000;
  }

  media::AudioSampleRate asr;
  if (media::ToAudioSampleRate(sample_rate, &asr)) {
    UMA_HISTOGRAM_ENUMERATION(
        "WebRTC.AudioOutputSampleRate", asr, media::kAudioSampleRateMax + 1);
  } else {
    UMA_HISTOGRAM_COUNTS("WebRTC.AudioOutputSampleRateUnexpected",
                         sample_rate);
  }

  // Set up audio parameters for the source, i.e., the WebRTC client.
  // The WebRTC client only supports multiples of 10ms as buffer size where
  // 10ms is preferred for lowest possible delay.
  media::AudioParameters source_params;
  const int frames_per_10ms = (sample_rate / 100);
  DVLOG(1) << "Using WebRTC output buffer size: " << frames_per_10ms;
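  // E.g. one 10 ms block is 480 frames at 48 kHz and 441 frames at 44.1 kHz.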

  source_params.Reset(media::AudioParameters::AUDIO_PCM_LOW_LATENCY,
                      sink_params_.channel_layout(), sink_params_.channels(),
                      sample_rate, 16, frames_per_10ms);

  const int frames_per_buffer =
      GetOptimalBufferSize(sample_rate, sink_params_.frames_per_buffer());

  sink_params_.Reset(sink_params_.format(), sink_params_.channel_layout(),
                     sink_params_.channels(), sample_rate, 16,
                     frames_per_buffer);

  // Create a FIFO if re-buffering is required to match the source input with
  // the sink request. The source acts as provider here and the sink as
  // consumer.
  fifo_delay_milliseconds_ = 0;
  if (source_params.frames_per_buffer() != sink_params_.frames_per_buffer()) {
    DVLOG(1) << "Rebuffering from " << source_params.frames_per_buffer()
             << " to " << sink_params_.frames_per_buffer();
    audio_fifo_.reset(new media::AudioPullFifo(
        source_params.channels(),
        source_params.frames_per_buffer(),
        base::Bind(&WebRtcAudioRenderer::SourceCallback,
                   base::Unretained(this))));

    if (sink_params_.frames_per_buffer() > source_params.frames_per_buffer()) {
      // Use a double here; integer division would truncate the per-frame
      // duration (a fraction of a millisecond) to zero.
      const double frame_duration_milliseconds =
          base::Time::kMillisecondsPerSecond /
          static_cast<double>(source_params.sample_rate());
      fifo_delay_milliseconds_ = (sink_params_.frames_per_buffer() -
          source_params.frames_per_buffer()) * frame_duration_milliseconds;
    }
  }
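
  // Example: with a 44.1 kHz device whose native buffer is 882 frames, the
  // source still delivers 441-frame (10 ms) blocks, so the FIFO adds
  // (882 - 441) * (1000 / 44100) ~= 10 ms of delay. This is reported to the
  // source via |fifo_delay_milliseconds_| in SourceCallback().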

  source_ = source;

  // Configure the audio rendering client and start rendering.
  sink_ = AudioDeviceFactory::NewOutputDevice(source_render_frame_id_);

  DCHECK_GE(session_id_, 0);
  sink_->InitializeWithSessionId(sink_params_, this, session_id_);

  sink_->Start();

  // User must call Play() before any audio can be heard.
  state_ = PAUSED;

  return true;
}

scoped_refptr<MediaStreamAudioRenderer>
WebRtcAudioRenderer::CreateSharedAudioRendererProxy(
    const scoped_refptr<webrtc::MediaStreamInterface>& media_stream) {
  content::SharedAudioRenderer::OnPlayStateChanged on_play_state_changed =
      base::Bind(&WebRtcAudioRenderer::OnPlayStateChanged, this);
  return new SharedAudioRenderer(this, media_stream, on_play_state_changed);
}

bool WebRtcAudioRenderer::IsStarted() const {
  DCHECK(thread_checker_.CalledOnValidThread());
  return start_ref_count_ != 0;
}

void WebRtcAudioRenderer::Start() {
  DVLOG(1) << "WebRtcAudioRenderer::Start()";
  DCHECK(thread_checker_.CalledOnValidThread());
  ++start_ref_count_;
}

void WebRtcAudioRenderer::Play() {
  DVLOG(1) << "WebRtcAudioRenderer::Play()";
  DCHECK(thread_checker_.CalledOnValidThread());
  if (playing_state_.playing())
    return;

  playing_state_.set_playing(true);
  render_callback_count_ = 0;

  OnPlayStateChanged(media_stream_, &playing_state_);
}

void WebRtcAudioRenderer::EnterPlayState() {
  DVLOG(1) << "WebRtcAudioRenderer::EnterPlayState()";
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK_GT(start_ref_count_, 0) << "Did you forget to call Start()?";
  base::AutoLock auto_lock(lock_);
  if (state_ == UNINITIALIZED)
    return;

  DCHECK(play_ref_count_ == 0 || state_ == PLAYING);
  ++play_ref_count_;

  if (state_ != PLAYING) {
    state_ = PLAYING;

    if (audio_fifo_) {
      audio_delay_milliseconds_ = 0;
      audio_fifo_->Clear();
    }
  }
}
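
// Note on the play reference count: each playing SharedAudioRenderer proxy
// contributes one reference, so e.g. two playing proxies give
// |play_ref_count_| == 2, and pausing just one of them (see EnterPauseState()
// below) keeps the shared sink in the PLAYING state.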

void WebRtcAudioRenderer::Pause() {
  DVLOG(1) << "WebRtcAudioRenderer::Pause()";
  DCHECK(thread_checker_.CalledOnValidThread());
  if (!playing_state_.playing())
    return;

  playing_state_.set_playing(false);

  OnPlayStateChanged(media_stream_, &playing_state_);
}

void WebRtcAudioRenderer::EnterPauseState() {
  DVLOG(1) << "WebRtcAudioRenderer::EnterPauseState()";
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK_GT(start_ref_count_, 0) << "Did you forget to call Start()?";
  base::AutoLock auto_lock(lock_);
  if (state_ == UNINITIALIZED)
    return;

  DCHECK_EQ(state_, PLAYING);
  DCHECK_GT(play_ref_count_, 0);
  if (!--play_ref_count_)
    state_ = PAUSED;
}

void WebRtcAudioRenderer::Stop() {
  DVLOG(1) << "WebRtcAudioRenderer::Stop()";
  DCHECK(thread_checker_.CalledOnValidThread());
  {
    base::AutoLock auto_lock(lock_);
    if (state_ == UNINITIALIZED)
      return;

    if (--start_ref_count_)
      return;

    DVLOG(1) << "Calling RemoveAudioRenderer and Stop().";

    source_->RemoveAudioRenderer(this);
    source_ = NULL;
    state_ = UNINITIALIZED;
  }

  // Make sure to stop the sink while _not_ holding the lock since the Render()
  // callback may currently be executing and try to grab the lock while we're
  // stopping the thread on which it runs.
  sink_->Stop();
}

void WebRtcAudioRenderer::SetVolume(float volume) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(volume >= 0.0f && volume <= 1.0f);

  playing_state_.set_volume(volume);
  OnPlayStateChanged(media_stream_, &playing_state_);
}

media::OutputDevice* WebRtcAudioRenderer::GetOutputDevice() {
  DVLOG(1) << __FUNCTION__;
  DCHECK(thread_checker_.CalledOnValidThread());
  return sink_->GetOutputDevice();
}

base::TimeDelta WebRtcAudioRenderer::GetCurrentRenderTime() const {
  DCHECK(thread_checker_.CalledOnValidThread());
  base::AutoLock auto_lock(lock_);
  return current_time_;
}

bool WebRtcAudioRenderer::IsLocalRenderer() const {
  return false;
}

int WebRtcAudioRenderer::Render(media::AudioBus* audio_bus,
                                int audio_delay_milliseconds) {
  base::AutoLock auto_lock(lock_);
  if (!source_)
    return 0;

  DVLOG(2) << "WebRtcAudioRenderer::Render()";
  DVLOG(2) << "audio_delay_milliseconds: " << audio_delay_milliseconds;

  audio_delay_milliseconds_ = audio_delay_milliseconds;

  if (audio_fifo_)
    audio_fifo_->Consume(audio_bus, audio_bus->frames());
  else
    SourceCallback(0, audio_bus);

  return (state_ == PLAYING) ? audio_bus->frames() : 0;
}

void WebRtcAudioRenderer::OnRenderError() {
  LOG(ERROR) << "OnRenderError()";
}

// Called by AudioPullFifo when more data is necessary.
void WebRtcAudioRenderer::SourceCallback(
    int fifo_frame_delay, media::AudioBus* audio_bus) {
  base::TimeTicks start_time = base::TimeTicks::Now();
  DVLOG(2) << "WebRtcAudioRenderer::SourceCallback("
           << fifo_frame_delay << ", "
           << audio_bus->frames() << ")";

  int output_delay_milliseconds = audio_delay_milliseconds_;
  output_delay_milliseconds += fifo_delay_milliseconds_;
  DVLOG(2) << "output_delay_milliseconds: " << output_delay_milliseconds;

  // We need to keep render data for the |source_| regardless of |state_|,
  // otherwise the data will be buffered up inside |source_|.
  source_->RenderData(audio_bus, sink_params_.sample_rate(),
                      output_delay_milliseconds, &current_time_);

  // Avoid filling up the audio bus if we are not playing; instead
  // return here and ensure that the returned value in Render() is 0.
  if (state_ != PLAYING)
    audio_bus->Zero();

  if (++render_callback_count_ == kNumCallbacksBetweenRenderTimeHistograms) {
    base::TimeDelta elapsed = base::TimeTicks::Now() - start_time;
    render_callback_count_ = 0;
    UMA_HISTOGRAM_TIMES("WebRTC.AudioRenderTimes", elapsed);
  }
}

void WebRtcAudioRenderer::UpdateSourceVolume(
    webrtc::AudioSourceInterface* source) {
  DCHECK(thread_checker_.CalledOnValidThread());

  // Note: If there are no playing audio renderers, then the volume will be
  // set to 0.0.
  float volume = 0.0f;

  SourcePlayingStates::iterator entry = source_playing_states_.find(source);
  if (entry != source_playing_states_.end()) {
    PlayingStates& states = entry->second;
    for (PlayingStates::const_iterator it = states.begin();
         it != states.end(); ++it) {
      if ((*it)->playing())
        volume += (*it)->volume();
    }
  }
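
  // E.g. two playing renderers with volumes 1.0 and 0.5 yield a combined
  // source volume of 1.5, which is why the allowed range below extends
  // beyond 1.0.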

  // The valid range for volume scaling of a remote webrtc source is
  // 0.0 - 10.0 where 1.0 is no attenuation/boost.
  DCHECK(volume >= 0.0f);
  if (volume > 10.0f)
    volume = 10.0f;

  DVLOG(1) << "Setting remote source volume: " << volume;
  if (!signaling_thread_->BelongsToCurrentThread()) {
    // Libjingle hands out proxy objects in most cases, but the audio source
    // object is an exception (bug?). So, to work around that, we need to make
    // sure we call SetVolume on the signaling thread.
    signaling_thread_->PostTask(
        FROM_HERE,
        base::Bind(&webrtc::AudioSourceInterface::SetVolume, source, volume));
  } else {
    source->SetVolume(volume);
  }
}

bool WebRtcAudioRenderer::AddPlayingState(
    webrtc::AudioSourceInterface* source,
    PlayingState* state) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(state->playing());
  // Look up or add the |source| to the map.
  PlayingStates& array = source_playing_states_[source];
  if (std::find(array.begin(), array.end(), state) != array.end())
    return false;

  array.push_back(state);

  return true;
}

bool WebRtcAudioRenderer::RemovePlayingState(
    webrtc::AudioSourceInterface* source,
    PlayingState* state) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(!state->playing());
  SourcePlayingStates::iterator found = source_playing_states_.find(source);
  if (found == source_playing_states_.end())
    return false;

  PlayingStates& array = found->second;
  PlayingStates::iterator state_it =
      std::find(array.begin(), array.end(), state);
  if (state_it == array.end())
    return false;

  array.erase(state_it);

  if (array.empty())
    source_playing_states_.erase(found);

  return true;
}

void WebRtcAudioRenderer::OnPlayStateChanged(
    const scoped_refptr<webrtc::MediaStreamInterface>& media_stream,
    PlayingState* state) {
  webrtc::AudioTrackVector tracks(media_stream->GetAudioTracks());
  for (webrtc::AudioTrackVector::iterator it = tracks.begin();
       it != tracks.end(); ++it) {
    webrtc::AudioSourceInterface* source = (*it)->GetSource();
    DCHECK(source);
    if (!state->playing()) {
      if (RemovePlayingState(source, state))
        EnterPauseState();
    } else if (AddPlayingState(source, state)) {
      EnterPlayState();
    }
    UpdateSourceVolume(source);
  }
}

}  // namespace content