// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef CONTENT_RENDERER_MEDIA_WEBRTC_AUDIO_RENDERER_H_
#define CONTENT_RENDERER_MEDIA_WEBRTC_AUDIO_RENDERER_H_

#include <map>
#include <vector>

#include "base/memory/ref_counted.h"
#include "base/synchronization/lock.h"
#include "base/threading/non_thread_safe.h"
#include "base/threading/thread_checker.h"
#include "content/renderer/media/media_stream_audio_renderer.h"
#include "content/renderer/media/webrtc_audio_device_impl.h"
#include "media/base/audio_decoder.h"
#include "media/base/audio_pull_fifo.h"
#include "media/base/audio_renderer_sink.h"
#include "media/base/channel_layout.h"

namespace media {
class AudioOutputDevice;
}  // namespace media

namespace webrtc {
class AudioSourceInterface;
class MediaStreamInterface;
}  // namespace webrtc

namespace content {

class WebRtcAudioRendererSource;

// This renderer handles calls from the pipeline and WebRtc ADM. It is used
// for connecting WebRtc MediaStream with the audio pipeline.
class CONTENT_EXPORT WebRtcAudioRenderer
    : NON_EXPORTED_BASE(public media::AudioRendererSink::RenderCallback),
      NON_EXPORTED_BASE(public MediaStreamAudioRenderer) {
 public:
  // This is a little utility class that holds the configured state of an
  // audio stream.
  // It is used by both WebRtcAudioRenderer and SharedAudioRenderer (see cc
  // file) so a part of why it exists is to avoid code duplication and track
  // the state in the same way in WebRtcAudioRenderer and SharedAudioRenderer.
  class PlayingState : public base::NonThreadSafe {
   public:
    PlayingState() : playing_(false), volume_(1.0f) {}

    bool playing() const {
      DCHECK(CalledOnValidThread());
      return playing_;
    }

    void set_playing(bool playing) {
      DCHECK(CalledOnValidThread());
      playing_ = playing;
    }

    float volume() const {
      DCHECK(CalledOnValidThread());
      return volume_;
    }

    void set_volume(float volume) {
      DCHECK(CalledOnValidThread());
      volume_ = volume;
    }

   private:
    bool playing_;
    float volume_;
  };

  // Returns platform specific optimal buffer size for rendering audio.
  static int GetOptimalBufferSize(int sample_rate, int hardware_buffer_size);
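  //
  // Example (illustrative values only, not taken from this file):
  //
  //   int frames = WebRtcAudioRenderer::GetOptimalBufferSize(48000, 480);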

  WebRtcAudioRenderer(
      const scoped_refptr<base::SingleThreadTaskRunner>& signaling_thread,
      const scoped_refptr<webrtc::MediaStreamInterface>& media_stream,
      int source_render_view_id,
      int source_render_frame_id,
      int session_id,
      int sample_rate,
      int frames_per_buffer);

  // Initialize function called by clients like WebRtcAudioDeviceImpl.
  // Stop() has to be called before |source| is deleted.
  bool Initialize(WebRtcAudioRendererSource* source);

  // When sharing a single instance of WebRtcAudioRenderer between multiple
  // users (e.g. WebMediaPlayerMS), call this method to create a proxy object
  // that maintains the Play and Stop states per caller.
  // The wrapper ensures that Play() won't be called when the caller's state
  // is "playing", that Pause() won't be called when the state already is
  // "paused", etc., and it maintains the same guarantee for Stop().
  // When Stop() is called or when the proxy goes out of scope, the proxy
  // will ensure that Pause() is called followed by a call to Stop(), which
  // is the usage pattern that WebRtcAudioRenderer requires.
  scoped_refptr<MediaStreamAudioRenderer> CreateSharedAudioRendererProxy(
      const scoped_refptr<webrtc::MediaStreamInterface>& media_stream);
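  //
  // Sketch of the expected calling pattern (illustrative only; the caller
  // setup shown here is an assumption, not code from this file):
  //
  //   scoped_refptr<WebRtcAudioRenderer> renderer = ...;  // shared instance
  //   if (renderer->Initialize(source)) {
  //     scoped_refptr<MediaStreamAudioRenderer> proxy =
  //         renderer->CreateSharedAudioRendererProxy(media_stream);
  //     proxy->Start();
  //     proxy->Play();
  //     ...
  //     proxy->Stop();  // Or simply let the proxy go out of scope.
  //   }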

  // Used to DCHECK on the expected state.
  bool IsStarted() const;

  // Accessors to the sink audio parameters.
  int channels() const { return sink_params_.channels(); }
  int sample_rate() const { return sink_params_.sample_rate(); }
  int frames_per_buffer() const { return sink_params_.frames_per_buffer(); }

 private:
  // MediaStreamAudioRenderer implementation.  This is private since we want
  // callers to use proxy objects.
  // TODO(tommi): Make the MediaStreamAudioRenderer implementation a pimpl?
  void Start() override;
  void Play() override;
  void Pause() override;
  void Stop() override;
  void SetVolume(float volume) override;
  base::TimeDelta GetCurrentRenderTime() const override;
  bool IsLocalRenderer() const override;

  // Called when an audio renderer, either the main or a proxy, starts playing.
  // Here we maintain a reference count of how many renderers are currently
  // playing so that the shared play state of all the streams can be reflected
  // correctly.
  void EnterPlayState();

  // Called when an audio renderer, either the main or a proxy, is paused.
  // See EnterPlayState for more details.
  void EnterPauseState();
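  //
  // Sketch of the intended reference counting (illustrative; the actual
  // bookkeeping lives in the .cc file and may differ in detail):
  //
  //   EnterPlayState():   if (++play_ref_count_ == 1)  ->  start rendering.
  //   EnterPauseState():  if (--play_ref_count_ == 0)  ->  pause rendering.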

 protected:
  ~WebRtcAudioRenderer() override;

 private:
  enum State {
    UNINITIALIZED,
    PLAYING,
    PAUSED,
  };

  // Holds raw pointers to PlayingState objects.  Ownership is managed outside
  // of this class.
  typedef std::vector<PlayingState*> PlayingStates;
  // Maps an audio source to a list of playing states that collectively hold
  // volume information for that source.
  typedef std::map<webrtc::AudioSourceInterface*, PlayingStates>
      SourcePlayingStates;

  // Used to DCHECK that we are called on the correct thread.
  base::ThreadChecker thread_checker_;

  // Flag to keep track of the state of the renderer.
  State state_;

  // media::AudioRendererSink::RenderCallback implementation.
  // These two methods are called on the AudioOutputDevice worker thread.
  int Render(media::AudioBus* audio_bus,
             int audio_delay_milliseconds) override;
  void OnRenderError() override;

  // Called by AudioPullFifo when more data is necessary.
  // This method is called on the AudioOutputDevice worker thread.
  void SourceCallback(int fifo_frame_delay, media::AudioBus* audio_bus);

  // Goes through all renderers for the |source| and applies the proper
  // volume scaling for the source based on the volume(s) of the renderer(s).
  void UpdateSourceVolume(webrtc::AudioSourceInterface* source);

  // Tracks a playing state.  The state must be playing when this method
  // is called.
  // Returns true if the state was added, false if it was already being
  // tracked.
  bool AddPlayingState(webrtc::AudioSourceInterface* source,
                       PlayingState* state);

  // Removes a playing state for an audio source.
  // Returns true if the state was removed from the internal map, false if
  // it had already been removed or if the source isn't being rendered.
  bool RemovePlayingState(webrtc::AudioSourceInterface* source,
                          PlayingState* state);

  // Called whenever the Play/Pause state of any of the renderers changes, or
  // when the volume of any of them is changed.
  // Here we update the shared Play state and apply volume scaling to all audio
  // sources associated with the |media_stream| based on the collective volume
  // of playing renderers.
  void OnPlayStateChanged(
      const scoped_refptr<webrtc::MediaStreamInterface>& media_stream,
      PlayingState* state);

  // The render view and frame in which the audio is rendered into |sink_|.
  const int source_render_view_id_;
  const int source_render_frame_id_;
  const int session_id_;

  const scoped_refptr<base::SingleThreadTaskRunner> signaling_thread_;

  // The sink (destination) for rendered audio.
  scoped_refptr<media::AudioOutputDevice> sink_;

  // The media stream that holds the audio tracks that this renderer renders.
  const scoped_refptr<webrtc::MediaStreamInterface> media_stream_;

  // Audio data source from the browser process.
  WebRtcAudioRendererSource* source_;

  // Protects access to |state_|, |source_|, |sink_| and |current_time_|.
  mutable base::Lock lock_;

  // Ref count for the MediaPlayers which are playing audio.
  int play_ref_count_;

  // Ref count for the MediaPlayers which have called Start() but not Stop().
  int start_ref_count_;

  // Used to buffer data between the client and the output device in cases
  // where the client buffer size is not the same as the output device buffer
  // size.
  scoped_ptr<media::AudioPullFifo> audio_fifo_;
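  //
  // Sketch of how the FIFO bridges the two buffer sizes (illustrative only;
  // the real wiring lives in the .cc file and may differ in detail):
  //
  //   // During Initialize(), when the frame counts differ:
  //   audio_fifo_.reset(new media::AudioPullFifo(
  //       channels, frames_per_buffer,
  //       base::Bind(&WebRtcAudioRenderer::SourceCallback,
  //                  base::Unretained(this))));
  //   // During Render(), pull through the FIFO when it exists:
  //   audio_fifo_->Consume(audio_bus, audio_bus->frames());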

  // Contains the accumulated delay estimate which is provided to the WebRTC
  // AEC.
  int audio_delay_milliseconds_;

  // Delay due to the FIFO in milliseconds.
  int fifo_delay_milliseconds_;

  base::TimeDelta current_time_;

  // Saved volume and playing state of the root renderer.
  PlayingState playing_state_;

  // Audio params used by the sink of the renderer.
  media::AudioParameters sink_params_;

  // Maps audio sources to a list of active audio renderers.
  // Pointers to PlayingState objects are only kept in this map while the
  // associated renderer is actually playing the stream.  Ownership of the
  // state objects lies with the renderers and they must leave the playing
  // state before being destructed (PlayingState object goes out of scope).
  SourcePlayingStates source_playing_states_;

  // Used for triggering new UMA histogram. Counts number of render
  // callbacks modulo |kNumCallbacksBetweenRenderTimeHistograms|.
  int render_callback_count_;

  DISALLOW_IMPLICIT_CONSTRUCTORS(WebRtcAudioRenderer);
};

}  // namespace content

#endif  // CONTENT_RENDERER_MEDIA_WEBRTC_AUDIO_RENDERER_H_