// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef CONTENT_RENDERER_MEDIA_WEBRTC_AUDIO_RENDERER_H_
#define CONTENT_RENDERER_MEDIA_WEBRTC_AUDIO_RENDERER_H_

#include <map>
#include <string>
#include <vector>

#include "base/compiler_specific.h"
#include "base/memory/ref_counted.h"
#include "base/memory/scoped_ptr.h"
#include "base/single_thread_task_runner.h"
#include "base/synchronization/lock.h"
#include "base/threading/non_thread_safe.h"
#include "base/threading/thread_checker.h"
#include "content/common/content_export.h"
#include "content/public/renderer/media_stream_audio_renderer.h"
#include "content/renderer/media/webrtc_audio_device_impl.h"
#include "media/base/audio_decoder.h"
#include "media/base/audio_pull_fifo.h"
#include "media/base/audio_renderer_sink.h"
#include "media/base/channel_layout.h"

namespace media {
class AudioOutputDevice;
}  // namespace media

namespace webrtc {
class AudioSourceInterface;
class MediaStreamInterface;
}  // namespace webrtc

namespace content {

class WebRtcAudioRendererSource;

// This renderer handles calls from the pipeline and WebRtc ADM. It is used
// for connecting WebRtc MediaStream with the audio pipeline.
class CONTENT_EXPORT WebRtcAudioRenderer
    : NON_EXPORTED_BASE(public media::AudioRendererSink::RenderCallback),
      NON_EXPORTED_BASE(public MediaStreamAudioRenderer) {
 public:
  // A small utility class that holds the configured state of an audio
  // stream. It is used by both WebRtcAudioRenderer and SharedAudioRenderer
  // (see the cc file), partly to avoid code duplication and partly to track
  // the state in the same way in both classes.
  class PlayingState : public base::NonThreadSafe {
   public:
    PlayingState() : playing_(false), volume_(1.0f) {}

    bool playing() const {
      DCHECK(CalledOnValidThread());
      return playing_;
    }

    void set_playing(bool playing) {
      DCHECK(CalledOnValidThread());
      playing_ = playing;
    }

    float volume() const {
      DCHECK(CalledOnValidThread());
      return volume_;
    }

    void set_volume(float volume) {
      DCHECK(CalledOnValidThread());
      volume_ = volume;
    }

   private:
    bool playing_;
    float volume_;
  };

  // Returns the platform-specific optimal buffer size for rendering audio.
  static int GetOptimalBufferSize(int sample_rate, int hardware_buffer_size);
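
  // Illustrative use (a hedged sketch; |hw_params| is a placeholder for
  // parameters queried from the audio hardware, not something defined in
  // this header):
  //
  //   media::AudioParameters hw_params = ...;
  //   int frames = WebRtcAudioRenderer::GetOptimalBufferSize(
  //       hw_params.sample_rate(), hw_params.frames_per_buffer());
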
  WebRtcAudioRenderer(
      const scoped_refptr<base::SingleThreadTaskRunner>& signaling_thread,
      const scoped_refptr<webrtc::MediaStreamInterface>& media_stream,
      int source_render_frame_id,
      int session_id,
      int sample_rate,
      int frames_per_buffer);

  // Initialize function called by clients like WebRtcAudioDeviceImpl.
  // Stop() has to be called before |source| is deleted.
  bool Initialize(WebRtcAudioRendererSource* source);
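
  // Typical lifecycle (a sketch under the assumption that |source| is some
  // WebRtcAudioRendererSource implementation, e.g. WebRtcAudioDeviceImpl;
  // playback itself is normally driven through a proxy, see below):
  //
  //   scoped_refptr<WebRtcAudioRenderer> renderer = new WebRtcAudioRenderer(
  //       signaling_thread, media_stream, render_frame_id, session_id,
  //       sample_rate, frames_per_buffer);
  //   if (!renderer->Initialize(source))
  //     return false;  // Handle failure.
  //   ...
  //   // Later, before |source| is deleted, Stop() must have been called.
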
  // When sharing a single instance of WebRtcAudioRenderer between multiple
  // users (e.g. WebMediaPlayerMS), call this method to create a proxy object
  // that maintains the Play and Stop states per caller.
  // The wrapper ensures that Play() won't be called when the caller's state
  // is "playing", that Pause() won't be called when the state already is
  // "paused", and it maintains the same guarantee for Stop().
  // When Stop() is called or when the proxy goes out of scope, the proxy
  // will ensure that Pause() is called followed by a call to Stop(), which
  // is the usage pattern that WebRtcAudioRenderer requires.
  scoped_refptr<MediaStreamAudioRenderer> CreateSharedAudioRendererProxy(
      const scoped_refptr<webrtc::MediaStreamInterface>& media_stream);
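
  // Example (a sketch; |renderer| is an already-initialized
  // WebRtcAudioRenderer):
  //
  //   scoped_refptr<MediaStreamAudioRenderer> proxy =
  //       renderer->CreateSharedAudioRendererProxy(media_stream);
  //   proxy->Start();
  //   proxy->Play();
  //   ...
  //   proxy->Stop();  // Or simply let |proxy| go out of scope.
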
  // Used to DCHECK on the expected state.
  bool IsStarted() const;

  // Accessors to the sink audio parameters.
  int channels() const { return sink_params_.channels(); }
  int sample_rate() const { return sink_params_.sample_rate(); }
  int frames_per_buffer() const { return sink_params_.frames_per_buffer(); }

 private:
  // MediaStreamAudioRenderer implementation. This is private since we want
  // callers to use proxy objects.
  // TODO(tommi): Make the MediaStreamAudioRenderer implementation a pimpl?
  void Start() override;
  void Play() override;
  void Pause() override;
  void Stop() override;
  void SetVolume(float volume) override;
  media::OutputDevice* GetOutputDevice() override;
  base::TimeDelta GetCurrentRenderTime() const override;
  bool IsLocalRenderer() const override;

  // Called when an audio renderer, either the main or a proxy, starts playing.
  // Here we maintain a reference count of how many renderers are currently
  // playing so that the shared play state of all the streams can be reflected
  // correctly.
  void EnterPlayState();

  // Called when an audio renderer, either the main or a proxy, is paused.
  // See EnterPlayState for more details.
  void EnterPauseState();
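
  // Conceptually, these two methods implement a shared play reference count
  // (a simplified sketch of the idea, not the exact implementation in the
  // cc file):
  //
  //   void EnterPlayState()  { if (++play_ref_count_ == 1) sink_->Play(); }
  //   void EnterPauseState() { if (--play_ref_count_ == 0) sink_->Pause(); }
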
 protected:
  ~WebRtcAudioRenderer() override;

 private:
  enum State {
    UNINITIALIZED,
    PLAYING,
    PAUSED,
  };

  // Holds raw pointers to PlayingState objects. Ownership is managed outside
  // of this type.
  typedef std::vector<PlayingState*> PlayingStates;

  // Maps an audio source to a list of playing states that collectively hold
  // volume information for that source.
  typedef std::map<webrtc::AudioSourceInterface*, PlayingStates>
      SourcePlayingStates;

  // Used to DCHECK that we are called on the correct thread.
  base::ThreadChecker thread_checker_;

  // Flag to keep track of the state of the renderer.
  State state_;

  // media::AudioRendererSink::RenderCallback implementation.
  // These two methods are called on the AudioOutputDevice worker thread.
  int Render(media::AudioBus* audio_bus,
             int audio_delay_milliseconds) override;
  void OnRenderError() override;

  // Called by AudioPullFifo when more data is necessary.
  // This method is called on the AudioOutputDevice worker thread.
  void SourceCallback(int fifo_frame_delay, media::AudioBus* audio_bus);

  // Goes through all renderers for the |source| and applies the proper
  // volume scaling for the source based on the volume(s) of the renderer(s).
  void UpdateSourceVolume(webrtc::AudioSourceInterface* source);
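
  // One plausible policy, stated here only as an illustration (the
  // authoritative logic lives in the cc file): combine the volumes of all
  // currently playing renderers for the source and push the result to WebRTC.
  //
  //   float volume = 0.0f;
  //   for (PlayingState* state : source_playing_states_[source]) {
  //     if (state->playing())
  //       volume += state->volume();
  //   }
  //   source->SetVolume(volume);
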
  // Tracks a playing state. The state must be playing when this method
  // is called.
  // Returns true if the state was added, false if it was already being
  // tracked.
  bool AddPlayingState(webrtc::AudioSourceInterface* source,
                       PlayingState* state);

  // Removes a playing state for an audio source.
  // Returns true if the state was removed from the internal map, false if
  // it had already been removed or if the source isn't being rendered.
  bool RemovePlayingState(webrtc::AudioSourceInterface* source,
                          PlayingState* state);

  // Called whenever the Play/Pause state of any of the renderers changes or
  // whenever the volume of any of them changes.
  // Here we update the shared play state and apply volume scaling to all
  // audio sources associated with the |media_stream| based on the collective
  // volume of the playing renderers.
  void OnPlayStateChanged(
      const scoped_refptr<webrtc::MediaStreamInterface>& media_stream,
      PlayingState* state);

  // The RenderFrame in which the audio is rendered into |sink_|.
  const int source_render_frame_id_;
  const int session_id_;

  const scoped_refptr<base::SingleThreadTaskRunner> signaling_thread_;

  // The sink (destination) for rendered audio.
  scoped_refptr<media::AudioOutputDevice> sink_;

  // The media stream that holds the audio tracks that this renderer renders.
  const scoped_refptr<webrtc::MediaStreamInterface> media_stream_;

  // Audio data source from the browser process.
  WebRtcAudioRendererSource* source_;

  // Protects access to |state_|, |source_|, |sink_| and |current_time_|.
  mutable base::Lock lock_;

  // Ref count for the MediaPlayers which are playing audio.
  int play_ref_count_;

  // Ref count for the MediaPlayers which have called Start() but not Stop().
  int start_ref_count_;

  // Used to buffer data between the client and the output device in cases
  // where the client buffer size is not the same as the output device buffer
  // size.
  scoped_ptr<media::AudioPullFifo> audio_fifo_;
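
  // How the FIFO fits in (a hedged sketch of the intended data flow, not
  // verbatim cc code): when the sink's buffer size differs from the
  // client's, Render() pulls from |audio_fifo_|, which in turn invokes
  // SourceCallback() whenever it needs more frames.
  //
  //   audio_fifo_.reset(new media::AudioPullFifo(
  //       sink_params_.channels(), sink_params_.frames_per_buffer(),
  //       base::Bind(&WebRtcAudioRenderer::SourceCallback,
  //                  base::Unretained(this))));
  //   // In Render():
  //   audio_fifo_->Consume(audio_bus, audio_bus->frames());
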
  // Contains the accumulated delay estimate which is provided to the WebRTC
  // AEC.
  int audio_delay_milliseconds_;

  // Delay due to the FIFO in milliseconds.
  int fifo_delay_milliseconds_;
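
  // The delay reported back to WebRTC is assumed here to be the sum of the
  // two fields above (an illustration; see the cc file for the exact math):
  //
  //   int total_delay_ms =
  //       audio_delay_milliseconds_ + fifo_delay_milliseconds_;
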
  base::TimeDelta current_time_;

  // Saved volume and playing state of the root renderer.
  PlayingState playing_state_;

  // Audio params used by the sink of the renderer.
  media::AudioParameters sink_params_;

  // Maps audio sources to a list of active audio renderers.
  // Pointers to PlayingState objects are only kept in this map while the
  // associated renderer is actually playing the stream. Ownership of the
  // state objects lies with the renderers, and they must leave the playing
  // state before they are destroyed (i.e., before the PlayingState object
  // goes out of scope).
  SourcePlayingStates source_playing_states_;

  // Used for triggering a new UMA histogram. Counts the number of render
  // callbacks modulo |kNumCallbacksBetweenRenderTimeHistograms|.
  int render_callback_count_;

  DISALLOW_IMPLICIT_CONSTRUCTORS(WebRtcAudioRenderer);
};

}  // namespace content

#endif  // CONTENT_RENDERER_MEDIA_WEBRTC_AUDIO_RENDERER_H_