// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef CONTENT_RENDERER_MEDIA_MEDIA_STREAM_AUDIO_PROCESSOR_H_
#define CONTENT_RENDERER_MEDIA_MEDIA_STREAM_AUDIO_PROCESSOR_H_

#include "base/atomicops.h"
#include "base/files/file.h"
#include "base/synchronization/lock.h"
#include "base/threading/thread_checker.h"
#include "base/time/time.h"
#include "content/common/content_export.h"
#include "content/renderer/media/aec_dump_message_filter.h"
#include "content/renderer/media/webrtc_audio_device_impl.h"
#include "media/base/audio_converter.h"
#include "third_party/libjingle/source/talk/app/webrtc/mediastreaminterface.h"
#include "third_party/webrtc/modules/audio_processing/include/audio_processing.h"

namespace blink {
class WebMediaConstraints;
}  // namespace blink

namespace media {
class AudioBus;
class AudioParameters;
}  // namespace media

namespace webrtc {
class TypingDetection;
}  // namespace webrtc

namespace content {

class EchoInformation;
class MediaStreamAudioBus;
class MediaStreamAudioFifo;
class RTCMediaConstraints;

using webrtc::AudioProcessorInterface;

// This class owns an object of webrtc::AudioProcessing which contains signal
// processing components like AGC, AEC and NS. It enables the components based
// on the getUserMedia constraints, processes the data and outputs it in
// 10 ms chunks.
class CONTENT_EXPORT MediaStreamAudioProcessor :
    NON_EXPORTED_BASE(public WebRtcPlayoutDataSource::Sink),
    NON_EXPORTED_BASE(public AudioProcessorInterface),
    NON_EXPORTED_BASE(public AecDumpMessageFilter::AecDumpDelegate) {
 public:
  // |playout_data_source| is used to register this class as a sink to the
  // WebRtc playout data for processing AEC. If clients do not enable AEC,
  // |playout_data_source| won't be used.
  MediaStreamAudioProcessor(const blink::WebMediaConstraints& constraints,
                            int effects,
                            WebRtcPlayoutDataSource* playout_data_source);
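
  //
  // A minimal construction sketch (illustrative only): |constraints|,
  // |effects| and |playout_source| are hypothetical values coming from the
  // caller's getUserMedia plumbing, and wrapping the instance in
  // rtc::RefCountedObject<> is an assumption about how the ref-counted
  // AudioProcessorInterface base is satisfied.
  //
  //   scoped_refptr<MediaStreamAudioProcessor> processor(
  //       new rtc::RefCountedObject<MediaStreamAudioProcessor>(
  //           constraints, effects, playout_source));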

  // Called when the format of the capture data has changed.
  // Called on the main render thread. The caller is responsible for stopping
  // the capture thread before calling this method.
  // After this method, the capture thread will be changed to a new capture
  // thread.
  void OnCaptureFormatChanged(const media::AudioParameters& source_params);
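
  //
  // Illustrative call order around a format change (Stop/StartCaptureThread()
  // are hypothetical stand-ins for the caller's capture-thread control):
  //
  //   capturer->StopCaptureThread();
  //   processor->OnCaptureFormatChanged(new_source_params);
  //   capturer->StartCaptureThread();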

  // Pushes capture data in |audio_source| to the internal FIFO. Each call to
  // this method should be followed by calls to ProcessAndConsumeData() until
  // it returns false, to pull out all available data.
  // Called on the capture audio thread.
  void PushCaptureData(const media::AudioBus& audio_source,
                       base::TimeDelta capture_delay);

  // Processes a block of 10 ms data from the internal FIFO, returning true if
  // |processed_data| contains the result. Returns false and does not modify
  // the outputs if the internal FIFO has insufficient data. The caller does
  // NOT own the object pointed to by |*processed_data|.
  // |capture_delay| is an adjustment on the |capture_delay| value provided in
  // the last call to PushCaptureData().
  // |new_volume| receives the new microphone volume from the AGC.
  // The new microphone volume range is [0, 255], and the value will be 0 if
  // the microphone volume should not be adjusted.
  // Called on the capture audio thread.
  bool ProcessAndConsumeData(media::AudioBus** processed_data,
                             base::TimeDelta* capture_delay,
                             int* new_volume);
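
  //
  // Illustrative capture-thread loop (Deliver() and SetMicrophoneVolume() are
  // hypothetical stand-ins for whatever the caller does with the output):
  //
  //   processor->PushCaptureData(*source_bus, delay);
  //   media::AudioBus* processed = NULL;
  //   base::TimeDelta adjusted_delay;
  //   int new_volume = 0;
  //   while (processor->ProcessAndConsumeData(&processed, &adjusted_delay,
  //                                           &new_volume)) {
  //     Deliver(*processed, delay + adjusted_delay);
  //     if (new_volume)
  //       SetMicrophoneVolume(new_volume);
  //   }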

  // Stops the audio processor, no more AEC dump or render data after calling
  // this method.
  void Stop();

  // The audio formats of the capture input to and output from the processor.
  // Must only be called on the main render or audio capture threads.
  const media::AudioParameters& InputFormat() const;
  const media::AudioParameters& OutputFormat() const;

  // Accessor to check if the audio processing is enabled or not.
  bool has_audio_processing() const { return audio_processing_ != NULL; }

  // AecDumpMessageFilter::AecDumpDelegate implementation.
  // Called on the main render thread.
  void OnAecDumpFile(const IPC::PlatformFileForTransit& file_handle) override;
  void OnDisableAecDump() override;
  void OnIpcClosing() override;

 protected:
  ~MediaStreamAudioProcessor() override;

 private:
  friend class MediaStreamAudioProcessorTest;
  FRIEND_TEST_ALL_PREFIXES(MediaStreamAudioProcessorTest,
                           GetAecDumpMessageFilter);

  // WebRtcPlayoutDataSource::Sink implementation.
  void OnPlayoutData(media::AudioBus* audio_bus,
                     int sample_rate,
                     int audio_delay_milliseconds) override;
  void OnPlayoutDataSourceChanged() override;

  // webrtc::AudioProcessorInterface implementation.
  // This method is called on the libjingle thread.
  void GetStats(AudioProcessorStats* stats) override;

  // Helper to initialize the WebRtc AudioProcessing.
  void InitializeAudioProcessingModule(
      const blink::WebMediaConstraints& constraints, int effects);
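
  //
  // Illustrative sketch of the kind of wiring this helper does; the real
  // constraint-to-component mapping lives in the .cc file, and the boolean
  // names are hypothetical values derived from |constraints|:
  //
  //   if (echo_cancellation)
  //     audio_processing_->echo_cancellation()->Enable(true);
  //   if (noise_suppression)
  //     audio_processing_->noise_suppression()->Enable(true);
  //   if (auto_gain_control)
  //     audio_processing_->gain_control()->Enable(true);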

  // Helper to initialize the capture FIFO.
  void InitializeCaptureFifo(const media::AudioParameters& input_format);

  // Helper to initialize the render FIFO.
  void InitializeRenderFifoIfNeeded(int sample_rate,
                                    int number_of_channels,
                                    int frames_per_buffer);
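
  //
  // For reference, a 10 ms chunk is sample_rate / 100 frames (e.g. 480 frames
  // at 48000 Hz); that is the unit both the capture and render FIFOs hand out.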

  // Called by ProcessAndConsumeData().
  // Returns the new microphone volume in the range of [0, 255].
  // When the volume does not need to be updated, it returns 0.
  int ProcessData(const float* const* process_ptrs,
                  base::TimeDelta capture_delay,
                  float* const* output_ptrs);

  // Cached value for the render delay latency. This member is accessed by
  // both the capture audio thread and the render audio thread.
  base::subtle::Atomic32 render_delay_ms_;
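
  //
  // Being a base::subtle::Atomic32, it is read and written lock-free, e.g.
  // (illustrative only; the .cc file picks the exact memory-ordering variant):
  //
  //   base::subtle::Release_Store(&render_delay_ms_, audio_delay_milliseconds);
  //   base::subtle::Atomic32 delay_ms =
  //       base::subtle::Acquire_Load(&render_delay_ms_);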

  // Module to handle processing and format conversion.
  scoped_ptr<webrtc::AudioProcessing> audio_processing_;

  // FIFO to provide 10 ms capture chunks.
  scoped_ptr<MediaStreamAudioFifo> capture_fifo_;
  // Receives processing output.
  scoped_ptr<MediaStreamAudioBus> output_bus_;

  // FIFO to provide 10 ms render chunks when the AEC is enabled.
  scoped_ptr<MediaStreamAudioFifo> render_fifo_;

  // These are mutated on the main render thread in OnCaptureFormatChanged().
  // The caller guarantees this does not run concurrently with accesses on the
  // capture audio thread.
  media::AudioParameters input_format_;
  media::AudioParameters output_format_;
  // Only used on the render audio thread.
  media::AudioParameters render_format_;

  // Raw pointer to the WebRtcPlayoutDataSource, which is valid for the
  // lifetime of RenderThread.
  WebRtcPlayoutDataSource* playout_data_source_;

  // Used to DCHECK that some methods are called on the main render thread.
  base::ThreadChecker main_thread_checker_;
  // Used to DCHECK that some methods are called on the capture audio thread.
  base::ThreadChecker capture_thread_checker_;
  // Used to DCHECK that some methods are called on the render audio thread.
  base::ThreadChecker render_thread_checker_;

  // Flag to enable stereo channel mirroring.
  bool audio_mirroring_;

  scoped_ptr<webrtc::TypingDetection> typing_detector_;
  // This flag holds the result of typing detection.
  // It can be accessed by the capture audio thread and by the libjingle thread
  // which calls GetStats().
  base::subtle::Atomic32 typing_detected_;

  // Communication with browser for AEC dump.
  scoped_refptr<AecDumpMessageFilter> aec_dump_message_filter_;

  // Flag to avoid executing Stop() more than once.
  bool stopped_;

  // Object for logging echo information when the AEC is enabled. Accessible by
  // the libjingle thread through GetStats().
  scoped_ptr<EchoInformation> echo_information_;

  DISALLOW_COPY_AND_ASSIGN(MediaStreamAudioProcessor);
};

}  // namespace content

#endif  // CONTENT_RENDERER_MEDIA_MEDIA_STREAM_AUDIO_PROCESSOR_H_