// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// Implementation of AudioOutputStream for Windows using Windows Core Audio
// WASAPI for low latency rendering.
//
// Overview of operation and performance:
//
// - An object of WASAPIAudioOutputStream is created by the AudioManager
//   factory.
// - Next some thread will call Open(); at that point the underlying
//   Core Audio APIs are utilized to create two WASAPI interfaces called
//   IAudioClient and IAudioRenderClient.
// - Then some thread will call Start(source).
//   A thread called "wasapi_render_thread" is started and this thread listens
//   on an event signal which is set periodically by the audio engine to signal
//   render events. As a result, OnMoreData() will be called and the registered
//   client is then expected to provide data samples to be played out.
// - At some point, a thread will call Stop(), which stops and joins the
//   render thread and at the same time stops audio streaming.
// - The same thread that called Stop() will call Close(), where we clean up
//   and notify the audio manager, which likely will destroy this object
//   (see the usage sketch below).
// - A total typical delay of 35 ms contains three parts:
//   o Audio endpoint device period (~10 ms).
//   o Stream latency between the buffer and endpoint device (~5 ms).
//   o Endpoint buffer (~20 ms to ensure glitch-free rendering).
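//
// A minimal usage sketch of the flow described above (illustrative only;
// |source| is a hypothetical AudioSourceCallback implementation and error
// handling is omitted):
//
//   WASAPIAudioOutputStream* stream = ...;  // Created by the AudioManager.
//   if (stream->Open()) {
//     stream->Start(source);  // Rendering starts; OnMoreData() is called.
//     ...
//     stream->Stop();         // Stops streaming and joins the render thread.
//   }
//   stream->Close();          // Notifies the manager; the stream destroys
//                             // itself.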
//
// Implementation notes:
//
// - The minimum supported client is Windows Vista.
// - This implementation is single-threaded, hence:
//   o Construction and destruction must take place from the same thread.
//   o All APIs must be called from the creating thread as well.
// - It is required to first acquire the native audio parameters of the default
//   output device and then use the same rate when creating this object. Use
//   e.g. WASAPIAudioOutputStream::HardwareSampleRate() to retrieve the sample
//   rate (see the sketch after this list). Open() will fail unless "perfect"
//   audio parameters are utilized.
// - Calling Close() also leads to self destruction.
// - Support for 8-bit audio has not yet been verified and tested.
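//
// A sketch of how matching ("perfect") parameters can be acquired before
// creating the stream (illustrative only; the AudioParameters constructor
// arguments shown here are an assumption, and |frames_per_buffer| is a
// placeholder chosen by the client):
//
//   int sample_rate = WASAPIAudioOutputStream::HardwareSampleRate("");
//   ChannelLayout layout = WASAPIAudioOutputStream::HardwareChannelLayout();
//   AudioParameters params(AudioParameters::AUDIO_PCM_LOW_LATENCY, layout,
//                          sample_rate, 16, frames_per_buffer);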
//
// Core Audio API details:
//
// - The public API methods (Open(), Start(), Stop() and Close()) must be
//   called on the constructing thread. The reason is that we want to ensure
//   that the COM environment is the same for all API implementations.
// - Utilized MMDevice interfaces:
//   o IMMDeviceEnumerator
// - Utilized WASAPI interfaces:
//   o IAudioClient
//   o IAudioRenderClient
// - The stream is initialized in shared mode and the processing of the
//   audio buffer is event driven (see the sketch below).
// - The Multimedia Class Scheduler service (MMCSS) is utilized to boost
//   the priority of the render thread.
// - Audio-rendering endpoint devices can have three roles:
//   Console (eConsole), Communications (eCommunications), and Multimedia
//   (eMultimedia). Search for "Device Roles" on MSDN for more details.
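//
// A rough sketch of the event-driven wait performed by the render thread
// (illustrative only; the real loop also registers the thread with MMCSS and
// performs error handling):
//
//   HANDLE wait_array[] = { stop_render_event_.Get(),
//                           audio_samples_render_event_.Get() };
//   bool playing = true;
//   while (playing) {
//     DWORD result = WaitForMultipleObjects(2, wait_array, FALSE, INFINITE);
//     if (result == WAIT_OBJECT_0)           // |stop_render_event_| set.
//       playing = false;
//     else if (result == WAIT_OBJECT_0 + 1)  // Render event from the engine.
//       RenderAudioFromSource(...);          // Fill the endpoint buffer.
//   }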
//
// Threading details:
//
// - It is assumed that this class is created on the audio thread owned
//   by the AudioManager.
// - It is a requirement to call the following methods on the same audio
//   thread: Open(), Start(), Stop(), and Close().
// - Audio rendering is performed on the audio render thread, owned by this
//   class, and the AudioSourceCallback::OnMoreData() method will be called
//   from this thread. Stream switching also takes place on the audio-render
//   thread.
//
// Experimental exclusive mode:
//
// - It is possible to open up a stream in exclusive mode by using the
//   --enable-exclusive-audio command line flag.
// - The internal buffering scheme is less flexible for exclusive streams.
//   Hence, some manual tuning will be required before deciding what frame
//   size to use. See the WinAudioOutputTest unit test for more details.
// - If an application opens a stream in exclusive mode, the application has
//   exclusive use of the audio endpoint device that plays the stream.
// - Exclusive mode should only be utilized when the lowest possible latency
//   is required.
// - In exclusive mode, the client can choose to open the stream in any audio
//   format that the endpoint device supports, i.e. not limited to the device's
//   current (default) configuration.
// - Initial measurements on Windows 7 (HP Z600 workstation) have shown that
//   the lowest possible latencies we can achieve on this machine are:
//   o ~3.3333 ms @ 48 kHz <=> 160 audio frames per buffer.
//   o ~3.6281 ms @ 44.1 kHz <=> 160 audio frames per buffer.
// - See http://msdn.microsoft.com/en-us/library/windows/desktop/dd370844(v=vs.85).aspx
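//
// For reference, the latencies above follow directly from the buffer size
// divided by the sample rate:
//   o 160 frames / 48000 frames per second = 3.3333 ms
//   o 160 frames / 44100 frames per second = 3.6281 ms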

#ifndef MEDIA_AUDIO_WIN_AUDIO_LOW_LATENCY_OUTPUT_WIN_H_
#define MEDIA_AUDIO_WIN_AUDIO_LOW_LATENCY_OUTPUT_WIN_H_

#include <Audioclient.h>
#include <MMDeviceAPI.h>

#include <string>

#include "base/compiler_specific.h"
#include "base/memory/scoped_ptr.h"
#include "base/threading/platform_thread.h"
#include "base/threading/simple_thread.h"
#include "base/win/scoped_co_mem.h"
#include "base/win/scoped_com_initializer.h"
#include "base/win/scoped_comptr.h"
#include "base/win/scoped_handle.h"
#include "media/audio/audio_io.h"
#include "media/audio/audio_parameters.h"
#include "media/base/media_export.h"

namespace media {

class AudioManagerWin;

// AudioOutputStream implementation using Windows Core Audio APIs.
class MEDIA_EXPORT WASAPIAudioOutputStream
    : public AudioOutputStream,
      public base::DelegateSimpleThread::Delegate {
 public:
  // The ctor takes all the usual parameters, plus |manager| which is the
  // audio manager that is creating this object.
  WASAPIAudioOutputStream(AudioManagerWin* manager,
                          const std::string& device_id,
                          const AudioParameters& params,
                          ERole device_role);

  // The dtor is typically called by the AudioManager only and it is usually
  // triggered by calling AudioOutputStream::Close().
  virtual ~WASAPIAudioOutputStream();

  // Implementation of AudioOutputStream.
  virtual bool Open() OVERRIDE;
  virtual void Start(AudioSourceCallback* callback) OVERRIDE;
  virtual void Stop() OVERRIDE;
  virtual void Close() OVERRIDE;
  virtual void SetVolume(double volume) OVERRIDE;
  virtual void GetVolume(double* volume) OVERRIDE;

  // Retrieves the number of channels the audio engine uses for its internal
  // processing/mixing of shared-mode streams for the default endpoint device.
  static int HardwareChannelCount();

  // Retrieves the channel layout the audio engine uses for its internal
  // processing/mixing of shared-mode streams for the default endpoint device.
  // Note that we convert an internal channel layout mask (see ChannelMask())
  // into a Chrome-specific channel layout enumerator in this method, hence
  // the match might not be perfect.
  static ChannelLayout HardwareChannelLayout();

  // Retrieves the sample rate the audio engine uses for its internal
  // processing/mixing of shared-mode streams. To fetch the settings for the
  // default device, pass an empty string as the |device_id|.
  static int HardwareSampleRate(const std::string& device_id);
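  //
  // Example use of HardwareSampleRate() (illustrative only):
  //   int default_rate = HardwareSampleRate(std::string());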

  // Returns AUDCLNT_SHAREMODE_EXCLUSIVE if the --enable-exclusive-audio
  // command-line flag is used and AUDCLNT_SHAREMODE_SHARED otherwise (default).
  static AUDCLNT_SHAREMODE GetShareMode();

  bool started() const { return render_thread_.get() != NULL; }

 private:
  // DelegateSimpleThread::Delegate implementation.
  virtual void Run() OVERRIDE;

  // Core part of the thread loop which controls the actual rendering.
  // Checks available amount of space in the endpoint buffer and reads
  // data from the client to fill up the buffer without causing audio
  // glitches.
  void RenderAudioFromSource(IAudioClock* audio_clock,
                             UINT64 device_frequency);
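  //
  // A rough sketch of the work done per render event (illustrative only):
  //   UINT32 padding = 0;
  //   audio_client_->GetCurrentPadding(&padding);
  //   size_t free_frames = endpoint_buffer_size_frames_ - padding;
  //   // While |free_frames| >= |packet_size_frames_|: fetch one packet from
  //   // |source_| via OnMoreData() and commit it to the endpoint buffer with
  //   // IAudioRenderClient::GetBuffer()/ReleaseBuffer().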

  // Called when the device will be opened in exclusive mode and use the
  // application specified format.
  // TODO(henrika): rewrite and move to CoreAudioUtil when removing flag
  // for exclusive audio mode.
  HRESULT ExclusiveModeInitialization(IAudioClient* client,
                                      HANDLE event_handle,
                                      uint32* endpoint_buffer_size);

  // If |render_thread_| is valid, sets |stop_render_event_| and blocks until
  // the thread has stopped. |stop_render_event_| is reset after the call.
  // |source_| is set to NULL.
  void StopThread();

  // Contains the thread ID of the creating thread.
  base::PlatformThreadId creating_thread_id_;

  // Our creator; the audio manager needs to be notified when we close.
  AudioManagerWin* manager_;

  // Rendering is driven by this thread (which has no message loop).
  // All OnMoreData() callbacks will be called from this thread.
  scoped_ptr<base::DelegateSimpleThread> render_thread_;

  // Contains the desired audio format which is set up at construction.
  // Extended PCM waveform format structure based on WAVEFORMATEXTENSIBLE.
  // Use this for multiple channel and hi-resolution PCM data.
  WAVEFORMATPCMEX format_;

  // Set to true when stream is successfully opened.
  bool opened_;

  // We check if the input audio parameters are identical (bit depth is
  // excluded) to the preferred (native) audio parameters during construction.
  // Open() will fail if |audio_parameters_are_valid_| is false.
  bool audio_parameters_are_valid_;

  // Volume level from 0 to 1.
  double volume_;

  // Size in audio frames of each audio packet where an audio packet
  // is defined as the block of data which the source is expected to deliver
  // in each OnMoreData() callback.
  size_t packet_size_frames_;

  // Size in bytes of each audio packet.
  size_t packet_size_bytes_;

  // Size in milliseconds of each audio packet.
  float packet_size_ms_;

  // Length of the audio endpoint buffer in audio frames.
  uint32 endpoint_buffer_size_frames_;

  // The target device id or an empty string for the default device.
  const std::string device_id_;

  // Defines the role that the system has assigned to an audio endpoint device.
  ERole device_role_;

  // The sharing mode for the connection.
  // Valid values are AUDCLNT_SHAREMODE_SHARED and AUDCLNT_SHAREMODE_EXCLUSIVE
  // where AUDCLNT_SHAREMODE_SHARED is the default.
  AUDCLNT_SHAREMODE share_mode_;

  // Counts the number of audio frames written to the endpoint buffer.
  UINT64 num_written_frames_;

  // Pointer to the client that will deliver audio samples to be played out.
  AudioSourceCallback* source_;

  // An IMMDeviceEnumerator interface which represents a device enumerator.
  base::win::ScopedComPtr<IMMDeviceEnumerator> device_enumerator_;

  // An IAudioClient interface which enables a client to create and initialize
  // an audio stream between an audio application and the audio engine.
  base::win::ScopedComPtr<IAudioClient> audio_client_;

  // The IAudioRenderClient interface enables a client to write output
  // data to a rendering endpoint buffer.
  base::win::ScopedComPtr<IAudioRenderClient> audio_render_client_;

  // The audio engine will signal this event each time a buffer becomes
  // ready to be filled by the client.
  base::win::ScopedHandle audio_samples_render_event_;

  // This event will be signaled when rendering shall stop.
  base::win::ScopedHandle stop_render_event_;

  // Container for retrieving data from AudioSourceCallback::OnMoreData().
  scoped_ptr<AudioBus> audio_bus_;

  DISALLOW_COPY_AND_ASSIGN(WASAPIAudioOutputStream);
};

}  // namespace media

#endif  // MEDIA_AUDIO_WIN_AUDIO_LOW_LATENCY_OUTPUT_WIN_H_