// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// Utility methods for the Core Audio API on Windows.
// Always ensure that Core Audio is supported before using these methods.
// Use media::CoreAudioIsSupported() for this purpose.
// Also, all methods must be called on a valid COM thread. This can be done
// by using the base::win::ScopedCOMInitializer helper class.

#ifndef MEDIA_AUDIO_WIN_CORE_AUDIO_UTIL_WIN_H_
#define MEDIA_AUDIO_WIN_CORE_AUDIO_UTIL_WIN_H_

#include <audioclient.h>
#include <mmdeviceapi.h>
#include <string>
18 #include "base/basictypes.h"
19 #include "base/time/time.h"
20 #include "base/win/scoped_comptr.h"
21 #include "media/audio/audio_device_name.h"
22 #include "media/audio/audio_parameters.h"
23 #include "media/base/media_export.h"

using base::win::ScopedComPtr;

namespace media {

// Represents audio channel configuration constants as understood by Windows.
// E.g. KSAUDIO_SPEAKER_MONO. For a list of possible values see:
// http://msdn.microsoft.com/en-us/library/windows/hardware/ff537083(v=vs.85).aspx
typedef uint32 ChannelConfig;

class MEDIA_EXPORT CoreAudioUtil {
 public:
  // Returns true if Windows Core Audio is supported.
  // Always verify that this method returns true before using any of the
  // methods in this class.
  static bool IsSupported();

  // Converts a REFERENCE_TIME value to base::TimeDelta.
  // One reference-time unit is 100 nanoseconds.
  // Example: double s = RefererenceTimeToTimeDelta(t).InMillisecondsF();
  static base::TimeDelta RefererenceTimeToTimeDelta(REFERENCE_TIME time);

  // Returns AUDCLNT_SHAREMODE_EXCLUSIVE if the --enable-exclusive-mode
  // command-line flag is used and AUDCLNT_SHAREMODE_SHARED otherwise
  // (default).
  static AUDCLNT_SHAREMODE GetShareMode();

  // The Windows Multimedia Device (MMDevice) API enables audio clients to
  // discover audio endpoint devices and determine their capabilities.

  // Returns the number of active audio devices in the specified data-flow
  // direction. Set |data_flow| to eAll to retrieve the total number of
  // active audio devices.
  static int NumberOfActiveDevices(EDataFlow data_flow);
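
  // Usage sketch (illustrative only; assumes CoreAudioUtil::IsSupported()
  // has returned true and the calling thread has initialized COM):
  //   int num_capture = CoreAudioUtil::NumberOfActiveDevices(eCapture);
  //   int num_total = CoreAudioUtil::NumberOfActiveDevices(eAll);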

  // Creates an IMMDeviceEnumerator interface which provides methods for
  // enumerating audio endpoint devices.
  static ScopedComPtr<IMMDeviceEnumerator> CreateDeviceEnumerator();

  // Creates a default endpoint device that is specified by a data-flow
  // direction and role, e.g. default render device.
  static ScopedComPtr<IMMDevice> CreateDefaultDevice(
      EDataFlow data_flow, ERole role);
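
  // Usage sketch (illustrative only):
  //   ScopedComPtr<IMMDevice> device =
  //       CoreAudioUtil::CreateDefaultDevice(eRender, eConsole);
  //   if (!device.get()) {
  //     // No default render device is available.
  //   }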

  // Returns the device id of the default output device or an empty string
  // if no such device exists or if the default device has been disabled.
  static std::string GetDefaultOutputDeviceID();

  // Creates an endpoint device that is specified by a unique endpoint device-
  // identification string.
  static ScopedComPtr<IMMDevice> CreateDevice(const std::string& device_id);

  // Returns the unique ID and user-friendly name of a given endpoint device.
  // Example: "{0.0.1.00000000}.{8db6020f-18e3-4f25-b6f5-7726c9122574}", and
  //          "Microphone (Realtek High Definition Audio)".
  static HRESULT GetDeviceName(IMMDevice* device, AudioDeviceName* name);
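
  // Usage sketch (illustrative only; |device| is an IMMDevice obtained from
  // e.g. CreateDefaultDevice()):
  //   AudioDeviceName name;
  //   if (SUCCEEDED(CoreAudioUtil::GetDeviceName(device.get(), &name))) {
  //     // |name| now holds the unique id and the friendly device name.
  //   }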

  // Returns the device ID/path of the controller (a.k.a. the physical
  // device) that |device| is connected to. This ID will be the same for all
  // devices from the same controller, so it is useful for doing things like
  // determining whether a set of output and input devices belong to the same
  // controller. The device enumerator is required as well as the device
  // itself since looking at the device topology is required and we need to
  // open up associated devices to determine the controller id.
  // If the ID could not be determined for some reason, an empty string is
  // returned.
  static std::string GetAudioControllerID(IMMDevice* device,
                                          IMMDeviceEnumerator* enumerator);
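
  // Usage sketch (illustrative only; |input_device| and |output_device| are
  // raw IMMDevice pointers obtained elsewhere):
  //   ScopedComPtr<IMMDeviceEnumerator> enumerator =
  //       CoreAudioUtil::CreateDeviceEnumerator();
  //   std::string in_id =
  //       CoreAudioUtil::GetAudioControllerID(input_device, enumerator.get());
  //   std::string out_id =
  //       CoreAudioUtil::GetAudioControllerID(output_device,
  //                                           enumerator.get());
  //   bool same_controller = !in_id.empty() && in_id == out_id;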

  // Accepts an id of an input device and finds a matching output device id.
  // If the associated hardware does not have an audio output device (e.g.
  // a webcam with a mic), an empty string is returned.
  static std::string GetMatchingOutputDeviceID(
      const std::string& input_device_id);

  // Gets the user-friendly name of the endpoint device which is represented
  // by a unique id in |device_id|.
  static std::string GetFriendlyName(const std::string& device_id);

  // Returns true if the provided unique |device_id| corresponds to the
  // current default device for the data-flow direction and role specified
  // by |flow| and |role|.
  static bool DeviceIsDefault(
      EDataFlow flow, ERole role, const std::string& device_id);

  // Query if the audio device is a rendering device or a capture device.
  static EDataFlow GetDataFlow(IMMDevice* device);

  // The Windows Audio Session API (WASAPI) enables client applications to
  // manage the flow of audio data between the application and an audio
  // endpoint device.

  // Create an IAudioClient instance for the default IMMDevice where flow
  // direction and role are defined by |data_flow| and |role|.
  // The IAudioClient interface enables a client to create and initialize an
  // audio stream between an audio application and the audio engine (for a
  // shared-mode stream) or the hardware buffer of an audio endpoint device
  // (for an exclusive-mode stream).
  static ScopedComPtr<IAudioClient> CreateDefaultClient(EDataFlow data_flow,
                                                        ERole role);
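
  // Usage sketch (illustrative only):
  //   ScopedComPtr<IAudioClient> client =
  //       CoreAudioUtil::CreateDefaultClient(eRender, eConsole);
  //   if (!client.get()) {
  //     // Activation of the default render device failed.
  //   }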

  // Create an IAudioClient instance for a specific device _or_ the default
  // device if |device_id| is empty.
  static ScopedComPtr<IAudioClient> CreateClient(const std::string& device_id,
                                                 EDataFlow data_flow,
                                                 ERole role);

  // Create an IAudioClient interface for an existing IMMDevice given by
  // |audio_device|. Flow direction and role are defined by the
  // |audio_device|.
  static ScopedComPtr<IAudioClient> CreateClient(IMMDevice* audio_device);

  // Get the mix format that the audio engine uses internally for processing
  // of shared-mode streams. This format is not necessarily a format that the
  // audio endpoint device supports. Thus, the caller might not succeed in
  // creating an exclusive-mode stream with a format obtained by this method.
  static HRESULT GetSharedModeMixFormat(IAudioClient* client,
                                        WAVEFORMATPCMEX* format);
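
  // Usage sketch (illustrative only; |client| comes from
  // CreateDefaultClient() or CreateClient()):
  //   WAVEFORMATPCMEX format;
  //   if (SUCCEEDED(CoreAudioUtil::GetSharedModeMixFormat(client.get(),
  //                                                       &format))) {
  //     // |format| now describes the shared-mode mix format.
  //   }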

  // Returns true if the specified |client| supports the format in |format|
  // for the given |share_mode| (shared or exclusive).
  static bool IsFormatSupported(IAudioClient* client,
                                AUDCLNT_SHAREMODE share_mode,
                                const WAVEFORMATPCMEX* format);

  // Returns true if the specified |channel_layout| is supported for the
  // default IMMDevice where flow direction and role are defined by
  // |data_flow| and |role|. If this method returns true for a certain
  // channel layout, it means that SharedModeInitialize() will succeed using
  // a format based on the preferred format where the channel layout has been
  // modified.
  static bool IsChannelLayoutSupported(const std::string& device_id,
                                       EDataFlow data_flow,
                                       ERole role,
                                       ChannelLayout channel_layout);
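
  // Usage sketch (illustrative only; assumes an empty |device_id| selects
  // the default device, as for GetChannelConfig() below):
  //   bool stereo_ok = CoreAudioUtil::IsChannelLayoutSupported(
  //       std::string(), eRender, eConsole, CHANNEL_LAYOUT_STEREO);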

  // For a shared-mode stream, the audio engine periodically processes the
  // data in the endpoint buffer at the period obtained in |device_period|.
  // For an exclusive-mode stream, |device_period| corresponds to the minimum
  // time interval between successive processing by the endpoint device.
  // This period plus the stream latency between the buffer and endpoint
  // device represents the minimum possible latency that an audio application
  // can achieve. The time in |device_period| is expressed in 100-nanosecond
  // units.
  static HRESULT GetDevicePeriod(IAudioClient* client,
                                 AUDCLNT_SHAREMODE share_mode,
                                 REFERENCE_TIME* device_period);
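
  // Usage sketch (illustrative only): read the shared-mode engine period and
  // convert it to milliseconds.
  //   REFERENCE_TIME device_period = 0;
  //   if (SUCCEEDED(CoreAudioUtil::GetDevicePeriod(
  //           client.get(), AUDCLNT_SHAREMODE_SHARED, &device_period))) {
  //     double period_ms =
  //         CoreAudioUtil::RefererenceTimeToTimeDelta(device_period)
  //             .InMillisecondsF();
  //   }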

  // Get the preferred audio parameters for the specified |client|, for the
  // direction and role given by |data_flow| and |role|, or for the unique
  // device id given by |device_id|.
  // The acquired values should only be utilized for shared-mode streams
  // since there are no preferred settings for an exclusive-mode stream.
  static HRESULT GetPreferredAudioParameters(IAudioClient* client,
                                             AudioParameters* params);
  static HRESULT GetPreferredAudioParameters(EDataFlow data_flow, ERole role,
                                             AudioParameters* params);
  static HRESULT GetPreferredAudioParameters(const std::string& device_id,
                                             AudioParameters* params);
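
  // Usage sketch (illustrative only):
  //   AudioParameters params;
  //   HRESULT hr = CoreAudioUtil::GetPreferredAudioParameters(
  //       eRender, eConsole, &params);
  //   if (SUCCEEDED(hr)) {
  //     // E.g. params.sample_rate() and params.frames_per_buffer() are set.
  //   }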

  // Retrieves an integer mask which corresponds to the channel layout the
  // audio engine uses for its internal processing/mixing of shared-mode
  // streams. This mask indicates which channels are present in the multi-
  // channel stream. The least significant bit corresponds with the Front
  // Left speaker, the next least significant bit corresponds to the Front
  // Right speaker, and so on, continuing in the order defined in KsMedia.h.
  // See http://msdn.microsoft.com/en-us/library/windows/hardware/ff537083(v=vs.85).aspx
  // for more details.
  // To get the channel config of the default device, pass an empty string
  // for |device_id|.
  static ChannelConfig GetChannelConfig(const std::string& device_id,
                                        EDataFlow data_flow);
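
  // Usage sketch (illustrative only; SPEAKER_FRONT_LEFT comes from the
  // Windows SDK headers):
  //   ChannelConfig config =
  //       CoreAudioUtil::GetChannelConfig(std::string(), eRender);
  //   bool has_front_left = (config & SPEAKER_FRONT_LEFT) != 0;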

  // After activating an IAudioClient interface on an audio endpoint device,
  // the client must initialize it once, and only once, to set up the audio
  // stream between the client and the device. In shared mode, the client
  // connects indirectly through the audio engine which does the mixing.
  // In exclusive mode, the client connects directly to the audio hardware.
  // If a valid event is provided in |event_handle|, the client will be
  // initialized for event-driven buffer handling. If |event_handle| is set
  // to NULL, event-driven buffer handling is not utilized.
  static HRESULT SharedModeInitialize(IAudioClient* client,
                                      const WAVEFORMATPCMEX* format,
                                      HANDLE event_handle,
                                      uint32* endpoint_buffer_size);
  // TODO(henrika): add ExclusiveModeInitialize(...)
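
  // Usage sketch (illustrative only, error handling omitted): initialize a
  // shared-mode render stream and prime its endpoint buffer with silence,
  // using CreateRenderClient() and FillRenderEndpointBufferWithSilence()
  // declared below.
  //   ScopedComPtr<IAudioClient> client =
  //       CoreAudioUtil::CreateDefaultClient(eRender, eConsole);
  //   WAVEFORMATPCMEX format;
  //   CoreAudioUtil::GetSharedModeMixFormat(client.get(), &format);
  //   uint32 endpoint_buffer_size = 0;
  //   CoreAudioUtil::SharedModeInitialize(client.get(), &format, NULL,
  //                                       &endpoint_buffer_size);
  //   ScopedComPtr<IAudioRenderClient> render_client =
  //       CoreAudioUtil::CreateRenderClient(client.get());
  //   CoreAudioUtil::FillRenderEndpointBufferWithSilence(
  //       client.get(), render_client.get());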

  // Create an IAudioRenderClient client for an existing IAudioClient given
  // by |client|. The IAudioRenderClient interface enables a client to write
  // output data to a rendering endpoint buffer.
  static ScopedComPtr<IAudioRenderClient> CreateRenderClient(
      IAudioClient* client);

  // Create an IAudioCaptureClient client for an existing IAudioClient given
  // by |client|. The IAudioCaptureClient interface enables a client to read
  // input data from a capture endpoint buffer.
  static ScopedComPtr<IAudioCaptureClient> CreateCaptureClient(
      IAudioClient* client);

  // Fills up the endpoint rendering buffer with silence for an existing
  // IAudioClient given by |client| and a corresponding IAudioRenderClient
  // given by |render_client|.
  static bool FillRenderEndpointBufferWithSilence(
      IAudioClient* client, IAudioRenderClient* render_client);

 private:
  CoreAudioUtil() {}
  ~CoreAudioUtil() {}
  DISALLOW_COPY_AND_ASSIGN(CoreAudioUtil);
};

}  // namespace media

#endif  // MEDIA_AUDIO_WIN_CORE_AUDIO_UTIL_WIN_H_