// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "media/base/media_switches.h"

namespace switches {

// Allow users to specify a custom buffer size for debugging purposes.
const char kAudioBufferSize[] = "audio-buffer-size";

// Set number of threads to use for video decoding.
const char kVideoThreads[] = "video-threads";

#if defined(OS_ANDROID)
// Sets the MediaSource player that uses the separate media thread.
const char kEnableMediaThreadForMediaPlayback[] =
    "enable-media-thread-for-media-playback";
#endif  // defined(OS_ANDROID)

#if defined(OS_LINUX) || defined(OS_FREEBSD) || defined(OS_SOLARIS)
// The ALSA device to use when opening an audio input stream.
const char kAlsaInputDevice[] = "alsa-input-device";

// The ALSA device to use when opening an audio output stream.
const char kAlsaOutputDevice[] = "alsa-output-device";
#endif  // defined(OS_LINUX) || defined(OS_FREEBSD) || defined(OS_SOLARIS)

// Use GpuMemoryBuffers for Video Capture when this is an option for the device.
// Experimental, see http://crbug.com/503835 and http://crbug.com/440843.
const char kUseGpuMemoryBuffersForCapture[] =
    "use-gpu-memory-buffers-for-capture";

#if defined(OS_MACOSX)
// AVFoundation is available in OS X 10.7 and onwards, and is to be used
// (http://crbug.com/288562) for both audio and video device monitoring and
// for video capture. Being a dynamically loaded NSBundle and library, it adds
// to Chrome's startup time (http://crbug.com/311325 and
// http://crbug.com/311437); for experimentation purposes, in particular to
// study the library load time issue, usage of this library can be enabled
// with this flag.
const char kEnableAVFoundation[] = "enable-avfoundation";

// QTKit is the media capture API that preceded AVFoundation; it is available
// up to and including Mac OS X 10.9 (though deprecated in that release). This
// flag is used for troubleshooting and testing, and forces QTKit in builds
// and configurations where AVFoundation would otherwise be used.
const char kForceQTKit[] = "force-qtkit";
#endif  // defined(OS_MACOSX)

#if defined(OS_WIN)
// Use exclusive mode audio streaming for Windows Vista and higher.
// Leads to lower latencies for audio streams which use the
// AudioParameters::AUDIO_PCM_LOW_LATENCY audio path.
// See http://msdn.microsoft.com/en-us/library/windows/desktop/dd370844.aspx
// for details.
const char kEnableExclusiveAudio[] = "enable-exclusive-audio";

// Used to troubleshoot problems with different video capture implementations
// on Windows. By default we use the Media Foundation API on Windows 7 and up,
// but specifying this switch will force use of DirectShow always.
// See bug: http://crbug.com/268412
const char kForceDirectShowVideoCapture[] = "force-directshow";

// Force the use of Media Foundation for video capture. This is only supported
// on Windows 7 and above. Used, like |kForceDirectShowVideoCapture|, to
// troubleshoot problems on Windows platforms.
const char kForceMediaFoundationVideoCapture[] = "force-mediafoundation";

// Use the Windows WaveOut/In audio API even if Core Audio is supported.
const char kForceWaveAudio[] = "force-wave-audio";

// Instead of always using the hardware channel layout, check if a driver
// supports the source channel layout. Avoids outputting empty channels and
// permits drivers to enable stereo to multichannel expansion. Kept behind a
// flag since some drivers lie about supported layouts and hang when used. See
// http://crbug.com/259165 for more details.
const char kTrySupportedChannelLayouts[] = "try-supported-channel-layouts";

// Number of buffers to use for WaveOut.
const char kWaveOutBuffers[] = "waveout-buffers";
#endif  // defined(OS_WIN)

#if defined(USE_CRAS)
// Use CRAS, the ChromeOS audio server.
const char kUseCras[] = "use-cras";
#endif  // defined(USE_CRAS)

// Enables the audio thread hang monitor. Allows us to find users in the field
// who have stuck audio threads. See crbug.com/422522 and crbug.com/478932.
// TODO(dalecurtis): This should be removed once those issues are resolved.
const char kEnableAudioHangMonitor[] = "enable-audio-hang-monitor";

// Use a fake device for Media Stream to replace the actual camera and
// microphone.
const char kUseFakeDeviceForMediaStream[] = "use-fake-device-for-media-stream";

// Use a .y4m file to play as the webcam. See the comments in
// media/capture/video/file_video_capture_device.h for more details.
const char kUseFileForFakeVideoCapture[] = "use-file-for-fake-video-capture";

// Play a .wav file as the microphone. Note that for WebRTC calls we'll treat
// the bits as if they came from the microphone, which means you should disable
// audio processing (lest your audio file play back distorted). The input file
// is converted to suit Chrome's audio buses if necessary, so most sane .wav
// files should work.
const char kUseFileForFakeAudioCapture[] = "use-file-for-fake-audio-capture";

// Enables support for inband text tracks in media content.
const char kEnableInbandTextTracks[] = "enable-inband-text-tracks";

// When running tests on a system without the required hardware or libraries,
// this flag will cause the tests to fail. Otherwise, they silently succeed.
const char kRequireAudioHardwareForTesting[] =
    "require-audio-hardware-for-testing";

// Allows clients to override the threshold for when the media renderer will
// declare the underflow state for the video stream when audio is present.
// TODO(dalecurtis): Remove once experiments for http://crbug.com/470940 finish.
const char kVideoUnderflowThresholdMs[] = "video-underflow-threshold-ms";

// Use the new rendering algorithm for webrtc, which is designed to improve
// the rendering smoothness.
const char kEnableRTCSmoothnessAlgorithm[] = "enable-rtc-smoothness-algorithm";

}  // namespace switches