/************************************************************************/
/*! \class RtAudio
    \brief Realtime audio i/o C++ classes.

    RtAudio provides a common API (Application Programming Interface)
    for realtime audio input/output across Linux (native ALSA, Jack,
    and OSS), SGI, Macintosh OS X (CoreAudio), and Windows
    (DirectSound and ASIO) operating systems.

    RtAudio WWW site: http://music.mcgill.ca/~gary/rtaudio/

    RtAudio: a realtime audio i/o C++ class
    Copyright (c) 2001-2004 Gary P. Scavone

    Permission is hereby granted, free of charge, to any person
    obtaining a copy of this software and associated documentation files
    (the "Software"), to deal in the Software without restriction,
    including without limitation the rights to use, copy, modify, merge,
    publish, distribute, sublicense, and/or sell copies of the Software,
    and to permit persons to whom the Software is furnished to do so,
    subject to the following conditions:

    The above copyright notice and this permission notice shall be
    included in all copies or substantial portions of the Software.

    Any person wishing to distribute modifications to the Software is
    requested to send the modifications to the original developer so that
    they can be incorporated into the canonical version.

    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
    IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
    ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
    CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
    WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
/************************************************************************/

// RtAudio: Version 3.0.1, 22 March 2004
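// Added note (not in the original source): an illustrative sketch of typical use
// of the blocking API implemented below -- open a default-device output stream,
// fetch the interleaved user buffer, and push one buffer per tickStream() call.
// The device/format/buffer values are assumptions chosen for illustration, and
// the 8-argument constructor relies on the default api argument from RtAudio.h.
/*
  #include "RtAudio.h"

  int bufferSize = 256;   // frames per buffer (may be adjusted by the API)
  RtAudio *audio = 0;
  try {
    // outputDevice = 0 (default), 2 output channels, no input, float32 @ 44100 Hz
    audio = new RtAudio( 0, 2, 0, 0, RTAUDIO_FLOAT32, 44100, &bufferSize, 4 );
    float *buffer = (float *) audio->getStreamBuffer();
    audio->startStream();
    for ( int i=0; i<1000; i++ ) {
      // ... fill 'buffer' with bufferSize frames of interleaved samples ...
      audio->tickStream();  // blocking write (and/or read)
    }
    audio->stopStream();
    audio->closeStream();
  }
  catch ( RtError &error ) {
    fprintf( stderr, "%s\n", error.getMessageString() );
  }
  delete audio;
*/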
// Static variable definitions.
const unsigned int RtApi::MAX_SAMPLE_RATES = 14;
const unsigned int RtApi::SAMPLE_RATES[] = {
  4000, 5512, 8000, 9600, 11025, 16000, 22050,
  32000, 44100, 48000, 88200, 96000, 176400, 192000
};
#if ( defined(__WINDOWS_DS__) || defined(__WINDOWS_ASIO__) ) && !defined(__WINDOWS_PTHREAD__)
  #define MUTEX_INITIALIZE(A) InitializeCriticalSection(A)
  #define MUTEX_DESTROY(A)    DeleteCriticalSection(A);
  #define MUTEX_LOCK(A)       EnterCriticalSection(A)
  #define MUTEX_UNLOCK(A)     LeaveCriticalSection(A)
#else // pthread API
  #define MUTEX_INITIALIZE(A) pthread_mutex_init(A, NULL)
  #define MUTEX_DESTROY(A)    pthread_mutex_destroy(A);
  #define MUTEX_LOCK(A)       pthread_mutex_lock(A)
  #define MUTEX_UNLOCK(A)     pthread_mutex_unlock(A)
#endif
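// Added note (not in the original source): these macros give the rest of the
// file a single locking API over Win32 critical sections and POSIX mutexes.
// The usage pattern throughout the stream methods below is simply:
//
//   MUTEX_LOCK(&stream_.mutex);
//   // ... touch stream_ state ...
//   MUTEX_UNLOCK(&stream_.mutex);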
// *************************************************** //
//
// Public common (OS-independent) methods.
//
// *************************************************** //
RtAudio :: RtAudio( RtAudioApi api )
{
  initialize( api );
}

RtAudio :: RtAudio( int outputDevice, int outputChannels,
                    int inputDevice, int inputChannels,
                    RtAudioFormat format, int sampleRate,
                    int *bufferSize, int numberOfBuffers, RtAudioApi api )
{
  initialize( api );

  try {
    rtapi_->openStream( outputDevice, outputChannels,
                        inputDevice, inputChannels,
                        format, sampleRate,
                        bufferSize, numberOfBuffers );
  }
  catch (RtError &exception) {
    // Deallocate the RtApi instance.
    delete rtapi_;
    throw exception;
  }
}
void RtAudio :: openStream( int outputDevice, int outputChannels,
                            int inputDevice, int inputChannels,
                            RtAudioFormat format, int sampleRate,
                            int *bufferSize, int numberOfBuffers )
{
  rtapi_->openStream( outputDevice, outputChannels, inputDevice,
                      inputChannels, format, sampleRate,
                      bufferSize, numberOfBuffers );
}
void RtAudio::initialize( RtAudioApi api )
{
  rtapi_ = 0;

  // First look for a compiled match to a specified API value.  If one
  // of these constructors throws an error, it will be passed up the
  // inheritance chain.
#if defined(__LINUX_JACK__)
  if ( api == LINUX_JACK )
    rtapi_ = new RtApiJack();
#endif
#if defined(__LINUX_ALSA__)
  if ( api == LINUX_ALSA )
    rtapi_ = new RtApiAlsa();
#endif
#if defined(__LINUX_OSS__)
  if ( api == LINUX_OSS )
    rtapi_ = new RtApiOss();
#endif
#if defined(__WINDOWS_ASIO__)
  if ( api == WINDOWS_ASIO )
    rtapi_ = new RtApiAsio();
#endif
#if defined(__WINDOWS_DS__)
  if ( api == WINDOWS_DS )
    rtapi_ = new RtApiDs();
#endif
#if defined(__IRIX_AL__)
  if ( api == IRIX_AL )
    rtapi_ = new RtApiAl();
#endif
#if defined(__MACOSX_CORE__)
  if ( api == MACOSX_CORE )
    rtapi_ = new RtApiCore();
#endif

  if ( rtapi_ ) return;

  if ( api > 0 ) {
    // No compiled support for specified API value.
    throw RtError( "RtAudio: no compiled support for specified API argument!", RtError::INVALID_PARAMETER );
  }
  // No specified API ... search for "best" option.
  try {
#if defined(__LINUX_JACK__)
    rtapi_ = new RtApiJack();
#elif defined(__WINDOWS_ASIO__)
    rtapi_ = new RtApiAsio();
#elif defined(__IRIX_AL__)
    rtapi_ = new RtApiAl();
#elif defined(__MACOSX_CORE__)
    rtapi_ = new RtApiCore();
#endif
  }
  catch (RtError &) {
#if defined(__RTAUDIO_DEBUG__)
    fprintf(stderr, "\nRtAudio: no devices found for first api option (JACK, ASIO, Al, or CoreAudio).\n\n");
#endif
    rtapi_ = 0;
  }

  if ( rtapi_ ) return;

  // Try second API support
  try {
#if defined(__LINUX_ALSA__)
    rtapi_ = new RtApiAlsa();
#elif defined(__WINDOWS_DS__)
    rtapi_ = new RtApiDs();
#endif
  }
  catch (RtError &) {
#if defined(__RTAUDIO_DEBUG__)
    fprintf(stderr, "\nRtAudio: no devices found for second api option (Alsa or DirectSound).\n\n");
#endif
    rtapi_ = 0;
  }

  if ( rtapi_ ) return;

  // Try third API support
#if defined(__LINUX_OSS__)
  try {
    rtapi_ = new RtApiOss();
  }
  catch (RtError &error) {
    rtapi_ = 0;
  }
#endif

  if ( rtapi_ == 0 )
    throw RtError( "RtAudio: no devices found for compiled audio APIs!", RtError::NO_DEVICES_FOUND );
}
RtApi :: RtApi()
{
  stream_.mode = UNINITIALIZED;
  stream_.apiHandle = 0;
  MUTEX_INITIALIZE(&stream_.mutex);
}

RtApi :: ~RtApi()
{
  MUTEX_DESTROY(&stream_.mutex);
}
void RtApi :: openStream( int outputDevice, int outputChannels,
                          int inputDevice, int inputChannels,
                          RtAudioFormat format, int sampleRate,
                          int *bufferSize, int numberOfBuffers )
{
  if ( stream_.mode != UNINITIALIZED ) {
    sprintf(message_, "RtApi: only one open stream allowed per class instance.");
    error(RtError::INVALID_STREAM);
  }

  if (outputChannels < 1 && inputChannels < 1) {
    sprintf(message_, "RtApi: one or both 'channel' parameters must be greater than zero.");
    error(RtError::INVALID_PARAMETER);
  }

  if ( formatBytes(format) == 0 ) {
    sprintf(message_, "RtApi: 'format' parameter value is undefined.");
    error(RtError::INVALID_PARAMETER);
  }

  if ( outputChannels > 0 ) {
    if (outputDevice > nDevices_ || outputDevice < 0) {
      sprintf(message_, "RtApi: 'outputDevice' parameter value (%d) is invalid.", outputDevice);
      error(RtError::INVALID_PARAMETER);
    }
  }

  if ( inputChannels > 0 ) {
    if (inputDevice > nDevices_ || inputDevice < 0) {
      sprintf(message_, "RtApi: 'inputDevice' parameter value (%d) is invalid.", inputDevice);
      error(RtError::INVALID_PARAMETER);
    }
  }
  bool result = FAILURE;
  int device, defaultDevice = 0;
  StreamMode mode;
  int channels;

  if ( outputChannels > 0 ) {

    mode = OUTPUT;
    channels = outputChannels;

    if ( outputDevice == 0 ) { // Try default device first.
      defaultDevice = getDefaultOutputDevice();
      device = defaultDevice;
    }
    else
      device = outputDevice - 1;

    for ( int i=-1; i<nDevices_; i++ ) {
      if ( i >= 0 ) device = i;
      if ( i == defaultDevice ) continue;
      if (devices_[device].probed == false) {
        // If the device wasn't successfully probed before, try it again.
        clearDeviceInfo(&devices_[device]);
        probeDeviceInfo(&devices_[device]);
      }
      if ( devices_[device].probed )
        result = probeDeviceOpen(device, mode, channels, sampleRate,
                                 format, bufferSize, numberOfBuffers);
      if ( result == SUCCESS ) break;
      if ( outputDevice > 0 ) break;
    }
  }

  if ( inputChannels > 0 && ( result == SUCCESS || outputChannels <= 0 ) ) {

    mode = INPUT;
    channels = inputChannels;

    if ( inputDevice == 0 ) { // Try default device first.
      defaultDevice = getDefaultInputDevice();
      device = defaultDevice;
    }
    else
      device = inputDevice - 1;

    for (int i=-1; i<nDevices_; i++) {
      if ( i >= 0 ) device = i;
      if ( i == defaultDevice ) continue;
      if (devices_[device].probed == false) {
        // If the device wasn't successfully probed before, try it again.
        clearDeviceInfo(&devices_[device]);
        probeDeviceInfo(&devices_[device]);
      }
      if ( devices_[device].probed )
        result = probeDeviceOpen(device, mode, channels, sampleRate,
                                 format, bufferSize, numberOfBuffers);
      if (result == SUCCESS) break;
      if ( outputDevice > 0 ) break;
    }
  }

  if ( result == SUCCESS )
    return;

  // If we get here, all attempted probes failed.  Close any opened
  // devices and clear the stream structure.
  if ( stream_.mode != UNINITIALIZED ) closeStream();

  if ( ( outputDevice == 0 && outputChannels > 0 )
       || ( inputDevice == 0 && inputChannels > 0 ) )
    sprintf(message_,"RtApi: no devices found for given stream parameters.");
  else
    sprintf(message_,"RtApi: unable to open specified device(s) with given stream parameters.");
  error(RtError::INVALID_PARAMETER);
}
int RtApi :: getDeviceCount(void)
{
  return devices_.size();
}

RtAudioDeviceInfo RtApi :: getDeviceInfo( int device )
{
  if (device > (int) devices_.size() || device < 1) {
    sprintf(message_, "RtApi: invalid device specifier (%d)!", device);
    error(RtError::INVALID_DEVICE);
  }

  RtAudioDeviceInfo info;
  int deviceIndex = device - 1;

  // If the device wasn't successfully probed before, try it now (or again).
  if (devices_[deviceIndex].probed == false) {
    clearDeviceInfo(&devices_[deviceIndex]);
    probeDeviceInfo(&devices_[deviceIndex]);
  }

  info.name.append( devices_[deviceIndex].name );
  info.probed = devices_[deviceIndex].probed;
  if ( info.probed == true ) {
    info.outputChannels = devices_[deviceIndex].maxOutputChannels;
    info.inputChannels = devices_[deviceIndex].maxInputChannels;
    info.duplexChannels = devices_[deviceIndex].maxDuplexChannels;
    for (unsigned int i=0; i<devices_[deviceIndex].sampleRates.size(); i++)
      info.sampleRates.push_back( devices_[deviceIndex].sampleRates[i] );
    info.nativeFormats = devices_[deviceIndex].nativeFormats;
    if ( (deviceIndex == getDefaultOutputDevice()) ||
         (deviceIndex == getDefaultInputDevice()) )
      info.isDefault = true;
  }

  return info;
}
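// Added note (not in the original source): an illustrative sketch of device
// enumeration with the two methods above.  Device numbering is 1-based here,
// with 0 reserved to mean "default device".  It assumes the RtAudio wrapper
// class forwards getDeviceCount()/getDeviceInfo() to rtapi_, as declared in
// RtAudio.h.
/*
  RtAudio audio;   // compiled API chosen automatically
  int count = audio.getDeviceCount();
  for ( int i=1; i<=count; i++ ) {
    RtAudioDeviceInfo info = audio.getDeviceInfo( i );
    std::cout << info.name << ": " << info.outputChannels << " out, "
              << info.inputChannels << " in"
              << ( info.isDefault ? " (default)" : "" ) << std::endl;
  }
*/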
char * const RtApi :: getStreamBuffer(void)
{
  return stream_.userBuffer;
}

int RtApi :: getDefaultInputDevice(void)
{
  // Should be implemented in subclasses if appropriate.
  return 0;
}

int RtApi :: getDefaultOutputDevice(void)
{
  // Should be implemented in subclasses if appropriate.
  return 0;
}

void RtApi :: closeStream(void)
{
  // MUST be implemented in subclasses!
}

void RtApi :: probeDeviceInfo( RtApiDevice *info )
{
  // MUST be implemented in subclasses!
}

bool RtApi :: probeDeviceOpen( int device, StreamMode mode, int channels,
                               int sampleRate, RtAudioFormat format,
                               int *bufferSize, int numberOfBuffers )
{
  // MUST be implemented in subclasses!
  return FAILURE;
}

// *************************************************** //
//
// OS/API-specific methods.
//
// *************************************************** //
#if defined(__LINUX_OSS__)

#include <unistd.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/ioctl.h>
#include <fcntl.h>
#include <sys/soundcard.h>
#include <errno.h>
#include <math.h>

#define DAC_NAME "/dev/dsp"
#define MAX_DEVICES 16
#define MAX_CHANNELS 16

extern "C" void *ossCallbackHandler(void * ptr);

RtApiOss :: RtApiOss()
{
  this->initialize();

  if (nDevices_ <= 0) {
    sprintf(message_, "RtApiOss: no Linux OSS audio devices found!");
    error(RtError::NO_DEVICES_FOUND);
  }
}

RtApiOss :: ~RtApiOss()
{
  if ( stream_.mode != UNINITIALIZED )
    closeStream();
}

void RtApiOss :: initialize(void)
{
  // Count cards and devices
  nDevices_ = 0;

  // We check /dev/dsp before probing devices.  /dev/dsp is supposed to
  // be a link to the "default" audio device, of the form /dev/dsp0,
  // /dev/dsp1, etc...  However, I've seen many cases where /dev/dsp was a
  // real device, so we need to check for that.  Also, sometimes the
  // link is to /dev/dspx and other times just dspx.  I'm not sure how
  // the latter works, but it does.
  char device_name[16];
  struct stat dspstat;
  int dsplink = -1;
  int i = 0, fd;
  if (lstat(DAC_NAME, &dspstat) == 0) {
    if (S_ISLNK(dspstat.st_mode)) {
      i = readlink(DAC_NAME, device_name, sizeof(device_name));
      if (i > 0) {
        device_name[i] = '\0';
        if (i > 8) { // check for "/dev/dspx"
          if (!strncmp(DAC_NAME, device_name, 8))
            dsplink = atoi(&device_name[8]);
        }
        else if (i > 3) { // check for "dspx"
          if (!strncmp("dsp", device_name, 3))
            dsplink = atoi(&device_name[3]);
        }
      }
      else {
        sprintf(message_, "RtApiOss: cannot read value of symbolic link %s.", DAC_NAME);
        error(RtError::SYSTEM_ERROR);
      }
    }
  }
  else {
    sprintf(message_, "RtApiOss: cannot stat %s.", DAC_NAME);
    error(RtError::SYSTEM_ERROR);
  }

  // The OSS API doesn't provide a routine for determining the number
  // of devices.  Thus, we'll just pursue a brute force method.  The
  // idea is to start with /dev/dsp(0) and continue with higher device
  // numbers until we reach MAX_DSP_DEVICES.  This should tell us how
  // many devices we have ... it is not a foolproof scheme, but hopefully
  // it will work most of the time.
  for (i=-1; i<MAX_DEVICES; i++) {

    // Probe /dev/dsp first, since it is supposed to be the default device.
    if (i == -1)
      sprintf(device_name, "%s", DAC_NAME);
    else if (i == dsplink)
      continue; // We've already probed this device via the /dev/dsp link ... try next device.
    else
      sprintf(device_name, "%s%d", DAC_NAME, i);

    // First try to open the device for playback, then record mode.
    fd = open(device_name, O_WRONLY | O_NONBLOCK);
    if (fd == -1) {
      // Open device for playback failed ... either busy or doesn't exist.
      if (errno != EBUSY && errno != EAGAIN) {
        // Try to open for capture
        fd = open(device_name, O_RDONLY | O_NONBLOCK);
        if (fd == -1) {
          // Open device for record failed.
          if (errno != EBUSY && errno != EAGAIN)
            continue;
          sprintf(message_, "RtApiOss: OSS record device (%s) is busy.", device_name);
          error(RtError::WARNING);
          // still count it for now
        }
      }
      else {
        sprintf(message_, "RtApiOss: OSS playback device (%s) is busy.", device_name);
        error(RtError::WARNING);
        // still count it for now
      }
    }

    if (fd >= 0) close(fd);
    RtApiDevice device;
    device.name.append( (const char *)device_name, strlen(device_name)+1);
    devices_.push_back(device);
    nDevices_++;
  }
}
void RtApiOss :: probeDeviceInfo(RtApiDevice *info)
{
  int i, fd, channels, mask;

  // The OSS API doesn't provide a means for probing the capabilities
  // of devices.  Thus, we'll just pursue a brute force method.

  // First try for playback
  fd = open(info->name.c_str(), O_WRONLY | O_NONBLOCK);
  if (fd == -1) {
    // Open device failed ... either busy or doesn't exist
    if (errno == EBUSY || errno == EAGAIN)
      sprintf(message_, "RtApiOss: OSS playback device (%s) is busy and cannot be probed.",
              info->name.c_str());
    else
      sprintf(message_, "RtApiOss: OSS playback device (%s) open error.", info->name.c_str());
    error(RtError::DEBUG_WARNING);
  }
  else {
    // We have an open device ... see how many channels it can handle
    for (i=MAX_CHANNELS; i>0; i--) {
      channels = i;
      if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1) {
        // This would normally indicate some sort of hardware error, but under ALSA's
        // OSS emulation, it sometimes indicates an invalid channel value.  Further,
        // the returned channel value is not changed.  So, we'll ignore the possible
        // error and just try the next value.
        continue; // try next channel number
      }
      // Check to see whether the device supports the requested number of channels
      if (channels != i) continue; // try next channel number
      // If here, we found the largest working channel value
      break;
    }
    info->maxOutputChannels = i;

    // Now find the minimum number of channels it can handle
    for (i=1; i<=info->maxOutputChannels; i++) {
      channels = i;
      if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1 || channels != i)
        continue; // try next channel number
      // If here, we found the smallest working channel value
      break;
    }
    info->minOutputChannels = i;
    close(fd);
  }

  // Now try for capture
  fd = open(info->name.c_str(), O_RDONLY | O_NONBLOCK);
  if (fd == -1) {
    // Open device for capture failed ... either busy or doesn't exist
    if (errno == EBUSY || errno == EAGAIN)
      sprintf(message_, "RtApiOss: OSS capture device (%s) is busy and cannot be probed.",
              info->name.c_str());
    else
      sprintf(message_, "RtApiOss: OSS capture device (%s) open error.", info->name.c_str());
    error(RtError::DEBUG_WARNING);
    if (info->maxOutputChannels == 0)
      // didn't open for playback either ... device invalid
      return;
    goto probe_parameters;
  }

  // We have the device open for capture ... see how many channels it can handle
  for (i=MAX_CHANNELS; i>0; i--) {
    channels = i;
    if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1 || channels != i) {
      continue; // as above
    }
    // If here, we found a working channel value
    break;
  }
  info->maxInputChannels = i;

  // Now find the minimum number of channels it can handle
  for (i=1; i<=info->maxInputChannels; i++) {
    channels = i;
    if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1 || channels != i)
      continue; // try next channel number
    // If here, we found the smallest working channel value
    break;
  }
  info->minInputChannels = i;
  close(fd);

  if (info->maxOutputChannels == 0 && info->maxInputChannels == 0) {
    sprintf(message_, "RtApiOss: device (%s) reports zero channels for input and output.",
            info->name.c_str());
    error(RtError::DEBUG_WARNING);
    return;
  }

  // If device opens for both playback and capture, we determine the channels.
  if (info->maxOutputChannels == 0 || info->maxInputChannels == 0)
    goto probe_parameters;

  fd = open(info->name.c_str(), O_RDWR | O_NONBLOCK);
  if (fd == -1)
    goto probe_parameters;

  ioctl(fd, SNDCTL_DSP_SETDUPLEX, 0);
  ioctl(fd, SNDCTL_DSP_GETCAPS, &mask);
  if (mask & DSP_CAP_DUPLEX) {
    info->hasDuplexSupport = true;
    // We have the device open for duplex ... see how many channels it can handle
    for (i=MAX_CHANNELS; i>0; i--) {
      channels = i;
      if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1 || channels != i)
        continue; // as above
      // If here, we found a working channel value
      break;
    }
    info->maxDuplexChannels = i;

    // Now find the minimum number of channels it can handle
    for (i=1; i<=info->maxDuplexChannels; i++) {
      channels = i;
      if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1 || channels != i)
        continue; // try next channel number
      // If here, we found the smallest working channel value
      break;
    }
    info->minDuplexChannels = i;
  }
  close(fd);

 probe_parameters:
  // At this point, we need to figure out the supported data formats
  // and sample rates.  We'll proceed by opening the device in the
  // direction with the maximum number of channels, or playback if
  // they are equal.  This might limit our sample rate options, but so
  // be it.
  if (info->maxOutputChannels >= info->maxInputChannels) {
    fd = open(info->name.c_str(), O_WRONLY | O_NONBLOCK);
    channels = info->maxOutputChannels;
  }
  else {
    fd = open(info->name.c_str(), O_RDONLY | O_NONBLOCK);
    channels = info->maxInputChannels;
  }

  if (fd == -1) {
    // We've got some sort of conflict ... abort
    sprintf(message_, "RtApiOss: device (%s) won't reopen during probe.",
            info->name.c_str());
    error(RtError::DEBUG_WARNING);
    return;
  }

  // We have an open device ... set to maximum channels.
  i = channels;
  if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1 || channels != i) {
    // We've got some sort of conflict ... abort
    close(fd);
    sprintf(message_, "RtApiOss: device (%s) won't revert to previous channel setting.",
            info->name.c_str());
    error(RtError::DEBUG_WARNING);
    return;
  }

  if (ioctl(fd, SNDCTL_DSP_GETFMTS, &mask) == -1) {
    close(fd);
    sprintf(message_, "RtApiOss: device (%s) can't get supported audio formats.",
            info->name.c_str());
    error(RtError::DEBUG_WARNING);
    return;
  }

  // Probe the supported data formats ... we don't care about endian-ness just yet.
  int format = 0;
  info->nativeFormats = 0;
#if defined (AFMT_S32_BE)
  // This format does not seem to be in the 2.4 kernel version of OSS soundcard.h
  if (mask & AFMT_S32_BE) {
    format = AFMT_S32_BE;
    info->nativeFormats |= RTAUDIO_SINT32;
  }
#endif
#if defined (AFMT_S32_LE)
  /* This format is not in the 2.4.4 kernel version of OSS soundcard.h */
  if (mask & AFMT_S32_LE) {
    format = AFMT_S32_LE;
    info->nativeFormats |= RTAUDIO_SINT32;
  }
#endif
  if (mask & AFMT_S8) {
    format = AFMT_S8;
    info->nativeFormats |= RTAUDIO_SINT8;
  }
  if (mask & AFMT_S16_BE) {
    format = AFMT_S16_BE;
    info->nativeFormats |= RTAUDIO_SINT16;
  }
  if (mask & AFMT_S16_LE) {
    format = AFMT_S16_LE;
    info->nativeFormats |= RTAUDIO_SINT16;
  }

  // Check that we have at least one supported format
  if (info->nativeFormats == 0) {
    close(fd);
    sprintf(message_, "RtApiOss: device (%s) data format not supported by RtAudio.",
            info->name.c_str());
    error(RtError::DEBUG_WARNING);
    return;
  }

  // Set the format
  i = format;
  if (ioctl(fd, SNDCTL_DSP_SETFMT, &format) == -1 || format != i) {
    close(fd);
    sprintf(message_, "RtApiOss: device (%s) error setting data format.",
            info->name.c_str());
    error(RtError::DEBUG_WARNING);
    return;
  }

  // Probe the supported sample rates.
  info->sampleRates.clear();
  for (unsigned int k=0; k<MAX_SAMPLE_RATES; k++) {
    int speed = SAMPLE_RATES[k];
    if (ioctl(fd, SNDCTL_DSP_SPEED, &speed) != -1 && speed == (int)SAMPLE_RATES[k])
      info->sampleRates.push_back(speed);
  }

  if (info->sampleRates.size() == 0) {
    close(fd);
    sprintf(message_, "RtApiOss: no supported sample rates found for device (%s).",
            info->name.c_str());
    error(RtError::DEBUG_WARNING);
    return;
  }

  // That's all ... close the device and return
  close(fd);
  info->probed = true;
}
bool RtApiOss :: probeDeviceOpen(int device, StreamMode mode, int channels,
                                 int sampleRate, RtAudioFormat format,
                                 int *bufferSize, int numberOfBuffers)
{
  int buffers, buffer_bytes, device_channels, device_format;
  int srate, temp, fd, mask;
  int *handle = (int *) stream_.apiHandle;

  const char *name = devices_[device].name.c_str();

  if (mode == OUTPUT)
    fd = open(name, O_WRONLY | O_NONBLOCK);
  else { // mode == INPUT
    if (stream_.mode == OUTPUT && stream_.device[0] == device) {
      // We just set the same device for playback ... close and reopen for duplex (OSS only).
      // First check that the number of previously set channels is the same.
      if (stream_.nUserChannels[0] != channels) {
        sprintf(message_, "RtApiOss: input/output channels must be equal for OSS duplex device (%s).", name);
        goto error;
      }
      fd = open(name, O_RDWR | O_NONBLOCK);
    }
    else
      fd = open(name, O_RDONLY | O_NONBLOCK);
  }

  if (fd == -1) {
    if (errno == EBUSY || errno == EAGAIN)
      sprintf(message_, "RtApiOss: device (%s) is busy and cannot be opened.",
              name);
    else
      sprintf(message_, "RtApiOss: device (%s) cannot be opened.", name);
    goto error;
  }

  // Now reopen in blocking mode.
  close(fd);
  if (mode == OUTPUT)
    fd = open(name, O_WRONLY | O_SYNC);
  else { // mode == INPUT
    if (stream_.mode == OUTPUT && stream_.device[0] == device)
      fd = open(name, O_RDWR | O_SYNC);
    else
      fd = open(name, O_RDONLY | O_SYNC);
  }

  if (fd == -1) {
    sprintf(message_, "RtApiOss: device (%s) cannot be opened.", name);
    goto error;
  }

  // Get the sample format mask
  if (ioctl(fd, SNDCTL_DSP_GETFMTS, &mask) == -1) {
    close(fd);
    sprintf(message_, "RtApiOss: device (%s) can't get supported audio formats.",
            name);
    goto error;
  }

  // Determine how to set the device format.
  stream_.userFormat = format;
  device_format = -1;
  stream_.doByteSwap[mode] = false;
  if (format == RTAUDIO_SINT8) {
    if (mask & AFMT_S8) {
      device_format = AFMT_S8;
      stream_.deviceFormat[mode] = RTAUDIO_SINT8;
    }
  }
  else if (format == RTAUDIO_SINT16) {
    if (mask & AFMT_S16_NE) {
      device_format = AFMT_S16_NE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT16;
    }
#if BYTE_ORDER == LITTLE_ENDIAN
    else if (mask & AFMT_S16_BE) {
      device_format = AFMT_S16_BE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT16;
      stream_.doByteSwap[mode] = true;
    }
#else
    else if (mask & AFMT_S16_LE) {
      device_format = AFMT_S16_LE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT16;
      stream_.doByteSwap[mode] = true;
    }
#endif
  }
#if defined (AFMT_S32_NE) && defined (AFMT_S32_LE) && defined (AFMT_S32_BE)
  else if (format == RTAUDIO_SINT32) {
    if (mask & AFMT_S32_NE) {
      device_format = AFMT_S32_NE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT32;
    }
#if BYTE_ORDER == LITTLE_ENDIAN
    else if (mask & AFMT_S32_BE) {
      device_format = AFMT_S32_BE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT32;
      stream_.doByteSwap[mode] = true;
    }
#else
    else if (mask & AFMT_S32_LE) {
      device_format = AFMT_S32_LE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT32;
      stream_.doByteSwap[mode] = true;
    }
#endif
  }
#endif

  if (device_format == -1) {
    // The user requested format is not natively supported by the device.
    if (mask & AFMT_S16_NE) {
      device_format = AFMT_S16_NE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT16;
    }
#if BYTE_ORDER == LITTLE_ENDIAN
    else if (mask & AFMT_S16_BE) {
      device_format = AFMT_S16_BE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT16;
      stream_.doByteSwap[mode] = true;
    }
#else
    else if (mask & AFMT_S16_LE) {
      device_format = AFMT_S16_LE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT16;
      stream_.doByteSwap[mode] = true;
    }
#endif
#if defined (AFMT_S32_NE) && defined (AFMT_S32_LE) && defined (AFMT_S32_BE)
    else if (mask & AFMT_S32_NE) {
      device_format = AFMT_S32_NE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT32;
    }
#if BYTE_ORDER == LITTLE_ENDIAN
    else if (mask & AFMT_S32_BE) {
      device_format = AFMT_S32_BE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT32;
      stream_.doByteSwap[mode] = true;
    }
#else
    else if (mask & AFMT_S32_LE) {
      device_format = AFMT_S32_LE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT32;
      stream_.doByteSwap[mode] = true;
    }
#endif
#endif
    else if (mask & AFMT_S8) {
      device_format = AFMT_S8;
      stream_.deviceFormat[mode] = RTAUDIO_SINT8;
    }
  }

  if (stream_.deviceFormat[mode] == 0) {
    // This really shouldn't happen ...
    close(fd);
    sprintf(message_, "RtApiOss: device (%s) data format not supported by RtAudio.",
            name);
    goto error;
  }

  // Determine the number of channels for this device.  Note that the
  // channel value requested by the user might be < min_X_Channels.
  stream_.nUserChannels[mode] = channels;
  device_channels = channels;
  if (mode == OUTPUT) {
    if (channels < devices_[device].minOutputChannels)
      device_channels = devices_[device].minOutputChannels;
  }
  else { // mode == INPUT
    if (stream_.mode == OUTPUT && stream_.device[0] == device) {
      // We're doing duplex setup here.
      if (channels < devices_[device].minDuplexChannels)
        device_channels = devices_[device].minDuplexChannels;
    }
    else {
      if (channels < devices_[device].minInputChannels)
        device_channels = devices_[device].minInputChannels;
    }
  }
  stream_.nDeviceChannels[mode] = device_channels;

  // Attempt to set the buffer size.  According to OSS, the minimum
  // number of buffers is two.  The supposed minimum buffer size is 16
  // bytes, so that will be our lower bound.  The argument to this
  // call is in the form 0xMMMMSSSS (hex), where the buffer size (in
  // bytes) is given as 2^SSSS and MMMM is the requested number of
  // buffers (fragments).  We'll check the actual value used near the
  // end of the setup procedure.
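  // Worked example (added note, not in the original source): with
  // numberOfBuffers = 4 and a 1024-byte fragment, SSSS = log2(1024) = 10
  // (0x000A) and MMMM = 4, so the ioctl argument computed below is
  // (4 << 16) + 10 = 0x0004000A.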
  buffer_bytes = *bufferSize * formatBytes(stream_.deviceFormat[mode]) * device_channels;
  if (buffer_bytes < 16) buffer_bytes = 16;
  buffers = numberOfBuffers;
  if (buffers < 2) buffers = 2;
  temp = ((int) buffers << 16) + (int)(log10((double)buffer_bytes)/log10(2.0));
  if (ioctl(fd, SNDCTL_DSP_SETFRAGMENT, &temp)) {
    close(fd);
    sprintf(message_, "RtApiOss: error setting fragment size for device (%s).",
            name);
    goto error;
  }
  stream_.nBuffers = buffers;

  // Set the data format.
  temp = device_format;
  if (ioctl(fd, SNDCTL_DSP_SETFMT, &device_format) == -1 || device_format != temp) {
    close(fd);
    sprintf(message_, "RtApiOss: error setting data format for device (%s).",
            name);
    goto error;
  }

  // Set the number of channels.
  temp = device_channels;
  if (ioctl(fd, SNDCTL_DSP_CHANNELS, &device_channels) == -1 || device_channels != temp) {
    close(fd);
    sprintf(message_, "RtApiOss: error setting %d channels on device (%s).",
            temp, name);
    goto error;
  }

  // Set the sample rate.
  srate = sampleRate;
  temp = srate;
  if (ioctl(fd, SNDCTL_DSP_SPEED, &srate) == -1) {
    close(fd);
    sprintf(message_, "RtApiOss: error setting sample rate = %d on device (%s).",
            sampleRate, name);
    goto error;
  }

  // Verify the sample rate setup worked.
  if (abs(srate - temp) > 100) {
    close(fd);
    sprintf(message_, "RtApiOss: error ... audio device (%s) doesn't support sample rate of %d.",
            name, sampleRate);
    goto error;
  }
  stream_.sampleRate = sampleRate;

  if (ioctl(fd, SNDCTL_DSP_GETBLKSIZE, &buffer_bytes) == -1) {
    close(fd);
    sprintf(message_, "RtApiOss: error getting buffer size for device (%s).",
            name);
    goto error;
  }

  // Save buffer size (in sample frames).
  *bufferSize = buffer_bytes / (formatBytes(stream_.deviceFormat[mode]) * device_channels);
  stream_.bufferSize = *bufferSize;

  if (mode == INPUT && stream_.mode == OUTPUT &&
      stream_.device[0] == device) {
    // We're doing duplex setup here.
    stream_.deviceFormat[0] = stream_.deviceFormat[1];
    stream_.nDeviceChannels[0] = device_channels;
  }

  // Allocate the stream handles if necessary and then save.
  if ( stream_.apiHandle == 0 ) {
    handle = (int *) calloc(2, sizeof(int));
    stream_.apiHandle = (void *) handle;
  }
  else
    handle = (int *) stream_.apiHandle;
  handle[mode] = fd;

  // Set flags for buffer conversion
  stream_.doConvertBuffer[mode] = false;
  if (stream_.userFormat != stream_.deviceFormat[mode])
    stream_.doConvertBuffer[mode] = true;
  if (stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode])
    stream_.doConvertBuffer[mode] = true;

  // Allocate necessary internal buffers
  if ( stream_.nUserChannels[0] != stream_.nUserChannels[1] ) {

    if (stream_.nUserChannels[0] >= stream_.nUserChannels[1])
      buffer_bytes = stream_.nUserChannels[0];
    else
      buffer_bytes = stream_.nUserChannels[1];

    buffer_bytes *= *bufferSize * formatBytes(stream_.userFormat);
    if (stream_.userBuffer) free(stream_.userBuffer);
    stream_.userBuffer = (char *) calloc(buffer_bytes, 1);
    if (stream_.userBuffer == NULL) {
      close(fd);
      sprintf(message_, "RtApiOss: error allocating user buffer memory (%s).",
              name);
      goto error;
    }
  }

  if ( stream_.doConvertBuffer[mode] ) {

    bool makeBuffer = true;
    if ( mode == OUTPUT )
      buffer_bytes = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
    else { // mode == INPUT
      buffer_bytes = stream_.nDeviceChannels[1] * formatBytes(stream_.deviceFormat[1]);
      if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
        long bytes_out = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
        if ( buffer_bytes < bytes_out ) makeBuffer = false;
      }
    }

    if ( makeBuffer ) {
      buffer_bytes *= *bufferSize;
      if (stream_.deviceBuffer) free(stream_.deviceBuffer);
      stream_.deviceBuffer = (char *) calloc(buffer_bytes, 1);
      if (stream_.deviceBuffer == NULL) {
        close(fd);
        sprintf(message_, "RtApiOss: error allocating device buffer memory (%s).",
                name);
        goto error;
      }
    }
  }

  stream_.device[mode] = device;
  stream_.state = STREAM_STOPPED;

  if ( stream_.mode == OUTPUT && mode == INPUT ) {
    stream_.mode = DUPLEX;
    if (stream_.device[0] == device)
      handle[0] = fd;
  }
  else
    stream_.mode = mode;
  stream_.sub_mode = mode;

  return SUCCESS;

 error:
  if (handle) {
    if (handle[0]) close(handle[0]);
    if (handle[1]) close(handle[1]);
    free(handle);
    stream_.apiHandle = 0;
  }

  if (stream_.userBuffer) {
    free(stream_.userBuffer);
    stream_.userBuffer = 0;
  }

  error(RtError::WARNING);
  return FAILURE;
}
void RtApiOss :: closeStream()
{
  // We don't want an exception to be thrown here because this
  // function is called by our class destructor.  So, do our own
  // error reporting.
  if ( stream_.mode == UNINITIALIZED ) {
    sprintf(message_, "RtApiOss::closeStream(): no open stream to close!");
    error(RtError::WARNING);
    return;
  }

  int *handle = (int *) stream_.apiHandle;
  if (stream_.state == STREAM_RUNNING) {
    if (stream_.mode == OUTPUT || stream_.mode == DUPLEX)
      ioctl(handle[0], SNDCTL_DSP_RESET, 0);
    else
      ioctl(handle[1], SNDCTL_DSP_RESET, 0);
    stream_.state = STREAM_STOPPED;
  }

  if (stream_.callbackInfo.usingCallback) {
    stream_.callbackInfo.usingCallback = false;
    pthread_join(stream_.callbackInfo.thread, NULL);
  }

  if (handle) {
    if (handle[0]) close(handle[0]);
    if (handle[1]) close(handle[1]);
    free(handle);
    stream_.apiHandle = 0;
  }

  if (stream_.userBuffer) {
    free(stream_.userBuffer);
    stream_.userBuffer = 0;
  }

  if (stream_.deviceBuffer) {
    free(stream_.deviceBuffer);
    stream_.deviceBuffer = 0;
  }

  stream_.mode = UNINITIALIZED;
  stream_.sub_mode = UNINITIALIZED;
}
void RtApiOss :: startStream()
{
  if (stream_.state == STREAM_RUNNING) return;

  MUTEX_LOCK(&stream_.mutex);

  stream_.state = STREAM_RUNNING;

  // No need to do anything else here ... OSS automatically starts
  // when fed samples.

  MUTEX_UNLOCK(&stream_.mutex);
}

void RtApiOss :: stopStream()
{
  if (stream_.state == STREAM_STOPPED) return;

  // Change the state before the lock to improve shutdown response
  // when using a callback.
  stream_.state = STREAM_STOPPED;
  MUTEX_LOCK(&stream_.mutex);

  int err;
  int *handle = (int *) stream_.apiHandle;
  if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
    err = ioctl(handle[0], SNDCTL_DSP_POST, 0);
    //err = ioctl(handle[0], SNDCTL_DSP_SYNC, 0);
    if (err < 0) {
      sprintf(message_, "RtApiOss: error stopping device (%s).",
              devices_[stream_.device[0]].name.c_str());
      error(RtError::DRIVER_ERROR);
    }
  }

  if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
    err = ioctl(handle[1], SNDCTL_DSP_POST, 0);
    //err = ioctl(handle[1], SNDCTL_DSP_SYNC, 0);
    if (err < 0) {
      sprintf(message_, "RtApiOss: error stopping device (%s).",
              devices_[stream_.device[1]].name.c_str());
      error(RtError::DRIVER_ERROR);
    }
  }

  MUTEX_UNLOCK(&stream_.mutex);
}
void RtApiOss :: abortStream()
{
  stopStream();
}

int RtApiOss :: streamWillBlock()
{
  if (stream_.state == STREAM_STOPPED) return 0;

  MUTEX_LOCK(&stream_.mutex);

  int bytes = 0, channels = 0, frames = 0;
  audio_buf_info info;
  int *handle = (int *) stream_.apiHandle;
  if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
    ioctl(handle[0], SNDCTL_DSP_GETOSPACE, &info);
    bytes = info.bytes;
    channels = stream_.nDeviceChannels[0];
  }

  if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
    ioctl(handle[1], SNDCTL_DSP_GETISPACE, &info);
    if (stream_.mode == DUPLEX) {
      bytes = (bytes < info.bytes) ? bytes : info.bytes;
      channels = stream_.nDeviceChannels[0];
    }
    else {
      bytes = info.bytes;
      channels = stream_.nDeviceChannels[1];
    }
  }

  frames = (int) (bytes / (channels * formatBytes(stream_.deviceFormat[0])));
  frames -= stream_.bufferSize;
  if (frames < 0) frames = 0;

  MUTEX_UNLOCK(&stream_.mutex);
  return frames;
}
void RtApiOss :: tickStream()
{
  int stopStream = 0;

  if (stream_.state == STREAM_STOPPED) {
    if (stream_.callbackInfo.usingCallback) usleep(50000); // sleep 50 milliseconds
    return;
  }
  else if (stream_.callbackInfo.usingCallback) {
    RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;
    stopStream = callback(stream_.userBuffer, stream_.bufferSize, stream_.callbackInfo.userData);
  }

  MUTEX_LOCK(&stream_.mutex);

  // The state might change while waiting on a mutex.
  if (stream_.state == STREAM_STOPPED)
    goto unlock;

  int result, *handle;
  char *buffer;
  int samples;
  RtAudioFormat format;
  handle = (int *) stream_.apiHandle;
  if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {

    // Setup parameters and do buffer conversion if necessary.
    if (stream_.doConvertBuffer[0]) {
      convertStreamBuffer(OUTPUT);
      buffer = stream_.deviceBuffer;
      samples = stream_.bufferSize * stream_.nDeviceChannels[0];
      format = stream_.deviceFormat[0];
    }
    else {
      buffer = stream_.userBuffer;
      samples = stream_.bufferSize * stream_.nUserChannels[0];
      format = stream_.userFormat;
    }

    // Do byte swapping if necessary.
    if (stream_.doByteSwap[0])
      byteSwapBuffer(buffer, samples, format);

    // Write samples to device.
    result = write(handle[0], buffer, samples * formatBytes(format));
    if (result == -1) {
      // This could be an underrun, but the basic OSS API doesn't provide a means for determining that.
      sprintf(message_, "RtApiOss: audio write error for device (%s).",
              devices_[stream_.device[0]].name.c_str());
      error(RtError::DRIVER_ERROR);
    }
  }

  if (stream_.mode == INPUT || stream_.mode == DUPLEX) {

    // Setup parameters.
    if (stream_.doConvertBuffer[1]) {
      buffer = stream_.deviceBuffer;
      samples = stream_.bufferSize * stream_.nDeviceChannels[1];
      format = stream_.deviceFormat[1];
    }
    else {
      buffer = stream_.userBuffer;
      samples = stream_.bufferSize * stream_.nUserChannels[1];
      format = stream_.userFormat;
    }

    // Read samples from device.
    result = read(handle[1], buffer, samples * formatBytes(format));
    if (result == -1) {
      // This could be an overrun, but the basic OSS API doesn't provide a means for determining that.
      sprintf(message_, "RtApiOss: audio read error for device (%s).",
              devices_[stream_.device[1]].name.c_str());
      error(RtError::DRIVER_ERROR);
    }

    // Do byte swapping if necessary.
    if (stream_.doByteSwap[1])
      byteSwapBuffer(buffer, samples, format);

    // Do buffer conversion if necessary.
    if (stream_.doConvertBuffer[1])
      convertStreamBuffer(INPUT);
  }

 unlock:
  MUTEX_UNLOCK(&stream_.mutex);

  if (stream_.callbackInfo.usingCallback && stopStream)
    this->stopStream();
}
void RtApiOss :: setStreamCallback(RtAudioCallback callback, void *userData)
{
  CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
  if ( info->usingCallback ) {
    sprintf(message_, "RtApiOss: A callback is already set for this stream!");
    error(RtError::WARNING);
    return;
  }

  info->callback = (void *) callback;
  info->userData = userData;
  info->usingCallback = true;
  info->object = (void *) this;

  // Set the thread attributes for joinable and realtime scheduling
  // priority.  The higher priority will only take effect if the
  // program is run as root or suid.
  pthread_attr_t attr;
  pthread_attr_init(&attr);
  // pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
  // pthread_attr_setschedpolicy(&attr, SCHED_RR);
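  // Illustrative sketch (added note, not in the original source): if realtime
  // scheduling is wanted, the commented-out attributes above could be filled
  // in roughly as follows.  The policy/priority choices are assumptions and
  // only take effect when the program runs as root or suid.
  //
  //   struct sched_param param;
  //   pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
  //   pthread_attr_setschedpolicy(&attr, SCHED_RR);
  //   param.sched_priority = sched_get_priority_min(SCHED_RR);
  //   pthread_attr_setschedparam(&attr, &param);
  //   pthread_attr_setinheritsched(&attr, PTHREAD_EXPLICIT_SCHED);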
  int err = pthread_create(&(info->thread), &attr, ossCallbackHandler, &stream_.callbackInfo);
  pthread_attr_destroy(&attr);
  if (err) {
    info->usingCallback = false;
    sprintf(message_, "RtApiOss: error starting callback thread!");
    error(RtError::THREAD_ERROR);
  }
}

void RtApiOss :: cancelStreamCallback()
{
  if (stream_.callbackInfo.usingCallback) {

    if (stream_.state == STREAM_RUNNING)
      stopStream();

    MUTEX_LOCK(&stream_.mutex);

    stream_.callbackInfo.usingCallback = false;
    pthread_join(stream_.callbackInfo.thread, NULL);
    stream_.callbackInfo.thread = 0;
    stream_.callbackInfo.callback = NULL;
    stream_.callbackInfo.userData = NULL;

    MUTEX_UNLOCK(&stream_.mutex);
  }
}

extern "C" void *ossCallbackHandler(void *ptr)
{
  CallbackInfo *info = (CallbackInfo *) ptr;
  RtApiOss *object = (RtApiOss *) info->object;
  bool *usingCallback = &info->usingCallback;

  while ( *usingCallback ) {
    pthread_testcancel();
    try {
      object->tickStream();
    }
    catch (RtError &exception) {
      fprintf(stderr, "\nRtApiOss: callback thread error (%s) ... closing thread.\n\n",
              exception.getMessageString());
      break;
    }
  }

  return 0;
}

//******************** End of __LINUX_OSS__ *********************//
#endif
#if defined(__MACOSX_CORE__)

#include <CoreAudio/AudioHardware.h>

// The OS X CoreAudio API is designed to use a separate callback
// procedure for each of its audio devices.  A single RtAudio duplex
// stream using two different devices is supported here, though it
// cannot be guaranteed to always behave correctly because we cannot
// synchronize these two callbacks.  This same functionality can be
// achieved with better synchrony by opening two separate streams for
// the devices and using RtAudio blocking calls (i.e. tickStream()).
//
// A property listener is installed for over/underrun information.
// However, no functionality is currently provided to allow property
// listeners to trigger user handlers because it is unclear what could
// be done if a critical stream parameter (buffer size, sample rate,
// device disconnect) notification arrived.  The listeners entail
// quite a bit of extra code and most likely, a user program wouldn't
// be prepared for the result anyway.
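// Illustrative sketch (added note, not in the original source): the "two
// separate streams with blocking calls" alternative mentioned above would
// look roughly like the following.  Device indices, channel counts, and
// formats are assumptions chosen for illustration.
/*
  int bufferSize = 512;
  RtAudio out( 0, 2, 0, 0, RTAUDIO_FLOAT32, 44100, &bufferSize, 4 );  // output-only
  RtAudio in ( 0, 0, 0, 2, RTAUDIO_FLOAT32, 44100, &bufferSize, 4 );  // input-only
  in.startStream();
  out.startStream();
  while ( running ) {
    in.tickStream();    // block until an input buffer is ready
    // ... process in.getStreamBuffer() into out.getStreamBuffer() ...
    out.tickStream();   // block until the output buffer is consumed
  }
*/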
// A structure to hold various information related to the CoreAudio API
// implementation.
struct CoreHandle {
  UInt32 index[2];
  bool stopStream;
  bool xrun;
  char *deviceBuffer;
  pthread_cond_t condition;

  CoreHandle()
    :stopStream(false), xrun(false), deviceBuffer(0) {}
};

RtApiCore :: RtApiCore()
{
  this->initialize();

  if (nDevices_ <= 0) {
    sprintf(message_, "RtApiCore: no Macintosh OS-X Core Audio devices found!");
    error(RtError::NO_DEVICES_FOUND);
  }
}

RtApiCore :: ~RtApiCore()
{
  // The subclass destructor gets called before the base class
  // destructor, so close an existing stream before deallocating
  // apiDeviceId memory.
  if ( stream_.mode != UNINITIALIZED ) closeStream();

  // Free our allocated apiDeviceId memory.
  AudioDeviceID *id;
  for ( unsigned int i=0; i<devices_.size(); i++ ) {
    id = (AudioDeviceID *) devices_[i].apiDeviceId;
    if ( id ) free( id );
  }
}
1523 OSStatus err
= noErr
;
1525 AudioDeviceID
*deviceList
= NULL
;
1528 // Find out how many audio devices there are, if any.
1529 err
= AudioHardwareGetPropertyInfo(kAudioHardwarePropertyDevices
, &dataSize
, NULL
);
1531 sprintf(message_
, "RtApiCore: OS-X error getting device info!");
1532 error(RtError::SYSTEM_ERROR
);
1535 nDevices_
= dataSize
/ sizeof(AudioDeviceID
);
1536 if (nDevices_
== 0) return;
1538 // Make space for the devices we are about to get.
1539 deviceList
= (AudioDeviceID
*) malloc( dataSize
);
1540 if (deviceList
== NULL
) {
1541 sprintf(message_
, "RtApiCore: memory allocation error during initialization!");
1542 error(RtError::MEMORY_ERROR
);
1545 // Get the array of AudioDeviceIDs.
1546 err
= AudioHardwareGetProperty(kAudioHardwarePropertyDevices
, &dataSize
, (void *) deviceList
);
1549 sprintf(message_
, "RtApiCore: OS-X error getting device properties!");
1550 error(RtError::SYSTEM_ERROR
);
1553 // Create list of device structures and write device identifiers.
1556 for (int i
=0; i
<nDevices_
; i
++) {
1557 devices_
.push_back(device
);
1558 id
= (AudioDeviceID
*) malloc( sizeof(AudioDeviceID
) );
1559 *id
= deviceList
[i
];
1560 devices_
[i
].apiDeviceId
= (void *) id
;
int RtApiCore :: getDefaultInputDevice(void)
{
  AudioDeviceID id, *deviceId;
  UInt32 dataSize = sizeof( AudioDeviceID );

  OSStatus result = AudioHardwareGetProperty( kAudioHardwarePropertyDefaultInputDevice,
                                              &dataSize, &id );

  if (result != noErr) {
    sprintf( message_, "RtApiCore: OS-X error getting default input device." );
    error(RtError::WARNING);
    return 0;
  }

  for ( int i=0; i<nDevices_; i++ ) {
    deviceId = (AudioDeviceID *) devices_[i].apiDeviceId;
    if ( id == *deviceId ) return i;
  }

  return 0;
}

int RtApiCore :: getDefaultOutputDevice(void)
{
  AudioDeviceID id, *deviceId;
  UInt32 dataSize = sizeof( AudioDeviceID );

  OSStatus result = AudioHardwareGetProperty( kAudioHardwarePropertyDefaultOutputDevice,
                                              &dataSize, &id );

  if (result != noErr) {
    sprintf( message_, "RtApiCore: OS-X error getting default output device." );
    error(RtError::WARNING);
    return 0;
  }

  for ( int i=0; i<nDevices_; i++ ) {
    deviceId = (AudioDeviceID *) devices_[i].apiDeviceId;
    if ( id == *deviceId ) return i;
  }

  return 0;
}
static bool deviceSupportsFormat( AudioDeviceID id, bool isInput,
                                  AudioStreamBasicDescription *desc, bool isDuplex )
{
  OSStatus result = noErr;
  UInt32 dataSize = sizeof( AudioStreamBasicDescription );

  result = AudioDeviceGetProperty( id, 0, isInput,
                                   kAudioDevicePropertyStreamFormatSupported,
                                   &dataSize, desc );

  if (result == kAudioHardwareNoError) {
    if ( isDuplex ) {
      result = AudioDeviceGetProperty( id, 0, true,
                                       kAudioDevicePropertyStreamFormatSupported,
                                       &dataSize, desc );

      if (result != kAudioHardwareNoError)
        return false;
    }
    return true;
  }

  return false;
}
1636 void RtApiCore :: probeDeviceInfo( RtApiDevice
*info
)
1638 OSStatus err
= noErr
;
1640 // Get the device manufacturer and name.
1643 UInt32 dataSize
= 256;
1644 AudioDeviceID
*id
= (AudioDeviceID
*) info
->apiDeviceId
;
1645 err
= AudioDeviceGetProperty( *id
, 0, false,
1646 kAudioDevicePropertyDeviceManufacturer
,
1649 sprintf( message_
, "RtApiCore: OS-X error getting device manufacturer." );
1650 error(RtError::DEBUG_WARNING
);
1653 strncpy(fullname
, name
, 256);
1654 strcat(fullname
, ": " );
1657 err
= AudioDeviceGetProperty( *id
, 0, false,
1658 kAudioDevicePropertyDeviceName
,
1661 sprintf( message_
, "RtApiCore: OS-X error getting device name." );
1662 error(RtError::DEBUG_WARNING
);
1665 strncat(fullname
, name
, 254);
1667 info
->name
.append( (const char *)fullname
, strlen(fullname
)+1);
1669 // Get output channel information.
1670 unsigned int i
, minChannels
= 0, maxChannels
= 0, nStreams
= 0;
1671 AudioBufferList
*bufferList
= nil
;
1672 err
= AudioDeviceGetPropertyInfo( *id
, 0, false,
1673 kAudioDevicePropertyStreamConfiguration
,
1675 if (err
== noErr
&& dataSize
> 0) {
1676 bufferList
= (AudioBufferList
*) malloc( dataSize
);
1677 if (bufferList
== NULL
) {
1678 sprintf(message_
, "RtApiCore: memory allocation error!");
1679 error(RtError::DEBUG_WARNING
);
1683 err
= AudioDeviceGetProperty( *id
, 0, false,
1684 kAudioDevicePropertyStreamConfiguration
,
1685 &dataSize
, bufferList
);
1689 nStreams
= bufferList
->mNumberBuffers
;
1690 for ( i
=0; i
<nStreams
; i
++ ) {
1691 maxChannels
+= bufferList
->mBuffers
[i
].mNumberChannels
;
1692 if ( bufferList
->mBuffers
[i
].mNumberChannels
< minChannels
)
1693 minChannels
= bufferList
->mBuffers
[i
].mNumberChannels
;
1699 if (err
!= noErr
|| dataSize
<= 0) {
1700 sprintf( message_
, "RtApiCore: OS-X error getting output channels for device (%s).",
1701 info
->name
.c_str() );
1702 error(RtError::DEBUG_WARNING
);
1707 if ( maxChannels
> 0 )
1708 info
->maxOutputChannels
= maxChannels
;
1709 if ( minChannels
> 0 )
1710 info
->minOutputChannels
= minChannels
;
1713 // Get input channel information.
1715 err
= AudioDeviceGetPropertyInfo( *id
, 0, true,
1716 kAudioDevicePropertyStreamConfiguration
,
1718 if (err
== noErr
&& dataSize
> 0) {
1719 bufferList
= (AudioBufferList
*) malloc( dataSize
);
1720 if (bufferList
== NULL
) {
1721 sprintf(message_
, "RtApiCore: memory allocation error!");
1722 error(RtError::DEBUG_WARNING
);
1725 err
= AudioDeviceGetProperty( *id
, 0, true,
1726 kAudioDevicePropertyStreamConfiguration
,
1727 &dataSize
, bufferList
);
1731 nStreams
= bufferList
->mNumberBuffers
;
1732 for ( i
=0; i
<nStreams
; i
++ ) {
1733 if ( bufferList
->mBuffers
[i
].mNumberChannels
< minChannels
)
1734 minChannels
= bufferList
->mBuffers
[i
].mNumberChannels
;
1735 maxChannels
+= bufferList
->mBuffers
[i
].mNumberChannels
;
1741 if (err
!= noErr
|| dataSize
<= 0) {
1742 sprintf( message_
, "RtApiCore: OS-X error getting input channels for device (%s).",
1743 info
->name
.c_str() );
1744 error(RtError::DEBUG_WARNING
);
1749 if ( maxChannels
> 0 )
1750 info
->maxInputChannels
= maxChannels
;
1751 if ( minChannels
> 0 )
1752 info
->minInputChannels
= minChannels
;
1755 // If device opens for both playback and capture, we determine the channels.
1756 if (info
->maxOutputChannels
> 0 && info
->maxInputChannels
> 0) {
1757 info
->hasDuplexSupport
= true;
1758 info
->maxDuplexChannels
= (info
->maxOutputChannels
> info
->maxInputChannels
) ?
1759 info
->maxInputChannels
: info
->maxOutputChannels
;
1760 info
->minDuplexChannels
= (info
->minOutputChannels
> info
->minInputChannels
) ?
1761 info
->minInputChannels
: info
->minOutputChannels
;
1764 // Probe the device sample rate and data format parameters. The
1765 // core audio query mechanism is performed on a "stream"
1766 // description, which can have a variable number of channels and
1767 // apply to input or output only.
1769 // Create a stream description structure.
1770 AudioStreamBasicDescription description
;
1771 dataSize
= sizeof( AudioStreamBasicDescription
);
1772 memset(&description
, 0, sizeof(AudioStreamBasicDescription
));
1773 bool isInput
= false;
1774 if ( info
->maxOutputChannels
== 0 ) isInput
= true;
1775 bool isDuplex
= false;
1776 if ( info
->maxDuplexChannels
> 0 ) isDuplex
= true;
1778 // Determine the supported sample rates.
1779 info
->sampleRates
.clear();
1780 for (unsigned int k
=0; k
<MAX_SAMPLE_RATES
; k
++) {
1781 description
.mSampleRate
= (double) SAMPLE_RATES
[k
];
1782 if ( deviceSupportsFormat( *id
, isInput
, &description
, isDuplex
) )
1783 info
->sampleRates
.push_back( SAMPLE_RATES
[k
] );
1786 if (info
->sampleRates
.size() == 0) {
1787 sprintf( message_
, "RtApiCore: No supported sample rates found for OS-X device (%s).",
1788 info
->name
.c_str() );
1789 error(RtError::DEBUG_WARNING
);
1793 // Determine the supported data formats.
1794 info
->nativeFormats
= 0;
1795 description
.mFormatID
= kAudioFormatLinearPCM
;
1796 description
.mBitsPerChannel
= 8;
1797 description
.mFormatFlags
= kLinearPCMFormatFlagIsSignedInteger
| kLinearPCMFormatFlagIsPacked
| kLinearPCMFormatFlagIsBigEndian
;
1798 if ( deviceSupportsFormat( *id
, isInput
, &description
, isDuplex
) )
1799 info
->nativeFormats
|= RTAUDIO_SINT8
;
1801 description
.mFormatFlags
&= ~kLinearPCMFormatFlagIsBigEndian
;
1802 if ( deviceSupportsFormat( *id
, isInput
, &description
, isDuplex
) )
1803 info
->nativeFormats
|= RTAUDIO_SINT8
;
1806 description
.mBitsPerChannel
= 16;
1807 description
.mFormatFlags
|= kLinearPCMFormatFlagIsBigEndian
;
1808 if ( deviceSupportsFormat( *id
, isInput
, &description
, isDuplex
) )
1809 info
->nativeFormats
|= RTAUDIO_SINT16
;
1811 description
.mFormatFlags
&= ~kLinearPCMFormatFlagIsBigEndian
;
1812 if ( deviceSupportsFormat( *id
, isInput
, &description
, isDuplex
) )
1813 info
->nativeFormats
|= RTAUDIO_SINT16
;
1816 description
.mBitsPerChannel
= 32;
1817 description
.mFormatFlags
|= kLinearPCMFormatFlagIsBigEndian
;
1818 if ( deviceSupportsFormat( *id
, isInput
, &description
, isDuplex
) )
1819 info
->nativeFormats
|= RTAUDIO_SINT32
;
1821 description
.mFormatFlags
&= ~kLinearPCMFormatFlagIsBigEndian
;
1822 if ( deviceSupportsFormat( *id
, isInput
, &description
, isDuplex
) )
1823 info
->nativeFormats
|= RTAUDIO_SINT32
;
1826 description
.mBitsPerChannel
= 24;
1827 description
.mFormatFlags
= kLinearPCMFormatFlagIsSignedInteger
| kLinearPCMFormatFlagIsAlignedHigh
| kLinearPCMFormatFlagIsBigEndian
;
1828 if ( deviceSupportsFormat( *id
, isInput
, &description
, isDuplex
) )
1829 info
->nativeFormats
|= RTAUDIO_SINT24
;
1831 description
.mFormatFlags
&= ~kLinearPCMFormatFlagIsBigEndian
;
1832 if ( deviceSupportsFormat( *id
, isInput
, &description
, isDuplex
) )
1833 info
->nativeFormats
|= RTAUDIO_SINT24
;
1836 description
.mBitsPerChannel
= 32;
1837 description
.mFormatFlags
= kLinearPCMFormatFlagIsFloat
| kLinearPCMFormatFlagIsPacked
| kLinearPCMFormatFlagIsBigEndian
;
1838 if ( deviceSupportsFormat( *id
, isInput
, &description
, isDuplex
) )
1839 info
->nativeFormats
|= RTAUDIO_FLOAT32
;
1841 description
.mFormatFlags
&= ~kLinearPCMFormatFlagIsBigEndian
;
1842 if ( deviceSupportsFormat( *id
, isInput
, &description
, isDuplex
) )
1843 info
->nativeFormats
|= RTAUDIO_FLOAT32
;
1846 description
.mBitsPerChannel
= 64;
1847 description
.mFormatFlags
|= kLinearPCMFormatFlagIsBigEndian
;
1848 if ( deviceSupportsFormat( *id
, isInput
, &description
, isDuplex
) )
1849 info
->nativeFormats
|= RTAUDIO_FLOAT64
;
1851 description
.mFormatFlags
&= ~kLinearPCMFormatFlagIsBigEndian
;
1852 if ( deviceSupportsFormat( *id
, isInput
, &description
, isDuplex
) )
1853 info
->nativeFormats
|= RTAUDIO_FLOAT64
;
1856 // Check that we have at least one supported format.
1857 if (info
->nativeFormats
== 0) {
1858 sprintf(message_
, "RtApiCore: OS-X device (%s) data format not supported by RtAudio.",
1859 info
->name
.c_str());
1860 error(RtError::DEBUG_WARNING
);
1864 info
->probed
= true;
OSStatus callbackHandler(AudioDeviceID inDevice,
                         const AudioTimeStamp* inNow,
                         const AudioBufferList* inInputData,
                         const AudioTimeStamp* inInputTime,
                         AudioBufferList* outOutputData,
                         const AudioTimeStamp* inOutputTime,
                         void* infoPointer)
{
  CallbackInfo *info = (CallbackInfo *) infoPointer;

  RtApiCore *object = (RtApiCore *) info->object;
  try {
    object->callbackEvent( inDevice, (void *)inInputData, (void *)outOutputData );
  }
  catch (RtError &exception) {
    fprintf(stderr, "\nRtApiCore: callback handler error (%s)!\n\n", exception.getMessageString());
    return kAudioHardwareUnspecifiedError;
  }

  return kAudioHardwareNoError;
}

OSStatus deviceListener(AudioDeviceID inDevice,
                        UInt32 channel,
                        Boolean isInput,
                        AudioDevicePropertyID propertyID,
                        void* handlePointer)
{
  CoreHandle *handle = (CoreHandle *) handlePointer;
  if ( propertyID == kAudioDeviceProcessorOverload ) {
#ifdef __CHUCK_DEBUG__
    if ( isInput )
      fprintf(stderr, "[chuck](via rtaudio): RtApiCore: OS-X audio input overrun detected!\n");
    else
      fprintf(stderr, "[chuck](via rtaudio): RtApiCore: OS-X audio output underrun detected!\n");
#endif
    handle->xrun = true;
  }

  return kAudioHardwareNoError;
}
1909 bool RtApiCore :: probeDeviceOpen( int device
, StreamMode mode
, int channels
,
1910 int sampleRate
, RtAudioFormat format
,
1911 int *bufferSize
, int numberOfBuffers
)
1913 // Setup for stream mode.
1914 bool isInput
= false;
1915 AudioDeviceID id
= *((AudioDeviceID
*) devices_
[device
].apiDeviceId
);
1916 if ( mode
== INPUT
) isInput
= true;
1918 // Search for a stream which contains the desired number of channels.
1919 OSStatus err
= noErr
;
1921 unsigned int deviceChannels
, nStreams
= 0;
1922 UInt32 iChannel
= 0, iStream
= 0;
1923 AudioBufferList
*bufferList
= nil
;
1924 err
= AudioDeviceGetPropertyInfo( id
, 0, isInput
,
1925 kAudioDevicePropertyStreamConfiguration
,
1928 if (err
== noErr
&& dataSize
> 0) {
1929 bufferList
= (AudioBufferList
*) malloc( dataSize
);
1930 if (bufferList
== NULL
) {
1931 sprintf(message_
, "RtApiCore: memory allocation error in probeDeviceOpen()!");
1932 error(RtError::DEBUG_WARNING
);
1935 err
= AudioDeviceGetProperty( id
, 0, isInput
,
1936 kAudioDevicePropertyStreamConfiguration
,
1937 &dataSize
, bufferList
);
1940 stream_
.deInterleave
[mode
] = false;
1941 nStreams
= bufferList
->mNumberBuffers
;
1942 for ( iStream
=0; iStream
<nStreams
; iStream
++ ) {
1943 if ( bufferList
->mBuffers
[iStream
].mNumberChannels
>= (unsigned int) channels
) break;
1944 iChannel
+= bufferList
->mBuffers
[iStream
].mNumberChannels
;
1946 // If we didn't find a single stream above, see if we can meet
1947 // the channel specification in mono mode (i.e. using separate
1948 // non-interleaved buffers). This can only work if there are N
1949 // consecutive one-channel streams, where N is the number of
1950 // desired channels.
1952 if ( iStream
>= nStreams
&& nStreams
>= (unsigned int) channels
) {
1954 for ( iStream
=0; iStream
<nStreams
; iStream
++ ) {
1955 if ( bufferList
->mBuffers
[iStream
].mNumberChannels
== 1 )
1959 if ( counter
== channels
) {
1960 iStream
-= channels
- 1;
1961 iChannel
-= channels
- 1;
1962 stream_
.deInterleave
[mode
] = true;
1965 iChannel
+= bufferList
->mBuffers
[iStream
].mNumberChannels
;
1970 if (err
!= noErr
|| dataSize
<= 0) {
1971 if ( bufferList
) free( bufferList
);
1972 sprintf( message_
, "RtApiCore: OS-X error getting channels for device (%s).",
1973 devices_
[device
].name
.c_str() );
1974 error(RtError::DEBUG_WARNING
);
1978 if (iStream
>= nStreams
) {
1980 sprintf( message_
, "RtApiCore: unable to find OS-X audio stream on device (%s) for requested channels (%d).",
1981 devices_
[device
].name
.c_str(), channels
);
1982 error(RtError::DEBUG_WARNING
);
1986 // This is ok even for mono mode ... it gets updated later.
1987 deviceChannels
= bufferList
->mBuffers
[iStream
].mNumberChannels
;
1990 // Determine the buffer size.
1991 AudioValueRange bufferRange
;
1992 dataSize
= sizeof(AudioValueRange
);
1993 err
= AudioDeviceGetProperty( id
, 0, isInput
,
1994 kAudioDevicePropertyBufferSizeRange
,
1995 &dataSize
, &bufferRange
);
1997 sprintf( message_
, "RtApiCore: OS-X error getting buffer size range for device (%s).",
1998 devices_
[device
].name
.c_str() );
1999 error(RtError::DEBUG_WARNING
);
2003 long bufferBytes
= *bufferSize
* deviceChannels
* formatBytes(RTAUDIO_FLOAT32
);
2004 if (bufferRange
.mMinimum
> bufferBytes
) bufferBytes
= (int) bufferRange
.mMinimum
;
2005 else if (bufferRange
.mMaximum
< bufferBytes
) bufferBytes
= (int) bufferRange
.mMaximum
;
  // Set the buffer size.  For mono mode, I'm assuming we only need to
  // make this setting for the first channel.
  UInt32 theSize = (UInt32) bufferBytes;
  dataSize = sizeof( UInt32 );
  err = AudioDeviceSetProperty( id, NULL, 0, isInput,
                                kAudioDevicePropertyBufferSize,
                                dataSize, &theSize );
  if ( err != noErr ) {
    sprintf( message_, "RtApiCore: OS-X error setting the buffer size for device (%s).",
             devices_[device].name.c_str() );
    error(RtError::DEBUG_WARNING);
    return FAILURE;
  }
  // If attempting to setup a duplex stream, the bufferSize parameter
  // MUST be the same in both directions!
  *bufferSize = bufferBytes / ( deviceChannels * formatBytes(RTAUDIO_FLOAT32) );
  if ( stream_.mode == OUTPUT && mode == INPUT && *bufferSize != stream_.bufferSize ) {
    sprintf( message_, "RtApiCore: OS-X error setting buffer size for duplex stream on device (%s).",
             devices_[device].name.c_str() );
    error(RtError::DEBUG_WARNING);
    return FAILURE;
  }

  stream_.bufferSize = *bufferSize;
  stream_.nBuffers = 1;
2034 // Set the stream format description. Do for each channel in mono mode.
2035 AudioStreamBasicDescription description
;
2036 dataSize
= sizeof( AudioStreamBasicDescription
);
2037 if ( stream_
.deInterleave
[mode
] ) nStreams
= channels
;
2039 for ( unsigned int i
=0; i
<nStreams
; i
++, iChannel
++ ) {
2041 err
= AudioDeviceGetProperty( id
, iChannel
, isInput
,
2042 kAudioDevicePropertyStreamFormat
,
2043 &dataSize
, &description
);
2045 sprintf( message_
, "RtApiCore: OS-X error getting stream format for device (%s).",
2046 devices_
[device
].name
.c_str() );
2047 error(RtError::DEBUG_WARNING
);
2051 // Set the sample rate and data format id.
2052 description
.mSampleRate
= (double) sampleRate
;
2053 description
.mFormatID
= kAudioFormatLinearPCM
;
2054 err
= AudioDeviceSetProperty( id
, NULL
, iChannel
, isInput
,
2055 kAudioDevicePropertyStreamFormat
,
2056 dataSize
, &description
);
2058 sprintf( message_
, "RtApiCore: OS-X error setting sample rate or data format for device (%s).",
2059 devices_
[device
].name
.c_str() );
2060 error(RtError::DEBUG_WARNING
);
2065 // Check whether we need byte-swapping (assuming OS-X host is big-endian).
2066 iChannel
-= nStreams
;
2067 err
= AudioDeviceGetProperty( id
, iChannel
, isInput
,
2068 kAudioDevicePropertyStreamFormat
,
2069 &dataSize
, &description
);
2071 sprintf( message_
, "RtApiCore: OS-X error getting stream format for device (%s).", devices_
[device
].name
.c_str() );
2072 error(RtError::DEBUG_WARNING
);
  stream_.doByteSwap[mode] = false;
  if ( !( description.mFormatFlags & kLinearPCMFormatFlagIsBigEndian ) )
    stream_.doByteSwap[mode] = true;
  // From the CoreAudio documentation, PCM data must be supplied as 32-bit floats.
2082 stream_
.userFormat
= format
;
2083 stream_
.deviceFormat
[mode
] = RTAUDIO_FLOAT32
;
2085 if ( stream_
.deInterleave
[mode
] ) // mono mode
2086 stream_
.nDeviceChannels
[mode
] = channels
;
2088 stream_
.nDeviceChannels
[mode
] = description
.mChannelsPerFrame
;
2089 stream_
.nUserChannels
[mode
] = channels
;
2091 // Set flags for buffer conversion.
2092 stream_
.doConvertBuffer
[mode
] = false;
2093 if (stream_
.userFormat
!= stream_
.deviceFormat
[mode
])
2094 stream_
.doConvertBuffer
[mode
] = true;
2095 if (stream_
.nUserChannels
[mode
] < stream_
.nDeviceChannels
[mode
])
2096 stream_
.doConvertBuffer
[mode
] = true;
2097 if (stream_
.nUserChannels
[mode
] > 1 && stream_
.deInterleave
[mode
])
2098 stream_
.doConvertBuffer
[mode
] = true;
2100 // Allocate our CoreHandle structure for the stream.
2102 if ( stream_
.apiHandle
== 0 ) {
2103 handle
= (CoreHandle
*) calloc(1, sizeof(CoreHandle
));
2104 if ( handle
== NULL
) {
2105 sprintf(message_
, "RtApiCore: OS-X error allocating coreHandle memory (%s).",
2106 devices_
[device
].name
.c_str());
2109 handle
->index
[0] = 0;
2110 handle
->index
[1] = 0;
2111 if ( pthread_cond_init(&handle
->condition
, NULL
) ) {
2112 sprintf(message_
, "RtApiCore: error initializing pthread condition variable (%s).",
2113 devices_
[device
].name
.c_str());
2116 stream_
.apiHandle
= (void *) handle
;
2119 handle
= (CoreHandle
*) stream_
.apiHandle
;
2120 handle
->index
[mode
] = iStream
;
2122 // Allocate necessary internal buffers.
2123 if ( stream_
.nUserChannels
[0] != stream_
.nUserChannels
[1] ) {
2126 if (stream_
.nUserChannels
[0] >= stream_
.nUserChannels
[1])
2127 buffer_bytes
= stream_
.nUserChannels
[0];
2129 buffer_bytes
= stream_
.nUserChannels
[1];
2131 buffer_bytes
*= *bufferSize
* formatBytes(stream_
.userFormat
);
2132 if (stream_
.userBuffer
) free(stream_
.userBuffer
);
2133 stream_
.userBuffer
= (char *) calloc(buffer_bytes
, 1);
2134 if (stream_
.userBuffer
== NULL
) {
2135 sprintf(message_
, "RtApiCore: OS-X error allocating user buffer memory (%s).",
2136 devices_
[device
].name
.c_str());
2141 if ( stream_
.deInterleave
[mode
] ) {
2144 bool makeBuffer
= true;
2145 if ( mode
== OUTPUT
)
2146 buffer_bytes
= stream_
.nDeviceChannels
[0] * formatBytes(stream_
.deviceFormat
[0]);
2147 else { // mode == INPUT
2148 buffer_bytes
= stream_
.nDeviceChannels
[1] * formatBytes(stream_
.deviceFormat
[1]);
2149 if ( stream_
.mode
== OUTPUT
&& stream_
.deviceBuffer
) {
2150 long bytes_out
= stream_
.nDeviceChannels
[0] * formatBytes(stream_
.deviceFormat
[0]);
2151 if ( buffer_bytes
< bytes_out
) makeBuffer
= false;
2156 buffer_bytes
*= *bufferSize
;
2157 if (stream_
.deviceBuffer
) free(stream_
.deviceBuffer
);
2158 stream_
.deviceBuffer
= (char *) calloc(buffer_bytes
, 1);
2159 if (stream_
.deviceBuffer
== NULL
) {
2160 sprintf(message_
, "RtApiCore: error allocating device buffer memory (%s).",
2161 devices_
[device
].name
.c_str());
2165 // If not de-interleaving, we point stream_.deviceBuffer to the
2166 // OS X supplied device buffer before doing any necessary data
2167 // conversions. This presents a problem if we have a duplex
2168 // stream using one device which needs de-interleaving and
2169 // another device which doesn't. So, save a pointer to our own
2170 // device buffer in the CallbackInfo structure.
2171 handle
->deviceBuffer
= stream_
.deviceBuffer
;
2175 stream_
.sampleRate
= sampleRate
;
2176 stream_
.device
[mode
] = device
;
2177 stream_
.state
= STREAM_STOPPED
;
2178 stream_
.callbackInfo
.object
= (void *) this;
2180 if ( stream_
.mode
== OUTPUT
&& mode
== INPUT
&& stream_
.device
[0] == device
)
2181 // Only one callback procedure per device.
2182 stream_
.mode
= DUPLEX
;
2184 err
= AudioDeviceAddIOProc( id
, callbackHandler
, (void *) &stream_
.callbackInfo
);
2186 sprintf( message_
, "RtApiCore: OS-X error setting callback for device (%s).", devices_
[device
].name
.c_str() );
2187 error(RtError::DEBUG_WARNING
);
2190 if ( stream_
.mode
== OUTPUT
&& mode
== INPUT
)
2191 stream_
.mode
= DUPLEX
;
2193 stream_
.mode
= mode
;
2196 stream_
.sub_mode
= mode
;
2198 // Setup the device property listener for over/underload.
2199 err
= AudioDeviceAddPropertyListener( id
, iChannel
, isInput
,
2200 kAudioDeviceProcessorOverload
,
2201 deviceListener
, (void *) handle
);
2207 pthread_cond_destroy(&handle
->condition
);
2209 stream_
.apiHandle
= 0;
2212 if (stream_
.userBuffer
) {
2213 free(stream_
.userBuffer
);
2214 stream_
.userBuffer
= 0;
2217 error(RtError::WARNING
);
2221 void RtApiCore :: closeStream()
  // We don't want an exception to be thrown here because this
  // function is called by our class destructor.  So, do our own
  // error checking here.
2226 if ( stream_
.mode
== UNINITIALIZED
) {
2227 sprintf(message_
, "RtApiCore::closeStream(): no open stream to close!");
2228 error(RtError::WARNING
);
2232 AudioDeviceID id
= *( (AudioDeviceID
*) devices_
[stream_
.device
[0]].apiDeviceId
);
2233 if (stream_
.mode
== OUTPUT
|| stream_
.mode
== DUPLEX
) {
2234 if (stream_
.state
== STREAM_RUNNING
)
2235 AudioDeviceStop( id
, callbackHandler
);
2236 AudioDeviceRemoveIOProc( id
, callbackHandler
);
2239 id
= *( (AudioDeviceID
*) devices_
[stream_
.device
[1]].apiDeviceId
);
2240 if (stream_
.mode
== INPUT
|| ( stream_
.mode
== DUPLEX
&& stream_
.device
[0] != stream_
.device
[1]) ) {
2241 if (stream_
.state
== STREAM_RUNNING
)
2242 AudioDeviceStop( id
, callbackHandler
);
2243 AudioDeviceRemoveIOProc( id
, callbackHandler
);
2246 if (stream_
.userBuffer
) {
2247 free(stream_
.userBuffer
);
2248 stream_
.userBuffer
= 0;
2251 if ( stream_
.deInterleave
[0] || stream_
.deInterleave
[1] ) {
2252 free(stream_
.deviceBuffer
);
2253 stream_
.deviceBuffer
= 0;
2256 CoreHandle
*handle
= (CoreHandle
*) stream_
.apiHandle
;
2258 // Destroy pthread condition variable and free the CoreHandle structure.
2260 pthread_cond_destroy(&handle
->condition
);
2262 stream_
.apiHandle
= 0;
2265 stream_
.mode
= UNINITIALIZED
;
2266 stream_
.sub_mode
= UNINITIALIZED
;
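// closeStream() above tears a CoreAudio device down in two steps: stop the
// device if its callback is still running, then detach the IOProc that was
// registered at open time.  A minimal sketch of that order of operations,
// assuming "proc" is the IOProc previously passed to AudioDeviceAddIOProc
// (the helper name is illustrative only):
static void removeIOProcSketch( AudioDeviceID device, AudioDeviceIOProc proc, bool running )
{
  if ( running ) AudioDeviceStop( device, proc );  // halt callbacks first
  AudioDeviceRemoveIOProc( device, proc );         // then unregister the proc
}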
2269 void RtApiCore :: startStream()
2272 if (stream_
.state
== STREAM_RUNNING
) return;
2274 MUTEX_LOCK(&stream_
.mutex
);
2278 if (stream_
.mode
== OUTPUT
|| stream_
.mode
== DUPLEX
) {
2280 id
= *( (AudioDeviceID
*) devices_
[stream_
.device
[0]].apiDeviceId
);
2281 err
= AudioDeviceStart(id
, callbackHandler
);
2283 sprintf(message_
, "RtApiCore: OS-X error starting callback procedure on device (%s).",
2284 devices_
[stream_
.device
[0]].name
.c_str());
2285 MUTEX_UNLOCK(&stream_
.mutex
);
2286 error(RtError::DRIVER_ERROR
);
2290 if (stream_
.mode
== INPUT
|| ( stream_
.mode
== DUPLEX
&& stream_
.device
[0] != stream_
.device
[1]) ) {
2292 id
= *( (AudioDeviceID
*) devices_
[stream_
.device
[1]].apiDeviceId
);
2293 err
= AudioDeviceStart(id
, callbackHandler
);
2295 sprintf(message_
, "RtApiCore: OS-X error starting input callback procedure on device (%s).",
2296 devices_
[stream_
.device
[0]].name
.c_str());
2297 MUTEX_UNLOCK(&stream_
.mutex
);
2298 error(RtError::DRIVER_ERROR
);
2302 CoreHandle
*handle
= (CoreHandle
*) stream_
.apiHandle
;
2303 handle
->stopStream
= false;
2304 stream_
.state
= STREAM_RUNNING
;
2306 MUTEX_UNLOCK(&stream_
.mutex
);
2309 void RtApiCore :: stopStream()
2312 if (stream_
.state
== STREAM_STOPPED
) return;
2314 // Change the state before the lock to improve shutdown response
2315 // when using a callback.
2316 stream_
.state
= STREAM_STOPPED
;
2317 MUTEX_LOCK(&stream_
.mutex
);
2321 if (stream_
.mode
== OUTPUT
|| stream_
.mode
== DUPLEX
) {
2323 id
= *( (AudioDeviceID
*) devices_
[stream_
.device
[0]].apiDeviceId
);
2324 err
= AudioDeviceStop(id
, callbackHandler
);
2326 sprintf(message_
, "RtApiCore: OS-X error stopping callback procedure on device (%s).",
2327 devices_
[stream_
.device
[0]].name
.c_str());
2328 MUTEX_UNLOCK(&stream_
.mutex
);
2329 error(RtError::DRIVER_ERROR
);
2333 if (stream_
.mode
== INPUT
|| ( stream_
.mode
== DUPLEX
&& stream_
.device
[0] != stream_
.device
[1]) ) {
2335 id
= *( (AudioDeviceID
*) devices_
[stream_
.device
[1]].apiDeviceId
);
2336 err
= AudioDeviceStop(id
, callbackHandler
);
2338 sprintf(message_
, "RtApiCore: OS-X error stopping input callback procedure on device (%s).",
2339 devices_
[stream_
.device
[0]].name
.c_str());
2340 MUTEX_UNLOCK(&stream_
.mutex
);
2341 error(RtError::DRIVER_ERROR
);
2345 MUTEX_UNLOCK(&stream_
.mutex
);
2348 void RtApiCore :: abortStream()
2353 void RtApiCore :: tickStream()
2357 if (stream_
.state
== STREAM_STOPPED
) return;
2359 if (stream_
.callbackInfo
.usingCallback
) {
2360 sprintf(message_
, "RtApiCore: tickStream() should not be used when a callback function is set!");
2361 error(RtError::WARNING
);
2365 CoreHandle
*handle
= (CoreHandle
*) stream_
.apiHandle
;
2367 MUTEX_LOCK(&stream_
.mutex
);
2369 pthread_cond_wait(&handle
->condition
, &stream_
.mutex
);
2371 MUTEX_UNLOCK(&stream_
.mutex
);
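// tickStream() above blocks the calling thread until the device callback has
// handled one buffer of data.  A minimal sketch of that wait/signal pairing
// with plain pthreads (assumes a POSIX build where <pthread.h> is already
// included; the TickSyncSketch names are illustrative, not RtAudio symbols):
struct TickSyncSketch {
  pthread_mutex_t mutex;
  pthread_cond_t  condition;
};

static void waitForBufferSketch( TickSyncSketch *sync )
{
  pthread_mutex_lock( &sync->mutex );
  pthread_cond_wait( &sync->condition, &sync->mutex );  // mutex released while waiting
  pthread_mutex_unlock( &sync->mutex );
}

static void signalBufferDoneSketch( TickSyncSketch *sync )
{
  pthread_cond_signal( &sync->condition );  // wakes a thread blocked in waitForBufferSketch()
}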
2374 void RtApiCore :: callbackEvent( AudioDeviceID deviceId
, void *inData
, void *outData
)
2378 if (stream_
.state
== STREAM_STOPPED
) return;
2380 CallbackInfo
*info
= (CallbackInfo
*) &stream_
.callbackInfo
;
2381 CoreHandle
*handle
= (CoreHandle
*) stream_
.apiHandle
;
2382 AudioBufferList
*inBufferList
= (AudioBufferList
*) inData
;
2383 AudioBufferList
*outBufferList
= (AudioBufferList
*) outData
;
2385 if ( info
->usingCallback
&& handle
->stopStream
) {
    // Check if the stream should be stopped (via the previous user
    // callback return value).  We stop the stream here, rather than
    // after the function call, so that output data can first be
    // processed.
2394 MUTEX_LOCK(&stream_
.mutex
);
2396 // Invoke user callback first, to get fresh output data. Don't
2397 // invoke the user callback if duplex mode AND the input/output devices
2398 // are different AND this function is called for the input device.
2399 AudioDeviceID id
= *( (AudioDeviceID
*) devices_
[stream_
.device
[0]].apiDeviceId
);
2400 if ( info
->usingCallback
&& (stream_
.mode
!= DUPLEX
|| deviceId
== id
) ) {
2401 RtAudioCallback callback
= (RtAudioCallback
) info
->callback
;
2402 handle
->stopStream
= callback(stream_
.userBuffer
, stream_
.bufferSize
, info
->userData
);
2403 if ( handle
->xrun
== true ) {
2404 handle
->xrun
= false;
2405 MUTEX_UNLOCK(&stream_
.mutex
);
2409 else if( info
->usingCallback
&& (stream_
.mode
== DUPLEX
&& deviceId
!= id
) )
2411 if( stream_
.sub_mode
== INPUT
)
2412 memcpy(stream_
.userBuffer
,
2413 inBufferList
->mBuffers
[handle
->index
[1]].mData
,
2414 inBufferList
->mBuffers
[handle
->index
[1]].mDataByteSize
);
2416 memcpy(outBufferList
->mBuffers
[handle
->index
[0]].mData
,
2418 outBufferList
->mBuffers
[handle
->index
[0]].mDataByteSize
);
2421 if ( stream_
.mode
== OUTPUT
|| ( stream_
.mode
== DUPLEX
&& deviceId
== id
) ) {
2423 if (stream_
.doConvertBuffer
[0]) {
2425 if ( !stream_
.deInterleave
[0] )
2426 stream_
.deviceBuffer
= (char *) outBufferList
->mBuffers
[handle
->index
[0]].mData
;
2428 stream_
.deviceBuffer
= handle
->deviceBuffer
;
2430 convertStreamBuffer(OUTPUT
);
2431 if ( stream_
.doByteSwap
[0] )
2432 byteSwapBuffer(stream_
.deviceBuffer
,
2433 stream_
.bufferSize
* stream_
.nDeviceChannels
[0],
2434 stream_
.deviceFormat
[0]);
2436 if ( stream_
.deInterleave
[0] ) {
2437 int bufferBytes
= outBufferList
->mBuffers
[handle
->index
[0]].mDataByteSize
;
2438 for ( int i
=0; i
<stream_
.nDeviceChannels
[0]; i
++ ) {
2439 memcpy(outBufferList
->mBuffers
[handle
->index
[0]+i
].mData
,
2440 &stream_
.deviceBuffer
[i
*bufferBytes
], bufferBytes
);
2446 if (stream_
.doByteSwap
[0])
2447 byteSwapBuffer(stream_
.userBuffer
,
2448 stream_
.bufferSize
* stream_
.nUserChannels
[0],
2449 stream_
.userFormat
);
2451 memcpy(outBufferList
->mBuffers
[handle
->index
[0]].mData
,
2453 outBufferList
->mBuffers
[handle
->index
[0]].mDataByteSize
);
2457 if ( stream_
.mode
== INPUT
|| ( stream_
.mode
== DUPLEX
&& deviceId
== id
) ) {
2459 if (stream_
.doConvertBuffer
[1]) {
2461 if ( stream_
.deInterleave
[1] ) {
2462 stream_
.deviceBuffer
= (char *) handle
->deviceBuffer
;
2463 int bufferBytes
= inBufferList
->mBuffers
[handle
->index
[1]].mDataByteSize
;
2464 for ( int i
=0; i
<stream_
.nDeviceChannels
[1]; i
++ ) {
2465 memcpy(&stream_
.deviceBuffer
[i
*bufferBytes
],
2466 inBufferList
->mBuffers
[handle
->index
[1]+i
].mData
, bufferBytes
);
2470 stream_
.deviceBuffer
= (char *) inBufferList
->mBuffers
[handle
->index
[1]].mData
;
2472 if ( stream_
.doByteSwap
[1] )
2473 byteSwapBuffer(stream_
.deviceBuffer
,
2474 stream_
.bufferSize
* stream_
.nDeviceChannels
[1],
2475 stream_
.deviceFormat
[1]);
2476 convertStreamBuffer(INPUT
);
2480 memcpy(stream_
.userBuffer
,
2481 inBufferList
->mBuffers
[handle
->index
[1]].mData
,
2482 inBufferList
->mBuffers
[handle
->index
[1]].mDataByteSize
);
2484 if (stream_
.doByteSwap
[1])
2485 byteSwapBuffer(stream_
.userBuffer
,
2486 stream_
.bufferSize
* stream_
.nUserChannels
[1],
2487 stream_
.userFormat
);
2491 if ( !info
->usingCallback
&& (stream_
.mode
!= DUPLEX
|| deviceId
== id
) )
2492 pthread_cond_signal(&handle
->condition
);
2494 MUTEX_UNLOCK(&stream_
.mutex
);
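// In "mono mode" callbackEvent() above splits the converted device buffer,
// which holds the channels as contiguous single-channel blocks, across the
// device's separate one-channel CoreAudio streams.  A minimal sketch of that
// scatter step (illustrative helper; assumes <string.h> is available):
static void scatterChannelBlocksSketch( const char *deviceBuffer, AudioBufferList *out,
                                        unsigned int firstStream, unsigned int nChannels,
                                        unsigned int bytesPerChannel )
{
  for ( unsigned int i = 0; i < nChannels; i++ )
    memcpy( out->mBuffers[firstStream + i].mData,
            deviceBuffer + i * bytesPerChannel, bytesPerChannel );
}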
2497 void RtApiCore :: setStreamCallback(RtAudioCallback callback
, void *userData
)
2501 if ( stream_
.callbackInfo
.usingCallback
) {
2502 sprintf(message_
, "RtApiCore: A callback is already set for this stream!");
2503 error(RtError::WARNING
);
2507 stream_
.callbackInfo
.callback
= (void *) callback
;
2508 stream_
.callbackInfo
.userData
= userData
;
2509 stream_
.callbackInfo
.usingCallback
= true;
2512 void RtApiCore :: cancelStreamCallback()
2516 if (stream_
.callbackInfo
.usingCallback
) {
2518 if (stream_
.state
== STREAM_RUNNING
)
2521 MUTEX_LOCK(&stream_
.mutex
);
2523 stream_
.callbackInfo
.usingCallback
= false;
2524 stream_
.callbackInfo
.userData
= NULL
;
2525 stream_
.state
= STREAM_STOPPED
;
2526 stream_
.callbackInfo
.callback
= NULL
;
2528 MUTEX_UNLOCK(&stream_
.mutex
);
2533 //******************** End of __MACOSX_CORE__ *********************//
2536 #if defined(__LINUX_JACK__)
2538 // JACK is a low-latency audio server, written primarily for the
2539 // GNU/Linux operating system. It can connect a number of different
2540 // applications to an audio device, as well as allowing them to share
2541 // audio between themselves.
2543 // The JACK server must be running before RtApiJack can be instantiated.
2544 // RtAudio will report just a single "device", which is the JACK audio
2545 // server. The JACK server is typically started in a terminal as follows:
// jackd -d alsa -d hw:0
2549 // Many of the parameters normally set for a stream are fixed by the
2550 // JACK server and can be specified when the JACK server is started.
// jackd -d alsa -d hw:0 -r 44100 -p 512 -n 4
//
// specifies a sample rate of 44100 Hz, a buffer size of 512 sample
// frames, and 4 buffers (periods).  Once the server is running, it
// is not possible to override these values.  If the values are not
// specified on the command line, the JACK server uses default values.
2560 #include <jack/jack.h>
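// Because jackd fixes the sample rate and buffer size at startup, a client can
// only query them.  A minimal sketch of that query, using the same (old)
// jack_client_new() entry point this file uses; the client name
// "rtaudio-probe" is illustrative:
static bool queryJackServerSketch( unsigned int *rate, unsigned int *frames )
{
  jack_client_t *client = jack_client_new( "rtaudio-probe" );
  if ( client == 0 ) return false;             // no JACK server running
  *rate   = jack_get_sample_rate( client );    // e.g. 44100
  *frames = jack_get_buffer_size( client );    // e.g. 512
  jack_client_close( client );
  return true;
}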
2563 // A structure to hold various information related to the Jack API
2566 jack_client_t
*client
;
2567 jack_port_t
**ports
[2];
2570 pthread_cond_t condition
;
2573 :client(0), clientOpen(false), stopStream(false) {}
2576 std::string jackmsg
;
2578 static void jackerror (const char *desc
)
2581 jackmsg
.append( desc
, strlen(desc
)+1 );
2584 RtApiJack :: RtApiJack()
2588 if (nDevices_
<= 0) {
2589 sprintf(message_
, "RtApiJack: no Linux Jack server found or connection error (jack: %s)!",
2591 error(RtError::NO_DEVICES_FOUND
);
2595 RtApiJack :: ~RtApiJack()
2597 if ( stream_
.mode
!= UNINITIALIZED
) closeStream();
2600 void RtApiJack :: initialize(void)
2604 // Tell the jack server to call jackerror() when it experiences an
2605 // error. This function saves the error message for subsequent
2606 // reporting via the normal RtAudio error function.
2607 jack_set_error_function( jackerror
);
2609 // Look for jack server and try to become a client.
2610 jack_client_t
*client
;
2611 if ( (client
= jack_client_new( "RtApiJack" )) == 0)
2615 // Determine the name of the device.
2616 device
.name
= "Jack Server";
2617 devices_
.push_back(device
);
2620 jack_client_close(client
);
2623 void RtApiJack :: probeDeviceInfo(RtApiDevice
*info
)
2625 // Look for jack server and try to become a client.
2626 jack_client_t
*client
;
2627 if ( (client
= jack_client_new( "RtApiJack" )) == 0) {
2628 sprintf(message_
, "RtApiJack: error connecting to Linux Jack server in probeDeviceInfo() (jack: %s)!",
2630 error(RtError::WARNING
);
2634 // Get the current jack server sample rate.
2635 info
->sampleRates
.clear();
2636 info
->sampleRates
.push_back( jack_get_sample_rate(client
) );
2638 // Count the available ports as device channels. Jack "input ports"
2639 // equal RtAudio output channels.
2642 unsigned int nChannels
= 0;
2643 ports
= jack_get_ports( client
, NULL
, NULL
, JackPortIsInput
);
2645 port
= (char *) ports
[nChannels
];
2647 port
= (char *) ports
[++nChannels
];
2649 info
->maxOutputChannels
= nChannels
;
2650 info
->minOutputChannels
= 1;
2653 // Jack "output ports" equal RtAudio input channels.
2655 ports
= jack_get_ports( client
, NULL
, NULL
, JackPortIsOutput
);
2657 port
= (char *) ports
[nChannels
];
2659 port
= (char *) ports
[++nChannels
];
2661 info
->maxInputChannels
= nChannels
;
2662 info
->minInputChannels
= 1;
2665 if (info
->maxOutputChannels
== 0 && info
->maxInputChannels
== 0) {
2666 jack_client_close(client
);
2667 sprintf(message_
, "RtApiJack: error determining jack input/output channels!");
2668 error(RtError::WARNING
);
2672 if (info
->maxOutputChannels
> 0 && info
->maxInputChannels
> 0) {
2673 info
->hasDuplexSupport
= true;
2674 info
->maxDuplexChannels
= (info
->maxOutputChannels
> info
->maxInputChannels
) ?
2675 info
->maxInputChannels
: info
->maxOutputChannels
;
2676 info
->minDuplexChannels
= (info
->minOutputChannels
> info
->minInputChannels
) ?
2677 info
->minInputChannels
: info
->minOutputChannels
;
2680 // Get the jack data format type. There isn't much documentation
2681 // regarding supported data formats in jack. I'm assuming here that
2682 // the default type will always be a floating-point type, of length
2683 // equal to either 4 or 8 bytes.
2684 int sample_size
= sizeof( jack_default_audio_sample_t
);
2685 if ( sample_size
== 4 )
2686 info
->nativeFormats
= RTAUDIO_FLOAT32
;
2687 else if ( sample_size
== 8 )
2688 info
->nativeFormats
= RTAUDIO_FLOAT64
;
2690 // Check that we have a supported format
2691 if (info
->nativeFormats
== 0) {
2692 jack_client_close(client
);
2693 sprintf(message_
, "RtApiJack: error determining jack server data format!");
2694 error(RtError::WARNING
);
2698 jack_client_close(client
);
2699 info
->probed
= true;
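// probeDeviceInfo() above counts JACK ports as channels: ports that accept
// input (JackPortIsInput) are RtAudio output channels, and vice versa.  A
// minimal sketch of that counting step; the returned list is NULL-terminated
// and owned by the caller:
static unsigned int countJackPortsSketch( jack_client_t *client, unsigned long flags )
{
  unsigned int n = 0;
  const char **ports = jack_get_ports( client, NULL, NULL, flags );
  if ( ports ) {
    while ( ports[n] ) n++;   // walk the NULL-terminated array of port names
    free( ports );
  }
  return n;
}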
2702 int jackCallbackHandler(jack_nframes_t nframes
, void *infoPointer
)
2704 CallbackInfo
*info
= (CallbackInfo
*) infoPointer
;
2705 RtApiJack
*object
= (RtApiJack
*) info
->object
;
2707 object
->callbackEvent( (unsigned long) nframes
);
2709 catch (RtError
&exception
) {
2710 fprintf(stderr
, "\nRtApiJack: callback handler error (%s)!\n\n", exception
.getMessageString());
2717 void jackShutdown(void *infoPointer
)
2719 CallbackInfo
*info
= (CallbackInfo
*) infoPointer
;
2720 JackHandle
*handle
= (JackHandle
*) info
->apiInfo
;
2721 handle
->clientOpen
= false;
2722 RtApiJack
*object
= (RtApiJack
*) info
->object
;
2724 object
->closeStream();
2726 catch (RtError
&exception
) {
2727 fprintf(stderr
, "\nRtApiJack: jackShutdown error (%s)!\n\n", exception
.getMessageString());
2731 fprintf(stderr
, "\nRtApiJack: the Jack server is shutting down ... stream stopped and closed!!!\n\n");
2734 int jackXrun( void * )
2736 fprintf(stderr
, "\nRtApiJack: audio overrun/underrun reported!\n");
2740 bool RtApiJack :: probeDeviceOpen(int device
, StreamMode mode
, int channels
,
2741 int sampleRate
, RtAudioFormat format
,
2742 int *bufferSize
, int numberOfBuffers
)
2744 // Compare the jack server channels to the requested number of channels.
2745 if ( (mode
== OUTPUT
&& devices_
[device
].maxOutputChannels
< channels
) ||
2746 (mode
== INPUT
&& devices_
[device
].maxInputChannels
< channels
) ) {
2747 sprintf(message_
, "RtApiJack: the Jack server does not support requested channels!");
2748 error(RtError::DEBUG_WARNING
);
2752 JackHandle
*handle
= (JackHandle
*) stream_
.apiHandle
;
2754 // Look for jack server and try to become a client (only do once per stream).
2756 jack_client_t
*client
= 0;
2757 if ( mode
== OUTPUT
|| (mode
== INPUT
&& stream_
.mode
!= OUTPUT
) ) {
2758 snprintf(label
, 32, "RtApiJack");
2759 if ( (client
= jack_client_new( (const char *) label
)) == 0) {
2760 sprintf(message_
, "RtApiJack: cannot connect to Linux Jack server in probeDeviceOpen() (jack: %s)!",
2762 error(RtError::DEBUG_WARNING
);
2767 // The handle must have been created on an earlier pass.
2768 client
= handle
->client
;
2771 // First, check the jack server sample rate.
2773 jack_rate
= (int) jack_get_sample_rate(client
);
2774 if ( sampleRate
!= jack_rate
) {
2775 jack_client_close(client
);
2776 sprintf( message_
, "RtApiJack: the requested sample rate (%d) is different than the JACK server rate (%d).",
2777 sampleRate
, jack_rate
);
2778 error(RtError::DEBUG_WARNING
);
2781 stream_
.sampleRate
= jack_rate
;
2783 // The jack server seems to support just a single floating-point
2784 // data type. Since we already checked it before, just use what we
2786 stream_
.deviceFormat
[mode
] = devices_
[device
].nativeFormats
;
2787 stream_
.userFormat
= format
;
2789 // Jack always uses non-interleaved buffers. We'll need to
2790 // de-interleave if we have more than one channel.
2791 stream_
.deInterleave
[mode
] = false;
2793 stream_
.deInterleave
[mode
] = true;
2795 // Jack always provides host byte-ordered data.
2796 stream_
.doByteSwap
[mode
] = false;
2798 // Get the buffer size. The buffer size and number of buffers
2799 // (periods) is set when the jack server is started.
2800 stream_
.bufferSize
= (int) jack_get_buffer_size(client
);
2801 *bufferSize
= stream_
.bufferSize
;
2803 stream_
.nDeviceChannels
[mode
] = channels
;
2804 stream_
.nUserChannels
[mode
] = channels
;
2806 stream_
.doConvertBuffer
[mode
] = false;
2807 if (stream_
.userFormat
!= stream_
.deviceFormat
[mode
])
2808 stream_
.doConvertBuffer
[mode
] = true;
2809 if (stream_
.deInterleave
[mode
])
2810 stream_
.doConvertBuffer
[mode
] = true;
2812 // Allocate our JackHandle structure for the stream.
2813 if ( handle
== 0 ) {
2814 handle
= (JackHandle
*) calloc(1, sizeof(JackHandle
));
2815 if ( handle
== NULL
) {
2816 sprintf(message_
, "RtApiJack: error allocating JackHandle memory (%s).",
2817 devices_
[device
].name
.c_str());
2820 handle
->ports
[0] = 0;
2821 handle
->ports
[1] = 0;
2822 if ( pthread_cond_init(&handle
->condition
, NULL
) ) {
2823 sprintf(message_
, "RtApiJack: error initializing pthread condition variable!");
2826 stream_
.apiHandle
= (void *) handle
;
2827 handle
->client
= client
;
2828 handle
->clientOpen
= true;
2831 // Allocate necessary internal buffers.
2832 if ( stream_
.nUserChannels
[0] != stream_
.nUserChannels
[1] ) {
2835 if (stream_
.nUserChannels
[0] >= stream_
.nUserChannels
[1])
2836 buffer_bytes
= stream_
.nUserChannels
[0];
2838 buffer_bytes
= stream_
.nUserChannels
[1];
2840 buffer_bytes
*= *bufferSize
* formatBytes(stream_
.userFormat
);
2841 if (stream_
.userBuffer
) free(stream_
.userBuffer
);
2842 stream_
.userBuffer
= (char *) calloc(buffer_bytes
, 1);
2843 if (stream_
.userBuffer
== NULL
) {
2844 sprintf(message_
, "RtApiJack: error allocating user buffer memory (%s).",
2845 devices_
[device
].name
.c_str());
2850 if ( stream_
.doConvertBuffer
[mode
] ) {
2853 bool makeBuffer
= true;
2854 if ( mode
== OUTPUT
)
2855 buffer_bytes
= stream_
.nDeviceChannels
[0] * formatBytes(stream_
.deviceFormat
[0]);
2856 else { // mode == INPUT
2857 buffer_bytes
= stream_
.nDeviceChannels
[1] * formatBytes(stream_
.deviceFormat
[1]);
2858 if ( stream_
.mode
== OUTPUT
&& stream_
.deviceBuffer
) {
2859 long bytes_out
= stream_
.nDeviceChannels
[0] * formatBytes(stream_
.deviceFormat
[0]);
2860 if ( buffer_bytes
< bytes_out
) makeBuffer
= false;
2865 buffer_bytes
*= *bufferSize
;
2866 if (stream_
.deviceBuffer
) free(stream_
.deviceBuffer
);
2867 stream_
.deviceBuffer
= (char *) calloc(buffer_bytes
, 1);
2868 if (stream_
.deviceBuffer
== NULL
) {
2869 sprintf(message_
, "RtApiJack: error allocating device buffer memory (%s).",
2870 devices_
[device
].name
.c_str());
2876 // Allocate memory for the Jack ports (channels) identifiers.
2877 handle
->ports
[mode
] = (jack_port_t
**) malloc (sizeof (jack_port_t
*) * channels
);
2878 if ( handle
->ports
[mode
] == NULL
) {
2879 sprintf(message_
, "RtApiJack: error allocating port handle memory (%s).",
2880 devices_
[device
].name
.c_str());
2884 stream_
.device
[mode
] = device
;
2885 stream_
.state
= STREAM_STOPPED
;
2886 stream_
.callbackInfo
.usingCallback
= false;
2887 stream_
.callbackInfo
.object
= (void *) this;
2888 stream_
.callbackInfo
.apiInfo
= (void *) handle
;
2890 if ( stream_
.mode
== OUTPUT
&& mode
== INPUT
)
2891 // We had already set up the stream for output.
2892 stream_
.mode
= DUPLEX
;
2894 stream_
.mode
= mode
;
2895 jack_set_process_callback( handle
->client
, jackCallbackHandler
, (void *) &stream_
.callbackInfo
);
2896 jack_set_xrun_callback( handle
->client
, jackXrun
, NULL
);
2897 jack_on_shutdown( handle
->client
, jackShutdown
, (void *) &stream_
.callbackInfo
);
2904 pthread_cond_destroy(&handle
->condition
);
2905 if ( handle
->clientOpen
== true )
2906 jack_client_close(handle
->client
);
2908 if ( handle
->ports
[0] ) free(handle
->ports
[0]);
2909 if ( handle
->ports
[1] ) free(handle
->ports
[1]);
2912 stream_
.apiHandle
= 0;
2915 if (stream_
.userBuffer
) {
2916 free(stream_
.userBuffer
);
2917 stream_
.userBuffer
= 0;
2920 error(RtError::WARNING
);
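// probeDeviceOpen() above refuses a stream whose requested rate differs from
// the server rate, since a JACK client cannot change jackd's rate after
// startup.  A minimal sketch of that check (illustrative helper name):
static bool jackRateMatchesSketch( jack_client_t *client, int requestedRate )
{
  int serverRate = (int) jack_get_sample_rate( client );
  return requestedRate == serverRate;   // a mismatch means the open must fail
}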
2924 void RtApiJack :: closeStream()
  // We don't want an exception to be thrown here because this
  // function is called by our class destructor.  So, do our own
  // error checking here.
2929 if ( stream_
.mode
== UNINITIALIZED
) {
2930 sprintf(message_
, "RtApiJack::closeStream(): no open stream to close!");
2931 error(RtError::WARNING
);
2935 JackHandle
*handle
= (JackHandle
*) stream_
.apiHandle
;
2936 if ( handle
&& handle
->clientOpen
== true ) {
2937 if (stream_
.state
== STREAM_RUNNING
)
2938 jack_deactivate(handle
->client
);
2940 jack_client_close(handle
->client
);
2944 if ( handle
->ports
[0] ) free(handle
->ports
[0]);
2945 if ( handle
->ports
[1] ) free(handle
->ports
[1]);
2946 pthread_cond_destroy(&handle
->condition
);
2948 stream_
.apiHandle
= 0;
2951 if (stream_
.userBuffer
) {
2952 free(stream_
.userBuffer
);
2953 stream_
.userBuffer
= 0;
2956 if (stream_
.deviceBuffer
) {
2957 free(stream_
.deviceBuffer
);
2958 stream_
.deviceBuffer
= 0;
2961 stream_
.mode
= UNINITIALIZED
;
2965 void RtApiJack :: startStream()
2968 if (stream_
.state
== STREAM_RUNNING
) return;
2970 MUTEX_LOCK(&stream_
.mutex
);
2973 JackHandle
*handle
= (JackHandle
*) stream_
.apiHandle
;
2974 if ( stream_
.mode
== OUTPUT
|| stream_
.mode
== DUPLEX
) {
2975 for ( int i
=0; i
<stream_
.nUserChannels
[0]; i
++ ) {
2976 snprintf(label
, 64, "outport %d", i
);
2977 handle
->ports
[0][i
] = jack_port_register(handle
->client
, (const char *)label
,
2978 JACK_DEFAULT_AUDIO_TYPE
, JackPortIsOutput
, 0);
2982 if ( stream_
.mode
== INPUT
|| stream_
.mode
== DUPLEX
) {
2983 for ( int i
=0; i
<stream_
.nUserChannels
[1]; i
++ ) {
2984 snprintf(label
, 64, "inport %d", i
);
2985 handle
->ports
[1][i
] = jack_port_register(handle
->client
, (const char *)label
,
2986 JACK_DEFAULT_AUDIO_TYPE
, JackPortIsInput
, 0);
2990 if (jack_activate(handle
->client
)) {
2991 sprintf(message_
, "RtApiJack: unable to activate JACK client!");
2992 error(RtError::SYSTEM_ERROR
);
2997 // Get the list of available ports.
2998 if ( stream_
.mode
== OUTPUT
|| stream_
.mode
== DUPLEX
) {
2999 ports
= jack_get_ports(handle
->client
, NULL
, NULL
, JackPortIsPhysical
|JackPortIsInput
);
3000 if ( ports
== NULL
) {
3001 sprintf(message_
, "RtApiJack: error determining available jack input ports!");
3002 error(RtError::SYSTEM_ERROR
);
3005 // Now make the port connections. Since RtAudio wasn't designed to
3006 // allow the user to select particular channels of a device, we'll
3007 // just open the first "nChannels" ports.
3008 for ( int i
=0; i
<stream_
.nUserChannels
[0]; i
++ ) {
3011 result
= jack_connect( handle
->client
, jack_port_name(handle
->ports
[0][i
]), ports
[i
] );
3014 sprintf(message_
, "RtApiJack: error connecting output ports!");
3015 error(RtError::SYSTEM_ERROR
);
3021 if ( stream_
.mode
== INPUT
|| stream_
.mode
== DUPLEX
) {
3022 ports
= jack_get_ports( handle
->client
, NULL
, NULL
, JackPortIsPhysical
|JackPortIsOutput
);
3023 if ( ports
== NULL
) {
3024 sprintf(message_
, "RtApiJack: error determining available jack output ports!");
3025 error(RtError::SYSTEM_ERROR
);
3028 // Now make the port connections. See note above.
3029 for ( int i
=0; i
<stream_
.nUserChannels
[1]; i
++ ) {
3032 result
= jack_connect( handle
->client
, ports
[i
], jack_port_name(handle
->ports
[1][i
]) );
3035 sprintf(message_
, "RtApiJack: error connecting input ports!");
3036 error(RtError::SYSTEM_ERROR
);
3042 handle
->stopStream
= false;
3043 stream_
.state
= STREAM_RUNNING
;
3045 MUTEX_UNLOCK(&stream_
.mutex
);
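// startStream() above registers one port per channel and then wires each one
// to the first physical ports reported by the server.  A minimal sketch of the
// output-side wiring, assuming an already-activated client (helper name and
// argument names are illustrative):
static bool connectOutputPortsSketch( jack_client_t *client, jack_port_t **myPorts, int nChannels )
{
  const char **physical = jack_get_ports( client, NULL, NULL,
                                          JackPortIsPhysical | JackPortIsInput );
  if ( physical == NULL ) return false;
  bool ok = true;
  for ( int i = 0; i < nChannels && physical[i]; i++ ) {
    // source = our output port, destination = hardware playback port
    if ( jack_connect( client, jack_port_name( myPorts[i] ), physical[i] ) )
      ok = false;
  }
  free( physical );
  return ok;
}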
3048 void RtApiJack :: stopStream()
3051 if (stream_
.state
== STREAM_STOPPED
) return;
3053 // Change the state before the lock to improve shutdown response
3054 // when using a callback.
3055 stream_
.state
= STREAM_STOPPED
;
3056 MUTEX_LOCK(&stream_
.mutex
);
3058 JackHandle
*handle
= (JackHandle
*) stream_
.apiHandle
;
3059 jack_deactivate(handle
->client
);
3061 MUTEX_UNLOCK(&stream_
.mutex
);
3064 void RtApiJack :: abortStream()
3069 void RtApiJack :: tickStream()
3073 if (stream_
.state
== STREAM_STOPPED
) return;
3075 if (stream_
.callbackInfo
.usingCallback
) {
3076 sprintf(message_
, "RtApiJack: tickStream() should not be used when a callback function is set!");
3077 error(RtError::WARNING
);
3081 JackHandle
*handle
= (JackHandle
*) stream_
.apiHandle
;
3083 MUTEX_LOCK(&stream_
.mutex
);
3085 pthread_cond_wait(&handle
->condition
, &stream_
.mutex
);
3087 MUTEX_UNLOCK(&stream_
.mutex
);
3090 void RtApiJack :: callbackEvent( unsigned long nframes
)
3094 if (stream_
.state
== STREAM_STOPPED
) return;
3096 CallbackInfo
*info
= (CallbackInfo
*) &stream_
.callbackInfo
;
3097 JackHandle
*handle
= (JackHandle
*) stream_
.apiHandle
;
3098 if ( info
->usingCallback
&& handle
->stopStream
) {
    // Check if the stream should be stopped (via the previous user
    // callback return value).  We stop the stream here, rather than
    // after the function call, so that output data can first be
    // processed.
3107 MUTEX_LOCK(&stream_
.mutex
);
3109 // Invoke user callback first, to get fresh output data.
3110 if ( info
->usingCallback
) {
3111 RtAudioCallback callback
= (RtAudioCallback
) info
->callback
;
3112 handle
->stopStream
= callback(stream_
.userBuffer
, stream_
.bufferSize
, info
->userData
);
3115 jack_default_audio_sample_t
*jackbuffer
;
3116 long bufferBytes
= nframes
* sizeof (jack_default_audio_sample_t
);
3117 if ( stream_
.mode
== OUTPUT
|| stream_
.mode
== DUPLEX
) {
3119 if (stream_
.doConvertBuffer
[0]) {
3120 convertStreamBuffer(OUTPUT
);
3122 for ( int i
=0; i
<stream_
.nDeviceChannels
[0]; i
++ ) {
3123 jackbuffer
= (jack_default_audio_sample_t
*) jack_port_get_buffer(handle
->ports
[0][i
],
3124 (jack_nframes_t
) nframes
);
3125 memcpy(jackbuffer
, &stream_
.deviceBuffer
[i
*bufferBytes
], bufferBytes
);
3128 else { // single channel only
3129 jackbuffer
= (jack_default_audio_sample_t
*) jack_port_get_buffer(handle
->ports
[0][0],
3130 (jack_nframes_t
) nframes
);
3131 memcpy(jackbuffer
, stream_
.userBuffer
, bufferBytes
);
3135 if ( stream_
.mode
== INPUT
|| stream_
.mode
== DUPLEX
) {
3137 if (stream_
.doConvertBuffer
[1]) {
3138 for ( int i
=0; i
<stream_
.nDeviceChannels
[1]; i
++ ) {
3139 jackbuffer
= (jack_default_audio_sample_t
*) jack_port_get_buffer(handle
->ports
[1][i
],
3140 (jack_nframes_t
) nframes
);
3141 memcpy(&stream_
.deviceBuffer
[i
*bufferBytes
], jackbuffer
, bufferBytes
);
3143 convertStreamBuffer(INPUT
);
3145 else { // single channel only
3146 jackbuffer
= (jack_default_audio_sample_t
*) jack_port_get_buffer(handle
->ports
[1][0],
3147 (jack_nframes_t
) nframes
);
3148 memcpy(stream_
.userBuffer
, jackbuffer
, bufferBytes
);
3152 if ( !info
->usingCallback
)
3153 pthread_cond_signal(&handle
->condition
);
3155 MUTEX_UNLOCK(&stream_
.mutex
);
3158 void RtApiJack :: setStreamCallback(RtAudioCallback callback
, void *userData
)
3162 if ( stream_
.callbackInfo
.usingCallback
) {
3163 sprintf(message_
, "RtApiJack: A callback is already set for this stream!");
3164 error(RtError::WARNING
);
3168 stream_
.callbackInfo
.callback
= (void *) callback
;
3169 stream_
.callbackInfo
.userData
= userData
;
3170 stream_
.callbackInfo
.usingCallback
= true;
3173 void RtApiJack :: cancelStreamCallback()
3177 if (stream_
.callbackInfo
.usingCallback
) {
3179 if (stream_
.state
== STREAM_RUNNING
)
3182 MUTEX_LOCK(&stream_
.mutex
);
3184 stream_
.callbackInfo
.usingCallback
= false;
3185 stream_
.callbackInfo
.userData
= NULL
;
3186 stream_
.state
= STREAM_STOPPED
;
3187 stream_
.callbackInfo
.callback
= NULL
;
3189 MUTEX_UNLOCK(&stream_
.mutex
);
3195 #if defined(__LINUX_ALSA__)
3197 #include <alsa/asoundlib.h>
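// RtApiAlsa::initialize() below walks the hardware cards ("hw:0", "hw:1", ...)
// and, within each card, the PCM devices.  A minimal standalone sketch of that
// enumeration loop, printing names instead of filling devices_ (illustrative
// only; assumes <stdio.h> is already available):
static void listAlsaPcmDevicesSketch()
{
  int card = -1;
  snd_card_next( &card );                        // first card, or -1 if none
  while ( card >= 0 ) {
    char name[32];
    sprintf( name, "hw:%d", card );
    snd_ctl_t *ctl;
    if ( snd_ctl_open( &ctl, name, 0 ) >= 0 ) {
      int dev = -1;
      while ( snd_ctl_pcm_next_device( ctl, &dev ) >= 0 && dev >= 0 )
        printf( "found pcm device hw:%d,%d\n", card, dev );
      snd_ctl_close( ctl );
    }
    if ( snd_card_next( &card ) < 0 ) break;     // advance to the next card
  }
}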
3201 extern "C" void *alsaCallbackHandler(void * ptr
);
3203 RtApiAlsa :: RtApiAlsa()
3207 if (nDevices_
<= 0) {
3208 sprintf(message_
, "RtApiAlsa: no Linux ALSA audio devices found!");
3209 error(RtError::NO_DEVICES_FOUND
);
3213 RtApiAlsa :: ~RtApiAlsa()
3215 if ( stream_
.mode
!= UNINITIALIZED
)
3219 void RtApiAlsa :: initialize(void)
3221 int card
, subdevice
, result
;
3225 snd_ctl_card_info_t
*info
;
3226 snd_ctl_card_info_alloca(&info
);
3229 // Count cards and devices
3232 snd_card_next(&card
);
3233 while ( card
>= 0 ) {
3234 sprintf(name
, "hw:%d", card
);
3235 result
= snd_ctl_open(&handle
, name
, 0);
3237 sprintf(message_
, "RtApiAlsa: control open (%i): %s.", card
, snd_strerror(result
));
3238 error(RtError::DEBUG_WARNING
);
3241 result
= snd_ctl_card_info(handle
, info
);
3243 sprintf(message_
, "RtApiAlsa: control hardware info (%i): %s.", card
, snd_strerror(result
));
3244 error(RtError::DEBUG_WARNING
);
3247 cardId
= snd_ctl_card_info_get_id(info
);
3250 result
= snd_ctl_pcm_next_device(handle
, &subdevice
);
3252 sprintf(message_
, "RtApiAlsa: control next device (%i): %s.", card
, snd_strerror(result
));
3253 error(RtError::DEBUG_WARNING
);
3258 sprintf( name
, "hw:%d,%d", card
, subdevice
);
      // If a cardId exists and it contains at least one non-numeric
      // character, use it to identify the device.  This avoids a bug
      // in ALSA such that a numeric string is interpreted as a device
      // number.
3263 for ( unsigned int i
=0; i
<strlen(cardId
); i
++ ) {
3264 if ( !isdigit( cardId
[i
] ) ) {
3265 sprintf( name
, "hw:%s,%d", cardId
, subdevice
);
3269 device
.name
.erase();
3270 device
.name
.append( (const char *)name
, strlen(name
)+1 );
3271 devices_
.push_back(device
);
3275 snd_ctl_close(handle
);
3276 snd_card_next(&card
);
3280 void RtApiAlsa :: probeDeviceInfo(RtApiDevice
*info
)
3283 int open_mode
= SND_PCM_ASYNC
;
3286 snd_pcm_stream_t stream
;
3287 snd_pcm_info_t
*pcminfo
;
3288 snd_pcm_info_alloca(&pcminfo
);
3289 snd_pcm_hw_params_t
*params
;
3290 snd_pcm_hw_params_alloca(¶ms
);
3294 // Open the control interface for this card.
3295 strncpy( name
, info
->name
.c_str(), 64 );
3296 card
= strtok(name
, ",");
3297 err
= snd_ctl_open(&chandle
, card
, SND_CTL_NONBLOCK
);
3299 sprintf(message_
, "RtApiAlsa: control open (%s): %s.", card
, snd_strerror(err
));
3300 error(RtError::DEBUG_WARNING
);
3303 unsigned int dev
= (unsigned int) atoi( strtok(NULL
, ",") );
3305 // First try for playback
3306 stream
= SND_PCM_STREAM_PLAYBACK
;
3307 snd_pcm_info_set_device(pcminfo
, dev
);
3308 snd_pcm_info_set_subdevice(pcminfo
, 0);
3309 snd_pcm_info_set_stream(pcminfo
, stream
);
3311 if ((err
= snd_ctl_pcm_info(chandle
, pcminfo
)) < 0) {
3312 if (err
== -ENOENT
) {
3313 sprintf(message_
, "RtApiAlsa: pcm device (%s) doesn't handle output!", info
->name
.c_str());
3314 error(RtError::DEBUG_WARNING
);
3317 sprintf(message_
, "RtApiAlsa: snd_ctl_pcm_info error for device (%s) output: %s",
3318 info
->name
.c_str(), snd_strerror(err
));
3319 error(RtError::DEBUG_WARNING
);
3324 err
= snd_pcm_open(&handle
, info
->name
.c_str(), stream
, open_mode
| SND_PCM_NONBLOCK
);
3327 sprintf(message_
, "RtApiAlsa: pcm playback device (%s) is busy: %s.",
3328 info
->name
.c_str(), snd_strerror(err
));
3330 sprintf(message_
, "RtApiAlsa: pcm playback open (%s) error: %s.",
3331 info
->name
.c_str(), snd_strerror(err
));
3332 error(RtError::DEBUG_WARNING
);
3336 // We have an open device ... allocate the parameter structure.
3337 err
= snd_pcm_hw_params_any(handle
, params
);
3339 snd_pcm_close(handle
);
3340 sprintf(message_
, "RtApiAlsa: hardware probe error (%s): %s.",
3341 info
->name
.c_str(), snd_strerror(err
));
3342 error(RtError::WARNING
);
3346 // Get output channel information.
3348 err
= snd_pcm_hw_params_get_channels_min(params
, &value
);
3350 snd_pcm_close(handle
);
3351 sprintf(message_
, "RtApiAlsa: hardware minimum channel probe error (%s): %s.",
3352 info
->name
.c_str(), snd_strerror(err
));
3353 error(RtError::WARNING
);
3356 info
->minOutputChannels
= value
;
3358 err
= snd_pcm_hw_params_get_channels_max(params
, &value
);
3360 snd_pcm_close(handle
);
3361 sprintf(message_
, "RtApiAlsa: hardware maximum channel probe error (%s): %s.",
3362 info
->name
.c_str(), snd_strerror(err
));
3363 error(RtError::WARNING
);
3366 info
->maxOutputChannels
= value
;
3368 snd_pcm_close(handle
);
3371 // Now try for capture
3372 stream
= SND_PCM_STREAM_CAPTURE
;
3373 snd_pcm_info_set_stream(pcminfo
, stream
);
3375 err
= snd_ctl_pcm_info(chandle
, pcminfo
);
3376 snd_ctl_close(chandle
);
3378 if (err
== -ENOENT
) {
3379 sprintf(message_
, "RtApiAlsa: pcm device (%s) doesn't handle input!", info
->name
.c_str());
3380 error(RtError::DEBUG_WARNING
);
3383 sprintf(message_
, "RtApiAlsa: snd_ctl_pcm_info error for device (%s) input: %s",
3384 info
->name
.c_str(), snd_strerror(err
));
3385 error(RtError::DEBUG_WARNING
);
3387 if (info
->maxOutputChannels
== 0)
3388 // didn't open for playback either ... device invalid
3390 goto probe_parameters
;
3393 err
= snd_pcm_open(&handle
, info
->name
.c_str(), stream
, open_mode
| SND_PCM_NONBLOCK
);
3396 sprintf(message_
, "RtApiAlsa: pcm capture device (%s) is busy: %s.",
3397 info
->name
.c_str(), snd_strerror(err
));
3399 sprintf(message_
, "RtApiAlsa: pcm capture open (%s) error: %s.",
3400 info
->name
.c_str(), snd_strerror(err
));
3401 error(RtError::DEBUG_WARNING
);
3402 if (info
->maxOutputChannels
== 0)
3403 // didn't open for playback either ... device invalid
3405 goto probe_parameters
;
3408 // We have an open capture device ... allocate the parameter structure.
3409 err
= snd_pcm_hw_params_any(handle
, params
);
3411 snd_pcm_close(handle
);
3412 sprintf(message_
, "RtApiAlsa: hardware probe error (%s): %s.",
3413 info
->name
.c_str(), snd_strerror(err
));
3414 error(RtError::WARNING
);
3415 if (info
->maxOutputChannels
> 0)
3416 goto probe_parameters
;
3421 // Get input channel information.
3422 err
= snd_pcm_hw_params_get_channels_min(params
, &value
);
3424 snd_pcm_close(handle
);
3425 sprintf(message_
, "RtApiAlsa: hardware minimum in channel probe error (%s): %s.",
3426 info
->name
.c_str(), snd_strerror(err
));
3427 error(RtError::WARNING
);
3428 if (info
->maxOutputChannels
> 0)
3429 goto probe_parameters
;
3433 info
->minInputChannels
= value
;
3435 err
= snd_pcm_hw_params_get_channels_max(params
, &value
);
3437 snd_pcm_close(handle
);
3438 sprintf(message_
, "RtApiAlsa: hardware maximum in channel probe error (%s): %s.",
3439 info
->name
.c_str(), snd_strerror(err
));
3440 error(RtError::WARNING
);
3441 if (info
->maxOutputChannels
> 0)
3442 goto probe_parameters
;
3446 info
->maxInputChannels
= value
;
3448 snd_pcm_close(handle
);
3450 // If device opens for both playback and capture, we determine the channels.
3451 if (info
->maxOutputChannels
== 0 || info
->maxInputChannels
== 0)
3452 goto probe_parameters
;
3454 info
->hasDuplexSupport
= true;
3455 info
->maxDuplexChannels
= (info
->maxOutputChannels
> info
->maxInputChannels
) ?
3456 info
->maxInputChannels
: info
->maxOutputChannels
;
3457 info
->minDuplexChannels
= (info
->minOutputChannels
> info
->minInputChannels
) ?
3458 info
->minInputChannels
: info
->minOutputChannels
;
  // At this point, we just need to figure out the supported data
  // formats and sample rates.  We'll proceed by opening the device in
  // the direction with the maximum number of channels, or playback if
  // they are equal.  This might limit our sample rate options, but so
  // be it.
3467 if (info
->maxOutputChannels
>= info
->maxInputChannels
)
3468 stream
= SND_PCM_STREAM_PLAYBACK
;
3470 stream
= SND_PCM_STREAM_CAPTURE
;
3472 err
= snd_pcm_open(&handle
, info
->name
.c_str(), stream
, open_mode
);
3474 sprintf(message_
, "RtApiAlsa: pcm (%s) won't reopen during probe: %s.",
3475 info
->name
.c_str(), snd_strerror(err
));
3476 error(RtError::WARNING
);
3480 // We have an open device ... allocate the parameter structure.
3481 err
= snd_pcm_hw_params_any(handle
, params
);
3483 snd_pcm_close(handle
);
3484 sprintf(message_
, "RtApiAlsa: hardware reopen probe error (%s): %s.",
3485 info
->name
.c_str(), snd_strerror(err
));
3486 error(RtError::WARNING
);
3490 // Test our discrete set of sample rate values.
3492 info
->sampleRates
.clear();
3493 for (unsigned int i
=0; i
<MAX_SAMPLE_RATES
; i
++) {
3494 if (snd_pcm_hw_params_test_rate(handle
, params
, SAMPLE_RATES
[i
], dir
) == 0)
3495 info
->sampleRates
.push_back(SAMPLE_RATES
[i
]);
3497 if (info
->sampleRates
.size() == 0) {
3498 snd_pcm_close(handle
);
3499 sprintf(message_
, "RtApiAlsa: no supported sample rates found for device (%s).",
3500 info
->name
.c_str());
3501 error(RtError::DEBUG_WARNING
);
3505 // Probe the supported data formats ... we don't care about endian-ness just yet
3506 snd_pcm_format_t format
;
3507 info
->nativeFormats
= 0;
3508 format
= SND_PCM_FORMAT_S8
;
3509 if (snd_pcm_hw_params_test_format(handle
, params
, format
) == 0)
3510 info
->nativeFormats
|= RTAUDIO_SINT8
;
3511 format
= SND_PCM_FORMAT_S16
;
3512 if (snd_pcm_hw_params_test_format(handle
, params
, format
) == 0)
3513 info
->nativeFormats
|= RTAUDIO_SINT16
;
3514 format
= SND_PCM_FORMAT_S24
;
3515 if (snd_pcm_hw_params_test_format(handle
, params
, format
) == 0)
3516 info
->nativeFormats
|= RTAUDIO_SINT24
;
3517 format
= SND_PCM_FORMAT_S32
;
3518 if (snd_pcm_hw_params_test_format(handle
, params
, format
) == 0)
3519 info
->nativeFormats
|= RTAUDIO_SINT32
;
3520 format
= SND_PCM_FORMAT_FLOAT
;
3521 if (snd_pcm_hw_params_test_format(handle
, params
, format
) == 0)
3522 info
->nativeFormats
|= RTAUDIO_FLOAT32
;
3523 format
= SND_PCM_FORMAT_FLOAT64
;
3524 if (snd_pcm_hw_params_test_format(handle
, params
, format
) == 0)
3525 info
->nativeFormats
|= RTAUDIO_FLOAT64
;
3527 // Check that we have at least one supported format
3528 if (info
->nativeFormats
== 0) {
3529 snd_pcm_close(handle
);
3530 sprintf(message_
, "RtApiAlsa: pcm device (%s) data format not supported by RtAudio.",
3531 info
->name
.c_str());
3532 error(RtError::WARNING
);
3536 // That's all ... close the device and return
3537 snd_pcm_close(handle
);
3538 info
->probed
= true;
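// The probe above never commits a configuration; it only asks ALSA whether
// each candidate rate or format would be accepted, using the hw_params "test"
// calls.  A minimal sketch of the same pattern for a single rate and format:
static bool alsaSupportsSketch( snd_pcm_t *pcm, unsigned int rate, snd_pcm_format_t fmt )
{
  snd_pcm_hw_params_t *params;
  snd_pcm_hw_params_alloca( &params );
  if ( snd_pcm_hw_params_any( pcm, params ) < 0 ) return false;    // full configuration space
  if ( snd_pcm_hw_params_test_rate( pcm, params, rate, 0 ) != 0 ) return false;
  if ( snd_pcm_hw_params_test_format( pcm, params, fmt ) != 0 ) return false;
  return true;
}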
3542 bool RtApiAlsa :: probeDeviceOpen( int device
, StreamMode mode
, int channels
,
3543 int sampleRate
, RtAudioFormat format
,
3544 int *bufferSize
, int numberOfBuffers
)
3546 #if defined(__RTAUDIO_DEBUG__)
3548 snd_output_stdio_attach(&out
, stderr
, 0);
3551 // I'm not using the "plug" interface ... too much inconsistent behavior.
3552 const char *name
= devices_
[device
].name
.c_str();
3554 snd_pcm_stream_t alsa_stream
;
3556 alsa_stream
= SND_PCM_STREAM_PLAYBACK
;
3558 alsa_stream
= SND_PCM_STREAM_CAPTURE
;
3562 int alsa_open_mode
= SND_PCM_ASYNC
;
3563 err
= snd_pcm_open(&handle
, name
, alsa_stream
, alsa_open_mode
);
3565 sprintf(message_
,"RtApiAlsa: pcm device (%s) won't open: %s.",
3566 name
, snd_strerror(err
));
3567 error(RtError::WARNING
);
3571 // Fill the parameter structure.
3572 snd_pcm_hw_params_t
*hw_params
;
3573 snd_pcm_hw_params_alloca(&hw_params
);
3574 err
= snd_pcm_hw_params_any(handle
, hw_params
);
3576 snd_pcm_close(handle
);
3577 sprintf(message_
, "RtApiAlsa: error getting parameter handle (%s): %s.",
3578 name
, snd_strerror(err
));
3579 error(RtError::WARNING
);
3583 #if defined(__RTAUDIO_DEBUG__)
3584 fprintf(stderr
, "\nRtApiAlsa: dump hardware params just after device open:\n\n");
3585 snd_pcm_hw_params_dump(hw_params
, out
);
3588 // Set access ... try interleaved access first, then non-interleaved
3589 if ( !snd_pcm_hw_params_test_access( handle
, hw_params
, SND_PCM_ACCESS_RW_INTERLEAVED
) ) {
3590 err
= snd_pcm_hw_params_set_access(handle
, hw_params
, SND_PCM_ACCESS_RW_INTERLEAVED
);
3592 else if ( !snd_pcm_hw_params_test_access( handle
, hw_params
, SND_PCM_ACCESS_RW_NONINTERLEAVED
) ) {
3593 err
= snd_pcm_hw_params_set_access(handle
, hw_params
, SND_PCM_ACCESS_RW_NONINTERLEAVED
);
3594 stream_
.deInterleave
[mode
] = true;
3597 snd_pcm_close(handle
);
3598 sprintf(message_
, "RtApiAlsa: device (%s) access not supported by RtAudio.", name
);
3599 error(RtError::WARNING
);
3604 snd_pcm_close(handle
);
    sprintf(message_, "RtApiAlsa: error setting access (%s): %s.", name, snd_strerror(err));
3606 error(RtError::WARNING
);
3610 // Determine how to set the device format.
3611 stream_
.userFormat
= format
;
3612 snd_pcm_format_t device_format
= SND_PCM_FORMAT_UNKNOWN
;
3614 if (format
== RTAUDIO_SINT8
)
3615 device_format
= SND_PCM_FORMAT_S8
;
3616 else if (format
== RTAUDIO_SINT16
)
3617 device_format
= SND_PCM_FORMAT_S16
;
3618 else if (format
== RTAUDIO_SINT24
)
3619 device_format
= SND_PCM_FORMAT_S24
;
3620 else if (format
== RTAUDIO_SINT32
)
3621 device_format
= SND_PCM_FORMAT_S32
;
3622 else if (format
== RTAUDIO_FLOAT32
)
3623 device_format
= SND_PCM_FORMAT_FLOAT
;
3624 else if (format
== RTAUDIO_FLOAT64
)
3625 device_format
= SND_PCM_FORMAT_FLOAT64
;
3627 if (snd_pcm_hw_params_test_format(handle
, hw_params
, device_format
) == 0) {
3628 stream_
.deviceFormat
[mode
] = format
;
3632 // The user requested format is not natively supported by the device.
3633 device_format
= SND_PCM_FORMAT_FLOAT64
;
3634 if (snd_pcm_hw_params_test_format(handle
, hw_params
, device_format
) == 0) {
3635 stream_
.deviceFormat
[mode
] = RTAUDIO_FLOAT64
;
3639 device_format
= SND_PCM_FORMAT_FLOAT
;
3640 if (snd_pcm_hw_params_test_format(handle
, hw_params
, device_format
) == 0) {
3641 stream_
.deviceFormat
[mode
] = RTAUDIO_FLOAT32
;
3645 device_format
= SND_PCM_FORMAT_S32
;
3646 if (snd_pcm_hw_params_test_format(handle
, hw_params
, device_format
) == 0) {
3647 stream_
.deviceFormat
[mode
] = RTAUDIO_SINT32
;
3651 device_format
= SND_PCM_FORMAT_S24
;
3652 if (snd_pcm_hw_params_test_format(handle
, hw_params
, device_format
) == 0) {
3653 stream_
.deviceFormat
[mode
] = RTAUDIO_SINT24
;
3657 device_format
= SND_PCM_FORMAT_S16
;
3658 if (snd_pcm_hw_params_test_format(handle
, hw_params
, device_format
) == 0) {
3659 stream_
.deviceFormat
[mode
] = RTAUDIO_SINT16
;
3663 device_format
= SND_PCM_FORMAT_S8
;
3664 if (snd_pcm_hw_params_test_format(handle
, hw_params
, device_format
) == 0) {
3665 stream_
.deviceFormat
[mode
] = RTAUDIO_SINT8
;
3669 // If we get here, no supported format was found.
3670 sprintf(message_
,"RtApiAlsa: pcm device (%s) data format not supported by RtAudio.", name
);
3671 snd_pcm_close(handle
);
3672 error(RtError::WARNING
);
3676 err
= snd_pcm_hw_params_set_format(handle
, hw_params
, device_format
);
3678 snd_pcm_close(handle
);
3679 sprintf(message_
, "RtApiAlsa: error setting format (%s): %s.",
3680 name
, snd_strerror(err
));
3681 error(RtError::WARNING
);
  // Determine whether byte-swapping is necessary.
3686 stream_
.doByteSwap
[mode
] = false;
3687 if (device_format
!= SND_PCM_FORMAT_S8
) {
3688 err
= snd_pcm_format_cpu_endian(device_format
);
3690 stream_
.doByteSwap
[mode
] = true;
3692 snd_pcm_close(handle
);
3693 sprintf(message_
, "RtApiAlsa: error getting format endian-ness (%s): %s.",
3694 name
, snd_strerror(err
));
3695 error(RtError::WARNING
);
3700 // Set the sample rate.
3701 err
= snd_pcm_hw_params_set_rate(handle
, hw_params
, (unsigned int)sampleRate
, 0);
3703 snd_pcm_close(handle
);
3704 sprintf(message_
, "RtApiAlsa: error setting sample rate (%d) on device (%s): %s.",
3705 sampleRate
, name
, snd_strerror(err
));
3706 error(RtError::WARNING
);
  // Determine the number of channels for this device.  We support a
  // device minimum channel count greater than the value requested by the user.
3712 stream_
.nUserChannels
[mode
] = channels
;
3714 err
= snd_pcm_hw_params_get_channels_max(hw_params
, &value
);
3715 int device_channels
= value
;
3716 if (err
< 0 || device_channels
< channels
) {
3717 snd_pcm_close(handle
);
3718 sprintf(message_
, "RtApiAlsa: channels (%d) not supported by device (%s).",
3720 error(RtError::WARNING
);
3724 err
= snd_pcm_hw_params_get_channels_min(hw_params
, &value
);
3726 snd_pcm_close(handle
);
3727 sprintf(message_
, "RtApiAlsa: error getting min channels count on device (%s).", name
);
3728 error(RtError::WARNING
);
3731 device_channels
= value
;
3732 if (device_channels
< channels
) device_channels
= channels
;
3733 stream_
.nDeviceChannels
[mode
] = device_channels
;
3735 // Set the device channels.
3736 err
= snd_pcm_hw_params_set_channels(handle
, hw_params
, device_channels
);
3738 snd_pcm_close(handle
);
3739 sprintf(message_
, "RtApiAlsa: error setting channels (%d) on device (%s): %s.",
3740 device_channels
, name
, snd_strerror(err
));
3741 error(RtError::WARNING
);
3745 // Set the buffer number, which in ALSA is referred to as the "period".
3747 unsigned int periods
= numberOfBuffers
;
3748 // Even though the hardware might allow 1 buffer, it won't work reliably.
3749 if (periods
< 2) periods
= 2;
3750 err
= snd_pcm_hw_params_get_periods_min(hw_params
, &value
, &dir
);
3752 snd_pcm_close(handle
);
3753 sprintf(message_
, "RtApiAlsa: error getting min periods on device (%s): %s.",
3754 name
, snd_strerror(err
));
3755 error(RtError::WARNING
);
3758 if (value
> periods
) periods
= value
;
3759 err
= snd_pcm_hw_params_get_periods_max(hw_params
, &value
, &dir
);
3761 snd_pcm_close(handle
);
3762 sprintf(message_
, "RtApiAlsa: error getting max periods on device (%s): %s.",
3763 name
, snd_strerror(err
));
3764 error(RtError::WARNING
);
3767 if (value
< periods
) periods
= value
;
3769 err
= snd_pcm_hw_params_set_periods(handle
, hw_params
, periods
, 0);
3771 snd_pcm_close(handle
);
3772 sprintf(message_
, "RtApiAlsa: error setting periods (%s): %s.",
3773 name
, snd_strerror(err
));
3774 error(RtError::WARNING
);
3778 // Set the buffer (or period) size.
3779 snd_pcm_uframes_t period_size
;
3780 err
= snd_pcm_hw_params_get_period_size_min(hw_params
, &period_size
, &dir
);
3782 snd_pcm_close(handle
);
3783 sprintf(message_
, "RtApiAlsa: error getting period size (%s): %s.",
3784 name
, snd_strerror(err
));
3785 error(RtError::WARNING
);
3788 if (*bufferSize
< (int) period_size
) *bufferSize
= (int) period_size
;
3790 err
= snd_pcm_hw_params_set_period_size(handle
, hw_params
, *bufferSize
, 0);
3792 snd_pcm_close(handle
);
3793 sprintf(message_
, "RtApiAlsa: error setting period size (%s): %s.",
3794 name
, snd_strerror(err
));
3795 error(RtError::WARNING
);
3799 // If attempting to setup a duplex stream, the bufferSize parameter
3800 // MUST be the same in both directions!
3801 if ( stream_
.mode
== OUTPUT
&& mode
== INPUT
&& *bufferSize
!= stream_
.bufferSize
) {
3802 sprintf( message_
, "RtApiAlsa: error setting buffer size for duplex stream on device (%s).",
3804 error(RtError::DEBUG_WARNING
);
3808 stream_
.bufferSize
= *bufferSize
;
3810 // Install the hardware configuration
3811 err
= snd_pcm_hw_params(handle
, hw_params
);
3813 snd_pcm_close(handle
);
3814 sprintf(message_
, "RtApiAlsa: error installing hardware configuration (%s): %s.",
3815 name
, snd_strerror(err
));
3816 error(RtError::WARNING
);
3820 #if defined(__RTAUDIO_DEBUG__)
3821 fprintf(stderr
, "\nRtApiAlsa: dump hardware params after installation:\n\n");
3822 snd_pcm_hw_params_dump(hw_params
, out
);
3825 // Allocate the stream handle if necessary and then save.
3826 snd_pcm_t
**handles
;
3827 if ( stream_
.apiHandle
== 0 ) {
    handles = (snd_pcm_t **) calloc(2, sizeof(snd_pcm_t *));
    if ( handles == NULL ) {
3830 sprintf(message_
, "RtApiAlsa: error allocating handle memory (%s).",
3831 devices_
[device
].name
.c_str());
3834 stream_
.apiHandle
= (void *) handles
;
3839 handles
= (snd_pcm_t
**) stream_
.apiHandle
;
3841 handles
[mode
] = handle
;
3843 // Set flags for buffer conversion
3844 stream_
.doConvertBuffer
[mode
] = false;
3845 if (stream_
.userFormat
!= stream_
.deviceFormat
[mode
])
3846 stream_
.doConvertBuffer
[mode
] = true;
3847 if (stream_
.nUserChannels
[mode
] < stream_
.nDeviceChannels
[mode
])
3848 stream_
.doConvertBuffer
[mode
] = true;
3849 if (stream_
.nUserChannels
[mode
] > 1 && stream_
.deInterleave
[mode
])
3850 stream_
.doConvertBuffer
[mode
] = true;
3852 // Allocate necessary internal buffers
3853 if ( stream_
.nUserChannels
[0] != stream_
.nUserChannels
[1] ) {
3856 if (stream_
.nUserChannels
[0] >= stream_
.nUserChannels
[1])
3857 buffer_bytes
= stream_
.nUserChannels
[0];
3859 buffer_bytes
= stream_
.nUserChannels
[1];
3861 buffer_bytes
*= *bufferSize
* formatBytes(stream_
.userFormat
);
3862 if (stream_
.userBuffer
) free(stream_
.userBuffer
);
3863 stream_
.userBuffer
= (char *) calloc(buffer_bytes
, 1);
3864 if (stream_
.userBuffer
== NULL
) {
3865 sprintf(message_
, "RtApiAlsa: error allocating user buffer memory (%s).",
3866 devices_
[device
].name
.c_str());
3871 if ( stream_
.doConvertBuffer
[mode
] ) {
3874 bool makeBuffer
= true;
3875 if ( mode
== OUTPUT
)
3876 buffer_bytes
= stream_
.nDeviceChannels
[0] * formatBytes(stream_
.deviceFormat
[0]);
3877 else { // mode == INPUT
3878 buffer_bytes
= stream_
.nDeviceChannels
[1] * formatBytes(stream_
.deviceFormat
[1]);
3879 if ( stream_
.mode
== OUTPUT
&& stream_
.deviceBuffer
) {
3880 long bytes_out
= stream_
.nDeviceChannels
[0] * formatBytes(stream_
.deviceFormat
[0]);
3881 if ( buffer_bytes
< bytes_out
) makeBuffer
= false;
3886 buffer_bytes
*= *bufferSize
;
3887 if (stream_
.deviceBuffer
) free(stream_
.deviceBuffer
);
3888 stream_
.deviceBuffer
= (char *) calloc(buffer_bytes
, 1);
3889 if (stream_
.deviceBuffer
== NULL
) {
3890 sprintf(message_
, "RtApiAlsa: error allocating device buffer memory (%s).",
3891 devices_
[device
].name
.c_str());
3897 stream_
.device
[mode
] = device
;
3898 stream_
.state
= STREAM_STOPPED
;
3899 if ( stream_
.mode
== OUTPUT
&& mode
== INPUT
)
3900 // We had already set up an output stream.
3901 stream_
.mode
= DUPLEX
;
3903 stream_
.mode
= mode
;
3904 stream_
.nBuffers
= periods
;
3905 stream_
.sampleRate
= sampleRate
;
3912 snd_pcm_close(handles
[0]);
3914 snd_pcm_close(handles
[1]);
3916 stream_
.apiHandle
= 0;
3919 if (stream_
.userBuffer
) {
3920 free(stream_
.userBuffer
);
3921 stream_
.userBuffer
= 0;
3924 error(RtError::WARNING
);
void RtApiAlsa :: closeStream()
{
  // We don't want an exception to be thrown here because this
  // function is called by our class destructor.  So, do our own
  // error reporting and return.
  if ( stream_.mode == UNINITIALIZED ) {
    sprintf(message_, "RtApiAlsa::closeStream(): no open stream to close!");
    error(RtError::WARNING);
    return;
  }

  snd_pcm_t **handle = (snd_pcm_t **) stream_.apiHandle;
  if (stream_.state == STREAM_RUNNING) {
    if (stream_.mode == OUTPUT || stream_.mode == DUPLEX)
      snd_pcm_drop(handle[0]);
    if (stream_.mode == INPUT || stream_.mode == DUPLEX)
      snd_pcm_drop(handle[1]);
    stream_.state = STREAM_STOPPED;
  }

  if (stream_.callbackInfo.usingCallback) {
    stream_.callbackInfo.usingCallback = false;
    pthread_join(stream_.callbackInfo.thread, NULL);
  }

  if (handle) {
    if (handle[0]) snd_pcm_close(handle[0]);
    if (handle[1]) snd_pcm_close(handle[1]);
    free(handle);
    stream_.apiHandle = 0;
  }

  if (stream_.userBuffer) {
    free(stream_.userBuffer);
    stream_.userBuffer = 0;
  }

  if (stream_.deviceBuffer) {
    free(stream_.deviceBuffer);
    stream_.deviceBuffer = 0;
  }

  stream_.mode = UNINITIALIZED;
}
void RtApiAlsa :: startStream()
{
  // This method calls snd_pcm_prepare if the device isn't already in that state.

  if (stream_.state == STREAM_RUNNING) return;

  MUTEX_LOCK(&stream_.mutex);

  int err;
  snd_pcm_state_t state;
  snd_pcm_t **handle = (snd_pcm_t **) stream_.apiHandle;
  if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
    state = snd_pcm_state(handle[0]);
    if (state != SND_PCM_STATE_PREPARED) {
      err = snd_pcm_prepare(handle[0]);
      if (err < 0) {
        sprintf(message_, "RtApiAlsa: error preparing pcm device (%s): %s.",
                devices_[stream_.device[0]].name.c_str(), snd_strerror(err));
        MUTEX_UNLOCK(&stream_.mutex);
        error(RtError::DRIVER_ERROR);
      }
    }
  }

  if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
    state = snd_pcm_state(handle[1]);
    if (state != SND_PCM_STATE_PREPARED) {
      err = snd_pcm_prepare(handle[1]);
      if (err < 0) {
        sprintf(message_, "RtApiAlsa: error preparing pcm device (%s): %s.",
                devices_[stream_.device[1]].name.c_str(), snd_strerror(err));
        MUTEX_UNLOCK(&stream_.mutex);
        error(RtError::DRIVER_ERROR);
      }
    }
  }

  stream_.state = STREAM_RUNNING;

  MUTEX_UNLOCK(&stream_.mutex);
}
void RtApiAlsa :: stopStream()
{
  if (stream_.state == STREAM_STOPPED) return;

  // Change the state before the lock to improve shutdown response
  // when using a callback.
  stream_.state = STREAM_STOPPED;
  MUTEX_LOCK(&stream_.mutex);

  int err;
  snd_pcm_t **handle = (snd_pcm_t **) stream_.apiHandle;
  if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
    err = snd_pcm_drain(handle[0]);
    if (err < 0) {
      sprintf(message_, "RtApiAlsa: error draining pcm device (%s): %s.",
              devices_[stream_.device[0]].name.c_str(), snd_strerror(err));
      MUTEX_UNLOCK(&stream_.mutex);
      error(RtError::DRIVER_ERROR);
    }
  }

  if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
    err = snd_pcm_drain(handle[1]);
    if (err < 0) {
      sprintf(message_, "RtApiAlsa: error draining pcm device (%s): %s.",
              devices_[stream_.device[1]].name.c_str(), snd_strerror(err));
      MUTEX_UNLOCK(&stream_.mutex);
      error(RtError::DRIVER_ERROR);
    }
  }

  MUTEX_UNLOCK(&stream_.mutex);
}
void RtApiAlsa :: abortStream()
{
  if (stream_.state == STREAM_STOPPED) return;

  // Change the state before the lock to improve shutdown response
  // when using a callback.
  stream_.state = STREAM_STOPPED;
  MUTEX_LOCK(&stream_.mutex);

  int err;
  snd_pcm_t **handle = (snd_pcm_t **) stream_.apiHandle;
  if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
    err = snd_pcm_drop(handle[0]);
    if (err < 0) {
      sprintf(message_, "RtApiAlsa: error draining pcm device (%s): %s.",
              devices_[stream_.device[0]].name.c_str(), snd_strerror(err));
      MUTEX_UNLOCK(&stream_.mutex);
      error(RtError::DRIVER_ERROR);
    }
  }

  if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
    err = snd_pcm_drop(handle[1]);
    if (err < 0) {
      sprintf(message_, "RtApiAlsa: error draining pcm device (%s): %s.",
              devices_[stream_.device[1]].name.c_str(), snd_strerror(err));
      MUTEX_UNLOCK(&stream_.mutex);
      error(RtError::DRIVER_ERROR);
    }
  }

  MUTEX_UNLOCK(&stream_.mutex);
}
int RtApiAlsa :: streamWillBlock()
{
  if (stream_.state == STREAM_STOPPED) return 0;

  MUTEX_LOCK(&stream_.mutex);

  int err = 0, frames = 0;
  snd_pcm_t **handle = (snd_pcm_t **) stream_.apiHandle;
  if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
    err = snd_pcm_avail_update(handle[0]);
    if (err < 0) {
      sprintf(message_, "RtApiAlsa: error getting available frames for device (%s): %s.",
              devices_[stream_.device[0]].name.c_str(), snd_strerror(err));
      MUTEX_UNLOCK(&stream_.mutex);
      error(RtError::DRIVER_ERROR);
    }
    frames = err;
  }

  if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
    err = snd_pcm_avail_update(handle[1]);
    if (err < 0) {
      sprintf(message_, "RtApiAlsa: error getting available frames for device (%s): %s.",
              devices_[stream_.device[1]].name.c_str(), snd_strerror(err));
      MUTEX_UNLOCK(&stream_.mutex);
      error(RtError::DRIVER_ERROR);
    }
    if (frames > err) frames = err;
  }

  frames = stream_.bufferSize - frames;
  if (frames < 0) frames = 0;

  MUTEX_UNLOCK(&stream_.mutex);
  return frames;
}
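// Illustrative note (not part of the original source): streamWillBlock() reports how
// many frames are still missing before the next tickStream() call can complete
// without blocking.  Assuming the public RtAudio wrapper simply forwards to this
// method, a blocking-mode client could poll it to avoid stalling:
//
//   while ( audio->streamWillBlock() > 0 )
//     ; // do other work (or sleep briefly) instead of blocking in tickStream()
//   audio->tickStream();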
4124 void RtApiAlsa :: tickStream()
4129 if (stream_
.state
== STREAM_STOPPED
) {
4130 if (stream_
.callbackInfo
.usingCallback
) usleep(50000); // sleep 50 milliseconds
4133 else if (stream_
.callbackInfo
.usingCallback
) {
4134 RtAudioCallback callback
= (RtAudioCallback
) stream_
.callbackInfo
.callback
;
4135 stopStream
= callback(stream_
.userBuffer
, stream_
.bufferSize
, stream_
.callbackInfo
.userData
);
4138 MUTEX_LOCK(&stream_
.mutex
);
4140 // The state might change while waiting on a mutex.
4141 if (stream_
.state
== STREAM_STOPPED
)
4148 RtAudioFormat format
;
4149 handle
= (snd_pcm_t
**) stream_
.apiHandle
;
4150 if (stream_
.mode
== OUTPUT
|| stream_
.mode
== DUPLEX
) {
4152 // Setup parameters and do buffer conversion if necessary.
4153 if (stream_
.doConvertBuffer
[0]) {
4154 convertStreamBuffer(OUTPUT
);
4155 buffer
= stream_
.deviceBuffer
;
4156 channels
= stream_
.nDeviceChannels
[0];
4157 format
= stream_
.deviceFormat
[0];
4160 buffer
= stream_
.userBuffer
;
4161 channels
= stream_
.nUserChannels
[0];
4162 format
= stream_
.userFormat
;
4165 // Do byte swapping if necessary.
4166 if (stream_
.doByteSwap
[0])
4167 byteSwapBuffer(buffer
, stream_
.bufferSize
* channels
, format
);
4169 // Write samples to device in interleaved/non-interleaved format.
4170 if (stream_
.deInterleave
[0]) {
4171 void *bufs
[channels
];
4172 size_t offset
= stream_
.bufferSize
* formatBytes(format
);
4173 for (int i
=0; i
<channels
; i
++)
4174 bufs
[i
] = (void *) (buffer
+ (i
* offset
));
4175 err
= snd_pcm_writen(handle
[0], bufs
, stream_
.bufferSize
);
4178 err
= snd_pcm_writei(handle
[0], buffer
, stream_
.bufferSize
);
4180 if (err
< stream_
.bufferSize
) {
4181 // Either an error or underrun occured.
4182 if (err
== -EPIPE
) {
4183 snd_pcm_state_t state
= snd_pcm_state(handle
[0]);
4184 if (state
== SND_PCM_STATE_XRUN
) {
4185 sprintf(message_
, "RtApiAlsa: underrun detected.");
4186 error(RtError::WARNING
);
4187 err
= snd_pcm_prepare(handle
[0]);
4189 sprintf(message_
, "RtApiAlsa: error preparing handle after underrun: %s.",
4191 MUTEX_UNLOCK(&stream_
.mutex
);
4192 error(RtError::DRIVER_ERROR
);
4196 sprintf(message_
, "RtApiAlsa: tickStream() error, current state is %s.",
4197 snd_pcm_state_name(state
));
4198 MUTEX_UNLOCK(&stream_
.mutex
);
4199 error(RtError::DRIVER_ERROR
);
4204 sprintf(message_
, "RtApiAlsa: audio write error for device (%s): %s.",
4205 devices_
[stream_
.device
[0]].name
.c_str(), snd_strerror(err
));
4206 MUTEX_UNLOCK(&stream_
.mutex
);
4207 error(RtError::DRIVER_ERROR
);
4212 if (stream_
.mode
== INPUT
|| stream_
.mode
== DUPLEX
) {
4214 // Setup parameters.
4215 if (stream_
.doConvertBuffer
[1]) {
4216 buffer
= stream_
.deviceBuffer
;
4217 channels
= stream_
.nDeviceChannels
[1];
4218 format
= stream_
.deviceFormat
[1];
4221 buffer
= stream_
.userBuffer
;
4222 channels
= stream_
.nUserChannels
[1];
4223 format
= stream_
.userFormat
;
4226 // Read samples from device in interleaved/non-interleaved format.
4227 if (stream_
.deInterleave
[1]) {
4228 void *bufs
[channels
];
4229 size_t offset
= stream_
.bufferSize
* formatBytes(format
);
4230 for (int i
=0; i
<channels
; i
++)
4231 bufs
[i
] = (void *) (buffer
+ (i
* offset
));
4232 err
= snd_pcm_readn(handle
[1], bufs
, stream_
.bufferSize
);
4235 err
= snd_pcm_readi(handle
[1], buffer
, stream_
.bufferSize
);
4237 if (err
< stream_
.bufferSize
) {
4238 // Either an error or underrun occured.
4239 if (err
== -EPIPE
) {
4240 snd_pcm_state_t state
= snd_pcm_state(handle
[1]);
4241 if (state
== SND_PCM_STATE_XRUN
) {
4242 sprintf(message_
, "RtApiAlsa: overrun detected.");
4243 error(RtError::WARNING
);
4244 err
= snd_pcm_prepare(handle
[1]);
4246 sprintf(message_
, "RtApiAlsa: error preparing handle after overrun: %s.",
4248 MUTEX_UNLOCK(&stream_
.mutex
);
4249 error(RtError::DRIVER_ERROR
);
4253 sprintf(message_
, "RtApiAlsa: tickStream() error, current state is %s.",
4254 snd_pcm_state_name(state
));
4255 MUTEX_UNLOCK(&stream_
.mutex
);
4256 error(RtError::DRIVER_ERROR
);
4261 sprintf(message_
, "RtApiAlsa: audio read error for device (%s): %s.",
4262 devices_
[stream_
.device
[1]].name
.c_str(), snd_strerror(err
));
4263 MUTEX_UNLOCK(&stream_
.mutex
);
4264 error(RtError::DRIVER_ERROR
);
4268 // Do byte swapping if necessary.
4269 if (stream_
.doByteSwap
[1])
4270 byteSwapBuffer(buffer
, stream_
.bufferSize
* channels
, format
);
4272 // Do buffer conversion if necessary.
4273 if (stream_
.doConvertBuffer
[1])
4274 convertStreamBuffer(INPUT
);
4278 MUTEX_UNLOCK(&stream_
.mutex
);
4280 if (stream_
.callbackInfo
.usingCallback
&& stopStream
)
void RtApiAlsa :: setStreamCallback(RtAudioCallback callback, void *userData)
{
  CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
  if ( info->usingCallback ) {
    sprintf(message_, "RtApiAlsa: A callback is already set for this stream!");
    error(RtError::WARNING);
    return;
  }

  info->callback = (void *) callback;
  info->userData = userData;
  info->usingCallback = true;
  info->object = (void *) this;

  // Set the thread attributes for joinable and realtime scheduling
  // priority.  The higher priority will only take effect if the
  // program is run as root or suid.
  pthread_attr_t attr;
  pthread_attr_init(&attr);
  // pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
  // pthread_attr_setschedpolicy(&attr, SCHED_RR);

  int err = pthread_create(&info->thread, &attr, alsaCallbackHandler, &stream_.callbackInfo);
  pthread_attr_destroy(&attr);
  if (err) {
    info->usingCallback = false;
    sprintf(message_, "RtApiAlsa: error starting callback thread!");
    error(RtError::THREAD_ERROR);
  }
}
void RtApiAlsa :: cancelStreamCallback()
{
  if (stream_.callbackInfo.usingCallback) {

    if (stream_.state == STREAM_RUNNING)
      stopStream();

    MUTEX_LOCK(&stream_.mutex);

    stream_.callbackInfo.usingCallback = false;
    pthread_join(stream_.callbackInfo.thread, NULL);
    stream_.callbackInfo.thread = 0;
    stream_.callbackInfo.callback = NULL;
    stream_.callbackInfo.userData = NULL;

    MUTEX_UNLOCK(&stream_.mutex);
  }
}
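// Illustrative sketch (not part of the original source): how a client drives the
// callback path that alsaCallbackHandler() below services.  The names myCallback,
// MyData, data and audio are hypothetical; the callback signature follows the
// RtAudioCallback usage in tickStream() (a non-zero return requests a stop).
//
//   int myCallback( char *buffer, int bufferSize, void *userData )
//   {
//     MyData *data = (MyData *) userData;
//     // fill (output) or consume (input) bufferSize sample frames in buffer
//     return 0;
//   }
//
//   audio->setStreamCallback( &myCallback, (void *) &data );
//   audio->startStream();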
extern "C" void *alsaCallbackHandler(void *ptr)
{
  CallbackInfo *info = (CallbackInfo *) ptr;
  RtApiAlsa *object = (RtApiAlsa *) info->object;
  bool *usingCallback = &info->usingCallback;

  while ( *usingCallback ) {
    try {
      object->tickStream();
    }
    catch (RtError &exception) {
      fprintf(stderr, "\nRtApiAlsa: callback thread error (%s) ... closing thread.\n\n",
              exception.getMessageString());
      break;
    }
  }

  return 0;
}

//******************** End of __LINUX_ALSA__ *********************//

#endif
#if defined(__WINDOWS_ASIO__) // ASIO API on Windows

// The ASIO API is designed around a callback scheme, so this
// implementation is similar to that used for OS-X CoreAudio and Linux
// Jack.  The primary constraint with ASIO is that it only allows
// access to a single driver at a time.  Thus, it is not possible to
// have more than one simultaneous RtAudio stream.

// This implementation also requires a number of external ASIO files
// and a few global variables.  The ASIO callback scheme does not
// allow for the passing of user data, so we must create a global
// pointer to our callbackInfo structure.

// On unix systems, we make use of a pthread condition variable.
// Since there is no equivalent in Windows, I hacked something based
// on information found in
// http://www.cs.wustl.edu/~schmidt/win32-cv-1.html.

#include "asio/asiosys.h"
#include "asio/asio.h"
#include "asio/asiodrivers.h"

AsioDrivers drivers;
ASIOCallbacks asioCallbacks;
ASIODriverInfo driverInfo;
CallbackInfo *asioCallbackInfo;

struct AsioHandle {
  bool stopStream;
  ASIOBufferInfo *bufferInfos;
  HANDLE condition;

  AsioHandle()
    :stopStream(false), bufferInfos(0) {}
};
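// Illustrative note (not part of the original source): the condition-variable
// substitute mentioned above is built from a manual-reset Win32 event, mirroring
// the calls that appear later in this section.  Roughly:
//
//   handle->condition = CreateEvent( NULL, TRUE, FALSE, NULL ); // manual-reset, initially non-signaled
//   // waiting side (tickStream):
//   WaitForMultipleObjects( 1, &handle->condition, FALSE, INFINITE );
//   ResetEvent( handle->condition );
//   // signaling side (callbackEvent, blocking mode only):
//   SetEvent( handle->condition );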
RtApiAsio :: RtApiAsio()
{
  this->initialize();

  if (nDevices_ <= 0) {
    sprintf(message_, "RtApiAsio: no Windows ASIO audio drivers found!");
    error(RtError::NO_DEVICES_FOUND);
  }
}

RtApiAsio :: ~RtApiAsio()
{
  if ( stream_.mode != UNINITIALIZED ) closeStream();
}
void RtApiAsio :: initialize(void)
{
  nDevices_ = drivers.asioGetNumDev();
  if (nDevices_ <= 0) return;

  // Create device structures and write device driver names to each.
  RtApiDevice device;
  char name[128];
  for (int i=0; i<nDevices_; i++) {
    if ( drivers.asioGetDriverName( i, name, 128 ) == 0 ) {
      device.name.erase();
      device.name.append( (const char *)name, strlen(name)+1);
      devices_.push_back(device);
    }
    else {
      sprintf(message_, "RtApiAsio: error getting driver name for device index %d!", i);
      error(RtError::WARNING);
    }
  }

  nDevices_ = (int) devices_.size();

  drivers.removeCurrentDriver();
  driverInfo.asioVersion = 2;
  // See note in DirectSound implementation about GetDesktopWindow().
  driverInfo.sysRef = GetForegroundWindow();
}
4441 void RtApiAsio :: probeDeviceInfo(RtApiDevice
*info
)
4443 // Don't probe if a stream is already open.
4444 if ( stream_
.mode
!= UNINITIALIZED
) {
4445 sprintf(message_
, "RtApiAsio: unable to probe driver while a stream is open.");
4446 error(RtError::DEBUG_WARNING
);
4450 if ( !drivers
.loadDriver( (char *)info
->name
.c_str() ) ) {
4451 sprintf(message_
, "RtApiAsio: error loading driver (%s).", info
->name
.c_str());
4452 error(RtError::DEBUG_WARNING
);
4456 ASIOError result
= ASIOInit( &driverInfo
);
4457 if ( result
!= ASE_OK
) {
4459 if ( result
== ASE_HWMalfunction
)
4460 sprintf(details
, "hardware malfunction");
4461 else if ( result
== ASE_NoMemory
)
4462 sprintf(details
, "no memory");
4463 else if ( result
== ASE_NotPresent
)
4464 sprintf(details
, "driver/hardware not present");
4466 sprintf(details
, "unspecified");
4467 sprintf(message_
, "RtApiAsio: error (%s) initializing driver (%s).", details
, info
->name
.c_str());
4468 error(RtError::DEBUG_WARNING
);
4472 // Determine the device channel information.
4473 long inputChannels
, outputChannels
;
4474 result
= ASIOGetChannels( &inputChannels
, &outputChannels
);
4475 if ( result
!= ASE_OK
) {
4476 drivers
.removeCurrentDriver();
4477 sprintf(message_
, "RtApiAsio: error getting input/output channel count (%s).", info
->name
.c_str());
4478 error(RtError::DEBUG_WARNING
);
4482 info
->maxOutputChannels
= outputChannels
;
4483 if ( outputChannels
> 0 ) info
->minOutputChannels
= 1;
4485 info
->maxInputChannels
= inputChannels
;
4486 if ( inputChannels
> 0 ) info
->minInputChannels
= 1;
4488 // If device opens for both playback and capture, we determine the channels.
4489 if (info
->maxOutputChannels
> 0 && info
->maxInputChannels
> 0) {
4490 info
->hasDuplexSupport
= true;
4491 info
->maxDuplexChannels
= (info
->maxOutputChannels
> info
->maxInputChannels
) ?
4492 info
->maxInputChannels
: info
->maxOutputChannels
;
4493 info
->minDuplexChannels
= (info
->minOutputChannels
> info
->minInputChannels
) ?
4494 info
->minInputChannels
: info
->minOutputChannels
;
4497 // Determine the supported sample rates.
4498 info
->sampleRates
.clear();
4499 for (unsigned int i
=0; i
<MAX_SAMPLE_RATES
; i
++) {
4500 result
= ASIOCanSampleRate( (ASIOSampleRate
) SAMPLE_RATES
[i
] );
4501 if ( result
== ASE_OK
)
4502 info
->sampleRates
.push_back( SAMPLE_RATES
[i
] );
4505 if (info
->sampleRates
.size() == 0) {
4506 drivers
.removeCurrentDriver();
4507 sprintf( message_
, "RtApiAsio: No supported sample rates found for driver (%s).", info
->name
.c_str() );
4508 error(RtError::DEBUG_WARNING
);
4512 // Determine supported data types ... just check first channel and assume rest are the same.
4513 ASIOChannelInfo channelInfo
;
4514 channelInfo
.channel
= 0;
4515 channelInfo
.isInput
= true;
4516 if ( info
->maxInputChannels
<= 0 ) channelInfo
.isInput
= false;
4517 result
= ASIOGetChannelInfo( &channelInfo
);
4518 if ( result
!= ASE_OK
) {
4519 drivers
.removeCurrentDriver();
4520 sprintf(message_
, "RtApiAsio: error getting driver (%s) channel information.", info
->name
.c_str());
4521 error(RtError::DEBUG_WARNING
);
4525 if ( channelInfo
.type
== ASIOSTInt16MSB
|| channelInfo
.type
== ASIOSTInt16LSB
)
4526 info
->nativeFormats
|= RTAUDIO_SINT16
;
4527 else if ( channelInfo
.type
== ASIOSTInt32MSB
|| channelInfo
.type
== ASIOSTInt32LSB
)
4528 info
->nativeFormats
|= RTAUDIO_SINT32
;
4529 else if ( channelInfo
.type
== ASIOSTFloat32MSB
|| channelInfo
.type
== ASIOSTFloat32LSB
)
4530 info
->nativeFormats
|= RTAUDIO_FLOAT32
;
4531 else if ( channelInfo
.type
== ASIOSTFloat64MSB
|| channelInfo
.type
== ASIOSTFloat64LSB
)
4532 info
->nativeFormats
|= RTAUDIO_FLOAT64
;
4534 // Check that we have at least one supported format.
4535 if (info
->nativeFormats
== 0) {
4536 drivers
.removeCurrentDriver();
4537 sprintf(message_
, "RtApiAsio: driver (%s) data format not supported by RtAudio.",
4538 info
->name
.c_str());
4539 error(RtError::DEBUG_WARNING
);
4543 info
->probed
= true;
4544 drivers
.removeCurrentDriver();
4547 void bufferSwitch(long index
, ASIOBool processNow
)
4549 RtApiAsio
*object
= (RtApiAsio
*) asioCallbackInfo
->object
;
4551 object
->callbackEvent( index
);
4553 catch (RtError
&exception
) {
4554 fprintf(stderr
, "\nRtApiAsio: callback handler error (%s)!\n\n", exception
.getMessageString());
4561 void sampleRateChanged(ASIOSampleRate sRate
)
4563 // The ASIO documentation says that this usually only happens during
4564 // external sync. Audio processing is not stopped by the driver,
4565 // actual sample rate might not have even changed, maybe only the
4566 // sample rate status of an AES/EBU or S/PDIF digital input at the
4569 RtAudio
*object
= (RtAudio
*) asioCallbackInfo
->object
;
4571 object
->stopStream();
4573 catch (RtError
&exception
) {
4574 fprintf(stderr
, "\nRtApiAsio: sampleRateChanged() error (%s)!\n\n", exception
.getMessageString());
4578 fprintf(stderr
, "\nRtApiAsio: driver reports sample rate changed to %d ... stream stopped!!!", (int) sRate
);
4581 long asioMessages(long selector
, long value
, void* message
, double* opt
)
4585 case kAsioSelectorSupported
:
4586 if(value
== kAsioResetRequest
4587 || value
== kAsioEngineVersion
4588 || value
== kAsioResyncRequest
4589 || value
== kAsioLatenciesChanged
4590 // The following three were added for ASIO 2.0, you don't
4591 // necessarily have to support them.
4592 || value
== kAsioSupportsTimeInfo
4593 || value
== kAsioSupportsTimeCode
4594 || value
== kAsioSupportsInputMonitor
)
4597 case kAsioResetRequest
:
4598 // Defer the task and perform the reset of the driver during the
4599 // next "safe" situation. You cannot reset the driver right now,
4600 // as this code is called from the driver. Reset the driver is
4601 // done by completely destruct is. I.e. ASIOStop(),
4602 // ASIODisposeBuffers(), Destruction Afterwards you initialize the
4604 fprintf(stderr
, "\nRtApiAsio: driver reset requested!!!");
4607 case kAsioResyncRequest
:
4608 // This informs the application that the driver encountered some
4609 // non-fatal data loss. It is used for synchronization purposes
4610 // of different media. Added mainly to work around the Win16Mutex
4611 // problems in Windows 95/98 with the Windows Multimedia system,
4612 // which could lose data because the Mutex was held too long by
4613 // another thread. However a driver can issue it in other
4615 fprintf(stderr
, "\nRtApiAsio: driver resync requested!!!");
4618 case kAsioLatenciesChanged
:
4619 // This will inform the host application that the drivers were
4620 // latencies changed. Beware, it this does not mean that the
4621 // buffer sizes have changed! You might need to update internal
4623 fprintf(stderr
, "\nRtApiAsio: driver latency may have changed!!!");
4626 case kAsioEngineVersion
:
4627 // Return the supported ASIO version of the host application. If
4628 // a host application does not implement this selector, ASIO 1.0
4629 // is assumed by the driver.
4632 case kAsioSupportsTimeInfo
:
4633 // Informs the driver whether the
4634 // asioCallbacks.bufferSwitchTimeInfo() callback is supported.
4635 // For compatibility with ASIO 1.0 drivers the host application
4636 // should always support the "old" bufferSwitch method, too.
4639 case kAsioSupportsTimeCode
:
4640 // Informs the driver wether application is interested in time
4641 // code info. If an application does not need to know about time
4642 // code, the driver has less work to do.
4649 bool RtApiAsio :: probeDeviceOpen(int device
, StreamMode mode
, int channels
,
4650 int sampleRate
, RtAudioFormat format
,
4651 int *bufferSize
, int numberOfBuffers
)
4653 // For ASIO, a duplex stream MUST use the same driver.
4654 if ( mode
== INPUT
&& stream_
.mode
== OUTPUT
&& stream_
.device
[0] != device
) {
4655 sprintf(message_
, "RtApiAsio: duplex stream must use the same device for input and output.");
4656 error(RtError::WARNING
);
4660 // Only load the driver once for duplex stream.
4662 if ( mode
!= INPUT
|| stream_
.mode
!= OUTPUT
) {
4663 if ( !drivers
.loadDriver( (char *)devices_
[device
].name
.c_str() ) ) {
4664 sprintf(message_
, "RtApiAsio: error loading driver (%s).", devices_
[device
].name
.c_str());
4665 error(RtError::DEBUG_WARNING
);
4669 result
= ASIOInit( &driverInfo
);
4670 if ( result
!= ASE_OK
) {
4672 if ( result
== ASE_HWMalfunction
)
4673 sprintf(details
, "hardware malfunction");
4674 else if ( result
== ASE_NoMemory
)
4675 sprintf(details
, "no memory");
4676 else if ( result
== ASE_NotPresent
)
4677 sprintf(details
, "driver/hardware not present");
4679 sprintf(details
, "unspecified");
4680 sprintf(message_
, "RtApiAsio: error (%s) initializing driver (%s).", details
, devices_
[device
].name
.c_str());
4681 error(RtError::DEBUG_WARNING
);
4686 // Check the device channel count.
4687 long inputChannels
, outputChannels
;
4688 result
= ASIOGetChannels( &inputChannels
, &outputChannels
);
4689 if ( result
!= ASE_OK
) {
4690 drivers
.removeCurrentDriver();
4691 sprintf(message_
, "RtApiAsio: error getting input/output channel count (%s).",
4692 devices_
[device
].name
.c_str());
4693 error(RtError::DEBUG_WARNING
);
4697 if ( ( mode
== OUTPUT
&& channels
> outputChannels
) ||
4698 ( mode
== INPUT
&& channels
> inputChannels
) ) {
4699 drivers
.removeCurrentDriver();
4700 sprintf(message_
, "RtApiAsio: driver (%s) does not support requested channel count (%d).",
4701 devices_
[device
].name
.c_str(), channels
);
4702 error(RtError::DEBUG_WARNING
);
4705 stream_
.nDeviceChannels
[mode
] = channels
;
4706 stream_
.nUserChannels
[mode
] = channels
;
4708 // Verify the sample rate is supported.
4709 result
= ASIOCanSampleRate( (ASIOSampleRate
) sampleRate
);
4710 if ( result
!= ASE_OK
) {
4711 drivers
.removeCurrentDriver();
4712 sprintf(message_
, "RtApiAsio: driver (%s) does not support requested sample rate (%d).",
4713 devices_
[device
].name
.c_str(), sampleRate
);
4714 error(RtError::DEBUG_WARNING
);
4718 // Set the sample rate.
4719 result
= ASIOSetSampleRate( (ASIOSampleRate
) sampleRate
);
4720 if ( result
!= ASE_OK
) {
4721 drivers
.removeCurrentDriver();
4722 sprintf(message_
, "RtApiAsio: driver (%s) error setting sample rate (%d).",
4723 devices_
[device
].name
.c_str(), sampleRate
);
4724 error(RtError::DEBUG_WARNING
);
4728 // Determine the driver data type.
4729 ASIOChannelInfo channelInfo
;
4730 channelInfo
.channel
= 0;
4731 if ( mode
== OUTPUT
) channelInfo
.isInput
= false;
4732 else channelInfo
.isInput
= true;
4733 result
= ASIOGetChannelInfo( &channelInfo
);
4734 if ( result
!= ASE_OK
) {
4735 drivers
.removeCurrentDriver();
4736 sprintf(message_
, "RtApiAsio: driver (%s) error getting data format.",
4737 devices_
[device
].name
.c_str());
4738 error(RtError::DEBUG_WARNING
);
4742 // Assuming WINDOWS host is always little-endian.
4743 stream_
.doByteSwap
[mode
] = false;
4744 stream_
.userFormat
= format
;
4745 stream_
.deviceFormat
[mode
] = 0;
4746 if ( channelInfo
.type
== ASIOSTInt16MSB
|| channelInfo
.type
== ASIOSTInt16LSB
) {
4747 stream_
.deviceFormat
[mode
] = RTAUDIO_SINT16
;
4748 if ( channelInfo
.type
== ASIOSTInt16MSB
) stream_
.doByteSwap
[mode
] = true;
4750 else if ( channelInfo
.type
== ASIOSTInt32MSB
|| channelInfo
.type
== ASIOSTInt32LSB
) {
4751 stream_
.deviceFormat
[mode
] = RTAUDIO_SINT32
;
4752 if ( channelInfo
.type
== ASIOSTInt32MSB
) stream_
.doByteSwap
[mode
] = true;
4754 else if ( channelInfo
.type
== ASIOSTFloat32MSB
|| channelInfo
.type
== ASIOSTFloat32LSB
) {
4755 stream_
.deviceFormat
[mode
] = RTAUDIO_FLOAT32
;
4756 if ( channelInfo
.type
== ASIOSTFloat32MSB
) stream_
.doByteSwap
[mode
] = true;
4758 else if ( channelInfo
.type
== ASIOSTFloat64MSB
|| channelInfo
.type
== ASIOSTFloat64LSB
) {
4759 stream_
.deviceFormat
[mode
] = RTAUDIO_FLOAT64
;
4760 if ( channelInfo
.type
== ASIOSTFloat64MSB
) stream_
.doByteSwap
[mode
] = true;
4763 if ( stream_
.deviceFormat
[mode
] == 0 ) {
4764 drivers
.removeCurrentDriver();
4765 sprintf(message_
, "RtApiAsio: driver (%s) data format not supported by RtAudio.",
4766 devices_
[device
].name
.c_str());
4767 error(RtError::DEBUG_WARNING
);
4771 // Set the buffer size. For a duplex stream, this will end up
4772 // setting the buffer size based on the input constraints, which
4774 long minSize
, maxSize
, preferSize
, granularity
;
4775 result
= ASIOGetBufferSize( &minSize
, &maxSize
, &preferSize
, &granularity
);
4776 if ( result
!= ASE_OK
) {
4777 drivers
.removeCurrentDriver();
4778 sprintf(message_
, "RtApiAsio: driver (%s) error getting buffer size.",
4779 devices_
[device
].name
.c_str());
4780 error(RtError::DEBUG_WARNING
);
4784 if ( *bufferSize
< minSize
) *bufferSize
= minSize
;
4785 else if ( *bufferSize
> maxSize
) *bufferSize
= maxSize
;
4786 else if ( granularity
== -1 ) {
4787 // Make sure bufferSize is a power of two.
4788 double power
= log10( (double) *bufferSize
) / log10( 2.0 );
4789 *bufferSize
= (int) pow( 2.0, floor(power
+0.5) );
4790 if ( *bufferSize
< minSize
) *bufferSize
= minSize
;
4791 else if ( *bufferSize
> maxSize
) *bufferSize
= maxSize
;
4792 else *bufferSize
= preferSize
;
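// Worked example of the power-of-two rounding above (comment only): a requested
// *bufferSize of 300 gives power = log2(300) ~= 8.23 and floor(8.23 + 0.5) = 8, so the
// size is snapped to 2^8 = 256; a request of 400 (log2 ~= 8.64) rounds up to 2^9 = 512.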
4795 if ( mode
== INPUT
&& stream_
.mode
== OUTPUT
&& stream_
.bufferSize
!= *bufferSize
)
4796 fprintf( stderr
, "Possible input/output buffersize discrepancy!\n" );
4798 stream_
.bufferSize
= *bufferSize
;
4799 stream_
.nBuffers
= 2;
4801 // ASIO always uses deinterleaved channels.
4802 stream_
.deInterleave
[mode
] = true;
4804 // Allocate, if necessary, our AsioHandle structure for the stream.
4805 AsioHandle
*handle
= (AsioHandle
*) stream_
.apiHandle
;
4806 if ( handle
== 0 ) {
4807 handle
= (AsioHandle
*) calloc(1, sizeof(AsioHandle
));
4808 if ( handle
== NULL
) {
4809 drivers
.removeCurrentDriver();
4810 sprintf(message_
, "RtApiAsio: error allocating AsioHandle memory (%s).",
4811 devices_
[device
].name
.c_str());
4812 error(RtError::DEBUG_WARNING
);
4815 handle
->bufferInfos
= 0;
4816 // Create a manual-reset event.
4817 handle
->condition
= CreateEvent(NULL
, // no security
4818 TRUE
, // manual-reset
4819 FALSE
, // non-signaled initially
4821 stream_
.apiHandle
= (void *) handle
;
4824 // Create the ASIO internal buffers. Since RtAudio sets up input
4825 // and output separately, we'll have to dispose of previously
4826 // created output buffers for a duplex stream.
4827 if ( mode
== INPUT
&& stream_
.mode
== OUTPUT
) {
4828 ASIODisposeBuffers();
4829 if ( handle
->bufferInfos
) free( handle
->bufferInfos
);
4832 // Allocate, initialize, and save the bufferInfos in our stream callbackInfo structure.
4833 int i
, nChannels
= stream_
.nDeviceChannels
[0] + stream_
.nDeviceChannels
[1];
4834 handle
->bufferInfos
= (ASIOBufferInfo
*) malloc( nChannels
* sizeof(ASIOBufferInfo
) );
4835 if (handle
->bufferInfos
== NULL
) {
4836 sprintf(message_
, "RtApiAsio: error allocating bufferInfo memory (%s).",
4837 devices_
[device
].name
.c_str());
4840 ASIOBufferInfo
*infos
;
4841 infos
= handle
->bufferInfos
;
4842 for ( i
=0; i
<stream_
.nDeviceChannels
[0]; i
++, infos
++ ) {
4843 infos
->isInput
= ASIOFalse
;
4844 infos
->channelNum
= i
;
4845 infos
->buffers
[0] = infos
->buffers
[1] = 0;
4847 for ( i
=0; i
<stream_
.nDeviceChannels
[1]; i
++, infos
++ ) {
4848 infos
->isInput
= ASIOTrue
;
4849 infos
->channelNum
= i
;
4850 infos
->buffers
[0] = infos
->buffers
[1] = 0;
4853 // Set up the ASIO callback structure and create the ASIO data buffers.
4854 asioCallbacks
.bufferSwitch
= &bufferSwitch
;
4855 asioCallbacks
.sampleRateDidChange
= &sampleRateChanged
;
4856 asioCallbacks
.asioMessage
= &asioMessages
;
4857 asioCallbacks
.bufferSwitchTimeInfo
= NULL
;
4858 result
= ASIOCreateBuffers( handle
->bufferInfos
, nChannels
, stream_
.bufferSize
, &asioCallbacks
);
4859 if ( result
!= ASE_OK
) {
4860 sprintf(message_
, "RtApiAsio: driver (%s) error creating buffers.",
4861 devices_
[device
].name
.c_str());
4865 // Set flags for buffer conversion.
4866 stream_
.doConvertBuffer
[mode
] = false;
4867 if (stream_
.userFormat
!= stream_
.deviceFormat
[mode
])
4868 stream_
.doConvertBuffer
[mode
] = true;
4869 if (stream_
.nUserChannels
[mode
] < stream_
.nDeviceChannels
[mode
])
4870 stream_
.doConvertBuffer
[mode
] = true;
4871 if (stream_
.nUserChannels
[mode
] > 1 && stream_
.deInterleave
[mode
])
4872 stream_
.doConvertBuffer
[mode
] = true;
4874 // Allocate necessary internal buffers
4875 if ( stream_
.nUserChannels
[0] != stream_
.nUserChannels
[1] ) {
4878 if (stream_
.nUserChannels
[0] >= stream_
.nUserChannels
[1])
4879 buffer_bytes
= stream_
.nUserChannels
[0];
4881 buffer_bytes
= stream_
.nUserChannels
[1];
4883 buffer_bytes
*= *bufferSize
* formatBytes(stream_
.userFormat
);
4884 if (stream_
.userBuffer
) free(stream_
.userBuffer
);
4885 stream_
.userBuffer
= (char *) calloc(buffer_bytes
, 1);
4886 if (stream_
.userBuffer
== NULL
) {
4887 sprintf(message_
, "RtApiAsio: error allocating user buffer memory (%s).",
4888 devices_
[device
].name
.c_str());
4893 if ( stream_
.doConvertBuffer
[mode
] ) {
4896 bool makeBuffer
= true;
4897 if ( mode
== OUTPUT
)
4898 buffer_bytes
= stream_
.nDeviceChannels
[0] * formatBytes(stream_
.deviceFormat
[0]);
4899 else { // mode == INPUT
4900 buffer_bytes
= stream_
.nDeviceChannels
[1] * formatBytes(stream_
.deviceFormat
[1]);
4901 if ( stream_
.mode
== OUTPUT
&& stream_
.deviceBuffer
) {
4902 long bytes_out
= stream_
.nDeviceChannels
[0] * formatBytes(stream_
.deviceFormat
[0]);
4903 if ( buffer_bytes
< bytes_out
) makeBuffer
= false;
4908 buffer_bytes
*= *bufferSize
;
4909 if (stream_
.deviceBuffer
) free(stream_
.deviceBuffer
);
4910 stream_
.deviceBuffer
= (char *) calloc(buffer_bytes
, 1);
4911 if (stream_
.deviceBuffer
== NULL
) {
4912 sprintf(message_
, "RtApiAsio: error allocating device buffer memory (%s).",
4913 devices_
[device
].name
.c_str());
4919 stream_
.device
[mode
] = device
;
4920 stream_
.state
= STREAM_STOPPED
;
4921 if ( stream_
.mode
== OUTPUT
&& mode
== INPUT
)
4922 // We had already set up an output stream.
4923 stream_
.mode
= DUPLEX
;
4925 stream_
.mode
= mode
;
4926 stream_
.sampleRate
= sampleRate
;
4927 asioCallbackInfo
= &stream_
.callbackInfo
;
4928 stream_
.callbackInfo
.object
= (void *) this;
4933 ASIODisposeBuffers();
4934 drivers
.removeCurrentDriver();
4937 CloseHandle( handle
->condition
);
4938 if ( handle
->bufferInfos
)
4939 free( handle
->bufferInfos
);
4941 stream_
.apiHandle
= 0;
4944 if (stream_
.userBuffer
) {
4945 free(stream_
.userBuffer
);
4946 stream_
.userBuffer
= 0;
4949 error(RtError::WARNING
);
void RtApiAsio :: closeStream()
{
  // We don't want an exception to be thrown here because this
  // function is called by our class destructor.  So, do our own
  // error reporting and return.
  if ( stream_.mode == UNINITIALIZED ) {
    sprintf(message_, "RtApiAsio::closeStream(): no open stream to close!");
    error(RtError::WARNING);
    return;
  }

  if (stream_.state == STREAM_RUNNING)
    ASIOStop();

  ASIODisposeBuffers();
  drivers.removeCurrentDriver();

  AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
  if ( handle ) {
    CloseHandle( handle->condition );
    if ( handle->bufferInfos )
      free( handle->bufferInfos );
    free( handle );
    stream_.apiHandle = 0;
  }

  if (stream_.userBuffer) {
    free(stream_.userBuffer);
    stream_.userBuffer = 0;
  }

  if (stream_.deviceBuffer) {
    free(stream_.deviceBuffer);
    stream_.deviceBuffer = 0;
  }

  stream_.mode = UNINITIALIZED;
}
void RtApiAsio :: setStreamCallback(RtAudioCallback callback, void *userData)
{
  if ( stream_.callbackInfo.usingCallback ) {
    sprintf(message_, "RtApiAsio: A callback is already set for this stream!");
    error(RtError::WARNING);
    return;
  }

  stream_.callbackInfo.callback = (void *) callback;
  stream_.callbackInfo.userData = userData;
  stream_.callbackInfo.usingCallback = true;
}

void RtApiAsio :: cancelStreamCallback()
{
  if (stream_.callbackInfo.usingCallback) {

    if (stream_.state == STREAM_RUNNING)
      stopStream();

    MUTEX_LOCK(&stream_.mutex);

    stream_.callbackInfo.usingCallback = false;
    stream_.callbackInfo.userData = NULL;
    stream_.state = STREAM_STOPPED;
    stream_.callbackInfo.callback = NULL;

    MUTEX_UNLOCK(&stream_.mutex);
  }
}
void RtApiAsio :: startStream()
{
  if (stream_.state == STREAM_RUNNING) return;

  MUTEX_LOCK(&stream_.mutex);

  ASIOError result = ASIOStart();
  if ( result != ASE_OK ) {
    sprintf(message_, "RtApiAsio: error starting device (%s).",
            devices_[stream_.device[0]].name.c_str());
    MUTEX_UNLOCK(&stream_.mutex);
    error(RtError::DRIVER_ERROR);
  }
  AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
  handle->stopStream = false;
  stream_.state = STREAM_RUNNING;

  MUTEX_UNLOCK(&stream_.mutex);
}

void RtApiAsio :: stopStream()
{
  if (stream_.state == STREAM_STOPPED) return;

  // Change the state before the lock to improve shutdown response
  // when using a callback.
  stream_.state = STREAM_STOPPED;
  MUTEX_LOCK(&stream_.mutex);

  ASIOError result = ASIOStop();
  if ( result != ASE_OK ) {
    sprintf(message_, "RtApiAsio: error stopping device (%s).",
            devices_[stream_.device[0]].name.c_str());
    MUTEX_UNLOCK(&stream_.mutex);
    error(RtError::DRIVER_ERROR);
  }

  MUTEX_UNLOCK(&stream_.mutex);
}

void RtApiAsio :: abortStream()
{
  stopStream();
}

void RtApiAsio :: tickStream()
{
  if (stream_.state == STREAM_STOPPED)
    return;

  if (stream_.callbackInfo.usingCallback) {
    sprintf(message_, "RtApiAsio: tickStream() should not be used when a callback function is set!");
    error(RtError::WARNING);
    return;
  }

  AsioHandle *handle = (AsioHandle *) stream_.apiHandle;

  MUTEX_LOCK(&stream_.mutex);

  // Release the stream_mutex here and wait for the event
  // to become signaled by the callback process.
  MUTEX_UNLOCK(&stream_.mutex);
  WaitForMultipleObjects(1, &handle->condition, FALSE, INFINITE);
  ResetEvent( handle->condition );
}
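// Illustrative note (not part of the original source): in blocking mode the
// handshake works as follows -- tickStream() waits on handle->condition, and
// callbackEvent() signals it (SetEvent) once stream_.userBuffer has been consumed
// or filled for the current ASIO buffer half.  Assuming the RtAudio-level
// getStreamBuffer() accessor of this release, a blocking client would look like:
//
//   char *buffer = audio->getStreamBuffer();
//   // fill buffer with bufferSize frames of output data
//   audio->tickStream();  // returns once the callback has handed the data to ASIO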
5098 void RtApiAsio :: callbackEvent(long bufferIndex
)
5102 if (stream_
.state
== STREAM_STOPPED
) return;
5104 CallbackInfo
*info
= (CallbackInfo
*) &stream_
.callbackInfo
;
5105 AsioHandle
*handle
= (AsioHandle
*) stream_
.apiHandle
;
5106 if ( info
->usingCallback
&& handle
->stopStream
) {
5107 // Check if the stream should be stopped (via the previous user
5108 // callback return value). We stop the stream here, rather than
5109 // after the function call, so that output data can first be
5115 MUTEX_LOCK(&stream_
.mutex
);
5117 // Invoke user callback first, to get fresh output data.
5118 if ( info
->usingCallback
) {
5119 RtAudioCallback callback
= (RtAudioCallback
) info
->callback
;
5120 if ( callback(stream_
.userBuffer
, stream_
.bufferSize
, info
->userData
) )
5121 handle
->stopStream
= true;
5125 int nChannels
= stream_
.nDeviceChannels
[0] + stream_
.nDeviceChannels
[1];
5126 if ( stream_
.mode
== OUTPUT
|| stream_
.mode
== DUPLEX
) {
5128 bufferBytes
= stream_
.bufferSize
* formatBytes(stream_
.deviceFormat
[0]);
5129 if (stream_
.doConvertBuffer
[0]) {
5131 convertStreamBuffer(OUTPUT
);
5132 if ( stream_
.doByteSwap
[0] )
5133 byteSwapBuffer(stream_
.deviceBuffer
,
5134 stream_
.bufferSize
* stream_
.nDeviceChannels
[0],
5135 stream_
.deviceFormat
[0]);
5137 // Always de-interleave ASIO output data.
5139 for ( int i
=0; i
<nChannels
; i
++ ) {
5140 if ( handle
->bufferInfos
[i
].isInput
!= ASIOTrue
)
5141 memcpy(handle
->bufferInfos
[i
].buffers
[bufferIndex
],
5142 &stream_
.deviceBuffer
[j
++*bufferBytes
], bufferBytes
);
5145 else { // single channel only
5147 if (stream_
.doByteSwap
[0])
5148 byteSwapBuffer(stream_
.userBuffer
,
5149 stream_
.bufferSize
* stream_
.nUserChannels
[0],
5150 stream_
.userFormat
);
5152 for ( int i
=0; i
<nChannels
; i
++ ) {
5153 if ( handle
->bufferInfos
[i
].isInput
!= ASIOTrue
) {
5154 memcpy(handle
->bufferInfos
[i
].buffers
[bufferIndex
], stream_
.userBuffer
, bufferBytes
);
5161 if ( stream_
.mode
== INPUT
|| stream_
.mode
== DUPLEX
) {
5163 bufferBytes
= stream_
.bufferSize
* formatBytes(stream_
.deviceFormat
[1]);
5164 if (stream_
.doConvertBuffer
[1]) {
5166 // Always interleave ASIO input data.
5168 for ( int i
=0; i
<nChannels
; i
++ ) {
5169 if ( handle
->bufferInfos
[i
].isInput
== ASIOTrue
)
5170 memcpy(&stream_
.deviceBuffer
[j
++*bufferBytes
],
5171 handle
->bufferInfos
[i
].buffers
[bufferIndex
],
5175 if ( stream_
.doByteSwap
[1] )
5176 byteSwapBuffer(stream_
.deviceBuffer
,
5177 stream_
.bufferSize
* stream_
.nDeviceChannels
[1],
5178 stream_
.deviceFormat
[1]);
5179 convertStreamBuffer(INPUT
);
5182 else { // single channel only
5183 for ( int i
=0; i
<nChannels
; i
++ ) {
5184 if ( handle
->bufferInfos
[i
].isInput
== ASIOTrue
) {
5185 memcpy(stream_
.userBuffer
,
5186 handle
->bufferInfos
[i
].buffers
[bufferIndex
],
5192 if (stream_
.doByteSwap
[1])
5193 byteSwapBuffer(stream_
.userBuffer
,
5194 stream_
.bufferSize
* stream_
.nUserChannels
[1],
5195 stream_
.userFormat
);
5199 if ( !info
->usingCallback
)
5200 SetEvent( handle
->condition
);
5202 MUTEX_UNLOCK(&stream_
.mutex
);
//******************** End of __WINDOWS_ASIO__ *********************//

#endif

#if defined(__WINDOWS_DS__) // Windows DirectSound API

#define DIRECTSOUND_VERSION 0x0500  // this #def is here for compatibility with newer DirectSound libs.
5212 // A structure to hold various information related to the DirectSound
5213 // API implementation.
5220 // Declarations for utility functions, callbacks, and structures
5221 // specific to the DirectSound implementation.
5222 static bool CALLBACK
deviceCountCallback(LPGUID lpguid
,
5223 LPCSTR lpcstrDescription
,
5224 LPCSTR lpcstrModule
,
5227 static bool CALLBACK
deviceInfoCallback(LPGUID lpguid
,
5228 LPCSTR lpcstrDescription
,
5229 LPCSTR lpcstrModule
,
5232 static bool CALLBACK
defaultDeviceCallback(LPGUID lpguid
,
5233 LPCSTR lpcstrDescription
,
5234 LPCSTR lpcstrModule
,
5237 static bool CALLBACK
deviceIdCallback(LPGUID lpguid
,
5238 LPCSTR lpcstrDescription
,
5239 LPCSTR lpcstrModule
,
5242 static char* getErrorString(int code
);
5244 #if defined(__WINDOWS_PTHREAD__)
5245 extern "C" void * callbackHandler(void * ptr
);
5247 extern "C" unsigned __stdcall
callbackHandler(void *ptr
);
5257 RtApiDs :: RtApiDs()
5261 if (nDevices_
<= 0) {
5262 sprintf(message_
, "RtApiDs: no Windows DirectSound audio devices found!");
5263 error(RtError::NO_DEVICES_FOUND
);
5267 RtApiDs :: ~RtApiDs()
5269 if ( stream_
.mode
!= UNINITIALIZED
) closeStream();
5272 int RtApiDs :: getDefaultInputDevice(void)
5275 info
.name
[0] = '\0';
5277 // Enumerate through devices to find the default output.
5278 HRESULT result
= DirectSoundCaptureEnumerate((LPDSENUMCALLBACK
)defaultDeviceCallback
, &info
);
5279 if ( FAILED(result
) ) {
5280 sprintf(message_
, "RtApiDs: Error performing default input device enumeration: %s.",
5281 getErrorString(result
));
5282 error(RtError::WARNING
);
5286 for ( int i
=0; i
<nDevices_
; i
++ ) {
5287 if ( strncmp( info
.name
, devices_
[i
].name
.c_str(), 64 ) == 0 ) return i
;
5294 int RtApiDs :: getDefaultOutputDevice(void)
5297 info
.name
[0] = '\0';
5299 // Enumerate through devices to find the default output.
5300 HRESULT result
= DirectSoundEnumerate((LPDSENUMCALLBACK
)defaultDeviceCallback
, &info
);
5301 if ( FAILED(result
) ) {
5302 sprintf(message_
, "RtApiDs: Error performing default output device enumeration: %s.",
5303 getErrorString(result
));
5304 error(RtError::WARNING
);
5308 for ( int i
=0; i
<nDevices_
; i
++ )
5309 if ( strncmp( info
.name
, devices_
[i
].name
.c_str(), 64 ) == 0 ) return i
;
5314 void RtApiDs :: initialize(void)
5316 int i
, ins
= 0, outs
= 0, count
= 0;
5320 // Count DirectSound devices.
5321 result
= DirectSoundEnumerate((LPDSENUMCALLBACK
)deviceCountCallback
, &outs
);
5322 if ( FAILED(result
) ) {
5323 sprintf(message_
, "RtApiDs: Unable to enumerate through sound playback devices: %s.",
5324 getErrorString(result
));
5325 error(RtError::DRIVER_ERROR
);
5328 // Count DirectSoundCapture devices.
5329 result
= DirectSoundCaptureEnumerate((LPDSENUMCALLBACK
)deviceCountCallback
, &ins
);
5330 if ( FAILED(result
) ) {
5331 sprintf(message_
, "RtApiDs: Unable to enumerate through sound capture devices: %s.",
5332 getErrorString(result
));
5333 error(RtError::DRIVER_ERROR
);
5337 if (count
== 0) return;
5339 std::vector
<enum_info
> info(count
);
5340 for (i
=0; i
<count
; i
++) {
5341 info
[i
].name
[0] = '\0';
5342 if (i
< outs
) info
[i
].isInput
= false;
5343 else info
[i
].isInput
= true;
5346 // Get playback device info and check capabilities.
5347 result
= DirectSoundEnumerate((LPDSENUMCALLBACK
)deviceInfoCallback
, &info
[0]);
5348 if ( FAILED(result
) ) {
5349 sprintf(message_
, "RtApiDs: Unable to enumerate through sound playback devices: %s.",
5350 getErrorString(result
));
5351 error(RtError::DRIVER_ERROR
);
5354 // Get capture device info and check capabilities.
5355 result
= DirectSoundCaptureEnumerate((LPDSENUMCALLBACK
)deviceInfoCallback
, &info
[0]);
5356 if ( FAILED(result
) ) {
5357 sprintf(message_
, "RtApiDs: Unable to enumerate through sound capture devices: %s.",
5358 getErrorString(result
));
5359 error(RtError::DRIVER_ERROR
);
5362 // Create device structures for valid devices and write device names
5363 // to each. Devices are considered invalid if they cannot be
5364 // opened, they report < 1 supported channels, or they report no
5365 // supported data (capture only).
5368 for (i
=0; i
<count
; i
++) {
5369 if ( info
[i
].isValid
) {
5370 device
.name
.erase();
5371 device
.name
.append( (const char *)info
[i
].name
, strlen(info
[i
].name
)+1);
5372 devices_
.push_back(device
);
5376 nDevices_
= devices_
.size();
5380 void RtApiDs :: probeDeviceInfo(RtApiDevice
*info
)
5383 strncpy( dsinfo
.name
, info
->name
.c_str(), 64 );
5384 dsinfo
.isValid
= false;
5386 // Enumerate through input devices to find the id (if it exists).
5387 HRESULT result
= DirectSoundCaptureEnumerate((LPDSENUMCALLBACK
)deviceIdCallback
, &dsinfo
);
5388 if ( FAILED(result
) ) {
5389 sprintf(message_
, "RtApiDs: Error performing input device id enumeration: %s.",
5390 getErrorString(result
));
5391 error(RtError::WARNING
);
5395 // Do capture probe first.
5396 if ( dsinfo
.isValid
== false )
5397 goto playback_probe
;
5399 LPDIRECTSOUNDCAPTURE input
;
5400 result
= DirectSoundCaptureCreate( dsinfo
.id
, &input
, NULL
);
5401 if ( FAILED(result
) ) {
5402 sprintf(message_
, "RtApiDs: Could not create capture object (%s): %s.",
5403 info
->name
.c_str(), getErrorString(result
));
5404 error(RtError::WARNING
);
5405 goto playback_probe
;
5409 in_caps
.dwSize
= sizeof(in_caps
);
5410 result
= input
->GetCaps( &in_caps
);
5411 if ( FAILED(result
) ) {
5413 sprintf(message_
, "RtApiDs: Could not get capture capabilities (%s): %s.",
5414 info
->name
.c_str(), getErrorString(result
));
5415 error(RtError::WARNING
);
5416 goto playback_probe
;
5419 // Get input channel information.
5420 info
->minInputChannels
= 1;
5421 info
->maxInputChannels
= in_caps
.dwChannels
;
5423 // Get sample rate and format information.
5424 info
->sampleRates
.clear();
5425 if( in_caps
.dwChannels
== 2 ) {
5426 if( in_caps
.dwFormats
& WAVE_FORMAT_1S16
) info
->nativeFormats
|= RTAUDIO_SINT16
;
5427 if( in_caps
.dwFormats
& WAVE_FORMAT_2S16
) info
->nativeFormats
|= RTAUDIO_SINT16
;
5428 if( in_caps
.dwFormats
& WAVE_FORMAT_4S16
) info
->nativeFormats
|= RTAUDIO_SINT16
;
5429 if( in_caps
.dwFormats
& WAVE_FORMAT_1S08
) info
->nativeFormats
|= RTAUDIO_SINT8
;
5430 if( in_caps
.dwFormats
& WAVE_FORMAT_2S08
) info
->nativeFormats
|= RTAUDIO_SINT8
;
5431 if( in_caps
.dwFormats
& WAVE_FORMAT_4S08
) info
->nativeFormats
|= RTAUDIO_SINT8
;
5433 if ( info
->nativeFormats
& RTAUDIO_SINT16
) {
5434 if( in_caps
.dwFormats
& WAVE_FORMAT_1S16
) info
->sampleRates
.push_back( 11025 );
5435 if( in_caps
.dwFormats
& WAVE_FORMAT_2S16
) info
->sampleRates
.push_back( 22050 );
5436 if( in_caps
.dwFormats
& WAVE_FORMAT_4S16
) info
->sampleRates
.push_back( 44100 );
5438 else if ( info
->nativeFormats
& RTAUDIO_SINT8
) {
5439 if( in_caps
.dwFormats
& WAVE_FORMAT_1S08
) info
->sampleRates
.push_back( 11025 );
5440 if( in_caps
.dwFormats
& WAVE_FORMAT_2S08
) info
->sampleRates
.push_back( 22050 );
5441 if( in_caps
.dwFormats
& WAVE_FORMAT_4S08
) info
->sampleRates
.push_back( 44100 );
5444 else if ( in_caps
.dwChannels
== 1 ) {
5445 if( in_caps
.dwFormats
& WAVE_FORMAT_1M16
) info
->nativeFormats
|= RTAUDIO_SINT16
;
5446 if( in_caps
.dwFormats
& WAVE_FORMAT_2M16
) info
->nativeFormats
|= RTAUDIO_SINT16
;
5447 if( in_caps
.dwFormats
& WAVE_FORMAT_4M16
) info
->nativeFormats
|= RTAUDIO_SINT16
;
5448 if( in_caps
.dwFormats
& WAVE_FORMAT_1M08
) info
->nativeFormats
|= RTAUDIO_SINT8
;
5449 if( in_caps
.dwFormats
& WAVE_FORMAT_2M08
) info
->nativeFormats
|= RTAUDIO_SINT8
;
5450 if( in_caps
.dwFormats
& WAVE_FORMAT_4M08
) info
->nativeFormats
|= RTAUDIO_SINT8
;
5452 if ( info
->nativeFormats
& RTAUDIO_SINT16
) {
5453 if( in_caps
.dwFormats
& WAVE_FORMAT_1M16
) info
->sampleRates
.push_back( 11025 );
5454 if( in_caps
.dwFormats
& WAVE_FORMAT_2M16
) info
->sampleRates
.push_back( 22050 );
5455 if( in_caps
.dwFormats
& WAVE_FORMAT_4M16
) info
->sampleRates
.push_back( 44100 );
5457 else if ( info
->nativeFormats
& RTAUDIO_SINT8
) {
5458 if( in_caps
.dwFormats
& WAVE_FORMAT_1M08
) info
->sampleRates
.push_back( 11025 );
5459 if( in_caps
.dwFormats
& WAVE_FORMAT_2M08
) info
->sampleRates
.push_back( 22050 );
5460 if( in_caps
.dwFormats
& WAVE_FORMAT_4M08
) info
->sampleRates
.push_back( 44100 );
5463 else info
->minInputChannels
= 0; // technically, this would be an error
5469 dsinfo
.isValid
= false;
5471 // Enumerate through output devices to find the id (if it exists).
5472 result
= DirectSoundEnumerate((LPDSENUMCALLBACK
)deviceIdCallback
, &dsinfo
);
5473 if ( FAILED(result
) ) {
5474 sprintf(message_
, "RtApiDs: Error performing output device id enumeration: %s.",
5475 getErrorString(result
));
5476 error(RtError::WARNING
);
5480 // Now do playback probe.
5481 if ( dsinfo
.isValid
== false )
5482 goto check_parameters
;
5484 LPDIRECTSOUND output
;
5486 result
= DirectSoundCreate( dsinfo
.id
, &output
, NULL
);
5487 if ( FAILED(result
) ) {
5488 sprintf(message_
, "RtApiDs: Could not create playback object (%s): %s.",
5489 info
->name
.c_str(), getErrorString(result
));
5490 error(RtError::WARNING
);
5491 goto check_parameters
;
5494 out_caps
.dwSize
= sizeof(out_caps
);
5495 result
= output
->GetCaps( &out_caps
);
5496 if ( FAILED(result
) ) {
5498 sprintf(message_
, "RtApiDs: Could not get playback capabilities (%s): %s.",
5499 info
->name
.c_str(), getErrorString(result
));
5500 error(RtError::WARNING
);
5501 goto check_parameters
;
5504 // Get output channel information.
5505 info
->minOutputChannels
= 1;
5506 info
->maxOutputChannels
= ( out_caps
.dwFlags
& DSCAPS_PRIMARYSTEREO
) ? 2 : 1;
5508 // Get sample rate information. Use capture device rate information
5510 if ( info
->sampleRates
.size() == 0 ) {
5511 info
->sampleRates
.push_back( (int) out_caps
.dwMinSecondarySampleRate
);
5512 info
->sampleRates
.push_back( (int) out_caps
.dwMaxSecondarySampleRate
);
5515 // Check input rates against output rate range.
5516 for ( int i
=info
->sampleRates
.size()-1; i
>=0; i
-- ) {
5517 if ( (unsigned int) info
->sampleRates
[i
] > out_caps
.dwMaxSecondarySampleRate
)
5518 info
->sampleRates
.erase( info
->sampleRates
.begin() + i
);
5520 while ( info
->sampleRates
.size() > 0 &&
5521 ((unsigned int) info
->sampleRates
[0] < out_caps
.dwMinSecondarySampleRate
) ) {
5522 info
->sampleRates
.erase( info
->sampleRates
.begin() );
5526 // Get format information.
5527 if ( out_caps
.dwFlags
& DSCAPS_PRIMARY16BIT
) info
->nativeFormats
|= RTAUDIO_SINT16
;
5528 if ( out_caps
.dwFlags
& DSCAPS_PRIMARY8BIT
) info
->nativeFormats
|= RTAUDIO_SINT8
;
5533 if ( info
->maxInputChannels
== 0 && info
->maxOutputChannels
== 0 ) {
5534 sprintf(message_
, "RtApiDs: no reported input or output channels for device (%s).",
5535 info
->name
.c_str());
5536 error(RtError::DEBUG_WARNING
);
5539 if ( info
->sampleRates
.size() == 0 || info
->nativeFormats
== 0 ) {
5540 sprintf(message_
, "RtApiDs: no reported sample rates or data formats for device (%s).",
5541 info
->name
.c_str());
5542 error(RtError::DEBUG_WARNING
);
5546 // Determine duplex status.
5547 if (info
->maxInputChannels
< info
->maxOutputChannels
)
5548 info
->maxDuplexChannels
= info
->maxInputChannels
;
5550 info
->maxDuplexChannels
= info
->maxOutputChannels
;
5551 if (info
->minInputChannels
< info
->minOutputChannels
)
5552 info
->minDuplexChannels
= info
->minInputChannels
;
5554 info
->minDuplexChannels
= info
->minOutputChannels
;
5556 if ( info
->maxDuplexChannels
> 0 ) info
->hasDuplexSupport
= true;
5557 else info
->hasDuplexSupport
= false;
5559 info
->probed
= true;
5564 bool RtApiDs :: probeDeviceOpen( int device
, StreamMode mode
, int channels
,
5565 int sampleRate
, RtAudioFormat format
,
5566 int *bufferSize
, int numberOfBuffers
)
5569 HWND hWnd
= GetForegroundWindow();
5571 // According to a note in PortAudio, using GetDesktopWindow()
5572 // instead of GetForegroundWindow() is supposed to avoid problems
5573 // that occur when the application's window is not the foreground
5574 // window. Also, if the application window closes before the
5575 // DirectSound buffer, DirectSound can crash. However, for console
5576 // applications, no sound was produced when using GetDesktopWindow().
5582 // Check the numberOfBuffers parameter and limit the lowest value to
5583 // two. This is a judgement call and a value of two is probably too
5584 // low for capture, but it should work for playback.
5585 if (numberOfBuffers
< 2)
5588 nBuffers
= numberOfBuffers
;
5590 // Define the wave format structure (16-bit PCM, srate, channels)
5591 WAVEFORMATEX waveFormat
;
5592 ZeroMemory(&waveFormat
, sizeof(WAVEFORMATEX
));
5593 waveFormat
.wFormatTag
= WAVE_FORMAT_PCM
;
5594 waveFormat
.nChannels
= channels
;
5595 waveFormat
.nSamplesPerSec
= (unsigned long) sampleRate
;
5597 // Determine the data format.
5598 if ( devices_
[device
].nativeFormats
) { // 8-bit and/or 16-bit support
5599 if ( format
== RTAUDIO_SINT8
) {
5600 if ( devices_
[device
].nativeFormats
& RTAUDIO_SINT8
)
5601 waveFormat
.wBitsPerSample
= 8;
5603 waveFormat
.wBitsPerSample
= 16;
5606 if ( devices_
[device
].nativeFormats
& RTAUDIO_SINT16
)
5607 waveFormat
.wBitsPerSample
= 16;
5609 waveFormat
.wBitsPerSample
= 8;
5613 sprintf(message_
, "RtApiDs: no reported data formats for device (%s).",
5614 devices_
[device
].name
.c_str());
5615 error(RtError::DEBUG_WARNING
);
5619 waveFormat
.nBlockAlign
= waveFormat
.nChannels
* waveFormat
.wBitsPerSample
/ 8;
5620 waveFormat
.nAvgBytesPerSec
= waveFormat
.nSamplesPerSec
* waveFormat
.nBlockAlign
;
  void *ohandle = 0, *bhandle = 0;
  strncpy( dsinfo.name, devices_[device].name.c_str(), 64 );
  dsinfo.isValid = false;
  if ( mode == OUTPUT ) {

    if ( devices_[device].maxOutputChannels < channels ) {
      sprintf(message_, "RtApiDs: requested channels (%d) > than supported (%d) by device (%s).",
              channels, devices_[device].maxOutputChannels, devices_[device].name.c_str());
      error(RtError::DEBUG_WARNING);
      return FAILURE;
    }

    // Enumerate through output devices to find the id (if it exists).
    result = DirectSoundEnumerate((LPDSENUMCALLBACK)deviceIdCallback, &dsinfo);
    if ( FAILED(result) ) {
      sprintf(message_, "RtApiDs: Error performing output device id enumeration: %s.",
              getErrorString(result));
      error(RtError::DEBUG_WARNING);
      return FAILURE;
    }

    if ( dsinfo.isValid == false ) {
      sprintf(message_, "RtApiDs: output device (%s) id not found!", devices_[device].name.c_str());
      error(RtError::DEBUG_WARNING);
      return FAILURE;
    }

    LPGUID id = dsinfo.id;
    LPDIRECTSOUND object;
    LPDIRECTSOUNDBUFFER buffer;
    DSBUFFERDESC bufferDescription;

    result = DirectSoundCreate( id, &object, NULL );
    if ( FAILED(result) ) {
      sprintf(message_, "RtApiDs: Could not create playback object (%s): %s.",
              devices_[device].name.c_str(), getErrorString(result));
      error(RtError::DEBUG_WARNING);
      return FAILURE;
    }

    // Set cooperative level to DSSCL_EXCLUSIVE
    result = object->SetCooperativeLevel(hWnd, DSSCL_EXCLUSIVE);
    if ( FAILED(result) ) {
      sprintf(message_, "RtApiDs: Unable to set cooperative level (%s): %s.",
              devices_[device].name.c_str(), getErrorString(result));
      error(RtError::WARNING);
      goto error;
    }

    // Even though we will write to the secondary buffer, we need to
    // access the primary buffer to set the correct output format
    // (since the default is 8-bit, 22 kHz!).  Setup the DS primary
    // buffer description.
    ZeroMemory(&bufferDescription, sizeof(DSBUFFERDESC));
    bufferDescription.dwSize = sizeof(DSBUFFERDESC);
    bufferDescription.dwFlags = DSBCAPS_PRIMARYBUFFER;
    // Obtain the primary buffer
    result = object->CreateSoundBuffer(&bufferDescription, &buffer, NULL);
    if ( FAILED(result) ) {
      sprintf(message_, "RtApiDs: Unable to access primary buffer (%s): %s.",
              devices_[device].name.c_str(), getErrorString(result));
      error(RtError::WARNING);
      goto error;
    }

    // Set the primary DS buffer sound format.
    result = buffer->SetFormat(&waveFormat);
    if ( FAILED(result) ) {
      sprintf(message_, "RtApiDs: Unable to set primary buffer format (%s): %s.",
              devices_[device].name.c_str(), getErrorString(result));
      error(RtError::WARNING);
      goto error;
    }

    // Setup the secondary DS buffer description.
    buffer_size = channels * *bufferSize * nBuffers * waveFormat.wBitsPerSample / 8;
    ZeroMemory(&bufferDescription, sizeof(DSBUFFERDESC));
    bufferDescription.dwSize = sizeof(DSBUFFERDESC);
    bufferDescription.dwFlags = ( DSBCAPS_STICKYFOCUS |
                                  DSBCAPS_GETCURRENTPOSITION2 |
                                  DSBCAPS_LOCHARDWARE );  // Force hardware mixing
    bufferDescription.dwBufferBytes = buffer_size;
    bufferDescription.lpwfxFormat = &waveFormat;

    // Try to create the secondary DS buffer.  If that doesn't work,
    // try to use software mixing.  Otherwise, there's a problem.
    result = object->CreateSoundBuffer(&bufferDescription, &buffer, NULL);
    if ( FAILED(result) ) {
      bufferDescription.dwFlags = ( DSBCAPS_STICKYFOCUS |
                                    DSBCAPS_GETCURRENTPOSITION2 |
                                    DSBCAPS_LOCSOFTWARE );  // Force software mixing
      result = object->CreateSoundBuffer(&bufferDescription, &buffer, NULL);
      if ( FAILED(result) ) {
        sprintf(message_, "RtApiDs: Unable to create secondary DS buffer (%s): %s.",
                devices_[device].name.c_str(), getErrorString(result));
        error(RtError::WARNING);
        goto error;
      }
    }

    // Get the buffer size ... might be different from what we specified.
    DSBCAPS dsbcaps;
    dsbcaps.dwSize = sizeof(DSBCAPS);
    buffer->GetCaps(&dsbcaps);
    buffer_size = dsbcaps.dwBufferBytes;

    // Lock the DS buffer
    result = buffer->Lock(0, buffer_size, &audioPtr, &dataLen, NULL, NULL, 0);
    if ( FAILED(result) ) {
      sprintf(message_, "RtApiDs: Unable to lock buffer (%s): %s.",
              devices_[device].name.c_str(), getErrorString(result));
      error(RtError::WARNING);
      goto error;
    }

    // Zero the DS buffer
    ZeroMemory(audioPtr, dataLen);

    // Unlock the DS buffer
    result = buffer->Unlock(audioPtr, dataLen, NULL, 0);
    if ( FAILED(result) ) {
      sprintf(message_, "RtApiDs: Unable to unlock buffer(%s): %s.",
              devices_[device].name.c_str(), getErrorString(result));
      error(RtError::WARNING);
      goto error;
    }

    ohandle = (void *) object;
    bhandle = (void *) buffer;
    stream_.nDeviceChannels[0] = channels;
  }
  if ( mode == INPUT ) {

    if ( devices_[device].maxInputChannels < channels )
      return FAILURE;

    // Enumerate through input devices to find the id (if it exists).
    result = DirectSoundCaptureEnumerate((LPDSENUMCALLBACK)deviceIdCallback, &dsinfo);
    if ( FAILED(result) ) {
      sprintf(message_, "RtApiDs: Error performing input device id enumeration: %s.",
              getErrorString(result));
      error(RtError::DEBUG_WARNING);
      return FAILURE;
    }

    if ( dsinfo.isValid == false ) {
      sprintf(message_, "RtAudioDS: input device (%s) id not found!", devices_[device].name.c_str());
      error(RtError::DEBUG_WARNING);
      return FAILURE;
    }

    LPGUID id = dsinfo.id;
    LPDIRECTSOUNDCAPTURE object;
    LPDIRECTSOUNDCAPTUREBUFFER buffer;
    DSCBUFFERDESC bufferDescription;

    result = DirectSoundCaptureCreate( id, &object, NULL );
    if ( FAILED(result) ) {
      sprintf(message_, "RtApiDs: Could not create capture object (%s): %s.",
              devices_[device].name.c_str(), getErrorString(result));
      error(RtError::WARNING);
      goto error;
    }

    // Setup the secondary DS buffer description.
    buffer_size = channels * *bufferSize * nBuffers * waveFormat.wBitsPerSample / 8;
    ZeroMemory(&bufferDescription, sizeof(DSCBUFFERDESC));
    bufferDescription.dwSize = sizeof(DSCBUFFERDESC);
    bufferDescription.dwFlags = 0;
    bufferDescription.dwReserved = 0;
    bufferDescription.dwBufferBytes = buffer_size;
    bufferDescription.lpwfxFormat = &waveFormat;

    // Create the capture buffer.
    result = object->CreateCaptureBuffer(&bufferDescription, &buffer, NULL);
    if ( FAILED(result) ) {
      sprintf(message_, "RtApiDs: Unable to create capture buffer (%s): %s.",
              devices_[device].name.c_str(), getErrorString(result));
      error(RtError::WARNING);
      goto error;
    }

    // Lock the capture buffer
    result = buffer->Lock(0, buffer_size, &audioPtr, &dataLen, NULL, NULL, 0);
    if ( FAILED(result) ) {
      sprintf(message_, "RtApiDs: Unable to lock capture buffer (%s): %s.",
              devices_[device].name.c_str(), getErrorString(result));
      error(RtError::WARNING);
      goto error;
    }

    ZeroMemory(audioPtr, dataLen);

    // Unlock the buffer
    result = buffer->Unlock(audioPtr, dataLen, NULL, 0);
    if ( FAILED(result) ) {
      sprintf(message_, "RtApiDs: Unable to unlock capture buffer (%s): %s.",
              devices_[device].name.c_str(), getErrorString(result));
      error(RtError::WARNING);
      goto error;
    }

    ohandle = (void *) object;
    bhandle = (void *) buffer;
    stream_.nDeviceChannels[1] = channels;
  }
  stream_.userFormat = format;
  if ( waveFormat.wBitsPerSample == 8 )
    stream_.deviceFormat[mode] = RTAUDIO_SINT8;
  else
    stream_.deviceFormat[mode] = RTAUDIO_SINT16;
  stream_.nUserChannels[mode] = channels;
  *bufferSize = buffer_size / (channels * nBuffers * waveFormat.wBitsPerSample / 8);
  stream_.bufferSize = *bufferSize;

  // Set flags for buffer conversion
  stream_.doConvertBuffer[mode] = false;
  if (stream_.userFormat != stream_.deviceFormat[mode])
    stream_.doConvertBuffer[mode] = true;
  if (stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode])
    stream_.doConvertBuffer[mode] = true;
  // Allocate necessary internal buffers
  if ( stream_.nUserChannels[0] != stream_.nUserChannels[1] ) {

    long buffer_bytes;
    if (stream_.nUserChannels[0] >= stream_.nUserChannels[1])
      buffer_bytes = stream_.nUserChannels[0];
    else
      buffer_bytes = stream_.nUserChannels[1];

    buffer_bytes *= *bufferSize * formatBytes(stream_.userFormat);
    if (stream_.userBuffer) free(stream_.userBuffer);
    stream_.userBuffer = (char *) calloc(buffer_bytes, 1);
    if (stream_.userBuffer == NULL) {
      sprintf(message_, "RtApiDs: error allocating user buffer memory (%s).",
              devices_[device].name.c_str());
      goto error;
    }
  }

  if ( stream_.doConvertBuffer[mode] ) {

    long buffer_bytes;
    bool makeBuffer = true;
    if ( mode == OUTPUT )
      buffer_bytes = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
    else { // mode == INPUT
      buffer_bytes = stream_.nDeviceChannels[1] * formatBytes(stream_.deviceFormat[1]);
      if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
        long bytes_out = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
        if ( buffer_bytes < bytes_out ) makeBuffer = false;
      }
    }

    if ( makeBuffer ) {
      buffer_bytes *= *bufferSize;
      if (stream_.deviceBuffer) free(stream_.deviceBuffer);
      stream_.deviceBuffer = (char *) calloc(buffer_bytes, 1);
      if (stream_.deviceBuffer == NULL) {
        sprintf(message_, "RtApiDs: error allocating device buffer memory (%s).",
                devices_[device].name.c_str());
        goto error;
      }
    }
  }
  // Allocate our DsHandle structures for the stream.
  if ( stream_.apiHandle == 0 ) {
    handles = (DsHandle *) calloc(2, sizeof(DsHandle));
    if ( handles == NULL ) {
      sprintf(message_, "RtApiDs: Error allocating DsHandle memory (%s).",
              devices_[device].name.c_str());
      goto error;
    }
    handles[0].object = 0;
    handles[1].object = 0;
    stream_.apiHandle = (void *) handles;
  }
  else
    handles = (DsHandle *) stream_.apiHandle;
  handles[mode].object = ohandle;
  handles[mode].buffer = bhandle;

  stream_.device[mode] = device;
  stream_.state = STREAM_STOPPED;
  if ( stream_.mode == OUTPUT && mode == INPUT )
    // We had already set up an output stream.
    stream_.mode = DUPLEX;
  else
    stream_.mode = mode;
  stream_.nBuffers = nBuffers;
  stream_.sampleRate = sampleRate;

  return SUCCESS;
 error:
  if (handles[0].object) {
    LPDIRECTSOUND object = (LPDIRECTSOUND) handles[0].object;
    LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handles[0].buffer;
    if (buffer) buffer->Release();
    object->Release();
  }
  if (handles[1].object) {
    LPDIRECTSOUNDCAPTURE object = (LPDIRECTSOUNDCAPTURE) handles[1].object;
    LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handles[1].buffer;
    if (buffer) buffer->Release();
    object->Release();
  }
  free(handles);
  stream_.apiHandle = 0;

  if (stream_.userBuffer) {
    free(stream_.userBuffer);
    stream_.userBuffer = 0;
  }

  error(RtError::WARNING);
  return FAILURE;
}
void RtApiDs :: setStreamCallback(RtAudioCallback callback, void *userData)
{
  CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
  if ( info->usingCallback ) {
    sprintf(message_, "RtApiDs: A callback is already set for this stream!");
    error(RtError::WARNING);
    return;
  }

  info->callback = (void *) callback;
  info->userData = userData;
  info->usingCallback = true;
  info->object = (void *) this;

#if defined(__WINDOWS_PTHREAD__)
  pthread_attr_t attr;
  pthread_attr_init(&attr);
  // pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
  // pthread_attr_setschedpolicy(&attr, SCHED_RR);

  int err = pthread_create(&info->thread, &attr, callbackHandler, &stream_.callbackInfo);
  pthread_attr_destroy(&attr);
  if (err) {
    info->usingCallback = false;
    sprintf(message_, "RtApiDs: error starting callback thread!");
    error(RtError::THREAD_ERROR);
  }
#else
  unsigned thread_id;
  info->thread = _beginthreadex(NULL, 0, &callbackHandler,
                                &stream_.callbackInfo, 0, &thread_id);
  if (info->thread == 0) {
    info->usingCallback = false;
    sprintf(message_, "RtApiDs: error starting callback thread!");
    error(RtError::THREAD_ERROR);
  }
#endif

  // When spawning multiple threads in quick succession, it appears to be
  // necessary to wait a bit for each to initialize ... another windoism!
}
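// Added note: the callback registered above is driven from the spawned thread,
// which repeatedly calls tickStream().  A typical user callback fills (or
// consumes) the buffer handed to it and returns zero to keep streaming or a
// nonzero value to request that the stream be stopped (see the use of the
// stopStream flag in tickStream() below); the exact signature is given by the
// RtAudioCallback typedef in RtAudio.h.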
void RtApiDs :: cancelStreamCallback()
{
  if (stream_.callbackInfo.usingCallback) {

    if (stream_.state == STREAM_RUNNING)
      stopStream();

    MUTEX_LOCK(&stream_.mutex);

    stream_.callbackInfo.usingCallback = false;
    WaitForSingleObject( (HANDLE)stream_.callbackInfo.thread, INFINITE );
    CloseHandle( (HANDLE)stream_.callbackInfo.thread );
    stream_.callbackInfo.thread = 0;
    stream_.callbackInfo.callback = NULL;
    stream_.callbackInfo.userData = NULL;

    MUTEX_UNLOCK(&stream_.mutex);
  }
}
void RtApiDs :: closeStream()
{
  // We don't want an exception to be thrown here because this
  // function is called by our class destructor.  So, do our own
  // stream check here.
  if ( stream_.mode == UNINITIALIZED ) {
    sprintf(message_, "RtApiDs::closeStream(): no open stream to close!");
    error(RtError::WARNING);
    return;
  }

  if (stream_.callbackInfo.usingCallback) {
    stream_.callbackInfo.usingCallback = false;
    WaitForSingleObject( (HANDLE)stream_.callbackInfo.thread, INFINITE );
    CloseHandle( (HANDLE)stream_.callbackInfo.thread );
  }

  DsHandle *handles = (DsHandle *) stream_.apiHandle;
  if (handles) {
    if (handles[0].object) {
      LPDIRECTSOUND object = (LPDIRECTSOUND) handles[0].object;
      LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handles[0].buffer;
      if (buffer) {
        buffer->Stop();
        buffer->Release();
      }
      object->Release();
    }
    if (handles[1].object) {
      LPDIRECTSOUNDCAPTURE object = (LPDIRECTSOUNDCAPTURE) handles[1].object;
      LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handles[1].buffer;
      if (buffer) {
        buffer->Stop();
        buffer->Release();
      }
      object->Release();
    }
    free(handles);
    stream_.apiHandle = 0;
  }

  if (stream_.userBuffer) {
    free(stream_.userBuffer);
    stream_.userBuffer = 0;
  }

  if (stream_.deviceBuffer) {
    free(stream_.deviceBuffer);
    stream_.deviceBuffer = 0;
  }

  stream_.mode = UNINITIALIZED;
}
void RtApiDs :: startStream()
{
  if (stream_.state == STREAM_RUNNING) return;

  MUTEX_LOCK(&stream_.mutex);

  HRESULT result;
  DsHandle *handles = (DsHandle *) stream_.apiHandle;
  if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
    LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handles[0].buffer;
    result = buffer->Play(0, 0, DSBPLAY_LOOPING);
    if ( FAILED(result) ) {
      sprintf(message_, "RtApiDs: Unable to start buffer (%s): %s.",
              devices_[stream_.device[0]].name.c_str(), getErrorString(result));
      error(RtError::DRIVER_ERROR);
    }
  }

  if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
    LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handles[1].buffer;
    result = buffer->Start(DSCBSTART_LOOPING);
    if ( FAILED(result) ) {
      sprintf(message_, "RtApiDs: Unable to start capture buffer (%s): %s.",
              devices_[stream_.device[1]].name.c_str(), getErrorString(result));
      error(RtError::DRIVER_ERROR);
    }
  }

  stream_.state = STREAM_RUNNING;

  MUTEX_UNLOCK(&stream_.mutex);
}
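// Added note: both the playback buffer (DSBPLAY_LOOPING) and the capture
// buffer (DSCBSTART_LOOPING) are started in looping mode, so they behave as
// circular buffers; tickStream() tracks its own read/write offsets in
// handles[0].bufferPointer and handles[1].bufferPointer.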
void RtApiDs :: stopStream()
{
  if (stream_.state == STREAM_STOPPED) return;

  // Change the state before the lock to improve shutdown response
  // when using a callback.
  stream_.state = STREAM_STOPPED;
  MUTEX_LOCK(&stream_.mutex);

  // There is no specific DirectSound API call to "drain" a buffer
  // before stopping.  We can hack this for playback by writing zeroes
  // for another bufferSize * nBuffers frames.  For capture, the
  // concept is less clear so we'll repeat what we do in the
  // abortStream() case.
  HRESULT result;
  DWORD dsBufferSize;
  LPVOID buffer1 = NULL;
  LPVOID buffer2 = NULL;
  DWORD bufferSize1 = 0;
  DWORD bufferSize2 = 0;
  DsHandle *handles = (DsHandle *) stream_.apiHandle;
  if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {

    DWORD currentPos, safePos;
    long buffer_bytes = stream_.bufferSize * stream_.nDeviceChannels[0];
    buffer_bytes *= formatBytes(stream_.deviceFormat[0]);

    LPDIRECTSOUNDBUFFER dsBuffer = (LPDIRECTSOUNDBUFFER) handles[0].buffer;
    UINT nextWritePos = handles[0].bufferPointer;
    dsBufferSize = buffer_bytes * stream_.nBuffers;

    // Write zeroes for nBuffer counts.
    for (int i=0; i<stream_.nBuffers; i++) {

      // Find out where the read and "safe write" pointers are.
      result = dsBuffer->GetCurrentPosition(&currentPos, &safePos);
      if ( FAILED(result) ) {
        sprintf(message_, "RtApiDs: Unable to get current position (%s): %s.",
                devices_[stream_.device[0]].name.c_str(), getErrorString(result));
        error(RtError::DRIVER_ERROR);
      }

      if ( currentPos < nextWritePos ) currentPos += dsBufferSize; // unwrap offset
      DWORD endWrite = nextWritePos + buffer_bytes;

      // Check whether the entire write region is behind the play pointer.
      while ( currentPos < endWrite ) {
        double millis = (endWrite - currentPos) * 900.0;
        millis /= ( formatBytes(stream_.deviceFormat[0]) * stream_.sampleRate);
        if ( millis < 1.0 ) millis = 1.0;
        Sleep( (DWORD) millis );

        // Wake up, find out where we are now
        result = dsBuffer->GetCurrentPosition( &currentPos, &safePos );
        if ( FAILED(result) ) {
          sprintf(message_, "RtApiDs: Unable to get current position (%s): %s.",
                  devices_[stream_.device[0]].name.c_str(), getErrorString(result));
          error(RtError::DRIVER_ERROR);
        }
        if ( currentPos < nextWritePos ) currentPos += dsBufferSize; // unwrap offset
      }

      // Lock free space in the buffer
      result = dsBuffer->Lock (nextWritePos, buffer_bytes, &buffer1,
                               &bufferSize1, &buffer2, &bufferSize2, 0);
      if ( FAILED(result) ) {
        sprintf(message_, "RtApiDs: Unable to lock buffer during playback (%s): %s.",
                devices_[stream_.device[0]].name.c_str(), getErrorString(result));
        error(RtError::DRIVER_ERROR);
      }

      // Zero the free space
      ZeroMemory(buffer1, bufferSize1);
      if (buffer2 != NULL) ZeroMemory(buffer2, bufferSize2);

      // Update our buffer offset and unlock sound buffer
      result = dsBuffer->Unlock (buffer1, bufferSize1, buffer2, bufferSize2);
      if ( FAILED(result) ) {
        sprintf(message_, "RtApiDs: Unable to unlock buffer during playback (%s): %s.",
                devices_[stream_.device[0]].name.c_str(), getErrorString(result));
        error(RtError::DRIVER_ERROR);
      }
      nextWritePos = (nextWritePos + bufferSize1 + bufferSize2) % dsBufferSize;
      handles[0].bufferPointer = nextWritePos;
    }

    // If we play again, start at the beginning of the buffer.
    handles[0].bufferPointer = 0;
  }

  if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
    LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handles[1].buffer;

    result = buffer->Stop();
    if ( FAILED(result) ) {
      sprintf(message_, "RtApiDs: Unable to stop capture buffer (%s): %s",
              devices_[stream_.device[1]].name.c_str(), getErrorString(result));
      error(RtError::DRIVER_ERROR);
    }

    dsBufferSize = stream_.bufferSize * stream_.nDeviceChannels[1];
    dsBufferSize *= formatBytes(stream_.deviceFormat[1]) * stream_.nBuffers;

    // Lock the buffer and clear it so that if we start to play again,
    // we won't have old data playing.
    result = buffer->Lock(0, dsBufferSize, &buffer1, &bufferSize1, NULL, NULL, 0);
    if ( FAILED(result) ) {
      sprintf(message_, "RtApiDs: Unable to lock capture buffer (%s): %s.",
              devices_[stream_.device[1]].name.c_str(), getErrorString(result));
      error(RtError::DRIVER_ERROR);
    }

    // Zero the DS buffer
    ZeroMemory(buffer1, bufferSize1);

    // Unlock the DS buffer
    result = buffer->Unlock(buffer1, bufferSize1, NULL, 0);
    if ( FAILED(result) ) {
      sprintf(message_, "RtApiDs: Unable to unlock capture buffer (%s): %s.",
              devices_[stream_.device[1]].name.c_str(), getErrorString(result));
      error(RtError::DRIVER_ERROR);
    }

    // If we start recording again, we must begin at beginning of buffer.
    handles[1].bufferPointer = 0;
  }

  MUTEX_UNLOCK(&stream_.mutex);
}
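// Added note (worked example, illustrative numbers): with 2048 bytes still
// ahead of the play pointer, 16-bit samples (2 bytes each) and a 44100 Hz
// rate, the wait above is millis = 2048 * 900.0 / (2 * 44100) ~= 20.9 ms; the
// factor 900 (rather than 1000) is the "fudge factor" described in
// tickStream() below.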
void RtApiDs :: abortStream()
{
  if (stream_.state == STREAM_STOPPED) return;

  // Change the state before the lock to improve shutdown response
  // when using a callback.
  stream_.state = STREAM_STOPPED;
  MUTEX_LOCK(&stream_.mutex);

  HRESULT result;
  DWORD dsBufferSize;
  LPVOID audioPtr;
  DWORD dataLen;
  DsHandle *handles = (DsHandle *) stream_.apiHandle;
  if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
    LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handles[0].buffer;
    result = buffer->Stop();
    if ( FAILED(result) ) {
      sprintf(message_, "RtApiDs: Unable to stop buffer (%s): %s",
              devices_[stream_.device[0]].name.c_str(), getErrorString(result));
      error(RtError::DRIVER_ERROR);
    }

    dsBufferSize = stream_.bufferSize * stream_.nDeviceChannels[0];
    dsBufferSize *= formatBytes(stream_.deviceFormat[0]) * stream_.nBuffers;

    // Lock the buffer and clear it so that if we start to play again,
    // we won't have old data playing.
    result = buffer->Lock(0, dsBufferSize, &audioPtr, &dataLen, NULL, NULL, 0);
    if ( FAILED(result) ) {
      sprintf(message_, "RtApiDs: Unable to lock buffer (%s): %s.",
              devices_[stream_.device[0]].name.c_str(), getErrorString(result));
      error(RtError::DRIVER_ERROR);
    }

    // Zero the DS buffer
    ZeroMemory(audioPtr, dataLen);

    // Unlock the DS buffer
    result = buffer->Unlock(audioPtr, dataLen, NULL, 0);
    if ( FAILED(result) ) {
      sprintf(message_, "RtApiDs: Unable to unlock buffer (%s): %s.",
              devices_[stream_.device[0]].name.c_str(), getErrorString(result));
      error(RtError::DRIVER_ERROR);
    }

    // If we start playing again, we must begin at beginning of buffer.
    handles[0].bufferPointer = 0;
  }

  if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
    LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handles[1].buffer;

    result = buffer->Stop();
    if ( FAILED(result) ) {
      sprintf(message_, "RtApiDs: Unable to stop capture buffer (%s): %s",
              devices_[stream_.device[1]].name.c_str(), getErrorString(result));
      error(RtError::DRIVER_ERROR);
    }

    dsBufferSize = stream_.bufferSize * stream_.nDeviceChannels[1];
    dsBufferSize *= formatBytes(stream_.deviceFormat[1]) * stream_.nBuffers;

    // Lock the buffer and clear it so that if we start to play again,
    // we won't have old data playing.
    result = buffer->Lock(0, dsBufferSize, &audioPtr, &dataLen, NULL, NULL, 0);
    if ( FAILED(result) ) {
      sprintf(message_, "RtApiDs: Unable to lock capture buffer (%s): %s.",
              devices_[stream_.device[1]].name.c_str(), getErrorString(result));
      error(RtError::DRIVER_ERROR);
    }

    // Zero the DS buffer
    ZeroMemory(audioPtr, dataLen);

    // Unlock the DS buffer
    result = buffer->Unlock(audioPtr, dataLen, NULL, 0);
    if ( FAILED(result) ) {
      sprintf(message_, "RtApiDs: Unable to unlock capture buffer (%s): %s.",
              devices_[stream_.device[1]].name.c_str(), getErrorString(result));
      error(RtError::DRIVER_ERROR);
    }

    // If we start recording again, we must begin at beginning of buffer.
    handles[1].bufferPointer = 0;
  }

  MUTEX_UNLOCK(&stream_.mutex);
}
int RtApiDs :: streamWillBlock()
{
  if (stream_.state == STREAM_STOPPED) return 0;

  MUTEX_LOCK(&stream_.mutex);

  int channels;
  int frames = 0;
  HRESULT result;
  DWORD currentPos, safePos;

  DsHandle *handles = (DsHandle *) stream_.apiHandle;
  if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {

    LPDIRECTSOUNDBUFFER dsBuffer = (LPDIRECTSOUNDBUFFER) handles[0].buffer;
    UINT nextWritePos = handles[0].bufferPointer;
    channels = stream_.nDeviceChannels[0];
    DWORD dsBufferSize = stream_.bufferSize * channels;
    dsBufferSize *= formatBytes(stream_.deviceFormat[0]) * stream_.nBuffers;

    // Find out where the read and "safe write" pointers are.
    result = dsBuffer->GetCurrentPosition(&currentPos, &safePos);
    if ( FAILED(result) ) {
      sprintf(message_, "RtApiDs: Unable to get current position (%s): %s.",
              devices_[stream_.device[0]].name.c_str(), getErrorString(result));
      error(RtError::DRIVER_ERROR);
    }

    if ( currentPos < nextWritePos ) currentPos += dsBufferSize; // unwrap offset
    frames = currentPos - nextWritePos;
    frames /= channels * formatBytes(stream_.deviceFormat[0]);
  }

  if (stream_.mode == INPUT || stream_.mode == DUPLEX) {

    LPDIRECTSOUNDCAPTUREBUFFER dsBuffer = (LPDIRECTSOUNDCAPTUREBUFFER) handles[1].buffer;
    UINT nextReadPos = handles[1].bufferPointer;
    channels = stream_.nDeviceChannels[1];
    DWORD dsBufferSize = stream_.bufferSize * channels;
    dsBufferSize *= formatBytes(stream_.deviceFormat[1]) * stream_.nBuffers;

    // Find out where the write and "safe read" pointers are.
    result = dsBuffer->GetCurrentPosition(&currentPos, &safePos);
    if ( FAILED(result) ) {
      sprintf(message_, "RtApiDs: Unable to get current capture position (%s): %s.",
              devices_[stream_.device[1]].name.c_str(), getErrorString(result));
      error(RtError::DRIVER_ERROR);
    }

    if ( safePos < nextReadPos ) safePos += dsBufferSize; // unwrap offset

    if (stream_.mode == DUPLEX ) {
      // Take largest value of the two.
      int temp = safePos - nextReadPos;
      temp /= channels * formatBytes(stream_.deviceFormat[1]);
      frames = ( temp > frames ) ? temp : frames;
    }
    else {
      frames = safePos - nextReadPos;
      frames /= channels * formatBytes(stream_.deviceFormat[1]);
    }
  }

  frames = stream_.bufferSize - frames;
  if (frames < 0) frames = 0;

  MUTEX_UNLOCK(&stream_.mutex);
  return frames;
}
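// Added note (worked example, illustrative numbers): if the play pointer is
// 4096 bytes ahead of our write offset on a 2-channel, 16-bit stream, then
// frames = 4096 / (2 * 2) = 1024 frames are already writable, and the value
// returned is stream_.bufferSize - 1024 (clamped at zero), i.e. roughly the
// number of frames a blocking tickStream() call would still have to wait for.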
void RtApiDs :: tickStream()
{
  int stopStream = 0;
  if (stream_.state == STREAM_STOPPED) {
    if (stream_.callbackInfo.usingCallback) Sleep(50); // sleep 50 milliseconds
    return;
  }
  else if (stream_.callbackInfo.usingCallback) {
    RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;
    stopStream = callback(stream_.userBuffer, stream_.bufferSize, stream_.callbackInfo.userData);
  }

  MUTEX_LOCK(&stream_.mutex);

  // The state might change while waiting on a mutex.
  if (stream_.state == STREAM_STOPPED) {
    MUTEX_UNLOCK(&stream_.mutex);
    return;
  }

  HRESULT result;
  char *buffer;
  long buffer_bytes;
  DWORD currentPos, safePos;
  LPVOID buffer1 = NULL;
  LPVOID buffer2 = NULL;
  DWORD bufferSize1 = 0;
  DWORD bufferSize2 = 0;

  DsHandle *handles = (DsHandle *) stream_.apiHandle;
  if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {

    // Setup parameters and do buffer conversion if necessary.
    if (stream_.doConvertBuffer[0]) {
      convertStreamBuffer(OUTPUT);
      buffer = stream_.deviceBuffer;
      buffer_bytes = stream_.bufferSize * stream_.nDeviceChannels[0];
      buffer_bytes *= formatBytes(stream_.deviceFormat[0]);
    }
    else {
      buffer = stream_.userBuffer;
      buffer_bytes = stream_.bufferSize * stream_.nUserChannels[0];
      buffer_bytes *= formatBytes(stream_.userFormat);
    }

    // No byte swapping necessary in DirectSound implementation.

    LPDIRECTSOUNDBUFFER dsBuffer = (LPDIRECTSOUNDBUFFER) handles[0].buffer;
    UINT nextWritePos = handles[0].bufferPointer;
    DWORD dsBufferSize = buffer_bytes * stream_.nBuffers;

    // Find out where the read and "safe write" pointers are.
    result = dsBuffer->GetCurrentPosition(&currentPos, &safePos);
    if ( FAILED(result) ) {
      sprintf(message_, "RtApiDs: Unable to get current position (%s): %s.",
              devices_[stream_.device[0]].name.c_str(), getErrorString(result));
      error(RtError::DRIVER_ERROR);
    }

    if ( currentPos < nextWritePos ) currentPos += dsBufferSize; // unwrap offset
    DWORD endWrite = nextWritePos + buffer_bytes;

    // Check whether the entire write region is behind the play pointer.
    while ( currentPos < endWrite ) {
      // If we are here, then we must wait until the play pointer gets
      // beyond the write region.  The approach here is to use the
      // Sleep() function to suspend operation until safePos catches
      // up.  Calculate number of milliseconds to wait as:
      //   time = distance * (milliseconds/second) * fudgefactor /
      //          ((bytes/sample) * (samples/second))
      // A "fudgefactor" less than 1 is used because it was found
      // that sleeping too long was MUCH worse than sleeping for
      // several shorter periods.
      double millis = (endWrite - currentPos) * 900.0;
      millis /= ( formatBytes(stream_.deviceFormat[0]) * stream_.sampleRate);
      if ( millis < 1.0 ) millis = 1.0;
      Sleep( (DWORD) millis );

      // Wake up, find out where we are now
      result = dsBuffer->GetCurrentPosition( &currentPos, &safePos );
      if ( FAILED(result) ) {
        sprintf(message_, "RtApiDs: Unable to get current position (%s): %s.",
                devices_[stream_.device[0]].name.c_str(), getErrorString(result));
        error(RtError::DRIVER_ERROR);
      }
      if ( currentPos < nextWritePos ) currentPos += dsBufferSize; // unwrap offset
    }

    // Lock free space in the buffer
    result = dsBuffer->Lock (nextWritePos, buffer_bytes, &buffer1,
                             &bufferSize1, &buffer2, &bufferSize2, 0);
    if ( FAILED(result) ) {
      sprintf(message_, "RtApiDs: Unable to lock buffer during playback (%s): %s.",
              devices_[stream_.device[0]].name.c_str(), getErrorString(result));
      error(RtError::DRIVER_ERROR);
    }

    // Copy our buffer into the DS buffer
    CopyMemory(buffer1, buffer, bufferSize1);
    if (buffer2 != NULL) CopyMemory(buffer2, buffer+bufferSize1, bufferSize2);

    // Update our buffer offset and unlock sound buffer
    result = dsBuffer->Unlock (buffer1, bufferSize1, buffer2, bufferSize2);
    if ( FAILED(result) ) {
      sprintf(message_, "RtApiDs: Unable to unlock buffer during playback (%s): %s.",
              devices_[stream_.device[0]].name.c_str(), getErrorString(result));
      error(RtError::DRIVER_ERROR);
    }
    nextWritePos = (nextWritePos + bufferSize1 + bufferSize2) % dsBufferSize;
    handles[0].bufferPointer = nextWritePos;
  }

  if (stream_.mode == INPUT || stream_.mode == DUPLEX) {

    // Setup parameters.
    if (stream_.doConvertBuffer[1]) {
      buffer = stream_.deviceBuffer;
      buffer_bytes = stream_.bufferSize * stream_.nDeviceChannels[1];
      buffer_bytes *= formatBytes(stream_.deviceFormat[1]);
    }
    else {
      buffer = stream_.userBuffer;
      buffer_bytes = stream_.bufferSize * stream_.nUserChannels[1];
      buffer_bytes *= formatBytes(stream_.userFormat);
    }

    LPDIRECTSOUNDCAPTUREBUFFER dsBuffer = (LPDIRECTSOUNDCAPTUREBUFFER) handles[1].buffer;
    UINT nextReadPos = handles[1].bufferPointer;
    DWORD dsBufferSize = buffer_bytes * stream_.nBuffers;

    // Find out where the write and "safe read" pointers are.
    result = dsBuffer->GetCurrentPosition(&currentPos, &safePos);
    if ( FAILED(result) ) {
      sprintf(message_, "RtApiDs: Unable to get current capture position (%s): %s.",
              devices_[stream_.device[1]].name.c_str(), getErrorString(result));
      error(RtError::DRIVER_ERROR);
    }

    if ( safePos < nextReadPos ) safePos += dsBufferSize; // unwrap offset
    DWORD endRead = nextReadPos + buffer_bytes;

    // Check whether the entire write region is behind the play pointer.
    while ( safePos < endRead ) {
      // See comments for playback.
      double millis = (endRead - safePos) * 900.0;
      millis /= ( formatBytes(stream_.deviceFormat[1]) * stream_.sampleRate);
      if ( millis < 1.0 ) millis = 1.0;
      Sleep( (DWORD) millis );

      // Wake up, find out where we are now
      result = dsBuffer->GetCurrentPosition( &currentPos, &safePos );
      if ( FAILED(result) ) {
        sprintf(message_, "RtApiDs: Unable to get current capture position (%s): %s.",
                devices_[stream_.device[1]].name.c_str(), getErrorString(result));
        error(RtError::DRIVER_ERROR);
      }
      if ( safePos < nextReadPos ) safePos += dsBufferSize; // unwrap offset
    }

    // Lock free space in the buffer
    result = dsBuffer->Lock (nextReadPos, buffer_bytes, &buffer1,
                             &bufferSize1, &buffer2, &bufferSize2, 0);
    if ( FAILED(result) ) {
      sprintf(message_, "RtApiDs: Unable to lock buffer during capture (%s): %s.",
              devices_[stream_.device[1]].name.c_str(), getErrorString(result));
      error(RtError::DRIVER_ERROR);
    }

    // Copy the DS buffer into our buffer
    CopyMemory(buffer, buffer1, bufferSize1);
    if (buffer2 != NULL) CopyMemory(buffer+bufferSize1, buffer2, bufferSize2);

    // Update our buffer offset and unlock sound buffer
    nextReadPos = (nextReadPos + bufferSize1 + bufferSize2) % dsBufferSize;
    result = dsBuffer->Unlock (buffer1, bufferSize1, buffer2, bufferSize2);
    if ( FAILED(result) ) {
      sprintf(message_, "RtApiDs: Unable to unlock buffer during capture (%s): %s.",
              devices_[stream_.device[1]].name.c_str(), getErrorString(result));
      error(RtError::DRIVER_ERROR);
    }
    handles[1].bufferPointer = nextReadPos;

    // No byte swapping necessary in DirectSound implementation.

    // Do buffer conversion if necessary.
    if (stream_.doConvertBuffer[1])
      convertStreamBuffer(INPUT);
  }

  MUTEX_UNLOCK(&stream_.mutex);

  if (stream_.callbackInfo.usingCallback && stopStream)
    this->stopStream();
}
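// Added note: because the DirectSound buffers are circular, the position
// returned by GetCurrentPosition() can be numerically smaller than our own
// offset even though it is logically ahead; the "unwrap offset" lines above
// add dsBufferSize in that case (e.g. a currentPos of 512 with a nextWritePos
// of 7680 in an 8192-byte buffer is treated as 8704) so that the distance
// arithmetic stays monotonic.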
// Definitions for utility functions and callbacks
// specific to the DirectSound implementation.

#if defined(__WINDOWS_PTHREAD__)
extern "C" void * callbackHandler( void * ptr )
{
  CallbackInfo *info = (CallbackInfo *) ptr;
  RtApiDs *object = (RtApiDs *) info->object;
  bool *usingCallback = &info->usingCallback;

  while ( *usingCallback ) {
    pthread_testcancel();
    try {
      object->tickStream();
    }
    catch (RtError &exception) {
      fprintf(stderr, "\nRtApiDs: callback thread error (%s) ... closing thread.\n\n",
              exception.getMessageString());
      break;
    }
  }

  return 0;
}
#else
extern "C" unsigned __stdcall callbackHandler(void *ptr)
{
  CallbackInfo *info = (CallbackInfo *) ptr;
  RtApiDs *object = (RtApiDs *) info->object;
  bool *usingCallback = &info->usingCallback;

  while ( *usingCallback ) {
    try {
      object->tickStream();
    }
    catch (RtError &exception) {
      fprintf(stderr, "\nRtApiDs: callback thread error (%s) ... closing thread.\n\n",
              exception.getMessageString());
      break;
    }
  }

  return 0;
}
#endif
static bool CALLBACK deviceCountCallback(LPGUID lpguid,
                                         LPCSTR lpcstrDescription,
                                         LPCSTR lpcstrModule,
                                         LPVOID lpContext)
{
  int *pointer = ((int *) lpContext);
  (*pointer)++;

  return true;
}

static bool CALLBACK deviceInfoCallback(LPGUID lpguid,
                                        LPCSTR lpcstrDescription,
                                        LPCSTR lpcstrModule,
                                        LPVOID lpContext)
{
  enum_info *info = ((enum_info *) lpContext);
  while (strlen(info->name) > 0) info++;

  strncpy(info->name, lpcstrDescription, 64);

  HRESULT hr;
  info->isValid = false;
  if (info->isInput == true) {
    DSCCAPS caps;
    LPDIRECTSOUNDCAPTURE object;

    hr = DirectSoundCaptureCreate( lpguid, &object, NULL );
    if( hr != DS_OK ) return true;

    caps.dwSize = sizeof(caps);
    hr = object->GetCaps( &caps );
    if( hr == DS_OK ) {
      if (caps.dwChannels > 0 && caps.dwFormats > 0)
        info->isValid = true;
    }
    object->Release();
  }
  else {
    DSCAPS caps;
    LPDIRECTSOUND object;
    hr = DirectSoundCreate( lpguid, &object, NULL );
    if( hr != DS_OK ) return true;

    caps.dwSize = sizeof(caps);
    hr = object->GetCaps( &caps );
    if( hr == DS_OK ) {
      if ( caps.dwFlags & DSCAPS_PRIMARYMONO || caps.dwFlags & DSCAPS_PRIMARYSTEREO )
        info->isValid = true;
    }
    object->Release();
  }

  return true;
}

static bool CALLBACK defaultDeviceCallback(LPGUID lpguid,
                                           LPCSTR lpcstrDescription,
                                           LPCSTR lpcstrModule,
                                           LPVOID lpContext)
{
  enum_info *info = ((enum_info *) lpContext);

  if ( lpguid == NULL ) {
    strncpy(info->name, lpcstrDescription, 64);
  }

  return true;
}

static bool CALLBACK deviceIdCallback(LPGUID lpguid,
                                      LPCSTR lpcstrDescription,
                                      LPCSTR lpcstrModule,
                                      LPVOID lpContext)
{
  enum_info *info = ((enum_info *) lpContext);

  if ( strncmp( info->name, lpcstrDescription, 64 ) == 0 ) {
    info->id = lpguid;
    info->isValid = true;
  }

  return true;
}
static char* getErrorString(int code)
{
  switch (code) {

  case DSERR_ALLOCATED:
    return "Direct Sound already allocated";

  case DSERR_CONTROLUNAVAIL:
    return "Direct Sound control unavailable";

  case DSERR_INVALIDPARAM:
    return "Direct Sound invalid parameter";

  case DSERR_INVALIDCALL:
    return "Direct Sound invalid call";

  case DSERR_GENERIC:
    return "Direct Sound generic error";

  case DSERR_PRIOLEVELNEEDED:
    return "Direct Sound Priority level needed";

  case DSERR_OUTOFMEMORY:
    return "Direct Sound out of memory";

  case DSERR_BADFORMAT:
    return "Direct Sound bad format";

  case DSERR_UNSUPPORTED:
    return "Direct Sound unsupported error";

  case DSERR_NODRIVER:
    return "Direct Sound no driver error";

  case DSERR_ALREADYINITIALIZED:
    return "Direct Sound already initialized";

  case DSERR_NOAGGREGATION:
    return "Direct Sound no aggregation";

  case DSERR_BUFFERLOST:
    return "Direct Sound buffer lost";

  case DSERR_OTHERAPPHASPRIO:
    return "Direct Sound other app has priority";

  case DSERR_UNINITIALIZED:
    return "Direct Sound uninitialized";

  default:
    return "Direct Sound unknown error";
  }
}

//******************** End of __WINDOWS_DS__ *********************//
#endif
#if defined(__IRIX_AL__) // SGI's AL API for IRIX

#include <dmedia/audio.h>

extern "C" void *callbackHandler(void * ptr);

RtApiAl :: RtApiAl()
{
  this->initialize();

  if (nDevices_ <= 0) {
    sprintf(message_, "RtApiAl: no Irix AL audio devices found!");
    error(RtError::NO_DEVICES_FOUND);
  }
}

RtApiAl :: ~RtApiAl()
{
  // The subclass destructor gets called before the base class
  // destructor, so close any existing streams before deallocating
  // apiDeviceId memory.
  if ( stream_.mode != UNINITIALIZED ) closeStream();

  // Free our allocated apiDeviceId memory.
  long *id;
  for ( unsigned int i=0; i<devices_.size(); i++ ) {
    id = (long *) devices_[i].apiDeviceId;
    if ( id ) free( id );
  }
}

void RtApiAl :: initialize(void)
{
  // Count cards and devices

  // Determine the total number of input and output devices.
  nDevices_ = alQueryValues(AL_SYSTEM, AL_DEVICES, 0, 0, 0, 0);
  if (nDevices_ < 0) {
    sprintf(message_, "RtApiAl: error counting devices: %s.",
            alGetErrorString(oserror()));
    error(RtError::DRIVER_ERROR);
  }

  if (nDevices_ <= 0) return;

  ALvalue *vls = (ALvalue *) new ALvalue[nDevices_];

  // Create our list of devices and write their ascii identifiers and resource ids.
  char name[64];
  int i, outs, ins;
  ALpv pvs[1];
  pvs[0].param = AL_NAME;
  pvs[0].value.ptr = name;
  RtApiDevice device;
  long *id;

  outs = alQueryValues(AL_SYSTEM, AL_DEFAULT_OUTPUT, vls, nDevices_, 0, 0);
  if (outs < 0) {
    delete [] vls;
    sprintf(message_, "RtApiAl: error getting output devices: %s.",
            alGetErrorString(oserror()));
    error(RtError::DRIVER_ERROR);
  }

  for (i=0; i<outs; i++) {
    if (alGetParams(vls[i].i, pvs, 1) < 0) {
      delete [] vls;
      sprintf(message_, "RtApiAl: error querying output devices: %s.",
              alGetErrorString(oserror()));
      error(RtError::DRIVER_ERROR);
    }
    device.name.erase();
    device.name.append( (const char *)name, strlen(name)+1);
    devices_.push_back(device);
    id = (long *) calloc(2, sizeof(long));
    id[0] = vls[i].i;
    devices_[i].apiDeviceId = (void *) id;
  }

  ins = alQueryValues(AL_SYSTEM, AL_DEFAULT_INPUT, &vls[outs], nDevices_-outs, 0, 0);
  if (ins < 0) {
    delete [] vls;
    sprintf(message_, "RtApiAl: error getting input devices: %s.",
            alGetErrorString(oserror()));
    error(RtError::DRIVER_ERROR);
  }

  for (i=outs; i<ins+outs; i++) {
    if (alGetParams(vls[i].i, pvs, 1) < 0) {
      delete [] vls;
      sprintf(message_, "RtApiAl: error querying input devices: %s.",
              alGetErrorString(oserror()));
      error(RtError::DRIVER_ERROR);
    }
    device.name.erase();
    device.name.append( (const char *)name, strlen(name)+1);
    devices_.push_back(device);
    id = (long *) calloc(2, sizeof(long));
    id[1] = vls[i].i;
    devices_[i].apiDeviceId = (void *) id;
  }

  delete [] vls;
}
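// Added note: each devices_[i].apiDeviceId allocated above points to two
// longs.  As used by getDefaultOutputDevice() and getDefaultInputDevice()
// below, id[0] holds the AL output resource id and id[1] the AL input resource
// id; calloc() leaves the unused slot at zero for devices that only have one
// direction.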
int RtApiAl :: getDefaultInputDevice(void)
{
  ALvalue value;
  long *id;
  int result = alQueryValues(AL_SYSTEM, AL_DEFAULT_INPUT, &value, 1, 0, 0);
  if (result < 0) {
    sprintf(message_, "RtApiAl: error getting default input device id: %s.",
            alGetErrorString(oserror()));
    error(RtError::WARNING);
  }
  else {
    for ( unsigned int i=0; i<devices_.size(); i++ ) {
      id = (long *) devices_[i].apiDeviceId;
      if ( id[1] == value.i ) return i;
    }
  }

  return 0;
}

int RtApiAl :: getDefaultOutputDevice(void)
{
  ALvalue value;
  long *id;
  int result = alQueryValues(AL_SYSTEM, AL_DEFAULT_OUTPUT, &value, 1, 0, 0);
  if (result < 0) {
    sprintf(message_, "RtApiAl: error getting default output device id: %s.",
            alGetErrorString(oserror()));
    error(RtError::WARNING);
  }
  else {
    for ( unsigned int i=0; i<devices_.size(); i++ ) {
      id = (long *) devices_[i].apiDeviceId;
      if ( id[0] == value.i ) return i;
    }
  }

  return 0;
}
void RtApiAl :: probeDeviceInfo(RtApiDevice *info)
{
  int result;
  long resource;
  ALvalue value;
  ALparamInfo pinfo;

  // Get output resource ID if it exists.
  long *id = (long *) info->apiDeviceId;
  resource = id[0];
  if ( resource > 0 ) {

    // Probe output device parameters.
    result = alQueryValues(resource, AL_CHANNELS, &value, 1, 0, 0);
    if (result < 0) {
      sprintf(message_, "RtApiAl: error getting device (%s) channels: %s.",
              info->name.c_str(), alGetErrorString(oserror()));
      error(RtError::WARNING);
    }
    else {
      info->maxOutputChannels = value.i;
      info->minOutputChannels = 1;
    }

    result = alGetParamInfo(resource, AL_RATE, &pinfo);
    if (result < 0) {
      sprintf(message_, "RtApiAl: error getting device (%s) rates: %s.",
              info->name.c_str(), alGetErrorString(oserror()));
      error(RtError::WARNING);
    }
    else {
      info->sampleRates.clear();
      for (unsigned int k=0; k<MAX_SAMPLE_RATES; k++) {
        if ( SAMPLE_RATES[k] >= pinfo.min.i && SAMPLE_RATES[k] <= pinfo.max.i )
          info->sampleRates.push_back( SAMPLE_RATES[k] );
      }
    }

    // The AL library supports all our formats, except 24-bit and 32-bit ints.
    info->nativeFormats = (RtAudioFormat) 51;
  }

  // Now get input resource ID if it exists.
  resource = id[1];
  if ( resource > 0 ) {

    // Probe input device parameters.
    result = alQueryValues(resource, AL_CHANNELS, &value, 1, 0, 0);
    if (result < 0) {
      sprintf(message_, "RtApiAl: error getting device (%s) channels: %s.",
              info->name.c_str(), alGetErrorString(oserror()));
      error(RtError::WARNING);
    }
    else {
      info->maxInputChannels = value.i;
      info->minInputChannels = 1;
    }

    result = alGetParamInfo(resource, AL_RATE, &pinfo);
    if (result < 0) {
      sprintf(message_, "RtApiAl: error getting device (%s) rates: %s.",
              info->name.c_str(), alGetErrorString(oserror()));
      error(RtError::WARNING);
    }
    else {
      // In the case of the default device, these values will
      // overwrite the rates determined for the output device.  Since
      // the input device is most likely to be more limited than the
      // output device, this is ok.
      info->sampleRates.clear();
      for (unsigned int k=0; k<MAX_SAMPLE_RATES; k++) {
        if ( SAMPLE_RATES[k] >= pinfo.min.i && SAMPLE_RATES[k] <= pinfo.max.i )
          info->sampleRates.push_back( SAMPLE_RATES[k] );
      }
    }

    // The AL library supports all our formats, except 24-bit and 32-bit ints.
    info->nativeFormats = (RtAudioFormat) 51;
  }

  if ( info->maxInputChannels == 0 && info->maxOutputChannels == 0 )
    return;
  if ( info->sampleRates.size() == 0 )
    return;

  // Determine duplex status.
  if (info->maxInputChannels < info->maxOutputChannels)
    info->maxDuplexChannels = info->maxInputChannels;
  else
    info->maxDuplexChannels = info->maxOutputChannels;
  if (info->minInputChannels < info->minOutputChannels)
    info->minDuplexChannels = info->minInputChannels;
  else
    info->minDuplexChannels = info->minOutputChannels;

  if ( info->maxDuplexChannels > 0 ) info->hasDuplexSupport = true;
  else info->hasDuplexSupport = false;

  info->probed = true;
}
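// Added note: the value 51 used for nativeFormats above is the bitwise OR of
// RTAUDIO_SINT8 (1), RTAUDIO_SINT16 (2), RTAUDIO_FLOAT32 (16) and
// RTAUDIO_FLOAT64 (32), i.e. 1 + 2 + 16 + 32 = 51 -- every RtAudio format
// except the 24-bit and 32-bit integer ones, matching the comment above.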
bool RtApiAl :: probeDeviceOpen(int device, StreamMode mode, int channels,
                                int sampleRate, RtAudioFormat format,
                                int *bufferSize, int numberOfBuffers)
{
  int result, nBuffers;
  long resource;
  ALconfig al_config;
  ALport port;
  ALpv pvs[2];
  long *id = (long *) devices_[device].apiDeviceId;

  // Get a new ALconfig structure.
  al_config = alNewConfig();
  if ( !al_config ) {
    sprintf(message_,"RtApiAl: can't get AL config: %s.",
            alGetErrorString(oserror()));
    error(RtError::WARNING);
    return FAILURE;
  }

  // Set the channels.
  result = alSetChannels(al_config, channels);
  if ( result == -1 ) {
    alFreeConfig(al_config);
    sprintf(message_,"RtApiAl: can't set %d channels in AL config: %s.",
            channels, alGetErrorString(oserror()));
    error(RtError::WARNING);
    return FAILURE;
  }

  // Attempt to set the queue size.  The al API doesn't provide a
  // means for querying the minimum/maximum buffer size of a device,
  // so if the specified size doesn't work, take whatever the
  // al_config structure returns.
  if ( numberOfBuffers < 1 )
    nBuffers = 1;
  else
    nBuffers = numberOfBuffers;
  long buffer_size = *bufferSize * nBuffers;
  result = alSetQueueSize(al_config, buffer_size); // in sample frames
  if ( result == -1 ) {
    // Get the buffer size specified by the al_config and try that.
    buffer_size = alGetQueueSize(al_config);
    result = alSetQueueSize(al_config, buffer_size);
    if ( result == -1 ) {
      alFreeConfig(al_config);
      sprintf(message_,"RtApiAl: can't set buffer size (%ld) in AL config: %s.",
              buffer_size, alGetErrorString(oserror()));
      error(RtError::WARNING);
      return FAILURE;
    }
    *bufferSize = buffer_size / nBuffers;
  }

  // Set the data format.
  stream_.userFormat = format;
  stream_.deviceFormat[mode] = format;
  if (format == RTAUDIO_SINT8) {
    result = alSetSampFmt(al_config, AL_SAMPFMT_TWOSCOMP);
    result = alSetWidth(al_config, AL_SAMPLE_8);
  }
  else if (format == RTAUDIO_SINT16) {
    result = alSetSampFmt(al_config, AL_SAMPFMT_TWOSCOMP);
    result = alSetWidth(al_config, AL_SAMPLE_16);
  }
  else if (format == RTAUDIO_SINT24) {
    // Our 24-bit format assumes the upper 3 bytes of a 4 byte word.
    // The AL library uses the lower 3 bytes, so we'll need to do our
    // own conversion.
    result = alSetSampFmt(al_config, AL_SAMPFMT_FLOAT);
    stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
  }
  else if (format == RTAUDIO_SINT32) {
    // The AL library doesn't seem to support the 32-bit integer
    // format, so we'll need to do our own conversion.
    result = alSetSampFmt(al_config, AL_SAMPFMT_FLOAT);
    stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
  }
  else if (format == RTAUDIO_FLOAT32)
    result = alSetSampFmt(al_config, AL_SAMPFMT_FLOAT);
  else if (format == RTAUDIO_FLOAT64)
    result = alSetSampFmt(al_config, AL_SAMPFMT_DOUBLE);

  if ( result == -1 ) {
    alFreeConfig(al_config);
    sprintf(message_,"RtApiAl: error setting sample format in AL config: %s.",
            alGetErrorString(oserror()));
    error(RtError::WARNING);
    return FAILURE;
  }

  if (mode == OUTPUT) {

    // Set our device.
    resource = AL_DEFAULT_OUTPUT;
    result = alSetDevice(al_config, resource);
    if ( result == -1 ) {
      alFreeConfig(al_config);
      sprintf(message_,"RtApiAl: error setting device (%s) in AL config: %s.",
              devices_[device].name.c_str(), alGetErrorString(oserror()));
      error(RtError::WARNING);
      return FAILURE;
    }

    // Open the port.
    port = alOpenPort("RtApiAl Output Port", "w", al_config);
    if ( !port ) {
      alFreeConfig(al_config);
      sprintf(message_,"RtApiAl: error opening output port: %s.",
              alGetErrorString(oserror()));
      error(RtError::WARNING);
      return FAILURE;
    }

    // Set the sample rate
    pvs[0].param = AL_MASTER_CLOCK;
    pvs[0].value.i = AL_CRYSTAL_MCLK_TYPE;
    pvs[1].param = AL_RATE;
    pvs[1].value.ll = alDoubleToFixed((double)sampleRate);
    result = alSetParams(resource, pvs, 2);
    if ( result < 0 ) {
      alClosePort(port);
      alFreeConfig(al_config);
      sprintf(message_,"RtApiAl: error setting sample rate (%d) for device (%s): %s.",
              sampleRate, devices_[device].name.c_str(), alGetErrorString(oserror()));
      error(RtError::WARNING);
      return FAILURE;
    }
  }
  else { // mode == INPUT

    // Set our device.
    resource = AL_DEFAULT_INPUT;
    result = alSetDevice(al_config, resource);
    if ( result == -1 ) {
      alFreeConfig(al_config);
      sprintf(message_,"RtApiAl: error setting device (%s) in AL config: %s.",
              devices_[device].name.c_str(), alGetErrorString(oserror()));
      error(RtError::WARNING);
      return FAILURE;
    }

    // Open the port.
    port = alOpenPort("RtApiAl Input Port", "r", al_config);
    if ( !port ) {
      alFreeConfig(al_config);
      sprintf(message_,"RtApiAl: error opening input port: %s.",
              alGetErrorString(oserror()));
      error(RtError::WARNING);
      return FAILURE;
    }

    // Set the sample rate
    pvs[0].param = AL_MASTER_CLOCK;
    pvs[0].value.i = AL_CRYSTAL_MCLK_TYPE;
    pvs[1].param = AL_RATE;
    pvs[1].value.ll = alDoubleToFixed((double)sampleRate);
    result = alSetParams(resource, pvs, 2);
    if ( result < 0 ) {
      alClosePort(port);
      alFreeConfig(al_config);
      sprintf(message_,"RtApiAl: error setting sample rate (%d) for device (%s): %s.",
              sampleRate, devices_[device].name.c_str(), alGetErrorString(oserror()));
      error(RtError::WARNING);
      return FAILURE;
    }
  }

  alFreeConfig(al_config);

  stream_.nUserChannels[mode] = channels;
  stream_.nDeviceChannels[mode] = channels;

  // Save stream handle.
  ALport *handle = (ALport *) stream_.apiHandle;
  if ( handle == 0 ) {
    handle = (ALport *) calloc(2, sizeof(ALport));
    if ( handle == NULL ) {
      sprintf(message_, "RtApiAl: Irix Al error allocating handle memory (%s).",
              devices_[device].name.c_str());
      goto error;
    }
    stream_.apiHandle = (void *) handle;
  }
  handle[mode] = port;

  // Set flags for buffer conversion
  stream_.doConvertBuffer[mode] = false;
  if (stream_.userFormat != stream_.deviceFormat[mode])
    stream_.doConvertBuffer[mode] = true;

  // Allocate necessary internal buffers
  if ( stream_.nUserChannels[0] != stream_.nUserChannels[1] ) {

    long buffer_bytes;
    if (stream_.nUserChannels[0] >= stream_.nUserChannels[1])
      buffer_bytes = stream_.nUserChannels[0];
    else
      buffer_bytes = stream_.nUserChannels[1];

    buffer_bytes *= *bufferSize * formatBytes(stream_.userFormat);
    if (stream_.userBuffer) free(stream_.userBuffer);
    stream_.userBuffer = (char *) calloc(buffer_bytes, 1);
    if (stream_.userBuffer == NULL) {
      sprintf(message_, "RtApiAl: error allocating user buffer memory (%s).",
              devices_[device].name.c_str());
      goto error;
    }
  }

  if ( stream_.doConvertBuffer[mode] ) {

    long buffer_bytes;
    bool makeBuffer = true;
    if ( mode == OUTPUT )
      buffer_bytes = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
    else { // mode == INPUT
      buffer_bytes = stream_.nDeviceChannels[1] * formatBytes(stream_.deviceFormat[1]);
      if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
        long bytes_out = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
        if ( buffer_bytes < bytes_out ) makeBuffer = false;
      }
    }

    if ( makeBuffer ) {
      buffer_bytes *= *bufferSize;
      if (stream_.deviceBuffer) free(stream_.deviceBuffer);
      stream_.deviceBuffer = (char *) calloc(buffer_bytes, 1);
      if (stream_.deviceBuffer == NULL) {
        sprintf(message_, "RtApiAl: error allocating device buffer memory (%s).",
                devices_[device].name.c_str());
        goto error;
      }
    }
  }

  stream_.device[mode] = device;
  stream_.state = STREAM_STOPPED;
  if ( stream_.mode == OUTPUT && mode == INPUT )
    // We had already set up an output stream.
    stream_.mode = DUPLEX;
  else
    stream_.mode = mode;
  stream_.nBuffers = nBuffers;
  stream_.bufferSize = *bufferSize;
  stream_.sampleRate = sampleRate;

  return SUCCESS;

 error:
  if (handle) {
    if (handle[0])
      alClosePort(handle[0]);
    if (handle[1])
      alClosePort(handle[1]);
    free(handle);
    stream_.apiHandle = 0;
  }

  if (stream_.userBuffer) {
    free(stream_.userBuffer);
    stream_.userBuffer = 0;
  }

  error(RtError::WARNING);
  return FAILURE;
}
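// Added note: alSetQueueSize() works in sample frames, so the queue requested
// above is *bufferSize * nBuffers frames (for example, 512 frames x 4 buffers
// = a 2048-frame queue).  If the device rejects that size, the queue size
// reported by the ALconfig is used instead and *bufferSize is recomputed as
// buffer_size / nBuffers so the caller sees the per-buffer frame count that is
// actually in effect.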
void RtApiAl :: closeStream()
{
  // We don't want an exception to be thrown here because this
  // function is called by our class destructor.  So, do our own
  // stream check here.
  if ( stream_.mode == UNINITIALIZED ) {
    sprintf(message_, "RtApiAl::closeStream(): no open stream to close!");
    error(RtError::WARNING);
    return;
  }

  ALport *handle = (ALport *) stream_.apiHandle;
  if (stream_.state == STREAM_RUNNING) {
    int buffer_size = stream_.bufferSize * stream_.nBuffers;
    if (stream_.mode == OUTPUT || stream_.mode == DUPLEX)
      alDiscardFrames(handle[0], buffer_size);
    if (stream_.mode == INPUT || stream_.mode == DUPLEX)
      alDiscardFrames(handle[1], buffer_size);
    stream_.state = STREAM_STOPPED;
  }

  if (stream_.callbackInfo.usingCallback) {
    stream_.callbackInfo.usingCallback = false;
    pthread_join(stream_.callbackInfo.thread, NULL);
  }

  if (handle) {
    if (handle[0]) alClosePort(handle[0]);
    if (handle[1]) alClosePort(handle[1]);
    free(handle);
    stream_.apiHandle = 0;
  }

  if (stream_.userBuffer) {
    free(stream_.userBuffer);
    stream_.userBuffer = 0;
  }

  if (stream_.deviceBuffer) {
    free(stream_.deviceBuffer);
    stream_.deviceBuffer = 0;
  }

  stream_.mode = UNINITIALIZED;
}
void RtApiAl :: startStream()
{
  if (stream_.state == STREAM_RUNNING) return;

  MUTEX_LOCK(&stream_.mutex);

  // The AL port is ready as soon as it is opened.
  stream_.state = STREAM_RUNNING;

  MUTEX_UNLOCK(&stream_.mutex);
}

void RtApiAl :: stopStream()
{
  if (stream_.state == STREAM_STOPPED) return;

  // Change the state before the lock to improve shutdown response
  // when using a callback.
  stream_.state = STREAM_STOPPED;
  MUTEX_LOCK(&stream_.mutex);

  int result, buffer_size = stream_.bufferSize * stream_.nBuffers;
  ALport *handle = (ALport *) stream_.apiHandle;

  if (stream_.mode == OUTPUT || stream_.mode == DUPLEX)
    alZeroFrames(handle[0], buffer_size);

  if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
    result = alDiscardFrames(handle[1], buffer_size);
    if (result == -1) {
      sprintf(message_, "RtApiAl: error draining stream device (%s): %s.",
              devices_[stream_.device[1]].name.c_str(), alGetErrorString(oserror()));
      error(RtError::DRIVER_ERROR);
    }
  }

  MUTEX_UNLOCK(&stream_.mutex);
}
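// Added note: the AL API has no explicit drain call, so stopStream() above
// pushes one full queue of silence (alZeroFrames) to let pending output play
// out and simply discards whatever input has accumulated (alDiscardFrames);
// abortStream() below discards the pending output instead of draining it.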
void RtApiAl :: abortStream()
{
  if (stream_.state == STREAM_STOPPED) return;

  // Change the state before the lock to improve shutdown response
  // when using a callback.
  stream_.state = STREAM_STOPPED;
  MUTEX_LOCK(&stream_.mutex);

  ALport *handle = (ALport *) stream_.apiHandle;
  if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {

    int buffer_size = stream_.bufferSize * stream_.nBuffers;
    int result = alDiscardFrames(handle[0], buffer_size);
    if (result == -1) {
      sprintf(message_, "RtApiAl: error aborting stream device (%s): %s.",
              devices_[stream_.device[0]].name.c_str(), alGetErrorString(oserror()));
      error(RtError::DRIVER_ERROR);
    }
  }

  // There is no clear action to take on the input stream, since the
  // port will continue to run in any event.

  MUTEX_UNLOCK(&stream_.mutex);
}

int RtApiAl :: streamWillBlock()
{
  if (stream_.state == STREAM_STOPPED) return 0;

  MUTEX_LOCK(&stream_.mutex);

  int err = 0;
  int frames = 0;
  ALport *handle = (ALport *) stream_.apiHandle;
  if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
    err = alGetFillable(handle[0]);
    if (err < 0) {
      sprintf(message_, "RtApiAl: error getting available frames for stream (%s): %s.",
              devices_[stream_.device[0]].name.c_str(), alGetErrorString(oserror()));
      error(RtError::DRIVER_ERROR);
    }
    frames = err;
  }

  if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
    err = alGetFilled(handle[1]);
    if (err < 0) {
      sprintf(message_, "RtApiAl: error getting available frames for stream (%s): %s.",
              devices_[stream_.device[1]].name.c_str(), alGetErrorString(oserror()));
      error(RtError::DRIVER_ERROR);
    }
    if (frames > err) frames = err;
  }

  frames = stream_.bufferSize - frames;
  if (frames < 0) frames = 0;

  MUTEX_UNLOCK(&stream_.mutex);
  return frames;
}
void RtApiAl :: tickStream()
{
  int stopStream = 0;
  if (stream_.state == STREAM_STOPPED) {
    if (stream_.callbackInfo.usingCallback) usleep(50000); // sleep 50 milliseconds
    return;
  }
  else if (stream_.callbackInfo.usingCallback) {
    RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;
    stopStream = callback(stream_.userBuffer, stream_.bufferSize, stream_.callbackInfo.userData);
  }

  MUTEX_LOCK(&stream_.mutex);

  // The state might change while waiting on a mutex.
  if (stream_.state == STREAM_STOPPED)
    goto unlock;

  {
    char *buffer;
    int channels;
    RtAudioFormat format;
    ALport *handle = (ALport *) stream_.apiHandle;
    if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {

      // Setup parameters and do buffer conversion if necessary.
      if (stream_.doConvertBuffer[0]) {
        convertStreamBuffer(OUTPUT);
        buffer = stream_.deviceBuffer;
        channels = stream_.nDeviceChannels[0];
        format = stream_.deviceFormat[0];
      }
      else {
        buffer = stream_.userBuffer;
        channels = stream_.nUserChannels[0];
        format = stream_.userFormat;
      }

      // Do byte swapping if necessary.
      if (stream_.doByteSwap[0])
        byteSwapBuffer(buffer, stream_.bufferSize * channels, format);

      // Write interleaved samples to device.
      alWriteFrames(handle[0], buffer, stream_.bufferSize);
    }

    if (stream_.mode == INPUT || stream_.mode == DUPLEX) {

      // Setup parameters.
      if (stream_.doConvertBuffer[1]) {
        buffer = stream_.deviceBuffer;
        channels = stream_.nDeviceChannels[1];
        format = stream_.deviceFormat[1];
      }
      else {
        buffer = stream_.userBuffer;
        channels = stream_.nUserChannels[1];
        format = stream_.userFormat;
      }

      // Read interleaved samples from device.
      alReadFrames(handle[1], buffer, stream_.bufferSize);

      // Do byte swapping if necessary.
      if (stream_.doByteSwap[1])
        byteSwapBuffer(buffer, stream_.bufferSize * channels, format);

      // Do buffer conversion if necessary.
      if (stream_.doConvertBuffer[1])
        convertStreamBuffer(INPUT);
    }
  }

 unlock:
  MUTEX_UNLOCK(&stream_.mutex);

  if (stream_.callbackInfo.usingCallback && stopStream)
    this->stopStream();
}
void RtApiAl :: setStreamCallback(RtAudioCallback callback, void *userData)
{
  CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
  if ( info->usingCallback ) {
    sprintf(message_, "RtApiAl: A callback is already set for this stream!");
    error(RtError::WARNING);
    return;
  }

  info->callback = (void *) callback;
  info->userData = userData;
  info->usingCallback = true;
  info->object = (void *) this;

  // Set the thread attributes for joinable and realtime scheduling
  // priority.  The higher priority will only take effect if the
  // program is run as root or suid.
  pthread_attr_t attr;
  pthread_attr_init(&attr);
  // pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
  // pthread_attr_setschedpolicy(&attr, SCHED_RR);

  int err = pthread_create(&info->thread, &attr, callbackHandler, &stream_.callbackInfo);
  pthread_attr_destroy(&attr);
  if (err) {
    info->usingCallback = false;
    sprintf(message_, "RtApiAl: error starting callback thread!");
    error(RtError::THREAD_ERROR);
  }
}

void RtApiAl :: cancelStreamCallback()
{
  if (stream_.callbackInfo.usingCallback) {

    if (stream_.state == STREAM_RUNNING)
      stopStream();

    MUTEX_LOCK(&stream_.mutex);

    stream_.callbackInfo.usingCallback = false;
    pthread_join(stream_.callbackInfo.thread, NULL);
    stream_.callbackInfo.thread = 0;
    stream_.callbackInfo.callback = NULL;
    stream_.callbackInfo.userData = NULL;

    MUTEX_UNLOCK(&stream_.mutex);
  }
}
extern "C" void *callbackHandler(void *ptr)
{
  CallbackInfo *info = (CallbackInfo *) ptr;
  RtApiAl *object = (RtApiAl *) info->object;
  bool *usingCallback = &info->usingCallback;

  while ( *usingCallback ) {
    try {
      object->tickStream();
    }
    catch (RtError &exception) {
      fprintf(stderr, "\nRtApiAl: callback thread error (%s) ... closing thread.\n\n",
              exception.getMessageString());
      break;
    }
  }

  return 0;
}

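// Note: the callback thread above runs until usingCallback is set false
// (cancelStreamCallback() does this and then joins the thread) or until
// tickStream() throws an RtError, in which case the loop breaks and the
// thread returns.
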
//******************** End of __IRIX_AL__ *********************//

#endif

// *************************************************** //
//
// Protected common (OS-independent) RtAudio methods.
//
// *************************************************** //

// This method can be modified to control the behavior of error
// message reporting and throwing.
void RtApi :: error(RtError::Type type)
{
  if (type == RtError::WARNING) {
#if defined(__CHUCK_DEBUG__)
    fprintf(stderr, "[chuck](via rtaudio): %s\n", message_);
#endif
  }
  else if (type == RtError::DEBUG_WARNING) {
#if defined(__CHUCK_DEBUG__)
#if defined(__RTAUDIO_DEBUG__)
    fprintf(stderr, "[chuck](via rtaudio): %s\n", message_);
#endif
#endif
  }
  else {
#if defined(__RTAUDIO_DEBUG__)
    fprintf(stderr, "[chuck](via rtaudio): %s\n", message_);
#endif
    throw RtError(std::string(message_), type);
  }
}

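// Illustrative sketch (not from the library source): since error() throws an
// RtError for anything other than warnings, callers normally wrap RtAudio
// calls in a try/catch block, for example:
//
//   try {
//     RtAudio audio( 0, 2, 0, 0, RTAUDIO_FLOAT32, 44100, &bufferSize, 4 );
//   }
//   catch (RtError &e) {
//     fprintf(stderr, "RtAudio error: %s\n", e.getMessageString());
//   }
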
void RtApi :: verifyStream()
{
  if ( stream_.mode == UNINITIALIZED ) {
    sprintf(message_, "RtAudio: a stream was not previously opened!");
    error(RtError::INVALID_STREAM);
  }
}

void RtApi :: clearDeviceInfo(RtApiDevice *info)
{
  // Don't clear the name or DEVICE_ID fields here ... they are
  // typically set prior to a call of this function.
  info->probed = false;
  info->maxOutputChannels = 0;
  info->maxInputChannels = 0;
  info->maxDuplexChannels = 0;
  info->minOutputChannels = 0;
  info->minInputChannels = 0;
  info->minDuplexChannels = 0;
  info->hasDuplexSupport = false;
  info->sampleRates.clear();
  info->nativeFormats = 0;
}

void RtApi :: clearStreamInfo()
{
  stream_.mode = UNINITIALIZED;
  stream_.state = STREAM_STOPPED;
  stream_.sampleRate = 0;
  stream_.bufferSize = 0;
  stream_.nBuffers = 0;
  stream_.userFormat = 0;
  for ( int i=0; i<2; i++ ) {
    stream_.device[i] = 0;
    stream_.doConvertBuffer[i] = false;
    stream_.deInterleave[i] = false;
    stream_.doByteSwap[i] = false;
    stream_.nUserChannels[i] = 0;
    stream_.nDeviceChannels[i] = 0;
    stream_.deviceFormat[i] = 0;
  }
}

int RtApi :: formatBytes(RtAudioFormat format)
{
  if (format == RTAUDIO_SINT16)
    return 2;
  else if (format == RTAUDIO_SINT24 || format == RTAUDIO_SINT32 ||
           format == RTAUDIO_FLOAT32)
    return 4;
  else if (format == RTAUDIO_FLOAT64)
    return 8;
  else if (format == RTAUDIO_SINT8)
    return 1;

  sprintf(message_, "RtApi: undefined format in formatBytes().");
  error(RtError::WARNING);

  return 0;
}

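// Worked example: formatBytes(RTAUDIO_SINT24) returns 4, not 3, because
// 24-bit samples are carried in the upper three bytes of a 32-bit integer
// throughout this file; e.g. the 24-bit value 0x123456 is stored as the
// Int32 0x12345600 (see convertStreamBuffer() below).
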
void RtApi :: convertStreamBuffer( StreamMode mode )
{
  // This method does format conversion, input/output channel compensation, and
  // data interleaving/deinterleaving.  24-bit integers are assumed to occupy
  // the upper three bytes of a 32-bit integer.

  int j, jump_in, jump_out, channels;
  RtAudioFormat format_in, format_out;
  char *input, *output;

  if (mode == INPUT) { // convert device to user buffer
    input = stream_.deviceBuffer;
    output = stream_.userBuffer;
    jump_in = stream_.nDeviceChannels[1];
    jump_out = stream_.nUserChannels[1];
    format_in = stream_.deviceFormat[1];
    format_out = stream_.userFormat;
  }
  else { // convert user to device buffer
    input = stream_.userBuffer;
    output = stream_.deviceBuffer;
    jump_in = stream_.nUserChannels[0];
    jump_out = stream_.nDeviceChannels[0];
    format_in = stream_.userFormat;
    format_out = stream_.deviceFormat[0];

    // clear our device buffer when in/out duplex device channels are different
    if ( stream_.mode == DUPLEX &&
         stream_.nDeviceChannels[0] != stream_.nDeviceChannels[1] )
      memset(output, 0, stream_.bufferSize * jump_out * formatBytes(format_out));
  }

  channels = (jump_in < jump_out) ? jump_in : jump_out;

  // Set up the interleave/deinterleave offsets
  std::vector<int> offset_in(channels);
  std::vector<int> offset_out(channels);
  if (mode == INPUT && stream_.deInterleave[1]) {
    for (int k=0; k<channels; k++) {
      offset_in[k] = k * stream_.bufferSize;
      offset_out[k] = k;
      jump_in = 1;
    }
  }
  else if (mode == OUTPUT && stream_.deInterleave[0]) {
    for (int k=0; k<channels; k++) {
      offset_in[k] = k;
      offset_out[k] = k * stream_.bufferSize;
      jump_out = 1;
    }
  }
  else {
    for (int k=0; k<channels; k++) {
      offset_in[k] = k;
      offset_out[k] = k;
    }
  }

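  // Worked example: with 2 channels and bufferSize == 4, an interleaved
  // buffer is laid out L R L R L R L R (offsets 0 and 1, jump of 2), while a
  // non-interleaved (planar) buffer is L L L L R R R R (offsets 0 and
  // bufferSize, jump of 1).  The offset and jump values computed above let
  // the conversion loops below use the same indexing for either layout.
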
  if (format_out == RTAUDIO_FLOAT64) {
    Float64 scale;
    Float64 *out = (Float64 *)output;

    if (format_in == RTAUDIO_SINT8) {
      signed char *in = (signed char *)input;
      scale = 1.0 / 128.0;
      for (int i=0; i<stream_.bufferSize; i++) {
        for (j=0; j<channels; j++) {
          out[offset_out[j]] = (Float64) in[offset_in[j]];
          out[offset_out[j]] *= scale;
        }
        in += jump_in;
        out += jump_out;
      }
    }
    else if (format_in == RTAUDIO_SINT16) {
      Int16 *in = (Int16 *)input;
      scale = 1.0 / 32768.0;
      for (int i=0; i<stream_.bufferSize; i++) {
        for (j=0; j<channels; j++) {
          out[offset_out[j]] = (Float64) in[offset_in[j]];
          out[offset_out[j]] *= scale;
        }
        in += jump_in;
        out += jump_out;
      }
    }
    else if (format_in == RTAUDIO_SINT24) {
      Int32 *in = (Int32 *)input;
      scale = 1.0 / 2147483648.0;
      for (int i=0; i<stream_.bufferSize; i++) {
        for (j=0; j<channels; j++) {
          out[offset_out[j]] = (Float64) (in[offset_in[j]] & 0xffffff00);
          out[offset_out[j]] *= scale;
        }
        in += jump_in;
        out += jump_out;
      }
    }
    else if (format_in == RTAUDIO_SINT32) {
      Int32 *in = (Int32 *)input;
      scale = 1.0 / 2147483648.0;
      for (int i=0; i<stream_.bufferSize; i++) {
        for (j=0; j<channels; j++) {
          out[offset_out[j]] = (Float64) in[offset_in[j]];
          out[offset_out[j]] *= scale;
        }
        in += jump_in;
        out += jump_out;
      }
    }
    else if (format_in == RTAUDIO_FLOAT32) {
      Float32 *in = (Float32 *)input;
      for (int i=0; i<stream_.bufferSize; i++) {
        for (j=0; j<channels; j++) {
          out[offset_out[j]] = (Float64) in[offset_in[j]];
        }
        in += jump_in;
        out += jump_out;
      }
    }
    else if (format_in == RTAUDIO_FLOAT64) {
      // Channel compensation and/or (de)interleaving only.
      Float64 *in = (Float64 *)input;
      for (int i=0; i<stream_.bufferSize; i++) {
        for (j=0; j<channels; j++) {
          out[offset_out[j]] = in[offset_in[j]];
        }
        in += jump_in;
        out += jump_out;
      }
    }
  }
  else if (format_out == RTAUDIO_FLOAT32) {
    Float32 scale;
    Float32 *out = (Float32 *)output;

    if (format_in == RTAUDIO_SINT8) {
      signed char *in = (signed char *)input;
      scale = 1.0 / 128.0;
      for (int i=0; i<stream_.bufferSize; i++) {
        for (j=0; j<channels; j++) {
          out[offset_out[j]] = (Float32) in[offset_in[j]];
          out[offset_out[j]] *= scale;
        }
        in += jump_in;
        out += jump_out;
      }
    }
    else if (format_in == RTAUDIO_SINT16) {
      Int16 *in = (Int16 *)input;
      scale = 1.0 / 32768.0;
      for (int i=0; i<stream_.bufferSize; i++) {
        for (j=0; j<channels; j++) {
          out[offset_out[j]] = (Float32) in[offset_in[j]];
          out[offset_out[j]] *= scale;
        }
        in += jump_in;
        out += jump_out;
      }
    }
    else if (format_in == RTAUDIO_SINT24) {
      Int32 *in = (Int32 *)input;
      scale = 1.0 / 2147483648.0;
      for (int i=0; i<stream_.bufferSize; i++) {
        for (j=0; j<channels; j++) {
          out[offset_out[j]] = (Float32) (in[offset_in[j]] & 0xffffff00);
          out[offset_out[j]] *= scale;
        }
        in += jump_in;
        out += jump_out;
      }
    }
    else if (format_in == RTAUDIO_SINT32) {
      Int32 *in = (Int32 *)input;
      scale = 1.0 / 2147483648.0;
      for (int i=0; i<stream_.bufferSize; i++) {
        for (j=0; j<channels; j++) {
          out[offset_out[j]] = (Float32) in[offset_in[j]];
          out[offset_out[j]] *= scale;
        }
        in += jump_in;
        out += jump_out;
      }
    }
    else if (format_in == RTAUDIO_FLOAT32) {
      // Channel compensation and/or (de)interleaving only.
      Float32 *in = (Float32 *)input;
      for (int i=0; i<stream_.bufferSize; i++) {
        for (j=0; j<channels; j++) {
          out[offset_out[j]] = in[offset_in[j]];
        }
        in += jump_in;
        out += jump_out;
      }
    }
    else if (format_in == RTAUDIO_FLOAT64) {
      Float64 *in = (Float64 *)input;
      for (int i=0; i<stream_.bufferSize; i++) {
        for (j=0; j<channels; j++) {
          out[offset_out[j]] = (Float32) in[offset_in[j]];
        }
        in += jump_in;
        out += jump_out;
      }
    }
  }
  else if (format_out == RTAUDIO_SINT32) {
    Int32 *out = (Int32 *)output;
    if (format_in == RTAUDIO_SINT8) {
      signed char *in = (signed char *)input;
      for (int i=0; i<stream_.bufferSize; i++) {
        for (j=0; j<channels; j++) {
          out[offset_out[j]] = (Int32) in[offset_in[j]];
          out[offset_out[j]] <<= 24;
        }
        in += jump_in;
        out += jump_out;
      }
    }
    else if (format_in == RTAUDIO_SINT16) {
      Int16 *in = (Int16 *)input;
      for (int i=0; i<stream_.bufferSize; i++) {
        for (j=0; j<channels; j++) {
          out[offset_out[j]] = (Int32) in[offset_in[j]];
          out[offset_out[j]] <<= 16;
        }
        in += jump_in;
        out += jump_out;
      }
    }
    else if (format_in == RTAUDIO_SINT24) {
      Int32 *in = (Int32 *)input;
      for (int i=0; i<stream_.bufferSize; i++) {
        for (j=0; j<channels; j++) {
          out[offset_out[j]] = (Int32) in[offset_in[j]];
        }
        in += jump_in;
        out += jump_out;
      }
    }
    else if (format_in == RTAUDIO_SINT32) {
      // Channel compensation and/or (de)interleaving only.
      Int32 *in = (Int32 *)input;
      for (int i=0; i<stream_.bufferSize; i++) {
        for (j=0; j<channels; j++) {
          out[offset_out[j]] = in[offset_in[j]];
        }
        in += jump_in;
        out += jump_out;
      }
    }
    else if (format_in == RTAUDIO_FLOAT32) {
      Float32 *in = (Float32 *)input;
      for (int i=0; i<stream_.bufferSize; i++) {
        for (j=0; j<channels; j++) {
          out[offset_out[j]] = (Int32) (in[offset_in[j]] * 2147483647.0);
        }
        in += jump_in;
        out += jump_out;
      }
    }
    else if (format_in == RTAUDIO_FLOAT64) {
      Float64 *in = (Float64 *)input;
      for (int i=0; i<stream_.bufferSize; i++) {
        for (j=0; j<channels; j++) {
          out[offset_out[j]] = (Int32) (in[offset_in[j]] * 2147483647.0);
        }
        in += jump_in;
        out += jump_out;
      }
    }
  }
  else if (format_out == RTAUDIO_SINT24) {
    Int32 *out = (Int32 *)output;
    if (format_in == RTAUDIO_SINT8) {
      signed char *in = (signed char *)input;
      for (int i=0; i<stream_.bufferSize; i++) {
        for (j=0; j<channels; j++) {
          out[offset_out[j]] = (Int32) in[offset_in[j]];
          out[offset_out[j]] <<= 24;
        }
        in += jump_in;
        out += jump_out;
      }
    }
    else if (format_in == RTAUDIO_SINT16) {
      Int16 *in = (Int16 *)input;
      for (int i=0; i<stream_.bufferSize; i++) {
        for (j=0; j<channels; j++) {
          out[offset_out[j]] = (Int32) in[offset_in[j]];
          out[offset_out[j]] <<= 16;
        }
        in += jump_in;
        out += jump_out;
      }
    }
    else if (format_in == RTAUDIO_SINT24) {
      // Channel compensation and/or (de)interleaving only.
      Int32 *in = (Int32 *)input;
      for (int i=0; i<stream_.bufferSize; i++) {
        for (j=0; j<channels; j++) {
          out[offset_out[j]] = in[offset_in[j]];
        }
        in += jump_in;
        out += jump_out;
      }
    }
    else if (format_in == RTAUDIO_SINT32) {
      Int32 *in = (Int32 *)input;
      for (int i=0; i<stream_.bufferSize; i++) {
        for (j=0; j<channels; j++) {
          out[offset_out[j]] = (Int32) (in[offset_in[j]] & 0xffffff00);
        }
        in += jump_in;
        out += jump_out;
      }
    }
    else if (format_in == RTAUDIO_FLOAT32) {
      Float32 *in = (Float32 *)input;
      for (int i=0; i<stream_.bufferSize; i++) {
        for (j=0; j<channels; j++) {
          out[offset_out[j]] = (Int32) (in[offset_in[j]] * 2147483647.0);
        }
        in += jump_in;
        out += jump_out;
      }
    }
    else if (format_in == RTAUDIO_FLOAT64) {
      Float64 *in = (Float64 *)input;
      for (int i=0; i<stream_.bufferSize; i++) {
        for (j=0; j<channels; j++) {
          out[offset_out[j]] = (Int32) (in[offset_in[j]] * 2147483647.0);
        }
        in += jump_in;
        out += jump_out;
      }
    }
  }
  else if (format_out == RTAUDIO_SINT16) {
    Int16 *out = (Int16 *)output;
    if (format_in == RTAUDIO_SINT8) {
      signed char *in = (signed char *)input;
      for (int i=0; i<stream_.bufferSize; i++) {
        for (j=0; j<channels; j++) {
          out[offset_out[j]] = (Int16) in[offset_in[j]];
          out[offset_out[j]] <<= 8;
        }
        in += jump_in;
        out += jump_out;
      }
    }
    else if (format_in == RTAUDIO_SINT16) {
      // Channel compensation and/or (de)interleaving only.
      Int16 *in = (Int16 *)input;
      for (int i=0; i<stream_.bufferSize; i++) {
        for (j=0; j<channels; j++) {
          out[offset_out[j]] = in[offset_in[j]];
        }
        in += jump_in;
        out += jump_out;
      }
    }
    else if (format_in == RTAUDIO_SINT24) {
      Int32 *in = (Int32 *)input;
      for (int i=0; i<stream_.bufferSize; i++) {
        for (j=0; j<channels; j++) {
          out[offset_out[j]] = (Int16) ((in[offset_in[j]] >> 16) & 0x0000ffff);
        }
        in += jump_in;
        out += jump_out;
      }
    }
    else if (format_in == RTAUDIO_SINT32) {
      Int32 *in = (Int32 *)input;
      for (int i=0; i<stream_.bufferSize; i++) {
        for (j=0; j<channels; j++) {
          out[offset_out[j]] = (Int16) ((in[offset_in[j]] >> 16) & 0x0000ffff);
        }
        in += jump_in;
        out += jump_out;
      }
    }
    else if (format_in == RTAUDIO_FLOAT32) {
      Float32 *in = (Float32 *)input;
      for (int i=0; i<stream_.bufferSize; i++) {
        for (j=0; j<channels; j++) {
          out[offset_out[j]] = (Int16) (in[offset_in[j]] * 32767.0);
        }
        in += jump_in;
        out += jump_out;
      }
    }
    else if (format_in == RTAUDIO_FLOAT64) {
      Float64 *in = (Float64 *)input;
      for (int i=0; i<stream_.bufferSize; i++) {
        for (j=0; j<channels; j++) {
          out[offset_out[j]] = (Int16) (in[offset_in[j]] * 32767.0);
        }
        in += jump_in;
        out += jump_out;
      }
    }
  }
  else if (format_out == RTAUDIO_SINT8) {
    signed char *out = (signed char *)output;
    if (format_in == RTAUDIO_SINT8) {
      // Channel compensation and/or (de)interleaving only.
      signed char *in = (signed char *)input;
      for (int i=0; i<stream_.bufferSize; i++) {
        for (j=0; j<channels; j++) {
          out[offset_out[j]] = in[offset_in[j]];
        }
        in += jump_in;
        out += jump_out;
      }
    }
    if (format_in == RTAUDIO_SINT16) {
      Int16 *in = (Int16 *)input;
      for (int i=0; i<stream_.bufferSize; i++) {
        for (j=0; j<channels; j++) {
          out[offset_out[j]] = (signed char) ((in[offset_in[j]] >> 8) & 0x00ff);
        }
        in += jump_in;
        out += jump_out;
      }
    }
    else if (format_in == RTAUDIO_SINT24) {
      Int32 *in = (Int32 *)input;
      for (int i=0; i<stream_.bufferSize; i++) {
        for (j=0; j<channels; j++) {
          out[offset_out[j]] = (signed char) ((in[offset_in[j]] >> 24) & 0x000000ff);
        }
        in += jump_in;
        out += jump_out;
      }
    }
    else if (format_in == RTAUDIO_SINT32) {
      Int32 *in = (Int32 *)input;
      for (int i=0; i<stream_.bufferSize; i++) {
        for (j=0; j<channels; j++) {
          out[offset_out[j]] = (signed char) ((in[offset_in[j]] >> 24) & 0x000000ff);
        }
        in += jump_in;
        out += jump_out;
      }
    }
    else if (format_in == RTAUDIO_FLOAT32) {
      Float32 *in = (Float32 *)input;
      for (int i=0; i<stream_.bufferSize; i++) {
        for (j=0; j<channels; j++) {
          out[offset_out[j]] = (signed char) (in[offset_in[j]] * 127.0);
        }
        in += jump_in;
        out += jump_out;
      }
    }
    else if (format_in == RTAUDIO_FLOAT64) {
      Float64 *in = (Float64 *)input;
      for (int i=0; i<stream_.bufferSize; i++) {
        for (j=0; j<channels; j++) {
          out[offset_out[j]] = (signed char) (in[offset_in[j]] * 127.0);
        }
        in += jump_in;
        out += jump_out;
      }
    }
  }
}

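// Worked example: byteSwapBuffer() below reverses the byte order of each
// sample in place.  A 16-bit sample 0x1234 stored little-endian as the bytes
// { 0x34, 0x12 } becomes { 0x12, 0x34 } (big-endian) after the swap; 32- and
// 64-bit samples are handled by swapping symmetric byte pairs.
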
void RtApi :: byteSwapBuffer( char *buffer, int samples, RtAudioFormat format )
{
  char val;
  char *ptr;

  ptr = buffer;
  if (format == RTAUDIO_SINT16) {
    for (int i=0; i<samples; i++) {
      // Swap 1st and 2nd bytes.
      val = *(ptr);
      *(ptr) = *(ptr+1);
      *(ptr+1) = val;

      // Increment 2 bytes.
      ptr += 2;
    }
  }
  else if (format == RTAUDIO_SINT24 ||
           format == RTAUDIO_SINT32 ||
           format == RTAUDIO_FLOAT32) {
    for (int i=0; i<samples; i++) {
      // Swap 1st and 4th bytes.
      val = *(ptr);
      *(ptr) = *(ptr+3);
      *(ptr+3) = val;

      // Swap 2nd and 3rd bytes.
      ptr += 1;
      val = *(ptr);
      *(ptr) = *(ptr+1);
      *(ptr+1) = val;

      // Increment 4 bytes.
      ptr += 3;
    }
  }
  else if (format == RTAUDIO_FLOAT64) {
    for (int i=0; i<samples; i++) {
      // Swap 1st and 8th bytes
      val = *(ptr);
      *(ptr) = *(ptr+7);
      *(ptr+7) = val;

      // Swap 2nd and 7th bytes
      ptr += 1;
      val = *(ptr);
      *(ptr) = *(ptr+5);
      *(ptr+5) = val;

      // Swap 3rd and 6th bytes
      ptr += 1;
      val = *(ptr);
      *(ptr) = *(ptr+3);
      *(ptr+3) = val;

      // Swap 4th and 5th bytes
      ptr += 1;
      val = *(ptr);
      *(ptr) = *(ptr+1);
      *(ptr+1) = val;

      // Increment 8 bytes.