1 /************************************************************************/
3 \brief Realtime audio i/o C++ classes.
5 RtAudio provides a common API (Application Programming Interface)
6 for realtime audio input/output across Linux (native ALSA, Jack,
7 and OSS), SGI, Macintosh OS X (CoreAudio), and Windows
8 (DirectSound and ASIO) operating systems.
10 RtAudio WWW site: http://music.mcgill.ca/~gary/rtaudio/
12 RtAudio: realtime audio i/o C++ classes
13 Copyright (c) 2001-2005 Gary P. Scavone
15 Permission is hereby granted, free of charge, to any person
16 obtaining a copy of this software and associated documentation files
17 (the "Software"), to deal in the Software without restriction,
18 including without limitation the rights to use, copy, modify, merge,
19 publish, distribute, sublicense, and/or sell copies of the Software,
20 and to permit persons to whom the Software is furnished to do so,
21 subject to the following conditions:
23 The above copyright notice and this permission notice shall be
24 included in all copies or substantial portions of the Software.
26 Any person wishing to distribute modifications to the Software is
27 requested to send the modifications to the original developer so that
28 they can be incorporated into the canonical version.
30 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
31 EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
32 MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
33 IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
34 ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
35 CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
36 WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
38 /************************************************************************/
40 // RtAudio: Version 3.0.2 (14 October 2005)
42 // Modified by Robin Davies, 1 October 2005
43 // - Improvements to DirectX pointer chasing.
44 // - Backdoor RtDsStatistics hook provides DirectX performance information.
45 // - Bug fix for non-power-of-two Asio granularity used by Edirol PCR-A30.
46 // - Auto-call CoInitialize for DSOUND and ASIO platforms.
50 #include "chuck_errmsg.h"
51 #include "digiio_rtaudio.h"
55 // #include "RtAudio.h"
56 // #include <iostream>
59 // Static variable definitions.
60 const unsigned int RtApi::MAX_SAMPLE_RATES
= 14;
61 const unsigned int RtApi::SAMPLE_RATES
[] = {
62 4000, 5512, 8000, 9600, 11025, 16000, 22050,
63 32000, 44100, 48000, 88200, 96000, 176400, 192000
// Platform mutex abstraction: Win32 critical sections on native Windows
// builds, pthread mutexes everywhere else (including pthreads-on-Windows).
#if ( defined(__WINDOWS_DS__) || defined(__WINDOWS_ASIO__) ) && !defined(__WINDOWS_PTHREAD__)
// #if defined(__WINDOWS_DS__) || defined(__WINDOWS_ASIO__)
  #define MUTEX_INITIALIZE(A) InitializeCriticalSection(A)
  #define MUTEX_DESTROY(A)    DeleteCriticalSection(A);
  #define MUTEX_LOCK(A)       EnterCriticalSection(A)
  #define MUTEX_UNLOCK(A)     LeaveCriticalSection(A)
#else // pthread API
  #define MUTEX_INITIALIZE(A) pthread_mutex_init(A, NULL)
  #define MUTEX_DESTROY(A)    pthread_mutex_destroy(A);
  #define MUTEX_LOCK(A)       pthread_mutex_lock(A)
  #define MUTEX_UNLOCK(A)     pthread_mutex_unlock(A)
#endif
81 // *************************************************** //
83 // Public common (OS-independent) methods.
85 // *************************************************** //
87 RtAudio :: RtAudio( RtAudioApi api
)
92 RtAudio :: RtAudio( int outputDevice
, int outputChannels
,
93 int inputDevice
, int inputChannels
,
94 RtAudioFormat format
, int sampleRate
,
95 int *bufferSize
, int numberOfBuffers
, RtAudioApi api
)
100 rtapi_
->openStream( outputDevice
, outputChannels
,
101 inputDevice
, inputChannels
,
103 bufferSize
, numberOfBuffers
);
105 catch (RtError
&exception
) {
106 // Deallocate the RtApi instance.
112 RtAudio :: RtAudio( int outputDevice
, int outputChannels
,
113 int inputDevice
, int inputChannels
,
114 RtAudioFormat format
, int sampleRate
,
115 int *bufferSize
, int *numberOfBuffers
, RtAudioApi api
)
120 rtapi_
->openStream( outputDevice
, outputChannels
,
121 inputDevice
, inputChannels
,
123 bufferSize
, numberOfBuffers
);
125 catch (RtError
&exception
) {
126 // Deallocate the RtApi instance.
132 RtAudio :: ~RtAudio()
137 void RtAudio :: openStream( int outputDevice
, int outputChannels
,
138 int inputDevice
, int inputChannels
,
139 RtAudioFormat format
, int sampleRate
,
140 int *bufferSize
, int numberOfBuffers
)
142 rtapi_
->openStream( outputDevice
, outputChannels
, inputDevice
,
143 inputChannels
, format
, sampleRate
,
144 bufferSize
, numberOfBuffers
);
147 void RtAudio :: openStream( int outputDevice
, int outputChannels
,
148 int inputDevice
, int inputChannels
,
149 RtAudioFormat format
, int sampleRate
,
150 int *bufferSize
, int *numberOfBuffers
)
152 rtapi_
->openStream( outputDevice
, outputChannels
, inputDevice
,
153 inputChannels
, format
, sampleRate
,
154 bufferSize
, *numberOfBuffers
);
157 void RtAudio::initialize( RtAudioApi api
)
161 // First look for a compiled match to a specified API value. If one
162 // of these constructors throws an error, it will be passed up the
163 // inheritance chain.
164 #if defined(__LINUX_JACK__)
165 if ( api
== LINUX_JACK
)
166 rtapi_
= new RtApiJack();
168 #if defined(__LINUX_ALSA__)
169 if ( api
== LINUX_ALSA
)
170 rtapi_
= new RtApiAlsa();
172 #if defined(__LINUX_OSS__)
173 if ( api
== LINUX_OSS
)
174 rtapi_
= new RtApiOss();
176 #if defined(__WINDOWS_ASIO__)
177 if ( api
== WINDOWS_ASIO
)
178 rtapi_
= new RtApiAsio();
180 #if defined(__WINDOWS_DS__)
181 if ( api
== WINDOWS_DS
)
182 rtapi_
= new RtApiDs();
184 #if defined(__IRIX_AL__)
185 if ( api
== IRIX_AL
)
186 rtapi_
= new RtApiAl();
188 #if defined(__MACOSX_CORE__)
189 if ( api
== MACOSX_CORE
)
190 rtapi_
= new RtApiCore();
193 if ( rtapi_
) return;
195 // No compiled support for specified API value.
196 throw RtError( "(via rtaudio): no compiled support for specified API!", RtError::INVALID_PARAMETER
);
199 // No specified API ... search for "best" option.
201 #if defined(__LINUX_JACK__)
202 rtapi_
= new RtApiJack();
203 #elif defined(__WINDOWS_ASIO__)
204 rtapi_
= new RtApiAsio();
205 #elif defined(__IRIX_AL__)
206 rtapi_
= new RtApiAl();
207 #elif defined(__MACOSX_CORE__)
208 rtapi_
= new RtApiCore();
214 #if defined(__RTAUDIO_DEBUG__)
216 EM_log( CK_LOG_SYSTEM
, "RtAudio: no devices found for first api option (JACK, ASIO, Al, or CoreAudio)...");
221 if ( rtapi_
) return;
223 // Try second API support
226 #if defined(__LINUX_ALSA__)
227 rtapi_
= new RtApiAlsa();
228 #elif defined(__WINDOWS_DS__)
229 rtapi_
= new RtApiDs();
235 #if defined(__RTAUDIO_DEBUG__)
236 EM_log( CK_LOG_SYSTEM
, "RtAudio: no devices found for second api option (Alsa or DirectSound)...");
242 if ( rtapi_
) return;
244 // Try third API support
246 #if defined(__LINUX_OSS__)
248 rtapi_
= new RtApiOss();
250 catch (RtError
&error
) {
260 throw RtError( "(via rtaudio): no devices found for compiled audio APIs!", RtError::NO_DEVICES_FOUND
);
266 stream_
.mode
= UNINITIALIZED
;
267 stream_
.state
= STREAM_STOPPED
;
268 stream_
.apiHandle
= 0;
269 MUTEX_INITIALIZE(&stream_
.mutex
);
274 MUTEX_DESTROY(&stream_
.mutex
);
277 void RtApi :: openStream( int outputDevice
, int outputChannels
,
278 int inputDevice
, int inputChannels
,
279 RtAudioFormat format
, int sampleRate
,
280 int *bufferSize
, int *numberOfBuffers
)
282 this->openStream( outputDevice
, outputChannels
, inputDevice
,
283 inputChannels
, format
, sampleRate
,
284 bufferSize
, *numberOfBuffers
);
285 *numberOfBuffers
= stream_
.nBuffers
;
288 void RtApi :: openStream( int outputDevice
, int outputChannels
,
289 int inputDevice
, int inputChannels
,
290 RtAudioFormat format
, int sampleRate
,
291 int *bufferSize
, int numberOfBuffers
)
293 if ( stream_
.mode
!= UNINITIALIZED
) {
294 sprintf(message_
, "(via rtaudio): only one open stream allowed per class instance.");
295 error(RtError::INVALID_STREAM
);
298 if ( outputChannels
< 1 && inputChannels
< 1 ) {
299 sprintf(message_
,"(via rtaudio): one or both 'channel' parameters must be greater than zero.");
300 error(RtError::INVALID_PARAMETER
);
303 if ( formatBytes(format
) == 0 ) {
304 sprintf(message_
,"(via rtaudio): 'format' parameter value is undefined.");
305 error(RtError::INVALID_PARAMETER
);
308 if ( outputChannels
> 0 ) {
309 if (outputDevice
> nDevices_
|| outputDevice
< 0) {
310 sprintf(message_
,"(via rtaudio): 'outputDevice' parameter value (%d) is invalid.", outputDevice
);
311 error(RtError::INVALID_PARAMETER
);
315 if ( inputChannels
> 0 ) {
316 if (inputDevice
> nDevices_
|| inputDevice
< 0) {
317 sprintf(message_
,"(via rtaudio): 'inputDevice' parameter value (%d) is invalid.", inputDevice
);
318 error(RtError::INVALID_PARAMETER
);
322 std::string errorMessages
;
324 bool result
= FAILURE
;
325 int device
, defaultDevice
= 0;
328 if ( outputChannels
> 0 ) {
331 channels
= outputChannels
;
333 if ( outputDevice
== 0 ) { // Try default device first.
334 defaultDevice
= getDefaultOutputDevice();
335 device
= defaultDevice
;
338 device
= outputDevice
- 1;
340 for ( int i
=-1; i
<nDevices_
; i
++ ) {
342 if ( i
== defaultDevice
) continue;
345 if ( devices_
[device
].probed
== false ) {
346 // If the device wasn't successfully probed before, try it
348 clearDeviceInfo(&devices_
[device
]);
349 probeDeviceInfo(&devices_
[device
]);
351 if ( devices_
[device
].probed
)
352 result
= probeDeviceOpen(device
, mode
, channels
, sampleRate
,
353 format
, bufferSize
, numberOfBuffers
);
354 if ( result
== SUCCESS
) break;
355 errorMessages
.append( "... " );
356 errorMessages
.append( message_
);
357 errorMessages
.append( "\n" );
358 if ( outputDevice
> 0 ) break;
363 if ( inputChannels
> 0 && ( result
== SUCCESS
|| outputChannels
<= 0 ) ) {
366 channels
= inputChannels
;
368 if ( inputDevice
== 0 ) { // Try default device first.
369 defaultDevice
= getDefaultInputDevice();
370 device
= defaultDevice
;
373 device
= inputDevice
- 1;
375 for ( int i
=-1; i
<nDevices_
; i
++ ) {
377 if ( i
== defaultDevice
) continue;
380 if ( devices_
[device
].probed
== false ) {
381 // If the device wasn't successfully probed before, try it
383 clearDeviceInfo(&devices_
[device
]);
384 probeDeviceInfo(&devices_
[device
]);
386 if ( devices_
[device
].probed
)
387 result
= probeDeviceOpen( device
, mode
, channels
, sampleRate
,
388 format
, bufferSize
, numberOfBuffers
);
389 if ( result
== SUCCESS
) break;
390 errorMessages
.append( "... " );
391 errorMessages
.append( message_
);
392 errorMessages
.append( "\n" );
393 if ( inputDevice
> 0 ) break;
397 if ( result
== SUCCESS
)
400 // If we get here, all attempted probes failed. Close any opened
401 // devices and clear the stream structure.
402 if ( stream_
.mode
!= UNINITIALIZED
) closeStream();
404 if ( ( outputDevice
== 0 && outputChannels
> 0 )
405 || ( inputDevice
== 0 && inputChannels
> 0 ) )
406 sprintf(message_
,"(via rtaudio): no devices found for given stream parameters: \n%s",
407 errorMessages
.c_str());
409 sprintf(message_
,"(via rtaudio): unable to open specified device(s) with given stream parameters: \n%s",
410 errorMessages
.c_str());
411 error(RtError::INVALID_PARAMETER
);
416 int RtApi :: getDeviceCount(void)
418 return devices_
.size();
421 RtApi::StreamState
RtApi :: getStreamState( void ) const
423 return stream_
.state
;
426 RtAudioDeviceInfo
RtApi :: getDeviceInfo( int device
)
428 if (device
> (int) devices_
.size() || device
< 1) {
429 sprintf(message_
, "(via rtaudio): invalid device specifier (%d)...", device
);
430 error(RtError::INVALID_DEVICE
);
433 RtAudioDeviceInfo info
;
434 int deviceIndex
= device
- 1;
436 // If the device wasn't successfully probed before, try it now (or again).
437 if (devices_
[deviceIndex
].probed
== false) {
438 clearDeviceInfo(&devices_
[deviceIndex
]);
439 probeDeviceInfo(&devices_
[deviceIndex
]);
442 info
.name
.append( devices_
[deviceIndex
].name
);
443 info
.probed
= devices_
[deviceIndex
].probed
;
444 if ( info
.probed
== true ) {
445 info
.outputChannels
= devices_
[deviceIndex
].maxOutputChannels
;
446 info
.inputChannels
= devices_
[deviceIndex
].maxInputChannels
;
447 info
.duplexChannels
= devices_
[deviceIndex
].maxDuplexChannels
;
448 for (unsigned int i
=0; i
<devices_
[deviceIndex
].sampleRates
.size(); i
++)
449 info
.sampleRates
.push_back( devices_
[deviceIndex
].sampleRates
[i
] );
450 info
.nativeFormats
= devices_
[deviceIndex
].nativeFormats
;
451 if ( (deviceIndex
== getDefaultOutputDevice()) ||
452 (deviceIndex
== getDefaultInputDevice()) )
453 info
.isDefault
= true;
459 char * const RtApi :: getStreamBuffer(void)
462 return stream_
.userBuffer
;
465 int RtApi :: getDefaultInputDevice(void)
467 // Should be implemented in subclasses if appropriate.
471 int RtApi :: getDefaultOutputDevice(void)
473 // Should be implemented in subclasses if appropriate.
477 void RtApi :: closeStream(void)
479 // MUST be implemented in subclasses!
482 void RtApi :: probeDeviceInfo( RtApiDevice
*info
)
484 // MUST be implemented in subclasses!
487 bool RtApi :: probeDeviceOpen( int device
, StreamMode mode
, int channels
,
488 int sampleRate
, RtAudioFormat format
,
489 int *bufferSize
, int numberOfBuffers
)
491 // MUST be implemented in subclasses!
496 // *************************************************** //
498 // OS/API-specific methods.
500 // *************************************************** //
502 #if defined(__LINUX_OSS__)
505 #include <sys/stat.h>
506 #include <sys/types.h>
507 #include <sys/ioctl.h>
510 #include <sys/soundcard.h>
514 #define DAC_NAME "/dev/dsp"
515 #define MAX_DEVICES 16
516 #define MAX_CHANNELS 16
518 extern "C" void *ossCallbackHandler(void * ptr
);
520 RtApiOss :: RtApiOss()
524 if (nDevices_
<= 0) {
525 sprintf(message_
, "RtApiOss: no Linux OSS audio devices found!");
526 error(RtError::NO_DEVICES_FOUND
);
530 RtApiOss :: ~RtApiOss()
532 if ( stream_
.mode
!= UNINITIALIZED
)
536 void RtApiOss :: initialize(void)
538 // Count cards and devices
541 // We check /dev/dsp before probing devices. /dev/dsp is supposed to
542 // be a link to the "default" audio device, of the form /dev/dsp0,
543 // /dev/dsp1, etc... However, I've seen many cases where /dev/dsp was a
544 // real device, so we need to check for that. Also, sometimes the
545 // link is to /dev/dspx and other times just dspx. I'm not sure how
546 // the latter works, but it does.
547 char device_name
[16];
551 if (lstat(DAC_NAME
, &dspstat
) == 0) {
552 if (S_ISLNK(dspstat
.st_mode
)) {
553 i
= readlink(DAC_NAME
, device_name
, sizeof(device_name
));
555 device_name
[i
] = '\0';
556 if (i
> 8) { // check for "/dev/dspx"
557 if (!strncmp(DAC_NAME
, device_name
, 8))
558 dsplink
= atoi(&device_name
[8]);
560 else if (i
> 3) { // check for "dspx"
561 if (!strncmp("dsp", device_name
, 3))
562 dsplink
= atoi(&device_name
[3]);
566 sprintf(message_
, "RtApiOss: cannot read value of symbolic link %s.", DAC_NAME
);
567 error(RtError::SYSTEM_ERROR
);
572 sprintf(message_
, "RtApiOss: cannot stat %s.", DAC_NAME
);
573 error(RtError::SYSTEM_ERROR
);
576 // The OSS API doesn't provide a routine for determining the number
577 // of devices. Thus, we'll just pursue a brute force method. The
578 // idea is to start with /dev/dsp(0) and continue with higher device
579 // numbers until we reach MAX_DSP_DEVICES. This should tell us how
580 // many devices we have ... it is not a fullproof scheme, but hopefully
581 // it will work most of the time.
584 for (i
=-1; i
<MAX_DEVICES
; i
++) {
586 // Probe /dev/dsp first, since it is supposed to be the default device.
588 sprintf(device_name
, "%s", DAC_NAME
);
589 else if (i
== dsplink
)
590 continue; // We've aready probed this device via /dev/dsp link ... try next device.
592 sprintf(device_name
, "%s%d", DAC_NAME
, i
);
594 // First try to open the device for playback, then record mode.
595 fd
= open(device_name
, O_WRONLY
| O_NONBLOCK
);
597 // Open device for playback failed ... either busy or doesn't exist.
598 if (errno
!= EBUSY
&& errno
!= EAGAIN
) {
599 // Try to open for capture
600 fd
= open(device_name
, O_RDONLY
| O_NONBLOCK
);
602 // Open device for record failed.
603 if (errno
!= EBUSY
&& errno
!= EAGAIN
)
606 sprintf(message_
, "RtApiOss: OSS record device (%s) is busy.", device_name
);
607 error(RtError::WARNING
);
608 // still count it for now
613 sprintf(message_
, "RtApiOss: OSS playback device (%s) is busy.", device_name
);
614 error(RtError::WARNING
);
615 // still count it for now
619 if (fd
>= 0) close(fd
);
621 device
.name
.append( (const char *)device_name
, strlen(device_name
)+1);
622 devices_
.push_back(device
);
627 void RtApiOss :: probeDeviceInfo(RtApiDevice
*info
)
629 int i
, fd
, channels
, mask
;
631 // The OSS API doesn't provide a means for probing the capabilities
632 // of devices. Thus, we'll just pursue a brute force method.
634 // First try for playback
635 fd
= open(info
->name
.c_str(), O_WRONLY
| O_NONBLOCK
);
637 // Open device failed ... either busy or doesn't exist
638 if (errno
== EBUSY
|| errno
== EAGAIN
)
639 sprintf(message_
, "RtApiOss: OSS playback device (%s) is busy and cannot be probed.",
642 sprintf(message_
, "RtApiOss: OSS playback device (%s) open error.", info
->name
.c_str());
643 error(RtError::DEBUG_WARNING
);
647 // We have an open device ... see how many channels it can handle
648 for (i
=MAX_CHANNELS
; i
>0; i
--) {
650 if (ioctl(fd
, SNDCTL_DSP_CHANNELS
, &channels
) == -1) {
651 // This would normally indicate some sort of hardware error, but under ALSA's
652 // OSS emulation, it sometimes indicates an invalid channel value. Further,
653 // the returned channel value is not changed. So, we'll ignore the possible
655 continue; // try next channel number
657 // Check to see whether the device supports the requested number of channels
658 if (channels
!= i
) continue; // try next channel number
659 // If here, we found the largest working channel value
662 info
->maxOutputChannels
= i
;
664 // Now find the minimum number of channels it can handle
665 for (i
=1; i
<=info
->maxOutputChannels
; i
++) {
667 if (ioctl(fd
, SNDCTL_DSP_CHANNELS
, &channels
) == -1 || channels
!= i
)
668 continue; // try next channel number
669 // If here, we found the smallest working channel value
672 info
->minOutputChannels
= i
;
676 // Now try for capture
677 fd
= open(info
->name
.c_str(), O_RDONLY
| O_NONBLOCK
);
679 // Open device for capture failed ... either busy or doesn't exist
680 if (errno
== EBUSY
|| errno
== EAGAIN
)
681 sprintf(message_
, "RtApiOss: OSS capture device (%s) is busy and cannot be probed.",
684 sprintf(message_
, "RtApiOss: OSS capture device (%s) open error.", info
->name
.c_str());
685 error(RtError::DEBUG_WARNING
);
686 if (info
->maxOutputChannels
== 0)
687 // didn't open for playback either ... device invalid
689 goto probe_parameters
;
692 // We have the device open for capture ... see how many channels it can handle
693 for (i
=MAX_CHANNELS
; i
>0; i
--) {
695 if (ioctl(fd
, SNDCTL_DSP_CHANNELS
, &channels
) == -1 || channels
!= i
) {
696 continue; // as above
698 // If here, we found a working channel value
701 info
->maxInputChannels
= i
;
703 // Now find the minimum number of channels it can handle
704 for (i
=1; i
<=info
->maxInputChannels
; i
++) {
706 if (ioctl(fd
, SNDCTL_DSP_CHANNELS
, &channels
) == -1 || channels
!= i
)
707 continue; // try next channel number
708 // If here, we found the smallest working channel value
711 info
->minInputChannels
= i
;
714 if (info
->maxOutputChannels
== 0 && info
->maxInputChannels
== 0) {
715 sprintf(message_
, "RtApiOss: device (%s) reports zero channels for input and output.",
717 error(RtError::DEBUG_WARNING
);
721 // If device opens for both playback and capture, we determine the channels.
722 if (info
->maxOutputChannels
== 0 || info
->maxInputChannels
== 0)
723 goto probe_parameters
;
725 fd
= open(info
->name
.c_str(), O_RDWR
| O_NONBLOCK
);
727 goto probe_parameters
;
729 ioctl(fd
, SNDCTL_DSP_SETDUPLEX
, 0);
730 ioctl(fd
, SNDCTL_DSP_GETCAPS
, &mask
);
731 if (mask
& DSP_CAP_DUPLEX
) {
732 info
->hasDuplexSupport
= true;
733 // We have the device open for duplex ... see how many channels it can handle
734 for (i
=MAX_CHANNELS
; i
>0; i
--) {
736 if (ioctl(fd
, SNDCTL_DSP_CHANNELS
, &channels
) == -1 || channels
!= i
)
737 continue; // as above
738 // If here, we found a working channel value
741 info
->maxDuplexChannels
= i
;
743 // Now find the minimum number of channels it can handle
744 for (i
=1; i
<=info
->maxDuplexChannels
; i
++) {
746 if (ioctl(fd
, SNDCTL_DSP_CHANNELS
, &channels
) == -1 || channels
!= i
)
747 continue; // try next channel number
748 // If here, we found the smallest working channel value
751 info
->minDuplexChannels
= i
;
756 // At this point, we need to figure out the supported data formats
757 // and sample rates. We'll proceed by openning the device in the
758 // direction with the maximum number of channels, or playback if
759 // they are equal. This might limit our sample rate options, but so
762 if (info
->maxOutputChannels
>= info
->maxInputChannels
) {
763 fd
= open(info
->name
.c_str(), O_WRONLY
| O_NONBLOCK
);
764 channels
= info
->maxOutputChannels
;
767 fd
= open(info
->name
.c_str(), O_RDONLY
| O_NONBLOCK
);
768 channels
= info
->maxInputChannels
;
772 // We've got some sort of conflict ... abort
773 sprintf(message_
, "RtApiOss: device (%s) won't reopen during probe.",
775 error(RtError::DEBUG_WARNING
);
779 // We have an open device ... set to maximum channels.
781 if (ioctl(fd
, SNDCTL_DSP_CHANNELS
, &channels
) == -1 || channels
!= i
) {
782 // We've got some sort of conflict ... abort
784 sprintf(message_
, "RtApiOss: device (%s) won't revert to previous channel setting.",
786 error(RtError::DEBUG_WARNING
);
790 if (ioctl(fd
, SNDCTL_DSP_GETFMTS
, &mask
) == -1) {
792 sprintf(message_
, "RtApiOss: device (%s) can't get supported audio formats.",
794 error(RtError::DEBUG_WARNING
);
798 // Probe the supported data formats ... we don't care about endian-ness just yet.
800 info
->nativeFormats
= 0;
801 #if defined (AFMT_S32_BE)
802 // This format does not seem to be in the 2.4 kernel version of OSS soundcard.h
803 if (mask
& AFMT_S32_BE
) {
804 format
= AFMT_S32_BE
;
805 info
->nativeFormats
|= RTAUDIO_SINT32
;
808 #if defined (AFMT_S32_LE)
809 /* This format is not in the 2.4.4 kernel version of OSS soundcard.h */
810 if (mask
& AFMT_S32_LE
) {
811 format
= AFMT_S32_LE
;
812 info
->nativeFormats
|= RTAUDIO_SINT32
;
815 if (mask
& AFMT_S8
) {
817 info
->nativeFormats
|= RTAUDIO_SINT8
;
819 if (mask
& AFMT_S16_BE
) {
820 format
= AFMT_S16_BE
;
821 info
->nativeFormats
|= RTAUDIO_SINT16
;
823 if (mask
& AFMT_S16_LE
) {
824 format
= AFMT_S16_LE
;
825 info
->nativeFormats
|= RTAUDIO_SINT16
;
828 // Check that we have at least one supported format
829 if (info
->nativeFormats
== 0) {
831 sprintf(message_
, "RtApiOss: device (%s) data format not supported by RtAudio.",
833 error(RtError::DEBUG_WARNING
);
839 if (ioctl(fd
, SNDCTL_DSP_SETFMT
, &format
) == -1 || format
!= i
) {
841 sprintf(message_
, "RtApiOss: device (%s) error setting data format.",
843 error(RtError::DEBUG_WARNING
);
847 // Probe the supported sample rates.
848 info
->sampleRates
.clear();
849 for (unsigned int k
=0; k
<MAX_SAMPLE_RATES
; k
++) {
850 int speed
= SAMPLE_RATES
[k
];
851 if (ioctl(fd
, SNDCTL_DSP_SPEED
, &speed
) != -1 && speed
== (int)SAMPLE_RATES
[k
])
852 info
->sampleRates
.push_back(speed
);
855 if (info
->sampleRates
.size() == 0) {
857 sprintf(message_
, "RtApiOss: no supported sample rates found for device (%s).",
859 error(RtError::DEBUG_WARNING
);
863 // That's all ... close the device and return
869 bool RtApiOss :: probeDeviceOpen(int device
, StreamMode mode
, int channels
,
870 int sampleRate
, RtAudioFormat format
,
871 int *bufferSize
, int numberOfBuffers
)
873 int buffers
, buffer_bytes
, device_channels
, device_format
;
875 int *handle
= (int *) stream_
.apiHandle
;
877 const char *name
= devices_
[device
].name
.c_str();
880 fd
= open(name
, O_WRONLY
| O_NONBLOCK
);
881 else { // mode == INPUT
882 if (stream_
.mode
== OUTPUT
&& stream_
.device
[0] == device
) {
883 // We just set the same device for playback ... close and reopen for duplex (OSS only).
886 // First check that the number previously set channels is the same.
887 if (stream_
.nUserChannels
[0] != channels
) {
888 sprintf(message_
, "RtApiOss: input/output channels must be equal for OSS duplex device (%s).", name
);
891 fd
= open(name
, O_RDWR
| O_NONBLOCK
);
894 fd
= open(name
, O_RDONLY
| O_NONBLOCK
);
898 if (errno
== EBUSY
|| errno
== EAGAIN
)
899 sprintf(message_
, "RtApiOss: device (%s) is busy and cannot be opened.",
902 sprintf(message_
, "RtApiOss: device (%s) cannot be opened.", name
);
906 // Now reopen in blocking mode.
909 fd
= open(name
, O_WRONLY
| O_SYNC
);
910 else { // mode == INPUT
911 if (stream_
.mode
== OUTPUT
&& stream_
.device
[0] == device
)
912 fd
= open(name
, O_RDWR
| O_SYNC
);
914 fd
= open(name
, O_RDONLY
| O_SYNC
);
918 sprintf(message_
, "RtApiOss: device (%s) cannot be opened.", name
);
922 // Get the sample format mask
924 if (ioctl(fd
, SNDCTL_DSP_GETFMTS
, &mask
) == -1) {
926 sprintf(message_
, "RtApiOss: device (%s) can't get supported audio formats.",
931 // Determine how to set the device format.
932 stream_
.userFormat
= format
;
934 stream_
.doByteSwap
[mode
] = false;
935 if (format
== RTAUDIO_SINT8
) {
936 if (mask
& AFMT_S8
) {
937 device_format
= AFMT_S8
;
938 stream_
.deviceFormat
[mode
] = RTAUDIO_SINT8
;
941 else if (format
== RTAUDIO_SINT16
) {
942 if (mask
& AFMT_S16_NE
) {
943 device_format
= AFMT_S16_NE
;
944 stream_
.deviceFormat
[mode
] = RTAUDIO_SINT16
;
946 #if BYTE_ORDER == LITTLE_ENDIAN
947 else if (mask
& AFMT_S16_BE
) {
948 device_format
= AFMT_S16_BE
;
949 stream_
.deviceFormat
[mode
] = RTAUDIO_SINT16
;
950 stream_
.doByteSwap
[mode
] = true;
953 else if (mask
& AFMT_S16_LE
) {
954 device_format
= AFMT_S16_LE
;
955 stream_
.deviceFormat
[mode
] = RTAUDIO_SINT16
;
956 stream_
.doByteSwap
[mode
] = true;
960 #if defined (AFMT_S32_NE) && defined (AFMT_S32_LE) && defined (AFMT_S32_BE)
961 else if (format
== RTAUDIO_SINT32
) {
962 if (mask
& AFMT_S32_NE
) {
963 device_format
= AFMT_S32_NE
;
964 stream_
.deviceFormat
[mode
] = RTAUDIO_SINT32
;
966 #if BYTE_ORDER == LITTLE_ENDIAN
967 else if (mask
& AFMT_S32_BE
) {
968 device_format
= AFMT_S32_BE
;
969 stream_
.deviceFormat
[mode
] = RTAUDIO_SINT32
;
970 stream_
.doByteSwap
[mode
] = true;
973 else if (mask
& AFMT_S32_LE
) {
974 device_format
= AFMT_S32_LE
;
975 stream_
.deviceFormat
[mode
] = RTAUDIO_SINT32
;
976 stream_
.doByteSwap
[mode
] = true;
982 if (device_format
== -1) {
983 // The user requested format is not natively supported by the device.
984 if (mask
& AFMT_S16_NE
) {
985 device_format
= AFMT_S16_NE
;
986 stream_
.deviceFormat
[mode
] = RTAUDIO_SINT16
;
988 #if BYTE_ORDER == LITTLE_ENDIAN
989 else if (mask
& AFMT_S16_BE
) {
990 device_format
= AFMT_S16_BE
;
991 stream_
.deviceFormat
[mode
] = RTAUDIO_SINT16
;
992 stream_
.doByteSwap
[mode
] = true;
995 else if (mask
& AFMT_S16_LE
) {
996 device_format
= AFMT_S16_LE
;
997 stream_
.deviceFormat
[mode
] = RTAUDIO_SINT16
;
998 stream_
.doByteSwap
[mode
] = true;
1001 #if defined (AFMT_S32_NE) && defined (AFMT_S32_LE) && defined (AFMT_S32_BE)
1002 else if (mask
& AFMT_S32_NE
) {
1003 device_format
= AFMT_S32_NE
;
1004 stream_
.deviceFormat
[mode
] = RTAUDIO_SINT32
;
1006 #if BYTE_ORDER == LITTLE_ENDIAN
1007 else if (mask
& AFMT_S32_BE
) {
1008 device_format
= AFMT_S32_BE
;
1009 stream_
.deviceFormat
[mode
] = RTAUDIO_SINT32
;
1010 stream_
.doByteSwap
[mode
] = true;
1013 else if (mask
& AFMT_S32_LE
) {
1014 device_format
= AFMT_S32_LE
;
1015 stream_
.deviceFormat
[mode
] = RTAUDIO_SINT32
;
1016 stream_
.doByteSwap
[mode
] = true;
1020 else if (mask
& AFMT_S8
) {
1021 device_format
= AFMT_S8
;
1022 stream_
.deviceFormat
[mode
] = RTAUDIO_SINT8
;
1026 if (stream_
.deviceFormat
[mode
] == 0) {
1027 // This really shouldn't happen ...
1029 sprintf(message_
, "RtApiOss: device (%s) data format not supported by RtAudio.",
1034 // Determine the number of channels for this device. Note that the
1035 // channel value requested by the user might be < min_X_Channels.
1036 stream_
.nUserChannels
[mode
] = channels
;
1037 device_channels
= channels
;
1038 if (mode
== OUTPUT
) {
1039 if (channels
< devices_
[device
].minOutputChannels
)
1040 device_channels
= devices_
[device
].minOutputChannels
;
1042 else { // mode == INPUT
1043 if (stream_
.mode
== OUTPUT
&& stream_
.device
[0] == device
) {
1044 // We're doing duplex setup here.
1045 if (channels
< devices_
[device
].minDuplexChannels
)
1046 device_channels
= devices_
[device
].minDuplexChannels
;
1049 if (channels
< devices_
[device
].minInputChannels
)
1050 device_channels
= devices_
[device
].minInputChannels
;
1053 stream_
.nDeviceChannels
[mode
] = device_channels
;
1055 // Attempt to set the buffer size. According to OSS, the minimum
1056 // number of buffers is two. The supposed minimum buffer size is 16
1057 // bytes, so that will be our lower bound. The argument to this
1058 // call is in the form 0xMMMMSSSS (hex), where the buffer size (in
1059 // bytes) is given as 2^SSSS and the number of buffers as 2^MMMM.
1060 // We'll check the actual value used near the end of the setup
1062 buffer_bytes
= *bufferSize
* formatBytes(stream_
.deviceFormat
[mode
]) * device_channels
;
1063 if (buffer_bytes
< 16) buffer_bytes
= 16;
1064 buffers
= numberOfBuffers
;
1065 if (buffers
< 2) buffers
= 2;
1066 temp
= ((int) buffers
<< 16) + (int)(log10((double)buffer_bytes
)/log10(2.0));
1067 if (ioctl(fd
, SNDCTL_DSP_SETFRAGMENT
, &temp
)) {
1069 sprintf(message_
, "RtApiOss: error setting fragment size for device (%s).",
1073 stream_
.nBuffers
= buffers
;
1075 // Set the data format.
1076 temp
= device_format
;
1077 if (ioctl(fd
, SNDCTL_DSP_SETFMT
, &device_format
) == -1 || device_format
!= temp
) {
1079 sprintf(message_
, "RtApiOss: error setting data format for device (%s).",
1084 // Set the number of channels.
1085 temp
= device_channels
;
1086 if (ioctl(fd
, SNDCTL_DSP_CHANNELS
, &device_channels
) == -1 || device_channels
!= temp
) {
1088 sprintf(message_
, "RtApiOss: error setting %d channels on device (%s).",
1093 // Set the sample rate.
1096 if (ioctl(fd
, SNDCTL_DSP_SPEED
, &srate
) == -1) {
1098 sprintf(message_
, "RtApiOss: error setting sample rate = %d on device (%s).",
1103 // Verify the sample rate setup worked.
1104 if (abs(srate
- temp
) > 100) {
1106 sprintf(message_
, "RtApiOss: error ... audio device (%s) doesn't support sample rate of %d.",
1110 stream_
.sampleRate
= sampleRate
;
1112 if (ioctl(fd
, SNDCTL_DSP_GETBLKSIZE
, &buffer_bytes
) == -1) {
1114 sprintf(message_
, "RtApiOss: error getting buffer size for device (%s).",
1119 // Save buffer size (in sample frames).
1120 *bufferSize
= buffer_bytes
/ (formatBytes(stream_
.deviceFormat
[mode
]) * device_channels
);
1121 stream_
.bufferSize
= *bufferSize
;
1123 if (mode
== INPUT
&& stream_
.mode
== OUTPUT
&&
1124 stream_
.device
[0] == device
) {
1125 // We're doing duplex setup here.
1126 stream_
.deviceFormat
[0] = stream_
.deviceFormat
[1];
1127 stream_
.nDeviceChannels
[0] = device_channels
;
1130 // Allocate the stream handles if necessary and then save.
1131 if ( stream_
.apiHandle
== 0 ) {
1132 handle
= (int *) calloc(2, sizeof(int));
1133 stream_
.apiHandle
= (void *) handle
;
1138 handle
= (int *) stream_
.apiHandle
;
1142 // Set flags for buffer conversion
1143 stream_
.doConvertBuffer
[mode
] = false;
1144 if (stream_
.userFormat
!= stream_
.deviceFormat
[mode
])
1145 stream_
.doConvertBuffer
[mode
] = true;
1146 if (stream_
.nUserChannels
[mode
] < stream_
.nDeviceChannels
[mode
])
1147 stream_
.doConvertBuffer
[mode
] = true;
1149 // Allocate necessary internal buffers
1150 if ( stream_
.nUserChannels
[0] != stream_
.nUserChannels
[1] ) {
1153 if (stream_
.nUserChannels
[0] >= stream_
.nUserChannels
[1])
1154 buffer_bytes
= stream_
.nUserChannels
[0];
1156 buffer_bytes
= stream_
.nUserChannels
[1];
1158 buffer_bytes
*= *bufferSize
* formatBytes(stream_
.userFormat
);
1159 if (stream_
.userBuffer
) free(stream_
.userBuffer
);
1160 stream_
.userBuffer
= (char *) calloc(buffer_bytes
, 1);
1161 if (stream_
.userBuffer
== NULL
) {
1163 sprintf(message_
, "RtApiOss: error allocating user buffer memory (%s).",
1169 if ( stream_
.doConvertBuffer
[mode
] ) {
1172 bool makeBuffer
= true;
1173 if ( mode
== OUTPUT
)
1174 buffer_bytes
= stream_
.nDeviceChannels
[0] * formatBytes(stream_
.deviceFormat
[0]);
1175 else { // mode == INPUT
1176 buffer_bytes
= stream_
.nDeviceChannels
[1] * formatBytes(stream_
.deviceFormat
[1]);
1177 if ( stream_
.mode
== OUTPUT
&& stream_
.deviceBuffer
) {
1178 long bytes_out
= stream_
.nDeviceChannels
[0] * formatBytes(stream_
.deviceFormat
[0]);
1179 if ( buffer_bytes
< bytes_out
) makeBuffer
= false;
1184 buffer_bytes
*= *bufferSize
;
1185 if (stream_
.deviceBuffer
) free(stream_
.deviceBuffer
);
1186 stream_
.deviceBuffer
= (char *) calloc(buffer_bytes
, 1);
1187 if (stream_
.deviceBuffer
== NULL
) {
1189 sprintf(message_
, "RtApiOss: error allocating device buffer memory (%s).",
1196 stream_
.device
[mode
] = device
;
1197 stream_
.state
= STREAM_STOPPED
;
1199 if ( stream_
.mode
== OUTPUT
&& mode
== INPUT
) {
1200 stream_
.mode
= DUPLEX
;
1201 if (stream_
.device
[0] == device
)
1205 stream_
.mode
= mode
;
1207 // Setup the buffer conversion information structure.
1208 if ( stream_
.doConvertBuffer
[mode
] ) {
1209 if (mode
== INPUT
) { // convert device to user buffer
1210 stream_
.convertInfo
[mode
].inJump
= stream_
.nDeviceChannels
[1];
1211 stream_
.convertInfo
[mode
].outJump
= stream_
.nUserChannels
[1];
1212 stream_
.convertInfo
[mode
].inFormat
= stream_
.deviceFormat
[1];
1213 stream_
.convertInfo
[mode
].outFormat
= stream_
.userFormat
;
1215 else { // convert user to device buffer
1216 stream_
.convertInfo
[mode
].inJump
= stream_
.nUserChannels
[0];
1217 stream_
.convertInfo
[mode
].outJump
= stream_
.nDeviceChannels
[0];
1218 stream_
.convertInfo
[mode
].inFormat
= stream_
.userFormat
;
1219 stream_
.convertInfo
[mode
].outFormat
= stream_
.deviceFormat
[0];
1222 if ( stream_
.convertInfo
[mode
].inJump
< stream_
.convertInfo
[mode
].outJump
)
1223 stream_
.convertInfo
[mode
].channels
= stream_
.convertInfo
[mode
].inJump
;
1225 stream_
.convertInfo
[mode
].channels
= stream_
.convertInfo
[mode
].outJump
;
1227 // Set up the interleave/deinterleave offsets.
1228 if ( mode
== INPUT
&& stream_
.deInterleave
[1] ) {
1229 for (int k
=0; k
<stream_
.convertInfo
[mode
].channels
; k
++) {
1230 stream_
.convertInfo
[mode
].inOffset
.push_back( k
* stream_
.bufferSize
);
1231 stream_
.convertInfo
[mode
].outOffset
.push_back( k
);
1232 stream_
.convertInfo
[mode
].inJump
= 1;
1235 else if (mode
== OUTPUT
&& stream_
.deInterleave
[0]) {
1236 for (int k
=0; k
<stream_
.convertInfo
[mode
].channels
; k
++) {
1237 stream_
.convertInfo
[mode
].inOffset
.push_back( k
);
1238 stream_
.convertInfo
[mode
].outOffset
.push_back( k
* stream_
.bufferSize
);
1239 stream_
.convertInfo
[mode
].outJump
= 1;
1243 for (int k
=0; k
<stream_
.convertInfo
[mode
].channels
; k
++) {
1244 stream_
.convertInfo
[mode
].inOffset
.push_back( k
);
1245 stream_
.convertInfo
[mode
].outOffset
.push_back( k
);
1257 stream_
.apiHandle
= 0;
1260 if (stream_
.userBuffer
) {
1261 free(stream_
.userBuffer
);
1262 stream_
.userBuffer
= 0;
1265 error(RtError::DEBUG_WARNING
);
1269 void RtApiOss :: closeStream()
1271 // We don't want an exception to be thrown here because this
1272 // function is called by our class destructor. So, do our own
1274 if ( stream_
.mode
== UNINITIALIZED
) {
1275 sprintf(message_
, "RtApiOss::closeStream(): no open stream to close!");
1276 error(RtError::WARNING
);
1280 int *handle
= (int *) stream_
.apiHandle
;
1281 if (stream_
.state
== STREAM_RUNNING
) {
1282 if (stream_
.mode
== OUTPUT
|| stream_
.mode
== DUPLEX
)
1283 ioctl(handle
[0], SNDCTL_DSP_RESET
, 0);
1285 ioctl(handle
[1], SNDCTL_DSP_RESET
, 0);
1286 stream_
.state
= STREAM_STOPPED
;
1289 if (stream_
.callbackInfo
.usingCallback
) {
1290 stream_
.callbackInfo
.usingCallback
= false;
1291 pthread_join(stream_
.callbackInfo
.thread
, NULL
);
1295 if (handle
[0]) close(handle
[0]);
1296 if (handle
[1]) close(handle
[1]);
1298 stream_
.apiHandle
= 0;
1301 if (stream_
.userBuffer
) {
1302 free(stream_
.userBuffer
);
1303 stream_
.userBuffer
= 0;
1306 if (stream_
.deviceBuffer
) {
1307 free(stream_
.deviceBuffer
);
1308 stream_
.deviceBuffer
= 0;
1311 stream_
.mode
= UNINITIALIZED
;
1314 void RtApiOss :: startStream()
1317 if (stream_
.state
== STREAM_RUNNING
) return;
1319 MUTEX_LOCK(&stream_
.mutex
);
1321 stream_
.state
= STREAM_RUNNING
;
1323 // No need to do anything else here ... OSS automatically starts
1324 // when fed samples.
1326 MUTEX_UNLOCK(&stream_
.mutex
);
1329 void RtApiOss :: stopStream()
1332 if (stream_
.state
== STREAM_STOPPED
) return;
1334 // Change the state before the lock to improve shutdown response
1335 // when using a callback.
1336 stream_
.state
= STREAM_STOPPED
;
1337 MUTEX_LOCK(&stream_
.mutex
);
1340 int *handle
= (int *) stream_
.apiHandle
;
1341 if (stream_
.mode
== OUTPUT
|| stream_
.mode
== DUPLEX
) {
1342 err
= ioctl(handle
[0], SNDCTL_DSP_POST
, 0);
1343 //err = ioctl(handle[0], SNDCTL_DSP_SYNC, 0);
1345 sprintf(message_
, "RtApiOss: error stopping device (%s).",
1346 devices_
[stream_
.device
[0]].name
.c_str());
1347 error(RtError::DRIVER_ERROR
);
1351 err
= ioctl(handle
[1], SNDCTL_DSP_POST
, 0);
1352 //err = ioctl(handle[1], SNDCTL_DSP_SYNC, 0);
1354 sprintf(message_
, "RtApiOss: error stopping device (%s).",
1355 devices_
[stream_
.device
[1]].name
.c_str());
1356 error(RtError::DRIVER_ERROR
);
1360 MUTEX_UNLOCK(&stream_
.mutex
);
1363 void RtApiOss :: abortStream()
1368 int RtApiOss :: streamWillBlock()
1371 if (stream_
.state
== STREAM_STOPPED
) return 0;
1373 MUTEX_LOCK(&stream_
.mutex
);
1375 int bytes
= 0, channels
= 0, frames
= 0;
1376 audio_buf_info info
;
1377 int *handle
= (int *) stream_
.apiHandle
;
1378 if (stream_
.mode
== OUTPUT
|| stream_
.mode
== DUPLEX
) {
1379 ioctl(handle
[0], SNDCTL_DSP_GETOSPACE
, &info
);
1381 channels
= stream_
.nDeviceChannels
[0];
1384 if (stream_
.mode
== INPUT
|| stream_
.mode
== DUPLEX
) {
1385 ioctl(handle
[1], SNDCTL_DSP_GETISPACE
, &info
);
1386 if (stream_
.mode
== DUPLEX
) {
1387 bytes
= (bytes
< info
.bytes
) ? bytes
: info
.bytes
;
1388 channels
= stream_
.nDeviceChannels
[0];
1392 channels
= stream_
.nDeviceChannels
[1];
1396 frames
= (int) (bytes
/ (channels
* formatBytes(stream_
.deviceFormat
[0])));
1397 frames
-= stream_
.bufferSize
;
1398 if (frames
< 0) frames
= 0;
1400 MUTEX_UNLOCK(&stream_
.mutex
);
1404 void RtApiOss :: tickStream()
1409 if (stream_
.state
== STREAM_STOPPED
) {
1410 if (stream_
.callbackInfo
.usingCallback
) usleep(50000); // sleep 50 milliseconds
1413 else if (stream_
.callbackInfo
.usingCallback
) {
1414 RtAudioCallback callback
= (RtAudioCallback
) stream_
.callbackInfo
.callback
;
1415 stopStream
= callback(stream_
.userBuffer
, stream_
.bufferSize
, stream_
.callbackInfo
.userData
);
1418 MUTEX_LOCK(&stream_
.mutex
);
1420 // The state might change while waiting on a mutex.
1421 if (stream_
.state
== STREAM_STOPPED
)
1424 int result
, *handle
;
1427 RtAudioFormat format
;
1428 handle
= (int *) stream_
.apiHandle
;
1429 if (stream_
.mode
== OUTPUT
|| stream_
.mode
== DUPLEX
) {
1431 // Setup parameters and do buffer conversion if necessary.
1432 if (stream_
.doConvertBuffer
[0]) {
1433 buffer
= stream_
.deviceBuffer
;
1434 convertBuffer( buffer
, stream_
.userBuffer
, stream_
.convertInfo
[0] );
1435 samples
= stream_
.bufferSize
* stream_
.nDeviceChannels
[0];
1436 format
= stream_
.deviceFormat
[0];
1439 buffer
= stream_
.userBuffer
;
1440 samples
= stream_
.bufferSize
* stream_
.nUserChannels
[0];
1441 format
= stream_
.userFormat
;
1444 // Do byte swapping if necessary.
1445 if (stream_
.doByteSwap
[0])
1446 byteSwapBuffer(buffer
, samples
, format
);
1448 // Write samples to device.
1449 result
= write(handle
[0], buffer
, samples
* formatBytes(format
));
1452 // This could be an underrun, but the basic OSS API doesn't provide a means for determining that.
1453 sprintf(message_
, "RtApiOss: audio write error for device (%s).",
1454 devices_
[stream_
.device
[0]].name
.c_str());
1455 error(RtError::DRIVER_ERROR
);
1459 if (stream_
.mode
== INPUT
|| stream_
.mode
== DUPLEX
) {
1461 // Setup parameters.
1462 if (stream_
.doConvertBuffer
[1]) {
1463 buffer
= stream_
.deviceBuffer
;
1464 samples
= stream_
.bufferSize
* stream_
.nDeviceChannels
[1];
1465 format
= stream_
.deviceFormat
[1];
1468 buffer
= stream_
.userBuffer
;
1469 samples
= stream_
.bufferSize
* stream_
.nUserChannels
[1];
1470 format
= stream_
.userFormat
;
1473 // Read samples from device.
1474 result
= read(handle
[1], buffer
, samples
* formatBytes(format
));
1477 // This could be an overrun, but the basic OSS API doesn't provide a means for determining that.
1478 sprintf(message_
, "RtApiOss: audio read error for device (%s).",
1479 devices_
[stream_
.device
[1]].name
.c_str());
1480 error(RtError::DRIVER_ERROR
);
1483 // Do byte swapping if necessary.
1484 if (stream_
.doByteSwap
[1])
1485 byteSwapBuffer(buffer
, samples
, format
);
1487 // Do buffer conversion if necessary.
1488 if (stream_
.doConvertBuffer
[1])
1489 convertBuffer( stream_
.userBuffer
, stream_
.deviceBuffer
, stream_
.convertInfo
[1] );
1493 MUTEX_UNLOCK(&stream_
.mutex
);
1495 if (stream_
.callbackInfo
.usingCallback
&& stopStream
)
1499 void RtApiOss :: setStreamCallback(RtAudioCallback callback
, void *userData
)
1503 CallbackInfo
*info
= (CallbackInfo
*) &stream_
.callbackInfo
;
1504 if ( info
->usingCallback
) {
1505 sprintf(message_
, "RtApiOss: A callback is already set for this stream!");
1506 error(RtError::WARNING
);
1510 info
->callback
= (void *) callback
;
1511 info
->userData
= userData
;
1512 info
->usingCallback
= true;
1513 info
->object
= (void *) this;
1515 // Set the thread attributes for joinable and realtime scheduling
1516 // priority. The higher priority will only take affect if the
1517 // program is run as root or suid.
1518 pthread_attr_t attr
;
1519 pthread_attr_init(&attr
);
1520 // chuck (commented out)
1521 // pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
1522 // pthread_attr_setschedpolicy(&attr, SCHED_RR);
1524 int err
= pthread_create(&(info
->thread
), &attr
, ossCallbackHandler
, &stream_
.callbackInfo
);
1525 pthread_attr_destroy(&attr
);
1527 info
->usingCallback
= false;
1528 sprintf(message_
, "RtApiOss: error starting callback thread!");
1529 error(RtError::THREAD_ERROR
);
1533 void RtApiOss :: cancelStreamCallback()
1537 if (stream_
.callbackInfo
.usingCallback
) {
1539 if (stream_
.state
== STREAM_RUNNING
)
1542 MUTEX_LOCK(&stream_
.mutex
);
1544 stream_
.callbackInfo
.usingCallback
= false;
1545 pthread_join(stream_
.callbackInfo
.thread
, NULL
);
1546 stream_
.callbackInfo
.thread
= 0;
1547 stream_
.callbackInfo
.callback
= NULL
;
1548 stream_
.callbackInfo
.userData
= NULL
;
1550 MUTEX_UNLOCK(&stream_
.mutex
);
1554 extern "C" void *ossCallbackHandler(void *ptr
)
1556 CallbackInfo
*info
= (CallbackInfo
*) ptr
;
1557 RtApiOss
*object
= (RtApiOss
*) info
->object
;
1558 bool *usingCallback
= &info
->usingCallback
;
1560 while ( *usingCallback
) {
1561 pthread_testcancel();
1563 object
->tickStream();
1565 catch (RtError
&exception
) {
1566 EM_log( CK_LOG_SYSTEM
, "RtApiOss: callback thread error..." );
1568 EM_log( CK_LOG_INFO
, "(%s)", exception
.getMessageString() );
1569 EM_log( CK_LOG_INFO
, "closing thread..." );
1578 //******************** End of __LINUX_OSS__ *********************//
1581 #if defined(__MACOSX_CORE__)
1584 // The OS X CoreAudio API is designed to use a separate callback
1585 // procedure for each of its audio devices. A single RtAudio duplex
1586 // stream using two different devices is supported here, though it
1587 // cannot be guaranteed to always behave correctly because we cannot
1588 // synchronize these two callbacks. This same functionality can be
1589 // achieved with better synchrony by opening two separate streams for
1590 // the devices and using RtAudio blocking calls (i.e. tickStream()).
1592 // A property listener is installed for over/underrun information.
1593 // However, no functionality is currently provided to allow property
1594 // listeners to trigger user handlers because it is unclear what could
1595 // be done if a critical stream parameter (buffer size, sample rate,
1596 // device disconnect) notification arrived. The listeners entail
1597 // quite a bit of extra code and most likely, a user program wouldn't
1598 // be prepared for the result anyway.
1600 // A structure to hold various information related to the CoreAudio API
1607 pthread_cond_t condition
;
1610 :stopStream(false), xrun(false), deviceBuffer(0) {}
1613 RtApiCore :: RtApiCore()
1617 if (nDevices_
<= 0) {
1618 sprintf(message_
, "RtApiCore: no Macintosh OS-X Core Audio devices found!");
1619 error(RtError::NO_DEVICES_FOUND
);
1623 RtApiCore :: ~RtApiCore()
1625 // The subclass destructor gets called before the base class
1626 // destructor, so close an existing stream before deallocating
1627 // apiDeviceId memory.
1628 if ( stream_
.mode
!= UNINITIALIZED
) closeStream();
1630 // Free our allocated apiDeviceId memory.
1632 for ( unsigned int i
=0; i
<devices_
.size(); i
++ ) {
1633 id
= (AudioDeviceID
*) devices_
[i
].apiDeviceId
;
1638 void RtApiCore :: initialize(void)
1640 OSStatus err
= noErr
;
1642 AudioDeviceID
*deviceList
= NULL
;
1645 // Find out how many audio devices there are, if any.
1646 err
= AudioHardwareGetPropertyInfo(kAudioHardwarePropertyDevices
, &dataSize
, NULL
);
1648 sprintf(message_
, "RtApiCore: OS-X error getting device info!");
1649 error(RtError::SYSTEM_ERROR
);
1652 nDevices_
= dataSize
/ sizeof(AudioDeviceID
);
1653 if (nDevices_
== 0) return;
1655 // Make space for the devices we are about to get.
1656 deviceList
= (AudioDeviceID
*) malloc( dataSize
);
1657 if (deviceList
== NULL
) {
1658 sprintf(message_
, "RtApiCore: memory allocation error during initialization!");
1659 error(RtError::MEMORY_ERROR
);
1662 // Get the array of AudioDeviceIDs.
1663 err
= AudioHardwareGetProperty(kAudioHardwarePropertyDevices
, &dataSize
, (void *) deviceList
);
1666 sprintf(message_
, "RtApiCore: OS-X error getting device properties!");
1667 error(RtError::SYSTEM_ERROR
);
1670 // Create list of device structures and write device identifiers.
1673 for (int i
=0; i
<nDevices_
; i
++) {
1674 devices_
.push_back(device
);
1675 id
= (AudioDeviceID
*) malloc( sizeof(AudioDeviceID
) );
1676 *id
= deviceList
[i
];
1677 devices_
[i
].apiDeviceId
= (void *) id
;
1683 int RtApiCore :: getDefaultInputDevice(void)
1685 AudioDeviceID id
, *deviceId
;
1686 UInt32 dataSize
= sizeof( AudioDeviceID
);
1688 OSStatus result
= AudioHardwareGetProperty( kAudioHardwarePropertyDefaultInputDevice
,
1691 if (result
!= noErr
) {
1692 sprintf( message_
, "RtApiCore: OS-X error getting default input device." );
1693 error(RtError::WARNING
);
1697 for ( int i
=0; i
<nDevices_
; i
++ ) {
1698 deviceId
= (AudioDeviceID
*) devices_
[i
].apiDeviceId
;
1699 if ( id
== *deviceId
) return i
;
1705 int RtApiCore :: getDefaultOutputDevice(void)
1707 AudioDeviceID id
, *deviceId
;
1708 UInt32 dataSize
= sizeof( AudioDeviceID
);
1710 OSStatus result
= AudioHardwareGetProperty( kAudioHardwarePropertyDefaultOutputDevice
,
1713 if (result
!= noErr
) {
1714 sprintf( message_
, "RtApiCore: OS-X error getting default output device." );
1715 error(RtError::WARNING
);
1719 for ( int i
=0; i
<nDevices_
; i
++ ) {
1720 deviceId
= (AudioDeviceID
*) devices_
[i
].apiDeviceId
;
1721 if ( id
== *deviceId
) return i
;
1727 static bool deviceSupportsFormat( AudioDeviceID id
, bool isInput
,
1728 AudioStreamBasicDescription
*desc
, bool isDuplex
)
1730 OSStatus result
= noErr
;
1731 UInt32 dataSize
= sizeof( AudioStreamBasicDescription
);
1733 result
= AudioDeviceGetProperty( id
, 0, isInput
,
1734 kAudioDevicePropertyStreamFormatSupported
,
1737 if (result
== kAudioHardwareNoError
) {
1739 result
= AudioDeviceGetProperty( id
, 0, true,
1740 kAudioDevicePropertyStreamFormatSupported
,
1744 if (result
!= kAudioHardwareNoError
)
1753 void RtApiCore :: probeDeviceInfo( RtApiDevice
*info
)
1755 OSStatus err
= noErr
;
1757 // Get the device manufacturer and name.
1760 UInt32 dataSize
= 256;
1761 AudioDeviceID
*id
= (AudioDeviceID
*) info
->apiDeviceId
;
1762 err
= AudioDeviceGetProperty( *id
, 0, false,
1763 kAudioDevicePropertyDeviceManufacturer
,
1766 sprintf( message_
, "RtApiCore: OS-X error getting device manufacturer." );
1767 error(RtError::DEBUG_WARNING
);
1770 strncpy(fullname
, name
, 256);
1771 strcat(fullname
, ": " );
1774 err
= AudioDeviceGetProperty( *id
, 0, false,
1775 kAudioDevicePropertyDeviceName
,
1778 sprintf( message_
, "RtApiCore: OS-X error getting device name." );
1779 error(RtError::DEBUG_WARNING
);
1782 strncat(fullname
, name
, 254);
1784 info
->name
.append( (const char *)fullname
, strlen(fullname
)+1);
1786 // Get output channel information.
1787 unsigned int i
, minChannels
= 0, maxChannels
= 0, nStreams
= 0;
1788 AudioBufferList
*bufferList
= nil
;
1789 err
= AudioDeviceGetPropertyInfo( *id
, 0, false,
1790 kAudioDevicePropertyStreamConfiguration
,
1792 if (err
== noErr
&& dataSize
> 0) {
1793 bufferList
= (AudioBufferList
*) malloc( dataSize
);
1794 if (bufferList
== NULL
) {
1795 sprintf(message_
, "RtApiCore: memory allocation error!");
1796 error(RtError::DEBUG_WARNING
);
1800 err
= AudioDeviceGetProperty( *id
, 0, false,
1801 kAudioDevicePropertyStreamConfiguration
,
1802 &dataSize
, bufferList
);
1806 nStreams
= bufferList
->mNumberBuffers
;
1807 for ( i
=0; i
<nStreams
; i
++ ) {
1808 maxChannels
+= bufferList
->mBuffers
[i
].mNumberChannels
;
1809 if ( bufferList
->mBuffers
[i
].mNumberChannels
< minChannels
)
1810 minChannels
= bufferList
->mBuffers
[i
].mNumberChannels
;
1816 if (err
!= noErr
|| dataSize
<= 0) {
1817 sprintf( message_
, "RtApiCore: OS-X error getting output channels for device (%s).",
1818 info
->name
.c_str() );
1819 error(RtError::DEBUG_WARNING
);
1824 if ( maxChannels
> 0 )
1825 info
->maxOutputChannels
= maxChannels
;
1826 if ( minChannels
> 0 )
1827 info
->minOutputChannels
= minChannels
;
1830 // Get input channel information.
1832 err
= AudioDeviceGetPropertyInfo( *id
, 0, true,
1833 kAudioDevicePropertyStreamConfiguration
,
1835 if (err
== noErr
&& dataSize
> 0) {
1836 bufferList
= (AudioBufferList
*) malloc( dataSize
);
1837 if (bufferList
== NULL
) {
1838 sprintf(message_
, "RtApiCore: memory allocation error!");
1839 error(RtError::DEBUG_WARNING
);
1842 err
= AudioDeviceGetProperty( *id
, 0, true,
1843 kAudioDevicePropertyStreamConfiguration
,
1844 &dataSize
, bufferList
);
1848 nStreams
= bufferList
->mNumberBuffers
;
1849 for ( i
=0; i
<nStreams
; i
++ ) {
1850 if ( bufferList
->mBuffers
[i
].mNumberChannels
< minChannels
)
1851 minChannels
= bufferList
->mBuffers
[i
].mNumberChannels
;
1852 maxChannels
+= bufferList
->mBuffers
[i
].mNumberChannels
;
1858 if (err
!= noErr
|| dataSize
<= 0) {
1859 sprintf( message_
, "RtApiCore: OS-X error getting input channels for device (%s).",
1860 info
->name
.c_str() );
1861 error(RtError::DEBUG_WARNING
);
1866 if ( maxChannels
> 0 )
1867 info
->maxInputChannels
= maxChannels
;
1868 if ( minChannels
> 0 )
1869 info
->minInputChannels
= minChannels
;
1872 // If device opens for both playback and capture, we determine the channels.
1873 if (info
->maxOutputChannels
> 0 && info
->maxInputChannels
> 0) {
1874 info
->hasDuplexSupport
= true;
1875 info
->maxDuplexChannels
= (info
->maxOutputChannels
> info
->maxInputChannels
) ?
1876 info
->maxInputChannels
: info
->maxOutputChannels
;
1877 info
->minDuplexChannels
= (info
->minOutputChannels
> info
->minInputChannels
) ?
1878 info
->minInputChannels
: info
->minOutputChannels
;
1881 // Probe the device sample rate and data format parameters. The
1882 // core audio query mechanism is performed on a "stream"
1883 // description, which can have a variable number of channels and
1884 // apply to input or output only.
1886 // Create a stream description structure.
1887 AudioStreamBasicDescription description
;
1888 dataSize
= sizeof( AudioStreamBasicDescription
);
1889 memset(&description
, 0, sizeof(AudioStreamBasicDescription
));
1890 bool isInput
= false;
1891 if ( info
->maxOutputChannels
== 0 ) isInput
= true;
1892 bool isDuplex
= false;
1893 if ( info
->maxDuplexChannels
> 0 ) isDuplex
= true;
1895 // Determine the supported sample rates.
1896 info
->sampleRates
.clear();
1897 for (unsigned int k
=0; k
<MAX_SAMPLE_RATES
; k
++) {
1898 description
.mSampleRate
= (double) SAMPLE_RATES
[k
];
1899 if ( deviceSupportsFormat( *id
, isInput
, &description
, isDuplex
) )
1900 info
->sampleRates
.push_back( SAMPLE_RATES
[k
] );
1903 if (info
->sampleRates
.size() == 0) {
1904 sprintf( message_
, "RtApiCore: No supported sample rates found for OS-X device (%s).",
1905 info
->name
.c_str() );
1906 error(RtError::DEBUG_WARNING
);
1910 // Determine the supported data formats.
1911 info
->nativeFormats
= 0;
1912 description
.mFormatID
= kAudioFormatLinearPCM
;
1913 description
.mBitsPerChannel
= 8;
1914 description
.mFormatFlags
= kLinearPCMFormatFlagIsSignedInteger
| kLinearPCMFormatFlagIsPacked
| kLinearPCMFormatFlagIsBigEndian
;
1915 if ( deviceSupportsFormat( *id
, isInput
, &description
, isDuplex
) )
1916 info
->nativeFormats
|= RTAUDIO_SINT8
;
1918 description
.mFormatFlags
&= ~kLinearPCMFormatFlagIsBigEndian
;
1919 if ( deviceSupportsFormat( *id
, isInput
, &description
, isDuplex
) )
1920 info
->nativeFormats
|= RTAUDIO_SINT8
;
1923 description
.mBitsPerChannel
= 16;
1924 description
.mFormatFlags
|= kLinearPCMFormatFlagIsBigEndian
;
1925 if ( deviceSupportsFormat( *id
, isInput
, &description
, isDuplex
) )
1926 info
->nativeFormats
|= RTAUDIO_SINT16
;
1928 description
.mFormatFlags
&= ~kLinearPCMFormatFlagIsBigEndian
;
1929 if ( deviceSupportsFormat( *id
, isInput
, &description
, isDuplex
) )
1930 info
->nativeFormats
|= RTAUDIO_SINT16
;
1933 description
.mBitsPerChannel
= 32;
1934 description
.mFormatFlags
|= kLinearPCMFormatFlagIsBigEndian
;
1935 if ( deviceSupportsFormat( *id
, isInput
, &description
, isDuplex
) )
1936 info
->nativeFormats
|= RTAUDIO_SINT32
;
1938 description
.mFormatFlags
&= ~kLinearPCMFormatFlagIsBigEndian
;
1939 if ( deviceSupportsFormat( *id
, isInput
, &description
, isDuplex
) )
1940 info
->nativeFormats
|= RTAUDIO_SINT32
;
1943 description
.mBitsPerChannel
= 24;
1944 description
.mFormatFlags
= kLinearPCMFormatFlagIsSignedInteger
| kLinearPCMFormatFlagIsAlignedHigh
| kLinearPCMFormatFlagIsBigEndian
;
1945 if ( deviceSupportsFormat( *id
, isInput
, &description
, isDuplex
) )
1946 info
->nativeFormats
|= RTAUDIO_SINT24
;
1948 description
.mFormatFlags
&= ~kLinearPCMFormatFlagIsBigEndian
;
1949 if ( deviceSupportsFormat( *id
, isInput
, &description
, isDuplex
) )
1950 info
->nativeFormats
|= RTAUDIO_SINT24
;
1953 description
.mBitsPerChannel
= 32;
1954 description
.mFormatFlags
= kLinearPCMFormatFlagIsFloat
| kLinearPCMFormatFlagIsPacked
| kLinearPCMFormatFlagIsBigEndian
;
1955 if ( deviceSupportsFormat( *id
, isInput
, &description
, isDuplex
) )
1956 info
->nativeFormats
|= RTAUDIO_FLOAT32
;
1958 description
.mFormatFlags
&= ~kLinearPCMFormatFlagIsBigEndian
;
1959 if ( deviceSupportsFormat( *id
, isInput
, &description
, isDuplex
) )
1960 info
->nativeFormats
|= RTAUDIO_FLOAT32
;
1963 description
.mBitsPerChannel
= 64;
1964 description
.mFormatFlags
|= kLinearPCMFormatFlagIsBigEndian
;
1965 if ( deviceSupportsFormat( *id
, isInput
, &description
, isDuplex
) )
1966 info
->nativeFormats
|= RTAUDIO_FLOAT64
;
1968 description
.mFormatFlags
&= ~kLinearPCMFormatFlagIsBigEndian
;
1969 if ( deviceSupportsFormat( *id
, isInput
, &description
, isDuplex
) )
1970 info
->nativeFormats
|= RTAUDIO_FLOAT64
;
1973 // Check that we have at least one supported format.
1974 if (info
->nativeFormats
== 0) {
1975 sprintf(message_
, "RtApiCore: OS-X device (%s) data format not supported by RtAudio.",
1976 info
->name
.c_str());
1977 error(RtError::DEBUG_WARNING
);
1981 info
->probed
= true;
1984 OSStatus
callbackHandler( AudioDeviceID inDevice
,
1985 const AudioTimeStamp
* inNow
,
1986 const AudioBufferList
* inInputData
,
1987 const AudioTimeStamp
* inInputTime
,
1988 AudioBufferList
* outOutputData
,
1989 const AudioTimeStamp
* inOutputTime
,
1992 CallbackInfo
*info
= (CallbackInfo
*) infoPointer
;
1994 RtApiCore
*object
= (RtApiCore
*) info
->object
;
1996 object
->callbackEvent( inDevice
, (void *)inInputData
, (void *)outOutputData
);
1998 catch (RtError
&exception
) {
1999 EM_log( CK_LOG_SYSTEM
, "RtApiCore: callback handler error..." );
2001 EM_log( CK_LOG_INFO
, "(%s)", exception
.getMessageString() );
2003 return kAudioHardwareUnspecifiedError
;
2006 return kAudioHardwareNoError
;
2009 OSStatus
deviceListener( AudioDeviceID inDevice
,
2012 AudioDevicePropertyID propertyID
,
2013 void* handlePointer
)
2015 CoreHandle
*handle
= (CoreHandle
*) handlePointer
;
2016 if ( propertyID
== kAudioDeviceProcessorOverload
)
2020 EM_log( CK_LOG_FINEST
, "(via rtaudio): OS-X audio input overrun detected!" );
2023 EM_log( CK_LOG_FINEST
, "(via rtaudio): OS-X audio output overrun detected!" );
2026 Digitalio::m_xrun
+= 2;
2028 handle
->xrun
= true;
2031 return kAudioHardwareNoError
;
2034 bool RtApiCore :: probeDeviceOpen( int device
, StreamMode mode
, int channels
,
2035 int sampleRate
, RtAudioFormat format
,
2036 int *bufferSize
, int numberOfBuffers
)
2038 // Setup for stream mode.
2039 bool isInput
= false;
2040 AudioDeviceID id
= *((AudioDeviceID
*) devices_
[device
].apiDeviceId
);
2041 if ( mode
== INPUT
) isInput
= true;
2043 // Search for a stream which contains the desired number of channels.
2044 OSStatus err
= noErr
;
2046 unsigned int deviceChannels
, nStreams
= 0;
2047 UInt32 iChannel
= 0, iStream
= 0;
2048 AudioBufferList
*bufferList
= nil
;
2049 err
= AudioDeviceGetPropertyInfo( id
, 0, isInput
,
2050 kAudioDevicePropertyStreamConfiguration
,
2053 if (err
== noErr
&& dataSize
> 0) {
2054 bufferList
= (AudioBufferList
*) malloc( dataSize
);
2055 if (bufferList
== NULL
) {
2056 sprintf(message_
, "RtApiCore: memory allocation error in probeDeviceOpen()!");
2057 error(RtError::DEBUG_WARNING
);
2060 err
= AudioDeviceGetProperty( id
, 0, isInput
,
2061 kAudioDevicePropertyStreamConfiguration
,
2062 &dataSize
, bufferList
);
2065 stream_
.deInterleave
[mode
] = false;
2066 nStreams
= bufferList
->mNumberBuffers
;
2067 for ( iStream
=0; iStream
<nStreams
; iStream
++ ) {
2068 if ( bufferList
->mBuffers
[iStream
].mNumberChannels
>= (unsigned int) channels
) break;
2069 iChannel
+= bufferList
->mBuffers
[iStream
].mNumberChannels
;
2071 // If we didn't find a single stream above, see if we can meet
2072 // the channel specification in mono mode (i.e. using separate
2073 // non-interleaved buffers). This can only work if there are N
2074 // consecutive one-channel streams, where N is the number of
2075 // desired channels.
2077 if ( iStream
>= nStreams
&& nStreams
>= (unsigned int) channels
) {
2079 for ( iStream
=0; iStream
<nStreams
; iStream
++ ) {
2080 if ( bufferList
->mBuffers
[iStream
].mNumberChannels
== 1 )
2084 if ( counter
== channels
) {
2085 iStream
-= channels
- 1;
2086 iChannel
-= channels
- 1;
2087 stream_
.deInterleave
[mode
] = true;
2090 iChannel
+= bufferList
->mBuffers
[iStream
].mNumberChannels
;
2095 if (err
!= noErr
|| dataSize
<= 0) {
2096 if ( bufferList
) free( bufferList
);
2097 sprintf( message_
, "RtApiCore: OS-X error getting channels for device (%s).",
2098 devices_
[device
].name
.c_str() );
2099 error(RtError::DEBUG_WARNING
);
2103 if (iStream
>= nStreams
) {
2105 sprintf( message_
, "RtApiCore: unable to find OS-X audio stream on device (%s) for requested channels (%d).",
2106 devices_
[device
].name
.c_str(), channels
);
2107 error(RtError::DEBUG_WARNING
);
2111 // This is ok even for mono mode ... it gets updated later.
2112 deviceChannels
= bufferList
->mBuffers
[iStream
].mNumberChannels
;
2115 // Determine the buffer size.
2116 AudioValueRange bufferRange
;
2117 dataSize
= sizeof(AudioValueRange
);
2118 err
= AudioDeviceGetProperty( id
, 0, isInput
,
2119 kAudioDevicePropertyBufferSizeRange
,
2120 &dataSize
, &bufferRange
);
2122 sprintf( message_
, "RtApiCore: OS-X error getting buffer size range for device (%s).",
2123 devices_
[device
].name
.c_str() );
2124 error(RtError::DEBUG_WARNING
);
2128 long bufferBytes
= *bufferSize
* deviceChannels
* formatBytes(RTAUDIO_FLOAT32
);
2129 if (bufferRange
.mMinimum
> bufferBytes
) bufferBytes
= (int) bufferRange
.mMinimum
;
2130 else if (bufferRange
.mMaximum
< bufferBytes
) bufferBytes
= (int) bufferRange
.mMaximum
;
2132 // Set the buffer size. For mono mode, I'm assuming we only need to
2133 // make this setting for the first channel.
2134 UInt32 theSize
= (UInt32
) bufferBytes
;
2135 dataSize
= sizeof( UInt32
);
2136 err
= AudioDeviceSetProperty(id
, NULL
, 0, isInput
,
2137 kAudioDevicePropertyBufferSize
,
2138 dataSize
, &theSize
);
2140 sprintf( message_
, "RtApiCore: OS-X error setting the buffer size for device (%s).",
2141 devices_
[device
].name
.c_str() );
2142 error(RtError::DEBUG_WARNING
);
2146 // If attempting to setup a duplex stream, the bufferSize parameter
2147 // MUST be the same in both directions!
2148 *bufferSize
= bufferBytes
/ ( deviceChannels
* formatBytes(RTAUDIO_FLOAT32
) );
2149 if ( stream_
.mode
== OUTPUT
&& mode
== INPUT
&& *bufferSize
!= stream_
.bufferSize
) {
2150 sprintf( message_
, "RtApiCore: OS-X error setting buffer size for duplex stream on device (%s).",
2151 devices_
[device
].name
.c_str() );
2152 error(RtError::DEBUG_WARNING
);
2156 stream_
.bufferSize
= *bufferSize
;
2157 stream_
.nBuffers
= 1;
2159 // Set the stream format description. Do for each channel in mono mode.
2160 AudioStreamBasicDescription description
;
2161 dataSize
= sizeof( AudioStreamBasicDescription
);
2162 if ( stream_
.deInterleave
[mode
] ) nStreams
= channels
;
2164 for ( unsigned int i
=0; i
<nStreams
; i
++, iChannel
++ ) {
2166 err
= AudioDeviceGetProperty( id
, iChannel
, isInput
,
2167 kAudioDevicePropertyStreamFormat
,
2168 &dataSize
, &description
);
2170 sprintf( message_
, "RtApiCore: OS-X error getting stream format for device (%s).",
2171 devices_
[device
].name
.c_str() );
2172 error(RtError::DEBUG_WARNING
);
2176 // Set the sample rate and data format id.
2177 description
.mSampleRate
= (double) sampleRate
;
2178 description
.mFormatID
= kAudioFormatLinearPCM
;
2179 err
= AudioDeviceSetProperty( id
, NULL
, iChannel
, isInput
,
2180 kAudioDevicePropertyStreamFormat
,
2181 dataSize
, &description
);
2183 sprintf( message_
, "RtApiCore: OS-X error setting sample rate or data format for device (%s).",
2184 devices_
[device
].name
.c_str() );
2185 error(RtError::DEBUG_WARNING
);
2190 // Check whether we need byte-swapping (assuming OS-X host is big-endian).
2191 iChannel
-= nStreams
;
2192 err
= AudioDeviceGetProperty( id
, iChannel
, isInput
,
2193 kAudioDevicePropertyStreamFormat
,
2194 &dataSize
, &description
);
2196 sprintf( message_
, "RtApiCore: OS-X error getting stream format for device (%s).", devices_
[device
].name
.c_str() );
2197 error(RtError::DEBUG_WARNING
);
2201 stream_
.doByteSwap
[mode
] = false;
2202 if ( !description
.mFormatFlags
& kLinearPCMFormatFlagIsBigEndian
)
2203 stream_
.doByteSwap
[mode
] = true;
2205 // From the CoreAudio documentation, PCM data must be supplied as
2207 stream_
.userFormat
= format
;
2208 stream_
.deviceFormat
[mode
] = RTAUDIO_FLOAT32
;
2210 if ( stream_
.deInterleave
[mode
] ) // mono mode
2211 stream_
.nDeviceChannels
[mode
] = channels
;
2213 stream_
.nDeviceChannels
[mode
] = description
.mChannelsPerFrame
;
2214 stream_
.nUserChannels
[mode
] = channels
;
2216 // Set flags for buffer conversion.
2217 stream_
.doConvertBuffer
[mode
] = false;
2218 if (stream_
.userFormat
!= stream_
.deviceFormat
[mode
])
2219 stream_
.doConvertBuffer
[mode
] = true;
2220 if (stream_
.nUserChannels
[mode
] < stream_
.nDeviceChannels
[mode
])
2221 stream_
.doConvertBuffer
[mode
] = true;
2222 if (stream_
.nUserChannels
[mode
] > 1 && stream_
.deInterleave
[mode
])
2223 stream_
.doConvertBuffer
[mode
] = true;
2225 // Allocate our CoreHandle structure for the stream.
2227 if ( stream_
.apiHandle
== 0 ) {
2228 handle
= (CoreHandle
*) calloc(1, sizeof(CoreHandle
));
2229 if ( handle
== NULL
) {
2230 sprintf(message_
, "RtApiCore: OS-X error allocating coreHandle memory (%s).",
2231 devices_
[device
].name
.c_str());
2234 handle
->index
[0] = 0;
2235 handle
->index
[1] = 0;
2236 if ( pthread_cond_init(&handle
->condition
, NULL
) ) {
2237 sprintf(message_
, "RtApiCore: error initializing pthread condition variable (%s).",
2238 devices_
[device
].name
.c_str());
2241 stream_
.apiHandle
= (void *) handle
;
2244 handle
= (CoreHandle
*) stream_
.apiHandle
;
2245 handle
->index
[mode
] = iStream
;
2247 // Allocate necessary internal buffers.
2248 if ( stream_
.nUserChannels
[0] != stream_
.nUserChannels
[1] ) {
2251 if (stream_
.nUserChannels
[0] >= stream_
.nUserChannels
[1])
2252 buffer_bytes
= stream_
.nUserChannels
[0];
2254 buffer_bytes
= stream_
.nUserChannels
[1];
2256 buffer_bytes
*= *bufferSize
* formatBytes(stream_
.userFormat
);
2257 if (stream_
.userBuffer
) free(stream_
.userBuffer
);
2258 stream_
.userBuffer
= (char *) calloc(buffer_bytes
, 1);
2259 if (stream_
.userBuffer
== NULL
) {
2260 sprintf(message_
, "RtApiCore: OS-X error allocating user buffer memory (%s).",
2261 devices_
[device
].name
.c_str());
2266 if ( stream_
.deInterleave
[mode
] ) {
2269 bool makeBuffer
= true;
2270 if ( mode
== OUTPUT
)
2271 buffer_bytes
= stream_
.nDeviceChannels
[0] * formatBytes(stream_
.deviceFormat
[0]);
2272 else { // mode == INPUT
2273 buffer_bytes
= stream_
.nDeviceChannels
[1] * formatBytes(stream_
.deviceFormat
[1]);
2274 if ( stream_
.mode
== OUTPUT
&& stream_
.deviceBuffer
) {
2275 long bytes_out
= stream_
.nDeviceChannels
[0] * formatBytes(stream_
.deviceFormat
[0]);
2276 if ( buffer_bytes
< bytes_out
) makeBuffer
= false;
2281 buffer_bytes
*= *bufferSize
;
2282 if (stream_
.deviceBuffer
) free(stream_
.deviceBuffer
);
2283 stream_
.deviceBuffer
= (char *) calloc(buffer_bytes
, 1);
2284 if (stream_
.deviceBuffer
== NULL
) {
2285 sprintf(message_
, "RtApiCore: error allocating device buffer memory (%s).",
2286 devices_
[device
].name
.c_str());
2290 // If not de-interleaving, we point stream_.deviceBuffer to the
2291 // OS X supplied device buffer before doing any necessary data
2292 // conversions. This presents a problem if we have a duplex
2293 // stream using one device which needs de-interleaving and
2294 // another device which doesn't. So, save a pointer to our own
2295 // device buffer in the CallbackInfo structure.
2296 handle
->deviceBuffer
= stream_
.deviceBuffer
;
2300 stream_
.sampleRate
= sampleRate
;
2301 stream_
.device
[mode
] = device
;
2302 stream_
.state
= STREAM_STOPPED
;
2303 stream_
.callbackInfo
.object
= (void *) this;
2305 // Setup the buffer conversion information structure.
2306 if ( stream_
.doConvertBuffer
[mode
] ) {
2307 if (mode
== INPUT
) { // convert device to user buffer
2308 stream_
.convertInfo
[mode
].inJump
= stream_
.nDeviceChannels
[1];
2309 stream_
.convertInfo
[mode
].outJump
= stream_
.nUserChannels
[1];
2310 stream_
.convertInfo
[mode
].inFormat
= stream_
.deviceFormat
[1];
2311 stream_
.convertInfo
[mode
].outFormat
= stream_
.userFormat
;
2313 else { // convert user to device buffer
2314 stream_
.convertInfo
[mode
].inJump
= stream_
.nUserChannels
[0];
2315 stream_
.convertInfo
[mode
].outJump
= stream_
.nDeviceChannels
[0];
2316 stream_
.convertInfo
[mode
].inFormat
= stream_
.userFormat
;
2317 stream_
.convertInfo
[mode
].outFormat
= stream_
.deviceFormat
[0];
2320 if ( stream_
.convertInfo
[mode
].inJump
< stream_
.convertInfo
[mode
].outJump
)
2321 stream_
.convertInfo
[mode
].channels
= stream_
.convertInfo
[mode
].inJump
;
2323 stream_
.convertInfo
[mode
].channels
= stream_
.convertInfo
[mode
].outJump
;
2325 // Set up the interleave/deinterleave offsets.
2326 if ( mode
== INPUT
&& stream_
.deInterleave
[1] ) {
2327 for (int k
=0; k
<stream_
.convertInfo
[mode
].channels
; k
++) {
2328 stream_
.convertInfo
[mode
].inOffset
.push_back( k
* stream_
.bufferSize
);
2329 stream_
.convertInfo
[mode
].outOffset
.push_back( k
);
2330 stream_
.convertInfo
[mode
].inJump
= 1;
2333 else if (mode
== OUTPUT
&& stream_
.deInterleave
[0]) {
2334 for (int k
=0; k
<stream_
.convertInfo
[mode
].channels
; k
++) {
2335 stream_
.convertInfo
[mode
].inOffset
.push_back( k
);
2336 stream_
.convertInfo
[mode
].outOffset
.push_back( k
* stream_
.bufferSize
);
2337 stream_
.convertInfo
[mode
].outJump
= 1;
2341 for (int k
=0; k
<stream_
.convertInfo
[mode
].channels
; k
++) {
2342 stream_
.convertInfo
[mode
].inOffset
.push_back( k
);
2343 stream_
.convertInfo
[mode
].outOffset
.push_back( k
);
2348 if ( stream_
.mode
== OUTPUT
&& mode
== INPUT
&& stream_
.device
[0] == device
)
2349 // Only one callback procedure per device.
2350 stream_
.mode
= DUPLEX
;
2352 err
= AudioDeviceAddIOProc( id
, callbackHandler
, (void *) &stream_
.callbackInfo
);
2354 sprintf( message_
, "RtApiCore: OS-X error setting callback for device (%s).", devices_
[device
].name
.c_str() );
2355 error(RtError::DEBUG_WARNING
);
2358 if ( stream_
.mode
== OUTPUT
&& mode
== INPUT
)
2359 stream_
.mode
= DUPLEX
;
2361 stream_
.mode
= mode
;
2364 // Setup the device property listener for over/underload.
2365 err
= AudioDeviceAddPropertyListener( id
, iChannel
, isInput
,
2366 kAudioDeviceProcessorOverload
,
2367 deviceListener
, (void *) handle
);
2373 pthread_cond_destroy(&handle
->condition
);
2375 stream_
.apiHandle
= 0;
2378 if (stream_
.userBuffer
) {
2379 free(stream_
.userBuffer
);
2380 stream_
.userBuffer
= 0;
2383 error(RtError::DEBUG_WARNING
);
// Close an open CoreAudio stream: stop the running device(s), remove the
// registered IOProc(s), free the user/device buffers and the CoreHandle,
// and mark the stream UNINITIALIZED. Issues RtError::WARNING (not an
// exception) when no stream is open, since this runs from the destructor.
// NOTE(review): excerpt is elided — braces/intermediate lines between the
// numbered statements are not visible here.
2387 void RtApiCore :: closeStream()
2389 // We don't want an exception to be thrown here because this
2390 // function is called by our class destructor. So, do our own
2392 if ( stream_
.mode
== UNINITIALIZED
) {
2393 sprintf(message_
, "RtApiCore::closeStream(): no open stream to close!");
2394 error(RtError::WARNING
);
// Stop and unregister the output-device callback (device[0]).
2398 AudioDeviceID id
= *( (AudioDeviceID
*) devices_
[stream_
.device
[0]].apiDeviceId
);
2399 if (stream_
.mode
== OUTPUT
|| stream_
.mode
== DUPLEX
) {
2400 if (stream_
.state
== STREAM_RUNNING
)
2401 AudioDeviceStop( id
, callbackHandler
);
2402 AudioDeviceRemoveIOProc( id
, callbackHandler
);
// Same for the input device (device[1]), unless duplex on a single device
// (in which case the one IOProc above already covered it).
2405 id
= *( (AudioDeviceID
*) devices_
[stream_
.device
[1]].apiDeviceId
);
2406 if (stream_
.mode
== INPUT
|| ( stream_
.mode
== DUPLEX
&& stream_
.device
[0] != stream_
.device
[1]) ) {
2407 if (stream_
.state
== STREAM_RUNNING
)
2408 AudioDeviceStop( id
, callbackHandler
);
2409 AudioDeviceRemoveIOProc( id
, callbackHandler
);
// Release the interleave buffers allocated in probeDeviceOpen().
2412 if (stream_
.userBuffer
) {
2413 free(stream_
.userBuffer
);
2414 stream_
.userBuffer
= 0;
// deviceBuffer is only owned by us when de-interleaving was in use;
// otherwise it pointed at an OS-supplied buffer.
2417 if ( stream_
.deInterleave
[0] || stream_
.deInterleave
[1] ) {
2418 free(stream_
.deviceBuffer
);
2419 stream_
.deviceBuffer
= 0;
2422 CoreHandle
*handle
= (CoreHandle
*) stream_
.apiHandle
;
2424 // Destroy pthread condition variable and free the CoreHandle structure.
2426 pthread_cond_destroy(&handle
->condition
);
2428 stream_
.apiHandle
= 0;
2431 stream_
.mode
= UNINITIALIZED
;
// Start the CoreAudio callback procedure(s) under the stream mutex.
// For DUPLEX with distinct devices both devices are started. On an
// AudioDeviceStart failure the mutex is unlocked before raising
// RtError::DRIVER_ERROR. No-op if already running.
2434 void RtApiCore :: startStream()
2437 if (stream_
.state
== STREAM_RUNNING
) return;
2439 MUTEX_LOCK(&stream_
.mutex
);
2443 if (stream_
.mode
== OUTPUT
|| stream_
.mode
== DUPLEX
) {
2445 id
= *( (AudioDeviceID
*) devices_
[stream_
.device
[0]].apiDeviceId
);
2446 err
= AudioDeviceStart(id
, callbackHandler
);
2448 sprintf(message_
, "RtApiCore: OS-X error starting callback procedure on device (%s).",
2449 devices_
[stream_
.device
[0]].name
.c_str());
2450 MUTEX_UNLOCK(&stream_
.mutex
);
2451 error(RtError::DRIVER_ERROR
);
2455 if (stream_
.mode
== INPUT
|| ( stream_
.mode
== DUPLEX
&& stream_
.device
[0] != stream_
.device
[1]) ) {
2457 id
= *( (AudioDeviceID
*) devices_
[stream_
.device
[1]].apiDeviceId
);
2458 err
= AudioDeviceStart(id
, callbackHandler
);
// NOTE(review): this input-branch error message reads
// devices_[stream_.device[0]] although the device started was
// device[1] — presumably should be device[1]; confirm upstream.
2460 sprintf(message_
, "RtApiCore: OS-X error starting input callback procedure on device (%s).",
2461 devices_
[stream_
.device
[0]].name
.c_str());
2462 MUTEX_UNLOCK(&stream_
.mutex
);
2463 error(RtError::DRIVER_ERROR
);
2467 CoreHandle
*handle
= (CoreHandle
*) stream_
.apiHandle
;
2468 handle
->stopStream
= false;
2469 stream_
.state
= STREAM_RUNNING
;
2471 MUTEX_UNLOCK(&stream_
.mutex
);
// Stop the CoreAudio callback procedure(s). The state flips to
// STREAM_STOPPED before the mutex is taken to improve shutdown response
// when a user callback is active. DRIVER_ERROR on AudioDeviceStop failure.
2474 void RtApiCore :: stopStream()
2477 if (stream_
.state
== STREAM_STOPPED
) return;
2479 // Change the state before the lock to improve shutdown response
2480 // when using a callback.
2481 stream_
.state
= STREAM_STOPPED
;
2482 MUTEX_LOCK(&stream_
.mutex
);
2486 if (stream_
.mode
== OUTPUT
|| stream_
.mode
== DUPLEX
) {
2488 id
= *( (AudioDeviceID
*) devices_
[stream_
.device
[0]].apiDeviceId
);
2489 err
= AudioDeviceStop(id
, callbackHandler
);
2491 sprintf(message_
, "RtApiCore: OS-X error stopping callback procedure on device (%s).",
2492 devices_
[stream_
.device
[0]].name
.c_str());
2493 MUTEX_UNLOCK(&stream_
.mutex
);
2494 error(RtError::DRIVER_ERROR
);
2498 if (stream_
.mode
== INPUT
|| ( stream_
.mode
== DUPLEX
&& stream_
.device
[0] != stream_
.device
[1]) ) {
2500 id
= *( (AudioDeviceID
*) devices_
[stream_
.device
[1]].apiDeviceId
);
2501 err
= AudioDeviceStop(id
, callbackHandler
);
// NOTE(review): as in startStream(), this message names
// devices_[stream_.device[0]] but the stopped device is device[1] —
// likely a copy/paste slip; confirm.
2503 sprintf(message_
, "RtApiCore: OS-X error stopping input callback procedure on device (%s).",
2504 devices_
[stream_
.device
[0]].name
.c_str());
2505 MUTEX_UNLOCK(&stream_
.mutex
);
2506 error(RtError::DRIVER_ERROR
);
2510 MUTEX_UNLOCK(&stream_
.mutex
);
// Abort the stream. NOTE(review): the function body is not visible in
// this excerpt (original lines elided); presumably delegates to
// stopStream() — confirm against the full source.
2513 void RtApiCore :: abortStream()
// Blocking-mode tick: waits on the CoreHandle condition variable until
// callbackEvent() signals that an I/O cycle completed. Issues a WARNING
// when a user callback is registered (the two models are exclusive).
2518 void RtApiCore :: tickStream()
2522 if (stream_
.state
== STREAM_STOPPED
) return;
2524 if (stream_
.callbackInfo
.usingCallback
) {
2525 sprintf(message_
, "RtApiCore: tickStream() should not be used when a callback function is set!");
2526 error(RtError::WARNING
);
2530 CoreHandle
*handle
= (CoreHandle
*) stream_
.apiHandle
;
2532 MUTEX_LOCK(&stream_
.mutex
);
// pthread_cond_wait atomically releases the mutex while blocked and
// re-acquires it before returning.
2534 pthread_cond_wait(&handle
->condition
, &stream_
.mutex
);
2536 MUTEX_UNLOCK(&stream_
.mutex
);
// Per-cycle CoreAudio IOProc dispatcher. inData/outData are the OS-supplied
// AudioBufferLists. Invokes the user callback (output side first), then
// performs any needed format conversion, byte swapping and channel
// de-interleaving between stream_.userBuffer and the device buffers, and
// finally signals the condition variable that tickStream() waits on.
// NOTE(review): excerpt is elided — error branches/closing braces between
// numbered statements are not visible here.
2539 void RtApiCore :: callbackEvent( AudioDeviceID deviceId
, void *inData
, void *outData
)
2543 if (stream_
.state
== STREAM_STOPPED
) return;
2545 CallbackInfo
*info
= (CallbackInfo
*) &stream_
.callbackInfo
;
2546 CoreHandle
*handle
= (CoreHandle
*) stream_
.apiHandle
;
2547 AudioBufferList
*inBufferList
= (AudioBufferList
*) inData
;
2548 AudioBufferList
*outBufferList
= (AudioBufferList
*) outData
;
2550 if ( info
->usingCallback
&& handle
->stopStream
) {
2551 // Check if the stream should be stopped (via the previous user
2552 // callback return value). We stop the stream here, rather than
2553 // after the function call, so that output data can first be
2559 MUTEX_LOCK(&stream_
.mutex
);
2561 // Invoke user callback first, to get fresh output data. Don't
2562 // invoke the user callback if duplex mode AND the input/output devices
2563 // are different AND this function is called for the input device.
2564 AudioDeviceID id
= *( (AudioDeviceID
*) devices_
[stream_
.device
[0]].apiDeviceId
);
2565 if ( info
->usingCallback
&& (stream_
.mode
!= DUPLEX
|| deviceId
== id
) ) {
2566 RtAudioCallback callback
= (RtAudioCallback
) info
->callback
;
// The callback's return value requests a stop for the NEXT cycle.
2567 handle
->stopStream
= callback(stream_
.userBuffer
, stream_
.bufferSize
, info
->userData
);
2568 if ( handle
->xrun
== true ) {
2569 handle
->xrun
= false;
2570 MUTEX_UNLOCK(&stream_
.mutex
);
// --- Output side: move userBuffer contents into the OS output buffers ---
2575 if ( stream_
.mode
== OUTPUT
|| ( stream_
.mode
== DUPLEX
&& deviceId
== id
) ) {
2577 if (stream_
.doConvertBuffer
[0]) {
// When not de-interleaving, convert straight into the OS buffer;
// otherwise convert into our own staging buffer first.
2579 if ( !stream_
.deInterleave
[0] )
2580 stream_
.deviceBuffer
= (char *) outBufferList
->mBuffers
[handle
->index
[0]].mData
;
2582 stream_
.deviceBuffer
= handle
->deviceBuffer
;
2584 convertBuffer( stream_
.deviceBuffer
, stream_
.userBuffer
, stream_
.convertInfo
[0] );
2585 if ( stream_
.doByteSwap
[0] )
2586 byteSwapBuffer(stream_
.deviceBuffer
,
2587 stream_
.bufferSize
* stream_
.nDeviceChannels
[0],
2588 stream_
.deviceFormat
[0]);
// De-interleaved output: copy one mono stream per mBuffers entry,
// starting at the saved stream index for this device.
2590 if ( stream_
.deInterleave
[0] ) {
2591 int bufferBytes
= outBufferList
->mBuffers
[handle
->index
[0]].mDataByteSize
;
2592 for ( int i
=0; i
<stream_
.nDeviceChannels
[0]; i
++ ) {
2593 memcpy(outBufferList
->mBuffers
[handle
->index
[0]+i
].mData
,
2594 &stream_
.deviceBuffer
[i
*bufferBytes
], bufferBytes
);
// No conversion needed: byte-swap (if required) and copy userBuffer
// directly into the OS output buffer.
2600 if (stream_
.doByteSwap
[0])
2601 byteSwapBuffer(stream_
.userBuffer
,
2602 stream_
.bufferSize
* stream_
.nUserChannels
[0],
2603 stream_
.userFormat
);
2605 memcpy(outBufferList
->mBuffers
[handle
->index
[0]].mData
,
2607 outBufferList
->mBuffers
[handle
->index
[0]].mDataByteSize
);
// --- Input side: move the OS input buffers into userBuffer ---
2611 id
= *( (AudioDeviceID
*) devices_
[stream_
.device
[1]].apiDeviceId
);
2612 if ( stream_
.mode
== INPUT
|| ( stream_
.mode
== DUPLEX
&& deviceId
== id
) ) {
2614 if (stream_
.doConvertBuffer
[1]) {
2616 if ( stream_
.deInterleave
[1] ) {
// Gather the per-channel OS buffers into our staging buffer.
2617 stream_
.deviceBuffer
= (char *) handle
->deviceBuffer
;
2618 int bufferBytes
= inBufferList
->mBuffers
[handle
->index
[1]].mDataByteSize
;
2619 for ( int i
=0; i
=0; i
<stream_
.nDeviceChannels
[1]; i
++ ) {
2620 memcpy(&stream_
.deviceBuffer
[i
*bufferBytes
],
2621 inBufferList
->mBuffers
[handle
->index
[1]+i
].mData
, bufferBytes
);
2625 stream_
.deviceBuffer
= (char *) inBufferList
->mBuffers
[handle
->index
[1]].mData
;
2627 if ( stream_
.doByteSwap
[1] )
2628 byteSwapBuffer(stream_
.deviceBuffer
,
2629 stream_
.bufferSize
* stream_
.nDeviceChannels
[1],
2630 stream_
.deviceFormat
[1]);
2631 convertBuffer( stream_
.userBuffer
, stream_
.deviceBuffer
, stream_
.convertInfo
[1] );
// No conversion: raw copy then optional in-place byte swap.
2635 memcpy(stream_
.userBuffer
,
2636 inBufferList
->mBuffers
[handle
->index
[1]].mData
,
2637 inBufferList
->mBuffers
[handle
->index
[1]].mDataByteSize
);
2639 if (stream_
.doByteSwap
[1])
2640 byteSwapBuffer(stream_
.userBuffer
,
2641 stream_
.bufferSize
* stream_
.nUserChannels
[1],
2642 stream_
.userFormat
);
// Wake a blocking tickStream() caller once this device's cycle is done.
2646 if ( !info
->usingCallback
&& (stream_
.mode
!= DUPLEX
|| deviceId
== id
) )
2647 pthread_cond_signal(&handle
->condition
);
2649 MUTEX_UNLOCK(&stream_
.mutex
);
// Register a user callback for the stream. Issues a WARNING (and leaves the
// existing callback in place) if one is already registered.
2652 void RtApiCore :: setStreamCallback(RtAudioCallback callback
, void *userData
)
2656 if ( stream_
.callbackInfo
.usingCallback
) {
2657 sprintf(message_
, "RtApiCore: A callback is already set for this stream!");
2658 error(RtError::WARNING
);
// Stored as void* in CallbackInfo; cast back to RtAudioCallback on use.
2662 stream_
.callbackInfo
.callback
= (void *) callback
;
2663 stream_
.callbackInfo
.userData
= userData
;
2664 stream_
.callbackInfo
.usingCallback
= true;
// Deregister the user callback: under the stream mutex, clear the callback
// state and mark the stream STREAM_STOPPED. No-op when no callback is set.
// NOTE(review): excerpt elided between the running-state test (2673) and
// the lock (2676); the intervening lines are not visible here.
2667 void RtApiCore :: cancelStreamCallback()
2671 if (stream_
.callbackInfo
.usingCallback
) {
2673 if (stream_
.state
== STREAM_RUNNING
)
2676 MUTEX_LOCK(&stream_
.mutex
);
2678 stream_
.callbackInfo
.usingCallback
= false;
2679 stream_
.callbackInfo
.userData
= NULL
;
2680 stream_
.state
= STREAM_STOPPED
;
2681 stream_
.callbackInfo
.callback
= NULL
;
2683 MUTEX_UNLOCK(&stream_
.mutex
);
2688 //******************** End of __MACOSX_CORE__ *********************//
2691 #if defined(__LINUX_JACK__)
2693 // JACK is a low-latency audio server, written primarily for the
2694 // GNU/Linux operating system. It can connect a number of different
2695 // applications to an audio device, as well as allowing them to share
2696 // audio between themselves.
2698 // The JACK server must be running before RtApiJack can be instantiated.
2699 // RtAudio will report just a single "device", which is the JACK audio
2700 // server. The JACK server is typically started in a terminal as follows:
2702 // .jackd -d alsa -d hw:0
2704 // or through an interface program such as qjackctl. Many of the
2705 // parameters normally set for a stream are fixed by the JACK server
2706 // and can be specified when the JACK server is started. In
2709 // .jackd -d alsa -d hw:0 -r 44100 -p 512 -n 4
2711 // specifies a sample rate of 44100 Hz, a buffer size of 512 sample
2712 // frames, and number of buffers = 4. Once the server is running, it
2713 // is not possible to override these values. If the values are not
2714 // specified in the command-line, the JACK server uses default values.
2716 #include <jack/jack.h>
// NOTE(review): the struct declaration line itself is elided from this
// excerpt; the fields below belong to the JackHandle structure used by
// RtApiJack (see probeDeviceOpen, which calloc's and populates it).
2719 // A structure to hold various information related to the Jack API
// Connection to the JACK server for this stream.
2722 jack_client_t
*client
;
// Registered port arrays, one per direction: [0] = output, [1] = input.
2723 jack_port_t
**ports
[2];
// Condition variable used to wake blocking tickStream() callers.
2726 pthread_cond_t condition
;
// Default-constructor initializer list: no client, not open, not stopping.
2729 :client(0), clientOpen(false), stopStream(false) {}
// File-scope buffer that accumulates JACK error text (see jackerror()).
2732 std::string jackmsg
;
// JACK error hook (installed via jack_set_error_function in initialize()):
// appends the error text — including its terminating NUL, note the
// strlen(desc)+1 — to the file-scope jackmsg buffer for later reporting.
2734 static void jackerror (const char *desc
)
2737 jackmsg
.append( desc
, strlen(desc
)+1 );
// Constructor: raises RtError::NO_DEVICES_FOUND when no JACK server/device
// was discovered (nDevices_ populated elsewhere; excerpt elided).
2740 RtApiJack :: RtApiJack()
2744 if (nDevices_
<= 0) {
2745 sprintf(message_
, "RtApiJack: no Linux Jack server found or connection error (jack: %s)!",
2747 error(RtError::NO_DEVICES_FOUND
);
// Destructor: close any stream still open so JACK resources are released.
2751 RtApiJack :: ~RtApiJack()
2753 if ( stream_
.mode
!= UNINITIALIZED
) closeStream();
// Probe for a running JACK server: install the jackerror() hook, attempt to
// become a client (name "ChucK"), and if successful register the single
// "Jack Server" device entry, then close the probe client.
2756 void RtApiJack :: initialize(void)
2760 // Tell the jack server to call jackerror() when it experiences an
2761 // error. This function saves the error message for subsequent
2762 // reporting via the normal RtAudio error function.
2763 jack_set_error_function( jackerror
);
2765 // Look for jack server and try to become a client.
2766 jack_client_t
*client
;
2767 // chuck: modified client name
2768 if ( (client
= jack_client_new( "ChucK" )) == 0)
2772 // Determine the name of the device.
2773 device
.name
= "Jack Server";
2774 devices_
.push_back(device
);
2777 jack_client_close(client
);
// Fill an RtApiDevice record for the JACK "device": connect a temporary
// client, record the server sample rate, count physical ports per direction
// (JACK input ports = RtAudio output channels and vice versa), derive
// duplex limits, and map the JACK sample type to an RtAudio format.
// DEBUG_WARNING (with client closed) on any probe failure.
// NOTE(review): excerpt elided — port-array iteration loop headers and
// free(ports) lines between the numbered statements are not visible here.
2780 void RtApiJack :: probeDeviceInfo(RtApiDevice
*info
)
2782 // Look for jack server and try to become a client.
2783 jack_client_t
*client
;
2784 // chuck: modified client name
2785 if ( (client
= jack_client_new( "ChucK" )) == 0) {
2786 sprintf(message_
, "RtApiJack: error connecting to Linux Jack server in probeDeviceInfo() (jack: %s)!",
2788 error(RtError::WARNING
);
2792 // Get the current jack server sample rate.
2793 info
->sampleRates
.clear();
2794 info
->sampleRates
.push_back( jack_get_sample_rate(client
) );
2796 // Count the available ports as device channels. Jack "input ports"
2797 // equal RtAudio output channels.
2800 unsigned int nChannels
= 0;
2801 ports
= jack_get_ports( client
, NULL
, NULL
, JackPortIsInput
);
2803 port
= (char *) ports
[nChannels
];
2805 port
= (char *) ports
[++nChannels
];
2807 info
->maxOutputChannels
= nChannels
;
2808 info
->minOutputChannels
= 1;
2811 // Jack "output ports" equal RtAudio input channels.
2813 ports
= jack_get_ports( client
, NULL
, NULL
, JackPortIsOutput
);
2815 port
= (char *) ports
[nChannels
];
2817 port
= (char *) ports
[++nChannels
];
2819 info
->maxInputChannels
= nChannels
;
2820 info
->minInputChannels
= 1;
2823 if (info
->maxOutputChannels
== 0 && info
->maxInputChannels
== 0) {
2824 jack_client_close(client
);
2825 sprintf(message_
, "RtApiJack: error determining jack input/output channels!");
2826 error(RtError::DEBUG_WARNING
);
// Duplex limits are the minimum of the two directions' capabilities.
2830 if (info
->maxOutputChannels
> 0 && info
->maxInputChannels
> 0) {
2831 info
->hasDuplexSupport
= true;
2832 info
->maxDuplexChannels
= (info
->maxOutputChannels
> info
->maxInputChannels
) ?
2833 info
->maxInputChannels
: info
->maxOutputChannels
;
2834 info
->minDuplexChannels
= (info
->minOutputChannels
> info
->minInputChannels
) ?
2835 info
->minInputChannels
: info
->minOutputChannels
;
2838 // Get the jack data format type. There isn't much documentation
2839 // regarding supported data formats in jack. I'm assuming here that
2840 // the default type will always be a floating-point type, of length
2841 // equal to either 4 or 8 bytes.
2842 int sample_size
= sizeof( jack_default_audio_sample_t
);
2843 if ( sample_size
== 4 )
2844 info
->nativeFormats
= RTAUDIO_FLOAT32
;
2845 else if ( sample_size
== 8 )
2846 info
->nativeFormats
= RTAUDIO_FLOAT64
;
2848 // Check that we have a supported format
2849 if (info
->nativeFormats
== 0) {
2850 jack_client_close(client
);
2851 sprintf(message_
, "RtApiJack: error determining jack server data format!");
2852 error(RtError::DEBUG_WARNING
);
2856 jack_client_close(client
);
2857 info
->probed
= true;
// JACK process-callback trampoline: recovers the RtApiJack instance from
// the CallbackInfo pointer and forwards the frame count to callbackEvent(),
// logging (rather than propagating) any RtError thrown from it.
2860 int jackCallbackHandler(jack_nframes_t nframes
, void *infoPointer
)
2862 CallbackInfo
*info
= (CallbackInfo
*) infoPointer
;
2863 RtApiJack
*object
= (RtApiJack
*) info
->object
;
2865 object
->callbackEvent( (unsigned long) nframes
);
2867 catch (RtError
&exception
) {
2868 EM_log( CK_LOG_SYSTEM
, "RtApiJack: callback handler error..." );
2870 EM_log( CK_LOG_INFO
, "(%s)", exception
.getMessageString() );
// JACK shutdown hook (installed via jack_on_shutdown): marks the client
// handle closed, and — unless the stream was already deliberately stopped —
// treats the event as a server shutdown and closes the stream, logging any
// RtError raised while doing so.
2878 void jackShutdown(void *infoPointer
)
2880 CallbackInfo
*info
= (CallbackInfo
*) infoPointer
;
2881 JackHandle
*handle
= (JackHandle
*) info
->apiInfo
;
2882 handle
->clientOpen
= false;
2883 RtApiJack
*object
= (RtApiJack
*) info
->object
;
2885 // Check current stream state. If stopped, then we'll assume this
2886 // was called as a result of a call to RtApiJack::stopStream (the
2887 // deactivation of a client handle causes this function to be called).
2888 // If not, we'll assume the Jack server is shutting down or some
2889 // other problem occurred and we should close the stream.
2890 if ( object
->getStreamState() == RtApi::STREAM_STOPPED
) return;
2893 object
->closeStream();
2895 catch (RtError
&exception
) {
2896 EM_log( CK_LOG_SYSTEM
, "RtApiJack: jackShutdown error..." );
2898 EM_log( CK_LOG_INFO
, "(%s)", exception
.getMessageString() );
2903 EM_log( CK_LOG_SYSTEM
, "(via rtaudio): the Jack server is shutting down this client..." );
2904 EM_log( CK_LOG_SYSTEM
, "...stream stopped and closed" );
// JACK xrun hook (installed via jack_set_xrun_callback): just logs the
// overrun/underrun. Return statement elided from this excerpt.
2907 int jackXrun( void * )
2909 EM_log( CK_LOG_INFO
, "(via rtaudio): Jack audio overrun/underrun reported..." );
// Open one direction (mode = OUTPUT or INPUT) of a JACK stream: validate
// channel counts, connect (or reuse) the JACK client, adopt the server's
// fixed sample rate and buffer size, allocate the JackHandle plus user/
// device staging buffers and port-id array, install the process/xrun/
// shutdown callbacks, and build the buffer-conversion offset tables.
// The trailing statements (from 3121) are the shared error-cleanup path.
// NOTE(review): excerpt elided — error branches, braces and some
// declarations (label, buffer_bytes, jack_rate, goto targets) between the
// numbered statements are not visible here.
2913 bool RtApiJack :: probeDeviceOpen(int device
, StreamMode mode
, int channels
,
2914 int sampleRate
, RtAudioFormat format
,
2915 int *bufferSize
, int numberOfBuffers
)
2917 // Compare the jack server channels to the requested number of channels.
2918 if ( (mode
== OUTPUT
&& devices_
[device
].maxOutputChannels
< channels
) ||
2919 (mode
== INPUT
&& devices_
[device
].maxInputChannels
< channels
) ) {
2920 sprintf(message_
, "RtApiJack: the Jack server does not support requested channels!");
2921 error(RtError::DEBUG_WARNING
);
2925 JackHandle
*handle
= (JackHandle
*) stream_
.apiHandle
;
2927 // Look for jack server and try to become a client (only do once per stream).
2929 jack_client_t
*client
= 0;
2930 if ( mode
== OUTPUT
|| (mode
== INPUT
&& stream_
.mode
!= OUTPUT
) ) {
2931 // chuck: modified client names
2932 snprintf(label
, 32, "ChucK");
2933 if ( (client
= jack_client_new( (const char *) label
)) == 0) {
2934 sprintf(message_
, "RtApiJack: cannot connect to Linux Jack server in probeDeviceOpen() (jack: %s)!",
2936 error(RtError::DEBUG_WARNING
);
2941 // The handle must have been created on an earlier pass.
2942 client
= handle
->client
;
2945 // First, check the jack server sample rate.
2947 jack_rate
= (int) jack_get_sample_rate(client
);
2948 if ( sampleRate
!= jack_rate
) {
2949 jack_client_close(client
);
2950 sprintf( message_
, "RtApiJack: the requested sample rate (%d) is different than the JACK server rate (%d).",
2951 sampleRate
, jack_rate
);
2952 error(RtError::DEBUG_WARNING
);
2955 stream_
.sampleRate
= jack_rate
;
2957 // The jack server seems to support just a single floating-point
2958 // data type. Since we already checked it before, just use what we
2960 stream_
.deviceFormat
[mode
] = devices_
[device
].nativeFormats
;
2961 stream_
.userFormat
= format
;
2963 // Jack always uses non-interleaved buffers. We'll need to
2964 // de-interleave if we have more than one channel.
2965 stream_
.deInterleave
[mode
] = false;
2967 stream_
.deInterleave
[mode
] = true;
2969 // Jack always provides host byte-ordered data.
2970 stream_
.doByteSwap
[mode
] = false;
2972 // Get the buffer size. The buffer size and number of buffers
2973 // (periods) is set when the jack server is started.
2974 stream_
.bufferSize
= (int) jack_get_buffer_size(client
);
2975 *bufferSize
= stream_
.bufferSize
;
2977 stream_
.nDeviceChannels
[mode
] = channels
;
2978 stream_
.nUserChannels
[mode
] = channels
;
// Conversion is needed on a format mismatch or when de-interleaving.
2980 stream_
.doConvertBuffer
[mode
] = false;
2981 if (stream_
.userFormat
!= stream_
.deviceFormat
[mode
])
2982 stream_
.doConvertBuffer
[mode
] = true;
2983 if (stream_
.deInterleave
[mode
])
2984 stream_
.doConvertBuffer
[mode
] = true;
2986 // Allocate our JackHandle structure for the stream.
2987 if ( handle
== 0 ) {
2988 handle
= (JackHandle
*) calloc(1, sizeof(JackHandle
));
2989 if ( handle
== NULL
) {
2990 sprintf(message_
, "RtApiJack: error allocating JackHandle memory (%s).",
2991 devices_
[device
].name
.c_str());
2994 handle
->ports
[0] = 0;
2995 handle
->ports
[1] = 0;
2996 if ( pthread_cond_init(&handle
->condition
, NULL
) ) {
2997 sprintf(message_
, "RtApiJack: error initializing pthread condition variable!");
3000 stream_
.apiHandle
= (void *) handle
;
3001 handle
->client
= client
;
3002 handle
->clientOpen
= true;
3005 // Allocate necessary internal buffers.
3006 if ( stream_
.nUserChannels
[0] != stream_
.nUserChannels
[1] ) {
// Size the user buffer for the larger channel count of the two modes.
3009 if (stream_
.nUserChannels
[0] >= stream_
.nUserChannels
[1])
3010 buffer_bytes
= stream_
.nUserChannels
[0];
3012 buffer_bytes
= stream_
.nUserChannels
[1];
3014 buffer_bytes
*= *bufferSize
* formatBytes(stream_
.userFormat
);
3015 if (stream_
.userBuffer
) free(stream_
.userBuffer
);
3016 stream_
.userBuffer
= (char *) calloc(buffer_bytes
, 1);
3017 if (stream_
.userBuffer
== NULL
) {
3018 sprintf(message_
, "RtApiJack: error allocating user buffer memory (%s).",
3019 devices_
[device
].name
.c_str());
3024 if ( stream_
.doConvertBuffer
[mode
] ) {
// Reuse an existing (larger) duplex device buffer when possible.
3027 bool makeBuffer
= true;
3028 if ( mode
== OUTPUT
)
3029 buffer_bytes
= stream_
.nDeviceChannels
[0] * formatBytes(stream_
.deviceFormat
[0]);
3030 else { // mode == INPUT
3031 buffer_bytes
= stream_
.nDeviceChannels
[1] * formatBytes(stream_
.deviceFormat
[1]);
3032 if ( stream_
.mode
== OUTPUT
&& stream_
.deviceBuffer
) {
3033 long bytes_out
= stream_
.nDeviceChannels
[0] * formatBytes(stream_
.deviceFormat
[0]);
3034 if ( buffer_bytes
< bytes_out
) makeBuffer
= false;
3039 buffer_bytes
*= *bufferSize
;
3040 if (stream_
.deviceBuffer
) free(stream_
.deviceBuffer
);
3041 stream_
.deviceBuffer
= (char *) calloc(buffer_bytes
, 1);
3042 if (stream_
.deviceBuffer
== NULL
) {
3043 sprintf(message_
, "RtApiJack: error allocating device buffer memory (%s).",
3044 devices_
[device
].name
.c_str());
3050 // Allocate memory for the Jack ports (channels) identifiers.
3051 handle
->ports
[mode
] = (jack_port_t
**) malloc (sizeof (jack_port_t
*) * channels
);
3052 if ( handle
->ports
[mode
] == NULL
) {
3053 sprintf(message_
, "RtApiJack: error allocating port handle memory (%s).",
3054 devices_
[device
].name
.c_str());
3058 stream_
.device
[mode
] = device
;
3059 stream_
.state
= STREAM_STOPPED
;
3060 stream_
.callbackInfo
.usingCallback
= false;
3061 stream_
.callbackInfo
.object
= (void *) this;
3062 stream_
.callbackInfo
.apiInfo
= (void *) handle
;
3064 if ( stream_
.mode
== OUTPUT
&& mode
== INPUT
)
3065 // We had already set up the stream for output.
3066 stream_
.mode
= DUPLEX
;
3068 stream_
.mode
= mode
;
// Register the JACK callbacks; process/shutdown receive our CallbackInfo.
3069 jack_set_process_callback( handle
->client
, jackCallbackHandler
, (void *) &stream_
.callbackInfo
);
3070 jack_set_xrun_callback( handle
->client
, jackXrun
, NULL
);
3071 jack_on_shutdown( handle
->client
, jackShutdown
, (void *) &stream_
.callbackInfo
);
3074 // Setup the buffer conversion information structure.
3075 if ( stream_
.doConvertBuffer
[mode
] ) {
3076 if (mode
== INPUT
) { // convert device to user buffer
3077 stream_
.convertInfo
[mode
].inJump
= stream_
.nDeviceChannels
[1];
3078 stream_
.convertInfo
[mode
].outJump
= stream_
.nUserChannels
[1];
3079 stream_
.convertInfo
[mode
].inFormat
= stream_
.deviceFormat
[1];
3080 stream_
.convertInfo
[mode
].outFormat
= stream_
.userFormat
;
3082 else { // convert user to device buffer
3083 stream_
.convertInfo
[mode
].inJump
= stream_
.nUserChannels
[0];
3084 stream_
.convertInfo
[mode
].outJump
= stream_
.nDeviceChannels
[0];
3085 stream_
.convertInfo
[mode
].inFormat
= stream_
.userFormat
;
3086 stream_
.convertInfo
[mode
].outFormat
= stream_
.deviceFormat
[0];
3089 if ( stream_
.convertInfo
[mode
].inJump
< stream_
.convertInfo
[mode
].outJump
)
3090 stream_
.convertInfo
[mode
].channels
= stream_
.convertInfo
[mode
].inJump
;
3092 stream_
.convertInfo
[mode
].channels
= stream_
.convertInfo
[mode
].outJump
;
3094 // Set up the interleave/deinterleave offsets.
3095 if ( mode
== INPUT
&& stream_
.deInterleave
[1] ) {
3096 for (int k
=0; k
<stream_
.convertInfo
[mode
].channels
; k
++) {
3097 stream_
.convertInfo
[mode
].inOffset
.push_back( k
* stream_
.bufferSize
);
3098 stream_
.convertInfo
[mode
].outOffset
.push_back( k
);
3099 stream_
.convertInfo
[mode
].inJump
= 1;
3102 else if (mode
== OUTPUT
&& stream_
.deInterleave
[0]) {
3103 for (int k
=0; k
<stream_
.convertInfo
[mode
].channels
; k
++) {
3104 stream_
.convertInfo
[mode
].inOffset
.push_back( k
);
3105 stream_
.convertInfo
[mode
].outOffset
.push_back( k
* stream_
.bufferSize
);
3106 stream_
.convertInfo
[mode
].outJump
= 1;
3110 for (int k
=0; k
<stream_
.convertInfo
[mode
].channels
; k
++) {
3111 stream_
.convertInfo
[mode
].inOffset
.push_back( k
);
3112 stream_
.convertInfo
[mode
].outOffset
.push_back( k
);
// Error-cleanup path: tear down the condition variable, client, port
// arrays, handle and user buffer, then report DEBUG_WARNING.
3121 pthread_cond_destroy(&handle
->condition
);
3122 if ( handle
->clientOpen
== true )
3123 jack_client_close(handle
->client
);
3125 if ( handle
->ports
[0] ) free(handle
->ports
[0]);
3126 if ( handle
->ports
[1] ) free(handle
->ports
[1]);
3129 stream_
.apiHandle
= 0;
3132 if (stream_
.userBuffer
) {
3133 free(stream_
.userBuffer
);
3134 stream_
.userBuffer
= 0;
3137 error(RtError::DEBUG_WARNING
);
// Close an open JACK stream: deactivate and close the client if still open,
// free the port-id arrays, condition variable and stream buffers, and mark
// the stream UNINITIALIZED. Issues WARNING (no exception) when no stream is
// open, since this runs from the destructor.
3141 void RtApiJack :: closeStream()
3143 // We don't want an exception to be thrown here because this
3144 // function is called by our class destructor. So, do our own
3146 if ( stream_
.mode
== UNINITIALIZED
) {
3147 sprintf(message_
, "RtApiJack::closeStream(): no open stream to close!");
3148 error(RtError::WARNING
);
3152 JackHandle
*handle
= (JackHandle
*) stream_
.apiHandle
;
// clientOpen may already be false if jackShutdown() ran first.
3153 if ( handle
&& handle
->clientOpen
== true ) {
3154 if (stream_
.state
== STREAM_RUNNING
)
3155 jack_deactivate(handle
->client
);
3157 jack_client_close(handle
->client
);
3161 if ( handle
->ports
[0] ) free(handle
->ports
[0]);
3162 if ( handle
->ports
[1] ) free(handle
->ports
[1]);
3163 pthread_cond_destroy(&handle
->condition
);
3165 stream_
.apiHandle
= 0;
3168 if (stream_
.userBuffer
) {
3169 free(stream_
.userBuffer
);
3170 stream_
.userBuffer
= 0;
3173 if (stream_
.deviceBuffer
) {
3174 free(stream_
.deviceBuffer
);
3175 stream_
.deviceBuffer
= 0;
3178 stream_
.mode
= UNINITIALIZED
;
// Start the JACK stream under the stream mutex: register one port per user
// channel in each active direction, activate the client, then connect our
// ports to the first nChannels physical ports (RtAudio has no per-channel
// selection). SYSTEM_ERROR on activation/lookup/connection failure.
// NOTE(review): excerpt elided — mutex unlocks on the error paths and
// free(ports) lines between the numbered statements are not visible here.
3182 void RtApiJack :: startStream()
3185 if (stream_
.state
== STREAM_RUNNING
) return;
3187 MUTEX_LOCK(&stream_
.mutex
);
3190 JackHandle
*handle
= (JackHandle
*) stream_
.apiHandle
;
3191 if ( stream_
.mode
== OUTPUT
|| stream_
.mode
== DUPLEX
) {
3192 for ( int i
=0; i
<stream_
.nUserChannels
[0]; i
++ ) {
3193 snprintf(label
, 64, "outport %d", i
);
3194 handle
->ports
[0][i
] = jack_port_register(handle
->client
, (const char *)label
,
3195 JACK_DEFAULT_AUDIO_TYPE
, JackPortIsOutput
, 0);
3199 if ( stream_
.mode
== INPUT
|| stream_
.mode
== DUPLEX
) {
3200 for ( int i
=0; i
<stream_
.nUserChannels
[1]; i
++ ) {
3201 snprintf(label
, 64, "inport %d", i
);
3202 handle
->ports
[1][i
] = jack_port_register(handle
->client
, (const char *)label
,
3203 JACK_DEFAULT_AUDIO_TYPE
, JackPortIsInput
, 0);
3207 if (jack_activate(handle
->client
)) {
3208 sprintf(message_
, "RtApiJack: unable to activate JACK client!");
3209 error(RtError::SYSTEM_ERROR
);
3214 // Get the list of available ports.
3215 if ( stream_
.mode
== OUTPUT
|| stream_
.mode
== DUPLEX
) {
// Our outputs feed physical playback ports (JackPortIsInput from the
// server's perspective).
3216 ports
= jack_get_ports(handle
->client
, NULL
, NULL
, JackPortIsPhysical
|JackPortIsInput
);
3217 if ( ports
== NULL
) {
3218 sprintf(message_
, "RtApiJack: error determining available jack input ports!");
3219 error(RtError::SYSTEM_ERROR
);
3222 // Now make the port connections. Since RtAudio wasn't designed to
3223 // allow the user to select particular channels of a device, we'll
3224 // just open the first "nChannels" ports.
3225 for ( int i
=0; i
<stream_
.nUserChannels
[0]; i
++ ) {
3228 result
= jack_connect( handle
->client
, jack_port_name(handle
->ports
[0][i
]), ports
[i
] );
3231 sprintf(message_
, "RtApiJack: error connecting output ports!");
3232 error(RtError::SYSTEM_ERROR
);
3238 if ( stream_
.mode
== INPUT
|| stream_
.mode
== DUPLEX
) {
// Our inputs are fed by physical capture ports (JackPortIsOutput).
3239 ports
= jack_get_ports( handle
->client
, NULL
, NULL
, JackPortIsPhysical
|JackPortIsOutput
);
3240 if ( ports
== NULL
) {
3241 sprintf(message_
, "RtApiJack: error determining available jack output ports!");
3242 error(RtError::SYSTEM_ERROR
);
3245 // Now make the port connections. See note above.
3246 for ( int i
=0; i
<stream_
.nUserChannels
[1]; i
++ ) {
3249 result
= jack_connect( handle
->client
, ports
[i
], jack_port_name(handle
->ports
[1][i
]) );
3252 sprintf(message_
, "RtApiJack: error connecting input ports!");
3253 error(RtError::SYSTEM_ERROR
);
3259 handle
->stopStream
= false;
3260 stream_
.state
= STREAM_RUNNING
;
3262 MUTEX_UNLOCK(&stream_
.mutex
);
3265 void RtApiJack :: stopStream()
3268 if (stream_
.state
== STREAM_STOPPED
) return;
3270 // Change the state before the lock to improve shutdown response
3271 // when using a callback.
3272 stream_
.state
= STREAM_STOPPED
;
3273 MUTEX_LOCK(&stream_
.mutex
);
3275 JackHandle
*handle
= (JackHandle
*) stream_
.apiHandle
;
3276 jack_deactivate(handle
->client
);
3278 MUTEX_UNLOCK(&stream_
.mutex
);
3281 void RtApiJack :: abortStream()
3286 void RtApiJack :: tickStream()
3290 if (stream_
.state
== STREAM_STOPPED
) return;
3292 if (stream_
.callbackInfo
.usingCallback
) {
3293 sprintf(message_
, "RtApiJack: tickStream() should not be used when a callback function is set!");
3294 error(RtError::WARNING
);
3298 JackHandle
*handle
= (JackHandle
*) stream_
.apiHandle
;
3300 MUTEX_LOCK(&stream_
.mutex
);
3302 pthread_cond_wait(&handle
->condition
, &stream_
.mutex
);
3304 MUTEX_UNLOCK(&stream_
.mutex
);
3307 void RtApiJack :: callbackEvent( unsigned long nframes
)
3311 if (stream_
.state
== STREAM_STOPPED
) return;
3313 CallbackInfo
*info
= (CallbackInfo
*) &stream_
.callbackInfo
;
3314 JackHandle
*handle
= (JackHandle
*) stream_
.apiHandle
;
3315 if ( info
->usingCallback
&& handle
->stopStream
) {
3316 // Check if the stream should be stopped (via the previous user
3317 // callback return value). We stop the stream here, rather than
3318 // after the function call, so that output data can first be
3324 MUTEX_LOCK(&stream_
.mutex
);
3326 // Invoke user callback first, to get fresh output data.
3327 if ( info
->usingCallback
) {
3328 RtAudioCallback callback
= (RtAudioCallback
) info
->callback
;
3329 handle
->stopStream
= callback(stream_
.userBuffer
, stream_
.bufferSize
, info
->userData
);
3332 jack_default_audio_sample_t
*jackbuffer
;
3333 long bufferBytes
= nframes
* sizeof(jack_default_audio_sample_t
);
3334 if ( stream_
.mode
== OUTPUT
|| stream_
.mode
== DUPLEX
) {
3336 if (stream_
.doConvertBuffer
[0]) {
3337 convertBuffer( stream_
.deviceBuffer
, stream_
.userBuffer
, stream_
.convertInfo
[0] );
3339 for ( int i
=0; i
<stream_
.nDeviceChannels
[0]; i
++ ) {
3340 jackbuffer
= (jack_default_audio_sample_t
*) jack_port_get_buffer(handle
->ports
[0][i
],
3341 (jack_nframes_t
) nframes
);
3342 memcpy(jackbuffer
, &stream_
.deviceBuffer
[i
*bufferBytes
], bufferBytes
);
3345 else { // single channel only
3346 jackbuffer
= (jack_default_audio_sample_t
*) jack_port_get_buffer(handle
->ports
[0][0],
3347 (jack_nframes_t
) nframes
);
3348 memcpy(jackbuffer
, stream_
.userBuffer
, bufferBytes
);
3352 if ( stream_
.mode
== INPUT
|| stream_
.mode
== DUPLEX
) {
3354 if (stream_
.doConvertBuffer
[1]) {
3355 for ( int i
=0; i
<stream_
.nDeviceChannels
[1]; i
++ ) {
3356 jackbuffer
= (jack_default_audio_sample_t
*) jack_port_get_buffer(handle
->ports
[1][i
],
3357 (jack_nframes_t
) nframes
);
3358 memcpy(&stream_
.deviceBuffer
[i
*bufferBytes
], jackbuffer
, bufferBytes
);
3360 convertBuffer( stream_
.userBuffer
, stream_
.deviceBuffer
, stream_
.convertInfo
[1] );
3362 else { // single channel only
3363 jackbuffer
= (jack_default_audio_sample_t
*) jack_port_get_buffer(handle
->ports
[1][0],
3364 (jack_nframes_t
) nframes
);
3365 memcpy(stream_
.userBuffer
, jackbuffer
, bufferBytes
);
3369 if ( !info
->usingCallback
)
3370 pthread_cond_signal(&handle
->condition
);
3372 MUTEX_UNLOCK(&stream_
.mutex
);
3375 void RtApiJack :: setStreamCallback(RtAudioCallback callback
, void *userData
)
3379 if ( stream_
.callbackInfo
.usingCallback
) {
3380 sprintf(message_
, "RtApiJack: A callback is already set for this stream!");
3381 error(RtError::WARNING
);
3385 stream_
.callbackInfo
.callback
= (void *) callback
;
3386 stream_
.callbackInfo
.userData
= userData
;
3387 stream_
.callbackInfo
.usingCallback
= true;
3390 void RtApiJack :: cancelStreamCallback()
3394 if (stream_
.callbackInfo
.usingCallback
) {
3396 if (stream_
.state
== STREAM_RUNNING
)
3399 MUTEX_LOCK(&stream_
.mutex
);
3401 stream_
.callbackInfo
.usingCallback
= false;
3402 stream_
.callbackInfo
.userData
= NULL
;
3403 stream_
.state
= STREAM_STOPPED
;
3404 stream_
.callbackInfo
.callback
= NULL
;
3406 MUTEX_UNLOCK(&stream_
.mutex
);
3412 #if defined(__LINUX_ALSA__)
3414 #include <alsa/asoundlib.h>
3418 // A structure to hold various information related to the ALSA API
3421 snd_pcm_t
*handles
[2];
3426 :synchronized(false), tempBuffer(0) {}
3429 extern "C" void *alsaCallbackHandler(void * ptr
);
3431 RtApiAlsa :: RtApiAlsa()
3435 if (nDevices_
<= 0) {
3436 sprintf(message_
, "RtApiAlsa: no Linux ALSA audio devices found!");
3437 error(RtError::NO_DEVICES_FOUND
);
3441 RtApiAlsa :: ~RtApiAlsa()
3443 if ( stream_
.mode
!= UNINITIALIZED
)
3447 void RtApiAlsa :: initialize(void)
3449 int card
, subdevice
, result
;
3453 snd_ctl_card_info_t
*info
;
3454 snd_ctl_card_info_alloca(&info
);
3457 // Count cards and devices
3460 snd_card_next(&card
);
3461 while ( card
>= 0 ) {
3462 sprintf(name
, "hw:%d", card
);
3463 result
= snd_ctl_open(&handle
, name
, 0);
3465 sprintf(message_
, "RtApiAlsa: control open (%i): %s.", card
, snd_strerror(result
));
3466 error(RtError::DEBUG_WARNING
);
3469 result
= snd_ctl_card_info(handle
, info
);
3471 sprintf(message_
, "RtApiAlsa: control hardware info (%i): %s.", card
, snd_strerror(result
));
3472 error(RtError::DEBUG_WARNING
);
3475 cardId
= snd_ctl_card_info_get_id(info
);
3478 result
= snd_ctl_pcm_next_device(handle
, &subdevice
);
3480 sprintf(message_
, "RtApiAlsa: control next device (%i): %s.", card
, snd_strerror(result
));
3481 error(RtError::DEBUG_WARNING
);
3486 sprintf( name
, "hw:%d,%d", card
, subdevice
);
3487 // If a cardId exists and it contains at least one non-numeric
3488 // character, use it to identify the device. This avoids a bug
3489 // in ALSA such that a numeric string is interpreted as a device
3491 for ( unsigned int i
=0; i
<strlen(cardId
); i
++ ) {
3492 if ( !isdigit( cardId
[i
] ) ) {
3493 sprintf( name
, "hw:%s,%d", cardId
, subdevice
);
3497 device
.name
.erase();
3498 device
.name
.append( (const char *)name
, strlen(name
)+1 );
3499 devices_
.push_back(device
);
3503 snd_ctl_close(handle
);
3504 snd_card_next(&card
);
3508 void RtApiAlsa :: probeDeviceInfo(RtApiDevice
*info
)
3511 int open_mode
= SND_PCM_ASYNC
;
3514 snd_pcm_stream_t stream
;
3515 snd_pcm_info_t
*pcminfo
;
3516 snd_pcm_info_alloca(&pcminfo
);
3517 snd_pcm_hw_params_t
*params
;
3518 snd_pcm_hw_params_alloca(¶ms
);
3522 // Open the control interface for this card.
3523 strncpy( name
, info
->name
.c_str(), 64 );
3524 card
= strtok(name
, ",");
3525 err
= snd_ctl_open(&chandle
, card
, SND_CTL_NONBLOCK
);
3527 sprintf(message_
, "RtApiAlsa: control open (%s): %s.", card
, snd_strerror(err
));
3528 error(RtError::DEBUG_WARNING
);
3531 unsigned int dev
= (unsigned int) atoi( strtok(NULL
, ",") );
3533 // First try for playback
3534 stream
= SND_PCM_STREAM_PLAYBACK
;
3535 snd_pcm_info_set_device(pcminfo
, dev
);
3536 snd_pcm_info_set_subdevice(pcminfo
, 0);
3537 snd_pcm_info_set_stream(pcminfo
, stream
);
3539 if ((err
= snd_ctl_pcm_info(chandle
, pcminfo
)) < 0) {
3540 if (err
== -ENOENT
) {
3541 sprintf(message_
, "RtApiAlsa: pcm device (%s) doesn't handle output!", info
->name
.c_str());
3542 error(RtError::DEBUG_WARNING
);
3545 sprintf(message_
, "RtApiAlsa: snd_ctl_pcm_info error for device (%s) output: %s",
3546 info
->name
.c_str(), snd_strerror(err
));
3547 error(RtError::DEBUG_WARNING
);
3552 err
= snd_pcm_open(&handle
, info
->name
.c_str(), stream
, open_mode
| SND_PCM_NONBLOCK
);
3555 sprintf(message_
, "RtApiAlsa: pcm playback device (%s) is busy: %s.",
3556 info
->name
.c_str(), snd_strerror(err
));
3558 sprintf(message_
, "RtApiAlsa: pcm playback open (%s) error: %s.",
3559 info
->name
.c_str(), snd_strerror(err
));
3560 error(RtError::DEBUG_WARNING
);
3564 // We have an open device ... allocate the parameter structure.
3565 err
= snd_pcm_hw_params_any(handle
, params
);
3567 snd_pcm_close(handle
);
3568 sprintf(message_
, "RtApiAlsa: hardware probe error (%s): %s.",
3569 info
->name
.c_str(), snd_strerror(err
));
3570 error(RtError::DEBUG_WARNING
);
3574 // Get output channel information.
3576 err
= snd_pcm_hw_params_get_channels_min(params
, &value
);
3578 snd_pcm_close(handle
);
3579 sprintf(message_
, "RtApiAlsa: hardware minimum channel probe error (%s): %s.",
3580 info
->name
.c_str(), snd_strerror(err
));
3581 error(RtError::DEBUG_WARNING
);
3584 info
->minOutputChannels
= value
;
3586 err
= snd_pcm_hw_params_get_channels_max(params
, &value
);
3588 snd_pcm_close(handle
);
3589 sprintf(message_
, "RtApiAlsa: hardware maximum channel probe error (%s): %s.",
3590 info
->name
.c_str(), snd_strerror(err
));
3591 error(RtError::DEBUG_WARNING
);
3594 info
->maxOutputChannels
= value
;
3596 snd_pcm_close(handle
);
3599 // Now try for capture
3600 stream
= SND_PCM_STREAM_CAPTURE
;
3601 snd_pcm_info_set_stream(pcminfo
, stream
);
3603 err
= snd_ctl_pcm_info(chandle
, pcminfo
);
3604 snd_ctl_close(chandle
);
3606 if (err
== -ENOENT
) {
3607 sprintf(message_
, "RtApiAlsa: pcm device (%s) doesn't handle input!", info
->name
.c_str());
3608 error(RtError::DEBUG_WARNING
);
3611 sprintf(message_
, "RtApiAlsa: snd_ctl_pcm_info error for device (%s) input: %s",
3612 info
->name
.c_str(), snd_strerror(err
));
3613 error(RtError::DEBUG_WARNING
);
3615 if (info
->maxOutputChannels
== 0)
3616 // didn't open for playback either ... device invalid
3618 goto probe_parameters
;
3621 err
= snd_pcm_open(&handle
, info
->name
.c_str(), stream
, open_mode
| SND_PCM_NONBLOCK
);
3624 sprintf(message_
, "RtApiAlsa: pcm capture device (%s) is busy: %s.",
3625 info
->name
.c_str(), snd_strerror(err
));
3627 sprintf(message_
, "RtApiAlsa: pcm capture open (%s) error: %s.",
3628 info
->name
.c_str(), snd_strerror(err
));
3629 error(RtError::DEBUG_WARNING
);
3630 if (info
->maxOutputChannels
== 0)
3631 // didn't open for playback either ... device invalid
3633 goto probe_parameters
;
3636 // We have an open capture device ... allocate the parameter structure.
3637 err
= snd_pcm_hw_params_any(handle
, params
);
3639 snd_pcm_close(handle
);
3640 sprintf(message_
, "RtApiAlsa: hardware probe error (%s): %s.",
3641 info
->name
.c_str(), snd_strerror(err
));
3642 error(RtError::DEBUG_WARNING
);
3643 if (info
->maxOutputChannels
> 0)
3644 goto probe_parameters
;
3649 // Get input channel information.
3650 err
= snd_pcm_hw_params_get_channels_min(params
, &value
);
3652 snd_pcm_close(handle
);
3653 sprintf(message_
, "RtApiAlsa: hardware minimum in channel probe error (%s): %s.",
3654 info
->name
.c_str(), snd_strerror(err
));
3655 error(RtError::DEBUG_WARNING
);
3656 if (info
->maxOutputChannels
> 0)
3657 goto probe_parameters
;
3661 info
->minInputChannels
= value
;
3663 err
= snd_pcm_hw_params_get_channels_max(params
, &value
);
3665 snd_pcm_close(handle
);
3666 sprintf(message_
, "RtApiAlsa: hardware maximum in channel probe error (%s): %s.",
3667 info
->name
.c_str(), snd_strerror(err
));
3668 error(RtError::DEBUG_WARNING
);
3669 if (info
->maxOutputChannels
> 0)
3670 goto probe_parameters
;
3674 info
->maxInputChannels
= value
;
3676 snd_pcm_close(handle
);
3678 // If device opens for both playback and capture, we determine the channels.
3679 if (info
->maxOutputChannels
== 0 || info
->maxInputChannels
== 0)
3680 goto probe_parameters
;
3682 info
->hasDuplexSupport
= true;
3683 info
->maxDuplexChannels
= (info
->maxOutputChannels
> info
->maxInputChannels
) ?
3684 info
->maxInputChannels
: info
->maxOutputChannels
;
3685 info
->minDuplexChannels
= (info
->minOutputChannels
> info
->minInputChannels
) ?
3686 info
->minInputChannels
: info
->minOutputChannels
;
3689 // At this point, we just need to figure out the supported data
3690 // formats and sample rates. We'll proceed by opening the device in
3691 // the direction with the maximum number of channels, or playback if
3692 // they are equal. This might limit our sample rate options, but so
3695 if (info
->maxOutputChannels
>= info
->maxInputChannels
)
3696 stream
= SND_PCM_STREAM_PLAYBACK
;
3698 stream
= SND_PCM_STREAM_CAPTURE
;
3700 err
= snd_pcm_open(&handle
, info
->name
.c_str(), stream
, open_mode
);
3702 sprintf(message_
, "RtApiAlsa: pcm (%s) won't reopen during probe: %s.",
3703 info
->name
.c_str(), snd_strerror(err
));
3704 error(RtError::DEBUG_WARNING
);
3708 // We have an open device ... allocate the parameter structure.
3709 err
= snd_pcm_hw_params_any(handle
, params
);
3711 snd_pcm_close(handle
);
3712 sprintf(message_
, "RtApiAlsa: hardware reopen probe error (%s): %s.",
3713 info
->name
.c_str(), snd_strerror(err
));
3714 error(RtError::DEBUG_WARNING
);
3718 // Test our discrete set of sample rate values.
3720 info
->sampleRates
.clear();
3721 for (unsigned int i
=0; i
<MAX_SAMPLE_RATES
; i
++) {
3722 if (snd_pcm_hw_params_test_rate(handle
, params
, SAMPLE_RATES
[i
], dir
) == 0)
3723 info
->sampleRates
.push_back(SAMPLE_RATES
[i
]);
3725 if (info
->sampleRates
.size() == 0) {
3726 snd_pcm_close(handle
);
3727 sprintf(message_
, "RtApiAlsa: no supported sample rates found for device (%s).",
3728 info
->name
.c_str());
3729 error(RtError::DEBUG_WARNING
);
3733 // Probe the supported data formats ... we don't care about endian-ness just yet
3734 snd_pcm_format_t format
;
3735 info
->nativeFormats
= 0;
3736 format
= SND_PCM_FORMAT_S8
;
3737 if (snd_pcm_hw_params_test_format(handle
, params
, format
) == 0)
3738 info
->nativeFormats
|= RTAUDIO_SINT8
;
3739 format
= SND_PCM_FORMAT_S16
;
3740 if (snd_pcm_hw_params_test_format(handle
, params
, format
) == 0)
3741 info
->nativeFormats
|= RTAUDIO_SINT16
;
3742 format
= SND_PCM_FORMAT_S24
;
3743 if (snd_pcm_hw_params_test_format(handle
, params
, format
) == 0)
3744 info
->nativeFormats
|= RTAUDIO_SINT24
;
3745 format
= SND_PCM_FORMAT_S32
;
3746 if (snd_pcm_hw_params_test_format(handle
, params
, format
) == 0)
3747 info
->nativeFormats
|= RTAUDIO_SINT32
;
3748 format
= SND_PCM_FORMAT_FLOAT
;
3749 if (snd_pcm_hw_params_test_format(handle
, params
, format
) == 0)
3750 info
->nativeFormats
|= RTAUDIO_FLOAT32
;
3751 format
= SND_PCM_FORMAT_FLOAT64
;
3752 if (snd_pcm_hw_params_test_format(handle
, params
, format
) == 0)
3753 info
->nativeFormats
|= RTAUDIO_FLOAT64
;
3755 // Check that we have at least one supported format
3756 if (info
->nativeFormats
== 0) {
3757 snd_pcm_close(handle
);
3758 sprintf(message_
, "RtApiAlsa: pcm device (%s) data format not supported by RtAudio.",
3759 info
->name
.c_str());
3760 error(RtError::DEBUG_WARNING
);
3764 // That's all ... close the device and return
3765 snd_pcm_close(handle
);
3766 info
->probed
= true;
3770 bool RtApiAlsa :: probeDeviceOpen( int device
, StreamMode mode
, int channels
,
3771 int sampleRate
, RtAudioFormat format
,
3772 int *bufferSize
, int numberOfBuffers
)
3774 #if defined(__RTAUDIO_DEBUG__)
3776 snd_output_stdio_attach(&out
, stderr
, 0);
3779 // I'm not using the "plug" interface ... too much inconsistent behavior.
3780 const char *name
= devices_
[device
].name
.c_str();
3782 snd_pcm_stream_t alsa_stream
;
3784 alsa_stream
= SND_PCM_STREAM_PLAYBACK
;
3786 alsa_stream
= SND_PCM_STREAM_CAPTURE
;
3790 int alsa_open_mode
= SND_PCM_ASYNC
;
3791 err
= snd_pcm_open(&handle
, name
, alsa_stream
, alsa_open_mode
);
3793 sprintf(message_
,"RtApiAlsa: pcm device (%s) won't open: %s.",
3794 name
, snd_strerror(err
));
3795 error(RtError::DEBUG_WARNING
);
3799 // Fill the parameter structure.
3800 snd_pcm_hw_params_t
*hw_params
;
3801 snd_pcm_hw_params_alloca(&hw_params
);
3802 err
= snd_pcm_hw_params_any(handle
, hw_params
);
3804 snd_pcm_close(handle
);
3805 sprintf(message_
, "RtApiAlsa: error getting parameter handle (%s): %s.",
3806 name
, snd_strerror(err
));
3807 error(RtError::DEBUG_WARNING
);
3811 #if defined(__RTAUDIO_DEBUG__)
3812 fprintf(stderr
, "\nRtApiAlsa: dump hardware params just after device open:\n\n");
3813 snd_pcm_hw_params_dump(hw_params
, out
);
3816 // Set access ... try interleaved access first, then non-interleaved
3817 if ( !snd_pcm_hw_params_test_access( handle
, hw_params
, SND_PCM_ACCESS_RW_INTERLEAVED
) ) {
3818 err
= snd_pcm_hw_params_set_access(handle
, hw_params
, SND_PCM_ACCESS_RW_INTERLEAVED
);
3820 else if ( !snd_pcm_hw_params_test_access( handle
, hw_params
, SND_PCM_ACCESS_RW_NONINTERLEAVED
) ) {
3821 err
= snd_pcm_hw_params_set_access(handle
, hw_params
, SND_PCM_ACCESS_RW_NONINTERLEAVED
);
3822 stream_
.deInterleave
[mode
] = true;
3825 snd_pcm_close(handle
);
3826 sprintf(message_
, "RtApiAlsa: device (%s) access not supported by RtAudio.", name
);
3827 error(RtError::DEBUG_WARNING
);
3832 snd_pcm_close(handle
);
3833 sprintf(message_
, "RtApiAlsa: error setting access ( (%s): %s.", name
, snd_strerror(err
));
3834 error(RtError::DEBUG_WARNING
);
3838 // Determine how to set the device format.
3839 stream_
.userFormat
= format
;
3840 snd_pcm_format_t device_format
= SND_PCM_FORMAT_UNKNOWN
;
3842 if (format
== RTAUDIO_SINT8
)
3843 device_format
= SND_PCM_FORMAT_S8
;
3844 else if (format
== RTAUDIO_SINT16
)
3845 device_format
= SND_PCM_FORMAT_S16
;
3846 else if (format
== RTAUDIO_SINT24
)
3847 device_format
= SND_PCM_FORMAT_S24
;
3848 else if (format
== RTAUDIO_SINT32
)
3849 device_format
= SND_PCM_FORMAT_S32
;
3850 else if (format
== RTAUDIO_FLOAT32
)
3851 device_format
= SND_PCM_FORMAT_FLOAT
;
3852 else if (format
== RTAUDIO_FLOAT64
)
3853 device_format
= SND_PCM_FORMAT_FLOAT64
;
3855 if (snd_pcm_hw_params_test_format(handle
, hw_params
, device_format
) == 0) {
3856 stream_
.deviceFormat
[mode
] = format
;
3860 // The user requested format is not natively supported by the device.
3861 device_format
= SND_PCM_FORMAT_FLOAT64
;
3862 if (snd_pcm_hw_params_test_format(handle
, hw_params
, device_format
) == 0) {
3863 stream_
.deviceFormat
[mode
] = RTAUDIO_FLOAT64
;
3867 device_format
= SND_PCM_FORMAT_FLOAT
;
3868 if (snd_pcm_hw_params_test_format(handle
, hw_params
, device_format
) == 0) {
3869 stream_
.deviceFormat
[mode
] = RTAUDIO_FLOAT32
;
3873 device_format
= SND_PCM_FORMAT_S32
;
3874 if (snd_pcm_hw_params_test_format(handle
, hw_params
, device_format
) == 0) {
3875 stream_
.deviceFormat
[mode
] = RTAUDIO_SINT32
;
3879 device_format
= SND_PCM_FORMAT_S24
;
3880 if (snd_pcm_hw_params_test_format(handle
, hw_params
, device_format
) == 0) {
3881 stream_
.deviceFormat
[mode
] = RTAUDIO_SINT24
;
3885 device_format
= SND_PCM_FORMAT_S16
;
3886 if (snd_pcm_hw_params_test_format(handle
, hw_params
, device_format
) == 0) {
3887 stream_
.deviceFormat
[mode
] = RTAUDIO_SINT16
;
3891 device_format
= SND_PCM_FORMAT_S8
;
3892 if (snd_pcm_hw_params_test_format(handle
, hw_params
, device_format
) == 0) {
3893 stream_
.deviceFormat
[mode
] = RTAUDIO_SINT8
;
3897 // If we get here, no supported format was found.
3898 sprintf(message_
,"RtApiAlsa: pcm device (%s) data format not supported by RtAudio.", name
);
3899 snd_pcm_close(handle
);
3900 error(RtError::DEBUG_WARNING
);
3904 err
= snd_pcm_hw_params_set_format(handle
, hw_params
, device_format
);
3906 snd_pcm_close(handle
);
3907 sprintf(message_
, "RtApiAlsa: error setting format (%s): %s.",
3908 name
, snd_strerror(err
));
3909 error(RtError::DEBUG_WARNING
);
3913 // Determine whether byte-swaping is necessary.
3914 stream_
.doByteSwap
[mode
] = false;
3915 if (device_format
!= SND_PCM_FORMAT_S8
) {
3916 err
= snd_pcm_format_cpu_endian(device_format
);
3918 stream_
.doByteSwap
[mode
] = true;
3920 snd_pcm_close(handle
);
3921 sprintf(message_
, "RtApiAlsa: error getting format endian-ness (%s): %s.",
3922 name
, snd_strerror(err
));
3923 error(RtError::DEBUG_WARNING
);
3928 // Set the sample rate.
3929 err
= snd_pcm_hw_params_set_rate(handle
, hw_params
, (unsigned int)sampleRate
, 0);
3931 snd_pcm_close(handle
);
3932 sprintf(message_
, "RtApiAlsa: error setting sample rate (%d) on device (%s): %s.",
3933 sampleRate
, name
, snd_strerror(err
));
3934 error(RtError::DEBUG_WARNING
);
3938 // Determine the number of channels for this device. We support a possible
3939 // minimum device channel number > than the value requested by the user.
3940 stream_
.nUserChannels
[mode
] = channels
;
3942 err
= snd_pcm_hw_params_get_channels_max(hw_params
, &value
);
3943 int device_channels
= value
;
3944 if (err
< 0 || device_channels
< channels
) {
3945 snd_pcm_close(handle
);
3946 sprintf(message_
, "RtApiAlsa: channels (%d) not supported by device (%s).",
3948 error(RtError::DEBUG_WARNING
);
3952 err
= snd_pcm_hw_params_get_channels_min(hw_params
, &value
);
3954 snd_pcm_close(handle
);
3955 sprintf(message_
, "RtApiAlsa: error getting min channels count on device (%s).", name
);
3956 error(RtError::DEBUG_WARNING
);
3959 device_channels
= value
;
3960 if (device_channels
< channels
) device_channels
= channels
;
3961 stream_
.nDeviceChannels
[mode
] = device_channels
;
3963 // Set the device channels.
3964 err
= snd_pcm_hw_params_set_channels(handle
, hw_params
, device_channels
);
3966 snd_pcm_close(handle
);
3967 sprintf(message_
, "RtApiAlsa: error setting channels (%d) on device (%s): %s.",
3968 device_channels
, name
, snd_strerror(err
));
3969 error(RtError::DEBUG_WARNING
);
3973 // Set the buffer number, which in ALSA is referred to as the "period".
3975 unsigned int periods
= numberOfBuffers
;
3976 // Even though the hardware might allow 1 buffer, it won't work reliably.
3977 if (periods
< 2) periods
= 2;
3978 err
= snd_pcm_hw_params_set_periods_near(handle
, hw_params
, &periods
, &dir
);
3980 snd_pcm_close(handle
);
3981 sprintf(message_
, "RtApiAlsa: error setting periods (%s): %s.",
3982 name
, snd_strerror(err
));
3983 error(RtError::DEBUG_WARNING
);
3987 // Set the buffer (or period) size.
3988 snd_pcm_uframes_t period_size
= *bufferSize
;
3989 err
= snd_pcm_hw_params_set_period_size_near(handle
, hw_params
, &period_size
, &dir
);
3991 snd_pcm_close(handle
);
3992 sprintf(message_
, "RtApiAlsa: error setting period size (%s): %s.",
3993 name
, snd_strerror(err
));
3994 error(RtError::DEBUG_WARNING
);
3997 *bufferSize
= period_size
;
3999 // If attempting to setup a duplex stream, the bufferSize parameter
4000 // MUST be the same in both directions!
4001 if ( stream_
.mode
== OUTPUT
&& mode
== INPUT
&& *bufferSize
!= stream_
.bufferSize
) {
4002 sprintf( message_
, "RtApiAlsa: error setting buffer size for duplex stream on device (%s).",
4004 error(RtError::DEBUG_WARNING
);
4008 stream_
.bufferSize
= *bufferSize
;
4010 // Install the hardware configuration
4011 err
= snd_pcm_hw_params(handle
, hw_params
);
4013 snd_pcm_close(handle
);
4014 sprintf(message_
, "RtApiAlsa: error installing hardware configuration (%s): %s.",
4015 name
, snd_strerror(err
));
4016 error(RtError::DEBUG_WARNING
);
4020 #if defined(__RTAUDIO_DEBUG__)
4021 fprintf(stderr
, "\nRtApiAlsa: dump hardware params after installation:\n\n");
4022 snd_pcm_hw_params_dump(hw_params
, out
);
4025 // Set the software configuration to fill buffers with zeros and prevent device stopping on xruns.
4026 snd_pcm_sw_params_t
*sw_params
= NULL
;
4027 snd_pcm_sw_params_alloca( &sw_params
);
4028 snd_pcm_sw_params_current( handle
, sw_params
);
4029 snd_pcm_sw_params_set_start_threshold( handle
, sw_params
, *bufferSize
);
4030 snd_pcm_sw_params_set_stop_threshold( handle
, sw_params
, 0x7fffffff );
4031 snd_pcm_sw_params_set_silence_threshold( handle
, sw_params
, 0 );
4032 snd_pcm_sw_params_set_silence_size( handle
, sw_params
, INT_MAX
);
4033 err
= snd_pcm_sw_params( handle
, sw_params
);
4035 snd_pcm_close(handle
);
4036 sprintf(message_
, "RtAudio: ALSA error installing software configuration (%s): %s.",
4037 name
, snd_strerror(err
));
4038 error(RtError::DEBUG_WARNING
);
4042 #if defined(__RTAUDIO_DEBUG__)
4043 fprintf(stderr
, "\nRtApiAlsa: dump software params after installation:\n\n");
4044 snd_pcm_sw_params_dump(sw_params
, out
);
4047 // Allocate the ApiHandle if necessary and then save.
4048 AlsaHandle
*apiInfo
= 0;
4049 if ( stream_
.apiHandle
== 0 ) {
4050 apiInfo
= (AlsaHandle
*) new AlsaHandle
;
4051 stream_
.apiHandle
= (void *) apiInfo
;
4052 apiInfo
->handles
[0] = 0;
4053 apiInfo
->handles
[1] = 0;
4056 apiInfo
= (AlsaHandle
*) stream_
.apiHandle
;
4058 apiInfo
->handles
[mode
] = handle
;
4060 // Set flags for buffer conversion
4061 stream_
.doConvertBuffer
[mode
] = false;
4062 if (stream_
.userFormat
!= stream_
.deviceFormat
[mode
])
4063 stream_
.doConvertBuffer
[mode
] = true;
4064 if (stream_
.nUserChannels
[mode
] < stream_
.nDeviceChannels
[mode
])
4065 stream_
.doConvertBuffer
[mode
] = true;
4066 if (stream_
.nUserChannels
[mode
] > 1 && stream_
.deInterleave
[mode
])
4067 stream_
.doConvertBuffer
[mode
] = true;
4069 // Allocate necessary internal buffers
4070 if ( stream_
.nUserChannels
[0] != stream_
.nUserChannels
[1] ) {
4073 if (stream_
.nUserChannels
[0] >= stream_
.nUserChannels
[1])
4074 buffer_bytes
= stream_
.nUserChannels
[0];
4076 buffer_bytes
= stream_
.nUserChannels
[1];
4078 buffer_bytes
*= *bufferSize
* formatBytes(stream_
.userFormat
);
4079 if (stream_
.userBuffer
) free(stream_
.userBuffer
);
4080 if (apiInfo
->tempBuffer
) free(apiInfo
->tempBuffer
);
4081 stream_
.userBuffer
= (char *) calloc(buffer_bytes
, 1);
4082 apiInfo
->tempBuffer
= (char *) calloc(buffer_bytes
, 1);
4083 if ( stream_
.userBuffer
== NULL
|| apiInfo
->tempBuffer
== NULL
) {
4084 sprintf(message_
, "RtApiAlsa: error allocating user buffer memory (%s).",
4085 devices_
[device
].name
.c_str());
4090 if ( stream_
.doConvertBuffer
[mode
] ) {
4093 bool makeBuffer
= true;
4094 if ( mode
== OUTPUT
)
4095 buffer_bytes
= stream_
.nDeviceChannels
[0] * formatBytes(stream_
.deviceFormat
[0]);
4096 else { // mode == INPUT
4097 buffer_bytes
= stream_
.nDeviceChannels
[1] * formatBytes(stream_
.deviceFormat
[1]);
4098 if ( stream_
.mode
== OUTPUT
&& stream_
.deviceBuffer
) {
4099 long bytes_out
= stream_
.nDeviceChannels
[0] * formatBytes(stream_
.deviceFormat
[0]);
4100 if ( buffer_bytes
< bytes_out
) makeBuffer
= false;
4105 buffer_bytes
*= *bufferSize
;
4106 if (stream_
.deviceBuffer
) free(stream_
.deviceBuffer
);
4107 stream_
.deviceBuffer
= (char *) calloc(buffer_bytes
, 1);
4108 if (stream_
.deviceBuffer
== NULL
) {
4109 sprintf(message_
, "RtApiAlsa: error allocating device buffer memory (%s).",
4110 devices_
[device
].name
.c_str());
4116 stream_
.device
[mode
] = device
;
4117 stream_
.state
= STREAM_STOPPED
;
4118 if ( stream_
.mode
== OUTPUT
&& mode
== INPUT
) {
4119 // We had already set up an output stream.
4120 stream_
.mode
= DUPLEX
;
4121 // Link the streams if possible.
4122 apiInfo
->synchronized
= false;
4123 if (snd_pcm_link( apiInfo
->handles
[0], apiInfo
->handles
[1] ) == 0)
4124 apiInfo
->synchronized
= true;
4126 sprintf(message_
, "RtApiAlsa: unable to synchronize input and output streams (%s).",
4127 devices_
[device
].name
.c_str());
4128 error(RtError::DEBUG_WARNING
);
4132 stream_
.mode
= mode
;
4133 stream_
.nBuffers
= periods
;
4134 stream_
.sampleRate
= sampleRate
;
4136 // Setup the buffer conversion information structure.
4137 if ( stream_
.doConvertBuffer
[mode
] ) {
4138 if (mode
== INPUT
) { // convert device to user buffer
4139 stream_
.convertInfo
[mode
].inJump
= stream_
.nDeviceChannels
[1];
4140 stream_
.convertInfo
[mode
].outJump
= stream_
.nUserChannels
[1];
4141 stream_
.convertInfo
[mode
].inFormat
= stream_
.deviceFormat
[1];
4142 stream_
.convertInfo
[mode
].outFormat
= stream_
.userFormat
;
4144 else { // convert user to device buffer
4145 stream_
.convertInfo
[mode
].inJump
= stream_
.nUserChannels
[0];
4146 stream_
.convertInfo
[mode
].outJump
= stream_
.nDeviceChannels
[0];
4147 stream_
.convertInfo
[mode
].inFormat
= stream_
.userFormat
;
4148 stream_
.convertInfo
[mode
].outFormat
= stream_
.deviceFormat
[0];
4151 if ( stream_
.convertInfo
[mode
].inJump
< stream_
.convertInfo
[mode
].outJump
)
4152 stream_
.convertInfo
[mode
].channels
= stream_
.convertInfo
[mode
].inJump
;
4154 stream_
.convertInfo
[mode
].channels
= stream_
.convertInfo
[mode
].outJump
;
4156 // Set up the interleave/deinterleave offsets.
4157 if ( mode
== INPUT
&& stream_
.deInterleave
[1] ) {
4158 for (int k
=0; k
<stream_
.convertInfo
[mode
].channels
; k
++) {
4159 stream_
.convertInfo
[mode
].inOffset
.push_back( k
* stream_
.bufferSize
);
4160 stream_
.convertInfo
[mode
].outOffset
.push_back( k
);
4161 stream_
.convertInfo
[mode
].inJump
= 1;
4164 else if (mode
== OUTPUT
&& stream_
.deInterleave
[0]) {
4165 for (int k
=0; k
<stream_
.convertInfo
[mode
].channels
; k
++) {
4166 stream_
.convertInfo
[mode
].inOffset
.push_back( k
);
4167 stream_
.convertInfo
[mode
].outOffset
.push_back( k
* stream_
.bufferSize
);
4168 stream_
.convertInfo
[mode
].outJump
= 1;
4172 for (int k
=0; k
<stream_
.convertInfo
[mode
].channels
; k
++) {
4173 stream_
.convertInfo
[mode
].inOffset
.push_back( k
);
4174 stream_
.convertInfo
[mode
].outOffset
.push_back( k
);
4183 if (apiInfo
->handles
[0])
4184 snd_pcm_close(apiInfo
->handles
[0]);
4185 if (apiInfo
->handles
[1])
4186 snd_pcm_close(apiInfo
->handles
[1]);
4187 if ( apiInfo
->tempBuffer
) free(apiInfo
->tempBuffer
);
4189 stream_
.apiHandle
= 0;
4192 if (stream_
.userBuffer
) {
4193 free(stream_
.userBuffer
);
4194 stream_
.userBuffer
= 0;
4197 error(RtError::DEBUG_WARNING
);
4201 void RtApiAlsa :: closeStream()
4203 // We don't want an exception to be thrown here because this
4204 // function is called by our class destructor. So, do our own
4206 if ( stream_
.mode
== UNINITIALIZED
) {
4207 sprintf(message_
, "RtApiAlsa::closeStream(): no open stream to close!");
4208 error(RtError::WARNING
);
4212 AlsaHandle
*apiInfo
= (AlsaHandle
*) stream_
.apiHandle
;
4213 if (stream_
.state
== STREAM_RUNNING
) {
4214 if (stream_
.mode
== OUTPUT
|| stream_
.mode
== DUPLEX
)
4215 snd_pcm_drop(apiInfo
->handles
[0]);
4216 if (stream_
.mode
== INPUT
|| stream_
.mode
== DUPLEX
)
4217 snd_pcm_drop(apiInfo
->handles
[1]);
4218 stream_
.state
= STREAM_STOPPED
;
4221 if (stream_
.callbackInfo
.usingCallback
) {
4222 stream_
.callbackInfo
.usingCallback
= false;
4223 pthread_join(stream_
.callbackInfo
.thread
, NULL
);
4227 if (apiInfo
->handles
[0]) snd_pcm_close(apiInfo
->handles
[0]);
4228 if (apiInfo
->handles
[1]) snd_pcm_close(apiInfo
->handles
[1]);
4229 free(apiInfo
->tempBuffer
);
4231 stream_
.apiHandle
= 0;
4234 if (stream_
.userBuffer
) {
4235 free(stream_
.userBuffer
);
4236 stream_
.userBuffer
= 0;
4239 if (stream_
.deviceBuffer
) {
4240 free(stream_
.deviceBuffer
);
4241 stream_
.deviceBuffer
= 0;
4244 stream_
.mode
= UNINITIALIZED
;
4247 void RtApiAlsa :: startStream()
4249 // This method calls snd_pcm_prepare if the device isn't already in that state.
4252 if (stream_
.state
== STREAM_RUNNING
) return;
4254 MUTEX_LOCK(&stream_
.mutex
);
4257 snd_pcm_state_t state
;
4258 AlsaHandle
*apiInfo
= (AlsaHandle
*) stream_
.apiHandle
;
4259 snd_pcm_t
**handle
= (snd_pcm_t
**) apiInfo
->handles
;
4260 if (stream_
.mode
== OUTPUT
|| stream_
.mode
== DUPLEX
) {
4261 state
= snd_pcm_state(handle
[0]);
4262 if (state
!= SND_PCM_STATE_PREPARED
) {
4263 err
= snd_pcm_prepare(handle
[0]);
4265 sprintf(message_
, "RtApiAlsa: error preparing pcm device (%s): %s.",
4266 devices_
[stream_
.device
[0]].name
.c_str(), snd_strerror(err
));
4267 MUTEX_UNLOCK(&stream_
.mutex
);
4268 error(RtError::DRIVER_ERROR
);
4273 if ( (stream_
.mode
== INPUT
|| stream_
.mode
== DUPLEX
) && !apiInfo
->synchronized
) {
4274 state
= snd_pcm_state(handle
[1]);
4275 if (state
!= SND_PCM_STATE_PREPARED
) {
4276 err
= snd_pcm_prepare(handle
[1]);
4278 sprintf(message_
, "RtApiAlsa: error preparing pcm device (%s): %s.",
4279 devices_
[stream_
.device
[1]].name
.c_str(), snd_strerror(err
));
4280 MUTEX_UNLOCK(&stream_
.mutex
);
4281 error(RtError::DRIVER_ERROR
);
4285 stream_
.state
= STREAM_RUNNING
;
4287 MUTEX_UNLOCK(&stream_
.mutex
);
4290 void RtApiAlsa :: stopStream()
4293 if (stream_
.state
== STREAM_STOPPED
) return;
4295 // Change the state before the lock to improve shutdown response
4296 // when using a callback.
4297 stream_
.state
= STREAM_STOPPED
;
4298 MUTEX_LOCK(&stream_
.mutex
);
4301 AlsaHandle
*apiInfo
= (AlsaHandle
*) stream_
.apiHandle
;
4302 snd_pcm_t
**handle
= (snd_pcm_t
**) apiInfo
->handles
;
4303 if (stream_
.mode
== OUTPUT
|| stream_
.mode
== DUPLEX
) {
4304 err
= snd_pcm_drain(handle
[0]);
4306 sprintf(message_
, "RtApiAlsa: error draining pcm device (%s): %s.",
4307 devices_
[stream_
.device
[0]].name
.c_str(), snd_strerror(err
));
4308 MUTEX_UNLOCK(&stream_
.mutex
);
4309 error(RtError::DRIVER_ERROR
);
4313 if ( (stream_
.mode
== INPUT
|| stream_
.mode
== DUPLEX
) && !apiInfo
->synchronized
) {
4314 err
= snd_pcm_drain(handle
[1]);
4316 sprintf(message_
, "RtApiAlsa: error draining pcm device (%s): %s.",
4317 devices_
[stream_
.device
[1]].name
.c_str(), snd_strerror(err
));
4318 MUTEX_UNLOCK(&stream_
.mutex
);
4319 error(RtError::DRIVER_ERROR
);
4323 MUTEX_UNLOCK(&stream_
.mutex
);
4326 void RtApiAlsa :: abortStream()
4329 if (stream_
.state
== STREAM_STOPPED
) return;
4331 // Change the state before the lock to improve shutdown response
4332 // when using a callback.
4333 stream_
.state
= STREAM_STOPPED
;
4334 MUTEX_LOCK(&stream_
.mutex
);
4337 AlsaHandle
*apiInfo
= (AlsaHandle
*) stream_
.apiHandle
;
4338 snd_pcm_t
**handle
= (snd_pcm_t
**) apiInfo
->handles
;
4339 if (stream_
.mode
== OUTPUT
|| stream_
.mode
== DUPLEX
) {
4340 err
= snd_pcm_drop(handle
[0]);
4342 sprintf(message_
, "RtApiAlsa: error draining pcm device (%s): %s.",
4343 devices_
[stream_
.device
[0]].name
.c_str(), snd_strerror(err
));
4344 MUTEX_UNLOCK(&stream_
.mutex
);
4345 error(RtError::DRIVER_ERROR
);
4349 if ( (stream_
.mode
== INPUT
|| stream_
.mode
== DUPLEX
) && !apiInfo
->synchronized
) {
4350 err
= snd_pcm_drop(handle
[1]);
4352 sprintf(message_
, "RtApiAlsa: error draining pcm device (%s): %s.",
4353 devices_
[stream_
.device
[1]].name
.c_str(), snd_strerror(err
));
4354 MUTEX_UNLOCK(&stream_
.mutex
);
4355 error(RtError::DRIVER_ERROR
);
4359 MUTEX_UNLOCK(&stream_
.mutex
);
4362 int RtApiAlsa :: streamWillBlock()
4365 if (stream_
.state
== STREAM_STOPPED
) return 0;
4367 MUTEX_LOCK(&stream_
.mutex
);
4369 int err
= 0, frames
= 0;
4370 AlsaHandle
*apiInfo
= (AlsaHandle
*) stream_
.apiHandle
;
4371 snd_pcm_t
**handle
= (snd_pcm_t
**) apiInfo
->handles
;
4372 if (stream_
.mode
== OUTPUT
|| stream_
.mode
== DUPLEX
) {
4373 err
= snd_pcm_avail_update(handle
[0]);
4375 sprintf(message_
, "RtApiAlsa: error getting available frames for device (%s): %s.",
4376 devices_
[stream_
.device
[0]].name
.c_str(), snd_strerror(err
));
4377 MUTEX_UNLOCK(&stream_
.mutex
);
4378 error(RtError::DRIVER_ERROR
);
4384 if (stream_
.mode
== INPUT
|| stream_
.mode
== DUPLEX
) {
4385 err
= snd_pcm_avail_update(handle
[1]);
4387 sprintf(message_
, "RtApiAlsa: error getting available frames for device (%s): %s.",
4388 devices_
[stream_
.device
[1]].name
.c_str(), snd_strerror(err
));
4389 MUTEX_UNLOCK(&stream_
.mutex
);
4390 error(RtError::DRIVER_ERROR
);
4392 if (frames
> err
) frames
= err
;
4395 frames
= stream_
.bufferSize
- frames
;
4396 if (frames
< 0) frames
= 0;
4398 MUTEX_UNLOCK(&stream_
.mutex
);
4402 void RtApiAlsa :: tickStream()
4407 if (stream_
.state
== STREAM_STOPPED
) {
4408 if (stream_
.callbackInfo
.usingCallback
) usleep(50000); // sleep 50 milliseconds
4411 else if (stream_
.callbackInfo
.usingCallback
) {
4412 RtAudioCallback callback
= (RtAudioCallback
) stream_
.callbackInfo
.callback
;
4413 stopStream
= callback(stream_
.userBuffer
, stream_
.bufferSize
, stream_
.callbackInfo
.userData
);
4416 MUTEX_LOCK(&stream_
.mutex
);
4418 // The state might change while waiting on a mutex.
4419 if (stream_
.state
== STREAM_STOPPED
)
4425 AlsaHandle
*apiInfo
;
4427 RtAudioFormat format
;
4428 apiInfo
= (AlsaHandle
*) stream_
.apiHandle
;
4429 handle
= (snd_pcm_t
**) apiInfo
->handles
;
4431 if ( stream_
.mode
== DUPLEX
) {
4432 // In duplex mode, we need to make the snd_pcm_read call before
4433 // the snd_pcm_write call in order to avoid under/over runs. So,
4434 // copy the userData to our temporary buffer.
4436 bufferBytes
= stream_
.bufferSize
* stream_
.nUserChannels
[0] * formatBytes(stream_
.userFormat
);
4437 memcpy( apiInfo
->tempBuffer
, stream_
.userBuffer
, bufferBytes
);
4440 if (stream_
.mode
== INPUT
|| stream_
.mode
== DUPLEX
) {
4442 // Setup parameters.
4443 if (stream_
.doConvertBuffer
[1]) {
4444 buffer
= stream_
.deviceBuffer
;
4445 channels
= stream_
.nDeviceChannels
[1];
4446 format
= stream_
.deviceFormat
[1];
4449 buffer
= stream_
.userBuffer
;
4450 channels
= stream_
.nUserChannels
[1];
4451 format
= stream_
.userFormat
;
4454 // Read samples from device in interleaved/non-interleaved format.
4455 if (stream_
.deInterleave
[1]) {
4456 void *bufs
[channels
];
4457 size_t offset
= stream_
.bufferSize
* formatBytes(format
);
4458 for (int i
=0; i
<channels
; i
++)
4459 bufs
[i
] = (void *) (buffer
+ (i
* offset
));
4460 err
= snd_pcm_readn(handle
[1], bufs
, stream_
.bufferSize
);
4463 err
= snd_pcm_readi(handle
[1], buffer
, stream_
.bufferSize
);
4465 if (err
< stream_
.bufferSize
) {
4466 // Either an error or underrun occured.
4467 if (err
== -EPIPE
) {
4468 snd_pcm_state_t state
= snd_pcm_state(handle
[1]);
4469 if (state
== SND_PCM_STATE_XRUN
) {
4470 sprintf(message_
, "RtApiAlsa: overrun detected.");
4471 error(RtError::WARNING
);
4472 err
= snd_pcm_prepare(handle
[1]);
4474 sprintf(message_
, "RtApiAlsa: error preparing handle after overrun: %s.",
4476 MUTEX_UNLOCK(&stream_
.mutex
);
4477 error(RtError::DRIVER_ERROR
);
4481 sprintf(message_
, "RtApiAlsa: tickStream() error, current state is %s.",
4482 snd_pcm_state_name(state
));
4483 MUTEX_UNLOCK(&stream_
.mutex
);
4484 error(RtError::DRIVER_ERROR
);
4489 sprintf(message_
, "RtApiAlsa: audio read error for device (%s): %s.",
4490 devices_
[stream_
.device
[1]].name
.c_str(), snd_strerror(err
));
4491 MUTEX_UNLOCK(&stream_
.mutex
);
4492 error(RtError::DRIVER_ERROR
);
4496 // Do byte swapping if necessary.
4497 if (stream_
.doByteSwap
[1])
4498 byteSwapBuffer(buffer
, stream_
.bufferSize
* channels
, format
);
4500 // Do buffer conversion if necessary.
4501 if (stream_
.doConvertBuffer
[1])
4502 convertBuffer( stream_
.userBuffer
, stream_
.deviceBuffer
, stream_
.convertInfo
[1] );
4505 if (stream_
.mode
== OUTPUT
|| stream_
.mode
== DUPLEX
) {
4507 // Setup parameters and do buffer conversion if necessary.
4508 if (stream_
.doConvertBuffer
[0]) {
4509 buffer
= stream_
.deviceBuffer
;
4510 if ( stream_
.mode
== DUPLEX
)
4511 convertBuffer( buffer
, apiInfo
->tempBuffer
, stream_
.convertInfo
[0] );
4513 convertBuffer( buffer
, stream_
.userBuffer
, stream_
.convertInfo
[0] );
4514 channels
= stream_
.nDeviceChannels
[0];
4515 format
= stream_
.deviceFormat
[0];
4518 if ( stream_
.mode
== DUPLEX
)
4519 buffer
= apiInfo
->tempBuffer
;
4521 buffer
= stream_
.userBuffer
;
4522 channels
= stream_
.nUserChannels
[0];
4523 format
= stream_
.userFormat
;
4526 // Do byte swapping if necessary.
4527 if (stream_
.doByteSwap
[0])
4528 byteSwapBuffer(buffer
, stream_
.bufferSize
* channels
, format
);
4530 // Write samples to device in interleaved/non-interleaved format.
4531 if (stream_
.deInterleave
[0]) {
4532 void *bufs
[channels
];
4533 size_t offset
= stream_
.bufferSize
* formatBytes(format
);
4534 for (int i
=0; i
<channels
; i
++)
4535 bufs
[i
] = (void *) (buffer
+ (i
* offset
));
4536 err
= snd_pcm_writen(handle
[0], bufs
, stream_
.bufferSize
);
4539 err
= snd_pcm_writei(handle
[0], buffer
, stream_
.bufferSize
);
4541 if (err
< stream_
.bufferSize
) {
4542 // Either an error or underrun occured.
4543 if (err
== -EPIPE
) {
4544 snd_pcm_state_t state
= snd_pcm_state(handle
[0]);
4545 if (state
== SND_PCM_STATE_XRUN
) {
4546 sprintf(message_
, "RtApiAlsa: underrun detected.");
4547 error(RtError::WARNING
);
4548 err
= snd_pcm_prepare(handle
[0]);
4550 sprintf(message_
, "RtApiAlsa: error preparing handle after underrun: %s.",
4552 MUTEX_UNLOCK(&stream_
.mutex
);
4553 error(RtError::DRIVER_ERROR
);
4557 sprintf(message_
, "RtApiAlsa: tickStream() error, current state is %s.",
4558 snd_pcm_state_name(state
));
4559 MUTEX_UNLOCK(&stream_
.mutex
);
4560 error(RtError::DRIVER_ERROR
);
4565 sprintf(message_
, "RtApiAlsa: audio write error for device (%s): %s.",
4566 devices_
[stream_
.device
[0]].name
.c_str(), snd_strerror(err
));
4567 MUTEX_UNLOCK(&stream_
.mutex
);
4568 error(RtError::DRIVER_ERROR
);
4574 MUTEX_UNLOCK(&stream_
.mutex
);
4576 if (stream_
.callbackInfo
.usingCallback
&& stopStream
)
4580 void RtApiAlsa :: setStreamCallback(RtAudioCallback callback
, void *userData
)
4584 CallbackInfo
*info
= (CallbackInfo
*) &stream_
.callbackInfo
;
4585 if ( info
->usingCallback
) {
4586 sprintf(message_
, "RtApiAlsa: A callback is already set for this stream!");
4587 error(RtError::WARNING
);
4591 info
->callback
= (void *) callback
;
4592 info
->userData
= userData
;
4593 info
->usingCallback
= true;
4594 info
->object
= (void *) this;
4596 // Set the thread attributes for joinable and realtime scheduling
4597 // priority. The higher priority will only take affect if the
4598 // program is run as root or suid.
4599 pthread_attr_t attr
;
4600 pthread_attr_init(&attr
);
4602 // pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
4603 // pthread_attr_setschedpolicy(&attr, SCHED_RR);
4605 int err
= pthread_create(&info
->thread
, &attr
, alsaCallbackHandler
, &stream_
.callbackInfo
);
4606 pthread_attr_destroy(&attr
);
4608 info
->usingCallback
= false;
4609 sprintf(message_
, "RtApiAlsa: error starting callback thread!");
4610 error(RtError::THREAD_ERROR
);
4614 void RtApiAlsa :: cancelStreamCallback()
4618 if (stream_
.callbackInfo
.usingCallback
) {
4620 if (stream_
.state
== STREAM_RUNNING
)
4623 MUTEX_LOCK(&stream_
.mutex
);
4625 stream_
.callbackInfo
.usingCallback
= false;
4626 pthread_join(stream_
.callbackInfo
.thread
, NULL
);
4627 stream_
.callbackInfo
.thread
= 0;
4628 stream_
.callbackInfo
.callback
= NULL
;
4629 stream_
.callbackInfo
.userData
= NULL
;
4631 MUTEX_UNLOCK(&stream_
.mutex
);
4635 extern "C" void *alsaCallbackHandler(void *ptr
)
4637 CallbackInfo
*info
= (CallbackInfo
*) ptr
;
4638 RtApiAlsa
*object
= (RtApiAlsa
*) info
->object
;
4639 bool *usingCallback
= &info
->usingCallback
;
4641 while ( *usingCallback
) {
4643 object
->tickStream();
4645 catch (RtError
&exception
) {
4646 EM_log( CK_LOG_SYSTEM
, "RtApiAlsa: callback thread error..." );
4648 EM_log( CK_LOG_INFO
, "(%s)", exception
.getMessageString() );
4649 EM_log( CK_LOG_INFO
, "closing thread..." );
4658 //******************** End of __LINUX_ALSA__ *********************//
4661 #if defined(__WINDOWS_ASIO__) // ASIO API on Windows
4663 // The ASIO API is designed around a callback scheme, so this
4664 // implementation is similar to that used for OS-X CoreAudio and Linux
4665 // Jack. The primary constraint with ASIO is that it only allows
4666 // access to a single driver at a time. Thus, it is not possible to
4667 // have more than one simultaneous RtAudio stream.
4669 // This implementation also requires a number of external ASIO files
4670 // and a few global variables. The ASIO callback scheme does not
4671 // allow for the passing of user data, so we must create a global
4672 // pointer to our callbackInfo structure.
4674 // On unix systems, we make use of a pthread condition variable.
4675 // Since there is no equivalent in Windows, I hacked something based
4676 // on information found in
4677 // http://www.cs.wustl.edu/~schmidt/win32-cv-1.html.
4679 #include "asio/asiosys.h"
4680 #include "asio/asio.h"
4681 #include "asio/asiodrivers.h"
4684 AsioDrivers drivers
;
4685 ASIOCallbacks asioCallbacks
;
4686 ASIODriverInfo driverInfo
;
4687 CallbackInfo
*asioCallbackInfo
;
4691 ASIOBufferInfo
*bufferInfos
;
4695 :stopStream(false), bufferInfos(0) {}
4698 static const char*GetAsioErrorString(ASIOError result
)
4705 static Messages m
[] =
4707 { ASE_NotPresent
, "Hardware input or output is not present or available." },
4708 { ASE_HWMalfunction
, "Hardware is malfunctioning." },
4709 { ASE_InvalidParameter
, "Invalid input parameter." },
4710 { ASE_InvalidMode
, "Invalid mode." },
4711 { ASE_SPNotAdvancing
, "Sample position not advancing." },
4712 { ASE_NoClock
, "Sample clock or rate cannot be determined or is not present." },
4713 { ASE_NoMemory
, "Not enough memory to complete the request." }
4716 for (int i
= 0; i
< sizeof(m
)/sizeof(m
[0]); ++i
)
4718 if (m
[i
].value
== result
) return m
[i
].message
;
4720 return "Unknown error.";
4723 RtApiAsio :: RtApiAsio()
4725 this->coInitialized
= false;
4728 if (nDevices_
<= 0) {
4729 sprintf(message_
, "RtApiAsio: no Windows ASIO audio drivers found!");
4730 error(RtError::NO_DEVICES_FOUND
);
4734 RtApiAsio :: ~RtApiAsio()
4736 if ( stream_
.mode
!= UNINITIALIZED
) closeStream();
4737 if ( coInitialized
)
4744 void RtApiAsio :: initialize(void)
4747 // ASIO cannot run on a multi-threaded appartment. You can call CoInitialize beforehand, but it must be
4748 // for appartment threading (in which case, CoInitilialize will return S_FALSE here).
4749 coInitialized
= false;
4750 HRESULT hr
= CoInitialize(NULL
);
4753 sprintf(message_
,"RtApiAsio: ASIO requires a single-threaded appartment. Call CoInitializeEx(0,COINIT_APARTMENTTHREADED)");
4755 coInitialized
= true;
4757 nDevices_
= drivers
.asioGetNumDev();
4758 if (nDevices_
<= 0) return;
4760 // Create device structures and write device driver names to each.
4763 for (int i
=0; i
<nDevices_
; i
++) {
4764 if ( drivers
.asioGetDriverName( i
, name
, 128 ) == 0 ) {
4765 device
.name
.erase();
4766 device
.name
.append( (const char *)name
, strlen(name
)+1);
4767 devices_
.push_back(device
);
4770 sprintf(message_
, "RtApiAsio: error getting driver name for device index %d!", i
);
4771 error(RtError::WARNING
);
4775 nDevices_
= (int) devices_
.size();
4777 drivers
.removeCurrentDriver();
4778 driverInfo
.asioVersion
= 2;
4779 // See note in DirectSound implementation about GetDesktopWindow().
4780 driverInfo
.sysRef
= GetForegroundWindow();
4783 void RtApiAsio :: probeDeviceInfo(RtApiDevice
*info
)
4785 // Don't probe if a stream is already open.
4786 if ( stream_
.mode
!= UNINITIALIZED
) {
4787 sprintf(message_
, "RtApiAsio: unable to probe driver while a stream is open.");
4788 error(RtError::DEBUG_WARNING
);
4792 if ( !drivers
.loadDriver( (char *)info
->name
.c_str() ) ) {
4793 sprintf(message_
, "RtApiAsio: error loading driver (%s).", info
->name
.c_str());
4794 error(RtError::DEBUG_WARNING
);
4798 ASIOError result
= ASIOInit( &driverInfo
);
4799 if ( result
!= ASE_OK
) {
4800 sprintf(message_
, "RtApiAsio: error (%s) initializing driver (%s).",
4801 GetAsioErrorString(result
), info
->name
.c_str());
4802 error(RtError::DEBUG_WARNING
);
4806 // Determine the device channel information.
4807 long inputChannels
, outputChannels
;
4808 result
= ASIOGetChannels( &inputChannels
, &outputChannels
);
4809 if ( result
!= ASE_OK
) {
4810 drivers
.removeCurrentDriver();
4811 sprintf(message_
, "RtApiAsio: error (%s) getting input/output channel count (%s).",
4812 GetAsioErrorString(result
),
4813 info
->name
.c_str());
4814 error(RtError::DEBUG_WARNING
);
4818 info
->maxOutputChannels
= outputChannels
;
4819 if ( outputChannels
> 0 ) info
->minOutputChannels
= 1;
4821 info
->maxInputChannels
= inputChannels
;
4822 if ( inputChannels
> 0 ) info
->minInputChannels
= 1;
4824 // If device opens for both playback and capture, we determine the channels.
4825 if (info
->maxOutputChannels
> 0 && info
->maxInputChannels
> 0) {
4826 info
->hasDuplexSupport
= true;
4827 info
->maxDuplexChannels
= (info
->maxOutputChannels
> info
->maxInputChannels
) ?
4828 info
->maxInputChannels
: info
->maxOutputChannels
;
4829 info
->minDuplexChannels
= (info
->minOutputChannels
> info
->minInputChannels
) ?
4830 info
->minInputChannels
: info
->minOutputChannels
;
4833 // Determine the supported sample rates.
4834 info
->sampleRates
.clear();
4835 for (unsigned int i
=0; i
<MAX_SAMPLE_RATES
; i
++) {
4836 result
= ASIOCanSampleRate( (ASIOSampleRate
) SAMPLE_RATES
[i
] );
4837 if ( result
== ASE_OK
)
4838 info
->sampleRates
.push_back( SAMPLE_RATES
[i
] );
4841 if (info
->sampleRates
.size() == 0) {
4842 drivers
.removeCurrentDriver();
4843 sprintf( message_
, "RtApiAsio: No supported sample rates found for driver (%s).", info
->name
.c_str() );
4844 error(RtError::DEBUG_WARNING
);
4848 // Determine supported data types ... just check first channel and assume rest are the same.
4849 ASIOChannelInfo channelInfo
;
4850 channelInfo
.channel
= 0;
4851 channelInfo
.isInput
= true;
4852 if ( info
->maxInputChannels
<= 0 ) channelInfo
.isInput
= false;
4853 result
= ASIOGetChannelInfo( &channelInfo
);
4854 if ( result
!= ASE_OK
) {
4855 drivers
.removeCurrentDriver();
4856 sprintf(message_
, "RtApiAsio: error (%s) getting driver (%s) channel information.",
4857 GetAsioErrorString(result
),
4858 info
->name
.c_str());
4859 error(RtError::DEBUG_WARNING
);
4863 if ( channelInfo
.type
== ASIOSTInt16MSB
|| channelInfo
.type
== ASIOSTInt16LSB
)
4864 info
->nativeFormats
|= RTAUDIO_SINT16
;
4865 else if ( channelInfo
.type
== ASIOSTInt32MSB
|| channelInfo
.type
== ASIOSTInt32LSB
)
4866 info
->nativeFormats
|= RTAUDIO_SINT32
;
4867 else if ( channelInfo
.type
== ASIOSTFloat32MSB
|| channelInfo
.type
== ASIOSTFloat32LSB
)
4868 info
->nativeFormats
|= RTAUDIO_FLOAT32
;
4869 else if ( channelInfo
.type
== ASIOSTFloat64MSB
|| channelInfo
.type
== ASIOSTFloat64LSB
)
4870 info
->nativeFormats
|= RTAUDIO_FLOAT64
;
4872 // Check that we have at least one supported format.
4873 if (info
->nativeFormats
== 0) {
4874 drivers
.removeCurrentDriver();
4875 sprintf(message_
, "RtApiAsio: driver (%s) data format not supported by RtAudio.",
4876 info
->name
.c_str());
4877 error(RtError::DEBUG_WARNING
);
4881 info
->probed
= true;
4882 drivers
.removeCurrentDriver();
4885 void bufferSwitch(long index
, ASIOBool processNow
)
4887 RtApiAsio
*object
= (RtApiAsio
*) asioCallbackInfo
->object
;
4889 object
->callbackEvent( index
);
4891 catch (RtError
&exception
) {
4892 EM_log( CK_LOG_SYSTEM
, "RtApiAsio: callback handler error..." );
4894 EM_log( CK_LOG_INFO
, "(%s)", exception
.getMessageString() );
4902 void sampleRateChanged(ASIOSampleRate sRate
)
4904 // The ASIO documentation says that this usually only happens during
4905 // external sync. Audio processing is not stopped by the driver,
4906 // actual sample rate might not have even changed, maybe only the
4907 // sample rate status of an AES/EBU or S/PDIF digital input at the
4910 RtAudio
*object
= (RtAudio
*) asioCallbackInfo
->object
;
4912 object
->stopStream();
4914 catch (RtError
&exception
) {
4915 EM_log( CK_LOG_SYSTEM
, "RtApiAsio: sampleRateChanged() error..." );
4917 EM_log( CK_LOG_INFO
, "(%s)", exception
.getMessageString() );
4922 EM_log( CK_LOG_SYSTEM
, "RtApiAsio: driver reports sample rate changed to %d", (int)sRate
);
4923 EM_log( CK_LOG_SYSTEM
, "... stream stopped..." );
4926 long asioMessages(long selector
, long value
, void* message
, double* opt
)
4930 case kAsioSelectorSupported
:
4931 if(value
== kAsioResetRequest
4932 || value
== kAsioEngineVersion
4933 || value
== kAsioResyncRequest
4934 || value
== kAsioLatenciesChanged
4935 // The following three were added for ASIO 2.0, you don't
4936 // necessarily have to support them.
4937 || value
== kAsioSupportsTimeInfo
4938 || value
== kAsioSupportsTimeCode
4939 || value
== kAsioSupportsInputMonitor
)
4942 case kAsioResetRequest
:
4943 // Defer the task and perform the reset of the driver during the
4944 // next "safe" situation. You cannot reset the driver right now,
4945 // as this code is called from the driver. Reset the driver is
4946 // done by completely destruct is. I.e. ASIOStop(),
4947 // ASIODisposeBuffers(), Destruction Afterwards you initialize the
4949 EM_log( CK_LOG_INFO
, "RtApiAsio: driver reset requested!!!" );
4952 case kAsioResyncRequest
:
4953 // This informs the application that the driver encountered some
4954 // non-fatal data loss. It is used for synchronization purposes
4955 // of different media. Added mainly to work around the Win16Mutex
4956 // problems in Windows 95/98 with the Windows Multimedia system,
4957 // which could lose data because the Mutex was held too long by
4958 // another thread. However a driver can issue it in other
4960 EM_log( CK_LOG_INFO
, "RtApiAsio: driver resync requested!!!" );
4963 case kAsioLatenciesChanged
:
4964 // This will inform the host application that the drivers were
4965 // latencies changed. Beware, it this does not mean that the
4966 // buffer sizes have changed! You might need to update internal
4968 EM_log( CK_LOG_INFO
, "RtApiAsio: driver latency may have changed!!!" );
4971 case kAsioEngineVersion
:
4972 // Return the supported ASIO version of the host application. If
4973 // a host application does not implement this selector, ASIO 1.0
4974 // is assumed by the driver.
4977 case kAsioSupportsTimeInfo
:
4978 // Informs the driver whether the
4979 // asioCallbacks.bufferSwitchTimeInfo() callback is supported.
4980 // For compatibility with ASIO 1.0 drivers the host application
4981 // should always support the "old" bufferSwitch method, too.
4984 case kAsioSupportsTimeCode
:
4985 // Informs the driver wether application is interested in time
4986 // code info. If an application does not need to know about time
4987 // code, the driver has less work to do.
4994 bool RtApiAsio :: probeDeviceOpen(int device
, StreamMode mode
, int channels
,
4995 int sampleRate
, RtAudioFormat format
,
4996 int *bufferSize
, int numberOfBuffers
)
4998 // For ASIO, a duplex stream MUST use the same driver.
4999 if ( mode
== INPUT
&& stream_
.mode
== OUTPUT
&& stream_
.device
[0] != device
) {
5000 sprintf(message_
, "RtApiAsio: duplex stream must use the same device for input and output.");
5001 error(RtError::WARNING
);
5005 // Only load the driver once for duplex stream.
5007 if ( mode
!= INPUT
|| stream_
.mode
!= OUTPUT
) {
5008 if ( !drivers
.loadDriver( (char *)devices_
[device
].name
.c_str() ) ) {
5009 sprintf(message_
, "RtApiAsio: error loading driver (%s).",
5010 devices_
[device
].name
.c_str());
5011 error(RtError::DEBUG_WARNING
);
5015 result
= ASIOInit( &driverInfo
);
5016 if ( result
!= ASE_OK
) {
5017 sprintf(message_
, "RtApiAsio: error (%s) initializing driver (%s).",
5018 GetAsioErrorString(result
), devices_
[device
].name
.c_str());
5019 error(RtError::DEBUG_WARNING
);
5024 // Check the device channel count.
5025 long inputChannels
, outputChannels
;
5026 result
= ASIOGetChannels( &inputChannels
, &outputChannels
);
5027 if ( result
!= ASE_OK
) {
5028 drivers
.removeCurrentDriver();
5029 sprintf(message_
, "RtApiAsio: error (%s) getting input/output channel count (%s).",
5030 GetAsioErrorString(result
),
5031 devices_
[device
].name
.c_str());
5032 error(RtError::DEBUG_WARNING
);
5036 if ( ( mode
== OUTPUT
&& channels
> outputChannels
) ||
5037 ( mode
== INPUT
&& channels
> inputChannels
) ) {
5038 drivers
.removeCurrentDriver();
5039 sprintf(message_
, "RtApiAsio: driver (%s) does not support requested channel count (%d).",
5040 devices_
[device
].name
.c_str(), channels
);
5041 error(RtError::DEBUG_WARNING
);
5044 stream_
.nDeviceChannels
[mode
] = channels
;
5045 stream_
.nUserChannels
[mode
] = channels
;
5047 // Verify the sample rate is supported.
5048 result
= ASIOCanSampleRate( (ASIOSampleRate
) sampleRate
);
5049 if ( result
!= ASE_OK
) {
5050 drivers
.removeCurrentDriver();
5051 sprintf(message_
, "RtApiAsio: driver (%s) does not support requested sample rate (%d).",
5052 devices_
[device
].name
.c_str(), sampleRate
);
5053 error(RtError::DEBUG_WARNING
);
5057 // Set the sample rate.
5058 result
= ASIOSetSampleRate( (ASIOSampleRate
) sampleRate
);
5059 if ( result
!= ASE_OK
) {
5060 drivers
.removeCurrentDriver();
5061 sprintf(message_
, "RtApiAsio: driver (%s) error setting sample rate (%d).",
5062 devices_
[device
].name
.c_str(), sampleRate
);
5063 error(RtError::DEBUG_WARNING
);
5067 // Determine the driver data type.
5068 ASIOChannelInfo channelInfo
;
5069 channelInfo
.channel
= 0;
5070 if ( mode
== OUTPUT
) channelInfo
.isInput
= false;
5071 else channelInfo
.isInput
= true;
5072 result
= ASIOGetChannelInfo( &channelInfo
);
5073 if ( result
!= ASE_OK
) {
5074 drivers
.removeCurrentDriver();
5075 sprintf(message_
, "RtApiAsio: driver (%s) error getting data format.",
5076 devices_
[device
].name
.c_str());
5077 error(RtError::DEBUG_WARNING
);
5081 // Assuming WINDOWS host is always little-endian.
5082 stream_
.doByteSwap
[mode
] = false;
5083 stream_
.userFormat
= format
;
5084 stream_
.deviceFormat
[mode
] = 0;
5085 if ( channelInfo
.type
== ASIOSTInt16MSB
|| channelInfo
.type
== ASIOSTInt16LSB
) {
5086 stream_
.deviceFormat
[mode
] = RTAUDIO_SINT16
;
5087 if ( channelInfo
.type
== ASIOSTInt16MSB
) stream_
.doByteSwap
[mode
] = true;
5089 else if ( channelInfo
.type
== ASIOSTInt32MSB
|| channelInfo
.type
== ASIOSTInt32LSB
) {
5090 stream_
.deviceFormat
[mode
] = RTAUDIO_SINT32
;
5091 if ( channelInfo
.type
== ASIOSTInt32MSB
) stream_
.doByteSwap
[mode
] = true;
5093 else if ( channelInfo
.type
== ASIOSTFloat32MSB
|| channelInfo
.type
== ASIOSTFloat32LSB
) {
5094 stream_
.deviceFormat
[mode
] = RTAUDIO_FLOAT32
;
5095 if ( channelInfo
.type
== ASIOSTFloat32MSB
) stream_
.doByteSwap
[mode
] = true;
5097 else if ( channelInfo
.type
== ASIOSTFloat64MSB
|| channelInfo
.type
== ASIOSTFloat64LSB
) {
5098 stream_
.deviceFormat
[mode
] = RTAUDIO_FLOAT64
;
5099 if ( channelInfo
.type
== ASIOSTFloat64MSB
) stream_
.doByteSwap
[mode
] = true;
5102 if ( stream_
.deviceFormat
[mode
] == 0 ) {
5103 drivers
.removeCurrentDriver();
5104 sprintf(message_
, "RtApiAsio: driver (%s) data format not supported by RtAudio.",
5105 devices_
[device
].name
.c_str());
5106 error(RtError::DEBUG_WARNING
);
5110 // Set the buffer size. For a duplex stream, this will end up
5111 // setting the buffer size based on the input constraints, which
5113 long minSize
, maxSize
, preferSize
, granularity
;
5114 result
= ASIOGetBufferSize( &minSize
, &maxSize
, &preferSize
, &granularity
);
5115 if ( result
!= ASE_OK
) {
5116 drivers
.removeCurrentDriver();
5117 sprintf(message_
, "RtApiAsio: error (%s) on driver (%s) error getting buffer size.",
5118 GetAsioErrorString(result
),
5119 devices_
[device
].name
.c_str());
5120 error(RtError::DEBUG_WARNING
);
5124 if ( *bufferSize
< minSize
) *bufferSize
= minSize
;
5125 else if ( *bufferSize
> maxSize
) *bufferSize
= maxSize
;
5126 else if ( granularity
== -1 ) {
5127 // Make sure bufferSize is a power of two.
5128 double power
= log10( (double) *bufferSize
) / log10( 2.0 );
5129 *bufferSize
= (int) pow( 2.0, floor(power
+0.5) );
5130 if ( *bufferSize
< minSize
) *bufferSize
= minSize
;
5131 else if ( *bufferSize
> maxSize
) *bufferSize
= maxSize
;
5132 else *bufferSize
= preferSize
;
5133 } else if (granularity
!= 0)
5135 // to an even multiple of granularity, rounding up.
5136 *bufferSize
= (*bufferSize
+ granularity
-1)/granularity
*granularity
;
5141 if ( mode
== INPUT
&& stream_
.mode
== OUTPUT
&& stream_
.bufferSize
!= *bufferSize
)
5142 std::cerr
<< "Possible input/output buffersize discrepancy!" << std::endl
;
5144 stream_
.bufferSize
= *bufferSize
;
5145 stream_
.nBuffers
= 2;
5147 // ASIO always uses deinterleaved channels.
5148 stream_
.deInterleave
[mode
] = true;
5150 // Allocate, if necessary, our AsioHandle structure for the stream.
5151 AsioHandle
*handle
= (AsioHandle
*) stream_
.apiHandle
;
5152 if ( handle
== 0 ) {
5153 handle
= (AsioHandle
*) calloc(1, sizeof(AsioHandle
));
5154 if ( handle
== NULL
) {
5155 drivers
.removeCurrentDriver();
5156 sprintf(message_
, "RtApiAsio: error allocating AsioHandle memory (%s).",
5157 devices_
[device
].name
.c_str());
5158 error(RtError::DEBUG_WARNING
);
5161 handle
->bufferInfos
= 0;
5162 // Create a manual-reset event.
5163 handle
->condition
= CreateEvent( NULL
, // no security
5164 TRUE
, // manual-reset
5165 FALSE
, // non-signaled initially
5167 stream_
.apiHandle
= (void *) handle
;
5170 // Create the ASIO internal buffers. Since RtAudio sets up input
5171 // and output separately, we'll have to dispose of previously
5172 // created output buffers for a duplex stream.
5173 if ( mode
== INPUT
&& stream_
.mode
== OUTPUT
) {
5174 ASIODisposeBuffers();
5175 if ( handle
->bufferInfos
) free( handle
->bufferInfos
);
5178 // Allocate, initialize, and save the bufferInfos in our stream callbackInfo structure.
5179 int i
, nChannels
= stream_
.nDeviceChannels
[0] + stream_
.nDeviceChannels
[1];
5180 handle
->bufferInfos
= (ASIOBufferInfo
*) malloc( nChannels
* sizeof(ASIOBufferInfo
) );
5181 if (handle
->bufferInfos
== NULL
) {
5182 sprintf(message_
, "RtApiAsio: error allocating bufferInfo memory (%s).",
5183 devices_
[device
].name
.c_str());
5186 ASIOBufferInfo
*infos
;
5187 infos
= handle
->bufferInfos
;
5188 for ( i
=0; i
<stream_
.nDeviceChannels
[0]; i
++, infos
++ ) {
5189 infos
->isInput
= ASIOFalse
;
5190 infos
->channelNum
= i
;
5191 infos
->buffers
[0] = infos
->buffers
[1] = 0;
5193 for ( i
=0; i
<stream_
.nDeviceChannels
[1]; i
++, infos
++ ) {
5194 infos
->isInput
= ASIOTrue
;
5195 infos
->channelNum
= i
;
5196 infos
->buffers
[0] = infos
->buffers
[1] = 0;
5199 // Set up the ASIO callback structure and create the ASIO data buffers.
5200 asioCallbacks
.bufferSwitch
= &bufferSwitch
;
5201 asioCallbacks
.sampleRateDidChange
= &sampleRateChanged
;
5202 asioCallbacks
.asioMessage
= &asioMessages
;
5203 asioCallbacks
.bufferSwitchTimeInfo
= NULL
;
5204 result
= ASIOCreateBuffers( handle
->bufferInfos
, nChannels
, stream_
.bufferSize
, &asioCallbacks
);
5205 if ( result
!= ASE_OK
) {
5206 sprintf(message_
, "RtApiAsio: eror (%s) on driver (%s) error creating buffers.",
5207 GetAsioErrorString(result
),
5208 devices_
[device
].name
.c_str());
5212 // Set flags for buffer conversion.
5213 stream_
.doConvertBuffer
[mode
] = false;
5214 if (stream_
.userFormat
!= stream_
.deviceFormat
[mode
])
5215 stream_
.doConvertBuffer
[mode
] = true;
5216 if (stream_
.nUserChannels
[mode
] < stream_
.nDeviceChannels
[mode
])
5217 stream_
.doConvertBuffer
[mode
] = true;
5218 if (stream_
.nUserChannels
[mode
] > 1 && stream_
.deInterleave
[mode
])
5219 stream_
.doConvertBuffer
[mode
] = true;
5221 // Allocate necessary internal buffers
5222 if ( stream_
.nUserChannels
[0] != stream_
.nUserChannels
[1] ) {
5225 if (stream_
.nUserChannels
[0] >= stream_
.nUserChannels
[1])
5226 buffer_bytes
= stream_
.nUserChannels
[0];
5228 buffer_bytes
= stream_
.nUserChannels
[1];
5230 buffer_bytes
*= *bufferSize
* formatBytes(stream_
.userFormat
);
5231 if (stream_
.userBuffer
) free(stream_
.userBuffer
);
5232 stream_
.userBuffer
= (char *) calloc(buffer_bytes
, 1);
5233 if (stream_
.userBuffer
== NULL
) {
5234 sprintf(message_
, "RtApiAsio: error (%s) allocating user buffer memory (%s).",
5235 GetAsioErrorString(result
),
5236 devices_
[device
].name
.c_str());
5241 if ( stream_
.doConvertBuffer
[mode
] ) {
5244 bool makeBuffer
= true;
5245 if ( mode
== OUTPUT
)
5246 buffer_bytes
= stream_
.nDeviceChannels
[0] * formatBytes(stream_
.deviceFormat
[0]);
5247 else { // mode == INPUT
5248 buffer_bytes
= stream_
.nDeviceChannels
[1] * formatBytes(stream_
.deviceFormat
[1]);
5249 if ( stream_
.mode
== OUTPUT
&& stream_
.deviceBuffer
) {
5250 long bytes_out
= stream_
.nDeviceChannels
[0] * formatBytes(stream_
.deviceFormat
[0]);
5251 if ( buffer_bytes
< bytes_out
) makeBuffer
= false;
5256 buffer_bytes
*= *bufferSize
;
5257 if (stream_
.deviceBuffer
) free(stream_
.deviceBuffer
);
5258 stream_
.deviceBuffer
= (char *) calloc(buffer_bytes
, 1);
5259 if (stream_
.deviceBuffer
== NULL
) {
5260 sprintf(message_
, "RtApiAsio: error (%s) allocating device buffer memory (%s).",
5261 GetAsioErrorString(result
),
5262 devices_
[device
].name
.c_str());
5268 stream_
.device
[mode
] = device
;
5269 stream_
.state
= STREAM_STOPPED
;
5270 if ( stream_
.mode
== OUTPUT
&& mode
== INPUT
)
5271 // We had already set up an output stream.
5272 stream_
.mode
= DUPLEX
;
5274 stream_
.mode
= mode
;
5275 stream_
.sampleRate
= sampleRate
;
5276 asioCallbackInfo
= &stream_
.callbackInfo
;
5277 stream_
.callbackInfo
.object
= (void *) this;
5279 // Setup the buffer conversion information structure.
5280 if ( stream_
.doConvertBuffer
[mode
] ) {
5281 if (mode
== INPUT
) { // convert device to user buffer
5282 stream_
.convertInfo
[mode
].inJump
= stream_
.nDeviceChannels
[1];
5283 stream_
.convertInfo
[mode
].outJump
= stream_
.nUserChannels
[1];
5284 stream_
.convertInfo
[mode
].inFormat
= stream_
.deviceFormat
[1];
5285 stream_
.convertInfo
[mode
].outFormat
= stream_
.userFormat
;
5287 else { // convert user to device buffer
5288 stream_
.convertInfo
[mode
].inJump
= stream_
.nUserChannels
[0];
5289 stream_
.convertInfo
[mode
].outJump
= stream_
.nDeviceChannels
[0];
5290 stream_
.convertInfo
[mode
].inFormat
= stream_
.userFormat
;
5291 stream_
.convertInfo
[mode
].outFormat
= stream_
.deviceFormat
[0];
5294 if ( stream_
.convertInfo
[mode
].inJump
< stream_
.convertInfo
[mode
].outJump
)
5295 stream_
.convertInfo
[mode
].channels
= stream_
.convertInfo
[mode
].inJump
;
5297 stream_
.convertInfo
[mode
].channels
= stream_
.convertInfo
[mode
].outJump
;
5299 // Set up the interleave/deinterleave offsets.
5300 if ( mode
== INPUT
&& stream_
.deInterleave
[1] ) {
5301 for (int k
=0; k
<stream_
.convertInfo
[mode
].channels
; k
++) {
5302 stream_
.convertInfo
[mode
].inOffset
.push_back( k
* stream_
.bufferSize
);
5303 stream_
.convertInfo
[mode
].outOffset
.push_back( k
);
5304 stream_
.convertInfo
[mode
].inJump
= 1;
5307 else if (mode
== OUTPUT
&& stream_
.deInterleave
[0]) {
5308 for (int k
=0; k
<stream_
.convertInfo
[mode
].channels
; k
++) {
5309 stream_
.convertInfo
[mode
].inOffset
.push_back( k
);
5310 stream_
.convertInfo
[mode
].outOffset
.push_back( k
* stream_
.bufferSize
);
5311 stream_
.convertInfo
[mode
].outJump
= 1;
5315 for (int k
=0; k
<stream_
.convertInfo
[mode
].channels
; k
++) {
5316 stream_
.convertInfo
[mode
].inOffset
.push_back( k
);
5317 stream_
.convertInfo
[mode
].outOffset
.push_back( k
);
5325 ASIODisposeBuffers();
5326 drivers
.removeCurrentDriver();
5329 CloseHandle( handle
->condition
);
5330 if ( handle
->bufferInfos
)
5331 free( handle
->bufferInfos
);
5333 stream_
.apiHandle
= 0;
5336 if (stream_
.userBuffer
) {
5337 free(stream_
.userBuffer
);
5338 stream_
.userBuffer
= 0;
5341 error(RtError::DEBUG_WARNING
);
5345 void RtApiAsio :: closeStream()
5347 // We don't want an exception to be thrown here because this
5348 // function is called by our class destructor. So, do our own
5350 if ( stream_
.mode
== UNINITIALIZED
) {
5351 sprintf(message_
, "RtApiAsio::closeStream(): no open stream to close!");
5352 error(RtError::WARNING
);
5356 if (stream_
.state
== STREAM_RUNNING
)
5359 ASIODisposeBuffers();
5360 drivers
.removeCurrentDriver();
5362 AsioHandle
*handle
= (AsioHandle
*) stream_
.apiHandle
;
5364 CloseHandle( handle
->condition
);
5365 if ( handle
->bufferInfos
)
5366 free( handle
->bufferInfos
);
5368 stream_
.apiHandle
= 0;
5371 if (stream_
.userBuffer
) {
5372 free(stream_
.userBuffer
);
5373 stream_
.userBuffer
= 0;
5376 if (stream_
.deviceBuffer
) {
5377 free(stream_
.deviceBuffer
);
5378 stream_
.deviceBuffer
= 0;
5381 stream_
.mode
= UNINITIALIZED
;
5384 void RtApiAsio :: setStreamCallback(RtAudioCallback callback
, void *userData
)
5388 if ( stream_
.callbackInfo
.usingCallback
) {
5389 sprintf(message_
, "RtApiAsio: A callback is already set for this stream!");
5390 error(RtError::WARNING
);
5394 stream_
.callbackInfo
.callback
= (void *) callback
;
5395 stream_
.callbackInfo
.userData
= userData
;
5396 stream_
.callbackInfo
.usingCallback
= true;
5399 void RtApiAsio :: cancelStreamCallback()
5403 if (stream_
.callbackInfo
.usingCallback
) {
5405 if (stream_
.state
== STREAM_RUNNING
)
5408 MUTEX_LOCK(&stream_
.mutex
);
5410 stream_
.callbackInfo
.usingCallback
= false;
5411 stream_
.callbackInfo
.userData
= NULL
;
5412 stream_
.state
= STREAM_STOPPED
;
5413 stream_
.callbackInfo
.callback
= NULL
;
5415 MUTEX_UNLOCK(&stream_
.mutex
);
5419 void RtApiAsio :: startStream()
5422 if (stream_
.state
== STREAM_RUNNING
) return;
5424 MUTEX_LOCK(&stream_
.mutex
);
5426 ASIOError result
= ASIOStart();
5427 if ( result
!= ASE_OK
) {
5428 sprintf(message_
, "RtApiAsio: error starting device (%s).",
5429 devices_
[stream_
.device
[0]].name
.c_str());
5430 MUTEX_UNLOCK(&stream_
.mutex
);
5431 error(RtError::DRIVER_ERROR
);
5433 AsioHandle
*handle
= (AsioHandle
*) stream_
.apiHandle
;
5434 handle
->stopStream
= false;
5435 stream_
.state
= STREAM_RUNNING
;
5437 MUTEX_UNLOCK(&stream_
.mutex
);
5440 void RtApiAsio :: stopStream()
5443 if (stream_
.state
== STREAM_STOPPED
) return;
5445 // Change the state before the lock to improve shutdown response
5446 // when using a callback.
5447 stream_
.state
= STREAM_STOPPED
;
5448 MUTEX_LOCK(&stream_
.mutex
);
5450 ASIOError result
= ASIOStop();
5451 if ( result
!= ASE_OK
) {
5452 sprintf(message_
, "RtApiAsio: error stopping device (%s).",
5453 devices_
[stream_
.device
[0]].name
.c_str());
5454 MUTEX_UNLOCK(&stream_
.mutex
);
5455 error(RtError::DRIVER_ERROR
);
5458 MUTEX_UNLOCK(&stream_
.mutex
);
5461 void RtApiAsio :: abortStream()
5466 void RtApiAsio :: tickStream()
5470 if (stream_
.state
== STREAM_STOPPED
)
5473 if (stream_
.callbackInfo
.usingCallback
) {
5474 sprintf(message_
, "RtApiAsio: tickStream() should not be used when a callback function is set!");
5475 error(RtError::WARNING
);
5479 AsioHandle
*handle
= (AsioHandle
*) stream_
.apiHandle
;
5481 MUTEX_LOCK(&stream_
.mutex
);
5483 // Release the stream_mutex here and wait for the event
5484 // to become signaled by the callback process.
5485 MUTEX_UNLOCK(&stream_
.mutex
);
5486 WaitForMultipleObjects(1, &handle
->condition
, FALSE
, INFINITE
);
5487 ResetEvent( handle
->condition
);
5490 void RtApiAsio :: callbackEvent(long bufferIndex
)
5494 if (stream_
.state
== STREAM_STOPPED
) return;
5496 CallbackInfo
*info
= (CallbackInfo
*) &stream_
.callbackInfo
;
5497 AsioHandle
*handle
= (AsioHandle
*) stream_
.apiHandle
;
5498 if ( info
->usingCallback
&& handle
->stopStream
) {
5499 // Check if the stream should be stopped (via the previous user
5500 // callback return value). We stop the stream here, rather than
5501 // after the function call, so that output data can first be
5507 MUTEX_LOCK(&stream_
.mutex
);
5509 // Invoke user callback first, to get fresh output data.
5510 if ( info
->usingCallback
) {
5511 RtAudioCallback callback
= (RtAudioCallback
) info
->callback
;
5512 if ( callback(stream_
.userBuffer
, stream_
.bufferSize
, info
->userData
) )
5513 handle
->stopStream
= true;
5517 int nChannels
= stream_
.nDeviceChannels
[0] + stream_
.nDeviceChannels
[1];
5518 if ( stream_
.mode
== OUTPUT
|| stream_
.mode
== DUPLEX
) {
5520 bufferBytes
= stream_
.bufferSize
* formatBytes(stream_
.deviceFormat
[0]);
5521 if (stream_
.doConvertBuffer
[0]) {
5523 convertBuffer( stream_
.deviceBuffer
, stream_
.userBuffer
, stream_
.convertInfo
[0] );
5524 if ( stream_
.doByteSwap
[0] )
5525 byteSwapBuffer(stream_
.deviceBuffer
,
5526 stream_
.bufferSize
* stream_
.nDeviceChannels
[0],
5527 stream_
.deviceFormat
[0]);
5529 // Always de-interleave ASIO output data.
5531 for ( int i
=0; i
<nChannels
; i
++ ) {
5532 if ( handle
->bufferInfos
[i
].isInput
!= ASIOTrue
)
5533 memcpy(handle
->bufferInfos
[i
].buffers
[bufferIndex
],
5534 &stream_
.deviceBuffer
[j
++*bufferBytes
], bufferBytes
);
5537 else { // single channel only
5539 if (stream_
.doByteSwap
[0])
5540 byteSwapBuffer(stream_
.userBuffer
,
5541 stream_
.bufferSize
* stream_
.nUserChannels
[0],
5542 stream_
.userFormat
);
5544 for ( int i
=0; i
<nChannels
; i
++ ) {
5545 if ( handle
->bufferInfos
[i
].isInput
!= ASIOTrue
) {
5546 memcpy(handle
->bufferInfos
[i
].buffers
[bufferIndex
], stream_
.userBuffer
, bufferBytes
);
5553 if ( stream_
.mode
== INPUT
|| stream_
.mode
== DUPLEX
) {
5555 bufferBytes
= stream_
.bufferSize
* formatBytes(stream_
.deviceFormat
[1]);
5556 if (stream_
.doConvertBuffer
[1]) {
5558 // Always interleave ASIO input data.
5560 for ( int i
=0; i
<nChannels
; i
++ ) {
5561 if ( handle
->bufferInfos
[i
].isInput
== ASIOTrue
)
5562 memcpy(&stream_
.deviceBuffer
[j
++*bufferBytes
],
5563 handle
->bufferInfos
[i
].buffers
[bufferIndex
],
5567 if ( stream_
.doByteSwap
[1] )
5568 byteSwapBuffer(stream_
.deviceBuffer
,
5569 stream_
.bufferSize
* stream_
.nDeviceChannels
[1],
5570 stream_
.deviceFormat
[1]);
5571 convertBuffer( stream_
.userBuffer
, stream_
.deviceBuffer
, stream_
.convertInfo
[1] );
5574 else { // single channel only
5575 for ( int i
=0; i
<nChannels
; i
++ ) {
5576 if ( handle
->bufferInfos
[i
].isInput
== ASIOTrue
) {
5577 memcpy(stream_
.userBuffer
,
5578 handle
->bufferInfos
[i
].buffers
[bufferIndex
],
5584 if (stream_
.doByteSwap
[1])
5585 byteSwapBuffer(stream_
.userBuffer
,
5586 stream_
.bufferSize
* stream_
.nUserChannels
[1],
5587 stream_
.userFormat
);
5591 if ( !info
->usingCallback
)
5592 SetEvent( handle
->condition
);
5594 // The following call was suggested by Malte Clasen. While the API
5595 // documentation indicates it should not be required, some device
5596 // drivers apparently do not function correctly without it.
5599 MUTEX_UNLOCK(&stream_
.mutex
);
5602 //******************** End of __WINDOWS_ASIO__ *********************//
5605 #if defined(__WINDOWS_DS__) // Windows DirectSound API
5608 #define DIRECTSOUND_VERSION 0x0500 // this #def is here for compatibility with newer DirectSound libs.
5613 #define MINIMUM_DEVICE_BUFFER_SIZE 32768
5616 #ifdef _MSC_VER // if Microsoft Visual C++
5617 #pragma comment(lib,"winmm.lib") // then, auto-link winmm.lib. Otherwise, it has to be added manually.
5621 static inline DWORD
dsPointerDifference(DWORD laterPointer
,DWORD earlierPointer
,DWORD bufferSize
)
5623 if (laterPointer
> earlierPointer
)
5625 return laterPointer
-earlierPointer
;
5628 return laterPointer
-earlierPointer
+bufferSize
;
5632 static inline DWORD
dsPointerBetween(DWORD pointer
, DWORD laterPointer
,DWORD earlierPointer
, DWORD bufferSize
)
5634 if (pointer
> bufferSize
) pointer
-= bufferSize
;
5635 if (laterPointer
< earlierPointer
)
5637 laterPointer
+= bufferSize
;
5639 if (pointer
< earlierPointer
)
5641 pointer
+= bufferSize
;
5643 return pointer
>= earlierPointer
&& pointer
< laterPointer
;
5647 #undef GENERATE_DEBUG_LOG // Define this to generate a debug timing log file in c:/rtaudiolog.txt"
5648 #ifdef GENERATE_DEBUG_LOG
5650 #include "mmsystem.h"
5655 DWORD currentReadPointer
, safeReadPointer
;
5656 DWORD currentWritePointer
, safeWritePointer
;
5657 DWORD readTime
, writeTime
;
5658 DWORD nextWritePointer
, nextReadPointer
;
5661 int currentDebugLogEntry
= 0;
5662 std::vector
<TTickRecord
> debugLog(2000);
5667 // A structure to hold various information related to the DirectSound
5668 // API implementation.
5674 DWORD dsPointerLeadTime
; // the number of bytes ahead of the safe pointer to lead by.
5678 RtApiDs::RtDsStatistics
RtApiDs::statistics
;
5680 // Provides a backdoor hook to monitor for DirectSound read overruns and write underruns.
5681 RtApiDs::RtDsStatistics
RtApiDs::getDsStatistics()
5683 RtDsStatistics s
= statistics
;
5684 // update the calculated fields.
5687 if (s
.inputFrameSize
!= 0)
5689 s
.latency
+= s
.readDeviceSafeLeadBytes
*1.0/s
.inputFrameSize
/ s
.sampleRate
;
5691 if (s
.outputFrameSize
!= 0)
5694 (s
.writeDeviceSafeLeadBytes
+ s
.writeDeviceBufferLeadBytes
)*1.0/s
.outputFrameSize
/ s
.sampleRate
;
5700 // Declarations for utility functions, callbacks, and structures
5701 // specific to the DirectSound implementation.
5702 static bool CALLBACK
deviceCountCallback(LPGUID lpguid
,
5703 LPCSTR lpcstrDescription
,
5704 LPCSTR lpcstrModule
,
5707 static bool CALLBACK
deviceInfoCallback(LPGUID lpguid
,
5708 LPCSTR lpcstrDescription
,
5709 LPCSTR lpcstrModule
,
5712 static bool CALLBACK
defaultDeviceCallback(LPGUID lpguid
,
5713 LPCSTR lpcstrDescription
,
5714 LPCSTR lpcstrModule
,
5717 static bool CALLBACK
deviceIdCallback(LPGUID lpguid
,
5718 LPCSTR lpcstrDescription
,
5719 LPCSTR lpcstrModule
,
5722 static char* getErrorString(int code
);
5725 #if defined(__WINDOWS_PTHREAD__)
5726 extern "C" void * callbackHandler(void * ptr
);
5728 extern "C" unsigned __stdcall
callbackHandler(void *ptr
);
5738 RtApiDs :: RtApiDs()
5740 // Dsound will run both-threaded. If CoInitialize fails, then just accept whatever the mainline
5741 // chose for a threading model.
5742 coInitialized
= false;
5743 HRESULT hr
= CoInitialize(NULL
);
5745 coInitialized
= true;
5750 if (nDevices_
<= 0) {
5751 sprintf(message_
, "RtApiDs: no Windows DirectSound audio devices found!");
5752 error(RtError::NO_DEVICES_FOUND
);
5756 RtApiDs :: ~RtApiDs()
5760 CoUninitialize(); // balanced call.
5762 if ( stream_
.mode
!= UNINITIALIZED
) closeStream();
5765 int RtApiDs :: getDefaultInputDevice(void)
5768 info
.name
[0] = '\0';
5770 // Enumerate through devices to find the default output.
5771 HRESULT result
= DirectSoundCaptureEnumerate((LPDSENUMCALLBACK
)defaultDeviceCallback
, &info
);
5772 if ( FAILED(result
) ) {
5773 sprintf(message_
, "RtApiDs: Error performing default input device enumeration: %s.",
5774 getErrorString(result
));
5775 error(RtError::WARNING
);
5779 for ( int i
=0; i
<nDevices_
; i
++ ) {
5780 if ( strncmp( info
.name
, devices_
[i
].name
.c_str(), 64 ) == 0 ) return i
;
5787 int RtApiDs :: getDefaultOutputDevice(void)
5790 info
.name
[0] = '\0';
5792 // Enumerate through devices to find the default output.
5793 HRESULT result
= DirectSoundEnumerate((LPDSENUMCALLBACK
)defaultDeviceCallback
, &info
);
5794 if ( FAILED(result
) ) {
5795 sprintf(message_
, "RtApiDs: Error performing default output device enumeration: %s.",
5796 getErrorString(result
));
5797 error(RtError::WARNING
);
5801 for ( int i
=0; i
<nDevices_
; i
++ )
5802 if ( strncmp( info
.name
, devices_
[i
].name
.c_str(), 64 ) == 0 ) return i
;
5807 void RtApiDs :: initialize(void)
5809 int i
, ins
= 0, outs
= 0, count
= 0;
5813 // Count DirectSound devices.
5814 result
= DirectSoundEnumerate((LPDSENUMCALLBACK
)deviceCountCallback
, &outs
);
5815 if ( FAILED(result
) ) {
5816 sprintf(message_
, "RtApiDs: Unable to enumerate through sound playback devices: %s.",
5817 getErrorString(result
));
5818 error(RtError::DRIVER_ERROR
);
5821 // Count DirectSoundCapture devices.
5822 result
= DirectSoundCaptureEnumerate((LPDSENUMCALLBACK
)deviceCountCallback
, &ins
);
5823 if ( FAILED(result
) ) {
5824 sprintf(message_
, "RtApiDs: Unable to enumerate through sound capture devices: %s.",
5825 getErrorString(result
));
5826 error(RtError::DRIVER_ERROR
);
5830 if (count
== 0) return;
5832 std::vector
<enum_info
> info(count
);
5833 for (i
=0; i
<count
; i
++) {
5834 info
[i
].name
[0] = '\0';
5835 if (i
< outs
) info
[i
].isInput
= false;
5836 else info
[i
].isInput
= true;
5839 // Get playback device info and check capabilities.
5840 result
= DirectSoundEnumerate((LPDSENUMCALLBACK
)deviceInfoCallback
, &info
[0]);
5841 if ( FAILED(result
) ) {
5842 sprintf(message_
, "RtApiDs: Unable to enumerate through sound playback devices: %s.",
5843 getErrorString(result
));
5844 error(RtError::DRIVER_ERROR
);
5847 // Get capture device info and check capabilities.
5848 result
= DirectSoundCaptureEnumerate((LPDSENUMCALLBACK
)deviceInfoCallback
, &info
[0]);
5849 if ( FAILED(result
) ) {
5850 sprintf(message_
, "RtApiDs: Unable to enumerate through sound capture devices: %s.",
5851 getErrorString(result
));
5852 error(RtError::DRIVER_ERROR
);
5855 // Create device structures for valid devices and write device names
5856 // to each. Devices are considered invalid if they cannot be
5857 // opened, they report < 1 supported channels, or they report no
5858 // supported data (capture only).
5861 for (i
=0; i
<count
; i
++) {
5862 if ( info
[i
].isValid
) {
5863 device
.name
.erase();
5864 device
.name
.append( (const char *)info
[i
].name
, strlen(info
[i
].name
)+1);
5865 devices_
.push_back(device
);
5869 nDevices_
= devices_
.size();
5873 void RtApiDs :: probeDeviceInfo(RtApiDevice
*info
)
5876 strncpy( dsinfo
.name
, info
->name
.c_str(), 64 );
5877 dsinfo
.isValid
= false;
5879 // Enumerate through input devices to find the id (if it exists).
5880 HRESULT result
= DirectSoundCaptureEnumerate((LPDSENUMCALLBACK
)deviceIdCallback
, &dsinfo
);
5881 if ( FAILED(result
) ) {
5882 sprintf(message_
, "RtApiDs: Error performing input device id enumeration: %s.",
5883 getErrorString(result
));
5884 error(RtError::DEBUG_WARNING
);
5888 // Do capture probe first.
5889 if ( dsinfo
.isValid
== false )
5890 goto playback_probe
;
5892 LPDIRECTSOUNDCAPTURE input
;
5893 result
= DirectSoundCaptureCreate( dsinfo
.id
, &input
, NULL
);
5894 if ( FAILED(result
) ) {
5895 sprintf(message_
, "RtApiDs: Could not create capture object (%s): %s.",
5896 info
->name
.c_str(), getErrorString(result
));
5897 error(RtError::DEBUG_WARNING
);
5898 goto playback_probe
;
5902 in_caps
.dwSize
= sizeof(in_caps
);
5903 result
= input
->GetCaps( &in_caps
);
5904 if ( FAILED(result
) ) {
5906 sprintf(message_
, "RtApiDs: Could not get capture capabilities (%s): %s.",
5907 info
->name
.c_str(), getErrorString(result
));
5908 error(RtError::DEBUG_WARNING
);
5909 goto playback_probe
;
5912 // Get input channel information.
5913 info
->minInputChannels
= 1;
5914 info
->maxInputChannels
= in_caps
.dwChannels
;
5916 // Get sample rate and format information.
5917 info
->sampleRates
.clear();
5918 if( in_caps
.dwChannels
== 2 ) {
5919 if( in_caps
.dwFormats
& WAVE_FORMAT_1S16
) info
->nativeFormats
|= RTAUDIO_SINT16
;
5920 if( in_caps
.dwFormats
& WAVE_FORMAT_2S16
) info
->nativeFormats
|= RTAUDIO_SINT16
;
5921 if( in_caps
.dwFormats
& WAVE_FORMAT_4S16
) info
->nativeFormats
|= RTAUDIO_SINT16
;
5922 if( in_caps
.dwFormats
& WAVE_FORMAT_1S08
) info
->nativeFormats
|= RTAUDIO_SINT8
;
5923 if( in_caps
.dwFormats
& WAVE_FORMAT_2S08
) info
->nativeFormats
|= RTAUDIO_SINT8
;
5924 if( in_caps
.dwFormats
& WAVE_FORMAT_4S08
) info
->nativeFormats
|= RTAUDIO_SINT8
;
5926 if ( info
->nativeFormats
& RTAUDIO_SINT16
) {
5927 if( in_caps
.dwFormats
& WAVE_FORMAT_1S16
) info
->sampleRates
.push_back( 11025 );
5928 if( in_caps
.dwFormats
& WAVE_FORMAT_2S16
) info
->sampleRates
.push_back( 22050 );
5929 if( in_caps
.dwFormats
& WAVE_FORMAT_4S16
) info
->sampleRates
.push_back( 44100 );
5931 else if ( info
->nativeFormats
& RTAUDIO_SINT8
) {
5932 if( in_caps
.dwFormats
& WAVE_FORMAT_1S08
) info
->sampleRates
.push_back( 11025 );
5933 if( in_caps
.dwFormats
& WAVE_FORMAT_2S08
) info
->sampleRates
.push_back( 22050 );
5934 if( in_caps
.dwFormats
& WAVE_FORMAT_4S08
) info
->sampleRates
.push_back( 44100 );
5937 else if ( in_caps
.dwChannels
== 1 ) {
5938 if( in_caps
.dwFormats
& WAVE_FORMAT_1M16
) info
->nativeFormats
|= RTAUDIO_SINT16
;
5939 if( in_caps
.dwFormats
& WAVE_FORMAT_2M16
) info
->nativeFormats
|= RTAUDIO_SINT16
;
5940 if( in_caps
.dwFormats
& WAVE_FORMAT_4M16
) info
->nativeFormats
|= RTAUDIO_SINT16
;
5941 if( in_caps
.dwFormats
& WAVE_FORMAT_1M08
) info
->nativeFormats
|= RTAUDIO_SINT8
;
5942 if( in_caps
.dwFormats
& WAVE_FORMAT_2M08
) info
->nativeFormats
|= RTAUDIO_SINT8
;
5943 if( in_caps
.dwFormats
& WAVE_FORMAT_4M08
) info
->nativeFormats
|= RTAUDIO_SINT8
;
5945 if ( info
->nativeFormats
& RTAUDIO_SINT16
) {
5946 if( in_caps
.dwFormats
& WAVE_FORMAT_1M16
) info
->sampleRates
.push_back( 11025 );
5947 if( in_caps
.dwFormats
& WAVE_FORMAT_2M16
) info
->sampleRates
.push_back( 22050 );
5948 if( in_caps
.dwFormats
& WAVE_FORMAT_4M16
) info
->sampleRates
.push_back( 44100 );
5950 else if ( info
->nativeFormats
& RTAUDIO_SINT8
) {
5951 if( in_caps
.dwFormats
& WAVE_FORMAT_1M08
) info
->sampleRates
.push_back( 11025 );
5952 if( in_caps
.dwFormats
& WAVE_FORMAT_2M08
) info
->sampleRates
.push_back( 22050 );
5953 if( in_caps
.dwFormats
& WAVE_FORMAT_4M08
) info
->sampleRates
.push_back( 44100 );
5956 else info
->minInputChannels
= 0; // technically, this would be an error
5962 dsinfo
.isValid
= false;
5964 // Enumerate through output devices to find the id (if it exists).
5965 result
= DirectSoundEnumerate((LPDSENUMCALLBACK
)deviceIdCallback
, &dsinfo
);
5966 if ( FAILED(result
) ) {
5967 sprintf(message_
, "RtApiDs: Error performing output device id enumeration: %s.",
5968 getErrorString(result
));
5969 error(RtError::DEBUG_WARNING
);
5973 // Now do playback probe.
5974 if ( dsinfo
.isValid
== false )
5975 goto check_parameters
;
5977 LPDIRECTSOUND output
;
5979 result
= DirectSoundCreate( dsinfo
.id
, &output
, NULL
);
5980 if ( FAILED(result
) ) {
5981 sprintf(message_
, "RtApiDs: Could not create playback object (%s): %s.",
5982 info
->name
.c_str(), getErrorString(result
));
5983 error(RtError::DEBUG_WARNING
);
5984 goto check_parameters
;
5987 out_caps
.dwSize
= sizeof(out_caps
);
5988 result
= output
->GetCaps( &out_caps
);
5989 if ( FAILED(result
) ) {
5991 sprintf(message_
, "RtApiDs: Could not get playback capabilities (%s): %s.",
5992 info
->name
.c_str(), getErrorString(result
));
5993 error(RtError::DEBUG_WARNING
);
5994 goto check_parameters
;
5997 // Get output channel information.
5998 info
->minOutputChannels
= 1;
5999 info
->maxOutputChannels
= ( out_caps
.dwFlags
& DSCAPS_PRIMARYSTEREO
) ? 2 : 1;
6001 // Get sample rate information. Use capture device rate information
6003 if ( info
->sampleRates
.size() == 0 ) {
6004 info
->sampleRates
.push_back( (int) out_caps
.dwMinSecondarySampleRate
);
6005 if ( out_caps
.dwMaxSecondarySampleRate
> out_caps
.dwMinSecondarySampleRate
)
6006 info
->sampleRates
.push_back( (int) out_caps
.dwMaxSecondarySampleRate
);
6009 // Check input rates against output rate range. If there's an
6010 // inconsistency (such as a duplex-capable device which reports a
6011 // single output rate of 48000 Hz), we'll go with the output
6012 // rate(s) since the DirectSoundCapture API is stupid and broken.
6013 // Note that the probed sample rate values are NOT used when
6014 // opening the device. Thanks to Tue Andersen for reporting this.
6015 if ( info
->sampleRates
.back() < (int) out_caps
.dwMinSecondarySampleRate
) {
6016 info
->sampleRates
.clear();
6017 info
->sampleRates
.push_back( (int) out_caps
.dwMinSecondarySampleRate
);
6018 if ( out_caps
.dwMaxSecondarySampleRate
> out_caps
.dwMinSecondarySampleRate
)
6019 info
->sampleRates
.push_back( (int) out_caps
.dwMaxSecondarySampleRate
);
6022 for ( int i
=info
->sampleRates
.size()-1; i
>=0; i
-- ) {
6023 if ( (unsigned int) info
->sampleRates
[i
] > out_caps
.dwMaxSecondarySampleRate
)
6024 info
->sampleRates
.erase( info
->sampleRates
.begin() + i
);
6026 while ( info
->sampleRates
.size() > 0 &&
6027 ((unsigned int) info
->sampleRates
[0] < out_caps
.dwMinSecondarySampleRate
) ) {
6028 info
->sampleRates
.erase( info
->sampleRates
.begin() );
6033 // Get format information.
6034 if ( out_caps
.dwFlags
& DSCAPS_PRIMARY16BIT
) info
->nativeFormats
|= RTAUDIO_SINT16
;
6035 if ( out_caps
.dwFlags
& DSCAPS_PRIMARY8BIT
) info
->nativeFormats
|= RTAUDIO_SINT8
;
6040 if ( info
->maxInputChannels
== 0 && info
->maxOutputChannels
== 0 ) {
6041 sprintf(message_
, "RtApiDs: no reported input or output channels for device (%s).",
6042 info
->name
.c_str());
6043 error(RtError::DEBUG_WARNING
);
6046 if ( info
->sampleRates
.size() == 0 || info
->nativeFormats
== 0 ) {
6047 sprintf(message_
, "RtApiDs: no reported sample rates or data formats for device (%s).",
6048 info
->name
.c_str());
6049 error(RtError::DEBUG_WARNING
);
6053 // Determine duplex status.
6054 if (info
->maxInputChannels
< info
->maxOutputChannels
)
6055 info
->maxDuplexChannels
= info
->maxInputChannels
;
6057 info
->maxDuplexChannels
= info
->maxOutputChannels
;
6058 if (info
->minInputChannels
< info
->minOutputChannels
)
6059 info
->minDuplexChannels
= info
->minInputChannels
;
6061 info
->minDuplexChannels
= info
->minOutputChannels
;
6063 if ( info
->maxDuplexChannels
> 0 ) info
->hasDuplexSupport
= true;
6064 else info
->hasDuplexSupport
= false;
6066 info
->probed
= true;
6071 bool RtApiDs :: probeDeviceOpen( int device
, StreamMode mode
, int channels
,
6072 int sampleRate
, RtAudioFormat format
,
6073 int *bufferSize
, int numberOfBuffers
)
6076 HWND hWnd
= GetForegroundWindow();
6078 // According to a note in PortAudio, using GetDesktopWindow()
6079 // instead of GetForegroundWindow() is supposed to avoid problems
6080 // that occur when the application's window is not the foreground
6081 // window. Also, if the application window closes before the
6082 // DirectSound buffer, DirectSound can crash. However, for console
6083 // applications, no sound was produced when using GetDesktopWindow().
6089 // Check the numberOfBuffers parameter and limit the lowest value to
6090 // two. This is a judgement call and a value of two is probably too
6091 // low for capture, but it should work for playback.
6092 if (numberOfBuffers
< 2)
6095 nBuffers
= numberOfBuffers
;
6097 // Define the wave format structure (16-bit PCM, srate, channels)
6098 WAVEFORMATEX waveFormat
;
6099 ZeroMemory(&waveFormat
, sizeof(WAVEFORMATEX
));
6100 waveFormat
.wFormatTag
= WAVE_FORMAT_PCM
;
6101 waveFormat
.nChannels
= channels
;
6102 waveFormat
.nSamplesPerSec
= (unsigned long) sampleRate
;
6104 // Determine the data format.
6105 if ( devices_
[device
].nativeFormats
) { // 8-bit and/or 16-bit support
6106 if ( format
== RTAUDIO_SINT8
) {
6107 if ( devices_
[device
].nativeFormats
& RTAUDIO_SINT8
)
6108 waveFormat
.wBitsPerSample
= 8;
6110 waveFormat
.wBitsPerSample
= 16;
6113 if ( devices_
[device
].nativeFormats
& RTAUDIO_SINT16
)
6114 waveFormat
.wBitsPerSample
= 16;
6116 waveFormat
.wBitsPerSample
= 8;
6120 sprintf(message_
, "RtApiDs: no reported data formats for device (%s).",
6121 devices_
[device
].name
.c_str());
6122 error(RtError::DEBUG_WARNING
);
6126 waveFormat
.nBlockAlign
= waveFormat
.nChannels
* waveFormat
.wBitsPerSample
/ 8;
6127 waveFormat
.nAvgBytesPerSec
= waveFormat
.nSamplesPerSec
* waveFormat
.nBlockAlign
;
6129 // Determine the device buffer size. By default, 32k,
6130 // but we will grow it to make allowances for very large softare buffer sizes.
6131 DWORD dsBufferSize
= 0;
6132 DWORD dsPointerLeadTime
= 0;
6134 buffer_size
= MINIMUM_DEVICE_BUFFER_SIZE
; // sound cards will always *knock wood* support this
6137 // poisonously large buffer lead time? Then increase the device buffer size accordingly.
6138 while (dsPointerLeadTime
*2U > (DWORD
)buffer_size
)
6146 void *ohandle
= 0, *bhandle
= 0;
6147 strncpy( dsinfo
.name
, devices_
[device
].name
.c_str(), 64 );
6148 dsinfo
.isValid
= false;
6149 if ( mode
== OUTPUT
) {
6150 dsPointerLeadTime
= (numberOfBuffers
) *
6152 (waveFormat
.wBitsPerSample
/ 8)
6156 if ( devices_
[device
].maxOutputChannels
< channels
) {
6157 sprintf(message_
, "RtApiDs: requested channels (%d) > than supported (%d) by device (%s).",
6158 channels
, devices_
[device
].maxOutputChannels
, devices_
[device
].name
.c_str());
6159 error(RtError::DEBUG_WARNING
);
6163 // Enumerate through output devices to find the id (if it exists).
6164 result
= DirectSoundEnumerate((LPDSENUMCALLBACK
)deviceIdCallback
, &dsinfo
);
6165 if ( FAILED(result
) ) {
6166 sprintf(message_
, "RtApiDs: Error performing output device id enumeration: %s.",
6167 getErrorString(result
));
6168 error(RtError::DEBUG_WARNING
);
6172 if ( dsinfo
.isValid
== false ) {
6173 sprintf(message_
, "RtApiDs: output device (%s) id not found!", devices_
[device
].name
.c_str());
6174 error(RtError::DEBUG_WARNING
);
6178 LPGUID id
= dsinfo
.id
;
6179 LPDIRECTSOUND object
;
6180 LPDIRECTSOUNDBUFFER buffer
;
6181 DSBUFFERDESC bufferDescription
;
6183 result
= DirectSoundCreate( id
, &object
, NULL
);
6184 if ( FAILED(result
) ) {
6185 sprintf(message_
, "RtApiDs: Could not create playback object (%s): %s.",
6186 devices_
[device
].name
.c_str(), getErrorString(result
));
6187 error(RtError::DEBUG_WARNING
);
6191 // Set cooperative level to DSSCL_EXCLUSIVE
6192 result
= object
->SetCooperativeLevel(hWnd
, DSSCL_EXCLUSIVE
);
6193 if ( FAILED(result
) ) {
6195 sprintf(message_
, "RtApiDs: Unable to set cooperative level (%s): %s.",
6196 devices_
[device
].name
.c_str(), getErrorString(result
));
6197 error(RtError::DEBUG_WARNING
);
6201 // Even though we will write to the secondary buffer, we need to
6202 // access the primary buffer to set the correct output format
6203 // (since the default is 8-bit, 22 kHz!). Setup the DS primary
6204 // buffer description.
6205 ZeroMemory(&bufferDescription
, sizeof(DSBUFFERDESC
));
6206 bufferDescription
.dwSize
= sizeof(DSBUFFERDESC
);
6207 bufferDescription
.dwFlags
= DSBCAPS_PRIMARYBUFFER
;
6208 // Obtain the primary buffer
6209 result
= object
->CreateSoundBuffer(&bufferDescription
, &buffer
, NULL
);
6210 if ( FAILED(result
) ) {
6212 sprintf(message_
, "RtApiDs: Unable to access primary buffer (%s): %s.",
6213 devices_
[device
].name
.c_str(), getErrorString(result
));
6214 error(RtError::DEBUG_WARNING
);
6218 // Set the primary DS buffer sound format.
6219 result
= buffer
->SetFormat(&waveFormat
);
6220 if ( FAILED(result
) ) {
6222 sprintf(message_
, "RtApiDs: Unable to set primary buffer format (%s): %s.",
6223 devices_
[device
].name
.c_str(), getErrorString(result
));
6224 error(RtError::DEBUG_WARNING
);
6228 // Setup the secondary DS buffer description.
6229 dsBufferSize
= (DWORD
)buffer_size
;
6230 ZeroMemory(&bufferDescription
, sizeof(DSBUFFERDESC
));
6231 bufferDescription
.dwSize
= sizeof(DSBUFFERDESC
);
6232 bufferDescription
.dwFlags
= ( DSBCAPS_STICKYFOCUS
|
6233 DSBCAPS_GETCURRENTPOSITION2
|
6234 DSBCAPS_LOCHARDWARE
); // Force hardware mixing
6235 bufferDescription
.dwBufferBytes
= buffer_size
;
6236 bufferDescription
.lpwfxFormat
= &waveFormat
;
6238 // Try to create the secondary DS buffer. If that doesn't work,
6239 // try to use software mixing. Otherwise, there's a problem.
6240 result
= object
->CreateSoundBuffer(&bufferDescription
, &buffer
, NULL
);
6241 if ( FAILED(result
) ) {
6242 bufferDescription
.dwFlags
= ( DSBCAPS_STICKYFOCUS
|
6243 DSBCAPS_GETCURRENTPOSITION2
|
6244 DSBCAPS_LOCSOFTWARE
); // Force software mixing
6245 result
= object
->CreateSoundBuffer(&bufferDescription
, &buffer
, NULL
);
6246 if ( FAILED(result
) ) {
6248 sprintf(message_
, "RtApiDs: Unable to create secondary DS buffer (%s): %s.",
6249 devices_
[device
].name
.c_str(), getErrorString(result
));
6250 error(RtError::DEBUG_WARNING
);
6255 // Get the buffer size ... might be different from what we specified.
6257 dsbcaps
.dwSize
= sizeof(DSBCAPS
);
6258 buffer
->GetCaps(&dsbcaps
);
6259 buffer_size
= dsbcaps
.dwBufferBytes
;
6261 // Lock the DS buffer
6262 result
= buffer
->Lock(0, buffer_size
, &audioPtr
, &dataLen
, NULL
, NULL
, 0);
6263 if ( FAILED(result
) ) {
6266 sprintf(message_
, "RtApiDs: Unable to lock buffer (%s): %s.",
6267 devices_
[device
].name
.c_str(), getErrorString(result
));
6268 error(RtError::DEBUG_WARNING
);
6272 // Zero the DS buffer
6273 ZeroMemory(audioPtr
, dataLen
);
6275 // Unlock the DS buffer
6276 result
= buffer
->Unlock(audioPtr
, dataLen
, NULL
, 0);
6277 if ( FAILED(result
) ) {
6280 sprintf(message_
, "RtApiDs: Unable to unlock buffer(%s): %s.",
6281 devices_
[device
].name
.c_str(), getErrorString(result
));
6282 error(RtError::DEBUG_WARNING
);
6286 ohandle
= (void *) object
;
6287 bhandle
= (void *) buffer
;
6288 stream_
.nDeviceChannels
[0] = channels
;
6291 if ( mode
== INPUT
) {
6293 if ( devices_
[device
].maxInputChannels
< channels
) {
6294 sprintf(message_
, "RtAudioDS: device (%s) does not support %d channels.", devices_
[device
].name
.c_str(), channels
);
6295 error(RtError::DEBUG_WARNING
);
6299 // Enumerate through input devices to find the id (if it exists).
6300 result
= DirectSoundCaptureEnumerate((LPDSENUMCALLBACK
)deviceIdCallback
, &dsinfo
);
6301 if ( FAILED(result
) ) {
6302 sprintf(message_
, "RtApiDs: Error performing input device id enumeration: %s.",
6303 getErrorString(result
));
6304 error(RtError::DEBUG_WARNING
);
6308 if ( dsinfo
.isValid
== false ) {
6309 sprintf(message_
, "RtAudioDS: input device (%s) id not found!", devices_
[device
].name
.c_str());
6310 error(RtError::DEBUG_WARNING
);
6314 LPGUID id
= dsinfo
.id
;
6315 LPDIRECTSOUNDCAPTURE object
;
6316 LPDIRECTSOUNDCAPTUREBUFFER buffer
;
6317 DSCBUFFERDESC bufferDescription
;
6319 result
= DirectSoundCaptureCreate( id
, &object
, NULL
);
6320 if ( FAILED(result
) ) {
6321 sprintf(message_
, "RtApiDs: Could not create capture object (%s): %s.",
6322 devices_
[device
].name
.c_str(), getErrorString(result
));
6323 error(RtError::DEBUG_WARNING
);
6327 // Setup the secondary DS buffer description.
6328 dsBufferSize
= buffer_size
;
6329 ZeroMemory(&bufferDescription
, sizeof(DSCBUFFERDESC
));
6330 bufferDescription
.dwSize
= sizeof(DSCBUFFERDESC
);
6331 bufferDescription
.dwFlags
= 0;
6332 bufferDescription
.dwReserved
= 0;
6333 bufferDescription
.dwBufferBytes
= buffer_size
;
6334 bufferDescription
.lpwfxFormat
= &waveFormat
;
6336 // Create the capture buffer.
6337 result
= object
->CreateCaptureBuffer(&bufferDescription
, &buffer
, NULL
);
6338 if ( FAILED(result
) ) {
6340 sprintf(message_
, "RtApiDs: Unable to create capture buffer (%s): %s.",
6341 devices_
[device
].name
.c_str(), getErrorString(result
));
6342 error(RtError::DEBUG_WARNING
);
6346 // Lock the capture buffer
6347 result
= buffer
->Lock(0, buffer_size
, &audioPtr
, &dataLen
, NULL
, NULL
, 0);
6348 if ( FAILED(result
) ) {
6351 sprintf(message_
, "RtApiDs: Unable to lock capture buffer (%s): %s.",
6352 devices_
[device
].name
.c_str(), getErrorString(result
));
6353 error(RtError::DEBUG_WARNING
);
6358 ZeroMemory(audioPtr
, dataLen
);
6360 // Unlock the buffer
6361 result
= buffer
->Unlock(audioPtr
, dataLen
, NULL
, 0);
6362 if ( FAILED(result
) ) {
6365 sprintf(message_
, "RtApiDs: Unable to unlock capture buffer (%s): %s.",
6366 devices_
[device
].name
.c_str(), getErrorString(result
));
6367 error(RtError::DEBUG_WARNING
);
6371 ohandle
= (void *) object
;
6372 bhandle
= (void *) buffer
;
6373 stream_
.nDeviceChannels
[1] = channels
;
6376 stream_
.userFormat
= format
;
6377 if ( waveFormat
.wBitsPerSample
== 8 )
6378 stream_
.deviceFormat
[mode
] = RTAUDIO_SINT8
;
6380 stream_
.deviceFormat
[mode
] = RTAUDIO_SINT16
;
6381 stream_
.nUserChannels
[mode
] = channels
;
6383 stream_
.bufferSize
= *bufferSize
;
6385 // Set flags for buffer conversion
6386 stream_
.doConvertBuffer
[mode
] = false;
6387 if (stream_
.userFormat
!= stream_
.deviceFormat
[mode
])
6388 stream_
.doConvertBuffer
[mode
] = true;
6389 if (stream_
.nUserChannels
[mode
] < stream_
.nDeviceChannels
[mode
])
6390 stream_
.doConvertBuffer
[mode
] = true;
6392 // Allocate necessary internal buffers
6393 if ( stream_
.nUserChannels
[0] != stream_
.nUserChannels
[1] ) {
6396 if (stream_
.nUserChannels
[0] >= stream_
.nUserChannels
[1])
6397 buffer_bytes
= stream_
.nUserChannels
[0];
6399 buffer_bytes
= stream_
.nUserChannels
[1];
6401 buffer_bytes
*= *bufferSize
* formatBytes(stream_
.userFormat
);
6402 if (stream_
.userBuffer
) free(stream_
.userBuffer
);
6403 stream_
.userBuffer
= (char *) calloc(buffer_bytes
, 1);
6404 if (stream_
.userBuffer
== NULL
) {
6405 sprintf(message_
, "RtApiDs: error allocating user buffer memory (%s).",
6406 devices_
[device
].name
.c_str());
6411 if ( stream_
.doConvertBuffer
[mode
] ) {
6414 bool makeBuffer
= true;
6415 if ( mode
== OUTPUT
)
6416 buffer_bytes
= stream_
.nDeviceChannels
[0] * formatBytes(stream_
.deviceFormat
[0]);
6417 else { // mode == INPUT
6418 buffer_bytes
= stream_
.nDeviceChannels
[1] * formatBytes(stream_
.deviceFormat
[1]);
6419 if ( stream_
.mode
== OUTPUT
&& stream_
.deviceBuffer
) {
6420 long bytes_out
= stream_
.nDeviceChannels
[0] * formatBytes(stream_
.deviceFormat
[0]);
6421 if ( buffer_bytes
< bytes_out
) makeBuffer
= false;
6426 buffer_bytes
*= *bufferSize
;
6427 if (stream_
.deviceBuffer
) free(stream_
.deviceBuffer
);
6428 stream_
.deviceBuffer
= (char *) calloc(buffer_bytes
, 1);
6429 if (stream_
.deviceBuffer
== NULL
) {
6430 sprintf(message_
, "RtApiDs: error allocating device buffer memory (%s).",
6431 devices_
[device
].name
.c_str());
6437 // Allocate our DsHandle structures for the stream.
6439 if ( stream_
.apiHandle
== 0 ) {
6440 handles
= (DsHandle
*) calloc(2, sizeof(DsHandle
));
6441 if ( handles
== NULL
) {
6442 sprintf(message_
, "RtApiDs: Error allocating DsHandle memory (%s).",
6443 devices_
[device
].name
.c_str());
6446 handles
[0].object
= 0;
6447 handles
[1].object
= 0;
6448 stream_
.apiHandle
= (void *) handles
;
6451 handles
= (DsHandle
*) stream_
.apiHandle
;
6452 handles
[mode
].object
= ohandle
;
6453 handles
[mode
].buffer
= bhandle
;
6454 handles
[mode
].dsBufferSize
= dsBufferSize
;
6455 handles
[mode
].dsPointerLeadTime
= dsPointerLeadTime
;
6457 stream_
.device
[mode
] = device
;
6458 stream_
.state
= STREAM_STOPPED
;
6459 if ( stream_
.mode
== OUTPUT
&& mode
== INPUT
)
6460 // We had already set up an output stream.
6461 stream_
.mode
= DUPLEX
;
6463 stream_
.mode
= mode
;
6464 stream_
.nBuffers
= nBuffers
;
6465 stream_
.sampleRate
= sampleRate
;
6467 // Setup the buffer conversion information structure.
6468 if ( stream_
.doConvertBuffer
[mode
] ) {
6469 if (mode
== INPUT
) { // convert device to user buffer
6470 stream_
.convertInfo
[mode
].inJump
= stream_
.nDeviceChannels
[1];
6471 stream_
.convertInfo
[mode
].outJump
= stream_
.nUserChannels
[1];
6472 stream_
.convertInfo
[mode
].inFormat
= stream_
.deviceFormat
[1];
6473 stream_
.convertInfo
[mode
].outFormat
= stream_
.userFormat
;
6475 else { // convert user to device buffer
6476 stream_
.convertInfo
[mode
].inJump
= stream_
.nUserChannels
[0];
6477 stream_
.convertInfo
[mode
].outJump
= stream_
.nDeviceChannels
[0];
6478 stream_
.convertInfo
[mode
].inFormat
= stream_
.userFormat
;
6479 stream_
.convertInfo
[mode
].outFormat
= stream_
.deviceFormat
[0];
6482 if ( stream_
.convertInfo
[mode
].inJump
< stream_
.convertInfo
[mode
].outJump
)
6483 stream_
.convertInfo
[mode
].channels
= stream_
.convertInfo
[mode
].inJump
;
6485 stream_
.convertInfo
[mode
].channels
= stream_
.convertInfo
[mode
].outJump
;
6487 // Set up the interleave/deinterleave offsets.
6488 if ( mode
== INPUT
&& stream_
.deInterleave
[1] ) {
6489 for (int k
=0; k
<stream_
.convertInfo
[mode
].channels
; k
++) {
6490 stream_
.convertInfo
[mode
].inOffset
.push_back( k
* stream_
.bufferSize
);
6491 stream_
.convertInfo
[mode
].outOffset
.push_back( k
);
6492 stream_
.convertInfo
[mode
].inJump
= 1;
6495 else if (mode
== OUTPUT
&& stream_
.deInterleave
[0]) {
6496 for (int k
=0; k
<stream_
.convertInfo
[mode
].channels
; k
++) {
6497 stream_
.convertInfo
[mode
].inOffset
.push_back( k
);
6498 stream_
.convertInfo
[mode
].outOffset
.push_back( k
* stream_
.bufferSize
);
6499 stream_
.convertInfo
[mode
].outJump
= 1;
6503 for (int k
=0; k
<stream_
.convertInfo
[mode
].channels
; k
++) {
6504 stream_
.convertInfo
[mode
].inOffset
.push_back( k
);
6505 stream_
.convertInfo
[mode
].outOffset
.push_back( k
);
6514 if (handles
[0].object
) {
6515 LPDIRECTSOUND object
= (LPDIRECTSOUND
) handles
[0].object
;
6516 LPDIRECTSOUNDBUFFER buffer
= (LPDIRECTSOUNDBUFFER
) handles
[0].buffer
;
6517 if (buffer
) buffer
->Release();
6520 if (handles
[1].object
) {
6521 LPDIRECTSOUNDCAPTURE object
= (LPDIRECTSOUNDCAPTURE
) handles
[1].object
;
6522 LPDIRECTSOUNDCAPTUREBUFFER buffer
= (LPDIRECTSOUNDCAPTUREBUFFER
) handles
[1].buffer
;
6523 if (buffer
) buffer
->Release();
6527 stream_
.apiHandle
= 0;
6530 if (stream_
.userBuffer
) {
6531 free(stream_
.userBuffer
);
6532 stream_
.userBuffer
= 0;
6535 error(RtError::DEBUG_WARNING
);
// Registers a user callback for the open stream and spawns the thread that
// will drive it (callbackHandler). The callback/userData/object pointers are
// stashed in the stream's CallbackInfo; a second registration is rejected
// with a WARNING.
// NOTE(review): this capture of the listing is missing interior lines
// (the numbered listing jumps, e.g. 6547-6549, 6568-6570) — the opening
// brace, early `return`, `#else`, and `#endif` lines are not shown here.
6539 void RtApiDs :: setStreamCallback(RtAudioCallback callback
, void *userData
)
// Alias into the stream's embedded CallbackInfo structure.
6543 CallbackInfo
*info
= (CallbackInfo
*) &stream_
.callbackInfo
;
// Only one callback may be active per stream.
6544 if ( info
->usingCallback
) {
6545 sprintf(message_
, "RtApiDs: A callback is already set for this stream!");
6546 error(RtError::WARNING
);
// Record the callback before starting the thread so the handler sees
// consistent state as soon as it runs.
6550 info
->callback
= (void *) callback
;
6551 info
->userData
= userData
;
6552 info
->usingCallback
= true;
6553 info
->object
= (void *) this;
// Thread creation: pthreads build vs. native Windows (_beginthreadex) build.
6556 #if defined(__WINDOWS_PTHREAD__)
6557 pthread_attr_t attr
;
6558 pthread_attr_init(&attr
);
6559 // pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
6560 // pthread_attr_setschedpolicy(&attr, SCHED_RR);
6562 int err
= pthread_create(&info
->thread
, &attr
, callbackHandler
, &stream_
.callbackInfo
);
6563 pthread_attr_destroy(&attr
);
// On thread-start failure, roll back the usingCallback flag before reporting.
6565 info
->usingCallback
= false;
6566 sprintf(message_
, "RtApiDs: error starting callback thread!");
6567 error(RtError::THREAD_ERROR
);
// Native Windows path: _beginthreadex returns 0 on failure.
6571 info
->thread
= _beginthreadex(NULL
, 0, &callbackHandler
,
6572 &stream_
.callbackInfo
, 0, &thread_id
);
6573 if (info
->thread
== 0) {
6574 info
->usingCallback
= false;
6575 sprintf(message_
, "RtApiDs: error starting callback thread!");
6576 error(RtError::THREAD_ERROR
);
6580 // When spawning multiple threads in quick succession, it appears to be
6581 // necessary to wait a bit for each to initialize ... another windoism!
// Tears down the callback thread started by setStreamCallback(): clears the
// usingCallback flag (the handler's exit condition), joins the thread via
// WaitForSingleObject, closes its handle, and nulls out the CallbackInfo
// fields under the stream mutex.
// NOTE(review): listing lines are missing between the visible ones (e.g.
// 6586-6588, 6592-6593) — presumably the opening brace and a stopStream()
// call when the stream is running; cannot be confirmed from this capture.
6585 void RtApiDs :: cancelStreamCallback()
6589 if (stream_
.callbackInfo
.usingCallback
) {
// If the stream is still running it is stopped first (body not visible here).
6591 if (stream_
.state
== STREAM_RUNNING
)
6594 MUTEX_LOCK(&stream_
.mutex
);
// Clearing this flag signals the callback thread to exit its loop.
6596 stream_
.callbackInfo
.usingCallback
= false;
// Join, then release, the callback thread.
6597 WaitForSingleObject( (HANDLE
)stream_
.callbackInfo
.thread
, INFINITE
);
6598 CloseHandle( (HANDLE
)stream_
.callbackInfo
.thread
);
6599 stream_
.callbackInfo
.thread
= 0;
6600 stream_
.callbackInfo
.callback
= NULL
;
6601 stream_
.callbackInfo
.userData
= NULL
;
6603 MUTEX_UNLOCK(&stream_
.mutex
);
// Closes the open stream and releases everything associated with it:
// the callback thread, the DirectSound playback/capture objects and buffers
// held in the DsHandle pair, and the user/device conversion buffers.
// Called from the class destructor, so it reports with a WARNING rather
// than throwing (see the comment at 6609-6610).
// NOTE(review): the numbered listing jumps (e.g. 6629-6635, 6639-6645) —
// the buffer->Release()/object->Release() calls and closing braces for the
// two handle branches are missing from this capture; the local `object`
// and `buffer` variables below are otherwise unused as shown.
6607 void RtApiDs :: closeStream()
6609 // We don't want an exception to be thrown here because this
6610 // function is called by our class destructor. So, do our own
6612 if ( stream_
.mode
== UNINITIALIZED
) {
6613 sprintf(message_
, "RtApiDs::closeStream(): no open stream to close!");
6614 error(RtError::WARNING
);
// Shut down the callback thread, if one is running (same join pattern as
// cancelStreamCallback).
6618 if (stream_
.callbackInfo
.usingCallback
) {
6619 stream_
.callbackInfo
.usingCallback
= false;
6620 WaitForSingleObject( (HANDLE
)stream_
.callbackInfo
.thread
, INFINITE
);
6621 CloseHandle( (HANDLE
)stream_
.callbackInfo
.thread
);
// Release the DirectSound playback (index 0) and capture (index 1) objects.
6624 DsHandle
*handles
= (DsHandle
*) stream_
.apiHandle
;
6626 if (handles
[0].object
) {
6627 LPDIRECTSOUND object
= (LPDIRECTSOUND
) handles
[0].object
;
6628 LPDIRECTSOUNDBUFFER buffer
= (LPDIRECTSOUNDBUFFER
) handles
[0].buffer
;
6636 if (handles
[1].object
) {
6637 LPDIRECTSOUNDCAPTURE object
= (LPDIRECTSOUNDCAPTURE
) handles
[1].object
;
6638 LPDIRECTSOUNDCAPTUREBUFFER buffer
= (LPDIRECTSOUNDCAPTUREBUFFER
) handles
[1].buffer
;
6646 stream_
.apiHandle
= 0;
// Free the interleaved user-format buffer.
6649 if (stream_
.userBuffer
) {
6650 free(stream_
.userBuffer
);
6651 stream_
.userBuffer
= 0;
// Free the device-format conversion buffer.
6654 if (stream_
.deviceBuffer
) {
6655 free(stream_
.deviceBuffer
);
6656 stream_
.deviceBuffer
= 0;
// Mark the stream closed so a second closeStream() is a no-op warning.
6659 stream_
.mode
= UNINITIALIZED
;
// Starts the stream: resets the RtDsStatistics hook, arms the duplex
// pre-roll (0.5 s of silence so playback and capture can synchronize),
// starts the DirectSound output buffer looping (Play) and/or the capture
// buffer (Start), then marks the stream STREAM_RUNNING. All under the
// stream mutex.
// NOTE(review): listing gaps (e.g. 6663-6664, 6672-6673) hide the opening
// brace and presumably the timeBeginPeriod(1) call matching the
// timeEndPeriod(1) seen in stopStream — cannot be confirmed from this capture.
6662 void RtApiDs :: startStream()
6665 if (stream_
.state
== STREAM_RUNNING
) return;
6668 // increase scheduler frequency on lesser windows (a side-effect of increasing timer accuracy.
6669 // on greater windows (Win2K or later), this is already in effect.
6671 MUTEX_LOCK(&stream_
.mutex
);
6674 DsHandle
*handles
= (DsHandle
*) stream_
.apiHandle
;
// Reset the backdoor performance-statistics structure for this run.
6679 memset(&statistics
,0,sizeof(statistics
));
6680 statistics
.sampleRate
= stream_
.sampleRate
;
6681 statistics
.writeDeviceBufferLeadBytes
= handles
[0].dsPointerLeadTime
;
// Pointer-chasing state: buffers are not "rolling" until tickStream observes
// both device cursors moving.
6683 buffersRolling
= false;
6684 duplexPrerollBytes
= 0;
6686 if (stream_
.mode
== DUPLEX
)
6688 // 0.5 seconds of silence in DUPLEX mode while the devices spin up and synchronize.
6689 duplexPrerollBytes
= (int)(0.5*stream_
.sampleRate
*formatBytes( stream_
.deviceFormat
[1])*stream_
.nDeviceChannels
[1]);
6692 #ifdef GENERATE_DEBUG_LOG
6693 currentDebugLogEntry
= 0;
// Start the playback buffer looping continuously.
6697 if (stream_
.mode
== OUTPUT
|| stream_
.mode
== DUPLEX
) {
6698 statistics
.outputFrameSize
= formatBytes( stream_
.deviceFormat
[0])
6699 *stream_
.nDeviceChannels
[0];
6702 LPDIRECTSOUNDBUFFER buffer
= (LPDIRECTSOUNDBUFFER
) handles
[0].buffer
;
6703 result
= buffer
->Play(0, 0, DSBPLAY_LOOPING
);
6704 if ( FAILED(result
) ) {
6705 sprintf(message_
, "RtApiDs: Unable to start buffer (%s): %s.",
6706 devices_
[stream_
.device
[0]].name
.c_str(), getErrorString(result
));
6707 error(RtError::DRIVER_ERROR
);
// Start the capture buffer looping continuously.
6711 if (stream_
.mode
== INPUT
|| stream_
.mode
== DUPLEX
) {
6712 statistics
.inputFrameSize
= formatBytes( stream_
.deviceFormat
[1])
6713 *stream_
.nDeviceChannels
[1];
6715 LPDIRECTSOUNDCAPTUREBUFFER buffer
= (LPDIRECTSOUNDCAPTUREBUFFER
) handles
[1].buffer
;
6716 result
= buffer
->Start(DSCBSTART_LOOPING
);
6717 if ( FAILED(result
) ) {
6718 sprintf(message_
, "RtApiDs: Unable to start capture buffer (%s): %s.",
6719 devices_
[stream_
.device
[1]].name
.c_str(), getErrorString(result
));
6720 error(RtError::DRIVER_ERROR
);
6723 stream_
.state
= STREAM_RUNNING
;
6725 MUTEX_UNLOCK(&stream_
.mutex
);
// Gracefully stops the stream. DirectSound has no "drain" call, so for
// playback this writes stream_.nBuffers buffers of silence behind the play
// cursor (sleeping until each write region is safe) before stopping; for
// capture it stops the buffer and zeroes it so a restart doesn't replay
// stale data. Buffer pointers are reset to 0 for the next start.
// NOTE(review): several `&currentPos` arguments appear here as `¤tPos`
// — an HTML-entity mangling of the original text, preserved verbatim.
// NOTE(review): listing gaps hide the playback-buffer Stop() call and
// various closing braces; the numbered listing jumps at e.g. 6738-6739,
// 6775-6776, 6835-6836.
6728 void RtApiDs :: stopStream()
6731 if (stream_
.state
== STREAM_STOPPED
) return;
6734 // Change the state before the lock to improve shutdown response
6735 // when using a callback.
6736 stream_
.state
= STREAM_STOPPED
;
6737 MUTEX_LOCK(&stream_
.mutex
);
6740 timeEndPeriod(1); // revert to normal scheduler frequency on lesser windows.
6742 #ifdef GENERATE_DEBUG_LOG
6743 // write the timing log to a .TSV file for analysis in Excel.
6744 unlink("c:/rtaudiolog.txt");
6745 std::ofstream
os("c:/rtaudiolog.txt");
6746 os
<< "writeTime\treadDelay\tnextWritePointer\tnextReadPointer\tcurrentWritePointer\tsafeWritePointer\tcurrentReadPointer\tsafeReadPointer" << std::endl
;
6747 for (int i
= 0; i
< currentDebugLogEntry
; ++i
)
6749 TTickRecord
&r
= debugLog
[i
];
6751 << r
.writeTime
-debugLog
[0].writeTime
<< "\t" << (r
.readTime
-r
.writeTime
) << "\t"
6752 << r
.nextWritePointer
% BUFFER_SIZE
<< "\t" << r
.nextReadPointer
% BUFFER_SIZE
6753 << "\t" << r
.currentWritePointer
% BUFFER_SIZE
<< "\t" << r
.safeWritePointer
% BUFFER_SIZE
6754 << "\t" << r
.currentReadPointer
% BUFFER_SIZE
<< "\t" << r
.safeReadPointer
% BUFFER_SIZE
<< std::endl
;
6758 // There is no specific DirectSound API call to "drain" a buffer
6759 // before stopping. We can hack this for playback by writing zeroes
6760 // for another bufferSize * nBuffers frames. For capture, the
6761 // concept is less clear so we'll repeat what we do in the
6762 // abortStream() case.
6765 LPVOID buffer1
= NULL
;
6766 LPVOID buffer2
= NULL
;
6767 DWORD bufferSize1
= 0;
6768 DWORD bufferSize2
= 0;
6769 DsHandle
*handles
= (DsHandle
*) stream_
.apiHandle
;
6770 if (stream_
.mode
== OUTPUT
|| stream_
.mode
== DUPLEX
) {
6772 DWORD currentPos
, safePos
;
6773 long buffer_bytes
= stream_
.bufferSize
* stream_
.nDeviceChannels
[0]
6774 * formatBytes(stream_
.deviceFormat
[0]);
6777 LPDIRECTSOUNDBUFFER dsBuffer
= (LPDIRECTSOUNDBUFFER
) handles
[0].buffer
;
6778 long nextWritePos
= handles
[0].bufferPointer
;
6779 dsBufferSize
= handles
[0].dsBufferSize
;
6781 // Write zeroes for nBuffer counts.
6782 for (int i
=0; i
<stream_
.nBuffers
; i
++) {
6784 // Find out where the read and "safe write" pointers are.
6785 result
= dsBuffer
->GetCurrentPosition(¤tPos
, &safePos
);
6786 if ( FAILED(result
) ) {
6787 sprintf(message_
, "RtApiDs: Unable to get current position (%s): %s.",
6788 devices_
[stream_
.device
[0]].name
.c_str(), getErrorString(result
));
6789 error(RtError::DRIVER_ERROR
);
6791 // Chase nextWritePos.
// Unwrap the circular-buffer offset so currentPos >= nextWritePos.
6793 if ( currentPos
< (DWORD
)nextWritePos
) currentPos
+= dsBufferSize
; // unwrap offset
6794 DWORD endWrite
= nextWritePos
+ buffer_bytes
;
6796 // Check whether the entire write region is behind the play pointer.
6797 while ( currentPos
< endWrite
) {
// Sleep estimate: bytes remaining, scaled by 0.9 (the "fudgefactor")
// over bytes-per-second, clamped to at least 1 ms.
6798 double millis
= (endWrite
- currentPos
) * 900.0;
6799 millis
/= ( formatBytes(stream_
.deviceFormat
[0]) * stream_
.nDeviceChannels
[0] *stream_
.sampleRate
);
6800 if ( millis
< 1.0 ) millis
= 1.0;
6801 Sleep( (DWORD
) millis
);
6803 // Wake up, find out where we are now
6804 result
= dsBuffer
->GetCurrentPosition( ¤tPos
, &safePos
);
6805 if ( FAILED(result
) ) {
6806 sprintf(message_
, "RtApiDs: Unable to get current position (%s): %s.",
6807 devices_
[stream_
.device
[0]].name
.c_str(), getErrorString(result
));
6808 error(RtError::DRIVER_ERROR
);
6810 if ( currentPos
< (DWORD
)nextWritePos
) currentPos
+= dsBufferSize
; // unwrap offset
6813 // Lock free space in the buffer
6814 result
= dsBuffer
->Lock (nextWritePos
, buffer_bytes
, &buffer1
,
6815 &bufferSize1
, &buffer2
, &bufferSize2
, 0);
6816 if ( FAILED(result
) ) {
6817 sprintf(message_
, "RtApiDs: Unable to lock buffer during playback (%s): %s.",
6818 devices_
[stream_
.device
[0]].name
.c_str(), getErrorString(result
));
6819 error(RtError::DRIVER_ERROR
, TRUE
);
6822 // Zero the free space
// Lock may return a wrapped second region (buffer2) — zero it too.
6823 ZeroMemory(buffer1
, bufferSize1
);
6824 if (buffer2
!= NULL
) ZeroMemory(buffer2
, bufferSize2
);
6826 // Update our buffer offset and unlock sound buffer
6827 dsBuffer
->Unlock (buffer1
, bufferSize1
, buffer2
, bufferSize2
);
// NOTE(review): `result` is not assigned from Unlock here, so this check
// re-tests the previous Lock result — looks like a latent bug in the
// original; left untouched in this documentation pass.
6828 if ( FAILED(result
) ) {
6829 sprintf(message_
, "RtApiDs: Unable to unlock buffer during playback (%s): %s.",
6830 devices_
[stream_
.device
[0]].name
.c_str(), getErrorString(result
));
6831 error(RtError::DRIVER_ERROR
);
6833 nextWritePos
= (nextWritePos
+ bufferSize1
+ bufferSize2
) % dsBufferSize
;
6834 handles
[0].bufferPointer
= nextWritePos
;
6837 // If we play again, start at the beginning of the buffer.
6838 handles
[0].bufferPointer
= 0;
// Capture side: stop the buffer, then zero its entire contents.
6841 if (stream_
.mode
== INPUT
|| stream_
.mode
== DUPLEX
) {
6843 LPDIRECTSOUNDCAPTUREBUFFER buffer
= (LPDIRECTSOUNDCAPTUREBUFFER
) handles
[1].buffer
;
6847 result
= buffer
->Stop();
6848 if ( FAILED(result
) ) {
6849 sprintf(message_
, "RtApiDs: Unable to stop capture buffer (%s): %s",
6850 devices_
[stream_
.device
[1]].name
.c_str(), getErrorString(result
));
6851 error(RtError::DRIVER_ERROR
);
6854 dsBufferSize
= handles
[1].dsBufferSize
;
6856 // Lock the buffer and clear it so that if we start to play again,
6857 // we won't have old data playing.
6858 result
= buffer
->Lock(0, dsBufferSize
, &buffer1
, &bufferSize1
, NULL
, NULL
, 0);
6859 if ( FAILED(result
) ) {
6860 sprintf(message_
, "RtApiDs: Unable to lock capture buffer (%s): %s.",
6861 devices_
[stream_
.device
[1]].name
.c_str(), getErrorString(result
));
6862 error(RtError::DRIVER_ERROR
);
6865 // Zero the DS buffer
6866 ZeroMemory(buffer1
, bufferSize1
);
6868 // Unlock the DS buffer
6869 result
= buffer
->Unlock(buffer1
, bufferSize1
, NULL
, 0);
6870 if ( FAILED(result
) ) {
6871 sprintf(message_
, "RtApiDs: Unable to unlock capture buffer (%s): %s.",
6872 devices_
[stream_
.device
[1]].name
.c_str(), getErrorString(result
));
6873 error(RtError::DRIVER_ERROR
);
6876 // If we start recording again, we must begin at beginning of buffer.
6877 handles
[1].bufferPointer
= 0;
6880 MUTEX_UNLOCK(&stream_
.mutex
);
// Immediately stops the stream without draining: for each active direction
// it calls Stop() on the DirectSound buffer, then locks the whole buffer,
// zeroes it (so a restart doesn't replay stale audio), unlocks it, and
// resets the handle's bufferPointer to 0. All under the stream mutex.
// NOTE(review): `audioPtr` and `dataLen` are not declared in the visible
// body — presumably members or declared in a listing line missing from this
// capture (the numbered listing jumps at e.g. 6892-6896, 6936-6938).
6883 void RtApiDs :: abortStream()
6886 if (stream_
.state
== STREAM_STOPPED
) return;
6888 // Change the state before the lock to improve shutdown response
6889 // when using a callback.
6890 stream_
.state
= STREAM_STOPPED
;
6891 MUTEX_LOCK(&stream_
.mutex
);
6897 DsHandle
*handles
= (DsHandle
*) stream_
.apiHandle
;
// Playback side (handles[0]): stop, then wipe the ring buffer.
6898 if (stream_
.mode
== OUTPUT
|| stream_
.mode
== DUPLEX
) {
6900 LPDIRECTSOUNDBUFFER buffer
= (LPDIRECTSOUNDBUFFER
) handles
[0].buffer
;
6901 result
= buffer
->Stop();
6902 if ( FAILED(result
) ) {
6903 sprintf(message_
, "RtApiDs: Unable to stop buffer (%s): %s",
6904 devices_
[stream_
.device
[0]].name
.c_str(), getErrorString(result
));
6905 error(RtError::DRIVER_ERROR
);
6908 dsBufferSize
= handles
[0].dsBufferSize
;
6910 // Lock the buffer and clear it so that if we start to play again,
6911 // we won't have old data playing.
6912 result
= buffer
->Lock(0, dsBufferSize
, &audioPtr
, &dataLen
, NULL
, NULL
, 0);
6913 if ( FAILED(result
) ) {
6914 sprintf(message_
, "RtApiDs: Unable to lock buffer (%s): %s.",
6915 devices_
[stream_
.device
[0]].name
.c_str(), getErrorString(result
));
6916 error(RtError::DRIVER_ERROR
);
6919 // Zero the DS buffer
6920 ZeroMemory(audioPtr
, dataLen
);
6922 // Unlock the DS buffer
6923 result
= buffer
->Unlock(audioPtr
, dataLen
, NULL
, 0);
6924 if ( FAILED(result
) ) {
6925 sprintf(message_
, "RtApiDs: Unable to unlock buffer (%s): %s.",
6926 devices_
[stream_
.device
[0]].name
.c_str(), getErrorString(result
));
6927 error(RtError::DRIVER_ERROR
);
6930 // If we start playing again, we must begin at beginning of buffer.
6931 handles
[0].bufferPointer
= 0;
// Capture side (handles[1]): same stop-and-wipe sequence.
6934 if (stream_
.mode
== INPUT
|| stream_
.mode
== DUPLEX
) {
6935 LPDIRECTSOUNDCAPTUREBUFFER buffer
= (LPDIRECTSOUNDCAPTUREBUFFER
) handles
[1].buffer
;
6939 result
= buffer
->Stop();
6940 if ( FAILED(result
) ) {
6941 sprintf(message_
, "RtApiDs: Unable to stop capture buffer (%s): %s",
6942 devices_
[stream_
.device
[1]].name
.c_str(), getErrorString(result
));
6943 error(RtError::DRIVER_ERROR
);
6946 dsBufferSize
= handles
[1].dsBufferSize
;
6948 // Lock the buffer and clear it so that if we start to play again,
6949 // we won't have old data playing.
6950 result
= buffer
->Lock(0, dsBufferSize
, &audioPtr
, &dataLen
, NULL
, NULL
, 0);
6951 if ( FAILED(result
) ) {
6952 sprintf(message_
, "RtApiDs: Unable to lock capture buffer (%s): %s.",
6953 devices_
[stream_
.device
[1]].name
.c_str(), getErrorString(result
));
6954 error(RtError::DRIVER_ERROR
);
6957 // Zero the DS buffer
6958 ZeroMemory(audioPtr
, dataLen
);
6960 // Unlock the DS buffer
6961 result
= buffer
->Unlock(audioPtr
, dataLen
, NULL
, 0);
6962 if ( FAILED(result
) ) {
6963 sprintf(message_
, "RtApiDs: Unable to unlock capture buffer (%s): %s.",
6964 devices_
[stream_
.device
[1]].name
.c_str(), getErrorString(result
));
6965 error(RtError::DRIVER_ERROR
);
6968 // If we start recording again, we must begin at beginning of buffer.
6969 handles
[1].bufferPointer
= 0;
6972 MUTEX_UNLOCK(&stream_
.mutex
);
// Estimates how many frames a tickStream() call would have to wait for.
// For OUTPUT/DUPLEX it measures the distance from our next write position
// to the "safe write" cursor plus the configured pointer lead time; for
// pure INPUT it measures captured-but-unread bytes behind the safe read
// cursor. The result is converted to frames and subtracted from
// stream_.bufferSize (clamped at 0). Returns 0 when stopped.
// NOTE(review): `&currentPos` appears as `¤tPos` below — HTML-entity
// mangling of the original text, preserved verbatim. The `frames` /
// `channels` / `result` locals and the final `return frames;` are in
// listing lines missing from this capture (jumps at e.g. 6981-6984,
// 7039-7041).
6975 int RtApiDs :: streamWillBlock()
6978 if (stream_
.state
== STREAM_STOPPED
) return 0;
6980 MUTEX_LOCK(&stream_
.mutex
);
6985 DWORD currentPos
, safePos
;
6987 DsHandle
*handles
= (DsHandle
*) stream_
.apiHandle
;
// Playback distance: from nextWritePos up to safe-write + lead time.
6988 if (stream_
.mode
== OUTPUT
|| stream_
.mode
== DUPLEX
) {
6990 LPDIRECTSOUNDBUFFER dsBuffer
= (LPDIRECTSOUNDBUFFER
) handles
[0].buffer
;
6991 UINT nextWritePos
= handles
[0].bufferPointer
;
6992 channels
= stream_
.nDeviceChannels
[0];
6993 DWORD dsBufferSize
= handles
[0].dsBufferSize
;
6995 // Find out where the read and "safe write" pointers are.
6996 result
= dsBuffer
->GetCurrentPosition(¤tPos
, &safePos
);
6997 if ( FAILED(result
) ) {
6998 sprintf(message_
, "RtApiDs: Unable to get current position (%s): %s.",
6999 devices_
[stream_
.device
[0]].name
.c_str(), getErrorString(result
));
7000 error(RtError::DRIVER_ERROR
);
// Advance the safe position by the configured lead, wrapping around
// the circular buffer.
7003 DWORD leadPos
= safePos
+ handles
[0].dsPointerLeadTime
;
7004 if (leadPos
> dsBufferSize
) {
7005 leadPos
-= dsBufferSize
;
7007 if ( leadPos
< nextWritePos
) leadPos
+= dsBufferSize
; // unwrap offset
// Convert the byte distance to frames.
7009 frames
= (leadPos
- nextWritePos
);
7010 frames
/= channels
* formatBytes(stream_
.deviceFormat
[0]);
7013 if (stream_
.mode
== INPUT
) {
7014 // note that we don't block on DUPLEX input anymore. We run lockstep with the write pointer instead.
7016 LPDIRECTSOUNDCAPTUREBUFFER dsBuffer
= (LPDIRECTSOUNDCAPTUREBUFFER
) handles
[1].buffer
;
7017 UINT nextReadPos
= handles
[1].bufferPointer
;
7018 channels
= stream_
.nDeviceChannels
[1];
7019 DWORD dsBufferSize
= handles
[1].dsBufferSize
;
7021 // Find out where the write and "safe read" pointers are.
7022 result
= dsBuffer
->GetCurrentPosition(¤tPos
, &safePos
);
7023 if ( FAILED(result
) ) {
7024 sprintf(message_
, "RtApiDs: Unable to get current capture position (%s): %s.",
7025 devices_
[stream_
.device
[1]].name
.c_str(), getErrorString(result
));
7026 error(RtError::DRIVER_ERROR
);
7029 if ( safePos
< (DWORD
)nextReadPos
) safePos
+= dsBufferSize
; // unwrap offset
7031 frames
= (int)(safePos
- nextReadPos
);
7032 frames
/= channels
* formatBytes(stream_
.deviceFormat
[1]);
// Remaining frames the caller would block for, never negative.
7035 frames
= stream_
.bufferSize
- frames
;
7036 if (frames
< 0) frames
= 0;
7038 MUTEX_UNLOCK(&stream_
.mutex
);
7042 void RtApiDs :: tickStream()
7047 if (stream_
.state
== STREAM_STOPPED
) {
7048 if (stream_
.callbackInfo
.usingCallback
) Sleep(50); // sleep 50 milliseconds
7051 else if (stream_
.callbackInfo
.usingCallback
) {
7052 RtAudioCallback callback
= (RtAudioCallback
) stream_
.callbackInfo
.callback
;
7053 stopStream
= callback(stream_
.userBuffer
, stream_
.bufferSize
, stream_
.callbackInfo
.userData
);
7056 MUTEX_LOCK(&stream_
.mutex
);
7058 // The state might change while waiting on a mutex.
7059 if (stream_
.state
== STREAM_STOPPED
) {
7060 MUTEX_UNLOCK(&stream_
.mutex
);
7065 DWORD currentWritePos
, safeWritePos
;
7066 DWORD currentReadPos
, safeReadPos
;
7070 #ifdef GENERATE_DEBUG_LOG
7071 DWORD writeTime
, readTime
;
7073 LPVOID buffer1
= NULL
;
7074 LPVOID buffer2
= NULL
;
7075 DWORD bufferSize1
= 0;
7076 DWORD bufferSize2
= 0;
7080 DsHandle
*handles
= (DsHandle
*) stream_
.apiHandle
;
7082 if (stream_
.mode
== DUPLEX
&& !buffersRolling
)
7084 assert(handles
[0].dsBufferSize
== handles
[1].dsBufferSize
);
7086 // it takes a while for the devices to get rolling. As a result, there's
7087 // no guarantee that the capture and write device pointers will move in lockstep.
7088 // Wait here for both devices to start rolling, and then set our buffer pointers accordingly.
7089 // e.g. Crystal Drivers: the capture buffer starts up 5700 to 9600 bytes later than the write
7092 // Stub: a serious risk of having a pre-emptive scheduling round take place between
7093 // the two GetCurrentPosition calls... but I'm really not sure how to solve the problem.
7094 // Temporarily boost to Realtime priority, maybe; but I'm not sure what priority the
7095 // directsound service threads run at. We *should* be roughly within a ms or so of correct.
7097 LPDIRECTSOUNDBUFFER dsWriteBuffer
= (LPDIRECTSOUNDBUFFER
) handles
[0].buffer
;
7098 LPDIRECTSOUNDCAPTUREBUFFER dsCaptureBuffer
= (LPDIRECTSOUNDCAPTUREBUFFER
) handles
[1].buffer
;
7101 DWORD initialWritePos
, initialSafeWritePos
;
7102 DWORD initialReadPos
, initialSafeReadPos
;;
7105 result
= dsWriteBuffer
->GetCurrentPosition(&initialWritePos
, &initialSafeWritePos
);
7106 if ( FAILED(result
) ) {
7107 sprintf(message_
, "RtApiDs: Unable to get current position (%s): %s.",
7108 devices_
[stream_
.device
[0]].name
.c_str(), getErrorString(result
));
7109 error(RtError::DRIVER_ERROR
);
7111 result
= dsCaptureBuffer
->GetCurrentPosition(&initialReadPos
, &initialSafeReadPos
);
7112 if ( FAILED(result
) ) {
7113 sprintf(message_
, "RtApiDs: Unable to get current capture position (%s): %s.",
7114 devices_
[stream_
.device
[1]].name
.c_str(), getErrorString(result
));
7115 error(RtError::DRIVER_ERROR
);
7119 result
= dsWriteBuffer
->GetCurrentPosition(¤tWritePos
, &safeWritePos
);
7120 if ( FAILED(result
) ) {
7121 sprintf(message_
, "RtApiDs: Unable to get current position (%s): %s.",
7122 devices_
[stream_
.device
[0]].name
.c_str(), getErrorString(result
));
7123 error(RtError::DRIVER_ERROR
);
7125 result
= dsCaptureBuffer
->GetCurrentPosition(¤tReadPos
, &safeReadPos
);
7126 if ( FAILED(result
) ) {
7127 sprintf(message_
, "RtApiDs: Unable to get current capture position (%s): %s.",
7128 devices_
[stream_
.device
[1]].name
.c_str(), getErrorString(result
));
7129 error(RtError::DRIVER_ERROR
);
7131 if (safeWritePos
!= initialSafeWritePos
&& safeReadPos
!= initialSafeReadPos
)
7138 assert(handles
[0].dsBufferSize
== handles
[1].dsBufferSize
);
7140 UINT writeBufferLead
= (safeWritePos
-safeReadPos
+ handles
[0].dsBufferSize
) % handles
[0].dsBufferSize
;
7141 buffersRolling
= true;
7142 handles
[0].bufferPointer
= (safeWritePos
+ handles
[0].dsPointerLeadTime
);
7143 handles
[1].bufferPointer
= safeReadPos
;
7147 if (stream_
.mode
== OUTPUT
|| stream_
.mode
== DUPLEX
) {
7149 LPDIRECTSOUNDBUFFER dsBuffer
= (LPDIRECTSOUNDBUFFER
) handles
[0].buffer
;
7151 // Setup parameters and do buffer conversion if necessary.
7152 if (stream_
.doConvertBuffer
[0]) {
7153 buffer
= stream_
.deviceBuffer
;
7154 convertBuffer( buffer
, stream_
.userBuffer
, stream_
.convertInfo
[0] );
7155 buffer_bytes
= stream_
.bufferSize
* stream_
.nDeviceChannels
[0];
7156 buffer_bytes
*= formatBytes(stream_
.deviceFormat
[0]);
7159 buffer
= stream_
.userBuffer
;
7160 buffer_bytes
= stream_
.bufferSize
* stream_
.nUserChannels
[0];
7161 buffer_bytes
*= formatBytes(stream_
.userFormat
);
7164 // No byte swapping necessary in DirectSound implementation.
7166 // Ahhh ... windoze. 16-bit data is signed but 8-bit data is
7167 // unsigned. So, we need to convert our signed 8-bit data here to
7169 if ( stream_
.deviceFormat
[0] == RTAUDIO_SINT8
)
7170 for ( int i
=0; i
<buffer_bytes
; i
++ ) buffer
[i
] = (unsigned char) (buffer
[i
] + 128);
7172 DWORD dsBufferSize
= handles
[0].dsBufferSize
;
7173 nextWritePos
= handles
[0].bufferPointer
;
7178 // Find out where the read and "safe write" pointers are.
7179 result
= dsBuffer
->GetCurrentPosition(¤tWritePos
, &safeWritePos
);
7180 if ( FAILED(result
) ) {
7181 sprintf(message_
, "RtApiDs: Unable to get current position (%s): %s.",
7182 devices_
[stream_
.device
[0]].name
.c_str(), getErrorString(result
));
7183 error(RtError::DRIVER_ERROR
);
7186 leadPos
= safeWritePos
+ handles
[0].dsPointerLeadTime
;
7187 if (leadPos
> dsBufferSize
) {
7188 leadPos
-= dsBufferSize
;
7190 if ( leadPos
< nextWritePos
) leadPos
+= dsBufferSize
; // unwrap offset
7193 endWrite
= nextWritePos
+ buffer_bytes
;
7195 // Check whether the entire write region is behind the play pointer.
7197 if ( leadPos
>= endWrite
) break;
7199 // If we are here, then we must wait until the play pointer gets
7200 // beyond the write region. The approach here is to use the
7201 // Sleep() function to suspend operation until safePos catches
7202 // up. Calculate number of milliseconds to wait as:
7203 // time = distance * (milliseconds/second) * fudgefactor /
7204 // ((bytes/sample) * (samples/second))
7205 // A "fudgefactor" less than 1 is used because it was found
7206 // that sleeping too long was MUCH worse than sleeping for
7207 // several shorter periods.
7208 double millis
= (endWrite
- leadPos
) * 900.0;
7209 millis
/= ( formatBytes(stream_
.deviceFormat
[0]) *stream_
.nDeviceChannels
[0]* stream_
.sampleRate
);
7210 if ( millis
< 1.0 ) millis
= 1.0;
7211 if (millis
> 50.0) {
7212 static int nOverruns
= 0;
7215 Sleep( (DWORD
) millis
);
7216 // Sleep( (DWORD) 2);
7218 #ifdef GENERATE_DEBUG_LOG
7219 writeTime
= timeGetTime();
7221 if (statistics
.writeDeviceSafeLeadBytes
< dsPointerDifference(safeWritePos
,currentWritePos
,handles
[0].dsBufferSize
))
7223 statistics
.writeDeviceSafeLeadBytes
= dsPointerDifference(safeWritePos
,currentWritePos
,handles
[0].dsBufferSize
);
7227 dsPointerBetween(nextWritePos
,safeWritePos
,currentWritePos
,dsBufferSize
)
7228 || dsPointerBetween(endWrite
,safeWritePos
,currentWritePos
,dsBufferSize
)
7231 // we've strayed into the forbidden zone.
7232 // resync the read pointer.
7233 ++statistics
.numberOfWriteUnderruns
;
7234 nextWritePos
= safeWritePos
+ handles
[0].dsPointerLeadTime
-buffer_bytes
+dsBufferSize
;
7235 while (nextWritePos
>= dsBufferSize
) nextWritePos
-= dsBufferSize
;
7236 handles
[0].bufferPointer
= nextWritePos
;
7237 endWrite
= nextWritePos
+ buffer_bytes
;
7240 // Lock free space in the buffer
7241 result
= dsBuffer
->Lock (nextWritePos
, buffer_bytes
, &buffer1
,
7242 &bufferSize1
, &buffer2
, &bufferSize2
, 0);
7243 if ( FAILED(result
) ) {
7244 sprintf(message_
, "RtApiDs: Unable to lock buffer during playback (%s): %s.",
7245 devices_
[stream_
.device
[0]].name
.c_str(), getErrorString(result
));
7246 error(RtError::DRIVER_ERROR
, TRUE
);
7249 // Copy our buffer into the DS buffer
7250 CopyMemory(buffer1
, buffer
, bufferSize1
);
7251 if (buffer2
!= NULL
) CopyMemory(buffer2
, buffer
+bufferSize1
, bufferSize2
);
7253 // Update our buffer offset and unlock sound buffer
7254 dsBuffer
->Unlock (buffer1
, bufferSize1
, buffer2
, bufferSize2
);
7255 if ( FAILED(result
) ) {
7256 sprintf(message_
, "RtApiDs: Unable to unlock buffer during playback (%s): %s.",
7257 devices_
[stream_
.device
[0]].name
.c_str(), getErrorString(result
));
7258 error(RtError::DRIVER_ERROR
);
7260 nextWritePos
= (nextWritePos
+ bufferSize1
+ bufferSize2
) % dsBufferSize
;
7261 handles
[0].bufferPointer
= nextWritePos
;
7264 if (stream_
.mode
== INPUT
|| stream_
.mode
== DUPLEX
) {
7266 // Setup parameters.
7267 if (stream_
.doConvertBuffer
[1]) {
7268 buffer
= stream_
.deviceBuffer
;
7269 buffer_bytes
= stream_
.bufferSize
* stream_
.nDeviceChannels
[1];
7270 buffer_bytes
*= formatBytes(stream_
.deviceFormat
[1]);
7273 buffer
= stream_
.userBuffer
;
7274 buffer_bytes
= stream_
.bufferSize
* stream_
.nUserChannels
[1];
7275 buffer_bytes
*= formatBytes(stream_
.userFormat
);
7277 LPDIRECTSOUNDCAPTUREBUFFER dsBuffer
= (LPDIRECTSOUNDCAPTUREBUFFER
) handles
[1].buffer
;
7278 long nextReadPos
= handles
[1].bufferPointer
;
7279 DWORD dsBufferSize
= handles
[1].dsBufferSize
;
7281 // Find out where the write and "safe read" pointers are.
7282 result
= dsBuffer
->GetCurrentPosition(&currentReadPos
, &safeReadPos
);
7283 if ( FAILED(result
) ) {
7284 sprintf(message_
, "RtApiDs: Unable to get current capture position (%s): %s.",
7285 devices_
[stream_
.device
[1]].name
.c_str(), getErrorString(result
));
7286 error(RtError::DRIVER_ERROR
);
7289 if ( safeReadPos
< (DWORD
)nextReadPos
) safeReadPos
+= dsBufferSize
; // unwrap offset
7290 DWORD endRead
= nextReadPos
+ buffer_bytes
;
7292 // Handling depends on whether we are INPUT or DUPLEX.
7293 // If we're in INPUT mode then waiting is a good thing. If we're in DUPLEX mode,
7294 // then a wait here will drag the write pointers into the forbidden zone.
7296 // In DUPLEX mode, rather than wait, we will back off the read pointer until
7297 // it's in a safe position. This causes dropouts, but it seems to be the only
7298 // practical way to sync up the read and write pointers reliably, given the
7299 // the very complex relationship between phase and increment of the read and write
7302 // In order to minimize audible dropouts in DUPLEX mode, we will provide a pre-roll
7303 // period of 0.5 seconds
7304 // in which we return zeros from the read buffer while the pointers sync up.
7306 if (stream_
.mode
== DUPLEX
)
7308 if (safeReadPos
< endRead
)
7310 if (duplexPrerollBytes
<= 0)
7312 // pre-roll time over. Be more agressive.
7313 int adjustment
= endRead
-safeReadPos
;
7315 ++statistics
.numberOfReadOverruns
;
7317 // large adjustments: we've probably run out of CPU cycles, so just resync exactly,
7318 // and perform fine adjustments later.
7319 // small adjustments: back off by twice as much.
7320 if (adjustment
>= 2*buffer_bytes
)
7322 nextReadPos
= safeReadPos
-2*buffer_bytes
;
7325 nextReadPos
= safeReadPos
-buffer_bytes
-adjustment
;
7327 statistics
.readDeviceSafeLeadBytes
= currentReadPos
-nextReadPos
;
7328 if (statistics
.readDeviceSafeLeadBytes
< 0) statistics
.readDeviceSafeLeadBytes
+= dsBufferSize
;
7330 if (nextReadPos
< 0) nextReadPos
+= dsBufferSize
;
7333 // in pre-roll time. Just do it.
7334 nextReadPos
= safeReadPos
-buffer_bytes
;
7335 while (nextReadPos
< 0) nextReadPos
+= dsBufferSize
;
7337 endRead
= nextReadPos
+ buffer_bytes
;
7340 while ( safeReadPos
< endRead
) {
7341 // See comments for playback.
7342 double millis
= (endRead
- safeReadPos
) * 900.0;
7343 millis
/= ( formatBytes(stream_
.deviceFormat
[1]) * stream_
.nDeviceChannels
[1] * stream_
.sampleRate
);
7344 if ( millis
< 1.0 ) millis
= 1.0;
7345 Sleep( (DWORD
) millis
);
7347 // Wake up, find out where we are now
7348 result
= dsBuffer
->GetCurrentPosition( &currentReadPos
, &safeReadPos
);
7349 if ( FAILED(result
) ) {
7350 sprintf(message_
, "RtApiDs: Unable to get current capture position (%s): %s.",
7351 devices_
[stream_
.device
[1]].name
.c_str(), getErrorString(result
));
7352 error(RtError::DRIVER_ERROR
);
7355 if ( safeReadPos
< (DWORD
)nextReadPos
) safeReadPos
+= dsBufferSize
; // unwrap offset
7358 #ifdef GENERATE_DEBUG_LOG
7359 readTime
= timeGetTime();
7361 if (statistics
.readDeviceSafeLeadBytes
< dsPointerDifference(currentReadPos
,nextReadPos
,dsBufferSize
))
7363 statistics
.readDeviceSafeLeadBytes
= dsPointerDifference(currentReadPos
,nextReadPos
,dsBufferSize
);
7366 // Lock free space in the buffer
7367 result
= dsBuffer
->Lock (nextReadPos
, buffer_bytes
, &buffer1
,
7368 &bufferSize1
, &buffer2
, &bufferSize2
, 0);
7369 if ( FAILED(result
) ) {
7370 sprintf(message_
, "RtApiDs: Unable to lock buffer during capture (%s): %s.",
7371 devices_
[stream_
.device
[1]].name
.c_str(), getErrorString(result
));
7372 error(RtError::DRIVER_ERROR
);
7375 if (duplexPrerollBytes
<= 0)
7377 // Copy our buffer into the DS buffer
7378 CopyMemory(buffer
, buffer1
, bufferSize1
);
7379 if (buffer2
!= NULL
) CopyMemory(buffer
+bufferSize1
, buffer2
, bufferSize2
);
7381 memset(buffer
,0,bufferSize1
);
7382 if (buffer2
!= NULL
) memset(buffer
+bufferSize1
,0,bufferSize2
);
7383 duplexPrerollBytes
-= bufferSize1
+ bufferSize2
;
7386 // Update our buffer offset and unlock sound buffer
7387 nextReadPos
= (nextReadPos
+ bufferSize1
+ bufferSize2
) % dsBufferSize
;
7388 dsBuffer
->Unlock (buffer1
, bufferSize1
, buffer2
, bufferSize2
);
7389 if ( FAILED(result
) ) {
7390 sprintf(message_
, "RtApiDs: Unable to unlock buffer during capture (%s): %s.",
7391 devices_
[stream_
.device
[1]].name
.c_str(), getErrorString(result
));
7392 error(RtError::DRIVER_ERROR
);
7394 handles
[1].bufferPointer
= nextReadPos
;
7397 // No byte swapping necessary in DirectSound implementation.
7399 // If necessary, convert 8-bit data from unsigned to signed.
7400 if ( stream_
.deviceFormat
[1] == RTAUDIO_SINT8
)
7401 for ( int j
=0; j
<buffer_bytes
; j
++ ) buffer
[j
] = (signed char) (buffer
[j
] - 128);
7403 // Do buffer conversion if necessary.
7404 if (stream_
.doConvertBuffer
[1])
7405 convertBuffer( stream_
.userBuffer
, stream_
.deviceBuffer
, stream_
.convertInfo
[1] );
7407 #ifdef GENERATE_DEBUG_LOG
7408 if (currentDebugLogEntry
< debugLog
.size())
7410 TTickRecord
&r
= debugLog
[currentDebugLogEntry
++];
7411 r
.currentReadPointer
= currentReadPos
;
7412 r
.safeReadPointer
= safeReadPos
;
7413 r
.currentWritePointer
= currentWritePos
;
7414 r
.safeWritePointer
= safeWritePos
;
7415 r
.readTime
= readTime
;
7416 r
.writeTime
= writeTime
;
7417 r
.nextReadPointer
= handles
[1].bufferPointer
;
7418 r
.nextWritePointer
= handles
[0].bufferPointer
;
7423 MUTEX_UNLOCK(&stream_
.mutex
);
7425 if (stream_
.callbackInfo
.usingCallback
&& stopStream
)
7428 // Definitions for utility functions and callbacks
7429 // specific to the DirectSound implementation.
7432 #if defined(__WINDOWS_PTHREAD__)
7433 extern "C" void * callbackHandler( void * ptr
)
7435 CallbackInfo
*info
= (CallbackInfo
*) ptr
;
7436 RtApiDs
*object
= (RtApiDs
*) info
->object
;
7437 bool *usingCallback
= &info
->usingCallback
;
7439 while ( *usingCallback
) {
7440 pthread_testcancel();
7442 object
->tickStream();
7444 catch (RtError
&exception
) {
7445 EM_log( CK_LOG_SYSTEM
, "RtApiDs: callback thread error..." );
7447 EM_log( CK_LOG_INFO
, "(%s)", exception
.getMessageString() );
7448 EM_log( CK_LOG_INFO
, "closing thread..." );
7459 extern "C" unsigned __stdcall
callbackHandler(void *ptr
)
7461 CallbackInfo
*info
= (CallbackInfo
*) ptr
;
7462 RtApiDs
*object
= (RtApiDs
*) info
->object
;
7463 bool *usingCallback
= &info
->usingCallback
;
7465 while ( *usingCallback
) {
7467 object
->tickStream();
7469 catch (RtError
&exception
) {
7470 EM_log( CK_LOG_SYSTEM
, "RtApiDs: callback thread error..." );
7472 EM_log( CK_LOG_INFO
, "(%s)", exception
.getMessageString() );
7473 if( exception
.getContinue() )
7475 EM_log( CK_LOG_INFO
, "closing overridden - continuing..." );
7481 EM_log( CK_LOG_INFO
, "closing thread..." );
7495 static bool CALLBACK
deviceCountCallback(LPGUID lpguid
,
7496 LPCSTR lpcstrDescription
,
7497 LPCSTR lpcstrModule
,
7500 int *pointer
= ((int *) lpContext
);
7506 static bool CALLBACK
deviceInfoCallback(LPGUID lpguid
,
7507 LPCSTR lpcstrDescription
,
7508 LPCSTR lpcstrModule
,
7511 enum_info
*info
= ((enum_info
*) lpContext
);
7512 while (strlen(info
->name
) > 0) info
++;
7514 strncpy(info
->name
, lpcstrDescription
, 64);
7518 info
->isValid
= false;
7519 if (info
->isInput
== true) {
7521 LPDIRECTSOUNDCAPTURE object
;
7523 hr
= DirectSoundCaptureCreate( lpguid
, &object
, NULL
);
7524 if( hr
!= DS_OK
) return true;
7526 caps
.dwSize
= sizeof(caps
);
7527 hr
= object
->GetCaps( &caps
);
7529 if (caps
.dwChannels
> 0 && caps
.dwFormats
> 0)
7530 info
->isValid
= true;
7536 LPDIRECTSOUND object
;
7537 hr
= DirectSoundCreate( lpguid
, &object
, NULL
);
7538 if( hr
!= DS_OK
) return true;
7540 caps
.dwSize
= sizeof(caps
);
7541 hr
= object
->GetCaps( &caps
);
7543 if ( caps
.dwFlags
& DSCAPS_PRIMARYMONO
|| caps
.dwFlags
& DSCAPS_PRIMARYSTEREO
)
7544 info
->isValid
= true;
7552 static bool CALLBACK
defaultDeviceCallback(LPGUID lpguid
,
7553 LPCSTR lpcstrDescription
,
7554 LPCSTR lpcstrModule
,
7557 enum_info
*info
= ((enum_info
*) lpContext
);
7559 if ( lpguid
== NULL
) {
7560 strncpy(info
->name
, lpcstrDescription
, 64);
7567 static bool CALLBACK
deviceIdCallback(LPGUID lpguid
,
7568 LPCSTR lpcstrDescription
,
7569 LPCSTR lpcstrModule
,
7572 enum_info
*info
= ((enum_info
*) lpContext
);
7574 if ( strncmp( info
->name
, lpcstrDescription
, 64 ) == 0 ) {
7576 info
->isValid
= true;
7583 static char* getErrorString(int code
)
7587 case DSERR_ALLOCATED
:
7588 return "Already allocated.";
7590 case DSERR_CONTROLUNAVAIL
:
7591 return "Control unavailable.";
7593 case DSERR_INVALIDPARAM
:
7594 return "Invalid parameter.";
7596 case DSERR_INVALIDCALL
:
7597 return "Invalid call.";
7600 return "Generic error.";
7602 case DSERR_PRIOLEVELNEEDED
:
7603 return "Priority level needed";
7605 case DSERR_OUTOFMEMORY
:
7606 return "Out of memory";
7608 case DSERR_BADFORMAT
:
7609 return "The sample rate or the channel format is not supported.";
7611 case DSERR_UNSUPPORTED
:
7612 return "Not supported.";
7614 case DSERR_NODRIVER
:
7615 return "No driver.";
7617 case DSERR_ALREADYINITIALIZED
:
7618 return "Already initialized.";
7620 case DSERR_NOAGGREGATION
:
7621 return "No aggregation.";
7623 case DSERR_BUFFERLOST
:
7624 return "Buffer lost.";
7626 case DSERR_OTHERAPPHASPRIO
:
7627 return "Another application already has priority.";
7629 case DSERR_UNINITIALIZED
:
7630 return "Uninitialized.";
7633 return "DirectSound unknown error";
7637 //******************** End of __WINDOWS_DS__ *********************//
7640 #if defined(__IRIX_AL__) // SGI's AL API for IRIX
7642 #include <dmedia/audio.h>
7646 extern "C" void *callbackHandler(void * ptr
);
7648 RtApiAl :: RtApiAl()
7652 if (nDevices_
<= 0) {
7653 sprintf(message_
, "RtApiAl: no Irix AL audio devices found!");
7654 error(RtError::NO_DEVICES_FOUND
);
7658 RtApiAl :: ~RtApiAl()
7660 // The subclass destructor gets called before the base class
7661 // destructor, so close any existing streams before deallocating
7662 // apiDeviceId memory.
7663 if ( stream_
.mode
!= UNINITIALIZED
) closeStream();
7665 // Free our allocated apiDeviceId memory.
7667 for ( unsigned int i
=0; i
<devices_
.size(); i
++ ) {
7668 id
= (long *) devices_
[i
].apiDeviceId
;
7673 void RtApiAl :: initialize(void)
7675 // Count cards and devices
7678 // Determine the total number of input and output devices.
7679 nDevices_
= alQueryValues(AL_SYSTEM
, AL_DEVICES
, 0, 0, 0, 0);
7680 if (nDevices_
< 0) {
7681 sprintf(message_
, "RtApiAl: error counting devices: %s.",
7682 alGetErrorString(oserror()));
7683 error(RtError::DRIVER_ERROR
);
7686 if (nDevices_
<= 0) return;
7688 ALvalue
*vls
= (ALvalue
*) new ALvalue
[nDevices_
];
7690 // Create our list of devices and write their ascii identifiers and resource ids.
7694 pvs
[0].param
= AL_NAME
;
7695 pvs
[0].value
.ptr
= name
;
7700 outs
= alQueryValues(AL_SYSTEM
, AL_DEFAULT_OUTPUT
, vls
, nDevices_
, 0, 0);
7703 sprintf(message_
, "RtApiAl: error getting output devices: %s.",
7704 alGetErrorString(oserror()));
7705 error(RtError::DRIVER_ERROR
);
7708 for (i
=0; i
<outs
; i
++) {
7709 if (alGetParams(vls
[i
].i
, pvs
, 1) < 0) {
7711 sprintf(message_
, "RtApiAl: error querying output devices: %s.",
7712 alGetErrorString(oserror()));
7713 error(RtError::DRIVER_ERROR
);
7715 device
.name
.erase();
7716 device
.name
.append( (const char *)name
, strlen(name
)+1);
7717 devices_
.push_back(device
);
7718 id
= (long *) calloc(2, sizeof(long));
7720 devices_
[i
].apiDeviceId
= (void *) id
;
7723 ins
= alQueryValues(AL_SYSTEM
, AL_DEFAULT_INPUT
, &vls
[outs
], nDevices_
-outs
, 0, 0);
7726 sprintf(message_
, "RtApiAl: error getting input devices: %s.",
7727 alGetErrorString(oserror()));
7728 error(RtError::DRIVER_ERROR
);
7731 for (i
=outs
; i
<ins
+outs
; i
++) {
7732 if (alGetParams(vls
[i
].i
, pvs
, 1) < 0) {
7734 sprintf(message_
, "RtApiAl: error querying input devices: %s.",
7735 alGetErrorString(oserror()));
7736 error(RtError::DRIVER_ERROR
);
7738 device
.name
.erase();
7739 device
.name
.append( (const char *)name
, strlen(name
)+1);
7740 devices_
.push_back(device
);
7741 id
= (long *) calloc(2, sizeof(long));
7743 devices_
[i
].apiDeviceId
= (void *) id
;
7749 int RtApiAl :: getDefaultInputDevice(void)
7753 int result
= alQueryValues(AL_SYSTEM
, AL_DEFAULT_INPUT
, &value
, 1, 0, 0);
7755 sprintf(message_
, "RtApiAl: error getting default input device id: %s.",
7756 alGetErrorString(oserror()));
7757 error(RtError::WARNING
);
7760 for ( unsigned int i
=0; i
<devices_
.size(); i
++ ) {
7761 id
= (long *) devices_
[i
].apiDeviceId
;
7762 if ( id
[1] == value
.i
) return i
;
7769 int RtApiAl :: getDefaultOutputDevice(void)
7773 int result
= alQueryValues(AL_SYSTEM
, AL_DEFAULT_OUTPUT
, &value
, 1, 0, 0);
7775 sprintf(message_
, "RtApiAl: error getting default output device id: %s.",
7776 alGetErrorString(oserror()));
7777 error(RtError::WARNING
);
7780 for ( unsigned int i
=0; i
<devices_
.size(); i
++ ) {
7781 id
= (long *) devices_
[i
].apiDeviceId
;
7782 if ( id
[0] == value
.i
) return i
;
7789 void RtApiAl :: probeDeviceInfo(RtApiDevice
*info
)
7796 // Get output resource ID if it exists.
7797 long *id
= (long *) info
->apiDeviceId
;
7801 // Probe output device parameters.
7802 result
= alQueryValues(resource
, AL_CHANNELS
, &value
, 1, 0, 0);
7804 sprintf(message_
, "RtApiAl: error getting device (%s) channels: %s.",
7805 info
->name
.c_str(), alGetErrorString(oserror()));
7806 error(RtError::DEBUG_WARNING
);
7809 info
->maxOutputChannels
= value
.i
;
7810 info
->minOutputChannels
= 1;
7813 result
= alGetParamInfo(resource
, AL_RATE
, &pinfo
);
7815 sprintf(message_
, "RtApiAl: error getting device (%s) rates: %s.",
7816 info
->name
.c_str(), alGetErrorString(oserror()));
7817 error(RtError::DEBUG_WARNING
);
7820 info
->sampleRates
.clear();
7821 for (unsigned int k
=0; k
<MAX_SAMPLE_RATES
; k
++) {
7822 if ( SAMPLE_RATES
[k
] >= pinfo
.min
.i
&& SAMPLE_RATES
[k
] <= pinfo
.max
.i
)
7823 info
->sampleRates
.push_back( SAMPLE_RATES
[k
] );
7827 // The AL library supports all our formats, except 24-bit and 32-bit ints.
7828 info
->nativeFormats
= (RtAudioFormat
) 51;
7831 // Now get input resource ID if it exists.
7835 // Probe input device parameters.
7836 result
= alQueryValues(resource
, AL_CHANNELS
, &value
, 1, 0, 0);
7838 sprintf(message_
, "RtApiAl: error getting device (%s) channels: %s.",
7839 info
->name
.c_str(), alGetErrorString(oserror()));
7840 error(RtError::DEBUG_WARNING
);
7843 info
->maxInputChannels
= value
.i
;
7844 info
->minInputChannels
= 1;
7847 result
= alGetParamInfo(resource
, AL_RATE
, &pinfo
);
7849 sprintf(message_
, "RtApiAl: error getting device (%s) rates: %s.",
7850 info
->name
.c_str(), alGetErrorString(oserror()));
7851 error(RtError::DEBUG_WARNING
);
7854 // In the case of the default device, these values will
7855 // overwrite the rates determined for the output device. Since
7856 // the input device is most likely to be more limited than the
7857 // output device, this is ok.
7858 info
->sampleRates
.clear();
7859 for (unsigned int k
=0; k
<MAX_SAMPLE_RATES
; k
++) {
7860 if ( SAMPLE_RATES
[k
] >= pinfo
.min
.i
&& SAMPLE_RATES
[k
] <= pinfo
.max
.i
)
7861 info
->sampleRates
.push_back( SAMPLE_RATES
[k
] );
7865 // The AL library supports all our formats, except 24-bit and 32-bit ints.
7866 info
->nativeFormats
= (RtAudioFormat
) 51;
7869 if ( info
->maxInputChannels
== 0 && info
->maxOutputChannels
== 0 )
7871 if ( info
->sampleRates
.size() == 0 )
7874 // Determine duplex status.
7875 if (info
->maxInputChannels
< info
->maxOutputChannels
)
7876 info
->maxDuplexChannels
= info
->maxInputChannels
;
7878 info
->maxDuplexChannels
= info
->maxOutputChannels
;
7879 if (info
->minInputChannels
< info
->minOutputChannels
)
7880 info
->minDuplexChannels
= info
->minInputChannels
;
7882 info
->minDuplexChannels
= info
->minOutputChannels
;
7884 if ( info
->maxDuplexChannels
> 0 ) info
->hasDuplexSupport
= true;
7885 else info
->hasDuplexSupport
= false;
7887 info
->probed
= true;
7892 bool RtApiAl :: probeDeviceOpen(int device
, StreamMode mode
, int channels
,
7893 int sampleRate
, RtAudioFormat format
,
7894 int *bufferSize
, int numberOfBuffers
)
7896 int result
, nBuffers
;
7901 long *id
= (long *) devices_
[device
].apiDeviceId
;
7903 // Get a new ALconfig structure.
7904 al_config
= alNewConfig();
7906 sprintf(message_
,"RtApiAl: can't get AL config: %s.",
7907 alGetErrorString(oserror()));
7908 error(RtError::DEBUG_WARNING
);
7912 // Set the channels.
7913 result
= alSetChannels(al_config
, channels
);
7915 alFreeConfig(al_config
);
7916 sprintf(message_
,"RtApiAl: can't set %d channels in AL config: %s.",
7917 channels
, alGetErrorString(oserror()));
7918 error(RtError::DEBUG_WARNING
);
7922 // Attempt to set the queue size. The al API doesn't provide a
7923 // means for querying the minimum/maximum buffer size of a device,
7924 // so if the specified size doesn't work, take whatever the
7925 // al_config structure returns.
7926 if ( numberOfBuffers
< 1 )
7929 nBuffers
= numberOfBuffers
;
7930 long buffer_size
= *bufferSize
* nBuffers
;
7931 result
= alSetQueueSize(al_config
, buffer_size
); // in sample frames
7933 // Get the buffer size specified by the al_config and try that.
7934 buffer_size
= alGetQueueSize(al_config
);
7935 result
= alSetQueueSize(al_config
, buffer_size
);
7937 alFreeConfig(al_config
);
7938 sprintf(message_
,"RtApiAl: can't set buffer size (%ld) in AL config: %s.",
7939 buffer_size
, alGetErrorString(oserror()));
7940 error(RtError::DEBUG_WARNING
);
7943 *bufferSize
= buffer_size
/ nBuffers
;
7946 // Set the data format.
7947 stream_
.userFormat
= format
;
7948 stream_
.deviceFormat
[mode
] = format
;
7949 if (format
== RTAUDIO_SINT8
) {
7950 result
= alSetSampFmt(al_config
, AL_SAMPFMT_TWOSCOMP
);
7951 result
= alSetWidth(al_config
, AL_SAMPLE_8
);
7953 else if (format
== RTAUDIO_SINT16
) {
7954 result
= alSetSampFmt(al_config
, AL_SAMPFMT_TWOSCOMP
);
7955 result
= alSetWidth(al_config
, AL_SAMPLE_16
);
7957 else if (format
== RTAUDIO_SINT24
) {
7958 // Our 24-bit format assumes the upper 3 bytes of a 4 byte word.
7959 // The AL library uses the lower 3 bytes, so we'll need to do our
7961 result
= alSetSampFmt(al_config
, AL_SAMPFMT_FLOAT
);
7962 stream_
.deviceFormat
[mode
] = RTAUDIO_FLOAT32
;
7964 else if (format
== RTAUDIO_SINT32
) {
7965 // The AL library doesn't seem to support the 32-bit integer
7966 // format, so we'll need to do our own conversion.
7967 result
= alSetSampFmt(al_config
, AL_SAMPFMT_FLOAT
);
7968 stream_
.deviceFormat
[mode
] = RTAUDIO_FLOAT32
;
7970 else if (format
== RTAUDIO_FLOAT32
)
7971 result
= alSetSampFmt(al_config
, AL_SAMPFMT_FLOAT
);
7972 else if (format
== RTAUDIO_FLOAT64
)
7973 result
= alSetSampFmt(al_config
, AL_SAMPFMT_DOUBLE
);
7975 if ( result
== -1 ) {
7976 alFreeConfig(al_config
);
7977 sprintf(message_
,"RtApiAl: error setting sample format in AL config: %s.",
7978 alGetErrorString(oserror()));
7979 error(RtError::DEBUG_WARNING
);
7983 if (mode
== OUTPUT
) {
7987 resource
= AL_DEFAULT_OUTPUT
;
7990 result
= alSetDevice(al_config
, resource
);
7991 if ( result
== -1 ) {
7992 alFreeConfig(al_config
);
7993 sprintf(message_
,"RtApiAl: error setting device (%s) in AL config: %s.",
7994 devices_
[device
].name
.c_str(), alGetErrorString(oserror()));
7995 error(RtError::DEBUG_WARNING
);
8000 port
= alOpenPort("RtApiAl Output Port", "w", al_config
);
8002 alFreeConfig(al_config
);
8003 sprintf(message_
,"RtApiAl: error opening output port: %s.",
8004 alGetErrorString(oserror()));
8005 error(RtError::DEBUG_WARNING
);
8009 // Set the sample rate
8010 pvs
[0].param
= AL_MASTER_CLOCK
;
8011 pvs
[0].value
.i
= AL_CRYSTAL_MCLK_TYPE
;
8012 pvs
[1].param
= AL_RATE
;
8013 pvs
[1].value
.ll
= alDoubleToFixed((double)sampleRate
);
8014 result
= alSetParams(resource
, pvs
, 2);
8017 alFreeConfig(al_config
);
8018 sprintf(message_
,"RtApiAl: error setting sample rate (%d) for device (%s): %s.",
8019 sampleRate
, devices_
[device
].name
.c_str(), alGetErrorString(oserror()));
8020 error(RtError::DEBUG_WARNING
);
8024 else { // mode == INPUT
8028 resource
= AL_DEFAULT_INPUT
;
8031 result
= alSetDevice(al_config
, resource
);
8032 if ( result
== -1 ) {
8033 alFreeConfig(al_config
);
8034 sprintf(message_
,"RtApiAl: error setting device (%s) in AL config: %s.",
8035 devices_
[device
].name
.c_str(), alGetErrorString(oserror()));
8036 error(RtError::DEBUG_WARNING
);
8041 port
= alOpenPort("RtApiAl Input Port", "r", al_config
);
8043 alFreeConfig(al_config
);
8044 sprintf(message_
,"RtApiAl: error opening input port: %s.",
8045 alGetErrorString(oserror()));
8046 error(RtError::DEBUG_WARNING
);
8050 // Set the sample rate
8051 pvs
[0].param
= AL_MASTER_CLOCK
;
8052 pvs
[0].value
.i
= AL_CRYSTAL_MCLK_TYPE
;
8053 pvs
[1].param
= AL_RATE
;
8054 pvs
[1].value
.ll
= alDoubleToFixed((double)sampleRate
);
8055 result
= alSetParams(resource
, pvs
, 2);
8058 alFreeConfig(al_config
);
8059 sprintf(message_
,"RtApiAl: error setting sample rate (%d) for device (%s): %s.",
8060 sampleRate
, devices_
[device
].name
.c_str(), alGetErrorString(oserror()));
8061 error(RtError::DEBUG_WARNING
);
8066 alFreeConfig(al_config
);
8068 stream_
.nUserChannels
[mode
] = channels
;
8069 stream_
.nDeviceChannels
[mode
] = channels
;
8071 // Save stream handle.
8072 ALport
*handle
= (ALport
*) stream_
.apiHandle
;
8073 if ( handle
== 0 ) {
8074 handle
= (ALport
*) calloc(2, sizeof(ALport
));
8075 if ( handle
== NULL
) {
8076 sprintf(message_
, "RtApiAl: Irix Al error allocating handle memory (%s).",
8077 devices_
[device
].name
.c_str());
8080 stream_
.apiHandle
= (void *) handle
;
8084 handle
[mode
] = port
;
8086 // Set flags for buffer conversion
8087 stream_
.doConvertBuffer
[mode
] = false;
8088 if (stream_
.userFormat
!= stream_
.deviceFormat
[mode
])
8089 stream_
.doConvertBuffer
[mode
] = true;
8091 // Allocate necessary internal buffers
8092 if ( stream_
.nUserChannels
[0] != stream_
.nUserChannels
[1] ) {
8095 if (stream_
.nUserChannels
[0] >= stream_
.nUserChannels
[1])
8096 buffer_bytes
= stream_
.nUserChannels
[0];
8098 buffer_bytes
= stream_
.nUserChannels
[1];
8100 buffer_bytes
*= *bufferSize
* formatBytes(stream_
.userFormat
);
8101 if (stream_
.userBuffer
) free(stream_
.userBuffer
);
8102 stream_
.userBuffer
= (char *) calloc(buffer_bytes
, 1);
8103 if (stream_
.userBuffer
== NULL
) {
8104 sprintf(message_
, "RtApiAl: error allocating user buffer memory (%s).",
8105 devices_
[device
].name
.c_str());
8110 if ( stream_
.doConvertBuffer
[mode
] ) {
8113 bool makeBuffer
= true;
8114 if ( mode
== OUTPUT
)
8115 buffer_bytes
= stream_
.nDeviceChannels
[0] * formatBytes(stream_
.deviceFormat
[0]);
8116 else { // mode == INPUT
8117 buffer_bytes
= stream_
.nDeviceChannels
[1] * formatBytes(stream_
.deviceFormat
[1]);
8118 if ( stream_
.mode
== OUTPUT
&& stream_
.deviceBuffer
) {
8119 long bytes_out
= stream_
.nDeviceChannels
[0] * formatBytes(stream_
.deviceFormat
[0]);
8120 if ( buffer_bytes
< bytes_out
) makeBuffer
= false;
8125 buffer_bytes
*= *bufferSize
;
8126 if (stream_
.deviceBuffer
) free(stream_
.deviceBuffer
);
8127 stream_
.deviceBuffer
= (char *) calloc(buffer_bytes
, 1);
8128 if (stream_
.deviceBuffer
== NULL
) {
8129 sprintf(message_
, "RtApiAl: error allocating device buffer memory (%s).",
8130 devices_
[device
].name
.c_str());
8136 stream_
.device
[mode
] = device
;
8137 stream_
.state
= STREAM_STOPPED
;
8138 if ( stream_
.mode
== OUTPUT
&& mode
== INPUT
)
8139 // We had already set up an output stream.
8140 stream_
.mode
= DUPLEX
;
8142 stream_
.mode
= mode
;
8143 stream_
.nBuffers
= nBuffers
;
8144 stream_
.bufferSize
= *bufferSize
;
8145 stream_
.sampleRate
= sampleRate
;
8147 // Setup the buffer conversion information structure.
8148 if ( stream_
.doConvertBuffer
[mode
] ) {
8149 if (mode
== INPUT
) { // convert device to user buffer
8150 stream_
.convertInfo
[mode
].inJump
= stream_
.nDeviceChannels
[1];
8151 stream_
.convertInfo
[mode
].outJump
= stream_
.nUserChannels
[1];
8152 stream_
.convertInfo
[mode
].inFormat
= stream_
.deviceFormat
[1];
8153 stream_
.convertInfo
[mode
].outFormat
= stream_
.userFormat
;
8155 else { // convert user to device buffer
8156 stream_
.convertInfo
[mode
].inJump
= stream_
.nUserChannels
[0];
8157 stream_
.convertInfo
[mode
].outJump
= stream_
.nDeviceChannels
[0];
8158 stream_
.convertInfo
[mode
].inFormat
= stream_
.userFormat
;
8159 stream_
.convertInfo
[mode
].outFormat
= stream_
.deviceFormat
[0];
8162 if ( stream_
.convertInfo
[mode
].inJump
< stream_
.convertInfo
[mode
].outJump
)
8163 stream_
.convertInfo
[mode
].channels
= stream_
.convertInfo
[mode
].inJump
;
8165 stream_
.convertInfo
[mode
].channels
= stream_
.convertInfo
[mode
].outJump
;
8167 // Set up the interleave/deinterleave offsets.
8168 if ( mode
== INPUT
&& stream_
.deInterleave
[1] ) {
8169 for (int k
=0; k
<stream_
.convertInfo
[mode
].channels
; k
++) {
8170 stream_
.convertInfo
[mode
].inOffset
.push_back( k
* stream_
.bufferSize
);
8171 stream_
.convertInfo
[mode
].outOffset
.push_back( k
);
8172 stream_
.convertInfo
[mode
].inJump
= 1;
8175 else if (mode
== OUTPUT
&& stream_
.deInterleave
[0]) {
8176 for (int k
=0; k
<stream_
.convertInfo
[mode
].channels
; k
++) {
8177 stream_
.convertInfo
[mode
].inOffset
.push_back( k
);
8178 stream_
.convertInfo
[mode
].outOffset
.push_back( k
* stream_
.bufferSize
);
8179 stream_
.convertInfo
[mode
].outJump
= 1;
8183 for (int k
=0; k
<stream_
.convertInfo
[mode
].channels
; k
++) {
8184 stream_
.convertInfo
[mode
].inOffset
.push_back( k
);
8185 stream_
.convertInfo
[mode
].outOffset
.push_back( k
);
8195 alClosePort(handle
[0]);
8197 alClosePort(handle
[1]);
8199 stream_
.apiHandle
= 0;
8202 if (stream_
.userBuffer
) {
8203 free(stream_
.userBuffer
);
8204 stream_
.userBuffer
= 0;
8207 error(RtError::DEBUG_WARNING
);
8211 void RtApiAl :: closeStream()
8213 // We don't want an exception to be thrown here because this
8214 // function is called by our class destructor. So, do our own
8216 if ( stream_
.mode
== UNINITIALIZED
) {
8217 sprintf(message_
, "RtApiAl::closeStream(): no open stream to close!");
8218 error(RtError::WARNING
);
8222 ALport
*handle
= (ALport
*) stream_
.apiHandle
;
8223 if (stream_
.state
== STREAM_RUNNING
) {
8224 int buffer_size
= stream_
.bufferSize
* stream_
.nBuffers
;
8225 if (stream_
.mode
== OUTPUT
|| stream_
.mode
== DUPLEX
)
8226 alDiscardFrames(handle
[0], buffer_size
);
8227 if (stream_
.mode
== INPUT
|| stream_
.mode
== DUPLEX
)
8228 alDiscardFrames(handle
[1], buffer_size
);
8229 stream_
.state
= STREAM_STOPPED
;
8232 if (stream_
.callbackInfo
.usingCallback
) {
8233 stream_
.callbackInfo
.usingCallback
= false;
8234 pthread_join(stream_
.callbackInfo
.thread
, NULL
);
8238 if (handle
[0]) alClosePort(handle
[0]);
8239 if (handle
[1]) alClosePort(handle
[1]);
8241 stream_
.apiHandle
= 0;
8244 if (stream_
.userBuffer
) {
8245 free(stream_
.userBuffer
);
8246 stream_
.userBuffer
= 0;
8249 if (stream_
.deviceBuffer
) {
8250 free(stream_
.deviceBuffer
);
8251 stream_
.deviceBuffer
= 0;
8254 stream_
.mode
= UNINITIALIZED
;
8257 void RtApiAl :: startStream()
8260 if (stream_
.state
== STREAM_RUNNING
) return;
8262 MUTEX_LOCK(&stream_
.mutex
);
8264 // The AL port is ready as soon as it is opened.
8265 stream_
.state
= STREAM_RUNNING
;
8267 MUTEX_UNLOCK(&stream_
.mutex
);
8270 void RtApiAl :: stopStream()
8273 if (stream_
.state
== STREAM_STOPPED
) return;
8275 // Change the state before the lock to improve shutdown response
8276 // when using a callback.
8277 stream_
.state
= STREAM_STOPPED
;
8278 MUTEX_LOCK(&stream_
.mutex
);
8280 int result
, buffer_size
= stream_
.bufferSize
* stream_
.nBuffers
;
8281 ALport
*handle
= (ALport
*) stream_
.apiHandle
;
8283 if (stream_
.mode
== OUTPUT
|| stream_
.mode
== DUPLEX
)
8284 alZeroFrames(handle
[0], buffer_size
);
8286 if (stream_
.mode
== INPUT
|| stream_
.mode
== DUPLEX
) {
8287 result
= alDiscardFrames(handle
[1], buffer_size
);
8289 sprintf(message_
, "RtApiAl: error draining stream device (%s): %s.",
8290 devices_
[stream_
.device
[1]].name
.c_str(), alGetErrorString(oserror()));
8291 error(RtError::DRIVER_ERROR
);
8295 MUTEX_UNLOCK(&stream_
.mutex
);
8298 void RtApiAl :: abortStream()
8301 if (stream_
.state
== STREAM_STOPPED
) return;
8303 // Change the state before the lock to improve shutdown response
8304 // when using a callback.
8305 stream_
.state
= STREAM_STOPPED
;
8306 MUTEX_LOCK(&stream_
.mutex
);
8308 ALport
*handle
= (ALport
*) stream_
.apiHandle
;
8309 if (stream_
.mode
== OUTPUT
|| stream_
.mode
== DUPLEX
) {
8311 int buffer_size
= stream_
.bufferSize
* stream_
.nBuffers
;
8312 int result
= alDiscardFrames(handle
[0], buffer_size
);
8314 sprintf(message_
, "RtApiAl: error aborting stream device (%s): %s.",
8315 devices_
[stream_
.device
[0]].name
.c_str(), alGetErrorString(oserror()));
8316 error(RtError::DRIVER_ERROR
);
8320 // There is no clear action to take on the input stream, since the
8321 // port will continue to run in any event.
8323 MUTEX_UNLOCK(&stream_
.mutex
);
8326 int RtApiAl :: streamWillBlock()
8330 if (stream_
.state
== STREAM_STOPPED
) return 0;
8332 MUTEX_LOCK(&stream_
.mutex
);
8336 ALport
*handle
= (ALport
*) stream_
.apiHandle
;
8337 if (stream_
.mode
== OUTPUT
|| stream_
.mode
== DUPLEX
) {
8338 err
= alGetFillable(handle
[0]);
8340 sprintf(message_
, "RtApiAl: error getting available frames for stream (%s): %s.",
8341 devices_
[stream_
.device
[0]].name
.c_str(), alGetErrorString(oserror()));
8342 error(RtError::DRIVER_ERROR
);
8348 if (stream_
.mode
== INPUT
|| stream_
.mode
== DUPLEX
) {
8349 err
= alGetFilled(handle
[1]);
8351 sprintf(message_
, "RtApiAl: error getting available frames for stream (%s): %s.",
8352 devices_
[stream_
.device
[1]].name
.c_str(), alGetErrorString(oserror()));
8353 error(RtError::DRIVER_ERROR
);
8355 if (frames
> err
) frames
= err
;
8358 frames
= stream_
.bufferSize
- frames
;
8359 if (frames
< 0) frames
= 0;
8361 MUTEX_UNLOCK(&stream_
.mutex
);
8365 void RtApiAl :: tickStream()
8370 if (stream_
.state
== STREAM_STOPPED
) {
8371 if (stream_
.callbackInfo
.usingCallback
) usleep(50000); // sleep 50 milliseconds
8374 else if (stream_
.callbackInfo
.usingCallback
) {
8375 RtAudioCallback callback
= (RtAudioCallback
) stream_
.callbackInfo
.callback
;
8376 stopStream
= callback(stream_
.userBuffer
, stream_
.bufferSize
, stream_
.callbackInfo
.userData
);
8379 MUTEX_LOCK(&stream_
.mutex
);
8381 // The state might change while waiting on a mutex.
8382 if (stream_
.state
== STREAM_STOPPED
)
8387 RtAudioFormat format
;
8388 ALport
*handle
= (ALport
*) stream_
.apiHandle
;
8389 if (stream_
.mode
== OUTPUT
|| stream_
.mode
== DUPLEX
) {
8391 // Setup parameters and do buffer conversion if necessary.
8392 if (stream_
.doConvertBuffer
[0]) {
8393 buffer
= stream_
.deviceBuffer
;
8394 convertBuffer( buffer
, stream_
.userBuffer
, stream_
.convertInfo
[0] );
8395 channels
= stream_
.nDeviceChannels
[0];
8396 format
= stream_
.deviceFormat
[0];
8399 buffer
= stream_
.userBuffer
;
8400 channels
= stream_
.nUserChannels
[0];
8401 format
= stream_
.userFormat
;
8404 // Do byte swapping if necessary.
8405 if (stream_
.doByteSwap
[0])
8406 byteSwapBuffer(buffer
, stream_
.bufferSize
* channels
, format
);
8408 // Write interleaved samples to device.
8409 alWriteFrames(handle
[0], buffer
, stream_
.bufferSize
);
8412 if (stream_
.mode
== INPUT
|| stream_
.mode
== DUPLEX
) {
8414 // Setup parameters.
8415 if (stream_
.doConvertBuffer
[1]) {
8416 buffer
= stream_
.deviceBuffer
;
8417 channels
= stream_
.nDeviceChannels
[1];
8418 format
= stream_
.deviceFormat
[1];
8421 buffer
= stream_
.userBuffer
;
8422 channels
= stream_
.nUserChannels
[1];
8423 format
= stream_
.userFormat
;
8426 // Read interleaved samples from device.
8427 alReadFrames(handle
[1], buffer
, stream_
.bufferSize
);
8429 // Do byte swapping if necessary.
8430 if (stream_
.doByteSwap
[1])
8431 byteSwapBuffer(buffer
, stream_
.bufferSize
* channels
, format
);
8433 // Do buffer conversion if necessary.
8434 if (stream_
.doConvertBuffer
[1])
8435 convertBuffer( stream_
.userBuffer
, stream_
.deviceBuffer
, stream_
.convertInfo
[1] );
8439 MUTEX_UNLOCK(&stream_
.mutex
);
8441 if (stream_
.callbackInfo
.usingCallback
&& stopStream
)
8445 void RtApiAl :: setStreamCallback(RtAudioCallback callback
, void *userData
)
8449 CallbackInfo
*info
= (CallbackInfo
*) &stream_
.callbackInfo
;
8450 if ( info
->usingCallback
) {
8451 sprintf(message_
, "RtApiAl: A callback is already set for this stream!");
8452 error(RtError::WARNING
);
8456 info
->callback
= (void *) callback
;
8457 info
->userData
= userData
;
8458 info
->usingCallback
= true;
8459 info
->object
= (void *) this;
8461 // Set the thread attributes for joinable and realtime scheduling
8462 // priority. The higher priority will only take affect if the
8463 // program is run as root or suid.
8464 pthread_attr_t attr
;
8465 pthread_attr_init(&attr
);
8466 // chuck (commented out)
8467 // pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
8468 // pthread_attr_setschedpolicy(&attr, SCHED_RR);
8470 int err
= pthread_create(&info
->thread
, &attr
, callbackHandler
, &stream_
.callbackInfo
);
8471 pthread_attr_destroy(&attr
);
8473 info
->usingCallback
= false;
8474 sprintf(message_
, "RtApiAl: error starting callback thread!");
8475 error(RtError::THREAD_ERROR
);
8479 void RtApiAl :: cancelStreamCallback()
8483 if (stream_
.callbackInfo
.usingCallback
) {
8485 if (stream_
.state
== STREAM_RUNNING
)
8488 MUTEX_LOCK(&stream_
.mutex
);
8490 stream_
.callbackInfo
.usingCallback
= false;
8491 pthread_join(stream_
.callbackInfo
.thread
, NULL
);
8492 stream_
.callbackInfo
.thread
= 0;
8493 stream_
.callbackInfo
.callback
= NULL
;
8494 stream_
.callbackInfo
.userData
= NULL
;
8496 MUTEX_UNLOCK(&stream_
.mutex
);
8500 extern "C" void *callbackHandler(void *ptr
)
8502 CallbackInfo
*info
= (CallbackInfo
*) ptr
;
8503 RtApiAl
*object
= (RtApiAl
*) info
->object
;
8504 bool *usingCallback
= &info
->usingCallback
;
8506 while ( *usingCallback
) {
8508 object
->tickStream();
8510 catch (RtError
&exception
) {
8511 EM_log( CK_LOG_SYSTEM
, "RtApiAl: callback thread error..." );
8513 EM_log( CK_LOG_INFO
, "(%s)", exception
.getMessageString() );
8514 EM_log( CK_LOG_INFO
, "closing thread..." );
8523 //******************** End of __IRIX_AL__ *********************//
8527 // *************************************************** //
8529 // Protected common (OS-independent) RtAudio methods.
8531 // *************************************************** //
8533 // This method can be modified to control the behavior of error
8534 // message reporting and throwing.
8535 void RtApi :: error(RtError::Type type
, long cont
)
8537 if (type
== RtError::WARNING
) {
8538 #if defined(__CHUCK_DEBUG__)
8539 fprintf(stderr
, "[chuck](via rtaudio): %s\n", message_
);
8542 else if (type
== RtError::DEBUG_WARNING
) {
8543 #if defined(__CHUCK_DEBUG__)
8544 #if defined(__RTAUDIO_DEBUG__)
8545 fprintf(stderr
, "[chuck](via rtaudio): %s\n", message_
);
8550 #if defined(__RTAUDIO_DEBUG__)
8551 fprintf(stderr
, "[chuck](via rtaudio): %s\n", message_
);
8553 throw RtError(std::string(message_
), type
, cont
);
8557 void RtApi :: verifyStream()
8559 if ( stream_
.mode
== UNINITIALIZED
) {
8560 sprintf(message_
, "RtAudio: stream is not open!");
8561 error(RtError::INVALID_STREAM
);
8565 void RtApi :: clearDeviceInfo(RtApiDevice
*info
)
8567 // Don't clear the name or DEVICE_ID fields here ... they are
8568 // typically set prior to a call of this function.
8569 info
->probed
= false;
8570 info
->maxOutputChannels
= 0;
8571 info
->maxInputChannels
= 0;
8572 info
->maxDuplexChannels
= 0;
8573 info
->minOutputChannels
= 0;
8574 info
->minInputChannels
= 0;
8575 info
->minDuplexChannels
= 0;
8576 info
->hasDuplexSupport
= false;
8577 info
->sampleRates
.clear();
8578 info
->nativeFormats
= 0;
8581 void RtApi :: clearStreamInfo()
8583 stream_
.mode
= UNINITIALIZED
;
8584 stream_
.state
= STREAM_STOPPED
;
8585 stream_
.sampleRate
= 0;
8586 stream_
.bufferSize
= 0;
8587 stream_
.nBuffers
= 0;
8588 stream_
.userFormat
= 0;
8589 for ( int i
=0; i
<2; i
++ ) {
8590 stream_
.device
[i
] = 0;
8591 stream_
.doConvertBuffer
[i
] = false;
8592 stream_
.deInterleave
[i
] = false;
8593 stream_
.doByteSwap
[i
] = false;
8594 stream_
.nUserChannels
[i
] = 0;
8595 stream_
.nDeviceChannels
[i
] = 0;
8596 stream_
.deviceFormat
[i
] = 0;
8600 int RtApi :: formatBytes(RtAudioFormat format
)
8602 if (format
== RTAUDIO_SINT16
)
8604 else if (format
== RTAUDIO_SINT24
|| format
== RTAUDIO_SINT32
||
8605 format
== RTAUDIO_FLOAT32
)
8607 else if (format
== RTAUDIO_FLOAT64
)
8609 else if (format
== RTAUDIO_SINT8
)
8612 sprintf(message_
,"(via rtaudio): undefined format in formatBytes().");
8613 error(RtError::WARNING
);
8618 void RtApi :: convertBuffer( char *outBuffer
, char *inBuffer
, ConvertInfo
&info
)
8620 // This function does format conversion, input/output channel compensation, and
8621 // data interleaving/deinterleaving. 24-bit integers are assumed to occupy
8622 // the upper three bytes of a 32-bit integer.
8624 // Clear our device buffer when in/out duplex device channels are different
8625 if ( outBuffer
== stream_
.deviceBuffer
&& stream_
.mode
== DUPLEX
&&
8626 stream_
.nDeviceChannels
[0] != stream_
.nDeviceChannels
[1] )
8627 memset( outBuffer
, 0, stream_
.bufferSize
* info
.outJump
* formatBytes( info
.outFormat
) );
8630 if (info
.outFormat
== RTAUDIO_FLOAT64
) {
8632 Float64
*out
= (Float64
*)outBuffer
;
8634 if (info
.inFormat
== RTAUDIO_SINT8
) {
8635 signed char *in
= (signed char *)inBuffer
;
8636 scale
= 1.0 / 128.0;
8637 for (int i
=0; i
<stream_
.bufferSize
; i
++) {
8638 for (j
=0; j
<info
.channels
; j
++) {
8639 out
[info
.outOffset
[j
]] = (Float64
) in
[info
.inOffset
[j
]];
8640 out
[info
.outOffset
[j
]] *= scale
;
8643 out
+= info
.outJump
;
8646 else if (info
.inFormat
== RTAUDIO_SINT16
) {
8647 Int16
*in
= (Int16
*)inBuffer
;
8648 scale
= 1.0 / 32768.0;
8649 for (int i
=0; i
<stream_
.bufferSize
; i
++) {
8650 for (j
=0; j
<info
.channels
; j
++) {
8651 out
[info
.outOffset
[j
]] = (Float64
) in
[info
.inOffset
[j
]];
8652 out
[info
.outOffset
[j
]] *= scale
;
8655 out
+= info
.outJump
;
8658 else if (info
.inFormat
== RTAUDIO_SINT24
) {
8659 Int32
*in
= (Int32
*)inBuffer
;
8660 scale
= 1.0 / 2147483648.0;
8661 for (int i
=0; i
<stream_
.bufferSize
; i
++) {
8662 for (j
=0; j
<info
.channels
; j
++) {
8663 out
[info
.outOffset
[j
]] = (Float64
) (in
[info
.inOffset
[j
]] & 0xffffff00);
8664 out
[info
.outOffset
[j
]] *= scale
;
8667 out
+= info
.outJump
;
8670 else if (info
.inFormat
== RTAUDIO_SINT32
) {
8671 Int32
*in
= (Int32
*)inBuffer
;
8672 scale
= 1.0 / 2147483648.0;
8673 for (int i
=0; i
<stream_
.bufferSize
; i
++) {
8674 for (j
=0; j
<info
.channels
; j
++) {
8675 out
[info
.outOffset
[j
]] = (Float64
) in
[info
.inOffset
[j
]];
8676 out
[info
.outOffset
[j
]] *= scale
;
8679 out
+= info
.outJump
;
8682 else if (info
.inFormat
== RTAUDIO_FLOAT32
) {
8683 Float32
*in
= (Float32
*)inBuffer
;
8684 for (int i
=0; i
<stream_
.bufferSize
; i
++) {
8685 for (j
=0; j
<info
.channels
; j
++) {
8686 out
[info
.outOffset
[j
]] = (Float64
) in
[info
.inOffset
[j
]];
8689 out
+= info
.outJump
;
8692 else if (info
.inFormat
== RTAUDIO_FLOAT64
) {
8693 // Channel compensation and/or (de)interleaving only.
8694 Float64
*in
= (Float64
*)inBuffer
;
8695 for (int i
=0; i
<stream_
.bufferSize
; i
++) {
8696 for (j
=0; j
<info
.channels
; j
++) {
8697 out
[info
.outOffset
[j
]] = in
[info
.inOffset
[j
]];
8700 out
+= info
.outJump
;
8704 else if (info
.outFormat
== RTAUDIO_FLOAT32
) {
8706 Float32
*out
= (Float32
*)outBuffer
;
8708 if (info
.inFormat
== RTAUDIO_SINT8
) {
8709 signed char *in
= (signed char *)inBuffer
;
8710 scale
= 1.0 / 128.0;
8711 for (int i
=0; i
<stream_
.bufferSize
; i
++) {
8712 for (j
=0; j
<info
.channels
; j
++) {
8713 out
[info
.outOffset
[j
]] = (Float32
) in
[info
.inOffset
[j
]];
8714 out
[info
.outOffset
[j
]] *= scale
;
8717 out
+= info
.outJump
;
8720 else if (info
.inFormat
== RTAUDIO_SINT16
) {
8721 Int16
*in
= (Int16
*)inBuffer
;
8722 scale
= 1.0 / 32768.0;
8723 for (int i
=0; i
<stream_
.bufferSize
; i
++) {
8724 for (j
=0; j
<info
.channels
; j
++) {
8725 out
[info
.outOffset
[j
]] = (Float32
) in
[info
.inOffset
[j
]];
8726 out
[info
.outOffset
[j
]] *= scale
;
8729 out
+= info
.outJump
;
8732 else if (info
.inFormat
== RTAUDIO_SINT24
) {
8733 Int32
*in
= (Int32
*)inBuffer
;
8734 scale
= 1.0 / 2147483648.0;
8735 for (int i
=0; i
<stream_
.bufferSize
; i
++) {
8736 for (j
=0; j
<info
.channels
; j
++) {
8737 out
[info
.outOffset
[j
]] = (Float32
) (in
[info
.inOffset
[j
]] & 0xffffff00);
8738 out
[info
.outOffset
[j
]] *= scale
;
8741 out
+= info
.outJump
;
8744 else if (info
.inFormat
== RTAUDIO_SINT32
) {
8745 Int32
*in
= (Int32
*)inBuffer
;
8746 scale
= 1.0 / 2147483648.0;
8747 for (int i
=0; i
<stream_
.bufferSize
; i
++) {
8748 for (j
=0; j
<info
.channels
; j
++) {
8749 out
[info
.outOffset
[j
]] = (Float32
) in
[info
.inOffset
[j
]];
8750 out
[info
.outOffset
[j
]] *= scale
;
8753 out
+= info
.outJump
;
8756 else if (info
.inFormat
== RTAUDIO_FLOAT32
) {
8757 // Channel compensation and/or (de)interleaving only.
8758 Float32
*in
= (Float32
*)inBuffer
;
8759 for (int i
=0; i
<stream_
.bufferSize
; i
++) {
8760 for (j
=0; j
<info
.channels
; j
++) {
8761 out
[info
.outOffset
[j
]] = in
[info
.inOffset
[j
]];
8764 out
+= info
.outJump
;
8767 else if (info
.inFormat
== RTAUDIO_FLOAT64
) {
8768 Float64
*in
= (Float64
*)inBuffer
;
8769 for (int i
=0; i
<stream_
.bufferSize
; i
++) {
8770 for (j
=0; j
<info
.channels
; j
++) {
8771 out
[info
.outOffset
[j
]] = (Float32
) in
[info
.inOffset
[j
]];
8774 out
+= info
.outJump
;
8778 else if (info
.outFormat
== RTAUDIO_SINT32
) {
8779 Int32
*out
= (Int32
*)outBuffer
;
8780 if (info
.inFormat
== RTAUDIO_SINT8
) {
8781 signed char *in
= (signed char *)inBuffer
;
8782 for (int i
=0; i
<stream_
.bufferSize
; i
++) {
8783 for (j
=0; j
<info
.channels
; j
++) {
8784 out
[info
.outOffset
[j
]] = (Int32
) in
[info
.inOffset
[j
]];
8785 out
[info
.outOffset
[j
]] <<= 24;
8788 out
+= info
.outJump
;
8791 else if (info
.inFormat
== RTAUDIO_SINT16
) {
8792 Int16
*in
= (Int16
*)inBuffer
;
8793 for (int i
=0; i
<stream_
.bufferSize
; i
++) {
8794 for (j
=0; j
<info
.channels
; j
++) {
8795 out
[info
.outOffset
[j
]] = (Int32
) in
[info
.inOffset
[j
]];
8796 out
[info
.outOffset
[j
]] <<= 16;
8799 out
+= info
.outJump
;
8802 else if (info
.inFormat
== RTAUDIO_SINT24
) {
8803 Int32
*in
= (Int32
*)inBuffer
;
8804 for (int i
=0; i
<stream_
.bufferSize
; i
++) {
8805 for (j
=0; j
<info
.channels
; j
++) {
8806 out
[info
.outOffset
[j
]] = (Int32
) in
[info
.inOffset
[j
]];
8809 out
+= info
.outJump
;
8812 else if (info
.inFormat
== RTAUDIO_SINT32
) {
8813 // Channel compensation and/or (de)interleaving only.
8814 Int32
*in
= (Int32
*)inBuffer
;
8815 for (int i
=0; i
<stream_
.bufferSize
; i
++) {
8816 for (j
=0; j
<info
.channels
; j
++) {
8817 out
[info
.outOffset
[j
]] = in
[info
.inOffset
[j
]];
8820 out
+= info
.outJump
;
8823 else if (info
.inFormat
== RTAUDIO_FLOAT32
) {
8824 Float32
*in
= (Float32
*)inBuffer
;
8825 for (int i
=0; i
<stream_
.bufferSize
; i
++) {
8826 for (j
=0; j
<info
.channels
; j
++) {
8827 out
[info
.outOffset
[j
]] = (Int32
) (in
[info
.inOffset
[j
]] * 2147483647.0);
8830 out
+= info
.outJump
;
8833 else if (info
.inFormat
== RTAUDIO_FLOAT64
) {
8834 Float64
*in
= (Float64
*)inBuffer
;
8835 for (int i
=0; i
<stream_
.bufferSize
; i
++) {
8836 for (j
=0; j
<info
.channels
; j
++) {
8837 out
[info
.outOffset
[j
]] = (Int32
) (in
[info
.inOffset
[j
]] * 2147483647.0);
8840 out
+= info
.outJump
;
8844 else if (info
.outFormat
== RTAUDIO_SINT24
) {
8845 Int32
*out
= (Int32
*)outBuffer
;
8846 if (info
.inFormat
== RTAUDIO_SINT8
) {
8847 signed char *in
= (signed char *)inBuffer
;
8848 for (int i
=0; i
<stream_
.bufferSize
; i
++) {
8849 for (j
=0; j
<info
.channels
; j
++) {
8850 out
[info
.outOffset
[j
]] = (Int32
) in
[info
.inOffset
[j
]];
8851 out
[info
.outOffset
[j
]] <<= 24;
8854 out
+= info
.outJump
;
8857 else if (info
.inFormat
== RTAUDIO_SINT16
) {
8858 Int16
*in
= (Int16
*)inBuffer
;
8859 for (int i
=0; i
<stream_
.bufferSize
; i
++) {
8860 for (j
=0; j
<info
.channels
; j
++) {
8861 out
[info
.outOffset
[j
]] = (Int32
) in
[info
.inOffset
[j
]];
8862 out
[info
.outOffset
[j
]] <<= 16;
8865 out
+= info
.outJump
;
8868 else if (info
.inFormat
== RTAUDIO_SINT24
) {
8869 // Channel compensation and/or (de)interleaving only.
8870 Int32
*in
= (Int32
*)inBuffer
;
8871 for (int i
=0; i
<stream_
.bufferSize
; i
++) {
8872 for (j
=0; j
<info
.channels
; j
++) {
8873 out
[info
.outOffset
[j
]] = in
[info
.inOffset
[j
]];
8876 out
+= info
.outJump
;
8879 else if (info
.inFormat
== RTAUDIO_SINT32
) {
8880 Int32
*in
= (Int32
*)inBuffer
;
8881 for (int i
=0; i
<stream_
.bufferSize
; i
++) {
8882 for (j
=0; j
<info
.channels
; j
++) {
8883 out
[info
.outOffset
[j
]] = (Int32
) (in
[info
.inOffset
[j
]] & 0xffffff00);
8886 out
+= info
.outJump
;
8889 else if (info
.inFormat
== RTAUDIO_FLOAT32
) {
8890 Float32
*in
= (Float32
*)inBuffer
;
8891 for (int i
=0; i
<stream_
.bufferSize
; i
++) {
8892 for (j
=0; j
<info
.channels
; j
++) {
8893 out
[info
.outOffset
[j
]] = (Int32
) (in
[info
.inOffset
[j
]] * 2147483647.0);
8896 out
+= info
.outJump
;
8899 else if (info
.inFormat
== RTAUDIO_FLOAT64
) {
8900 Float64
*in
= (Float64
*)inBuffer
;
8901 for (int i
=0; i
<stream_
.bufferSize
; i
++) {
8902 for (j
=0; j
<info
.channels
; j
++) {
8903 out
[info
.outOffset
[j
]] = (Int32
) (in
[info
.inOffset
[j
]] * 2147483647.0);
8906 out
+= info
.outJump
;
8910 else if (info
.outFormat
== RTAUDIO_SINT16
) {
8911 Int16
*out
= (Int16
*)outBuffer
;
8912 if (info
.inFormat
== RTAUDIO_SINT8
) {
8913 signed char *in
= (signed char *)inBuffer
;
8914 for (int i
=0; i
<stream_
.bufferSize
; i
++) {
8915 for (j
=0; j
<info
.channels
; j
++) {
8916 out
[info
.outOffset
[j
]] = (Int16
) in
[info
.inOffset
[j
]];
8917 out
[info
.outOffset
[j
]] <<= 8;
8920 out
+= info
.outJump
;
8923 else if (info
.inFormat
== RTAUDIO_SINT16
) {
8924 // Channel compensation and/or (de)interleaving only.
8925 Int16
*in
= (Int16
*)inBuffer
;
8926 for (int i
=0; i
<stream_
.bufferSize
; i
++) {
8927 for (j
=0; j
<info
.channels
; j
++) {
8928 out
[info
.outOffset
[j
]] = in
[info
.inOffset
[j
]];
8931 out
+= info
.outJump
;
8934 else if (info
.inFormat
== RTAUDIO_SINT24
) {
8935 Int32
*in
= (Int32
*)inBuffer
;
8936 for (int i
=0; i
<stream_
.bufferSize
; i
++) {
8937 for (j
=0; j
<info
.channels
; j
++) {
8938 out
[info
.outOffset
[j
]] = (Int16
) ((in
[info
.inOffset
[j
]] >> 16) & 0x0000ffff);
8941 out
+= info
.outJump
;
8944 else if (info
.inFormat
== RTAUDIO_SINT32
) {
8945 Int32
*in
= (Int32
*)inBuffer
;
8946 for (int i
=0; i
<stream_
.bufferSize
; i
++) {
8947 for (j
=0; j
<info
.channels
; j
++) {
8948 out
[info
.outOffset
[j
]] = (Int16
) ((in
[info
.inOffset
[j
]] >> 16) & 0x0000ffff);
8951 out
+= info
.outJump
;
8954 else if (info
.inFormat
== RTAUDIO_FLOAT32
) {
8955 Float32
*in
= (Float32
*)inBuffer
;
8956 for (int i
=0; i
<stream_
.bufferSize
; i
++) {
8957 for (j
=0; j
<info
.channels
; j
++) {
8958 out
[info
.outOffset
[j
]] = (Int16
) (in
[info
.inOffset
[j
]] * 32767.0);
8961 out
+= info
.outJump
;
8964 else if (info
.inFormat
== RTAUDIO_FLOAT64
) {
8965 Float64
*in
= (Float64
*)inBuffer
;
8966 for (int i
=0; i
<stream_
.bufferSize
; i
++) {
8967 for (j
=0; j
<info
.channels
; j
++) {
8968 out
[info
.outOffset
[j
]] = (Int16
) (in
[info
.inOffset
[j
]] * 32767.0);
8971 out
+= info
.outJump
;
8975 else if (info
.outFormat
== RTAUDIO_SINT8
) {
8976 signed char *out
= (signed char *)outBuffer
;
8977 if (info
.inFormat
== RTAUDIO_SINT8
) {
8978 // Channel compensation and/or (de)interleaving only.
8979 signed char *in
= (signed char *)inBuffer
;
8980 for (int i
=0; i
<stream_
.bufferSize
; i
++) {
8981 for (j
=0; j
<info
.channels
; j
++) {
8982 out
[info
.outOffset
[j
]] = in
[info
.inOffset
[j
]];
8985 out
+= info
.outJump
;
8988 if (info
.inFormat
== RTAUDIO_SINT16
) {
8989 Int16
*in
= (Int16
*)inBuffer
;
8990 for (int i
=0; i
<stream_
.bufferSize
; i
++) {
8991 for (j
=0; j
<info
.channels
; j
++) {
8992 out
[info
.outOffset
[j
]] = (signed char) ((in
[info
.inOffset
[j
]] >> 8) & 0x00ff);
8995 out
+= info
.outJump
;
8998 else if (info
.inFormat
== RTAUDIO_SINT24
) {
8999 Int32
*in
= (Int32
*)inBuffer
;
9000 for (int i
=0; i
<stream_
.bufferSize
; i
++) {
9001 for (j
=0; j
<info
.channels
; j
++) {
9002 out
[info
.outOffset
[j
]] = (signed char) ((in
[info
.inOffset
[j
]] >> 24) & 0x000000ff);
9005 out
+= info
.outJump
;
9008 else if (info
.inFormat
== RTAUDIO_SINT32
) {
9009 Int32
*in
= (Int32
*)inBuffer
;
9010 for (int i
=0; i
<stream_
.bufferSize
; i
++) {
9011 for (j
=0; j
<info
.channels
; j
++) {
9012 out
[info
.outOffset
[j
]] = (signed char) ((in
[info
.inOffset
[j
]] >> 24) & 0x000000ff);
9015 out
+= info
.outJump
;
9018 else if (info
.inFormat
== RTAUDIO_FLOAT32
) {
9019 Float32
*in
= (Float32
*)inBuffer
;
9020 for (int i
=0; i
<stream_
.bufferSize
; i
++) {
9021 for (j
=0; j
<info
.channels
; j
++) {
9022 out
[info
.outOffset
[j
]] = (signed char) (in
[info
.inOffset
[j
]] * 127.0);
9025 out
+= info
.outJump
;
9028 else if (info
.inFormat
== RTAUDIO_FLOAT64
) {
9029 Float64
*in
= (Float64
*)inBuffer
;
9030 for (int i
=0; i
<stream_
.bufferSize
; i
++) {
9031 for (j
=0; j
<info
.channels
; j
++) {
9032 out
[info
.outOffset
[j
]] = (signed char) (in
[info
.inOffset
[j
]] * 127.0);
9035 out
+= info
.outJump
;
9041 void RtApi :: byteSwapBuffer( char *buffer
, int samples
, RtAudioFormat format
)
9047 if (format
== RTAUDIO_SINT16
) {
9048 for (int i
=0; i
<samples
; i
++) {
9049 // Swap 1st and 2nd bytes.
9054 // Increment 2 bytes.
9058 else if (format
== RTAUDIO_SINT24
||
9059 format
== RTAUDIO_SINT32
||
9060 format
== RTAUDIO_FLOAT32
) {
9061 for (int i
=0; i
<samples
; i
++) {
9062 // Swap 1st and 4th bytes.
9067 // Swap 2nd and 3rd bytes.
9073 // Increment 4 bytes.
9077 else if (format
== RTAUDIO_FLOAT64
) {
9078 for (int i
=0; i
<samples
; i
++) {
9079 // Swap 1st and 8th bytes
9084 // Swap 2nd and 7th bytes
9090 // Swap 3rd and 6th bytes
9096 // Swap 4th and 5th bytes
9102 // Increment 8 bytes.