1 /************************************************************************/
3 \brief Realtime audio i/o C++ classes.
5 RtAudio provides a common API (Application Programming Interface)
6 for realtime audio input/output across Linux (native ALSA, Jack,
7 and OSS), SGI, Macintosh OS X (CoreAudio), and Windows
8 (DirectSound and ASIO) operating systems.
10 RtAudio WWW site: http://music.mcgill.ca/~gary/rtaudio/
12 RtAudio: a realtime audio i/o C++ class
13 Copyright (c) 2001-2004 Gary P. Scavone
15 Permission is hereby granted, free of charge, to any person
16 obtaining a copy of this software and associated documentation files
17 (the "Software"), to deal in the Software without restriction,
18 including without limitation the rights to use, copy, modify, merge,
19 publish, distribute, sublicense, and/or sell copies of the Software,
20 and to permit persons to whom the Software is furnished to do so,
21 subject to the following conditions:
23 The above copyright notice and this permission notice shall be
24 included in all copies or substantial portions of the Software.
26 Any person wishing to distribute modifications to the Software is
27 requested to send the modifications to the original developer so that
28 they can be incorporated into the canonical version.
30 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
31 EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
32 MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
33 IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
34 ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
35 CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
36 WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
38 /************************************************************************/
40 // RtAudio: Version 3.0, 11 March 2004
45 // Static variable definitions.
46 const unsigned int RtApi::MAX_SAMPLE_RATES = 14;
47 const unsigned int RtApi::SAMPLE_RATES[] = {
48 4000, 5512, 8000, 9600, 11025, 16000, 22050,
49 32000, 44100, 48000, 88200, 96000, 176400, 192000
// Platform mutex abstraction: Win32 critical sections under DirectSound/ASIO,
// POSIX pthread mutexes everywhere else.  Note: no trailing semicolons inside
// the macro bodies, so call sites supply their own (avoids empty statements).
#if defined(__WINDOWS_DS__) || defined(__WINDOWS_ASIO__)
  #define MUTEX_INITIALIZE(A) InitializeCriticalSection(A)
  #define MUTEX_DESTROY(A)    DeleteCriticalSection(A)
  #define MUTEX_LOCK(A)       EnterCriticalSection(A)
  #define MUTEX_UNLOCK(A)     LeaveCriticalSection(A)
#else // pthread API
  #define MUTEX_INITIALIZE(A) pthread_mutex_init(A, NULL)
  #define MUTEX_DESTROY(A)    pthread_mutex_destroy(A)
  #define MUTEX_LOCK(A)       pthread_mutex_lock(A)
  #define MUTEX_UNLOCK(A)     pthread_mutex_unlock(A)
#endif
64 // *************************************************** //
66 // Public common (OS-independent) methods.
68 // *************************************************** //
70 RtAudio :: RtAudio( RtAudioApi api )
75 RtAudio :: RtAudio( int outputDevice, int outputChannels,
76 int inputDevice, int inputChannels,
77 RtAudioFormat format, int sampleRate,
78 int *bufferSize, int numberOfBuffers, RtAudioApi api )
83 rtapi_->openStream( outputDevice, outputChannels,
84 inputDevice, inputChannels,
86 bufferSize, numberOfBuffers );
88 catch (RtError &exception) {
89 // Deallocate the RtApi instance.
100 void RtAudio :: openStream( int outputDevice, int outputChannels,
101 int inputDevice, int inputChannels,
102 RtAudioFormat format, int sampleRate,
103 int *bufferSize, int numberOfBuffers )
105 rtapi_->openStream( outputDevice, outputChannels, inputDevice,
106 inputChannels, format, sampleRate,
107 bufferSize, numberOfBuffers );
// Instantiate the concrete RtApi backend.  An explicitly requested 'api'
// is honored only if compiled in (throws otherwise); with no request the
// compiled APIs are tried in a fixed "best first" order (JACK/ASIO/AL/
// CoreAudio, then ALSA/DirectSound, then OSS), falling through when a
// backend reports no devices.
// NOTE(review): extraction elided lines from this listing (closing braces,
// #endif's, try blocks); code lines below are kept verbatim.
110 void RtAudio::initialize( RtAudioApi api )
114 // First look for a compiled match to a specified API value. If one
115 // of these constructors throws an error, it will be passed up the
116 // inheritance chain.
117 #if defined(__LINUX_JACK__)
118 if ( api == LINUX_JACK )
119 rtapi_ = new RtApiJack();
121 #if defined(__LINUX_ALSA__)
122 if ( api == LINUX_ALSA )
123 rtapi_ = new RtApiAlsa();
125 #if defined(__LINUX_OSS__)
126 if ( api == LINUX_OSS )
127 rtapi_ = new RtApiOss();
129 #if defined(__WINDOWS_ASIO__)
130 if ( api == WINDOWS_ASIO )
131 rtapi_ = new RtApiAsio();
133 #if defined(__WINDOWS_DS__)
134 if ( api == WINDOWS_DS )
135 rtapi_ = new RtApiDs();
137 #if defined(__IRIX_AL__)
138 if ( api == IRIX_AL )
139 rtapi_ = new RtApiAl();
141 #if defined(__MACOSX_CORE__)
142 if ( api == MACOSX_CORE )
143 rtapi_ = new RtApiCore();
// A requested API that was compiled in has now been constructed.
146 if ( rtapi_ ) return;
148 // No compiled support for specified API value.
149 throw RtError( "RtAudio: no compiled support for specified API argument!", RtError::INVALID_PARAMETER );
152 // No specified API ... search for "best" option.
154 #if defined(__LINUX_JACK__)
155 rtapi_ = new RtApiJack();
156 #elif defined(__WINDOWS_ASIO__)
157 rtapi_ = new RtApiAsio();
158 #elif defined(__IRIX_AL__)
159 rtapi_ = new RtApiAl();
160 #elif defined(__MACOSX_CORE__)
161 rtapi_ = new RtApiCore();
167 #if defined(__RTAUDIO_DEBUG__)
168 fprintf(stderr, "\nRtAudio: no devices found for first api option (JACK, ASIO, Al, or CoreAudio).\n\n");
173 if ( rtapi_ ) return;
175 // Try second API support
178 #if defined(__LINUX_ALSA__)
179 rtapi_ = new RtApiAlsa();
180 #elif defined(__WINDOWS_DS__)
181 rtapi_ = new RtApiDs();
187 #if defined(__RTAUDIO_DEBUG__)
188 fprintf(stderr, "\nRtAudio: no devices found for second api option (Alsa or DirectSound).\n\n");
194 if ( rtapi_ ) return;
196 // Try third API support
198 #if defined(__LINUX_OSS__)
200 rtapi_ = new RtApiOss();
202 catch (RtError &error) {
// All compiled backends exhausted without finding a device.
212 throw RtError( "RtAudio: no devices found for compiled audio APIs!", RtError::NO_DEVICES_FOUND );
// RtApi constructor body fragment: mark the stream structure as unopened
// and create its mutex.  The lone MUTEX_DESTROY below is the matching
// ~RtApi() destructor body (surrounding signatures elided by extraction).
218 stream_.mode = UNINITIALIZED;
219 stream_.apiHandle = 0;
220 MUTEX_INITIALIZE(&stream_.mutex);
// ~RtApi(): release the stream mutex created above.
225 MUTEX_DESTROY(&stream_.mutex);
// Validate parameters and open a stream on this backend.  Device indices
// are 1-based from the caller; 0 means "pick a default, then scan all
// devices".  On success stream_ is configured; on failure everything is
// torn down and error() is invoked with INVALID_PARAMETER.
// NOTE(review): listing is incomplete (braces/else branches elided);
// code lines kept verbatim.
228 void RtApi :: openStream( int outputDevice, int outputChannels,
229 int inputDevice, int inputChannels,
230 RtAudioFormat format, int sampleRate,
231 int *bufferSize, int numberOfBuffers )
// Only a single open stream is supported per RtApi instance.
233 if ( stream_.mode != UNINITIALIZED ) {
234 sprintf(message_, "RtApi: only one open stream allowed per class instance.");
235 error(RtError::INVALID_STREAM);
238 if (outputChannels < 1 && inputChannels < 1) {
239 sprintf(message_,"RtApi: one or both 'channel' parameters must be greater than zero.");
240 error(RtError::INVALID_PARAMETER);
243 if ( formatBytes(format) == 0 ) {
244 sprintf(message_,"RtApi: 'format' parameter value is undefined.");
245 error(RtError::INVALID_PARAMETER);
248 if ( outputChannels > 0 ) {
249 if (outputDevice > nDevices_ || outputDevice < 0) {
250 sprintf(message_,"RtApi: 'outputDevice' parameter value (%d) is invalid.", outputDevice);
251 error(RtError::INVALID_PARAMETER);
255 if ( inputChannels > 0 ) {
256 if (inputDevice > nDevices_ || inputDevice < 0) {
257 sprintf(message_,"RtApi: 'inputDevice' parameter value (%d) is invalid.", inputDevice);
258 error(RtError::INVALID_PARAMETER);
263 bool result = FAILURE;
264 int device, defaultDevice = 0;
// --- Output side: try the requested device, or default then all devices.
267 if ( outputChannels > 0 ) {
270 channels = outputChannels;
272 if ( outputDevice == 0 ) { // Try default device first.
273 defaultDevice = getDefaultOutputDevice();
274 device = defaultDevice;
277 device = outputDevice - 1;
// i == -1 probes the chosen/default device; i >= 0 scans the rest.
279 for ( int i=-1; i<nDevices_; i++ ) {
281 if ( i == defaultDevice ) continue;
283 if (devices_[device].probed == false) {
284 // If the device wasn't successfully probed before, try it
287 clearDeviceInfo(&devices_[device]);
288 probeDeviceInfo(&devices_[device]);
290 if ( devices_[device].probed )
291 result = probeDeviceOpen(device, mode, channels, sampleRate,
292 format, bufferSize, numberOfBuffers);
293 if ( result == SUCCESS ) break;
// An explicitly requested device gets exactly one attempt.
294 if ( outputDevice > 0 ) break;
// --- Input side: only if output succeeded (or wasn't requested).
299 if ( inputChannels > 0 && ( result == SUCCESS || outputChannels <= 0 ) ) {
302 channels = inputChannels;
304 if ( inputDevice == 0 ) { // Try default device first.
305 defaultDevice = getDefaultInputDevice();
306 device = defaultDevice;
309 device = inputDevice - 1;
311 for (int i=-1; i<nDevices_; i++) {
313 if ( i == defaultDevice ) continue;
316 if (devices_[device].probed == false) {
317 // If the device wasn't successfully probed before, try it
319 clearDeviceInfo(&devices_[device]);
320 probeDeviceInfo(&devices_[device]);
322 if ( devices_[device].probed )
323 result = probeDeviceOpen(device, mode, channels, sampleRate,
324 format, bufferSize, numberOfBuffers);
325 if (result == SUCCESS) break;
// NOTE(review): this mirrors the output loop above — looks like it should
// test inputDevice, not outputDevice; verify against upstream RtAudio.
326 if ( outputDevice > 0 ) break;
330 if ( result == SUCCESS )
333 // If we get here, all attempted probes failed. Close any opened
334 // devices and clear the stream structure.
335 if ( stream_.mode != UNINITIALIZED ) closeStream();
337 if ( ( outputDevice == 0 && outputChannels > 0 )
338 || ( inputDevice == 0 && inputChannels > 0 ) )
339 sprintf(message_,"RtApi: no devices found for given stream parameters.");
341 sprintf(message_,"RtApi: unable to open specified device(s) with given stream parameters.");
342 error(RtError::INVALID_PARAMETER);
347 int RtApi :: getDeviceCount(void)
349 return devices_.size();
// Return a public RtAudioDeviceInfo snapshot for 1-based device index
// 'device'.  Re-probes a device whose previous probe failed; capability
// fields are filled only when the probe succeeded (info.probed == true).
// Raises INVALID_DEVICE for an out-of-range specifier.
// NOTE(review): closing braces and the final return were elided from
// this listing; code lines kept verbatim.
352 RtAudioDeviceInfo RtApi :: getDeviceInfo( int device )
354 if (device > (int) devices_.size() || device < 1) {
355 sprintf(message_, "RtApi: invalid device specifier (%d)!", device);
356 error(RtError::INVALID_DEVICE);
359 RtAudioDeviceInfo info;
// Convert the caller's 1-based index to the internal 0-based index.
360 int deviceIndex = device - 1;
362 // If the device wasn't successfully probed before, try it now (or again).
363 if (devices_[deviceIndex].probed == false) {
364 clearDeviceInfo(&devices_[deviceIndex]);
365 probeDeviceInfo(&devices_[deviceIndex]);
368 info.name.append( devices_[deviceIndex].name );
369 info.probed = devices_[deviceIndex].probed;
370 if ( info.probed == true ) {
371 info.outputChannels = devices_[deviceIndex].maxOutputChannels;
372 info.inputChannels = devices_[deviceIndex].maxInputChannels;
373 info.duplexChannels = devices_[deviceIndex].maxDuplexChannels;
374 for (unsigned int i=0; i<devices_[deviceIndex].sampleRates.size(); i++)
375 info.sampleRates.push_back( devices_[deviceIndex].sampleRates[i] );
376 info.nativeFormats = devices_[deviceIndex].nativeFormats;
// Flag the default input/output device(s) for the caller.
377 if ( (deviceIndex == getDefaultOutputDevice()) ||
378 (deviceIndex == getDefaultInputDevice()) )
379 info.isDefault = true;
// Return a pointer to the stream's interleaved user buffer.  The
// 'char * const' return type const-qualifies the pointer value itself,
// which has no effect on callers (legacy signature, kept).
// NOTE(review): line(s) between signature and return (likely a stream
// validity check) were elided from this listing.
385 char * const RtApi :: getStreamBuffer(void)
388 return stream_.userBuffer;
391 int RtApi :: getDefaultInputDevice(void)
393 // Should be implemented in subclasses if appropriate.
397 int RtApi :: getDefaultOutputDevice(void)
399 // Should be implemented in subclasses if appropriate.
403 void RtApi :: closeStream(void)
405 // MUST be implemented in subclasses!
408 void RtApi :: probeDeviceInfo( RtApiDevice *info )
410 // MUST be implemented in subclasses!
413 bool RtApi :: probeDeviceOpen( int device, StreamMode mode, int channels,
414 int sampleRate, RtAudioFormat format,
415 int *bufferSize, int numberOfBuffers )
417 // MUST be implemented in subclasses!
422 // *************************************************** //
424 // OS/API-specific methods.
426 // *************************************************** //
428 #if defined(__LINUX_OSS__)
431 #include <sys/stat.h>
432 #include <sys/types.h>
433 #include <sys/ioctl.h>
436 #include <sys/soundcard.h>
440 #define DAC_NAME "/dev/dsp"
441 #define MAX_DEVICES 16
442 #define MAX_CHANNELS 16
444 extern "C" void *ossCallbackHandler(void * ptr);
// Construct the OSS backend: enumerate /dev/dsp* devices (via
// initialize(), called in the elided body line) and fail hard with
// NO_DEVICES_FOUND when none exist.
446 RtApiOss :: RtApiOss()
450 if (nDevices_ <= 0) {
451 sprintf(message_, "RtApiOss: no Linux OSS audio devices found!");
452 error(RtError::NO_DEVICES_FOUND);
// Destructor: tear down any still-open stream.  The call made under this
// condition (presumably closeStream()) was elided from this listing.
456 RtApiOss :: ~RtApiOss()
458 if ( stream_.mode != UNINITIALIZED )
// Enumerate OSS devices by brute force: resolve the /dev/dsp symlink (to
// avoid double-counting its target), then attempt to open /dev/dsp and
// /dev/dsp0../dev/dsp15 for playback or capture, recording each device
// that opens (or is merely busy) into devices_.
// NOTE(review): listing is incomplete (declarations of i/fd/dsplink,
// braces, else branches elided); code lines kept verbatim.
462 void RtApiOss :: initialize(void)
464 // Count cards and devices
467 // We check /dev/dsp before probing devices. /dev/dsp is supposed to
468 // be a link to the "default" audio device, of the form /dev/dsp0,
469 // /dev/dsp1, etc... However, I've seen many cases where /dev/dsp was a
470 // real device, so we need to check for that. Also, sometimes the
471 // link is to /dev/dspx and other times just dspx. I'm not sure how
472 // the latter works, but it does.
473 char device_name[16];
477 if (lstat(DAC_NAME, &dspstat) == 0) {
478 if (S_ISLNK(dspstat.st_mode)) {
// readlink() does not NUL-terminate; the terminator is added below.
479 i = readlink(DAC_NAME, device_name, sizeof(device_name));
481 device_name[i] = '\0';
482 if (i > 8) { // check for "/dev/dspx"
483 if (!strncmp(DAC_NAME, device_name, 8))
484 dsplink = atoi(&device_name[8]);
486 else if (i > 3) { // check for "dspx"
487 if (!strncmp("dsp", device_name, 3))
488 dsplink = atoi(&device_name[3]);
492 sprintf(message_, "RtApiOss: cannot read value of symbolic link %s.", DAC_NAME);
493 error(RtError::SYSTEM_ERROR);
498 sprintf(message_, "RtApiOss: cannot stat %s.", DAC_NAME);
499 error(RtError::SYSTEM_ERROR);
502 // The OSS API doesn't provide a routine for determining the number
503 // of devices. Thus, we'll just pursue a brute force method. The
504 // idea is to start with /dev/dsp(0) and continue with higher device
505 // numbers until we reach MAX_DSP_DEVICES. This should tell us how
506 // many devices we have ... it is not a foolproof scheme, but hopefully
507 // it will work most of the time.
// i == -1 probes plain /dev/dsp; i >= 0 probes /dev/dsp<i>.
510 for (i=-1; i<MAX_DEVICES; i++) {
512 // Probe /dev/dsp first, since it is supposed to be the default device.
514 sprintf(device_name, "%s", DAC_NAME);
515 else if (i == dsplink)
516 continue; // We've already probed this device via /dev/dsp link ... try next device.
518 sprintf(device_name, "%s%d", DAC_NAME, i);
520 // First try to open the device for playback, then record mode.
521 fd = open(device_name, O_WRONLY | O_NONBLOCK);
523 // Open device for playback failed ... either busy or doesn't exist.
524 if (errno != EBUSY && errno != EAGAIN) {
525 // Try to open for capture
526 fd = open(device_name, O_RDONLY | O_NONBLOCK);
528 // Open device for record failed.
529 if (errno != EBUSY && errno != EAGAIN)
// Busy devices still count as present (warn only, keep enumerating).
532 sprintf(message_, "RtApiOss: OSS record device (%s) is busy.", device_name);
533 error(RtError::WARNING);
534 // still count it for now
539 sprintf(message_, "RtApiOss: OSS playback device (%s) is busy.", device_name);
540 error(RtError::WARNING);
541 // still count it for now
545 if (fd >= 0) close(fd);
// Store the name including its trailing NUL (strlen + 1).
547 device.name.append( (const char *)device_name, strlen(device_name)+1);
548 devices_.push_back(device);
// Probe one OSS device's capabilities by brute force: open it for
// playback, capture, and (if both work) duplex, finding min/max channel
// counts via SNDCTL_DSP_CHANNELS; then reopen in the widest direction to
// collect supported sample formats (SNDCTL_DSP_GETFMTS) and rates
// (SNDCTL_DSP_SPEED).  Failures downgrade to DEBUG_WARNING so a bad
// device doesn't abort enumeration.
// NOTE(review): listing is incomplete (braces, else branches, labels,
// close() calls elided); code lines kept verbatim.
553 void RtApiOss :: probeDeviceInfo(RtApiDevice *info)
555 int i, fd, channels, mask;
557 // The OSS API doesn't provide a means for probing the capabilities
558 // of devices. Thus, we'll just pursue a brute force method.
560 // First try for playback
561 fd = open(info->name.c_str(), O_WRONLY | O_NONBLOCK);
563 // Open device failed ... either busy or doesn't exist
564 if (errno == EBUSY || errno == EAGAIN)
565 sprintf(message_, "RtApiOss: OSS playback device (%s) is busy and cannot be probed.",
568 sprintf(message_, "RtApiOss: OSS playback device (%s) open error.", info->name.c_str());
569 error(RtError::DEBUG_WARNING);
573 // We have an open device ... see how many channels it can handle
// Count down from MAX_CHANNELS so the first success is the maximum.
574 for (i=MAX_CHANNELS; i>0; i--) {
576 if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1) {
577 // This would normally indicate some sort of hardware error, but under ALSA's
578 // OSS emulation, it sometimes indicates an invalid channel value. Further,
579 // the returned channel value is not changed. So, we'll ignore the possible
581 continue; // try next channel number
583 // Check to see whether the device supports the requested number of channels
584 if (channels != i ) continue; // try next channel number
585 // If here, we found the largest working channel value
588 info->maxOutputChannels = i;
590 // Now find the minimum number of channels it can handle
591 for (i=1; i<=info->maxOutputChannels; i++) {
593 if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1 || channels != i)
594 continue; // try next channel number
595 // If here, we found the smallest working channel value
598 info->minOutputChannels = i;
602 // Now try for capture
603 fd = open(info->name.c_str(), O_RDONLY | O_NONBLOCK);
605 // Open device for capture failed ... either busy or doesn't exist
606 if (errno == EBUSY || errno == EAGAIN)
607 sprintf(message_, "RtApiOss: OSS capture device (%s) is busy and cannot be probed.",
610 sprintf(message_, "RtApiOss: OSS capture device (%s) open error.", info->name.c_str());
611 error(RtError::DEBUG_WARNING);
612 if (info->maxOutputChannels == 0)
613 // didn't open for playback either ... device invalid
615 goto probe_parameters;
618 // We have the device open for capture ... see how many channels it can handle
619 for (i=MAX_CHANNELS; i>0; i--) {
621 if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1 || channels != i) {
622 continue; // as above
624 // If here, we found a working channel value
627 info->maxInputChannels = i;
629 // Now find the minimum number of channels it can handle
630 for (i=1; i<=info->maxInputChannels; i++) {
632 if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1 || channels != i)
633 continue; // try next channel number
634 // If here, we found the smallest working channel value
637 info->minInputChannels = i;
640 if (info->maxOutputChannels == 0 && info->maxInputChannels == 0) {
641 sprintf(message_, "RtApiOss: device (%s) reports zero channels for input and output.",
643 error(RtError::DEBUG_WARNING);
647 // If device opens for both playback and capture, we determine the channels.
648 if (info->maxOutputChannels == 0 || info->maxInputChannels == 0)
649 goto probe_parameters;
651 fd = open(info->name.c_str(), O_RDWR | O_NONBLOCK);
653 goto probe_parameters;
// SETDUPLEX must precede GETCAPS for DSP_CAP_DUPLEX to be meaningful.
655 ioctl(fd, SNDCTL_DSP_SETDUPLEX, 0);
656 ioctl(fd, SNDCTL_DSP_GETCAPS, &mask);
657 if (mask & DSP_CAP_DUPLEX) {
658 info->hasDuplexSupport = true;
659 // We have the device open for duplex ... see how many channels it can handle
660 for (i=MAX_CHANNELS; i>0; i--) {
662 if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1 || channels != i)
663 continue; // as above
664 // If here, we found a working channel value
667 info->maxDuplexChannels = i;
669 // Now find the minimum number of channels it can handle
670 for (i=1; i<=info->maxDuplexChannels; i++) {
672 if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1 || channels != i)
673 continue; // try next channel number
674 // If here, we found the smallest working channel value
677 info->minDuplexChannels = i;
682 // At this point, we need to figure out the supported data formats
683 // and sample rates. We'll proceed by opening the device in the
684 // direction with the maximum number of channels, or playback if
685 // they are equal. This might limit our sample rate options, but so
688 if (info->maxOutputChannels >= info->maxInputChannels) {
689 fd = open(info->name.c_str(), O_WRONLY | O_NONBLOCK);
690 channels = info->maxOutputChannels;
693 fd = open(info->name.c_str(), O_RDONLY | O_NONBLOCK);
694 channels = info->maxInputChannels;
698 // We've got some sort of conflict ... abort
699 sprintf(message_, "RtApiOss: device (%s) won't reopen during probe.",
701 error(RtError::DEBUG_WARNING);
705 // We have an open device ... set to maximum channels.
707 if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1 || channels != i) {
708 // We've got some sort of conflict ... abort
710 sprintf(message_, "RtApiOss: device (%s) won't revert to previous channel setting.",
712 error(RtError::DEBUG_WARNING);
716 if (ioctl(fd, SNDCTL_DSP_GETFMTS, &mask) == -1) {
718 sprintf(message_, "RtApiOss: device (%s) can't get supported audio formats.",
720 error(RtError::DEBUG_WARNING);
724 // Probe the supported data formats ... we don't care about endian-ness just yet.
726 info->nativeFormats = 0;
727 #if defined (AFMT_S32_BE)
728 // This format does not seem to be in the 2.4 kernel version of OSS soundcard.h
729 if (mask & AFMT_S32_BE) {
730 format = AFMT_S32_BE;
731 info->nativeFormats |= RTAUDIO_SINT32;
734 #if defined (AFMT_S32_LE)
735 /* This format is not in the 2.4.4 kernel version of OSS soundcard.h */
736 if (mask & AFMT_S32_LE) {
737 format = AFMT_S32_LE;
738 info->nativeFormats |= RTAUDIO_SINT32;
741 if (mask & AFMT_S8) {
743 info->nativeFormats |= RTAUDIO_SINT8;
745 if (mask & AFMT_S16_BE) {
746 format = AFMT_S16_BE;
747 info->nativeFormats |= RTAUDIO_SINT16;
749 if (mask & AFMT_S16_LE) {
750 format = AFMT_S16_LE;
751 info->nativeFormats |= RTAUDIO_SINT16;
754 // Check that we have at least one supported format
755 if (info->nativeFormats == 0) {
757 sprintf(message_, "RtApiOss: device (%s) data format not supported by RtAudio.",
759 error(RtError::DEBUG_WARNING);
765 if (ioctl(fd, SNDCTL_DSP_SETFMT, &format) == -1 || format != i) {
767 sprintf(message_, "RtApiOss: device (%s) error setting data format.",
769 error(RtError::DEBUG_WARNING);
773 // Probe the supported sample rates.
774 info->sampleRates.clear();
// A rate counts as supported only if the driver accepts it exactly.
775 for (unsigned int k=0; k<MAX_SAMPLE_RATES; k++) {
776 int speed = SAMPLE_RATES[k];
777 if (ioctl(fd, SNDCTL_DSP_SPEED, &speed) != -1 && speed == (int)SAMPLE_RATES[k])
778 info->sampleRates.push_back(speed);
781 if (info->sampleRates.size() == 0) {
783 sprintf(message_, "RtApiOss: no supported sample rates found for device (%s).",
785 error(RtError::DEBUG_WARNING);
789 // That's all ... close the device and return
// Open one OSS device for OUTPUT or INPUT and configure stream_: pick a
// native sample format (byte-swapping if only the opposite endianness is
// offered), clamp channels to the device minimum, negotiate fragment
// size/count, data format, channels, and rate, then allocate the user
// and (if format/channel conversion is needed) device buffers.  INPUT on
// the device already opened for OUTPUT triggers OSS duplex reopen.
// Returns SUCCESS or FAILURE (error path near the end frees everything).
// NOTE(review): listing is incomplete (declarations of fd/mask/temp/srate,
// braces, else branches, error gotos elided); code lines kept verbatim.
795 bool RtApiOss :: probeDeviceOpen(int device, StreamMode mode, int channels,
796 int sampleRate, RtAudioFormat format,
797 int *bufferSize, int numberOfBuffers)
799 int buffers, buffer_bytes, device_channels, device_format;
801 int *handle = (int *) stream_.apiHandle;
803 const char *name = devices_[device].name.c_str();
// First open non-blocking to detect busy devices without hanging.
806 fd = open(name, O_WRONLY | O_NONBLOCK);
807 else { // mode == INPUT
808 if (stream_.mode == OUTPUT && stream_.device[0] == device) {
809 // We just set the same device for playback ... close and reopen for duplex (OSS only).
812 // First check that the number previously set channels is the same.
813 if (stream_.nUserChannels[0] != channels) {
814 sprintf(message_, "RtApiOss: input/output channels must be equal for OSS duplex device (%s).", name);
817 fd = open(name, O_RDWR | O_NONBLOCK);
820 fd = open(name, O_RDONLY | O_NONBLOCK);
824 if (errno == EBUSY || errno == EAGAIN)
825 sprintf(message_, "RtApiOss: device (%s) is busy and cannot be opened.",
828 sprintf(message_, "RtApiOss: device (%s) cannot be opened.", name);
832 // Now reopen in blocking mode.
835 fd = open(name, O_WRONLY | O_SYNC);
836 else { // mode == INPUT
837 if (stream_.mode == OUTPUT && stream_.device[0] == device)
838 fd = open(name, O_RDWR | O_SYNC);
840 fd = open(name, O_RDONLY | O_SYNC);
844 sprintf(message_, "RtApiOss: device (%s) cannot be opened.", name);
848 // Get the sample format mask
850 if (ioctl(fd, SNDCTL_DSP_GETFMTS, &mask) == -1) {
852 sprintf(message_, "RtApiOss: device (%s) can't get supported audio formats.",
857 // Determine how to set the device format.
858 stream_.userFormat = format;
860 stream_.doByteSwap[mode] = false;
// Prefer the native-endian variant; fall back to the other endianness
// with doByteSwap set so the conversion layer fixes it up.
861 if (format == RTAUDIO_SINT8) {
862 if (mask & AFMT_S8) {
863 device_format = AFMT_S8;
864 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
867 else if (format == RTAUDIO_SINT16) {
868 if (mask & AFMT_S16_NE) {
869 device_format = AFMT_S16_NE;
870 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
872 #if BYTE_ORDER == LITTLE_ENDIAN
873 else if (mask & AFMT_S16_BE) {
874 device_format = AFMT_S16_BE;
875 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
876 stream_.doByteSwap[mode] = true;
879 else if (mask & AFMT_S16_LE) {
880 device_format = AFMT_S16_LE;
881 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
882 stream_.doByteSwap[mode] = true;
886 #if defined (AFMT_S32_NE) && defined (AFMT_S32_LE) && defined (AFMT_S32_BE)
887 else if (format == RTAUDIO_SINT32) {
888 if (mask & AFMT_S32_NE) {
889 device_format = AFMT_S32_NE;
890 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
892 #if BYTE_ORDER == LITTLE_ENDIAN
893 else if (mask & AFMT_S32_BE) {
894 device_format = AFMT_S32_BE;
895 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
896 stream_.doByteSwap[mode] = true;
899 else if (mask & AFMT_S32_LE) {
900 device_format = AFMT_S32_LE;
901 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
902 stream_.doByteSwap[mode] = true;
// Requested format unavailable: fall back to the best native format and
// let the conversion layer translate to/from the user format.
908 if (device_format == -1) {
909 // The user requested format is not natively supported by the device.
910 if (mask & AFMT_S16_NE) {
911 device_format = AFMT_S16_NE;
912 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
914 #if BYTE_ORDER == LITTLE_ENDIAN
915 else if (mask & AFMT_S16_BE) {
916 device_format = AFMT_S16_BE;
917 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
918 stream_.doByteSwap[mode] = true;
921 else if (mask & AFMT_S16_LE) {
922 device_format = AFMT_S16_LE;
923 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
924 stream_.doByteSwap[mode] = true;
927 #if defined (AFMT_S32_NE) && defined (AFMT_S32_LE) && defined (AFMT_S32_BE)
928 else if (mask & AFMT_S32_NE) {
929 device_format = AFMT_S32_NE;
930 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
932 #if BYTE_ORDER == LITTLE_ENDIAN
933 else if (mask & AFMT_S32_BE) {
934 device_format = AFMT_S32_BE;
935 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
936 stream_.doByteSwap[mode] = true;
939 else if (mask & AFMT_S32_LE) {
940 device_format = AFMT_S32_LE;
941 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
942 stream_.doByteSwap[mode] = true;
946 else if (mask & AFMT_S8) {
947 device_format = AFMT_S8;
948 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
952 if (stream_.deviceFormat[mode] == 0) {
953 // This really shouldn't happen ...
955 sprintf(message_, "RtApiOss: device (%s) data format not supported by RtAudio.",
960 // Determine the number of channels for this device. Note that the
961 // channel value requested by the user might be < min_X_Channels.
962 stream_.nUserChannels[mode] = channels;
963 device_channels = channels;
964 if (mode == OUTPUT) {
965 if (channels < devices_[device].minOutputChannels)
966 device_channels = devices_[device].minOutputChannels;
968 else { // mode == INPUT
969 if (stream_.mode == OUTPUT && stream_.device[0] == device) {
970 // We're doing duplex setup here.
971 if (channels < devices_[device].minDuplexChannels)
972 device_channels = devices_[device].minDuplexChannels;
975 if (channels < devices_[device].minInputChannels)
976 device_channels = devices_[device].minInputChannels;
979 stream_.nDeviceChannels[mode] = device_channels;
981 // Attempt to set the buffer size. According to OSS, the minimum
982 // number of buffers is two. The supposed minimum buffer size is 16
983 // bytes, so that will be our lower bound. The argument to this
984 // call is in the form 0xMMMMSSSS (hex), where the buffer size (in
985 // bytes) is given as 2^SSSS and the number of buffers as 2^MMMM.
986 // We'll check the actual value used near the end of the setup
988 buffer_bytes = *bufferSize * formatBytes(stream_.deviceFormat[mode]) * device_channels;
989 if (buffer_bytes < 16) buffer_bytes = 16;
990 buffers = numberOfBuffers;
991 if (buffers < 2) buffers = 2;
// log10(x)/log10(2) == log2(x): encode the fragment-size exponent.
992 temp = ((int) buffers << 16) + (int)(log10((double)buffer_bytes)/log10(2.0));
993 if (ioctl(fd, SNDCTL_DSP_SETFRAGMENT, &temp)) {
995 sprintf(message_, "RtApiOss: error setting fragment size for device (%s).",
999 stream_.nBuffers = buffers;
1001 // Set the data format.
1002 temp = device_format;
1003 if (ioctl(fd, SNDCTL_DSP_SETFMT, &device_format) == -1 || device_format != temp) {
1005 sprintf(message_, "RtApiOss: error setting data format for device (%s).",
1010 // Set the number of channels.
1011 temp = device_channels;
1012 if (ioctl(fd, SNDCTL_DSP_CHANNELS, &device_channels) == -1 || device_channels != temp) {
1014 sprintf(message_, "RtApiOss: error setting %d channels on device (%s).",
1019 // Set the sample rate.
1022 if (ioctl(fd, SNDCTL_DSP_SPEED, &srate) == -1) {
1024 sprintf(message_, "RtApiOss: error setting sample rate = %d on device (%s).",
1029 // Verify the sample rate setup worked.
// Allow up to 100 Hz of driver rounding before declaring failure.
1030 if (abs(srate - temp) > 100) {
1032 sprintf(message_, "RtApiOss: error ... audio device (%s) doesn't support sample rate of %d.",
1036 stream_.sampleRate = sampleRate;
1038 if (ioctl(fd, SNDCTL_DSP_GETBLKSIZE, &buffer_bytes) == -1) {
1040 sprintf(message_, "RtApiOss: error getting buffer size for device (%s).",
1045 // Save buffer size (in sample frames).
1046 *bufferSize = buffer_bytes / (formatBytes(stream_.deviceFormat[mode]) * device_channels);
1047 stream_.bufferSize = *bufferSize;
1049 if (mode == INPUT && stream_.mode == OUTPUT &&
1050 stream_.device[0] == device) {
1051 // We're doing duplex setup here.
1052 stream_.deviceFormat[0] = stream_.deviceFormat[1];
1053 stream_.nDeviceChannels[0] = device_channels;
1056 // Allocate the stream handles if necessary and then save.
1057 if ( stream_.apiHandle == 0 ) {
// Two slots: handle[0] = playback fd, handle[1] = capture fd.
1058 handle = (int *) calloc(2, sizeof(int));
1059 stream_.apiHandle = (void *) handle;
1064 handle = (int *) stream_.apiHandle;
1068 // Set flags for buffer conversion
1069 stream_.doConvertBuffer[mode] = false;
1070 if (stream_.userFormat != stream_.deviceFormat[mode])
1071 stream_.doConvertBuffer[mode] = true;
1072 if (stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode])
1073 stream_.doConvertBuffer[mode] = true;
1075 // Allocate necessary internal buffers
// Size the shared user buffer for the larger of the two directions.
1076 if ( stream_.nUserChannels[0] != stream_.nUserChannels[1] ) {
1079 if (stream_.nUserChannels[0] >= stream_.nUserChannels[1])
1080 buffer_bytes = stream_.nUserChannels[0];
1082 buffer_bytes = stream_.nUserChannels[1];
1084 buffer_bytes *= *bufferSize * formatBytes(stream_.userFormat);
1085 if (stream_.userBuffer) free(stream_.userBuffer);
1086 stream_.userBuffer = (char *) calloc(buffer_bytes, 1);
1087 if (stream_.userBuffer == NULL) {
1089 sprintf(message_, "RtApiOss: error allocating user buffer memory (%s).",
1095 if ( stream_.doConvertBuffer[mode] ) {
1098 bool makeBuffer = true;
1099 if ( mode == OUTPUT )
1100 buffer_bytes = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
1101 else { // mode == INPUT
1102 buffer_bytes = stream_.nDeviceChannels[1] * formatBytes(stream_.deviceFormat[1]);
// Reuse the output-side device buffer if it is already big enough.
1103 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
1104 long bytes_out = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
1105 if ( buffer_bytes < bytes_out ) makeBuffer = false;
1110 buffer_bytes *= *bufferSize;
1111 if (stream_.deviceBuffer) free(stream_.deviceBuffer);
1112 stream_.deviceBuffer = (char *) calloc(buffer_bytes, 1);
1113 if (stream_.deviceBuffer == NULL) {
1115 sprintf(message_, "RtApiOss: error allocating device buffer memory (%s).",
1122 stream_.device[mode] = device;
1123 stream_.state = STREAM_STOPPED;
// Second (input) leg of a duplex open upgrades the stream mode.
1125 if ( stream_.mode == OUTPUT && mode == INPUT ) {
1126 stream_.mode = DUPLEX;
1127 if (stream_.device[0] == device)
1131 stream_.mode = mode;
// ---- Error cleanup path: release handles and buffers, warn, fail. ----
1140 stream_.apiHandle = 0;
1143 if (stream_.userBuffer) {
1144 free(stream_.userBuffer);
1145 stream_.userBuffer = 0;
1148 error(RtError::WARNING);
// Close the OSS stream: reset any running device, stop the callback
// thread, close the file descriptors, free the handle array and both
// internal buffers, and mark the stream UNINITIALIZED.  Only warns (never
// throws) because the destructor calls it.
// NOTE(review): listing is incomplete (braces, free(handle) line elided);
// code lines kept verbatim.
1152 void RtApiOss :: closeStream()
1154 // We don't want an exception to be thrown here because this
1155 // function is called by our class destructor. So, do our own
1157 if ( stream_.mode == UNINITIALIZED ) {
1158 sprintf(message_, "RtApiOss::closeStream(): no open stream to close!");
1159 error(RtError::WARNING);
1163 int *handle = (int *) stream_.apiHandle;
1164 if (stream_.state == STREAM_RUNNING) {
// DSP_RESET discards queued audio immediately (vs. draining).
1165 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX)
1166 ioctl(handle[0], SNDCTL_DSP_RESET, 0);
1168 ioctl(handle[1], SNDCTL_DSP_RESET, 0);
1169 stream_.state = STREAM_STOPPED;
// Signal the callback thread to exit, then wait for it.
1172 if (stream_.callbackInfo.usingCallback) {
1173 stream_.callbackInfo.usingCallback = false;
1174 pthread_join(stream_.callbackInfo.thread, NULL);
1178 if (handle[0]) close(handle[0]);
1179 if (handle[1]) close(handle[1]);
1181 stream_.apiHandle = 0;
1184 if (stream_.userBuffer) {
1185 free(stream_.userBuffer);
1186 stream_.userBuffer = 0;
1189 if (stream_.deviceBuffer) {
1190 free(stream_.deviceBuffer);
1191 stream_.deviceBuffer = 0;
1194 stream_.mode = UNINITIALIZED;
// Mark the stream RUNNING under the stream mutex.  OSS needs no explicit
// trigger: playback begins as soon as samples are written.
1197 void RtApiOss :: startStream()
1200 if (stream_.state == STREAM_RUNNING) return;
1202 MUTEX_LOCK(&stream_.mutex);
1204 stream_.state = STREAM_RUNNING;
1206 // No need to do anything else here ... OSS automatically starts
1207 // when fed samples.
1209 MUTEX_UNLOCK(&stream_.mutex);
// Stop the stream by flushing pending output/input with SNDCTL_DSP_POST
// on each open handle; ioctl failure raises DRIVER_ERROR.
1212 void RtApiOss :: stopStream()
1215 if (stream_.state == STREAM_STOPPED) return;
1217 // Change the state before the lock to improve shutdown response
1218 // when using a callback.
1219 stream_.state = STREAM_STOPPED;
1220 MUTEX_LOCK(&stream_.mutex);
1223 int *handle = (int *) stream_.apiHandle;
1224 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
// DSP_POST flushes gently; the commented DSP_SYNC would block until drained.
1225 err = ioctl(handle[0], SNDCTL_DSP_POST, 0);
1226 //err = ioctl(handle[0], SNDCTL_DSP_SYNC, 0);
1228 sprintf(message_, "RtApiOss: error stopping device (%s).",
1229 devices_[stream_.device[0]].name.c_str());
1230 error(RtError::DRIVER_ERROR);
1234 err = ioctl(handle[1], SNDCTL_DSP_POST, 0);
1235 //err = ioctl(handle[1], SNDCTL_DSP_SYNC, 0);
1237 sprintf(message_, "RtApiOss: error stopping device (%s).",
1238 devices_[stream_.device[1]].name.c_str());
1239 error(RtError::DRIVER_ERROR);
1243 MUTEX_UNLOCK(&stream_.mutex);
// Abort the OSS stream.  Body not visible in this listing; presumably it
// delegates to stopStream() -- TODO confirm against the full source.
1246 void RtApiOss :: abortStream()
// Estimate how many frames can be processed without blocking.  Queries
// the driver's free output space (GETOSPACE) and/or available input
// (GETISPACE); in DUPLEX mode the smaller byte count governs.  Returns 0
// when stopped.  (The return statement is hidden in a listing gap.)
1251 int RtApiOss :: streamWillBlock()
1254 if (stream_.state == STREAM_STOPPED) return 0;
1256 MUTEX_LOCK(&stream_.mutex);
1258 int bytes = 0, channels = 0, frames = 0;
1259 audio_buf_info info;
1260 int *handle = (int *) stream_.apiHandle;
1261 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
1262 ioctl(handle[0], SNDCTL_DSP_GETOSPACE, &info);
1264 channels = stream_.nDeviceChannels[0];
1267 if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
1268 ioctl(handle[1], SNDCTL_DSP_GETISPACE, &info);
1269 if (stream_.mode == DUPLEX ) {
// Duplex: both directions must be ready, so take the minimum.
1270 bytes = (bytes < info.bytes) ? bytes : info.bytes;
1271 channels = stream_.nDeviceChannels[0];
1275 channels = stream_.nDeviceChannels[1];
// Convert bytes -> frames, then subtract one buffer already "owed".
// NOTE(review): deviceFormat[0] is used even in INPUT-only mode --
// verify against the full source whether that is intentional.
1279 frames = (int) (bytes / (channels * formatBytes(stream_.deviceFormat[0])));
1280 frames -= stream_.bufferSize;
1281 if (frames < 0) frames = 0;
1283 MUTEX_UNLOCK(&stream_.mutex);
// Advance the stream by one buffer: invoke the user callback (if any),
// then write the output buffer to / read the input buffer from the OSS
// device, performing format conversion and byte swapping as flagged at
// probe time.  Blocking happens naturally in read()/write().
1287 void RtApiOss :: tickStream()
// When stopped, a callback-driven tick just idles so the callback thread
// doesn't spin at 100% CPU.
1292 if (stream_.state == STREAM_STOPPED) {
1293 if (stream_.callbackInfo.usingCallback) usleep(50000); // sleep 50 milliseconds
1296 else if (stream_.callbackInfo.usingCallback) {
1297 RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;
// A non-zero callback return requests that the stream be stopped (acted
// on after the device I/O below).
1298 stopStream = callback(stream_.userBuffer, stream_.bufferSize, stream_.callbackInfo.userData);
1301 MUTEX_LOCK(&stream_.mutex);
1303 // The state might change while waiting on a mutex.
1304 if (stream_.state == STREAM_STOPPED)
1307 int result, *handle;
1310 RtAudioFormat format;
1311 handle = (int *) stream_.apiHandle;
1312 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
1314 // Setup parameters and do buffer conversion if necessary.
1315 if (stream_.doConvertBuffer[0]) {
// User format/channels -> device format/channels before writing.
1316 convertStreamBuffer(OUTPUT);
1317 buffer = stream_.deviceBuffer;
1318 samples = stream_.bufferSize * stream_.nDeviceChannels[0];
1319 format = stream_.deviceFormat[0];
1322 buffer = stream_.userBuffer;
1323 samples = stream_.bufferSize * stream_.nUserChannels[0];
1324 format = stream_.userFormat;
1327 // Do byte swapping if necessary.
1328 if (stream_.doByteSwap[0])
1329 byteSwapBuffer(buffer, samples, format);
1331 // Write samples to device.
1332 result = write(handle[0], buffer, samples * formatBytes(format));
1335 // This could be an underrun, but the basic OSS API doesn't provide a means for determining that.
1336 sprintf(message_, "RtApiOss: audio write error for device (%s).",
1337 devices_[stream_.device[0]].name.c_str());
1338 error(RtError::DRIVER_ERROR);
1342 if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
1344 // Setup parameters.
1345 if (stream_.doConvertBuffer[1]) {
1346 buffer = stream_.deviceBuffer;
1347 samples = stream_.bufferSize * stream_.nDeviceChannels[1];
1348 format = stream_.deviceFormat[1];
1351 buffer = stream_.userBuffer;
1352 samples = stream_.bufferSize * stream_.nUserChannels[1];
1353 format = stream_.userFormat;
1356 // Read samples from device.
1357 result = read(handle[1], buffer, samples * formatBytes(format));
1360 // This could be an overrun, but the basic OSS API doesn't provide a means for determining that.
1361 sprintf(message_, "RtApiOss: audio read error for device (%s).",
1362 devices_[stream_.device[1]].name.c_str());
1363 error(RtError::DRIVER_ERROR);
1366 // Do byte swapping if necessary.
1367 if (stream_.doByteSwap[1])
1368 byteSwapBuffer(buffer, samples, format);
1370 // Do buffer conversion if necessary.
1371 if (stream_.doConvertBuffer[1])
1372 convertStreamBuffer(INPUT);
1376 MUTEX_UNLOCK(&stream_.mutex);
// Honor a stop request made by the callback (outside the lock, since
// stopStream() locks the same mutex).
1378 if (stream_.callbackInfo.usingCallback && stopStream)
// Register a user callback and spawn the joinable callback thread that
// drives tickStream().  Only one callback per stream: a second call just
// warns.  Throws THREAD_ERROR if thread creation fails.
1382 void RtApiOss :: setStreamCallback(RtAudioCallback callback, void *userData)
1386 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
1387 if ( info->usingCallback ) {
1388 sprintf(message_, "RtApiOss: A callback is already set for this stream!");
1389 error(RtError::WARNING);
1393 info->callback = (void *) callback;
1394 info->userData = userData;
1395 info->usingCallback = true;
1396 info->object = (void *) this;
1398 // Set the thread attributes for joinable and realtime scheduling
1399 // priority. The higher priority will only take affect if the
1400 // program is run as root or suid.
1401 pthread_attr_t attr;
1402 pthread_attr_init(&attr);
1403 pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
1404 pthread_attr_setschedpolicy(&attr, SCHED_RR);
// The thread entry point loops on tickStream() until usingCallback is
// cleared (see ossCallbackHandler below).
1406 int err = pthread_create(&(info->thread), &attr, ossCallbackHandler, &stream_.callbackInfo);
1407 pthread_attr_destroy(&attr);
// On failure, roll back the flag so the stream remains usable in
// blocking mode.
1409 info->usingCallback = false;
1410 sprintf(message_, "RtApiOss: error starting callback thread!");
1411 error(RtError::THREAD_ERROR);
// Remove the user callback: stop the stream if running, signal the
// callback thread to exit, join it, and clear the callback bookkeeping.
// No-op when no callback is installed.
1415 void RtApiOss :: cancelStreamCallback()
1419 if (stream_.callbackInfo.usingCallback) {
1421 if (stream_.state == STREAM_RUNNING)
1424 MUTEX_LOCK(&stream_.mutex);
// Clearing the flag makes the ossCallbackHandler loop terminate; join
// guarantees the thread is gone before callback/userData are nulled.
1426 stream_.callbackInfo.usingCallback = false;
1427 pthread_join(stream_.callbackInfo.thread, NULL);
1428 stream_.callbackInfo.thread = 0;
1429 stream_.callbackInfo.callback = NULL;
1430 stream_.callbackInfo.userData = NULL;
1432 MUTEX_UNLOCK(&stream_.mutex);
// Entry point for the OSS callback thread.  Repeatedly ticks the stream
// until cancelStreamCallback()/closeStream() clears usingCallback.  Any
// RtError thrown by tickStream() is reported and terminates the loop
// rather than leaking out of the thread.
1436 extern "C" void *ossCallbackHandler(void *ptr)
1438 CallbackInfo *info = (CallbackInfo *) ptr;
1439 RtApiOss *object = (RtApiOss *) info->object;
1440 bool *usingCallback = &info->usingCallback;
1442 while ( *usingCallback ) {
// Allow pthread_cancel() to take effect at a safe point each iteration.
1443 pthread_testcancel();
1445 object->tickStream();
1447 catch (RtError &exception) {
1448 fprintf(stderr, "\nRtApiOss: callback thread error (%s) ... closing thread.\n\n",
1449 exception.getMessageString());
1457 //******************** End of __LINUX_OSS__ *********************//
1460 #if defined(__MACOSX_CORE__)
1463 // The OS X CoreAudio API is designed to use a separate callback
1464 // procedure for each of its audio devices. A single RtAudio duplex
1465 // stream using two different devices is supported here, though it
1466 // cannot be guaranteed to always behave correctly because we cannot
1467 // synchronize these two callbacks. This same functionality can be
1468 // achieved with better synchrony by opening two separate streams for
1469 // the devices and using RtAudio blocking calls (i.e. tickStream()).
1471 // A property listener is installed for over/underrun information.
1472 // However, no functionality is currently provided to allow property
1473 // listeners to trigger user handlers because it is unclear what could
1474 // be done if a critical stream parameter (buffer size, sample rate,
1475 // device disconnect) notification arrived. The listeners entail
1476 // quite a bit of extra code and most likely, a user program wouldn't
1477 // be prepared for the result anyway.
1479 // A structure to hold various information related to the CoreAudio API
// NOTE(review): the struct declaration itself falls in a gap of this
// listing; only the condition member and part of the constructor's
// initializer list are visible.  The condition variable is waited on by
// tickStream() (see pthread_cond_wait further below).
1486 pthread_cond_t condition;
1489 :stopStream(false), xrun(false), deviceBuffer(0) {}
// Construct the CoreAudio API object.  Device enumeration (which sets
// nDevices_) happens before this check; throws NO_DEVICES_FOUND when no
// CoreAudio devices exist.
1492 RtApiCore :: RtApiCore()
1496 if (nDevices_ <= 0) {
1497 sprintf(message_, "RtApiCore: no Macintosh OS-X Core Audio devices found!");
1498 error(RtError::NO_DEVICES_FOUND);
// Destructor: close any open stream first, then release the per-device
// AudioDeviceID allocations made in initialize().
1502 RtApiCore :: ~RtApiCore()
1504 // The subclass destructor gets called before the base class
1505 // destructor, so close an existing stream before deallocating
1506 // apiDeviceId memory.
1507 if ( stream_.mode != UNINITIALIZED ) closeStream();
1509 // Free our allocated apiDeviceId memory.
1511 for ( unsigned int i=0; i<devices_.size(); i++ ) {
// Each apiDeviceId was malloc'd in initialize(); the free() call falls
// in a gap of this listing.
1512 id = (AudioDeviceID *) devices_[i].apiDeviceId;
// Enumerate CoreAudio devices: count them, fetch the AudioDeviceID
// array, and create one RtApiDevice entry per device with a heap copy of
// its AudioDeviceID stashed in apiDeviceId (freed in the destructor).
// Throws SYSTEM_ERROR / MEMORY_ERROR on hardware-query or malloc failure.
1517 void RtApiCore :: initialize(void)
1519 OSStatus err = noErr;
1521 AudioDeviceID *deviceList = NULL;
1524 // Find out how many audio devices there are, if any.
1525 err = AudioHardwareGetPropertyInfo(kAudioHardwarePropertyDevices, &dataSize, NULL);
1527 sprintf(message_, "RtApiCore: OS-X error getting device info!");
1528 error(RtError::SYSTEM_ERROR);
// The property size tells us the device count.
1531 nDevices_ = dataSize / sizeof(AudioDeviceID);
1532 if (nDevices_ == 0) return;
1534 // Make space for the devices we are about to get.
1535 deviceList = (AudioDeviceID *) malloc( dataSize );
1536 if (deviceList == NULL) {
1537 sprintf(message_, "RtApiCore: memory allocation error during initialization!");
1538 error(RtError::MEMORY_ERROR);
1541 // Get the array of AudioDeviceIDs.
1542 err = AudioHardwareGetProperty(kAudioHardwarePropertyDevices, &dataSize, (void *) deviceList);
1545 sprintf(message_, "RtApiCore: OS-X error getting device properties!");
1546 error(RtError::SYSTEM_ERROR);
1549 // Create list of device structures and write device identifiers.
1552 for (int i=0; i<nDevices_; i++) {
1553 devices_.push_back(device);
1554 id = (AudioDeviceID *) malloc( sizeof(AudioDeviceID) );
1555 *id = deviceList[i];
1556 devices_[i].apiDeviceId = (void *) id;
// Return the index (into devices_) of the system default input device.
// Warns and falls through if the hardware query fails; the fallback
// return value lies in a gap of this listing.
1562 int RtApiCore :: getDefaultInputDevice(void)
1564 AudioDeviceID id, *deviceId;
1565 UInt32 dataSize = sizeof( AudioDeviceID );
1567 OSStatus result = AudioHardwareGetProperty( kAudioHardwarePropertyDefaultInputDevice,
1570 if (result != noErr) {
1571 sprintf( message_, "RtApiCore: OS-X error getting default input device." );
1572 error(RtError::WARNING);
// Map the returned AudioDeviceID back to our device list index.
1576 for ( int i=0; i<nDevices_; i++ ) {
1577 deviceId = (AudioDeviceID *) devices_[i].apiDeviceId;
1578 if ( id == *deviceId ) return i;
// Return the index (into devices_) of the system default output device.
// Mirrors getDefaultInputDevice() with the output-device property.
1584 int RtApiCore :: getDefaultOutputDevice(void)
1586 AudioDeviceID id, *deviceId;
1587 UInt32 dataSize = sizeof( AudioDeviceID );
1589 OSStatus result = AudioHardwareGetProperty( kAudioHardwarePropertyDefaultOutputDevice,
1592 if (result != noErr) {
1593 sprintf( message_, "RtApiCore: OS-X error getting default output device." );
1594 error(RtError::WARNING);
// Map the returned AudioDeviceID back to our device list index.
1598 for ( int i=0; i<nDevices_; i++ ) {
1599 deviceId = (AudioDeviceID *) devices_[i].apiDeviceId;
1600 if ( id == *deviceId ) return i;
// File-local probe: ask the device whether it supports the stream format
// in *desc via kAudioDevicePropertyStreamFormatSupported.  For duplex
// use, the format is checked a second time against the input side.  The
// boolean returns fall in gaps of this listing.
1606 static bool deviceSupportsFormat( AudioDeviceID id, bool isInput,
1607 AudioStreamBasicDescription *desc, bool isDuplex )
1609 OSStatus result = noErr;
1610 UInt32 dataSize = sizeof( AudioStreamBasicDescription );
1612 result = AudioDeviceGetProperty( id, 0, isInput,
1613 kAudioDevicePropertyStreamFormatSupported,
1616 if (result == kAudioHardwareNoError) {
// Duplex: the input direction must also accept the format.
1618 result = AudioDeviceGetProperty( id, 0, true,
1619 kAudioDevicePropertyStreamFormatSupported,
1623 if (result != kAudioHardwareNoError)
// Fill in an RtApiDevice record for one CoreAudio device: its display
// name ("manufacturer: name"), input/output/duplex channel counts,
// supported sample rates, and supported native data formats.  All
// failures are reported as DEBUG_WARNINGs so probing other devices can
// continue; info->probed is set true only on full success.
1632 void RtApiCore :: probeDeviceInfo( RtApiDevice *info )
1634 OSStatus err = noErr;
1636 // Get the device manufacturer and name.
1639 UInt32 dataSize = 256;
1640 AudioDeviceID *id = (AudioDeviceID *) info->apiDeviceId;
1641 err = AudioDeviceGetProperty( *id, 0, false,
1642 kAudioDevicePropertyDeviceManufacturer,
1645 sprintf( message_, "RtApiCore: OS-X error getting device manufacturer." );
1646 error(RtError::DEBUG_WARNING);
// Build "manufacturer: name" in fullname.
// NOTE(review): strncpy(.., 256) followed by strcat/strncat can overrun
// a 256-byte buffer if the manufacturer string fills it -- confirm the
// buffer size against the full source.
1649 strncpy(fullname, name, 256);
1650 strcat(fullname, ": " );
1653 err = AudioDeviceGetProperty( *id, 0, false,
1654 kAudioDevicePropertyDeviceName,
1657 sprintf( message_, "RtApiCore: OS-X error getting device name." );
1658 error(RtError::DEBUG_WARNING);
1661 strncat(fullname, name, 254);
1663 info->name.append( (const char *)fullname, strlen(fullname)+1);
1665 // Get output channel information.
// NOTE(review): minChannels starts at 0, so the "< minChannels" updates
// below can never fire and the "minChannels > 0" guards never pass --
// looks like it should start at a large sentinel; verify upstream.
1666 unsigned int i, minChannels = 0, maxChannels = 0, nStreams = 0;
1667 AudioBufferList *bufferList = nil;
1668 err = AudioDeviceGetPropertyInfo( *id, 0, false,
1669 kAudioDevicePropertyStreamConfiguration,
1671 if (err == noErr && dataSize > 0) {
1672 bufferList = (AudioBufferList *) malloc( dataSize );
1673 if (bufferList == NULL) {
1674 sprintf(message_, "RtApiCore: memory allocation error!");
1675 error(RtError::DEBUG_WARNING);
1679 err = AudioDeviceGetProperty( *id, 0, false,
1680 kAudioDevicePropertyStreamConfiguration,
1681 &dataSize, bufferList );
// Sum the channels across all output streams; track the smallest
// per-stream channel count.
1685 nStreams = bufferList->mNumberBuffers;
1686 for ( i=0; i<nStreams; i++ ) {
1687 maxChannels += bufferList->mBuffers[i].mNumberChannels;
1688 if ( bufferList->mBuffers[i].mNumberChannels < minChannels )
1689 minChannels = bufferList->mBuffers[i].mNumberChannels;
1695 if (err != noErr || dataSize <= 0) {
1696 sprintf( message_, "RtApiCore: OS-X error getting output channels for device (%s).",
1697 info->name.c_str() );
1698 error(RtError::DEBUG_WARNING);
1703 if ( maxChannels > 0 )
1704 info->maxOutputChannels = maxChannels;
1705 if ( minChannels > 0 )
1706 info->minOutputChannels = minChannels;
1709 // Get input channel information.
1711 err = AudioDeviceGetPropertyInfo( *id, 0, true,
1712 kAudioDevicePropertyStreamConfiguration,
1714 if (err == noErr && dataSize > 0) {
1715 bufferList = (AudioBufferList *) malloc( dataSize );
1716 if (bufferList == NULL) {
1717 sprintf(message_, "RtApiCore: memory allocation error!");
1718 error(RtError::DEBUG_WARNING);
1721 err = AudioDeviceGetProperty( *id, 0, true,
1722 kAudioDevicePropertyStreamConfiguration,
1723 &dataSize, bufferList );
// Same channel accounting for the input streams.
1727 nStreams = bufferList->mNumberBuffers;
1728 for ( i=0; i<nStreams; i++ ) {
1729 if ( bufferList->mBuffers[i].mNumberChannels < minChannels )
1730 minChannels = bufferList->mBuffers[i].mNumberChannels;
1731 maxChannels += bufferList->mBuffers[i].mNumberChannels;
1737 if (err != noErr || dataSize <= 0) {
1738 sprintf( message_, "RtApiCore: OS-X error getting input channels for device (%s).",
1739 info->name.c_str() );
1740 error(RtError::DEBUG_WARNING);
1745 if ( maxChannels > 0 )
1746 info->maxInputChannels = maxChannels;
1747 if ( minChannels > 0 )
1748 info->minInputChannels = minChannels;
1751 // If device opens for both playback and capture, we determine the channels.
1752 if (info->maxOutputChannels > 0 && info->maxInputChannels > 0) {
1753 info->hasDuplexSupport = true;
// Duplex limits are the minimum of the two directions.
1754 info->maxDuplexChannels = (info->maxOutputChannels > info->maxInputChannels) ?
1755 info->maxInputChannels : info->maxOutputChannels;
1756 info->minDuplexChannels = (info->minOutputChannels > info->minInputChannels) ?
1757 info->minInputChannels : info->minOutputChannels;
1760 // Probe the device sample rate and data format parameters. The
1761 // core audio query mechanism is performed on a "stream"
1762 // description, which can have a variable number of channels and
1763 // apply to input or output only.
1765 // Create a stream description structure.
1766 AudioStreamBasicDescription description;
1767 dataSize = sizeof( AudioStreamBasicDescription );
1768 memset(&description, 0, sizeof(AudioStreamBasicDescription));
1769 bool isInput = false;
1770 if ( info->maxOutputChannels == 0 ) isInput = true;
1771 bool isDuplex = false;
1772 if ( info->maxDuplexChannels > 0 ) isDuplex = true;
1774 // Determine the supported sample rates.
1775 info->sampleRates.clear();
1776 for (unsigned int k=0; k<MAX_SAMPLE_RATES; k++) {
1777 description.mSampleRate = (double) SAMPLE_RATES[k];
1778 if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
1779 info->sampleRates.push_back( SAMPLE_RATES[k] );
1782 if (info->sampleRates.size() == 0) {
1783 sprintf( message_, "RtApiCore: No supported sample rates found for OS-X device (%s).",
1784 info->name.c_str() );
1785 error(RtError::DEBUG_WARNING);
1789 // Determine the supported data formats.
// Each RtAudio format is tested twice: big-endian first, then with the
// big-endian flag cleared (little-endian).
1790 info->nativeFormats = 0;
1791 description.mFormatID = kAudioFormatLinearPCM;
1792 description.mBitsPerChannel = 8;
1793 description.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked | kLinearPCMFormatFlagIsBigEndian;
1794 if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
1795 info->nativeFormats |= RTAUDIO_SINT8;
1797 description.mFormatFlags &= ~kLinearPCMFormatFlagIsBigEndian;
1798 if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
1799 info->nativeFormats |= RTAUDIO_SINT8;
1802 description.mBitsPerChannel = 16;
1803 description.mFormatFlags |= kLinearPCMFormatFlagIsBigEndian;
1804 if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
1805 info->nativeFormats |= RTAUDIO_SINT16;
1807 description.mFormatFlags &= ~kLinearPCMFormatFlagIsBigEndian;
1808 if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
1809 info->nativeFormats |= RTAUDIO_SINT16;
1812 description.mBitsPerChannel = 32;
1813 description.mFormatFlags |= kLinearPCMFormatFlagIsBigEndian;
1814 if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
1815 info->nativeFormats |= RTAUDIO_SINT32;
1817 description.mFormatFlags &= ~kLinearPCMFormatFlagIsBigEndian;
1818 if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
1819 info->nativeFormats |= RTAUDIO_SINT32;
// 24-bit samples are aligned-high rather than packed.
1822 description.mBitsPerChannel = 24;
1823 description.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsAlignedHigh | kLinearPCMFormatFlagIsBigEndian;
1824 if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
1825 info->nativeFormats |= RTAUDIO_SINT24;
1827 description.mFormatFlags &= ~kLinearPCMFormatFlagIsBigEndian;
1828 if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
1829 info->nativeFormats |= RTAUDIO_SINT24;
1832 description.mBitsPerChannel = 32;
1833 description.mFormatFlags = kLinearPCMFormatFlagIsFloat | kLinearPCMFormatFlagIsPacked | kLinearPCMFormatFlagIsBigEndian;
1834 if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
1835 info->nativeFormats |= RTAUDIO_FLOAT32;
1837 description.mFormatFlags &= ~kLinearPCMFormatFlagIsBigEndian;
1838 if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
1839 info->nativeFormats |= RTAUDIO_FLOAT32;
1842 description.mBitsPerChannel = 64;
1843 description.mFormatFlags |= kLinearPCMFormatFlagIsBigEndian;
1844 if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
1845 info->nativeFormats |= RTAUDIO_FLOAT64;
1847 description.mFormatFlags &= ~kLinearPCMFormatFlagIsBigEndian;
1848 if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
1849 info->nativeFormats |= RTAUDIO_FLOAT64;
1852 // Check that we have at least one supported format.
1853 if (info->nativeFormats == 0) {
1854 sprintf(message_, "RtApiCore: OS-X device (%s) data format not supported by RtAudio.",
1855 info->name.c_str());
1856 error(RtError::DEBUG_WARNING);
1860 info->probed = true;
// CoreAudio IOProc trampoline: unpacks the CallbackInfo registered via
// AudioDeviceAddIOProc and forwards the device buffers to the RtApiCore
// object's callbackEvent().  RtErrors are caught here so an exception
// never propagates into the HAL; they map to an unspecified-error status.
1863 OSStatus callbackHandler(AudioDeviceID inDevice,
1864 const AudioTimeStamp* inNow,
1865 const AudioBufferList* inInputData,
1866 const AudioTimeStamp* inInputTime,
1867 AudioBufferList* outOutputData,
1868 const AudioTimeStamp* inOutputTime,
1871 CallbackInfo *info = (CallbackInfo *) infoPointer;
1873 RtApiCore *object = (RtApiCore *) info->object;
1875 object->callbackEvent( inDevice, (void *)inInputData, (void *)outOutputData );
1877 catch (RtError &exception) {
1878 fprintf(stderr, "\nRtApiCore: callback handler error (%s)!\n\n", exception.getMessageString());
1879 return kAudioHardwareUnspecifiedError;
1882 return kAudioHardwareNoError;
// Property listener installed for kAudioDeviceProcessorOverload: logs an
// over/underrun to stderr and latches it in the stream's CoreHandle xrun
// flag.  No user notification beyond the flag is attempted.
1885 OSStatus deviceListener(AudioDeviceID inDevice,
1888 AudioDevicePropertyID propertyID,
1889 void* handlePointer)
1891 CoreHandle *handle = (CoreHandle *) handlePointer;
1892 if ( propertyID == kAudioDeviceProcessorOverload ) {
1894 fprintf(stderr, "\nRtApiCore: OS-X audio input overrun detected!\n");
1896 fprintf(stderr, "\nRtApiCore: OS-X audio output underrun detected!\n");
1897 handle->xrun = true;
1900 return kAudioHardwareNoError;
// Open one direction (OUTPUT or INPUT) of a CoreAudio stream on the
// given device: locate a device stream with enough channels (or a run of
// mono streams, enabling de-interleave mode), negotiate the buffer size
// and stream format (device side is always FLOAT32), allocate the
// CoreHandle and user/device buffers, register the IOProc and the
// overload listener, and fill in the stream_ bookkeeping.  Failures are
// reported as DEBUG_WARNINGs/WARNINGs; cleanup at the end releases
// whatever was allocated.  (Returns and several braces fall in gaps of
// this listing.)
1903 bool RtApiCore :: probeDeviceOpen( int device, StreamMode mode, int channels,
1904 int sampleRate, RtAudioFormat format,
1905 int *bufferSize, int numberOfBuffers )
1907 // Setup for stream mode.
1908 bool isInput = false;
1909 AudioDeviceID id = *((AudioDeviceID *) devices_[device].apiDeviceId);
1910 if ( mode == INPUT ) isInput = true;
1912 // Search for a stream which contains the desired number of channels.
1913 OSStatus err = noErr;
1915 unsigned int deviceChannels, nStreams = 0;
1916 UInt32 iChannel = 0, iStream = 0;
1917 AudioBufferList *bufferList = nil;
1918 err = AudioDeviceGetPropertyInfo( id, 0, isInput,
1919 kAudioDevicePropertyStreamConfiguration,
1922 if (err == noErr && dataSize > 0) {
1923 bufferList = (AudioBufferList *) malloc( dataSize );
1924 if (bufferList == NULL) {
1925 sprintf(message_, "RtApiCore: memory allocation error in probeDeviceOpen()!");
1926 error(RtError::DEBUG_WARNING);
1929 err = AudioDeviceGetProperty( id, 0, isInput,
1930 kAudioDevicePropertyStreamConfiguration,
1931 &dataSize, bufferList );
// First pass: find a single device stream wide enough for the request;
// iChannel accumulates the starting channel offset of that stream.
1934 stream_.deInterleave[mode] = false;
1935 nStreams = bufferList->mNumberBuffers;
1936 for ( iStream=0; iStream<nStreams; iStream++ ) {
1937 if ( bufferList->mBuffers[iStream].mNumberChannels >= (unsigned int) channels ) break;
1938 iChannel += bufferList->mBuffers[iStream].mNumberChannels;
1940 // If we didn't find a single stream above, see if we can meet
1941 // the channel specification in mono mode (i.e. using separate
1942 // non-interleaved buffers). This can only work if there are N
1943 // consecutive one-channel streams, where N is the number of
1944 // desired channels.
1946 if ( iStream >= nStreams && nStreams >= (unsigned int) channels ) {
1948 for ( iStream=0; iStream<nStreams; iStream++ ) {
1949 if ( bufferList->mBuffers[iStream].mNumberChannels == 1 )
1953 if ( counter == channels ) {
// Rewind to the first of the consecutive mono streams.
1954 iStream -= channels - 1;
1955 iChannel -= channels - 1;
1956 stream_.deInterleave[mode] = true;
1959 iChannel += bufferList->mBuffers[iStream].mNumberChannels;
1964 if (err != noErr || dataSize <= 0) {
1965 if ( bufferList ) free( bufferList );
1966 sprintf( message_, "RtApiCore: OS-X error getting channels for device (%s).",
1967 devices_[device].name.c_str() );
1968 error(RtError::DEBUG_WARNING);
1972 if (iStream >= nStreams) {
1974 sprintf( message_, "RtApiCore: unable to find OS-X audio stream on device (%s) for requested channels (%d).",
1975 devices_[device].name.c_str(), channels );
1976 error(RtError::DEBUG_WARNING);
1980 // This is ok even for mono mode ... it gets updated later.
1981 deviceChannels = bufferList->mBuffers[iStream].mNumberChannels;
1984 // Determine the buffer size.
1985 AudioValueRange bufferRange;
1986 dataSize = sizeof(AudioValueRange);
1987 err = AudioDeviceGetProperty( id, 0, isInput,
1988 kAudioDevicePropertyBufferSizeRange,
1989 &dataSize, &bufferRange);
1991 sprintf( message_, "RtApiCore: OS-X error getting buffer size range for device (%s).",
1992 devices_[device].name.c_str() );
1993 error(RtError::DEBUG_WARNING);
// Clamp the requested byte size into the device's allowed range.  The
// device side always runs FLOAT32 (see below).
1997 long bufferBytes = *bufferSize * deviceChannels * formatBytes(RTAUDIO_FLOAT32);
1998 if (bufferRange.mMinimum > bufferBytes) bufferBytes = (int) bufferRange.mMinimum;
1999 else if (bufferRange.mMaximum < bufferBytes) bufferBytes = (int) bufferRange.mMaximum;
2001 // Set the buffer size. For mono mode, I'm assuming we only need to
2002 // make this setting for the first channel.
2003 UInt32 theSize = (UInt32) bufferBytes;
2004 dataSize = sizeof( UInt32);
2005 err = AudioDeviceSetProperty(id, NULL, 0, isInput,
2006 kAudioDevicePropertyBufferSize,
2007 dataSize, &theSize);
2009 sprintf( message_, "RtApiCore: OS-X error setting the buffer size for device (%s).",
2010 devices_[device].name.c_str() );
2011 error(RtError::DEBUG_WARNING);
2015 // If attempting to setup a duplex stream, the bufferSize parameter
2016 // MUST be the same in both directions!
2017 *bufferSize = bufferBytes / ( deviceChannels * formatBytes(RTAUDIO_FLOAT32) );
2018 if ( stream_.mode == OUTPUT && mode == INPUT && *bufferSize != stream_.bufferSize ) {
2019 sprintf( message_, "RtApiCore: OS-X error setting buffer size for duplex stream on device (%s).",
2020 devices_[device].name.c_str() );
2021 error(RtError::DEBUG_WARNING);
2025 stream_.bufferSize = *bufferSize;
2026 stream_.nBuffers = 1;
2028 // Set the stream format description. Do for each channel in mono mode.
2029 AudioStreamBasicDescription description;
2030 dataSize = sizeof( AudioStreamBasicDescription );
2031 if ( stream_.deInterleave[mode] ) nStreams = channels;
2033 for ( unsigned int i=0; i<nStreams; i++, iChannel++ ) {
2035 err = AudioDeviceGetProperty( id, iChannel, isInput,
2036 kAudioDevicePropertyStreamFormat,
2037 &dataSize, &description );
2039 sprintf( message_, "RtApiCore: OS-X error getting stream format for device (%s).",
2040 devices_[device].name.c_str() );
2041 error(RtError::DEBUG_WARNING);
2045 // Set the sample rate and data format id.
2046 description.mSampleRate = (double) sampleRate;
2047 description.mFormatID = kAudioFormatLinearPCM;
2048 err = AudioDeviceSetProperty( id, NULL, iChannel, isInput,
2049 kAudioDevicePropertyStreamFormat,
2050 dataSize, &description );
2052 sprintf( message_, "RtApiCore: OS-X error setting sample rate or data format for device (%s).",
2053 devices_[device].name.c_str() );
2054 error(RtError::DEBUG_WARNING);
2059 // Check whether we need byte-swapping (assuming OS-X host is big-endian).
2060 iChannel -= nStreams;
2061 err = AudioDeviceGetProperty( id, iChannel, isInput,
2062 kAudioDevicePropertyStreamFormat,
2063 &dataSize, &description );
2065 sprintf( message_, "RtApiCore: OS-X error getting stream format for device (%s).", devices_[device].name.c_str() );
2066 error(RtError::DEBUG_WARNING);
2070 stream_.doByteSwap[mode] = false;
// NOTE(review): `!` binds tighter than `&`, so this evaluates
// (!mFormatFlags) & flag, not !(mFormatFlags & flag) -- the intended
// "format is little-endian" test; verify against the upstream fix.
2071 if ( !description.mFormatFlags & kLinearPCMFormatFlagIsBigEndian )
2072 stream_.doByteSwap[mode] = true;
2074 // From the CoreAudio documentation, PCM data must be supplied as
2076 stream_.userFormat = format;
2077 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
2079 if ( stream_.deInterleave[mode] ) // mono mode
2080 stream_.nDeviceChannels[mode] = channels;
2082 stream_.nDeviceChannels[mode] = description.mChannelsPerFrame;
2083 stream_.nUserChannels[mode] = channels;
2085 // Set flags for buffer conversion.
2086 stream_.doConvertBuffer[mode] = false;
2087 if (stream_.userFormat != stream_.deviceFormat[mode])
2088 stream_.doConvertBuffer[mode] = true;
2089 if (stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode])
2090 stream_.doConvertBuffer[mode] = true;
2091 if (stream_.nUserChannels[mode] > 1 && stream_.deInterleave[mode])
2092 stream_.doConvertBuffer[mode] = true;
2094 // Allocate our CoreHandle structure for the stream.
2096 if ( stream_.apiHandle == 0 ) {
2097 handle = (CoreHandle *) calloc(1, sizeof(CoreHandle));
2098 if ( handle == NULL ) {
2099 sprintf(message_, "RtApiCore: OS-X error allocating coreHandle memory (%s).",
2100 devices_[device].name.c_str());
2103 handle->index[0] = 0;
2104 handle->index[1] = 0;
// The condition variable is used by tickStream() to block until the
// IOProc has consumed/produced a buffer.
2105 if ( pthread_cond_init(&handle->condition, NULL) ) {
2106 sprintf(message_, "RtApiCore: error initializing pthread condition variable (%s).",
2107 devices_[device].name.c_str());
2110 stream_.apiHandle = (void *) handle;
2113 handle = (CoreHandle *) stream_.apiHandle;
// Remember which device stream index serves this direction.
2114 handle->index[mode] = iStream;
2116 // Allocate necessary internal buffers.
2117 if ( stream_.nUserChannels[0] != stream_.nUserChannels[1] ) {
// The shared user buffer must fit the wider of the two directions.
2120 if (stream_.nUserChannels[0] >= stream_.nUserChannels[1])
2121 buffer_bytes = stream_.nUserChannels[0];
2123 buffer_bytes = stream_.nUserChannels[1];
2125 buffer_bytes *= *bufferSize * formatBytes(stream_.userFormat);
2126 if (stream_.userBuffer) free(stream_.userBuffer);
2127 stream_.userBuffer = (char *) calloc(buffer_bytes, 1);
2128 if (stream_.userBuffer == NULL) {
2129 sprintf(message_, "RtApiCore: OS-X error allocating user buffer memory (%s).",
2130 devices_[device].name.c_str());
2135 if ( stream_.deInterleave[mode] ) {
// Only (re)allocate the device buffer when the existing one (from an
// earlier OUTPUT open) is too small.
2138 bool makeBuffer = true;
2139 if ( mode == OUTPUT )
2140 buffer_bytes = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
2141 else { // mode == INPUT
2142 buffer_bytes = stream_.nDeviceChannels[1] * formatBytes(stream_.deviceFormat[1]);
2143 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
2144 long bytes_out = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
2145 if ( buffer_bytes < bytes_out ) makeBuffer = false;
2150 buffer_bytes *= *bufferSize;
2151 if (stream_.deviceBuffer) free(stream_.deviceBuffer);
2152 stream_.deviceBuffer = (char *) calloc(buffer_bytes, 1);
2153 if (stream_.deviceBuffer == NULL) {
2154 sprintf(message_, "RtApiCore: error allocating device buffer memory (%s).",
2155 devices_[device].name.c_str());
2159 // If not de-interleaving, we point stream_.deviceBuffer to the
2160 // OS X supplied device buffer before doing any necessary data
2161 // conversions. This presents a problem if we have a duplex
2162 // stream using one device which needs de-interleaving and
2163 // another device which doesn't. So, save a pointer to our own
2164 // device buffer in the CallbackInfo structure.
2165 handle->deviceBuffer = stream_.deviceBuffer;
2169 stream_.sampleRate = sampleRate;
2170 stream_.device[mode] = device;
2171 stream_.state = STREAM_STOPPED;
2172 stream_.callbackInfo.object = (void *) this;
2174 if ( stream_.mode == OUTPUT && mode == INPUT && stream_.device[0] == device )
2175 // Only one callback procedure per device.
2176 stream_.mode = DUPLEX;
2178 err = AudioDeviceAddIOProc( id, callbackHandler, (void *) &stream_.callbackInfo );
2180 sprintf( message_, "RtApiCore: OS-X error setting callback for device (%s).", devices_[device].name.c_str() );
2181 error(RtError::DEBUG_WARNING);
2184 if ( stream_.mode == OUTPUT && mode == INPUT )
2185 stream_.mode = DUPLEX;
2187 stream_.mode = mode;
2190 // Setup the device property listener for over/underload.
2191 err = AudioDeviceAddPropertyListener( id, iChannel, isInput,
2192 kAudioDeviceProcessorOverload,
2193 deviceListener, (void *) handle );
// Error-path cleanup: release everything allocated above before
// reporting the warning.
2199 pthread_cond_destroy(&handle->condition);
2201 stream_.apiHandle = 0;
2204 if (stream_.userBuffer) {
2205 free(stream_.userBuffer);
2206 stream_.userBuffer = 0;
2209 error(RtError::WARNING);
// Tear down the CoreAudio stream: stop and remove the IOProc on each
// device (once per device -- a same-device duplex stream shares one),
// free the buffers, destroy the condition variable and release the
// CoreHandle.  Destructor-safe: a missing stream only warns.
2213 void RtApiCore :: closeStream()
2215 // We don't want an exception to be thrown here because this
2216 // function is called by our class destructor. So, do our own
2218 if ( stream_.mode == UNINITIALIZED ) {
2219 sprintf(message_, "RtApiCore::closeStream(): no open stream to close!");
2220 error(RtError::WARNING);
2224 AudioDeviceID id = *( (AudioDeviceID *) devices_[stream_.device[0]].apiDeviceId );
2225 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
2226 if (stream_.state == STREAM_RUNNING)
2227 AudioDeviceStop( id, callbackHandler );
2228 AudioDeviceRemoveIOProc( id, callbackHandler );
// Skip the input device when it is the same as the output device --
// that direction shares the IOProc removed above.
2231 id = *( (AudioDeviceID *) devices_[stream_.device[1]].apiDeviceId );
2232 if (stream_.mode == INPUT || ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1]) ) {
2233 if (stream_.state == STREAM_RUNNING)
2234 AudioDeviceStop( id, callbackHandler );
2235 AudioDeviceRemoveIOProc( id, callbackHandler );
2238 if (stream_.userBuffer) {
2239 free(stream_.userBuffer);
2240 stream_.userBuffer = 0;
// deviceBuffer is only owned by us in de-interleave mode; otherwise it
// pointed at the OS-supplied buffer (see probeDeviceOpen).
2243 if ( stream_.deInterleave[0] || stream_.deInterleave[1] ) {
2244 free(stream_.deviceBuffer);
2245 stream_.deviceBuffer = 0;
2248 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
2250 // Destroy pthread condition variable and free the CoreHandle structure.
2252 pthread_cond_destroy(&handle->condition);
2254 stream_.apiHandle = 0;
2257 stream_.mode = UNINITIALIZED;
// Start the CoreAudio stream by starting the registered IOProc on each
// device (the input device only when distinct from the output device).
// Throws DRIVER_ERROR on failure; clears the handle's stopStream flag.
2260 void RtApiCore :: startStream()
2263 if (stream_.state == STREAM_RUNNING) return;
2265 MUTEX_LOCK(&stream_.mutex);
2269 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
2271 id = *( (AudioDeviceID *) devices_[stream_.device[0]].apiDeviceId );
2272 err = AudioDeviceStart(id, callbackHandler);
2274 sprintf(message_, "RtApiCore: OS-X error starting callback procedure on device (%s).",
2275 devices_[stream_.device[0]].name.c_str());
2276 MUTEX_UNLOCK(&stream_.mutex);
2277 error(RtError::DRIVER_ERROR);
2281 if (stream_.mode == INPUT || ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1]) ) {
2283 id = *( (AudioDeviceID *) devices_[stream_.device[1]].apiDeviceId );
2284 err = AudioDeviceStart(id, callbackHandler);
// NOTE(review): this error message names device[0] although the failure
// is on device[1] -- verify intended behavior.
2286 sprintf(message_, "RtApiCore: OS-X error starting input callback procedure on device (%s).",
2287 devices_[stream_.device[0]].name.c_str());
2288 MUTEX_UNLOCK(&stream_.mutex);
2289 error(RtError::DRIVER_ERROR);
2293 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
2294 handle->stopStream = false;
2295 stream_.state = STREAM_RUNNING;
2297 MUTEX_UNLOCK(&stream_.mutex);
// Stop the CoreAudio stream via AudioDeviceStop() on the output device
// and, when distinct, the input device.  The state flag is flipped before
// taking the mutex so a concurrently-running callback can notice the stop
// without waiting for the lock.
2300 void RtApiCore :: stopStream()
2303 if (stream_.state == STREAM_STOPPED) return;   // already stopped: nothing to do
2305 // Change the state before the lock to improve shutdown response
2306 // when using a callback.
2307 stream_.state = STREAM_STOPPED;
2308 MUTEX_LOCK(&stream_.mutex);
2312 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
2314 id = *( (AudioDeviceID *) devices_[stream_.device[0]].apiDeviceId );
2315 err = AudioDeviceStop(id, callbackHandler);
2317 sprintf(message_, "RtApiCore: OS-X error stopping callback procedure on device (%s).",
2318 devices_[stream_.device[0]].name.c_str());
2319 MUTEX_UNLOCK(&stream_.mutex);
2320 error(RtError::DRIVER_ERROR);
2324 if (stream_.mode == INPUT || ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1]) ) {
2326 id = *( (AudioDeviceID *) devices_[stream_.device[1]].apiDeviceId );
2327 err = AudioDeviceStop(id, callbackHandler);
2329 sprintf(message_, "RtApiCore: OS-X error stopping input callback procedure on device (%s).",
// NOTE(review): input error path prints device[0]'s name; device[1]
// looks intended — confirm before changing.
2330 devices_[stream_.device[0]].name.c_str());
2331 MUTEX_UNLOCK(&stream_.mutex);
2332 error(RtError::DRIVER_ERROR);
2336 MUTEX_UNLOCK(&stream_.mutex);
// Abort the stream immediately.  NOTE(review): the body is elided in this
// excerpt; presumably it delegates to stopStream() — confirm in full source.
2339 void RtApiCore :: abortStream()
// Blocking "tick" for the non-callback (blocking) API: waits on the
// CoreHandle condition variable until the audio callback signals that one
// buffer period has been processed.  Warns and is a no-op when a user
// callback is installed.
2344 void RtApiCore :: tickStream()
2348 if (stream_.state == STREAM_STOPPED) return;
2350 if (stream_.callbackInfo.usingCallback) {
2351 sprintf(message_, "RtApiCore: tickStream() should not be used when a callback function is set!");
2352 error(RtError::WARNING);
2356 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
2358 MUTEX_LOCK(&stream_.mutex);
// pthread_cond_wait atomically releases the stream mutex while waiting and
// re-acquires it when signaled by callbackEvent().
2360 pthread_cond_wait(&handle->condition, &stream_.mutex);
2362 MUTEX_UNLOCK(&stream_.mutex);
// Per-device CoreAudio I/O proc body.  Invoked (via the registered
// callbackHandler) once per hardware buffer for each open device.  It:
//   1) optionally runs the user callback to refill userBuffer,
//   2) converts / byte-swaps / de-interleaves output data into the
//      CoreAudio AudioBufferList for OUTPUT or DUPLEX,
//   3) gathers / converts input data from the AudioBufferList for INPUT
//      or DUPLEX,
//   4) signals tickStream() waiters in blocking mode.
// In DUPLEX with distinct devices the user callback runs only for the
// output device's invocation (deviceId == id below).
// NOTE(review): several lines (stop-stream body, else-branches, closing
// braces) are elided in this excerpt.
2365 void RtApiCore :: callbackEvent( AudioDeviceID deviceId, void *inData, void *outData )
2369 if (stream_.state == STREAM_STOPPED) return;
2371 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
2372 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
2373 AudioBufferList *inBufferList = (AudioBufferList *) inData;
2374 AudioBufferList *outBufferList = (AudioBufferList *) outData;
2376 if ( info->usingCallback && handle->stopStream ) {
2377 // Check if the stream should be stopped (via the previous user
2378 // callback return value). We stop the stream here, rather than
2379 // after the function call, so that output data can first be
2385 MUTEX_LOCK(&stream_.mutex);
2387 // Invoke user callback first, to get fresh output data. Don't
2388 // invoke the user callback if duplex mode AND the input/output devices
2389 // are different AND this function is called for the input device.
2390 AudioDeviceID id = *( (AudioDeviceID *) devices_[stream_.device[0]].apiDeviceId );
2391 if ( info->usingCallback && (stream_.mode != DUPLEX || deviceId == id ) ) {
2392 RtAudioCallback callback = (RtAudioCallback) info->callback;
// A non-zero user-callback return value requests a stop on the next pass.
2393 handle->stopStream = callback(stream_.userBuffer, stream_.bufferSize, info->userData);
2394 if ( handle->xrun == true ) {
2395 handle->xrun = false;
2396 MUTEX_UNLOCK(&stream_.mutex);
// ---- Output side: push userBuffer out to the device buffers. ----
2401 if ( stream_.mode == OUTPUT || ( stream_.mode == DUPLEX && deviceId == id ) ) {
2403 if (stream_.doConvertBuffer[0]) {
// When no de-interleave is needed, convert straight into the CoreAudio buffer.
2405 if ( !stream_.deInterleave[0] )
2406 stream_.deviceBuffer = (char *) outBufferList->mBuffers[handle->index[0]].mData;
2408 stream_.deviceBuffer = handle->deviceBuffer;
2410 convertStreamBuffer(OUTPUT);
2411 if ( stream_.doByteSwap[0] )
2412 byteSwapBuffer(stream_.deviceBuffer,
2413 stream_.bufferSize * stream_.nDeviceChannels[0],
2414 stream_.deviceFormat[0]);
// De-interleaved: copy each channel into its own mBuffers[] entry.
2416 if ( stream_.deInterleave[0] ) {
2417 int bufferBytes = outBufferList->mBuffers[handle->index[0]].mDataByteSize;
2418 for ( int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
2419 memcpy(outBufferList->mBuffers[handle->index[0]+i].mData,
2420 &stream_.deviceBuffer[i*bufferBytes], bufferBytes );
2426 if (stream_.doByteSwap[0])
2427 byteSwapBuffer(stream_.userBuffer,
2428 stream_.bufferSize * stream_.nUserChannels[0],
2429 stream_.userFormat);
2431 memcpy(outBufferList->mBuffers[handle->index[0]].mData,
2433 outBufferList->mBuffers[handle->index[0]].mDataByteSize );
// ---- Input side: pull device buffers into userBuffer. ----
2437 if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && deviceId == id ) ) {
2439 if (stream_.doConvertBuffer[1]) {
2441 if ( stream_.deInterleave[1] ) {
2442 stream_.deviceBuffer = (char *) handle->deviceBuffer;
2443 int bufferBytes = inBufferList->mBuffers[handle->index[1]].mDataByteSize;
2444 for ( int i=0; i<stream_.nDeviceChannels[1]; i++ ) {
2445 memcpy(&stream_.deviceBuffer[i*bufferBytes],
2446 inBufferList->mBuffers[handle->index[1]+i].mData, bufferBytes );
2450 stream_.deviceBuffer = (char *) inBufferList->mBuffers[handle->index[1]].mData;
// Byte-swap (if required) before format conversion, mirroring the output path.
2452 if ( stream_.doByteSwap[1] )
2453 byteSwapBuffer(stream_.deviceBuffer,
2454 stream_.bufferSize * stream_.nDeviceChannels[1],
2455 stream_.deviceFormat[1]);
2456 convertStreamBuffer(INPUT);
2460 memcpy(stream_.userBuffer,
2461 inBufferList->mBuffers[handle->index[1]].mData,
2462 inBufferList->mBuffers[handle->index[1]].mDataByteSize );
2464 if (stream_.doByteSwap[1])
2465 byteSwapBuffer(stream_.userBuffer,
2466 stream_.bufferSize * stream_.nUserChannels[1],
2467 stream_.userFormat);
// Blocking mode: wake the thread sleeping in tickStream().
2471 if ( !info->usingCallback && (stream_.mode != DUPLEX || deviceId == id ) )
2472 pthread_cond_signal(&handle->condition);
2474 MUTEX_UNLOCK(&stream_.mutex);
// Install a user callback for callback-driven operation.  Warns (and, per
// the visible control flow, records nothing new) when a callback is
// already set.  The callback pointer is stored type-erased as void*.
2477 void RtApiCore :: setStreamCallback(RtAudioCallback callback, void *userData)
2481 if ( stream_.callbackInfo.usingCallback ) {
2482 sprintf(message_, "RtApiCore: A callback is already set for this stream!");
2483 error(RtError::WARNING);
2487 stream_.callbackInfo.callback = (void *) callback;
2488 stream_.callbackInfo.userData = userData;
2489 stream_.callbackInfo.usingCallback = true;
// Remove a previously installed user callback.  A running stream is
// stopped first (stop call elided in this excerpt), then the callback
// bookkeeping is cleared under the stream mutex.
2492 void RtApiCore :: cancelStreamCallback()
2496 if (stream_.callbackInfo.usingCallback) {
2498 if (stream_.state == STREAM_RUNNING)
2501 MUTEX_LOCK(&stream_.mutex);
2503 stream_.callbackInfo.usingCallback = false;
2504 stream_.callbackInfo.userData = NULL;
2505 stream_.state = STREAM_STOPPED;
2506 stream_.callbackInfo.callback = NULL;
2508 MUTEX_UNLOCK(&stream_.mutex);
2513 //******************** End of __MACOSX_CORE__ *********************//
2516 #if defined(__LINUX_JACK__)
2518 // JACK is a low-latency audio server, written primarily for the
2519 // GNU/Linux operating system. It can connect a number of different
2520 // applications to an audio device, as well as allowing them to share
2521 // audio between themselves.
2523 // The JACK server must be running before RtApiJack can be instantiated.
2524 // RtAudio will report just a single "device", which is the JACK audio
2525 // server. The JACK server is typically started in a terminal as follows:
2527 // jackd -d alsa -d hw:0
2529 // Many of the parameters normally set for a stream are fixed by the
2530 // JACK server and can be specified when the JACK server is started.
2533 // jackd -d alsa -d hw:0 -r 44100 -p 512 -n 4
2535 // specifies a sample rate of 44100 Hz, a buffer size of 512 sample
2536 // frames, and number of buffers = 4. Once the server is running, it
2537 // is not possible to override these values. If the values are not
2538 // specified in the command-line, the JACK server uses default values.
2540 #include <jack/jack.h>
2543 // A structure to hold various information related to the Jack API
// implementation.  NOTE(review): the struct declaration line itself is
// elided in this excerpt; the members below belong to the JackHandle
// structure used as stream_.apiHandle.
2546 jack_client_t *client;
2547 jack_port_t **ports[2];       // per-mode port arrays: [0] playback, [1] capture
2550 pthread_cond_t condition;     // wakes tickStream() from the process callback
2553 :client(0), clientOpen(false), stopStream(false) {}
// Holds the most recent JACK error message for later reporting through
// the normal RtAudio error mechanism.
std::string jackmsg;

// JACK error callback (registered via jack_set_error_function).
// Saves the latest error text.  Uses assign() with the bare C string:
// the previous append( desc, strlen(desc)+1 ) both accumulated messages
// across calls and copied the terminating null byte into the
// std::string, producing an embedded NUL in subsequent formatting.
static void jackerror (const char *desc)
{
  jackmsg.assign( desc );
}
// Constructor: device discovery (call elided in this excerpt) must find
// a running JACK server; otherwise report the saved jackmsg text and
// throw NO_DEVICES_FOUND.
2564 RtApiJack :: RtApiJack()
2568 if (nDevices_ <= 0) {
2569 sprintf(message_, "RtApiJack: no Linux Jack server found or connection error (jack: %s)!",
2571 error(RtError::NO_DEVICES_FOUND);
// Destructor: close any stream that is still open so the JACK client and
// buffers are released.
2575 RtApiJack :: ~RtApiJack()
2577 if ( stream_.mode != UNINITIALIZED ) closeStream();
// Probe for a running JACK server.  On success, register exactly one
// RtAudio "device" representing the whole server, then disconnect the
// temporary client.
2580 void RtApiJack :: initialize(void)
2584 // Tell the jack server to call jackerror() when it experiences an
2585 // error. This function saves the error message for subsequent
2586 // reporting via the normal RtAudio error function.
2587 jack_set_error_function( jackerror );
2589 // Look for jack server and try to become a client.
2590 jack_client_t *client;
// If no server is running, jack_client_new() returns 0 and we leave
// devices_ empty (early-return line elided in this excerpt).
2591 if ( (client = jack_client_new( "RtApiJack" )) == 0)
2595 // Determine the name of the device.
2596 device.name = "Jack Server";
2597 devices_.push_back(device);
2600 jack_client_close(client);
// Fill in the RtApiDevice info for the JACK "device": the server's
// current sample rate, channel counts derived from the physical port
// lists, duplex capability, and the native float format (4- or 8-byte
// jack_default_audio_sample_t).  A temporary client connection is used
// and closed before returning.
// NOTE(review): the `ports` arrays returned by jack_get_ports() are not
// freed in the lines visible here — the free() calls may be elided from
// this excerpt; confirm against the full source.
2603 void RtApiJack :: probeDeviceInfo(RtApiDevice *info)
2605 // Look for jack server and try to become a client.
2606 jack_client_t *client;
2607 if ( (client = jack_client_new( "RtApiJack" )) == 0) {
2608 sprintf(message_, "RtApiJack: error connecting to Linux Jack server in probeDeviceInfo() (jack: %s)!",
2610 error(RtError::WARNING);
2614 // Get the current jack server sample rate.
2615 info->sampleRates.clear();
// JACK fixes the rate server-wide, so exactly one rate is reported.
2616 info->sampleRates.push_back( jack_get_sample_rate(client) );
2618 // Count the available ports as device channels. Jack "input ports"
2619 // equal RtAudio output channels.
2622 unsigned int nChannels = 0;
2623 ports = jack_get_ports( client, NULL, NULL, JackPortIsInput );
// The port list is NULL-terminated; walk it to count channels.
2625 port = (char *) ports[nChannels];
2627 port = (char *) ports[++nChannels];
2629 info->maxOutputChannels = nChannels;
2630 info->minOutputChannels = 1;
2633 // Jack "output ports" equal RtAudio input channels.
2635 ports = jack_get_ports( client, NULL, NULL, JackPortIsOutput );
2637 port = (char *) ports[nChannels];
2639 port = (char *) ports[++nChannels];
2641 info->maxInputChannels = nChannels;
2642 info->minInputChannels = 1;
2645 if (info->maxOutputChannels == 0 && info->maxInputChannels == 0) {
2646 jack_client_close(client);
2647 sprintf(message_, "RtApiJack: error determining jack input/output channels!");
2648 error(RtError::WARNING);
// Duplex limits are the minima of the two directions.
2652 if (info->maxOutputChannels > 0 && info->maxInputChannels > 0) {
2653 info->hasDuplexSupport = true;
2654 info->maxDuplexChannels = (info->maxOutputChannels > info->maxInputChannels) ?
2655 info->maxInputChannels : info->maxOutputChannels;
2656 info->minDuplexChannels = (info->minOutputChannels > info->minInputChannels) ?
2657 info->minInputChannels : info->minOutputChannels;
2660 // Get the jack data format type. There isn't much documentation
2661 // regarding supported data formats in jack. I'm assuming here that
2662 // the default type will always be a floating-point type, of length
2663 // equal to either 4 or 8 bytes.
2664 int sample_size = sizeof( jack_default_audio_sample_t );
2665 if ( sample_size == 4 )
2666 info->nativeFormats = RTAUDIO_FLOAT32;
2667 else if ( sample_size == 8 )
2668 info->nativeFormats = RTAUDIO_FLOAT64;
2670 // Check that we have a supported format
2671 if (info->nativeFormats == 0) {
2672 jack_client_close(client);
2673 sprintf(message_, "RtApiJack: error determining jack server data format!");
2674 error(RtError::WARNING);
2678 jack_client_close(client);
2679 info->probed = true;
// JACK process callback: forwards each period to the owning RtApiJack
// object.  Any RtError escaping callbackEvent() is caught and reported
// to stderr so no C++ exception crosses into the JACK server thread.
// (try/return lines elided in this excerpt.)
2682 int jackCallbackHandler(jack_nframes_t nframes, void *infoPointer)
2684 CallbackInfo *info = (CallbackInfo *) infoPointer;
2685 RtApiJack *object = (RtApiJack *) info->object;
2687 object->callbackEvent( (unsigned long) nframes );
2689 catch (RtError &exception) {
2690 fprintf(stderr, "\nRtApiJack: callback handler error (%s)!\n\n", exception.getMessageString());
// JACK shutdown callback: the server is going away, so mark the client
// closed (preventing a later jack_client_close on a dead handle) and
// close the stream.  Errors are reported to stderr rather than thrown,
// since this runs on a JACK-owned thread.
2697 void jackShutdown(void *infoPointer)
2699 CallbackInfo *info = (CallbackInfo *) infoPointer;
2700 JackHandle *handle = (JackHandle *) info->apiInfo;
// Mark closed BEFORE closeStream() so it does not try to talk to the server.
2701 handle->clientOpen = false;
2702 RtApiJack *object = (RtApiJack *) info->object;
2704 object->closeStream();
2706 catch (RtError &exception) {
2707 fprintf(stderr, "\nRtApiJack: jackShutdown error (%s)!\n\n", exception.getMessageString());
2711 fprintf(stderr, "\nRtApiJack: the Jack server is shutting down ... stream stopped and closed!!!\n\n");
// JACK xrun callback (registered via jack_set_xrun_callback): report the
// overrun/underrun on stderr and return zero so the server keeps this
// client alive.  As presented, the function lacked a return statement,
// which is undefined behavior for a non-void function — return 0
// explicitly.
int jackXrun( void * )
{
  fprintf(stderr, "\nRtApiJack: audio overrun/underrun reported!\n");
  return 0;
}
// Configure one direction (OUTPUT or INPUT) of a JACK stream.  Returns
// true (SUCCESS) or false after reporting a warning.  The JACK server
// fixes the sample rate and buffer size, so the requested values must
// match; format conversion / de-interleaving to the server's float
// format is arranged via stream_.doConvertBuffer.  On the second call of
// a duplex open the existing client/handle is reused.  The tail from
// line 2884 onward is the shared error-unwind path.
// NOTE(review): numberOfBuffers is accepted but unused here — JACK
// controls the period count.  Several declarations, gotos, and closing
// braces are elided in this excerpt.
2720 bool RtApiJack :: probeDeviceOpen(int device, StreamMode mode, int channels,
2721 int sampleRate, RtAudioFormat format,
2722 int *bufferSize, int numberOfBuffers)
2724 // Compare the jack server channels to the requested number of channels.
2725 if ( (mode == OUTPUT && devices_[device].maxOutputChannels < channels ) ||
2726 (mode == INPUT && devices_[device].maxInputChannels < channels ) ) {
2727 sprintf(message_, "RtApiJack: the Jack server does not support requested channels!");
2728 error(RtError::DEBUG_WARNING);
2732 JackHandle *handle = (JackHandle *) stream_.apiHandle;
2734 // Look for jack server and try to become a client (only do once per stream).
2736 jack_client_t *client = 0;
2737 if ( mode == OUTPUT || (mode == INPUT && stream_.mode != OUTPUT) ) {
2738 snprintf(label, 32, "RtApiJack");
2739 if ( (client = jack_client_new( (const char *) label )) == 0) {
2740 sprintf(message_, "RtApiJack: cannot connect to Linux Jack server in probeDeviceOpen() (jack: %s)!",
2742 error(RtError::DEBUG_WARNING);
2747 // The handle must have been created on an earlier pass.
2748 client = handle->client;
2751 // First, check the jack server sample rate.
2753 jack_rate = (int) jack_get_sample_rate(client);
// The server rate is non-negotiable: mismatch aborts the open.
2754 if ( sampleRate != jack_rate ) {
2755 jack_client_close(client);
2756 sprintf( message_, "RtApiJack: the requested sample rate (%d) is different than the JACK server rate (%d).",
2757 sampleRate, jack_rate );
2758 error(RtError::DEBUG_WARNING);
2761 stream_.sampleRate = jack_rate;
2763 // The jack server seems to support just a single floating-point
2764 // data type. Since we already checked it before, just use what we
2766 stream_.deviceFormat[mode] = devices_[device].nativeFormats;
2767 stream_.userFormat = format;
2769 // Jack always uses non-interleaved buffers. We'll need to
2770 // de-interleave if we have more than one channel.
2771 stream_.deInterleave[mode] = false;
2773 stream_.deInterleave[mode] = true;
2775 // Jack always provides host byte-ordered data.
2776 stream_.doByteSwap[mode] = false;
2778 // Get the buffer size. The buffer size and number of buffers
2779 // (periods) is set when the jack server is started.
2780 stream_.bufferSize = (int) jack_get_buffer_size(client);
2781 *bufferSize = stream_.bufferSize;
2783 stream_.nDeviceChannels[mode] = channels;
2784 stream_.nUserChannels[mode] = channels;
// Conversion is needed for a format mismatch or for de-interleaving.
2786 stream_.doConvertBuffer[mode] = false;
2787 if (stream_.userFormat != stream_.deviceFormat[mode])
2788 stream_.doConvertBuffer[mode] = true;
2789 if (stream_.deInterleave[mode])
2790 stream_.doConvertBuffer[mode] = true;
2792 // Allocate our JackHandle structure for the stream.
2793 if ( handle == 0 ) {
2794 handle = (JackHandle *) calloc(1, sizeof(JackHandle));
2795 if ( handle == NULL ) {
2796 sprintf(message_, "RtApiJack: error allocating JackHandle memory (%s).",
2797 devices_[device].name.c_str());
2800 handle->ports[0] = 0;
2801 handle->ports[1] = 0;
2802 if ( pthread_cond_init(&handle->condition, NULL) ) {
2803 sprintf(message_, "RtApiJack: error initializing pthread condition variable!");
2806 stream_.apiHandle = (void *) handle;
2807 handle->client = client;
2808 handle->clientOpen = true;
2811 // Allocate necessary internal buffers.
// The shared user buffer is sized for the larger channel count of the two modes.
2812 if ( stream_.nUserChannels[0] != stream_.nUserChannels[1] ) {
2815 if (stream_.nUserChannels[0] >= stream_.nUserChannels[1])
2816 buffer_bytes = stream_.nUserChannels[0];
2818 buffer_bytes = stream_.nUserChannels[1];
2820 buffer_bytes *= *bufferSize * formatBytes(stream_.userFormat);
2821 if (stream_.userBuffer) free(stream_.userBuffer);
2822 stream_.userBuffer = (char *) calloc(buffer_bytes, 1);
2823 if (stream_.userBuffer == NULL) {
2824 sprintf(message_, "RtApiJack: error allocating user buffer memory (%s).",
2825 devices_[device].name.c_str());
2830 if ( stream_.doConvertBuffer[mode] ) {
// Reuse an existing (larger) device buffer from the OUTPUT pass when possible.
2833 bool makeBuffer = true;
2834 if ( mode == OUTPUT )
2835 buffer_bytes = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
2836 else { // mode == INPUT
2837 buffer_bytes = stream_.nDeviceChannels[1] * formatBytes(stream_.deviceFormat[1]);
2838 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
2839 long bytes_out = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
2840 if ( buffer_bytes < bytes_out ) makeBuffer = false;
2845 buffer_bytes *= *bufferSize;
2846 if (stream_.deviceBuffer) free(stream_.deviceBuffer);
2847 stream_.deviceBuffer = (char *) calloc(buffer_bytes, 1);
2848 if (stream_.deviceBuffer == NULL) {
2849 sprintf(message_, "RtApiJack: error allocating device buffer memory (%s).",
2850 devices_[device].name.c_str());
2856 // Allocate memory for the Jack ports (channels) identifiers.
2857 handle->ports[mode] = (jack_port_t **) malloc (sizeof (jack_port_t *) * channels);
2858 if ( handle->ports[mode] == NULL ) {
2859 sprintf(message_, "RtApiJack: error allocating port handle memory (%s).",
2860 devices_[device].name.c_str());
2864 stream_.device[mode] = device;
2865 stream_.state = STREAM_STOPPED;
2866 stream_.callbackInfo.usingCallback = false;
2867 stream_.callbackInfo.object = (void *) this;
2868 stream_.callbackInfo.apiInfo = (void *) handle;
2870 if ( stream_.mode == OUTPUT && mode == INPUT )
2871 // We had already set up the stream for output.
2872 stream_.mode = DUPLEX;
2874 stream_.mode = mode;
// Register the per-period process, xrun, and shutdown callbacks with JACK.
2875 jack_set_process_callback( handle->client, jackCallbackHandler, (void *) &stream_.callbackInfo );
2876 jack_set_xrun_callback( handle->client, jackXrun, NULL );
2877 jack_on_shutdown( handle->client, jackShutdown, (void *) &stream_.callbackInfo );
// ---- Error-unwind path: release everything allocated above. ----
2884 pthread_cond_destroy(&handle->condition);
2885 if ( handle->clientOpen == true )
2886 jack_client_close(handle->client);
2888 if ( handle->ports[0] ) free(handle->ports[0]);
2889 if ( handle->ports[1] ) free(handle->ports[1]);
2892 stream_.apiHandle = 0;
2895 if (stream_.userBuffer) {
2896 free(stream_.userBuffer);
2897 stream_.userBuffer = 0;
2900 error(RtError::WARNING);
// Close the JACK stream: deactivate and close the client (only if the
// server has not already shut it down — clientOpen guards this), then
// release the port arrays, the condition variable, the JackHandle, and
// the user/device buffers.  Issues only a WARNING when no stream is open
// because this is also called from the destructor.
2904 void RtApiJack :: closeStream()
2906 // We don't want an exception to be thrown here because this
2907 // function is called by our class destructor. So, do our own
2909 if ( stream_.mode == UNINITIALIZED ) {
2910 sprintf(message_, "RtApiJack::closeStream(): no open stream to close!");
2911 error(RtError::WARNING);
2915 JackHandle *handle = (JackHandle *) stream_.apiHandle;
2916 if ( handle && handle->clientOpen == true ) {
2917 if (stream_.state == STREAM_RUNNING)
2918 jack_deactivate(handle->client);
2920 jack_client_close(handle->client);
2924 if ( handle->ports[0] ) free(handle->ports[0]);
2925 if ( handle->ports[1] ) free(handle->ports[1]);
2926 pthread_cond_destroy(&handle->condition);
2928 stream_.apiHandle = 0;
2931 if (stream_.userBuffer) {
2932 free(stream_.userBuffer);
2933 stream_.userBuffer = 0;
2936 if (stream_.deviceBuffer) {
2937 free(stream_.deviceBuffer);
2938 stream_.deviceBuffer = 0;
2941 stream_.mode = UNINITIALIZED;
// Start the JACK stream: register one port per user channel for each
// active direction, activate the client, then connect our ports to the
// first N physical ports (users cannot select specific channels).
// SYSTEM_ERROR is raised on any activation/connection failure.
// NOTE(review): error checks, free(ports) calls, and closing braces are
// elided in this excerpt — confirm the `ports` lists are freed in the
// full source.
2945 void RtApiJack :: startStream()
2948 if (stream_.state == STREAM_RUNNING) return;   // already running: nothing to do
2950 MUTEX_LOCK(&stream_.mutex);
2953 JackHandle *handle = (JackHandle *) stream_.apiHandle;
// Register playback ports named "outport 0", "outport 1", ...
2954 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
2955 for ( int i=0; i<stream_.nUserChannels[0]; i++ ) {
2956 snprintf(label, 64, "outport %d", i);
2957 handle->ports[0][i] = jack_port_register(handle->client, (const char *)label,
2958 JACK_DEFAULT_AUDIO_TYPE, JackPortIsOutput, 0);
// Register capture ports named "inport 0", "inport 1", ...
2962 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
2963 for ( int i=0; i<stream_.nUserChannels[1]; i++ ) {
2964 snprintf(label, 64, "inport %d", i);
2965 handle->ports[1][i] = jack_port_register(handle->client, (const char *)label,
2966 JACK_DEFAULT_AUDIO_TYPE, JackPortIsInput, 0);
2970 if (jack_activate(handle->client)) {
2971 sprintf(message_, "RtApiJack: unable to activate JACK client!");
2972 error(RtError::SYSTEM_ERROR);
2977 // Get the list of available ports.
2978 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
// Physical *input* ports are where our playback outputs connect to.
2979 ports = jack_get_ports(handle->client, NULL, NULL, JackPortIsPhysical|JackPortIsInput);
2980 if ( ports == NULL) {
2981 sprintf(message_, "RtApiJack: error determining available jack input ports!");
2982 error(RtError::SYSTEM_ERROR);
2985 // Now make the port connections. Since RtAudio wasn't designed to
2986 // allow the user to select particular channels of a device, we'll
2987 // just open the first "nChannels" ports.
2988 for ( int i=0; i<stream_.nUserChannels[0]; i++ ) {
2991 result = jack_connect( handle->client, jack_port_name(handle->ports[0][i]), ports[i] );
2994 sprintf(message_, "RtApiJack: error connecting output ports!");
2995 error(RtError::SYSTEM_ERROR);
3001 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
// Physical *output* ports feed our capture inputs.
3002 ports = jack_get_ports( handle->client, NULL, NULL, JackPortIsPhysical|JackPortIsOutput );
3003 if ( ports == NULL) {
3004 sprintf(message_, "RtApiJack: error determining available jack output ports!");
3005 error(RtError::SYSTEM_ERROR);
3008 // Now make the port connections. See note above.
3009 for ( int i=0; i<stream_.nUserChannels[1]; i++ ) {
3012 result = jack_connect( handle->client, ports[i], jack_port_name(handle->ports[1][i]) );
3015 sprintf(message_, "RtApiJack: error connecting input ports!");
3016 error(RtError::SYSTEM_ERROR);
3022 handle->stopStream = false;   // clear any pending stop request
3023 stream_.state = STREAM_RUNNING;
3025 MUTEX_UNLOCK(&stream_.mutex);
// Stop the JACK stream by deactivating the client (which also detaches
// its ports from the graph).  The state flag is flipped before taking
// the mutex so an in-flight callback can observe the stop promptly.
3028 void RtApiJack :: stopStream()
3031 if (stream_.state == STREAM_STOPPED) return;   // already stopped: nothing to do
3033 // Change the state before the lock to improve shutdown response
3034 // when using a callback.
3035 stream_.state = STREAM_STOPPED;
3036 MUTEX_LOCK(&stream_.mutex);
3038 JackHandle *handle = (JackHandle *) stream_.apiHandle;
3039 jack_deactivate(handle->client);
3041 MUTEX_UNLOCK(&stream_.mutex);
// Abort the stream immediately.  NOTE(review): the body is elided in this
// excerpt; presumably it delegates to stopStream() — confirm in full source.
3044 void RtApiJack :: abortStream()
// Blocking "tick" for the non-callback API: waits on the JackHandle
// condition variable until the process callback signals that one period
// has been handled.  Warns and is a no-op when a user callback is set.
3049 void RtApiJack :: tickStream()
3053 if (stream_.state == STREAM_STOPPED) return;
3055 if (stream_.callbackInfo.usingCallback) {
3056 sprintf(message_, "RtApiJack: tickStream() should not be used when a callback function is set!");
3057 error(RtError::WARNING);
3061 JackHandle *handle = (JackHandle *) stream_.apiHandle;
3063 MUTEX_LOCK(&stream_.mutex);
// pthread_cond_wait releases the mutex while waiting; callbackEvent() signals it.
3065 pthread_cond_wait(&handle->condition, &stream_.mutex);
3067 MUTEX_UNLOCK(&stream_.mutex);
// Per-period JACK processing, called from jackCallbackHandler with the
// period length in frames.  Runs the user callback (if installed), then
// copies/converts userBuffer to each registered playback port buffer and
// gathers capture port buffers back into userBuffer.  JACK buffers are
// non-interleaved per-port floats, so multi-channel data goes through
// deviceBuffer + convertStreamBuffer(); single-channel float data is
// memcpy'd directly.  In blocking mode, signals tickStream() at the end.
// NOTE(review): the stop-stream body and several braces are elided in
// this excerpt.
3070 void RtApiJack :: callbackEvent( unsigned long nframes )
3074 if (stream_.state == STREAM_STOPPED) return;
3076 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
3077 JackHandle *handle = (JackHandle *) stream_.apiHandle;
3078 if ( info->usingCallback && handle->stopStream ) {
3079 // Check if the stream should be stopped (via the previous user
3080 // callback return value). We stop the stream here, rather than
3081 // after the function call, so that output data can first be
3087 MUTEX_LOCK(&stream_.mutex);
3089 // Invoke user callback first, to get fresh output data.
3090 if ( info->usingCallback ) {
3091 RtAudioCallback callback = (RtAudioCallback) info->callback;
// A non-zero return value from the user requests a stop on the next period.
3092 handle->stopStream = callback(stream_.userBuffer, stream_.bufferSize, info->userData);
3095 jack_default_audio_sample_t *jackbuffer;
3096 long bufferBytes = nframes * sizeof (jack_default_audio_sample_t);
// ---- Output side: scatter deviceBuffer/userBuffer into port buffers. ----
3097 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
3099 if (stream_.doConvertBuffer[0]) {
3100 convertStreamBuffer(OUTPUT);
3102 for ( int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
3103 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer(handle->ports[0][i],
3104 (jack_nframes_t) nframes);
3105 memcpy(jackbuffer, &stream_.deviceBuffer[i*bufferBytes], bufferBytes );
3108 else { // single channel only
3109 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer(handle->ports[0][0],
3110 (jack_nframes_t) nframes);
3111 memcpy(jackbuffer, stream_.userBuffer, bufferBytes );
// ---- Input side: gather port buffers, then convert to user format. ----
3115 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
3117 if (stream_.doConvertBuffer[1]) {
3118 for ( int i=0; i<stream_.nDeviceChannels[1]; i++ ) {
3119 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer(handle->ports[1][i],
3120 (jack_nframes_t) nframes);
3121 memcpy(&stream_.deviceBuffer[i*bufferBytes], jackbuffer, bufferBytes );
3123 convertStreamBuffer(INPUT);
3125 else { // single channel only
3126 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer(handle->ports[1][0],
3127 (jack_nframes_t) nframes);
3128 memcpy(stream_.userBuffer, jackbuffer, bufferBytes );
// Blocking mode: wake the thread sleeping in tickStream().
3132 if ( !info->usingCallback )
3133 pthread_cond_signal(&handle->condition);
3135 MUTEX_UNLOCK(&stream_.mutex);
// Install a user callback for callback-driven operation.  Warns when one
// is already installed.  The callback pointer is stored type-erased as
// void* in the shared CallbackInfo structure.
3138 void RtApiJack :: setStreamCallback(RtAudioCallback callback, void *userData)
3142 if ( stream_.callbackInfo.usingCallback ) {
3143 sprintf(message_, "RtApiJack: A callback is already set for this stream!");
3144 error(RtError::WARNING);
3148 stream_.callbackInfo.callback = (void *) callback;
3149 stream_.callbackInfo.userData = userData;
3150 stream_.callbackInfo.usingCallback = true;
// Remove a previously installed user callback.  A running stream is
// stopped first (stop call elided in this excerpt), then the callback
// bookkeeping is cleared under the stream mutex.
3153 void RtApiJack :: cancelStreamCallback()
3157 if (stream_.callbackInfo.usingCallback) {
3159 if (stream_.state == STREAM_RUNNING)
3162 MUTEX_LOCK(&stream_.mutex);
3164 stream_.callbackInfo.usingCallback = false;
3165 stream_.callbackInfo.userData = NULL;
3166 stream_.state = STREAM_STOPPED;
3167 stream_.callbackInfo.callback = NULL;
3169 MUTEX_UNLOCK(&stream_.mutex);
3175 #if defined(__LINUX_ALSA__)
3177 #include <alsa/asoundlib.h>
3181 extern "C" void *alsaCallbackHandler(void * ptr);
// Constructor: device enumeration (call elided in this excerpt) must find
// at least one ALSA device, otherwise NO_DEVICES_FOUND is thrown.
3183 RtApiAlsa :: RtApiAlsa()
3187 if (nDevices_ <= 0) {
3188 sprintf(message_, "RtApiAlsa: no Linux ALSA audio devices found!");
3189 error(RtError::NO_DEVICES_FOUND);
// Destructor: close any stream that is still open (the closeStream()
// call on the following line is elided in this excerpt).
3193 RtApiAlsa :: ~RtApiAlsa()
3195 if ( stream_.mode != UNINITIALIZED )
// Enumerate ALSA hardware: iterate cards with snd_card_next(), open each
// card's control interface, and walk its PCM devices with
// snd_ctl_pcm_next_device(), pushing an "hw:card,device" (or
// "hw:ID,device") name for each into devices_.  Control-API failures are
// reported as DEBUG_WARNINGs and that card is skipped.
// NOTE(review): loop braces, `continue`s, and some declarations are
// elided in this excerpt.
3199 void RtApiAlsa :: initialize(void)
3201 int card, subdevice, result;
3205 snd_ctl_card_info_t *info;
3206 snd_ctl_card_info_alloca(&info);
3209 // Count cards and devices
// snd_card_next(-1 start) yields the first card; -1 result ends the walk.
3212 snd_card_next(&card);
3213 while ( card >= 0 ) {
3214 sprintf(name, "hw:%d", card);
3215 result = snd_ctl_open(&handle, name, 0);
3217 sprintf(message_, "RtApiAlsa: control open (%i): %s.", card, snd_strerror(result));
3218 error(RtError::DEBUG_WARNING);
3221 result = snd_ctl_card_info(handle, info);
3223 sprintf(message_, "RtApiAlsa: control hardware info (%i): %s.", card, snd_strerror(result));
3224 error(RtError::DEBUG_WARNING);
3227 cardId = snd_ctl_card_info_get_id(info);
// Inner device walk: subdevice advances until snd_ctl_pcm_next_device returns -1.
3230 result = snd_ctl_pcm_next_device(handle, &subdevice);
3232 sprintf(message_, "RtApiAlsa: control next device (%i): %s.", card, snd_strerror(result));
3233 error(RtError::DEBUG_WARNING);
3238 sprintf( name, "hw:%d,%d", card, subdevice );
3239 // If a cardId exists and it contains at least one non-numeric
3240 // character, use it to identify the device. This avoids a bug
3241 // in ALSA such that a numeric string is interpreted as a device
3243 for ( unsigned int i=0; i<strlen(cardId); i++ ) {
3244 if ( !isdigit( cardId[i] ) ) {
3245 sprintf( name, "hw:%s,%d", cardId, subdevice );
3249 device.name.erase();
// NOTE(review): appending strlen(name)+1 bytes stores the NUL terminator
// inside the std::string — harmless for c_str() use but suspect; compare
// with the jackerror() fix.
3250 device.name.append( (const char *)name, strlen(name)+1 );
3251 devices_.push_back(device);
3255 snd_ctl_close(handle);
3256 snd_card_next(&card);
3260 void RtApiAlsa :: probeDeviceInfo(RtApiDevice *info)
3263 int open_mode = SND_PCM_ASYNC;
3266 snd_pcm_stream_t stream;
3267 snd_pcm_info_t *pcminfo;
3268 snd_pcm_info_alloca(&pcminfo);
3269 snd_pcm_hw_params_t *params;
3270 snd_pcm_hw_params_alloca(¶ms);
3274 // Open the control interface for this card.
3275 strncpy( name, info->name.c_str(), 64 );
3276 card = strtok(name, ",");
3277 err = snd_ctl_open(&chandle, card, SND_CTL_NONBLOCK);
3279 sprintf(message_, "RtApiAlsa: control open (%s): %s.", card, snd_strerror(err));
3280 error(RtError::DEBUG_WARNING);
3283 unsigned int dev = (unsigned int) atoi( strtok(NULL, ",") );
3285 // First try for playback
3286 stream = SND_PCM_STREAM_PLAYBACK;
3287 snd_pcm_info_set_device(pcminfo, dev);
3288 snd_pcm_info_set_subdevice(pcminfo, 0);
3289 snd_pcm_info_set_stream(pcminfo, stream);
3291 if ((err = snd_ctl_pcm_info(chandle, pcminfo)) < 0) {
3292 if (err == -ENOENT) {
3293 sprintf(message_, "RtApiAlsa: pcm device (%s) doesn't handle output!", info->name.c_str());
3294 error(RtError::DEBUG_WARNING);
3297 sprintf(message_, "RtApiAlsa: snd_ctl_pcm_info error for device (%s) output: %s",
3298 info->name.c_str(), snd_strerror(err));
3299 error(RtError::DEBUG_WARNING);
3304 err = snd_pcm_open(&handle, info->name.c_str(), stream, open_mode | SND_PCM_NONBLOCK );
3307 sprintf(message_, "RtApiAlsa: pcm playback device (%s) is busy: %s.",
3308 info->name.c_str(), snd_strerror(err));
3310 sprintf(message_, "RtApiAlsa: pcm playback open (%s) error: %s.",
3311 info->name.c_str(), snd_strerror(err));
3312 error(RtError::DEBUG_WARNING);
3316 // We have an open device ... allocate the parameter structure.
3317 err = snd_pcm_hw_params_any(handle, params);
3319 snd_pcm_close(handle);
3320 sprintf(message_, "RtApiAlsa: hardware probe error (%s): %s.",
3321 info->name.c_str(), snd_strerror(err));
3322 error(RtError::WARNING);
3326 // Get output channel information.
3328 err = snd_pcm_hw_params_get_channels_min(params, &value);
3330 snd_pcm_close(handle);
3331 sprintf(message_, "RtApiAlsa: hardware minimum channel probe error (%s): %s.",
3332 info->name.c_str(), snd_strerror(err));
3333 error(RtError::WARNING);
3336 info->minOutputChannels = value;
3338 err = snd_pcm_hw_params_get_channels_max(params, &value);
3340 snd_pcm_close(handle);
3341 sprintf(message_, "RtApiAlsa: hardware maximum channel probe error (%s): %s.",
3342 info->name.c_str(), snd_strerror(err));
3343 error(RtError::WARNING);
3346 info->maxOutputChannels = value;
3348 snd_pcm_close(handle);
3351 // Now try for capture
3352 stream = SND_PCM_STREAM_CAPTURE;
3353 snd_pcm_info_set_stream(pcminfo, stream);
3355 err = snd_ctl_pcm_info(chandle, pcminfo);
3356 snd_ctl_close(chandle);
3358 if (err == -ENOENT) {
3359 sprintf(message_, "RtApiAlsa: pcm device (%s) doesn't handle input!", info->name.c_str());
3360 error(RtError::DEBUG_WARNING);
3363 sprintf(message_, "RtApiAlsa: snd_ctl_pcm_info error for device (%s) input: %s",
3364 info->name.c_str(), snd_strerror(err));
3365 error(RtError::DEBUG_WARNING);
3367 if (info->maxOutputChannels == 0)
3368 // didn't open for playback either ... device invalid
3370 goto probe_parameters;
3373 err = snd_pcm_open(&handle, info->name.c_str(), stream, open_mode | SND_PCM_NONBLOCK);
3376 sprintf(message_, "RtApiAlsa: pcm capture device (%s) is busy: %s.",
3377 info->name.c_str(), snd_strerror(err));
3379 sprintf(message_, "RtApiAlsa: pcm capture open (%s) error: %s.",
3380 info->name.c_str(), snd_strerror(err));
3381 error(RtError::DEBUG_WARNING);
3382 if (info->maxOutputChannels == 0)
3383 // didn't open for playback either ... device invalid
3385 goto probe_parameters;
3388 // We have an open capture device ... allocate the parameter structure.
3389 err = snd_pcm_hw_params_any(handle, params);
3391 snd_pcm_close(handle);
3392 sprintf(message_, "RtApiAlsa: hardware probe error (%s): %s.",
3393 info->name.c_str(), snd_strerror(err));
3394 error(RtError::WARNING);
3395 if (info->maxOutputChannels > 0)
3396 goto probe_parameters;
3401 // Get input channel information.
3402 err = snd_pcm_hw_params_get_channels_min(params, &value);
3404 snd_pcm_close(handle);
3405 sprintf(message_, "RtApiAlsa: hardware minimum in channel probe error (%s): %s.",
3406 info->name.c_str(), snd_strerror(err));
3407 error(RtError::WARNING);
3408 if (info->maxOutputChannels > 0)
3409 goto probe_parameters;
3413 info->minInputChannels = value;
3415 err = snd_pcm_hw_params_get_channels_max(params, &value);
3417 snd_pcm_close(handle);
3418 sprintf(message_, "RtApiAlsa: hardware maximum in channel probe error (%s): %s.",
3419 info->name.c_str(), snd_strerror(err));
3420 error(RtError::WARNING);
3421 if (info->maxOutputChannels > 0)
3422 goto probe_parameters;
3426 info->maxInputChannels = value;
3428 snd_pcm_close(handle);
3430 // If device opens for both playback and capture, we determine the channels.
3431 if (info->maxOutputChannels == 0 || info->maxInputChannels == 0)
3432 goto probe_parameters;
3434 info->hasDuplexSupport = true;
3435 info->maxDuplexChannels = (info->maxOutputChannels > info->maxInputChannels) ?
3436 info->maxInputChannels : info->maxOutputChannels;
3437 info->minDuplexChannels = (info->minOutputChannels > info->minInputChannels) ?
3438 info->minInputChannels : info->minOutputChannels;
3441 // At this point, we just need to figure out the supported data
3442 // formats and sample rates. We'll proceed by opening the device in
3443 // the direction with the maximum number of channels, or playback if
3444 // they are equal. This might limit our sample rate options, but so
3447 if (info->maxOutputChannels >= info->maxInputChannels)
3448 stream = SND_PCM_STREAM_PLAYBACK;
3450 stream = SND_PCM_STREAM_CAPTURE;
3452 err = snd_pcm_open(&handle, info->name.c_str(), stream, open_mode);
3454 sprintf(message_, "RtApiAlsa: pcm (%s) won't reopen during probe: %s.",
3455 info->name.c_str(), snd_strerror(err));
3456 error(RtError::WARNING);
3460 // We have an open device ... allocate the parameter structure.
3461 err = snd_pcm_hw_params_any(handle, params);
3463 snd_pcm_close(handle);
3464 sprintf(message_, "RtApiAlsa: hardware reopen probe error (%s): %s.",
3465 info->name.c_str(), snd_strerror(err));
3466 error(RtError::WARNING);
3470 // Test our discrete set of sample rate values.
3472 info->sampleRates.clear();
3473 for (unsigned int i=0; i<MAX_SAMPLE_RATES; i++) {
3474 if (snd_pcm_hw_params_test_rate(handle, params, SAMPLE_RATES[i], dir) == 0)
3475 info->sampleRates.push_back(SAMPLE_RATES[i]);
3477 if (info->sampleRates.size() == 0) {
3478 snd_pcm_close(handle);
3479 sprintf(message_, "RtApiAlsa: no supported sample rates found for device (%s).",
3480 info->name.c_str());
3481 error(RtError::DEBUG_WARNING);
3485 // Probe the supported data formats ... we don't care about endian-ness just yet
3486 snd_pcm_format_t format;
3487 info->nativeFormats = 0;
3488 format = SND_PCM_FORMAT_S8;
3489 if (snd_pcm_hw_params_test_format(handle, params, format) == 0)
3490 info->nativeFormats |= RTAUDIO_SINT8;
3491 format = SND_PCM_FORMAT_S16;
3492 if (snd_pcm_hw_params_test_format(handle, params, format) == 0)
3493 info->nativeFormats |= RTAUDIO_SINT16;
3494 format = SND_PCM_FORMAT_S24;
3495 if (snd_pcm_hw_params_test_format(handle, params, format) == 0)
3496 info->nativeFormats |= RTAUDIO_SINT24;
3497 format = SND_PCM_FORMAT_S32;
3498 if (snd_pcm_hw_params_test_format(handle, params, format) == 0)
3499 info->nativeFormats |= RTAUDIO_SINT32;
3500 format = SND_PCM_FORMAT_FLOAT;
3501 if (snd_pcm_hw_params_test_format(handle, params, format) == 0)
3502 info->nativeFormats |= RTAUDIO_FLOAT32;
3503 format = SND_PCM_FORMAT_FLOAT64;
3504 if (snd_pcm_hw_params_test_format(handle, params, format) == 0)
3505 info->nativeFormats |= RTAUDIO_FLOAT64;
3507 // Check that we have at least one supported format
3508 if (info->nativeFormats == 0) {
3509 snd_pcm_close(handle);
3510 sprintf(message_, "RtApiAlsa: pcm device (%s) data format not supported by RtAudio.",
3511 info->name.c_str());
3512 error(RtError::WARNING);
3516 // That's all ... close the device and return
3517 snd_pcm_close(handle);
3518 info->probed = true;
// Open and configure the ALSA pcm device `device` for the given stream
// `mode` (OUTPUT or INPUT): set access type, data format, sample rate,
// channel count, period (buffer) count and period size, then allocate the
// per-stream handle array and any user/device conversion buffers.
// *bufferSize may be adjusted upward to the hardware's minimum period size.
// On failure the pcm handle is closed and error(RtError::WARNING) is raised.
bool RtApiAlsa :: probeDeviceOpen( int device, StreamMode mode, int channels,
                                   int sampleRate, RtAudioFormat format,
                                   int *bufferSize, int numberOfBuffers )
#if defined(__RTAUDIO_DEBUG__)
  snd_output_stdio_attach(&out, stderr, 0);

  // I'm not using the "plug" interface ... too much inconsistent behavior.
  const char *name = devices_[device].name.c_str();

  // Map the RtAudio stream mode onto an ALSA stream direction.
  snd_pcm_stream_t alsa_stream;
  alsa_stream = SND_PCM_STREAM_PLAYBACK;
  alsa_stream = SND_PCM_STREAM_CAPTURE;

  // Open the pcm device in async mode (RtAudio drives I/O itself).
  int alsa_open_mode = SND_PCM_ASYNC;
  err = snd_pcm_open(&handle, name, alsa_stream, alsa_open_mode);
  sprintf(message_,"RtApiAlsa: pcm device (%s) won't open: %s.",
          name, snd_strerror(err));
  error(RtError::WARNING);

  // Fill the parameter structure.
  snd_pcm_hw_params_t *hw_params;
  snd_pcm_hw_params_alloca(&hw_params);
  err = snd_pcm_hw_params_any(handle, hw_params);
  snd_pcm_close(handle);
  sprintf(message_, "RtApiAlsa: error getting parameter handle (%s): %s.",
          name, snd_strerror(err));
  error(RtError::WARNING);

#if defined(__RTAUDIO_DEBUG__)
  fprintf(stderr, "\nRtApiAlsa: dump hardware params just after device open:\n\n");
  snd_pcm_hw_params_dump(hw_params, out);

  // Set access ... try interleaved access first, then non-interleaved.
  // If only non-interleaved access works, remember it so tickStream()
  // uses snd_pcm_writen/readn instead of writei/readi.
  if ( !snd_pcm_hw_params_test_access( handle, hw_params, SND_PCM_ACCESS_RW_INTERLEAVED) ) {
    err = snd_pcm_hw_params_set_access(handle, hw_params, SND_PCM_ACCESS_RW_INTERLEAVED);
  else if ( !snd_pcm_hw_params_test_access( handle, hw_params, SND_PCM_ACCESS_RW_NONINTERLEAVED) ) {
    err = snd_pcm_hw_params_set_access(handle, hw_params, SND_PCM_ACCESS_RW_NONINTERLEAVED);
    stream_.deInterleave[mode] = true;
  snd_pcm_close(handle);
  sprintf(message_, "RtApiAlsa: device (%s) access not supported by RtAudio.", name);
  error(RtError::WARNING);

  snd_pcm_close(handle);
  sprintf(message_, "RtApiAlsa: error setting access ( (%s): %s.", name, snd_strerror(err));
  error(RtError::WARNING);

  // Determine how to set the device format: first try the user's format
  // natively, then fall back through the remaining formats from highest
  // to lowest resolution.
  stream_.userFormat = format;
  snd_pcm_format_t device_format = SND_PCM_FORMAT_UNKNOWN;

  if (format == RTAUDIO_SINT8)
    device_format = SND_PCM_FORMAT_S8;
  else if (format == RTAUDIO_SINT16)
    device_format = SND_PCM_FORMAT_S16;
  else if (format == RTAUDIO_SINT24)
    device_format = SND_PCM_FORMAT_S24;
  else if (format == RTAUDIO_SINT32)
    device_format = SND_PCM_FORMAT_S32;
  else if (format == RTAUDIO_FLOAT32)
    device_format = SND_PCM_FORMAT_FLOAT;
  else if (format == RTAUDIO_FLOAT64)
    device_format = SND_PCM_FORMAT_FLOAT64;

  if (snd_pcm_hw_params_test_format(handle, hw_params, device_format) == 0) {
    stream_.deviceFormat[mode] = format;

  // The user requested format is not natively supported by the device.
  device_format = SND_PCM_FORMAT_FLOAT64;
  if (snd_pcm_hw_params_test_format(handle, hw_params, device_format) == 0) {
    stream_.deviceFormat[mode] = RTAUDIO_FLOAT64;

  device_format = SND_PCM_FORMAT_FLOAT;
  if (snd_pcm_hw_params_test_format(handle, hw_params, device_format) == 0) {
    stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;

  device_format = SND_PCM_FORMAT_S32;
  if (snd_pcm_hw_params_test_format(handle, hw_params, device_format) == 0) {
    stream_.deviceFormat[mode] = RTAUDIO_SINT32;

  device_format = SND_PCM_FORMAT_S24;
  if (snd_pcm_hw_params_test_format(handle, hw_params, device_format) == 0) {
    stream_.deviceFormat[mode] = RTAUDIO_SINT24;

  device_format = SND_PCM_FORMAT_S16;
  if (snd_pcm_hw_params_test_format(handle, hw_params, device_format) == 0) {
    stream_.deviceFormat[mode] = RTAUDIO_SINT16;

  device_format = SND_PCM_FORMAT_S8;
  if (snd_pcm_hw_params_test_format(handle, hw_params, device_format) == 0) {
    stream_.deviceFormat[mode] = RTAUDIO_SINT8;

  // If we get here, no supported format was found.
  sprintf(message_,"RtApiAlsa: pcm device (%s) data format not supported by RtAudio.", name);
  snd_pcm_close(handle);
  error(RtError::WARNING);

  err = snd_pcm_hw_params_set_format(handle, hw_params, device_format);
  snd_pcm_close(handle);
  sprintf(message_, "RtApiAlsa: error setting format (%s): %s.",
          name, snd_strerror(err));
  error(RtError::WARNING);

  // Determine whether byte-swapping is necessary.  S8 is endian-neutral,
  // so only multi-byte formats are checked against the CPU endianness.
  stream_.doByteSwap[mode] = false;
  if (device_format != SND_PCM_FORMAT_S8) {
    err = snd_pcm_format_cpu_endian(device_format);
    stream_.doByteSwap[mode] = true;
    snd_pcm_close(handle);
    sprintf(message_, "RtApiAlsa: error getting format endian-ness (%s): %s.",
            name, snd_strerror(err));
    error(RtError::WARNING);

  // Set the sample rate (exact rate required: dir argument is 0).
  err = snd_pcm_hw_params_set_rate(handle, hw_params, (unsigned int)sampleRate, 0);
  snd_pcm_close(handle);
  sprintf(message_, "RtApiAlsa: error setting sample rate (%d) on device (%s): %s.",
          sampleRate, name, snd_strerror(err));
  error(RtError::WARNING);

  // Determine the number of channels for this device.  We support a possible
  // minimum device channel number > than the value requested by the user.
  stream_.nUserChannels[mode] = channels;
  err = snd_pcm_hw_params_get_channels_max(hw_params, &value);
  int device_channels = value;
  if (err < 0 || device_channels < channels) {
    snd_pcm_close(handle);
    sprintf(message_, "RtApiAlsa: channels (%d) not supported by device (%s).",
    error(RtError::WARNING);

  err = snd_pcm_hw_params_get_channels_min(hw_params, &value);
  snd_pcm_close(handle);
  sprintf(message_, "RtApiAlsa: error getting min channels count on device (%s).", name);
  error(RtError::WARNING);

  // Open with the device minimum if it exceeds the user request; the
  // conversion buffer logic below handles the channel mismatch.
  device_channels = value;
  if (device_channels < channels) device_channels = channels;
  stream_.nDeviceChannels[mode] = device_channels;

  // Set the device channels.
  err = snd_pcm_hw_params_set_channels(handle, hw_params, device_channels);
  snd_pcm_close(handle);
  sprintf(message_, "RtApiAlsa: error setting channels (%d) on device (%s): %s.",
          device_channels, name, snd_strerror(err));
  error(RtError::WARNING);

  // Set the buffer number, which in ALSA is referred to as the "period".
  // Clamp the requested count into the hardware's [min, max] period range.
  unsigned int periods = numberOfBuffers;
  // Even though the hardware might allow 1 buffer, it won't work reliably.
  if (periods < 2) periods = 2;
  err = snd_pcm_hw_params_get_periods_min(hw_params, &value, &dir);
  snd_pcm_close(handle);
  sprintf(message_, "RtApiAlsa: error getting min periods on device (%s): %s.",
          name, snd_strerror(err));
  error(RtError::WARNING);

  if (value > periods) periods = value;
  err = snd_pcm_hw_params_get_periods_max(hw_params, &value, &dir);
  snd_pcm_close(handle);
  sprintf(message_, "RtApiAlsa: error getting max periods on device (%s): %s.",
          name, snd_strerror(err));
  error(RtError::WARNING);

  if (value < periods) periods = value;

  err = snd_pcm_hw_params_set_periods(handle, hw_params, periods, 0);
  snd_pcm_close(handle);
  sprintf(message_, "RtApiAlsa: error setting periods (%s): %s.",
          name, snd_strerror(err));
  error(RtError::WARNING);

  // Set the buffer (or period) size, raising *bufferSize to the hardware
  // minimum if the caller asked for less.
  snd_pcm_uframes_t period_size;
  err = snd_pcm_hw_params_get_period_size_min(hw_params, &period_size, &dir);
  snd_pcm_close(handle);
  sprintf(message_, "RtApiAlsa: error getting period size (%s): %s.",
          name, snd_strerror(err));
  error(RtError::WARNING);

  if (*bufferSize < (int) period_size) *bufferSize = (int) period_size;

  err = snd_pcm_hw_params_set_period_size(handle, hw_params, *bufferSize, 0);
  snd_pcm_close(handle);
  sprintf(message_, "RtApiAlsa: error setting period size (%s): %s.",
          name, snd_strerror(err));
  error(RtError::WARNING);

  // If attempting to setup a duplex stream, the bufferSize parameter
  // MUST be the same in both directions!
  if ( stream_.mode == OUTPUT && mode == INPUT && *bufferSize != stream_.bufferSize ) {
    sprintf( message_, "RtApiAlsa: error setting buffer size for duplex stream on device (%s).",
    error(RtError::DEBUG_WARNING);

  stream_.bufferSize = *bufferSize;

  // Install the hardware configuration
  err = snd_pcm_hw_params(handle, hw_params);
  snd_pcm_close(handle);
  sprintf(message_, "RtApiAlsa: error installing hardware configuration (%s): %s.",
          name, snd_strerror(err));
  error(RtError::WARNING);

#if defined(__RTAUDIO_DEBUG__)
  fprintf(stderr, "\nRtApiAlsa: dump hardware params after installation:\n\n");
  snd_pcm_hw_params_dump(hw_params, out);

  // Allocate the stream handle if necessary and then save.
  // Two slots: index 0 = playback handle, index 1 = capture handle.
  snd_pcm_t **handles;
  if ( stream_.apiHandle == 0 ) {
    handles = (snd_pcm_t **) calloc(2, sizeof(snd_pcm_t *));
    // NOTE(review): this tests `handle` (the already-open pcm) instead of
    // the freshly calloc'd `handles`, so the allocation result is never
    // actually validated.
    if ( handle == NULL ) {
      sprintf(message_, "RtApiAlsa: error allocating handle memory (%s).",
              devices_[device].name.c_str());
    stream_.apiHandle = (void *) handles;

  handles = (snd_pcm_t **) stream_.apiHandle;
  handles[mode] = handle;

  // Set flags for buffer conversion: needed when the device format differs
  // from the user format, when the device has more channels than the user
  // requested, or when multi-channel data must be de-interleaved.
  stream_.doConvertBuffer[mode] = false;
  if (stream_.userFormat != stream_.deviceFormat[mode])
    stream_.doConvertBuffer[mode] = true;
  if (stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode])
    stream_.doConvertBuffer[mode] = true;
  if (stream_.nUserChannels[mode] > 1 && stream_.deInterleave[mode])
    stream_.doConvertBuffer[mode] = true;

  // Allocate necessary internal buffers.  The user buffer is sized for the
  // larger of the two directions' user channel counts.
  if ( stream_.nUserChannels[0] != stream_.nUserChannels[1] ) {
    if (stream_.nUserChannels[0] >= stream_.nUserChannels[1])
      buffer_bytes = stream_.nUserChannels[0];
      buffer_bytes = stream_.nUserChannels[1];

    buffer_bytes *= *bufferSize * formatBytes(stream_.userFormat);
    if (stream_.userBuffer) free(stream_.userBuffer);
    stream_.userBuffer = (char *) calloc(buffer_bytes, 1);
    if (stream_.userBuffer == NULL) {
      sprintf(message_, "RtApiAlsa: error allocating user buffer memory (%s).",
              devices_[device].name.c_str());

  if ( stream_.doConvertBuffer[mode] ) {
    // Reuse an existing (output-direction) device buffer if it is already
    // large enough for this direction.
    bool makeBuffer = true;
    if ( mode == OUTPUT )
      buffer_bytes = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
    else { // mode == INPUT
      buffer_bytes = stream_.nDeviceChannels[1] * formatBytes(stream_.deviceFormat[1]);
      if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
        long bytes_out = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
        if ( buffer_bytes < bytes_out ) makeBuffer = false;

      buffer_bytes *= *bufferSize;
      if (stream_.deviceBuffer) free(stream_.deviceBuffer);
      stream_.deviceBuffer = (char *) calloc(buffer_bytes, 1);
      if (stream_.deviceBuffer == NULL) {
        sprintf(message_, "RtApiAlsa: error allocating device buffer memory (%s).",
                devices_[device].name.c_str());

  // Record final stream state.  A second open in the opposite direction
  // promotes an OUTPUT stream to DUPLEX.
  stream_.device[mode] = device;
  stream_.state = STREAM_STOPPED;
  if ( stream_.mode == OUTPUT && mode == INPUT )
    // We had already set up an output stream.
    stream_.mode = DUPLEX;
    stream_.mode = mode;
  stream_.nBuffers = periods;
  stream_.sampleRate = sampleRate;

  // Error cleanup: close any saved handles, release the handle array and
  // the user buffer, then report the failure.
  snd_pcm_close(handles[0]);
  snd_pcm_close(handles[1]);
  stream_.apiHandle = 0;

  if (stream_.userBuffer) {
    free(stream_.userBuffer);
    stream_.userBuffer = 0;

  error(RtError::WARNING);
// Close the ALSA stream: drop any running pcm, join the callback thread,
// close both pcm handles, free the handle array and conversion buffers,
// and mark the stream uninitialized.  Errors are reported as warnings
// (never thrown) because this runs from the class destructor.
void RtApiAlsa :: closeStream()
  // We don't want an exception to be thrown here because this
  // function is called by our class destructor.  So, do our own
  if ( stream_.mode == UNINITIALIZED ) {
    sprintf(message_, "RtApiAlsa::closeStream(): no open stream to close!");
    error(RtError::WARNING);

  // Immediately discard pending frames on whichever handles are running.
  snd_pcm_t **handle = (snd_pcm_t **) stream_.apiHandle;
  if (stream_.state == STREAM_RUNNING) {
    if (stream_.mode == OUTPUT || stream_.mode == DUPLEX)
      snd_pcm_drop(handle[0]);
    if (stream_.mode == INPUT || stream_.mode == DUPLEX)
      snd_pcm_drop(handle[1]);
    stream_.state = STREAM_STOPPED;

  // Tell the callback thread to exit its loop, then wait for it.
  if (stream_.callbackInfo.usingCallback) {
    stream_.callbackInfo.usingCallback = false;
    pthread_join(stream_.callbackInfo.thread, NULL);

  if (handle[0]) snd_pcm_close(handle[0]);
  if (handle[1]) snd_pcm_close(handle[1]);

  if (stream_.userBuffer) {
    free(stream_.userBuffer);
    stream_.userBuffer = 0;

  if (stream_.deviceBuffer) {
    free(stream_.deviceBuffer);
    stream_.deviceBuffer = 0;

  stream_.mode = UNINITIALIZED;
// Start the stream: prepare the pcm handle(s) if not already in the
// PREPARED state, then mark the stream RUNNING.  Raises DRIVER_ERROR
// (after unlocking the mutex) if snd_pcm_prepare fails.
void RtApiAlsa :: startStream()
  // This method calls snd_pcm_prepare if the device isn't already in that state.

  if (stream_.state == STREAM_RUNNING) return;

  MUTEX_LOCK(&stream_.mutex);

  // handle[0] = playback pcm, handle[1] = capture pcm.
  snd_pcm_state_t state;
  snd_pcm_t **handle = (snd_pcm_t **) stream_.apiHandle;
  if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
    state = snd_pcm_state(handle[0]);
    if (state != SND_PCM_STATE_PREPARED) {
      err = snd_pcm_prepare(handle[0]);
      sprintf(message_, "RtApiAlsa: error preparing pcm device (%s): %s.",
              devices_[stream_.device[0]].name.c_str(), snd_strerror(err));
      // Unlock before error() since DRIVER_ERROR throws.
      MUTEX_UNLOCK(&stream_.mutex);
      error(RtError::DRIVER_ERROR);

  if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
    state = snd_pcm_state(handle[1]);
    if (state != SND_PCM_STATE_PREPARED) {
      err = snd_pcm_prepare(handle[1]);
      sprintf(message_, "RtApiAlsa: error preparing pcm device (%s): %s.",
              devices_[stream_.device[1]].name.c_str(), snd_strerror(err));
      MUTEX_UNLOCK(&stream_.mutex);
      error(RtError::DRIVER_ERROR);

  stream_.state = STREAM_RUNNING;

  MUTEX_UNLOCK(&stream_.mutex);
// Stop the stream gracefully: snd_pcm_drain waits for pending frames to
// be played/captured before stopping (contrast abortStream, which drops
// them).  Raises DRIVER_ERROR on drain failure.
void RtApiAlsa :: stopStream()
  if (stream_.state == STREAM_STOPPED) return;

  // Change the state before the lock to improve shutdown response
  // when using a callback.
  stream_.state = STREAM_STOPPED;
  MUTEX_LOCK(&stream_.mutex);

  snd_pcm_t **handle = (snd_pcm_t **) stream_.apiHandle;
  if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
    err = snd_pcm_drain(handle[0]);
    sprintf(message_, "RtApiAlsa: error draining pcm device (%s): %s.",
            devices_[stream_.device[0]].name.c_str(), snd_strerror(err));
    // Unlock before error() since DRIVER_ERROR throws.
    MUTEX_UNLOCK(&stream_.mutex);
    error(RtError::DRIVER_ERROR);

  if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
    err = snd_pcm_drain(handle[1]);
    sprintf(message_, "RtApiAlsa: error draining pcm device (%s): %s.",
            devices_[stream_.device[1]].name.c_str(), snd_strerror(err));
    MUTEX_UNLOCK(&stream_.mutex);
    error(RtError::DRIVER_ERROR);

  MUTEX_UNLOCK(&stream_.mutex);
// Stop the stream immediately: snd_pcm_drop discards any pending frames
// instead of waiting for them (contrast stopStream, which drains).
// Raises DRIVER_ERROR on failure.
void RtApiAlsa :: abortStream()
  if (stream_.state == STREAM_STOPPED) return;

  // Change the state before the lock to improve shutdown response
  // when using a callback.
  stream_.state = STREAM_STOPPED;
  MUTEX_LOCK(&stream_.mutex);

  snd_pcm_t **handle = (snd_pcm_t **) stream_.apiHandle;
  if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
    err = snd_pcm_drop(handle[0]);
    // NOTE(review): message says "draining" but snd_pcm_drop discards
    // frames -- wording appears copied from stopStream().
    sprintf(message_, "RtApiAlsa: error draining pcm device (%s): %s.",
            devices_[stream_.device[0]].name.c_str(), snd_strerror(err));
    MUTEX_UNLOCK(&stream_.mutex);
    error(RtError::DRIVER_ERROR);

  if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
    err = snd_pcm_drop(handle[1]);
    // NOTE(review): same "draining" wording issue as above.
    sprintf(message_, "RtApiAlsa: error draining pcm device (%s): %s.",
            devices_[stream_.device[1]].name.c_str(), snd_strerror(err));
    MUTEX_UNLOCK(&stream_.mutex);
    error(RtError::DRIVER_ERROR);

  MUTEX_UNLOCK(&stream_.mutex);
// Return the number of frames a tickStream() call would have to wait for
// (0 if the stream is stopped or a full buffer is available).  For duplex
// streams the smaller of the two directions' available counts is used.
int RtApiAlsa :: streamWillBlock()
  if (stream_.state == STREAM_STOPPED) return 0;

  MUTEX_LOCK(&stream_.mutex);

  // snd_pcm_avail_update returns the available frame count (>= 0) or a
  // negative error code, so `err` doubles as the frame count below.
  int err = 0, frames = 0;
  snd_pcm_t **handle = (snd_pcm_t **) stream_.apiHandle;
  if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
    err = snd_pcm_avail_update(handle[0]);
    sprintf(message_, "RtApiAlsa: error getting available frames for device (%s): %s.",
            devices_[stream_.device[0]].name.c_str(), snd_strerror(err));
    MUTEX_UNLOCK(&stream_.mutex);
    error(RtError::DRIVER_ERROR);

  if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
    err = snd_pcm_avail_update(handle[1]);
    sprintf(message_, "RtApiAlsa: error getting available frames for device (%s): %s.",
            devices_[stream_.device[1]].name.c_str(), snd_strerror(err));
    MUTEX_UNLOCK(&stream_.mutex);
    error(RtError::DRIVER_ERROR);
    // Duplex: limit to the direction with fewer frames available.
    if (frames > err) frames = err;

  // Convert "frames available" into "frames short of a full buffer".
  frames = stream_.bufferSize - frames;
  if (frames < 0) frames = 0;

  MUTEX_UNLOCK(&stream_.mutex);
// Perform one blocking I/O cycle: invoke the user callback (if any),
// convert/byte-swap the output buffer and write it to the playback pcm,
// then read from the capture pcm and convert/byte-swap the input buffer.
// EPIPE from a read/write is treated as an xrun: report a warning,
// re-prepare the handle, and continue.
void RtApiAlsa :: tickStream()
  if (stream_.state == STREAM_STOPPED) {
    // Avoid a busy loop while a callback-driven stream is stopped.
    if (stream_.callbackInfo.usingCallback) usleep(50000); // sleep 50 milliseconds

  else if (stream_.callbackInfo.usingCallback) {
    // The callback fills/consumes userBuffer; a nonzero return requests stop.
    RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;
    stopStream = callback(stream_.userBuffer, stream_.bufferSize, stream_.callbackInfo.userData);

  MUTEX_LOCK(&stream_.mutex);

  // The state might change while waiting on a mutex.
  if (stream_.state == STREAM_STOPPED)

  RtAudioFormat format;
  handle = (snd_pcm_t **) stream_.apiHandle;
  if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {

    // Setup parameters and do buffer conversion if necessary.
    if (stream_.doConvertBuffer[0]) {
      convertStreamBuffer(OUTPUT);
      buffer = stream_.deviceBuffer;
      channels = stream_.nDeviceChannels[0];
      format = stream_.deviceFormat[0];
      buffer = stream_.userBuffer;
      channels = stream_.nUserChannels[0];
      format = stream_.userFormat;

    // Do byte swapping if necessary.
    if (stream_.doByteSwap[0])
      byteSwapBuffer(buffer, stream_.bufferSize * channels, format);

    // Write samples to device in interleaved/non-interleaved format.
    if (stream_.deInterleave[0]) {
      // NOTE(review): variable-length array is a gcc extension in C++,
      // not standard.
      void *bufs[channels];
      size_t offset = stream_.bufferSize * formatBytes(format);
      for (int i=0; i<channels; i++)
        bufs[i] = (void *) (buffer + (i * offset));
      err = snd_pcm_writen(handle[0], bufs, stream_.bufferSize);
      err = snd_pcm_writei(handle[0], buffer, stream_.bufferSize);

    if (err < stream_.bufferSize) {
      // Either an error or underrun occurred.
      if (err == -EPIPE) {
        snd_pcm_state_t state = snd_pcm_state(handle[0]);
        if (state == SND_PCM_STATE_XRUN) {
          // Underrun: warn, re-prepare the handle, and carry on.
          sprintf(message_, "RtApiAlsa: underrun detected.");
          error(RtError::WARNING);
          err = snd_pcm_prepare(handle[0]);
          sprintf(message_, "RtApiAlsa: error preparing handle after underrun: %s.",
          MUTEX_UNLOCK(&stream_.mutex);
          error(RtError::DRIVER_ERROR);
          sprintf(message_, "RtApiAlsa: tickStream() error, current state is %s.",
                  snd_pcm_state_name(state));
          MUTEX_UNLOCK(&stream_.mutex);
          error(RtError::DRIVER_ERROR);
        sprintf(message_, "RtApiAlsa: audio write error for device (%s): %s.",
                devices_[stream_.device[0]].name.c_str(), snd_strerror(err));
        MUTEX_UNLOCK(&stream_.mutex);
        error(RtError::DRIVER_ERROR);

  if (stream_.mode == INPUT || stream_.mode == DUPLEX) {

    // Setup parameters.
    if (stream_.doConvertBuffer[1]) {
      buffer = stream_.deviceBuffer;
      channels = stream_.nDeviceChannels[1];
      format = stream_.deviceFormat[1];
      buffer = stream_.userBuffer;
      channels = stream_.nUserChannels[1];
      format = stream_.userFormat;

    // Read samples from device in interleaved/non-interleaved format.
    if (stream_.deInterleave[1]) {
      void *bufs[channels];
      size_t offset = stream_.bufferSize * formatBytes(format);
      for (int i=0; i<channels; i++)
        bufs[i] = (void *) (buffer + (i * offset));
      err = snd_pcm_readn(handle[1], bufs, stream_.bufferSize);
      err = snd_pcm_readi(handle[1], buffer, stream_.bufferSize);

    if (err < stream_.bufferSize) {
      // Either an error or overrun occurred.
      if (err == -EPIPE) {
        snd_pcm_state_t state = snd_pcm_state(handle[1]);
        if (state == SND_PCM_STATE_XRUN) {
          // Overrun: warn, re-prepare the handle, and carry on.
          sprintf(message_, "RtApiAlsa: overrun detected.");
          error(RtError::WARNING);
          err = snd_pcm_prepare(handle[1]);
          sprintf(message_, "RtApiAlsa: error preparing handle after overrun: %s.",
          MUTEX_UNLOCK(&stream_.mutex);
          error(RtError::DRIVER_ERROR);
          sprintf(message_, "RtApiAlsa: tickStream() error, current state is %s.",
                  snd_pcm_state_name(state));
          MUTEX_UNLOCK(&stream_.mutex);
          error(RtError::DRIVER_ERROR);
        sprintf(message_, "RtApiAlsa: audio read error for device (%s): %s.",
                devices_[stream_.device[1]].name.c_str(), snd_strerror(err));
        MUTEX_UNLOCK(&stream_.mutex);
        error(RtError::DRIVER_ERROR);

    // Do byte swapping if necessary.
    if (stream_.doByteSwap[1])
      byteSwapBuffer(buffer, stream_.bufferSize * channels, format);

    // Do buffer conversion if necessary.
    if (stream_.doConvertBuffer[1])
      convertStreamBuffer(INPUT);

  MUTEX_UNLOCK(&stream_.mutex);

  // Honor the callback's stop request outside the mutex.
  if (stream_.callbackInfo.usingCallback && stopStream)
// Register a user callback and spawn the joinable callback thread that
// drives tickStream() (see alsaCallbackHandler).  Warns if a callback is
// already installed; raises THREAD_ERROR if thread creation fails.
void RtApiAlsa :: setStreamCallback(RtAudioCallback callback, void *userData)
  CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
  if ( info->usingCallback ) {
    sprintf(message_, "RtApiAlsa: A callback is already set for this stream!");
    error(RtError::WARNING);

  // Stash the callback state where the handler thread can reach it.
  info->callback = (void *) callback;
  info->userData = userData;
  info->usingCallback = true;
  info->object = (void *) this;

  // Set the thread attributes for joinable and realtime scheduling
  // priority.  The higher priority will only take affect if the
  // program is run as root or suid.
  pthread_attr_t attr;
  pthread_attr_init(&attr);
  pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
  pthread_attr_setschedpolicy(&attr, SCHED_RR);

  int err = pthread_create(&info->thread, &attr, alsaCallbackHandler, &stream_.callbackInfo);
  pthread_attr_destroy(&attr);
  // On failure, clear the flag so the stream falls back to blocking mode.
  info->usingCallback = false;
  sprintf(message_, "RtApiAlsa: error starting callback thread!");
  error(RtError::THREAD_ERROR);
// Remove an installed callback: stop the stream if running, clear the
// usingCallback flag so the handler thread exits its loop, join it, and
// reset the callback bookkeeping.
void RtApiAlsa :: cancelStreamCallback()
  if (stream_.callbackInfo.usingCallback) {

    if (stream_.state == STREAM_RUNNING)

    MUTEX_LOCK(&stream_.mutex);

    // Clearing the flag makes alsaCallbackHandler's while-loop terminate.
    stream_.callbackInfo.usingCallback = false;
    pthread_join(stream_.callbackInfo.thread, NULL);
    stream_.callbackInfo.thread = 0;
    stream_.callbackInfo.callback = NULL;
    stream_.callbackInfo.userData = NULL;

    MUTEX_UNLOCK(&stream_.mutex);
// Callback thread entry point: repeatedly tick the stream until the
// usingCallback flag is cleared (by cancelStreamCallback/closeStream).
// Any RtError escaping tickStream() ends the thread with a message.
extern "C" void *alsaCallbackHandler(void *ptr)
  CallbackInfo *info = (CallbackInfo *) ptr;
  RtApiAlsa *object = (RtApiAlsa *) info->object;
  // Read the flag through a pointer so external clears are observed.
  bool *usingCallback = &info->usingCallback;

  while ( *usingCallback ) {
    object->tickStream();
    catch (RtError &exception) {
      fprintf(stderr, "\nRtApiAlsa: callback thread error (%s) ... closing thread.\n\n",
              exception.getMessageString());
4338 //******************** End of __LINUX_ALSA__ *********************//
4341 #if defined(__WINDOWS_ASIO__) // ASIO API on Windows
4343 // The ASIO API is designed around a callback scheme, so this
4344 // implementation is similar to that used for OS-X CoreAudio and Linux
4345 // Jack. The primary constraint with ASIO is that it only allows
4346 // access to a single driver at a time. Thus, it is not possible to
4347 // have more than one simultaneous RtAudio stream.
4349 // This implementation also requires a number of external ASIO files
4350 // and a few global variables. The ASIO callback scheme does not
4351 // allow for the passing of user data, so we must create a global
4352 // pointer to our callbackInfo structure.
4354 // On unix systems, we make use of a pthread condition variable.
4355 // Since there is no equivalent in Windows, I hacked something based
4356 // on information found in
4357 // http://www.cs.wustl.edu/~schmidt/win32-cv-1.html.
4359 #include "asio/asiosys.h"
4360 #include "asio/asio.h"
4361 #include "asio/asiodrivers.h"
4364 AsioDrivers drivers;
4365 ASIOCallbacks asioCallbacks;
4366 ASIODriverInfo driverInfo;
4367 CallbackInfo *asioCallbackInfo;
4371 ASIOBufferInfo *bufferInfos;
4375 :stopStream(false), bufferInfos(0) {}
// Constructor: raises NO_DEVICES_FOUND if device enumeration (see
// initialize()) discovered no ASIO drivers.
RtApiAsio :: RtApiAsio()
  if (nDevices_ <= 0) {
    sprintf(message_, "RtApiAsio: no Windows ASIO audio drivers found!");
    error(RtError::NO_DEVICES_FOUND);
// Destructor: close any open stream (closeStream handles its own errors
// without throwing).
RtApiAsio :: ~RtApiAsio()
  if ( stream_.mode != UNINITIALIZED ) closeStream();
// Enumerate the installed ASIO drivers into devices_ and set up the
// global ASIODriverInfo used for later ASIOInit calls.
void RtApiAsio :: initialize(void)
  nDevices_ = drivers.asioGetNumDev();
  if (nDevices_ <= 0) return;

  // Create device structures and write device driver names to each.
  for (int i=0; i<nDevices_; i++) {
    if ( drivers.asioGetDriverName( i, name, 128 ) == 0 ) {
      device.name.erase();
      // NOTE(review): strlen(name)+1 copies the terminating NUL into the
      // std::string, making its length one larger than the visible text.
      device.name.append( (const char *)name, strlen(name)+1);
      devices_.push_back(device);
      sprintf(message_, "RtApiAsio: error getting driver name for device index %d!", i);
      error(RtError::WARNING);

  nDevices_ = (int) devices_.size();

  drivers.removeCurrentDriver();
  driverInfo.asioVersion = 2;
  // See note in DirectSound implementation about GetDesktopWindow().
  driverInfo.sysRef = GetForegroundWindow();
// Probe an ASIO driver's capabilities: channel counts, duplex support,
// supported sample rates, and native data format.  The driver is loaded,
// queried, and removed again; results are written into *info and
// info->probed is set on success.  Failures raise DEBUG_WARNINGs.
void RtApiAsio :: probeDeviceInfo(RtApiDevice *info)
  // Don't probe if a stream is already open (ASIO allows only one driver
  // loaded at a time).
  if ( stream_.mode != UNINITIALIZED ) {
    sprintf(message_, "RtApiAsio: unable to probe driver while a stream is open.");
    error(RtError::DEBUG_WARNING);

  if ( !drivers.loadDriver( (char *)info->name.c_str() ) ) {
    sprintf(message_, "RtApiAsio: error loading driver (%s).", info->name.c_str());
    error(RtError::DEBUG_WARNING);

  // Translate the ASE_* error code into a human-readable detail string.
  ASIOError result = ASIOInit( &driverInfo );
  if ( result != ASE_OK ) {
    if ( result == ASE_HWMalfunction )
      sprintf(details, "hardware malfunction");
    else if ( result == ASE_NoMemory )
      sprintf(details, "no memory");
    else if ( result == ASE_NotPresent )
      sprintf(details, "driver/hardware not present");
      sprintf(details, "unspecified");
    sprintf(message_, "RtApiAsio: error (%s) initializing driver (%s).", details, info->name.c_str());
    error(RtError::DEBUG_WARNING);

  // Determine the device channel information.
  long inputChannels, outputChannels;
  result = ASIOGetChannels( &inputChannels, &outputChannels );
  if ( result != ASE_OK ) {
    drivers.removeCurrentDriver();
    sprintf(message_, "RtApiAsio: error getting input/output channel count (%s).", info->name.c_str());
    error(RtError::DEBUG_WARNING);

  info->maxOutputChannels = outputChannels;
  if ( outputChannels > 0 ) info->minOutputChannels = 1;

  info->maxInputChannels = inputChannels;
  if ( inputChannels > 0 ) info->minInputChannels = 1;

  // If device opens for both playback and capture, we determine the channels.
  if (info->maxOutputChannels > 0 && info->maxInputChannels > 0) {
    info->hasDuplexSupport = true;
    info->maxDuplexChannels = (info->maxOutputChannels > info->maxInputChannels) ?
      info->maxInputChannels : info->maxOutputChannels;
    info->minDuplexChannels = (info->minOutputChannels > info->minInputChannels) ?
      info->minInputChannels : info->minOutputChannels;

  // Determine the supported sample rates by testing RtAudio's fixed list.
  info->sampleRates.clear();
  for (unsigned int i=0; i<MAX_SAMPLE_RATES; i++) {
    result = ASIOCanSampleRate( (ASIOSampleRate) SAMPLE_RATES[i] );
    if ( result == ASE_OK )
      info->sampleRates.push_back( SAMPLE_RATES[i] );

  if (info->sampleRates.size() == 0) {
    drivers.removeCurrentDriver();
    sprintf( message_, "RtApiAsio: No supported sample rates found for driver (%s).", info->name.c_str() );
    error(RtError::DEBUG_WARNING);

  // Determine supported data types ... just check first channel and assume rest are the same.
  ASIOChannelInfo channelInfo;
  channelInfo.channel = 0;
  channelInfo.isInput = true;
  if ( info->maxInputChannels <= 0 ) channelInfo.isInput = false;
  result = ASIOGetChannelInfo( &channelInfo );
  if ( result != ASE_OK ) {
    drivers.removeCurrentDriver();
    sprintf(message_, "RtApiAsio: error getting driver (%s) channel information.", info->name.c_str());
    error(RtError::DEBUG_WARNING);

  // Both MSB and LSB variants map to the same RtAudio format; endianness
  // is handled elsewhere.
  if ( channelInfo.type == ASIOSTInt16MSB || channelInfo.type == ASIOSTInt16LSB )
    info->nativeFormats |= RTAUDIO_SINT16;
  else if ( channelInfo.type == ASIOSTInt32MSB || channelInfo.type == ASIOSTInt32LSB )
    info->nativeFormats |= RTAUDIO_SINT32;
  else if ( channelInfo.type == ASIOSTFloat32MSB || channelInfo.type == ASIOSTFloat32LSB )
    info->nativeFormats |= RTAUDIO_FLOAT32;
  else if ( channelInfo.type == ASIOSTFloat64MSB || channelInfo.type == ASIOSTFloat64LSB )
    info->nativeFormats |= RTAUDIO_FLOAT64;

  // Check that we have at least one supported format.
  if (info->nativeFormats == 0) {
    drivers.removeCurrentDriver();
    sprintf(message_, "RtApiAsio: driver (%s) data format not supported by RtAudio.",
            info->name.c_str());
    error(RtError::DEBUG_WARNING);

  info->probed = true;
  drivers.removeCurrentDriver();
// ASIO driver callback: invoked by the driver each time a buffer half
// (index 0 or 1) is ready for processing.  Forwards the index to the
// RtApiAsio object stashed in the global asioCallbackInfo and traps any
// RtError so an exception never propagates back into the driver.
// NOTE(review): lines are elided in this view (the try block / braces are
// not visible) -- verify against the full source.
4527 void bufferSwitch(long index, ASIOBool processNow)
4529   RtApiAsio *object = (RtApiAsio *) asioCallbackInfo->object;
4531     object->callbackEvent( index );
4533   catch (RtError &exception) {
4534     fprintf(stderr, "\nRtApiAsio: callback handler error (%s)!\n\n", exception.getMessageString());
// ASIO driver callback: the driver reports that its sample rate changed
// (typically under external sync).  The stream is stopped via the stored
// RtAudio object and the event is reported to stderr; RtError from
// stopStream() is caught so nothing throws back into the driver.
// NOTE(review): lines are elided in this view (try block / braces not
// visible) -- verify against the full source.
4541 void sampleRateChanged(ASIOSampleRate sRate)
4543   // The ASIO documentation says that this usually only happens during
4544   // external sync.  Audio processing is not stopped by the driver,
4545   // actual sample rate might not have even changed, maybe only the
4546   // sample rate status of an AES/EBU or S/PDIF digital input at the
4549   RtAudio *object = (RtAudio *) asioCallbackInfo->object;
4551     object->stopStream();
4553   catch (RtError &exception) {
4554     fprintf(stderr, "\nRtApiAsio: sampleRateChanged() error (%s)!\n\n", exception.getMessageString());
4558   fprintf(stderr, "\nRtApiAsio: driver reports sample rate changed to %d ... stream stopped!!!", (int) sRate);
// ASIO driver callback: handles driver-to-host messages dispatched on
// `selector`.  Each case reports the event and/or answers a capability
// query; the return value semantics are defined by the ASIO SDK.
// NOTE(review): the switch braces, `ret` variable and return/break lines
// are elided in this view -- verify against the full source.
4561 long asioMessages(long selector, long value, void* message, double* opt)
4565   case kAsioSelectorSupported:
4566     if(value == kAsioResetRequest
4567        || value == kAsioEngineVersion
4568        || value == kAsioResyncRequest
4569        || value == kAsioLatenciesChanged
4570        // The following three were added for ASIO 2.0, you don't
4571        // necessarily have to support them.
4572        || value == kAsioSupportsTimeInfo
4573        || value == kAsioSupportsTimeCode
4574        || value == kAsioSupportsInputMonitor)
4577   case kAsioResetRequest:
4578     // Defer the task and perform the reset of the driver during the
4579     // next "safe" situation.  You cannot reset the driver right now,
4580     // as this code is called from the driver.  Reset the driver is
4581     // done by completely destruct is. I.e. ASIOStop(),
4582     // ASIODisposeBuffers(), Destruction Afterwards you initialize the
4584     fprintf(stderr, "\nRtApiAsio: driver reset requested!!!");
4587   case kAsioResyncRequest:
4588     // This informs the application that the driver encountered some
4589     // non-fatal data loss.  It is used for synchronization purposes
4590     // of different media.  Added mainly to work around the Win16Mutex
4591     // problems in Windows 95/98 with the Windows Multimedia system,
4592     // which could lose data because the Mutex was held too long by
4593     // another thread.  However a driver can issue it in other
4595     fprintf(stderr, "\nRtApiAsio: driver resync requested!!!");
4598   case kAsioLatenciesChanged:
4599     // This will inform the host application that the drivers were
4600     // latencies changed.  Beware, it this does not mean that the
4601     // buffer sizes have changed!  You might need to update internal
4603     fprintf(stderr, "\nRtApiAsio: driver latency may have changed!!!");
4606   case kAsioEngineVersion:
4607     // Return the supported ASIO version of the host application.  If
4608     // a host application does not implement this selector, ASIO 1.0
4609     // is assumed by the driver.
4612   case kAsioSupportsTimeInfo:
4613     // Informs the driver whether the
4614     // asioCallbacks.bufferSwitchTimeInfo() callback is supported.
4615     // For compatibility with ASIO 1.0 drivers the host application
4616     // should always support the "old" bufferSwitch method, too.
4619   case kAsioSupportsTimeCode:
4620     // Informs the driver wether application is interested in time
4621     // code info.  If an application does not need to know about time
4622     // code, the driver has less work to do.
// Open (or extend to duplex) an ASIO stream on `device`.
//   device          index into devices_
//   mode            OUTPUT or INPUT; a second INPUT call on an open OUTPUT
//                   stream upgrades it to DUPLEX (same device required)
//   channels        requested channel count for this mode
//   sampleRate      requested rate, verified then set on the driver
//   format          user data format (device format probed separately)
//   bufferSize      in/out -- clamped to the driver's min/max and, when
//                   granularity is -1, rounded to a power of two
//   numberOfBuffers ignored here; ASIO always double-buffers (nBuffers = 2)
// Returns true on success; on failure emits a warning and returns via the
// (elided) error path that tears down handles/buffers.
// NOTE(review): many physical lines (returns, braces, labels) are elided
// in this view -- verify control flow against the full source.
4629 bool RtApiAsio :: probeDeviceOpen(int device, StreamMode mode, int channels,
4630                                   int sampleRate, RtAudioFormat format,
4631                                   int *bufferSize, int numberOfBuffers)
4633   // For ASIO, a duplex stream MUST use the same driver.
4634   if ( mode == INPUT && stream_.mode == OUTPUT && stream_.device[0] != device ) {
4635     sprintf(message_, "RtApiAsio: duplex stream must use the same device for input and output.");
4636     error(RtError::WARNING);
4640   // Only load the driver once for duplex stream.
4642   if ( mode != INPUT || stream_.mode != OUTPUT ) {
4643     if ( !drivers.loadDriver( (char *)devices_[device].name.c_str() ) ) {
4644       sprintf(message_, "RtApiAsio: error loading driver (%s).", devices_[device].name.c_str());
4645       error(RtError::DEBUG_WARNING);
4649     result = ASIOInit( &driverInfo );
4650     if ( result != ASE_OK ) {
4652       if ( result == ASE_HWMalfunction )
4653         sprintf(details, "hardware malfunction");
4654       else if ( result == ASE_NoMemory )
4655         sprintf(details, "no memory");
4656       else if ( result == ASE_NotPresent )
4657         sprintf(details, "driver/hardware not present");
4659         sprintf(details, "unspecified");
4660       sprintf(message_, "RtApiAsio: error (%s) initializing driver (%s).", details, devices_[device].name.c_str());
4661       error(RtError::DEBUG_WARNING);
4666   // Check the device channel count.
4667   long inputChannels, outputChannels;
4668   result = ASIOGetChannels( &inputChannels, &outputChannels );
4669   if ( result != ASE_OK ) {
4670     drivers.removeCurrentDriver();
4671     sprintf(message_, "RtApiAsio: error getting input/output channel count (%s).",
4672             devices_[device].name.c_str());
4673     error(RtError::DEBUG_WARNING);
4677   if ( ( mode == OUTPUT && channels > outputChannels) ||
4678        ( mode == INPUT && channels > inputChannels) ) {
4679     drivers.removeCurrentDriver();
4680     sprintf(message_, "RtApiAsio: driver (%s) does not support requested channel count (%d).",
4681             devices_[device].name.c_str(), channels);
4682     error(RtError::DEBUG_WARNING);
4685   stream_.nDeviceChannels[mode] = channels;
4686   stream_.nUserChannels[mode] = channels;
4688   // Verify the sample rate is supported.
4689   result = ASIOCanSampleRate( (ASIOSampleRate) sampleRate );
4690   if ( result != ASE_OK ) {
4691     drivers.removeCurrentDriver();
4692     sprintf(message_, "RtApiAsio: driver (%s) does not support requested sample rate (%d).",
4693             devices_[device].name.c_str(), sampleRate);
4694     error(RtError::DEBUG_WARNING);
4698   // Set the sample rate.
4699   result = ASIOSetSampleRate( (ASIOSampleRate) sampleRate );
4700   if ( result != ASE_OK ) {
4701     drivers.removeCurrentDriver();
4702     sprintf(message_, "RtApiAsio: driver (%s) error setting sample rate (%d).",
4703             devices_[device].name.c_str(), sampleRate);
4704     error(RtError::DEBUG_WARNING);
4708   // Determine the driver data type.
4709   ASIOChannelInfo channelInfo;
4710   channelInfo.channel = 0;
4711   if ( mode == OUTPUT ) channelInfo.isInput = false;
4712   else channelInfo.isInput = true;
4713   result = ASIOGetChannelInfo( &channelInfo );
4714   if ( result != ASE_OK ) {
4715     drivers.removeCurrentDriver();
4716     sprintf(message_, "RtApiAsio: driver (%s) error getting data format.",
4717             devices_[device].name.c_str());
4718     error(RtError::DEBUG_WARNING);
4722   // Assuming WINDOWS host is always little-endian.
4723   stream_.doByteSwap[mode] = false;
4724   stream_.userFormat = format;
4725   stream_.deviceFormat[mode] = 0;
4726   if ( channelInfo.type == ASIOSTInt16MSB || channelInfo.type == ASIOSTInt16LSB ) {
4727     stream_.deviceFormat[mode] = RTAUDIO_SINT16;
4728     if ( channelInfo.type == ASIOSTInt16MSB ) stream_.doByteSwap[mode] = true;
4730   else if ( channelInfo.type == ASIOSTInt32MSB || channelInfo.type == ASIOSTInt32LSB ) {
4731     stream_.deviceFormat[mode] = RTAUDIO_SINT32;
4732     if ( channelInfo.type == ASIOSTInt32MSB ) stream_.doByteSwap[mode] = true;
4734   else if ( channelInfo.type == ASIOSTFloat32MSB || channelInfo.type == ASIOSTFloat32LSB ) {
4735     stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
4736     if ( channelInfo.type == ASIOSTFloat32MSB ) stream_.doByteSwap[mode] = true;
4738   else if ( channelInfo.type == ASIOSTFloat64MSB || channelInfo.type == ASIOSTFloat64LSB ) {
4739     stream_.deviceFormat[mode] = RTAUDIO_FLOAT64;
4740     if ( channelInfo.type == ASIOSTFloat64MSB ) stream_.doByteSwap[mode] = true;
4743   if ( stream_.deviceFormat[mode] == 0 ) {
4744     drivers.removeCurrentDriver();
4745     sprintf(message_, "RtApiAsio: driver (%s) data format not supported by RtAudio.",
4746             devices_[device].name.c_str());
4747     error(RtError::DEBUG_WARNING);
4751   // Set the buffer size.  For a duplex stream, this will end up
4752   // setting the buffer size based on the input constraints, which
4754   long minSize, maxSize, preferSize, granularity;
4755   result = ASIOGetBufferSize( &minSize, &maxSize, &preferSize, &granularity );
4756   if ( result != ASE_OK ) {
4757     drivers.removeCurrentDriver();
4758     sprintf(message_, "RtApiAsio: driver (%s) error getting buffer size.",
4759             devices_[device].name.c_str());
4760     error(RtError::DEBUG_WARNING);
4764   if ( *bufferSize < minSize ) *bufferSize = minSize;
4765   else if ( *bufferSize > maxSize ) *bufferSize = maxSize;
4766   else if ( granularity == -1 ) {
4767     // Make sure bufferSize is a power of two.
4768     double power = log10( (double) *bufferSize ) / log10( 2.0 );
4769     *bufferSize = (int) pow( 2.0, floor(power+0.5) );
4770     if ( *bufferSize < minSize ) *bufferSize = minSize;
4771     else if ( *bufferSize > maxSize ) *bufferSize = maxSize;
4772     else *bufferSize = preferSize;
4775   if ( mode == INPUT && stream_.mode == OUTPUT && stream_.bufferSize != *bufferSize )
4776     std::cerr << "Possible input/output buffersize discrepancy!" << std::endl;
4778   stream_.bufferSize = *bufferSize;
4779   stream_.nBuffers = 2;
4781   // ASIO always uses deinterleaved channels.
4782   stream_.deInterleave[mode] = true;
4784   // Allocate, if necessary, our AsioHandle structure for the stream.
4785   AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
4786   if ( handle == 0 ) {
4787     handle = (AsioHandle *) calloc(1, sizeof(AsioHandle));
4788     if ( handle == NULL ) {
4789       drivers.removeCurrentDriver();
4790       sprintf(message_, "RtApiAsio: error allocating AsioHandle memory (%s).",
4791               devices_[device].name.c_str());
4792       error(RtError::DEBUG_WARNING);
4795     handle->bufferInfos = 0;
4796     // Create a manual-reset event.
4797     handle->condition = CreateEvent(NULL,  // no security
4798                                     TRUE,  // manual-reset
4799                                     FALSE, // non-signaled initially
4801     stream_.apiHandle = (void *) handle;
4804   // Create the ASIO internal buffers.  Since RtAudio sets up input
4805   // and output separately, we'll have to dispose of previously
4806   // created output buffers for a duplex stream.
4807   if ( mode == INPUT && stream_.mode == OUTPUT ) {
4808     ASIODisposeBuffers();
4809     if ( handle->bufferInfos ) free( handle->bufferInfos );
4812   // Allocate, initialize, and save the bufferInfos in our stream callbackInfo structure.
4813   int i, nChannels = stream_.nDeviceChannels[0] + stream_.nDeviceChannels[1];
4814   handle->bufferInfos = (ASIOBufferInfo *) malloc( nChannels * sizeof(ASIOBufferInfo) );
4815   if (handle->bufferInfos == NULL) {
4816     sprintf(message_, "RtApiAsio: error allocating bufferInfo memory (%s).",
4817             devices_[device].name.c_str());
4820   ASIOBufferInfo *infos;
4821   infos = handle->bufferInfos;
4822   for ( i=0; i<stream_.nDeviceChannels[0]; i++, infos++ ) {
4823     infos->isInput = ASIOFalse;
4824     infos->channelNum = i;
4825     infos->buffers[0] = infos->buffers[1] = 0;
4827   for ( i=0; i<stream_.nDeviceChannels[1]; i++, infos++ ) {
4828     infos->isInput = ASIOTrue;
4829     infos->channelNum = i;
4830     infos->buffers[0] = infos->buffers[1] = 0;
4833   // Set up the ASIO callback structure and create the ASIO data buffers.
4834   asioCallbacks.bufferSwitch = &bufferSwitch;
4835   asioCallbacks.sampleRateDidChange = &sampleRateChanged;
4836   asioCallbacks.asioMessage = &asioMessages;
4837   asioCallbacks.bufferSwitchTimeInfo = NULL;
4838   result = ASIOCreateBuffers( handle->bufferInfos, nChannels, stream_.bufferSize, &asioCallbacks);
4839   if ( result != ASE_OK ) {
4840     sprintf(message_, "RtApiAsio: driver (%s) error creating buffers.",
4841             devices_[device].name.c_str());
4845   // Set flags for buffer conversion.
4846   stream_.doConvertBuffer[mode] = false;
4847   if (stream_.userFormat != stream_.deviceFormat[mode])
4848     stream_.doConvertBuffer[mode] = true;
4849   if (stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode])
4850     stream_.doConvertBuffer[mode] = true;
4851   if (stream_.nUserChannels[mode] > 1 && stream_.deInterleave[mode])
4852     stream_.doConvertBuffer[mode] = true;
4854   // Allocate necessary internal buffers
4855   if ( stream_.nUserChannels[0] != stream_.nUserChannels[1] ) {
4858     if (stream_.nUserChannels[0] >= stream_.nUserChannels[1])
4859       buffer_bytes = stream_.nUserChannels[0];
4861       buffer_bytes = stream_.nUserChannels[1];
4863     buffer_bytes *= *bufferSize * formatBytes(stream_.userFormat);
4864     if (stream_.userBuffer) free(stream_.userBuffer);
4865     stream_.userBuffer = (char *) calloc(buffer_bytes, 1);
4866     if (stream_.userBuffer == NULL) {
4867       sprintf(message_, "RtApiAsio: error allocating user buffer memory (%s).",
4868               devices_[device].name.c_str());
4873   if ( stream_.doConvertBuffer[mode] ) {
4876     bool makeBuffer = true;
4877     if ( mode == OUTPUT )
4878       buffer_bytes = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
4879     else { // mode == INPUT
4880       buffer_bytes = stream_.nDeviceChannels[1] * formatBytes(stream_.deviceFormat[1]);
4881       if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
4882         long bytes_out = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
4883         if ( buffer_bytes < bytes_out ) makeBuffer = false;
4888       buffer_bytes *= *bufferSize;
4889       if (stream_.deviceBuffer) free(stream_.deviceBuffer);
4890       stream_.deviceBuffer = (char *) calloc(buffer_bytes, 1);
4891       if (stream_.deviceBuffer == NULL) {
4892         sprintf(message_, "RtApiAsio: error allocating device buffer memory (%s).",
4893                 devices_[device].name.c_str());
4899   stream_.device[mode] = device;
4900   stream_.state = STREAM_STOPPED;
4901   if ( stream_.mode == OUTPUT && mode == INPUT )
4902     // We had already set up an output stream.
4903     stream_.mode = DUPLEX;
4905     stream_.mode = mode;
4906   stream_.sampleRate = sampleRate;
4907   asioCallbackInfo = &stream_.callbackInfo;
4908   stream_.callbackInfo.object = (void *) this;
4913   ASIODisposeBuffers();
4914   drivers.removeCurrentDriver();
4917     CloseHandle( handle->condition );
4918     if ( handle->bufferInfos )
4919       free( handle->bufferInfos );
4921     stream_.apiHandle = 0;
4924   if (stream_.userBuffer) {
4925     free(stream_.userBuffer);
4926     stream_.userBuffer = 0;
4929   error(RtError::WARNING);
// Close the open ASIO stream: stop it if running, dispose the driver's
// buffers, unload the driver, and free the AsioHandle (event + bufferInfos)
// plus the user/device conversion buffers.  Errors are reported as WARNING
// only, since this runs from the destructor and must not throw.
// NOTE(review): lines are elided in this view (e.g. the ASIOStop() call
// implied by the STREAM_RUNNING check) -- verify against the full source.
4933 void RtApiAsio :: closeStream()
4935   // We don't want an exception to be thrown here because this
4936   // function is called by our class destructor.  So, do our own
4938   if ( stream_.mode == UNINITIALIZED ) {
4939     sprintf(message_, "RtApiAsio::closeStream(): no open stream to close!");
4940     error(RtError::WARNING);
4944   if (stream_.state == STREAM_RUNNING)
4947   ASIODisposeBuffers();
4948   drivers.removeCurrentDriver();
4950   AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
4952     CloseHandle( handle->condition );
4953     if ( handle->bufferInfos )
4954       free( handle->bufferInfos );
4956     stream_.apiHandle = 0;
4959   if (stream_.userBuffer) {
4960     free(stream_.userBuffer);
4961     stream_.userBuffer = 0;
4964   if (stream_.deviceBuffer) {
4965     free(stream_.deviceBuffer);
4966     stream_.deviceBuffer = 0;
4969   stream_.mode = UNINITIALIZED;
// Register the user callback and its opaque userData for this stream.
// Refuses (with a WARNING) if a callback is already installed.  The
// callback is stored as void* in the stream's CallbackInfo and invoked
// from callbackEvent().
4972 void RtApiAsio :: setStreamCallback(RtAudioCallback callback, void *userData)
4976   if ( stream_.callbackInfo.usingCallback ) {
4977     sprintf(message_, "RtApiAsio: A callback is already set for this stream!");
4978     error(RtError::WARNING);
4982   stream_.callbackInfo.callback = (void *) callback;
4983   stream_.callbackInfo.userData = userData;
4984   stream_.callbackInfo.usingCallback = true;
// Deregister the user callback: stops the stream if running, then clears
// the callback, userData and usingCallback flag under the stream mutex and
// leaves the stream in STREAM_STOPPED.
// NOTE(review): the stop call implied by the STREAM_RUNNING check is elided
// in this view -- verify against the full source.
4987 void RtApiAsio :: cancelStreamCallback()
4991   if (stream_.callbackInfo.usingCallback) {
4993     if (stream_.state == STREAM_RUNNING)
4996     MUTEX_LOCK(&stream_.mutex);
4998     stream_.callbackInfo.usingCallback = false;
4999     stream_.callbackInfo.userData = NULL;
5000     stream_.state = STREAM_STOPPED;
5001     stream_.callbackInfo.callback = NULL;
5003     MUTEX_UNLOCK(&stream_.mutex);
// Start the ASIO stream.  No-op if already running.  On ASIOStart()
// failure the mutex is released before raising DRIVER_ERROR; on success
// the handle's stopStream flag is cleared and state becomes STREAM_RUNNING.
5007 void RtApiAsio :: startStream()
5010   if (stream_.state == STREAM_RUNNING) return;
5012   MUTEX_LOCK(&stream_.mutex);
5014   ASIOError result = ASIOStart();
5015   if ( result != ASE_OK ) {
5016     sprintf(message_, "RtApiAsio: error starting device (%s).",
5017             devices_[stream_.device[0]].name.c_str());
5018     MUTEX_UNLOCK(&stream_.mutex);
5019     error(RtError::DRIVER_ERROR);
5021   AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
5022   handle->stopStream = false;
5023   stream_.state = STREAM_RUNNING;
5025   MUTEX_UNLOCK(&stream_.mutex);
// Stop the ASIO stream.  No-op if already stopped.  The state flag is
// flipped BEFORE taking the mutex so an in-flight callback sees it and
// exits promptly (see the comment below).  ASIOStop() failure unlocks the
// mutex and raises DRIVER_ERROR.
5028 void RtApiAsio :: stopStream()
5031   if (stream_.state == STREAM_STOPPED) return;
5033   // Change the state before the lock to improve shutdown response
5034   // when using a callback.
5035   stream_.state = STREAM_STOPPED;
5036   MUTEX_LOCK(&stream_.mutex);
5038   ASIOError result = ASIOStop();
5039   if ( result != ASE_OK ) {
5040     sprintf(message_, "RtApiAsio: error stopping device (%s).",
5041             devices_[stream_.device[0]].name.c_str());
5042     MUTEX_UNLOCK(&stream_.mutex);
5043     error(RtError::DRIVER_ERROR);
5046   MUTEX_UNLOCK(&stream_.mutex);
// Abort the ASIO stream (immediate stop).
// NOTE(review): the body is entirely elided in this view -- presumably it
// delegates to stopStream(); confirm against the full source.
5049 void RtApiAsio :: abortStream()
// Blocking-mode advance: waits for one buffer period.  Not valid when a
// callback is installed (WARNING).  The mutex is released before blocking
// on the handle's manual-reset event, which callbackEvent() signals; the
// event is then reset for the next period.
5054 void RtApiAsio :: tickStream()
5058   if (stream_.state == STREAM_STOPPED)
5061   if (stream_.callbackInfo.usingCallback) {
5062     sprintf(message_, "RtApiAsio: tickStream() should not be used when a callback function is set!");
5063     error(RtError::WARNING);
5067   AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
5069   MUTEX_LOCK(&stream_.mutex);
5071   // Release the stream_mutex here and wait for the event
5072   // to become signaled by the callback process.
5073   MUTEX_UNLOCK(&stream_.mutex);
5074   WaitForMultipleObjects(1, &handle->condition, FALSE, INFINITE);
5075   ResetEvent( handle->condition );
// Per-buffer processing, called from bufferSwitch() with the driver's
// buffer half index.  Order of operations:
//   1. honor a pending stop request from the previous user-callback return
//   2. invoke the user callback (fresh output data first)
//   3. OUTPUT/DUPLEX: convert/byte-swap the user buffer, then de-interleave
//      into the driver's per-channel output buffers
//   4. INPUT/DUPLEX: interleave the driver's per-channel input buffers,
//      byte-swap, then convert back to the user format
//   5. blocking mode: signal the event tickStream() is waiting on
// NOTE(review): several lines (stop-path body, some braces, one memcpy
// length argument) are elided in this view -- verify against full source.
5078 void RtApiAsio :: callbackEvent(long bufferIndex)
5082   if (stream_.state == STREAM_STOPPED) return;
5084   CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
5085   AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
5086   if ( info->usingCallback && handle->stopStream ) {
5087     // Check if the stream should be stopped (via the previous user
5088     // callback return value).  We stop the stream here, rather than
5089     // after the function call, so that output data can first be
5095   MUTEX_LOCK(&stream_.mutex);
5097   // Invoke user callback first, to get fresh output data.
5098   if ( info->usingCallback ) {
5099     RtAudioCallback callback = (RtAudioCallback) info->callback;
5100     if ( callback(stream_.userBuffer, stream_.bufferSize, info->userData) )
5101       handle->stopStream = true;
5105   int nChannels = stream_.nDeviceChannels[0] + stream_.nDeviceChannels[1];
5106   if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
5108     bufferBytes = stream_.bufferSize * formatBytes(stream_.deviceFormat[0]);
5109     if (stream_.doConvertBuffer[0]) {
5111       convertStreamBuffer(OUTPUT);
5112       if ( stream_.doByteSwap[0] )
5113         byteSwapBuffer(stream_.deviceBuffer,
5114                        stream_.bufferSize * stream_.nDeviceChannels[0],
5115                        stream_.deviceFormat[0]);
5117       // Always de-interleave ASIO output data.
5119       for ( int i=0; i<nChannels; i++ ) {
5120         if ( handle->bufferInfos[i].isInput != ASIOTrue )
5121           memcpy(handle->bufferInfos[i].buffers[bufferIndex],
5122                  &stream_.deviceBuffer[j++*bufferBytes], bufferBytes );
5125     else { // single channel only
5127       if (stream_.doByteSwap[0])
5128         byteSwapBuffer(stream_.userBuffer,
5129                        stream_.bufferSize * stream_.nUserChannels[0],
5130                        stream_.userFormat);
5132       for ( int i=0; i<nChannels; i++ ) {
5133         if ( handle->bufferInfos[i].isInput != ASIOTrue ) {
5134           memcpy(handle->bufferInfos[i].buffers[bufferIndex], stream_.userBuffer, bufferBytes );
5141   if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
5143     bufferBytes = stream_.bufferSize * formatBytes(stream_.deviceFormat[1]);
5144     if (stream_.doConvertBuffer[1]) {
5146       // Always interleave ASIO input data.
5148       for ( int i=0; i<nChannels; i++ ) {
5149         if ( handle->bufferInfos[i].isInput == ASIOTrue )
5150           memcpy(&stream_.deviceBuffer[j++*bufferBytes],
5151                  handle->bufferInfos[i].buffers[bufferIndex],
5155       if ( stream_.doByteSwap[1] )
5156         byteSwapBuffer(stream_.deviceBuffer,
5157                        stream_.bufferSize * stream_.nDeviceChannels[1],
5158                        stream_.deviceFormat[1]);
5159       convertStreamBuffer(INPUT);
5162     else { // single channel only
5163       for ( int i=0; i<nChannels; i++ ) {
5164         if ( handle->bufferInfos[i].isInput == ASIOTrue ) {
5165           memcpy(stream_.userBuffer,
5166                  handle->bufferInfos[i].buffers[bufferIndex],
5172       if (stream_.doByteSwap[1])
5173         byteSwapBuffer(stream_.userBuffer,
5174                        stream_.bufferSize * stream_.nUserChannels[1],
5175                        stream_.userFormat);
5179   if ( !info->usingCallback )
5180     SetEvent( handle->condition );
5182   MUTEX_UNLOCK(&stream_.mutex);
5185 //******************** End of __WINDOWS_ASIO__ *********************//
5188 #if defined(__WINDOWS_DS__) // Windows DirectSound API
5192 // A structure to hold various information related to the DirectSound
5193 // API implementation.
5200 // Declarations for utility functions, callbacks, and structures
5201 // specific to the DirectSound implementation.
5202 static bool CALLBACK deviceCountCallback(LPGUID lpguid,
5203 LPCSTR lpcstrDescription,
5204 LPCSTR lpcstrModule,
5207 static bool CALLBACK deviceInfoCallback(LPGUID lpguid,
5208 LPCSTR lpcstrDescription,
5209 LPCSTR lpcstrModule,
5212 static bool CALLBACK defaultDeviceCallback(LPGUID lpguid,
5213 LPCSTR lpcstrDescription,
5214 LPCSTR lpcstrModule,
5217 static bool CALLBACK deviceIdCallback(LPGUID lpguid,
5218 LPCSTR lpcstrDescription,
5219 LPCSTR lpcstrModule,
5222 static char* getErrorString(int code);
5224 extern "C" unsigned __stdcall callbackHandler(void *ptr);
// Constructor: after base/device initialization (elided in this view),
// raises NO_DEVICES_FOUND if no DirectSound devices were enumerated.
5233 RtApiDs :: RtApiDs()
5237   if (nDevices_ <= 0) {
5238     sprintf(message_, "RtApiDs: no Windows DirectSound audio devices found!");
5239     error(RtError::NO_DEVICES_FOUND);
// Destructor: close any open stream (closeStream() handles all teardown).
5243 RtApiDs :: ~RtApiDs()
5245   if ( stream_.mode != UNINITIALIZED ) closeStream();
// Find the default capture device: enumerate via defaultDeviceCallback
// (which fills info.name), then match that name against devices_ and
// return the matching index.  Enumeration failure produces a WARNING; the
// fallback return (elided in this view) is past the loop.
5248 int RtApiDs :: getDefaultInputDevice(void)
5251   info.name[0] = '\0';
5253   // Enumerate through devices to find the default output.
5254   HRESULT result = DirectSoundCaptureEnumerate((LPDSENUMCALLBACK)defaultDeviceCallback, &info);
5255   if ( FAILED(result) ) {
5256     sprintf(message_, "RtApiDs: Error performing default input device enumeration: %s.",
5257             getErrorString(result));
5258     error(RtError::WARNING);
5262   for ( int i=0; i<nDevices_; i++ ) {
5263     if ( strncmp( info.name, devices_[i].name.c_str(), 64 ) == 0 ) return i;
// Find the default playback device: same pattern as getDefaultInputDevice
// but using DirectSoundEnumerate.  Name match against devices_ yields the
// index; the fallback return is elided in this view.
5270 int RtApiDs :: getDefaultOutputDevice(void)
5273   info.name[0] = '\0';
5275   // Enumerate through devices to find the default output.
5276   HRESULT result = DirectSoundEnumerate((LPDSENUMCALLBACK)defaultDeviceCallback, &info);
5277   if ( FAILED(result) ) {
5278     sprintf(message_, "RtApiDs: Error performing default output device enumeration: %s.",
5279             getErrorString(result));
5280     error(RtError::WARNING);
5284   for ( int i=0; i<nDevices_; i++ )
5285     if ( strncmp( info.name, devices_[i].name.c_str(), 64 ) == 0 ) return i;
// Build devices_: count playback then capture devices, enumerate each set
// into a temporary info vector (playback entries first, then capture, as
// the isInput assignment shows), and keep only entries the callbacks
// marked isValid.  Enumeration failures are fatal (DRIVER_ERROR).
// NOTE(review): the `count = ins + outs` style line is elided in this view.
5290 void RtApiDs :: initialize(void)
5292   int i, ins = 0, outs = 0, count = 0;
5296   // Count DirectSound devices.
5297   result = DirectSoundEnumerate((LPDSENUMCALLBACK)deviceCountCallback, &outs);
5298   if ( FAILED(result) ) {
5299     sprintf(message_, "RtApiDs: Unable to enumerate through sound playback devices: %s.",
5300             getErrorString(result));
5301     error(RtError::DRIVER_ERROR);
5304   // Count DirectSoundCapture devices.
5305   result = DirectSoundCaptureEnumerate((LPDSENUMCALLBACK)deviceCountCallback, &ins);
5306   if ( FAILED(result) ) {
5307     sprintf(message_, "RtApiDs: Unable to enumerate through sound capture devices: %s.",
5308             getErrorString(result));
5309     error(RtError::DRIVER_ERROR);
5313   if (count == 0) return;
5315   std::vector<enum_info> info(count);
5316   for (i=0; i<count; i++) {
5317     info[i].name[0] = '\0';
5318     if (i < outs) info[i].isInput = false;
5319     else info[i].isInput = true;
5322   // Get playback device info and check capabilities.
5323   result = DirectSoundEnumerate((LPDSENUMCALLBACK)deviceInfoCallback, &info[0]);
5324   if ( FAILED(result) ) {
5325     sprintf(message_, "RtApiDs: Unable to enumerate through sound playback devices: %s.",
5326             getErrorString(result));
5327     error(RtError::DRIVER_ERROR);
5330   // Get capture device info and check capabilities.
5331   result = DirectSoundCaptureEnumerate((LPDSENUMCALLBACK)deviceInfoCallback, &info[0]);
5332   if ( FAILED(result) ) {
5333     sprintf(message_, "RtApiDs: Unable to enumerate through sound capture devices: %s.",
5334             getErrorString(result));
5335     error(RtError::DRIVER_ERROR);
5338   // Create device structures for valid devices and write device names
5339   // to each.  Devices are considered invalid if they cannot be
5340   // opened, they report < 1 supported channels, or they report no
5341   // supported data (capture only).
5344   for (i=0; i<count; i++) {
5345     if ( info[i].isValid ) {
5346       device.name.erase();
5347       device.name.append( (const char *)info[i].name, strlen(info[i].name)+1);
5348       devices_.push_back(device);
5352   nDevices_ = devices_.size();
// Probe a DirectSound device's capabilities into `info`:
//   1. locate the device id by name (deviceIdCallback), capture side first
//   2. capture probe: create a DSoundCapture object, read DSCCAPS, derive
//      channel counts, native formats and sample rates from dwFormats bits
//   3. playback probe: create a DSound object, read DSCAPS, derive output
//      channels, rate range and 8/16-bit format flags
//   4. validate (channels/rates/formats present) and set duplex fields
// Uses goto labels (playback_probe / check_parameters, partly elided in
// this view) to fall through when one side is unavailable.
5356 void RtApiDs :: probeDeviceInfo(RtApiDevice *info)
5359   strncpy( dsinfo.name, info->name.c_str(), 64 );
5360   dsinfo.isValid = false;
5362   // Enumerate through input devices to find the id (if it exists).
5363   HRESULT result = DirectSoundCaptureEnumerate((LPDSENUMCALLBACK)deviceIdCallback, &dsinfo);
5364   if ( FAILED(result) ) {
5365     sprintf(message_, "RtApiDs: Error performing input device id enumeration: %s.",
5366             getErrorString(result));
5367     error(RtError::WARNING);
5371   // Do capture probe first.
5372   if ( dsinfo.isValid == false )
5373     goto playback_probe;
5375   LPDIRECTSOUNDCAPTURE input;
5376   result = DirectSoundCaptureCreate( dsinfo.id, &input, NULL );
5377   if ( FAILED(result) ) {
5378     sprintf(message_, "RtApiDs: Could not create capture object (%s): %s.",
5379             info->name.c_str(), getErrorString(result));
5380     error(RtError::WARNING);
5381     goto playback_probe;
5385   in_caps.dwSize = sizeof(in_caps);
5386   result = input->GetCaps( &in_caps );
5387   if ( FAILED(result) ) {
5389     sprintf(message_, "RtApiDs: Could not get capture capabilities (%s): %s.",
5390             info->name.c_str(), getErrorString(result));
5391     error(RtError::WARNING);
5392     goto playback_probe;
5395   // Get input channel information.
5396   info->minInputChannels = 1;
5397   info->maxInputChannels = in_caps.dwChannels;
5399   // Get sample rate and format information.
5400   info->sampleRates.clear();
5401   if( in_caps.dwChannels == 2 ) {
5402     if( in_caps.dwFormats & WAVE_FORMAT_1S16 ) info->nativeFormats |= RTAUDIO_SINT16;
5403     if( in_caps.dwFormats & WAVE_FORMAT_2S16 ) info->nativeFormats |= RTAUDIO_SINT16;
5404     if( in_caps.dwFormats & WAVE_FORMAT_4S16 ) info->nativeFormats |= RTAUDIO_SINT16;
5405     if( in_caps.dwFormats & WAVE_FORMAT_1S08 ) info->nativeFormats |= RTAUDIO_SINT8;
5406     if( in_caps.dwFormats & WAVE_FORMAT_2S08 ) info->nativeFormats |= RTAUDIO_SINT8;
5407     if( in_caps.dwFormats & WAVE_FORMAT_4S08 ) info->nativeFormats |= RTAUDIO_SINT8;
5409     if ( info->nativeFormats & RTAUDIO_SINT16 ) {
5410       if( in_caps.dwFormats & WAVE_FORMAT_1S16 ) info->sampleRates.push_back( 11025 );
5411       if( in_caps.dwFormats & WAVE_FORMAT_2S16 ) info->sampleRates.push_back( 22050 );
5412       if( in_caps.dwFormats & WAVE_FORMAT_4S16 ) info->sampleRates.push_back( 44100 );
5414     else if ( info->nativeFormats & RTAUDIO_SINT8 ) {
5415       if( in_caps.dwFormats & WAVE_FORMAT_1S08 ) info->sampleRates.push_back( 11025 );
5416       if( in_caps.dwFormats & WAVE_FORMAT_2S08 ) info->sampleRates.push_back( 22050 );
5417       if( in_caps.dwFormats & WAVE_FORMAT_4S08 ) info->sampleRates.push_back( 44100 );
5420   else if ( in_caps.dwChannels == 1 ) {
5421     if( in_caps.dwFormats & WAVE_FORMAT_1M16 ) info->nativeFormats |= RTAUDIO_SINT16;
5422     if( in_caps.dwFormats & WAVE_FORMAT_2M16 ) info->nativeFormats |= RTAUDIO_SINT16;
5423     if( in_caps.dwFormats & WAVE_FORMAT_4M16 ) info->nativeFormats |= RTAUDIO_SINT16;
5424     if( in_caps.dwFormats & WAVE_FORMAT_1M08 ) info->nativeFormats |= RTAUDIO_SINT8;
5425     if( in_caps.dwFormats & WAVE_FORMAT_2M08 ) info->nativeFormats |= RTAUDIO_SINT8;
5426     if( in_caps.dwFormats & WAVE_FORMAT_4M08 ) info->nativeFormats |= RTAUDIO_SINT8;
5428     if ( info->nativeFormats & RTAUDIO_SINT16 ) {
5429       if( in_caps.dwFormats & WAVE_FORMAT_1M16 ) info->sampleRates.push_back( 11025 );
5430       if( in_caps.dwFormats & WAVE_FORMAT_2M16 ) info->sampleRates.push_back( 22050 );
5431       if( in_caps.dwFormats & WAVE_FORMAT_4M16 ) info->sampleRates.push_back( 44100 );
5433     else if ( info->nativeFormats & RTAUDIO_SINT8 ) {
5434       if( in_caps.dwFormats & WAVE_FORMAT_1M08 ) info->sampleRates.push_back( 11025 );
5435       if( in_caps.dwFormats & WAVE_FORMAT_2M08 ) info->sampleRates.push_back( 22050 );
5436       if( in_caps.dwFormats & WAVE_FORMAT_4M08 ) info->sampleRates.push_back( 44100 );
5439   else info->minInputChannels = 0; // technically, this would be an error
5445   dsinfo.isValid = false;
5447   // Enumerate through output devices to find the id (if it exists).
5448   result = DirectSoundEnumerate((LPDSENUMCALLBACK)deviceIdCallback, &dsinfo);
5449   if ( FAILED(result) ) {
5450     sprintf(message_, "RtApiDs: Error performing output device id enumeration: %s.",
5451             getErrorString(result));
5452     error(RtError::WARNING);
5456   // Now do playback probe.
5457   if ( dsinfo.isValid == false )
5458     goto check_parameters;
5460   LPDIRECTSOUND output;
5462   result = DirectSoundCreate( dsinfo.id, &output, NULL );
5463   if ( FAILED(result) ) {
5464     sprintf(message_, "RtApiDs: Could not create playback object (%s): %s.",
5465             info->name.c_str(), getErrorString(result));
5466     error(RtError::WARNING);
5467     goto check_parameters;
5470   out_caps.dwSize = sizeof(out_caps);
5471   result = output->GetCaps( &out_caps );
5472   if ( FAILED(result) ) {
5474     sprintf(message_, "RtApiDs: Could not get playback capabilities (%s): %s.",
5475             info->name.c_str(), getErrorString(result));
5476     error(RtError::WARNING);
5477     goto check_parameters;
5480   // Get output channel information.
5481   info->minOutputChannels = 1;
5482   info->maxOutputChannels = ( out_caps.dwFlags & DSCAPS_PRIMARYSTEREO ) ? 2 : 1;
5484   // Get sample rate information.  Use capture device rate information
5486   if ( info->sampleRates.size() == 0 ) {
5487     info->sampleRates.push_back( (int) out_caps.dwMinSecondarySampleRate );
5488     info->sampleRates.push_back( (int) out_caps.dwMaxSecondarySampleRate );
5491   // Check input rates against output rate range.
5492   // NOTE(review): `i` is unsigned, so the condition `i>=0` is always true
5493   // and `i--` wraps past zero -- this loop can underflow and index out of
5494   // bounds once i reaches 0.  Should iterate with a signed index or stop
5495   // before wrapping.  Left untouched here (documentation-only pass).
5492   for ( unsigned int i=info->sampleRates.size()-1; i>=0; i-- ) {
5493     if ( (unsigned int) info->sampleRates[i] > out_caps.dwMaxSecondarySampleRate )
5494       info->sampleRates.erase( info->sampleRates.begin() + i );
5496   while ( info->sampleRates.size() > 0 &&
5497           ((unsigned int) info->sampleRates[0] < out_caps.dwMinSecondarySampleRate) ) {
5498     info->sampleRates.erase( info->sampleRates.begin() );
5502   // Get format information.
5503   if ( out_caps.dwFlags & DSCAPS_PRIMARY16BIT ) info->nativeFormats |= RTAUDIO_SINT16;
5504   if ( out_caps.dwFlags & DSCAPS_PRIMARY8BIT ) info->nativeFormats |= RTAUDIO_SINT8;
5509   if ( info->maxInputChannels == 0 && info->maxOutputChannels == 0 ) {
5510     sprintf(message_, "RtApiDs: no reported input or output channels for device (%s).",
5511             info->name.c_str());
5512     error(RtError::DEBUG_WARNING);
5515   if ( info->sampleRates.size() == 0 || info->nativeFormats == 0 ) {
5516     sprintf(message_, "RtApiDs: no reported sample rates or data formats for device (%s).",
5517             info->name.c_str());
5518     error(RtError::DEBUG_WARNING);
5522   // Determine duplex status.
5523   if (info->maxInputChannels < info->maxOutputChannels)
5524     info->maxDuplexChannels = info->maxInputChannels;
5526     info->maxDuplexChannels = info->maxOutputChannels;
5527   if (info->minInputChannels < info->minOutputChannels)
5528     info->minDuplexChannels = info->minInputChannels;
5530     info->minDuplexChannels = info->minOutputChannels;
5532   if ( info->maxDuplexChannels > 0 ) info->hasDuplexSupport = true;
5533   else info->hasDuplexSupport = false;
5535   info->probed = true;
// Open the DirectSound playback (mode == OUTPUT) and/or capture
// (mode == INPUT) side of a stream on the given device index.
//
// Parameters (as visible here):
//   device          - index into the devices_ vector probed earlier
//   mode            - OUTPUT or INPUT; called once per direction for duplex
//   channels        - requested channel count
//   sampleRate      - requested sample rate in Hz
//   format          - requested RtAudioFormat (only SINT8/SINT16 native here)
//   bufferSize      - in/out: requested frames per buffer; updated to the
//                     frame count actually obtained from DirectSound
//   numberOfBuffers - requested buffer count (clamped to a minimum of two)
//
// On failure the cleanup section near the bottom releases any DirectSound
// objects created so far, frees the handle/user buffers and warns.
//
// NOTE(review): this listing is a partially-elided copy (the leading
// numbers are stale source line numbers and some original lines --
// braces, else-branches, returns -- are missing), so only the visible
// statements are documented.
5540 bool RtApiDs :: probeDeviceOpen( int device, StreamMode mode, int channels,
5541 int sampleRate, RtAudioFormat format,
5542 int *bufferSize, int numberOfBuffers)
5545 HWND hWnd = GetForegroundWindow();
5547 // According to a note in PortAudio, using GetDesktopWindow()
5548 // instead of GetForegroundWindow() is supposed to avoid problems
5549 // that occur when the application's window is not the foreground
5550 // window. Also, if the application window closes before the
5551 // DirectSound buffer, DirectSound can crash. However, for console
5552 // applications, no sound was produced when using GetDesktopWindow().
5558 // Check the numberOfBuffers parameter and limit the lowest value to
5559 // two. This is a judgement call and a value of two is probably too
5560 // low for capture, but it should work for playback.
5561 if (numberOfBuffers < 2)
5564 nBuffers = numberOfBuffers;
5566 // Define the wave format structure (16-bit PCM, srate, channels)
5567 WAVEFORMATEX waveFormat;
5568 ZeroMemory(&waveFormat, sizeof(WAVEFORMATEX));
5569 waveFormat.wFormatTag = WAVE_FORMAT_PCM;
5570 waveFormat.nChannels = channels;
5571 waveFormat.nSamplesPerSec = (unsigned long) sampleRate;
5573 // Determine the data format.
// Fall back to the other bit depth when the requested one is not a
// native format; the conversion flags are set further below.
5574 if ( devices_[device].nativeFormats ) { // 8-bit and/or 16-bit support
5575 if ( format == RTAUDIO_SINT8 ) {
5576 if ( devices_[device].nativeFormats & RTAUDIO_SINT8 )
5577 waveFormat.wBitsPerSample = 8;
5579 waveFormat.wBitsPerSample = 16;
5582 if ( devices_[device].nativeFormats & RTAUDIO_SINT16 )
5583 waveFormat.wBitsPerSample = 16;
5585 waveFormat.wBitsPerSample = 8;
5589 sprintf(message_, "RtApiDs: no reported data formats for device (%s).",
5590 devices_[device].name.c_str());
5591 error(RtError::DEBUG_WARNING);
5595 waveFormat.nBlockAlign = waveFormat.nChannels * waveFormat.wBitsPerSample / 8;
5596 waveFormat.nAvgBytesPerSec = waveFormat.nSamplesPerSec * waveFormat.nBlockAlign;
// ohandle/bhandle hold the DirectSound object and buffer for the mode
// being opened; they are stashed into the DsHandle pair at the end.
5599 void *ohandle = 0, *bhandle = 0;
5600 strncpy( dsinfo.name, devices_[device].name.c_str(), 64 );
5601 dsinfo.isValid = false;
5602 if ( mode == OUTPUT ) {
5604 if ( devices_[device].maxOutputChannels < channels ) {
5605 sprintf(message_, "RtApiDs: requested channels (%d) > than supported (%d) by device (%s).",
5606 channels, devices_[device].maxOutputChannels, devices_[device].name.c_str());
5607 error(RtError::DEBUG_WARNING);
5611 // Enumerate through output devices to find the id (if it exists).
5612 result = DirectSoundEnumerate((LPDSENUMCALLBACK)deviceIdCallback, &dsinfo);
5613 if ( FAILED(result) ) {
5614 sprintf(message_, "RtApiDs: Error performing output device id enumeration: %s.",
5615 getErrorString(result));
5616 error(RtError::DEBUG_WARNING);
5620 if ( dsinfo.isValid == false ) {
5621 sprintf(message_, "RtApiDs: output device (%s) id not found!", devices_[device].name.c_str());
5622 error(RtError::DEBUG_WARNING);
5626 LPGUID id = dsinfo.id;
5627 LPDIRECTSOUND object;
5628 LPDIRECTSOUNDBUFFER buffer;
5629 DSBUFFERDESC bufferDescription;
5631 result = DirectSoundCreate( id, &object, NULL );
5632 if ( FAILED(result) ) {
5633 sprintf(message_, "RtApiDs: Could not create playback object (%s): %s.",
5634 devices_[device].name.c_str(), getErrorString(result));
5635 error(RtError::DEBUG_WARNING);
5639 // Set cooperative level to DSSCL_EXCLUSIVE
// Exclusive level is required so the primary buffer format (below)
// can actually be changed.
5640 result = object->SetCooperativeLevel(hWnd, DSSCL_EXCLUSIVE);
5641 if ( FAILED(result) ) {
5643 sprintf(message_, "RtApiDs: Unable to set cooperative level (%s): %s.",
5644 devices_[device].name.c_str(), getErrorString(result));
5645 error(RtError::WARNING);
5649 // Even though we will write to the secondary buffer, we need to
5650 // access the primary buffer to set the correct output format
5651 // (since the default is 8-bit, 22 kHz!). Setup the DS primary
5652 // buffer description.
5653 ZeroMemory(&bufferDescription, sizeof(DSBUFFERDESC));
5654 bufferDescription.dwSize = sizeof(DSBUFFERDESC);
5655 bufferDescription.dwFlags = DSBCAPS_PRIMARYBUFFER;
5656 // Obtain the primary buffer
5657 result = object->CreateSoundBuffer(&bufferDescription, &buffer, NULL);
5658 if ( FAILED(result) ) {
5660 sprintf(message_, "RtApiDs: Unable to access primary buffer (%s): %s.",
5661 devices_[device].name.c_str(), getErrorString(result));
5662 error(RtError::WARNING);
5666 // Set the primary DS buffer sound format.
5667 result = buffer->SetFormat(&waveFormat);
5668 if ( FAILED(result) ) {
5670 sprintf(message_, "RtApiDs: Unable to set primary buffer format (%s): %s.",
5671 devices_[device].name.c_str(), getErrorString(result));
5672 error(RtError::WARNING);
5676 // Setup the secondary DS buffer description.
5677 buffer_size = channels * *bufferSize * nBuffers * waveFormat.wBitsPerSample / 8;
5678 ZeroMemory(&bufferDescription, sizeof(DSBUFFERDESC));
5679 bufferDescription.dwSize = sizeof(DSBUFFERDESC);
5680 bufferDescription.dwFlags = ( DSBCAPS_STICKYFOCUS |
5681 DSBCAPS_GETCURRENTPOSITION2 |
5682 DSBCAPS_LOCHARDWARE ); // Force hardware mixing
5683 bufferDescription.dwBufferBytes = buffer_size;
5684 bufferDescription.lpwfxFormat = &waveFormat;
5686 // Try to create the secondary DS buffer. If that doesn't work,
5687 // try to use software mixing. Otherwise, there's a problem.
5688 result = object->CreateSoundBuffer(&bufferDescription, &buffer, NULL);
5689 if ( FAILED(result) ) {
5690 bufferDescription.dwFlags = ( DSBCAPS_STICKYFOCUS |
5691 DSBCAPS_GETCURRENTPOSITION2 |
5692 DSBCAPS_LOCSOFTWARE ); // Force software mixing
5693 result = object->CreateSoundBuffer(&bufferDescription, &buffer, NULL);
5694 if ( FAILED(result) ) {
5696 sprintf(message_, "RtApiDs: Unable to create secondary DS buffer (%s): %s.",
5697 devices_[device].name.c_str(), getErrorString(result));
5698 error(RtError::WARNING);
5703 // Get the buffer size ... might be different from what we specified.
5705 dsbcaps.dwSize = sizeof(DSBCAPS);
5706 buffer->GetCaps(&dsbcaps);
5707 buffer_size = dsbcaps.dwBufferBytes;
5709 // Lock the DS buffer
5710 result = buffer->Lock(0, buffer_size, &audioPtr, &dataLen, NULL, NULL, 0);
5711 if ( FAILED(result) ) {
5714 sprintf(message_, "RtApiDs: Unable to lock buffer (%s): %s.",
5715 devices_[device].name.c_str(), getErrorString(result));
5716 error(RtError::WARNING);
5720 // Zero the DS buffer
// Start from silence so stale buffer contents are never played.
5721 ZeroMemory(audioPtr, dataLen);
5723 // Unlock the DS buffer
5724 result = buffer->Unlock(audioPtr, dataLen, NULL, 0);
5725 if ( FAILED(result) ) {
5728 sprintf(message_, "RtApiDs: Unable to unlock buffer(%s): %s.",
5729 devices_[device].name.c_str(), getErrorString(result));
5730 error(RtError::WARNING);
5734 ohandle = (void *) object;
5735 bhandle = (void *) buffer;
5736 stream_.nDeviceChannels[0] = channels;
// Capture side: mirrors the playback setup above but uses the
// DirectSoundCapture API (no primary buffer / cooperative level step).
5739 if ( mode == INPUT ) {
5741 if ( devices_[device].maxInputChannels < channels )
5744 // Enumerate through input devices to find the id (if it exists).
5745 result = DirectSoundCaptureEnumerate((LPDSENUMCALLBACK)deviceIdCallback, &dsinfo);
5746 if ( FAILED(result) ) {
5747 sprintf(message_, "RtApiDs: Error performing input device id enumeration: %s.",
5748 getErrorString(result));
5749 error(RtError::DEBUG_WARNING);
5753 if ( dsinfo.isValid == false ) {
5754 sprintf(message_, "RtAudioDS: input device (%s) id not found!", devices_[device].name.c_str());
5755 error(RtError::DEBUG_WARNING);
5759 LPGUID id = dsinfo.id;
5760 LPDIRECTSOUNDCAPTURE object;
5761 LPDIRECTSOUNDCAPTUREBUFFER buffer;
5762 DSCBUFFERDESC bufferDescription;
5764 result = DirectSoundCaptureCreate( id, &object, NULL );
5765 if ( FAILED(result) ) {
5766 sprintf(message_, "RtApiDs: Could not create capture object (%s): %s.",
5767 devices_[device].name.c_str(), getErrorString(result));
5768 error(RtError::WARNING);
5772 // Setup the secondary DS buffer description.
5773 buffer_size = channels * *bufferSize * nBuffers * waveFormat.wBitsPerSample / 8;
5774 ZeroMemory(&bufferDescription, sizeof(DSCBUFFERDESC));
5775 bufferDescription.dwSize = sizeof(DSCBUFFERDESC);
5776 bufferDescription.dwFlags = 0;
5777 bufferDescription.dwReserved = 0;
5778 bufferDescription.dwBufferBytes = buffer_size;
5779 bufferDescription.lpwfxFormat = &waveFormat;
5781 // Create the capture buffer.
5782 result = object->CreateCaptureBuffer(&bufferDescription, &buffer, NULL);
5783 if ( FAILED(result) ) {
5785 sprintf(message_, "RtApiDs: Unable to create capture buffer (%s): %s.",
5786 devices_[device].name.c_str(), getErrorString(result));
5787 error(RtError::WARNING);
5791 // Lock the capture buffer
5792 result = buffer->Lock(0, buffer_size, &audioPtr, &dataLen, NULL, NULL, 0);
5793 if ( FAILED(result) ) {
5796 sprintf(message_, "RtApiDs: Unable to lock capture buffer (%s): %s.",
5797 devices_[device].name.c_str(), getErrorString(result));
5798 error(RtError::WARNING);
5803 ZeroMemory(audioPtr, dataLen);
5805 // Unlock the buffer
5806 result = buffer->Unlock(audioPtr, dataLen, NULL, 0);
5807 if ( FAILED(result) ) {
5810 sprintf(message_, "RtApiDs: Unable to unlock capture buffer (%s): %s.",
5811 devices_[device].name.c_str(), getErrorString(result));
5812 error(RtError::WARNING);
5816 ohandle = (void *) object;
5817 bhandle = (void *) buffer;
5818 stream_.nDeviceChannels[1] = channels;
// Record stream bookkeeping: formats, channel counts, and the frame
// count actually granted (derived from the obtained byte size).
5821 stream_.userFormat = format;
5822 if ( waveFormat.wBitsPerSample == 8 )
5823 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
5825 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
5826 stream_.nUserChannels[mode] = channels;
5827 *bufferSize = buffer_size / (channels * nBuffers * waveFormat.wBitsPerSample / 8);
5828 stream_.bufferSize = *bufferSize;
5830 // Set flags for buffer conversion
5831 stream_.doConvertBuffer[mode] = false;
5832 if (stream_.userFormat != stream_.deviceFormat[mode])
5833 stream_.doConvertBuffer[mode] = true;
5834 if (stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode])
5835 stream_.doConvertBuffer[mode] = true;
5837 // Allocate necessary internal buffers
5838 if ( stream_.nUserChannels[0] != stream_.nUserChannels[1] ) {
// The user buffer is sized for the larger of the two channel counts
// so one allocation can serve both directions of a duplex stream.
5841 if (stream_.nUserChannels[0] >= stream_.nUserChannels[1])
5842 buffer_bytes = stream_.nUserChannels[0];
5844 buffer_bytes = stream_.nUserChannels[1];
5846 buffer_bytes *= *bufferSize * formatBytes(stream_.userFormat);
5847 if (stream_.userBuffer) free(stream_.userBuffer);
5848 stream_.userBuffer = (char *) calloc(buffer_bytes, 1);
5849 if (stream_.userBuffer == NULL) {
5850 sprintf(message_, "RtApiDs: error allocating user buffer memory (%s).",
5851 devices_[device].name.c_str());
5856 if ( stream_.doConvertBuffer[mode] ) {
5859 bool makeBuffer = true;
5860 if ( mode == OUTPUT )
5861 buffer_bytes = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
5862 else { // mode == INPUT
5863 buffer_bytes = stream_.nDeviceChannels[1] * formatBytes(stream_.deviceFormat[1]);
// Reuse an already-allocated (larger) device buffer from the output
// side of a duplex stream rather than replacing it with a smaller one.
5864 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
5865 long bytes_out = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
5866 if ( buffer_bytes < bytes_out ) makeBuffer = false;
5871 buffer_bytes *= *bufferSize;
5872 if (stream_.deviceBuffer) free(stream_.deviceBuffer);
5873 stream_.deviceBuffer = (char *) calloc(buffer_bytes, 1);
5874 if (stream_.deviceBuffer == NULL) {
5875 sprintf(message_, "RtApiDs: error allocating device buffer memory (%s).",
5876 devices_[device].name.c_str());
5882 // Allocate our DsHandle structures for the stream.
5884 if ( stream_.apiHandle == 0 ) {
5885 handles = (DsHandle *) calloc(2, sizeof(DsHandle));
5886 if ( handles == NULL ) {
5887 sprintf(message_, "RtApiDs: Error allocating DsHandle memory (%s).",
5888 devices_[device].name.c_str());
5891 handles[0].object = 0;
5892 handles[1].object = 0;
5893 stream_.apiHandle = (void *) handles;
5896 handles = (DsHandle *) stream_.apiHandle;
5897 handles[mode].object = ohandle;
5898 handles[mode].buffer = bhandle;
5900 stream_.device[mode] = device;
5901 stream_.state = STREAM_STOPPED;
5902 if ( stream_.mode == OUTPUT && mode == INPUT )
5903 // We had already set up an output stream.
5904 stream_.mode = DUPLEX;
5906 stream_.mode = mode;
5907 stream_.nBuffers = nBuffers;
5908 stream_.sampleRate = sampleRate;
// Error/cleanup path: release whichever DirectSound objects were
// created before the failure, then free the handle and user buffers.
5914 if (handles[0].object) {
5915 LPDIRECTSOUND object = (LPDIRECTSOUND) handles[0].object;
5916 LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handles[0].buffer;
5917 if (buffer) buffer->Release();
5920 if (handles[1].object) {
5921 LPDIRECTSOUNDCAPTURE object = (LPDIRECTSOUNDCAPTURE) handles[1].object;
5922 LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handles[1].buffer;
5923 if (buffer) buffer->Release();
5927 stream_.apiHandle = 0;
5930 if (stream_.userBuffer) {
5931 free(stream_.userBuffer);
5932 stream_.userBuffer = 0;
5935 error(RtError::WARNING);
// Register a user callback and spawn the Windows thread (callbackHandler)
// that will drive the stream. Warns and leaves the existing callback in
// place if one is already registered.
5939 void RtApiDs :: setStreamCallback(RtAudioCallback callback, void *userData)
5943 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
5944 if ( info->usingCallback ) {
5945 sprintf(message_, "RtApiDs: A callback is already set for this stream!");
5946 error(RtError::WARNING);
5950 info->callback = (void *) callback;
5951 info->userData = userData;
5952 info->usingCallback = true;
5953 info->object = (void *) this;
// _beginthreadex (rather than CreateThread) so the C runtime is
// initialized correctly for the new thread.
5956 info->thread = _beginthreadex(NULL, 0, &callbackHandler,
5957 &stream_.callbackInfo, 0, &thread_id);
5958 if (info->thread == 0) {
5959 info->usingCallback = false;
5960 sprintf(message_, "RtApiDs: error starting callback thread!");
5961 error(RtError::THREAD_ERROR);
5964 // When spawning multiple threads in quick succession, it appears to be
5965 // necessary to wait a bit for each to initialize ... another windoism!
// Unregister the user callback: stop the stream if running, signal the
// callback thread to exit, join it, and clear the callback bookkeeping.
5969 void RtApiDs :: cancelStreamCallback()
5973 if (stream_.callbackInfo.usingCallback) {
5975 if (stream_.state == STREAM_RUNNING)
5978 MUTEX_LOCK(&stream_.mutex);
// Clearing usingCallback makes the callback thread leave its loop;
// wait for it to terminate before releasing the thread handle.
5980 stream_.callbackInfo.usingCallback = false;
5981 WaitForSingleObject( (HANDLE)stream_.callbackInfo.thread, INFINITE );
5982 CloseHandle( (HANDLE)stream_.callbackInfo.thread );
5983 stream_.callbackInfo.thread = 0;
5984 stream_.callbackInfo.callback = NULL;
5985 stream_.callbackInfo.userData = NULL;
5987 MUTEX_UNLOCK(&stream_.mutex);
// Tear down the open stream: join the callback thread (if any), release
// the DirectSound playback/capture objects, free the internal buffers,
// and mark the stream UNINITIALIZED. Called from the class destructor,
// so it only warns (never throws) when there is no open stream.
5991 void RtApiDs :: closeStream()
5993 // We don't want an exception to be thrown here because this
5994 // function is called by our class destructor. So, do our own
5996 if ( stream_.mode == UNINITIALIZED ) {
5997 sprintf(message_, "RtApiDs::closeStream(): no open stream to close!");
5998 error(RtError::WARNING);
6002 if (stream_.callbackInfo.usingCallback) {
6003 stream_.callbackInfo.usingCallback = false;
6004 WaitForSingleObject( (HANDLE)stream_.callbackInfo.thread, INFINITE );
6005 CloseHandle( (HANDLE)stream_.callbackInfo.thread );
// Release playback (slot 0) and capture (slot 1) objects if present.
6008 DsHandle *handles = (DsHandle *) stream_.apiHandle;
6010 if (handles[0].object) {
6011 LPDIRECTSOUND object = (LPDIRECTSOUND) handles[0].object;
6012 LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handles[0].buffer;
6020 if (handles[1].object) {
6021 LPDIRECTSOUNDCAPTURE object = (LPDIRECTSOUNDCAPTURE) handles[1].object;
6022 LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handles[1].buffer;
6030 stream_.apiHandle = 0;
6033 if (stream_.userBuffer) {
6034 free(stream_.userBuffer);
6035 stream_.userBuffer = 0;
6038 if (stream_.deviceBuffer) {
6039 free(stream_.deviceBuffer);
6040 stream_.deviceBuffer = 0;
6043 stream_.mode = UNINITIALIZED;
// Start the stream: Play() the looping playback buffer and/or Start()
// the looping capture buffer depending on stream mode. No-op when the
// stream is already running; raises DRIVER_ERROR on API failure.
6046 void RtApiDs :: startStream()
6049 if (stream_.state == STREAM_RUNNING) return;
6051 MUTEX_LOCK(&stream_.mutex);
6054 DsHandle *handles = (DsHandle *) stream_.apiHandle;
6055 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
6056 LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handles[0].buffer;
6057 result = buffer->Play(0, 0, DSBPLAY_LOOPING );
6058 if ( FAILED(result) ) {
6059 sprintf(message_, "RtApiDs: Unable to start buffer (%s): %s.",
6060 devices_[stream_.device[0]].name.c_str(), getErrorString(result));
6061 error(RtError::DRIVER_ERROR);
6065 if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
6066 LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handles[1].buffer;
6067 result = buffer->Start(DSCBSTART_LOOPING );
6068 if ( FAILED(result) ) {
6069 sprintf(message_, "RtApiDs: Unable to start capture buffer (%s): %s.",
6070 devices_[stream_.device[1]].name.c_str(), getErrorString(result));
6071 error(RtError::DRIVER_ERROR);
6074 stream_.state = STREAM_RUNNING;
6076 MUTEX_UNLOCK(&stream_.mutex);
// Stop the stream after "draining" pending output: write one extra
// round of zeroed buffers so already-queued audio finishes playing,
// then stop and clear the capture buffer exactly as abortStream() does.
//
// Fixes in this revision:
//  * "&currentPos" had been mangled to "(0xA4)tPos" by HTML-entity
//    corruption ("&curren" swallowed as the currency-sign entity);
//    restored both GetCurrentPosition() argument lists.
//  * The playback Unlock() return value was not assigned to `result`,
//    so the following FAILED(result) test examined a stale value from
//    the earlier Lock() call; now assigned.
6079 void RtApiDs :: stopStream()
6082 if (stream_.state == STREAM_STOPPED) return;
6084 // Change the state before the lock to improve shutdown response
6085 // when using a callback.
6086 stream_.state = STREAM_STOPPED;
6087 MUTEX_LOCK(&stream_.mutex);
6089 // There is no specific DirectSound API call to "drain" a buffer
6090 // before stopping. We can hack this for playback by writing zeroes
6091 // for another bufferSize * nBuffers frames. For capture, the
6092 // concept is less clear so we'll repeat what we do in the
6093 // abortStream() case.
6096 LPVOID buffer1 = NULL;
6097 LPVOID buffer2 = NULL;
6098 DWORD bufferSize1 = 0;
6099 DWORD bufferSize2 = 0;
6100 DsHandle *handles = (DsHandle *) stream_.apiHandle;
6101 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
6103 DWORD currentPos, safePos;
6104 long buffer_bytes = stream_.bufferSize * stream_.nDeviceChannels[0];
6105 buffer_bytes *= formatBytes(stream_.deviceFormat[0]);
6107 LPDIRECTSOUNDBUFFER dsBuffer = (LPDIRECTSOUNDBUFFER) handles[0].buffer;
6108 UINT nextWritePos = handles[0].bufferPointer;
6109 dsBufferSize = buffer_bytes * stream_.nBuffers;
6111 // Write zeroes for nBuffer counts.
6112 for (int i=0; i<stream_.nBuffers; i++) {
6114 // Find out where the read and "safe write" pointers are.
6115 result = dsBuffer->GetCurrentPosition(&currentPos, &safePos);
6116 if ( FAILED(result) ) {
6117 sprintf(message_, "RtApiDs: Unable to get current position (%s): %s.",
6118 devices_[stream_.device[0]].name.c_str(), getErrorString(result));
6119 error(RtError::DRIVER_ERROR);
6122 if ( currentPos < nextWritePos ) currentPos += dsBufferSize; // unwrap offset
6123 DWORD endWrite = nextWritePos + buffer_bytes;
6125 // Check whether the entire write region is behind the play pointer.
6126 while ( currentPos < endWrite ) {
// Sleep roughly as long as the remaining distance takes to play out;
// the 900.0 factor (90% of 1000 ms/s) deliberately undersleeps a bit.
6127 double millis = (endWrite - currentPos) * 900.0;
6128 millis /= ( formatBytes(stream_.deviceFormat[0]) * stream_.sampleRate);
6129 if ( millis < 1.0 ) millis = 1.0;
6130 Sleep( (DWORD) millis );
6132 // Wake up, find out where we are now
6133 result = dsBuffer->GetCurrentPosition( &currentPos, &safePos );
6134 if ( FAILED(result) ) {
6135 sprintf(message_, "RtApiDs: Unable to get current position (%s): %s.",
6136 devices_[stream_.device[0]].name.c_str(), getErrorString(result));
6137 error(RtError::DRIVER_ERROR);
6139 if ( currentPos < nextWritePos ) currentPos += dsBufferSize; // unwrap offset
6142 // Lock free space in the buffer
6143 result = dsBuffer->Lock (nextWritePos, buffer_bytes, &buffer1,
6144 &bufferSize1, &buffer2, &bufferSize2, 0);
6145 if ( FAILED(result) ) {
6146 sprintf(message_, "RtApiDs: Unable to lock buffer during playback (%s): %s.",
6147 devices_[stream_.device[0]].name.c_str(), getErrorString(result));
6148 error(RtError::DRIVER_ERROR);
6151 // Zero the free space
6152 ZeroMemory(buffer1, bufferSize1);
6153 if (buffer2 != NULL) ZeroMemory(buffer2, bufferSize2);
6155 // Update our buffer offset and unlock sound buffer
6156 result = dsBuffer->Unlock (buffer1, bufferSize1, buffer2, bufferSize2);
6157 if ( FAILED(result) ) {
6158 sprintf(message_, "RtApiDs: Unable to unlock buffer during playback (%s): %s.",
6159 devices_[stream_.device[0]].name.c_str(), getErrorString(result));
6160 error(RtError::DRIVER_ERROR);
6162 nextWritePos = (nextWritePos + bufferSize1 + bufferSize2) % dsBufferSize;
6163 handles[0].bufferPointer = nextWritePos;
6166 // If we play again, start at the beginning of the buffer.
6167 handles[0].bufferPointer = 0;
6170 if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
6171 LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handles[1].buffer;
6175 result = buffer->Stop();
6176 if ( FAILED(result) ) {
6177 sprintf(message_, "RtApiDs: Unable to stop capture buffer (%s): %s",
6178 devices_[stream_.device[1]].name.c_str(), getErrorString(result));
6179 error(RtError::DRIVER_ERROR);
6182 dsBufferSize = stream_.bufferSize * stream_.nDeviceChannels[1];
6183 dsBufferSize *= formatBytes(stream_.deviceFormat[1]) * stream_.nBuffers;
6185 // Lock the buffer and clear it so that if we start to play again,
6186 // we won't have old data playing.
6187 result = buffer->Lock(0, dsBufferSize, &buffer1, &bufferSize1, NULL, NULL, 0);
6188 if ( FAILED(result) ) {
6189 sprintf(message_, "RtApiDs: Unable to lock capture buffer (%s): %s.",
6190 devices_[stream_.device[1]].name.c_str(), getErrorString(result));
6191 error(RtError::DRIVER_ERROR);
6194 // Zero the DS buffer
6195 ZeroMemory(buffer1, bufferSize1);
6197 // Unlock the DS buffer
6198 result = buffer->Unlock(buffer1, bufferSize1, NULL, 0);
6199 if ( FAILED(result) ) {
6200 sprintf(message_, "RtApiDs: Unable to unlock capture buffer (%s): %s.",
6201 devices_[stream_.device[1]].name.c_str(), getErrorString(result));
6202 error(RtError::DRIVER_ERROR);
6205 // If we start recording again, we must begin at beginning of buffer.
6206 handles[1].bufferPointer = 0;
6209 MUTEX_UNLOCK(&stream_.mutex);
// Stop the stream immediately (no drain): Stop() the playback and/or
// capture buffer, zero its entire contents so a restart does not replay
// stale audio, and reset the stored read/write offsets to zero.
6212 void RtApiDs :: abortStream()
6215 if (stream_.state == STREAM_STOPPED) return;
6217 // Change the state before the lock to improve shutdown response
6218 // when using a callback.
6219 stream_.state = STREAM_STOPPED;
6220 MUTEX_LOCK(&stream_.mutex);
6226 DsHandle *handles = (DsHandle *) stream_.apiHandle;
6227 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
6228 LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handles[0].buffer;
6229 result = buffer->Stop();
6230 if ( FAILED(result) ) {
6231 sprintf(message_, "RtApiDs: Unable to stop buffer (%s): %s",
6232 devices_[stream_.device[0]].name.c_str(), getErrorString(result));
6233 error(RtError::DRIVER_ERROR);
// Full device buffer size in bytes: frames * channels * bytes/sample * buffers.
6236 dsBufferSize = stream_.bufferSize * stream_.nDeviceChannels[0];
6237 dsBufferSize *= formatBytes(stream_.deviceFormat[0]) * stream_.nBuffers;
6239 // Lock the buffer and clear it so that if we start to play again,
6240 // we won't have old data playing.
6241 result = buffer->Lock(0, dsBufferSize, &audioPtr, &dataLen, NULL, NULL, 0);
6242 if ( FAILED(result) ) {
6243 sprintf(message_, "RtApiDs: Unable to lock buffer (%s): %s.",
6244 devices_[stream_.device[0]].name.c_str(), getErrorString(result));
6245 error(RtError::DRIVER_ERROR);
6248 // Zero the DS buffer
6249 ZeroMemory(audioPtr, dataLen);
6251 // Unlock the DS buffer
6252 result = buffer->Unlock(audioPtr, dataLen, NULL, 0);
6253 if ( FAILED(result) ) {
6254 sprintf(message_, "RtApiDs: Unable to unlock buffer (%s): %s.",
6255 devices_[stream_.device[0]].name.c_str(), getErrorString(result));
6256 error(RtError::DRIVER_ERROR);
6259 // If we start playing again, we must begin at beginning of buffer.
6260 handles[0].bufferPointer = 0;
// Capture side: same stop / zero / reset sequence on the capture buffer.
6263 if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
6264 LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handles[1].buffer;
6268 result = buffer->Stop();
6269 if ( FAILED(result) ) {
6270 sprintf(message_, "RtApiDs: Unable to stop capture buffer (%s): %s",
6271 devices_[stream_.device[1]].name.c_str(), getErrorString(result));
6272 error(RtError::DRIVER_ERROR);
6275 dsBufferSize = stream_.bufferSize * stream_.nDeviceChannels[1];
6276 dsBufferSize *= formatBytes(stream_.deviceFormat[1]) * stream_.nBuffers;
6278 // Lock the buffer and clear it so that if we start to play again,
6279 // we won't have old data playing.
6280 result = buffer->Lock(0, dsBufferSize, &audioPtr, &dataLen, NULL, NULL, 0);
6281 if ( FAILED(result) ) {
6282 sprintf(message_, "RtApiDs: Unable to lock capture buffer (%s): %s.",
6283 devices_[stream_.device[1]].name.c_str(), getErrorString(result));
6284 error(RtError::DRIVER_ERROR);
6287 // Zero the DS buffer
6288 ZeroMemory(audioPtr, dataLen);
6290 // Unlock the DS buffer
6291 result = buffer->Unlock(audioPtr, dataLen, NULL, 0);
6292 if ( FAILED(result) ) {
6293 sprintf(message_, "RtApiDs: Unable to unlock capture buffer (%s): %s.",
6294 devices_[stream_.device[1]].name.c_str(), getErrorString(result));
6295 error(RtError::DRIVER_ERROR);
6298 // If we start recording again, we must begin at beginning of buffer.
6299 handles[1].bufferPointer = 0;
6302 MUTEX_UNLOCK(&stream_.mutex);
// Estimate how many frames a tickStream() call would have to wait for,
// based on the distance between our stored buffer pointer and the
// device's current play/capture position (for duplex, the larger of the
// two estimates). Returns 0 when the stream is stopped.
//
// Fix in this revision: "&currentPos" had been mangled to "(0xA4)tPos"
// by HTML-entity corruption ("&curren" swallowed as the currency-sign
// entity); restored both GetCurrentPosition() argument lists.
6305 int RtApiDs :: streamWillBlock()
6308 if (stream_.state == STREAM_STOPPED) return 0;
6310 MUTEX_LOCK(&stream_.mutex);
6315 DWORD currentPos, safePos;
6317 DsHandle *handles = (DsHandle *) stream_.apiHandle;
6318 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
6320 LPDIRECTSOUNDBUFFER dsBuffer = (LPDIRECTSOUNDBUFFER) handles[0].buffer;
6321 UINT nextWritePos = handles[0].bufferPointer;
6322 channels = stream_.nDeviceChannels[0];
6323 DWORD dsBufferSize = stream_.bufferSize * channels;
6324 dsBufferSize *= formatBytes(stream_.deviceFormat[0]) * stream_.nBuffers;
6326 // Find out where the read and "safe write" pointers are.
6327 result = dsBuffer->GetCurrentPosition(&currentPos, &safePos);
6328 if ( FAILED(result) ) {
6329 sprintf(message_, "RtApiDs: Unable to get current position (%s): %s.",
6330 devices_[stream_.device[0]].name.c_str(), getErrorString(result));
6331 error(RtError::DRIVER_ERROR);
6334 if ( currentPos < nextWritePos ) currentPos += dsBufferSize; // unwrap offset
// Convert the byte distance ahead of our write pointer into frames.
6335 frames = currentPos - nextWritePos;
6336 frames /= channels * formatBytes(stream_.deviceFormat[0]);
6339 if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
6341 LPDIRECTSOUNDCAPTUREBUFFER dsBuffer = (LPDIRECTSOUNDCAPTUREBUFFER) handles[1].buffer;
6342 UINT nextReadPos = handles[1].bufferPointer;
6343 channels = stream_.nDeviceChannels[1];
6344 DWORD dsBufferSize = stream_.bufferSize * channels;
6345 dsBufferSize *= formatBytes(stream_.deviceFormat[1]) * stream_.nBuffers;
6347 // Find out where the write and "safe read" pointers are.
6348 result = dsBuffer->GetCurrentPosition(&currentPos, &safePos);
6349 if ( FAILED(result) ) {
6350 sprintf(message_, "RtApiDs: Unable to get current capture position (%s): %s.",
6351 devices_[stream_.device[1]].name.c_str(), getErrorString(result));
6352 error(RtError::DRIVER_ERROR);
6355 if ( safePos < nextReadPos ) safePos += dsBufferSize; // unwrap offset
6357 if (stream_.mode == DUPLEX ) {
6358 // Take largest value of the two.
6359 int temp = safePos - nextReadPos;
6360 temp /= channels * formatBytes(stream_.deviceFormat[1]);
6361 frames = ( temp > frames ) ? temp : frames;
6364 frames = safePos - nextReadPos;
6365 frames /= channels * formatBytes(stream_.deviceFormat[1]);
// Frames still missing before a full buffer can be processed.
6369 frames = stream_.bufferSize - frames;
6370 if (frames < 0) frames = 0;
6372 MUTEX_UNLOCK(&stream_.mutex);
6376 void RtApiDs :: tickStream()
6381 if (stream_.state == STREAM_STOPPED) {
6382 if (stream_.callbackInfo.usingCallback) Sleep(50); // sleep 50 milliseconds
6385 else if (stream_.callbackInfo.usingCallback) {
6386 RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;
6387 stopStream = callback(stream_.userBuffer, stream_.bufferSize, stream_.callbackInfo.userData);
6390 MUTEX_LOCK(&stream_.mutex);
6392 // The state might change while waiting on a mutex.
6393 if (stream_.state == STREAM_STOPPED) {
6394 MUTEX_UNLOCK(&stream_.mutex);
6399 DWORD currentPos, safePos;
6400 LPVOID buffer1 = NULL;
6401 LPVOID buffer2 = NULL;
6402 DWORD bufferSize1 = 0;
6403 DWORD bufferSize2 = 0;
6406 DsHandle *handles = (DsHandle *) stream_.apiHandle;
6407 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
6409 // Setup parameters and do buffer conversion if necessary.
6410 if (stream_.doConvertBuffer[0]) {
6411 convertStreamBuffer(OUTPUT);
6412 buffer = stream_.deviceBuffer;
6413 buffer_bytes = stream_.bufferSize * stream_.nDeviceChannels[0];
6414 buffer_bytes *= formatBytes(stream_.deviceFormat[0]);
6417 buffer = stream_.userBuffer;
6418 buffer_bytes = stream_.bufferSize * stream_.nUserChannels[0];
6419 buffer_bytes *= formatBytes(stream_.userFormat);
6422 // No byte swapping necessary in DirectSound implementation.
6424 LPDIRECTSOUNDBUFFER dsBuffer = (LPDIRECTSOUNDBUFFER) handles[0].buffer;
6425 UINT nextWritePos = handles[0].bufferPointer;
6426 DWORD dsBufferSize = buffer_bytes * stream_.nBuffers;
6428 // Find out where the read and "safe write" pointers are.
6429 result = dsBuffer->GetCurrentPosition(¤tPos, &safePos);
6430 if ( FAILED(result) ) {
6431 sprintf(message_, "RtApiDs: Unable to get current position (%s): %s.",
6432 devices_[stream_.device[0]].name.c_str(), getErrorString(result));
6433 error(RtError::DRIVER_ERROR);
6436 if ( currentPos < nextWritePos ) currentPos += dsBufferSize; // unwrap offset
6437 DWORD endWrite = nextWritePos + buffer_bytes;
6439 // Check whether the entire write region is behind the play pointer.
6440 while ( currentPos < endWrite ) {
6441 // If we are here, then we must wait until the play pointer gets
6442 // beyond the write region. The approach here is to use the
6443 // Sleep() function to suspend operation until safePos catches
6444 // up. Calculate number of milliseconds to wait as:
6445 // time = distance * (milliseconds/second) * fudgefactor /
6446 // ((bytes/sample) * (samples/second))
6447 // A "fudgefactor" less than 1 is used because it was found
6448 // that sleeping too long was MUCH worse than sleeping for
6449 // several shorter periods.
6450 double millis = (endWrite - currentPos) * 900.0;
6451 millis /= ( formatBytes(stream_.deviceFormat[0]) * stream_.sampleRate);
6452 if ( millis < 1.0 ) millis = 1.0;
6453 Sleep( (DWORD) millis );
6455 // Wake up, find out where we are now
6456 result = dsBuffer->GetCurrentPosition( ¤tPos, &safePos );
6457 if ( FAILED(result) ) {
6458 sprintf(message_, "RtApiDs: Unable to get current position (%s): %s.",
6459 devices_[stream_.device[0]].name.c_str(), getErrorString(result));
6460 error(RtError::DRIVER_ERROR);
6462 if ( currentPos < nextWritePos ) currentPos += dsBufferSize; // unwrap offset
6465 // Lock free space in the buffer
6466 result = dsBuffer->Lock (nextWritePos, buffer_bytes, &buffer1,
6467 &bufferSize1, &buffer2, &bufferSize2, 0);
6468 if ( FAILED(result) ) {
6469 sprintf(message_, "RtApiDs: Unable to lock buffer during playback (%s): %s.",
6470 devices_[stream_.device[0]].name.c_str(), getErrorString(result));
6471 error(RtError::DRIVER_ERROR);
6474 // Copy our buffer into the DS buffer
6475 CopyMemory(buffer1, buffer, bufferSize1);
6476 if (buffer2 != NULL) CopyMemory(buffer2, buffer+bufferSize1, bufferSize2);
6478 // Update our buffer offset and unlock sound buffer
6479 dsBuffer->Unlock (buffer1, bufferSize1, buffer2, bufferSize2);
6480 if ( FAILED(result) ) {
6481 sprintf(message_, "RtApiDs: Unable to unlock buffer during playback (%s): %s.",
6482 devices_[stream_.device[0]].name.c_str(), getErrorString(result));
6483 error(RtError::DRIVER_ERROR);
6485 nextWritePos = (nextWritePos + bufferSize1 + bufferSize2) % dsBufferSize;
6486 handles[0].bufferPointer = nextWritePos;
6489 if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
6491 // Setup parameters.
6492 if (stream_.doConvertBuffer[1]) {
6493 buffer = stream_.deviceBuffer;
6494 buffer_bytes = stream_.bufferSize * stream_.nDeviceChannels[1];
6495 buffer_bytes *= formatBytes(stream_.deviceFormat[1]);
6498 buffer = stream_.userBuffer;
6499 buffer_bytes = stream_.bufferSize * stream_.nUserChannels[1];
6500 buffer_bytes *= formatBytes(stream_.userFormat);
6503 LPDIRECTSOUNDCAPTUREBUFFER dsBuffer = (LPDIRECTSOUNDCAPTUREBUFFER) handles[1].buffer;
6504 UINT nextReadPos = handles[1].bufferPointer;
6505 DWORD dsBufferSize = buffer_bytes * stream_.nBuffers;
6507 // Find out where the write and "safe read" pointers are.
6508 result = dsBuffer->GetCurrentPosition(¤tPos, &safePos);
6509 if ( FAILED(result) ) {
6510 sprintf(message_, "RtApiDs: Unable to get current capture position (%s): %s.",
6511 devices_[stream_.device[1]].name.c_str(), getErrorString(result));
6512 error(RtError::DRIVER_ERROR);
6515 if ( safePos < nextReadPos ) safePos += dsBufferSize; // unwrap offset
6516 DWORD endRead = nextReadPos + buffer_bytes;
6518 // Check whether the entire write region is behind the play pointer.
6519 while ( safePos < endRead ) {
6520 // See comments for playback.
6521 double millis = (endRead - safePos) * 900.0;
6522 millis /= ( formatBytes(stream_.deviceFormat[1]) * stream_.sampleRate);
6523 if ( millis < 1.0 ) millis = 1.0;
6524 Sleep( (DWORD) millis );
6526 // Wake up, find out where we are now
6527 result = dsBuffer->GetCurrentPosition( &currentPos, &safePos );
6528 if ( FAILED(result) ) {
6529 sprintf(message_, "RtApiDs: Unable to get current capture position (%s): %s.",
6530 devices_[stream_.device[1]].name.c_str(), getErrorString(result));
6531 error(RtError::DRIVER_ERROR);
6534 if ( safePos < nextReadPos ) safePos += dsBufferSize; // unwrap offset
6537 // Lock free space in the buffer
6538 result = dsBuffer->Lock (nextReadPos, buffer_bytes, &buffer1,
6539 &bufferSize1, &buffer2, &bufferSize2, 0);
6540 if ( FAILED(result) ) {
6541 sprintf(message_, "RtApiDs: Unable to lock buffer during capture (%s): %s.",
6542 devices_[stream_.device[1]].name.c_str(), getErrorString(result));
6543 error(RtError::DRIVER_ERROR);
6546 // Copy our buffer into the DS buffer
6547 CopyMemory(buffer, buffer1, bufferSize1);
6548 if (buffer2 != NULL) CopyMemory(buffer+bufferSize1, buffer2, bufferSize2);
6550 // Update our buffer offset and unlock sound buffer
6551 nextReadPos = (nextReadPos + bufferSize1 + bufferSize2) % dsBufferSize;
6552 dsBuffer->Unlock (buffer1, bufferSize1, buffer2, bufferSize2);
6553 if ( FAILED(result) ) {
6554 sprintf(message_, "RtApiDs: Unable to unlock buffer during capture (%s): %s.",
6555 devices_[stream_.device[1]].name.c_str(), getErrorString(result));
6556 error(RtError::DRIVER_ERROR);
6558 handles[1].bufferPointer = nextReadPos;
6560 // No byte swapping necessary in DirectSound implementation.
6562 // Do buffer conversion if necessary.
6563 if (stream_.doConvertBuffer[1])
6564 convertStreamBuffer(INPUT);
6567 MUTEX_UNLOCK(&stream_.mutex);
6569 if (stream_.callbackInfo.usingCallback && stopStream)
6573 // Definitions for utility functions and callbacks
6574 // specific to the DirectSound implementation.
// DirectSound callback-thread entry point.  Repeatedly calls
// object->tickStream() while info->usingCallback remains true; an
// RtError thrown by tickStream() is reported to stderr and ends the
// loop.  (Thread-exit lines are not visible in this view of the file.)
6576 extern "C" unsigned __stdcall callbackHandler(void *ptr)
6578 CallbackInfo *info = (CallbackInfo *) ptr;
6579 RtApiDs *object = (RtApiDs *) info->object;
// Polled flag: cleared by cancelStreamCallback()/closeStream() to stop the thread.
6580 bool *usingCallback = &info->usingCallback;
6582 while ( *usingCallback ) {
6584 object->tickStream();
6586 catch (RtError &exception) {
6587 fprintf(stderr, "\nRtApiDs: callback thread error (%s) ... closing thread.\n\n",
6588 exception.getMessageString());
// DirectSound enumeration callback used only to count devices:
// lpContext points to an int counter (the increment itself is on a
// line not visible in this view).
6597 static bool CALLBACK deviceCountCallback(LPGUID lpguid,
6598 LPCSTR lpcstrDescription,
6599 LPCSTR lpcstrModule,
6602 int *pointer = ((int *) lpContext);
// DirectSound enumeration callback that fills the next free enum_info
// slot (first entry with an empty name) with this device's description
// and probes whether the device is usable: for capture devices the
// driver must report at least one channel and one supported format;
// for playback devices the primary buffer must support mono or stereo.
// Returning true continues enumeration with the next device.
6608 static bool CALLBACK deviceInfoCallback(LPGUID lpguid,
6609 LPCSTR lpcstrDescription,
6610 LPCSTR lpcstrModule,
6613 enum_info *info = ((enum_info *) lpContext);
// Advance to the first unused slot in the caller-supplied table.
6614 while (strlen(info->name) > 0) info++;
6616 strncpy(info->name, lpcstrDescription, 64);
6620 info->isValid = false;
6621 if (info->isInput == true) {
6623 LPDIRECTSOUNDCAPTURE object;
6625 hr = DirectSoundCaptureCreate( lpguid, &object, NULL );
// Device can't be opened for capture: leave isValid false, keep enumerating.
6626 if( hr != DS_OK ) return true;
6628 caps.dwSize = sizeof(caps);
6629 hr = object->GetCaps( &caps );
6631 if (caps.dwChannels > 0 && caps.dwFormats > 0)
6632 info->isValid = true;
6638 LPDIRECTSOUND object;
6639 hr = DirectSoundCreate( lpguid, &object, NULL );
6640 if( hr != DS_OK ) return true;
6642 caps.dwSize = sizeof(caps);
6643 hr = object->GetCaps( &caps );
// Playback device is valid if the primary buffer supports mono or stereo.
6645 if ( caps.dwFlags & DSCAPS_PRIMARYMONO || caps.dwFlags & DSCAPS_PRIMARYSTEREO )
6646 info->isValid = true;
// DirectSound enumeration callback that captures the name of the
// default device: DirectSound reports the default device with a NULL
// GUID, so only that entry's description is copied into info->name.
6654 static bool CALLBACK defaultDeviceCallback(LPGUID lpguid,
6655 LPCSTR lpcstrDescription,
6656 LPCSTR lpcstrModule,
6659 enum_info *info = ((enum_info *) lpContext);
6661 if ( lpguid == NULL ) {
6662 strncpy(info->name, lpcstrDescription, 64);
// DirectSound enumeration callback that looks up a device by name:
// when the enumerated description matches info->name the entry is
// marked valid (the GUID capture, if any, is on lines not visible here).
6669 static bool CALLBACK deviceIdCallback(LPGUID lpguid,
6670 LPCSTR lpcstrDescription,
6671 LPCSTR lpcstrModule,
6674 enum_info *info = ((enum_info *) lpContext);
6676 if ( strncmp( info->name, lpcstrDescription, 64 ) == 0 ) {
6678 info->isValid = true;
// Map a DirectSound HRESULT error code to a human-readable string
// (used when formatting RtApiDs error messages).
// NOTE(review): the return type should be const char* since string
// literals are returned; left unchanged here to preserve the code.
6685 static char* getErrorString(int code)
6689 case DSERR_ALLOCATED:
6690 return "Direct Sound already allocated";
6692 case DSERR_CONTROLUNAVAIL:
6693 return "Direct Sound control unavailable";
6695 case DSERR_INVALIDPARAM:
6696 return "Direct Sound invalid parameter";
6698 case DSERR_INVALIDCALL:
6699 return "Direct Sound invalid call";
6702 return "Direct Sound generic error";
6704 case DSERR_PRIOLEVELNEEDED:
6705 return "Direct Sound Priority level needed";
6707 case DSERR_OUTOFMEMORY:
6708 return "Direct Sound out of memory";
6710 case DSERR_BADFORMAT:
6711 return "Direct Sound bad format";
6713 case DSERR_UNSUPPORTED:
6714 return "Direct Sound unsupported error";
6716 case DSERR_NODRIVER:
6717 return "Direct Sound no driver error";
6719 case DSERR_ALREADYINITIALIZED:
6720 return "Direct Sound already initialized";
6722 case DSERR_NOAGGREGATION:
6723 return "Direct Sound no aggregation";
6725 case DSERR_BUFFERLOST:
6726 return "Direct Sound buffer lost";
6728 case DSERR_OTHERAPPHASPRIO:
6729 return "Direct Sound other app has priority";
6731 case DSERR_UNINITIALIZED:
6732 return "Direct Sound uninitialized";
// Fallback for any code without an explicit case above.
6735 return "Direct Sound unknown error";
6739 //******************** End of __WINDOWS_DS__ *********************//
6742 #if defined(__IRIX_AL__) // SGI's AL API for IRIX
6744 #include <dmedia/audio.h>
6748 extern "C" void *callbackHandler(void * ptr);
// Constructor: after device enumeration (presumably via initialize(),
// called on a line not visible here -- TODO confirm), throws
// NO_DEVICES_FOUND if no IRIX AL devices were detected.
6750 RtApiAl :: RtApiAl()
6754 if (nDevices_ <= 0) {
6755 sprintf(message_, "RtApiAl: no Irix AL audio devices found!");
6756 error(RtError::NO_DEVICES_FOUND);
// Destructor: closes any open stream before releasing the per-device
// apiDeviceId allocations made in initialize() (the free(id) call is
// on a line not visible in this view).
6760 RtApiAl :: ~RtApiAl()
6762 // The subclass destructor gets called before the base class
6763 // destructor, so close any existing streams before deallocating
6764 // apiDeviceId memory.
6765 if ( stream_.mode != UNINITIALIZED ) closeStream();
6767 // Free our allocated apiDeviceId memory.
6769 for ( unsigned int i=0; i<devices_.size(); i++ ) {
6770 id = (long *) devices_[i].apiDeviceId;
// Enumerate IRIX AL devices.  Queries the system device count, then
// fetches the default-output and default-input device lists, storing
// each device's ASCII name in devices_ and a two-long apiDeviceId
// array per device (output resource id in id[0], input in id[1] --
// assignments are on lines not visible here; see the getDefault*Device
// methods which read those slots).  Driver errors are fatal
// (RtError::DRIVER_ERROR).
6775 void RtApiAl :: initialize(void)
6777 // Count cards and devices
6780 // Determine the total number of input and output devices.
6781 nDevices_ = alQueryValues(AL_SYSTEM, AL_DEVICES, 0, 0, 0, 0);
6782 if (nDevices_ < 0) {
6783 sprintf(message_, "RtApiAl: error counting devices: %s.",
6784 alGetErrorString(oserror()));
6785 error(RtError::DRIVER_ERROR);
6788 if (nDevices_ <= 0) return;
6790 ALvalue *vls = (ALvalue *) new ALvalue[nDevices_];
6792 // Create our list of devices and write their ascii identifiers and resource ids.
// Parameter/value pair used with alGetParams to fetch each device's name.
6796 pvs[0].param = AL_NAME;
6797 pvs[0].value.ptr = name;
6802 outs = alQueryValues(AL_SYSTEM, AL_DEFAULT_OUTPUT, vls, nDevices_, 0, 0);
6805 sprintf(message_, "RtApiAl: error getting output devices: %s.",
6806 alGetErrorString(oserror()));
6807 error(RtError::DRIVER_ERROR);
6810 for (i=0; i<outs; i++) {
6811 if (alGetParams(vls[i].i, pvs, 1) < 0) {
6813 sprintf(message_, "RtApiAl: error querying output devices: %s.",
6814 alGetErrorString(oserror()));
6815 error(RtError::DRIVER_ERROR);
// Store the NUL-terminated name and allocate the 2-slot resource-id array.
6817 device.name.erase();
6818 device.name.append( (const char *)name, strlen(name)+1);
6819 devices_.push_back(device);
6820 id = (long *) calloc(2, sizeof(long));
6822 devices_[i].apiDeviceId = (void *) id;
// Input devices are appended after the outputs in vls.
6825 ins = alQueryValues(AL_SYSTEM, AL_DEFAULT_INPUT, &vls[outs], nDevices_-outs, 0, 0);
6828 sprintf(message_, "RtApiAl: error getting input devices: %s.",
6829 alGetErrorString(oserror()));
6830 error(RtError::DRIVER_ERROR);
6833 for (i=outs; i<ins+outs; i++) {
6834 if (alGetParams(vls[i].i, pvs, 1) < 0) {
6836 sprintf(message_, "RtApiAl: error querying input devices: %s.",
6837 alGetErrorString(oserror()));
6838 error(RtError::DRIVER_ERROR);
6840 device.name.erase();
6841 device.name.append( (const char *)name, strlen(name)+1);
6842 devices_.push_back(device);
6843 id = (long *) calloc(2, sizeof(long));
6845 devices_[i].apiDeviceId = (void *) id;
// Return the devices_ index whose stored input resource id (id[1])
// matches the system default input; a query failure is only a WARNING
// (the fallback return is on a line not visible here).
6851 int RtApiAl :: getDefaultInputDevice(void)
6855 int result = alQueryValues(AL_SYSTEM, AL_DEFAULT_INPUT, &value, 1, 0, 0);
6857 sprintf(message_, "RtApiAl: error getting default input device id: %s.",
6858 alGetErrorString(oserror()));
6859 error(RtError::WARNING);
6862 for ( unsigned int i=0; i<devices_.size(); i++ ) {
6863 id = (long *) devices_[i].apiDeviceId;
// id[1] holds the input resource id recorded during initialize().
6864 if ( id[1] == value.i ) return i;
// Return the devices_ index whose stored output resource id (id[0])
// matches the system default output; a query failure is only a WARNING
// (the fallback return is on a line not visible here).
6871 int RtApiAl :: getDefaultOutputDevice(void)
6875 int result = alQueryValues(AL_SYSTEM, AL_DEFAULT_OUTPUT, &value, 1, 0, 0);
6877 sprintf(message_, "RtApiAl: error getting default output device id: %s.",
6878 alGetErrorString(oserror()));
6879 error(RtError::WARNING);
6882 for ( unsigned int i=0; i<devices_.size(); i++ ) {
6883 id = (long *) devices_[i].apiDeviceId;
// id[0] holds the output resource id recorded during initialize().
6884 if ( id[0] == value.i ) return i;
// Probe an AL device's capabilities into *info: output then input
// channel counts, the subset of SAMPLE_RATES the device supports, the
// native sample formats, and the derived duplex limits.  All probe
// failures are non-fatal WARNINGs.
6891 void RtApiAl :: probeDeviceInfo(RtApiDevice *info)
6898 // Get output resource ID if it exists.
6899 long *id = (long *) info->apiDeviceId;
6903 // Probe output device parameters.
6904 result = alQueryValues(resource, AL_CHANNELS, &value, 1, 0, 0);
6906 sprintf(message_, "RtApiAl: error getting device (%s) channels: %s.",
6907 info->name.c_str(), alGetErrorString(oserror()));
6908 error(RtError::WARNING);
6911 info->maxOutputChannels = value.i;
6912 info->minOutputChannels = 1;
6915 result = alGetParamInfo(resource, AL_RATE, &pinfo);
6917 sprintf(message_, "RtApiAl: error getting device (%s) rates: %s.",
6918 info->name.c_str(), alGetErrorString(oserror()));
6919 error(RtError::WARNING);
// Keep only the canonical RtAudio rates that fall inside the device's range.
6922 info->sampleRates.clear();
6923 for (unsigned int k=0; k<MAX_SAMPLE_RATES; k++) {
6924 if ( SAMPLE_RATES[k] >= pinfo.min.i && SAMPLE_RATES[k] <= pinfo.max.i )
6925 info->sampleRates.push_back( SAMPLE_RATES[k] );
6929 // The AL library supports all our formats, except 24-bit and 32-bit ints.
// 51 == SINT8 | SINT16 | FLOAT32 | FLOAT64 as an RtAudioFormat bitmask.
6930 info->nativeFormats = (RtAudioFormat) 51;
6933 // Now get input resource ID if it exists.
6937 // Probe input device parameters.
6938 result = alQueryValues(resource, AL_CHANNELS, &value, 1, 0, 0);
6940 sprintf(message_, "RtApiAl: error getting device (%s) channels: %s.",
6941 info->name.c_str(), alGetErrorString(oserror()));
6942 error(RtError::WARNING);
6945 info->maxInputChannels = value.i;
6946 info->minInputChannels = 1;
6949 result = alGetParamInfo(resource, AL_RATE, &pinfo);
6951 sprintf(message_, "RtApiAl: error getting device (%s) rates: %s.",
6952 info->name.c_str(), alGetErrorString(oserror()));
6953 error(RtError::WARNING);
6956 // In the case of the default device, these values will
6957 // overwrite the rates determined for the output device. Since
6958 // the input device is most likely to be more limited than the
6959 // output device, this is ok.
6960 info->sampleRates.clear();
6961 for (unsigned int k=0; k<MAX_SAMPLE_RATES; k++) {
6962 if ( SAMPLE_RATES[k] >= pinfo.min.i && SAMPLE_RATES[k] <= pinfo.max.i )
6963 info->sampleRates.push_back( SAMPLE_RATES[k] );
6967 // The AL library supports all our formats, except 24-bit and 32-bit ints.
6968 info->nativeFormats = (RtAudioFormat) 51;
// A device with no channels or no usable rates is not reported as probed.
6971 if ( info->maxInputChannels == 0 && info->maxOutputChannels == 0 )
6973 if ( info->sampleRates.size() == 0 )
6976 // Determine duplex status.
6977 if (info->maxInputChannels < info->maxOutputChannels)
6978 info->maxDuplexChannels = info->maxInputChannels;
6980 info->maxDuplexChannels = info->maxOutputChannels;
6981 if (info->minInputChannels < info->minOutputChannels)
6982 info->minDuplexChannels = info->minInputChannels;
6984 info->minDuplexChannels = info->minOutputChannels;
6986 if ( info->maxDuplexChannels > 0 ) info->hasDuplexSupport = true;
6987 else info->hasDuplexSupport = false;
6989 info->probed = true;
// Open one direction (OUTPUT or INPUT) of an AL stream on `device`:
// builds an ALconfig (channels, queue size, sample format, device),
// opens the AL port, sets the sample rate, then fills in the stream_
// bookkeeping (channel counts, conversion flags, user/device buffers,
// mode, nBuffers, bufferSize, sampleRate).  *bufferSize may be
// adjusted if the requested queue size is rejected.  Returns true on
// success; failures emit a WARNING and fall through to cleanup code
// at the end (port close / buffer free lines partially visible).
6994 bool RtApiAl :: probeDeviceOpen(int device, StreamMode mode, int channels,
6995 int sampleRate, RtAudioFormat format,
6996 int *bufferSize, int numberOfBuffers)
6998 int result, nBuffers;
7003 long *id = (long *) devices_[device].apiDeviceId;
7005 // Get a new ALconfig structure.
7006 al_config = alNewConfig();
7008 sprintf(message_,"RtApiAl: can't get AL config: %s.",
7009 alGetErrorString(oserror()));
7010 error(RtError::WARNING);
7014 // Set the channels.
7015 result = alSetChannels(al_config, channels);
7017 alFreeConfig(al_config);
7018 sprintf(message_,"RtApiAl: can't set %d channels in AL config: %s.",
7019 channels, alGetErrorString(oserror()));
7020 error(RtError::WARNING);
7024 // Attempt to set the queue size. The al API doesn't provide a
7025 // means for querying the minimum/maximum buffer size of a device,
7026 // so if the specified size doesn't work, take whatever the
7027 // al_config structure returns.
7028 if ( numberOfBuffers < 1 )
7031 nBuffers = numberOfBuffers;
7032 long buffer_size = *bufferSize * nBuffers;
7033 result = alSetQueueSize(al_config, buffer_size); // in sample frames
7035 // Get the buffer size specified by the al_config and try that.
7036 buffer_size = alGetQueueSize(al_config);
7037 result = alSetQueueSize(al_config, buffer_size);
7039 alFreeConfig(al_config);
7040 sprintf(message_,"RtApiAl: can't set buffer size (%ld) in AL config: %s.",
7041 buffer_size, alGetErrorString(oserror()));
7042 error(RtError::WARNING);
// Report back the per-buffer frame count actually granted.
7045 *bufferSize = buffer_size / nBuffers;
7048 // Set the data format.
7049 stream_.userFormat = format;
7050 stream_.deviceFormat[mode] = format;
7051 if (format == RTAUDIO_SINT8) {
7052 result = alSetSampFmt(al_config, AL_SAMPFMT_TWOSCOMP);
7053 result = alSetWidth(al_config, AL_SAMPLE_8);
7055 else if (format == RTAUDIO_SINT16) {
7056 result = alSetSampFmt(al_config, AL_SAMPFMT_TWOSCOMP);
7057 result = alSetWidth(al_config, AL_SAMPLE_16);
7059 else if (format == RTAUDIO_SINT24) {
7060 // Our 24-bit format assumes the upper 3 bytes of a 4 byte word.
7061 // The AL library uses the lower 3 bytes, so we'll need to do our
7063 result = alSetSampFmt(al_config, AL_SAMPFMT_FLOAT);
7064 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
7066 else if (format == RTAUDIO_SINT32) {
7067 // The AL library doesn't seem to support the 32-bit integer
7068 // format, so we'll need to do our own conversion.
7069 result = alSetSampFmt(al_config, AL_SAMPFMT_FLOAT);
7070 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
7072 else if (format == RTAUDIO_FLOAT32)
7073 result = alSetSampFmt(al_config, AL_SAMPFMT_FLOAT);
7074 else if (format == RTAUDIO_FLOAT64)
7075 result = alSetSampFmt(al_config, AL_SAMPFMT_DOUBLE);
7077 if ( result == -1 ) {
7078 alFreeConfig(al_config);
7079 sprintf(message_,"RtApiAl: error setting sample format in AL config: %s.",
7080 alGetErrorString(oserror()));
7081 error(RtError::WARNING);
7085 if (mode == OUTPUT) {
// Resource falls back to AL_DEFAULT_OUTPUT (selection lines not visible here).
7089 resource = AL_DEFAULT_OUTPUT;
7092 result = alSetDevice(al_config, resource);
7093 if ( result == -1 ) {
7094 alFreeConfig(al_config);
7095 sprintf(message_,"RtApiAl: error setting device (%s) in AL config: %s.",
7096 devices_[device].name.c_str(), alGetErrorString(oserror()));
7097 error(RtError::WARNING);
7102 port = alOpenPort("RtApiAl Output Port", "w", al_config);
7104 alFreeConfig(al_config);
7105 sprintf(message_,"RtApiAl: error opening output port: %s.",
7106 alGetErrorString(oserror()));
7107 error(RtError::WARNING);
7111 // Set the sample rate
7112 pvs[0].param = AL_MASTER_CLOCK;
7113 pvs[0].value.i = AL_CRYSTAL_MCLK_TYPE;
7114 pvs[1].param = AL_RATE;
7115 pvs[1].value.ll = alDoubleToFixed((double)sampleRate);
7116 result = alSetParams(resource, pvs, 2);
7119 alFreeConfig(al_config);
7120 sprintf(message_,"RtApiAl: error setting sample rate (%d) for device (%s): %s.",
7121 sampleRate, devices_[device].name.c_str(), alGetErrorString(oserror()));
7122 error(RtError::WARNING);
7126 else { // mode == INPUT
7130 resource = AL_DEFAULT_INPUT;
7133 result = alSetDevice(al_config, resource);
7134 if ( result == -1 ) {
7135 alFreeConfig(al_config);
7136 sprintf(message_,"RtApiAl: error setting device (%s) in AL config: %s.",
7137 devices_[device].name.c_str(), alGetErrorString(oserror()));
7138 error(RtError::WARNING);
7143 port = alOpenPort("RtApiAl Input Port", "r", al_config);
7145 alFreeConfig(al_config);
7146 sprintf(message_,"RtApiAl: error opening input port: %s.",
7147 alGetErrorString(oserror()));
7148 error(RtError::WARNING);
7152 // Set the sample rate
7153 pvs[0].param = AL_MASTER_CLOCK;
7154 pvs[0].value.i = AL_CRYSTAL_MCLK_TYPE;
7155 pvs[1].param = AL_RATE;
7156 pvs[1].value.ll = alDoubleToFixed((double)sampleRate);
7157 result = alSetParams(resource, pvs, 2);
7160 alFreeConfig(al_config);
7161 sprintf(message_,"RtApiAl: error setting sample rate (%d) for device (%s): %s.",
7162 sampleRate, devices_[device].name.c_str(), alGetErrorString(oserror()));
7163 error(RtError::WARNING);
// Config no longer needed once the port is open and configured.
7168 alFreeConfig(al_config);
7170 stream_.nUserChannels[mode] = channels;
7171 stream_.nDeviceChannels[mode] = channels;
7173 // Save stream handle.
// handle[0] = output port, handle[1] = input port (one calloc for both).
7174 ALport *handle = (ALport *) stream_.apiHandle;
7175 if ( handle == 0 ) {
7176 handle = (ALport *) calloc(2, sizeof(ALport));
7177 if ( handle == NULL ) {
7178 sprintf(message_, "RtApiAl: Irix Al error allocating handle memory (%s).",
7179 devices_[device].name.c_str());
7182 stream_.apiHandle = (void *) handle;
7186 handle[mode] = port;
7188 // Set flags for buffer conversion
7189 stream_.doConvertBuffer[mode] = false;
7190 if (stream_.userFormat != stream_.deviceFormat[mode])
7191 stream_.doConvertBuffer[mode] = true;
7193 // Allocate necessary internal buffers
// User buffer sized for the larger of the two channel counts so one
// buffer serves both directions of a duplex stream.
7194 if ( stream_.nUserChannels[0] != stream_.nUserChannels[1] ) {
7197 if (stream_.nUserChannels[0] >= stream_.nUserChannels[1])
7198 buffer_bytes = stream_.nUserChannels[0];
7200 buffer_bytes = stream_.nUserChannels[1];
7202 buffer_bytes *= *bufferSize * formatBytes(stream_.userFormat);
7203 if (stream_.userBuffer) free(stream_.userBuffer);
7204 stream_.userBuffer = (char *) calloc(buffer_bytes, 1);
7205 if (stream_.userBuffer == NULL) {
7206 sprintf(message_, "RtApiAl: error allocating user buffer memory (%s).",
7207 devices_[device].name.c_str());
7212 if ( stream_.doConvertBuffer[mode] ) {
7215 bool makeBuffer = true;
7216 if ( mode == OUTPUT )
7217 buffer_bytes = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
7218 else { // mode == INPUT
7219 buffer_bytes = stream_.nDeviceChannels[1] * formatBytes(stream_.deviceFormat[1]);
// Reuse the existing device buffer if the output side already made a bigger one.
7220 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
7221 long bytes_out = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
7222 if ( buffer_bytes < bytes_out ) makeBuffer = false;
7227 buffer_bytes *= *bufferSize;
7228 if (stream_.deviceBuffer) free(stream_.deviceBuffer);
7229 stream_.deviceBuffer = (char *) calloc(buffer_bytes, 1);
7230 if (stream_.deviceBuffer == NULL) {
7231 sprintf(message_, "RtApiAl: error allocating device buffer memory (%s).",
7232 devices_[device].name.c_str());
7238 stream_.device[mode] = device;
7239 stream_.state = STREAM_STOPPED;
7240 if ( stream_.mode == OUTPUT && mode == INPUT )
7241 // We had already set up an output stream.
7242 stream_.mode = DUPLEX;
7244 stream_.mode = mode;
7245 stream_.nBuffers = nBuffers;
7246 stream_.bufferSize = *bufferSize;
7247 stream_.sampleRate = sampleRate;
// Error-path cleanup: close any opened ports and free stream buffers.
7254 alClosePort(handle[0]);
7256 alClosePort(handle[1]);
7258 stream_.apiHandle = 0;
7261 if (stream_.userBuffer) {
7262 free(stream_.userBuffer);
7263 stream_.userBuffer = 0;
7266 error(RtError::WARNING);
// Close the current stream: discard queued frames if running, stop
// the callback thread, close both AL ports, and free the user/device
// buffers.  Emits only a WARNING when no stream is open, because this
// is also called from the destructor and must not throw.
7270 void RtApiAl :: closeStream()
7272 // We don't want an exception to be thrown here because this
7273 // function is called by our class destructor. So, do our own
7275 if ( stream_.mode == UNINITIALIZED ) {
7276 sprintf(message_, "RtApiAl::closeStream(): no open stream to close!");
7277 error(RtError::WARNING);
7281 ALport *handle = (ALport *) stream_.apiHandle;
7282 if (stream_.state == STREAM_RUNNING) {
7283 int buffer_size = stream_.bufferSize * stream_.nBuffers;
7284 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX)
7285 alDiscardFrames(handle[0], buffer_size);
7286 if (stream_.mode == INPUT || stream_.mode == DUPLEX)
7287 alDiscardFrames(handle[1], buffer_size);
7288 stream_.state = STREAM_STOPPED;
// Signal the callback thread to exit and wait for it.
7291 if (stream_.callbackInfo.usingCallback) {
7292 stream_.callbackInfo.usingCallback = false;
7293 pthread_join(stream_.callbackInfo.thread, NULL);
7297 if (handle[0]) alClosePort(handle[0]);
7298 if (handle[1]) alClosePort(handle[1]);
7300 stream_.apiHandle = 0;
7303 if (stream_.userBuffer) {
7304 free(stream_.userBuffer);
7305 stream_.userBuffer = 0;
7308 if (stream_.deviceBuffer) {
7309 free(stream_.deviceBuffer);
7310 stream_.deviceBuffer = 0;
7313 stream_.mode = UNINITIALIZED;
// Start the stream.  An AL port is active from the moment it is
// opened, so this only flips the state flag under the stream mutex.
7316 void RtApiAl :: startStream()
7319 if (stream_.state == STREAM_RUNNING) return;
7321 MUTEX_LOCK(&stream_.mutex);
7323 // The AL port is ready as soon as it is opened.
7324 stream_.state = STREAM_RUNNING;
7326 MUTEX_UNLOCK(&stream_.mutex);
// Stop the stream, draining gracefully: queue silence on the output
// port and discard pending input frames.  A failed input discard is a
// fatal DRIVER_ERROR.
7329 void RtApiAl :: stopStream()
7332 if (stream_.state == STREAM_STOPPED) return;
7334 // Change the state before the lock to improve shutdown response
7335 // when using a callback.
7336 stream_.state = STREAM_STOPPED;
7337 MUTEX_LOCK(&stream_.mutex);
7339 int result, buffer_size = stream_.bufferSize * stream_.nBuffers;
7340 ALport *handle = (ALport *) stream_.apiHandle;
// Writing zero frames lets already-queued output play out as silence follows.
7342 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX)
7343 alZeroFrames(handle[0], buffer_size);
7345 if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
7346 result = alDiscardFrames(handle[1], buffer_size);
7348 sprintf(message_, "RtApiAl: error draining stream device (%s): %s.",
7349 devices_[stream_.device[1]].name.c_str(), alGetErrorString(oserror()));
7350 error(RtError::DRIVER_ERROR);
7354 MUTEX_UNLOCK(&stream_.mutex);
// Stop the stream immediately: discard any frames still queued on the
// output port rather than letting them play.  Nothing is done for
// input (the port keeps running regardless).
7357 void RtApiAl :: abortStream()
7360 if (stream_.state == STREAM_STOPPED) return;
7362 // Change the state before the lock to improve shutdown response
7363 // when using a callback.
7364 stream_.state = STREAM_STOPPED;
7365 MUTEX_LOCK(&stream_.mutex);
7367 ALport *handle = (ALport *) stream_.apiHandle;
7368 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
7370 int buffer_size = stream_.bufferSize * stream_.nBuffers;
7371 int result = alDiscardFrames(handle[0], buffer_size);
7373 sprintf(message_, "RtApiAl: error aborting stream device (%s): %s.",
7374 devices_[stream_.device[0]].name.c_str(), alGetErrorString(oserror()));
7375 error(RtError::DRIVER_ERROR);
7379 // There is no clear action to take on the input stream, since the
7380 // port will continue to run in any event.
7382 MUTEX_UNLOCK(&stream_.mutex);
// Return how many frames a tickStream() call would have to wait for:
// the stream bufferSize minus the frames currently available (fillable
// space on output, filled frames on input; the smaller of the two for
// duplex).  Returns 0 when stopped or when no blocking would occur.
7385 int RtApiAl :: streamWillBlock()
7389 if (stream_.state == STREAM_STOPPED) return 0;
7391 MUTEX_LOCK(&stream_.mutex);
7395 ALport *handle = (ALport *) stream_.apiHandle;
7396 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
7397 err = alGetFillable(handle[0]);
7399 sprintf(message_, "RtApiAl: error getting available frames for stream (%s): %s.",
7400 devices_[stream_.device[0]].name.c_str(), alGetErrorString(oserror()));
7401 error(RtError::DRIVER_ERROR);
7407 if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
7408 err = alGetFilled(handle[1]);
7410 sprintf(message_, "RtApiAl: error getting available frames for stream (%s): %s.",
7411 devices_[stream_.device[1]].name.c_str(), alGetErrorString(oserror()));
7412 error(RtError::DRIVER_ERROR);
// For duplex, the direction with fewer available frames governs blocking.
7414 if (frames > err) frames = err;
7417 frames = stream_.bufferSize - frames;
7418 if (frames < 0) frames = 0;
7420 MUTEX_UNLOCK(&stream_.mutex);
// Process one buffer of audio.  In callback mode invokes the user
// callback first (its return value requests a stop).  Then, under the
// stream mutex: for output, converts/byte-swaps the user buffer as
// needed and writes interleaved frames to the AL port; for input,
// reads interleaved frames, then byte-swaps and converts back to the
// user format.  When stopped in callback mode it just sleeps 50 ms.
7424 void RtApiAl :: tickStream()
7429 if (stream_.state == STREAM_STOPPED) {
7430 if (stream_.callbackInfo.usingCallback) usleep(50000); // sleep 50 milliseconds
7433 else if (stream_.callbackInfo.usingCallback) {
7434 RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;
7435 stopStream = callback(stream_.userBuffer, stream_.bufferSize, stream_.callbackInfo.userData);
7438 MUTEX_LOCK(&stream_.mutex);
7440 // The state might change while waiting on a mutex.
7441 if (stream_.state == STREAM_STOPPED)
7446 RtAudioFormat format;
7447 ALport *handle = (ALport *) stream_.apiHandle;
7448 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
7450 // Setup parameters and do buffer conversion if necessary.
7451 if (stream_.doConvertBuffer[0]) {
7452 convertStreamBuffer(OUTPUT);
7453 buffer = stream_.deviceBuffer;
7454 channels = stream_.nDeviceChannels[0];
7455 format = stream_.deviceFormat[0];
7458 buffer = stream_.userBuffer;
7459 channels = stream_.nUserChannels[0];
7460 format = stream_.userFormat;
7463 // Do byte swapping if necessary.
7464 if (stream_.doByteSwap[0])
7465 byteSwapBuffer(buffer, stream_.bufferSize * channels, format);
7467 // Write interleaved samples to device.
7468 alWriteFrames(handle[0], buffer, stream_.bufferSize);
7471 if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
7473 // Setup parameters.
7474 if (stream_.doConvertBuffer[1]) {
7475 buffer = stream_.deviceBuffer;
7476 channels = stream_.nDeviceChannels[1];
7477 format = stream_.deviceFormat[1];
7480 buffer = stream_.userBuffer;
7481 channels = stream_.nUserChannels[1];
7482 format = stream_.userFormat;
7485 // Read interleaved samples from device.
7486 alReadFrames(handle[1], buffer, stream_.bufferSize);
7488 // Do byte swapping if necessary.
7489 if (stream_.doByteSwap[1])
7490 byteSwapBuffer(buffer, stream_.bufferSize * channels, format);
7492 // Do buffer conversion if necessary.
7493 if (stream_.doConvertBuffer[1])
7494 convertStreamBuffer(INPUT);
7498 MUTEX_UNLOCK(&stream_.mutex);
// Honor a stop requested by the user callback (stop call not visible here).
7500 if (stream_.callbackInfo.usingCallback && stopStream)
// Register a user callback and spawn the joinable callback thread
// running ::callbackHandler (round-robin scheduling is requested; it
// only takes effect with sufficient privileges).  Warns and returns
// if a callback is already installed; throws THREAD_ERROR if the
// thread cannot be created.
7504 void RtApiAl :: setStreamCallback(RtAudioCallback callback, void *userData)
7508 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
7509 if ( info->usingCallback ) {
7510 sprintf(message_, "RtApiAl: A callback is already set for this stream!");
7511 error(RtError::WARNING);
7515 info->callback = (void *) callback;
7516 info->userData = userData;
7517 info->usingCallback = true;
7518 info->object = (void *) this;
7520 // Set the thread attributes for joinable and realtime scheduling
7521 // priority. The higher priority will only take affect if the
7522 // program is run as root or suid.
7523 pthread_attr_t attr;
7524 pthread_attr_init(&attr);
7525 pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
7526 pthread_attr_setschedpolicy(&attr, SCHED_RR);
7528 int err = pthread_create(&info->thread, &attr, callbackHandler, &stream_.callbackInfo);
7529 pthread_attr_destroy(&attr);
7531 info->usingCallback = false;
7532 sprintf(message_, "RtApiAl: error starting callback thread!");
7533 error(RtError::THREAD_ERROR);
// Remove the installed callback: stop the stream if running, clear
// usingCallback so the thread's loop exits, join it, then reset the
// callback bookkeeping under the stream mutex.
7537 void RtApiAl :: cancelStreamCallback()
7541 if (stream_.callbackInfo.usingCallback) {
7543 if (stream_.state == STREAM_RUNNING)
7546 MUTEX_LOCK(&stream_.mutex);
7548 stream_.callbackInfo.usingCallback = false;
7549 pthread_join(stream_.callbackInfo.thread, NULL);
7550 stream_.callbackInfo.thread = 0;
7551 stream_.callbackInfo.callback = NULL;
7552 stream_.callbackInfo.userData = NULL;
7554 MUTEX_UNLOCK(&stream_.mutex);
// IRIX AL callback-thread entry point (pthread routine).  Mirrors the
// DirectSound handler: ticks the stream until usingCallback is
// cleared; an RtError from tickStream() is reported and ends the loop.
7558 extern "C" void *callbackHandler(void *ptr)
7560 CallbackInfo *info = (CallbackInfo *) ptr;
7561 RtApiAl *object = (RtApiAl *) info->object;
// Polled flag: cleared by cancelStreamCallback()/closeStream() to stop the thread.
7562 bool *usingCallback = &info->usingCallback;
7564 while ( *usingCallback ) {
7566 object->tickStream();
7568 catch (RtError &exception) {
7569 fprintf(stderr, "\nRtApiAl: callback thread error (%s) ... closing thread.\n\n",
7570 exception.getMessageString());
7578 //******************** End of __IRIX_AL__ *********************//
7582 // *************************************************** //
7584 // Protected common (OS-independent) RtAudio methods.
7586 // *************************************************** //
7588 // This method can be modified to control the behavior of error
7589 // message reporting and throwing.
// Central error dispatcher: WARNINGs (and, in debug builds,
// DEBUG_WARNINGs) print message_ to stderr and return; every other
// type throws an RtError carrying message_ and the type.
7590 void RtApi :: error(RtError::Type type)
7592 if (type == RtError::WARNING) {
7593 fprintf(stderr, "\n%s\n\n", message_);
7595 else if (type == RtError::DEBUG_WARNING) {
7596 #if defined(__RTAUDIO_DEBUG__)
7597 fprintf(stderr, "\n%s\n\n", message_);
7601 #if defined(__RTAUDIO_DEBUG__)
7602 fprintf(stderr, "\n%s\n\n", message_);
7604 throw RtError(std::string(message_), type);
// Guard used by public stream methods: throws INVALID_STREAM if no
// stream has been opened yet.
7608 void RtApi :: verifyStream()
7610 if ( stream_.mode == UNINITIALIZED ) {
7611 sprintf(message_, "RtAudio: a stream was not previously opened!");
7612 error(RtError::INVALID_STREAM);
// Reset all probed capability fields of a device record to their
// "unknown" defaults; the name and platform device id are deliberately
// preserved (they are set before probing).
7616 void RtApi :: clearDeviceInfo(RtApiDevice *info)
7618 // Don't clear the name or DEVICE_ID fields here ... they are
7619 // typically set prior to a call of this function.
7620 info->probed = false;
7621 info->maxOutputChannels = 0;
7622 info->maxInputChannels = 0;
7623 info->maxDuplexChannels = 0;
7624 info->minOutputChannels = 0;
7625 info->minInputChannels = 0;
7626 info->minDuplexChannels = 0;
7627 info->hasDuplexSupport = false;
7628 info->sampleRates.clear();
7629 info->nativeFormats = 0;
// Reset the shared stream_ structure to its pristine state: mode
// UNINITIALIZED, state STOPPED, and zeroed per-direction (index 0 =
// output, 1 = input) channel/format/conversion bookkeeping.
7632 void RtApi :: clearStreamInfo()
7634 stream_.mode = UNINITIALIZED;
7635 stream_.state = STREAM_STOPPED;
7636 stream_.sampleRate = 0;
7637 stream_.bufferSize = 0;
7638 stream_.nBuffers = 0;
7639 stream_.userFormat = 0;
7640 for ( int i=0; i<2; i++ ) {
7641 stream_.device[i] = 0;
7642 stream_.doConvertBuffer[i] = false;
7643 stream_.deInterleave[i] = false;
7644 stream_.doByteSwap[i] = false;
7645 stream_.nUserChannels[i] = 0;
7646 stream_.nDeviceChannels[i] = 0;
7647 stream_.deviceFormat[i] = 0;
// Return the size in bytes of one sample of the given RtAudioFormat
// (SINT16->2; SINT24/SINT32/FLOAT32->4; FLOAT64->8; SINT8->1 -- the
// return statements sit on lines not visible in this view).  Unknown
// formats produce a WARNING.
7651 int RtApi :: formatBytes(RtAudioFormat format)
7653 if (format == RTAUDIO_SINT16)
7655 else if (format == RTAUDIO_SINT24 || format == RTAUDIO_SINT32 ||
7656 format == RTAUDIO_FLOAT32)
7658 else if (format == RTAUDIO_FLOAT64)
7660 else if (format == RTAUDIO_SINT8)
7663 sprintf(message_,"RtApi: undefined format in formatBytes().");
7664 error(RtError::WARNING);
7669 void RtApi :: convertStreamBuffer( StreamMode mode )
7671 // This method does format conversion, input/output channel compensation, and
7672 // data interleaving/deinterleaving. 24-bit integers are assumed to occupy
7673 // the upper three bytes of a 32-bit integer.
7675 int j, jump_in, jump_out, channels;
7676 RtAudioFormat format_in, format_out;
7677 char *input, *output;
7679 if (mode == INPUT) { // convert device to user buffer
7680 input = stream_.deviceBuffer;
7681 output = stream_.userBuffer;
7682 jump_in = stream_.nDeviceChannels[1];
7683 jump_out = stream_.nUserChannels[1];
7684 format_in = stream_.deviceFormat[1];
7685 format_out = stream_.userFormat;
7687 else { // convert user to device buffer
7688 input = stream_.userBuffer;
7689 output = stream_.deviceBuffer;
7690 jump_in = stream_.nUserChannels[0];
7691 jump_out = stream_.nDeviceChannels[0];
7692 format_in = stream_.userFormat;
7693 format_out = stream_.deviceFormat[0];
7695 // clear our device buffer when in/out duplex device channels are different
7696 if ( stream_.mode == DUPLEX &&
7697 stream_.nDeviceChannels[0] != stream_.nDeviceChannels[1] )
7698 memset(output, 0, stream_.bufferSize * jump_out * formatBytes(format_out));
7701 channels = (jump_in < jump_out) ? jump_in : jump_out;
7703 // Set up the interleave/deinterleave offsets
7704 std::vector<int> offset_in(channels);
7705 std::vector<int> offset_out(channels);
7706 if (mode == INPUT && stream_.deInterleave[1]) {
7707 for (int k=0; k<channels; k++) {
7708 offset_in[k] = k * stream_.bufferSize;
7713 else if (mode == OUTPUT && stream_.deInterleave[0]) {
7714 for (int k=0; k<channels; k++) {
7716 offset_out[k] = k * stream_.bufferSize;
7721 for (int k=0; k<channels; k++) {
7727 if (format_out == RTAUDIO_FLOAT64) {
7729 Float64 *out = (Float64 *)output;
7731 if (format_in == RTAUDIO_SINT8) {
7732 signed char *in = (signed char *)input;
7733 scale = 1.0 / 128.0;
7734 for (int i=0; i<stream_.bufferSize; i++) {
7735 for (j=0; j<channels; j++) {
7736 out[offset_out[j]] = (Float64) in[offset_in[j]];
7737 out[offset_out[j]] *= scale;
7743 else if (format_in == RTAUDIO_SINT16) {
7744 Int16 *in = (Int16 *)input;
7745 scale = 1.0 / 32768.0;
7746 for (int i=0; i<stream_.bufferSize; i++) {
7747 for (j=0; j<channels; j++) {
7748 out[offset_out[j]] = (Float64) in[offset_in[j]];
7749 out[offset_out[j]] *= scale;
7755 else if (format_in == RTAUDIO_SINT24) {
7756 Int32 *in = (Int32 *)input;
7757 scale = 1.0 / 2147483648.0;
7758 for (int i=0; i<stream_.bufferSize; i++) {
7759 for (j=0; j<channels; j++) {
7760 out[offset_out[j]] = (Float64) (in[offset_in[j]] & 0xffffff00);
7761 out[offset_out[j]] *= scale;
7767 else if (format_in == RTAUDIO_SINT32) {
7768 Int32 *in = (Int32 *)input;
7769 scale = 1.0 / 2147483648.0;
7770 for (int i=0; i<stream_.bufferSize; i++) {
7771 for (j=0; j<channels; j++) {
7772 out[offset_out[j]] = (Float64) in[offset_in[j]];
7773 out[offset_out[j]] *= scale;
7779 else if (format_in == RTAUDIO_FLOAT32) {
7780 Float32 *in = (Float32 *)input;
7781 for (int i=0; i<stream_.bufferSize; i++) {
7782 for (j=0; j<channels; j++) {
7783 out[offset_out[j]] = (Float64) in[offset_in[j]];
7789 else if (format_in == RTAUDIO_FLOAT64) {
7790 // Channel compensation and/or (de)interleaving only.
7791 Float64 *in = (Float64 *)input;
7792 for (int i=0; i<stream_.bufferSize; i++) {
7793 for (j=0; j<channels; j++) {
7794 out[offset_out[j]] = in[offset_in[j]];
7801 else if (format_out == RTAUDIO_FLOAT32) {
7803 Float32 *out = (Float32 *)output;
7805 if (format_in == RTAUDIO_SINT8) {
7806 signed char *in = (signed char *)input;
7807 scale = 1.0 / 128.0;
7808 for (int i=0; i<stream_.bufferSize; i++) {
7809 for (j=0; j<channels; j++) {
7810 out[offset_out[j]] = (Float32) in[offset_in[j]];
7811 out[offset_out[j]] *= scale;
7817 else if (format_in == RTAUDIO_SINT16) {
7818 Int16 *in = (Int16 *)input;
7819 scale = 1.0 / 32768.0;
7820 for (int i=0; i<stream_.bufferSize; i++) {
7821 for (j=0; j<channels; j++) {
7822 out[offset_out[j]] = (Float32) in[offset_in[j]];
7823 out[offset_out[j]] *= scale;
7829 else if (format_in == RTAUDIO_SINT24) {
7830 Int32 *in = (Int32 *)input;
7831 scale = 1.0 / 2147483648.0;
7832 for (int i=0; i<stream_.bufferSize; i++) {
7833 for (j=0; j<channels; j++) {
7834 out[offset_out[j]] = (Float32) (in[offset_in[j]] & 0xffffff00);
7835 out[offset_out[j]] *= scale;
7841 else if (format_in == RTAUDIO_SINT32) {
7842 Int32 *in = (Int32 *)input;
7843 scale = 1.0 / 2147483648.0;
7844 for (int i=0; i<stream_.bufferSize; i++) {
7845 for (j=0; j<channels; j++) {
7846 out[offset_out[j]] = (Float32) in[offset_in[j]];
7847 out[offset_out[j]] *= scale;
7853 else if (format_in == RTAUDIO_FLOAT32) {
7854 // Channel compensation and/or (de)interleaving only.
7855 Float32 *in = (Float32 *)input;
7856 for (int i=0; i<stream_.bufferSize; i++) {
7857 for (j=0; j<channels; j++) {
7858 out[offset_out[j]] = in[offset_in[j]];
7864 else if (format_in == RTAUDIO_FLOAT64) {
7865 Float64 *in = (Float64 *)input;
7866 for (int i=0; i<stream_.bufferSize; i++) {
7867 for (j=0; j<channels; j++) {
7868 out[offset_out[j]] = (Float32) in[offset_in[j]];
7875 else if (format_out == RTAUDIO_SINT32) {
7876 Int32 *out = (Int32 *)output;
7877 if (format_in == RTAUDIO_SINT8) {
7878 signed char *in = (signed char *)input;
7879 for (int i=0; i<stream_.bufferSize; i++) {
7880 for (j=0; j<channels; j++) {
7881 out[offset_out[j]] = (Int32) in[offset_in[j]];
7882 out[offset_out[j]] <<= 24;
7888 else if (format_in == RTAUDIO_SINT16) {
7889 Int16 *in = (Int16 *)input;
7890 for (int i=0; i<stream_.bufferSize; i++) {
7891 for (j=0; j<channels; j++) {
7892 out[offset_out[j]] = (Int32) in[offset_in[j]];
7893 out[offset_out[j]] <<= 16;
7899 else if (format_in == RTAUDIO_SINT24) {
7900 Int32 *in = (Int32 *)input;
7901 for (int i=0; i<stream_.bufferSize; i++) {
7902 for (j=0; j<channels; j++) {
7903 out[offset_out[j]] = (Int32) in[offset_in[j]];
7909 else if (format_in == RTAUDIO_SINT32) {
7910 // Channel compensation and/or (de)interleaving only.
7911 Int32 *in = (Int32 *)input;
7912 for (int i=0; i<stream_.bufferSize; i++) {
7913 for (j=0; j<channels; j++) {
7914 out[offset_out[j]] = in[offset_in[j]];
7920 else if (format_in == RTAUDIO_FLOAT32) {
7921 Float32 *in = (Float32 *)input;
7922 for (int i=0; i<stream_.bufferSize; i++) {
7923 for (j=0; j<channels; j++) {
7924 out[offset_out[j]] = (Int32) (in[offset_in[j]] * 2147483647.0);
7930 else if (format_in == RTAUDIO_FLOAT64) {
7931 Float64 *in = (Float64 *)input;
7932 for (int i=0; i<stream_.bufferSize; i++) {
7933 for (j=0; j<channels; j++) {
7934 out[offset_out[j]] = (Int32) (in[offset_in[j]] * 2147483647.0);
7941 else if (format_out == RTAUDIO_SINT24) {
7942 Int32 *out = (Int32 *)output;
7943 if (format_in == RTAUDIO_SINT8) {
7944 signed char *in = (signed char *)input;
7945 for (int i=0; i<stream_.bufferSize; i++) {
7946 for (j=0; j<channels; j++) {
7947 out[offset_out[j]] = (Int32) in[offset_in[j]];
7948 out[offset_out[j]] <<= 24;
7954 else if (format_in == RTAUDIO_SINT16) {
7955 Int16 *in = (Int16 *)input;
7956 for (int i=0; i<stream_.bufferSize; i++) {
7957 for (j=0; j<channels; j++) {
7958 out[offset_out[j]] = (Int32) in[offset_in[j]];
7959 out[offset_out[j]] <<= 16;
7965 else if (format_in == RTAUDIO_SINT24) {
7966 // Channel compensation and/or (de)interleaving only.
7967 Int32 *in = (Int32 *)input;
7968 for (int i=0; i<stream_.bufferSize; i++) {
7969 for (j=0; j<channels; j++) {
7970 out[offset_out[j]] = in[offset_in[j]];
7976 else if (format_in == RTAUDIO_SINT32) {
7977 Int32 *in = (Int32 *)input;
7978 for (int i=0; i<stream_.bufferSize; i++) {
7979 for (j=0; j<channels; j++) {
7980 out[offset_out[j]] = (Int32) (in[offset_in[j]] & 0xffffff00);
7986 else if (format_in == RTAUDIO_FLOAT32) {
7987 Float32 *in = (Float32 *)input;
7988 for (int i=0; i<stream_.bufferSize; i++) {
7989 for (j=0; j<channels; j++) {
7990 out[offset_out[j]] = (Int32) (in[offset_in[j]] * 2147483647.0);
7996 else if (format_in == RTAUDIO_FLOAT64) {
7997 Float64 *in = (Float64 *)input;
7998 for (int i=0; i<stream_.bufferSize; i++) {
7999 for (j=0; j<channels; j++) {
8000 out[offset_out[j]] = (Int32) (in[offset_in[j]] * 2147483647.0);
8007 else if (format_out == RTAUDIO_SINT16) {
8008 Int16 *out = (Int16 *)output;
8009 if (format_in == RTAUDIO_SINT8) {
8010 signed char *in = (signed char *)input;
8011 for (int i=0; i<stream_.bufferSize; i++) {
8012 for (j=0; j<channels; j++) {
8013 out[offset_out[j]] = (Int16) in[offset_in[j]];
8014 out[offset_out[j]] <<= 8;
8020 else if (format_in == RTAUDIO_SINT16) {
8021 // Channel compensation and/or (de)interleaving only.
8022 Int16 *in = (Int16 *)input;
8023 for (int i=0; i<stream_.bufferSize; i++) {
8024 for (j=0; j<channels; j++) {
8025 out[offset_out[j]] = in[offset_in[j]];
8031 else if (format_in == RTAUDIO_SINT24) {
8032 Int32 *in = (Int32 *)input;
8033 for (int i=0; i<stream_.bufferSize; i++) {
8034 for (j=0; j<channels; j++) {
8035 out[offset_out[j]] = (Int16) ((in[offset_in[j]] >> 16) & 0x0000ffff);
8041 else if (format_in == RTAUDIO_SINT32) {
8042 Int32 *in = (Int32 *)input;
8043 for (int i=0; i<stream_.bufferSize; i++) {
8044 for (j=0; j<channels; j++) {
8045 out[offset_out[j]] = (Int16) ((in[offset_in[j]] >> 16) & 0x0000ffff);
8051 else if (format_in == RTAUDIO_FLOAT32) {
8052 Float32 *in = (Float32 *)input;
8053 for (int i=0; i<stream_.bufferSize; i++) {
8054 for (j=0; j<channels; j++) {
8055 out[offset_out[j]] = (Int16) (in[offset_in[j]] * 32767.0);
8061 else if (format_in == RTAUDIO_FLOAT64) {
8062 Float64 *in = (Float64 *)input;
8063 for (int i=0; i<stream_.bufferSize; i++) {
8064 for (j=0; j<channels; j++) {
8065 out[offset_out[j]] = (Int16) (in[offset_in[j]] * 32767.0);
8072 else if (format_out == RTAUDIO_SINT8) {
8073 signed char *out = (signed char *)output;
8074 if (format_in == RTAUDIO_SINT8) {
8075 // Channel compensation and/or (de)interleaving only.
8076 signed char *in = (signed char *)input;
8077 for (int i=0; i<stream_.bufferSize; i++) {
8078 for (j=0; j<channels; j++) {
8079 out[offset_out[j]] = in[offset_in[j]];
8085 if (format_in == RTAUDIO_SINT16) {
8086 Int16 *in = (Int16 *)input;
8087 for (int i=0; i<stream_.bufferSize; i++) {
8088 for (j=0; j<channels; j++) {
8089 out[offset_out[j]] = (signed char) ((in[offset_in[j]] >> 8) & 0x00ff);
8095 else if (format_in == RTAUDIO_SINT24) {
8096 Int32 *in = (Int32 *)input;
8097 for (int i=0; i<stream_.bufferSize; i++) {
8098 for (j=0; j<channels; j++) {
8099 out[offset_out[j]] = (signed char) ((in[offset_in[j]] >> 24) & 0x000000ff);
8105 else if (format_in == RTAUDIO_SINT32) {
8106 Int32 *in = (Int32 *)input;
8107 for (int i=0; i<stream_.bufferSize; i++) {
8108 for (j=0; j<channels; j++) {
8109 out[offset_out[j]] = (signed char) ((in[offset_in[j]] >> 24) & 0x000000ff);
8115 else if (format_in == RTAUDIO_FLOAT32) {
8116 Float32 *in = (Float32 *)input;
8117 for (int i=0; i<stream_.bufferSize; i++) {
8118 for (j=0; j<channels; j++) {
8119 out[offset_out[j]] = (signed char) (in[offset_in[j]] * 127.0);
8125 else if (format_in == RTAUDIO_FLOAT64) {
8126 Float64 *in = (Float64 *)input;
8127 for (int i=0; i<stream_.bufferSize; i++) {
8128 for (j=0; j<channels; j++) {
8129 out[offset_out[j]] = (signed char) (in[offset_in[j]] * 127.0);
8138 void RtApi :: byteSwapBuffer( char *buffer, int samples, RtAudioFormat format )
8144 if (format == RTAUDIO_SINT16) {
8145 for (int i=0; i<samples; i++) {
8146 // Swap 1st and 2nd bytes.
8151 // Increment 2 bytes.
8155 else if (format == RTAUDIO_SINT24 ||
8156 format == RTAUDIO_SINT32 ||
8157 format == RTAUDIO_FLOAT32) {
8158 for (int i=0; i<samples; i++) {
8159 // Swap 1st and 4th bytes.
8164 // Swap 2nd and 3rd bytes.
8170 // Increment 4 bytes.
8174 else if (format == RTAUDIO_FLOAT64) {
8175 for (int i=0; i<samples; i++) {
8176 // Swap 1st and 8th bytes
8181 // Swap 2nd and 7th bytes
8187 // Swap 3rd and 6th bytes
8193 // Swap 4th and 5th bytes
8199 // Increment 8 bytes.