1 /************************************************************************/
3 \brief Realtime audio i/o C++ classes.
5 RtAudio provides a common API (Application Programming Interface)
6 for realtime audio input/output across Linux (native ALSA, Jack,
7 and OSS), SGI, Macintosh OS X (CoreAudio), and Windows
8 (DirectSound and ASIO) operating systems.
10 RtAudio WWW site: http://music.mcgill.ca/~gary/rtaudio/
12 RtAudio: realtime audio i/o C++ classes
13 Copyright (c) 2001-2005 Gary P. Scavone
15 Permission is hereby granted, free of charge, to any person
16 obtaining a copy of this software and associated documentation files
17 (the "Software"), to deal in the Software without restriction,
18 including without limitation the rights to use, copy, modify, merge,
19 publish, distribute, sublicense, and/or sell copies of the Software,
20 and to permit persons to whom the Software is furnished to do so,
21 subject to the following conditions:
23 The above copyright notice and this permission notice shall be
24 included in all copies or substantial portions of the Software.
26 Any person wishing to distribute modifications to the Software is
27 requested to send the modifications to the original developer so that
28 they can be incorporated into the canonical version.
30 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
31 EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
32 MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
33 IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
34 ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
35 CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
36 WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
38 /************************************************************************/
40 // RtAudio: Version 3.0.3 (18 November 2005)
46 // Static variable definitions.
47 const unsigned int RtApi::MAX_SAMPLE_RATES = 14;
48 const unsigned int RtApi::SAMPLE_RATES[] = {
49 4000, 5512, 8000, 9600, 11025, 16000, 22050,
50 32000, 44100, 48000, 88200, 96000, 176400, 192000
// OS-dependent mutex wrappers so the common code stays platform
// neutral: Win32 critical sections on Windows, POSIX mutexes elsewhere.
// Note: no trailing semicolons inside the macros — call sites supply them.
#if defined(__WINDOWS_DS__) || defined(__WINDOWS_ASIO__)
  #define MUTEX_INITIALIZE(A) InitializeCriticalSection(A)
  #define MUTEX_DESTROY(A)    DeleteCriticalSection(A)
  #define MUTEX_LOCK(A)       EnterCriticalSection(A)
  #define MUTEX_UNLOCK(A)     LeaveCriticalSection(A)
#else // pthread API
  #define MUTEX_INITIALIZE(A) pthread_mutex_init(A, NULL)
  #define MUTEX_DESTROY(A)    pthread_mutex_destroy(A)
  #define MUTEX_LOCK(A)       pthread_mutex_lock(A)
  #define MUTEX_UNLOCK(A)     pthread_mutex_unlock(A)
#endif
65 // *************************************************** //
67 // Public common (OS-independent) methods.
69 // *************************************************** //
71 RtAudio :: RtAudio( RtAudioApi api )
76 RtAudio :: RtAudio( int outputDevice, int outputChannels,
77 int inputDevice, int inputChannels,
78 RtAudioFormat format, int sampleRate,
79 int *bufferSize, int numberOfBuffers, RtAudioApi api )
84 rtapi_->openStream( outputDevice, outputChannels,
85 inputDevice, inputChannels,
87 bufferSize, numberOfBuffers );
89 catch (RtError &exception) {
90 // Deallocate the RtApi instance.
96 RtAudio :: RtAudio( int outputDevice, int outputChannels,
97 int inputDevice, int inputChannels,
98 RtAudioFormat format, int sampleRate,
99 int *bufferSize, int *numberOfBuffers, RtAudioApi api )
104 rtapi_->openStream( outputDevice, outputChannels,
105 inputDevice, inputChannels,
107 bufferSize, numberOfBuffers );
109 catch (RtError &exception) {
110 // Deallocate the RtApi instance.
116 RtAudio :: ~RtAudio()
121 void RtAudio :: openStream( int outputDevice, int outputChannels,
122 int inputDevice, int inputChannels,
123 RtAudioFormat format, int sampleRate,
124 int *bufferSize, int numberOfBuffers )
126 rtapi_->openStream( outputDevice, outputChannels, inputDevice,
127 inputChannels, format, sampleRate,
128 bufferSize, numberOfBuffers );
131 void RtAudio :: openStream( int outputDevice, int outputChannels,
132 int inputDevice, int inputChannels,
133 RtAudioFormat format, int sampleRate,
134 int *bufferSize, int *numberOfBuffers )
136 rtapi_->openStream( outputDevice, outputChannels, inputDevice,
137 inputChannels, format, sampleRate,
138 bufferSize, *numberOfBuffers );
141 void RtAudio::initialize( RtAudioApi api )
145 // First look for a compiled match to a specified API value. If one
146 // of these constructors throws an error, it will be passed up the
147 // inheritance chain.
148 #if defined(__LINUX_JACK__)
149 if ( api == LINUX_JACK )
150 rtapi_ = new RtApiJack();
152 #if defined(__LINUX_ALSA__)
153 if ( api == LINUX_ALSA )
154 rtapi_ = new RtApiAlsa();
156 #if defined(__LINUX_OSS__)
157 if ( api == LINUX_OSS )
158 rtapi_ = new RtApiOss();
160 #if defined(__WINDOWS_ASIO__)
161 if ( api == WINDOWS_ASIO )
162 rtapi_ = new RtApiAsio();
164 #if defined(__WINDOWS_DS__)
165 if ( api == WINDOWS_DS )
166 rtapi_ = new RtApiDs();
168 #if defined(__IRIX_AL__)
169 if ( api == IRIX_AL )
170 rtapi_ = new RtApiAl();
172 #if defined(__MACOSX_CORE__)
173 if ( api == MACOSX_CORE )
174 rtapi_ = new RtApiCore();
177 if ( rtapi_ ) return;
179 // No compiled support for specified API value.
180 throw RtError( "RtAudio: no compiled support for specified API argument!", RtError::INVALID_PARAMETER );
183 // No specified API ... search for "best" option.
185 #if defined(__LINUX_JACK__)
186 rtapi_ = new RtApiJack();
187 #elif defined(__WINDOWS_ASIO__)
188 rtapi_ = new RtApiAsio();
189 #elif defined(__IRIX_AL__)
190 rtapi_ = new RtApiAl();
191 #elif defined(__MACOSX_CORE__)
192 rtapi_ = new RtApiCore();
198 #if defined(__RTAUDIO_DEBUG__)
199 fprintf(stderr, "\nRtAudio: no devices found for first api option (JACK, ASIO, Al, or CoreAudio).\n\n");
204 if ( rtapi_ ) return;
206 // Try second API support
209 #if defined(__LINUX_ALSA__)
210 rtapi_ = new RtApiAlsa();
211 #elif defined(__WINDOWS_DS__)
212 rtapi_ = new RtApiDs();
218 #if defined(__RTAUDIO_DEBUG__)
219 fprintf(stderr, "\nRtAudio: no devices found for second api option (Alsa or DirectSound).\n\n");
225 if ( rtapi_ ) return;
227 // Try third API support
229 #if defined(__LINUX_OSS__)
231 rtapi_ = new RtApiOss();
233 catch (RtError &error) {
243 throw RtError( "RtAudio: no devices found for compiled audio APIs!", RtError::NO_DEVICES_FOUND );
249 stream_.mode = UNINITIALIZED;
250 stream_.state = STREAM_STOPPED;
251 stream_.apiHandle = 0;
252 MUTEX_INITIALIZE(&stream_.mutex);
257 MUTEX_DESTROY(&stream_.mutex);
260 void RtApi :: openStream( int outputDevice, int outputChannels,
261 int inputDevice, int inputChannels,
262 RtAudioFormat format, int sampleRate,
263 int *bufferSize, int *numberOfBuffers )
265 this->openStream( outputDevice, outputChannels, inputDevice,
266 inputChannels, format, sampleRate,
267 bufferSize, *numberOfBuffers );
268 *numberOfBuffers = stream_.nBuffers;
271 void RtApi :: openStream( int outputDevice, int outputChannels,
272 int inputDevice, int inputChannels,
273 RtAudioFormat format, int sampleRate,
274 int *bufferSize, int numberOfBuffers )
276 if ( stream_.mode != UNINITIALIZED ) {
277 sprintf(message_, "RtApi: only one open stream allowed per class instance.");
278 error(RtError::INVALID_STREAM);
281 if (outputChannels < 1 && inputChannels < 1) {
282 sprintf(message_,"RtApi: one or both 'channel' parameters must be greater than zero.");
283 error(RtError::INVALID_PARAMETER);
286 if ( formatBytes(format) == 0 ) {
287 sprintf(message_,"RtApi: 'format' parameter value is undefined.");
288 error(RtError::INVALID_PARAMETER);
291 if ( outputChannels > 0 ) {
292 if (outputDevice > nDevices_ || outputDevice < 0) {
293 sprintf(message_,"RtApi: 'outputDevice' parameter value (%d) is invalid.", outputDevice);
294 error(RtError::INVALID_PARAMETER);
298 if ( inputChannels > 0 ) {
299 if (inputDevice > nDevices_ || inputDevice < 0) {
300 sprintf(message_,"RtApi: 'inputDevice' parameter value (%d) is invalid.", inputDevice);
301 error(RtError::INVALID_PARAMETER);
305 std::string errorMessages;
307 bool result = FAILURE;
308 int device, defaultDevice = 0;
311 if ( outputChannels > 0 ) {
314 channels = outputChannels;
316 if ( outputDevice == 0 ) { // Try default device first.
317 defaultDevice = getDefaultOutputDevice();
318 device = defaultDevice;
321 device = outputDevice - 1;
323 for ( int i=-1; i<nDevices_; i++ ) {
325 if ( i == defaultDevice ) continue;
328 if ( devices_[device].probed == false ) {
329 // If the device wasn't successfully probed before, try it
331 clearDeviceInfo(&devices_[device]);
332 probeDeviceInfo(&devices_[device]);
334 if ( devices_[device].probed )
335 result = probeDeviceOpen(device, mode, channels, sampleRate,
336 format, bufferSize, numberOfBuffers);
337 if ( result == SUCCESS ) break;
338 errorMessages.append( " " );
339 errorMessages.append( message_ );
340 errorMessages.append( "\n" );
341 if ( outputDevice > 0 ) break;
346 if ( inputChannels > 0 && ( result == SUCCESS || outputChannels <= 0 ) ) {
349 channels = inputChannels;
351 if ( inputDevice == 0 ) { // Try default device first.
352 defaultDevice = getDefaultInputDevice();
353 device = defaultDevice;
356 device = inputDevice - 1;
358 for ( int i=-1; i<nDevices_; i++ ) {
360 if ( i == defaultDevice ) continue;
363 if ( devices_[device].probed == false ) {
364 // If the device wasn't successfully probed before, try it
366 clearDeviceInfo(&devices_[device]);
367 probeDeviceInfo(&devices_[device]);
369 if ( devices_[device].probed )
370 result = probeDeviceOpen( device, mode, channels, sampleRate,
371 format, bufferSize, numberOfBuffers );
372 if ( result == SUCCESS ) break;
373 errorMessages.append( " " );
374 errorMessages.append( message_ );
375 errorMessages.append( "\n" );
376 if ( inputDevice > 0 ) break;
380 if ( result == SUCCESS )
383 // If we get here, all attempted probes failed. Close any opened
384 // devices and clear the stream structure.
385 if ( stream_.mode != UNINITIALIZED ) closeStream();
387 if ( ( outputDevice == 0 && outputChannels > 0 )
388 || ( inputDevice == 0 && inputChannels > 0 ) )
389 sprintf(message_,"RtApi: no devices found for given stream parameters: \n%s",
390 errorMessages.c_str());
392 sprintf(message_,"RtApi: unable to open specified device(s) with given stream parameters: \n%s",
393 errorMessages.c_str());
394 error(RtError::INVALID_PARAMETER);
399 int RtApi :: getDeviceCount(void)
401 return devices_.size();
404 RtApi::StreamState RtApi :: getStreamState( void ) const
406 return stream_.state;
409 RtAudioDeviceInfo RtApi :: getDeviceInfo( int device )
411 if (device > (int) devices_.size() || device < 1) {
412 sprintf(message_, "RtApi: invalid device specifier (%d)!", device);
413 error(RtError::INVALID_DEVICE);
416 RtAudioDeviceInfo info;
417 int deviceIndex = device - 1;
419 // If the device wasn't successfully probed before, try it now (or again).
420 if (devices_[deviceIndex].probed == false) {
421 clearDeviceInfo(&devices_[deviceIndex]);
422 probeDeviceInfo(&devices_[deviceIndex]);
425 info.name.append( devices_[deviceIndex].name );
426 info.probed = devices_[deviceIndex].probed;
427 if ( info.probed == true ) {
428 info.outputChannels = devices_[deviceIndex].maxOutputChannels;
429 info.inputChannels = devices_[deviceIndex].maxInputChannels;
430 info.duplexChannels = devices_[deviceIndex].maxDuplexChannels;
431 for (unsigned int i=0; i<devices_[deviceIndex].sampleRates.size(); i++)
432 info.sampleRates.push_back( devices_[deviceIndex].sampleRates[i] );
433 info.nativeFormats = devices_[deviceIndex].nativeFormats;
434 if ( (deviceIndex == getDefaultOutputDevice()) ||
435 (deviceIndex == getDefaultInputDevice()) )
436 info.isDefault = true;
442 char * const RtApi :: getStreamBuffer(void)
445 return stream_.userBuffer;
448 int RtApi :: getDefaultInputDevice(void)
450 // Should be implemented in subclasses if appropriate.
454 int RtApi :: getDefaultOutputDevice(void)
456 // Should be implemented in subclasses if appropriate.
460 void RtApi :: closeStream(void)
462 // MUST be implemented in subclasses!
465 void RtApi :: probeDeviceInfo( RtApiDevice *info )
467 // MUST be implemented in subclasses!
470 bool RtApi :: probeDeviceOpen( int device, StreamMode mode, int channels,
471 int sampleRate, RtAudioFormat format,
472 int *bufferSize, int numberOfBuffers )
474 // MUST be implemented in subclasses!
479 // *************************************************** //
481 // OS/API-specific methods.
483 // *************************************************** //
485 #if defined(__LINUX_OSS__)
#include <errno.h>
#include <fcntl.h>
#include <math.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <sys/soundcard.h>
#include <sys/stat.h>
#include <sys/types.h>
497 #define DAC_NAME "/dev/dsp"
498 #define MAX_DEVICES 16
499 #define MAX_CHANNELS 16
501 extern "C" void *ossCallbackHandler(void * ptr);
503 RtApiOss :: RtApiOss()
507 if (nDevices_ <= 0) {
508 sprintf(message_, "RtApiOss: no Linux OSS audio devices found!");
509 error(RtError::NO_DEVICES_FOUND);
513 RtApiOss :: ~RtApiOss()
515 if ( stream_.mode != UNINITIALIZED )
519 void RtApiOss :: initialize(void)
521 // Count cards and devices
524 // We check /dev/dsp before probing devices. /dev/dsp is supposed to
525 // be a link to the "default" audio device, of the form /dev/dsp0,
526 // /dev/dsp1, etc... However, I've seen many cases where /dev/dsp was a
527 // real device, so we need to check for that. Also, sometimes the
528 // link is to /dev/dspx and other times just dspx. I'm not sure how
529 // the latter works, but it does.
530 char device_name[16];
534 if (lstat(DAC_NAME, &dspstat) == 0) {
535 if (S_ISLNK(dspstat.st_mode)) {
536 i = readlink(DAC_NAME, device_name, sizeof(device_name));
538 device_name[i] = '\0';
539 if (i > 8) { // check for "/dev/dspx"
540 if (!strncmp(DAC_NAME, device_name, 8))
541 dsplink = atoi(&device_name[8]);
543 else if (i > 3) { // check for "dspx"
544 if (!strncmp("dsp", device_name, 3))
545 dsplink = atoi(&device_name[3]);
549 sprintf(message_, "RtApiOss: cannot read value of symbolic link %s.", DAC_NAME);
550 error(RtError::SYSTEM_ERROR);
555 sprintf(message_, "RtApiOss: cannot stat %s.", DAC_NAME);
556 error(RtError::SYSTEM_ERROR);
559 // The OSS API doesn't provide a routine for determining the number
560 // of devices. Thus, we'll just pursue a brute force method. The
561 // idea is to start with /dev/dsp(0) and continue with higher device
562 // numbers until we reach MAX_DSP_DEVICES. This should tell us how
563 // many devices we have ... it is not a fullproof scheme, but hopefully
564 // it will work most of the time.
567 for (i=-1; i<MAX_DEVICES; i++) {
569 // Probe /dev/dsp first, since it is supposed to be the default device.
571 sprintf(device_name, "%s", DAC_NAME);
572 else if (i == dsplink)
573 continue; // We've aready probed this device via /dev/dsp link ... try next device.
575 sprintf(device_name, "%s%d", DAC_NAME, i);
577 // First try to open the device for playback, then record mode.
578 fd = open(device_name, O_WRONLY | O_NONBLOCK);
580 // Open device for playback failed ... either busy or doesn't exist.
581 if (errno != EBUSY && errno != EAGAIN) {
582 // Try to open for capture
583 fd = open(device_name, O_RDONLY | O_NONBLOCK);
585 // Open device for record failed.
586 if (errno != EBUSY && errno != EAGAIN)
589 sprintf(message_, "RtApiOss: OSS record device (%s) is busy.", device_name);
590 error(RtError::WARNING);
591 // still count it for now
596 sprintf(message_, "RtApiOss: OSS playback device (%s) is busy.", device_name);
597 error(RtError::WARNING);
598 // still count it for now
602 if (fd >= 0) close(fd);
604 device.name.append( (const char *)device_name, strlen(device_name)+1);
605 devices_.push_back(device);
610 void RtApiOss :: probeDeviceInfo(RtApiDevice *info)
612 int i, fd, channels, mask;
614 // The OSS API doesn't provide a means for probing the capabilities
615 // of devices. Thus, we'll just pursue a brute force method.
617 // First try for playback
618 fd = open(info->name.c_str(), O_WRONLY | O_NONBLOCK);
620 // Open device failed ... either busy or doesn't exist
621 if (errno == EBUSY || errno == EAGAIN)
622 sprintf(message_, "RtApiOss: OSS playback device (%s) is busy and cannot be probed.",
625 sprintf(message_, "RtApiOss: OSS playback device (%s) open error.", info->name.c_str());
626 error(RtError::DEBUG_WARNING);
630 // We have an open device ... see how many channels it can handle
631 for (i=MAX_CHANNELS; i>0; i--) {
633 if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1) {
634 // This would normally indicate some sort of hardware error, but under ALSA's
635 // OSS emulation, it sometimes indicates an invalid channel value. Further,
636 // the returned channel value is not changed. So, we'll ignore the possible
638 continue; // try next channel number
640 // Check to see whether the device supports the requested number of channels
641 if (channels != i ) continue; // try next channel number
642 // If here, we found the largest working channel value
645 info->maxOutputChannels = i;
647 // Now find the minimum number of channels it can handle
648 for (i=1; i<=info->maxOutputChannels; i++) {
650 if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1 || channels != i)
651 continue; // try next channel number
652 // If here, we found the smallest working channel value
655 info->minOutputChannels = i;
659 // Now try for capture
660 fd = open(info->name.c_str(), O_RDONLY | O_NONBLOCK);
662 // Open device for capture failed ... either busy or doesn't exist
663 if (errno == EBUSY || errno == EAGAIN)
664 sprintf(message_, "RtApiOss: OSS capture device (%s) is busy and cannot be probed.",
667 sprintf(message_, "RtApiOss: OSS capture device (%s) open error.", info->name.c_str());
668 error(RtError::DEBUG_WARNING);
669 if (info->maxOutputChannels == 0)
670 // didn't open for playback either ... device invalid
672 goto probe_parameters;
675 // We have the device open for capture ... see how many channels it can handle
676 for (i=MAX_CHANNELS; i>0; i--) {
678 if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1 || channels != i) {
679 continue; // as above
681 // If here, we found a working channel value
684 info->maxInputChannels = i;
686 // Now find the minimum number of channels it can handle
687 for (i=1; i<=info->maxInputChannels; i++) {
689 if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1 || channels != i)
690 continue; // try next channel number
691 // If here, we found the smallest working channel value
694 info->minInputChannels = i;
697 if (info->maxOutputChannels == 0 && info->maxInputChannels == 0) {
698 sprintf(message_, "RtApiOss: device (%s) reports zero channels for input and output.",
700 error(RtError::DEBUG_WARNING);
704 // If device opens for both playback and capture, we determine the channels.
705 if (info->maxOutputChannels == 0 || info->maxInputChannels == 0)
706 goto probe_parameters;
708 fd = open(info->name.c_str(), O_RDWR | O_NONBLOCK);
710 goto probe_parameters;
712 ioctl(fd, SNDCTL_DSP_SETDUPLEX, 0);
713 ioctl(fd, SNDCTL_DSP_GETCAPS, &mask);
714 if (mask & DSP_CAP_DUPLEX) {
715 info->hasDuplexSupport = true;
716 // We have the device open for duplex ... see how many channels it can handle
717 for (i=MAX_CHANNELS; i>0; i--) {
719 if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1 || channels != i)
720 continue; // as above
721 // If here, we found a working channel value
724 info->maxDuplexChannels = i;
726 // Now find the minimum number of channels it can handle
727 for (i=1; i<=info->maxDuplexChannels; i++) {
729 if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1 || channels != i)
730 continue; // try next channel number
731 // If here, we found the smallest working channel value
734 info->minDuplexChannels = i;
739 // At this point, we need to figure out the supported data formats
740 // and sample rates. We'll proceed by openning the device in the
741 // direction with the maximum number of channels, or playback if
742 // they are equal. This might limit our sample rate options, but so
745 if (info->maxOutputChannels >= info->maxInputChannels) {
746 fd = open(info->name.c_str(), O_WRONLY | O_NONBLOCK);
747 channels = info->maxOutputChannels;
750 fd = open(info->name.c_str(), O_RDONLY | O_NONBLOCK);
751 channels = info->maxInputChannels;
755 // We've got some sort of conflict ... abort
756 sprintf(message_, "RtApiOss: device (%s) won't reopen during probe.",
758 error(RtError::DEBUG_WARNING);
762 // We have an open device ... set to maximum channels.
764 if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1 || channels != i) {
765 // We've got some sort of conflict ... abort
767 sprintf(message_, "RtApiOss: device (%s) won't revert to previous channel setting.",
769 error(RtError::DEBUG_WARNING);
773 if (ioctl(fd, SNDCTL_DSP_GETFMTS, &mask) == -1) {
775 sprintf(message_, "RtApiOss: device (%s) can't get supported audio formats.",
777 error(RtError::DEBUG_WARNING);
781 // Probe the supported data formats ... we don't care about endian-ness just yet.
783 info->nativeFormats = 0;
784 #if defined (AFMT_S32_BE)
785 // This format does not seem to be in the 2.4 kernel version of OSS soundcard.h
786 if (mask & AFMT_S32_BE) {
787 format = AFMT_S32_BE;
788 info->nativeFormats |= RTAUDIO_SINT32;
791 #if defined (AFMT_S32_LE)
792 /* This format is not in the 2.4.4 kernel version of OSS soundcard.h */
793 if (mask & AFMT_S32_LE) {
794 format = AFMT_S32_LE;
795 info->nativeFormats |= RTAUDIO_SINT32;
798 if (mask & AFMT_S8) {
800 info->nativeFormats |= RTAUDIO_SINT8;
802 if (mask & AFMT_S16_BE) {
803 format = AFMT_S16_BE;
804 info->nativeFormats |= RTAUDIO_SINT16;
806 if (mask & AFMT_S16_LE) {
807 format = AFMT_S16_LE;
808 info->nativeFormats |= RTAUDIO_SINT16;
811 // Check that we have at least one supported format
812 if (info->nativeFormats == 0) {
814 sprintf(message_, "RtApiOss: device (%s) data format not supported by RtAudio.",
816 error(RtError::DEBUG_WARNING);
822 if (ioctl(fd, SNDCTL_DSP_SETFMT, &format) == -1 || format != i) {
824 sprintf(message_, "RtApiOss: device (%s) error setting data format.",
826 error(RtError::DEBUG_WARNING);
830 // Probe the supported sample rates.
831 info->sampleRates.clear();
832 for (unsigned int k=0; k<MAX_SAMPLE_RATES; k++) {
833 int speed = SAMPLE_RATES[k];
834 if (ioctl(fd, SNDCTL_DSP_SPEED, &speed) != -1 && speed == (int)SAMPLE_RATES[k])
835 info->sampleRates.push_back(speed);
838 if (info->sampleRates.size() == 0) {
840 sprintf(message_, "RtApiOss: no supported sample rates found for device (%s).",
842 error(RtError::DEBUG_WARNING);
846 // That's all ... close the device and return
852 bool RtApiOss :: probeDeviceOpen(int device, StreamMode mode, int channels,
853 int sampleRate, RtAudioFormat format,
854 int *bufferSize, int numberOfBuffers)
856 int buffers, buffer_bytes, device_channels, device_format;
858 int *handle = (int *) stream_.apiHandle;
860 const char *name = devices_[device].name.c_str();
863 fd = open(name, O_WRONLY | O_NONBLOCK);
864 else { // mode == INPUT
865 if (stream_.mode == OUTPUT && stream_.device[0] == device) {
866 // We just set the same device for playback ... close and reopen for duplex (OSS only).
869 // First check that the number previously set channels is the same.
870 if (stream_.nUserChannels[0] != channels) {
871 sprintf(message_, "RtApiOss: input/output channels must be equal for OSS duplex device (%s).", name);
874 fd = open(name, O_RDWR | O_NONBLOCK);
877 fd = open(name, O_RDONLY | O_NONBLOCK);
881 if (errno == EBUSY || errno == EAGAIN)
882 sprintf(message_, "RtApiOss: device (%s) is busy and cannot be opened.",
885 sprintf(message_, "RtApiOss: device (%s) cannot be opened.", name);
889 // Now reopen in blocking mode.
892 fd = open(name, O_WRONLY | O_SYNC);
893 else { // mode == INPUT
894 if (stream_.mode == OUTPUT && stream_.device[0] == device)
895 fd = open(name, O_RDWR | O_SYNC);
897 fd = open(name, O_RDONLY | O_SYNC);
901 sprintf(message_, "RtApiOss: device (%s) cannot be opened.", name);
905 // Get the sample format mask
907 if (ioctl(fd, SNDCTL_DSP_GETFMTS, &mask) == -1) {
909 sprintf(message_, "RtApiOss: device (%s) can't get supported audio formats.",
914 // Determine how to set the device format.
915 stream_.userFormat = format;
917 stream_.doByteSwap[mode] = false;
918 if (format == RTAUDIO_SINT8) {
919 if (mask & AFMT_S8) {
920 device_format = AFMT_S8;
921 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
924 else if (format == RTAUDIO_SINT16) {
925 if (mask & AFMT_S16_NE) {
926 device_format = AFMT_S16_NE;
927 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
929 #if BYTE_ORDER == LITTLE_ENDIAN
930 else if (mask & AFMT_S16_BE) {
931 device_format = AFMT_S16_BE;
932 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
933 stream_.doByteSwap[mode] = true;
936 else if (mask & AFMT_S16_LE) {
937 device_format = AFMT_S16_LE;
938 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
939 stream_.doByteSwap[mode] = true;
943 #if defined (AFMT_S32_NE) && defined (AFMT_S32_LE) && defined (AFMT_S32_BE)
944 else if (format == RTAUDIO_SINT32) {
945 if (mask & AFMT_S32_NE) {
946 device_format = AFMT_S32_NE;
947 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
949 #if BYTE_ORDER == LITTLE_ENDIAN
950 else if (mask & AFMT_S32_BE) {
951 device_format = AFMT_S32_BE;
952 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
953 stream_.doByteSwap[mode] = true;
956 else if (mask & AFMT_S32_LE) {
957 device_format = AFMT_S32_LE;
958 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
959 stream_.doByteSwap[mode] = true;
965 if (device_format == -1) {
966 // The user requested format is not natively supported by the device.
967 if (mask & AFMT_S16_NE) {
968 device_format = AFMT_S16_NE;
969 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
971 #if BYTE_ORDER == LITTLE_ENDIAN
972 else if (mask & AFMT_S16_BE) {
973 device_format = AFMT_S16_BE;
974 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
975 stream_.doByteSwap[mode] = true;
978 else if (mask & AFMT_S16_LE) {
979 device_format = AFMT_S16_LE;
980 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
981 stream_.doByteSwap[mode] = true;
984 #if defined (AFMT_S32_NE) && defined (AFMT_S32_LE) && defined (AFMT_S32_BE)
985 else if (mask & AFMT_S32_NE) {
986 device_format = AFMT_S32_NE;
987 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
989 #if BYTE_ORDER == LITTLE_ENDIAN
990 else if (mask & AFMT_S32_BE) {
991 device_format = AFMT_S32_BE;
992 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
993 stream_.doByteSwap[mode] = true;
996 else if (mask & AFMT_S32_LE) {
997 device_format = AFMT_S32_LE;
998 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
999 stream_.doByteSwap[mode] = true;
1003 else if (mask & AFMT_S8) {
1004 device_format = AFMT_S8;
1005 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
1009 if (stream_.deviceFormat[mode] == 0) {
1010 // This really shouldn't happen ...
1012 sprintf(message_, "RtApiOss: device (%s) data format not supported by RtAudio.",
1017 // Determine the number of channels for this device. Note that the
1018 // channel value requested by the user might be < min_X_Channels.
1019 stream_.nUserChannels[mode] = channels;
1020 device_channels = channels;
1021 if (mode == OUTPUT) {
1022 if (channels < devices_[device].minOutputChannels)
1023 device_channels = devices_[device].minOutputChannels;
1025 else { // mode == INPUT
1026 if (stream_.mode == OUTPUT && stream_.device[0] == device) {
1027 // We're doing duplex setup here.
1028 if (channels < devices_[device].minDuplexChannels)
1029 device_channels = devices_[device].minDuplexChannels;
1032 if (channels < devices_[device].minInputChannels)
1033 device_channels = devices_[device].minInputChannels;
1036 stream_.nDeviceChannels[mode] = device_channels;
1038 // Attempt to set the buffer size. According to OSS, the minimum
1039 // number of buffers is two. The supposed minimum buffer size is 16
1040 // bytes, so that will be our lower bound. The argument to this
1041 // call is in the form 0xMMMMSSSS (hex), where the buffer size (in
1042 // bytes) is given as 2^SSSS and the number of buffers as 2^MMMM.
1043 // We'll check the actual value used near the end of the setup
1045 buffer_bytes = *bufferSize * formatBytes(stream_.deviceFormat[mode]) * device_channels;
1046 if (buffer_bytes < 16) buffer_bytes = 16;
1047 buffers = numberOfBuffers;
1048 if (buffers < 2) buffers = 2;
1049 temp = ((int) buffers << 16) + (int)(log10((double)buffer_bytes)/log10(2.0));
1050 if (ioctl(fd, SNDCTL_DSP_SETFRAGMENT, &temp)) {
1052 sprintf(message_, "RtApiOss: error setting fragment size for device (%s).",
1056 stream_.nBuffers = buffers;
1058 // Set the data format.
1059 temp = device_format;
1060 if (ioctl(fd, SNDCTL_DSP_SETFMT, &device_format) == -1 || device_format != temp) {
1062 sprintf(message_, "RtApiOss: error setting data format for device (%s).",
1067 // Set the number of channels.
1068 temp = device_channels;
1069 if (ioctl(fd, SNDCTL_DSP_CHANNELS, &device_channels) == -1 || device_channels != temp) {
1071 sprintf(message_, "RtApiOss: error setting %d channels on device (%s).",
1076 // Set the sample rate.
1079 if (ioctl(fd, SNDCTL_DSP_SPEED, &srate) == -1) {
1081 sprintf(message_, "RtApiOss: error setting sample rate = %d on device (%s).",
1086 // Verify the sample rate setup worked.
1087 if (abs(srate - temp) > 100) {
1089 sprintf(message_, "RtApiOss: error ... audio device (%s) doesn't support sample rate of %d.",
1093 stream_.sampleRate = sampleRate;
1095 if (ioctl(fd, SNDCTL_DSP_GETBLKSIZE, &buffer_bytes) == -1) {
1097 sprintf(message_, "RtApiOss: error getting buffer size for device (%s).",
1102 // Save buffer size (in sample frames).
1103 *bufferSize = buffer_bytes / (formatBytes(stream_.deviceFormat[mode]) * device_channels);
1104 stream_.bufferSize = *bufferSize;
1106 if (mode == INPUT && stream_.mode == OUTPUT &&
1107 stream_.device[0] == device) {
1108 // We're doing duplex setup here.
1109 stream_.deviceFormat[0] = stream_.deviceFormat[1];
1110 stream_.nDeviceChannels[0] = device_channels;
1113 // Allocate the stream handles if necessary and then save.
1114 if ( stream_.apiHandle == 0 ) {
1115 handle = (int *) calloc(2, sizeof(int));
// NOTE(review): tail of RtApiOss::probeDeviceOpen().  The function's opening
// lines — and several interior lines (closing braces, else-branches, the
// declarations of `handle`/`buffer_bytes`, the failure returns) — are not
// visible in this excerpt, so the notes below cover only what is shown.
1116 stream_.apiHandle = (void *) handle;
1121 handle = (int *) stream_.apiHandle;
// Conversion is required when the user's sample format differs from the
// device's, or when the user asked for fewer channels than the device opens.
1125 // Set flags for buffer conversion
1126 stream_.doConvertBuffer[mode] = false;
1127 if (stream_.userFormat != stream_.deviceFormat[mode])
1128 stream_.doConvertBuffer[mode] = true;
1129 if (stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode])
1130 stream_.doConvertBuffer[mode] = true;
// The single user buffer is shared by both directions of a duplex stream,
// so it is sized for the larger of the two user channel counts.
1132 // Allocate necessary internal buffers
1133 if ( stream_.nUserChannels[0] != stream_.nUserChannels[1] ) {
1136 if (stream_.nUserChannels[0] >= stream_.nUserChannels[1])
1137 buffer_bytes = stream_.nUserChannels[0];
1139 buffer_bytes = stream_.nUserChannels[1];
1141 buffer_bytes *= *bufferSize * formatBytes(stream_.userFormat);
1142 if (stream_.userBuffer) free(stream_.userBuffer);
1143 stream_.userBuffer = (char *) calloc(buffer_bytes, 1);
1144 if (stream_.userBuffer == NULL) {
// NOTE(review): sprintf into the fixed-size message_ buffer is unbounded;
// a long device name could overflow it — snprintf would be safer.
1146 sprintf(message_, "RtApiOss: error allocating user buffer memory (%s).",
// Device buffer: only (re)allocated when conversion is needed.  For the
// INPUT side of a duplex stream, keep an existing (larger) output-side
// device buffer rather than shrinking it.
1152 if ( stream_.doConvertBuffer[mode] ) {
1155 bool makeBuffer = true;
1156 if ( mode == OUTPUT )
1157 buffer_bytes = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
1158 else { // mode == INPUT
1159 buffer_bytes = stream_.nDeviceChannels[1] * formatBytes(stream_.deviceFormat[1]);
1160 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
1161 long bytes_out = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
1162 if ( buffer_bytes < bytes_out ) makeBuffer = false;
1167 buffer_bytes *= *bufferSize;
1168 if (stream_.deviceBuffer) free(stream_.deviceBuffer);
1169 stream_.deviceBuffer = (char *) calloc(buffer_bytes, 1);
1170 if (stream_.deviceBuffer == NULL) {
1172 sprintf(message_, "RtApiOss: error allocating device buffer memory (%s).",
1179 stream_.device[mode] = device;
1180 stream_.state = STREAM_STOPPED;
// Opening INPUT after OUTPUT promotes the stream to DUPLEX; the branch at
// 1184 presumably shares state when both directions use the same device —
// the lines that follow it are not visible here.
1182 if ( stream_.mode == OUTPUT && mode == INPUT ) {
1183 stream_.mode = DUPLEX;
1184 if (stream_.device[0] == device)
1188 stream_.mode = mode;
// Fill in the ConvertInfo used later by convertBuffer() in tickStream().
1190 // Setup the buffer conversion information structure.
1191 if ( stream_.doConvertBuffer[mode] ) {
1192 if (mode == INPUT) { // convert device to user buffer
1193 stream_.convertInfo[mode].inJump = stream_.nDeviceChannels[1];
1194 stream_.convertInfo[mode].outJump = stream_.nUserChannels[1];
1195 stream_.convertInfo[mode].inFormat = stream_.deviceFormat[1];
1196 stream_.convertInfo[mode].outFormat = stream_.userFormat;
1198 else { // convert user to device buffer
1199 stream_.convertInfo[mode].inJump = stream_.nUserChannels[0];
1200 stream_.convertInfo[mode].outJump = stream_.nDeviceChannels[0];
1201 stream_.convertInfo[mode].inFormat = stream_.userFormat;
1202 stream_.convertInfo[mode].outFormat = stream_.deviceFormat[0];
// Conversion never fabricates channels: convert only as many as the
// smaller of the two jump values.
1205 if ( stream_.convertInfo[mode].inJump < stream_.convertInfo[mode].outJump )
1206 stream_.convertInfo[mode].channels = stream_.convertInfo[mode].inJump;
1208 stream_.convertInfo[mode].channels = stream_.convertInfo[mode].outJump;
// Non-interleaved data is laid out channel-by-channel in blocks of
// bufferSize frames, hence the k * bufferSize offsets and a jump of 1.
1210 // Set up the interleave/deinterleave offsets.
1211 if ( mode == INPUT && stream_.deInterleave[1] ) {
1212 for (int k=0; k<stream_.convertInfo[mode].channels; k++) {
1213 stream_.convertInfo[mode].inOffset.push_back( k * stream_.bufferSize );
1214 stream_.convertInfo[mode].outOffset.push_back( k );
1215 stream_.convertInfo[mode].inJump = 1;
1218 else if (mode == OUTPUT && stream_.deInterleave[0]) {
1219 for (int k=0; k<stream_.convertInfo[mode].channels; k++) {
1220 stream_.convertInfo[mode].inOffset.push_back( k );
1221 stream_.convertInfo[mode].outOffset.push_back( k * stream_.bufferSize );
1222 stream_.convertInfo[mode].outJump = 1;
// Interleaved on both sides: identity offsets.
1226 for (int k=0; k<stream_.convertInfo[mode].channels; k++) {
1227 stream_.convertInfo[mode].inOffset.push_back( k );
1228 stream_.convertInfo[mode].outOffset.push_back( k );
// Error-unwind path: release the partially-built stream state.  The code
// that closes the device handle(s) before this point is not visible here.
1240 stream_.apiHandle = 0;
1243 if (stream_.userBuffer) {
1244 free(stream_.userBuffer);
1245 stream_.userBuffer = 0;
1248 error(RtError::DEBUG_WARNING);
// Close an open OSS stream: stop the devices, tear down any callback
// thread, close the file descriptors, and free all internal buffers.
// Deliberately warns (rather than throws) on a missing stream because it
// is invoked from the destructor.
1252 void RtApiOss :: closeStream()
1254 // We don't want an exception to be thrown here because this
1255 // function is called by our class destructor. So, do our own
1257 if ( stream_.mode == UNINITIALIZED ) {
1258 sprintf(message_, "RtApiOss::closeStream(): no open stream to close!");
1259 error(RtError::WARNING);
// handle[0] = playback fd, handle[1] = capture fd (per the usage below).
1263 int *handle = (int *) stream_.apiHandle;
1264 if (stream_.state == STREAM_RUNNING) {
1265 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX)
1266 ioctl(handle[0], SNDCTL_DSP_RESET, 0);
1268 ioctl(handle[1], SNDCTL_DSP_RESET, 0);
1269 stream_.state = STREAM_STOPPED;
// Signal the callback thread to exit its polling loop, then reap it.
1272 if (stream_.callbackInfo.usingCallback) {
1273 stream_.callbackInfo.usingCallback = false;
1274 pthread_join(stream_.callbackInfo.thread, NULL);
// NOTE(review): assumes fd 0 is never a valid device handle (0 marks
// "unused" here even though 0 is a legal descriptor) — long-standing
// convention in this code, but worth confirming.
1278 if (handle[0]) close(handle[0]);
1279 if (handle[1]) close(handle[1]);
1281 stream_.apiHandle = 0;
1284 if (stream_.userBuffer) {
1285 free(stream_.userBuffer);
1286 stream_.userBuffer = 0;
1289 if (stream_.deviceBuffer) {
1290 free(stream_.deviceBuffer);
1291 stream_.deviceBuffer = 0;
1294 stream_.mode = UNINITIALIZED;
// Mark the stream running.  OSS needs no explicit start ioctl: a device
// begins playing/recording as soon as read()/write() traffic arrives.
1297 void RtApiOss :: startStream()
1300 if (stream_.state == STREAM_RUNNING) return;
1302 MUTEX_LOCK(&stream_.mutex);
1304 stream_.state = STREAM_RUNNING;
1306 // No need to do anything else here ... OSS automatically starts
1307 // when fed samples.
1309 MUTEX_UNLOCK(&stream_.mutex);
// Stop the stream, letting queued output drain (SNDCTL_DSP_POST) rather
// than discarding it (which SNDCTL_DSP_RESET would do).
1312 void RtApiOss :: stopStream()
1315 if (stream_.state == STREAM_STOPPED) return;
// State is flipped before acquiring the mutex so a blocked tickStream()
// notices the stop as soon as it gets the lock.
1317 // Change the state before the lock to improve shutdown response
1318 // when using a callback.
1319 stream_.state = STREAM_STOPPED;
1320 MUTEX_LOCK(&stream_.mutex);
1323 int *handle = (int *) stream_.apiHandle;
1324 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
1325 err = ioctl(handle[0], SNDCTL_DSP_POST, 0);
1326 //err = ioctl(handle[0], SNDCTL_DSP_SYNC, 0);
1328 sprintf(message_, "RtApiOss: error stopping device (%s).",
1329 devices_[stream_.device[0]].name.c_str());
1330 error(RtError::DRIVER_ERROR);
// Same for the capture side of INPUT/DUPLEX streams.
1334 err = ioctl(handle[1], SNDCTL_DSP_POST, 0);
1335 //err = ioctl(handle[1], SNDCTL_DSP_SYNC, 0);
1337 sprintf(message_, "RtApiOss: error stopping device (%s).",
1338 devices_[stream_.device[1]].name.c_str());
1339 error(RtError::DRIVER_ERROR);
// NOTE(review): error(DRIVER_ERROR) throws while the mutex is held; the
// unlock below is skipped on that path — confirm the caller/lock
// discipline tolerates this (same pattern exists throughout this file).
1343 MUTEX_UNLOCK(&stream_.mutex);
// Abort is implemented in terms of stop for OSS (body not visible in this
// excerpt — presumably just calls stopStream(); verify).
1346 void RtApiOss :: abortStream()
// Estimate how many frames can be processed before the next tickStream()
// call would block, based on the driver's free-space/available-data
// queries.  For duplex streams the smaller of the two byte counts governs.
1351 int RtApiOss :: streamWillBlock()
1354 if (stream_.state == STREAM_STOPPED) return 0;
1356 MUTEX_LOCK(&stream_.mutex);
1358 int bytes = 0, channels = 0, frames = 0;
1359 audio_buf_info info;
1360 int *handle = (int *) stream_.apiHandle;
// NOTE(review): the ioctl return values are not checked; on failure
// `info` would be used uninitialized.
1361 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
1362 ioctl(handle[0], SNDCTL_DSP_GETOSPACE, &info);
1364 channels = stream_.nDeviceChannels[0];
1367 if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
1368 ioctl(handle[1], SNDCTL_DSP_GETISPACE, &info);
1369 if (stream_.mode == DUPLEX ) {
1370 bytes = (bytes < info.bytes) ? bytes : info.bytes;
1371 channels = stream_.nDeviceChannels[0];
1375 channels = stream_.nDeviceChannels[1];
// NOTE(review): deviceFormat[0] is used here even for INPUT-only streams
// (where `channels` came from index 1).  The formats coincide in duplex
// mode, but for a capture-only stream deviceFormat[1] looks like the
// intended index — verify against the upstream fix history.
1379 frames = (int) (bytes / (channels * formatBytes(stream_.deviceFormat[0])));
1380 frames -= stream_.bufferSize;
1381 if (frames < 0) frames = 0;
1383 MUTEX_UNLOCK(&stream_.mutex);
// One blocking I/O cycle: invoke the user callback (callback mode), then
// under the stream mutex convert/byte-swap and write the output buffer
// and/or read and convert the input buffer.
1387 void RtApiOss :: tickStream()
// Stopped + callback mode: idle-poll instead of doing I/O, so the
// callback thread spins cheaply until restarted or cancelled.
1392 if (stream_.state == STREAM_STOPPED) {
1393 if (stream_.callbackInfo.usingCallback) usleep(50000); // sleep 50 milliseconds
1396 else if (stream_.callbackInfo.usingCallback) {
1397 RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;
// The callback runs OUTSIDE the mutex; a nonzero return requests a stop
// (honored after the unlock below).
1398 stopStream = callback(stream_.userBuffer, stream_.bufferSize, stream_.callbackInfo.userData);
1401 MUTEX_LOCK(&stream_.mutex);
1403 // The state might change while waiting on a mutex.
1404 if (stream_.state == STREAM_STOPPED)
1407 int result, *handle;
1410 RtAudioFormat format;
1411 handle = (int *) stream_.apiHandle;
// ---- Playback half ----
1412 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
1414 // Setup parameters and do buffer conversion if necessary.
1415 if (stream_.doConvertBuffer[0]) {
1416 buffer = stream_.deviceBuffer;
1417 convertBuffer( buffer, stream_.userBuffer, stream_.convertInfo[0] );
1418 samples = stream_.bufferSize * stream_.nDeviceChannels[0];
1419 format = stream_.deviceFormat[0];
1422 buffer = stream_.userBuffer;
1423 samples = stream_.bufferSize * stream_.nUserChannels[0];
1424 format = stream_.userFormat;
1427 // Do byte swapping if necessary.
1428 if (stream_.doByteSwap[0])
1429 byteSwapBuffer(buffer, samples, format);
1431 // Write samples to device.
1432 result = write(handle[0], buffer, samples * formatBytes(format));
1435 // This could be an underrun, but the basic OSS API doesn't provide a means for determining that.
1436 sprintf(message_, "RtApiOss: audio write error for device (%s).",
1437 devices_[stream_.device[0]].name.c_str());
1438 error(RtError::DRIVER_ERROR);
// ---- Capture half ----
1442 if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
1444 // Setup parameters.
1445 if (stream_.doConvertBuffer[1]) {
1446 buffer = stream_.deviceBuffer;
1447 samples = stream_.bufferSize * stream_.nDeviceChannels[1];
1448 format = stream_.deviceFormat[1];
1451 buffer = stream_.userBuffer;
1452 samples = stream_.bufferSize * stream_.nUserChannels[1];
1453 format = stream_.userFormat;
1456 // Read samples from device.
1457 result = read(handle[1], buffer, samples * formatBytes(format));
1460 // This could be an overrun, but the basic OSS API doesn't provide a means for determining that.
1461 sprintf(message_, "RtApiOss: audio read error for device (%s).",
1462 devices_[stream_.device[1]].name.c_str());
1463 error(RtError::DRIVER_ERROR);
// Input post-processing is the mirror of output: swap first, convert last.
1466 // Do byte swapping if necessary.
1467 if (stream_.doByteSwap[1])
1468 byteSwapBuffer(buffer, samples, format);
1470 // Do buffer conversion if necessary.
1471 if (stream_.doConvertBuffer[1])
1472 convertBuffer( stream_.userBuffer, stream_.deviceBuffer, stream_.convertInfo[1] );
1476 MUTEX_UNLOCK(&stream_.mutex);
// Honor a stop requested by the callback's return value (call to
// stopStream() is on a line not visible here).
1478 if (stream_.callbackInfo.usingCallback && stopStream)
// Register a user callback and spawn the joinable worker thread that
// drives tickStream().  Only one callback may be active per stream.
1482 void RtApiOss :: setStreamCallback(RtAudioCallback callback, void *userData)
1486 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
1487 if ( info->usingCallback ) {
1488 sprintf(message_, "RtApiOss: A callback is already set for this stream!");
1489 error(RtError::WARNING);
1493 info->callback = (void *) callback;
1494 info->userData = userData;
1495 info->usingCallback = true;
1496 info->object = (void *) this;
// SCHED_RR only actually takes effect for privileged (root/suid)
// processes, as the original comment notes.
1498 // Set the thread attributes for joinable and realtime scheduling
1499 // priority. The higher priority will only take affect if the
1500 // program is run as root or suid.
1501 pthread_attr_t attr;
1502 pthread_attr_init(&attr);
1503 pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
1504 pthread_attr_setschedpolicy(&attr, SCHED_RR);
1506 int err = pthread_create(&(info->thread), &attr, ossCallbackHandler, &stream_.callbackInfo);
1507 pthread_attr_destroy(&attr);
// On pthread_create failure, roll back the usingCallback flag before
// reporting so the stream stays usable in blocking mode.
1509 info->usingCallback = false;
1510 sprintf(message_, "RtApiOss: error starting callback thread!");
1511 error(RtError::THREAD_ERROR);
// Detach the user callback: stop the stream if running, signal the worker
// thread to exit, join it, and clear the callback bookkeeping.
1515 void RtApiOss :: cancelStreamCallback()
1519 if (stream_.callbackInfo.usingCallback) {
1521 if (stream_.state == STREAM_RUNNING)
1524 MUTEX_LOCK(&stream_.mutex);
// Clearing usingCallback is what makes ossCallbackHandler's loop exit.
1526 stream_.callbackInfo.usingCallback = false;
1527 pthread_join(stream_.callbackInfo.thread, NULL);
1528 stream_.callbackInfo.thread = 0;
1529 stream_.callbackInfo.callback = NULL;
1530 stream_.callbackInfo.userData = NULL;
1532 MUTEX_UNLOCK(&stream_.mutex);
// Worker-thread entry point: repeatedly drive tickStream() until
// cancelStreamCallback()/closeStream() clears usingCallback.  An RtError
// escaping tickStream() ends the thread with a diagnostic rather than
// propagating (a pthread must not leak exceptions).
1536 extern "C" void *ossCallbackHandler(void *ptr)
1538 CallbackInfo *info = (CallbackInfo *) ptr;
1539 RtApiOss *object = (RtApiOss *) info->object;
// NOTE(review): usingCallback is a plain bool polled across threads with
// no synchronization here — relies on the mutex inside tickStream() for
// visibility; formally a data race by modern standards.
1540 bool *usingCallback = &info->usingCallback;
1542 while ( *usingCallback ) {
1543 pthread_testcancel();
1545 object->tickStream();
1547 catch (RtError &exception) {
1548 fprintf(stderr, "\nRtApiOss: callback thread error (%s) ... closing thread.\n\n",
1549 exception.getMessageString());
1557 //******************** End of __LINUX_OSS__ *********************//
1560 #if defined(__MACOSX_CORE__)
1563 // The OS X CoreAudio API is designed to use a separate callback
1564 // procedure for each of its audio devices. A single RtAudio duplex
1565 // stream using two different devices is supported here, though it
1566 // cannot be guaranteed to always behave correctly because we cannot
1567 // synchronize these two callbacks. This same functionality can be
1568 // achieved with better synchrony by opening two separate streams for
1569 // the devices and using RtAudio blocking calls (i.e. tickStream()).
1571 // A property listener is installed for over/underrun information.
1572 // However, no functionality is currently provided to allow property
1573 // listeners to trigger user handlers because it is unclear what could
1574 // be done if a critical stream parameter (buffer size, sample rate,
1575 // device disconnect) notification arrived. The listeners entail
1576 // quite a bit of extra code and most likely, a user program wouldn't
1577 // be prepared for the result anyway.
1579 // A structure to hold various information related to the CoreAudio API
1586 pthread_cond_t condition;
1589 :stopStream(false), xrun(false), deviceBuffer(0) {}
// Constructor: device enumeration happens before this check (on lines not
// visible here — presumably via initialize()); fail hard if no CoreAudio
// devices were found.
1592 RtApiCore :: RtApiCore()
1596 if (nDevices_ <= 0) {
1597 sprintf(message_, "RtApiCore: no Macintosh OS-X Core Audio devices found!");
1598 error(RtError::NO_DEVICES_FOUND);
// Destructor: close any open stream first (the base-class destructor runs
// later and must not touch the per-device AudioDeviceID allocations freed
// below).
1602 RtApiCore :: ~RtApiCore()
1604 // The subclass destructor gets called before the base class
1605 // destructor, so close an existing stream before deallocating
1606 // apiDeviceId memory.
1607 if ( stream_.mode != UNINITIALIZED ) closeStream();
// Each devices_[i].apiDeviceId was malloc'd in initialize(); the free()
// call itself is on a line not visible in this excerpt.
1609 // Free our allocated apiDeviceId memory.
1611 for ( unsigned int i=0; i<devices_.size(); i++ ) {
1612 id = (AudioDeviceID *) devices_[i].apiDeviceId;
// Enumerate CoreAudio devices: query the device-list size, fetch the
// AudioDeviceID array, then record one RtApiDevice per ID with a
// heap-allocated copy of the ID stashed in apiDeviceId.
1617 void RtApiCore :: initialize(void)
1619 OSStatus err = noErr;
1621 AudioDeviceID *deviceList = NULL;
1624 // Find out how many audio devices there are, if any.
1625 err = AudioHardwareGetPropertyInfo(kAudioHardwarePropertyDevices, &dataSize, NULL);
1627 sprintf(message_, "RtApiCore: OS-X error getting device info!");
1628 error(RtError::SYSTEM_ERROR);
1631 nDevices_ = dataSize / sizeof(AudioDeviceID);
1632 if (nDevices_ == 0) return;
1634 // Make space for the devices we are about to get.
1635 deviceList = (AudioDeviceID *) malloc( dataSize );
1636 if (deviceList == NULL) {
1637 sprintf(message_, "RtApiCore: memory allocation error during initialization!");
1638 error(RtError::MEMORY_ERROR);
1641 // Get the array of AudioDeviceIDs.
// NOTE(review): if this call fails, error(SYSTEM_ERROR) throws and the
// deviceList malloc above leaks — confirm whether a free precedes the
// error call on a line not visible here.
1642 err = AudioHardwareGetProperty(kAudioHardwarePropertyDevices, &dataSize, (void *) deviceList);
1645 sprintf(message_, "RtApiCore: OS-X error getting device properties!");
1646 error(RtError::SYSTEM_ERROR);
1649 // Create list of device structures and write device identifiers.
// Each device gets its own malloc'd AudioDeviceID, freed in ~RtApiCore().
1652 for (int i=0; i<nDevices_; i++) {
1653 devices_.push_back(device);
1654 id = (AudioDeviceID *) malloc( sizeof(AudioDeviceID) );
1655 *id = deviceList[i];
1656 devices_[i].apiDeviceId = (void *) id;
// Map the system default input AudioDeviceID to our devices_ index.
// Returns the matching index; the fallback return for "not found" /
// error is on a line not visible in this excerpt.
1662 int RtApiCore :: getDefaultInputDevice(void)
1664 AudioDeviceID id, *deviceId;
1665 UInt32 dataSize = sizeof( AudioDeviceID );
1667 OSStatus result = AudioHardwareGetProperty( kAudioHardwarePropertyDefaultInputDevice,
1670 if (result != noErr) {
1671 sprintf( message_, "RtApiCore: OS-X error getting default input device." );
1672 error(RtError::WARNING);
1676 for ( int i=0; i<nDevices_; i++ ) {
1677 deviceId = (AudioDeviceID *) devices_[i].apiDeviceId;
1678 if ( id == *deviceId ) return i;
// Mirror of getDefaultInputDevice() for the default output device:
// translate the system's default output AudioDeviceID into a devices_
// index.
1684 int RtApiCore :: getDefaultOutputDevice(void)
1686 AudioDeviceID id, *deviceId;
1687 UInt32 dataSize = sizeof( AudioDeviceID );
1689 OSStatus result = AudioHardwareGetProperty( kAudioHardwarePropertyDefaultOutputDevice,
1692 if (result != noErr) {
1693 sprintf( message_, "RtApiCore: OS-X error getting default output device." );
1694 error(RtError::WARNING);
1698 for ( int i=0; i<nDevices_; i++ ) {
1699 deviceId = (AudioDeviceID *) devices_[i].apiDeviceId;
1700 if ( id == *deviceId ) return i;
// File-local probe: ask CoreAudio whether `desc` is a supported stream
// format for the device in the given direction; for duplex devices the
// opposite (input) direction is re-checked so the format works both ways.
1706 static bool deviceSupportsFormat( AudioDeviceID id, bool isInput,
1707 AudioStreamBasicDescription *desc, bool isDuplex )
1709 OSStatus result = noErr;
1710 UInt32 dataSize = sizeof( AudioStreamBasicDescription );
1712 result = AudioDeviceGetProperty( id, 0, isInput,
1713 kAudioDevicePropertyStreamFormatSupported,
1716 if (result == kAudioHardwareNoError) {
// Duplex re-check hard-codes isInput = true — valid only if the first
// query above was the output direction; the guard that establishes this
// is on lines not visible here.
1718 result = AudioDeviceGetProperty( id, 0, true,
1719 kAudioDevicePropertyStreamFormatSupported,
1723 if (result != kAudioHardwareNoError)
// Fill in an RtApiDevice record for one CoreAudio device: readable name,
// input/output/duplex channel counts, supported sample rates, and the
// set of natively supported RtAudio data formats.
1732 void RtApiCore :: probeDeviceInfo( RtApiDevice *info )
1734 OSStatus err = noErr;
1736 // Get the device manufacturer and name.
1739 UInt32 dataSize = 256;
1740 AudioDeviceID *id = (AudioDeviceID *) info->apiDeviceId;
1741 err = AudioDeviceGetProperty( *id, 0, false,
1742 kAudioDevicePropertyDeviceManufacturer,
1745 sprintf( message_, "RtApiCore: OS-X error getting device manufacturer." );
1746 error(RtError::DEBUG_WARNING);
// NOTE(review): strncpy with a count equal to the full buffer size does
// NOT guarantee NUL-termination; if `name` is 256 non-NUL bytes the
// strcat below reads past the end of fullname.  (The declarations of
// name/fullname are on lines not visible here — verify their sizes.)
1749 strncpy(fullname, name, 256);
1750 strcat(fullname, ": " );
1753 err = AudioDeviceGetProperty( *id, 0, false,
1754 kAudioDevicePropertyDeviceName,
1757 sprintf( message_, "RtApiCore: OS-X error getting device name." );
1758 error(RtError::DEBUG_WARNING);
1761 strncat(fullname, name, 254);
// NOTE(review): appending strlen(fullname)+1 bytes embeds the trailing
// '\0' INSIDE the std::string; name.c_str() comparisons/printing still
// work but name.size() is off by one — likely unintended.
1763 info->name.append( (const char *)fullname, strlen(fullname)+1);
// ---- Output channels: sum across all output streams of the device. ----
1765 // Get output channel information.
1766 unsigned int i, minChannels = 0, maxChannels = 0, nStreams = 0;
1767 AudioBufferList *bufferList = nil;
1768 err = AudioDeviceGetPropertyInfo( *id, 0, false,
1769 kAudioDevicePropertyStreamConfiguration,
1771 if (err == noErr && dataSize > 0) {
1772 bufferList = (AudioBufferList *) malloc( dataSize );
1773 if (bufferList == NULL) {
1774 sprintf(message_, "RtApiCore: memory allocation error!");
1775 error(RtError::DEBUG_WARNING);
1779 err = AudioDeviceGetProperty( *id, 0, false,
1780 kAudioDevicePropertyStreamConfiguration,
1781 &dataSize, bufferList );
1785 nStreams = bufferList->mNumberBuffers;
1786 for ( i=0; i<nStreams; i++ ) {
1787 maxChannels += bufferList->mBuffers[i].mNumberChannels;
// NOTE(review): minChannels starts at 0 and is unsigned, so this `<`
// test can never fire as written — unless minChannels is re-seeded on a
// line not visible in this excerpt; verify against the full source.
1788 if ( bufferList->mBuffers[i].mNumberChannels < minChannels )
1789 minChannels = bufferList->mBuffers[i].mNumberChannels;
1795 if (err != noErr || dataSize <= 0) {
1796 sprintf( message_, "RtApiCore: OS-X error getting output channels for device (%s).",
1797 info->name.c_str() );
1798 error(RtError::DEBUG_WARNING);
1803 if ( maxChannels > 0 )
1804 info->maxOutputChannels = maxChannels;
1805 if ( minChannels > 0 )
1806 info->minOutputChannels = minChannels;
// ---- Input channels: same procedure with isInput = true. ----
1809 // Get input channel information.
1811 err = AudioDeviceGetPropertyInfo( *id, 0, true,
1812 kAudioDevicePropertyStreamConfiguration,
1814 if (err == noErr && dataSize > 0) {
1815 bufferList = (AudioBufferList *) malloc( dataSize );
1816 if (bufferList == NULL) {
1817 sprintf(message_, "RtApiCore: memory allocation error!");
1818 error(RtError::DEBUG_WARNING);
1821 err = AudioDeviceGetProperty( *id, 0, true,
1822 kAudioDevicePropertyStreamConfiguration,
1823 &dataSize, bufferList );
1827 nStreams = bufferList->mNumberBuffers;
1828 for ( i=0; i<nStreams; i++ ) {
// NOTE(review): same dead `< minChannels` test as the output loop above
// (minChannels still carries the output-side value here).
1829 if ( bufferList->mBuffers[i].mNumberChannels < minChannels )
1830 minChannels = bufferList->mBuffers[i].mNumberChannels;
1831 maxChannels += bufferList->mBuffers[i].mNumberChannels;
1837 if (err != noErr || dataSize <= 0) {
1838 sprintf( message_, "RtApiCore: OS-X error getting input channels for device (%s).",
1839 info->name.c_str() );
1840 error(RtError::DEBUG_WARNING);
1845 if ( maxChannels > 0 )
1846 info->maxInputChannels = maxChannels;
1847 if ( minChannels > 0 )
1848 info->minInputChannels = minChannels;
// Duplex capability = min of the two directions' maxima (and maxima of
// the minima, correspondingly).
1851 // If device opens for both playback and capture, we determine the channels.
1852 if (info->maxOutputChannels > 0 && info->maxInputChannels > 0) {
1853 info->hasDuplexSupport = true;
1854 info->maxDuplexChannels = (info->maxOutputChannels > info->maxInputChannels) ?
1855 info->maxInputChannels : info->maxOutputChannels;
1856 info->minDuplexChannels = (info->minOutputChannels > info->minInputChannels) ?
1857 info->minInputChannels : info->minOutputChannels;
1860 // Probe the device sample rate and data format parameters. The
1861 // core audio query mechanism is performed on a "stream"
1862 // description, which can have a variable number of channels and
1863 // apply to input or output only.
1865 // Create a stream description structure.
1866 AudioStreamBasicDescription description;
1867 dataSize = sizeof( AudioStreamBasicDescription );
1868 memset(&description, 0, sizeof(AudioStreamBasicDescription));
// Probe direction: output if the device has any, else input; duplex flag
// makes deviceSupportsFormat() verify both directions.
1869 bool isInput = false;
1870 if ( info->maxOutputChannels == 0 ) isInput = true;
1871 bool isDuplex = false;
1872 if ( info->maxDuplexChannels > 0 ) isDuplex = true;
1874 // Determine the supported sample rates.
1875 info->sampleRates.clear();
1876 for (unsigned int k=0; k<MAX_SAMPLE_RATES; k++) {
1877 description.mSampleRate = (double) SAMPLE_RATES[k];
1878 if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
1879 info->sampleRates.push_back( SAMPLE_RATES[k] );
1882 if (info->sampleRates.size() == 0) {
1883 sprintf( message_, "RtApiCore: No supported sample rates found for OS-X device (%s).",
1884 info->name.c_str() );
1885 error(RtError::DEBUG_WARNING);
// Each RtAudio format is probed twice: big-endian first, then with the
// big-endian flag cleared (little-endian), accepting either.
1889 // Determine the supported data formats.
1890 info->nativeFormats = 0;
1891 description.mFormatID = kAudioFormatLinearPCM;
1892 description.mBitsPerChannel = 8;
1893 description.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked | kLinearPCMFormatFlagIsBigEndian;
1894 if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
1895 info->nativeFormats |= RTAUDIO_SINT8;
1897 description.mFormatFlags &= ~kLinearPCMFormatFlagIsBigEndian;
1898 if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
1899 info->nativeFormats |= RTAUDIO_SINT8;
1902 description.mBitsPerChannel = 16;
1903 description.mFormatFlags |= kLinearPCMFormatFlagIsBigEndian;
1904 if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
1905 info->nativeFormats |= RTAUDIO_SINT16;
1907 description.mFormatFlags &= ~kLinearPCMFormatFlagIsBigEndian;
1908 if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
1909 info->nativeFormats |= RTAUDIO_SINT16;
1912 description.mBitsPerChannel = 32;
1913 description.mFormatFlags |= kLinearPCMFormatFlagIsBigEndian;
1914 if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
1915 info->nativeFormats |= RTAUDIO_SINT32;
1917 description.mFormatFlags &= ~kLinearPCMFormatFlagIsBigEndian;
1918 if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
1919 info->nativeFormats |= RTAUDIO_SINT32;
// 24-bit uses IsAlignedHigh instead of IsPacked (24 bits in a 32-bit word).
1922 description.mBitsPerChannel = 24;
1923 description.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsAlignedHigh | kLinearPCMFormatFlagIsBigEndian;
1924 if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
1925 info->nativeFormats |= RTAUDIO_SINT24;
1927 description.mFormatFlags &= ~kLinearPCMFormatFlagIsBigEndian;
1928 if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
1929 info->nativeFormats |= RTAUDIO_SINT24;
1932 description.mBitsPerChannel = 32;
1933 description.mFormatFlags = kLinearPCMFormatFlagIsFloat | kLinearPCMFormatFlagIsPacked | kLinearPCMFormatFlagIsBigEndian;
1934 if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
1935 info->nativeFormats |= RTAUDIO_FLOAT32;
1937 description.mFormatFlags &= ~kLinearPCMFormatFlagIsBigEndian;
1938 if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
1939 info->nativeFormats |= RTAUDIO_FLOAT32;
1942 description.mBitsPerChannel = 64;
1943 description.mFormatFlags |= kLinearPCMFormatFlagIsBigEndian;
1944 if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
1945 info->nativeFormats |= RTAUDIO_FLOAT64;
1947 description.mFormatFlags &= ~kLinearPCMFormatFlagIsBigEndian;
1948 if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
1949 info->nativeFormats |= RTAUDIO_FLOAT64;
1952 // Check that we have at least one supported format.
1953 if (info->nativeFormats == 0) {
1954 sprintf(message_, "RtApiCore: OS-X device (%s) data format not supported by RtAudio.",
1955 info->name.c_str());
1956 error(RtError::DEBUG_WARNING);
1960 info->probed = true;
// CoreAudio IOProc: trampoline from the HAL's real-time callback into
// RtApiCore::callbackEvent().  Exceptions must not escape into the HAL,
// so RtError is caught and converted to an error status.
1963 OSStatus callbackHandler( AudioDeviceID inDevice,
1964 const AudioTimeStamp* inNow,
1965 const AudioBufferList* inInputData,
1966 const AudioTimeStamp* inInputTime,
1967 AudioBufferList* outOutputData,
1968 const AudioTimeStamp* inOutputTime,
1971 CallbackInfo *info = (CallbackInfo *) infoPointer;
1973 RtApiCore *object = (RtApiCore *) info->object;
// const-ness of inInputData is cast away here; callbackEvent presumably
// treats the input half read-only — verify.
1975 object->callbackEvent( inDevice, (void *)inInputData, (void *)outOutputData );
1977 catch (RtError &exception) {
1978 fprintf(stderr, "\nRtApiCore: callback handler error (%s)!\n\n", exception.getMessageString());
1979 return kAudioHardwareUnspecifiedError;
1982 return kAudioHardwareNoError;
// Property listener: records xruns (kAudioDeviceProcessorOverload) in the
// stream's CoreHandle and logs them; all other property notifications are
// ignored (see the design note above about why no user hook is offered).
1985 OSStatus deviceListener( AudioDeviceID inDevice,
1988 AudioDevicePropertyID propertyID,
1989 void* handlePointer )
1991 CoreHandle *handle = (CoreHandle *) handlePointer;
1992 if ( propertyID == kAudioDeviceProcessorOverload ) {
// The input-vs-output discrimination condition is on a line not visible
// here; both paths just print a diagnostic and set the xrun flag.
1994 fprintf(stderr, "\nRtApiCore: OS-X audio input overrun detected!\n");
1996 fprintf(stderr, "\nRtApiCore: OS-X audio output underrun detected!\n");
1997 handle->xrun = true;
2000 return kAudioHardwareNoError;
// NOTE(review): RtApiCore::probeDeviceOpen() continues past the end of
// this excerpt; several interior lines (closing braces, else-branches,
// failure returns) are also missing.  Annotations cover what is visible.
//
// Open one direction (OUTPUT or INPUT) of a CoreAudio device: locate a
// stream with enough channels (or N consecutive mono streams), negotiate
// buffer size and format, and build the stream_ bookkeeping.
2003 bool RtApiCore :: probeDeviceOpen( int device, StreamMode mode, int channels,
2004 int sampleRate, RtAudioFormat format,
2005 int *bufferSize, int numberOfBuffers )
2007 // Setup for stream mode.
2008 bool isInput = false;
2009 AudioDeviceID id = *((AudioDeviceID *) devices_[device].apiDeviceId);
2010 if ( mode == INPUT ) isInput = true;
2012 // Search for a stream which contains the desired number of channels.
2013 OSStatus err = noErr;
2015 unsigned int deviceChannels, nStreams = 0;
// iStream indexes the chosen stream; iChannel accumulates the absolute
// device-channel offset of that stream's first channel.
2016 UInt32 iChannel = 0, iStream = 0;
2017 AudioBufferList *bufferList = nil;
2018 err = AudioDeviceGetPropertyInfo( id, 0, isInput,
2019 kAudioDevicePropertyStreamConfiguration,
2022 if (err == noErr && dataSize > 0) {
2023 bufferList = (AudioBufferList *) malloc( dataSize );
2024 if (bufferList == NULL) {
2025 sprintf(message_, "RtApiCore: memory allocation error in probeDeviceOpen()!");
2026 error(RtError::DEBUG_WARNING);
2029 err = AudioDeviceGetProperty( id, 0, isInput,
2030 kAudioDevicePropertyStreamConfiguration,
2031 &dataSize, bufferList );
// First pass: look for a single stream wide enough for `channels`.
2034 stream_.deInterleave[mode] = false;
2035 nStreams = bufferList->mNumberBuffers;
2036 for ( iStream=0; iStream<nStreams; iStream++ ) {
2037 if ( bufferList->mBuffers[iStream].mNumberChannels >= (unsigned int) channels ) break;
2038 iChannel += bufferList->mBuffers[iStream].mNumberChannels;
2040 // If we didn't find a single stream above, see if we can meet
2041 // the channel specification in mono mode (i.e. using separate
2042 // non-interleaved buffers). This can only work if there are N
2043 // consecutive one-channel streams, where N is the number of
2044 // desired channels.
2046 if ( iStream >= nStreams && nStreams >= (unsigned int) channels ) {
// Second pass: count consecutive mono streams (the counter declaration
// and its reset on a non-mono stream are on lines not visible here).
2048 for ( iStream=0; iStream<nStreams; iStream++ ) {
2049 if ( bufferList->mBuffers[iStream].mNumberChannels == 1 )
2053 if ( counter == channels ) {
// Rewind to the first of the N mono streams that satisfied the request.
2054 iStream -= channels - 1;
2055 iChannel -= channels - 1;
2056 stream_.deInterleave[mode] = true;
2059 iChannel += bufferList->mBuffers[iStream].mNumberChannels;
2064 if (err != noErr || dataSize <= 0) {
2065 if ( bufferList ) free( bufferList );
2066 sprintf( message_, "RtApiCore: OS-X error getting channels for device (%s).",
2067 devices_[device].name.c_str() );
2068 error(RtError::DEBUG_WARNING);
2072 if (iStream >= nStreams) {
2074 sprintf( message_, "RtApiCore: unable to find OS-X audio stream on device (%s) for requested channels (%d).",
2075 devices_[device].name.c_str(), channels );
2076 error(RtError::DEBUG_WARNING);
2080 // This is ok even for mono mode ... it gets updated later.
2081 deviceChannels = bufferList->mBuffers[iStream].mNumberChannels;
// Clamp the requested buffer size (in bytes of FLOAT32 frames) to the
// device's legal range.
2084 // Determine the buffer size.
2085 AudioValueRange bufferRange;
2086 dataSize = sizeof(AudioValueRange);
2087 err = AudioDeviceGetProperty( id, 0, isInput,
2088 kAudioDevicePropertyBufferSizeRange,
2089 &dataSize, &bufferRange);
2091 sprintf( message_, "RtApiCore: OS-X error getting buffer size range for device (%s).",
2092 devices_[device].name.c_str() );
2093 error(RtError::DEBUG_WARNING);
2097 long bufferBytes = *bufferSize * deviceChannels * formatBytes(RTAUDIO_FLOAT32);
2098 if (bufferRange.mMinimum > bufferBytes) bufferBytes = (int) bufferRange.mMinimum;
2099 else if (bufferRange.mMaximum < bufferBytes) bufferBytes = (int) bufferRange.mMaximum;
2101 // Set the buffer size. For mono mode, I'm assuming we only need to
2102 // make this setting for the first channel.
2103 UInt32 theSize = (UInt32) bufferBytes;
2104 dataSize = sizeof( UInt32);
2105 err = AudioDeviceSetProperty(id, NULL, 0, isInput,
2106 kAudioDevicePropertyBufferSize,
2107 dataSize, &theSize);
2109 sprintf( message_, "RtApiCore: OS-X error setting the buffer size for device (%s).",
2110 devices_[device].name.c_str() );
2111 error(RtError::DEBUG_WARNING);
// Report the (possibly clamped) size back to the caller in frames.
2115 // If attempting to setup a duplex stream, the bufferSize parameter
2116 // MUST be the same in both directions!
2117 *bufferSize = bufferBytes / ( deviceChannels * formatBytes(RTAUDIO_FLOAT32) );
2118 if ( stream_.mode == OUTPUT && mode == INPUT && *bufferSize != stream_.bufferSize ) {
2119 sprintf( message_, "RtApiCore: OS-X error setting buffer size for duplex stream on device (%s).",
2120 devices_[device].name.c_str() );
2121 error(RtError::DEBUG_WARNING);
2125 stream_.bufferSize = *bufferSize;
2126 stream_.nBuffers = 1;
2128 // Set the stream format description. Do for each channel in mono mode.
2129 AudioStreamBasicDescription description;
2130 dataSize = sizeof( AudioStreamBasicDescription );
2131 if ( stream_.deInterleave[mode] ) nStreams = channels;
2133 for ( unsigned int i=0; i<nStreams; i++, iChannel++ ) {
2135 err = AudioDeviceGetProperty( id, iChannel, isInput,
2136 kAudioDevicePropertyStreamFormat,
2137 &dataSize, &description );
2139 sprintf( message_, "RtApiCore: OS-X error getting stream format for device (%s).",
2140 devices_[device].name.c_str() );
2141 error(RtError::DEBUG_WARNING);
2145 // Set the sample rate and data format id.
2146 description.mSampleRate = (double) sampleRate;
2147 description.mFormatID = kAudioFormatLinearPCM;
2148 err = AudioDeviceSetProperty( id, NULL, iChannel, isInput,
2149 kAudioDevicePropertyStreamFormat,
2150 dataSize, &description );
2152 sprintf( message_, "RtApiCore: OS-X error setting sample rate or data format for device (%s).",
2153 devices_[device].name.c_str() );
2154 error(RtError::DEBUG_WARNING);
// Re-read the first channel's format to learn the device's endianness.
2159 // Check whether we need byte-swapping (assuming OS-X host is big-endian).
2160 iChannel -= nStreams;
2161 err = AudioDeviceGetProperty( id, iChannel, isInput,
2162 kAudioDevicePropertyStreamFormat,
2163 &dataSize, &description );
2165 sprintf( message_, "RtApiCore: OS-X error getting stream format for device (%s).", devices_[device].name.c_str() );
2166 error(RtError::DEBUG_WARNING);
2170 stream_.doByteSwap[mode] = false;
// BUG(review): operator precedence — `!` binds tighter than `&`, so this
// evaluates (!description.mFormatFlags) & kLinearPCMFormatFlagIsBigEndian,
// i.e. it is true only when mFormatFlags == 0.  The intended test is
// !(description.mFormatFlags & kLinearPCMFormatFlagIsBigEndian); as
// written, byte-swap detection never triggers for real formats.  (Fixed
// in later RtAudio releases.)
2171 if ( !description.mFormatFlags & kLinearPCMFormatFlagIsBigEndian )
2172 stream_.doByteSwap[mode] = true;
// CoreAudio HAL I/O is always 32-bit float; user-format conversion is
// done in our own buffers.
2174 // From the CoreAudio documentation, PCM data must be supplied as
2176 stream_.userFormat = format;
2177 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
2179 if ( stream_.deInterleave[mode] ) // mono mode
2180 stream_.nDeviceChannels[mode] = channels;
2182 stream_.nDeviceChannels[mode] = description.mChannelsPerFrame;
2183 stream_.nUserChannels[mode] = channels;
2185 // Set flags for buffer conversion.
2186 stream_.doConvertBuffer[mode] = false;
2187 if (stream_.userFormat != stream_.deviceFormat[mode])
2188 stream_.doConvertBuffer[mode] = true;
2189 if (stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode])
2190 stream_.doConvertBuffer[mode] = true;
2191 if (stream_.nUserChannels[mode] > 1 && stream_.deInterleave[mode])
2192 stream_.doConvertBuffer[mode] = true;
// CoreHandle is allocated once and shared by both directions of a
// duplex stream; index[] records each direction's chosen stream.
2194 // Allocate our CoreHandle structure for the stream.
2196 if ( stream_.apiHandle == 0 ) {
2197 handle = (CoreHandle *) calloc(1, sizeof(CoreHandle));
2198 if ( handle == NULL ) {
2199 sprintf(message_, "RtApiCore: OS-X error allocating coreHandle memory (%s).",
2200 devices_[device].name.c_str());
2203 handle->index[0] = 0;
2204 handle->index[1] = 0;
2205 if ( pthread_cond_init(&handle->condition, NULL) ) {
2206 sprintf(message_, "RtApiCore: error initializing pthread condition variable (%s).",
2207 devices_[device].name.c_str());
2210 stream_.apiHandle = (void *) handle;
2213 handle = (CoreHandle *) stream_.apiHandle;
2214 handle->index[mode] = iStream;
// Shared user buffer sized for the wider direction (mirrors the OSS code).
2216 // Allocate necessary internal buffers.
2217 if ( stream_.nUserChannels[0] != stream_.nUserChannels[1] ) {
2220 if (stream_.nUserChannels[0] >= stream_.nUserChannels[1])
2221 buffer_bytes = stream_.nUserChannels[0];
2223 buffer_bytes = stream_.nUserChannels[1];
2225 buffer_bytes *= *bufferSize * formatBytes(stream_.userFormat);
2226 if (stream_.userBuffer) free(stream_.userBuffer);
2227 stream_.userBuffer = (char *) calloc(buffer_bytes, 1);
2228 if (stream_.userBuffer == NULL) {
2229 sprintf(message_, "RtApiCore: OS-X error allocating user buffer memory (%s).",
2230 devices_[device].name.c_str());
// An owned device buffer is needed only in mono (de-interleave) mode;
// otherwise stream_.deviceBuffer will alias the HAL's buffer at run time.
2235 if ( stream_.deInterleave[mode] ) {
2238 bool makeBuffer = true;
2239 if ( mode == OUTPUT )
2240 buffer_bytes = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
2241 else { // mode == INPUT
2242 buffer_bytes = stream_.nDeviceChannels[1] * formatBytes(stream_.deviceFormat[1]);
2243 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
2244 long bytes_out = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
2245 if ( buffer_bytes < bytes_out ) makeBuffer = false;
2250 buffer_bytes *= *bufferSize;
2251 if (stream_.deviceBuffer) free(stream_.deviceBuffer);
2252 stream_.deviceBuffer = (char *) calloc(buffer_bytes, 1);
2253 if (stream_.deviceBuffer == NULL) {
2254 sprintf(message_, "RtApiCore: error allocating device buffer memory (%s).",
2255 devices_[device].name.c_str());
2259 // If not de-interleaving, we point stream_.deviceBuffer to the
2260 // OS X supplied device buffer before doing any necessary data
2261 // conversions. This presents a problem if we have a duplex
2262 // stream using one device which needs de-interleaving and
2263 // another device which doesn't. So, save a pointer to our own
2264 // device buffer in the CallbackInfo structure.
2265 handle->deviceBuffer = stream_.deviceBuffer;
2269 stream_.sampleRate = sampleRate;
2270 stream_.device[mode] = device;
2271 stream_.state = STREAM_STOPPED;
2272 stream_.callbackInfo.object = (void *) this;
// ConvertInfo setup is identical in structure to the OSS backend above.
2274 // Setup the buffer conversion information structure.
2275 if ( stream_.doConvertBuffer[mode] ) {
2276 if (mode == INPUT) { // convert device to user buffer
2277 stream_.convertInfo[mode].inJump = stream_.nDeviceChannels[1];
2278 stream_.convertInfo[mode].outJump = stream_.nUserChannels[1];
2279 stream_.convertInfo[mode].inFormat = stream_.deviceFormat[1];
2280 stream_.convertInfo[mode].outFormat = stream_.userFormat;
2282 else { // convert user to device buffer
2283 stream_.convertInfo[mode].inJump = stream_.nUserChannels[0];
2284 stream_.convertInfo[mode].outJump = stream_.nDeviceChannels[0];
2285 stream_.convertInfo[mode].inFormat = stream_.userFormat;
2286 stream_.convertInfo[mode].outFormat = stream_.deviceFormat[0];
2289 if ( stream_.convertInfo[mode].inJump < stream_.convertInfo[mode].outJump )
2290 stream_.convertInfo[mode].channels = stream_.convertInfo[mode].inJump;
2292 stream_.convertInfo[mode].channels = stream_.convertInfo[mode].outJump;
2294 // Set up the interleave/deinterleave offsets.
2295 if ( mode == INPUT && stream_.deInterleave[1] ) {
2296 for (int k=0; k<stream_.convertInfo[mode].channels; k++) {
2297 stream_.convertInfo[mode].inOffset.push_back( k * stream_.bufferSize );
2298 stream_.convertInfo[mode].outOffset.push_back( k );
2299 stream_.convertInfo[mode].inJump = 1;
2302 else if (mode == OUTPUT && stream_.deInterleave[0]) {
2303 for (int k=0; k<stream_.convertInfo[mode].channels; k++) {
2304 stream_.convertInfo[mode].inOffset.push_back( k );
2305 stream_.convertInfo[mode].outOffset.push_back( k * stream_.bufferSize );
2306 stream_.convertInfo[mode].outJump = 1;
2310 for (int k=0; k<stream_.convertInfo[mode].channels; k++) {
2311 stream_.convertInfo[mode].inOffset.push_back( k );
2312 stream_.convertInfo[mode].outOffset.push_back( k );
2317 if ( stream_.mode == OUTPUT && mode == INPUT && stream_.device[0] == device )
2318 // Only one callback procedure per device.
2319 stream_.mode = DUPLEX;
2321 err = AudioDeviceAddIOProc( id, callbackHandler, (void *) &stream_.callbackInfo );
2323 sprintf( message_, "RtApiCore: OS-X error setting callback for device (%s).", devices_[device].name.c_str() );
2324 error(RtError::DEBUG_WARNING);
2327 if ( stream_.mode == OUTPUT && mode == INPUT )
2328 stream_.mode = DUPLEX;
2330 stream_.mode = mode;
2333 // Setup the device property listener for over/underload.
2334 err = AudioDeviceAddPropertyListener( id, iChannel, isInput,
2335 kAudioDeviceProcessorOverload,
2336 deviceListener, (void *) handle );
2342 pthread_cond_destroy(&handle->condition);
2344 stream_.apiHandle = 0;
2347 if (stream_.userBuffer) {
2348 free(stream_.userBuffer);
2349 stream_.userBuffer = 0;
2352 error(RtError::DEBUG_WARNING);
// Close the CoreAudio stream: stop any running IOProc, remove the IOProcs
// from the output and input devices, free user/device buffers, and destroy
// the per-stream CoreHandle.  Issues a WARNING (not an exception throw that
// propagates) because it is also called from the class destructor.
2356 void RtApiCore :: closeStream()
2358 // We don't want an exception to be thrown here because this
2359 // function is called by our class destructor. So, do our own
2361 if ( stream_.mode == UNINITIALIZED ) {
2362 sprintf(message_, "RtApiCore::closeStream(): no open stream to close!");
2363 error(RtError::WARNING);
// Tear down the output-side IOProc first.
2367 AudioDeviceID id = *( (AudioDeviceID *) devices_[stream_.device[0]].apiDeviceId );
2368 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
2369 if (stream_.state == STREAM_RUNNING)
2370 AudioDeviceStop( id, callbackHandler );
2371 AudioDeviceRemoveIOProc( id, callbackHandler );
// Input side: only remove a second IOProc when the input device differs
// from the output device (a same-device duplex stream shares one IOProc).
2374 id = *( (AudioDeviceID *) devices_[stream_.device[1]].apiDeviceId );
2375 if (stream_.mode == INPUT || ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1]) ) {
2376 if (stream_.state == STREAM_RUNNING)
2377 AudioDeviceStop( id, callbackHandler );
2378 AudioDeviceRemoveIOProc( id, callbackHandler );
2381 if (stream_.userBuffer) {
2382 free(stream_.userBuffer);
2383 stream_.userBuffer = 0;
// stream_.deviceBuffer is only owned by us when de-interleaving was needed;
// otherwise it pointed at the OS-supplied buffer and must not be freed.
2386 if ( stream_.deInterleave[0] || stream_.deInterleave[1] ) {
2387 free(stream_.deviceBuffer);
2388 stream_.deviceBuffer = 0;
2391 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
2393 // Destroy pthread condition variable and free the CoreHandle structure.
2395 pthread_cond_destroy(&handle->condition);
2397 stream_.apiHandle = 0;
2400 stream_.mode = UNINITIALIZED;
// Start the CoreAudio callback procedure(s) for the open stream.  No-op if
// already running.  Throws RtError::DRIVER_ERROR (after unlocking the stream
// mutex) if AudioDeviceStart fails on either device.
2403 void RtApiCore :: startStream()
2406 if (stream_.state == STREAM_RUNNING) return;
2408 MUTEX_LOCK(&stream_.mutex);
2412 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
2414 id = *( (AudioDeviceID *) devices_[stream_.device[0]].apiDeviceId );
2415 err = AudioDeviceStart(id, callbackHandler);
2417 sprintf(message_, "RtApiCore: OS-X error starting callback procedure on device (%s).",
2418 devices_[stream_.device[0]].name.c_str());
2419 MUTEX_UNLOCK(&stream_.mutex);
2420 error(RtError::DRIVER_ERROR);
// Start the input device only when it is distinct from the output device.
2424 if (stream_.mode == INPUT || ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1]) ) {
2426 id = *( (AudioDeviceID *) devices_[stream_.device[1]].apiDeviceId );
2427 err = AudioDeviceStart(id, callbackHandler);
2429 sprintf(message_, "RtApiCore: OS-X error starting input callback procedure on device (%s).",
// NOTE(review): this message names device[0] while reporting a failure on the
// input device (device[1]) — looks like a copy/paste slip; confirm and fix.
2430 devices_[stream_.device[0]].name.c_str());
2431 MUTEX_UNLOCK(&stream_.mutex);
2432 error(RtError::DRIVER_ERROR);
2436 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
2437 handle->stopStream = false;
2438 stream_.state = STREAM_RUNNING;
2440 MUTEX_UNLOCK(&stream_.mutex);
// Stop the CoreAudio callback procedure(s).  The state flag is flipped to
// STREAM_STOPPED before taking the mutex so an in-flight callback returns
// quickly instead of blocking shutdown.  Throws RtError::DRIVER_ERROR on
// AudioDeviceStop failure.
2443 void RtApiCore :: stopStream()
2446 if (stream_.state == STREAM_STOPPED) return;
2448 // Change the state before the lock to improve shutdown response
2449 // when using a callback.
2450 stream_.state = STREAM_STOPPED;
2451 MUTEX_LOCK(&stream_.mutex);
2455 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
2457 id = *( (AudioDeviceID *) devices_[stream_.device[0]].apiDeviceId );
2458 err = AudioDeviceStop(id, callbackHandler);
2460 sprintf(message_, "RtApiCore: OS-X error stopping callback procedure on device (%s).",
2461 devices_[stream_.device[0]].name.c_str());
2462 MUTEX_UNLOCK(&stream_.mutex);
2463 error(RtError::DRIVER_ERROR);
// Stop the input device only when it differs from the output device.
2467 if (stream_.mode == INPUT || ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1]) ) {
2469 id = *( (AudioDeviceID *) devices_[stream_.device[1]].apiDeviceId );
2470 err = AudioDeviceStop(id, callbackHandler);
2472 sprintf(message_, "RtApiCore: OS-X error stopping input callback procedure on device (%s).",
// NOTE(review): names device[0] but the failure is on device[1] — likely a
// copy/paste slip mirroring the one in startStream(); confirm and fix.
2473 devices_[stream_.device[0]].name.c_str());
2474 MUTEX_UNLOCK(&stream_.mutex);
2475 error(RtError::DRIVER_ERROR);
2479 MUTEX_UNLOCK(&stream_.mutex);
// Abort the stream immediately.  (Body elided in this extract; presumably
// delegates to stopStream() as in other RtAudio APIs — TODO confirm.)
2482 void RtApiCore :: abortStream()
// Blocking-mode tick: waits on the CoreHandle condition variable until the
// audio callback signals that a buffer exchange has completed.  Must not be
// used when a user callback is installed (warns and returns in that case).
2487 void RtApiCore :: tickStream()
2491 if (stream_.state == STREAM_STOPPED) return;
2493 if (stream_.callbackInfo.usingCallback) {
2494 sprintf(message_, "RtApiCore: tickStream() should not be used when a callback function is set!");
2495 error(RtError::WARNING);
2499 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
2501 MUTEX_LOCK(&stream_.mutex);
// pthread_cond_wait atomically releases the mutex while blocked and
// re-acquires it before returning.
2503 pthread_cond_wait(&handle->condition, &stream_.mutex);
2505 MUTEX_UNLOCK(&stream_.mutex);
// Per-device IOProc handler.  Invokes the user callback (output side only in
// duplex mode), then moves data between the user buffer and the CoreAudio
// AudioBufferList(s), applying format conversion, byte swapping and
// (de)interleaving as configured in stream_.  For blocking mode it signals
// the tickStream() condition variable instead of calling a user callback.
2508 void RtApiCore :: callbackEvent( AudioDeviceID deviceId, void *inData, void *outData )
2512 if (stream_.state == STREAM_STOPPED) return;
2514 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
2515 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
2516 AudioBufferList *inBufferList = (AudioBufferList *) inData;
2517 AudioBufferList *outBufferList = (AudioBufferList *) outData;
2519 if ( info->usingCallback && handle->stopStream ) {
2520 // Check if the stream should be stopped (via the previous user
2521 // callback return value). We stop the stream here, rather than
2522 // after the function call, so that output data can first be
2528 MUTEX_LOCK(&stream_.mutex);
2530 // Invoke user callback first, to get fresh output data. Don't
2531 // invoke the user callback if duplex mode AND the input/output devices
2532 // are different AND this function is called for the input device.
2533 AudioDeviceID id = *( (AudioDeviceID *) devices_[stream_.device[0]].apiDeviceId );
2534 if ( info->usingCallback && (stream_.mode != DUPLEX || deviceId == id ) ) {
2535 RtAudioCallback callback = (RtAudioCallback) info->callback;
2536 handle->stopStream = callback(stream_.userBuffer, stream_.bufferSize, info->userData);
// An over/underload was reported by the property listener; clear the flag
// (the elided code presumably notifies the user — TODO confirm).
2537 if ( handle->xrun == true ) {
2538 handle->xrun = false;
2539 MUTEX_UNLOCK(&stream_.mutex);
// ----- Output side: user buffer -> device buffer(s). -----
2544 if ( stream_.mode == OUTPUT || ( stream_.mode == DUPLEX && deviceId == id ) ) {
2546 if (stream_.doConvertBuffer[0]) {
// When not de-interleaving we can convert straight into the OS buffer;
// otherwise convert into our own buffer and scatter per channel below.
2548 if ( !stream_.deInterleave[0] )
2549 stream_.deviceBuffer = (char *) outBufferList->mBuffers[handle->index[0]].mData;
2551 stream_.deviceBuffer = handle->deviceBuffer;
2553 convertBuffer( stream_.deviceBuffer, stream_.userBuffer, stream_.convertInfo[0] );
2554 if ( stream_.doByteSwap[0] )
2555 byteSwapBuffer(stream_.deviceBuffer,
2556 stream_.bufferSize * stream_.nDeviceChannels[0],
2557 stream_.deviceFormat[0]);
2559 if ( stream_.deInterleave[0] ) {
2560 int bufferBytes = outBufferList->mBuffers[handle->index[0]].mDataByteSize;
2561 for ( int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
2562 memcpy(outBufferList->mBuffers[handle->index[0]+i].mData,
2563 &stream_.deviceBuffer[i*bufferBytes], bufferBytes );
// No conversion needed: byte-swap (if required) in place, then copy the
// user buffer directly to the device buffer.
2569 if (stream_.doByteSwap[0])
2570 byteSwapBuffer(stream_.userBuffer,
2571 stream_.bufferSize * stream_.nUserChannels[0],
2572 stream_.userFormat);
2574 memcpy(outBufferList->mBuffers[handle->index[0]].mData,
2576 outBufferList->mBuffers[handle->index[0]].mDataByteSize );
// ----- Input side: device buffer(s) -> user buffer. -----
2580 id = *( (AudioDeviceID *) devices_[stream_.device[1]].apiDeviceId );
2581 if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && deviceId == id ) ) {
2583 if (stream_.doConvertBuffer[1]) {
2585 if ( stream_.deInterleave[1] ) {
2586 stream_.deviceBuffer = (char *) handle->deviceBuffer;
2587 int bufferBytes = inBufferList->mBuffers[handle->index[1]].mDataByteSize;
2588 for ( int i=0; i<stream_.nDeviceChannels[1]; i++ ) {
2589 memcpy(&stream_.deviceBuffer[i*bufferBytes],
2590 inBufferList->mBuffers[handle->index[1]+i].mData, bufferBytes );
2594 stream_.deviceBuffer = (char *) inBufferList->mBuffers[handle->index[1]].mData;
2596 if ( stream_.doByteSwap[1] )
2597 byteSwapBuffer(stream_.deviceBuffer,
2598 stream_.bufferSize * stream_.nDeviceChannels[1],
2599 stream_.deviceFormat[1]);
2600 convertBuffer( stream_.userBuffer, stream_.deviceBuffer, stream_.convertInfo[1] );
2604 memcpy(stream_.userBuffer,
2605 inBufferList->mBuffers[handle->index[1]].mData,
2606 inBufferList->mBuffers[handle->index[1]].mDataByteSize );
2608 if (stream_.doByteSwap[1])
2609 byteSwapBuffer(stream_.userBuffer,
2610 stream_.bufferSize * stream_.nUserChannels[1],
2611 stream_.userFormat);
// Blocking mode: wake the thread parked in tickStream().
2615 if ( !info->usingCallback && (stream_.mode != DUPLEX || deviceId == id ) )
2616 pthread_cond_signal(&handle->condition);
2618 MUTEX_UNLOCK(&stream_.mutex);
// Install a user callback for the open stream.  Warns and leaves the
// existing callback in place if one is already set.  CoreAudio drives the
// callback directly from its IOProc, so no extra thread is spawned here.
2621 void RtApiCore :: setStreamCallback(RtAudioCallback callback, void *userData)
2625 if ( stream_.callbackInfo.usingCallback ) {
2626 sprintf(message_, "RtApiCore: A callback is already set for this stream!");
2627 error(RtError::WARNING);
2631 stream_.callbackInfo.callback = (void *) callback;
2632 stream_.callbackInfo.userData = userData;
2633 stream_.callbackInfo.usingCallback = true;
// Remove the user callback.  The stream is stopped (if running) and the
// callback bookkeeping is cleared under the stream mutex.
2636 void RtApiCore :: cancelStreamCallback()
2640 if (stream_.callbackInfo.usingCallback) {
2642 if (stream_.state == STREAM_RUNNING)
2645 MUTEX_LOCK(&stream_.mutex);
2647 stream_.callbackInfo.usingCallback = false;
2648 stream_.callbackInfo.userData = NULL;
2649 stream_.state = STREAM_STOPPED;
2650 stream_.callbackInfo.callback = NULL;
2652 MUTEX_UNLOCK(&stream_.mutex);
2657 //******************** End of __MACOSX_CORE__ *********************//
2660 #if defined(__LINUX_JACK__)
2662 // JACK is a low-latency audio server, written primarily for the
2663 // GNU/Linux operating system. It can connect a number of different
2664 // applications to an audio device, as well as allowing them to share
2665 // audio between themselves.
2667 // The JACK server must be running before RtApiJack can be instantiated.
2668 // RtAudio will report just a single "device", which is the JACK audio
2669 // server. The JACK server is typically started in a terminal as follows:
2671 // .jackd -d alsa -d hw:0
2673 // or through an interface program such as qjackctl. Many of the
2674 // parameters normally set for a stream are fixed by the JACK server
2675 // and can be specified when the JACK server is started. In
2678 // .jackd -d alsa -d hw:0 -r 44100 -p 512 -n 4
2680 // specifies a sample rate of 44100 Hz, a buffer size of 512 sample
2681 // frames, and number of buffers = 4. Once the server is running, it
2682 // is not possible to override these values. If the values are not
2683 // specified in the command-line, the JACK server uses default values.
2685 #include <jack/jack.h>
2688 // A structure to hold various information related to the Jack API
2691 jack_client_t *client;
2692 jack_port_t **ports[2];
2695 pthread_cond_t condition;
2698 :client(0), clientOpen(false), stopStream(false) {}
// Buffer holding the most recent JACK error text, filled by jackerror() and
// reported later through the normal RtAudio error path.
2701 std::string jackmsg;
// JACK error hook (registered via jack_set_error_function in initialize()).
2703 static void jackerror (const char *desc)
// NOTE(review): appending strlen(desc)+1 bytes embeds the trailing NUL
// character inside the std::string; strlen(desc) (or plain assign) is
// probably what was intended — confirm and fix.
2706 jackmsg.append( desc, strlen(desc)+1 );
// Constructor: device enumeration is performed by initialize() (called from
// elided code); throws NO_DEVICES_FOUND if no JACK server could be reached.
2709 RtApiJack :: RtApiJack()
2713 if (nDevices_ <= 0) {
2714 sprintf(message_, "RtApiJack: no Linux Jack server found or connection error (jack: %s)!",
2716 error(RtError::NO_DEVICES_FOUND);
// Destructor: close any stream still open so the JACK client is released.
2720 RtApiJack :: ~RtApiJack()
2722 if ( stream_.mode != UNINITIALIZED ) closeStream();
// Enumerate "devices" by connecting to the JACK server as a temporary
// client: the server itself is device 0 ("Jack Server"), and each distinct
// port-name prefix (text before the ':') becomes an additional device.
2725 void RtApiJack :: initialize(void)
2729 // Tell the jack server to call jackerror() when it experiences an
2730 // error. This function saves the error message for subsequent
2731 // reporting via the normal RtAudio error function.
2732 jack_set_error_function( jackerror );
2734 // Look for jack server and try to become a client.
2735 jack_client_t *client;
2736 if ( (client = jack_client_new( "RtApiJack" )) == 0)
2741 // Determine the name of the device.
2742 device.name = "Jack Server";
2743 devices_.push_back(device);
// Walk the NULL-terminated port list; each new "clientname:" prefix is
// registered as a device.
2747 std::string port, prevPort;
2748 unsigned int nChannels = 0;
2749 ports = jack_get_ports( client, NULL, NULL, 0 );
2751 port = (char *) ports[ nChannels ];
2752 unsigned int colonPos = 0;
2754 port = (char *) ports[ nChannels ];
2755 if ( (colonPos = port.find(":")) != std::string::npos ) {
2756 port = port.substr( 0, colonPos+1 );
2757 if ( port != prevPort ) {
2760 devices_.push_back( device );
2765 } while ( ports[++nChannels] );
// Probe done: drop the temporary client connection.
2769 jack_client_close(client);
// Fill in an RtApiDevice record for one JACK "device": the server's sample
// rate, the channel counts derived from its port lists, duplex support, and
// the native sample format.  Note the inversion of terminology: JACK
// *input* ports are RtAudio *output* channels, and vice versa.
2772 void RtApiJack :: probeDeviceInfo(RtApiDevice *info)
2774 // Look for jack server and try to become a client.
2775 jack_client_t *client;
2776 if ( (client = jack_client_new( "RtApiJack_Probe" )) == 0) {
2777 sprintf(message_, "RtApiJack: error connecting to Linux Jack server in probeDeviceInfo() (jack: %s)!",
2779 error(RtError::WARNING);
2783 // Get the current jack server sample rate.
2784 info->sampleRates.clear();
2785 info->sampleRates.push_back( jack_get_sample_rate(client) );
2787 // Count the available ports as device channels. Jack "input ports"
2788 // equal RtAudio output channels.
2791 unsigned int nChannels = 0;
2792 ports = jack_get_ports( client, info->name.c_str(), NULL, JackPortIsInput );
2794 port = (char *) ports[nChannels];
2796 port = (char *) ports[++nChannels];
2798 info->maxOutputChannels = nChannels;
2799 info->minOutputChannels = 1;
2802 // Jack "output ports" equal RtAudio input channels.
2804 ports = jack_get_ports( client, info->name.c_str(), NULL, JackPortIsOutput );
2806 port = (char *) ports[nChannels];
2808 port = (char *) ports[++nChannels];
2810 info->maxInputChannels = nChannels;
2811 info->minInputChannels = 1;
2814 if (info->maxOutputChannels == 0 && info->maxInputChannels == 0) {
2815 jack_client_close(client);
2816 sprintf(message_, "RtApiJack: error determining jack input/output channels!");
2817 error(RtError::DEBUG_WARNING);
// Duplex is possible when both directions exist; the duplex channel count
// is limited by the smaller of the two directions.
2821 if (info->maxOutputChannels > 0 && info->maxInputChannels > 0) {
2822 info->hasDuplexSupport = true;
2823 info->maxDuplexChannels = (info->maxOutputChannels > info->maxInputChannels) ?
2824 info->maxInputChannels : info->maxOutputChannels;
2825 info->minDuplexChannels = (info->minOutputChannels > info->minInputChannels) ?
2826 info->minInputChannels : info->minOutputChannels;
2829 // Get the jack data format type. There isn't much documentation
2830 // regarding supported data formats in jack. I'm assuming here that
2831 // the default type will always be a floating-point type, of length
2832 // equal to either 4 or 8 bytes.
2833 int sample_size = sizeof( jack_default_audio_sample_t );
2834 if ( sample_size == 4 )
2835 info->nativeFormats = RTAUDIO_FLOAT32;
2836 else if ( sample_size == 8 )
2837 info->nativeFormats = RTAUDIO_FLOAT64;
2839 // Check that we have a supported format
2840 if (info->nativeFormats == 0) {
2841 jack_client_close(client);
2842 sprintf(message_, "RtApiJack: error determining jack server data format!");
2843 error(RtError::DEBUG_WARNING);
2847 jack_client_close(client);
2848 info->probed = true;
// JACK process callback (registered via jack_set_process_callback).
// Forwards each process cycle to the owning RtApiJack instance; RtError
// exceptions are caught and reported to stderr so they never unwind into
// the JACK server thread.
2851 int jackCallbackHandler(jack_nframes_t nframes, void *infoPointer)
2853 CallbackInfo *info = (CallbackInfo *) infoPointer;
2854 RtApiJack *object = (RtApiJack *) info->object;
2856 object->callbackEvent( (unsigned long) nframes );
2858 catch (RtError &exception) {
2859 fprintf(stderr, "\nRtApiJack: callback handler error (%s)!\n\n", exception.getMessageString());
// JACK shutdown callback (registered via jack_on_shutdown).  Marks the
// client handle closed and, unless the stream was deliberately stopped,
// closes the stream because the server is going away.
2866 void jackShutdown(void *infoPointer)
2868 CallbackInfo *info = (CallbackInfo *) infoPointer;
2869 JackHandle *handle = (JackHandle *) info->apiInfo;
2870 handle->clientOpen = false;
2871 RtApiJack *object = (RtApiJack *) info->object;
2873 // Check current stream state. If stopped, then we'll assume this
2874 // was called as a result of a call to RtApiJack::stopStream (the
2875 // deactivation of a client handle causes this function to be called).
2876 // If not, we'll assume the Jack server is shutting down or some
2877 // other problem occurred and we should close the stream.
2878 if ( object->getStreamState() == RtApi::STREAM_STOPPED ) return;
2881 object->closeStream();
2883 catch (RtError &exception) {
2884 fprintf(stderr, "\nRtApiJack: jackShutdown error (%s)!\n\n", exception.getMessageString());
2888 fprintf(stderr, "\nRtApiJack: the Jack server is shutting down this client ... stream stopped and closed!!!\n\n");
// JACK xrun callback (registered via jack_set_xrun_callback): just logs
// the overrun/underrun to stderr.
2891 int jackXrun( void * )
2893 fprintf(stderr, "\nRtApiJack: audio overrun/underrun reported!\n");
// Open one direction (OUTPUT or INPUT) of a JACK stream: become a JACK
// client (once per stream), validate the requested rate against the server
// rate, configure format conversion / de-interleaving, allocate the
// JackHandle, user/device buffers and per-channel port arrays, and register
// the process/xrun/shutdown callbacks.  Returns false (via the elided error
// path) after releasing any partially-acquired resources.
2897 bool RtApiJack :: probeDeviceOpen(int device, StreamMode mode, int channels,
2898 int sampleRate, RtAudioFormat format,
2899 int *bufferSize, int numberOfBuffers)
2901 // Compare the jack server channels to the requested number of channels.
2902 if ( (mode == OUTPUT && devices_[device].maxOutputChannels < channels ) ||
2903 (mode == INPUT && devices_[device].maxInputChannels < channels ) ) {
2904 sprintf(message_, "RtApiJack: the Jack server does not support requested channels!");
2905 error(RtError::DEBUG_WARNING);
2909 JackHandle *handle = (JackHandle *) stream_.apiHandle;
2911 // Look for jack server and try to become a client (only do once per stream).
2913 jack_client_t *client = 0;
2914 if ( mode == OUTPUT || (mode == INPUT && stream_.mode != OUTPUT) ) {
2915 snprintf(label, 32, "RtApiJack");
2916 if ( (client = jack_client_new( (const char *) label )) == 0) {
2917 sprintf(message_, "RtApiJack: cannot connect to Linux Jack server in probeDeviceOpen() (jack: %s)!",
2919 error(RtError::DEBUG_WARNING);
2924 // The handle must have been created on an earlier pass.
2925 client = handle->client;
2928 // First, check the jack server sample rate.
2930 jack_rate = (int) jack_get_sample_rate(client);
2931 if ( sampleRate != jack_rate ) {
2932 jack_client_close(client);
2933 sprintf( message_, "RtApiJack: the requested sample rate (%d) is different than the JACK server rate (%d).",
2934 sampleRate, jack_rate );
2935 error(RtError::DEBUG_WARNING);
2938 stream_.sampleRate = jack_rate;
2940 // The jack server seems to support just a single floating-point
2941 // data type. Since we already checked it before, just use what we
2943 stream_.deviceFormat[mode] = devices_[device].nativeFormats;
2944 stream_.userFormat = format;
2946 // Jack always uses non-interleaved buffers. We'll need to
2947 // de-interleave if we have more than one channel.
2948 stream_.deInterleave[mode] = false;
2950 stream_.deInterleave[mode] = true;
2952 // Jack always provides host byte-ordered data.
2953 stream_.doByteSwap[mode] = false;
2955 // Get the buffer size. The buffer size and number of buffers
2956 // (periods) is set when the jack server is started.
2957 stream_.bufferSize = (int) jack_get_buffer_size(client);
2958 *bufferSize = stream_.bufferSize;
2960 stream_.nDeviceChannels[mode] = channels;
2961 stream_.nUserChannels[mode] = channels;
// Conversion is needed when formats differ or multi-channel data must be
// de-interleaved into JACK's per-channel buffers.
2963 stream_.doConvertBuffer[mode] = false;
2964 if (stream_.userFormat != stream_.deviceFormat[mode])
2965 stream_.doConvertBuffer[mode] = true;
2966 if (stream_.deInterleave[mode])
2967 stream_.doConvertBuffer[mode] = true;
2969 // Allocate our JackHandle structure for the stream.
2970 if ( handle == 0 ) {
2971 handle = (JackHandle *) calloc(1, sizeof(JackHandle));
2972 if ( handle == NULL ) {
2973 sprintf(message_, "RtApiJack: error allocating JackHandle memory (%s).",
2974 devices_[device].name.c_str());
2977 handle->ports[0] = 0;
2978 handle->ports[1] = 0;
2979 if ( pthread_cond_init(&handle->condition, NULL) ) {
2980 sprintf(message_, "RtApiJack: error initializing pthread condition variable!");
2983 stream_.apiHandle = (void *) handle;
2984 handle->client = client;
2985 handle->clientOpen = true;
2988 // Allocate necessary internal buffers.
// The single user buffer is sized for the larger of the two directions so
// a duplex stream can share it.
2989 if ( stream_.nUserChannels[0] != stream_.nUserChannels[1] ) {
2992 if (stream_.nUserChannels[0] >= stream_.nUserChannels[1])
2993 buffer_bytes = stream_.nUserChannels[0];
2995 buffer_bytes = stream_.nUserChannels[1];
2997 buffer_bytes *= *bufferSize * formatBytes(stream_.userFormat);
2998 if (stream_.userBuffer) free(stream_.userBuffer);
2999 stream_.userBuffer = (char *) calloc(buffer_bytes, 1);
3000 if (stream_.userBuffer == NULL) {
3001 sprintf(message_, "RtApiJack: error allocating user buffer memory (%s).",
3002 devices_[device].name.c_str());
3007 if ( stream_.doConvertBuffer[mode] ) {
// Reuse an existing (larger) device buffer from the output pass when
// opening the input side of a duplex stream.
3010 bool makeBuffer = true;
3011 if ( mode == OUTPUT )
3012 buffer_bytes = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
3013 else { // mode == INPUT
3014 buffer_bytes = stream_.nDeviceChannels[1] * formatBytes(stream_.deviceFormat[1]);
3015 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
3016 long bytes_out = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
3017 if ( buffer_bytes < bytes_out ) makeBuffer = false;
3022 buffer_bytes *= *bufferSize;
3023 if (stream_.deviceBuffer) free(stream_.deviceBuffer);
3024 stream_.deviceBuffer = (char *) calloc(buffer_bytes, 1);
3025 if (stream_.deviceBuffer == NULL) {
3026 sprintf(message_, "RtApiJack: error allocating device buffer memory (%s).",
3027 devices_[device].name.c_str());
3033 // Allocate memory for the Jack ports (channels) identifiers.
3034 handle->ports[mode] = (jack_port_t **) malloc (sizeof (jack_port_t *) * channels);
3035 if ( handle->ports[mode] == NULL ) {
3036 sprintf(message_, "RtApiJack: error allocating port handle memory (%s).",
3037 devices_[device].name.c_str());
3041 stream_.device[mode] = device;
3042 stream_.state = STREAM_STOPPED;
3043 stream_.callbackInfo.usingCallback = false;
3044 stream_.callbackInfo.object = (void *) this;
3045 stream_.callbackInfo.apiInfo = (void *) handle;
3047 if ( stream_.mode == OUTPUT && mode == INPUT )
3048 // We had already set up the stream for output.
3049 stream_.mode = DUPLEX;
3051 stream_.mode = mode;
3052 jack_set_process_callback( handle->client, jackCallbackHandler, (void *) &stream_.callbackInfo );
3053 jack_set_xrun_callback( handle->client, jackXrun, NULL );
3054 jack_on_shutdown( handle->client, jackShutdown, (void *) &stream_.callbackInfo );
3057 // Setup the buffer conversion information structure.
3058 if ( stream_.doConvertBuffer[mode] ) {
3059 if (mode == INPUT) { // convert device to user buffer
3060 stream_.convertInfo[mode].inJump = stream_.nDeviceChannels[1];
3061 stream_.convertInfo[mode].outJump = stream_.nUserChannels[1];
3062 stream_.convertInfo[mode].inFormat = stream_.deviceFormat[1];
3063 stream_.convertInfo[mode].outFormat = stream_.userFormat;
3065 else { // convert user to device buffer
3066 stream_.convertInfo[mode].inJump = stream_.nUserChannels[0];
3067 stream_.convertInfo[mode].outJump = stream_.nDeviceChannels[0];
3068 stream_.convertInfo[mode].inFormat = stream_.userFormat;
3069 stream_.convertInfo[mode].outFormat = stream_.deviceFormat[0];
3072 if ( stream_.convertInfo[mode].inJump < stream_.convertInfo[mode].outJump )
3073 stream_.convertInfo[mode].channels = stream_.convertInfo[mode].inJump;
3075 stream_.convertInfo[mode].channels = stream_.convertInfo[mode].outJump;
3077 // Set up the interleave/deinterleave offsets.
// De-interleaved side uses plane offsets (k * bufferSize) with a jump of 1;
// the interleaved side uses channel offsets (k) with the channel-count jump.
3078 if ( mode == INPUT && stream_.deInterleave[1] ) {
3079 for (int k=0; k<stream_.convertInfo[mode].channels; k++) {
3080 stream_.convertInfo[mode].inOffset.push_back( k * stream_.bufferSize );
3081 stream_.convertInfo[mode].outOffset.push_back( k );
3082 stream_.convertInfo[mode].inJump = 1;
3085 else if (mode == OUTPUT && stream_.deInterleave[0]) {
3086 for (int k=0; k<stream_.convertInfo[mode].channels; k++) {
3087 stream_.convertInfo[mode].inOffset.push_back( k );
3088 stream_.convertInfo[mode].outOffset.push_back( k * stream_.bufferSize );
3089 stream_.convertInfo[mode].outJump = 1;
3093 for (int k=0; k<stream_.convertInfo[mode].channels; k++) {
3094 stream_.convertInfo[mode].inOffset.push_back( k );
3095 stream_.convertInfo[mode].outOffset.push_back( k );
// ----- Error path: unwind everything acquired above. -----
3104 pthread_cond_destroy(&handle->condition);
3105 if ( handle->clientOpen == true )
3106 jack_client_close(handle->client);
3108 if ( handle->ports[0] ) free(handle->ports[0]);
3109 if ( handle->ports[1] ) free(handle->ports[1]);
3112 stream_.apiHandle = 0;
3115 if (stream_.userBuffer) {
3116 free(stream_.userBuffer);
3117 stream_.userBuffer = 0;
3120 error(RtError::DEBUG_WARNING);
// Close the JACK stream: deactivate and close the client, free port arrays,
// destroy the condition variable, release user/device buffers.  Only warns
// (no throw) because it is called from the destructor.
3124 void RtApiJack :: closeStream()
3126 // We don't want an exception to be thrown here because this
3127 // function is called by our class destructor. So, do our own
3129 if ( stream_.mode == UNINITIALIZED ) {
3130 sprintf(message_, "RtApiJack::closeStream(): no open stream to close!");
3131 error(RtError::WARNING);
3135 JackHandle *handle = (JackHandle *) stream_.apiHandle;
3136 if ( handle && handle->clientOpen == true ) {
3137 if (stream_.state == STREAM_RUNNING)
3138 jack_deactivate(handle->client);
3140 jack_client_close(handle->client);
3144 if ( handle->ports[0] ) free(handle->ports[0]);
3145 if ( handle->ports[1] ) free(handle->ports[1]);
3146 pthread_cond_destroy(&handle->condition);
3148 stream_.apiHandle = 0;
3151 if (stream_.userBuffer) {
3152 free(stream_.userBuffer);
3153 stream_.userBuffer = 0;
3156 if (stream_.deviceBuffer) {
3157 free(stream_.deviceBuffer);
3158 stream_.deviceBuffer = 0;
3161 stream_.mode = UNINITIALIZED;
// Start the JACK stream: register one port per user channel, activate the
// client, then connect our ports to the first nChannels device ports.
// (RtAudio offers no per-channel selection, so the first N ports are used.)
// Throws RtError::SYSTEM_ERROR on activation/connection failure.
3165 void RtApiJack :: startStream()
3168 if (stream_.state == STREAM_RUNNING) return;
3170 MUTEX_LOCK(&stream_.mutex);
// Register our client's output ports ("outport N") and input ports
// ("inport N") before activating.
3173 JackHandle *handle = (JackHandle *) stream_.apiHandle;
3174 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
3175 for ( int i=0; i<stream_.nUserChannels[0]; i++ ) {
3176 snprintf(label, 64, "outport %d", i);
3177 handle->ports[0][i] = jack_port_register(handle->client, (const char *)label,
3178 JACK_DEFAULT_AUDIO_TYPE, JackPortIsOutput, 0);
3182 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
3183 for ( int i=0; i<stream_.nUserChannels[1]; i++ ) {
3184 snprintf(label, 64, "inport %d", i);
3185 handle->ports[1][i] = jack_port_register(handle->client, (const char *)label,
3186 JACK_DEFAULT_AUDIO_TYPE, JackPortIsInput, 0);
3190 if (jack_activate(handle->client)) {
3191 sprintf(message_, "RtApiJack: unable to activate JACK client!");
3192 error(RtError::SYSTEM_ERROR);
3197 // Get the list of available ports.
3198 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
3199 ports = jack_get_ports(handle->client, devices_[stream_.device[0]].name.c_str(), NULL, JackPortIsInput);
3200 if ( ports == NULL) {
3201 sprintf(message_, "RtApiJack: error determining available jack input ports!");
3202 error(RtError::SYSTEM_ERROR);
3205 // Now make the port connections. Since RtAudio wasn't designed to
3206 // allow the user to select particular channels of a device, we'll
3207 // just open the first "nChannels" ports.
3208 for ( int i=0; i<stream_.nUserChannels[0]; i++ ) {
3211 result = jack_connect( handle->client, jack_port_name(handle->ports[0][i]), ports[i] );
3214 sprintf(message_, "RtApiJack: error connecting output ports!");
3215 error(RtError::SYSTEM_ERROR);
3221 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
3222 ports = jack_get_ports( handle->client, devices_[stream_.device[1]].name.c_str(), NULL, JackPortIsOutput );
3223 if ( ports == NULL) {
3224 sprintf(message_, "RtApiJack: error determining available jack output ports!");
3225 error(RtError::SYSTEM_ERROR);
3228 // Now make the port connections. See note above.
3229 for ( int i=0; i<stream_.nUserChannels[1]; i++ ) {
3232 result = jack_connect( handle->client, ports[i], jack_port_name(handle->ports[1][i]) );
3235 sprintf(message_, "RtApiJack: error connecting input ports!");
3236 error(RtError::SYSTEM_ERROR);
3242 handle->stopStream = false;
3243 stream_.state = STREAM_RUNNING;
3245 MUTEX_UNLOCK(&stream_.mutex);
// Stop the JACK stream by deactivating the client.  The state flag is
// flipped to STREAM_STOPPED before taking the mutex so a callback in flight
// returns promptly (and so jackShutdown treats the deactivation as
// intentional).
3248 void RtApiJack :: stopStream()
3251 if (stream_.state == STREAM_STOPPED) return;
3253 // Change the state before the lock to improve shutdown response
3254 // when using a callback.
3255 stream_.state = STREAM_STOPPED;
3256 MUTEX_LOCK(&stream_.mutex);
3258 JackHandle *handle = (JackHandle *) stream_.apiHandle;
3259 jack_deactivate(handle->client);
3261 MUTEX_UNLOCK(&stream_.mutex);
// Abort the stream immediately.  (Body elided in this extract; presumably
// delegates to stopStream() as in other RtAudio APIs — TODO confirm.)
3264 void RtApiJack :: abortStream()
// Blocking-mode tick: waits on the JackHandle condition variable until the
// process callback signals completion.  Must not be used when a user
// callback is installed (warns and returns in that case).
3269 void RtApiJack :: tickStream()
3273 if (stream_.state == STREAM_STOPPED) return;
3275 if (stream_.callbackInfo.usingCallback) {
3276 sprintf(message_, "RtApiJack: tickStream() should not be used when a callback function is set!");
3277 error(RtError::WARNING);
3281 JackHandle *handle = (JackHandle *) stream_.apiHandle;
3283 MUTEX_LOCK(&stream_.mutex);
// pthread_cond_wait releases the mutex while blocked and re-acquires it
// before returning.
3285 pthread_cond_wait(&handle->condition, &stream_.mutex);
3287 MUTEX_UNLOCK(&stream_.mutex);
// JACK process-cycle handler: runs the user callback for fresh output, then
// moves data between the user buffer and JACK's per-channel port buffers,
// converting/de-interleaving through stream_.deviceBuffer when needed.
// bufferBytes is the size of ONE channel's plane (JACK buffers are
// non-interleaved).  In blocking mode it signals tickStream() instead.
3290 void RtApiJack :: callbackEvent( unsigned long nframes )
3294 if (stream_.state == STREAM_STOPPED) return;
3296 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
3297 JackHandle *handle = (JackHandle *) stream_.apiHandle;
3298 if ( info->usingCallback && handle->stopStream ) {
3299 // Check if the stream should be stopped (via the previous user
3300 // callback return value). We stop the stream here, rather than
3301 // after the function call, so that output data can first be
3307 MUTEX_LOCK(&stream_.mutex);
3309 // Invoke user callback first, to get fresh output data.
3310 if ( info->usingCallback ) {
3311 RtAudioCallback callback = (RtAudioCallback) info->callback;
3312 handle->stopStream = callback(stream_.userBuffer, stream_.bufferSize, info->userData);
3315 jack_default_audio_sample_t *jackbuffer;
3316 long bufferBytes = nframes * sizeof(jack_default_audio_sample_t);
// ----- Output side: user buffer -> JACK port buffers. -----
3317 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
3319 if (stream_.doConvertBuffer[0]) {
3320 convertBuffer( stream_.deviceBuffer, stream_.userBuffer, stream_.convertInfo[0] );
3322 for ( int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
3323 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer(handle->ports[0][i],
3324 (jack_nframes_t) nframes);
3325 memcpy(jackbuffer, &stream_.deviceBuffer[i*bufferBytes], bufferBytes );
3328 else { // single channel only
3329 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer(handle->ports[0][0],
3330 (jack_nframes_t) nframes);
3331 memcpy(jackbuffer, stream_.userBuffer, bufferBytes );
// ----- Input side: JACK port buffers -> user buffer. -----
3335 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
3337 if (stream_.doConvertBuffer[1]) {
3338 for ( int i=0; i<stream_.nDeviceChannels[1]; i++ ) {
3339 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer(handle->ports[1][i],
3340 (jack_nframes_t) nframes);
3341 memcpy(&stream_.deviceBuffer[i*bufferBytes], jackbuffer, bufferBytes );
3343 convertBuffer( stream_.userBuffer, stream_.deviceBuffer, stream_.convertInfo[1] );
3345 else { // single channel only
3346 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer(handle->ports[1][0],
3347 (jack_nframes_t) nframes);
3348 memcpy(stream_.userBuffer, jackbuffer, bufferBytes );
// Blocking mode: wake the thread parked in tickStream().
3352 if ( !info->usingCallback )
3353 pthread_cond_signal(&handle->condition);
3355 MUTEX_UNLOCK(&stream_.mutex);
// Register the user callback for this stream. Warns (and presumably
// returns — that line is elided) if a callback is already installed.
3358 void RtApiJack :: setStreamCallback(RtAudioCallback callback, void *userData)
3362 if ( stream_.callbackInfo.usingCallback ) {
3363 sprintf(message_, "RtApiJack: A callback is already set for this stream!");
3364 error(RtError::WARNING);
// Stash the callback as void* plus its user data; usingCallback flips the
// stream into callback mode (see callbackEvent / tickStream).
3368 stream_.callbackInfo.callback = (void *) callback;
3369 stream_.callbackInfo.userData = userData;
3370 stream_.callbackInfo.usingCallback = true;
// Remove the installed user callback and stop the stream.
// NOTE(review): the statement under the STREAM_RUNNING check (original
// line ~3380, presumably stopStream()) is elided from this listing.
3373 void RtApiJack :: cancelStreamCallback()
3377 if (stream_.callbackInfo.usingCallback) {
3379 if (stream_.state == STREAM_RUNNING)
3382 MUTEX_LOCK(&stream_.mutex);
// Clear callback state under the mutex so callbackEvent cannot observe a
// half-cleared CallbackInfo.
3384 stream_.callbackInfo.usingCallback = false;
3385 stream_.callbackInfo.userData = NULL;
3386 stream_.state = STREAM_STOPPED;
3387 stream_.callbackInfo.callback = NULL;
3389 MUTEX_UNLOCK(&stream_.mutex);
3395 #if defined(__LINUX_ALSA__)
3397 #include <alsa/asoundlib.h>
3401 // A structure to hold various information related to the ALSA API
3404 snd_pcm_t *handles[2];
3409 :synchronized(false), tempBuffer(0) {}
3412 extern "C" void *alsaCallbackHandler(void * ptr);
// Constructor: device enumeration (initialize(), called from the elided
// part of the body) must find at least one ALSA device, else throw.
3414 RtApiAlsa :: RtApiAlsa()
3418 if (nDevices_ <= 0) {
3419 sprintf(message_, "RtApiAlsa: no Linux ALSA audio devices found!");
3420 error(RtError::NO_DEVICES_FOUND);
// Destructor: close any open stream (closeStream() call is on an elided
// line) so pcm handles and buffers are released.
3424 RtApiAlsa :: ~RtApiAlsa()
3426 if ( stream_.mode != UNINITIALIZED )
// Enumerate ALSA cards/subdevices via the control interface and populate
// devices_ with one entry per "hw:card,subdevice" (or "hw:cardId,subdevice"
// when the card id contains a non-digit — works around an ALSA parsing bug).
// NOTE(review): elided lines include local declarations (name, handle,
// cardId, device), error-path continues, and loop braces.
3430 void RtApiAlsa :: initialize(void)
3432 int card, subdevice, result;
3436 snd_ctl_card_info_t *info;
3437 snd_ctl_card_info_alloca(&info);
3440 // Count cards and devices
3443 snd_card_next(&card);
3444 while ( card >= 0 ) {
3445 sprintf(name, "hw:%d", card);
// Open the card's control interface; on failure just log and move on.
3446 result = snd_ctl_open(&handle, name, 0);
3448 sprintf(message_, "RtApiAlsa: control open (%i): %s.", card, snd_strerror(result));
3449 error(RtError::DEBUG_WARNING);
3452 result = snd_ctl_card_info(handle, info);
3454 sprintf(message_, "RtApiAlsa: control hardware info (%i): %s.", card, snd_strerror(result));
3455 error(RtError::DEBUG_WARNING);
3458 cardId = snd_ctl_card_info_get_id(info);
// Walk this card's PCM devices; subdevice becomes -1 when exhausted.
3461 result = snd_ctl_pcm_next_device(handle, &subdevice);
3463 sprintf(message_, "RtApiAlsa: control next device (%i): %s.", card, snd_strerror(result));
3464 error(RtError::DEBUG_WARNING);
3469 sprintf( name, "hw:%d,%d", card, subdevice );
3470 // If a cardId exists and it contains at least one non-numeric
3471 // character, use it to identify the device. This avoids a bug
3472 // in ALSA such that a numeric string is interpreted as a device
3474 for ( unsigned int i=0; i<strlen(cardId); i++ ) {
3475 if ( !isdigit( cardId[i] ) ) {
3476 sprintf( name, "hw:%s,%d", cardId, subdevice );
// Append name including its terminating NUL (strlen(name)+1).
3480 device.name.erase();
3481 device.name.append( (const char *)name, strlen(name)+1 );
3482 devices_.push_back(device);
3486 snd_ctl_close(handle);
3487 snd_card_next(&card);
// Probe one ALSA device: determine playback and capture channel ranges,
// duplex support, supported sample rates (from the class-wide SAMPLE_RATES
// table) and native data formats. Fills *info and sets info->probed on
// success. Uses non-blocking opens so a busy device only logs a warning.
// NOTE(review): elided lines include local declarations (err, card, name,
// handle, chandle, value, dir), `if (err < 0)` guards, returns, and braces.
3491 void RtApiAlsa :: probeDeviceInfo(RtApiDevice *info)
3494 int open_mode = SND_PCM_ASYNC;
3497 snd_pcm_stream_t stream;
3498 snd_pcm_info_t *pcminfo;
3499 snd_pcm_info_alloca(&pcminfo);
3500 snd_pcm_hw_params_t *params;
3501 snd_pcm_hw_params_alloca(&params);
3505 // Open the control interface for this card.
// Device names look like "hw:card,device"; split on the comma.
3506 strncpy( name, info->name.c_str(), 64 );
3507 card = strtok(name, ",");
3508 err = snd_ctl_open(&chandle, card, SND_CTL_NONBLOCK);
3510 sprintf(message_, "RtApiAlsa: control open (%s): %s.", card, snd_strerror(err));
3511 error(RtError::DEBUG_WARNING);
3514 unsigned int dev = (unsigned int) atoi( strtok(NULL, ",") );
3516 // First try for playback
3517 stream = SND_PCM_STREAM_PLAYBACK;
3518 snd_pcm_info_set_device(pcminfo, dev);
3519 snd_pcm_info_set_subdevice(pcminfo, 0);
3520 snd_pcm_info_set_stream(pcminfo, stream);
3522 if ((err = snd_ctl_pcm_info(chandle, pcminfo)) < 0) {
// -ENOENT simply means "no playback direction" — not a hard error.
3523 if (err == -ENOENT) {
3524 sprintf(message_, "RtApiAlsa: pcm device (%s) doesn't handle output!", info->name.c_str());
3525 error(RtError::DEBUG_WARNING);
3528 sprintf(message_, "RtApiAlsa: snd_ctl_pcm_info error for device (%s) output: %s",
3529 info->name.c_str(), snd_strerror(err));
3530 error(RtError::DEBUG_WARNING);
3535 err = snd_pcm_open(&handle, info->name.c_str(), stream, open_mode | SND_PCM_NONBLOCK );
3538 sprintf(message_, "RtApiAlsa: pcm playback device (%s) is busy: %s.",
3539 info->name.c_str(), snd_strerror(err));
3541 sprintf(message_, "RtApiAlsa: pcm playback open (%s) error: %s.",
3542 info->name.c_str(), snd_strerror(err));
3543 error(RtError::DEBUG_WARNING);
3547 // We have an open device ... allocate the parameter structure.
3548 err = snd_pcm_hw_params_any(handle, params);
3550 snd_pcm_close(handle);
3551 sprintf(message_, "RtApiAlsa: hardware probe error (%s): %s.",
3552 info->name.c_str(), snd_strerror(err));
3553 error(RtError::DEBUG_WARNING);
3557 // Get output channel information.
3559 err = snd_pcm_hw_params_get_channels_min(params, &value);
3561 snd_pcm_close(handle);
3562 sprintf(message_, "RtApiAlsa: hardware minimum channel probe error (%s): %s.",
3563 info->name.c_str(), snd_strerror(err));
3564 error(RtError::DEBUG_WARNING);
3567 info->minOutputChannels = value;
3569 err = snd_pcm_hw_params_get_channels_max(params, &value);
3571 snd_pcm_close(handle);
3572 sprintf(message_, "RtApiAlsa: hardware maximum channel probe error (%s): %s.",
3573 info->name.c_str(), snd_strerror(err));
3574 error(RtError::DEBUG_WARNING);
3577 info->maxOutputChannels = value;
3579 snd_pcm_close(handle);
3582 // Now try for capture
3583 stream = SND_PCM_STREAM_CAPTURE;
3584 snd_pcm_info_set_stream(pcminfo, stream);
// Control handle is no longer needed once both directions are queried.
3586 err = snd_ctl_pcm_info(chandle, pcminfo);
3587 snd_ctl_close(chandle);
3589 if (err == -ENOENT) {
3590 sprintf(message_, "RtApiAlsa: pcm device (%s) doesn't handle input!", info->name.c_str());
3591 error(RtError::DEBUG_WARNING);
3594 sprintf(message_, "RtApiAlsa: snd_ctl_pcm_info error for device (%s) input: %s",
3595 info->name.c_str(), snd_strerror(err));
3596 error(RtError::DEBUG_WARNING);
// Playback-only devices still continue on to rate/format probing.
3598 if (info->maxOutputChannels == 0)
3599 // didn't open for playback either ... device invalid
3601 goto probe_parameters;
3604 err = snd_pcm_open(&handle, info->name.c_str(), stream, open_mode | SND_PCM_NONBLOCK);
3607 sprintf(message_, "RtApiAlsa: pcm capture device (%s) is busy: %s.",
3608 info->name.c_str(), snd_strerror(err));
3610 sprintf(message_, "RtApiAlsa: pcm capture open (%s) error: %s.",
3611 info->name.c_str(), snd_strerror(err));
3612 error(RtError::DEBUG_WARNING);
3613 if (info->maxOutputChannels == 0)
3614 // didn't open for playback either ... device invalid
3616 goto probe_parameters;
3619 // We have an open capture device ... allocate the parameter structure.
3620 err = snd_pcm_hw_params_any(handle, params);
3622 snd_pcm_close(handle);
3623 sprintf(message_, "RtApiAlsa: hardware probe error (%s): %s.",
3624 info->name.c_str(), snd_strerror(err));
3625 error(RtError::DEBUG_WARNING);
3626 if (info->maxOutputChannels > 0)
3627 goto probe_parameters;
3632 // Get input channel information.
3633 err = snd_pcm_hw_params_get_channels_min(params, &value);
3635 snd_pcm_close(handle);
3636 sprintf(message_, "RtApiAlsa: hardware minimum in channel probe error (%s): %s.",
3637 info->name.c_str(), snd_strerror(err));
3638 error(RtError::DEBUG_WARNING);
3639 if (info->maxOutputChannels > 0)
3640 goto probe_parameters;
3644 info->minInputChannels = value;
3646 err = snd_pcm_hw_params_get_channels_max(params, &value);
3648 snd_pcm_close(handle);
3649 sprintf(message_, "RtApiAlsa: hardware maximum in channel probe error (%s): %s.",
3650 info->name.c_str(), snd_strerror(err));
3651 error(RtError::DEBUG_WARNING);
3652 if (info->maxOutputChannels > 0)
3653 goto probe_parameters;
3657 info->maxInputChannels = value;
3659 snd_pcm_close(handle);
3661 // If device opens for both playback and capture, we determine the channels.
3662 if (info->maxOutputChannels == 0 || info->maxInputChannels == 0)
3663 goto probe_parameters;
// Duplex limits are the more restrictive of the two directions.
3665 info->hasDuplexSupport = true;
3666 info->maxDuplexChannels = (info->maxOutputChannels > info->maxInputChannels) ?
3667 info->maxInputChannels : info->maxOutputChannels;
3668 info->minDuplexChannels = (info->minOutputChannels > info->minInputChannels) ?
3669 info->minInputChannels : info->minOutputChannels;
3672 // At this point, we just need to figure out the supported data
3673 // formats and sample rates. We'll proceed by opening the device in
3674 // the direction with the maximum number of channels, or playback if
3675 // they are equal. This might limit our sample rate options, but so
3678 if (info->maxOutputChannels >= info->maxInputChannels)
3679 stream = SND_PCM_STREAM_PLAYBACK;
3681 stream = SND_PCM_STREAM_CAPTURE;
3683 err = snd_pcm_open(&handle, info->name.c_str(), stream, open_mode);
3685 sprintf(message_, "RtApiAlsa: pcm (%s) won't reopen during probe: %s.",
3686 info->name.c_str(), snd_strerror(err));
3687 error(RtError::DEBUG_WARNING);
3691 // We have an open device ... allocate the parameter structure.
3692 err = snd_pcm_hw_params_any(handle, params);
3694 snd_pcm_close(handle);
3695 sprintf(message_, "RtApiAlsa: hardware reopen probe error (%s): %s.",
3696 info->name.c_str(), snd_strerror(err));
3697 error(RtError::DEBUG_WARNING);
3701 // Test our discrete set of sample rate values.
3703 info->sampleRates.clear();
3704 for (unsigned int i=0; i<MAX_SAMPLE_RATES; i++) {
3705 if (snd_pcm_hw_params_test_rate(handle, params, SAMPLE_RATES[i], dir) == 0)
3706 info->sampleRates.push_back(SAMPLE_RATES[i]);
3708 if (info->sampleRates.size() == 0) {
3709 snd_pcm_close(handle);
3710 sprintf(message_, "RtApiAlsa: no supported sample rates found for device (%s).",
3711 info->name.c_str());
3712 error(RtError::DEBUG_WARNING);
3716 // Probe the supported data formats ... we don't care about endian-ness just yet
// Accumulate every testable format into the nativeFormats bit mask.
3717 snd_pcm_format_t format;
3718 info->nativeFormats = 0;
3719 format = SND_PCM_FORMAT_S8;
3720 if (snd_pcm_hw_params_test_format(handle, params, format) == 0)
3721 info->nativeFormats |= RTAUDIO_SINT8;
3722 format = SND_PCM_FORMAT_S16;
3723 if (snd_pcm_hw_params_test_format(handle, params, format) == 0)
3724 info->nativeFormats |= RTAUDIO_SINT16;
3725 format = SND_PCM_FORMAT_S24;
3726 if (snd_pcm_hw_params_test_format(handle, params, format) == 0)
3727 info->nativeFormats |= RTAUDIO_SINT24;
3728 format = SND_PCM_FORMAT_S32;
3729 if (snd_pcm_hw_params_test_format(handle, params, format) == 0)
3730 info->nativeFormats |= RTAUDIO_SINT32;
3731 format = SND_PCM_FORMAT_FLOAT;
3732 if (snd_pcm_hw_params_test_format(handle, params, format) == 0)
3733 info->nativeFormats |= RTAUDIO_FLOAT32;
3734 format = SND_PCM_FORMAT_FLOAT64;
3735 if (snd_pcm_hw_params_test_format(handle, params, format) == 0)
3736 info->nativeFormats |= RTAUDIO_FLOAT64;
3738 // Check that we have at least one supported format
3739 if (info->nativeFormats == 0) {
3740 snd_pcm_close(handle);
3741 sprintf(message_, "RtApiAlsa: pcm device (%s) data format not supported by RtAudio.",
3742 info->name.c_str());
3743 error(RtError::DEBUG_WARNING);
3747 // That's all ... close the device and return
3748 snd_pcm_close(handle);
3749 info->probed = true;
// Open one direction (OUTPUT or INPUT) of an ALSA stream on `device`:
// negotiates access mode, data format, byte order, sample rate, channel
// count, period (buffer) count and size, installs hw/sw params, allocates
// the AlsaHandle plus user/device conversion buffers, and fills in the
// convertInfo tables. Returns true on success; the tail (around original
// line 4166) is the shared error-cleanup path.
// NOTE(review): many lines are elided from this listing (locals such as
// `err`/`handle`/`out`, `if (err < 0)` guards, `return FAILURE`, braces,
// `#endif`s). Also note the pre-existing typo in the message string
// "error setting access ( (%s)" — left untouched here since this is a
// documentation-only pass.
3753 bool RtApiAlsa :: probeDeviceOpen( int device, StreamMode mode, int channels,
3754 int sampleRate, RtAudioFormat format,
3755 int *bufferSize, int numberOfBuffers )
3757 #if defined(__RTAUDIO_DEBUG__)
3759 snd_output_stdio_attach(&out, stderr, 0);
3762 // I'm not using the "plug" interface ... too much inconsistent behavior.
3763 const char *name = devices_[device].name.c_str();
3765 snd_pcm_stream_t alsa_stream;
3767 alsa_stream = SND_PCM_STREAM_PLAYBACK;
3769 alsa_stream = SND_PCM_STREAM_CAPTURE;
3773 int alsa_open_mode = SND_PCM_ASYNC;
3774 err = snd_pcm_open(&handle, name, alsa_stream, alsa_open_mode);
3776 sprintf(message_,"RtApiAlsa: pcm device (%s) won't open: %s.",
3777 name, snd_strerror(err));
3778 error(RtError::DEBUG_WARNING);
3782 // Fill the parameter structure.
3783 snd_pcm_hw_params_t *hw_params;
3784 snd_pcm_hw_params_alloca(&hw_params);
3785 err = snd_pcm_hw_params_any(handle, hw_params);
3787 snd_pcm_close(handle);
3788 sprintf(message_, "RtApiAlsa: error getting parameter handle (%s): %s.",
3789 name, snd_strerror(err));
3790 error(RtError::DEBUG_WARNING);
3794 #if defined(__RTAUDIO_DEBUG__)
3795 fprintf(stderr, "\nRtApiAlsa: dump hardware params just after device open:\n\n");
3796 snd_pcm_hw_params_dump(hw_params, out);
3799 // Set access ... try interleaved access first, then non-interleaved
// Non-interleaved access sets deInterleave so buffers get converted later.
3800 if ( !snd_pcm_hw_params_test_access( handle, hw_params, SND_PCM_ACCESS_RW_INTERLEAVED) ) {
3801 err = snd_pcm_hw_params_set_access(handle, hw_params, SND_PCM_ACCESS_RW_INTERLEAVED);
3803 else if ( !snd_pcm_hw_params_test_access( handle, hw_params, SND_PCM_ACCESS_RW_NONINTERLEAVED) ) {
3804 err = snd_pcm_hw_params_set_access(handle, hw_params, SND_PCM_ACCESS_RW_NONINTERLEAVED);
3805 stream_.deInterleave[mode] = true;
3808 snd_pcm_close(handle);
3809 sprintf(message_, "RtApiAlsa: device (%s) access not supported by RtAudio.", name);
3810 error(RtError::DEBUG_WARNING);
3815 snd_pcm_close(handle);
3816 sprintf(message_, "RtApiAlsa: error setting access ( (%s): %s.", name, snd_strerror(err));
3817 error(RtError::DEBUG_WARNING);
3821 // Determine how to set the device format.
3822 stream_.userFormat = format;
3823 snd_pcm_format_t device_format = SND_PCM_FORMAT_UNKNOWN;
// Map the requested RtAudio format onto the equivalent ALSA format.
3825 if (format == RTAUDIO_SINT8)
3826 device_format = SND_PCM_FORMAT_S8;
3827 else if (format == RTAUDIO_SINT16)
3828 device_format = SND_PCM_FORMAT_S16;
3829 else if (format == RTAUDIO_SINT24)
3830 device_format = SND_PCM_FORMAT_S24;
3831 else if (format == RTAUDIO_SINT32)
3832 device_format = SND_PCM_FORMAT_S32;
3833 else if (format == RTAUDIO_FLOAT32)
3834 device_format = SND_PCM_FORMAT_FLOAT;
3835 else if (format == RTAUDIO_FLOAT64)
3836 device_format = SND_PCM_FORMAT_FLOAT64;
3838 if (snd_pcm_hw_params_test_format(handle, hw_params, device_format) == 0) {
3839 stream_.deviceFormat[mode] = format;
3843 // The user requested format is not natively supported by the device.
// Fall back through formats from widest to narrowest; a later conversion
// (doConvertBuffer) bridges the device format to the user format.
3844 device_format = SND_PCM_FORMAT_FLOAT64;
3845 if (snd_pcm_hw_params_test_format(handle, hw_params, device_format) == 0) {
3846 stream_.deviceFormat[mode] = RTAUDIO_FLOAT64;
3850 device_format = SND_PCM_FORMAT_FLOAT;
3851 if (snd_pcm_hw_params_test_format(handle, hw_params, device_format) == 0) {
3852 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
3856 device_format = SND_PCM_FORMAT_S32;
3857 if (snd_pcm_hw_params_test_format(handle, hw_params, device_format) == 0) {
3858 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
3862 device_format = SND_PCM_FORMAT_S24;
3863 if (snd_pcm_hw_params_test_format(handle, hw_params, device_format) == 0) {
3864 stream_.deviceFormat[mode] = RTAUDIO_SINT24;
3868 device_format = SND_PCM_FORMAT_S16;
3869 if (snd_pcm_hw_params_test_format(handle, hw_params, device_format) == 0) {
3870 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
3874 device_format = SND_PCM_FORMAT_S8;
3875 if (snd_pcm_hw_params_test_format(handle, hw_params, device_format) == 0) {
3876 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
3880 // If we get here, no supported format was found.
3881 sprintf(message_,"RtApiAlsa: pcm device (%s) data format not supported by RtAudio.", name);
3882 snd_pcm_close(handle);
3883 error(RtError::DEBUG_WARNING);
3887 err = snd_pcm_hw_params_set_format(handle, hw_params, device_format);
3889 snd_pcm_close(handle);
3890 sprintf(message_, "RtApiAlsa: error setting format (%s): %s.",
3891 name, snd_strerror(err));
3892 error(RtError::DEBUG_WARNING);
3896 // Determine whether byte-swaping is necessary.
// 8-bit samples have no byte order; otherwise ask ALSA whether the chosen
// format matches CPU endianness.
3897 stream_.doByteSwap[mode] = false;
3898 if (device_format != SND_PCM_FORMAT_S8) {
3899 err = snd_pcm_format_cpu_endian(device_format);
3901 stream_.doByteSwap[mode] = true;
3903 snd_pcm_close(handle);
3904 sprintf(message_, "RtApiAlsa: error getting format endian-ness (%s): %s.",
3905 name, snd_strerror(err));
3906 error(RtError::DEBUG_WARNING);
3911 // Set the sample rate.
3912 err = snd_pcm_hw_params_set_rate(handle, hw_params, (unsigned int)sampleRate, 0);
3914 snd_pcm_close(handle);
3915 sprintf(message_, "RtApiAlsa: error setting sample rate (%d) on device (%s): %s.",
3916 sampleRate, name, snd_strerror(err));
3917 error(RtError::DEBUG_WARNING);
3921 // Determine the number of channels for this device. We support a possible
3922 // minimum device channel number > than the value requested by the user.
3923 stream_.nUserChannels[mode] = channels;
3925 err = snd_pcm_hw_params_get_channels_max(hw_params, &value);
3926 int device_channels = value;
3927 if (err < 0 || device_channels < channels) {
3928 snd_pcm_close(handle);
3929 sprintf(message_, "RtApiAlsa: channels (%d) not supported by device (%s).",
3931 error(RtError::DEBUG_WARNING);
3935 err = snd_pcm_hw_params_get_channels_min(hw_params, &value);
3937 snd_pcm_close(handle);
3938 sprintf(message_, "RtApiAlsa: error getting min channels count on device (%s).", name);
3939 error(RtError::DEBUG_WARNING);
// Use at least the device's minimum channel count, padding the user's
// request up if necessary (extra channels are filled during conversion).
3942 device_channels = value;
3943 if (device_channels < channels) device_channels = channels;
3944 stream_.nDeviceChannels[mode] = device_channels;
3946 // Set the device channels.
3947 err = snd_pcm_hw_params_set_channels(handle, hw_params, device_channels);
3949 snd_pcm_close(handle);
3950 sprintf(message_, "RtApiAlsa: error setting channels (%d) on device (%s): %s.",
3951 device_channels, name, snd_strerror(err));
3952 error(RtError::DEBUG_WARNING);
3956 // Set the buffer number, which in ALSA is referred to as the "period".
3958 unsigned int periods = numberOfBuffers;
3959 // Even though the hardware might allow 1 buffer, it won't work reliably.
3960 if (periods < 2) periods = 2;
3961 err = snd_pcm_hw_params_set_periods_near(handle, hw_params, &periods, &dir);
3963 snd_pcm_close(handle);
3964 sprintf(message_, "RtApiAlsa: error setting periods (%s): %s.",
3965 name, snd_strerror(err));
3966 error(RtError::DEBUG_WARNING);
3970 // Set the buffer (or period) size.
3971 snd_pcm_uframes_t period_size = *bufferSize;
3972 err = snd_pcm_hw_params_set_period_size_near(handle, hw_params, &period_size, &dir);
3974 snd_pcm_close(handle);
3975 sprintf(message_, "RtApiAlsa: error setting period size (%s): %s.",
3976 name, snd_strerror(err));
3977 error(RtError::DEBUG_WARNING);
// Report the size actually granted back to the caller.
3980 *bufferSize = period_size;
3982 // If attempting to setup a duplex stream, the bufferSize parameter
3983 // MUST be the same in both directions!
3984 if ( stream_.mode == OUTPUT && mode == INPUT && *bufferSize != stream_.bufferSize ) {
3985 sprintf( message_, "RtApiAlsa: error setting buffer size for duplex stream on device (%s).",
3987 error(RtError::DEBUG_WARNING);
3991 stream_.bufferSize = *bufferSize;
3993 // Install the hardware configuration
3994 err = snd_pcm_hw_params(handle, hw_params);
3996 snd_pcm_close(handle);
3997 sprintf(message_, "RtApiAlsa: error installing hardware configuration (%s): %s.",
3998 name, snd_strerror(err));
3999 error(RtError::DEBUG_WARNING);
4003 #if defined(__RTAUDIO_DEBUG__)
4004 fprintf(stderr, "\nRtApiAlsa: dump hardware params after installation:\n\n");
4005 snd_pcm_hw_params_dump(hw_params, out);
4008 // Set the software configuration to fill buffers with zeros and prevent device stopping on xruns.
4009 snd_pcm_sw_params_t *sw_params = NULL;
4010 snd_pcm_sw_params_alloca( &sw_params );
4011 snd_pcm_sw_params_current( handle, sw_params );
4012 snd_pcm_sw_params_set_start_threshold( handle, sw_params, *bufferSize );
// Huge stop threshold + INT_MAX silence size keep the device running
// through xruns, silencing instead of stopping.
4013 snd_pcm_sw_params_set_stop_threshold( handle, sw_params, 0x7fffffff );
4014 snd_pcm_sw_params_set_silence_threshold( handle, sw_params, 0 );
4015 snd_pcm_sw_params_set_silence_size( handle, sw_params, INT_MAX );
4016 err = snd_pcm_sw_params( handle, sw_params );
4018 snd_pcm_close(handle);
4019 sprintf(message_, "RtAudio: ALSA error installing software configuration (%s): %s.",
4020 name, snd_strerror(err));
4021 error(RtError::DEBUG_WARNING);
4025 #if defined(__RTAUDIO_DEBUG__)
4026 fprintf(stderr, "\nRtApiAlsa: dump software params after installation:\n\n");
4027 snd_pcm_sw_params_dump(sw_params, out);
4030 // Allocate the ApiHandle if necessary and then save.
// One AlsaHandle is shared by both directions: handles[0]=playback,
// handles[1]=capture (indexed by mode).
4031 AlsaHandle *apiInfo = 0;
4032 if ( stream_.apiHandle == 0 ) {
4033 apiInfo = (AlsaHandle *) new AlsaHandle;
4034 stream_.apiHandle = (void *) apiInfo;
4035 apiInfo->handles[0] = 0;
4036 apiInfo->handles[1] = 0;
4039 apiInfo = (AlsaHandle *) stream_.apiHandle;
4041 apiInfo->handles[mode] = handle;
4043 // Set flags for buffer conversion
4044 stream_.doConvertBuffer[mode] = false;
4045 if (stream_.userFormat != stream_.deviceFormat[mode])
4046 stream_.doConvertBuffer[mode] = true;
4047 if (stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode])
4048 stream_.doConvertBuffer[mode] = true;
4049 if (stream_.nUserChannels[mode] > 1 && stream_.deInterleave[mode])
4050 stream_.doConvertBuffer[mode] = true;
4052 // Allocate necessary internal buffers
// Size the user buffer for the direction with more channels so one
// buffer serves both halves of a duplex stream.
4053 if ( stream_.nUserChannels[0] != stream_.nUserChannels[1] ) {
4056 if (stream_.nUserChannels[0] >= stream_.nUserChannels[1])
4057 buffer_bytes = stream_.nUserChannels[0];
4059 buffer_bytes = stream_.nUserChannels[1];
4061 buffer_bytes *= *bufferSize * formatBytes(stream_.userFormat);
4062 if (stream_.userBuffer) free(stream_.userBuffer);
4063 if (apiInfo->tempBuffer) free(apiInfo->tempBuffer);
4064 stream_.userBuffer = (char *) calloc(buffer_bytes, 1);
4065 apiInfo->tempBuffer = (char *) calloc(buffer_bytes, 1);
4066 if ( stream_.userBuffer == NULL || apiInfo->tempBuffer == NULL ) {
4067 sprintf(message_, "RtApiAlsa: error allocating user buffer memory (%s).",
4068 devices_[device].name.c_str());
4073 if ( stream_.doConvertBuffer[mode] ) {
4076 bool makeBuffer = true;
4077 if ( mode == OUTPUT )
4078 buffer_bytes = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
4079 else { // mode == INPUT
4080 buffer_bytes = stream_.nDeviceChannels[1] * formatBytes(stream_.deviceFormat[1]);
// Reuse the existing (larger) output-side device buffer if it fits.
4081 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
4082 long bytes_out = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
4083 if ( buffer_bytes < bytes_out ) makeBuffer = false;
4088 buffer_bytes *= *bufferSize;
4089 if (stream_.deviceBuffer) free(stream_.deviceBuffer);
4090 stream_.deviceBuffer = (char *) calloc(buffer_bytes, 1);
4091 if (stream_.deviceBuffer == NULL) {
4092 sprintf(message_, "RtApiAlsa: error allocating device buffer memory (%s).",
4093 devices_[device].name.c_str());
4099 stream_.device[mode] = device;
4100 stream_.state = STREAM_STOPPED;
4101 if ( stream_.mode == OUTPUT && mode == INPUT ) {
4102 // We had already set up an output stream.
4103 stream_.mode = DUPLEX;
4104 // Link the streams if possible.
// snd_pcm_link makes start/stop of the two directions atomic.
4105 apiInfo->synchronized = false;
4106 if (snd_pcm_link( apiInfo->handles[0], apiInfo->handles[1] ) == 0)
4107 apiInfo->synchronized = true;
4109 sprintf(message_, "RtApiAlsa: unable to synchronize input and output streams (%s).",
4110 devices_[device].name.c_str());
4111 error(RtError::DEBUG_WARNING);
4115 stream_.mode = mode;
4116 stream_.nBuffers = periods;
4117 stream_.sampleRate = sampleRate;
4119 // Setup the buffer conversion information structure.
4120 if ( stream_.doConvertBuffer[mode] ) {
4121 if (mode == INPUT) { // convert device to user buffer
4122 stream_.convertInfo[mode].inJump = stream_.nDeviceChannels[1];
4123 stream_.convertInfo[mode].outJump = stream_.nUserChannels[1];
4124 stream_.convertInfo[mode].inFormat = stream_.deviceFormat[1];
4125 stream_.convertInfo[mode].outFormat = stream_.userFormat;
4127 else { // convert user to device buffer
4128 stream_.convertInfo[mode].inJump = stream_.nUserChannels[0];
4129 stream_.convertInfo[mode].outJump = stream_.nDeviceChannels[0];
4130 stream_.convertInfo[mode].inFormat = stream_.userFormat;
4131 stream_.convertInfo[mode].outFormat = stream_.deviceFormat[0];
4134 if ( stream_.convertInfo[mode].inJump < stream_.convertInfo[mode].outJump )
4135 stream_.convertInfo[mode].channels = stream_.convertInfo[mode].inJump;
4137 stream_.convertInfo[mode].channels = stream_.convertInfo[mode].outJump;
4139 // Set up the interleave/deinterleave offsets.
4140 if ( mode == INPUT && stream_.deInterleave[1] ) {
4141 for (int k=0; k<stream_.convertInfo[mode].channels; k++) {
4142 stream_.convertInfo[mode].inOffset.push_back( k * stream_.bufferSize );
4143 stream_.convertInfo[mode].outOffset.push_back( k );
4144 stream_.convertInfo[mode].inJump = 1;
4147 else if (mode == OUTPUT && stream_.deInterleave[0]) {
4148 for (int k=0; k<stream_.convertInfo[mode].channels; k++) {
4149 stream_.convertInfo[mode].inOffset.push_back( k );
4150 stream_.convertInfo[mode].outOffset.push_back( k * stream_.bufferSize );
4151 stream_.convertInfo[mode].outJump = 1;
4155 for (int k=0; k<stream_.convertInfo[mode].channels; k++) {
4156 stream_.convertInfo[mode].inOffset.push_back( k );
4157 stream_.convertInfo[mode].outOffset.push_back( k );
// Shared error-cleanup path (reached via elided gotos on failure above):
// release pcm handles and buffers so a failed open leaves no leaks.
4166 if (apiInfo->handles[0])
4167 snd_pcm_close(apiInfo->handles[0]);
4168 if (apiInfo->handles[1])
4169 snd_pcm_close(apiInfo->handles[1]);
4170 if ( apiInfo->tempBuffer ) free(apiInfo->tempBuffer);
4172 stream_.apiHandle = 0;
4175 if (stream_.userBuffer) {
4176 free(stream_.userBuffer);
4177 stream_.userBuffer = 0;
4180 error(RtError::DEBUG_WARNING);
// Close the ALSA stream: drop any running pcm(s), join the callback
// thread if one is active, close both pcm handles, and free all buffers.
// Warns (no throw) when there is no open stream, because the destructor
// calls this.
4184 void RtApiAlsa :: closeStream()
4186 // We don't want an exception to be thrown here because this
4187 // function is called by our class destructor. So, do our own
4189 if ( stream_.mode == UNINITIALIZED ) {
4190 sprintf(message_, "RtApiAlsa::closeStream(): no open stream to close!");
4191 error(RtError::WARNING);
4195 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
4196 if (stream_.state == STREAM_RUNNING) {
// snd_pcm_drop discards pending frames — an immediate stop, not a drain.
4197 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX)
4198 snd_pcm_drop(apiInfo->handles[0]);
4199 if (stream_.mode == INPUT || stream_.mode == DUPLEX)
4200 snd_pcm_drop(apiInfo->handles[1]);
4201 stream_.state = STREAM_STOPPED;
// Clearing usingCallback lets the callback thread exit; then join it.
4204 if (stream_.callbackInfo.usingCallback) {
4205 stream_.callbackInfo.usingCallback = false;
4206 pthread_join(stream_.callbackInfo.thread, NULL);
4210 if (apiInfo->handles[0]) snd_pcm_close(apiInfo->handles[0]);
4211 if (apiInfo->handles[1]) snd_pcm_close(apiInfo->handles[1]);
4212 free(apiInfo->tempBuffer);
4214 stream_.apiHandle = 0;
4217 if (stream_.userBuffer) {
4218 free(stream_.userBuffer);
4219 stream_.userBuffer = 0;
4222 if (stream_.deviceBuffer) {
4223 free(stream_.deviceBuffer);
4224 stream_.deviceBuffer = 0;
// Mark the stream closed so a second closeStream() only warns.
4227 stream_.mode = UNINITIALIZED;
4230 // Pump a bunch of zeros into the output buffer. This is needed only when we
4231 // are doing duplex operations.
// Writes nBuffers periods of silence to the playback pcm so that a duplex
// stream's output side has data queued before capture starts. Returns
// true on success (the returns and several locals are on elided lines).
4232 bool RtApiAlsa :: primeOutputBuffer()
4238 RtAudioFormat format;
4239 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
4240 handle = (snd_pcm_t **) apiInfo->handles;
4242 if (stream_.mode == DUPLEX) {
4244 // Setup parameters and do buffer conversion if necessary.
// Match whatever layout the device side expects (converted vs. user).
4245 if ( stream_.doConvertBuffer[0] ) {
4246 convertBuffer( stream_.deviceBuffer, apiInfo->tempBuffer, stream_.convertInfo[0] );
4247 channels = stream_.nDeviceChannels[0];
4248 format = stream_.deviceFormat[0];
4251 channels = stream_.nUserChannels[0];
4252 format = stream_.userFormat;
// One period of zeros, reused for every write below.
4255 buffer = new char[stream_.bufferSize * formatBytes(format) * channels];
4256 bzero(buffer, stream_.bufferSize * formatBytes(format) * channels);
4258 for (int i=0; i<stream_.nBuffers; i++) {
4259 // Write samples to device in interleaved/non-interleaved format.
// NOTE(review): the inner loop reuses index name `i`, shadowing the
// outer period counter — pre-existing, left untouched in this pass.
4260 if (stream_.deInterleave[0]) {
4261 void *bufs[channels];
4262 size_t offset = stream_.bufferSize * formatBytes(format);
4263 for (int i=0; i<channels; i++)
4264 bufs[i] = (void *) (buffer + (i * offset));
4265 err = snd_pcm_writen(handle[0], bufs, stream_.bufferSize);
4268 err = snd_pcm_writei(handle[0], buffer, stream_.bufferSize);
4270 if (err < stream_.bufferSize) {
4271 // Either an error or underrun occured.
4272 if (err == -EPIPE) {
4273 snd_pcm_state_t state = snd_pcm_state(handle[0]);
4274 if (state == SND_PCM_STATE_XRUN) {
4275 sprintf(message_, "RtApiAlsa: underrun detected while priming output buffer.");
4279 sprintf(message_, "RtApiAlsa: primeOutputBuffer() error, current state is %s.",
4280 snd_pcm_state_name(state));
4285 sprintf(message_, "RtApiAlsa: audio write error for device (%s): %s.",
4286 devices_[stream_.device[0]].name.c_str(), snd_strerror(err));
// Start the stream: prepare each pcm that isn't already prepared (the
// capture side is skipped when the pcms are linked/synchronized, since
// preparing one prepares both), re-prime the output buffer for duplex
// streams, then mark the stream running. No-op if already running.
4296 void RtApiAlsa :: startStream()
4298 // This method calls snd_pcm_prepare if the device isn't already in that state.
4301 if (stream_.state == STREAM_RUNNING) return;
4303 MUTEX_LOCK(&stream_.mutex);
4306 snd_pcm_state_t state;
4307 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
4308 snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles;
4309 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
4310 state = snd_pcm_state(handle[0]);
4311 if (state != SND_PCM_STATE_PREPARED) {
4312 err = snd_pcm_prepare(handle[0]);
// Unlock before error(): DRIVER_ERROR throws, and the mutex must not be
// held across the throw.
4314 sprintf(message_, "RtApiAlsa: error preparing pcm device (%s): %s.",
4315 devices_[stream_.device[0]].name.c_str(), snd_strerror(err));
4316 MUTEX_UNLOCK(&stream_.mutex);
4317 error(RtError::DRIVER_ERROR);
4319 // Reprime output buffer if needed
4320 if ( (stream_.mode == DUPLEX) && ( !primeOutputBuffer() ) ) {
4321 MUTEX_UNLOCK(&stream_.mutex);
4322 error(RtError::DRIVER_ERROR);
// Linked pcms are prepared together, so skip the capture side then.
4327 if ( (stream_.mode == INPUT || stream_.mode == DUPLEX) && !apiInfo->synchronized ) {
4328 state = snd_pcm_state(handle[1]);
4329 if (state != SND_PCM_STATE_PREPARED) {
4330 err = snd_pcm_prepare(handle[1]);
4332 sprintf(message_, "RtApiAlsa: error preparing pcm device (%s): %s.",
4333 devices_[stream_.device[1]].name.c_str(), snd_strerror(err));
4334 MUTEX_UNLOCK(&stream_.mutex);
4335 error(RtError::DRIVER_ERROR);
4340 if ( (stream_.mode == DUPLEX) && ( !primeOutputBuffer() ) ) {
4341 MUTEX_UNLOCK(&stream_.mutex);
4342 error(RtError::DRIVER_ERROR);
4344 stream_.state = STREAM_RUNNING;
4346 MUTEX_UNLOCK(&stream_.mutex);
// Stop the stream gracefully: snd_pcm_drain waits for queued output to
// play out before stopping. Linked (synchronized) pcms stop together, so
// the capture side is drained only when not linked.
4349 void RtApiAlsa :: stopStream()
4352 if (stream_.state == STREAM_STOPPED) return;
4354 // Change the state before the lock to improve shutdown response
4355 // when using a callback.
4356 stream_.state = STREAM_STOPPED;
4357 MUTEX_LOCK(&stream_.mutex);
4360 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
4361 snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles;
4362 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
4363 err = snd_pcm_drain(handle[0]);
// Unlock before error(): DRIVER_ERROR throws and must not hold the mutex.
4365 sprintf(message_, "RtApiAlsa: error draining pcm device (%s): %s.",
4366 devices_[stream_.device[0]].name.c_str(), snd_strerror(err));
4367 MUTEX_UNLOCK(&stream_.mutex);
4368 error(RtError::DRIVER_ERROR);
4372 if ( (stream_.mode == INPUT || stream_.mode == DUPLEX) && !apiInfo->synchronized ) {
4373 err = snd_pcm_drain(handle[1]);
4375 sprintf(message_, "RtApiAlsa: error draining pcm device (%s): %s.",
4376 devices_[stream_.device[1]].name.c_str(), snd_strerror(err));
4377 MUTEX_UNLOCK(&stream_.mutex);
4378 error(RtError::DRIVER_ERROR);
4382 MUTEX_UNLOCK(&stream_.mutex);
// Stop the stream immediately: snd_pcm_drop discards pending frames
// rather than letting them play out (contrast stopStream's drain).
// NOTE(review): the error messages below say "draining" although this
// path drops — pre-existing wording, left unchanged in this doc pass.
4385 void RtApiAlsa :: abortStream()
4388 if (stream_.state == STREAM_STOPPED) return;
4390 // Change the state before the lock to improve shutdown response
4391 // when using a callback.
4392 stream_.state = STREAM_STOPPED;
4393 MUTEX_LOCK(&stream_.mutex);
4396 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
4397 snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles;
4398 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
4399 err = snd_pcm_drop(handle[0]);
4401 sprintf(message_, "RtApiAlsa: error draining pcm device (%s): %s.",
4402 devices_[stream_.device[0]].name.c_str(), snd_strerror(err));
4403 MUTEX_UNLOCK(&stream_.mutex);
4404 error(RtError::DRIVER_ERROR);
// Skip capture when linked: dropping the playback pcm stops both.
4408 if ( (stream_.mode == INPUT || stream_.mode == DUPLEX) && !apiInfo->synchronized ) {
4409 err = snd_pcm_drop(handle[1]);
4411 sprintf(message_, "RtApiAlsa: error draining pcm device (%s): %s.",
4412 devices_[stream_.device[1]].name.c_str(), snd_strerror(err));
4413 MUTEX_UNLOCK(&stream_.mutex);
4414 error(RtError::DRIVER_ERROR);
4418 MUTEX_UNLOCK(&stream_.mutex);
// Report how many frames a tickStream() call would have to wait for before
// a full buffer can be transferred: bufferSize minus the frames currently
// available on the least-ready handle (clamped at zero).
// NOTE(review): lines appear to be elided from this listing — the "if (err < 0) {"
// guards, the "frames = err;" assignment after the output avail_update, and the
// final "return frames;" are not visible.  Only comments were added here.
4421 int RtApiAlsa :: streamWillBlock()
4424 if (stream_.state == STREAM_STOPPED) return 0;
4426 MUTEX_LOCK(&stream_.mutex);
4428 int err = 0, frames = 0;
4429 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
4430 snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles;
// Frames available for writing on the playback handle.
4431 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
4432 err = snd_pcm_avail_update(handle[0]);
4434 sprintf(message_, "RtApiAlsa: error getting available frames for device (%s): %s.",
4435 devices_[stream_.device[0]].name.c_str(), snd_strerror(err));
4436 MUTEX_UNLOCK(&stream_.mutex);
4437 error(RtError::DRIVER_ERROR);
// Frames available for reading on the capture handle; keep the minimum of
// the two counts for a duplex stream.
4443 if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
4444 err = snd_pcm_avail_update(handle[1]);
4446 sprintf(message_, "RtApiAlsa: error getting available frames for device (%s): %s.",
4447 devices_[stream_.device[1]].name.c_str(), snd_strerror(err));
4448 MUTEX_UNLOCK(&stream_.mutex);
4449 error(RtError::DRIVER_ERROR);
4451 if (frames > err) frames = err;
// Convert "available" into "still needed": a non-positive result means a
// full buffer can be moved without blocking.
4454 frames = stream_.bufferSize - frames;
4455 if (frames < 0) frames = 0;
4457 MUTEX_UNLOCK(&stream_.mutex);
// Transfer one buffer of audio between the user buffer and the ALSA device(s).
// In callback mode the user callback is invoked first to fill/consume the user
// buffer; in duplex mode the read is performed before the write to minimize
// xruns.  Handles format conversion, byte swapping, and (de)interleaving, and
// recovers from EPIPE (xrun) by re-preparing the handle.
// NOTE(review): this listing has many elided lines (variable declarations such
// as "int err; char *buffer;", else-branches, error guards, and closing braces
// are missing); only comments were added here.
4461 void RtApiAlsa :: tickStream()
4466 if (stream_.state == STREAM_STOPPED) {
// When stopped in callback mode, idle briefly instead of spinning.
4467 if (stream_.callbackInfo.usingCallback) usleep(50000); // sleep 50 milliseconds
4470 else if (stream_.callbackInfo.usingCallback) {
// Invoke the user callback; a non-zero return requests a stream stop below.
4471 RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;
4472 stopStream = callback(stream_.userBuffer, stream_.bufferSize, stream_.callbackInfo.userData);
4475 MUTEX_LOCK(&stream_.mutex);
4477 // The state might change while waiting on a mutex.
4478 if (stream_.state == STREAM_STOPPED)
4484 AlsaHandle *apiInfo;
4486 RtAudioFormat format;
4487 apiInfo = (AlsaHandle *) stream_.apiHandle;
4488 handle = (snd_pcm_t **) apiInfo->handles;
4490 if ( stream_.mode == DUPLEX ) {
4491 // In duplex mode, we need to make the snd_pcm_read call before
4492 // the snd_pcm_write call in order to avoid under/over runs. So,
4493 // copy the userData to our temporary buffer.
4495 bufferBytes = stream_.bufferSize * stream_.nUserChannels[0] * formatBytes(stream_.userFormat);
4496 memcpy( apiInfo->tempBuffer, stream_.userBuffer, bufferBytes );
// ----- Capture side (handle[1]) -----
4499 if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
4501 // Setup parameters.
4502 if (stream_.doConvertBuffer[1]) {
4503 buffer = stream_.deviceBuffer;
4504 channels = stream_.nDeviceChannels[1];
4505 format = stream_.deviceFormat[1];
4508 buffer = stream_.userBuffer;
4509 channels = stream_.nUserChannels[1];
4510 format = stream_.userFormat;
4513 // Read samples from device in interleaved/non-interleaved format.
4514 if (stream_.deInterleave[1]) {
// Non-interleaved read: one channel-sized sub-buffer pointer per channel.
4515 void *bufs[channels];
4516 size_t offset = stream_.bufferSize * formatBytes(format);
4517 for (int i=0; i<channels; i++)
4518 bufs[i] = (void *) (buffer + (i * offset));
4519 err = snd_pcm_readn(handle[1], bufs, stream_.bufferSize);
4522 err = snd_pcm_readi(handle[1], buffer, stream_.bufferSize);
4524 if (err < stream_.bufferSize) {
4525 // Either an error or underrun occured.
4526 if (err == -EPIPE) {
4527 snd_pcm_state_t state = snd_pcm_state(handle[1]);
4528 if (state == SND_PCM_STATE_XRUN) {
// Overrun: warn, re-prepare the handle, and continue.
4529 sprintf(message_, "RtApiAlsa: overrun detected.");
4530 error(RtError::WARNING);
4531 err = snd_pcm_prepare(handle[1]);
4533 sprintf(message_, "RtApiAlsa: error preparing handle after overrun: %s.",
4535 MUTEX_UNLOCK(&stream_.mutex);
4536 error(RtError::DRIVER_ERROR);
4538 // Reprime output buffer if needed.
4539 if ( (stream_.mode == DUPLEX) && ( !primeOutputBuffer() ) ) {
4540 MUTEX_UNLOCK(&stream_.mutex);
4541 error(RtError::DRIVER_ERROR);
4545 sprintf(message_, "RtApiAlsa: tickStream() error, current state is %s.",
4546 snd_pcm_state_name(state));
4547 MUTEX_UNLOCK(&stream_.mutex);
4548 error(RtError::DRIVER_ERROR);
4553 sprintf(message_, "RtApiAlsa: audio read error for device (%s): %s.",
4554 devices_[stream_.device[1]].name.c_str(), snd_strerror(err));
4555 MUTEX_UNLOCK(&stream_.mutex);
4556 error(RtError::DRIVER_ERROR);
4560 // Do byte swapping if necessary.
4561 if (stream_.doByteSwap[1])
4562 byteSwapBuffer(buffer, stream_.bufferSize * channels, format);
4564 // Do buffer conversion if necessary.
4565 if (stream_.doConvertBuffer[1])
4566 convertBuffer( stream_.userBuffer, stream_.deviceBuffer, stream_.convertInfo[1] );
// ----- Playback side (handle[0]) -----
4569 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
4571 // Setup parameters and do buffer conversion if necessary.
4572 if (stream_.doConvertBuffer[0]) {
4573 buffer = stream_.deviceBuffer;
// In duplex mode the user data was saved to tempBuffer before the read.
4574 if ( stream_.mode == DUPLEX )
4575 convertBuffer( buffer, apiInfo->tempBuffer, stream_.convertInfo[0] );
4577 convertBuffer( buffer, stream_.userBuffer, stream_.convertInfo[0] );
4578 channels = stream_.nDeviceChannels[0];
4579 format = stream_.deviceFormat[0];
4582 if ( stream_.mode == DUPLEX )
4583 buffer = apiInfo->tempBuffer;
4585 buffer = stream_.userBuffer;
4586 channels = stream_.nUserChannels[0];
4587 format = stream_.userFormat;
4590 // Do byte swapping if necessary.
4591 if (stream_.doByteSwap[0])
4592 byteSwapBuffer(buffer, stream_.bufferSize * channels, format);
4594 // Write samples to device in interleaved/non-interleaved format.
4595 if (stream_.deInterleave[0]) {
4596 void *bufs[channels];
4597 size_t offset = stream_.bufferSize * formatBytes(format);
4598 for (int i=0; i<channels; i++)
4599 bufs[i] = (void *) (buffer + (i * offset));
4600 err = snd_pcm_writen(handle[0], bufs, stream_.bufferSize);
4603 err = snd_pcm_writei(handle[0], buffer, stream_.bufferSize);
4605 if (err < stream_.bufferSize) {
4606 // Either an error or underrun occured.
4607 if (err == -EPIPE) {
4608 snd_pcm_state_t state = snd_pcm_state(handle[0]);
4609 if (state == SND_PCM_STATE_XRUN) {
// Underrun: warn, re-prepare the handle, and continue.
4610 sprintf(message_, "RtApiAlsa: underrun detected.");
4611 error(RtError::WARNING);
4612 err = snd_pcm_prepare(handle[0]);
4614 sprintf(message_, "RtApiAlsa: error preparing handle after underrun: %s.",
4616 MUTEX_UNLOCK(&stream_.mutex);
4617 error(RtError::DRIVER_ERROR);
4621 sprintf(message_, "RtApiAlsa: tickStream() error, current state is %s.",
4622 snd_pcm_state_name(state));
4623 MUTEX_UNLOCK(&stream_.mutex);
4624 error(RtError::DRIVER_ERROR);
4629 sprintf(message_, "RtApiAlsa: audio write error for device (%s): %s.",
4630 devices_[stream_.device[0]].name.c_str(), snd_strerror(err));
4631 MUTEX_UNLOCK(&stream_.mutex);
4632 error(RtError::DRIVER_ERROR);
4638 MUTEX_UNLOCK(&stream_.mutex);
// Honor a stop request returned by the user callback (outside the lock,
// since stopStream() takes the same mutex).
4640 if (stream_.callbackInfo.usingCallback && stopStream)
// Register a user callback for this stream and spawn the joinable callback
// thread (alsaCallbackHandler) that drives tickStream().  Warns and leaves
// the existing callback in place if one is already registered.
// NOTE(review): lines appear to be elided from this listing (the early return
// after the warning and the "if (err) {" guard around the failure path are
// missing); only comments were added here.
4644 void RtApiAlsa :: setStreamCallback(RtAudioCallback callback, void *userData)
4648 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
4649 if ( info->usingCallback ) {
4650 sprintf(message_, "RtApiAlsa: A callback is already set for this stream!");
4651 error(RtError::WARNING);
4655 info->callback = (void *) callback;
4656 info->userData = userData;
4657 info->usingCallback = true;
4658 info->object = (void *) this;
4660 // Set the thread attributes for joinable and realtime scheduling
4661 // priority. The higher priority will only take affect if the
4662 // program is run as root or suid.
4663 pthread_attr_t attr;
4664 pthread_attr_init(&attr);
4665 pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
4666 pthread_attr_setschedpolicy(&attr, SCHED_RR);
4668 int err = pthread_create(&info->thread, &attr, alsaCallbackHandler, &stream_.callbackInfo);
4669 pthread_attr_destroy(&attr);
// On thread-creation failure, roll back the registration before reporting.
4671 info->usingCallback = false;
4672 sprintf(message_, "RtApiAlsa: error starting callback thread!");
4673 error(RtError::THREAD_ERROR);
// Deregister the user callback: stop the stream if running, clear the
// usingCallback flag (which makes the callback thread's loop exit), join the
// thread, and reset the callback bookkeeping.
// NOTE(review): this listing elides the stopStream() call after the
// STREAM_RUNNING check; only comments were added here.
4677 void RtApiAlsa :: cancelStreamCallback()
4681 if (stream_.callbackInfo.usingCallback) {
4683 if (stream_.state == STREAM_RUNNING)
4686 MUTEX_LOCK(&stream_.mutex);
// Clearing the flag before pthread_join() lets alsaCallbackHandler's
// while-loop terminate.
4688 stream_.callbackInfo.usingCallback = false;
4689 pthread_join(stream_.callbackInfo.thread, NULL);
4690 stream_.callbackInfo.thread = 0;
4691 stream_.callbackInfo.callback = NULL;
4692 stream_.callbackInfo.userData = NULL;
4694 MUTEX_UNLOCK(&stream_.mutex);
// Entry point for the ALSA callback thread created by setStreamCallback().
// Repeatedly calls tickStream() until cancelStreamCallback() clears the
// usingCallback flag; an RtError from tickStream() terminates the thread.
// NOTE(review): the "try {" line and the function's tail (e.g. the return)
// are elided in this listing; only comments were added here.
4698 extern "C" void *alsaCallbackHandler(void *ptr)
4700 CallbackInfo *info = (CallbackInfo *) ptr;
4701 RtApiAlsa *object = (RtApiAlsa *) info->object;
4702 bool *usingCallback = &info->usingCallback;
4704 while ( *usingCallback ) {
4706 object->tickStream();
4708 catch (RtError &exception) {
4709 fprintf(stderr, "\nRtApiAlsa: callback thread error (%s) ... closing thread.\n\n",
4710 exception.getMessageString());
4718 //******************** End of __LINUX_ALSA__ *********************//
4721 #if defined(__WINDOWS_ASIO__) // ASIO API on Windows
4723 // The ASIO API is designed around a callback scheme, so this
4724 // implementation is similar to that used for OS-X CoreAudio and Linux
4725 // Jack. The primary constraint with ASIO is that it only allows
4726 // access to a single driver at a time. Thus, it is not possible to
4727 // have more than one simultaneous RtAudio stream.
4729 // This implementation also requires a number of external ASIO files
4730 // and a few global variables. The ASIO callback scheme does not
4731 // allow for the passing of user data, so we must create a global
4732 // pointer to our callbackInfo structure.
4734 // On unix systems, we make use of a pthread condition variable.
4735 // Since there is no equivalent in Windows, I hacked something based
4736 // on information found in
4737 // http://www.cs.wustl.edu/~schmidt/win32-cv-1.html.
4739 #include "asio/asiosys.h"
4740 #include "asio/asio.h"
4741 #include "asio/iasiothiscallresolver.h"
4742 #include "asio/asiodrivers.h"
// File-scope ASIO state: the ASIO callback scheme cannot carry user data,
// so the driver list, callback table, driver info, and a pointer to the
// stream's CallbackInfo are kept as globals (see the comment block above).
4745 AsioDrivers drivers;
4746 ASIOCallbacks asioCallbacks;
4747 ASIODriverInfo driverInfo;
4748 CallbackInfo *asioCallbackInfo;
// NOTE(review): the AsioHandle struct declaration (its header, the
// stopStream/condition members, and closing brace) is elided in this
// listing; only the bufferInfos member and the constructor's init list
// are visible.
4752 ASIOBufferInfo *bufferInfos;
4756 :stopStream(false), bufferInfos(0) {}
// Map an ASIOError code to a human-readable message via a small static
// lookup table; unknown codes yield "Unknown error.".
// NOTE(review): the Messages struct declaration and the array's closing
// brace are elided in this listing; only comments were added here.
4759 static const char* GetAsioErrorString( ASIOError result )
4766 static Messages m[] =
4768 { ASE_NotPresent, "Hardware input or output is not present or available." },
4769 { ASE_HWMalfunction, "Hardware is malfunctioning." },
4770 { ASE_InvalidParameter, "Invalid input parameter." },
4771 { ASE_InvalidMode, "Invalid mode." },
4772 { ASE_SPNotAdvancing, "Sample position not advancing." },
4773 { ASE_NoClock, "Sample clock or rate cannot be determined or is not present." },
4774 { ASE_NoMemory, "Not enough memory to complete the request." }
// Linear scan is fine: the table has only seven entries.
4777 for (unsigned int i = 0; i < sizeof(m)/sizeof(m[0]); ++i)
4778 if (m[i].value == result) return m[i].message;
4780 return "Unknown error.";
// Constructor: marks COM as not yet initialized and verifies that at least
// one ASIO driver was found (nDevices_ is presumably set by an elided
// initialize() call — TODO confirm against the full source).
4783 RtApiAsio :: RtApiAsio()
4785 this->coInitialized = false;
4788 if (nDevices_ <= 0) {
4789 sprintf(message_, "RtApiAsio: no Windows ASIO audio drivers found!");
4790 error(RtError::NO_DEVICES_FOUND);
// Destructor: close any open stream, then undo the CoInitialize() done in
// initialize() (the CoUninitialize() call itself is elided in this listing).
4794 RtApiAsio :: ~RtApiAsio()
4796 if ( stream_.mode != UNINITIALIZED ) closeStream();
4798 if ( coInitialized )
// Enumerate the installed ASIO drivers: initialize COM (apartment-threaded),
// query the driver count, and record each driver's name in devices_.
// NOTE(review): lines are elided in this listing (the FAILED(hr) guard, the
// "RtApiDevice device; char name[128];" declarations, and closing braces).
// Also note: the append() below copies strlen(name)+1 bytes, embedding the
// terminating NUL inside the std::string — looks unintentional; verify
// against string comparisons elsewhere before changing.
4802 void RtApiAsio :: initialize(void)
4805 // ASIO cannot run on a multi-threaded apartment. You can call CoInitialize beforehand, but it must be
4806 // for apartment threading (in which case, CoInitialize will return S_FALSE here).
4807 coInitialized = false;
4808 HRESULT hr = CoInitialize(NULL);
4810 sprintf(message_,"RtApiAsio: ASIO requires a single-threaded appartment. Call CoInitializeEx(0,COINIT_APARTMENTTHREADED)");
4812 coInitialized = true;
4814 nDevices_ = drivers.asioGetNumDev();
4815 if (nDevices_ <= 0) return;
4817 // Create device structures and write device driver names to each.
4820 for (int i=0; i<nDevices_; i++) {
4821 if ( drivers.asioGetDriverName( i, name, 128 ) == 0 ) {
4822 device.name.erase();
4823 device.name.append( (const char *)name, strlen(name)+1);
4824 devices_.push_back(device);
4827 sprintf(message_, "RtApiAsio: error getting driver name for device index %d!", i);
4828 error(RtError::WARNING);
// Recount in case any driver name query failed above.
4832 nDevices_ = (int) devices_.size();
4834 drivers.removeCurrentDriver();
4835 driverInfo.asioVersion = 2;
4836 // See note in DirectSound implementation about GetDesktopWindow().
4837 driverInfo.sysRef = GetForegroundWindow();
// Probe one ASIO driver's capabilities: channel counts, duplex support,
// supported sample rates, and native data format.  Loads the driver, fills
// *info, and unloads it again.  On any failure the driver is unloaded and a
// DEBUG_WARNING is raised (early returns after each warning are elided in
// this listing, along with several closing braces).
4840 void RtApiAsio :: probeDeviceInfo(RtApiDevice *info)
4842 // Don't probe if a stream is already open.
4843 if ( stream_.mode != UNINITIALIZED ) {
4844 sprintf(message_, "RtApiAsio: unable to probe driver while a stream is open.");
4845 error(RtError::DEBUG_WARNING);
4849 if ( !drivers.loadDriver( (char *)info->name.c_str() ) ) {
4850 sprintf(message_, "RtApiAsio: error loading driver (%s).", info->name.c_str());
4851 error(RtError::DEBUG_WARNING);
4855 ASIOError result = ASIOInit( &driverInfo );
4856 if ( result != ASE_OK ) {
4857 sprintf(message_, "RtApiAsio: error (%s) initializing driver (%s).",
4858 GetAsioErrorString(result), info->name.c_str());
4859 error(RtError::DEBUG_WARNING);
4863 // Determine the device channel information.
4864 long inputChannels, outputChannels;
4865 result = ASIOGetChannels( &inputChannels, &outputChannels );
4866 if ( result != ASE_OK ) {
4867 drivers.removeCurrentDriver();
4868 sprintf(message_, "RtApiAsio: error (%s) getting input/output channel count (%s).",
4869 GetAsioErrorString(result),
4870 info->name.c_str());
4871 error(RtError::DEBUG_WARNING);
4875 info->maxOutputChannels = outputChannels;
4876 if ( outputChannels > 0 ) info->minOutputChannels = 1;
4878 info->maxInputChannels = inputChannels;
4879 if ( inputChannels > 0 ) info->minInputChannels = 1;
4881 // If device opens for both playback and capture, we determine the channels.
4882 if (info->maxOutputChannels > 0 && info->maxInputChannels > 0) {
4883 info->hasDuplexSupport = true;
// Duplex limits are the smaller of the input/output limits.
4884 info->maxDuplexChannels = (info->maxOutputChannels > info->maxInputChannels) ?
4885 info->maxInputChannels : info->maxOutputChannels;
4886 info->minDuplexChannels = (info->minOutputChannels > info->minInputChannels) ?
4887 info->minInputChannels : info->minOutputChannels;
4890 // Determine the supported sample rates.
4891 info->sampleRates.clear();
4892 for (unsigned int i=0; i<MAX_SAMPLE_RATES; i++) {
4893 result = ASIOCanSampleRate( (ASIOSampleRate) SAMPLE_RATES[i] );
4894 if ( result == ASE_OK )
4895 info->sampleRates.push_back( SAMPLE_RATES[i] );
4898 if (info->sampleRates.size() == 0) {
4899 drivers.removeCurrentDriver();
4900 sprintf( message_, "RtApiAsio: No supported sample rates found for driver (%s).", info->name.c_str() );
4901 error(RtError::DEBUG_WARNING);
4905 // Determine supported data types ... just check first channel and assume rest are the same.
4906 ASIOChannelInfo channelInfo;
4907 channelInfo.channel = 0;
4908 channelInfo.isInput = true;
4909 if ( info->maxInputChannels <= 0 ) channelInfo.isInput = false;
4910 result = ASIOGetChannelInfo( &channelInfo );
4911 if ( result != ASE_OK ) {
4912 drivers.removeCurrentDriver();
4913 sprintf(message_, "RtApiAsio: error (%s) getting driver (%s) channel information.",
4914 GetAsioErrorString(result),
4915 info->name.c_str());
4916 error(RtError::DEBUG_WARNING);
// Map the ASIO sample type to RtAudio's format flags (MSB variants get
// byte-swapped at stream-open time, not here).
4920 if ( channelInfo.type == ASIOSTInt16MSB || channelInfo.type == ASIOSTInt16LSB )
4921 info->nativeFormats |= RTAUDIO_SINT16;
4922 else if ( channelInfo.type == ASIOSTInt32MSB || channelInfo.type == ASIOSTInt32LSB )
4923 info->nativeFormats |= RTAUDIO_SINT32;
4924 else if ( channelInfo.type == ASIOSTFloat32MSB || channelInfo.type == ASIOSTFloat32LSB )
4925 info->nativeFormats |= RTAUDIO_FLOAT32;
4926 else if ( channelInfo.type == ASIOSTFloat64MSB || channelInfo.type == ASIOSTFloat64LSB )
4927 info->nativeFormats |= RTAUDIO_FLOAT64;
4929 // Check that we have at least one supported format.
4930 if (info->nativeFormats == 0) {
4931 drivers.removeCurrentDriver();
4932 sprintf(message_, "RtApiAsio: driver (%s) data format not supported by RtAudio.",
4933 info->name.c_str());
4934 error(RtError::DEBUG_WARNING);
4938 info->probed = true;
4939 drivers.removeCurrentDriver();
// ASIO buffer-switch callback: forwards to the RtApiAsio object stashed in
// the global asioCallbackInfo (ASIO callbacks carry no user data), reporting
// any RtError to stderr.  The "try {" line is elided in this listing.
4942 void bufferSwitch(long index, ASIOBool processNow)
4944 RtApiAsio *object = (RtApiAsio *) asioCallbackInfo->object;
4946 object->callbackEvent( index );
4948 catch (RtError &exception) {
4949 fprintf(stderr, "\nRtApiAsio: callback handler error (%s)!\n\n", exception.getMessageString());
// ASIO sample-rate-change callback: RtAudio cannot follow a rate change
// mid-stream, so the stream is stopped and the event is reported to stderr.
// The "try {" line is elided in this listing.
4956 void sampleRateChanged(ASIOSampleRate sRate)
4958 // The ASIO documentation says that this usually only happens during
4959 // external sync. Audio processing is not stopped by the driver,
4960 // actual sample rate might not have even changed, maybe only the
4961 // sample rate status of an AES/EBU or S/PDIF digital input at the
4964 RtAudio *object = (RtAudio *) asioCallbackInfo->object;
4966 object->stopStream();
4968 catch (RtError &exception) {
4969 fprintf(stderr, "\nRtApiAsio: sampleRateChanged() error (%s)!\n\n", exception.getMessageString());
4973 fprintf(stderr, "\nRtApiAsio: driver reports sample rate changed to %d ... stream stopped!!!", (int) sRate);
// ASIO message callback: answers driver queries (selector support, engine
// version, time-info/time-code support) and logs driver notifications
// (reset/resync requests, latency changes) to stderr.
// NOTE(review): the "switch(selector) {", the "ret = ..." assignments,
// break statements, and the final return are elided in this listing.
4976 long asioMessages(long selector, long value, void* message, double* opt)
4980 case kAsioSelectorSupported:
4981 if(value == kAsioResetRequest
4982 || value == kAsioEngineVersion
4983 || value == kAsioResyncRequest
4984 || value == kAsioLatenciesChanged
4985 // The following three were added for ASIO 2.0, you don't
4986 // necessarily have to support them.
4987 || value == kAsioSupportsTimeInfo
4988 || value == kAsioSupportsTimeCode
4989 || value == kAsioSupportsInputMonitor)
4992 case kAsioResetRequest:
4993 // Defer the task and perform the reset of the driver during the
4994 // next "safe" situation. You cannot reset the driver right now,
4995 // as this code is called from the driver. Resetting the driver
4996 // means completely destructing it: ASIOStop(),
4997 // ASIODisposeBuffers(), destruction. Afterwards you initialize the
4999 fprintf(stderr, "\nRtApiAsio: driver reset requested!!!");
5002 case kAsioResyncRequest:
5003 // This informs the application that the driver encountered some
5004 // non-fatal data loss. It is used for synchronization purposes
5005 // of different media. Added mainly to work around the Win16Mutex
5006 // problems in Windows 95/98 with the Windows Multimedia system,
5007 // which could lose data because the Mutex was held too long by
5008 // another thread. However a driver can issue it in other
5010 fprintf(stderr, "\nRtApiAsio: driver resync requested!!!");
5013 case kAsioLatenciesChanged:
5014 // This will inform the host application that the driver's
5015 // latencies changed. Beware, this does not mean that the
5016 // buffer sizes have changed! You might need to update internal
5018 fprintf(stderr, "\nRtApiAsio: driver latency may have changed!!!");
5021 case kAsioEngineVersion:
5022 // Return the supported ASIO version of the host application. If
5023 // a host application does not implement this selector, ASIO 1.0
5024 // is assumed by the driver.
5027 case kAsioSupportsTimeInfo:
5028 // Informs the driver whether the
5029 // asioCallbacks.bufferSwitchTimeInfo() callback is supported.
5030 // For compatibility with ASIO 1.0 drivers the host application
5031 // should always support the "old" bufferSwitch method, too.
5034 case kAsioSupportsTimeCode:
5035 // Informs the driver whether application is interested in time
5036 // code info. If an application does not need to know about time
5037 // code, the driver has less work to do.
// Open the ASIO device for the given mode: load/initialize the driver (once
// for a duplex pair), validate channel count and sample rate, determine the
// native data format, negotiate the buffer size, create the ASIO buffers,
// and set up RtAudio's conversion/deinterleave bookkeeping.
// Fix: the buffer-creation failure message contained the typo "eror"; it now
// reads "error", matching the sibling message used for ASIOGetBufferSize
// failures.  No other tokens changed.
// NOTE(review): this listing has many elided lines (early returns after
// warnings, "ASIOError result;" declaration, goto-error handling, and
// closing braces); comments were added, but the structure is reproduced
// as found.
5044 bool RtApiAsio :: probeDeviceOpen(int device, StreamMode mode, int channels,
5045 int sampleRate, RtAudioFormat format,
5046 int *bufferSize, int numberOfBuffers)
5048 // For ASIO, a duplex stream MUST use the same driver.
5049 if ( mode == INPUT && stream_.mode == OUTPUT && stream_.device[0] != device ) {
5050 sprintf(message_, "RtApiAsio: duplex stream must use the same device for input and output.");
5051 error(RtError::WARNING);
5055 // Only load the driver once for duplex stream.
5057 if ( mode != INPUT || stream_.mode != OUTPUT ) {
5058 if ( !drivers.loadDriver( (char *)devices_[device].name.c_str() ) ) {
5059 sprintf(message_, "RtApiAsio: error loading driver (%s).",
5060 devices_[device].name.c_str());
5061 error(RtError::DEBUG_WARNING);
5065 result = ASIOInit( &driverInfo );
5066 if ( result != ASE_OK ) {
5067 sprintf(message_, "RtApiAsio: error (%s) initializing driver (%s).",
5068 GetAsioErrorString(result), devices_[device].name.c_str());
5069 error(RtError::DEBUG_WARNING);
5074 // Check the device channel count.
5075 long inputChannels, outputChannels;
5076 result = ASIOGetChannels( &inputChannels, &outputChannels );
5077 if ( result != ASE_OK ) {
5078 drivers.removeCurrentDriver();
5079 sprintf(message_, "RtApiAsio: error (%s) getting input/output channel count (%s).",
5080 GetAsioErrorString(result),
5081 devices_[device].name.c_str());
5082 error(RtError::DEBUG_WARNING);
5086 if ( ( mode == OUTPUT && channels > outputChannels) ||
5087 ( mode == INPUT && channels > inputChannels) ) {
5088 drivers.removeCurrentDriver();
5089 sprintf(message_, "RtApiAsio: driver (%s) does not support requested channel count (%d).",
5090 devices_[device].name.c_str(), channels);
5091 error(RtError::DEBUG_WARNING);
5094 stream_.nDeviceChannels[mode] = channels;
5095 stream_.nUserChannels[mode] = channels;
5097 // Verify the sample rate is supported.
5098 result = ASIOCanSampleRate( (ASIOSampleRate) sampleRate );
5099 if ( result != ASE_OK ) {
5100 drivers.removeCurrentDriver();
5101 sprintf(message_, "RtApiAsio: driver (%s) does not support requested sample rate (%d).",
5102 devices_[device].name.c_str(), sampleRate);
5103 error(RtError::DEBUG_WARNING);
5107 // Set the sample rate.
5108 result = ASIOSetSampleRate( (ASIOSampleRate) sampleRate );
5109 if ( result != ASE_OK ) {
5110 drivers.removeCurrentDriver();
5111 sprintf(message_, "RtApiAsio: driver (%s) error setting sample rate (%d).",
5112 devices_[device].name.c_str(), sampleRate);
5113 error(RtError::DEBUG_WARNING);
5117 // Determine the driver data type.
5118 ASIOChannelInfo channelInfo;
5119 channelInfo.channel = 0;
5120 if ( mode == OUTPUT ) channelInfo.isInput = false;
5121 else channelInfo.isInput = true;
5122 result = ASIOGetChannelInfo( &channelInfo );
5123 if ( result != ASE_OK ) {
5124 drivers.removeCurrentDriver();
5125 sprintf(message_, "RtApiAsio: driver (%s) error getting data format.",
5126 devices_[device].name.c_str());
5127 error(RtError::DEBUG_WARNING);
5131 // Assuming WINDOWS host is always little-endian.
5132 stream_.doByteSwap[mode] = false;
5133 stream_.userFormat = format;
5134 stream_.deviceFormat[mode] = 0;
// Map the ASIO sample type to an RtAudio format; MSB (big-endian) variants
// require a byte swap on this little-endian host.
5135 if ( channelInfo.type == ASIOSTInt16MSB || channelInfo.type == ASIOSTInt16LSB ) {
5136 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
5137 if ( channelInfo.type == ASIOSTInt16MSB ) stream_.doByteSwap[mode] = true;
5139 else if ( channelInfo.type == ASIOSTInt32MSB || channelInfo.type == ASIOSTInt32LSB ) {
5140 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
5141 if ( channelInfo.type == ASIOSTInt32MSB ) stream_.doByteSwap[mode] = true;
5143 else if ( channelInfo.type == ASIOSTFloat32MSB || channelInfo.type == ASIOSTFloat32LSB ) {
5144 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
5145 if ( channelInfo.type == ASIOSTFloat32MSB ) stream_.doByteSwap[mode] = true;
5147 else if ( channelInfo.type == ASIOSTFloat64MSB || channelInfo.type == ASIOSTFloat64LSB ) {
5148 stream_.deviceFormat[mode] = RTAUDIO_FLOAT64;
5149 if ( channelInfo.type == ASIOSTFloat64MSB ) stream_.doByteSwap[mode] = true;
5152 if ( stream_.deviceFormat[mode] == 0 ) {
5153 drivers.removeCurrentDriver();
5154 sprintf(message_, "RtApiAsio: driver (%s) data format not supported by RtAudio.",
5155 devices_[device].name.c_str());
5156 error(RtError::DEBUG_WARNING);
5160 // Set the buffer size. For a duplex stream, this will end up
5161 // setting the buffer size based on the input constraints, which
5163 long minSize, maxSize, preferSize, granularity;
5164 result = ASIOGetBufferSize( &minSize, &maxSize, &preferSize, &granularity );
5165 if ( result != ASE_OK ) {
5166 drivers.removeCurrentDriver();
5167 sprintf(message_, "RtApiAsio: error (%s) on driver (%s) error getting buffer size.",
5168 GetAsioErrorString(result),
5169 devices_[device].name.c_str());
5170 error(RtError::DEBUG_WARNING);
// Clamp/round the requested buffer size to the driver's constraints.
5174 if ( *bufferSize < minSize ) *bufferSize = minSize;
5175 else if ( *bufferSize > maxSize ) *bufferSize = maxSize;
5176 else if ( granularity == -1 ) {
5177 // Make sure bufferSize is a power of two.
5178 double power = log10( (double) *bufferSize ) / log10( 2.0 );
5179 *bufferSize = (int) pow( 2.0, floor(power+0.5) );
5180 if ( *bufferSize < minSize ) *bufferSize = minSize;
5181 else if ( *bufferSize > maxSize ) *bufferSize = maxSize;
5182 else *bufferSize = preferSize;
5183 } else if (granularity != 0)
5185 // to an even multiple of granularity, rounding up.
5186 *bufferSize = (*bufferSize + granularity-1)/granularity*granularity;
5191 if ( mode == INPUT && stream_.mode == OUTPUT && stream_.bufferSize != *bufferSize )
5192 std::cerr << "Possible input/output buffersize discrepancy!" << std::endl;
5194 stream_.bufferSize = *bufferSize;
5195 stream_.nBuffers = 2;
5197 // ASIO always uses deinterleaved channels.
5198 stream_.deInterleave[mode] = true;
5200 // Allocate, if necessary, our AsioHandle structure for the stream.
5201 AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
5202 if ( handle == 0 ) {
5203 handle = (AsioHandle *) calloc(1, sizeof(AsioHandle));
5204 if ( handle == NULL ) {
5205 drivers.removeCurrentDriver();
5206 sprintf(message_, "RtApiAsio: error allocating AsioHandle memory (%s).",
5207 devices_[device].name.c_str());
5208 error(RtError::DEBUG_WARNING);
5211 handle->bufferInfos = 0;
5212 // Create a manual-reset event.
5213 handle->condition = CreateEvent( NULL, // no security
5214 TRUE, // manual-reset
5215 FALSE, // non-signaled initially
5217 stream_.apiHandle = (void *) handle;
5220 // Create the ASIO internal buffers. Since RtAudio sets up input
5221 // and output separately, we'll have to dispose of previously
5222 // created output buffers for a duplex stream.
5223 if ( mode == INPUT && stream_.mode == OUTPUT ) {
5224 ASIODisposeBuffers();
5225 if ( handle->bufferInfos ) free( handle->bufferInfos );
5228 // Allocate, initialize, and save the bufferInfos in our stream callbackInfo structure.
5229 int i, nChannels = stream_.nDeviceChannels[0] + stream_.nDeviceChannels[1];
5230 handle->bufferInfos = (ASIOBufferInfo *) malloc( nChannels * sizeof(ASIOBufferInfo) );
5231 if (handle->bufferInfos == NULL) {
5232 sprintf(message_, "RtApiAsio: error allocating bufferInfo memory (%s).",
5233 devices_[device].name.c_str());
// Output channels first, then input channels, in one contiguous array.
5236 ASIOBufferInfo *infos;
5237 infos = handle->bufferInfos;
5238 for ( i=0; i<stream_.nDeviceChannels[0]; i++, infos++ ) {
5239 infos->isInput = ASIOFalse;
5240 infos->channelNum = i;
5241 infos->buffers[0] = infos->buffers[1] = 0;
5243 for ( i=0; i<stream_.nDeviceChannels[1]; i++, infos++ ) {
5244 infos->isInput = ASIOTrue;
5245 infos->channelNum = i;
5246 infos->buffers[0] = infos->buffers[1] = 0;
5249 // Set up the ASIO callback structure and create the ASIO data buffers.
5250 asioCallbacks.bufferSwitch = &bufferSwitch;
5251 asioCallbacks.sampleRateDidChange = &sampleRateChanged;
5252 asioCallbacks.asioMessage = &asioMessages;
5253 asioCallbacks.bufferSwitchTimeInfo = NULL;
5254 result = ASIOCreateBuffers( handle->bufferInfos, nChannels, stream_.bufferSize, &asioCallbacks);
5255 if ( result != ASE_OK ) {
5256 sprintf(message_, "RtApiAsio: error (%s) on driver (%s) error creating buffers.",
5257 GetAsioErrorString(result),
5258 devices_[device].name.c_str());
5262 // Set flags for buffer conversion.
5263 stream_.doConvertBuffer[mode] = false;
5264 if (stream_.userFormat != stream_.deviceFormat[mode])
5265 stream_.doConvertBuffer[mode] = true;
5266 if (stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode])
5267 stream_.doConvertBuffer[mode] = true;
5268 if (stream_.nUserChannels[mode] > 1 && stream_.deInterleave[mode])
5269 stream_.doConvertBuffer[mode] = true;
5271 // Allocate necessary internal buffers
5272 if ( stream_.nUserChannels[0] != stream_.nUserChannels[1] ) {
// Size the user buffer for the larger of the two channel counts.
5275 if (stream_.nUserChannels[0] >= stream_.nUserChannels[1])
5276 buffer_bytes = stream_.nUserChannels[0];
5278 buffer_bytes = stream_.nUserChannels[1];
5280 buffer_bytes *= *bufferSize * formatBytes(stream_.userFormat);
5281 if (stream_.userBuffer) free(stream_.userBuffer);
5282 stream_.userBuffer = (char *) calloc(buffer_bytes, 1);
5283 if (stream_.userBuffer == NULL) {
5284 sprintf(message_, "RtApiAsio: error (%s) allocating user buffer memory (%s).",
5285 GetAsioErrorString(result),
5286 devices_[device].name.c_str());
5291 if ( stream_.doConvertBuffer[mode] ) {
// Reuse an existing device buffer when it is already large enough.
5294 bool makeBuffer = true;
5295 if ( mode == OUTPUT )
5296 buffer_bytes = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
5297 else { // mode == INPUT
5298 buffer_bytes = stream_.nDeviceChannels[1] * formatBytes(stream_.deviceFormat[1]);
5299 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
5300 long bytes_out = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
5301 if ( buffer_bytes < bytes_out ) makeBuffer = false;
5306 buffer_bytes *= *bufferSize;
5307 if (stream_.deviceBuffer) free(stream_.deviceBuffer);
5308 stream_.deviceBuffer = (char *) calloc(buffer_bytes, 1);
5309 if (stream_.deviceBuffer == NULL) {
5310 sprintf(message_, "RtApiAsio: error (%s) allocating device buffer memory (%s).",
5311 GetAsioErrorString(result),
5312 devices_[device].name.c_str());
5318 stream_.device[mode] = device;
5319 stream_.state = STREAM_STOPPED;
5320 if ( stream_.mode == OUTPUT && mode == INPUT )
5321 // We had already set up an output stream.
5322 stream_.mode = DUPLEX;
5324 stream_.mode = mode;
5325 stream_.sampleRate = sampleRate;
5326 asioCallbackInfo = &stream_.callbackInfo;
5327 stream_.callbackInfo.object = (void *) this;
5329 // Setup the buffer conversion information structure.
5330 if ( stream_.doConvertBuffer[mode] ) {
5331 if (mode == INPUT) { // convert device to user buffer
5332 stream_.convertInfo[mode].inJump = stream_.nDeviceChannels[1];
5333 stream_.convertInfo[mode].outJump = stream_.nUserChannels[1];
5334 stream_.convertInfo[mode].inFormat = stream_.deviceFormat[1];
5335 stream_.convertInfo[mode].outFormat = stream_.userFormat;
5337 else { // convert user to device buffer
5338 stream_.convertInfo[mode].inJump = stream_.nUserChannels[0];
5339 stream_.convertInfo[mode].outJump = stream_.nDeviceChannels[0];
5340 stream_.convertInfo[mode].inFormat = stream_.userFormat;
5341 stream_.convertInfo[mode].outFormat = stream_.deviceFormat[0];
5344 if ( stream_.convertInfo[mode].inJump < stream_.convertInfo[mode].outJump )
5345 stream_.convertInfo[mode].channels = stream_.convertInfo[mode].inJump;
5347 stream_.convertInfo[mode].channels = stream_.convertInfo[mode].outJump;
5349 // Set up the interleave/deinterleave offsets.
5350 if ( mode == INPUT && stream_.deInterleave[1] ) {
5351 for (int k=0; k<stream_.convertInfo[mode].channels; k++) {
5352 stream_.convertInfo[mode].inOffset.push_back( k * stream_.bufferSize );
5353 stream_.convertInfo[mode].outOffset.push_back( k );
5354 stream_.convertInfo[mode].inJump = 1;
5357 else if (mode == OUTPUT && stream_.deInterleave[0]) {
5358 for (int k=0; k<stream_.convertInfo[mode].channels; k++) {
5359 stream_.convertInfo[mode].inOffset.push_back( k );
5360 stream_.convertInfo[mode].outOffset.push_back( k * stream_.bufferSize );
5361 stream_.convertInfo[mode].outJump = 1;
5365 for (int k=0; k<stream_.convertInfo[mode].channels; k++) {
5366 stream_.convertInfo[mode].inOffset.push_back( k );
5367 stream_.convertInfo[mode].outOffset.push_back( k );
// Error/cleanup path (label elided): tear down ASIO buffers, the driver,
// the handle's event and bufferInfos, and the user buffer, then warn.
5375 ASIODisposeBuffers();
5376 drivers.removeCurrentDriver();
5379 CloseHandle( handle->condition );
5380 if ( handle->bufferInfos )
5381 free( handle->bufferInfos );
5383 stream_.apiHandle = 0;
5386 if (stream_.userBuffer) {
5387 free(stream_.userBuffer);
5388 stream_.userBuffer = 0;
5391 error(RtError::DEBUG_WARNING);
// Close the open ASIO stream and release every associated resource:
// the ASIO device buffers, the current driver, the Win32 event handle,
// the driver bufferInfos array, and the user/device sample buffers.
// Resets stream_.mode so the object can be reused for a new stream.
// NOTE(review): several structural lines (braces, and presumably a
// stopStream() call after the STREAM_RUNNING check) are missing from
// this listing -- verify against the canonical RtAudio 3.0.3 source.
5395 void RtApiAsio :: closeStream()
5397 // We don't want an exception to be thrown here because this
5398 // function is called by our class destructor. So, do our own
// error reporting here (a WARNING) instead of throwing.
5400 if ( stream_.mode == UNINITIALIZED ) {
5401 sprintf(message_, "RtApiAsio::closeStream(): no open stream to close!");
5402 error(RtError::WARNING);
5406 if (stream_.state == STREAM_RUNNING)
// Release driver-side resources before freeing our own handles.
5409 ASIODisposeBuffers();
5410 drivers.removeCurrentDriver();
5412 AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
// Destroy the Win32 event used to signal blocking tickStream() calls.
5414 CloseHandle( handle->condition );
5415 if ( handle->bufferInfos )
5416 free( handle->bufferInfos );
5418 stream_.apiHandle = 0;
// Free the interleaved user-format buffer, if allocated.
5421 if (stream_.userBuffer) {
5422 free(stream_.userBuffer);
5423 stream_.userBuffer = 0;
// Free the device-format conversion buffer, if allocated.
5426 if (stream_.deviceBuffer) {
5427 free(stream_.deviceBuffer);
5428 stream_.deviceBuffer = 0;
// Mark the stream as closed.
5431 stream_.mode = UNINITIALIZED;
// Register a user callback to be invoked from callbackEvent() for each
// buffer period.  Only one callback may be active at a time; a second
// registration is rejected with a WARNING.
// @param callback  user audio-processing function (stored as void*).
// @param userData  opaque pointer passed back to the callback.
5434 void RtApiAsio :: setStreamCallback(RtAudioCallback callback, void *userData)
5438 if ( stream_.callbackInfo.usingCallback ) {
5439 sprintf(message_, "RtApiAsio: A callback is already set for this stream!");
5440 error(RtError::WARNING);
5444 stream_.callbackInfo.callback = (void *) callback;
5445 stream_.callbackInfo.userData = userData;
// Enable callback dispatch in callbackEvent().
5446 stream_.callbackInfo.usingCallback = true;
// Deregister the user callback.  If the stream is running it is
// stopped first (the call after the STREAM_RUNNING check is elided in
// this listing -- presumably stopStream(); verify against the
// canonical source).  Clears all callback bookkeeping under the
// stream mutex.
5449 void RtApiAsio :: cancelStreamCallback()
5453 if (stream_.callbackInfo.usingCallback) {
5455 if (stream_.state == STREAM_RUNNING)
5458 MUTEX_LOCK(&stream_.mutex);
5460 stream_.callbackInfo.usingCallback = false;
5461 stream_.callbackInfo.userData = NULL;
5462 stream_.state = STREAM_STOPPED;
5463 stream_.callbackInfo.callback = NULL;
5465 MUTEX_UNLOCK(&stream_.mutex);
// Start the ASIO device.  No-op when already running.  On driver
// failure the mutex is released before error() raises DRIVER_ERROR.
// On success, clears the stop flag and marks the stream running.
5469 void RtApiAsio :: startStream()
5472 if (stream_.state == STREAM_RUNNING) return;
5474 MUTEX_LOCK(&stream_.mutex);
5476 ASIOError result = ASIOStart();
5477 if ( result != ASE_OK ) {
5478 sprintf(message_, "RtApiAsio: error starting device (%s).",
5479 devices_[stream_.device[0]].name.c_str());
// Unlock before error() -- DRIVER_ERROR throws and would otherwise
// leave the mutex held.
5480 MUTEX_UNLOCK(&stream_.mutex);
5481 error(RtError::DRIVER_ERROR);
5483 AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
// Reset the deferred-stop request flag set by the user callback.
5484 handle->stopStream = false;
5485 stream_.state = STREAM_RUNNING;
5487 MUTEX_UNLOCK(&stream_.mutex);
// Stop the ASIO device.  No-op when already stopped.  The state is
// flipped to STREAM_STOPPED *before* taking the mutex so a concurrent
// callbackEvent() bails out quickly instead of blocking shutdown.
5490 void RtApiAsio :: stopStream()
5493 if (stream_.state == STREAM_STOPPED) return;
5495 // Change the state before the lock to improve shutdown response
5496 // when using a callback.
5497 stream_.state = STREAM_STOPPED;
5498 MUTEX_LOCK(&stream_.mutex);
5500 ASIOError result = ASIOStop();
5501 if ( result != ASE_OK ) {
5502 sprintf(message_, "RtApiAsio: error stopping device (%s).",
5503 devices_[stream_.device[0]].name.c_str());
// Unlock before error() -- DRIVER_ERROR throws.
5504 MUTEX_UNLOCK(&stream_.mutex);
5505 error(RtError::DRIVER_ERROR);
5508 MUTEX_UNLOCK(&stream_.mutex);
// Abort (immediately stop) the stream.  The implementation lines are
// elided in this listing -- presumably it delegates to stopStream();
// verify against the canonical RtAudio source.
5511 void RtApiAsio :: abortStream()
// Blocking-mode advance: wait until the ASIO callback has processed
// one buffer.  Must not be used together with a registered callback
// (WARNING issued).  Blocks on the Win32 event that callbackEvent()
// signals via SetEvent(), then resets it for the next period.
5516 void RtApiAsio :: tickStream()
5520 if (stream_.state == STREAM_STOPPED)
5523 if (stream_.callbackInfo.usingCallback) {
5524 sprintf(message_, "RtApiAsio: tickStream() should not be used when a callback function is set!");
5525 error(RtError::WARNING);
5529 AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
5531 MUTEX_LOCK(&stream_.mutex);
5533 // Release the stream_mutex here and wait for the event
5534 // to become signaled by the callback process.
5535 MUTEX_UNLOCK(&stream_.mutex);
5536 WaitForMultipleObjects(1, &handle->condition, FALSE, INFINITE);
5537 ResetEvent( handle->condition );
// Per-period ASIO driver callback: runs the user callback (if any),
// then shuttles audio between the (de)interleaved ASIO device buffers
// and the stream's user/device buffers, applying format conversion
// and byte swapping as flagged at probe time.
// @param bufferIndex  which half of the ASIO double buffer to service.
// NOTE(review): several lines are elided in this listing (braces, the
// deferred-stop handling after 5551, the variable declarations for
// bufferBytes/j, and the post-copy driver call referenced by the
// comment at 5644, presumably ASIOOutputReady()) -- verify against
// the canonical RtAudio 3.0.3 source.
5540 void RtApiAsio :: callbackEvent(long bufferIndex)
5544 if (stream_.state == STREAM_STOPPED) return;
5546 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
5547 AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
5548 if ( info->usingCallback && handle->stopStream ) {
5549 // Check if the stream should be stopped (via the previous user
5550 // callback return value). We stop the stream here, rather than
5551 // after the function call, so that output data can first be
// processed (continuation of the comment; trailing lines elided).
5557 MUTEX_LOCK(&stream_.mutex);
5559 // Invoke user callback first, to get fresh output data.
5560 if ( info->usingCallback ) {
5561 RtAudioCallback callback = (RtAudioCallback) info->callback;
// A non-zero user return value requests a deferred stop, honored at
// the top of the next period.
5562 if ( callback(stream_.userBuffer, stream_.bufferSize, info->userData) )
5563 handle->stopStream = true;
5567 int nChannels = stream_.nDeviceChannels[0] + stream_.nDeviceChannels[1];
// ---- Output side: user buffer -> ASIO output buffers ----
5568 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
5570 bufferBytes = stream_.bufferSize * formatBytes(stream_.deviceFormat[0]);
5571 if (stream_.doConvertBuffer[0]) {
5573 convertBuffer( stream_.deviceBuffer, stream_.userBuffer, stream_.convertInfo[0] );
5574 if ( stream_.doByteSwap[0] )
5575 byteSwapBuffer(stream_.deviceBuffer,
5576 stream_.bufferSize * stream_.nDeviceChannels[0],
5577 stream_.deviceFormat[0]);
5579 // Always de-interleave ASIO output data.
5581 for ( int i=0; i<nChannels; i++ ) {
5582 if ( handle->bufferInfos[i].isInput != ASIOTrue )
// Copy one channel's worth of samples per output bufferInfo entry.
5583 memcpy(handle->bufferInfos[i].buffers[bufferIndex],
5584 &stream_.deviceBuffer[j++*bufferBytes], bufferBytes );
5587 else { // single channel only
5589 if (stream_.doByteSwap[0])
5590 byteSwapBuffer(stream_.userBuffer,
5591 stream_.bufferSize * stream_.nUserChannels[0],
5592 stream_.userFormat);
5594 for ( int i=0; i<nChannels; i++ ) {
5595 if ( handle->bufferInfos[i].isInput != ASIOTrue ) {
5596 memcpy(handle->bufferInfos[i].buffers[bufferIndex], stream_.userBuffer, bufferBytes );
// ---- Input side: ASIO input buffers -> user buffer ----
5603 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
5605 bufferBytes = stream_.bufferSize * formatBytes(stream_.deviceFormat[1]);
5606 if (stream_.doConvertBuffer[1]) {
5608 // Always interleave ASIO input data.
5610 for ( int i=0; i<nChannels; i++ ) {
5611 if ( handle->bufferInfos[i].isInput == ASIOTrue )
5612 memcpy(&stream_.deviceBuffer[j++*bufferBytes],
5613 handle->bufferInfos[i].buffers[bufferIndex],
// (third memcpy argument elided in this listing)
5617 if ( stream_.doByteSwap[1] )
5618 byteSwapBuffer(stream_.deviceBuffer,
5619 stream_.bufferSize * stream_.nDeviceChannels[1],
5620 stream_.deviceFormat[1]);
5621 convertBuffer( stream_.userBuffer, stream_.deviceBuffer, stream_.convertInfo[1] );
5624 else { // single channel only
5625 for ( int i=0; i<nChannels; i++ ) {
5626 if ( handle->bufferInfos[i].isInput == ASIOTrue ) {
5627 memcpy(stream_.userBuffer,
5628 handle->bufferInfos[i].buffers[bufferIndex],
// (third memcpy argument elided in this listing)
5634 if (stream_.doByteSwap[1])
5635 byteSwapBuffer(stream_.userBuffer,
5636 stream_.bufferSize * stream_.nUserChannels[1],
5637 stream_.userFormat);
// Blocking mode: wake the thread parked in tickStream().
5641 if ( !info->usingCallback )
5642 SetEvent( handle->condition );
5644 // The following call was suggested by Malte Clasen. While the API
5645 // documentation indicates it should not be required, some device
5646 // drivers apparently do not function correctly without it.
5649 MUTEX_UNLOCK(&stream_.mutex);
5652 //******************** End of __WINDOWS_ASIO__ *********************//
5655 #if defined(__WINDOWS_DS__) // Windows DirectSound API
5661 #define MINIMUM_DEVICE_BUFFER_SIZE 32768
5664 #ifdef _MSC_VER // if Microsoft Visual C++
5665 #pragma comment(lib,"winmm.lib") // then, auto-link winmm.lib. Otherwise, it has to be added manually.
5669 static inline DWORD dsPointerDifference(DWORD laterPointer,DWORD earlierPointer,DWORD bufferSize)
5671 if (laterPointer > earlierPointer)
5672 return laterPointer-earlierPointer;
5674 return laterPointer-earlierPointer+bufferSize;
5677 static inline DWORD dsPointerBetween(DWORD pointer, DWORD laterPointer,DWORD earlierPointer, DWORD bufferSize)
5679 if (pointer > bufferSize) pointer -= bufferSize;
5681 if (laterPointer < earlierPointer)
5682 laterPointer += bufferSize;
5684 if (pointer < earlierPointer)
5685 pointer += bufferSize;
5687 return pointer >= earlierPointer && pointer < laterPointer;
5690 #undef GENERATE_DEBUG_LOG // Define this to generate a debug timing log file in c:/rtaudiolog.txt
5691 #ifdef GENERATE_DEBUG_LOG
5693 #include "mmsystem.h"
5698 DWORD currentReadPointer, safeReadPointer;
5699 DWORD currentWritePointer, safeWritePointer;
5700 DWORD readTime, writeTime;
5701 DWORD nextWritePointer, nextReadPointer;
5704 int currentDebugLogEntry = 0;
5705 std::vector<TTickRecord> debugLog(2000);
5710 // A structure to hold various information related to the DirectSound
5711 // API implementation.
5717 DWORD dsPointerLeadTime; // the number of bytes ahead of the safe pointer to lead by.
5721 RtApiDs::RtDsStatistics RtApiDs::statistics;
5723 // Provides a backdoor hook to monitor for DirectSound read overruns and write underruns.
// Return a snapshot of the static DirectSound over/underrun
// statistics, with the derived latency field updated from the
// safe-lead byte counts.  The trailing 'return s;' is elided in this
// listing -- verify against the canonical source.
5724 RtApiDs::RtDsStatistics RtApiDs::getDsStatistics()
5726 RtDsStatistics s = statistics;
5727 // update the calculated fields.
// Convert the capture-side lead bytes to seconds (bytes / frame size
// / sample rate); guard against division by zero.
5730 if (s.inputFrameSize != 0)
5731 s.latency += s.readDeviceSafeLeadBytes*1.0/s.inputFrameSize / s.sampleRate;
// Likewise for the playback side, including the write-buffer lead.
5733 if (s.outputFrameSize != 0)
5734 s.latency += (s.writeDeviceSafeLeadBytes+ s.writeDeviceBufferLeadBytes)*1.0/s.outputFrameSize / s.sampleRate;
5739 // Declarations for utility functions, callbacks, and structures
5740 // specific to the DirectSound implementation.
5741 static bool CALLBACK deviceCountCallback(LPGUID lpguid,
5742 LPCTSTR description,
5746 static bool CALLBACK deviceInfoCallback(LPGUID lpguid,
5747 LPCTSTR description,
5751 static bool CALLBACK defaultDeviceCallback(LPGUID lpguid,
5752 LPCTSTR description,
5756 static bool CALLBACK deviceIdCallback(LPGUID lpguid,
5757 LPCTSTR description,
5761 static char* getErrorString(int code);
5763 extern "C" unsigned __stdcall callbackHandler(void *ptr);
// Constructor: initialize COM for this thread (tolerating a prior,
// different threading-model initialization by the host application)
// and fail with NO_DEVICES_FOUND if device enumeration (performed by
// the base/init code, elided here) found no DirectSound devices.
5772 RtApiDs :: RtApiDs()
5774 // Dsound will run both-threaded. If CoInitialize fails, then just
5775 // accept whatever the mainline chose for a threading model.
5776 coInitialized = false;
5777 HRESULT hr = CoInitialize(NULL);
// coInitialized records whether *we* owe a CoUninitialize() in the
// destructor (the success check guarding this line is elided here).
5779 coInitialized = true;
5783 if (nDevices_ <= 0) {
5784 sprintf(message_, "RtApiDs: no Windows DirectSound audio devices found!");
5785 error(RtError::NO_DEVICES_FOUND);
// Destructor: balance our CoInitialize() (the guard on coInitialized
// is elided in this listing) and close any open stream.
5789 RtApiDs :: ~RtApiDs()
5792 CoUninitialize(); // balanced call.
5794 if ( stream_.mode != UNINITIALIZED ) closeStream();
// Return the index (into devices_) of the system default capture
// device, found by name-matching the enumerated default.  The
// declaration of 'info' and the fallback return are elided in this
// listing.
5797 int RtApiDs :: getDefaultInputDevice(void)
5801 // Enumerate through devices to find the default input.
5802 HRESULT result = DirectSoundCaptureEnumerate((LPDSENUMCALLBACK)defaultDeviceCallback, &info);
5803 if ( FAILED(result) ) {
5804 sprintf(message_, "RtApiDs: Error performing default input device enumeration: %s.",
5805 getErrorString(result));
5806 error(RtError::WARNING);
// Match the enumerated default's name against our device list.
5810 for ( int i=0; i<nDevices_; i++ ) {
5811 if ( info.name == devices_[i].name ) return i;
// Return the index (into devices_) of the system default playback
// device, found by name-matching the enumerated default.  The
// declaration of 'info' and the fallback return are elided in this
// listing.
5817 int RtApiDs :: getDefaultOutputDevice(void)
5820 info.name[0] = '\0';
5822 // Enumerate through devices to find the default output.
5823 HRESULT result = DirectSoundEnumerate((LPDSENUMCALLBACK)defaultDeviceCallback, &info);
5824 if ( FAILED(result) ) {
5825 sprintf(message_, "RtApiDs: Error performing default output device enumeration: %s.",
5826 getErrorString(result));
5827 error(RtError::WARNING);
// Match the enumerated default's name against our device list.
5831 for ( int i=0; i<nDevices_; i++ )
5832 if ( info.name == devices_[i].name ) return i;
// Enumerate all DirectSound playback and capture devices, probe them
// via the enumeration callbacks, and populate devices_/nDevices_ with
// the valid ones.  Playback devices are enumerated first, so indices
// [0, outs) are outputs and [outs, count) are inputs.
// NOTE(review): the declarations of 'result' and 'device', the
// 'count = ins + outs' computation, and several braces are elided in
// this listing.
5837 void RtApiDs :: initialize(void)
5839 int i, ins = 0, outs = 0, count = 0;
5843 // Count DirectSound devices.
5844 result = DirectSoundEnumerate((LPDSENUMCALLBACK)deviceCountCallback, &outs);
5845 if ( FAILED(result) ) {
5846 sprintf(message_, "RtApiDs: Unable to enumerate through sound playback devices: %s.",
5847 getErrorString(result));
5848 error(RtError::DRIVER_ERROR);
5851 // Count DirectSoundCapture devices.
5852 result = DirectSoundCaptureEnumerate((LPDSENUMCALLBACK)deviceCountCallback, &ins);
5853 if ( FAILED(result) ) {
5854 sprintf(message_, "RtApiDs: Unable to enumerate through sound capture devices: %s.",
5855 getErrorString(result));
5856 error(RtError::DRIVER_ERROR);
5860 if (count == 0) return;
// One enum_info slot per device; flag which half are capture devices.
5862 std::vector<enum_info> info(count);
5863 for (i=0; i<count; i++) {
5864 if (i < outs) info[i].isInput = false;
5865 else info[i].isInput = true;
5868 // Get playback device info and check capabilities.
5869 result = DirectSoundEnumerate((LPDSENUMCALLBACK)deviceInfoCallback, &info[0]);
5870 if ( FAILED(result) ) {
5871 sprintf(message_, "RtApiDs: Unable to enumerate through sound playback devices: %s.",
5872 getErrorString(result));
5873 error(RtError::DRIVER_ERROR);
5876 // Get capture device info and check capabilities.
5877 result = DirectSoundCaptureEnumerate((LPDSENUMCALLBACK)deviceInfoCallback, &info[0]);
5878 if ( FAILED(result) ) {
5879 sprintf(message_, "RtApiDs: Unable to enumerate through sound capture devices: %s.",
5880 getErrorString(result));
5881 error(RtError::DRIVER_ERROR);
5884 // Create device structures for valid devices and write device names
5885 // to each. Devices are considered invalid if they cannot be
5886 // opened, they report < 1 supported channels, or they report no
5887 // supported data (capture only).
5889 for (i=0; i<count; i++) {
5890 if ( info[i].isValid ) {
5891 device.name.erase();
5892 device.name = info[i].name;
5893 devices_.push_back(device);
5897 nDevices_ = devices_.size();
// Probe the capture and playback capabilities of a single device.
// Fills in channel counts, supported sample rates, native data
// formats, and duplex capability on *info; sets info->probed = true
// on success.  Control flow is goto-based fallthrough:
//   capture probe -> playback_probe -> check_parameters.
// NOTE(review): the 'dsinfo' declaration, the goto labels themselves,
// the input->Release()/output->Release() calls, and many braces are
// elided in this listing -- verify against the canonical source.
5901 void RtApiDs :: probeDeviceInfo(RtApiDevice *info)
5904 dsinfo.name = info->name;
5905 dsinfo.isValid = false;
5907 // Enumerate through input devices to find the id (if it exists).
5908 HRESULT result = DirectSoundCaptureEnumerate((LPDSENUMCALLBACK)deviceIdCallback, &dsinfo);
5909 if ( FAILED(result) ) {
5910 sprintf(message_, "RtApiDs: Error performing input device id enumeration: %s.",
5911 getErrorString(result));
5912 error(RtError::DEBUG_WARNING);
5916 // Do capture probe first.
5917 if ( dsinfo.isValid == false )
5918 goto playback_probe;
5920 LPDIRECTSOUNDCAPTURE input;
5921 result = DirectSoundCaptureCreate( dsinfo.id, &input, NULL );
5922 if ( FAILED(result) ) {
5923 sprintf(message_, "RtApiDs: Could not create capture object (%s): %s.",
5924 info->name.c_str(), getErrorString(result));
5925 error(RtError::DEBUG_WARNING);
5926 goto playback_probe;
// Query the capture device's capability bits (dwChannels/dwFormats).
5930 in_caps.dwSize = sizeof(in_caps);
5931 result = input->GetCaps( &in_caps );
5932 if ( FAILED(result) ) {
5934 sprintf(message_, "RtApiDs: Could not get capture capabilities (%s): %s.",
5935 info->name.c_str(), getErrorString(result));
5936 error(RtError::DEBUG_WARNING);
5937 goto playback_probe;
5940 // Get input channel information.
5941 info->minInputChannels = 1;
5942 info->maxInputChannels = in_caps.dwChannels;
5944 // Get sample rate and format information.
5945 info->sampleRates.clear();
// Stereo capture: the WAVE_FORMAT_xSyy bits encode rate (1/2/4 =
// 11025/22050/44100 Hz) and sample width (08/16 bits).
5946 if( in_caps.dwChannels == 2 ) {
5947 if( in_caps.dwFormats & WAVE_FORMAT_1S16 ) info->nativeFormats |= RTAUDIO_SINT16;
5948 if( in_caps.dwFormats & WAVE_FORMAT_2S16 ) info->nativeFormats |= RTAUDIO_SINT16;
5949 if( in_caps.dwFormats & WAVE_FORMAT_4S16 ) info->nativeFormats |= RTAUDIO_SINT16;
5950 if( in_caps.dwFormats & WAVE_FORMAT_1S08 ) info->nativeFormats |= RTAUDIO_SINT8;
5951 if( in_caps.dwFormats & WAVE_FORMAT_2S08 ) info->nativeFormats |= RTAUDIO_SINT8;
5952 if( in_caps.dwFormats & WAVE_FORMAT_4S08 ) info->nativeFormats |= RTAUDIO_SINT8;
// Prefer 16-bit rates when available; otherwise fall back to 8-bit.
5954 if ( info->nativeFormats & RTAUDIO_SINT16 ) {
5955 if( in_caps.dwFormats & WAVE_FORMAT_1S16 ) info->sampleRates.push_back( 11025 );
5956 if( in_caps.dwFormats & WAVE_FORMAT_2S16 ) info->sampleRates.push_back( 22050 );
5957 if( in_caps.dwFormats & WAVE_FORMAT_4S16 ) info->sampleRates.push_back( 44100 );
5959 else if ( info->nativeFormats & RTAUDIO_SINT8 ) {
5960 if( in_caps.dwFormats & WAVE_FORMAT_1S08 ) info->sampleRates.push_back( 11025 );
5961 if( in_caps.dwFormats & WAVE_FORMAT_2S08 ) info->sampleRates.push_back( 22050 );
5962 if( in_caps.dwFormats & WAVE_FORMAT_4S08 ) info->sampleRates.push_back( 44100 );
// Mono capture: same scheme with the xMyy (mono) capability bits.
5965 else if ( in_caps.dwChannels == 1 ) {
5966 if( in_caps.dwFormats & WAVE_FORMAT_1M16 ) info->nativeFormats |= RTAUDIO_SINT16;
5967 if( in_caps.dwFormats & WAVE_FORMAT_2M16 ) info->nativeFormats |= RTAUDIO_SINT16;
5968 if( in_caps.dwFormats & WAVE_FORMAT_4M16 ) info->nativeFormats |= RTAUDIO_SINT16;
5969 if( in_caps.dwFormats & WAVE_FORMAT_1M08 ) info->nativeFormats |= RTAUDIO_SINT8;
5970 if( in_caps.dwFormats & WAVE_FORMAT_2M08 ) info->nativeFormats |= RTAUDIO_SINT8;
5971 if( in_caps.dwFormats & WAVE_FORMAT_4M08 ) info->nativeFormats |= RTAUDIO_SINT8;
5973 if ( info->nativeFormats & RTAUDIO_SINT16 ) {
5974 if( in_caps.dwFormats & WAVE_FORMAT_1M16 ) info->sampleRates.push_back( 11025 );
5975 if( in_caps.dwFormats & WAVE_FORMAT_2M16 ) info->sampleRates.push_back( 22050 );
5976 if( in_caps.dwFormats & WAVE_FORMAT_4M16 ) info->sampleRates.push_back( 44100 );
5978 else if ( info->nativeFormats & RTAUDIO_SINT8 ) {
5979 if( in_caps.dwFormats & WAVE_FORMAT_1M08 ) info->sampleRates.push_back( 11025 );
5980 if( in_caps.dwFormats & WAVE_FORMAT_2M08 ) info->sampleRates.push_back( 22050 );
5981 if( in_caps.dwFormats & WAVE_FORMAT_4M08 ) info->sampleRates.push_back( 44100 );
5984 else info->minInputChannels = 0; // technically, this would be an error
// ---- playback_probe: (label elided in this listing) ----
5990 dsinfo.isValid = false;
5992 // Enumerate through output devices to find the id (if it exists).
5993 result = DirectSoundEnumerate((LPDSENUMCALLBACK)deviceIdCallback, &dsinfo);
5994 if ( FAILED(result) ) {
5995 sprintf(message_, "RtApiDs: Error performing output device id enumeration: %s.",
5996 getErrorString(result));
5997 error(RtError::DEBUG_WARNING);
6001 // Now do playback probe.
6002 if ( dsinfo.isValid == false )
6003 goto check_parameters;
6005 LPDIRECTSOUND output;
6007 result = DirectSoundCreate( dsinfo.id, &output, NULL );
6008 if ( FAILED(result) ) {
6009 sprintf(message_, "RtApiDs: Could not create playback object (%s): %s.",
6010 info->name.c_str(), getErrorString(result));
6011 error(RtError::DEBUG_WARNING);
6012 goto check_parameters;
6015 out_caps.dwSize = sizeof(out_caps);
6016 result = output->GetCaps( &out_caps );
6017 if ( FAILED(result) ) {
6019 sprintf(message_, "RtApiDs: Could not get playback capabilities (%s): %s.",
6020 info->name.c_str(), getErrorString(result));
6021 error(RtError::DEBUG_WARNING);
6022 goto check_parameters;
6025 // Get output channel information.
6026 info->minOutputChannels = 1;
6027 info->maxOutputChannels = ( out_caps.dwFlags & DSCAPS_PRIMARYSTEREO ) ? 2 : 1;
6029 // Get sample rate information. Use capture device rate information
// if it exists; otherwise seed from the playback min/max range.
6031 if ( info->sampleRates.size() == 0 ) {
6032 info->sampleRates.push_back( (int) out_caps.dwMinSecondarySampleRate );
6033 if ( out_caps.dwMaxSecondarySampleRate > out_caps.dwMinSecondarySampleRate )
6034 info->sampleRates.push_back( (int) out_caps.dwMaxSecondarySampleRate );
6037 // Check input rates against output rate range. If there's an
6038 // inconsistency (such as a duplex-capable device which reports a
6039 // single output rate of 48000 Hz), we'll go with the output
6040 // rate(s) since the DirectSoundCapture API is stupid and broken.
6041 // Note that the probed sample rate values are NOT used when
6042 // opening the device. Thanks to Tue Andersen for reporting this.
6043 if ( info->sampleRates.back() < (int) out_caps.dwMinSecondarySampleRate ) {
6044 info->sampleRates.clear();
6045 info->sampleRates.push_back( (int) out_caps.dwMinSecondarySampleRate );
6046 if ( out_caps.dwMaxSecondarySampleRate > out_caps.dwMinSecondarySampleRate )
6047 info->sampleRates.push_back( (int) out_caps.dwMaxSecondarySampleRate );
// Trim probed rates to the playback-supported range (iterate the
// erase loop backwards so indices stay valid).
6050 for ( int i=info->sampleRates.size()-1; i>=0; i-- ) {
6051 if ( (unsigned int) info->sampleRates[i] > out_caps.dwMaxSecondarySampleRate )
6052 info->sampleRates.erase( info->sampleRates.begin() + i );
6054 while ( info->sampleRates.size() > 0 &&
6055 ((unsigned int) info->sampleRates[0] < out_caps.dwMinSecondarySampleRate) ) {
6056 info->sampleRates.erase( info->sampleRates.begin() );
6061 // Get format information.
6062 if ( out_caps.dwFlags & DSCAPS_PRIMARY16BIT ) info->nativeFormats |= RTAUDIO_SINT16;
6063 if ( out_caps.dwFlags & DSCAPS_PRIMARY8BIT ) info->nativeFormats |= RTAUDIO_SINT8;
// ---- check_parameters: (label elided in this listing) ----
6068 if ( info->maxInputChannels == 0 && info->maxOutputChannels == 0 ) {
6069 sprintf(message_, "RtApiDs: no reported input or output channels for device (%s).",
6070 info->name.c_str());
6071 error(RtError::DEBUG_WARNING);
6074 if ( info->sampleRates.size() == 0 || info->nativeFormats == 0 ) {
6075 sprintf(message_, "RtApiDs: no reported sample rates or data formats for device (%s).",
6076 info->name.c_str());
6077 error(RtError::DEBUG_WARNING);
6081 // Determine duplex status.
// Duplex limits are the min/max common to both directions.
6082 if (info->maxInputChannels < info->maxOutputChannels)
6083 info->maxDuplexChannels = info->maxInputChannels;
6085 info->maxDuplexChannels = info->maxOutputChannels;
6086 if (info->minInputChannels < info->minOutputChannels)
6087 info->minDuplexChannels = info->minInputChannels;
6089 info->minDuplexChannels = info->minOutputChannels;
6091 if ( info->maxDuplexChannels > 0 ) info->hasDuplexSupport = true;
6092 else info->hasDuplexSupport = false;
6094 info->probed = true;
6099 bool RtApiDs :: probeDeviceOpen( int device, StreamMode mode, int channels,
6100 int sampleRate, RtAudioFormat format,
6101 int *bufferSize, int numberOfBuffers)
6104 HWND hWnd = GetForegroundWindow();
6106 // According to a note in PortAudio, using GetDesktopWindow()
6107 // instead of GetForegroundWindow() is supposed to avoid problems
6108 // that occur when the application's window is not the foreground
6109 // window. Also, if the application window closes before the
6110 // DirectSound buffer, DirectSound can crash. However, for console
6111 // applications, no sound was produced when using GetDesktopWindow().
6117 // Check the numberOfBuffers parameter and limit the lowest value to
6118 // two. This is a judgement call and a value of two is probably too
6119 // low for capture, but it should work for playback.
6120 if (numberOfBuffers < 2)
6123 nBuffers = numberOfBuffers;
6125 // Define the wave format structure (16-bit PCM, srate, channels)
6126 WAVEFORMATEX waveFormat;
6127 ZeroMemory(&waveFormat, sizeof(WAVEFORMATEX));
6128 waveFormat.wFormatTag = WAVE_FORMAT_PCM;
6129 waveFormat.nChannels = channels;
6130 waveFormat.nSamplesPerSec = (unsigned long) sampleRate;
6132 // Determine the data format.
6133 if ( devices_[device].nativeFormats ) { // 8-bit and/or 16-bit support
6134 if ( format == RTAUDIO_SINT8 ) {
6135 if ( devices_[device].nativeFormats & RTAUDIO_SINT8 )
6136 waveFormat.wBitsPerSample = 8;
6138 waveFormat.wBitsPerSample = 16;
6141 if ( devices_[device].nativeFormats & RTAUDIO_SINT16 )
6142 waveFormat.wBitsPerSample = 16;
6144 waveFormat.wBitsPerSample = 8;
6148 sprintf(message_, "RtApiDs: no reported data formats for device (%s).",
6149 devices_[device].name.c_str());
6150 error(RtError::DEBUG_WARNING);
6154 waveFormat.nBlockAlign = waveFormat.nChannels * waveFormat.wBitsPerSample / 8;
6155 waveFormat.nAvgBytesPerSec = waveFormat.nSamplesPerSec * waveFormat.nBlockAlign;
6157 // Determine the device buffer size. By default, 32k, but we will
6158 // grow it to make allowances for very large software buffer sizes.
6159 DWORD dsBufferSize = 0;
6160 DWORD dsPointerLeadTime = 0;
6162 buffer_size = MINIMUM_DEVICE_BUFFER_SIZE; // sound cards will always *knock wood* support this
6165 void *ohandle = 0, *bhandle = 0;
6166 // strncpy( dsinfo.name, devices_[device].name.c_str(), 64 );
6167 dsinfo.name = devices_[device].name;
6168 dsinfo.isValid = false;
6169 if ( mode == OUTPUT ) {
6171 dsPointerLeadTime = numberOfBuffers * (*bufferSize) * (waveFormat.wBitsPerSample / 8) * channels;
6173 // If the user wants an even bigger buffer, increase the device buffer size accordingly.
6174 while ( dsPointerLeadTime * 2U > (DWORD)buffer_size )
6177 if ( devices_[device].maxOutputChannels < channels ) {
6178 sprintf(message_, "RtApiDs: requested channels (%d) > than supported (%d) by device (%s).",
6179 channels, devices_[device].maxOutputChannels, devices_[device].name.c_str());
6180 error(RtError::DEBUG_WARNING);
6184 // Enumerate through output devices to find the id (if it exists).
6185 result = DirectSoundEnumerate((LPDSENUMCALLBACK)deviceIdCallback, &dsinfo);
6186 if ( FAILED(result) ) {
6187 sprintf(message_, "RtApiDs: Error performing output device id enumeration: %s.",
6188 getErrorString(result));
6189 error(RtError::DEBUG_WARNING);
6193 if ( dsinfo.isValid == false ) {
6194 sprintf(message_, "RtApiDs: output device (%s) id not found!", devices_[device].name.c_str());
6195 error(RtError::DEBUG_WARNING);
6199 LPGUID id = dsinfo.id;
6200 LPDIRECTSOUND object;
6201 LPDIRECTSOUNDBUFFER buffer;
6202 DSBUFFERDESC bufferDescription;
6204 result = DirectSoundCreate( id, &object, NULL );
6205 if ( FAILED(result) ) {
6206 sprintf(message_, "RtApiDs: Could not create playback object (%s): %s.",
6207 devices_[device].name.c_str(), getErrorString(result));
6208 error(RtError::DEBUG_WARNING);
6212 // Set cooperative level to DSSCL_EXCLUSIVE
6213 result = object->SetCooperativeLevel(hWnd, DSSCL_EXCLUSIVE);
6214 if ( FAILED(result) ) {
6216 sprintf(message_, "RtApiDs: Unable to set cooperative level (%s): %s.",
6217 devices_[device].name.c_str(), getErrorString(result));
6218 error(RtError::DEBUG_WARNING);
6222 // Even though we will write to the secondary buffer, we need to
6223 // access the primary buffer to set the correct output format
6224 // (since the default is 8-bit, 22 kHz!). Setup the DS primary
6225 // buffer description.
6226 ZeroMemory(&bufferDescription, sizeof(DSBUFFERDESC));
6227 bufferDescription.dwSize = sizeof(DSBUFFERDESC);
6228 bufferDescription.dwFlags = DSBCAPS_PRIMARYBUFFER;
6229 // Obtain the primary buffer
6230 result = object->CreateSoundBuffer(&bufferDescription, &buffer, NULL);
6231 if ( FAILED(result) ) {
6233 sprintf(message_, "RtApiDs: Unable to access primary buffer (%s): %s.",
6234 devices_[device].name.c_str(), getErrorString(result));
6235 error(RtError::DEBUG_WARNING);
6239 // Set the primary DS buffer sound format.
6240 result = buffer->SetFormat(&waveFormat);
6241 if ( FAILED(result) ) {
6243 sprintf(message_, "RtApiDs: Unable to set primary buffer format (%s): %s.",
6244 devices_[device].name.c_str(), getErrorString(result));
6245 error(RtError::DEBUG_WARNING);
6249 // Setup the secondary DS buffer description.
6250 dsBufferSize = (DWORD)buffer_size;
6251 ZeroMemory(&bufferDescription, sizeof(DSBUFFERDESC));
6252 bufferDescription.dwSize = sizeof(DSBUFFERDESC);
6253 bufferDescription.dwFlags = ( DSBCAPS_STICKYFOCUS |
6254 DSBCAPS_GETCURRENTPOSITION2 |
6255 DSBCAPS_LOCHARDWARE ); // Force hardware mixing
6256 bufferDescription.dwBufferBytes = buffer_size;
6257 bufferDescription.lpwfxFormat = &waveFormat;
6259 // Try to create the secondary DS buffer. If that doesn't work,
6260 // try to use software mixing. Otherwise, there's a problem.
6261 result = object->CreateSoundBuffer(&bufferDescription, &buffer, NULL);
6262 if ( FAILED(result) ) {
6263 bufferDescription.dwFlags = ( DSBCAPS_STICKYFOCUS |
6264 DSBCAPS_GETCURRENTPOSITION2 |
6265 DSBCAPS_LOCSOFTWARE ); // Force software mixing
6266 result = object->CreateSoundBuffer(&bufferDescription, &buffer, NULL);
6267 if ( FAILED(result) ) {
6269 sprintf(message_, "RtApiDs: Unable to create secondary DS buffer (%s): %s.",
6270 devices_[device].name.c_str(), getErrorString(result));
6271 error(RtError::DEBUG_WARNING);
6276 // Get the buffer size ... might be different from what we specified.
6278 dsbcaps.dwSize = sizeof(DSBCAPS);
6279 buffer->GetCaps(&dsbcaps);
6280 buffer_size = dsbcaps.dwBufferBytes;
6282 // Lock the DS buffer
6283 result = buffer->Lock(0, buffer_size, &audioPtr, &dataLen, NULL, NULL, 0);
6284 if ( FAILED(result) ) {
6287 sprintf(message_, "RtApiDs: Unable to lock buffer (%s): %s.",
6288 devices_[device].name.c_str(), getErrorString(result));
6289 error(RtError::DEBUG_WARNING);
6293 // Zero the DS buffer
6294 ZeroMemory(audioPtr, dataLen);
6296 // Unlock the DS buffer
6297 result = buffer->Unlock(audioPtr, dataLen, NULL, 0);
6298 if ( FAILED(result) ) {
6301 sprintf(message_, "RtApiDs: Unable to unlock buffer(%s): %s.",
6302 devices_[device].name.c_str(), getErrorString(result));
6303 error(RtError::DEBUG_WARNING);
6307 ohandle = (void *) object;
6308 bhandle = (void *) buffer;
6309 stream_.nDeviceChannels[0] = channels;
6312 if ( mode == INPUT ) {
6314 if ( devices_[device].maxInputChannels < channels ) {
6315 sprintf(message_, "RtAudioDS: device (%s) does not support %d channels.", devices_[device].name.c_str(), channels);
6316 error(RtError::DEBUG_WARNING);
6320 // Enumerate through input devices to find the id (if it exists).
6321 result = DirectSoundCaptureEnumerate((LPDSENUMCALLBACK)deviceIdCallback, &dsinfo);
6322 if ( FAILED(result) ) {
6323 sprintf(message_, "RtApiDs: Error performing input device id enumeration: %s.",
6324 getErrorString(result));
6325 error(RtError::DEBUG_WARNING);
6329 if ( dsinfo.isValid == false ) {
6330 sprintf(message_, "RtAudioDS: input device (%s) id not found!", devices_[device].name.c_str());
6331 error(RtError::DEBUG_WARNING);
6335 LPGUID id = dsinfo.id;
6336 LPDIRECTSOUNDCAPTURE object;
6337 LPDIRECTSOUNDCAPTUREBUFFER buffer;
6338 DSCBUFFERDESC bufferDescription;
6340 result = DirectSoundCaptureCreate( id, &object, NULL );
6341 if ( FAILED(result) ) {
6342 sprintf(message_, "RtApiDs: Could not create capture object (%s): %s.",
6343 devices_[device].name.c_str(), getErrorString(result));
6344 error(RtError::DEBUG_WARNING);
6348 // Setup the secondary DS buffer description.
6349 dsBufferSize = buffer_size;
6350 ZeroMemory(&bufferDescription, sizeof(DSCBUFFERDESC));
6351 bufferDescription.dwSize = sizeof(DSCBUFFERDESC);
6352 bufferDescription.dwFlags = 0;
6353 bufferDescription.dwReserved = 0;
6354 bufferDescription.dwBufferBytes = buffer_size;
6355 bufferDescription.lpwfxFormat = &waveFormat;
6357 // Create the capture buffer.
6358 result = object->CreateCaptureBuffer(&bufferDescription, &buffer, NULL);
6359 if ( FAILED(result) ) {
6361 sprintf(message_, "RtApiDs: Unable to create capture buffer (%s): %s.",
6362 devices_[device].name.c_str(), getErrorString(result));
6363 error(RtError::DEBUG_WARNING);
6367 // Lock the capture buffer
6368 result = buffer->Lock(0, buffer_size, &audioPtr, &dataLen, NULL, NULL, 0);
6369 if ( FAILED(result) ) {
6372 sprintf(message_, "RtApiDs: Unable to lock capture buffer (%s): %s.",
6373 devices_[device].name.c_str(), getErrorString(result));
6374 error(RtError::DEBUG_WARNING);
6379 ZeroMemory(audioPtr, dataLen);
6381 // Unlock the buffer
6382 result = buffer->Unlock(audioPtr, dataLen, NULL, 0);
6383 if ( FAILED(result) ) {
6386 sprintf(message_, "RtApiDs: Unable to unlock capture buffer (%s): %s.",
6387 devices_[device].name.c_str(), getErrorString(result));
6388 error(RtError::DEBUG_WARNING);
6392 ohandle = (void *) object;
6393 bhandle = (void *) buffer;
6394 stream_.nDeviceChannels[1] = channels;
6397 stream_.userFormat = format;
6398 if ( waveFormat.wBitsPerSample == 8 )
6399 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
6401 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
6402 stream_.nUserChannels[mode] = channels;
6404 stream_.bufferSize = *bufferSize;
6406 // Set flags for buffer conversion
6407 stream_.doConvertBuffer[mode] = false;
6408 if (stream_.userFormat != stream_.deviceFormat[mode])
6409 stream_.doConvertBuffer[mode] = true;
6410 if (stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode])
6411 stream_.doConvertBuffer[mode] = true;
6413 // Allocate necessary internal buffers
6414 if ( stream_.nUserChannels[0] != stream_.nUserChannels[1] ) {
6417 if (stream_.nUserChannels[0] >= stream_.nUserChannels[1])
6418 buffer_bytes = stream_.nUserChannels[0];
6420 buffer_bytes = stream_.nUserChannels[1];
6422 buffer_bytes *= *bufferSize * formatBytes(stream_.userFormat);
6423 if (stream_.userBuffer) free(stream_.userBuffer);
6424 stream_.userBuffer = (char *) calloc(buffer_bytes, 1);
6425 if (stream_.userBuffer == NULL) {
6426 sprintf(message_, "RtApiDs: error allocating user buffer memory (%s).",
6427 devices_[device].name.c_str());
6432 if ( stream_.doConvertBuffer[mode] ) {
6435 bool makeBuffer = true;
6436 if ( mode == OUTPUT )
6437 buffer_bytes = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
6438 else { // mode == INPUT
6439 buffer_bytes = stream_.nDeviceChannels[1] * formatBytes(stream_.deviceFormat[1]);
6440 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
6441 long bytes_out = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
6442 if ( buffer_bytes < bytes_out ) makeBuffer = false;
6447 buffer_bytes *= *bufferSize;
6448 if (stream_.deviceBuffer) free(stream_.deviceBuffer);
6449 stream_.deviceBuffer = (char *) calloc(buffer_bytes, 1);
6450 if (stream_.deviceBuffer == NULL) {
6451 sprintf(message_, "RtApiDs: error allocating device buffer memory (%s).",
6452 devices_[device].name.c_str());
6458 // Allocate our DsHandle structures for the stream.
6460 if ( stream_.apiHandle == 0 ) {
6461 handles = (DsHandle *) calloc(2, sizeof(DsHandle));
6462 if ( handles == NULL ) {
6463 sprintf(message_, "RtApiDs: Error allocating DsHandle memory (%s).",
6464 devices_[device].name.c_str());
6467 handles[0].object = 0;
6468 handles[1].object = 0;
6469 stream_.apiHandle = (void *) handles;
6472 handles = (DsHandle *) stream_.apiHandle;
6473 handles[mode].object = ohandle;
6474 handles[mode].buffer = bhandle;
6475 handles[mode].dsBufferSize = dsBufferSize;
6476 handles[mode].dsPointerLeadTime = dsPointerLeadTime;
6478 stream_.device[mode] = device;
6479 stream_.state = STREAM_STOPPED;
6480 if ( stream_.mode == OUTPUT && mode == INPUT )
6481 // We had already set up an output stream.
6482 stream_.mode = DUPLEX;
6484 stream_.mode = mode;
6485 stream_.nBuffers = nBuffers;
6486 stream_.sampleRate = sampleRate;
6488 // Setup the buffer conversion information structure.
6489 if ( stream_.doConvertBuffer[mode] ) {
6490 if (mode == INPUT) { // convert device to user buffer
6491 stream_.convertInfo[mode].inJump = stream_.nDeviceChannels[1];
6492 stream_.convertInfo[mode].outJump = stream_.nUserChannels[1];
6493 stream_.convertInfo[mode].inFormat = stream_.deviceFormat[1];
6494 stream_.convertInfo[mode].outFormat = stream_.userFormat;
6496 else { // convert user to device buffer
6497 stream_.convertInfo[mode].inJump = stream_.nUserChannels[0];
6498 stream_.convertInfo[mode].outJump = stream_.nDeviceChannels[0];
6499 stream_.convertInfo[mode].inFormat = stream_.userFormat;
6500 stream_.convertInfo[mode].outFormat = stream_.deviceFormat[0];
6503 if ( stream_.convertInfo[mode].inJump < stream_.convertInfo[mode].outJump )
6504 stream_.convertInfo[mode].channels = stream_.convertInfo[mode].inJump;
6506 stream_.convertInfo[mode].channels = stream_.convertInfo[mode].outJump;
6508 // Set up the interleave/deinterleave offsets.
6509 if ( mode == INPUT && stream_.deInterleave[1] ) {
6510 for (int k=0; k<stream_.convertInfo[mode].channels; k++) {
6511 stream_.convertInfo[mode].inOffset.push_back( k * stream_.bufferSize );
6512 stream_.convertInfo[mode].outOffset.push_back( k );
6513 stream_.convertInfo[mode].inJump = 1;
6516 else if (mode == OUTPUT && stream_.deInterleave[0]) {
6517 for (int k=0; k<stream_.convertInfo[mode].channels; k++) {
6518 stream_.convertInfo[mode].inOffset.push_back( k );
6519 stream_.convertInfo[mode].outOffset.push_back( k * stream_.bufferSize );
6520 stream_.convertInfo[mode].outJump = 1;
6524 for (int k=0; k<stream_.convertInfo[mode].channels; k++) {
6525 stream_.convertInfo[mode].inOffset.push_back( k );
6526 stream_.convertInfo[mode].outOffset.push_back( k );
6535 if (handles[0].object) {
6536 LPDIRECTSOUND object = (LPDIRECTSOUND) handles[0].object;
6537 LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handles[0].buffer;
6538 if (buffer) buffer->Release();
6541 if (handles[1].object) {
6542 LPDIRECTSOUNDCAPTURE object = (LPDIRECTSOUNDCAPTURE) handles[1].object;
6543 LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handles[1].buffer;
6544 if (buffer) buffer->Release();
6548 stream_.apiHandle = 0;
6551 if (stream_.userBuffer) {
6552 free(stream_.userBuffer);
6553 stream_.userBuffer = 0;
6556 error(RtError::DEBUG_WARNING);
void RtApiDs :: setStreamCallback(RtAudioCallback callback, void *userData)
  // Install a user callback for this stream and spawn the Windows thread
  // (callbackHandler) that will drive it.  Only one callback may be set
  // at a time; a second attempt produces a WARNING and is ignored.
  CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
  if ( info->usingCallback ) {
    sprintf(message_, "RtApiDs: A callback is already set for this stream!");
    error(RtError::WARNING);
  // Record the callback, its user data, and this object so the thread
  // routine can reach back into the class instance.
  info->callback = (void *) callback;
  info->userData = userData;
  info->usingCallback = true;
  info->object = (void *) this;
  // _beginthreadex (not CreateThread) so the CRT is initialized per-thread;
  // returns 0 on failure.
  info->thread = _beginthreadex(NULL, 0, &callbackHandler,
                                &stream_.callbackInfo, 0, &thread_id);
  if (info->thread == 0) {
    info->usingCallback = false;
    sprintf(message_, "RtApiDs: error starting callback thread!");
    error(RtError::THREAD_ERROR);
  // When spawning multiple threads in quick succession, it appears to be
  // necessary to wait a bit for each to initialize ... another windoism!
void RtApiDs :: cancelStreamCallback()
  // Remove the user callback and tear down the callback thread: stop the
  // stream if it is running, then (under the stream mutex) clear the
  // usingCallback flag so the thread's loop exits, join it, and close the
  // thread handle.
  if (stream_.callbackInfo.usingCallback) {
    if (stream_.state == STREAM_RUNNING)
    MUTEX_LOCK(&stream_.mutex);
    // Clearing the flag first makes the callback thread fall out of its loop.
    stream_.callbackInfo.usingCallback = false;
    WaitForSingleObject( (HANDLE)stream_.callbackInfo.thread, INFINITE );
    CloseHandle( (HANDLE)stream_.callbackInfo.thread );
    stream_.callbackInfo.thread = 0;
    stream_.callbackInfo.callback = NULL;
    stream_.callbackInfo.userData = NULL;
    MUTEX_UNLOCK(&stream_.mutex);
void RtApiDs :: closeStream()
  // We don't want an exception to be thrown here because this
  // function is called by our class destructor.  So, do our own
  if ( stream_.mode == UNINITIALIZED ) {
    sprintf(message_, "RtApiDs::closeStream(): no open stream to close!");
    error(RtError::WARNING);
  // Shut down the callback thread, if any, before releasing DS objects.
  if (stream_.callbackInfo.usingCallback) {
    stream_.callbackInfo.usingCallback = false;
    WaitForSingleObject( (HANDLE)stream_.callbackInfo.thread, INFINITE );
    CloseHandle( (HANDLE)stream_.callbackInfo.thread );
  // Release the DirectSound playback (index 0) and capture (index 1)
  // COM objects and buffers held in our DsHandle pair.
  DsHandle *handles = (DsHandle *) stream_.apiHandle;
  if (handles[0].object) {
    LPDIRECTSOUND object = (LPDIRECTSOUND) handles[0].object;
    LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handles[0].buffer;
  if (handles[1].object) {
    LPDIRECTSOUNDCAPTURE object = (LPDIRECTSOUNDCAPTURE) handles[1].object;
    LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handles[1].buffer;
  stream_.apiHandle = 0;
  // Free the internal user and device conversion buffers.
  if (stream_.userBuffer) {
    free(stream_.userBuffer);
    stream_.userBuffer = 0;
  if (stream_.deviceBuffer) {
    free(stream_.deviceBuffer);
    stream_.deviceBuffer = 0;
  // Mark the stream closed so a second closeStream() call warns cleanly.
  stream_.mode = UNINITIALIZED;
void RtApiDs :: startStream()
  // Start playback and/or capture rolling.  No-op if already running.
  if (stream_.state == STREAM_RUNNING) return;

  // Increase scheduler frequency on lesser windows (a side-effect of
  // increasing timer accuracy).  On greater windows (Win2K or later),
  // this is already in effect.
  MUTEX_LOCK(&stream_.mutex);

  DsHandle *handles = (DsHandle *) stream_.apiHandle;

  // Reset the per-run statistics block.
  memset(&statistics,0,sizeof(statistics));
  statistics.sampleRate = stream_.sampleRate;
  statistics.writeDeviceBufferLeadBytes = handles[0].dsPointerLeadTime ;
  // buffersRolling is cleared so tickStream() re-synchronizes the duplex
  // read/write pointers once both devices are actually moving.
  buffersRolling = false;
  duplexPrerollBytes = 0;

  if (stream_.mode == DUPLEX) {
    // 0.5 seconds of silence in DUPLEX mode while the devices spin up and synchronize.
    duplexPrerollBytes = (int)(0.5*stream_.sampleRate*formatBytes( stream_.deviceFormat[1])*stream_.nDeviceChannels[1]);

#ifdef GENERATE_DEBUG_LOG
  currentDebugLogEntry = 0;

  // Kick off the playback buffer in looping mode.
  if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
    statistics.outputFrameSize = formatBytes( stream_.deviceFormat[0])
      *stream_.nDeviceChannels[0];
    LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handles[0].buffer;
    result = buffer->Play( 0, 0, DSBPLAY_LOOPING );
    if ( FAILED(result) ) {
      sprintf(message_, "RtApiDs: Unable to start buffer (%s): %s.",
              devices_[stream_.device[0]].name.c_str(), getErrorString(result));
      error(RtError::DRIVER_ERROR);
  // Kick off the capture buffer in looping mode.
  if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
    statistics.inputFrameSize = formatBytes( stream_.deviceFormat[1])
      *stream_.nDeviceChannels[1];
    LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handles[1].buffer;
    result = buffer->Start(DSCBSTART_LOOPING );
    if ( FAILED(result) ) {
      sprintf(message_, "RtApiDs: Unable to start capture buffer (%s): %s.",
              devices_[stream_.device[1]].name.c_str(), getErrorString(result));
      error(RtError::DRIVER_ERROR);
  stream_.state = STREAM_RUNNING;

  MUTEX_UNLOCK(&stream_.mutex);
6729 void RtApiDs :: stopStream()
6732 if (stream_.state == STREAM_STOPPED) return;
6734 // Change the state before the lock to improve shutdown response
6735 // when using a callback.
6736 stream_.state = STREAM_STOPPED;
6737 MUTEX_LOCK(&stream_.mutex);
6739 timeEndPeriod(1); // revert to normal scheduler frequency on lesser windows.
6741 #ifdef GENERATE_DEBUG_LOG
6742 // Write the timing log to a .TSV file for analysis in Excel.
6743 unlink("c:/rtaudiolog.txt");
6744 std::ofstream os("c:/rtaudiolog.txt");
6745 os << "writeTime\treadDelay\tnextWritePointer\tnextReadPointer\tcurrentWritePointer\tsafeWritePointer\tcurrentReadPointer\tsafeReadPointer" << std::endl;
6746 for (int i = 0; i < currentDebugLogEntry ; ++i) {
6747 TTickRecord &r = debugLog[i];
6748 os << r.writeTime-debugLog[0].writeTime << "\t" << (r.readTime-r.writeTime) << "\t"
6749 << r.nextWritePointer % BUFFER_SIZE << "\t" << r.nextReadPointer % BUFFER_SIZE
6750 << "\t" << r.currentWritePointer % BUFFER_SIZE << "\t" << r.safeWritePointer % BUFFER_SIZE
6751 << "\t" << r.currentReadPointer % BUFFER_SIZE << "\t" << r.safeReadPointer % BUFFER_SIZE << std::endl;
6755 // There is no specific DirectSound API call to "drain" a buffer
6756 // before stopping. We can hack this for playback by writing
6757 // buffers of zeroes over the entire buffer. For capture, the
6758 // concept is less clear so we'll repeat what we do in the
6759 // abortStream() case.
6762 LPVOID buffer1 = NULL;
6763 LPVOID buffer2 = NULL;
6764 DWORD bufferSize1 = 0;
6765 DWORD bufferSize2 = 0;
6766 DsHandle *handles = (DsHandle *) stream_.apiHandle;
6767 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
6769 DWORD currentPos, safePos;
6770 long buffer_bytes = stream_.bufferSize * stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
6772 LPDIRECTSOUNDBUFFER dsBuffer = (LPDIRECTSOUNDBUFFER) handles[0].buffer;
6773 DWORD nextWritePos = handles[0].bufferPointer;
6774 dsBufferSize = handles[0].dsBufferSize;
6775 DWORD dsBytesWritten = 0;
6777 // Write zeroes for at least dsBufferSize bytes.
6778 while ( dsBytesWritten < dsBufferSize ) {
6780 // Find out where the read and "safe write" pointers are.
6781 result = dsBuffer->GetCurrentPosition( ¤tPos, &safePos );
6782 if ( FAILED(result) ) {
6783 sprintf(message_, "RtApiDs: Unable to get current position (%s): %s.",
6784 devices_[stream_.device[0]].name.c_str(), getErrorString(result));
6785 error(RtError::DRIVER_ERROR);
6788 // Chase nextWritePosition.
6789 if ( currentPos < nextWritePos ) currentPos += dsBufferSize; // unwrap offset
6790 DWORD endWrite = nextWritePos + buffer_bytes;
6792 // Check whether the entire write region is behind the play pointer.
6793 while ( currentPos < endWrite ) {
6794 double millis = (endWrite - currentPos) * 900.0;
6795 millis /= ( formatBytes(stream_.deviceFormat[0]) * stream_.nDeviceChannels[0] *stream_.sampleRate);
6796 if ( millis < 1.0 ) millis = 1.0;
6797 Sleep( (DWORD) millis );
6799 // Wake up, find out where we are now
6800 result = dsBuffer->GetCurrentPosition( ¤tPos, &safePos );
6801 if ( FAILED(result) ) {
6802 sprintf(message_, "RtApiDs: Unable to get current position (%s): %s.",
6803 devices_[stream_.device[0]].name.c_str(), getErrorString(result));
6804 error(RtError::DRIVER_ERROR);
6807 if ( currentPos < (DWORD)nextWritePos ) currentPos += dsBufferSize; // unwrap offset
6810 // Lock free space in the buffer
6811 result = dsBuffer->Lock( nextWritePos, buffer_bytes, &buffer1,
6812 &bufferSize1, &buffer2, &bufferSize2, 0);
6813 if ( FAILED(result) ) {
6814 sprintf(message_, "RtApiDs: Unable to lock buffer during playback (%s): %s.",
6815 devices_[stream_.device[0]].name.c_str(), getErrorString(result));
6816 error(RtError::DRIVER_ERROR);
6819 // Zero the free space
6820 ZeroMemory( buffer1, bufferSize1 );
6821 if (buffer2 != NULL) ZeroMemory( buffer2, bufferSize2 );
6823 // Update our buffer offset and unlock sound buffer
6824 dsBuffer->Unlock( buffer1, bufferSize1, buffer2, bufferSize2 );
6825 if ( FAILED(result) ) {
6826 sprintf(message_, "RtApiDs: Unable to unlock buffer during playback (%s): %s.",
6827 devices_[stream_.device[0]].name.c_str(), getErrorString(result));
6828 error(RtError::DRIVER_ERROR);
6830 nextWritePos = (nextWritePos + bufferSize1 + bufferSize2) % dsBufferSize;
6831 handles[0].bufferPointer = nextWritePos;
6832 dsBytesWritten += buffer_bytes;
6835 // OK, now stop the buffer.
6836 result = dsBuffer->Stop();
6837 if ( FAILED(result) ) {
6838 sprintf(message_, "RtApiDs: Unable to stop buffer (%s): %s",
6839 devices_[stream_.device[0]].name.c_str(), getErrorString(result));
6840 error(RtError::DRIVER_ERROR);
6843 // If we play again, start at the beginning of the buffer.
6844 handles[0].bufferPointer = 0;
6847 if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
6849 LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handles[1].buffer;
6853 result = buffer->Stop();
6854 if ( FAILED(result) ) {
6855 sprintf(message_, "RtApiDs: Unable to stop capture buffer (%s): %s",
6856 devices_[stream_.device[1]].name.c_str(), getErrorString(result));
6857 error(RtError::DRIVER_ERROR);
6860 dsBufferSize = handles[1].dsBufferSize;
6862 // Lock the buffer and clear it so that if we start to play again,
6863 // we won't have old data playing.
6864 result = buffer->Lock(0, dsBufferSize, &buffer1, &bufferSize1, NULL, NULL, 0);
6865 if ( FAILED(result) ) {
6866 sprintf(message_, "RtApiDs: Unable to lock capture buffer (%s): %s.",
6867 devices_[stream_.device[1]].name.c_str(), getErrorString(result));
6868 error(RtError::DRIVER_ERROR);
6871 // Zero the DS buffer
6872 ZeroMemory(buffer1, bufferSize1);
6874 // Unlock the DS buffer
6875 result = buffer->Unlock(buffer1, bufferSize1, NULL, 0);
6876 if ( FAILED(result) ) {
6877 sprintf(message_, "RtApiDs: Unable to unlock capture buffer (%s): %s.",
6878 devices_[stream_.device[1]].name.c_str(), getErrorString(result));
6879 error(RtError::DRIVER_ERROR);
6882 // If we start recording again, we must begin at beginning of buffer.
6883 handles[1].bufferPointer = 0;
6886 MUTEX_UNLOCK(&stream_.mutex);
void RtApiDs :: abortStream()
  // Immediately stop the stream without draining: Stop() each DS buffer,
  // zero its entire contents, and reset our buffer pointers so a restart
  // begins at offset 0 with no stale audio.
  if (stream_.state == STREAM_STOPPED) return;

  // Change the state before the lock to improve shutdown response
  // when using a callback.
  stream_.state = STREAM_STOPPED;
  MUTEX_LOCK(&stream_.mutex);

  timeEndPeriod(1); // revert to normal scheduler frequency on lesser windows.

  DsHandle *handles = (DsHandle *) stream_.apiHandle;
  if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
    LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handles[0].buffer;
    result = buffer->Stop();
    if ( FAILED(result) ) {
      sprintf(message_, "RtApiDs: Unable to stop buffer (%s): %s",
              devices_[stream_.device[0]].name.c_str(), getErrorString(result));
      error(RtError::DRIVER_ERROR);
    dsBufferSize = handles[0].dsBufferSize;

    // Lock the buffer and clear it so that if we start to play again,
    // we won't have old data playing.
    result = buffer->Lock(0, dsBufferSize, &audioPtr, &dataLen, NULL, NULL, 0);
    if ( FAILED(result) ) {
      sprintf(message_, "RtApiDs: Unable to lock buffer (%s): %s.",
              devices_[stream_.device[0]].name.c_str(), getErrorString(result));
      error(RtError::DRIVER_ERROR);
    // Zero the DS buffer
    ZeroMemory(audioPtr, dataLen);

    // Unlock the DS buffer
    result = buffer->Unlock(audioPtr, dataLen, NULL, 0);
    if ( FAILED(result) ) {
      sprintf(message_, "RtApiDs: Unable to unlock buffer (%s): %s.",
              devices_[stream_.device[0]].name.c_str(), getErrorString(result));
      error(RtError::DRIVER_ERROR);
    // If we start playing again, we must begin at beginning of buffer.
    handles[0].bufferPointer = 0;

  // Same treatment for the capture side (index 1).
  if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
    LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handles[1].buffer;
    result = buffer->Stop();
    if ( FAILED(result) ) {
      sprintf(message_, "RtApiDs: Unable to stop capture buffer (%s): %s",
              devices_[stream_.device[1]].name.c_str(), getErrorString(result));
      error(RtError::DRIVER_ERROR);
    dsBufferSize = handles[1].dsBufferSize;

    // Lock the buffer and clear it so that if we start to play again,
    // we won't have old data playing.
    result = buffer->Lock(0, dsBufferSize, &audioPtr, &dataLen, NULL, NULL, 0);
    if ( FAILED(result) ) {
      sprintf(message_, "RtApiDs: Unable to lock capture buffer (%s): %s.",
              devices_[stream_.device[1]].name.c_str(), getErrorString(result));
      error(RtError::DRIVER_ERROR);
    // Zero the DS buffer
    ZeroMemory(audioPtr, dataLen);

    // Unlock the DS buffer
    result = buffer->Unlock(audioPtr, dataLen, NULL, 0);
    if ( FAILED(result) ) {
      sprintf(message_, "RtApiDs: Unable to unlock capture buffer (%s): %s.",
              devices_[stream_.device[1]].name.c_str(), getErrorString(result));
      error(RtError::DRIVER_ERROR);
    // If we start recording again, we must begin at beginning of buffer.
    handles[1].bufferPointer = 0;

  MUTEX_UNLOCK(&stream_.mutex);
6983 int RtApiDs :: streamWillBlock()
6986 if (stream_.state == STREAM_STOPPED) return 0;
6988 MUTEX_LOCK(&stream_.mutex);
6993 DWORD currentPos, safePos;
6995 DsHandle *handles = (DsHandle *) stream_.apiHandle;
6996 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
6998 LPDIRECTSOUNDBUFFER dsBuffer = (LPDIRECTSOUNDBUFFER) handles[0].buffer;
6999 UINT nextWritePos = handles[0].bufferPointer;
7000 channels = stream_.nDeviceChannels[0];
7001 DWORD dsBufferSize = handles[0].dsBufferSize;
7003 // Find out where the read and "safe write" pointers are.
7004 result = dsBuffer->GetCurrentPosition(¤tPos, &safePos);
7005 if ( FAILED(result) ) {
7006 sprintf(message_, "RtApiDs: Unable to get current position (%s): %s.",
7007 devices_[stream_.device[0]].name.c_str(), getErrorString(result));
7008 error(RtError::DRIVER_ERROR);
7011 DWORD leadPos = safePos + handles[0].dsPointerLeadTime;
7012 if (leadPos > dsBufferSize) {
7013 leadPos -= dsBufferSize;
7015 if ( leadPos < nextWritePos ) leadPos += dsBufferSize; // unwrap offset
7017 frames = (leadPos - nextWritePos);
7018 frames /= channels * formatBytes(stream_.deviceFormat[0]);
7021 if (stream_.mode == INPUT ) {
7022 // note that we don't block on DUPLEX input anymore. We run lockstep with the write pointer instead.
7024 LPDIRECTSOUNDCAPTUREBUFFER dsBuffer = (LPDIRECTSOUNDCAPTUREBUFFER) handles[1].buffer;
7025 UINT nextReadPos = handles[1].bufferPointer;
7026 channels = stream_.nDeviceChannels[1];
7027 DWORD dsBufferSize = handles[1].dsBufferSize;
7029 // Find out where the write and "safe read" pointers are.
7030 result = dsBuffer->GetCurrentPosition(¤tPos, &safePos);
7031 if ( FAILED(result) ) {
7032 sprintf(message_, "RtApiDs: Unable to get current capture position (%s): %s.",
7033 devices_[stream_.device[1]].name.c_str(), getErrorString(result));
7034 error(RtError::DRIVER_ERROR);
7037 if ( safePos < (DWORD)nextReadPos ) safePos += dsBufferSize; // unwrap offset
7039 frames = (int)(safePos - nextReadPos);
7040 frames /= channels * formatBytes(stream_.deviceFormat[1]);
7043 frames = stream_.bufferSize - frames;
7044 if (frames < 0) frames = 0;
7046 MUTEX_UNLOCK(&stream_.mutex);
7050 void RtApiDs :: tickStream()
7055 if (stream_.state == STREAM_STOPPED) {
7056 if (stream_.callbackInfo.usingCallback) Sleep(50); // sleep 50 milliseconds
7059 else if (stream_.callbackInfo.usingCallback) {
7060 RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;
7061 stopStream = callback(stream_.userBuffer, stream_.bufferSize, stream_.callbackInfo.userData);
7064 MUTEX_LOCK(&stream_.mutex);
7066 // The state might change while waiting on a mutex.
7067 if (stream_.state == STREAM_STOPPED) {
7068 MUTEX_UNLOCK(&stream_.mutex);
7073 DWORD currentWritePos, safeWritePos;
7074 DWORD currentReadPos, safeReadPos;
7078 #ifdef GENERATE_DEBUG_LOG
7079 DWORD writeTime, readTime;
7082 LPVOID buffer1 = NULL;
7083 LPVOID buffer2 = NULL;
7084 DWORD bufferSize1 = 0;
7085 DWORD bufferSize2 = 0;
7089 DsHandle *handles = (DsHandle *) stream_.apiHandle;
7091 if (stream_.mode == DUPLEX && !buffersRolling) {
7092 assert(handles[0].dsBufferSize == handles[1].dsBufferSize);
7094 // It takes a while for the devices to get rolling. As a result,
7095 // there's no guarantee that the capture and write device pointers
7096 // will move in lockstep. Wait here for both devices to start
7097 // rolling, and then set our buffer pointers accordingly.
7098 // e.g. Crystal Drivers: the capture buffer starts up 5700 to 9600
7099 // bytes later than the write buffer.
7101 // Stub: a serious risk of having a pre-emptive scheduling round
7102 // take place between the two GetCurrentPosition calls... but I'm
7103 // really not sure how to solve the problem. Temporarily boost to
7104 // Realtime priority, maybe; but I'm not sure what priority the
7105 // directsound service threads run at. We *should* be roughly
7106 // within a ms or so of correct.
7108 LPDIRECTSOUNDBUFFER dsWriteBuffer = (LPDIRECTSOUNDBUFFER) handles[0].buffer;
7109 LPDIRECTSOUNDCAPTUREBUFFER dsCaptureBuffer = (LPDIRECTSOUNDCAPTUREBUFFER) handles[1].buffer;
7111 DWORD initialWritePos, initialSafeWritePos;
7112 DWORD initialReadPos, initialSafeReadPos;;
7114 result = dsWriteBuffer->GetCurrentPosition(&initialWritePos, &initialSafeWritePos);
7115 if ( FAILED(result) ) {
7116 sprintf(message_, "RtApiDs: Unable to get current position (%s): %s.",
7117 devices_[stream_.device[0]].name.c_str(), getErrorString(result));
7118 error(RtError::DRIVER_ERROR);
7120 result = dsCaptureBuffer->GetCurrentPosition(&initialReadPos, &initialSafeReadPos);
7121 if ( FAILED(result) ) {
7122 sprintf(message_, "RtApiDs: Unable to get current capture position (%s): %s.",
7123 devices_[stream_.device[1]].name.c_str(), getErrorString(result));
7124 error(RtError::DRIVER_ERROR);
7127 result = dsWriteBuffer->GetCurrentPosition(¤tWritePos, &safeWritePos);
7128 if ( FAILED(result) ) {
7129 sprintf(message_, "RtApiDs: Unable to get current position (%s): %s.",
7130 devices_[stream_.device[0]].name.c_str(), getErrorString(result));
7131 error(RtError::DRIVER_ERROR);
7133 result = dsCaptureBuffer->GetCurrentPosition(¤tReadPos, &safeReadPos);
7134 if ( FAILED(result) ) {
7135 sprintf(message_, "RtApiDs: Unable to get current capture position (%s): %s.",
7136 devices_[stream_.device[1]].name.c_str(), getErrorString(result));
7137 error(RtError::DRIVER_ERROR);
7139 if (safeWritePos != initialSafeWritePos && safeReadPos != initialSafeReadPos) {
7145 assert( handles[0].dsBufferSize == handles[1].dsBufferSize );
7147 buffersRolling = true;
7148 handles[0].bufferPointer = (safeWritePos + handles[0].dsPointerLeadTime);
7149 handles[1].bufferPointer = safeReadPos;
7153 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
7155 LPDIRECTSOUNDBUFFER dsBuffer = (LPDIRECTSOUNDBUFFER) handles[0].buffer;
7157 // Setup parameters and do buffer conversion if necessary.
7158 if (stream_.doConvertBuffer[0]) {
7159 buffer = stream_.deviceBuffer;
7160 convertBuffer( buffer, stream_.userBuffer, stream_.convertInfo[0] );
7161 buffer_bytes = stream_.bufferSize * stream_.nDeviceChannels[0];
7162 buffer_bytes *= formatBytes(stream_.deviceFormat[0]);
7165 buffer = stream_.userBuffer;
7166 buffer_bytes = stream_.bufferSize * stream_.nUserChannels[0];
7167 buffer_bytes *= formatBytes(stream_.userFormat);
7170 // No byte swapping necessary in DirectSound implementation.
7172 // Ahhh ... windoze. 16-bit data is signed but 8-bit data is
7173 // unsigned. So, we need to convert our signed 8-bit data here to
7175 if ( stream_.deviceFormat[0] == RTAUDIO_SINT8 )
7176 for ( int i=0; i<buffer_bytes; i++ ) buffer[i] = (unsigned char) (buffer[i] + 128);
7178 DWORD dsBufferSize = handles[0].dsBufferSize;
7179 nextWritePos = handles[0].bufferPointer;
7183 // Find out where the read and "safe write" pointers are.
7184 result = dsBuffer->GetCurrentPosition(¤tWritePos, &safeWritePos);
7185 if ( FAILED(result) ) {
7186 sprintf(message_, "RtApiDs: Unable to get current position (%s): %s.",
7187 devices_[stream_.device[0]].name.c_str(), getErrorString(result));
7188 error(RtError::DRIVER_ERROR);
7191 leadPos = safeWritePos + handles[0].dsPointerLeadTime;
7192 if ( leadPos > dsBufferSize ) leadPos -= dsBufferSize;
7193 if ( leadPos < nextWritePos ) leadPos += dsBufferSize; // unwrap offset
7194 endWrite = nextWritePos + buffer_bytes;
7196 // Check whether the entire write region is behind the play pointer.
7197 if ( leadPos >= endWrite ) break;
7199 // If we are here, then we must wait until the play pointer gets
7200 // beyond the write region. The approach here is to use the
7201 // Sleep() function to suspend operation until safePos catches
7202 // up. Calculate number of milliseconds to wait as:
7203 // time = distance * (milliseconds/second) * fudgefactor /
7204 // ((bytes/sample) * (samples/second))
7205 // A "fudgefactor" less than 1 is used because it was found
7206 // that sleeping too long was MUCH worse than sleeping for
7207 // several shorter periods.
7208 double millis = (endWrite - leadPos) * 900.0;
7209 millis /= ( formatBytes(stream_.deviceFormat[0]) *stream_.nDeviceChannels[0]* stream_.sampleRate);
7210 if ( millis < 1.0 ) millis = 1.0;
7211 if ( millis > 50.0 ) {
7212 static int nOverruns = 0;
7215 Sleep( (DWORD) millis );
7218 #ifdef GENERATE_DEBUG_LOG
7219 writeTime = timeGetTime();
7222 if (statistics.writeDeviceSafeLeadBytes < dsPointerDifference(safeWritePos,currentWritePos,handles[0].dsBufferSize)) {
7223 statistics.writeDeviceSafeLeadBytes = dsPointerDifference(safeWritePos,currentWritePos,handles[0].dsBufferSize);
7226 if ( dsPointerBetween( nextWritePos, safeWritePos, currentWritePos, dsBufferSize )
7227 || dsPointerBetween( endWrite, safeWritePos, currentWritePos, dsBufferSize ) ) {
7228 // We've strayed into the forbidden zone ... resync the read pointer.
7229 ++statistics.numberOfWriteUnderruns;
7230 nextWritePos = safeWritePos + handles[0].dsPointerLeadTime-buffer_bytes+dsBufferSize;
7231 while (nextWritePos >= dsBufferSize) nextWritePos-= dsBufferSize;
7232 handles[0].bufferPointer = nextWritePos;
7233 endWrite = nextWritePos + buffer_bytes;
7236 // Lock free space in the buffer
7237 result = dsBuffer->Lock( nextWritePos, buffer_bytes, &buffer1,
7238 &bufferSize1, &buffer2, &bufferSize2, 0 );
7239 if ( FAILED(result) ) {
7240 sprintf(message_, "RtApiDs: Unable to lock buffer during playback (%s): %s.",
7241 devices_[stream_.device[0]].name.c_str(), getErrorString(result));
7242 error(RtError::DRIVER_ERROR);
7245 // Copy our buffer into the DS buffer
7246 CopyMemory(buffer1, buffer, bufferSize1);
7247 if (buffer2 != NULL) CopyMemory(buffer2, buffer+bufferSize1, bufferSize2);
7249 // Update our buffer offset and unlock sound buffer
7250 dsBuffer->Unlock( buffer1, bufferSize1, buffer2, bufferSize2 );
7251 if ( FAILED(result) ) {
7252 sprintf(message_, "RtApiDs: Unable to unlock buffer during playback (%s): %s.",
7253 devices_[stream_.device[0]].name.c_str(), getErrorString(result));
7254 error(RtError::DRIVER_ERROR);
7256 nextWritePos = (nextWritePos + bufferSize1 + bufferSize2) % dsBufferSize;
7257 handles[0].bufferPointer = nextWritePos;
7260 if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
7262 // Setup parameters.
7263 if (stream_.doConvertBuffer[1]) {
7264 buffer = stream_.deviceBuffer;
7265 buffer_bytes = stream_.bufferSize * stream_.nDeviceChannels[1];
7266 buffer_bytes *= formatBytes(stream_.deviceFormat[1]);
7269 buffer = stream_.userBuffer;
7270 buffer_bytes = stream_.bufferSize * stream_.nUserChannels[1];
7271 buffer_bytes *= formatBytes(stream_.userFormat);
7273 LPDIRECTSOUNDCAPTUREBUFFER dsBuffer = (LPDIRECTSOUNDCAPTUREBUFFER) handles[1].buffer;
7274 long nextReadPos = handles[1].bufferPointer;
7275 DWORD dsBufferSize = handles[1].dsBufferSize;
7277 // Find out where the write and "safe read" pointers are.
7278 result = dsBuffer->GetCurrentPosition(¤tReadPos, &safeReadPos);
7279 if ( FAILED(result) ) {
7280 sprintf(message_, "RtApiDs: Unable to get current capture position (%s): %s.",
7281 devices_[stream_.device[1]].name.c_str(), getErrorString(result));
7282 error(RtError::DRIVER_ERROR);
7285 if ( safeReadPos < (DWORD)nextReadPos ) safeReadPos += dsBufferSize; // unwrap offset
7286 DWORD endRead = nextReadPos + buffer_bytes;
7288 // Handling depends on whether we are INPUT or DUPLEX.
7289 // If we're in INPUT mode then waiting is a good thing. If we're in DUPLEX mode,
7290 // then a wait here will drag the write pointers into the forbidden zone.
7292 // In DUPLEX mode, rather than wait, we will back off the read pointer until
7293 // it's in a safe position. This causes dropouts, but it seems to be the only
7294 // practical way to sync up the read and write pointers reliably, given the
7295 // the very complex relationship between phase and increment of the read and write
7298 // In order to minimize audible dropouts in DUPLEX mode, we will
7299 // provide a pre-roll period of 0.5 seconds in which we return
7300 // zeros from the read buffer while the pointers sync up.
7302 if (stream_.mode == DUPLEX)
7304 if (safeReadPos < endRead)
7306 if (duplexPrerollBytes <= 0)
7308 // pre-roll time over. Be more agressive.
7309 int adjustment = endRead-safeReadPos;
7311 ++statistics.numberOfReadOverruns;
7313 // large adjustments: we've probably run out of CPU cycles, so just resync exactly,
7314 // and perform fine adjustments later.
7315 // small adjustments: back off by twice as much.
7316 if (adjustment >= 2*buffer_bytes)
7318 nextReadPos = safeReadPos-2*buffer_bytes;
7321 nextReadPos = safeReadPos-buffer_bytes-adjustment;
7323 statistics.readDeviceSafeLeadBytes = currentReadPos-nextReadPos;
7324 if (statistics.readDeviceSafeLeadBytes < 0) statistics.readDeviceSafeLeadBytes += dsBufferSize;
7326 if (nextReadPos < 0) nextReadPos += dsBufferSize;
7329 // in pre=roll time. Just do it.
7330 nextReadPos = safeReadPos-buffer_bytes;
7331 while (nextReadPos < 0) nextReadPos += dsBufferSize;
7333 endRead = nextReadPos + buffer_bytes;
7336 while ( safeReadPos < endRead ) {
7337 // See comments for playback.
7338 double millis = (endRead - safeReadPos) * 900.0;
7339 millis /= ( formatBytes(stream_.deviceFormat[1]) * stream_.nDeviceChannels[1] * stream_.sampleRate);
7340 if ( millis < 1.0 ) millis = 1.0;
7341 Sleep( (DWORD) millis );
7343 // Wake up, find out where we are now
7344 result = dsBuffer->GetCurrentPosition( ¤tReadPos, &safeReadPos );
7345 if ( FAILED(result) ) {
7346 sprintf(message_, "RtApiDs: Unable to get current capture position (%s): %s.",
7347 devices_[stream_.device[1]].name.c_str(), getErrorString(result));
7348 error(RtError::DRIVER_ERROR);
7351 if ( safeReadPos < (DWORD)nextReadPos ) safeReadPos += dsBufferSize; // unwrap offset
7354 #ifdef GENERATE_DEBUG_LOG
7355 readTime = timeGetTime();
7357 if (statistics.readDeviceSafeLeadBytes < dsPointerDifference(currentReadPos,nextReadPos ,dsBufferSize))
7359 statistics.readDeviceSafeLeadBytes = dsPointerDifference(currentReadPos,nextReadPos ,dsBufferSize);
7362 // Lock free space in the buffer
7363 result = dsBuffer->Lock (nextReadPos, buffer_bytes, &buffer1,
7364 &bufferSize1, &buffer2, &bufferSize2, 0);
7365 if ( FAILED(result) ) {
7366 sprintf(message_, "RtApiDs: Unable to lock buffer during capture (%s): %s.",
7367 devices_[stream_.device[1]].name.c_str(), getErrorString(result));
7368 error(RtError::DRIVER_ERROR);
7371 if (duplexPrerollBytes <= 0)
7373 // Copy our buffer into the DS buffer
7374 CopyMemory(buffer, buffer1, bufferSize1);
7375 if (buffer2 != NULL) CopyMemory(buffer+bufferSize1, buffer2, bufferSize2);
7377 memset(buffer,0,bufferSize1);
7378 if (buffer2 != NULL) memset(buffer+bufferSize1,0,bufferSize2);
7379 duplexPrerollBytes -= bufferSize1 + bufferSize2;
7382 // Update our buffer offset and unlock sound buffer
7383 nextReadPos = (nextReadPos + bufferSize1 + bufferSize2) % dsBufferSize;
7384 dsBuffer->Unlock (buffer1, bufferSize1, buffer2, bufferSize2);
7385 if ( FAILED(result) ) {
7386 sprintf(message_, "RtApiDs: Unable to unlock buffer during capture (%s): %s.",
7387 devices_[stream_.device[1]].name.c_str(), getErrorString(result));
7388 error(RtError::DRIVER_ERROR);
7390 handles[1].bufferPointer = nextReadPos;
7393 // No byte swapping necessary in DirectSound implementation.
7395 // If necessary, convert 8-bit data from unsigned to signed.
7396 if ( stream_.deviceFormat[1] == RTAUDIO_SINT8 )
7397 for ( int j=0; j<buffer_bytes; j++ ) buffer[j] = (signed char) (buffer[j] - 128);
7399 // Do buffer conversion if necessary.
7400 if (stream_.doConvertBuffer[1])
7401 convertBuffer( stream_.userBuffer, stream_.deviceBuffer, stream_.convertInfo[1] );
7403 #ifdef GENERATE_DEBUG_LOG
7404 if (currentDebugLogEntry < debugLog.size())
7406 TTickRecord &r = debugLog[currentDebugLogEntry++];
7407 r.currentReadPointer = currentReadPos;
7408 r.safeReadPointer = safeReadPos;
7409 r.currentWritePointer = currentWritePos;
7410 r.safeWritePointer = safeWritePos;
7411 r.readTime = readTime;
7412 r.writeTime = writeTime;
7413 r.nextReadPointer = handles[1].bufferPointer;
7414 r.nextWritePointer = handles[0].bufferPointer;
7419 MUTEX_UNLOCK(&stream_.mutex);
7421 if (stream_.callbackInfo.usingCallback && stopStream)
7424 // Definitions for utility functions and callbacks
7425 // specific to the DirectSound implementation.
// DirectSound callback-thread entry point. Loops ticking the stream
// while CallbackInfo::usingCallback stays true; an RtError thrown by
// tickStream() is reported to stderr and (per the message) ends the thread.
// NOTE(review): this listing is decimated — the try block opener, loop
// closer and return statement are among the missing lines.
7427 extern "C" unsigned __stdcall callbackHandler(void *ptr)
7429   CallbackInfo *info = (CallbackInfo *) ptr;
7430   RtApiDs *object = (RtApiDs *) info->object;
7431   bool *usingCallback = &info->usingCallback;
7433   while ( *usingCallback ) {
7435       object->tickStream();
7437     catch (RtError &exception) {
7438       fprintf(stderr, "\nRtApiDs: callback thread error (%s) ... closing thread.\n\n",
7439               exception.getMessageString());
// DirectSoundEnumerate callback used to count devices: lpContext is an
// int* counter. NOTE(review): the parameter list and body are mostly
// missing from this decimated listing — presumably the counter is
// incremented and `true` returned to continue enumeration; confirm
// against the full source.
7448 static bool CALLBACK deviceCountCallback(LPGUID lpguid,
7449                                          LPCTSTR description,
7453   int *pointer = ((int *) lpContext);
// Convert a Windows TCHAR string (wide when UNICODE/_UNICODE is defined,
// narrow otherwise) into a one-byte-char std::string for device names.
// Wide characters are narrowed by truncation, as the original comment admits.
7461 std::string convertTChar( LPCTSTR name )
7465 #if defined( UNICODE ) || defined( _UNICODE )
7466   // Yes, this conversion doesn't make sense for two-byte characters
7467   // but RtAudio is currently written to return an std::string of
7468   // one-byte chars for the device name.
7469   for ( unsigned int i=0; i<wcslen( name ); i++ )
7470     s.push_back( name[i] );
7472   s.append( std::string( name ) );
// DirectSoundEnumerate callback that fills the next free enum_info slot
// with the device name and validates the device by opening it and
// querying its capabilities (capture path when info->isInput, playback
// path otherwise). Returning true continues enumeration.
// NOTE(review): decimated listing — HRESULT/caps declarations, Release()
// calls and the else branch opener are among the missing lines.
7478 static bool CALLBACK deviceInfoCallback(LPGUID lpguid,
7479                                         LPCTSTR description,
7483   enum_info *info = ((enum_info *) lpContext);
7484   while ( !info->name.empty() ) info++;   // advance to first unused slot
7486   info->name = convertTChar( description );
7490   info->isValid = false;
7491   if (info->isInput == true) {
7493     LPDIRECTSOUNDCAPTURE object;
7495     hr = DirectSoundCaptureCreate( lpguid, &object, NULL );
7496     if( hr != DS_OK ) return true;   // device can't be opened: leave isValid false
7498     caps.dwSize = sizeof(caps);
7499     hr = object->GetCaps( &caps );
7501       if (caps.dwChannels > 0 && caps.dwFormats > 0)
7502         info->isValid = true;
7508     LPDIRECTSOUND object;
7509     hr = DirectSoundCreate( lpguid, &object, NULL );
7510     if( hr != DS_OK ) return true;
7512     caps.dwSize = sizeof(caps);
7513     hr = object->GetCaps( &caps );
7515       if ( caps.dwFlags & DSCAPS_PRIMARYMONO || caps.dwFlags & DSCAPS_PRIMARYSTEREO )
7516         info->isValid = true;
// Enumeration callback that records the name of the default device:
// DirectSound reports the default device with a NULL GUID.
7524 static bool CALLBACK defaultDeviceCallback(LPGUID lpguid,
7525                                            LPCTSTR description,
7529   enum_info *info = ((enum_info *) lpContext);
7531   if ( lpguid == NULL ) {
7532     info->name = convertTChar( description );
// Enumeration callback that looks up a device by name: when the
// enumerated description matches info->name, the slot is marked valid
// (the matching GUID is presumably stored too — line missing from this
// decimated listing; confirm against full source).
7539 static bool CALLBACK deviceIdCallback(LPGUID lpguid,
7540                                       LPCTSTR description,
7544   enum_info *info = ((enum_info *) lpContext);
7546   std::string s = convertTChar( description );
7547   if ( info->name == s ) {
7549     info->isValid = true;
// Map a DirectSound HRESULT error code to a human-readable static string.
// Falls through to a generic "unknown error" message for unlisted codes.
7556 static char* getErrorString(int code)
7560   case DSERR_ALLOCATED:
7561     return "Already allocated.";
7563   case DSERR_CONTROLUNAVAIL:
7564     return "Control unavailable.";
7566   case DSERR_INVALIDPARAM:
7567     return "Invalid parameter.";
7569   case DSERR_INVALIDCALL:
7570     return "Invalid call.";
7573     return "Generic error.";
7575   case DSERR_PRIOLEVELNEEDED:
7576     return "Priority level needed";
7578   case DSERR_OUTOFMEMORY:
7579     return "Out of memory";
7581   case DSERR_BADFORMAT:
7582     return "The sample rate or the channel format is not supported.";
7584   case DSERR_UNSUPPORTED:
7585     return "Not supported.";
7587   case DSERR_NODRIVER:
7588     return "No driver.";
7590   case DSERR_ALREADYINITIALIZED:
7591     return "Already initialized.";
7593   case DSERR_NOAGGREGATION:
7594     return "No aggregation.";
7596   case DSERR_BUFFERLOST:
7597     return "Buffer lost.";
7599   case DSERR_OTHERAPPHASPRIO:
7600     return "Another application already has priority.";
7602   case DSERR_UNINITIALIZED:
7603     return "Uninitialized.";
7606     return "DirectSound unknown error";
7610 //******************** End of __WINDOWS_DS__ *********************//
7613 #if defined(__IRIX_AL__) // SGI's AL API for IRIX
7615 #include <dmedia/audio.h>
7619 extern "C" void *callbackHandler(void * ptr);
// Constructor: device discovery (presumably via initialize(), called on a
// line missing from this decimated listing) must find at least one IRIX
// AL device, otherwise a NO_DEVICES_FOUND error is raised.
7621 RtApiAl :: RtApiAl()
7625   if (nDevices_ <= 0) {
7626     sprintf(message_, "RtApiAl: no Irix AL audio devices found!");
7627     error(RtError::NO_DEVICES_FOUND);
// Destructor: close any open stream first (the base-class destructor
// runs afterwards), then free the per-device apiDeviceId allocations
// made by initialize().
7631 RtApiAl :: ~RtApiAl()
7633   // The subclass destructor gets called before the base class
7634   // destructor, so close any existing streams before deallocating
7635   // apiDeviceId memory.
7636   if ( stream_.mode != UNINITIALIZED ) closeStream();
7638   // Free our allocated apiDeviceId memory.
7640   for ( unsigned int i=0; i<devices_.size(); i++ ) {
7641     id = (long *) devices_[i].apiDeviceId;
// Enumerate IRIX AL devices: query the system device count, then the
// default-output and default-input device lists, storing each device's
// name and a two-slot resource-id array (calloc'd) in apiDeviceId.
// NOTE(review): decimated listing — several declarations (vls/pvs/name,
// outs/ins, loop indices) and the id[] assignments are missing from view.
7646 void RtApiAl :: initialize(void)
7648   // Count cards and devices
7651   // Determine the total number of input and output devices.
7652   nDevices_ = alQueryValues(AL_SYSTEM, AL_DEVICES, 0, 0, 0, 0);
7653   if (nDevices_ < 0) {
7654     sprintf(message_, "RtApiAl: error counting devices: %s.",
7655             alGetErrorString(oserror()));
7656     error(RtError::DRIVER_ERROR);
7659   if (nDevices_ <= 0) return;
7661   ALvalue *vls = (ALvalue *) new ALvalue[nDevices_];
7663   // Create our list of devices and write their ascii identifiers and resource ids.
7667   pvs[0].param = AL_NAME;
7668   pvs[0].value.ptr = name;
7673   outs = alQueryValues(AL_SYSTEM, AL_DEFAULT_OUTPUT, vls, nDevices_, 0, 0);
7676     sprintf(message_, "RtApiAl: error getting output devices: %s.",
7677             alGetErrorString(oserror()));
7678     error(RtError::DRIVER_ERROR);
7681   for (i=0; i<outs; i++) {
7682     if (alGetParams(vls[i].i, pvs, 1) < 0) {
7684       sprintf(message_, "RtApiAl: error querying output devices: %s.",
7685               alGetErrorString(oserror()));
7686       error(RtError::DRIVER_ERROR);
7688     device.name.erase();
7689     device.name.append( (const char *)name, strlen(name)+1);
7690     devices_.push_back(device);
7691     id = (long *) calloc(2, sizeof(long));
7693     devices_[i].apiDeviceId = (void *) id;
7696   ins = alQueryValues(AL_SYSTEM, AL_DEFAULT_INPUT, &vls[outs], nDevices_-outs, 0, 0);
7699     sprintf(message_, "RtApiAl: error getting input devices: %s.",
7700             alGetErrorString(oserror()));
7701     error(RtError::DRIVER_ERROR);
7704   for (i=outs; i<ins+outs; i++) {
7705     if (alGetParams(vls[i].i, pvs, 1) < 0) {
7707       sprintf(message_, "RtApiAl: error querying input devices: %s.",
7708               alGetErrorString(oserror()));
7709       error(RtError::DRIVER_ERROR);
7711     device.name.erase();
7712     device.name.append( (const char *)name, strlen(name)+1);
7713     devices_.push_back(device);
7714     id = (long *) calloc(2, sizeof(long));
7716     devices_[i].apiDeviceId = (void *) id;
// Return the devices_ index whose stored input resource id (id[1])
// matches the system default input; on query failure only a WARNING is
// raised (a fallback return, presumably 0, is on a missing line).
7722 int RtApiAl :: getDefaultInputDevice(void)
7726   int result = alQueryValues(AL_SYSTEM, AL_DEFAULT_INPUT, &value, 1, 0, 0);
7728     sprintf(message_, "RtApiAl: error getting default input device id: %s.",
7729             alGetErrorString(oserror()));
7730     error(RtError::WARNING);
7733   for ( unsigned int i=0; i<devices_.size(); i++ ) {
7734     id = (long *) devices_[i].apiDeviceId;
7735     if ( id[1] == value.i ) return i;   // id[1] holds the input resource id
// Mirror of getDefaultInputDevice() for output: matches the default
// output resource against id[0] of each device's apiDeviceId pair.
7742 int RtApiAl :: getDefaultOutputDevice(void)
7746   int result = alQueryValues(AL_SYSTEM, AL_DEFAULT_OUTPUT, &value, 1, 0, 0);
7748     sprintf(message_, "RtApiAl: error getting default output device id: %s.",
7749             alGetErrorString(oserror()));
7750     error(RtError::WARNING);
7753   for ( unsigned int i=0; i<devices_.size(); i++ ) {
7754     id = (long *) devices_[i].apiDeviceId;
7755     if ( id[0] == value.i ) return i;   // id[0] holds the output resource id
// Probe an AL device's capabilities: channel counts (AL_CHANNELS) and
// supported sample rates (AL_RATE range intersected with the class-wide
// SAMPLE_RATES table) for the output resource, then the input resource.
// nativeFormats is hard-coded to 51 = SINT8|SINT16|FLOAT32|FLOAT64
// (all formats except 24/32-bit ints, per the original comment).
7762 void RtApiAl :: probeDeviceInfo(RtApiDevice *info)
7769   // Get output resource ID if it exists.
7770   long *id = (long *) info->apiDeviceId;
7774     // Probe output device parameters.
7775     result = alQueryValues(resource, AL_CHANNELS, &value, 1, 0, 0);
7777       sprintf(message_, "RtApiAl: error getting device (%s) channels: %s.",
7778               info->name.c_str(), alGetErrorString(oserror()));
7779       error(RtError::DEBUG_WARNING);
7782       info->maxOutputChannels = value.i;
7783       info->minOutputChannels = 1;
7786     result = alGetParamInfo(resource, AL_RATE, &pinfo);
7788       sprintf(message_, "RtApiAl: error getting device (%s) rates: %s.",
7789               info->name.c_str(), alGetErrorString(oserror()));
7790       error(RtError::DEBUG_WARNING);
7793       info->sampleRates.clear();
7794       for (unsigned int k=0; k<MAX_SAMPLE_RATES; k++) {
7795         if ( SAMPLE_RATES[k] >= pinfo.min.i && SAMPLE_RATES[k] <= pinfo.max.i )
7796           info->sampleRates.push_back( SAMPLE_RATES[k] );
7800     // The AL library supports all our formats, except 24-bit and 32-bit ints.
7801     info->nativeFormats = (RtAudioFormat) 51;
7804   // Now get input resource ID if it exists.
7808     // Probe input device parameters.
7809     result = alQueryValues(resource, AL_CHANNELS, &value, 1, 0, 0);
7811       sprintf(message_, "RtApiAl: error getting device (%s) channels: %s.",
7812               info->name.c_str(), alGetErrorString(oserror()));
7813       error(RtError::DEBUG_WARNING);
7816       info->maxInputChannels = value.i;
7817       info->minInputChannels = 1;
7820     result = alGetParamInfo(resource, AL_RATE, &pinfo);
7822       sprintf(message_, "RtApiAl: error getting device (%s) rates: %s.",
7823               info->name.c_str(), alGetErrorString(oserror()));
7824       error(RtError::DEBUG_WARNING);
7827       // In the case of the default device, these values will
7828       // overwrite the rates determined for the output device. Since
7829       // the input device is most likely to be more limited than the
7830       // output device, this is ok.
7831       info->sampleRates.clear();
7832       for (unsigned int k=0; k<MAX_SAMPLE_RATES; k++) {
7833         if ( SAMPLE_RATES[k] >= pinfo.min.i && SAMPLE_RATES[k] <= pinfo.max.i )
7834           info->sampleRates.push_back( SAMPLE_RATES[k] );
7838     // The AL library supports all our formats, except 24-bit and 32-bit ints.
7839     info->nativeFormats = (RtAudioFormat) 51;
7842   if ( info->maxInputChannels == 0 && info->maxOutputChannels == 0 )
7844   if ( info->sampleRates.size() == 0 )
7847   // Determine duplex status.
7848   if (info->maxInputChannels < info->maxOutputChannels)
7849     info->maxDuplexChannels = info->maxInputChannels;
7851     info->maxDuplexChannels = info->maxOutputChannels;
7852   if (info->minInputChannels < info->minOutputChannels)
7853     info->minDuplexChannels = info->minInputChannels;
7855     info->minDuplexChannels = info->minOutputChannels;
7857   if ( info->maxDuplexChannels > 0 ) info->hasDuplexSupport = true;
7858   else info->hasDuplexSupport = false;
7860   info->probed = true;
// Open one direction (OUTPUT or INPUT) of an AL stream on `device`:
// build an ALconfig (channels, queue size = *bufferSize * nBuffers,
// sample format/width, device resource), open the AL port, set the
// sample rate on the resource, then record stream bookkeeping
// (channels, handle, conversion flags, user/device buffers, convertInfo
// interleave offsets). On failure the config is freed, a DEBUG_WARNING
// is raised, and cleanup at the end closes ports and frees buffers.
// *bufferSize may be adjusted to what the AL config actually accepted.
// NOTE(review): decimated listing — many closing braces, declarations
// (al_config, port, pvs, resource, buffer_bytes) and return statements
// are missing from view; comments below describe only the visible lines.
7865 bool RtApiAl :: probeDeviceOpen(int device, StreamMode mode, int channels,
7866                                 int sampleRate, RtAudioFormat format,
7867                                 int *bufferSize, int numberOfBuffers)
7869   int result, nBuffers;
7874   long *id = (long *) devices_[device].apiDeviceId;
7876   // Get a new ALconfig structure.
7877   al_config = alNewConfig();
7879     sprintf(message_,"RtApiAl: can't get AL config: %s.",
7880             alGetErrorString(oserror()));
7881     error(RtError::DEBUG_WARNING);
7885   // Set the channels.
7886   result = alSetChannels(al_config, channels);
7888     alFreeConfig(al_config);
7889     sprintf(message_,"RtApiAl: can't set %d channels in AL config: %s.",
7890             channels, alGetErrorString(oserror()));
7891     error(RtError::DEBUG_WARNING);
7895   // Attempt to set the queue size.  The al API doesn't provide a
7896   // means for querying the minimum/maximum buffer size of a device,
7897   // so if the specified size doesn't work, take whatever the
7898   // al_config structure returns.
7899   if ( numberOfBuffers < 1 )
7902     nBuffers = numberOfBuffers;
7903   long buffer_size = *bufferSize * nBuffers;
7904   result = alSetQueueSize(al_config, buffer_size); // in sample frames
7906     // Get the buffer size specified by the al_config and try that.
7907     buffer_size = alGetQueueSize(al_config);
7908     result = alSetQueueSize(al_config, buffer_size);
7910       alFreeConfig(al_config);
7911       sprintf(message_,"RtApiAl: can't set buffer size (%ld) in AL config: %s.",
7912               buffer_size, alGetErrorString(oserror()));
7913       error(RtError::DEBUG_WARNING);
7916     *bufferSize = buffer_size / nBuffers;
7919   // Set the data format.
7920   stream_.userFormat = format;
7921   stream_.deviceFormat[mode] = format;
7922   if (format == RTAUDIO_SINT8) {
7923     result = alSetSampFmt(al_config, AL_SAMPFMT_TWOSCOMP);
7924     result = alSetWidth(al_config, AL_SAMPLE_8);
7926   else if (format == RTAUDIO_SINT16) {
7927     result = alSetSampFmt(al_config, AL_SAMPFMT_TWOSCOMP);
7928     result = alSetWidth(al_config, AL_SAMPLE_16);
7930   else if (format == RTAUDIO_SINT24) {
7931     // Our 24-bit format assumes the upper 3 bytes of a 4 byte word.
7932     // The AL library uses the lower 3 bytes, so we'll need to do our
7934     result = alSetSampFmt(al_config, AL_SAMPFMT_FLOAT);
7935     stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;   // convert in software
7937   else if (format == RTAUDIO_SINT32) {
7938     // The AL library doesn't seem to support the 32-bit integer
7939     // format, so we'll need to do our own conversion.
7940     result = alSetSampFmt(al_config, AL_SAMPFMT_FLOAT);
7941     stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
7943   else if (format == RTAUDIO_FLOAT32)
7944     result = alSetSampFmt(al_config, AL_SAMPFMT_FLOAT);
7945   else if (format == RTAUDIO_FLOAT64)
7946     result = alSetSampFmt(al_config, AL_SAMPFMT_DOUBLE);
7948   if ( result == -1 ) {
7949     alFreeConfig(al_config);
7950     sprintf(message_,"RtApiAl: error setting sample format in AL config: %s.",
7951             alGetErrorString(oserror()));
7952     error(RtError::DEBUG_WARNING);
7956   if (mode == OUTPUT) {
7960       resource = AL_DEFAULT_OUTPUT;
7963       result = alSetDevice(al_config, resource);
7964       if ( result == -1 ) {
7965         alFreeConfig(al_config);
7966         sprintf(message_,"RtApiAl: error setting device (%s) in AL config: %s.",
7967                 devices_[device].name.c_str(), alGetErrorString(oserror()));
7968         error(RtError::DEBUG_WARNING);
7973     port = alOpenPort("RtApiAl Output Port", "w", al_config);
7975       alFreeConfig(al_config);
7976       sprintf(message_,"RtApiAl: error opening output port: %s.",
7977               alGetErrorString(oserror()));
7978       error(RtError::DEBUG_WARNING);
7982     // Set the sample rate
7983     pvs[0].param = AL_MASTER_CLOCK;
7984     pvs[0].value.i = AL_CRYSTAL_MCLK_TYPE;
7985     pvs[1].param = AL_RATE;
7986     pvs[1].value.ll = alDoubleToFixed((double)sampleRate);
7987     result = alSetParams(resource, pvs, 2);
7990       alFreeConfig(al_config);
7991       sprintf(message_,"RtApiAl: error setting sample rate (%d) for device (%s): %s.",
7992               sampleRate, devices_[device].name.c_str(), alGetErrorString(oserror()));
7993       error(RtError::DEBUG_WARNING);
7997   else { // mode == INPUT
8001       resource = AL_DEFAULT_INPUT;
8004       result = alSetDevice(al_config, resource);
8005       if ( result == -1 ) {
8006         alFreeConfig(al_config);
8007         sprintf(message_,"RtApiAl: error setting device (%s) in AL config: %s.",
8008                 devices_[device].name.c_str(), alGetErrorString(oserror()));
8009         error(RtError::DEBUG_WARNING);
8014     port = alOpenPort("RtApiAl Input Port", "r", al_config);
8016       alFreeConfig(al_config);
8017       sprintf(message_,"RtApiAl: error opening input port: %s.",
8018               alGetErrorString(oserror()));
8019       error(RtError::DEBUG_WARNING);
8023     // Set the sample rate
8024     pvs[0].param = AL_MASTER_CLOCK;
8025     pvs[0].value.i = AL_CRYSTAL_MCLK_TYPE;
8026     pvs[1].param = AL_RATE;
8027     pvs[1].value.ll = alDoubleToFixed((double)sampleRate);
8028     result = alSetParams(resource, pvs, 2);
8031       alFreeConfig(al_config);
8032       sprintf(message_,"RtApiAl: error setting sample rate (%d) for device (%s): %s.",
8033               sampleRate, devices_[device].name.c_str(), alGetErrorString(oserror()));
8034       error(RtError::DEBUG_WARNING);
8039   alFreeConfig(al_config);
8041   stream_.nUserChannels[mode] = channels;
8042   stream_.nDeviceChannels[mode] = channels;
8044   // Save stream handle.
8045   ALport *handle = (ALport *) stream_.apiHandle;
8046   if ( handle == 0 ) {
8047     handle = (ALport *) calloc(2, sizeof(ALport));
8048     if ( handle == NULL ) {
8049       sprintf(message_, "RtApiAl: Irix Al error allocating handle memory (%s).",
8050               devices_[device].name.c_str());
8053     stream_.apiHandle = (void *) handle;
8057   handle[mode] = port;
8059   // Set flags for buffer conversion
8060   stream_.doConvertBuffer[mode] = false;
8061   if (stream_.userFormat != stream_.deviceFormat[mode])
8062     stream_.doConvertBuffer[mode] = true;
8064   // Allocate necessary internal buffers
8065   if ( stream_.nUserChannels[0] != stream_.nUserChannels[1] ) {
8068     if (stream_.nUserChannels[0] >= stream_.nUserChannels[1])
8069       buffer_bytes = stream_.nUserChannels[0];
8071       buffer_bytes = stream_.nUserChannels[1];
8073     buffer_bytes *= *bufferSize * formatBytes(stream_.userFormat);
8074     if (stream_.userBuffer) free(stream_.userBuffer);
8075     stream_.userBuffer = (char *) calloc(buffer_bytes, 1);
8076     if (stream_.userBuffer == NULL) {
8077       sprintf(message_, "RtApiAl: error allocating user buffer memory (%s).",
8078               devices_[device].name.c_str());
8083   if ( stream_.doConvertBuffer[mode] ) {
8086     bool makeBuffer = true;
8087     if ( mode == OUTPUT )
8088       buffer_bytes = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
8089     else { // mode == INPUT
8090       buffer_bytes = stream_.nDeviceChannels[1] * formatBytes(stream_.deviceFormat[1]);
8091       if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
8092         long bytes_out = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
8093         if ( buffer_bytes < bytes_out ) makeBuffer = false;   // reuse the larger output buffer
8098       buffer_bytes *= *bufferSize;
8099       if (stream_.deviceBuffer) free(stream_.deviceBuffer);
8100       stream_.deviceBuffer = (char *) calloc(buffer_bytes, 1);
8101       if (stream_.deviceBuffer == NULL) {
8102         sprintf(message_, "RtApiAl: error allocating device buffer memory (%s).",
8103                 devices_[device].name.c_str());
8109   stream_.device[mode] = device;
8110   stream_.state = STREAM_STOPPED;
8111   if ( stream_.mode == OUTPUT && mode == INPUT )
8112     // We had already set up an output stream.
8113     stream_.mode = DUPLEX;
8115     stream_.mode = mode;
8116   stream_.nBuffers = nBuffers;
8117   stream_.bufferSize = *bufferSize;
8118   stream_.sampleRate = sampleRate;
8120   // Setup the buffer conversion information structure.
8121   if ( stream_.doConvertBuffer[mode] ) {
8122     if (mode == INPUT) { // convert device to user buffer
8123       stream_.convertInfo[mode].inJump = stream_.nDeviceChannels[1];
8124       stream_.convertInfo[mode].outJump = stream_.nUserChannels[1];
8125       stream_.convertInfo[mode].inFormat = stream_.deviceFormat[1];
8126       stream_.convertInfo[mode].outFormat = stream_.userFormat;
8128     else { // convert user to device buffer
8129       stream_.convertInfo[mode].inJump = stream_.nUserChannels[0];
8130       stream_.convertInfo[mode].outJump = stream_.nDeviceChannels[0];
8131       stream_.convertInfo[mode].inFormat = stream_.userFormat;
8132       stream_.convertInfo[mode].outFormat = stream_.deviceFormat[0];
8135     if ( stream_.convertInfo[mode].inJump < stream_.convertInfo[mode].outJump )
8136       stream_.convertInfo[mode].channels = stream_.convertInfo[mode].inJump;
8138       stream_.convertInfo[mode].channels = stream_.convertInfo[mode].outJump;
8140     // Set up the interleave/deinterleave offsets.
8141     if ( mode == INPUT && stream_.deInterleave[1] ) {
8142       for (int k=0; k<stream_.convertInfo[mode].channels; k++) {
8143         stream_.convertInfo[mode].inOffset.push_back( k * stream_.bufferSize );
8144         stream_.convertInfo[mode].outOffset.push_back( k );
8145         stream_.convertInfo[mode].inJump = 1;
8148     else if (mode == OUTPUT && stream_.deInterleave[0]) {
8149       for (int k=0; k<stream_.convertInfo[mode].channels; k++) {
8150         stream_.convertInfo[mode].inOffset.push_back( k );
8151         stream_.convertInfo[mode].outOffset.push_back( k * stream_.bufferSize );
8152         stream_.convertInfo[mode].outJump = 1;
8156       for (int k=0; k<stream_.convertInfo[mode].channels; k++) {
8157         stream_.convertInfo[mode].inOffset.push_back( k );
8158         stream_.convertInfo[mode].outOffset.push_back( k );
8168     alClosePort(handle[0]);
8170     alClosePort(handle[1]);
8172     stream_.apiHandle = 0;
8175   if (stream_.userBuffer) {
8176     free(stream_.userBuffer);
8177     stream_.userBuffer = 0;
8180   error(RtError::DEBUG_WARNING);
// Close the open stream without throwing (this is reachable from the
// destructor): warn and return if nothing is open, discard any queued
// frames if still running, stop a callback thread, close the AL ports,
// free handle and user/device buffers, and reset the stream mode.
8184 void RtApiAl :: closeStream()
8186   // We don't want an exception to be thrown here because this
8187   // function is called by our class destructor.  So, do our own
8189   if ( stream_.mode == UNINITIALIZED ) {
8190     sprintf(message_, "RtApiAl::closeStream(): no open stream to close!");
8191     error(RtError::WARNING);
8195   ALport *handle = (ALport *) stream_.apiHandle;
8196   if (stream_.state == STREAM_RUNNING) {
8197     int buffer_size = stream_.bufferSize * stream_.nBuffers;
8198     if (stream_.mode == OUTPUT || stream_.mode == DUPLEX)
8199       alDiscardFrames(handle[0], buffer_size);
8200     if (stream_.mode == INPUT || stream_.mode == DUPLEX)
8201       alDiscardFrames(handle[1], buffer_size);
8202     stream_.state = STREAM_STOPPED;
8205   if (stream_.callbackInfo.usingCallback) {
8206     stream_.callbackInfo.usingCallback = false;   // signals the callback thread to exit
8207     pthread_join(stream_.callbackInfo.thread, NULL);
8211     if (handle[0]) alClosePort(handle[0]);
8212     if (handle[1]) alClosePort(handle[1]);
8214     stream_.apiHandle = 0;
8217   if (stream_.userBuffer) {
8218     free(stream_.userBuffer);
8219     stream_.userBuffer = 0;
8222   if (stream_.deviceBuffer) {
8223     free(stream_.deviceBuffer);
8224     stream_.deviceBuffer = 0;
8227   stream_.mode = UNINITIALIZED;
// Start the stream. An AL port is active from the moment it is opened,
// so this only flips the state flag under the stream mutex.
8230 void RtApiAl :: startStream()
8233   if (stream_.state == STREAM_RUNNING) return;
8235   MUTEX_LOCK(&stream_.mutex);
8237   // The AL port is ready as soon as it is opened.
8238   stream_.state = STREAM_RUNNING;
8240   MUTEX_UNLOCK(&stream_.mutex);
// Stop the stream gracefully: zero-fill the output queue so it drains
// with silence, and discard any captured-but-unread input frames.
// The state flag is flipped before taking the mutex so a callback
// thread blocked on tickStream() exits promptly.
8243 void RtApiAl :: stopStream()
8246   if (stream_.state == STREAM_STOPPED) return;
8248   // Change the state before the lock to improve shutdown response
8249   // when using a callback.
8250   stream_.state = STREAM_STOPPED;
8251   MUTEX_LOCK(&stream_.mutex);
8253   int result, buffer_size = stream_.bufferSize * stream_.nBuffers;
8254   ALport *handle = (ALport *) stream_.apiHandle;
8256   if (stream_.mode == OUTPUT || stream_.mode == DUPLEX)
8257     alZeroFrames(handle[0], buffer_size);   // pad output with silence
8259   if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
8260     result = alDiscardFrames(handle[1], buffer_size);
8262       sprintf(message_, "RtApiAl: error draining stream device (%s): %s.",
8263               devices_[stream_.device[1]].name.c_str(), alGetErrorString(oserror()));
8264       error(RtError::DRIVER_ERROR);
8268   MUTEX_UNLOCK(&stream_.mutex);
// Stop the stream immediately: discard queued output frames instead of
// draining them. Input is left alone because the AL port keeps running
// regardless (per the original comment).
8271 void RtApiAl :: abortStream()
8274   if (stream_.state == STREAM_STOPPED) return;
8276   // Change the state before the lock to improve shutdown response
8277   // when using a callback.
8278   stream_.state = STREAM_STOPPED;
8279   MUTEX_LOCK(&stream_.mutex);
8281   ALport *handle = (ALport *) stream_.apiHandle;
8282   if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
8284     int buffer_size = stream_.bufferSize * stream_.nBuffers;
8285     int result = alDiscardFrames(handle[0], buffer_size);
8287       sprintf(message_, "RtApiAl: error aborting stream device (%s): %s.",
8288               devices_[stream_.device[0]].name.c_str(), alGetErrorString(oserror()));
8289       error(RtError::DRIVER_ERROR);
8293   // There is no clear action to take on the input stream, since the
8294   // port will continue to run in any event.
8296   MUTEX_UNLOCK(&stream_.mutex);
// Return how many frames tickStream() would block for: the shortfall
// between one bufferSize and the frames currently fillable (output) /
// filled (input); for duplex the more constrained direction wins.
// Returns 0 when the stream is stopped or nothing would block.
8299 int RtApiAl :: streamWillBlock()
8303   if (stream_.state == STREAM_STOPPED) return 0;
8305   MUTEX_LOCK(&stream_.mutex);
8309   ALport *handle = (ALport *) stream_.apiHandle;
8310   if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
8311     err = alGetFillable(handle[0]);   // free space in the output queue
8313       sprintf(message_, "RtApiAl: error getting available frames for stream (%s): %s.",
8314               devices_[stream_.device[0]].name.c_str(), alGetErrorString(oserror()));
8315       error(RtError::DRIVER_ERROR);
8321   if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
8322     err = alGetFilled(handle[1]);   // captured frames waiting to be read
8324       sprintf(message_, "RtApiAl: error getting available frames for stream (%s): %s.",
8325               devices_[stream_.device[1]].name.c_str(), alGetErrorString(oserror()));
8326       error(RtError::DRIVER_ERROR);
8328     if (frames > err) frames = err;
8331   frames = stream_.bufferSize - frames;
8332   if (frames < 0) frames = 0;
8334   MUTEX_UNLOCK(&stream_.mutex);
// Process one buffer of audio: invoke the user callback (if any), then
// under the mutex write one interleaved buffer to the output port and/or
// read one from the input port, performing format conversion and byte
// swapping as flagged at open time. When the stream is stopped and a
// callback is in use, sleeps 50 ms instead of doing work.
8338 void RtApiAl :: tickStream()
8343   if (stream_.state == STREAM_STOPPED) {
8344     if (stream_.callbackInfo.usingCallback) usleep(50000); // sleep 50 milliseconds
8347   else if (stream_.callbackInfo.usingCallback) {
8348     RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;
8349     stopStream = callback(stream_.userBuffer, stream_.bufferSize, stream_.callbackInfo.userData);
8352   MUTEX_LOCK(&stream_.mutex);
8354   // The state might change while waiting on a mutex.
8355   if (stream_.state == STREAM_STOPPED)
8360   RtAudioFormat format;
8361   ALport *handle = (ALport *) stream_.apiHandle;
8362   if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
8364     // Setup parameters and do buffer conversion if necessary.
8365     if (stream_.doConvertBuffer[0]) {
8366       buffer = stream_.deviceBuffer;
8367       convertBuffer( buffer, stream_.userBuffer, stream_.convertInfo[0] );
8368       channels = stream_.nDeviceChannels[0];
8369       format = stream_.deviceFormat[0];
8372       buffer = stream_.userBuffer;
8373       channels = stream_.nUserChannels[0];
8374       format = stream_.userFormat;
8377     // Do byte swapping if necessary.
8378     if (stream_.doByteSwap[0])
8379       byteSwapBuffer(buffer, stream_.bufferSize * channels, format);
8381     // Write interleaved samples to device.
8382     alWriteFrames(handle[0], buffer, stream_.bufferSize);
8385   if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
8387     // Setup parameters.
8388     if (stream_.doConvertBuffer[1]) {
8389       buffer = stream_.deviceBuffer;
8390       channels = stream_.nDeviceChannels[1];
8391       format = stream_.deviceFormat[1];
8394       buffer = stream_.userBuffer;
8395       channels = stream_.nUserChannels[1];
8396       format = stream_.userFormat;
8399     // Read interleaved samples from device.
8400     alReadFrames(handle[1], buffer, stream_.bufferSize);
8402     // Do byte swapping if necessary.
8403     if (stream_.doByteSwap[1])
8404       byteSwapBuffer(buffer, stream_.bufferSize * channels, format);
8406     // Do buffer conversion if necessary.
8407     if (stream_.doConvertBuffer[1])
8408       convertBuffer( stream_.userBuffer, stream_.deviceBuffer, stream_.convertInfo[1] );
8412   MUTEX_UNLOCK(&stream_.mutex);
8414   if (stream_.callbackInfo.usingCallback && stopStream)
// Register a user callback and spawn the callback thread (joinable,
// SCHED_RR — the elevated priority only takes effect when run as root
// or suid, per the original comment). Warns and returns if a callback
// is already installed; raises THREAD_ERROR if thread creation fails.
8418 void RtApiAl :: setStreamCallback(RtAudioCallback callback, void *userData)
8422   CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
8423   if ( info->usingCallback ) {
8424     sprintf(message_, "RtApiAl: A callback is already set for this stream!");
8425     error(RtError::WARNING);
8429   info->callback = (void *) callback;
8430   info->userData = userData;
8431   info->usingCallback = true;
8432   info->object = (void *) this;
8434   // Set the thread attributes for joinable and realtime scheduling
8435   // priority.  The higher priority will only take affect if the
8436   // program is run as root or suid.
8437   pthread_attr_t attr;
8438   pthread_attr_init(&attr);
8439   pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
8440   pthread_attr_setschedpolicy(&attr, SCHED_RR);
8442   int err = pthread_create(&info->thread, &attr, callbackHandler, &stream_.callbackInfo);
8443   pthread_attr_destroy(&attr);
8445     info->usingCallback = false;   // roll back on pthread_create failure
8446     sprintf(message_, "RtApiAl: error starting callback thread!");
8447     error(RtError::THREAD_ERROR);
// Remove an installed callback: stop the stream if running, then (under
// the mutex) clear the usingCallback flag, join the callback thread,
// and reset the thread/callback/userData bookkeeping.
8451 void RtApiAl :: cancelStreamCallback()
8455   if (stream_.callbackInfo.usingCallback) {
8457     if (stream_.state == STREAM_RUNNING)
8460     MUTEX_LOCK(&stream_.mutex);
8462     stream_.callbackInfo.usingCallback = false;
8463     pthread_join(stream_.callbackInfo.thread, NULL);
8464     stream_.callbackInfo.thread = 0;
8465     stream_.callbackInfo.callback = NULL;
8466     stream_.callbackInfo.userData = NULL;
8468     MUTEX_UNLOCK(&stream_.mutex);
// IRIX AL callback-thread entry point (pthread flavor of the DS handler
// above): ticks the stream while usingCallback stays true; an RtError
// from tickStream() is reported to stderr and ends the thread.
// NOTE(review): decimated listing — try opener, loop closer and return
// are among the missing lines.
8472 extern "C" void *callbackHandler(void *ptr)
8474   CallbackInfo *info = (CallbackInfo *) ptr;
8475   RtApiAl *object = (RtApiAl *) info->object;
8476   bool *usingCallback = &info->usingCallback;
8478   while ( *usingCallback ) {
8480       object->tickStream();
8482     catch (RtError &exception) {
8483       fprintf(stderr, "\nRtApiAl: callback thread error (%s) ... closing thread.\n\n",
8484               exception.getMessageString());
8492 //******************** End of __IRIX_AL__ *********************//
8496 // *************************************************** //
8498 // Protected common (OS-independent) RtAudio methods.
8500 // *************************************************** //
8502 // This method can be modified to control the behavior of error
8503 // message reporting and throwing.
// Central error dispatch: WARNING prints message_ to stderr;
// DEBUG_WARNING prints only in __RTAUDIO_DEBUG__ builds; every other
// type prints (in debug builds) and then throws an RtError carrying
// message_ and the type.
8504 void RtApi :: error(RtError::Type type)
8506   if (type == RtError::WARNING) {
8507     fprintf(stderr, "\n%s\n\n", message_);
8509   else if (type == RtError::DEBUG_WARNING) {
8510 #if defined(__RTAUDIO_DEBUG__)
8511     fprintf(stderr, "\n%s\n\n", message_);
8515 #if defined(__RTAUDIO_DEBUG__)
8516     fprintf(stderr, "\n%s\n\n", message_);
8518     throw RtError(std::string(message_), type);
// Guard used by public stream methods: raises INVALID_STREAM (which
// throws via error()) when no stream has been opened.
8522 void RtApi :: verifyStream()
8524   if ( stream_.mode == UNINITIALIZED ) {
8525     sprintf(message_, "RtAudio: stream is not open!");
8526     error(RtError::INVALID_STREAM);
// Reset all probed capability fields of a device record to their
// unprobed defaults; the name and device-id fields are deliberately
// preserved (they are set before this is called).
8530 void RtApi :: clearDeviceInfo(RtApiDevice *info)
8532   // Don't clear the name or DEVICE_ID fields here ... they are
8533   // typically set prior to a call of this function.
8534   info->probed = false;
8535   info->maxOutputChannels = 0;
8536   info->maxInputChannels = 0;
8537   info->maxDuplexChannels = 0;
8538   info->minOutputChannels = 0;
8539   info->minInputChannels = 0;
8540   info->minDuplexChannels = 0;
8541   info->hasDuplexSupport = false;
8542   info->sampleRates.clear();
8543   info->nativeFormats = 0;
// Reset the shared stream structure to a closed, zeroed state; the
// loop clears the per-direction fields for both OUTPUT (0) and INPUT (1).
8546 void RtApi :: clearStreamInfo()
8548   stream_.mode = UNINITIALIZED;
8549   stream_.state = STREAM_STOPPED;
8550   stream_.sampleRate = 0;
8551   stream_.bufferSize = 0;
8552   stream_.nBuffers = 0;
8553   stream_.userFormat = 0;
8554   for ( int i=0; i<2; i++ ) {
8555     stream_.device[i] = 0;
8556     stream_.doConvertBuffer[i] = false;
8557     stream_.deInterleave[i] = false;
8558     stream_.doByteSwap[i] = false;
8559     stream_.nUserChannels[i] = 0;
8560     stream_.nDeviceChannels[i] = 0;
8561     stream_.deviceFormat[i] = 0;
// Return the size in bytes of one sample of the given format
// (SINT24 occupies a full 4-byte word in RtAudio's convention).
// Unknown formats warn and presumably return 0 — the return statements
// are on lines missing from this decimated listing; confirm against
// the full source.
8565 int RtApi :: formatBytes(RtAudioFormat format)
8567   if (format == RTAUDIO_SINT16)
8569   else if (format == RTAUDIO_SINT24 || format == RTAUDIO_SINT32 ||
8570            format == RTAUDIO_FLOAT32)
8572   else if (format == RTAUDIO_FLOAT64)
8574   else if (format == RTAUDIO_SINT8)
8577   sprintf(message_,"RtApi: undefined format in formatBytes().");
8578   error(RtError::WARNING);
8583 void RtApi :: convertBuffer( char *outBuffer, char *inBuffer, ConvertInfo &info )
8585 // This function does format conversion, input/output channel compensation, and
8586 // data interleaving/deinterleaving. 24-bit integers are assumed to occupy
8587 // the upper three bytes of a 32-bit integer.
8589 // Clear our device buffer when in/out duplex device channels are different
8590 if ( outBuffer == stream_.deviceBuffer && stream_.mode == DUPLEX &&
8591 stream_.nDeviceChannels[0] != stream_.nDeviceChannels[1] )
8592 memset( outBuffer, 0, stream_.bufferSize * info.outJump * formatBytes( info.outFormat ) );
8595 if (info.outFormat == RTAUDIO_FLOAT64) {
8597 Float64 *out = (Float64 *)outBuffer;
8599 if (info.inFormat == RTAUDIO_SINT8) {
8600 signed char *in = (signed char *)inBuffer;
8601 scale = 1.0 / 128.0;
8602 for (int i=0; i<stream_.bufferSize; i++) {
8603 for (j=0; j<info.channels; j++) {
8604 out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
8605 out[info.outOffset[j]] *= scale;
8608 out += info.outJump;
8611 else if (info.inFormat == RTAUDIO_SINT16) {
8612 Int16 *in = (Int16 *)inBuffer;
8613 scale = 1.0 / 32768.0;
8614 for (int i=0; i<stream_.bufferSize; i++) {
8615 for (j=0; j<info.channels; j++) {
8616 out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
8617 out[info.outOffset[j]] *= scale;
8620 out += info.outJump;
8623 else if (info.inFormat == RTAUDIO_SINT24) {
8624 Int32 *in = (Int32 *)inBuffer;
8625 scale = 1.0 / 2147483648.0;
8626 for (int i=0; i<stream_.bufferSize; i++) {
8627 for (j=0; j<info.channels; j++) {
8628 out[info.outOffset[j]] = (Float64) (in[info.inOffset[j]] & 0xffffff00);
8629 out[info.outOffset[j]] *= scale;
8632 out += info.outJump;
8635 else if (info.inFormat == RTAUDIO_SINT32) {
8636 Int32 *in = (Int32 *)inBuffer;
8637 scale = 1.0 / 2147483648.0;
8638 for (int i=0; i<stream_.bufferSize; i++) {
8639 for (j=0; j<info.channels; j++) {
8640 out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
8641 out[info.outOffset[j]] *= scale;
8644 out += info.outJump;
8647 else if (info.inFormat == RTAUDIO_FLOAT32) {
8648 Float32 *in = (Float32 *)inBuffer;
8649 for (int i=0; i<stream_.bufferSize; i++) {
8650 for (j=0; j<info.channels; j++) {
8651 out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
8654 out += info.outJump;
8657 else if (info.inFormat == RTAUDIO_FLOAT64) {
8658 // Channel compensation and/or (de)interleaving only.
8659 Float64 *in = (Float64 *)inBuffer;
8660 for (int i=0; i<stream_.bufferSize; i++) {
8661 for (j=0; j<info.channels; j++) {
8662 out[info.outOffset[j]] = in[info.inOffset[j]];
8665 out += info.outJump;
8669 else if (info.outFormat == RTAUDIO_FLOAT32) {
8671 Float32 *out = (Float32 *)outBuffer;
8673 if (info.inFormat == RTAUDIO_SINT8) {
8674 signed char *in = (signed char *)inBuffer;
8675 scale = 1.0 / 128.0;
8676 for (int i=0; i<stream_.bufferSize; i++) {
8677 for (j=0; j<info.channels; j++) {
8678 out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
8679 out[info.outOffset[j]] *= scale;
8682 out += info.outJump;
8685 else if (info.inFormat == RTAUDIO_SINT16) {
8686 Int16 *in = (Int16 *)inBuffer;
8687 scale = 1.0 / 32768.0;
8688 for (int i=0; i<stream_.bufferSize; i++) {
8689 for (j=0; j<info.channels; j++) {
8690 out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
8691 out[info.outOffset[j]] *= scale;
8694 out += info.outJump;
8697 else if (info.inFormat == RTAUDIO_SINT24) {
8698 Int32 *in = (Int32 *)inBuffer;
8699 scale = 1.0 / 2147483648.0;
8700 for (int i=0; i<stream_.bufferSize; i++) {
8701 for (j=0; j<info.channels; j++) {
8702 out[info.outOffset[j]] = (Float32) (in[info.inOffset[j]] & 0xffffff00);
8703 out[info.outOffset[j]] *= scale;
8706 out += info.outJump;
8709 else if (info.inFormat == RTAUDIO_SINT32) {
8710 Int32 *in = (Int32 *)inBuffer;
8711 scale = 1.0 / 2147483648.0;
8712 for (int i=0; i<stream_.bufferSize; i++) {
8713 for (j=0; j<info.channels; j++) {
8714 out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
8715 out[info.outOffset[j]] *= scale;
8718 out += info.outJump;
8721 else if (info.inFormat == RTAUDIO_FLOAT32) {
8722 // Channel compensation and/or (de)interleaving only.
8723 Float32 *in = (Float32 *)inBuffer;
8724 for (int i=0; i<stream_.bufferSize; i++) {
8725 for (j=0; j<info.channels; j++) {
8726 out[info.outOffset[j]] = in[info.inOffset[j]];
8729 out += info.outJump;
8732 else if (info.inFormat == RTAUDIO_FLOAT64) {
8733 Float64 *in = (Float64 *)inBuffer;
8734 for (int i=0; i<stream_.bufferSize; i++) {
8735 for (j=0; j<info.channels; j++) {
8736 out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
8739 out += info.outJump;
8743 else if (info.outFormat == RTAUDIO_SINT32) {
8744 Int32 *out = (Int32 *)outBuffer;
8745 if (info.inFormat == RTAUDIO_SINT8) {
8746 signed char *in = (signed char *)inBuffer;
8747 for (int i=0; i<stream_.bufferSize; i++) {
8748 for (j=0; j<info.channels; j++) {
8749 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
8750 out[info.outOffset[j]] <<= 24;
8753 out += info.outJump;
8756 else if (info.inFormat == RTAUDIO_SINT16) {
8757 Int16 *in = (Int16 *)inBuffer;
8758 for (int i=0; i<stream_.bufferSize; i++) {
8759 for (j=0; j<info.channels; j++) {
8760 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
8761 out[info.outOffset[j]] <<= 16;
8764 out += info.outJump;
8767 else if (info.inFormat == RTAUDIO_SINT24) {
8768 Int32 *in = (Int32 *)inBuffer;
8769 for (int i=0; i<stream_.bufferSize; i++) {
8770 for (j=0; j<info.channels; j++) {
8771 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
8774 out += info.outJump;
8777 else if (info.inFormat == RTAUDIO_SINT32) {
8778 // Channel compensation and/or (de)interleaving only.
8779 Int32 *in = (Int32 *)inBuffer;
8780 for (int i=0; i<stream_.bufferSize; i++) {
8781 for (j=0; j<info.channels; j++) {
8782 out[info.outOffset[j]] = in[info.inOffset[j]];
8785 out += info.outJump;
8788 else if (info.inFormat == RTAUDIO_FLOAT32) {
8789 Float32 *in = (Float32 *)inBuffer;
8790 for (int i=0; i<stream_.bufferSize; i++) {
8791 for (j=0; j<info.channels; j++) {
8792 out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 2147483647.0);
8795 out += info.outJump;
8798 else if (info.inFormat == RTAUDIO_FLOAT64) {
8799 Float64 *in = (Float64 *)inBuffer;
8800 for (int i=0; i<stream_.bufferSize; i++) {
8801 for (j=0; j<info.channels; j++) {
8802 out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 2147483647.0);
8805 out += info.outJump;
8809 else if (info.outFormat == RTAUDIO_SINT24) {
8810 Int32 *out = (Int32 *)outBuffer;
8811 if (info.inFormat == RTAUDIO_SINT8) {
8812 signed char *in = (signed char *)inBuffer;
8813 for (int i=0; i<stream_.bufferSize; i++) {
8814 for (j=0; j<info.channels; j++) {
8815 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
8816 out[info.outOffset[j]] <<= 24;
8819 out += info.outJump;
8822 else if (info.inFormat == RTAUDIO_SINT16) {
8823 Int16 *in = (Int16 *)inBuffer;
8824 for (int i=0; i<stream_.bufferSize; i++) {
8825 for (j=0; j<info.channels; j++) {
8826 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
8827 out[info.outOffset[j]] <<= 16;
8830 out += info.outJump;
8833 else if (info.inFormat == RTAUDIO_SINT24) {
8834 // Channel compensation and/or (de)interleaving only.
8835 Int32 *in = (Int32 *)inBuffer;
8836 for (int i=0; i<stream_.bufferSize; i++) {
8837 for (j=0; j<info.channels; j++) {
8838 out[info.outOffset[j]] = in[info.inOffset[j]];
8841 out += info.outJump;
8844 else if (info.inFormat == RTAUDIO_SINT32) {
8845 Int32 *in = (Int32 *)inBuffer;
8846 for (int i=0; i<stream_.bufferSize; i++) {
8847 for (j=0; j<info.channels; j++) {
8848 out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] & 0xffffff00);
8851 out += info.outJump;
8854 else if (info.inFormat == RTAUDIO_FLOAT32) {
8855 Float32 *in = (Float32 *)inBuffer;
8856 for (int i=0; i<stream_.bufferSize; i++) {
8857 for (j=0; j<info.channels; j++) {
8858 out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 2147483647.0);
8861 out += info.outJump;
8864 else if (info.inFormat == RTAUDIO_FLOAT64) {
8865 Float64 *in = (Float64 *)inBuffer;
8866 for (int i=0; i<stream_.bufferSize; i++) {
8867 for (j=0; j<info.channels; j++) {
8868 out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 2147483647.0);
8871 out += info.outJump;
8875 else if (info.outFormat == RTAUDIO_SINT16) {
8876 Int16 *out = (Int16 *)outBuffer;
8877 if (info.inFormat == RTAUDIO_SINT8) {
8878 signed char *in = (signed char *)inBuffer;
8879 for (int i=0; i<stream_.bufferSize; i++) {
8880 for (j=0; j<info.channels; j++) {
8881 out[info.outOffset[j]] = (Int16) in[info.inOffset[j]];
8882 out[info.outOffset[j]] <<= 8;
8885 out += info.outJump;
8888 else if (info.inFormat == RTAUDIO_SINT16) {
8889 // Channel compensation and/or (de)interleaving only.
8890 Int16 *in = (Int16 *)inBuffer;
8891 for (int i=0; i<stream_.bufferSize; i++) {
8892 for (j=0; j<info.channels; j++) {
8893 out[info.outOffset[j]] = in[info.inOffset[j]];
8896 out += info.outJump;
8899 else if (info.inFormat == RTAUDIO_SINT24) {
8900 Int32 *in = (Int32 *)inBuffer;
8901 for (int i=0; i<stream_.bufferSize; i++) {
8902 for (j=0; j<info.channels; j++) {
8903 out[info.outOffset[j]] = (Int16) ((in[info.inOffset[j]] >> 16) & 0x0000ffff);
8906 out += info.outJump;
8909 else if (info.inFormat == RTAUDIO_SINT32) {
8910 Int32 *in = (Int32 *)inBuffer;
8911 for (int i=0; i<stream_.bufferSize; i++) {
8912 for (j=0; j<info.channels; j++) {
8913 out[info.outOffset[j]] = (Int16) ((in[info.inOffset[j]] >> 16) & 0x0000ffff);
8916 out += info.outJump;
8919 else if (info.inFormat == RTAUDIO_FLOAT32) {
8920 Float32 *in = (Float32 *)inBuffer;
8921 for (int i=0; i<stream_.bufferSize; i++) {
8922 for (j=0; j<info.channels; j++) {
8923 out[info.outOffset[j]] = (Int16) (in[info.inOffset[j]] * 32767.0);
8926 out += info.outJump;
8929 else if (info.inFormat == RTAUDIO_FLOAT64) {
8930 Float64 *in = (Float64 *)inBuffer;
8931 for (int i=0; i<stream_.bufferSize; i++) {
8932 for (j=0; j<info.channels; j++) {
8933 out[info.outOffset[j]] = (Int16) (in[info.inOffset[j]] * 32767.0);
8936 out += info.outJump;
8940 else if (info.outFormat == RTAUDIO_SINT8) {
8941 signed char *out = (signed char *)outBuffer;
8942 if (info.inFormat == RTAUDIO_SINT8) {
8943 // Channel compensation and/or (de)interleaving only.
8944 signed char *in = (signed char *)inBuffer;
8945 for (int i=0; i<stream_.bufferSize; i++) {
8946 for (j=0; j<info.channels; j++) {
8947 out[info.outOffset[j]] = in[info.inOffset[j]];
8950 out += info.outJump;
8953 if (info.inFormat == RTAUDIO_SINT16) {
8954 Int16 *in = (Int16 *)inBuffer;
8955 for (int i=0; i<stream_.bufferSize; i++) {
8956 for (j=0; j<info.channels; j++) {
8957 out[info.outOffset[j]] = (signed char) ((in[info.inOffset[j]] >> 8) & 0x00ff);
8960 out += info.outJump;
8963 else if (info.inFormat == RTAUDIO_SINT24) {
8964 Int32 *in = (Int32 *)inBuffer;
8965 for (int i=0; i<stream_.bufferSize; i++) {
8966 for (j=0; j<info.channels; j++) {
8967 out[info.outOffset[j]] = (signed char) ((in[info.inOffset[j]] >> 24) & 0x000000ff);
8970 out += info.outJump;
8973 else if (info.inFormat == RTAUDIO_SINT32) {
8974 Int32 *in = (Int32 *)inBuffer;
8975 for (int i=0; i<stream_.bufferSize; i++) {
8976 for (j=0; j<info.channels; j++) {
8977 out[info.outOffset[j]] = (signed char) ((in[info.inOffset[j]] >> 24) & 0x000000ff);
8980 out += info.outJump;
8983 else if (info.inFormat == RTAUDIO_FLOAT32) {
8984 Float32 *in = (Float32 *)inBuffer;
8985 for (int i=0; i<stream_.bufferSize; i++) {
8986 for (j=0; j<info.channels; j++) {
8987 out[info.outOffset[j]] = (signed char) (in[info.inOffset[j]] * 127.0);
8990 out += info.outJump;
8993 else if (info.inFormat == RTAUDIO_FLOAT64) {
8994 Float64 *in = (Float64 *)inBuffer;
8995 for (int i=0; i<stream_.bufferSize; i++) {
8996 for (j=0; j<info.channels; j++) {
8997 out[info.outOffset[j]] = (signed char) (in[info.inOffset[j]] * 127.0);
9000 out += info.outJump;
9006 void RtApi :: byteSwapBuffer( char *buffer, int samples, RtAudioFormat format )
9012 if (format == RTAUDIO_SINT16) {
9013 for (int i=0; i<samples; i++) {
9014 // Swap 1st and 2nd bytes.
9019 // Increment 2 bytes.
9023 else if (format == RTAUDIO_SINT24 ||
9024 format == RTAUDIO_SINT32 ||
9025 format == RTAUDIO_FLOAT32) {
9026 for (int i=0; i<samples; i++) {
9027 // Swap 1st and 4th bytes.
9032 // Swap 2nd and 3rd bytes.
9038 // Increment 4 bytes.
9042 else if (format == RTAUDIO_FLOAT64) {
9043 for (int i=0; i<samples; i++) {
9044 // Swap 1st and 8th bytes
9049 // Swap 2nd and 7th bytes
9055 // Swap 3rd and 6th bytes
9061 // Swap 4th and 5th bytes
9067 // Increment 8 bytes.