1 /************************************************************************/
3 \brief Realtime audio i/o C++ classes.
5 RtAudio provides a common API (Application Programming Interface)
6 for realtime audio input/output across Linux (native ALSA, Jack,
7 and OSS), Macintosh OS X (CoreAudio and Jack), and Windows
8 (DirectSound and ASIO) operating systems.
10 RtAudio WWW site: http://www.music.mcgill.ca/~gary/rtaudio/
12 RtAudio: realtime audio i/o C++ classes
13 Copyright (c) 2001-2009 Gary P. Scavone
15 Permission is hereby granted, free of charge, to any person
16 obtaining a copy of this software and associated documentation files
17 (the "Software"), to deal in the Software without restriction,
18 including without limitation the rights to use, copy, modify, merge,
19 publish, distribute, sublicense, and/or sell copies of the Software,
20 and to permit persons to whom the Software is furnished to do so,
21 subject to the following conditions:
23 The above copyright notice and this permission notice shall be
24 included in all copies or substantial portions of the Software.
26 Any person wishing to distribute modifications to the Software is
27 asked to send the modifications to the original developer so that
28 they can be incorporated into the canonical version. This is,
29 however, not a binding provision of this license.
31 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
32 EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
33 MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
34 IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
35 ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
36 CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
37 WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
39 /************************************************************************/
41 // RtAudio: Version 4.0.5
46 // Static variable definitions.
47 const unsigned int RtApi::MAX_SAMPLE_RATES = 14;
48 const unsigned int RtApi::SAMPLE_RATES[] = {
49 4000, 5512, 8000, 9600, 11025, 16000, 22050,
50 32000, 44100, 48000, 88200, 96000, 176400, 192000
// Platform mutex abstraction: Windows critical sections, pthreads on
// Unix-like APIs, and no-op dummies when no threading API is compiled in.
#if defined(__WINDOWS_DS__) || defined(__WINDOWS_ASIO__)
  #define MUTEX_INITIALIZE(A) InitializeCriticalSection(A)
  #define MUTEX_DESTROY(A)    DeleteCriticalSection(A)
  #define MUTEX_LOCK(A)       EnterCriticalSection(A)
  #define MUTEX_UNLOCK(A)     LeaveCriticalSection(A)
#elif defined(__LINUX_ALSA__) || defined(__UNIX_JACK__) || defined(__LINUX_OSS__) || defined(__MACOSX_CORE__)
  // pthread API
  #define MUTEX_INITIALIZE(A) pthread_mutex_init(A, NULL)
  #define MUTEX_DESTROY(A)    pthread_mutex_destroy(A)
  #define MUTEX_LOCK(A)       pthread_mutex_lock(A)
  #define MUTEX_UNLOCK(A)     pthread_mutex_unlock(A)
#else
  #define MUTEX_INITIALIZE(A) abs(*A) // dummy definitions
  #define MUTEX_DESTROY(A)    abs(*A) // dummy definitions
#endif
69 // *************************************************** //
71 // RtAudio definitions.
73 // *************************************************** //
75 void RtAudio :: getCompiledApi( std::vector<RtAudio::Api> &apis ) throw()
79 // The order here will control the order of RtAudio's API search in
81 #if defined(__UNIX_JACK__)
82 apis.push_back( UNIX_JACK );
84 #if defined(__LINUX_ALSA__)
85 apis.push_back( LINUX_ALSA );
87 #if defined(__LINUX_OSS__)
88 apis.push_back( LINUX_OSS );
90 #if defined(__WINDOWS_ASIO__)
91 apis.push_back( WINDOWS_ASIO );
93 #if defined(__WINDOWS_DS__)
94 apis.push_back( WINDOWS_DS );
96 #if defined(__MACOSX_CORE__)
97 apis.push_back( MACOSX_CORE );
99 #if defined(__RTAUDIO_DUMMY__)
100 apis.push_back( RTAUDIO_DUMMY );
104 void RtAudio :: openRtApi( RtAudio::Api api )
106 #if defined(__UNIX_JACK__)
107 if ( api == UNIX_JACK )
108 rtapi_ = new RtApiJack();
110 #if defined(__LINUX_ALSA__)
111 if ( api == LINUX_ALSA )
112 rtapi_ = new RtApiAlsa();
114 #if defined(__LINUX_OSS__)
115 if ( api == LINUX_OSS )
116 rtapi_ = new RtApiOss();
118 #if defined(__WINDOWS_ASIO__)
119 if ( api == WINDOWS_ASIO )
120 rtapi_ = new RtApiAsio();
122 #if defined(__WINDOWS_DS__)
123 if ( api == WINDOWS_DS )
124 rtapi_ = new RtApiDs();
126 #if defined(__MACOSX_CORE__)
127 if ( api == MACOSX_CORE )
128 rtapi_ = new RtApiCore();
130 #if defined(__RTAUDIO_DUMMY__)
131 if ( api == RTAUDIO_DUMMY )
132 rtapi_ = new RtApiDummy();
136 RtAudio :: RtAudio( RtAudio::Api api ) throw()
140 if ( api != UNSPECIFIED ) {
141 // Attempt to open the specified API.
143 if ( rtapi_ ) return;
145 // No compiled support for specified API value. Issue a debug
146 // warning and continue as if no API was specified.
147 std::cerr << "\nRtAudio: no compiled support for specified API argument!\n" << std::endl;
150 // Iterate through the compiled APIs and return as soon as we find
151 // one with at least one device or we reach the end of the list.
152 std::vector< RtAudio::Api > apis;
153 getCompiledApi( apis );
154 for ( unsigned int i=0; i<apis.size(); i++ ) {
155 openRtApi( apis[i] );
156 if ( rtapi_->getDeviceCount() ) break;
159 if ( rtapi_ ) return;
161 // It should not be possible to get here because the preprocessor
162 // definition __RTAUDIO_DUMMY__ is automatically defined if no
163 // API-specific definitions are passed to the compiler. But just in
164 // case something weird happens, we'll print out an error message.
165 std::cerr << "\nRtAudio: no compiled API support found ... critical error!!\n\n";
168 RtAudio :: ~RtAudio() throw()
173 void RtAudio :: openStream( RtAudio::StreamParameters *outputParameters,
174 RtAudio::StreamParameters *inputParameters,
175 RtAudioFormat format, unsigned int sampleRate,
176 unsigned int *bufferFrames,
177 RtAudioCallback callback, void *userData,
178 RtAudio::StreamOptions *options )
180 return rtapi_->openStream( outputParameters, inputParameters, format,
181 sampleRate, bufferFrames, callback,
185 // *************************************************** //
187 // Public RtApi definitions (see end of file for
188 // private or protected utility functions).
190 // *************************************************** //
194 stream_.state = STREAM_CLOSED;
195 stream_.mode = UNINITIALIZED;
196 stream_.apiHandle = 0;
197 stream_.userBuffer[0] = 0;
198 stream_.userBuffer[1] = 0;
199 MUTEX_INITIALIZE( &stream_.mutex );
200 showWarnings_ = true;
205 MUTEX_DESTROY( &stream_.mutex );
208 void RtApi :: openStream( RtAudio::StreamParameters *oParams,
209 RtAudio::StreamParameters *iParams,
210 RtAudioFormat format, unsigned int sampleRate,
211 unsigned int *bufferFrames,
212 RtAudioCallback callback, void *userData,
213 RtAudio::StreamOptions *options )
215 if ( stream_.state != STREAM_CLOSED ) {
216 errorText_ = "RtApi::openStream: a stream is already open!";
217 error( RtError::INVALID_USE );
220 if ( oParams && oParams->nChannels < 1 ) {
221 errorText_ = "RtApi::openStream: a non-NULL output StreamParameters structure cannot have an nChannels value less than one.";
222 error( RtError::INVALID_USE );
225 if ( iParams && iParams->nChannels < 1 ) {
226 errorText_ = "RtApi::openStream: a non-NULL input StreamParameters structure cannot have an nChannels value less than one.";
227 error( RtError::INVALID_USE );
230 if ( oParams == NULL && iParams == NULL ) {
231 errorText_ = "RtApi::openStream: input and output StreamParameters structures are both NULL!";
232 error( RtError::INVALID_USE );
235 if ( formatBytes(format) == 0 ) {
236 errorText_ = "RtApi::openStream: 'format' parameter value is undefined.";
237 error( RtError::INVALID_USE );
240 unsigned int nDevices = getDeviceCount();
241 unsigned int oChannels = 0;
243 oChannels = oParams->nChannels;
244 if ( oParams->deviceId >= nDevices ) {
245 errorText_ = "RtApi::openStream: output device parameter value is invalid.";
246 error( RtError::INVALID_USE );
250 unsigned int iChannels = 0;
252 iChannels = iParams->nChannels;
253 if ( iParams->deviceId >= nDevices ) {
254 errorText_ = "RtApi::openStream: input device parameter value is invalid.";
255 error( RtError::INVALID_USE );
262 if ( oChannels > 0 ) {
264 result = probeDeviceOpen( oParams->deviceId, OUTPUT, oChannels, oParams->firstChannel,
265 sampleRate, format, bufferFrames, options );
266 if ( result == false ) error( RtError::SYSTEM_ERROR );
269 if ( iChannels > 0 ) {
271 result = probeDeviceOpen( iParams->deviceId, INPUT, iChannels, iParams->firstChannel,
272 sampleRate, format, bufferFrames, options );
273 if ( result == false ) {
274 if ( oChannels > 0 ) closeStream();
275 error( RtError::SYSTEM_ERROR );
279 stream_.callbackInfo.callback = (void *) callback;
280 stream_.callbackInfo.userData = userData;
282 if ( options ) options->numberOfBuffers = stream_.nBuffers;
283 stream_.state = STREAM_STOPPED;
286 unsigned int RtApi :: getDefaultInputDevice( void )
288 // Should be implemented in subclasses if possible.
292 unsigned int RtApi :: getDefaultOutputDevice( void )
294 // Should be implemented in subclasses if possible.
298 void RtApi :: closeStream( void )
300 // MUST be implemented in subclasses!
304 bool RtApi :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
305 unsigned int firstChannel, unsigned int sampleRate,
306 RtAudioFormat format, unsigned int *bufferSize,
307 RtAudio::StreamOptions *options )
309 // MUST be implemented in subclasses!
313 void RtApi :: tickStreamTime( void )
315 // Subclasses that do not provide their own implementation of
316 // getStreamTime should call this function once per buffer I/O to
317 // provide basic stream time support.
319 stream_.streamTime += ( stream_.bufferSize * 1.0 / stream_.sampleRate );
321 #if defined( HAVE_GETTIMEOFDAY )
322 gettimeofday( &stream_.lastTickTimestamp, NULL );
326 long RtApi :: getStreamLatency( void )
330 long totalLatency = 0;
331 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX )
332 totalLatency = stream_.latency[0];
333 if ( stream_.mode == INPUT || stream_.mode == DUPLEX )
334 totalLatency += stream_.latency[1];
339 double RtApi :: getStreamTime( void )
343 #if defined( HAVE_GETTIMEOFDAY )
344 // Return a very accurate estimate of the stream time by
345 // adding in the elapsed time since the last tick.
349 if ( stream_.state != STREAM_RUNNING || stream_.streamTime == 0.0 )
350 return stream_.streamTime;
352 gettimeofday( &now, NULL );
353 then = stream_.lastTickTimestamp;
354 return stream_.streamTime +
355 ((now.tv_sec + 0.000001 * now.tv_usec) -
356 (then.tv_sec + 0.000001 * then.tv_usec));
358 return stream_.streamTime;
362 unsigned int RtApi :: getStreamSampleRate( void )
366 return stream_.sampleRate;
370 // *************************************************** //
372 // OS/API-specific methods.
374 // *************************************************** //
376 #if defined(__MACOSX_CORE__)
378 // The OS X CoreAudio API is designed to use a separate callback
379 // procedure for each of its audio devices. A single RtAudio duplex
380 // stream using two different devices is supported here, though it
381 // cannot be guaranteed to always behave correctly because we cannot
382 // synchronize these two callbacks.
384 // A property listener is installed for over/underrun information.
385 // However, no functionality is currently provided to allow property
386 // listeners to trigger user handlers because it is unclear what could
387 // be done if a critical stream parameter (buffer size, sample rate,
388 // device disconnect) notification arrived. The listeners entail
389 // quite a bit of extra code and most likely, a user program wouldn't
390 // be prepared for the result anyway. However, we do provide a flag
391 // to the client callback function to inform of an over/underrun.
393 // The mechanism for querying and setting system parameters was
394 // updated (and perhaps simplified) in OS-X version 10.4. However,
395 // since 10.4 support is not necessarily available to all users, I've
396 // decided not to update the respective code at this time. Perhaps
397 // this will happen when Apple makes 10.4 free for everyone. :-)
399 // A structure to hold various information related to the CoreAudio API
402 AudioDeviceID id[2]; // device ids
403 AudioDeviceIOProcID procId[2];
404 UInt32 iStream[2]; // device stream index (or first if using multiple)
405 UInt32 nStreams[2]; // number of streams to use
408 pthread_cond_t condition;
409 int drainCounter; // Tracks callback counts when draining
410 bool internalDrain; // Indicates if stop is initiated from callback or not.
413 :deviceBuffer(0), drainCounter(0), internalDrain(false) { nStreams[0] = 1; nStreams[1] = 1; id[0] = 0; id[1] = 0; xrun[0] = false; xrun[1] = false; }
416 RtApiCore :: RtApiCore()
418 // Nothing to do here.
421 RtApiCore :: ~RtApiCore()
423 // The subclass destructor gets called before the base class
424 // destructor, so close an existing stream before deallocating
425 // apiDeviceId memory.
426 if ( stream_.state != STREAM_CLOSED ) closeStream();
429 unsigned int RtApiCore :: getDeviceCount( void )
431 // Find out how many audio devices there are, if any.
433 OSStatus result = AudioHardwareGetPropertyInfo( kAudioHardwarePropertyDevices, &dataSize, NULL );
434 if ( result != noErr ) {
435 errorText_ = "RtApiCore::getDeviceCount: OS-X error getting device info!";
436 error( RtError::WARNING );
440 return dataSize / sizeof( AudioDeviceID );
443 unsigned int RtApiCore :: getDefaultInputDevice( void )
445 unsigned int nDevices = getDeviceCount();
446 if ( nDevices <= 1 ) return 0;
449 UInt32 dataSize = sizeof( AudioDeviceID );
450 OSStatus result = AudioHardwareGetProperty( kAudioHardwarePropertyDefaultInputDevice,
453 if ( result != noErr ) {
454 errorText_ = "RtApiCore::getDefaultInputDevice: OS-X system error getting device.";
455 error( RtError::WARNING );
459 dataSize *= nDevices;
460 AudioDeviceID deviceList[ nDevices ];
461 result = AudioHardwareGetProperty( kAudioHardwarePropertyDevices, &dataSize, (void *) &deviceList );
462 if ( result != noErr ) {
463 errorText_ = "RtApiCore::getDefaultInputDevice: OS-X system error getting device IDs.";
464 error( RtError::WARNING );
468 for ( unsigned int i=0; i<nDevices; i++ )
469 if ( id == deviceList[i] ) return i;
471 errorText_ = "RtApiCore::getDefaultInputDevice: No default device found!";
472 error( RtError::WARNING );
476 unsigned int RtApiCore :: getDefaultOutputDevice( void )
478 unsigned int nDevices = getDeviceCount();
479 if ( nDevices <= 1 ) return 0;
482 UInt32 dataSize = sizeof( AudioDeviceID );
483 OSStatus result = AudioHardwareGetProperty( kAudioHardwarePropertyDefaultOutputDevice,
486 if ( result != noErr ) {
487 errorText_ = "RtApiCore::getDefaultOutputDevice: OS-X system error getting device.";
488 error( RtError::WARNING );
492 dataSize *= nDevices;
493 AudioDeviceID deviceList[ nDevices ];
494 result = AudioHardwareGetProperty( kAudioHardwarePropertyDevices, &dataSize, (void *) &deviceList );
495 if ( result != noErr ) {
496 errorText_ = "RtApiCore::getDefaultOutputDevice: OS-X system error getting device IDs.";
497 error( RtError::WARNING );
501 for ( unsigned int i=0; i<nDevices; i++ )
502 if ( id == deviceList[i] ) return i;
504 errorText_ = "RtApiCore::getDefaultOutputDevice: No default device found!";
505 error( RtError::WARNING );
509 RtAudio::DeviceInfo RtApiCore :: getDeviceInfo( unsigned int device )
511 RtAudio::DeviceInfo info;
515 unsigned int nDevices = getDeviceCount();
516 if ( nDevices == 0 ) {
517 errorText_ = "RtApiCore::getDeviceInfo: no devices found!";
518 error( RtError::INVALID_USE );
521 if ( device >= nDevices ) {
522 errorText_ = "RtApiCore::getDeviceInfo: device ID is invalid!";
523 error( RtError::INVALID_USE );
526 AudioDeviceID deviceList[ nDevices ];
527 UInt32 dataSize = sizeof( AudioDeviceID ) * nDevices;
528 OSStatus result = AudioHardwareGetProperty( kAudioHardwarePropertyDevices, &dataSize, (void *) &deviceList );
529 if ( result != noErr ) {
530 errorText_ = "RtApiCore::getDeviceInfo: OS-X system error getting device IDs.";
531 error( RtError::WARNING );
535 AudioDeviceID id = deviceList[ device ];
537 // Get the device name.
541 result = AudioDeviceGetProperty( id, 0, false,
542 kAudioDevicePropertyDeviceManufacturer,
545 if ( result != noErr ) {
546 errorStream_ << "RtApiCore::probeDeviceInfo: system error (" << getErrorCode( result ) << ") getting device manufacturer.";
547 errorText_ = errorStream_.str();
548 error( RtError::WARNING );
551 info.name.append( (const char *)name, strlen(name) );
552 info.name.append( ": " );
555 result = AudioDeviceGetProperty( id, 0, false,
556 kAudioDevicePropertyDeviceName,
558 if ( result != noErr ) {
559 errorStream_ << "RtApiCore::probeDeviceInfo: system error (" << getErrorCode( result ) << ") getting device name.";
560 errorText_ = errorStream_.str();
561 error( RtError::WARNING );
564 info.name.append( (const char *)name, strlen(name) );
566 // Get the output stream "configuration".
567 AudioBufferList *bufferList = nil;
568 result = AudioDeviceGetPropertyInfo( id, 0, false,
569 kAudioDevicePropertyStreamConfiguration,
571 if (result != noErr || dataSize == 0) {
572 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting output stream configuration info for device (" << device << ").";
573 errorText_ = errorStream_.str();
574 error( RtError::WARNING );
578 // Allocate the AudioBufferList.
579 bufferList = (AudioBufferList *) malloc( dataSize );
580 if ( bufferList == NULL ) {
581 errorText_ = "RtApiCore::getDeviceInfo: memory error allocating output AudioBufferList.";
582 error( RtError::WARNING );
586 result = AudioDeviceGetProperty( id, 0, false,
587 kAudioDevicePropertyStreamConfiguration,
588 &dataSize, bufferList );
589 if ( result != noErr ) {
591 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting output stream configuration for device (" << device << ").";
592 errorText_ = errorStream_.str();
593 error( RtError::WARNING );
597 // Get output channel information.
598 unsigned int i, nStreams = bufferList->mNumberBuffers;
599 for ( i=0; i<nStreams; i++ )
600 info.outputChannels += bufferList->mBuffers[i].mNumberChannels;
603 // Get the input stream "configuration".
604 result = AudioDeviceGetPropertyInfo( id, 0, true,
605 kAudioDevicePropertyStreamConfiguration,
607 if (result != noErr || dataSize == 0) {
608 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting input stream configuration info for device (" << device << ").";
609 errorText_ = errorStream_.str();
610 error( RtError::WARNING );
614 // Allocate the AudioBufferList.
615 bufferList = (AudioBufferList *) malloc( dataSize );
616 if ( bufferList == NULL ) {
617 errorText_ = "RtApiCore::getDeviceInfo: memory error allocating input AudioBufferList.";
618 error( RtError::WARNING );
622 result = AudioDeviceGetProperty( id, 0, true,
623 kAudioDevicePropertyStreamConfiguration,
624 &dataSize, bufferList );
625 if ( result != noErr ) {
627 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting input stream configuration for device (" << device << ").";
628 errorText_ = errorStream_.str();
629 error( RtError::WARNING );
633 // Get input channel information.
634 nStreams = bufferList->mNumberBuffers;
635 for ( i=0; i<nStreams; i++ )
636 info.inputChannels += bufferList->mBuffers[i].mNumberChannels;
639 // If device opens for both playback and capture, we determine the channels.
640 if ( info.outputChannels > 0 && info.inputChannels > 0 )
641 info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
643 // Probe the device sample rates.
644 bool isInput = false;
645 if ( info.outputChannels == 0 ) isInput = true;
647 // Determine the supported sample rates.
648 result = AudioDeviceGetPropertyInfo( id, 0, isInput,
649 kAudioDevicePropertyAvailableNominalSampleRates,
652 if ( result != kAudioHardwareNoError || dataSize == 0 ) {
653 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting sample rate info.";
654 errorText_ = errorStream_.str();
655 error( RtError::WARNING );
659 UInt32 nRanges = dataSize / sizeof( AudioValueRange );
660 AudioValueRange rangeList[ nRanges ];
661 result = AudioDeviceGetProperty( id, 0, isInput,
662 kAudioDevicePropertyAvailableNominalSampleRates,
663 &dataSize, &rangeList );
665 if ( result != kAudioHardwareNoError ) {
666 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting sample rates.";
667 errorText_ = errorStream_.str();
668 error( RtError::WARNING );
672 Float64 minimumRate = 100000000.0, maximumRate = 0.0;
673 for ( UInt32 i=0; i<nRanges; i++ ) {
674 if ( rangeList[i].mMinimum < minimumRate ) minimumRate = rangeList[i].mMinimum;
675 if ( rangeList[i].mMaximum > maximumRate ) maximumRate = rangeList[i].mMaximum;
678 info.sampleRates.clear();
679 for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {
680 if ( SAMPLE_RATES[k] >= (unsigned int) minimumRate && SAMPLE_RATES[k] <= (unsigned int) maximumRate )
681 info.sampleRates.push_back( SAMPLE_RATES[k] );
684 if ( info.sampleRates.size() == 0 ) {
685 errorStream_ << "RtApiCore::probeDeviceInfo: No supported sample rates found for device (" << device << ").";
686 errorText_ = errorStream_.str();
687 error( RtError::WARNING );
691 // CoreAudio always uses 32-bit floating point data for PCM streams.
692 // Thus, any other "physical" formats supported by the device are of
693 // no interest to the client.
694 info.nativeFormats = RTAUDIO_FLOAT32;
696 if ( getDefaultOutputDevice() == device )
697 info.isDefaultOutput = true;
698 if ( getDefaultInputDevice() == device )
699 info.isDefaultInput = true;
705 OSStatus callbackHandler( AudioDeviceID inDevice,
706 const AudioTimeStamp* inNow,
707 const AudioBufferList* inInputData,
708 const AudioTimeStamp* inInputTime,
709 AudioBufferList* outOutputData,
710 const AudioTimeStamp* inOutputTime,
713 CallbackInfo *info = (CallbackInfo *) infoPointer;
715 RtApiCore *object = (RtApiCore *) info->object;
716 if ( object->callbackEvent( inDevice, inInputData, outOutputData ) == false )
717 return kAudioHardwareUnspecifiedError;
719 return kAudioHardwareNoError;
722 OSStatus deviceListener( AudioDeviceID inDevice,
725 AudioDevicePropertyID propertyID,
726 void* handlePointer )
728 CoreHandle *handle = (CoreHandle *) handlePointer;
729 if ( propertyID == kAudioDeviceProcessorOverload ) {
731 handle->xrun[1] = true;
733 handle->xrun[0] = true;
736 return kAudioHardwareNoError;
739 static bool hasProperty( AudioDeviceID id, UInt32 channel, bool isInput, AudioDevicePropertyID property )
741 OSStatus result = AudioDeviceGetPropertyInfo( id, channel, isInput, property, NULL, NULL );
745 bool RtApiCore :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
746 unsigned int firstChannel, unsigned int sampleRate,
747 RtAudioFormat format, unsigned int *bufferSize,
748 RtAudio::StreamOptions *options )
751 unsigned int nDevices = getDeviceCount();
752 if ( nDevices == 0 ) {
753 // This should not happen because a check is made before this function is called.
754 errorText_ = "RtApiCore::probeDeviceOpen: no devices found!";
758 if ( device >= nDevices ) {
759 // This should not happen because a check is made before this function is called.
760 errorText_ = "RtApiCore::probeDeviceOpen: device ID is invalid!";
764 AudioDeviceID deviceList[ nDevices ];
765 UInt32 dataSize = sizeof( AudioDeviceID ) * nDevices;
766 OSStatus result = AudioHardwareGetProperty( kAudioHardwarePropertyDevices, &dataSize, (void *) &deviceList );
767 if ( result != noErr ) {
768 errorText_ = "RtApiCore::probeDeviceOpen: OS-X system error getting device IDs.";
772 AudioDeviceID id = deviceList[ device ];
774 // Setup for stream mode.
775 bool isInput = false;
776 if ( mode == INPUT ) isInput = true;
778 // Set or disable "hog" mode.
779 dataSize = sizeof( UInt32 );
781 if ( options && options->flags & RTAUDIO_HOG_DEVICE ) doHog = 1;
782 result = AudioHardwareSetProperty( kAudioHardwarePropertyHogModeIsAllowed, dataSize, &doHog );
783 if ( result != noErr ) {
784 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting 'hog' state!";
785 errorText_ = errorStream_.str();
789 // Get the stream "configuration".
790 AudioBufferList *bufferList;
791 result = AudioDeviceGetPropertyInfo( id, 0, isInput,
792 kAudioDevicePropertyStreamConfiguration,
794 if (result != noErr || dataSize == 0) {
795 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream configuration info for device (" << device << ").";
796 errorText_ = errorStream_.str();
800 // Allocate the AudioBufferList.
801 bufferList = (AudioBufferList *) malloc( dataSize );
802 if ( bufferList == NULL ) {
803 errorText_ = "RtApiCore::probeDeviceOpen: memory error allocating AudioBufferList.";
807 result = AudioDeviceGetProperty( id, 0, isInput,
808 kAudioDevicePropertyStreamConfiguration,
809 &dataSize, bufferList );
810 if ( result != noErr ) {
812 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream configuration for device (" << device << ").";
813 errorText_ = errorStream_.str();
817 // Search for one or more streams that contain the desired number of
818 // channels. CoreAudio devices can have an arbitrary number of
819 // streams and each stream can have an arbitrary number of channels.
820 // For each stream, a single buffer of interleaved samples is
821 // provided. RtAudio prefers the use of one stream of interleaved
822 // data or multiple consecutive single-channel streams. However, we
823 // now support multiple consecutive multi-channel streams of
824 // interleaved data as well.
825 UInt32 iStream, offsetCounter = firstChannel;
826 UInt32 nStreams = bufferList->mNumberBuffers;
827 bool monoMode = false;
828 bool foundStream = false;
830 // First check that the device supports the requested number of
832 UInt32 deviceChannels = 0;
833 for ( iStream=0; iStream<nStreams; iStream++ )
834 deviceChannels += bufferList->mBuffers[iStream].mNumberChannels;
836 if ( deviceChannels < ( channels + firstChannel ) ) {
838 errorStream_ << "RtApiCore::probeDeviceOpen: the device (" << device << ") does not support the requested channel count.";
839 errorText_ = errorStream_.str();
843 // Look for a single stream meeting our needs.
844 UInt32 firstStream, streamCount = 1, streamChannels = 0, channelOffset = 0;
845 for ( iStream=0; iStream<nStreams; iStream++ ) {
846 streamChannels = bufferList->mBuffers[iStream].mNumberChannels;
847 if ( streamChannels >= channels + offsetCounter ) {
848 firstStream = iStream;
849 channelOffset = offsetCounter;
853 if ( streamChannels > offsetCounter ) break;
854 offsetCounter -= streamChannels;
857 // If we didn't find a single stream above, then we should be able
858 // to meet the channel specification with multiple streams.
859 if ( foundStream == false ) {
861 offsetCounter = firstChannel;
862 for ( iStream=0; iStream<nStreams; iStream++ ) {
863 streamChannels = bufferList->mBuffers[iStream].mNumberChannels;
864 if ( streamChannels > offsetCounter ) break;
865 offsetCounter -= streamChannels;
868 firstStream = iStream;
869 channelOffset = offsetCounter;
870 Int32 channelCounter = channels + offsetCounter - streamChannels;
872 if ( streamChannels > 1 ) monoMode = false;
873 while ( channelCounter > 0 ) {
874 streamChannels = bufferList->mBuffers[++iStream].mNumberChannels;
875 if ( streamChannels > 1 ) monoMode = false;
876 channelCounter -= streamChannels;
883 // Determine the buffer size.
884 AudioValueRange bufferRange;
885 dataSize = sizeof( AudioValueRange );
886 result = AudioDeviceGetProperty( id, 0, isInput,
887 kAudioDevicePropertyBufferFrameSizeRange,
888 &dataSize, &bufferRange );
889 if ( result != noErr ) {
890 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting buffer size range for device (" << device << ").";
891 errorText_ = errorStream_.str();
895 if ( bufferRange.mMinimum > *bufferSize ) *bufferSize = (unsigned long) bufferRange.mMinimum;
896 else if ( bufferRange.mMaximum < *bufferSize ) *bufferSize = (unsigned long) bufferRange.mMaximum;
897 if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) *bufferSize = (unsigned long) bufferRange.mMinimum;
899 // Set the buffer size. For multiple streams, I'm assuming we only
900 // need to make this setting for the master channel.
901 UInt32 theSize = (UInt32) *bufferSize;
902 dataSize = sizeof( UInt32 );
903 result = AudioDeviceSetProperty( id, NULL, 0, isInput,
904 kAudioDevicePropertyBufferFrameSize,
905 dataSize, &theSize );
907 if ( result != noErr ) {
908 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting the buffer size for device (" << device << ").";
909 errorText_ = errorStream_.str();
913 // If attempting to setup a duplex stream, the bufferSize parameter
914 // MUST be the same in both directions!
915 *bufferSize = theSize;
916 if ( stream_.mode == OUTPUT && mode == INPUT && *bufferSize != stream_.bufferSize ) {
917 errorStream_ << "RtApiCore::probeDeviceOpen: system error setting buffer size for duplex stream on device (" << device << ").";
918 errorText_ = errorStream_.str();
922 stream_.bufferSize = *bufferSize;
923 stream_.nBuffers = 1;
925 // Get the stream ID(s) so we can set the stream format. We'll have
926 // to do this for each stream.
927 AudioStreamID streamIDs[ nStreams ];
928 dataSize = nStreams * sizeof( AudioStreamID );
929 result = AudioDeviceGetProperty( id, 0, isInput,
930 kAudioDevicePropertyStreams,
931 &dataSize, &streamIDs );
932 if ( result != noErr ) {
933 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream ID(s) for device (" << device << ").";
934 errorText_ = errorStream_.str();
// NOTE(review): this is the TAIL of RtApiCore::probeDeviceOpen(); the
// function's opening (device lookup, channel/stream selection) precedes
// this chunk and is not visible here.
938 // Now set the stream format. Also, check the physical format of the
939 // device and change that if necessary.
940 AudioStreamBasicDescription description;
941 dataSize = sizeof( AudioStreamBasicDescription );
// For each CoreAudio stream used by this device, force the virtual
// format to linear PCM at the requested sample rate.
944 for ( UInt32 i=0; i<streamCount; i++ ) {
946 result = AudioStreamGetProperty( streamIDs[firstStream+i], 0,
947 kAudioStreamPropertyVirtualFormat,
948 &dataSize, &description );
950 if ( result != noErr ) {
951 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream format for device (" << device << ").";
952 errorText_ = errorStream_.str();
956 // Set the sample rate and data format id. However, only make the
957 // change if the sample rate is not within 1.0 of the desired
958 // rate and the format is not linear pcm.
959 updateFormat = false;
// Assumes the branches below also set updateFormat = true; those lines
// are not visible in this view -- confirm against the full source.
960 if ( fabs( description.mSampleRate - (double)sampleRate ) > 1.0 ) {
961 description.mSampleRate = (double) sampleRate;
965 if ( description.mFormatID != kAudioFormatLinearPCM ) {
966 description.mFormatID = kAudioFormatLinearPCM;
970 if ( updateFormat ) {
971 result = AudioStreamSetProperty( streamIDs[firstStream+i], NULL, 0,
972 kAudioStreamPropertyVirtualFormat,
973 dataSize, &description );
974 if ( result != noErr ) {
975 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting sample rate or data format for device (" << device << ").";
976 errorText_ = errorStream_.str();
981 // Now check the physical format.
982 result = AudioStreamGetProperty( streamIDs[firstStream+i], 0,
983 kAudioStreamPropertyPhysicalFormat,
984 &dataSize, &description );
985 if ( result != noErr ) {
986 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream physical format for device (" << device << ").";
987 errorText_ = errorStream_.str();
// If the hardware format is not at least 24-bit linear PCM, probe for
// the best physical format the device will accept, highest depth first.
991 if ( description.mFormatID != kAudioFormatLinearPCM || description.mBitsPerChannel < 24 ) {
992 description.mFormatID = kAudioFormatLinearPCM;
993 AudioStreamBasicDescription testDescription = description;
994 unsigned long formatFlags;
996 // We'll try higher bit rates first and then work our way down.
997 testDescription.mBitsPerChannel = 32;
// NOTE(review): '&' binds tighter than '|', so this evaluates as
// mFormatFlags | (IsFloat & ~IsSignedInteger), i.e. the signed-integer
// flag is NOT cleared.  Later RtAudio releases parenthesize this as
// (description.mFormatFlags | kLinearPCMFormatFlagIsFloat) &
// ~kLinearPCMFormatFlagIsSignedInteger -- likely a real bug here.
998 formatFlags = description.mFormatFlags | kLinearPCMFormatFlagIsFloat & ~kLinearPCMFormatFlagIsSignedInteger;
999 testDescription.mFormatFlags = formatFlags;
1000 result = AudioStreamSetProperty( streamIDs[firstStream+i], NULL, 0, kAudioStreamPropertyPhysicalFormat, dataSize, &testDescription );
1001 if ( result == noErr ) continue;
// 32-bit float failed; try 32-bit signed integer.
1003 testDescription = description;
1004 testDescription.mBitsPerChannel = 32;
1005 formatFlags = (description.mFormatFlags | kLinearPCMFormatFlagIsSignedInteger) & ~kLinearPCMFormatFlagIsFloat;
1006 testDescription.mFormatFlags = formatFlags;
1007 result = AudioStreamSetProperty( streamIDs[firstStream+i], NULL, 0, kAudioStreamPropertyPhysicalFormat, dataSize, &testDescription );
1008 if ( result == noErr ) continue;
// Fall back through 24-, 16- and 8-bit signed integer (formatFlags
// still holds the signed-integer flags from the attempt above).
1010 testDescription = description;
1011 testDescription.mBitsPerChannel = 24;
1012 testDescription.mFormatFlags = formatFlags;
1013 result = AudioStreamSetProperty( streamIDs[firstStream+i], NULL, 0, kAudioStreamPropertyPhysicalFormat, dataSize, &testDescription );
1014 if ( result == noErr ) continue;
1016 testDescription = description;
1017 testDescription.mBitsPerChannel = 16;
1018 testDescription.mFormatFlags = formatFlags;
1019 result = AudioStreamSetProperty( streamIDs[firstStream+i], NULL, 0, kAudioStreamPropertyPhysicalFormat, dataSize, &testDescription );
1020 if ( result == noErr ) continue;
1022 testDescription = description;
1023 testDescription.mBitsPerChannel = 8;
1024 testDescription.mFormatFlags = formatFlags;
1025 result = AudioStreamSetProperty( streamIDs[firstStream+i], NULL, 0, kAudioStreamPropertyPhysicalFormat, dataSize, &testDescription );
1026 if ( result != noErr ) {
1027 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting physical data format for device (" << device << ").";
1028 errorText_ = errorStream_.str();
1034 // Get the stream latency. There can be latency in both the device
1035 // and the stream. First, attempt to get the device latency on the
1036 // master channel or the first open channel. Errors that might
1037 // occur here are not deemed critical.
1039 // ***** CHECK THIS ***** //
1040 UInt32 latency, channel = 0;
1041 dataSize = sizeof( UInt32 );
1042 AudioDevicePropertyID property = kAudioDevicePropertyLatency;
1043 if ( hasProperty( id, channel, isInput, property ) == true ) {
1044 result = AudioDeviceGetProperty( id, channel, isInput, property, &dataSize, &latency );
1045 if ( result == kAudioHardwareNoError ) stream_.latency[ mode ] = latency;
// Failure branch: latency lookup problems are reported as warnings only.
1047 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting device latency for device (" << device << ").";
1048 errorText_ = errorStream_.str();
1049 error( RtError::WARNING );
1053 // Now try to get the stream latency. For multiple streams, I assume the
1054 // latency is equal for each.
1055 result = AudioStreamGetProperty( streamIDs[firstStream], 0, property, &dataSize, &latency );
1056 if ( result == kAudioHardwareNoError ) stream_.latency[ mode ] += latency;
1058 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream latency for device (" << device << ").";
1059 errorText_ = errorStream_.str();
1060 error( RtError::WARNING );
1063 // Byte-swapping: According to AudioHardware.h, the stream data will
1064 // always be presented in native-endian format, so we should never
1065 // need to byte swap.
1066 stream_.doByteSwap[mode] = false;
1068 // From the CoreAudio documentation, PCM data must be supplied as
// 32-bit floats (the device-side format is therefore fixed below).
1070 stream_.userFormat = format;
1071 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
1073 if ( streamCount == 1 )
1074 stream_.nDeviceChannels[mode] = description.mChannelsPerFrame;
1075 else // multiple streams
1076 stream_.nDeviceChannels[mode] = channels;
1077 stream_.nUserChannels[mode] = channels;
1078 stream_.channelOffset[mode] = channelOffset; // offset within a CoreAudio stream
1079 if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;
1080 else stream_.userInterleaved = true;
1081 stream_.deviceInterleaved[mode] = true;
1082 if ( monoMode == true ) stream_.deviceInterleaved[mode] = false;
1084 // Set flags for buffer conversion.
1085 stream_.doConvertBuffer[mode] = false;
1086 if ( stream_.userFormat != stream_.deviceFormat[mode] )
1087 stream_.doConvertBuffer[mode] = true;
1088 if ( stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode] )
1089 stream_.doConvertBuffer[mode] = true;
1090 if ( streamCount == 1 ) {
1091 if ( stream_.nUserChannels[mode] > 1 &&
1092 stream_.userInterleaved != stream_.deviceInterleaved[mode] )
1093 stream_.doConvertBuffer[mode] = true;
1095 else if ( monoMode && stream_.userInterleaved )
1096 stream_.doConvertBuffer[mode] = true;
1098 // Allocate our CoreHandle structure for the stream.
1099 CoreHandle *handle = 0;
1100 if ( stream_.apiHandle == 0 ) {
// Allocated with new; freed (delete) when the stream is closed.
1102 handle = new CoreHandle;
1104 catch ( std::bad_alloc& ) {
1105 errorText_ = "RtApiCore::probeDeviceOpen: error allocating CoreHandle memory.";
1109 if ( pthread_cond_init( &handle->condition, NULL ) ) {
1110 errorText_ = "RtApiCore::probeDeviceOpen: error initializing pthread condition variable.";
1113 stream_.apiHandle = (void *) handle;
// else branch: a handle already exists from an earlier (OUTPUT) pass.
1116 handle = (CoreHandle *) stream_.apiHandle;
1117 handle->iStream[mode] = firstStream;
1118 handle->nStreams[mode] = streamCount;
1119 handle->id[mode] = id;
1121 // Allocate necessary internal buffers.
1122 unsigned long bufferBytes;
1123 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
1124 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
1125 if ( stream_.userBuffer[mode] == NULL ) {
1126 errorText_ = "RtApiCore::probeDeviceOpen: error allocating user buffer memory.";
1130 // If possible, we will make use of the CoreAudio stream buffers as
1131 // "device buffers". However, we can't do this if using multiple
// streams -- then a separate interleaved device buffer is required.
1133 if ( stream_.doConvertBuffer[mode] && handle->nStreams[mode] > 1 ) {
1135 bool makeBuffer = true;
1136 bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
1137 if ( mode == INPUT ) {
// In duplex mode, reuse the output device buffer when it is big enough.
1138 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
1139 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
1140 if ( bufferBytes <= bytesOut ) makeBuffer = false;
1145 bufferBytes *= *bufferSize;
1146 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
1147 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
1148 if ( stream_.deviceBuffer == NULL ) {
1149 errorText_ = "RtApiCore::probeDeviceOpen: error allocating device buffer memory.";
1155 stream_.sampleRate = sampleRate;
1156 stream_.device[mode] = device;
1157 stream_.state = STREAM_STOPPED;
1158 stream_.callbackInfo.object = (void *) this;
1160 // Setup the buffer conversion information structure.
1161 if ( stream_.doConvertBuffer[mode] ) {
1162 if ( streamCount > 1 ) setConvertInfo( mode, 0 );
1163 else setConvertInfo( mode, channelOffset );
// Duplex on the same device: a single I/O proc serves both directions.
1166 if ( mode == INPUT && stream_.mode == OUTPUT && stream_.device[0] == device )
1167 // Only one callback procedure per device.
1168 stream_.mode = DUPLEX;
1170 #if defined( MAC_OS_X_VERSION_10_5 ) && ( MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_5 )
1171 result = AudioDeviceCreateIOProcID( id, callbackHandler, (void *) &stream_.callbackInfo, &handle->procId[mode] );
1173 // deprecated in favor of AudioDeviceCreateIOProcID()
1174 result = AudioDeviceAddIOProc( id, callbackHandler, (void *) &stream_.callbackInfo );
1176 if ( result != noErr ) {
// NOTE(review): unlike the other messages in this function, this one
// omits getErrorCode( result ) -- consider adding it for consistency.
1177 errorStream_ << "RtApiCore::probeDeviceOpen: system error setting callback for device (" << device << ").";
1178 errorText_ = errorStream_.str();
1181 if ( stream_.mode == OUTPUT && mode == INPUT )
1182 stream_.mode = DUPLEX;
1184 stream_.mode = mode;
1187 // Setup the device property listener for over/underload.
1188 result = AudioDeviceAddPropertyListener( id, 0, isInput,
1189 kAudioDeviceProcessorOverload,
1190 deviceListener, (void *) handle );
// Error-cleanup path (reached via goto on the failures above, label not
// visible in this view): release everything allocated so far.
1196 pthread_cond_destroy( &handle->condition );
1198 stream_.apiHandle = 0;
1201 for ( int i=0; i<2; i++ ) {
1202 if ( stream_.userBuffer[i] ) {
1203 free( stream_.userBuffer[i] );
1204 stream_.userBuffer[i] = 0;
1208 if ( stream_.deviceBuffer ) {
1209 free( stream_.deviceBuffer );
1210 stream_.deviceBuffer = 0;
1216 void RtApiCore :: closeStream( void )
1218 if ( stream_.state == STREAM_CLOSED ) {
1219 errorText_ = "RtApiCore::closeStream(): no open stream to close!";
1220 error( RtError::WARNING );
1224 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
1225 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
1226 if ( stream_.state == STREAM_RUNNING )
1227 AudioDeviceStop( handle->id[0], callbackHandler );
1228 #if defined( MAC_OS_X_VERSION_10_5 ) && ( MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_5 )
1229 AudioDeviceDestroyIOProcID( handle->id[0], handle->procId[0] );
1231 // deprecated in favor of AudioDeviceDestroyIOProcID()
1232 AudioDeviceRemoveIOProc( handle->id[0], callbackHandler );
1236 if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1] ) ) {
1237 if ( stream_.state == STREAM_RUNNING )
1238 AudioDeviceStop( handle->id[1], callbackHandler );
1239 #if defined( MAC_OS_X_VERSION_10_5 ) && ( MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_5 )
1240 AudioDeviceDestroyIOProcID( handle->id[1], handle->procId[1] );
1242 // deprecated in favor of AudioDeviceDestroyIOProcID()
1243 AudioDeviceRemoveIOProc( handle->id[1], callbackHandler );
1247 for ( int i=0; i<2; i++ ) {
1248 if ( stream_.userBuffer[i] ) {
1249 free( stream_.userBuffer[i] );
1250 stream_.userBuffer[i] = 0;
1254 if ( stream_.deviceBuffer ) {
1255 free( stream_.deviceBuffer );
1256 stream_.deviceBuffer = 0;
1259 // Destroy pthread condition variable.
1260 pthread_cond_destroy( &handle->condition );
1262 stream_.apiHandle = 0;
1264 stream_.mode = UNINITIALIZED;
1265 stream_.state = STREAM_CLOSED;
1268 void RtApiCore :: startStream( void )
1271 if ( stream_.state == STREAM_RUNNING ) {
1272 errorText_ = "RtApiCore::startStream(): the stream is already running!";
1273 error( RtError::WARNING );
1277 MUTEX_LOCK( &stream_.mutex );
1279 OSStatus result = noErr;
1280 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
1281 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
1283 result = AudioDeviceStart( handle->id[0], callbackHandler );
1284 if ( result != noErr ) {
1285 errorStream_ << "RtApiCore::startStream: system error (" << getErrorCode( result ) << ") starting callback procedure on device (" << stream_.device[0] << ").";
1286 errorText_ = errorStream_.str();
1291 if ( stream_.mode == INPUT ||
1292 ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1] ) ) {
1294 result = AudioDeviceStart( handle->id[1], callbackHandler );
1295 if ( result != noErr ) {
1296 errorStream_ << "RtApiCore::startStream: system error starting input callback procedure on device (" << stream_.device[1] << ").";
1297 errorText_ = errorStream_.str();
1302 handle->drainCounter = 0;
1303 handle->internalDrain = false;
1304 stream_.state = STREAM_RUNNING;
1307 MUTEX_UNLOCK( &stream_.mutex );
1309 if ( result == noErr ) return;
1310 error( RtError::SYSTEM_ERROR );
// Stop the CoreAudio device(s).  For output streams, first lets the
// audio drain: drainCounter is set to 1 and the thread blocks on the
// condition variable until the callback has zero-filled a few buffers
// and signals (see callbackEvent, drainCounter > 3).
1313 void RtApiCore :: stopStream( void )
1316 if ( stream_.state == STREAM_STOPPED ) {
1317 errorText_ = "RtApiCore::stopStream(): the stream is already stopped!";
1318 error( RtError::WARNING );
1322 MUTEX_LOCK( &stream_.mutex );
// Re-check under the lock: the state may have changed while waiting.
1324 if ( stream_.state == STREAM_STOPPED ) {
1325 MUTEX_UNLOCK( &stream_.mutex );
1329 OSStatus result = noErr;
1330 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
1331 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
// drainCounter != 0 means a drain/abort is already in progress (e.g.
// initiated from the callback or abortStream), so don't wait again.
1333 if ( handle->drainCounter == 0 ) {
1334 handle->drainCounter = 1;
// NOTE(review): waited on without a predicate loop -- a spurious wakeup
// would stop the device before the drain completes; confirm acceptable.
1335 pthread_cond_wait( &handle->condition, &stream_.mutex ); // block until signaled
1338 result = AudioDeviceStop( handle->id[0], callbackHandler );
1339 if ( result != noErr ) {
1340 errorStream_ << "RtApiCore::stopStream: system error (" << getErrorCode( result ) << ") stopping callback procedure on device (" << stream_.device[0] << ").";
1341 errorText_ = errorStream_.str();
// The input device only needs its own stop call when it differs from
// the output device.
1346 if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1] ) ) {
1348 result = AudioDeviceStop( handle->id[1], callbackHandler );
1349 if ( result != noErr ) {
1350 errorStream_ << "RtApiCore::stopStream: system error (" << getErrorCode( result ) << ") stopping input callback procedure on device (" << stream_.device[1] << ").";
1351 errorText_ = errorStream_.str();
1356 stream_.state = STREAM_STOPPED;
1359 MUTEX_UNLOCK( &stream_.mutex );
// Any AudioDeviceStop() failure is escalated after releasing the lock.
1361 if ( result == noErr ) return;
1362 error( RtError::SYSTEM_ERROR );
// Stop the stream immediately, without draining queued output.  Setting
// drainCounter to a nonzero value makes stopStream() skip its
// drain-and-wait handshake.  The tail of this function (presumably a
// call to stopStream()) is not visible in this view -- confirm.
1365 void RtApiCore :: abortStream( void )
1368 if ( stream_.state == STREAM_STOPPED ) {
1369 errorText_ = "RtApiCore::abortStream(): the stream is already stopped!";
1370 error( RtError::WARNING );
1374 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
1375 handle->drainCounter = 1;
// Per-buffer CoreAudio I/O proc body: invokes the user callback, then
// shuffles audio between the user buffers and the CoreAudio
// AudioBufferList(s), handling format conversion, (de)interleaving,
// channel offsets and multi-stream devices.  Returns SUCCESS/FAILURE.
// In duplex mode with two devices this is called once per device.
1380 bool RtApiCore :: callbackEvent( AudioDeviceID deviceId,
1381 const AudioBufferList *inBufferList,
1382 const AudioBufferList *outBufferList )
1384 if ( stream_.state == STREAM_STOPPED ) return SUCCESS;
1385 if ( stream_.state == STREAM_CLOSED ) {
1386 errorText_ = "RtApiCore::callbackEvent(): the stream is closed ... this shouldn't happen!";
1387 error( RtError::WARNING );
1391 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
1392 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
1394 // Check if we were draining the stream and signal is finished.
// After a few zero-filled buffers (counter incremented near the end of
// this function) wake the thread blocked in stopStream().
1395 if ( handle->drainCounter > 3 ) {
1396 if ( handle->internalDrain == false )
1397 pthread_cond_signal( &handle->condition );
1403 MUTEX_LOCK( &stream_.mutex );
1405 // The state might change while waiting on a mutex.
1406 if ( stream_.state == STREAM_STOPPED ) {
1407 MUTEX_UNLOCK( &stream_.mutex );
1411 AudioDeviceID outputDevice = handle->id[0];
1413 // Invoke user callback to get fresh output data UNLESS we are
1414 // draining stream or duplex mode AND the input/output devices are
1415 // different AND this function is called for the input device.
1416 if ( handle->drainCounter == 0 && ( stream_.mode != DUPLEX || deviceId == outputDevice ) ) {
1417 RtAudioCallback callback = (RtAudioCallback) info->callback;
1418 double streamTime = getStreamTime();
1419 RtAudioStreamStatus status = 0;
// Report and clear any xrun flags latched by deviceListener().
1420 if ( stream_.mode != INPUT && handle->xrun[0] == true ) {
1421 status |= RTAUDIO_OUTPUT_UNDERFLOW;
1422 handle->xrun[0] = false;
1424 if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {
1425 status |= RTAUDIO_INPUT_OVERFLOW;
1426 handle->xrun[1] = false;
// Callback return value: 0 = continue, 1 = drain then stop,
// 2 = abort immediately (handled just below).
1428 handle->drainCounter = callback( stream_.userBuffer[0], stream_.userBuffer[1],
1429 stream_.bufferSize, streamTime, status, info->userData );
1430 if ( handle->drainCounter == 2 ) {
// Immediate abort path; the statements following this unlock are not
// visible in this view (presumably abortStream() + return) -- confirm.
1431 MUTEX_UNLOCK( &stream_.mutex );
1435 else if ( handle->drainCounter == 1 )
1436 handle->internalDrain = true;
// ---- Output side: fill the CoreAudio output buffers. ----
1439 if ( stream_.mode == OUTPUT || ( stream_.mode == DUPLEX && deviceId == outputDevice ) ) {
1441 if ( handle->drainCounter > 1 ) { // write zeros to the output stream
1443 if ( handle->nStreams[0] == 1 ) {
1444 memset( outBufferList->mBuffers[handle->iStream[0]].mData,
1446 outBufferList->mBuffers[handle->iStream[0]].mDataByteSize );
1448 else { // fill multiple streams with zeros
1449 for ( unsigned int i=0; i<handle->nStreams[0]; i++ ) {
1450 memset( outBufferList->mBuffers[handle->iStream[0]+i].mData,
1452 outBufferList->mBuffers[handle->iStream[0]+i].mDataByteSize );
1456 else if ( handle->nStreams[0] == 1 ) {
1457 if ( stream_.doConvertBuffer[0] ) { // convert directly to CoreAudio stream buffer
1458 convertBuffer( (char *) outBufferList->mBuffers[handle->iStream[0]].mData,
1459 stream_.userBuffer[0], stream_.convertInfo[0] );
1461 else { // copy from user buffer
1462 memcpy( outBufferList->mBuffers[handle->iStream[0]].mData,
1463 stream_.userBuffer[0],
1464 outBufferList->mBuffers[handle->iStream[0]].mDataByteSize );
1467 else { // fill multiple streams
1468 Float32 *inBuffer = (Float32 *) stream_.userBuffer[0];
1469 if ( stream_.doConvertBuffer[0] ) {
1470 convertBuffer( stream_.deviceBuffer, stream_.userBuffer[0], stream_.convertInfo[0] );
1471 inBuffer = (Float32 *) stream_.deviceBuffer;
// Mono mode: one non-interleaved CoreAudio stream per user channel.
1474 if ( stream_.deviceInterleaved[0] == false ) { // mono mode
1475 UInt32 bufferBytes = outBufferList->mBuffers[handle->iStream[0]].mDataByteSize;
1476 for ( unsigned int i=0; i<stream_.nUserChannels[0]; i++ ) {
1477 memcpy( outBufferList->mBuffers[handle->iStream[0]+i].mData,
1478 (void *)&inBuffer[i*stream_.bufferSize], bufferBytes );
1481 else { // fill multiple multi-channel streams with interleaved data
1482 UInt32 streamChannels, channelsLeft, inJump, outJump, inOffset;
1485 bool inInterleaved = ( stream_.userInterleaved ) ? true : false;
1486 UInt32 inChannels = stream_.nUserChannels[0];
1487 if ( stream_.doConvertBuffer[0] ) {
1488 inInterleaved = true; // device buffer will always be interleaved for nStreams > 1 and not mono mode
1489 inChannels = stream_.nDeviceChannels[0];
// inOffset is the distance between successive channel samples in the
// source: 1 when interleaved, bufferSize when planar.
1492 if ( inInterleaved ) inOffset = 1;
1493 else inOffset = stream_.bufferSize;
1495 channelsLeft = inChannels;
1496 for ( unsigned int i=0; i<handle->nStreams[0]; i++ ) {
1498 out = (Float32 *) outBufferList->mBuffers[handle->iStream[0]+i].mData;
1499 streamChannels = outBufferList->mBuffers[handle->iStream[0]+i].mNumberChannels;
1502 // Account for possible channel offset in first stream
1503 if ( i == 0 && stream_.channelOffset[0] > 0 ) {
1504 streamChannels -= stream_.channelOffset[0];
1505 outJump = stream_.channelOffset[0];
1509 // Account for possible unfilled channels at end of the last stream
1510 if ( streamChannels > channelsLeft ) {
1511 outJump = streamChannels - channelsLeft;
1512 streamChannels = channelsLeft;
1515 // Determine input buffer offsets and skips
1516 if ( inInterleaved ) {
1517 inJump = inChannels;
1518 in += inChannels - channelsLeft;
1522 in += (inChannels - channelsLeft) * inOffset;
// Copy frame-by-frame into this stream's buffer.
1525 for ( unsigned int i=0; i<stream_.bufferSize; i++ ) {
1526 for ( unsigned int j=0; j<streamChannels; j++ ) {
1527 *out++ = in[j*inOffset];
1532 channelsLeft -= streamChannels;
// While draining, count the buffers of silence we have emitted; the
// check at the top of this function signals stopStream() when > 3.
1537 if ( handle->drainCounter ) {
1538 handle->drainCounter++;
// ---- Input side: read the CoreAudio input buffers. ----
1543 AudioDeviceID inputDevice;
1544 inputDevice = handle->id[1];
1545 if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && deviceId == inputDevice ) ) {
1547 if ( handle->nStreams[1] == 1 ) {
1548 if ( stream_.doConvertBuffer[1] ) { // convert directly from CoreAudio stream buffer
1549 convertBuffer( stream_.userBuffer[1],
1550 (char *) inBufferList->mBuffers[handle->iStream[1]].mData,
1551 stream_.convertInfo[1] );
1553 else { // copy to user buffer
1554 memcpy( stream_.userBuffer[1],
1555 inBufferList->mBuffers[handle->iStream[1]].mData,
1556 inBufferList->mBuffers[handle->iStream[1]].mDataByteSize );
1559 else { // read from multiple streams
1560 Float32 *outBuffer = (Float32 *) stream_.userBuffer[1];
1561 if ( stream_.doConvertBuffer[1] ) outBuffer = (Float32 *) stream_.deviceBuffer;
1563 if ( stream_.deviceInterleaved[1] == false ) { // mono mode
1564 UInt32 bufferBytes = inBufferList->mBuffers[handle->iStream[1]].mDataByteSize;
1565 for ( unsigned int i=0; i<stream_.nUserChannels[1]; i++ ) {
1566 memcpy( (void *)&outBuffer[i*stream_.bufferSize],
1567 inBufferList->mBuffers[handle->iStream[1]+i].mData, bufferBytes );
1570 else { // read from multiple multi-channel streams
// Mirror image of the output de-interleaving logic above.
1571 UInt32 streamChannels, channelsLeft, inJump, outJump, outOffset;
1574 bool outInterleaved = ( stream_.userInterleaved ) ? true : false;
1575 UInt32 outChannels = stream_.nUserChannels[1];
1576 if ( stream_.doConvertBuffer[1] ) {
1577 outInterleaved = true; // device buffer will always be interleaved for nStreams > 1 and not mono mode
1578 outChannels = stream_.nDeviceChannels[1];
1581 if ( outInterleaved ) outOffset = 1;
1582 else outOffset = stream_.bufferSize;
1584 channelsLeft = outChannels;
1585 for ( unsigned int i=0; i<handle->nStreams[1]; i++ ) {
1587 in = (Float32 *) inBufferList->mBuffers[handle->iStream[1]+i].mData;
1588 streamChannels = inBufferList->mBuffers[handle->iStream[1]+i].mNumberChannels;
1591 // Account for possible channel offset in first stream
1592 if ( i == 0 && stream_.channelOffset[1] > 0 ) {
1593 streamChannels -= stream_.channelOffset[1];
1594 inJump = stream_.channelOffset[1];
1598 // Account for possible unread channels at end of the last stream
1599 if ( streamChannels > channelsLeft ) {
1600 inJump = streamChannels - channelsLeft;
1601 streamChannels = channelsLeft;
1604 // Determine output buffer offsets and skips
1605 if ( outInterleaved ) {
1606 outJump = outChannels;
1607 out += outChannels - channelsLeft;
1611 out += (outChannels - channelsLeft) * outOffset;
1614 for ( unsigned int i=0; i<stream_.bufferSize; i++ ) {
1615 for ( unsigned int j=0; j<streamChannels; j++ ) {
1616 out[j*outOffset] = *in++;
1621 channelsLeft -= streamChannels;
1625 if ( stream_.doConvertBuffer[1] ) { // convert from our internal "device" buffer
1626 convertBuffer( stream_.userBuffer[1],
1627 stream_.deviceBuffer,
1628 stream_.convertInfo[1] );
1634 MUTEX_UNLOCK( &stream_.mutex );
// Advance the stream-time clock by one buffer period.
1636 RtApi::tickStreamTime();
1640 const char* RtApiCore :: getErrorCode( OSStatus code )
1644 case kAudioHardwareNotRunningError:
1645 return "kAudioHardwareNotRunningError";
1647 case kAudioHardwareUnspecifiedError:
1648 return "kAudioHardwareUnspecifiedError";
1650 case kAudioHardwareUnknownPropertyError:
1651 return "kAudioHardwareUnknownPropertyError";
1653 case kAudioHardwareBadPropertySizeError:
1654 return "kAudioHardwareBadPropertySizeError";
1656 case kAudioHardwareIllegalOperationError:
1657 return "kAudioHardwareIllegalOperationError";
1659 case kAudioHardwareBadObjectError:
1660 return "kAudioHardwareBadObjectError";
1662 case kAudioHardwareBadDeviceError:
1663 return "kAudioHardwareBadDeviceError";
1665 case kAudioHardwareBadStreamError:
1666 return "kAudioHardwareBadStreamError";
1668 case kAudioHardwareUnsupportedOperationError:
1669 return "kAudioHardwareUnsupportedOperationError";
1671 case kAudioDeviceUnsupportedFormatError:
1672 return "kAudioDeviceUnsupportedFormatError";
1674 case kAudioDevicePermissionsError:
1675 return "kAudioDevicePermissionsError";
1678 return "CoreAudio unknown error";
1682 //******************** End of __MACOSX_CORE__ *********************//
1685 #if defined(__UNIX_JACK__)
1687 // JACK is a low-latency audio server, originally written for the
1688 // GNU/Linux operating system and now also ported to OS-X. It can
1689 // connect a number of different applications to an audio device, as
1690 // well as allowing them to share audio between themselves.
1692 // When using JACK with RtAudio, "devices" refer to JACK clients that
1693 // have ports connected to the server. The JACK server is typically
1694 // started in a terminal as follows:
1696 //    jackd -d alsa -d hw:0
1698 // or through an interface program such as qjackctl. Many of the
1699 // parameters normally set for a stream are fixed by the JACK server
1700 // and can be specified when the JACK server is started. In
1703 //    jackd -d alsa -d hw:0 -r 44100 -p 512 -n 4
1705 // specifies a sample rate of 44100 Hz, a buffer size of 512 sample
1706 // frames, and number of buffers = 4. Once the server is running, it
1707 // is not possible to override these values. If the values are not
1708 // specified in the command-line, the JACK server uses default values.
1710 // The JACK server does not have to be running when an instance of
1711 // RtApiJack is created, though the function getDeviceCount() will
1712 // report 0 devices found until JACK has been started. When no
1713 // devices are available (i.e., the JACK server is not running), a
1714 // stream cannot be opened.
1716 #include <jack/jack.h>
1719 // A structure to hold various information related to the Jack API
// NOTE(review): the 'struct JackHandle {' header line, the xrun[2]
// member and the closing '};' are not visible in this view.
// Handle to the JACK client connection.
1722 jack_client_t *client;
// Registered JACK ports, indexed [0]=output, [1]=input.
1723 jack_port_t **ports[2];
// Client (device) name for each direction.
1724 std::string deviceName[2];
// Signaled by the process callback when a drain completes (see stopStream).
1726 pthread_cond_t condition;
1727 int drainCounter; // Tracks callback counts when draining
1728 bool internalDrain; // Indicates if stop is initiated from callback or not.
// Default constructor: null/false everything (also initializes xrun[],
// whose declaration is outside this view).
1731 :client(0), drainCounter(0), internalDrain(false) { ports[0] = 0; ports[1] = 0; xrun[0] = false; xrun[1] = false; }
1734 RtApiJack :: RtApiJack()
1736 // Nothing to do here.
1739 RtApiJack :: ~RtApiJack()
1741 if ( stream_.state != STREAM_CLOSED ) closeStream();
// Count JACK "devices" by connecting a throwaway client and grouping
// the server's port names by their prefix (the text before the first
// colon).  Returns 0 when no JACK server is reachable.
1744 unsigned int RtApiJack :: getDeviceCount( void )
1746 // See if we can become a jack client.
1747 jack_options_t options = (jack_options_t) ( JackNoStartServer | JackUseExactName ); //JackNullOption;
1748 jack_status_t *status = NULL;
1749 jack_client_t *client = jack_client_open( "RtApiJackCount", options, status );
1750 if ( client == 0 ) return 0;
1753 std::string port, previousPort;
1754 unsigned int nChannels = 0, nDevices = 0;
1755 ports = jack_get_ports( client, NULL, NULL, 0 );
1757 // Parse the port names up to the first colon (:).
// Loop scaffolding (do {, iColon declaration, nDevices++) is not
// visible in this view; each new prefix counts as one device.
1760 port = (char *) ports[ nChannels ];
1761 iColon = port.find(":");
1762 if ( iColon != std::string::npos ) {
// Note: keeps the colon in the prefix here (iColon + 1), unlike
// getDeviceInfo() which strips it -- consistent within each function.
1763 port = port.substr( 0, iColon + 1 );
1764 if ( port != previousPort ) {
1766 previousPort = port;
1769 } while ( ports[++nChannels] );
1773 jack_client_close( client );
// Probe a JACK "device" (port-name prefix): resolve its name by index,
// report the server sample rate and count its input/output ports as
// RtAudio channels.  Emits warnings (info.probed stays false) when the
// server is unreachable or the device yields no channels.
1777 RtAudio::DeviceInfo RtApiJack :: getDeviceInfo( unsigned int device )
1779 RtAudio::DeviceInfo info;
1780 info.probed = false;
1782 jack_options_t options = (jack_options_t) ( JackNoStartServer | JackUseExactName ); //JackNullOption
1783 jack_status_t *status = NULL;
1784 jack_client_t *client = jack_client_open( "RtApiJackInfo", options, status );
1785 if ( client == 0 ) {
1786 errorText_ = "RtApiJack::getDeviceInfo: Jack server not found or connection error!";
1787 error( RtError::WARNING );
1792 std::string port, previousPort;
1793 unsigned int nPorts = 0, nDevices = 0;
1794 ports = jack_get_ports( client, NULL, NULL, 0 );
1796 // Parse the port names up to the first colon (:).
// Same prefix-grouping walk as getDeviceCount(); the device'th distinct
// prefix becomes info.name.
1799 port = (char *) ports[ nPorts ];
1800 iColon = port.find(":");
1801 if ( iColon != std::string::npos ) {
1802 port = port.substr( 0, iColon );
1803 if ( port != previousPort ) {
1804 if ( nDevices == device ) info.name = port;
1806 previousPort = port;
1809 } while ( ports[++nPorts] );
1813 if ( device >= nDevices ) {
1814 errorText_ = "RtApiJack::getDeviceInfo: device ID is invalid!";
1815 error( RtError::INVALID_USE );
1818 // Get the current jack server sample rate.
1819 info.sampleRates.clear();
1820 info.sampleRates.push_back( jack_get_sample_rate( client ) );
1822 // Count the available ports containing the client name as device
1823 // channels. Jack "input ports" equal RtAudio output channels.
1824 unsigned int nChannels = 0;
1825 ports = jack_get_ports( client, info.name.c_str(), NULL, JackPortIsInput );
1827 while ( ports[ nChannels ] ) nChannels++;
1829 info.outputChannels = nChannels;
1832 // Jack "output ports" equal RtAudio input channels.
// Assumes nChannels is reset to 0 before this count -- the reset line
// is not visible in this view; confirm against the full source.
1834 ports = jack_get_ports( client, info.name.c_str(), NULL, JackPortIsOutput );
1836 while ( ports[ nChannels ] ) nChannels++;
1838 info.inputChannels = nChannels;
1841 if ( info.outputChannels == 0 && info.inputChannels == 0 ) {
1842 jack_client_close(client);
1843 errorText_ = "RtApiJack::getDeviceInfo: error determining Jack input/output channels!";
1844 error( RtError::WARNING );
1848 // If device opens for both playback and capture, we determine the channels.
1849 if ( info.outputChannels > 0 && info.inputChannels > 0 )
1850 info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
1852 // Jack always uses 32-bit floats.
1853 info.nativeFormats = RTAUDIO_FLOAT32;
1855 // Jack doesn't provide default devices so we'll use the first available one.
1856 if ( device == 0 && info.outputChannels > 0 )
1857 info.isDefaultOutput = true;
1858 if ( device == 0 && info.inputChannels > 0 )
1859 info.isDefaultInput = true;
1861 jack_client_close(client);
1866 int jackCallbackHandler( jack_nframes_t nframes, void *infoPointer )
1868 CallbackInfo *info = (CallbackInfo *) infoPointer;
1870 RtApiJack *object = (RtApiJack *) info->object;
1871 if ( object->callbackEvent( (unsigned long) nframes ) == false ) return 1;
1876 void jackShutdown( void *infoPointer )
1878 CallbackInfo *info = (CallbackInfo *) infoPointer;
1879 RtApiJack *object = (RtApiJack *) info->object;
1881 // Check current stream state. If stopped, then we'll assume this
1882 // was called as a result of a call to RtApiJack::stopStream (the
1883 // deactivation of a client handle causes this function to be called).
1884 // If not, we'll assume the Jack server is shutting down or some
1885 // other problem occurred and we should close the stream.
1886 if ( object->isStreamRunning() == false ) return;
1888 object->closeStream();
1889 std::cerr << "\nRtApiJack: the Jack server is shutting down this client ... stream stopped and closed!!\n" << std::endl;
1892 int jackXrun( void *infoPointer )
1894 JackHandle *handle = (JackHandle *) infoPointer;
1896 if ( handle->ports[0] ) handle->xrun[0] = true;
1897 if ( handle->ports[1] ) handle->xrun[1] = true;
// NOTE(review): only the HEAD of RtApiJack::probeDeviceOpen() is visible
// here; the function continues past the end of this chunk (buffer
// allocation, port registration, callback setup).
// Opens one direction (OUTPUT or INPUT) of a JACK stream: connects a
// client (first pass only), resolves the device name, validates channel
// counts and the server's fixed sample rate, then fills in stream_
// bookkeeping for this mode.
1902 bool RtApiJack :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
1903 unsigned int firstChannel, unsigned int sampleRate,
1904 RtAudioFormat format, unsigned int *bufferSize,
1905 RtAudio::StreamOptions *options )
1907 JackHandle *handle = (JackHandle *) stream_.apiHandle;
1909 // Look for jack server and try to become a client (only do once per stream).
1910 jack_client_t *client = 0;
1911 if ( mode == OUTPUT || ( mode == INPUT && stream_.mode != OUTPUT ) ) {
1912 jack_options_t jackoptions = (jack_options_t) ( JackNoStartServer | JackUseExactName ); //JackNullOption;
1913 jack_status_t *status = NULL;
1914 if ( options && !options->streamName.empty() )
1915 client = jack_client_open( options->streamName.c_str(), jackoptions, status );
1917 client = jack_client_open( "RtApiJack", jackoptions, status );
1918 if ( client == 0 ) {
1919 errorText_ = "RtApiJack::probeDeviceOpen: Jack server not found or connection error!";
1920 error( RtError::WARNING );
1925 // The handle must have been created on an earlier pass.
1926 client = handle->client;
1930 std::string port, previousPort, deviceName;
1931 unsigned int nPorts = 0, nDevices = 0;
1932 ports = jack_get_ports( client, NULL, NULL, 0 );
1934 // Parse the port names up to the first colon (:).
// Same prefix-grouping walk as getDeviceCount()/getDeviceInfo(); the
// device'th distinct prefix becomes deviceName.
1937 port = (char *) ports[ nPorts ];
1938 iColon = port.find(":");
1939 if ( iColon != std::string::npos ) {
1940 port = port.substr( 0, iColon );
1941 if ( port != previousPort ) {
1942 if ( nDevices == device ) deviceName = port;
1944 previousPort = port;
1947 } while ( ports[++nPorts] );
1951 if ( device >= nDevices ) {
1952 errorText_ = "RtApiJack::probeDeviceOpen: device ID is invalid!";
1956 // Count the available ports containing the client name as device
1957 // channels. Jack "input ports" equal RtAudio output channels.
1958 unsigned int nChannels = 0;
1959 unsigned long flag = JackPortIsInput;
1960 if ( mode == INPUT ) flag = JackPortIsOutput;
1961 ports = jack_get_ports( client, deviceName.c_str(), NULL, flag );
1963 while ( ports[ nChannels ] ) nChannels++;
1967 // Compare the jack ports for specified client to the requested number of channels.
1968 if ( nChannels < (channels + firstChannel) ) {
1969 errorStream_ << "RtApiJack::probeDeviceOpen: requested number of channels (" << channels << ") + offset (" << firstChannel << ") not found for specified device (" << device << ":" << deviceName << ").";
1970 errorText_ = errorStream_.str();
1974 // Check the jack server sample rate.
// The JACK server rate is fixed at server start; the caller's rate must
// match exactly.
1975 unsigned int jackRate = jack_get_sample_rate( client );
1976 if ( sampleRate != jackRate ) {
1977 jack_client_close( client );
1978 errorStream_ << "RtApiJack::probeDeviceOpen: the requested sample rate (" << sampleRate << ") is different than the JACK server rate (" << jackRate << ").";
1979 errorText_ = errorStream_.str();
1982 stream_.sampleRate = jackRate;
1984 // Get the latency of the JACK port.
1985 ports = jack_get_ports( client, deviceName.c_str(), NULL, flag );
1986 if ( ports[ firstChannel ] )
1987 stream_.latency[mode] = jack_port_get_latency( jack_port_by_name( client, ports[ firstChannel ] ) );
1990 // The jack server always uses 32-bit floating-point data.
1991 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
1992 stream_.userFormat = format;
1994 if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;
1995 else stream_.userInterleaved = true;
1997 // Jack always uses non-interleaved buffers.
1998 stream_.deviceInterleaved[mode] = false;
2000 // Jack always provides host byte-ordered data.
2001 stream_.doByteSwap[mode] = false;
2003 // Get the buffer size. The buffer size and number of buffers
2004 // (periods) is set when the jack server is started.
2005 stream_.bufferSize = (int) jack_get_buffer_size( client );
2006 *bufferSize = stream_.bufferSize;
2008 stream_.nDeviceChannels[mode] = channels;
2009 stream_.nUserChannels[mode] = channels;
2011 // Set flags for buffer conversion.
2012 stream_.doConvertBuffer[mode] = false;
2013 if ( stream_.userFormat != stream_.deviceFormat[mode] )
2014 stream_.doConvertBuffer[mode] = true;
2015 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
2016 stream_.nUserChannels[mode] > 1 )
2017 stream_.doConvertBuffer[mode] = true;
2019 // Allocate our JackHandle structure for the stream.
2020 if ( handle == 0 ) {
2022 handle = new JackHandle;
2024 catch ( std::bad_alloc& ) {
2025 errorText_ = "RtApiJack::probeDeviceOpen: error allocating JackHandle memory.";
2029 if ( pthread_cond_init(&handle->condition, NULL) ) {
2030 errorText_ = "RtApiJack::probeDeviceOpen: error initializing pthread condition variable.";
2033 stream_.apiHandle = (void *) handle;
2034 handle->client = client;
2036 handle->deviceName[mode] = deviceName;
2038 // Allocate necessary internal buffers.
2039 unsigned long bufferBytes;
2040 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
2041 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
2042 if ( stream_.userBuffer[mode] == NULL ) {
2043 errorText_ = "RtApiJack::probeDeviceOpen: error allocating user buffer memory.";
2047 if ( stream_.doConvertBuffer[mode] ) {
2049 bool makeBuffer = true;
2050 if ( mode == OUTPUT )
2051 bufferBytes = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
2052 else { // mode == INPUT
2053 bufferBytes = stream_.nDeviceChannels[1] * formatBytes( stream_.deviceFormat[1] );
2054 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
2055 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
2056 if ( bufferBytes < bytesOut ) makeBuffer = false;
2061 bufferBytes *= *bufferSize;
2062 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
2063 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
2064 if ( stream_.deviceBuffer == NULL ) {
2065 errorText_ = "RtApiJack::probeDeviceOpen: error allocating device buffer memory.";
2071 // Allocate memory for the Jack ports (channels) identifiers.
2072 handle->ports[mode] = (jack_port_t **) malloc ( sizeof (jack_port_t *) * channels );
2073 if ( handle->ports[mode] == NULL ) {
2074 errorText_ = "RtApiJack::probeDeviceOpen: error allocating port memory.";
2078 stream_.device[mode] = device;
2079 stream_.channelOffset[mode] = firstChannel;
2080 stream_.state = STREAM_STOPPED;
2081 stream_.callbackInfo.object = (void *) this;
2083 if ( stream_.mode == OUTPUT && mode == INPUT )
2084 // We had already set up the stream for output.
2085 stream_.mode = DUPLEX;
2087 stream_.mode = mode;
2088 jack_set_process_callback( handle->client, jackCallbackHandler, (void *) &stream_.callbackInfo );
2089 jack_set_xrun_callback( handle->client, jackXrun, (void *) &handle );
2090 jack_on_shutdown( handle->client, jackShutdown, (void *) &stream_.callbackInfo );
2093 // Register our ports.
2095 if ( mode == OUTPUT ) {
2096 for ( unsigned int i=0; i<stream_.nUserChannels[0]; i++ ) {
2097 snprintf( label, 64, "outport %d", i );
2098 handle->ports[0][i] = jack_port_register( handle->client, (const char *)label,
2099 JACK_DEFAULT_AUDIO_TYPE, JackPortIsOutput, 0 );
2103 for ( unsigned int i=0; i<stream_.nUserChannels[1]; i++ ) {
2104 snprintf( label, 64, "inport %d", i );
2105 handle->ports[1][i] = jack_port_register( handle->client, (const char *)label,
2106 JACK_DEFAULT_AUDIO_TYPE, JackPortIsInput, 0 );
2110 // Setup the buffer conversion information structure. We don't use
2111 // buffers to do channel offsets, so we override that parameter
2113 if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, 0 );
2119 pthread_cond_destroy( &handle->condition );
2120 jack_client_close( handle->client );
2122 if ( handle->ports[0] ) free( handle->ports[0] );
2123 if ( handle->ports[1] ) free( handle->ports[1] );
2126 stream_.apiHandle = 0;
2129 for ( int i=0; i<2; i++ ) {
2130 if ( stream_.userBuffer[i] ) {
2131 free( stream_.userBuffer[i] );
2132 stream_.userBuffer[i] = 0;
2136 if ( stream_.deviceBuffer ) {
2137 free( stream_.deviceBuffer );
2138 stream_.deviceBuffer = 0;
2144 void RtApiJack :: closeStream( void )
2146 if ( stream_.state == STREAM_CLOSED ) {
2147 errorText_ = "RtApiJack::closeStream(): no open stream to close!";
2148 error( RtError::WARNING );
2152 JackHandle *handle = (JackHandle *) stream_.apiHandle;
2155 if ( stream_.state == STREAM_RUNNING )
2156 jack_deactivate( handle->client );
2158 jack_client_close( handle->client );
2162 if ( handle->ports[0] ) free( handle->ports[0] );
2163 if ( handle->ports[1] ) free( handle->ports[1] );
2164 pthread_cond_destroy( &handle->condition );
2166 stream_.apiHandle = 0;
2169 for ( int i=0; i<2; i++ ) {
2170 if ( stream_.userBuffer[i] ) {
2171 free( stream_.userBuffer[i] );
2172 stream_.userBuffer[i] = 0;
2176 if ( stream_.deviceBuffer ) {
2177 free( stream_.deviceBuffer );
2178 stream_.deviceBuffer = 0;
2181 stream_.mode = UNINITIALIZED;
2182 stream_.state = STREAM_CLOSED;
2185 void RtApiJack :: startStream( void )
2188 if ( stream_.state == STREAM_RUNNING ) {
2189 errorText_ = "RtApiJack::startStream(): the stream is already running!";
2190 error( RtError::WARNING );
2194 MUTEX_LOCK(&stream_.mutex);
2196 JackHandle *handle = (JackHandle *) stream_.apiHandle;
2197 int result = jack_activate( handle->client );
2199 errorText_ = "RtApiJack::startStream(): unable to activate JACK client!";
2205 // Get the list of available ports.
2206 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
2208 ports = jack_get_ports( handle->client, handle->deviceName[0].c_str(), NULL, JackPortIsInput);
2209 if ( ports == NULL) {
2210 errorText_ = "RtApiJack::startStream(): error determining available JACK input ports!";
2214 // Now make the port connections. Since RtAudio wasn't designed to
2215 // allow the user to select particular channels of a device, we'll
2216 // just open the first "nChannels" ports with offset.
2217 for ( unsigned int i=0; i<stream_.nUserChannels[0]; i++ ) {
2219 if ( ports[ stream_.channelOffset[0] + i ] )
2220 result = jack_connect( handle->client, jack_port_name( handle->ports[0][i] ), ports[ stream_.channelOffset[0] + i ] );
2223 errorText_ = "RtApiJack::startStream(): error connecting output ports!";
2230 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
2232 ports = jack_get_ports( handle->client, handle->deviceName[1].c_str(), NULL, JackPortIsOutput );
2233 if ( ports == NULL) {
2234 errorText_ = "RtApiJack::startStream(): error determining available JACK output ports!";
2238 // Now make the port connections. See note above.
2239 for ( unsigned int i=0; i<stream_.nUserChannels[1]; i++ ) {
2241 if ( ports[ stream_.channelOffset[1] + i ] )
2242 result = jack_connect( handle->client, ports[ stream_.channelOffset[1] + i ], jack_port_name( handle->ports[1][i] ) );
2245 errorText_ = "RtApiJack::startStream(): error connecting input ports!";
2252 handle->drainCounter = 0;
2253 handle->internalDrain = false;
2254 stream_.state = STREAM_RUNNING;
2257 MUTEX_UNLOCK(&stream_.mutex);
2259 if ( result == 0 ) return;
2260 error( RtError::SYSTEM_ERROR );
2263 void RtApiJack :: stopStream( void )
2266 if ( stream_.state == STREAM_STOPPED ) {
2267 errorText_ = "RtApiJack::stopStream(): the stream is already stopped!";
2268 error( RtError::WARNING );
2272 MUTEX_LOCK( &stream_.mutex );
2274 if ( stream_.state == STREAM_STOPPED ) {
2275 MUTEX_UNLOCK( &stream_.mutex );
2279 JackHandle *handle = (JackHandle *) stream_.apiHandle;
2280 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
2282 if ( handle->drainCounter == 0 ) {
2283 handle->drainCounter = 1;
2284 pthread_cond_wait( &handle->condition, &stream_.mutex ); // block until signaled
2288 jack_deactivate( handle->client );
2289 stream_.state = STREAM_STOPPED;
2291 MUTEX_UNLOCK( &stream_.mutex );
2294 void RtApiJack :: abortStream( void )
2297 if ( stream_.state == STREAM_STOPPED ) {
2298 errorText_ = "RtApiJack::abortStream(): the stream is already stopped!";
2299 error( RtError::WARNING );
2303 JackHandle *handle = (JackHandle *) stream_.apiHandle;
2304 handle->drainCounter = 1;
2309 bool RtApiJack :: callbackEvent( unsigned long nframes )
2311 if ( stream_.state == STREAM_STOPPED ) return SUCCESS;
2312 if ( stream_.state == STREAM_CLOSED ) {
2313 errorText_ = "RtApiCore::callbackEvent(): the stream is closed ... this shouldn't happen!";
2314 error( RtError::WARNING );
2317 if ( stream_.bufferSize != nframes ) {
2318 errorText_ = "RtApiCore::callbackEvent(): the JACK buffer size has changed ... cannot process!";
2319 error( RtError::WARNING );
2323 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
2324 JackHandle *handle = (JackHandle *) stream_.apiHandle;
2326 // Check if we were draining the stream and signal is finished.
2327 if ( handle->drainCounter > 3 ) {
2328 if ( handle->internalDrain == false )
2329 pthread_cond_signal( &handle->condition );
2335 MUTEX_LOCK( &stream_.mutex );
2337 // The state might change while waiting on a mutex.
2338 if ( stream_.state == STREAM_STOPPED ) {
2339 MUTEX_UNLOCK( &stream_.mutex );
2343 // Invoke user callback first, to get fresh output data.
2344 if ( handle->drainCounter == 0 ) {
2345 RtAudioCallback callback = (RtAudioCallback) info->callback;
2346 double streamTime = getStreamTime();
2347 RtAudioStreamStatus status = 0;
2348 if ( stream_.mode != INPUT && handle->xrun[0] == true ) {
2349 status |= RTAUDIO_OUTPUT_UNDERFLOW;
2350 handle->xrun[0] = false;
2352 if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {
2353 status |= RTAUDIO_INPUT_OVERFLOW;
2354 handle->xrun[1] = false;
2356 handle->drainCounter = callback( stream_.userBuffer[0], stream_.userBuffer[1],
2357 stream_.bufferSize, streamTime, status, info->userData );
2358 if ( handle->drainCounter == 2 ) {
2359 MUTEX_UNLOCK( &stream_.mutex );
2363 else if ( handle->drainCounter == 1 )
2364 handle->internalDrain = true;
2367 jack_default_audio_sample_t *jackbuffer;
2368 unsigned long bufferBytes = nframes * sizeof( jack_default_audio_sample_t );
2369 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
2371 if ( handle->drainCounter > 0 ) { // write zeros to the output stream
2373 for ( unsigned int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
2374 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[0][i], (jack_nframes_t) nframes );
2375 memset( jackbuffer, 0, bufferBytes );
2379 else if ( stream_.doConvertBuffer[0] ) {
2381 convertBuffer( stream_.deviceBuffer, stream_.userBuffer[0], stream_.convertInfo[0] );
2383 for ( unsigned int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
2384 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[0][i], (jack_nframes_t) nframes );
2385 memcpy( jackbuffer, &stream_.deviceBuffer[i*bufferBytes], bufferBytes );
2388 else { // no buffer conversion
2389 for ( unsigned int i=0; i<stream_.nUserChannels[0]; i++ ) {
2390 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[0][i], (jack_nframes_t) nframes );
2391 memcpy( jackbuffer, &stream_.userBuffer[0][i*bufferBytes], bufferBytes );
2395 if ( handle->drainCounter ) {
2396 handle->drainCounter++;
2401 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
2403 if ( stream_.doConvertBuffer[1] ) {
2404 for ( unsigned int i=0; i<stream_.nDeviceChannels[1]; i++ ) {
2405 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[1][i], (jack_nframes_t) nframes );
2406 memcpy( &stream_.deviceBuffer[i*bufferBytes], jackbuffer, bufferBytes );
2408 convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
2410 else { // no buffer conversion
2411 for ( unsigned int i=0; i<stream_.nUserChannels[1]; i++ ) {
2412 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[1][i], (jack_nframes_t) nframes );
2413 memcpy( &stream_.userBuffer[1][i*bufferBytes], jackbuffer, bufferBytes );
2419 MUTEX_UNLOCK(&stream_.mutex);
2421 RtApi::tickStreamTime();
2424 //******************** End of __UNIX_JACK__ *********************//
2427 #if defined(__WINDOWS_ASIO__) // ASIO API on Windows
2429 // The ASIO API is designed around a callback scheme, so this
2430 // implementation is similar to that used for OS-X CoreAudio and Linux
2431 // Jack. The primary constraint with ASIO is that it only allows
2432 // access to a single driver at a time. Thus, it is not possible to
2433 // have more than one simultaneous RtAudio stream.
2435 // This implementation also requires a number of external ASIO files
2436 // and a few global variables. The ASIO callback scheme does not
2437 // allow for the passing of user data, so we must create a global
2438 // pointer to our callbackInfo structure.
2440 // On unix systems, we make use of a pthread condition variable.
2441 // Since there is no equivalent in Windows, I hacked something based
2442 // on information found in
2443 // http://www.cs.wustl.edu/~schmidt/win32-cv-1.html.
2445 #include "asiosys.h"
2447 #include "iasiothiscallresolver.h"
2448 #include "asiodrivers.h"
2451 AsioDrivers drivers;
2452 ASIOCallbacks asioCallbacks;
2453 ASIODriverInfo driverInfo;
2454 CallbackInfo *asioCallbackInfo;
2458 int drainCounter; // Tracks callback counts when draining
2459 bool internalDrain; // Indicates if stop is initiated from callback or not.
2460 ASIOBufferInfo *bufferInfos;
2464 :drainCounter(0), internalDrain(false), bufferInfos(0) {}
2467 // Function declarations (definitions at end of section)
2468 static const char* getAsioErrorString( ASIOError result );
2469 void sampleRateChanged( ASIOSampleRate sRate );
2470 long asioMessages( long selector, long value, void* message, double* opt );
2472 RtApiAsio :: RtApiAsio()
2474 // ASIO cannot run on a multi-threaded appartment. You can call
2475 // CoInitialize beforehand, but it must be for appartment threading
2476 // (in which case, CoInitilialize will return S_FALSE here).
2477 coInitialized_ = false;
2478 HRESULT hr = CoInitialize( NULL );
2480 errorText_ = "RtApiAsio::ASIO requires a single-threaded appartment. Call CoInitializeEx(0,COINIT_APARTMENTTHREADED)";
2481 error( RtError::WARNING );
2483 coInitialized_ = true;
2485 drivers.removeCurrentDriver();
2486 driverInfo.asioVersion = 2;
2488 // See note in DirectSound implementation about GetDesktopWindow().
2489 driverInfo.sysRef = GetForegroundWindow();
2492 RtApiAsio :: ~RtApiAsio()
2494 if ( stream_.state != STREAM_CLOSED ) closeStream();
2495 if ( coInitialized_ ) CoUninitialize();
2498 unsigned int RtApiAsio :: getDeviceCount( void )
2500 return (unsigned int) drivers.asioGetNumDev();
2503 RtAudio::DeviceInfo RtApiAsio :: getDeviceInfo( unsigned int device )
2505 RtAudio::DeviceInfo info;
2506 info.probed = false;
2509 unsigned int nDevices = getDeviceCount();
2510 if ( nDevices == 0 ) {
2511 errorText_ = "RtApiAsio::getDeviceInfo: no devices found!";
2512 error( RtError::INVALID_USE );
2515 if ( device >= nDevices ) {
2516 errorText_ = "RtApiAsio::getDeviceInfo: device ID is invalid!";
2517 error( RtError::INVALID_USE );
2520 // If a stream is already open, we cannot probe other devices. Thus, use the saved results.
2521 if ( stream_.state != STREAM_CLOSED ) {
2522 if ( device >= devices_.size() ) {
2523 errorText_ = "RtApiAsio::getDeviceInfo: device ID was not present before stream was opened.";
2524 error( RtError::WARNING );
2527 return devices_[ device ];
2530 char driverName[32];
2531 ASIOError result = drivers.asioGetDriverName( (int) device, driverName, 32 );
2532 if ( result != ASE_OK ) {
2533 errorStream_ << "RtApiAsio::getDeviceInfo: unable to get driver name (" << getAsioErrorString( result ) << ").";
2534 errorText_ = errorStream_.str();
2535 error( RtError::WARNING );
2539 info.name = driverName;
2541 if ( !drivers.loadDriver( driverName ) ) {
2542 errorStream_ << "RtApiAsio::getDeviceInfo: unable to load driver (" << driverName << ").";
2543 errorText_ = errorStream_.str();
2544 error( RtError::WARNING );
2548 result = ASIOInit( &driverInfo );
2549 if ( result != ASE_OK ) {
2550 errorStream_ << "RtApiAsio::getDeviceInfo: error (" << getAsioErrorString( result ) << ") initializing driver (" << driverName << ").";
2551 errorText_ = errorStream_.str();
2552 error( RtError::WARNING );
2556 // Determine the device channel information.
2557 long inputChannels, outputChannels;
2558 result = ASIOGetChannels( &inputChannels, &outputChannels );
2559 if ( result != ASE_OK ) {
2560 drivers.removeCurrentDriver();
2561 errorStream_ << "RtApiAsio::getDeviceInfo: error (" << getAsioErrorString( result ) << ") getting channel count (" << driverName << ").";
2562 errorText_ = errorStream_.str();
2563 error( RtError::WARNING );
2567 info.outputChannels = outputChannels;
2568 info.inputChannels = inputChannels;
2569 if ( info.outputChannels > 0 && info.inputChannels > 0 )
2570 info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
2572 // Determine the supported sample rates.
2573 info.sampleRates.clear();
2574 for ( unsigned int i=0; i<MAX_SAMPLE_RATES; i++ ) {
2575 result = ASIOCanSampleRate( (ASIOSampleRate) SAMPLE_RATES[i] );
2576 if ( result == ASE_OK )
2577 info.sampleRates.push_back( SAMPLE_RATES[i] );
2580 // Determine supported data types ... just check first channel and assume rest are the same.
2581 ASIOChannelInfo channelInfo;
2582 channelInfo.channel = 0;
2583 channelInfo.isInput = true;
2584 if ( info.inputChannels <= 0 ) channelInfo.isInput = false;
2585 result = ASIOGetChannelInfo( &channelInfo );
2586 if ( result != ASE_OK ) {
2587 drivers.removeCurrentDriver();
2588 errorStream_ << "RtApiAsio::getDeviceInfo: error (" << getAsioErrorString( result ) << ") getting driver channel info (" << driverName << ").";
2589 errorText_ = errorStream_.str();
2590 error( RtError::WARNING );
2594 info.nativeFormats = 0;
2595 if ( channelInfo.type == ASIOSTInt16MSB || channelInfo.type == ASIOSTInt16LSB )
2596 info.nativeFormats |= RTAUDIO_SINT16;
2597 else if ( channelInfo.type == ASIOSTInt32MSB || channelInfo.type == ASIOSTInt32LSB )
2598 info.nativeFormats |= RTAUDIO_SINT32;
2599 else if ( channelInfo.type == ASIOSTFloat32MSB || channelInfo.type == ASIOSTFloat32LSB )
2600 info.nativeFormats |= RTAUDIO_FLOAT32;
2601 else if ( channelInfo.type == ASIOSTFloat64MSB || channelInfo.type == ASIOSTFloat64LSB )
2602 info.nativeFormats |= RTAUDIO_FLOAT64;
2604 if ( getDefaultOutputDevice() == device )
2605 info.isDefaultOutput = true;
2606 if ( getDefaultInputDevice() == device )
2607 info.isDefaultInput = true;
2610 drivers.removeCurrentDriver();
2614 void bufferSwitch( long index, ASIOBool processNow )
2616 RtApiAsio *object = (RtApiAsio *) asioCallbackInfo->object;
2617 object->callbackEvent( index );
2620 void RtApiAsio :: saveDeviceInfo( void )
2624 unsigned int nDevices = getDeviceCount();
2625 devices_.resize( nDevices );
2626 for ( unsigned int i=0; i<nDevices; i++ )
2627 devices_[i] = getDeviceInfo( i );
2630 bool RtApiAsio :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
2631 unsigned int firstChannel, unsigned int sampleRate,
2632 RtAudioFormat format, unsigned int *bufferSize,
2633 RtAudio::StreamOptions *options )
2635 // For ASIO, a duplex stream MUST use the same driver.
2636 if ( mode == INPUT && stream_.mode == OUTPUT && stream_.device[0] != device ) {
2637 errorText_ = "RtApiAsio::probeDeviceOpen: an ASIO duplex stream must use the same device for input and output!";
2641 char driverName[32];
2642 ASIOError result = drivers.asioGetDriverName( (int) device, driverName, 32 );
2643 if ( result != ASE_OK ) {
2644 errorStream_ << "RtApiAsio::probeDeviceOpen: unable to get driver name (" << getAsioErrorString( result ) << ").";
2645 errorText_ = errorStream_.str();
2649 // The getDeviceInfo() function will not work when a stream is open
2650 // because ASIO does not allow multiple devices to run at the same
2651 // time. Thus, we'll probe the system before opening a stream and
2652 // save the results for use by getDeviceInfo().
2653 this->saveDeviceInfo();
2655 // Only load the driver once for duplex stream.
2656 if ( mode != INPUT || stream_.mode != OUTPUT ) {
2657 if ( !drivers.loadDriver( driverName ) ) {
2658 errorStream_ << "RtApiAsio::probeDeviceOpen: unable to load driver (" << driverName << ").";
2659 errorText_ = errorStream_.str();
2663 result = ASIOInit( &driverInfo );
2664 if ( result != ASE_OK ) {
2665 errorStream_ << "RtApiAsio::probeDeviceOpen: error (" << getAsioErrorString( result ) << ") initializing driver (" << driverName << ").";
2666 errorText_ = errorStream_.str();
2671 // Check the device channel count.
2672 long inputChannels, outputChannels;
2673 result = ASIOGetChannels( &inputChannels, &outputChannels );
2674 if ( result != ASE_OK ) {
2675 drivers.removeCurrentDriver();
2676 errorStream_ << "RtApiAsio::probeDeviceOpen: error (" << getAsioErrorString( result ) << ") getting channel count (" << driverName << ").";
2677 errorText_ = errorStream_.str();
2681 if ( ( mode == OUTPUT && (channels+firstChannel) > (unsigned int) outputChannels) ||
2682 ( mode == INPUT && (channels+firstChannel) > (unsigned int) inputChannels) ) {
2683 drivers.removeCurrentDriver();
2684 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") does not support requested channel count (" << channels << ") + offset (" << firstChannel << ").";
2685 errorText_ = errorStream_.str();
2688 stream_.nDeviceChannels[mode] = channels;
2689 stream_.nUserChannels[mode] = channels;
2690 stream_.channelOffset[mode] = firstChannel;
2692 // Verify the sample rate is supported.
2693 result = ASIOCanSampleRate( (ASIOSampleRate) sampleRate );
2694 if ( result != ASE_OK ) {
2695 drivers.removeCurrentDriver();
2696 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") does not support requested sample rate (" << sampleRate << ").";
2697 errorText_ = errorStream_.str();
2701 // Get the current sample rate
2702 ASIOSampleRate currentRate;
2703 result = ASIOGetSampleRate( ¤tRate );
2704 if ( result != ASE_OK ) {
2705 drivers.removeCurrentDriver();
2706 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error getting sample rate.";
2707 errorText_ = errorStream_.str();
2711 // Set the sample rate only if necessary
2712 if ( currentRate != sampleRate ) {
2713 result = ASIOSetSampleRate( (ASIOSampleRate) sampleRate );
2714 if ( result != ASE_OK ) {
2715 drivers.removeCurrentDriver();
2716 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error setting sample rate (" << sampleRate << ").";
2717 errorText_ = errorStream_.str();
2722 // Determine the driver data type.
2723 ASIOChannelInfo channelInfo;
2724 channelInfo.channel = 0;
2725 if ( mode == OUTPUT ) channelInfo.isInput = false;
2726 else channelInfo.isInput = true;
2727 result = ASIOGetChannelInfo( &channelInfo );
2728 if ( result != ASE_OK ) {
2729 drivers.removeCurrentDriver();
2730 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") getting data format.";
2731 errorText_ = errorStream_.str();
2735 // Assuming WINDOWS host is always little-endian.
2736 stream_.doByteSwap[mode] = false;
2737 stream_.userFormat = format;
2738 stream_.deviceFormat[mode] = 0;
2739 if ( channelInfo.type == ASIOSTInt16MSB || channelInfo.type == ASIOSTInt16LSB ) {
2740 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
2741 if ( channelInfo.type == ASIOSTInt16MSB ) stream_.doByteSwap[mode] = true;
2743 else if ( channelInfo.type == ASIOSTInt32MSB || channelInfo.type == ASIOSTInt32LSB ) {
2744 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
2745 if ( channelInfo.type == ASIOSTInt32MSB ) stream_.doByteSwap[mode] = true;
2747 else if ( channelInfo.type == ASIOSTFloat32MSB || channelInfo.type == ASIOSTFloat32LSB ) {
2748 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
2749 if ( channelInfo.type == ASIOSTFloat32MSB ) stream_.doByteSwap[mode] = true;
2751 else if ( channelInfo.type == ASIOSTFloat64MSB || channelInfo.type == ASIOSTFloat64LSB ) {
2752 stream_.deviceFormat[mode] = RTAUDIO_FLOAT64;
2753 if ( channelInfo.type == ASIOSTFloat64MSB ) stream_.doByteSwap[mode] = true;
2756 if ( stream_.deviceFormat[mode] == 0 ) {
2757 drivers.removeCurrentDriver();
2758 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") data format not supported by RtAudio.";
2759 errorText_ = errorStream_.str();
2763 // Set the buffer size. For a duplex stream, this will end up
2764 // setting the buffer size based on the input constraints, which
2766 long minSize, maxSize, preferSize, granularity;
2767 result = ASIOGetBufferSize( &minSize, &maxSize, &preferSize, &granularity );
2768 if ( result != ASE_OK ) {
2769 drivers.removeCurrentDriver();
2770 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") getting buffer size.";
2771 errorText_ = errorStream_.str();
2775 if ( *bufferSize < (unsigned int) minSize ) *bufferSize = (unsigned int) minSize;
2776 else if ( *bufferSize > (unsigned int) maxSize ) *bufferSize = (unsigned int) maxSize;
2777 else if ( granularity == -1 ) {
2778 // Make sure bufferSize is a power of two.
2779 int log2_of_min_size = 0;
2780 int log2_of_max_size = 0;
2782 for ( unsigned int i = 0; i < sizeof(long) * 8; i++ ) {
2783 if ( minSize & ((long)1 << i) ) log2_of_min_size = i;
2784 if ( maxSize & ((long)1 << i) ) log2_of_max_size = i;
2787 long min_delta = std::abs( (long)*bufferSize - ((long)1 << log2_of_min_size) );
2788 int min_delta_num = log2_of_min_size;
2790 for (int i = log2_of_min_size + 1; i <= log2_of_max_size; i++) {
2791 long current_delta = std::abs( (long)*bufferSize - ((long)1 << i) );
2792 if (current_delta < min_delta) {
2793 min_delta = current_delta;
2798 *bufferSize = ( (unsigned int)1 << min_delta_num );
2799 if ( *bufferSize < (unsigned int) minSize ) *bufferSize = (unsigned int) minSize;
2800 else if ( *bufferSize > (unsigned int) maxSize ) *bufferSize = (unsigned int) maxSize;
2802 else if ( granularity != 0 ) {
2803 // Set to an even multiple of granularity, rounding up.
2804 *bufferSize = (*bufferSize + granularity-1) / granularity * granularity;
2807 if ( mode == INPUT && stream_.mode == OUTPUT && stream_.bufferSize != *bufferSize ) {
2808 drivers.removeCurrentDriver();
2809 errorText_ = "RtApiAsio::probeDeviceOpen: input/output buffersize discrepancy!";
2813 stream_.bufferSize = *bufferSize;
2814 stream_.nBuffers = 2;
2816 if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;
2817 else stream_.userInterleaved = true;
2819 // ASIO always uses non-interleaved buffers.
2820 stream_.deviceInterleaved[mode] = false;
2822 // Allocate, if necessary, our AsioHandle structure for the stream.
2823 AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
2824 if ( handle == 0 ) {
2826 handle = new AsioHandle;
2828 catch ( std::bad_alloc& ) {
2829 //if ( handle == NULL ) {
2830 drivers.removeCurrentDriver();
2831 errorText_ = "RtApiAsio::probeDeviceOpen: error allocating AsioHandle memory.";
2834 handle->bufferInfos = 0;
2836 // Create a manual-reset event.
2837 handle->condition = CreateEvent( NULL, // no security
2838 TRUE, // manual-reset
2839 FALSE, // non-signaled initially
2841 stream_.apiHandle = (void *) handle;
2844 // Create the ASIO internal buffers. Since RtAudio sets up input
2845 // and output separately, we'll have to dispose of previously
2846 // created output buffers for a duplex stream.
2847 long inputLatency, outputLatency;
2848 if ( mode == INPUT && stream_.mode == OUTPUT ) {
2849 ASIODisposeBuffers();
2850 if ( handle->bufferInfos ) free( handle->bufferInfos );
2853 // Allocate, initialize, and save the bufferInfos in our stream callbackInfo structure.
2854 bool buffersAllocated = false;
2855 unsigned int i, nChannels = stream_.nDeviceChannels[0] + stream_.nDeviceChannels[1];
2856 handle->bufferInfos = (ASIOBufferInfo *) malloc( nChannels * sizeof(ASIOBufferInfo) );
2857 if ( handle->bufferInfos == NULL ) {
2858 errorStream_ << "RtApiAsio::probeDeviceOpen: error allocating bufferInfo memory for driver (" << driverName << ").";
2859 errorText_ = errorStream_.str();
2863 ASIOBufferInfo *infos;
2864 infos = handle->bufferInfos;
2865 for ( i=0; i<stream_.nDeviceChannels[0]; i++, infos++ ) {
2866 infos->isInput = ASIOFalse;
2867 infos->channelNum = i + stream_.channelOffset[0];
2868 infos->buffers[0] = infos->buffers[1] = 0;
2870 for ( i=0; i<stream_.nDeviceChannels[1]; i++, infos++ ) {
2871 infos->isInput = ASIOTrue;
2872 infos->channelNum = i + stream_.channelOffset[1];
2873 infos->buffers[0] = infos->buffers[1] = 0;
2876 // Set up the ASIO callback structure and create the ASIO data buffers.
2877 asioCallbacks.bufferSwitch = &bufferSwitch;
2878 asioCallbacks.sampleRateDidChange = &sampleRateChanged;
2879 asioCallbacks.asioMessage = &asioMessages;
2880 asioCallbacks.bufferSwitchTimeInfo = NULL;
2881 result = ASIOCreateBuffers( handle->bufferInfos, nChannels, stream_.bufferSize, &asioCallbacks );
2882 if ( result != ASE_OK ) {
2883 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") creating buffers.";
2884 errorText_ = errorStream_.str();
2887 buffersAllocated = true;
2889 // Set flags for buffer conversion.
2890 stream_.doConvertBuffer[mode] = false;
2891 if ( stream_.userFormat != stream_.deviceFormat[mode] )
2892 stream_.doConvertBuffer[mode] = true;
2893 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
2894 stream_.nUserChannels[mode] > 1 )
2895 stream_.doConvertBuffer[mode] = true;
2897 // Allocate necessary internal buffers
2898 unsigned long bufferBytes;
2899 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
2900 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
2901 if ( stream_.userBuffer[mode] == NULL ) {
2902 errorText_ = "RtApiAsio::probeDeviceOpen: error allocating user buffer memory.";
2906 if ( stream_.doConvertBuffer[mode] ) {
2908 bool makeBuffer = true;
2909 bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
2910 if ( mode == INPUT ) {
2911 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
2912 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
2913 if ( bufferBytes <= bytesOut ) makeBuffer = false;
2918 bufferBytes *= *bufferSize;
2919 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
2920 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
2921 if ( stream_.deviceBuffer == NULL ) {
2922 errorText_ = "RtApiAsio::probeDeviceOpen: error allocating device buffer memory.";
2928 stream_.sampleRate = sampleRate;
2929 stream_.device[mode] = device;
2930 stream_.state = STREAM_STOPPED;
2931 asioCallbackInfo = &stream_.callbackInfo;
2932 stream_.callbackInfo.object = (void *) this;
2933 if ( stream_.mode == OUTPUT && mode == INPUT )
2934 // We had already set up an output stream.
2935 stream_.mode = DUPLEX;
2937 stream_.mode = mode;
2939 // Determine device latencies
2940 result = ASIOGetLatencies( &inputLatency, &outputLatency );
2941 if ( result != ASE_OK ) {
2942 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") getting latency.";
2943 errorText_ = errorStream_.str();
2944 error( RtError::WARNING); // warn but don't fail
2947 stream_.latency[0] = outputLatency;
2948 stream_.latency[1] = inputLatency;
2951 // Setup the buffer conversion information structure. We don't use
2952 // buffers to do channel offsets, so we override that parameter
2954 if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, 0 );
2959 if ( buffersAllocated )
2960 ASIODisposeBuffers();
2961 drivers.removeCurrentDriver();
2964 CloseHandle( handle->condition );
2965 if ( handle->bufferInfos )
2966 free( handle->bufferInfos );
2968 stream_.apiHandle = 0;
2971 for ( int i=0; i<2; i++ ) {
2972 if ( stream_.userBuffer[i] ) {
2973 free( stream_.userBuffer[i] );
2974 stream_.userBuffer[i] = 0;
2978 if ( stream_.deviceBuffer ) {
2979 free( stream_.deviceBuffer );
2980 stream_.deviceBuffer = 0;
2986 void RtApiAsio :: closeStream()
2988 if ( stream_.state == STREAM_CLOSED ) {
2989 errorText_ = "RtApiAsio::closeStream(): no open stream to close!";
2990 error( RtError::WARNING );
2994 if ( stream_.state == STREAM_RUNNING ) {
2995 stream_.state = STREAM_STOPPED;
2998 ASIODisposeBuffers();
2999 drivers.removeCurrentDriver();
3001 AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
3003 CloseHandle( handle->condition );
3004 if ( handle->bufferInfos )
3005 free( handle->bufferInfos );
3007 stream_.apiHandle = 0;
3010 for ( int i=0; i<2; i++ ) {
3011 if ( stream_.userBuffer[i] ) {
3012 free( stream_.userBuffer[i] );
3013 stream_.userBuffer[i] = 0;
3017 if ( stream_.deviceBuffer ) {
3018 free( stream_.deviceBuffer );
3019 stream_.deviceBuffer = 0;
3022 stream_.mode = UNINITIALIZED;
3023 stream_.state = STREAM_CLOSED;
3026 void RtApiAsio :: startStream()
3029 if ( stream_.state == STREAM_RUNNING ) {
3030 errorText_ = "RtApiAsio::startStream(): the stream is already running!";
3031 error( RtError::WARNING );
3035 MUTEX_LOCK( &stream_.mutex );
3037 AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
3038 ASIOError result = ASIOStart();
3039 if ( result != ASE_OK ) {
3040 errorStream_ << "RtApiAsio::startStream: error (" << getAsioErrorString( result ) << ") starting device.";
3041 errorText_ = errorStream_.str();
3045 handle->drainCounter = 0;
3046 handle->internalDrain = false;
3047 stream_.state = STREAM_RUNNING;
3051 MUTEX_UNLOCK( &stream_.mutex );
3053 if ( result == ASE_OK ) return;
3054 error( RtError::SYSTEM_ERROR );
3057 void RtApiAsio :: stopStream()
3060 if ( stream_.state == STREAM_STOPPED ) {
3061 errorText_ = "RtApiAsio::stopStream(): the stream is already stopped!";
3062 error( RtError::WARNING );
3066 MUTEX_LOCK( &stream_.mutex );
3068 if ( stream_.state == STREAM_STOPPED ) {
3069 MUTEX_UNLOCK( &stream_.mutex );
3073 AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
3074 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
3075 if ( handle->drainCounter == 0 ) {
3076 handle->drainCounter = 1;
3077 MUTEX_UNLOCK( &stream_.mutex );
3078 WaitForMultipleObjects( 1, &handle->condition, FALSE, INFINITE ); // block until signaled
3079 ResetEvent( handle->condition );
3080 MUTEX_LOCK( &stream_.mutex );
3084 ASIOError result = ASIOStop();
3085 if ( result != ASE_OK ) {
3086 errorStream_ << "RtApiAsio::stopStream: error (" << getAsioErrorString( result ) << ") stopping device.";
3087 errorText_ = errorStream_.str();
3090 stream_.state = STREAM_STOPPED;
3091 MUTEX_UNLOCK( &stream_.mutex );
3093 if ( result == ASE_OK ) return;
3094 error( RtError::SYSTEM_ERROR );
3097 void RtApiAsio :: abortStream()
3100 if ( stream_.state == STREAM_STOPPED ) {
3101 errorText_ = "RtApiAsio::abortStream(): the stream is already stopped!";
3102 error( RtError::WARNING );
3106 // The following lines were commented-out because some behavior was
3107 // noted where the device buffers need to be zeroed to avoid
3108 // continuing sound, even when the device buffers are completely
3109 // disposed. So now, calling abort is the same as calling stop.
3110 // AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
3111 // handle->drainCounter = 1;
3115 bool RtApiAsio :: callbackEvent( long bufferIndex )
3117 if ( stream_.state == STREAM_STOPPED ) return SUCCESS;
3118 if ( stream_.state == STREAM_CLOSED ) {
3119 errorText_ = "RtApiAsio::callbackEvent(): the stream is closed ... this shouldn't happen!";
3120 error( RtError::WARNING );
3124 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
3125 AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
3127 // Check if we were draining the stream and signal is finished.
3128 if ( handle->drainCounter > 3 ) {
3129 if ( handle->internalDrain == false )
3130 SetEvent( handle->condition );
3136 MUTEX_LOCK( &stream_.mutex );
3138 // The state might change while waiting on a mutex.
3139 if ( stream_.state == STREAM_STOPPED ) goto unlock;
3141 // Invoke user callback to get fresh output data UNLESS we are
3143 if ( handle->drainCounter == 0 ) {
3144 RtAudioCallback callback = (RtAudioCallback) info->callback;
3145 double streamTime = getStreamTime();
3146 RtAudioStreamStatus status = 0;
3147 if ( stream_.mode != INPUT && asioXRun == true ) {
3148 status |= RTAUDIO_OUTPUT_UNDERFLOW;
3151 if ( stream_.mode != OUTPUT && asioXRun == true ) {
3152 status |= RTAUDIO_INPUT_OVERFLOW;
3155 handle->drainCounter = callback( stream_.userBuffer[0], stream_.userBuffer[1],
3156 stream_.bufferSize, streamTime, status, info->userData );
3157 if ( handle->drainCounter == 2 ) {
3158 MUTEX_UNLOCK( &stream_.mutex );
3162 else if ( handle->drainCounter == 1 )
3163 handle->internalDrain = true;
3166 unsigned int nChannels, bufferBytes, i, j;
3167 nChannels = stream_.nDeviceChannels[0] + stream_.nDeviceChannels[1];
3168 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
3170 bufferBytes = stream_.bufferSize * formatBytes( stream_.deviceFormat[0] );
3172 if ( handle->drainCounter > 1 ) { // write zeros to the output stream
3174 for ( i=0, j=0; i<nChannels; i++ ) {
3175 if ( handle->bufferInfos[i].isInput != ASIOTrue )
3176 memset( handle->bufferInfos[i].buffers[bufferIndex], 0, bufferBytes );
3180 else if ( stream_.doConvertBuffer[0] ) {
3182 convertBuffer( stream_.deviceBuffer, stream_.userBuffer[0], stream_.convertInfo[0] );
3183 if ( stream_.doByteSwap[0] )
3184 byteSwapBuffer( stream_.deviceBuffer,
3185 stream_.bufferSize * stream_.nDeviceChannels[0],
3186 stream_.deviceFormat[0] );
3188 for ( i=0, j=0; i<nChannels; i++ ) {
3189 if ( handle->bufferInfos[i].isInput != ASIOTrue )
3190 memcpy( handle->bufferInfos[i].buffers[bufferIndex],
3191 &stream_.deviceBuffer[j++*bufferBytes], bufferBytes );
3197 if ( stream_.doByteSwap[0] )
3198 byteSwapBuffer( stream_.userBuffer[0],
3199 stream_.bufferSize * stream_.nUserChannels[0],
3200 stream_.userFormat );
3202 for ( i=0, j=0; i<nChannels; i++ ) {
3203 if ( handle->bufferInfos[i].isInput != ASIOTrue )
3204 memcpy( handle->bufferInfos[i].buffers[bufferIndex],
3205 &stream_.userBuffer[0][bufferBytes*j++], bufferBytes );
3210 if ( handle->drainCounter ) {
3211 handle->drainCounter++;
3216 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
3218 bufferBytes = stream_.bufferSize * formatBytes(stream_.deviceFormat[1]);
3220 if (stream_.doConvertBuffer[1]) {
3222 // Always interleave ASIO input data.
3223 for ( i=0, j=0; i<nChannels; i++ ) {
3224 if ( handle->bufferInfos[i].isInput == ASIOTrue )
3225 memcpy( &stream_.deviceBuffer[j++*bufferBytes],
3226 handle->bufferInfos[i].buffers[bufferIndex],
3230 if ( stream_.doByteSwap[1] )
3231 byteSwapBuffer( stream_.deviceBuffer,
3232 stream_.bufferSize * stream_.nDeviceChannels[1],
3233 stream_.deviceFormat[1] );
3234 convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
3238 for ( i=0, j=0; i<nChannels; i++ ) {
3239 if ( handle->bufferInfos[i].isInput == ASIOTrue ) {
3240 memcpy( &stream_.userBuffer[1][bufferBytes*j++],
3241 handle->bufferInfos[i].buffers[bufferIndex],
3246 if ( stream_.doByteSwap[1] )
3247 byteSwapBuffer( stream_.userBuffer[1],
3248 stream_.bufferSize * stream_.nUserChannels[1],
3249 stream_.userFormat );
3254 // The following call was suggested by Malte Clasen. While the API
3255 // documentation indicates it should not be required, some device
3256 // drivers apparently do not function correctly without it.
3259 MUTEX_UNLOCK( &stream_.mutex );
3261 RtApi::tickStreamTime();
3265 void sampleRateChanged( ASIOSampleRate sRate )
3267 // The ASIO documentation says that this usually only happens during
3268 // external sync. Audio processing is not stopped by the driver,
3269 // actual sample rate might not have even changed, maybe only the
3270 // sample rate status of an AES/EBU or S/PDIF digital input at the
3273 RtApi *object = (RtApi *) asioCallbackInfo->object;
3275 object->stopStream();
3277 catch ( RtError &exception ) {
3278 std::cerr << "\nRtApiAsio: sampleRateChanged() error (" << exception.getMessage() << ")!\n" << std::endl;
3282 std::cerr << "\nRtApiAsio: driver reports sample rate changed to " << sRate << " ... stream stopped!!!\n" << std::endl;
3285 long asioMessages( long selector, long value, void* message, double* opt )
3289 switch( selector ) {
3290 case kAsioSelectorSupported:
3291 if ( value == kAsioResetRequest
3292 || value == kAsioEngineVersion
3293 || value == kAsioResyncRequest
3294 || value == kAsioLatenciesChanged
3295 // The following three were added for ASIO 2.0, you don't
3296 // necessarily have to support them.
3297 || value == kAsioSupportsTimeInfo
3298 || value == kAsioSupportsTimeCode
3299 || value == kAsioSupportsInputMonitor)
3302 case kAsioResetRequest:
3303 // Defer the task and perform the reset of the driver during the
3304 // next "safe" situation. You cannot reset the driver right now,
3305 // as this code is called from the driver. Reset the driver is
3306 // done by completely destruct is. I.e. ASIOStop(),
3307 // ASIODisposeBuffers(), Destruction Afterwards you initialize the
3309 std::cerr << "\nRtApiAsio: driver reset requested!!!" << std::endl;
3312 case kAsioResyncRequest:
3313 // This informs the application that the driver encountered some
3314 // non-fatal data loss. It is used for synchronization purposes
3315 // of different media. Added mainly to work around the Win16Mutex
3316 // problems in Windows 95/98 with the Windows Multimedia system,
3317 // which could lose data because the Mutex was held too long by
3318 // another thread. However a driver can issue it in other
3320 // std::cerr << "\nRtApiAsio: driver resync requested!!!" << std::endl;
3324 case kAsioLatenciesChanged:
3325 // This will inform the host application that the drivers were
3326 // latencies changed. Beware, it this does not mean that the
3327 // buffer sizes have changed! You might need to update internal
3329 std::cerr << "\nRtApiAsio: driver latency may have changed!!!" << std::endl;
3332 case kAsioEngineVersion:
3333 // Return the supported ASIO version of the host application. If
3334 // a host application does not implement this selector, ASIO 1.0
3335 // is assumed by the driver.
3338 case kAsioSupportsTimeInfo:
3339 // Informs the driver whether the
3340 // asioCallbacks.bufferSwitchTimeInfo() callback is supported.
3341 // For compatibility with ASIO 1.0 drivers the host application
3342 // should always support the "old" bufferSwitch method, too.
3345 case kAsioSupportsTimeCode:
3346 // Informs the driver whether application is interested in time
3347 // code info. If an application does not need to know about time
3348 // code, the driver has less work to do.
3355 static const char* getAsioErrorString( ASIOError result )
3363 static Messages m[] =
3365 { ASE_NotPresent, "Hardware input or output is not present or available." },
3366 { ASE_HWMalfunction, "Hardware is malfunctioning." },
3367 { ASE_InvalidParameter, "Invalid input parameter." },
3368 { ASE_InvalidMode, "Invalid mode." },
3369 { ASE_SPNotAdvancing, "Sample position not advancing." },
3370 { ASE_NoClock, "Sample clock or rate cannot be determined or is not present." },
3371 { ASE_NoMemory, "Not enough memory to complete the request." }
3374 for ( unsigned int i = 0; i < sizeof(m)/sizeof(m[0]); ++i )
3375 if ( m[i].value == result ) return m[i].message;
3377 return "Unknown error.";
3379 //******************** End of __WINDOWS_ASIO__ *********************//
3383 #if defined(__WINDOWS_DS__) // Windows DirectSound API
3385 // Modified by Robin Davies, October 2005
3386 // - Improvements to DirectX pointer chasing.
3387 // - Backdoor RtDsStatistics hook provides DirectX performance information.
3388 // - Bug fix for non-power-of-two Asio granularity used by Edirol PCR-A30.
3389 // - Auto-call CoInitialize for DSOUND and ASIO platforms.
3390 // Various revisions for RtAudio 4.0 by Gary Scavone, April 2007
3395 #if defined(__MINGW32__)
3396 // missing from latest mingw winapi
3397 #define WAVE_FORMAT_96M08 0x00010000 /* 96 kHz, Mono, 8-bit */
3398 #define WAVE_FORMAT_96S08 0x00020000 /* 96 kHz, Stereo, 8-bit */
3399 #define WAVE_FORMAT_96M16 0x00040000 /* 96 kHz, Mono, 16-bit */
3400 #define WAVE_FORMAT_96S16 0x00080000 /* 96 kHz, Stereo, 16-bit */
3403 #define MINIMUM_DEVICE_BUFFER_SIZE 32768
3405 #ifdef _MSC_VER // if Microsoft Visual C++
3406 #pragma comment( lib, "winmm.lib" ) // then, auto-link winmm.lib. Otherwise, it has to be added manually.
3409 static inline DWORD dsPointerDifference( DWORD laterPointer, DWORD earlierPointer, DWORD bufferSize )
3411 if ( laterPointer > earlierPointer )
3412 return laterPointer - earlierPointer;
3414 return laterPointer - earlierPointer + bufferSize;
3417 static inline DWORD dsPointerBetween( DWORD pointer, DWORD laterPointer, DWORD earlierPointer, DWORD bufferSize )
3419 if ( pointer > bufferSize ) pointer -= bufferSize;
3420 if ( laterPointer < earlierPointer ) laterPointer += bufferSize;
3421 if ( pointer < earlierPointer ) pointer += bufferSize;
3422 return pointer >= earlierPointer && pointer < laterPointer;
3425 // A structure to hold various information related to the DirectSound
3426 // API implementation.
3428 unsigned int drainCounter; // Tracks callback counts when draining
3429 bool internalDrain; // Indicates if stop is initiated from callback or not.
3433 UINT bufferPointer[2];
3434 DWORD dsBufferSize[2];
3435 DWORD dsPointerLeadTime[2]; // the number of bytes ahead of the safe pointer to lead by.
3439 :drainCounter(0), internalDrain(false) { id[0] = 0; id[1] = 0; buffer[0] = 0; buffer[1] = 0; xrun[0] = false; xrun[1] = false; bufferPointer[0] = 0; bufferPointer[1] = 0; }
3443 RtApiDs::RtDsStatistics RtApiDs::statistics;
3445 // Provides a backdoor hook to monitor for DirectSound read overruns and write underruns.
3446 RtApiDs::RtDsStatistics RtApiDs::getDsStatistics()
3448 RtDsStatistics s = statistics;
3450 // update the calculated fields.
3451 if ( s.inputFrameSize != 0 )
3452 s.latency += s.readDeviceSafeLeadBytes * 1.0 / s.inputFrameSize / s.sampleRate;
3454 if ( s.outputFrameSize != 0 )
3455 s.latency += (s.writeDeviceSafeLeadBytes + s.writeDeviceBufferLeadBytes) * 1.0 / s.outputFrameSize / s.sampleRate;
3461 // Declarations for utility functions, callbacks, and structures
3462 // specific to the DirectSound implementation.
3463 static BOOL CALLBACK deviceQueryCallback( LPGUID lpguid,
3464 LPCTSTR description,
3468 static char* getErrorString( int code );
3470 extern "C" unsigned __stdcall callbackHandler( void *ptr );
3476 unsigned int counter;
3482 : isInput(false), getDefault(false), findIndex(false), counter(0), index(0) {}
3485 RtApiDs :: RtApiDs()
3487 // Dsound will run both-threaded. If CoInitialize fails, then just
3488 // accept whatever the mainline chose for a threading model.
3489 coInitialized_ = false;
3490 HRESULT hr = CoInitialize( NULL );
3491 if ( !FAILED( hr ) ) coInitialized_ = true;
3494 RtApiDs :: ~RtApiDs()
3496 if ( coInitialized_ ) CoUninitialize(); // balanced call.
3497 if ( stream_.state != STREAM_CLOSED ) closeStream();
3500 unsigned int RtApiDs :: getDefaultInputDevice( void )
3502 // Count output devices.
3504 HRESULT result = DirectSoundEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &info );
3505 if ( FAILED( result ) ) {
3506 errorStream_ << "RtApiDs::getDefaultOutputDevice: error (" << getErrorString( result ) << ") counting output devices!";
3507 errorText_ = errorStream_.str();
3508 error( RtError::WARNING );
3512 // Now enumerate input devices until we find the id = NULL.
3513 info.isInput = true;
3514 info.getDefault = true;
3515 result = DirectSoundCaptureEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &info );
3516 if ( FAILED( result ) ) {
3517 errorStream_ << "RtApiDs::getDefaultInputDevice: error (" << getErrorString( result ) << ") enumerating input devices!";
3518 errorText_ = errorStream_.str();
3519 error( RtError::WARNING );
3523 if ( info.counter > 0 ) return info.counter - 1;
3527 unsigned int RtApiDs :: getDefaultOutputDevice( void )
3529 // Enumerate output devices until we find the id = NULL.
3531 info.getDefault = true;
3532 HRESULT result = DirectSoundEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &info );
3533 if ( FAILED( result ) ) {
3534 errorStream_ << "RtApiDs::getDefaultOutputDevice: error (" << getErrorString( result ) << ") enumerating output devices!";
3535 errorText_ = errorStream_.str();
3536 error( RtError::WARNING );
3540 if ( info.counter > 0 ) return info.counter - 1;
3544 unsigned int RtApiDs :: getDeviceCount( void )
3546 // Count DirectSound devices.
3548 HRESULT result = DirectSoundEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &info );
3549 if ( FAILED( result ) ) {
3550 errorStream_ << "RtApiDs::getDeviceCount: error (" << getErrorString( result ) << ") enumerating output devices!";
3551 errorText_ = errorStream_.str();
3552 error( RtError::WARNING );
3555 // Count DirectSoundCapture devices.
3556 info.isInput = true;
3557 result = DirectSoundCaptureEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &info );
3558 if ( FAILED( result ) ) {
3559 errorStream_ << "RtApiDs::getDeviceCount: error (" << getErrorString( result ) << ") enumerating input devices!";
3560 errorText_ = errorStream_.str();
3561 error( RtError::WARNING );
3564 return info.counter;
3567 RtAudio::DeviceInfo RtApiDs :: getDeviceInfo( unsigned int device )
3569 // Because DirectSound always enumerates input and output devices
3570 // separately (and because we don't attempt to combine devices
3571 // internally), none of our "devices" will ever be duplex.
3573 RtAudio::DeviceInfo info;
3574 info.probed = false;
3576 // Enumerate through devices to find the id (if it exists). Note
3577 // that we have to do the output enumeration first, even if this is
3578 // an input device, in order for the device counter to be correct.
3580 dsinfo.findIndex = true;
3581 dsinfo.index = device;
3582 HRESULT result = DirectSoundEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &dsinfo );
3583 if ( FAILED( result ) ) {
3584 errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") enumerating output devices!";
3585 errorText_ = errorStream_.str();
3586 error( RtError::WARNING );
3589 if ( dsinfo.name.empty() ) goto probeInput;
3591 LPDIRECTSOUND output;
3593 result = DirectSoundCreate( dsinfo.id, &output, NULL );
3594 if ( FAILED( result ) ) {
3595 errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") opening output device (" << dsinfo.name << ")!";
3596 errorText_ = errorStream_.str();
3597 error( RtError::WARNING );
3601 outCaps.dwSize = sizeof( outCaps );
3602 result = output->GetCaps( &outCaps );
3603 if ( FAILED( result ) ) {
3605 errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") getting capabilities!";
3606 errorText_ = errorStream_.str();
3607 error( RtError::WARNING );
3611 // Get output channel information.
3612 info.outputChannels = ( outCaps.dwFlags & DSCAPS_PRIMARYSTEREO ) ? 2 : 1;
3614 // Get sample rate information.
3615 info.sampleRates.clear();
3616 for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {
3617 if ( SAMPLE_RATES[k] >= (unsigned int) outCaps.dwMinSecondarySampleRate &&
3618 SAMPLE_RATES[k] <= (unsigned int) outCaps.dwMaxSecondarySampleRate )
3619 info.sampleRates.push_back( SAMPLE_RATES[k] );
3622 // Get format information.
3623 if ( outCaps.dwFlags & DSCAPS_PRIMARY16BIT ) info.nativeFormats |= RTAUDIO_SINT16;
3624 if ( outCaps.dwFlags & DSCAPS_PRIMARY8BIT ) info.nativeFormats |= RTAUDIO_SINT8;
3628 if ( getDefaultOutputDevice() == device )
3629 info.isDefaultOutput = true;
3631 // Copy name and return.
3632 info.name = dsinfo.name;
3639 dsinfo.isInput = true;
3640 result = DirectSoundCaptureEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &dsinfo );
3641 if ( FAILED( result ) ) {
3642 errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") enumerating input devices!";
3643 errorText_ = errorStream_.str();
3644 error( RtError::WARNING );
3647 if ( dsinfo.name.empty() ) return info;
3649 LPDIRECTSOUNDCAPTURE input;
3650 result = DirectSoundCaptureCreate( dsinfo.id, &input, NULL );
3651 if ( FAILED( result ) ) {
3652 errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") opening input device (" << dsinfo.name << ")!";
3653 errorText_ = errorStream_.str();
3654 error( RtError::WARNING );
3659 inCaps.dwSize = sizeof( inCaps );
3660 result = input->GetCaps( &inCaps );
3661 if ( FAILED( result ) ) {
3663 errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") getting object capabilities (" << dsinfo.name << ")!";
3664 errorText_ = errorStream_.str();
3665 error( RtError::WARNING );
3669 // Get input channel information.
3670 info.inputChannels = inCaps.dwChannels;
3672 // Get sample rate and format information.
3673 if ( inCaps.dwChannels == 2 ) {
3674 if ( inCaps.dwFormats & WAVE_FORMAT_1S16 ) info.nativeFormats |= RTAUDIO_SINT16;
3675 if ( inCaps.dwFormats & WAVE_FORMAT_2S16 ) info.nativeFormats |= RTAUDIO_SINT16;
3676 if ( inCaps.dwFormats & WAVE_FORMAT_4S16 ) info.nativeFormats |= RTAUDIO_SINT16;
3677 if ( inCaps.dwFormats & WAVE_FORMAT_96S16 ) info.nativeFormats |= RTAUDIO_SINT16;
3678 if ( inCaps.dwFormats & WAVE_FORMAT_1S08 ) info.nativeFormats |= RTAUDIO_SINT8;
3679 if ( inCaps.dwFormats & WAVE_FORMAT_2S08 ) info.nativeFormats |= RTAUDIO_SINT8;
3680 if ( inCaps.dwFormats & WAVE_FORMAT_4S08 ) info.nativeFormats |= RTAUDIO_SINT8;
3681 if ( inCaps.dwFormats & WAVE_FORMAT_96S08 ) info.nativeFormats |= RTAUDIO_SINT8;
3683 if ( info.nativeFormats & RTAUDIO_SINT16 ) {
3684 if ( inCaps.dwFormats & WAVE_FORMAT_1S16 ) info.sampleRates.push_back( 11025 );
3685 if ( inCaps.dwFormats & WAVE_FORMAT_2S16 ) info.sampleRates.push_back( 22050 );
3686 if ( inCaps.dwFormats & WAVE_FORMAT_4S16 ) info.sampleRates.push_back( 44100 );
3687 if ( inCaps.dwFormats & WAVE_FORMAT_96S16 ) info.sampleRates.push_back( 96000 );
3689 else if ( info.nativeFormats & RTAUDIO_SINT8 ) {
3690 if ( inCaps.dwFormats & WAVE_FORMAT_1S08 ) info.sampleRates.push_back( 11025 );
3691 if ( inCaps.dwFormats & WAVE_FORMAT_2S08 ) info.sampleRates.push_back( 22050 );
3692 if ( inCaps.dwFormats & WAVE_FORMAT_4S08 ) info.sampleRates.push_back( 44100 );
3693 if ( inCaps.dwFormats & WAVE_FORMAT_96S08 ) info.sampleRates.push_back( 44100 );
3696 else if ( inCaps.dwChannels == 1 ) {
3697 if ( inCaps.dwFormats & WAVE_FORMAT_1M16 ) info.nativeFormats |= RTAUDIO_SINT16;
3698 if ( inCaps.dwFormats & WAVE_FORMAT_2M16 ) info.nativeFormats |= RTAUDIO_SINT16;
3699 if ( inCaps.dwFormats & WAVE_FORMAT_4M16 ) info.nativeFormats |= RTAUDIO_SINT16;
3700 if ( inCaps.dwFormats & WAVE_FORMAT_96M16 ) info.nativeFormats |= RTAUDIO_SINT16;
3701 if ( inCaps.dwFormats & WAVE_FORMAT_1M08 ) info.nativeFormats |= RTAUDIO_SINT8;
3702 if ( inCaps.dwFormats & WAVE_FORMAT_2M08 ) info.nativeFormats |= RTAUDIO_SINT8;
3703 if ( inCaps.dwFormats & WAVE_FORMAT_4M08 ) info.nativeFormats |= RTAUDIO_SINT8;
3704 if ( inCaps.dwFormats & WAVE_FORMAT_96M08 ) info.nativeFormats |= RTAUDIO_SINT8;
3706 if ( info.nativeFormats & RTAUDIO_SINT16 ) {
3707 if ( inCaps.dwFormats & WAVE_FORMAT_1M16 ) info.sampleRates.push_back( 11025 );
3708 if ( inCaps.dwFormats & WAVE_FORMAT_2M16 ) info.sampleRates.push_back( 22050 );
3709 if ( inCaps.dwFormats & WAVE_FORMAT_4M16 ) info.sampleRates.push_back( 44100 );
3710 if ( inCaps.dwFormats & WAVE_FORMAT_96M16 ) info.sampleRates.push_back( 96000 );
3712 else if ( info.nativeFormats & RTAUDIO_SINT8 ) {
3713 if ( inCaps.dwFormats & WAVE_FORMAT_1M08 ) info.sampleRates.push_back( 11025 );
3714 if ( inCaps.dwFormats & WAVE_FORMAT_2M08 ) info.sampleRates.push_back( 22050 );
3715 if ( inCaps.dwFormats & WAVE_FORMAT_4M08 ) info.sampleRates.push_back( 44100 );
3716 if ( inCaps.dwFormats & WAVE_FORMAT_96M08 ) info.sampleRates.push_back( 96000 );
3719 else info.inputChannels = 0; // technically, this would be an error
3723 if ( info.inputChannels == 0 ) return info;
3725 if ( getDefaultInputDevice() == device )
3726 info.isDefaultInput = true;
3728 // Copy name and return.
3729 info.name = dsinfo.name;
3734 bool RtApiDs :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
3735 unsigned int firstChannel, unsigned int sampleRate,
3736 RtAudioFormat format, unsigned int *bufferSize,
3737 RtAudio::StreamOptions *options )
3739 if ( channels + firstChannel > 2 ) {
3740 errorText_ = "RtApiDs::probeDeviceOpen: DirectSound does not support more than 2 channels per device.";
3744 // Enumerate through devices to find the id (if it exists). Note
3745 // that we have to do the output enumeration first, even if this is
3746 // an input device, in order for the device counter to be correct.
3748 dsinfo.findIndex = true;
3749 dsinfo.index = device;
3750 HRESULT result = DirectSoundEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &dsinfo );
3751 if ( FAILED( result ) ) {
3752 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") enumerating output devices!";
3753 errorText_ = errorStream_.str();
3757 if ( mode == OUTPUT ) {
3758 if ( dsinfo.name.empty() ) {
3759 errorStream_ << "RtApiDs::probeDeviceOpen: device (" << device << ") does not support output!";
3760 errorText_ = errorStream_.str();
3764 else { // mode == INPUT
3765 dsinfo.isInput = true;
3766 HRESULT result = DirectSoundCaptureEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &dsinfo );
3767 if ( FAILED( result ) ) {
3768 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") enumerating input devices!";
3769 errorText_ = errorStream_.str();
3772 if ( dsinfo.name.empty() ) {
3773 errorStream_ << "RtApiDs::probeDeviceOpen: device (" << device << ") does not support input!";
3774 errorText_ = errorStream_.str();
3779 // According to a note in PortAudio, using GetDesktopWindow()
3780 // instead of GetForegroundWindow() is supposed to avoid problems
3781 // that occur when the application's window is not the foreground
3782 // window. Also, if the application window closes before the
3783 // DirectSound buffer, DirectSound can crash. However, for console
3784 // applications, no sound was produced when using GetDesktopWindow().
3785 HWND hWnd = GetForegroundWindow();
3787 // Check the numberOfBuffers parameter and limit the lowest value to
3788 // two. This is a judgement call and a value of two is probably too
3789 // low for capture, but it should work for playback.
3791 if ( options ) nBuffers = options->numberOfBuffers;
3792 if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) nBuffers = 2;
3793 if ( nBuffers < 2 ) nBuffers = 3;
3795 // Create the wave format structure. The data format setting will
3796 // be determined later.
3797 WAVEFORMATEX waveFormat;
3798 ZeroMemory( &waveFormat, sizeof(WAVEFORMATEX) );
3799 waveFormat.wFormatTag = WAVE_FORMAT_PCM;
3800 waveFormat.nChannels = channels + firstChannel;
3801 waveFormat.nSamplesPerSec = (unsigned long) sampleRate;
3803 // Determine the device buffer size. By default, 32k, but we will
3804 // grow it to make allowances for very large software buffer sizes.
3805 DWORD dsBufferSize = 0;
3806 DWORD dsPointerLeadTime = 0;
3807 long bufferBytes = MINIMUM_DEVICE_BUFFER_SIZE; // sound cards will always *knock wood* support this
3809 void *ohandle = 0, *bhandle = 0;
3810 if ( mode == OUTPUT ) {
3812 LPDIRECTSOUND output;
3813 result = DirectSoundCreate( dsinfo.id, &output, NULL );
3814 if ( FAILED( result ) ) {
3815 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") opening output device (" << dsinfo.name << ")!";
3816 errorText_ = errorStream_.str();
3821 outCaps.dwSize = sizeof( outCaps );
3822 result = output->GetCaps( &outCaps );
3823 if ( FAILED( result ) ) {
3825 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") getting capabilities (" << dsinfo.name << ")!";
3826 errorText_ = errorStream_.str();
3830 // Check channel information.
3831 if ( channels + firstChannel == 2 && !( outCaps.dwFlags & DSCAPS_PRIMARYSTEREO ) ) {
3832 errorStream_ << "RtApiDs::getDeviceInfo: the output device (" << dsinfo.name << ") does not support stereo playback.";
3833 errorText_ = errorStream_.str();
3837 // Check format information. Use 16-bit format unless not
3838 // supported or user requests 8-bit.
3839 if ( outCaps.dwFlags & DSCAPS_PRIMARY16BIT &&
3840 !( format == RTAUDIO_SINT8 && outCaps.dwFlags & DSCAPS_PRIMARY8BIT ) ) {
3841 waveFormat.wBitsPerSample = 16;
3842 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
3845 waveFormat.wBitsPerSample = 8;
3846 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
3848 stream_.userFormat = format;
3850 // Update wave format structure and buffer information.
3851 waveFormat.nBlockAlign = waveFormat.nChannels * waveFormat.wBitsPerSample / 8;
3852 waveFormat.nAvgBytesPerSec = waveFormat.nSamplesPerSec * waveFormat.nBlockAlign;
3853 dsPointerLeadTime = nBuffers * (*bufferSize) * (waveFormat.wBitsPerSample / 8) * channels;
3855 // If the user wants an even bigger buffer, increase the device buffer size accordingly.
3856 while ( dsPointerLeadTime * 2U > (DWORD) bufferBytes )
3859 // Set cooperative level to DSSCL_EXCLUSIVE ... sound stops when window focus changes.
3860 //result = output->SetCooperativeLevel( hWnd, DSSCL_EXCLUSIVE );
3861 // Set cooperative level to DSSCL_PRIORITY ... sound remains when window focus changes.
3862 result = output->SetCooperativeLevel( hWnd, DSSCL_PRIORITY );
3863 if ( FAILED( result ) ) {
3865 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") setting cooperative level (" << dsinfo.name << ")!";
3866 errorText_ = errorStream_.str();
3870 // Even though we will write to the secondary buffer, we need to
3871 // access the primary buffer to set the correct output format
3872 // (since the default is 8-bit, 22 kHz!). Setup the DS primary
3873 // buffer description.
3874 DSBUFFERDESC bufferDescription;
3875 ZeroMemory( &bufferDescription, sizeof( DSBUFFERDESC ) );
3876 bufferDescription.dwSize = sizeof( DSBUFFERDESC );
3877 bufferDescription.dwFlags = DSBCAPS_PRIMARYBUFFER;
3879 // Obtain the primary buffer
3880 LPDIRECTSOUNDBUFFER buffer;
3881 result = output->CreateSoundBuffer( &bufferDescription, &buffer, NULL );
3882 if ( FAILED( result ) ) {
3884 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") accessing primary buffer (" << dsinfo.name << ")!";
3885 errorText_ = errorStream_.str();
3889 // Set the primary DS buffer sound format.
3890 result = buffer->SetFormat( &waveFormat );
3891 if ( FAILED( result ) ) {
3893 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") setting primary buffer format (" << dsinfo.name << ")!";
3894 errorText_ = errorStream_.str();
3898 // Setup the secondary DS buffer description.
3899 dsBufferSize = (DWORD) bufferBytes;
3900 ZeroMemory( &bufferDescription, sizeof( DSBUFFERDESC ) );
3901 bufferDescription.dwSize = sizeof( DSBUFFERDESC );
3902 bufferDescription.dwFlags = ( DSBCAPS_STICKYFOCUS |
3903 DSBCAPS_GLOBALFOCUS |
3904 DSBCAPS_GETCURRENTPOSITION2 |
3905 DSBCAPS_LOCHARDWARE ); // Force hardware mixing
3906 bufferDescription.dwBufferBytes = bufferBytes;
3907 bufferDescription.lpwfxFormat = &waveFormat;
3909 // Try to create the secondary DS buffer. If that doesn't work,
3910 // try to use software mixing. Otherwise, there's a problem.
3911 result = output->CreateSoundBuffer( &bufferDescription, &buffer, NULL );
3912 if ( FAILED( result ) ) {
3913 bufferDescription.dwFlags = ( DSBCAPS_STICKYFOCUS |
3914 DSBCAPS_GLOBALFOCUS |
3915 DSBCAPS_GETCURRENTPOSITION2 |
3916 DSBCAPS_LOCSOFTWARE ); // Force software mixing
3917 result = output->CreateSoundBuffer( &bufferDescription, &buffer, NULL );
3918 if ( FAILED( result ) ) {
3920 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") creating secondary buffer (" << dsinfo.name << ")!";
3921 errorText_ = errorStream_.str();
3926 // Get the buffer size ... might be different from what we specified.
3928 dsbcaps.dwSize = sizeof( DSBCAPS );
3929 result = buffer->GetCaps( &dsbcaps );
3930 if ( FAILED( result ) ) {
3933 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") getting buffer settings (" << dsinfo.name << ")!";
3934 errorText_ = errorStream_.str();
3938 bufferBytes = dsbcaps.dwBufferBytes;
3940 // Lock the DS buffer
3943 result = buffer->Lock( 0, bufferBytes, &audioPtr, &dataLen, NULL, NULL, 0 );
3944 if ( FAILED( result ) ) {
3947 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") locking buffer (" << dsinfo.name << ")!";
3948 errorText_ = errorStream_.str();
3952 // Zero the DS buffer
3953 ZeroMemory( audioPtr, dataLen );
3955 // Unlock the DS buffer
3956 result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );
3957 if ( FAILED( result ) ) {
3960 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") unlocking buffer (" << dsinfo.name << ")!";
3961 errorText_ = errorStream_.str();
3965 dsBufferSize = bufferBytes;
3966 ohandle = (void *) output;
3967 bhandle = (void *) buffer;
3970 if ( mode == INPUT ) {
3972 LPDIRECTSOUNDCAPTURE input;
3973 result = DirectSoundCaptureCreate( dsinfo.id, &input, NULL );
3974 if ( FAILED( result ) ) {
3975 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") opening input device (" << dsinfo.name << ")!";
3976 errorText_ = errorStream_.str();
3981 inCaps.dwSize = sizeof( inCaps );
3982 result = input->GetCaps( &inCaps );
3983 if ( FAILED( result ) ) {
3985 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") getting input capabilities (" << dsinfo.name << ")!";
3986 errorText_ = errorStream_.str();
3990 // Check channel information.
3991 if ( inCaps.dwChannels < channels + firstChannel ) {
3992 errorText_ = "RtApiDs::getDeviceInfo: the input device does not support requested input channels.";
3996 // Check format information. Use 16-bit format unless user
3998 DWORD deviceFormats;
3999 if ( channels + firstChannel == 2 ) {
4000 deviceFormats = WAVE_FORMAT_1S08 | WAVE_FORMAT_2S08 | WAVE_FORMAT_4S08 | WAVE_FORMAT_96S08;
4001 if ( format == RTAUDIO_SINT8 && inCaps.dwFormats & deviceFormats ) {
4002 waveFormat.wBitsPerSample = 8;
4003 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
4005 else { // assume 16-bit is supported
4006 waveFormat.wBitsPerSample = 16;
4007 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
4010 else { // channel == 1
4011 deviceFormats = WAVE_FORMAT_1M08 | WAVE_FORMAT_2M08 | WAVE_FORMAT_4M08 | WAVE_FORMAT_96M08;
4012 if ( format == RTAUDIO_SINT8 && inCaps.dwFormats & deviceFormats ) {
4013 waveFormat.wBitsPerSample = 8;
4014 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
4016 else { // assume 16-bit is supported
4017 waveFormat.wBitsPerSample = 16;
4018 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
4021 stream_.userFormat = format;
4023 // Update wave format structure and buffer information.
4024 waveFormat.nBlockAlign = waveFormat.nChannels * waveFormat.wBitsPerSample / 8;
4025 waveFormat.nAvgBytesPerSec = waveFormat.nSamplesPerSec * waveFormat.nBlockAlign;
4027 // Setup the secondary DS buffer description.
4028 dsBufferSize = bufferBytes;
4029 DSCBUFFERDESC bufferDescription;
4030 ZeroMemory( &bufferDescription, sizeof( DSCBUFFERDESC ) );
4031 bufferDescription.dwSize = sizeof( DSCBUFFERDESC );
4032 bufferDescription.dwFlags = 0;
4033 bufferDescription.dwReserved = 0;
4034 bufferDescription.dwBufferBytes = bufferBytes;
4035 bufferDescription.lpwfxFormat = &waveFormat;
4037 // Create the capture buffer.
4038 LPDIRECTSOUNDCAPTUREBUFFER buffer;
4039 result = input->CreateCaptureBuffer( &bufferDescription, &buffer, NULL );
4040 if ( FAILED( result ) ) {
4042 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") creating input buffer (" << dsinfo.name << ")!";
4043 errorText_ = errorStream_.str();
4047 // Lock the capture buffer
4050 result = buffer->Lock( 0, bufferBytes, &audioPtr, &dataLen, NULL, NULL, 0 );
4051 if ( FAILED( result ) ) {
4054 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") locking input buffer (" << dsinfo.name << ")!";
4055 errorText_ = errorStream_.str();
4060 ZeroMemory( audioPtr, dataLen );
4062 // Unlock the buffer
4063 result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );
4064 if ( FAILED( result ) ) {
4067 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") unlocking input buffer (" << dsinfo.name << ")!";
4068 errorText_ = errorStream_.str();
4072 dsBufferSize = bufferBytes;
4073 ohandle = (void *) input;
4074 bhandle = (void *) buffer;
4077 // Set various stream parameters
4078 DsHandle *handle = 0;
4079 stream_.nDeviceChannels[mode] = channels + firstChannel;
4080 stream_.nUserChannels[mode] = channels;
4081 stream_.bufferSize = *bufferSize;
4082 stream_.channelOffset[mode] = firstChannel;
4083 stream_.deviceInterleaved[mode] = true;
4084 if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;
4085 else stream_.userInterleaved = true;
4087 // Set flag for buffer conversion
4088 stream_.doConvertBuffer[mode] = false;
4089 if (stream_.nUserChannels[mode] != stream_.nDeviceChannels[mode])
4090 stream_.doConvertBuffer[mode] = true;
4091 if (stream_.userFormat != stream_.deviceFormat[mode])
4092 stream_.doConvertBuffer[mode] = true;
4093 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
4094 stream_.nUserChannels[mode] > 1 )
4095 stream_.doConvertBuffer[mode] = true;
4097 // Allocate necessary internal buffers
4098 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
4099 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
4100 if ( stream_.userBuffer[mode] == NULL ) {
4101 errorText_ = "RtApiDs::probeDeviceOpen: error allocating user buffer memory.";
4105 if ( stream_.doConvertBuffer[mode] ) {
4107 bool makeBuffer = true;
4108 bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
4109 if ( mode == INPUT ) {
4110 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
4111 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
4112 if ( bufferBytes <= (long) bytesOut ) makeBuffer = false;
4117 bufferBytes *= *bufferSize;
4118 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
4119 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
4120 if ( stream_.deviceBuffer == NULL ) {
4121 errorText_ = "RtApiDs::probeDeviceOpen: error allocating device buffer memory.";
4127 // Allocate our DsHandle structures for the stream.
4128 if ( stream_.apiHandle == 0 ) {
4130 handle = new DsHandle;
4132 catch ( std::bad_alloc& ) {
4133 errorText_ = "RtApiDs::probeDeviceOpen: error allocating AsioHandle memory.";
4137 // Create a manual-reset event.
4138 handle->condition = CreateEvent( NULL, // no security
4139 TRUE, // manual-reset
4140 FALSE, // non-signaled initially
4142 stream_.apiHandle = (void *) handle;
4145 handle = (DsHandle *) stream_.apiHandle;
4146 handle->id[mode] = ohandle;
4147 handle->buffer[mode] = bhandle;
4148 handle->dsBufferSize[mode] = dsBufferSize;
4149 handle->dsPointerLeadTime[mode] = dsPointerLeadTime;
4151 stream_.device[mode] = device;
4152 stream_.state = STREAM_STOPPED;
4153 if ( stream_.mode == OUTPUT && mode == INPUT )
4154 // We had already set up an output stream.
4155 stream_.mode = DUPLEX;
4157 stream_.mode = mode;
4158 stream_.nBuffers = nBuffers;
4159 stream_.sampleRate = sampleRate;
4161 // Setup the buffer conversion information structure.
4162 if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, firstChannel );
4164 // Setup the callback thread.
4166 stream_.callbackInfo.object = (void *) this;
4167 stream_.callbackInfo.isRunning = true;
4168 stream_.callbackInfo.thread = _beginthreadex( NULL, 0, &callbackHandler,
4169 &stream_.callbackInfo, 0, &threadId );
4170 if ( stream_.callbackInfo.thread == 0 ) {
4171 errorText_ = "RtApiDs::probeDeviceOpen: error creating callback thread!";
4175 // Boost DS thread priority
4176 SetThreadPriority( (HANDLE) stream_.callbackInfo.thread, THREAD_PRIORITY_HIGHEST );
4181 if ( handle->buffer[0] ) { // the object pointer can be NULL and valid
4182 LPDIRECTSOUND object = (LPDIRECTSOUND) handle->id[0];
4183 LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4184 if ( buffer ) buffer->Release();
4187 if ( handle->buffer[1] ) {
4188 LPDIRECTSOUNDCAPTURE object = (LPDIRECTSOUNDCAPTURE) handle->id[1];
4189 LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
4190 if ( buffer ) buffer->Release();
4193 CloseHandle( handle->condition );
4195 stream_.apiHandle = 0;
4198 for ( int i=0; i<2; i++ ) {
4199 if ( stream_.userBuffer[i] ) {
4200 free( stream_.userBuffer[i] );
4201 stream_.userBuffer[i] = 0;
4205 if ( stream_.deviceBuffer ) {
4206 free( stream_.deviceBuffer );
4207 stream_.deviceBuffer = 0;
4213 void RtApiDs :: closeStream()
// Tear down an open DirectSound stream: stop and join the callback
// thread, release the DS playback/capture objects held in the DsHandle,
// then free the user and device conversion buffers and reset state.
// NOTE(review): this chunk has lines elided (e.g. the buffer->Stop()/
// Release() calls between 4230 and 4237) relative to the canonical source.
4215   if ( stream_.state == STREAM_CLOSED ) {
4216     errorText_ = "RtApiDs::closeStream(): no open stream to close!";
4217     error( RtError::WARNING );
4221   // Stop the callback thread.
// Clearing isRunning makes callbackHandler's loop exit; we then block
// until the thread terminates before destroying anything it might touch.
4222   stream_.callbackInfo.isRunning = false;
4223   WaitForSingleObject( (HANDLE) stream_.callbackInfo.thread, INFINITE );
4224   CloseHandle( (HANDLE) stream_.callbackInfo.thread );
4226   DsHandle *handle = (DsHandle *) stream_.apiHandle;
// Index 0 = output (playback), index 1 = input (capture).
4228     if ( handle->buffer[0] ) { // the object pointer can be NULL and valid
4229       LPDIRECTSOUND object = (LPDIRECTSOUND) handle->id[0];
4230       LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4237     if ( handle->buffer[1] ) {
4238       LPDIRECTSOUNDCAPTURE object = (LPDIRECTSOUNDCAPTURE) handle->id[1];
4239       LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
// Destroy the manual-reset event created in probeDeviceOpen.
4246     CloseHandle( handle->condition );
4248     stream_.apiHandle = 0;
// Free the per-mode user buffers (calloc'd in probeDeviceOpen).
4251   for ( int i=0; i<2; i++ ) {
4252     if ( stream_.userBuffer[i] ) {
4253       free( stream_.userBuffer[i] );
4254       stream_.userBuffer[i] = 0;
// Free the shared device-format conversion buffer, if any.
4258   if ( stream_.deviceBuffer ) {
4259     free( stream_.deviceBuffer );
4260     stream_.deviceBuffer = 0;
4263   stream_.mode = UNINITIALIZED;
4264   stream_.state = STREAM_CLOSED;
4267 void RtApiDs :: startStream()
// Start the DS buffers rolling: kick the playback buffer into looping
// Play and/or the capture buffer into looping Start, reset drain state,
// and mark the stream RUNNING. Reports SYSTEM_ERROR if either call fails.
4270   if ( stream_.state == STREAM_RUNNING ) {
4271     errorText_ = "RtApiDs::startStream(): the stream is already running!";
4272     error( RtError::WARNING );
4276   // Increase scheduler frequency on lesser windows (a side-effect of
4277   // increasing timer accuracy). On greater windows (Win2K or later),
4278   // this is already in effect.
4280   MUTEX_LOCK( &stream_.mutex );
4282   DsHandle *handle = (DsHandle *) stream_.apiHandle;
// 1 ms timer resolution; balanced by timeEndPeriod( 1 ) in stopStream.
4284   timeBeginPeriod( 1 );
4287   memset( &statistics, 0, sizeof( statistics ) );
4288   statistics.sampleRate = stream_.sampleRate;
4289   statistics.writeDeviceBufferLeadBytes = handle->dsPointerLeadTime[0];
// buffersRolling gates the duplex pointer-sync logic in callbackEvent.
4292   buffersRolling = false;
4293   duplexPrerollBytes = 0;
4295   if ( stream_.mode == DUPLEX ) {
4296     // 0.5 seconds of silence in DUPLEX mode while the devices spin up and synchronize.
4297     duplexPrerollBytes = (int) ( 0.5 * stream_.sampleRate * formatBytes( stream_.deviceFormat[1] ) * stream_.nDeviceChannels[1] );
4301   if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
4302     //statistics.outputFrameSize = formatBytes( stream_.deviceFormat[0] ) * stream_.nDeviceChannels[0];
4304     LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4305     result = buffer->Play( 0, 0, DSBPLAY_LOOPING );
4306     if ( FAILED( result ) ) {
4307       errorStream_ << "RtApiDs::startStream: error (" << getErrorString( result ) << ") starting output buffer!";
4308       errorText_ = errorStream_.str();
4313   if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
4314     //statistics.inputFrameSize = formatBytes( stream_.deviceFormat[1]) * stream_.nDeviceChannels[1];
4316     LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
4317     result = buffer->Start( DSCBSTART_LOOPING );
4318     if ( FAILED( result ) ) {
4319       errorStream_ << "RtApiDs::startStream: error (" << getErrorString( result ) << ") starting input buffer!";
4320       errorText_ = errorStream_.str();
// Reset drain bookkeeping used by stopStream/callbackEvent handshaking.
4325   handle->drainCounter = 0;
4326   handle->internalDrain = false;
4327   stream_.state = STREAM_RUNNING;
4330   MUTEX_UNLOCK( &stream_.mutex );
// Error is raised only after the mutex is released.
4332   if ( FAILED( result ) ) error( RtError::SYSTEM_ERROR );
4335 void RtApiDs :: stopStream()
// Stop a running stream. For output, first drain: set drainCounter and
// block on handle->condition until callbackEvent signals the device
// buffer has played out; then Stop() each DS buffer and zero it so a
// restart does not replay stale audio. Buffer pointers are reset to 0.
4338   if ( stream_.state == STREAM_STOPPED ) {
4339     errorText_ = "RtApiDs::stopStream(): the stream is already stopped!";
4340     error( RtError::WARNING );
4344   MUTEX_LOCK( &stream_.mutex );
// Re-check under the lock: the callback thread may have stopped us
// while we were waiting to acquire the mutex.
4346   if ( stream_.state == STREAM_STOPPED ) {
4347     MUTEX_UNLOCK( &stream_.mutex );
4354   DsHandle *handle = (DsHandle *) stream_.apiHandle;
4355   if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
// drainCounter == 0 means no drain is in progress yet; request one and
// wait (with the mutex released so callbackEvent can run) until the
// callback signals the condition event after the buffers have drained.
4356     if ( handle->drainCounter == 0 ) {
4357       handle->drainCounter = 1;
4358       MUTEX_UNLOCK( &stream_.mutex );
4359       WaitForMultipleObjects( 1, &handle->condition, FALSE, INFINITE ); // block until signaled
4360       ResetEvent( handle->condition );
4361       MUTEX_LOCK( &stream_.mutex );
4364     // Stop the buffer and clear memory
4365     LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4366     result = buffer->Stop();
4367     if ( FAILED( result ) ) {
// NOTE(review): message says "abortStream" but this is stopStream —
// same for the other error strings below; fix the text.
4368       errorStream_ << "RtApiDs::abortStream: error (" << getErrorString( result ) << ") stopping output buffer!";
4369       errorText_ = errorStream_.str();
4373     // Lock the buffer and clear it so that if we start to play again,
4374     // we won't have old data playing.
4375     result = buffer->Lock( 0, handle->dsBufferSize[0], &audioPtr, &dataLen, NULL, NULL, 0 );
4376     if ( FAILED( result ) ) {
4377       errorStream_ << "RtApiDs::abortStream: error (" << getErrorString( result ) << ") locking output buffer!";
4378       errorText_ = errorStream_.str();
4382     // Zero the DS buffer
4383     ZeroMemory( audioPtr, dataLen );
4385     // Unlock the DS buffer
4386     result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );
4387     if ( FAILED( result ) ) {
4388       errorStream_ << "RtApiDs::abortStream: error (" << getErrorString( result ) << ") unlocking output buffer!";
4389       errorText_ = errorStream_.str();
4393     // If we start playing again, we must begin at beginning of buffer.
4394     handle->bufferPointer[0] = 0;
4397   if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
4398     LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
4402     result = buffer->Stop();
4403     if ( FAILED( result ) ) {
4404       errorStream_ << "RtApiDs::abortStream: error (" << getErrorString( result ) << ") stopping input buffer!";
4405       errorText_ = errorStream_.str();
4409     // Lock the buffer and clear it so that if we start to play again,
4410     // we won't have old data playing.
4411     result = buffer->Lock( 0, handle->dsBufferSize[1], &audioPtr, &dataLen, NULL, NULL, 0 );
4412     if ( FAILED( result ) ) {
4413       errorStream_ << "RtApiDs::abortStream: error (" << getErrorString( result ) << ") locking input buffer!";
4414       errorText_ = errorStream_.str();
4418     // Zero the DS buffer
4419     ZeroMemory( audioPtr, dataLen );
4421     // Unlock the DS buffer
4422     result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );
4423     if ( FAILED( result ) ) {
4424       errorStream_ << "RtApiDs::abortStream: error (" << getErrorString( result ) << ") unlocking input buffer!";
4425       errorText_ = errorStream_.str();
4429     // If we start recording again, we must begin at beginning of buffer.
4430     handle->bufferPointer[1] = 0;
4434   timeEndPeriod( 1 ); // revert to normal scheduler frequency on lesser windows.
4435   stream_.state = STREAM_STOPPED;
4436   MUTEX_UNLOCK( &stream_.mutex );
// Raise any accumulated failure only after releasing the mutex.
4438   if ( FAILED( result ) ) error( RtError::SYSTEM_ERROR );
4441 void RtApiDs :: abortStream()
// Abort (stop without draining): pre-set drainCounter to 1 so the drain
// handshake in stopStream is skipped / shortened, giving an immediate
// stop instead of waiting for queued audio to play out.
// NOTE(review): the canonical version follows this with a stopStream()
// call, which is elided from this chunk — verify against upstream.
4444   if ( stream_.state == STREAM_STOPPED ) {
4445     errorText_ = "RtApiDs::abortStream(): the stream is already stopped!";
4446     error( RtError::WARNING );
4450   DsHandle *handle = (DsHandle *) stream_.apiHandle;
4451   handle->drainCounter = 1;
4456 void RtApiDs :: callbackEvent()
// One tick of the DS callback thread: invoke the user callback for
// fresh output data, copy it into the circular DS playback buffer at a
// safe write position (sleeping until the play cursor clears the write
// region), and/or pull captured data out of the DS capture buffer and
// convert it for the user. Handles duplex startup synchronization,
// under/overrun resync, and the drain handshake with stopStream.
// NOTE(review): several lines below contain "¤t..." — this is
// mis-encoded "&current..." (the "&curren" prefix was interpreted as an
// HTML entity). Restore "&currentWritePos" / "&currentReadPos".
4458   if ( stream_.state == STREAM_STOPPED ) {
4459     Sleep(50); // sleep 50 milliseconds
4463   if ( stream_.state == STREAM_CLOSED ) {
4464     errorText_ = "RtApiDs::callbackEvent(): the stream is closed ... this shouldn't happen!";
4465     error( RtError::WARNING );
4469   CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
4470   DsHandle *handle = (DsHandle *) stream_.apiHandle;
4472   // Check if we were draining the stream and signal is finished.
// Once the counter has advanced past nBuffers + 2 ticks, the device
// buffer has played out; wake the thread blocked in stopStream unless
// the drain was initiated internally (callback returned 1).
4473   if ( handle->drainCounter > stream_.nBuffers + 2 ) {
4474     if ( handle->internalDrain == false )
4475       SetEvent( handle->condition );
4481   MUTEX_LOCK( &stream_.mutex );
4483   // The state might change while waiting on a mutex.
4484   if ( stream_.state == STREAM_STOPPED ) {
4485     MUTEX_UNLOCK( &stream_.mutex );
4489   // Invoke user callback to get fresh output data UNLESS we are
// draining (drainCounter != 0). Callback return: 0 = continue,
// 1 = drain then stop (internal drain), 2 = abort immediately.
4491   if ( handle->drainCounter == 0 ) {
4492     RtAudioCallback callback = (RtAudioCallback) info->callback;
4493     double streamTime = getStreamTime();
4494     RtAudioStreamStatus status = 0;
// Report and clear any xrun flags latched by earlier ticks.
4495     if ( stream_.mode != INPUT && handle->xrun[0] == true ) {
4496       status |= RTAUDIO_OUTPUT_UNDERFLOW;
4497       handle->xrun[0] = false;
4499     if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {
4500       status |= RTAUDIO_INPUT_OVERFLOW;
4501       handle->xrun[1] = false;
4503     handle->drainCounter = callback( stream_.userBuffer[0], stream_.userBuffer[1],
4504                                      stream_.bufferSize, streamTime, status, info->userData );
4505     if ( handle->drainCounter == 2 ) {
4506       MUTEX_UNLOCK( &stream_.mutex );
4510     else if ( handle->drainCounter == 1 )
4511       handle->internalDrain = true;
4515   DWORD currentWritePos, safeWritePos;
4516   DWORD currentReadPos, safeReadPos;
4520 #ifdef GENERATE_DEBUG_LOG
4521   DWORD writeTime, readTime;
// Lock() may return the region in two pieces when it wraps the
// circular buffer end; buffer2/bufferSize2 hold the wrapped tail.
4524   LPVOID buffer1 = NULL;
4525   LPVOID buffer2 = NULL;
4526   DWORD bufferSize1 = 0;
4527   DWORD bufferSize2 = 0;
4532   if ( stream_.mode == DUPLEX && !buffersRolling ) {
4533     assert( handle->dsBufferSize[0] == handle->dsBufferSize[1] );
4535     // It takes a while for the devices to get rolling. As a result,
4536     // there's no guarantee that the capture and write device pointers
4537     // will move in lockstep. Wait here for both devices to start
4538     // rolling, and then set our buffer pointers accordingly.
4539     // e.g. Crystal Drivers: the capture buffer starts up 5700 to 9600
4540     // bytes later than the write buffer.
4542     // Stub: a serious risk of having a pre-emptive scheduling round
4543     // take place between the two GetCurrentPosition calls... but I'm
4544     // really not sure how to solve the problem. Temporarily boost to
4545     // Realtime priority, maybe; but I'm not sure what priority the
4546     // DirectSound service threads run at. We *should* be roughly
4547     // within a ms or so of correct.
4549     LPDIRECTSOUNDBUFFER dsWriteBuffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4550     LPDIRECTSOUNDCAPTUREBUFFER dsCaptureBuffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
4552     DWORD initialWritePos, initialSafeWritePos;
4553     DWORD initialReadPos, initialSafeReadPos;
4555     result = dsWriteBuffer->GetCurrentPosition( &initialWritePos, &initialSafeWritePos );
4556     if ( FAILED( result ) ) {
4557       errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current write position!";
4558       errorText_ = errorStream_.str();
4559       error( RtError::SYSTEM_ERROR );
4561     result = dsCaptureBuffer->GetCurrentPosition( &initialReadPos, &initialSafeReadPos );
4562     if ( FAILED( result ) ) {
4563       errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";
4564       errorText_ = errorStream_.str();
4565       error( RtError::SYSTEM_ERROR );
4568     result = dsWriteBuffer->GetCurrentPosition( ¤tWritePos, &safeWritePos ); // NOTE(review): "¤t" = mangled "&current"
4569     if ( FAILED( result ) ) {
4570       errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current write position!";
4571       errorText_ = errorStream_.str();
4572       error( RtError::SYSTEM_ERROR );
4574     result = dsCaptureBuffer->GetCurrentPosition( ¤tReadPos, &safeReadPos ); // NOTE(review): "¤t" = mangled "&current"
4575     if ( FAILED( result ) ) {
4576       errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";
4577       errorText_ = errorStream_.str();
4578       error( RtError::SYSTEM_ERROR );
// Both cursors have moved since the initial snapshot: devices rolling.
4580     if ( safeWritePos != initialSafeWritePos && safeReadPos != initialSafeReadPos ) break;
4584     assert( handle->dsBufferSize[0] == handle->dsBufferSize[1] );
4586     buffersRolling = true;
// Start writing a lead-time ahead of the safe write cursor; read from
// the current safe read position.
4587     handle->bufferPointer[0] = ( safeWritePos + handle->dsPointerLeadTime[0] );
4588     handle->bufferPointer[1] = safeReadPos;
4591   if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
4593     LPDIRECTSOUNDBUFFER dsBuffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4595     if ( handle->drainCounter > 1 ) { // write zeros to the output stream
4596       bufferBytes = stream_.bufferSize * stream_.nUserChannels[0];
4597       bufferBytes *= formatBytes( stream_.userFormat );
4598       memset( stream_.userBuffer[0], 0, bufferBytes );
4601     // Setup parameters and do buffer conversion if necessary.
4602     if ( stream_.doConvertBuffer[0] ) {
4603       buffer = stream_.deviceBuffer;
4604       convertBuffer( buffer, stream_.userBuffer[0], stream_.convertInfo[0] );
4605       bufferBytes = stream_.bufferSize * stream_.nDeviceChannels[0];
4606       bufferBytes *= formatBytes( stream_.deviceFormat[0] );
4609       buffer = stream_.userBuffer[0];
4610       bufferBytes = stream_.bufferSize * stream_.nUserChannels[0];
4611       bufferBytes *= formatBytes( stream_.userFormat );
4614     // No byte swapping necessary in DirectSound implementation.
4616     // Ahhh ... windoze. 16-bit data is signed but 8-bit data is
4617     // unsigned. So, we need to convert our signed 8-bit data here to
4619     if ( stream_.deviceFormat[0] == RTAUDIO_SINT8 )
4620       for ( int i=0; i<bufferBytes; i++ ) buffer[i] = (unsigned char) ( buffer[i] + 128 );
4622     DWORD dsBufferSize = handle->dsBufferSize[0];
4623     nextWritePos = handle->bufferPointer[0];
4627     // Find out where the read and "safe write" pointers are.
4628     result = dsBuffer->GetCurrentPosition( ¤tWritePos, &safeWritePos ); // NOTE(review): "¤t" = mangled "&current"
4629     if ( FAILED( result ) ) {
4630       errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current write position!";
4631       errorText_ = errorStream_.str();
4632       error( RtError::SYSTEM_ERROR );
// leadPos is the farthest point we are allowed to write up to; unwrap
// it past the buffer end so the comparisons below are monotonic.
4635     leadPos = safeWritePos + handle->dsPointerLeadTime[0];
4636     if ( leadPos > dsBufferSize ) leadPos -= dsBufferSize;
4637     if ( leadPos < nextWritePos ) leadPos += dsBufferSize; // unwrap offset
4638     endWrite = nextWritePos + bufferBytes;
4640     // Check whether the entire write region is behind the play pointer.
4641     if ( leadPos >= endWrite ) break;
4643     // If we are here, then we must wait until the play pointer gets
4644     // beyond the write region. The approach here is to use the
4645     // Sleep() function to suspend operation until safePos catches
4646     // up. Calculate number of milliseconds to wait as:
4647     //   time = distance * (milliseconds/second) * fudgefactor /
4648     //          ((bytes/sample) * (samples/second))
4649     // A "fudgefactor" less than 1 is used because it was found
4650     // that sleeping too long was MUCH worse than sleeping for
4651     // several shorter periods.
4652     double millis = ( endWrite - leadPos ) * 900.0;
4653     millis /= ( formatBytes( stream_.deviceFormat[0]) * stream_.nDeviceChannels[0] * stream_.sampleRate);
4654     if ( millis < 1.0 ) millis = 1.0;
4655     if ( millis > 50.0 ) {
4656       static int nOverruns = 0;
4659     Sleep( (DWORD) millis );
4662     //if ( statistics.writeDeviceSafeLeadBytes < dsPointerDifference( safeWritePos, currentWritePos, handle->dsBufferSize[0] ) ) {
4663     //  statistics.writeDeviceSafeLeadBytes = dsPointerDifference( safeWritePos, currentWritePos, handle->dsBufferSize[0] );
// The zone between the play and write cursors must not be written to;
// if our region overlaps it, resync past the cursors and flag underrun.
4666     if ( dsPointerBetween( nextWritePos, safeWritePos, currentWritePos, dsBufferSize )
4667          || dsPointerBetween( endWrite, safeWritePos, currentWritePos, dsBufferSize ) ) {
4668       // We've strayed into the forbidden zone ... resync the read pointer.
4669       //++statistics.numberOfWriteUnderruns;
4670       handle->xrun[0] = true;
4671       nextWritePos = safeWritePos + handle->dsPointerLeadTime[0] - bufferBytes + dsBufferSize;
4672       while ( nextWritePos >= dsBufferSize ) nextWritePos -= dsBufferSize;
4673       handle->bufferPointer[0] = nextWritePos;
4674       endWrite = nextWritePos + bufferBytes;
4677     // Lock free space in the buffer
4678     result = dsBuffer->Lock( nextWritePos, bufferBytes, &buffer1,
4679                              &bufferSize1, &buffer2, &bufferSize2, 0 );
4680     if ( FAILED( result ) ) {
4681       errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") locking buffer during playback!";
4682       errorText_ = errorStream_.str();
4683       error( RtError::SYSTEM_ERROR );
4686     // Copy our buffer into the DS buffer
4687     CopyMemory( buffer1, buffer, bufferSize1 );
4688     if ( buffer2 != NULL ) CopyMemory( buffer2, buffer+bufferSize1, bufferSize2 );
4690     // Update our buffer offset and unlock sound buffer
// NOTE(review): Unlock's HRESULT is not assigned to 'result' — the
// FAILED( result ) check below re-tests the stale Lock() result.
// Should be: result = dsBuffer->Unlock( ... );
4691     dsBuffer->Unlock( buffer1, bufferSize1, buffer2, bufferSize2 );
4692     if ( FAILED( result ) ) {
4693       errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") unlocking buffer during playback!";
4694       errorText_ = errorStream_.str();
4695       error( RtError::SYSTEM_ERROR );
4697     nextWritePos = ( nextWritePos + bufferSize1 + bufferSize2 ) % dsBufferSize;
4698     handle->bufferPointer[0] = nextWritePos;
// While draining, count ticks so the top-of-function check can tell
// when the device buffer has fully played out.
4700     if ( handle->drainCounter ) {
4701       handle->drainCounter++;
4706   if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
4708     // Setup parameters.
4709     if ( stream_.doConvertBuffer[1] ) {
4710       buffer = stream_.deviceBuffer;
4711       bufferBytes = stream_.bufferSize * stream_.nDeviceChannels[1];
4712       bufferBytes *= formatBytes( stream_.deviceFormat[1] );
4715       buffer = stream_.userBuffer[1];
4716       bufferBytes = stream_.bufferSize * stream_.nUserChannels[1];
4717       bufferBytes *= formatBytes( stream_.userFormat );
4720     LPDIRECTSOUNDCAPTUREBUFFER dsBuffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
4721     long nextReadPos = handle->bufferPointer[1];
4722     DWORD dsBufferSize = handle->dsBufferSize[1];
4724     // Find out where the write and "safe read" pointers are.
4725     result = dsBuffer->GetCurrentPosition( ¤tReadPos, &safeReadPos ); // NOTE(review): "¤t" = mangled "&current"
4726     if ( FAILED( result ) ) {
4727       errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";
4728       errorText_ = errorStream_.str();
4729       error( RtError::SYSTEM_ERROR );
4732     if ( safeReadPos < (DWORD)nextReadPos ) safeReadPos += dsBufferSize; // unwrap offset
4733     DWORD endRead = nextReadPos + bufferBytes;
4735     // Handling depends on whether we are INPUT or DUPLEX.
4736     // If we're in INPUT mode then waiting is a good thing. If we're in DUPLEX mode,
4737     // then a wait here will drag the write pointers into the forbidden zone.
4739     // In DUPLEX mode, rather than wait, we will back off the read pointer until
4740     // it's in a safe position. This causes dropouts, but it seems to be the only
4741     // practical way to sync up the read and write pointers reliably, given the
4742     // the very complex relationship between phase and increment of the read and write
4745     // In order to minimize audible dropouts in DUPLEX mode, we will
4746     // provide a pre-roll period of 0.5 seconds in which we return
4747     // zeros from the read buffer while the pointers sync up.
4749     if ( stream_.mode == DUPLEX ) {
4750       if ( safeReadPos < endRead ) {
4751         if ( duplexPrerollBytes <= 0 ) {
4752           // Pre-roll time over. Be more agressive.
4753           int adjustment = endRead-safeReadPos;
4755           handle->xrun[1] = true;
4756           //++statistics.numberOfReadOverruns;
4758           // - large adjustments: we've probably run out of CPU cycles, so just resync exactly,
4759           //   and perform fine adjustments later.
4760           // - small adjustments: back off by twice as much.
4761           if ( adjustment >= 2*bufferBytes )
4762             nextReadPos = safeReadPos-2*bufferBytes;
4764             nextReadPos = safeReadPos-bufferBytes-adjustment;
4766           //statistics.readDeviceSafeLeadBytes = currentReadPos-nextReadPos;
4767           //if ( statistics.readDeviceSafeLeadBytes < 0) statistics.readDeviceSafeLeadBytes += dsBufferSize;
4768           if ( nextReadPos < 0 ) nextReadPos += dsBufferSize;
4772           // In pre=roll time. Just do it.
4773           nextReadPos = safeReadPos-bufferBytes;
4774           while ( nextReadPos < 0 ) nextReadPos += dsBufferSize;
4776         endRead = nextReadPos + bufferBytes;
4779     else { // mode == INPUT
4780       while ( safeReadPos < endRead ) {
4781         // See comments for playback.
4782         double millis = (endRead - safeReadPos) * 900.0;
4783         millis /= ( formatBytes(stream_.deviceFormat[1]) * stream_.nDeviceChannels[1] * stream_.sampleRate);
4784         if ( millis < 1.0 ) millis = 1.0;
4785         Sleep( (DWORD) millis );
4787         // Wake up, find out where we are now
4788         result = dsBuffer->GetCurrentPosition( ¤tReadPos, &safeReadPos ); // NOTE(review): "¤t" = mangled "&current"
4789         if ( FAILED( result ) ) {
4790           errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";
4791           errorText_ = errorStream_.str();
4792           error( RtError::SYSTEM_ERROR );
4795         if ( safeReadPos < (DWORD)nextReadPos ) safeReadPos += dsBufferSize; // unwrap offset
4799     //if (statistics.readDeviceSafeLeadBytes < dsPointerDifference( currentReadPos, nextReadPos, dsBufferSize ) )
4800     //  statistics.readDeviceSafeLeadBytes = dsPointerDifference( currentReadPos, nextReadPos, dsBufferSize );
4802     // Lock free space in the buffer
4803     result = dsBuffer->Lock( nextReadPos, bufferBytes, &buffer1,
4804                              &bufferSize1, &buffer2, &bufferSize2, 0 );
4805     if ( FAILED( result ) ) {
4806       errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") locking capture buffer!";
4807       errorText_ = errorStream_.str();
4808       error( RtError::SYSTEM_ERROR );
4811     if ( duplexPrerollBytes <= 0 ) {
4812       // Copy our buffer into the DS buffer
4813       CopyMemory( buffer, buffer1, bufferSize1 );
4814       if ( buffer2 != NULL ) CopyMemory( buffer+bufferSize1, buffer2, bufferSize2 );
// During duplex pre-roll, hand the user silence instead of real input
// while the read/write cursors synchronize.
4817       memset( buffer, 0, bufferSize1 );
4818       if ( buffer2 != NULL ) memset( buffer + bufferSize1, 0, bufferSize2 );
4819       duplexPrerollBytes -= bufferSize1 + bufferSize2;
4822     // Update our buffer offset and unlock sound buffer
4823     nextReadPos = ( nextReadPos + bufferSize1 + bufferSize2 ) % dsBufferSize;
// NOTE(review): same stale-result issue as the playback path — Unlock's
// HRESULT is discarded and the check below re-tests the Lock() result.
4824     dsBuffer->Unlock( buffer1, bufferSize1, buffer2, bufferSize2 );
4825     if ( FAILED( result ) ) {
4826       errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") unlocking capture buffer!";
4827       errorText_ = errorStream_.str();
4828       error( RtError::SYSTEM_ERROR );
4830     handle->bufferPointer[1] = nextReadPos;
4832     // No byte swapping necessary in DirectSound implementation.
4834     // If necessary, convert 8-bit data from unsigned to signed.
4835     if ( stream_.deviceFormat[1] == RTAUDIO_SINT8 )
4836       for ( int j=0; j<bufferBytes; j++ ) buffer[j] = (signed char) ( buffer[j] - 128 );
4838     // Do buffer conversion if necessary.
4839     if ( stream_.doConvertBuffer[1] )
4840       convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
4842 #ifdef GENERATE_DEBUG_LOG
4843   if ( currentDebugLogEntry < debugLog.size() )
4845     TTickRecord &r = debugLog[currentDebugLogEntry++];
4846     r.currentReadPointer = currentReadPos;
4847     r.safeReadPointer = safeReadPos;
4848     r.currentWritePointer = currentWritePos;
4849     r.safeWritePointer = safeWritePos;
4850     r.readTime = readTime;
4851     r.writeTime = writeTime;
4852     r.nextReadPointer = handles[1].bufferPointer;
4853     r.nextWritePointer = handles[0].bufferPointer;
4858   MUTEX_UNLOCK( &stream_.mutex );
// Advance the stream-time clock by one buffer's worth of frames.
4860   RtApi::tickStreamTime();
4863 // Definitions for utility functions and callbacks
4864 // specific to the DirectSound implementation.
// DirectSound callback-thread entry point (created via _beginthreadex by the
// stream-start code outside this listing). It repeatedly drives the stream's
// callbackEvent() until CallbackInfo::isRunning is cleared (see closeStream).
// NOTE(review): this listing elides lines (the embedded numbering jumps), so
// the loop's closing brace and the thread-exit/return code are not visible here.
4866 extern "C" unsigned __stdcall callbackHandler( void *ptr )
4868 CallbackInfo *info = (CallbackInfo *) ptr;
4869 RtApiDs *object = (RtApiDs *) info->object;
4870 bool* isRunning = &info->isRunning;
4872 while ( *isRunning == true ) {
4873 object->callbackEvent();
// Convert a Windows TCHAR string (narrow or wide, depending on UNICODE build)
// into a one-byte-per-char std::string for RtAudio's device-name API.
// NOTE(review): in the UNICODE branch each wchar_t is truncated to one byte
// (acknowledged by the original comment below); non-ASCII names will be mangled.
// The declaration of local `s` and the final `return s;` are elided in this listing.
4882 std::string convertTChar( LPCTSTR name )
4886 #if defined( UNICODE ) || defined( _UNICODE )
4887 // Yes, this conversion doesn't make sense for two-byte characters
4888 // but RtAudio is currently written to return an std::string of
4889 // one-byte chars for the device name.
4890 for ( unsigned int i=0; i<wcslen( name ); i++ )
4891 s.push_back( name[i] );
4893 s.append( std::string( name ) );
// DirectSoundEnumerate / DirectSoundCaptureEnumerate callback. For each
// enumerated device it opens the device, checks its capabilities (capture:
// channels/formats; playback: primary mono/stereo support), and uses the
// EnumInfo context to count devices, find the default (lpguid == NULL), or
// capture the name of the device at a requested index.
// NOTE(review): several lines (caps declarations, counter increments, Release()
// calls, closing braces, final return) are elided in this listing.
4899 static BOOL CALLBACK deviceQueryCallback( LPGUID lpguid,
4900 LPCTSTR description,
4904 EnumInfo *info = (EnumInfo *) lpContext;
4907 if ( info->isInput == true ) {
4909 LPDIRECTSOUNDCAPTURE object;
4911 hr = DirectSoundCaptureCreate( lpguid, &object, NULL );
4912 if ( hr != DS_OK ) return TRUE;
4914 caps.dwSize = sizeof(caps);
4915 hr = object->GetCaps( &caps );
4916 if ( hr == DS_OK ) {
4917 if ( caps.dwChannels > 0 && caps.dwFormats > 0 )
4924 LPDIRECTSOUND object;
4925 hr = DirectSoundCreate( lpguid, &object, NULL );
4926 if ( hr != DS_OK ) return TRUE;
4928 caps.dwSize = sizeof(caps);
4929 hr = object->GetCaps( &caps );
4930 if ( hr == DS_OK ) {
4931 if ( caps.dwFlags & DSCAPS_PRIMARYMONO || caps.dwFlags & DSCAPS_PRIMARYSTEREO )
4937 if ( info->getDefault && lpguid == NULL ) return FALSE;
4939 if ( info->findIndex && info->counter > info->index ) {
4941 info->name = convertTChar( description );
// Map a DirectSound HRESULT error code to a human-readable message used when
// building errorStream_ text in the DS implementation.
// NOTE(review): returning string literals through a non-const char* is a
// const-correctness wart (deprecated conversion in C++); the signature is
// declared elsewhere in the file, so it is only flagged here.
// The switch header/braces and some case lines are elided in this listing.
4948 static char* getErrorString( int code )
4952 case DSERR_ALLOCATED:
4953 return "Already allocated";
4955 case DSERR_CONTROLUNAVAIL:
4956 return "Control unavailable";
4958 case DSERR_INVALIDPARAM:
4959 return "Invalid parameter";
4961 case DSERR_INVALIDCALL:
4962 return "Invalid call";
4965 return "Generic error";
4967 case DSERR_PRIOLEVELNEEDED:
4968 return "Priority level needed";
4970 case DSERR_OUTOFMEMORY:
4971 return "Out of memory";
4973 case DSERR_BADFORMAT:
4974 return "The sample rate or the channel format is not supported";
4976 case DSERR_UNSUPPORTED:
4977 return "Not supported";
4979 case DSERR_NODRIVER:
4982 case DSERR_ALREADYINITIALIZED:
4983 return "Already initialized";
4985 case DSERR_NOAGGREGATION:
4986 return "No aggregation";
4988 case DSERR_BUFFERLOST:
4989 return "Buffer lost";
4991 case DSERR_OTHERAPPHASPRIO:
4992 return "Another application already has priority";
4994 case DSERR_UNINITIALIZED:
4995 return "Uninitialized";
4998 return "DirectSound unknown error";
5001 //******************** End of __WINDOWS_DS__ *********************//
5005 #if defined(__LINUX_ALSA__)
5007 #include <alsa/asoundlib.h>
5010 // A structure to hold various information related to the ALSA API
// implementation. Fragment of the AlsaHandle struct (its `struct ... {` line,
// the `synchronized`/`xrun` member declarations, and the closing brace are
// elided in this listing; the name is evident from the (AlsaHandle *) casts
// used throughout the ALSA code below).
// handles[0] = playback pcm, handles[1] = capture pcm (see probeDeviceOpen).
5013 snd_pcm_t *handles[2];
// Condition variable used to park the callback thread while the stream is
// stopped (signalled by startStream/closeStream).
5016 pthread_cond_t runnable;
// Constructor initializer fragment: starts unsynchronized with no xruns flagged.
5019 :synchronized(false) { xrun[0] = false; xrun[1] = false; }
5022 extern "C" void *alsaCallbackHandler( void * ptr );
// Default constructor: no ALSA resources are acquired until a stream is opened.
5024 RtApiAlsa :: RtApiAlsa()
5026 // Nothing to do here.
// Destructor: ensure any open stream (and its thread/pcm handles/buffers)
// is torn down via closeStream() before the object goes away.
5029 RtApiAlsa :: ~RtApiAlsa()
5031 if ( stream_.state != STREAM_CLOSED ) closeStream();
// Count available ALSA PCM devices by walking every sound card ("hw:N") and
// enumerating its PCM subdevices through the control interface. Errors on a
// single card are reported as warnings and that card is skipped.
// NOTE(review): the nDevices increment, inner-loop braces, and the final
// `return nDevices;` are elided in this listing (embedded numbering jumps).
5034 unsigned int RtApiAlsa :: getDeviceCount( void )
5036 unsigned nDevices = 0;
5037 int result, subdevice, card;
5041 // Count cards and devices
5043 snd_card_next( &card );
5044 while ( card >= 0 ) {
5045 sprintf( name, "hw:%d", card );
5046 result = snd_ctl_open( &handle, name, 0 );
5048 errorStream_ << "RtApiAlsa::getDeviceCount: control open, card = " << card << ", " << snd_strerror( result ) << ".";
5049 errorText_ = errorStream_.str();
5050 error( RtError::WARNING );
5055 result = snd_ctl_pcm_next_device( handle, &subdevice );
5057 errorStream_ << "RtApiAlsa::getDeviceCount: control next device, card = " << card << ", " << snd_strerror( result ) << ".";
5058 errorText_ = errorStream_.str();
5059 error( RtError::WARNING );
5062 if ( subdevice < 0 )
5067 snd_ctl_close( handle );
5068 snd_card_next( &card );
// Probe a single ALSA device (by RtAudio index) and fill an
// RtAudio::DeviceInfo: output/input/duplex channel counts, supported sample
// rates (tested against the class's discrete SAMPLE_RATES table), native data
// formats, and the card name. If a stream is already open on this device the
// previously saved probe results (devices_) are returned instead, because an
// open device cannot be re-probed.
// FIX(mojibake): the snd_pcm_hw_params_alloca call below read
// "( ¶ms )" — HTML-entity corruption of "( &params )" (&para; swallowed
// the leading "&param"); restored to match the alloca-of-&params idiom used
// for &pcminfo above and &hw_params in probeDeviceOpen.
// NOTE(review): this listing elides lines (embedded numbering jumps): local
// declarations (name, chandle, phandle, value, cardname), `if (result < 0)`
// guards, closing braces, the probeParameters label, and the final
// `info.probed = true; return info;` are not visible here.
5074 RtAudio::DeviceInfo RtApiAlsa :: getDeviceInfo( unsigned int device )
5076 RtAudio::DeviceInfo info;
5077 info.probed = false;
5079 unsigned nDevices = 0;
5080 int result, subdevice, card;
5084 // Count cards and devices
5086 snd_card_next( &card );
5087 while ( card >= 0 ) {
5088 sprintf( name, "hw:%d", card );
5089 result = snd_ctl_open( &chandle, name, SND_CTL_NONBLOCK );
5091 errorStream_ << "RtApiAlsa::getDeviceInfo: control open, card = " << card << ", " << snd_strerror( result ) << ".";
5092 errorText_ = errorStream_.str();
5093 error( RtError::WARNING );
5098 result = snd_ctl_pcm_next_device( chandle, &subdevice );
5100 errorStream_ << "RtApiAlsa::getDeviceInfo: control next device, card = " << card << ", " << snd_strerror( result ) << ".";
5101 errorText_ = errorStream_.str();
5102 error( RtError::WARNING );
5105 if ( subdevice < 0 ) break;
5106 if ( nDevices == device ) {
5107 sprintf( name, "hw:%d,%d", card, subdevice );
5113 snd_ctl_close( chandle );
5114 snd_card_next( &card );
5117 if ( nDevices == 0 ) {
5118 errorText_ = "RtApiAlsa::getDeviceInfo: no devices found!";
5119 error( RtError::INVALID_USE );
5122 if ( device >= nDevices ) {
5123 errorText_ = "RtApiAlsa::getDeviceInfo: device ID is invalid!";
5124 error( RtError::INVALID_USE );
5129 // If a stream is already open, we cannot probe the stream devices.
5130 // Thus, use the saved results.
5131 if ( stream_.state != STREAM_CLOSED &&
5132 ( stream_.device[0] == device || stream_.device[1] == device ) ) {
5133 if ( device >= devices_.size() ) {
5134 errorText_ = "RtApiAlsa::getDeviceInfo: device ID was not present before stream was opened.";
5135 error( RtError::WARNING );
5138 return devices_[ device ];
5141 int openMode = SND_PCM_ASYNC;
5142 snd_pcm_stream_t stream;
5143 snd_pcm_info_t *pcminfo;
5144 snd_pcm_info_alloca( &pcminfo );
5146 snd_pcm_hw_params_t *params;
5147 snd_pcm_hw_params_alloca( &params );
5149 // First try for playback
5150 stream = SND_PCM_STREAM_PLAYBACK;
5151 snd_pcm_info_set_device( pcminfo, subdevice );
5152 snd_pcm_info_set_subdevice( pcminfo, 0 );
5153 snd_pcm_info_set_stream( pcminfo, stream );
5155 result = snd_ctl_pcm_info( chandle, pcminfo );
5157 // Device probably doesn't support playback.
5161 result = snd_pcm_open( &phandle, name, stream, openMode | SND_PCM_NONBLOCK );
5163 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_open error for device (" << name << "), " << snd_strerror( result ) << ".";
5164 errorText_ = errorStream_.str();
5165 error( RtError::WARNING );
5169 // The device is open ... fill the parameter structure.
5170 result = snd_pcm_hw_params_any( phandle, params );
5172 snd_pcm_close( phandle );
5173 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_hw_params error for device (" << name << "), " << snd_strerror( result ) << ".";
5174 errorText_ = errorStream_.str();
5175 error( RtError::WARNING );
5179 // Get output channel information.
5181 result = snd_pcm_hw_params_get_channels_max( params, &value );
5183 snd_pcm_close( phandle );
5184 errorStream_ << "RtApiAlsa::getDeviceInfo: error getting device (" << name << ") output channels, " << snd_strerror( result ) << ".";
5185 errorText_ = errorStream_.str();
5186 error( RtError::WARNING );
5189 info.outputChannels = value;
5190 snd_pcm_close( phandle );
5193 // Now try for capture
5194 stream = SND_PCM_STREAM_CAPTURE;
5195 snd_pcm_info_set_stream( pcminfo, stream );
5197 result = snd_ctl_pcm_info( chandle, pcminfo );
5198 snd_ctl_close( chandle );
5200 // Device probably doesn't support capture.
5201 if ( info.outputChannels == 0 ) return info;
5202 goto probeParameters;
5205 result = snd_pcm_open( &phandle, name, stream, openMode | SND_PCM_NONBLOCK);
5207 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_open error for device (" << name << "), " << snd_strerror( result ) << ".";
5208 errorText_ = errorStream_.str();
5209 error( RtError::WARNING );
5210 if ( info.outputChannels == 0 ) return info;
5211 goto probeParameters;
5214 // The device is open ... fill the parameter structure.
5215 result = snd_pcm_hw_params_any( phandle, params );
5217 snd_pcm_close( phandle );
5218 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_hw_params error for device (" << name << "), " << snd_strerror( result ) << ".";
5219 errorText_ = errorStream_.str();
5220 error( RtError::WARNING );
5221 if ( info.outputChannels == 0 ) return info;
5222 goto probeParameters;
5225 result = snd_pcm_hw_params_get_channels_max( params, &value );
5227 snd_pcm_close( phandle );
5228 errorStream_ << "RtApiAlsa::getDeviceInfo: error getting device (" << name << ") input channels, " << snd_strerror( result ) << ".";
5229 errorText_ = errorStream_.str();
5230 error( RtError::WARNING );
5231 if ( info.outputChannels == 0 ) return info;
5232 goto probeParameters;
5234 info.inputChannels = value;
5235 snd_pcm_close( phandle );
5237 // If device opens for both playback and capture, we determine the channels.
5238 if ( info.outputChannels > 0 && info.inputChannels > 0 )
5239 info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
5241 // ALSA doesn't provide default devices so we'll use the first available one.
5242 if ( device == 0 && info.outputChannels > 0 )
5243 info.isDefaultOutput = true;
5244 if ( device == 0 && info.inputChannels > 0 )
5245 info.isDefaultInput = true;
5248 // At this point, we just need to figure out the supported data
5249 // formats and sample rates. We'll proceed by opening the device in
5250 // the direction with the maximum number of channels, or playback if
5251 // they are equal. This might limit our sample rate options, but so
5254 if ( info.outputChannels >= info.inputChannels )
5255 stream = SND_PCM_STREAM_PLAYBACK;
5257 stream = SND_PCM_STREAM_CAPTURE;
5258 snd_pcm_info_set_stream( pcminfo, stream );
5260 result = snd_pcm_open( &phandle, name, stream, openMode | SND_PCM_NONBLOCK);
5262 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_open error for device (" << name << "), " << snd_strerror( result ) << ".";
5263 errorText_ = errorStream_.str();
5264 error( RtError::WARNING );
5268 // The device is open ... fill the parameter structure.
5269 result = snd_pcm_hw_params_any( phandle, params );
5271 snd_pcm_close( phandle );
5272 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_hw_params error for device (" << name << "), " << snd_strerror( result ) << ".";
5273 errorText_ = errorStream_.str();
5274 error( RtError::WARNING );
5278 // Test our discrete set of sample rate values.
5279 info.sampleRates.clear();
5280 for ( unsigned int i=0; i<MAX_SAMPLE_RATES; i++ ) {
5281 if ( snd_pcm_hw_params_test_rate( phandle, params, SAMPLE_RATES[i], 0 ) == 0 )
5282 info.sampleRates.push_back( SAMPLE_RATES[i] );
5284 if ( info.sampleRates.size() == 0 ) {
5285 snd_pcm_close( phandle );
5286 errorStream_ << "RtApiAlsa::getDeviceInfo: no supported sample rates found for device (" << name << ").";
5287 errorText_ = errorStream_.str();
5288 error( RtError::WARNING );
5292 // Probe the supported data formats ... we don't care about endian-ness just yet
5293 snd_pcm_format_t format;
5294 info.nativeFormats = 0;
5295 format = SND_PCM_FORMAT_S8;
5296 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5297 info.nativeFormats |= RTAUDIO_SINT8;
5298 format = SND_PCM_FORMAT_S16;
5299 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5300 info.nativeFormats |= RTAUDIO_SINT16;
5301 format = SND_PCM_FORMAT_S24;
5302 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5303 info.nativeFormats |= RTAUDIO_SINT24;
5304 format = SND_PCM_FORMAT_S32;
5305 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5306 info.nativeFormats |= RTAUDIO_SINT32;
5307 format = SND_PCM_FORMAT_FLOAT;
5308 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5309 info.nativeFormats |= RTAUDIO_FLOAT32;
5310 format = SND_PCM_FORMAT_FLOAT64;
5311 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5312 info.nativeFormats |= RTAUDIO_FLOAT64;
5314 // Check that we have at least one supported format
5315 if ( info.nativeFormats == 0 ) {
5316 errorStream_ << "RtApiAlsa::getDeviceInfo: pcm device (" << name << ") data format not supported by RtAudio.";
5317 errorText_ = errorStream_.str();
5318 error( RtError::WARNING );
5322 // Get the device name
5324 result = snd_card_get_name( card, &cardname );
5326 sprintf( name, "hw:%s,%d", cardname, subdevice );
5329 // That's all ... close the device and return
5330 snd_pcm_close( phandle );
// Snapshot getDeviceInfo() results for every device into devices_ so that
// getDeviceInfo() can answer queries about devices that are busy once a
// stream is open (an open ALSA device cannot be re-probed).
5335 void RtApiAlsa :: saveDeviceInfo( void )
5339 unsigned int nDevices = getDeviceCount();
5340 devices_.resize( nDevices );
5341 for ( unsigned int i=0; i<nDevices; i++ )
5342 devices_[i] = getDeviceInfo( i );
// Open one direction (OUTPUT or INPUT) of an ALSA stream on `device`:
// locates the "hw:card,subdevice" name, opens the pcm, negotiates access
// (interleaved vs. non-interleaved), data format (with fallback ordering
// FLOAT64 -> FLOAT32 -> S32 -> S24 -> S16 -> S8), byte order, sample rate,
// channel count, period size/count, and software params (silence-fill so the
// device keeps running across xruns); allocates user/device buffers; links
// input+output pcms for duplex when possible; and spawns the callback thread
// (optionally SCHED_RR). Returns false (via the elided error/cleanup paths)
// on failure; signature and all caller-visible behavior unchanged.
// FIX(mojibake): the pthread_attr_setschedparam call below read
// "( &attr, ¶m );" — HTML-entity corruption of "( &attr, &param );"
// (&para; swallowed the leading "&param"); restored to pass the address of
// the local `struct sched_param param` filled in just above.
// NOTE(review): this listing elides lines (embedded numbering jumps): local
// declarations (name, chandle, phandle, value, dir, totalSize, out), many
// `if (result < 0)` guards, `else` lines, `goto error;` statements, the
// try block around `new AlsaHandle`, closing braces, the error: cleanup
// label header, and the final return are not visible here.
5345 bool RtApiAlsa :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
5346 unsigned int firstChannel, unsigned int sampleRate,
5347 RtAudioFormat format, unsigned int *bufferSize,
5348 RtAudio::StreamOptions *options )
5351 #if defined(__RTAUDIO_DEBUG__)
5353 snd_output_stdio_attach(&out, stderr, 0);
5356 // I'm not using the "plug" interface ... too much inconsistent behavior.
5358 unsigned nDevices = 0;
5359 int result, subdevice, card;
5363 // Count cards and devices
5365 snd_card_next( &card );
5366 while ( card >= 0 ) {
5367 sprintf( name, "hw:%d", card );
5368 result = snd_ctl_open( &chandle, name, SND_CTL_NONBLOCK );
5370 errorStream_ << "RtApiAlsa::probeDeviceOpen: control open, card = " << card << ", " << snd_strerror( result ) << ".";
5371 errorText_ = errorStream_.str();
5376 result = snd_ctl_pcm_next_device( chandle, &subdevice );
5377 if ( result < 0 ) break;
5378 if ( subdevice < 0 ) break;
5379 if ( nDevices == device ) {
5380 sprintf( name, "hw:%d,%d", card, subdevice );
5381 snd_ctl_close( chandle );
5386 snd_ctl_close( chandle );
5387 snd_card_next( &card );
5390 if ( nDevices == 0 ) {
5391 // This should not happen because a check is made before this function is called.
5392 errorText_ = "RtApiAlsa::probeDeviceOpen: no devices found!";
5396 if ( device >= nDevices ) {
5397 // This should not happen because a check is made before this function is called.
5398 errorText_ = "RtApiAlsa::probeDeviceOpen: device ID is invalid!";
5404 // The getDeviceInfo() function will not work for a device that is
5405 // already open. Thus, we'll probe the system before opening a
5406 // stream and save the results for use by getDeviceInfo().
5407 if ( mode == OUTPUT || ( mode == INPUT && stream_.mode != OUTPUT ) ) // only do once
5408 this->saveDeviceInfo();
5410 snd_pcm_stream_t stream;
5411 if ( mode == OUTPUT )
5412 stream = SND_PCM_STREAM_PLAYBACK;
5414 stream = SND_PCM_STREAM_CAPTURE;
5417 int openMode = SND_PCM_ASYNC;
5418 result = snd_pcm_open( &phandle, name, stream, openMode );
5420 if ( mode == OUTPUT )
5421 errorStream_ << "RtApiAlsa::probeDeviceOpen: pcm device (" << name << ") won't open for output.";
5423 errorStream_ << "RtApiAlsa::probeDeviceOpen: pcm device (" << name << ") won't open for input.";
5424 errorText_ = errorStream_.str();
5428 // Fill the parameter structure.
5429 snd_pcm_hw_params_t *hw_params;
5430 snd_pcm_hw_params_alloca( &hw_params );
5431 result = snd_pcm_hw_params_any( phandle, hw_params );
5433 snd_pcm_close( phandle );
5434 errorStream_ << "RtApiAlsa::probeDeviceOpen: error getting pcm device (" << name << ") parameters, " << snd_strerror( result ) << ".";
5435 errorText_ = errorStream_.str();
5439 #if defined(__RTAUDIO_DEBUG__)
5440 fprintf( stderr, "\nRtApiAlsa: dump hardware params just after device open:\n\n" );
5441 snd_pcm_hw_params_dump( hw_params, out );
5444 // Set access ... check user preference.
5445 if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) {
5446 stream_.userInterleaved = false;
5447 result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_NONINTERLEAVED );
5449 result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_INTERLEAVED );
5450 stream_.deviceInterleaved[mode] = true;
5453 stream_.deviceInterleaved[mode] = false;
5456 stream_.userInterleaved = true;
5457 result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_INTERLEAVED );
5459 result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_NONINTERLEAVED );
5460 stream_.deviceInterleaved[mode] = false;
5463 stream_.deviceInterleaved[mode] = true;
5467 snd_pcm_close( phandle );
5468 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting pcm device (" << name << ") access, " << snd_strerror( result ) << ".";
5469 errorText_ = errorStream_.str();
5473 // Determine how to set the device format.
5474 stream_.userFormat = format;
5475 snd_pcm_format_t deviceFormat = SND_PCM_FORMAT_UNKNOWN;
5477 if ( format == RTAUDIO_SINT8 )
5478 deviceFormat = SND_PCM_FORMAT_S8;
5479 else if ( format == RTAUDIO_SINT16 )
5480 deviceFormat = SND_PCM_FORMAT_S16;
5481 else if ( format == RTAUDIO_SINT24 )
5482 deviceFormat = SND_PCM_FORMAT_S24;
5483 else if ( format == RTAUDIO_SINT32 )
5484 deviceFormat = SND_PCM_FORMAT_S32;
5485 else if ( format == RTAUDIO_FLOAT32 )
5486 deviceFormat = SND_PCM_FORMAT_FLOAT;
5487 else if ( format == RTAUDIO_FLOAT64 )
5488 deviceFormat = SND_PCM_FORMAT_FLOAT64;
5490 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat) == 0) {
5491 stream_.deviceFormat[mode] = format;
5495 // The user requested format is not natively supported by the device.
5496 deviceFormat = SND_PCM_FORMAT_FLOAT64;
5497 if ( snd_pcm_hw_params_test_format( phandle, hw_params, deviceFormat ) == 0 ) {
5498 stream_.deviceFormat[mode] = RTAUDIO_FLOAT64;
5502 deviceFormat = SND_PCM_FORMAT_FLOAT;
5503 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
5504 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
5508 deviceFormat = SND_PCM_FORMAT_S32;
5509 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
5510 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
5514 deviceFormat = SND_PCM_FORMAT_S24;
5515 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
5516 stream_.deviceFormat[mode] = RTAUDIO_SINT24;
5520 deviceFormat = SND_PCM_FORMAT_S16;
5521 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
5522 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
5526 deviceFormat = SND_PCM_FORMAT_S8;
5527 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
5528 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
5532 // If we get here, no supported format was found.
5533 errorStream_ << "RtApiAlsa::probeDeviceOpen: pcm device " << device << " data format not supported by RtAudio.";
5534 errorText_ = errorStream_.str();
5538 result = snd_pcm_hw_params_set_format( phandle, hw_params, deviceFormat );
5540 snd_pcm_close( phandle );
5541 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting pcm device (" << name << ") data format, " << snd_strerror( result ) << ".";
5542 errorText_ = errorStream_.str();
5546 // Determine whether byte-swaping is necessary.
5547 stream_.doByteSwap[mode] = false;
5548 if ( deviceFormat != SND_PCM_FORMAT_S8 ) {
5549 result = snd_pcm_format_cpu_endian( deviceFormat );
5551 stream_.doByteSwap[mode] = true;
5552 else if (result < 0) {
5553 snd_pcm_close( phandle );
5554 errorStream_ << "RtApiAlsa::probeDeviceOpen: error getting pcm device (" << name << ") endian-ness, " << snd_strerror( result ) << ".";
5555 errorText_ = errorStream_.str();
5560 // Set the sample rate.
5561 result = snd_pcm_hw_params_set_rate_near( phandle, hw_params, (unsigned int*) &sampleRate, 0 );
5563 snd_pcm_close( phandle );
5564 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting sample rate on device (" << name << "), " << snd_strerror( result ) << ".";
5565 errorText_ = errorStream_.str();
5569 // Determine the number of channels for this device. We support a possible
5570 // minimum device channel number > than the value requested by the user.
5571 stream_.nUserChannels[mode] = channels;
5573 result = snd_pcm_hw_params_get_channels_max( hw_params, &value );
5574 unsigned int deviceChannels = value;
5575 if ( result < 0 || deviceChannels < channels + firstChannel ) {
5576 snd_pcm_close( phandle );
5577 errorStream_ << "RtApiAlsa::probeDeviceOpen: requested channel parameters not supported by device (" << name << "), " << snd_strerror( result ) << ".";
5578 errorText_ = errorStream_.str();
5582 result = snd_pcm_hw_params_get_channels_min( hw_params, &value );
5584 snd_pcm_close( phandle );
5585 errorStream_ << "RtApiAlsa::probeDeviceOpen: error getting minimum channels for device (" << name << "), " << snd_strerror( result ) << ".";
5586 errorText_ = errorStream_.str();
5589 deviceChannels = value;
5590 if ( deviceChannels < channels + firstChannel ) deviceChannels = channels + firstChannel;
5591 stream_.nDeviceChannels[mode] = deviceChannels;
5593 // Set the device channels.
5594 result = snd_pcm_hw_params_set_channels( phandle, hw_params, deviceChannels );
5596 snd_pcm_close( phandle );
5597 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting channels for device (" << name << "), " << snd_strerror( result ) << ".";
5598 errorText_ = errorStream_.str();
5602 // Set the buffer number, which in ALSA is referred to as the "period".
5604 unsigned int periods = 0;
5605 if ( options ) periods = options->numberOfBuffers;
5606 totalSize = *bufferSize * periods;
5608 // Set the buffer (or period) size.
5609 snd_pcm_uframes_t periodSize = *bufferSize;
5610 result = snd_pcm_hw_params_set_period_size_near( phandle, hw_params, &periodSize, &dir );
5612 snd_pcm_close( phandle );
5613 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting period size for device (" << name << "), " << snd_strerror( result ) << ".";
5614 errorText_ = errorStream_.str();
5617 *bufferSize = periodSize;
5619 if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) periods = 2;
5620 else periods = totalSize / *bufferSize;
5621 // Even though the hardware might allow 1 buffer, it won't work reliably.
5622 if ( periods < 2 ) periods = 2;
5623 result = snd_pcm_hw_params_set_periods_near( phandle, hw_params, &periods, &dir );
5625 snd_pcm_close( phandle );
5626 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting periods for device (" << name << "), " << snd_strerror( result ) << ".";
5627 errorText_ = errorStream_.str();
5631 // If attempting to setup a duplex stream, the bufferSize parameter
5632 // MUST be the same in both directions!
5633 if ( stream_.mode == OUTPUT && mode == INPUT && *bufferSize != stream_.bufferSize ) {
5634 errorStream_ << "RtApiAlsa::probeDeviceOpen: system error setting buffer size for duplex stream on device (" << name << ").";
5635 errorText_ = errorStream_.str();
5639 stream_.bufferSize = *bufferSize;
5641 // Install the hardware configuration
5642 result = snd_pcm_hw_params( phandle, hw_params );
5644 snd_pcm_close( phandle );
5645 errorStream_ << "RtApiAlsa::probeDeviceOpen: error installing hardware configuration on device (" << name << "), " << snd_strerror( result ) << ".";
5646 errorText_ = errorStream_.str();
5650 #if defined(__RTAUDIO_DEBUG__)
5651 fprintf(stderr, "\nRtApiAlsa: dump hardware params after installation:\n\n");
5652 snd_pcm_hw_params_dump( hw_params, out );
5655 // Set the software configuration to fill buffers with zeros and prevent device stopping on xruns.
5656 snd_pcm_sw_params_t *sw_params = NULL;
5657 snd_pcm_sw_params_alloca( &sw_params );
5658 snd_pcm_sw_params_current( phandle, sw_params );
5659 snd_pcm_sw_params_set_start_threshold( phandle, sw_params, *bufferSize );
5660 snd_pcm_sw_params_set_stop_threshold( phandle, sw_params, ULONG_MAX );
5661 snd_pcm_sw_params_set_silence_threshold( phandle, sw_params, 0 );
5663 // The following two settings were suggested by Theo Veenker
5664 //snd_pcm_sw_params_set_avail_min( phandle, sw_params, *bufferSize );
5665 //snd_pcm_sw_params_set_xfer_align( phandle, sw_params, 1 );
5667 // here are two options for a fix
5668 //snd_pcm_sw_params_set_silence_size( phandle, sw_params, ULONG_MAX );
5669 snd_pcm_uframes_t val;
5670 snd_pcm_sw_params_get_boundary( sw_params, &val );
5671 snd_pcm_sw_params_set_silence_size( phandle, sw_params, val );
5673 result = snd_pcm_sw_params( phandle, sw_params );
5675 snd_pcm_close( phandle );
5676 errorStream_ << "RtApiAlsa::probeDeviceOpen: error installing software configuration on device (" << name << "), " << snd_strerror( result ) << ".";
5677 errorText_ = errorStream_.str();
5681 #if defined(__RTAUDIO_DEBUG__)
5682 fprintf(stderr, "\nRtApiAlsa: dump software params after installation:\n\n");
5683 snd_pcm_sw_params_dump( sw_params, out );
5686 // Set flags for buffer conversion
5687 stream_.doConvertBuffer[mode] = false;
5688 if ( stream_.userFormat != stream_.deviceFormat[mode] )
5689 stream_.doConvertBuffer[mode] = true;
5690 if ( stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode] )
5691 stream_.doConvertBuffer[mode] = true;
5692 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
5693 stream_.nUserChannels[mode] > 1 )
5694 stream_.doConvertBuffer[mode] = true;
5696 // Allocate the ApiHandle if necessary and then save.
5697 AlsaHandle *apiInfo = 0;
5698 if ( stream_.apiHandle == 0 ) {
5700 apiInfo = (AlsaHandle *) new AlsaHandle;
5702 catch ( std::bad_alloc& ) {
5703 errorText_ = "RtApiAlsa::probeDeviceOpen: error allocating AlsaHandle memory.";
5707 if ( pthread_cond_init( &apiInfo->runnable, NULL ) ) {
5708 errorText_ = "RtApiAlsa::probeDeviceOpen: error initializing pthread condition variable.";
5712 stream_.apiHandle = (void *) apiInfo;
5713 apiInfo->handles[0] = 0;
5714 apiInfo->handles[1] = 0;
5717 apiInfo = (AlsaHandle *) stream_.apiHandle;
5719 apiInfo->handles[mode] = phandle;
5721 // Allocate necessary internal buffers.
5722 unsigned long bufferBytes;
5723 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
5724 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
5725 if ( stream_.userBuffer[mode] == NULL ) {
5726 errorText_ = "RtApiAlsa::probeDeviceOpen: error allocating user buffer memory.";
5730 if ( stream_.doConvertBuffer[mode] ) {
5732 bool makeBuffer = true;
5733 bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
5734 if ( mode == INPUT ) {
5735 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
5736 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
5737 if ( bufferBytes <= bytesOut ) makeBuffer = false;
5742 bufferBytes *= *bufferSize;
5743 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
5744 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
5745 if ( stream_.deviceBuffer == NULL ) {
5746 errorText_ = "RtApiAlsa::probeDeviceOpen: error allocating device buffer memory.";
5752 stream_.sampleRate = sampleRate;
5753 stream_.nBuffers = periods;
5754 stream_.device[mode] = device;
5755 stream_.state = STREAM_STOPPED;
5757 // Setup the buffer conversion information structure.
5758 if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, firstChannel );
5760 // Setup thread if necessary.
5761 if ( stream_.mode == OUTPUT && mode == INPUT ) {
5762 // We had already set up an output stream.
5763 stream_.mode = DUPLEX;
5764 // Link the streams if possible.
5765 apiInfo->synchronized = false;
5766 if ( snd_pcm_link( apiInfo->handles[0], apiInfo->handles[1] ) == 0 )
5767 apiInfo->synchronized = true;
5769 errorText_ = "RtApiAlsa::probeDeviceOpen: unable to synchronize input and output devices.";
5770 error( RtError::WARNING );
5774 stream_.mode = mode;
5776 // Setup callback thread.
5777 stream_.callbackInfo.object = (void *) this;
5779 // Set the thread attributes for joinable and realtime scheduling
5780 // priority (optional). The higher priority will only take affect
5781 // if the program is run as root or suid. Note, under Linux
5782 // processes with CAP_SYS_NICE privilege, a user can change
5783 // scheduling policy and priority (thus need not be root). See
5784 // POSIX "capabilities".
5785 pthread_attr_t attr;
5786 pthread_attr_init( &attr );
5787 pthread_attr_setdetachstate( &attr, PTHREAD_CREATE_JOINABLE );
5788 #ifdef SCHED_RR // Undefined with some OSes (eg: NetBSD 1.6.x with GNU Pthread)
5789 if ( options && options->flags & RTAUDIO_SCHEDULE_REALTIME ) {
5790 struct sched_param param;
5791 int priority = options->priority;
5792 int min = sched_get_priority_min( SCHED_RR );
5793 int max = sched_get_priority_max( SCHED_RR );
5794 if ( priority < min ) priority = min;
5795 else if ( priority > max ) priority = max;
5796 param.sched_priority = priority;
5797 pthread_attr_setschedparam( &attr, &param );
5798 pthread_attr_setschedpolicy( &attr, SCHED_RR );
5801 pthread_attr_setschedpolicy( &attr, SCHED_OTHER );
5803 pthread_attr_setschedpolicy( &attr, SCHED_OTHER );
5806 stream_.callbackInfo.isRunning = true;
5807 result = pthread_create( &stream_.callbackInfo.thread, &attr, alsaCallbackHandler, &stream_.callbackInfo );
5808 pthread_attr_destroy( &attr );
5810 stream_.callbackInfo.isRunning = false;
5811 errorText_ = "RtApiAlsa::error creating callback thread!";
5820 pthread_cond_destroy( &apiInfo->runnable );
5821 if ( apiInfo->handles[0] ) snd_pcm_close( apiInfo->handles[0] );
5822 if ( apiInfo->handles[1] ) snd_pcm_close( apiInfo->handles[1] );
5824 stream_.apiHandle = 0;
5827 for ( int i=0; i<2; i++ ) {
5828 if ( stream_.userBuffer[i] ) {
5829 free( stream_.userBuffer[i] );
5830 stream_.userBuffer[i] = 0;
5834 if ( stream_.deviceBuffer ) {
5835 free( stream_.deviceBuffer );
5836 stream_.deviceBuffer = 0;
// Tear down the open ALSA stream: stop the callback thread (signalling the
// runnable condition in case it is parked waiting for startStream), join it,
// drop any running pcms, then destroy the condition variable, close both pcm
// handles, free the AlsaHandle and all user/device buffers, and reset the
// stream state to CLOSED.
// NOTE(review): some lines (closing braces, `delete apiInfo`-style cleanup
// between setting apiHandle = 0 and the buffer loop) are elided in this listing.
5842 void RtApiAlsa :: closeStream()
5844 if ( stream_.state == STREAM_CLOSED ) {
5845 errorText_ = "RtApiAlsa::closeStream(): no open stream to close!";
5846 error( RtError::WARNING );
5850 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
5851 stream_.callbackInfo.isRunning = false;
5852 MUTEX_LOCK( &stream_.mutex );
// If the stream is stopped the callback thread is blocked on `runnable`;
// wake it so it can observe isRunning == false and exit.
5853 if ( stream_.state == STREAM_STOPPED )
5854 pthread_cond_signal( &apiInfo->runnable );
5855 MUTEX_UNLOCK( &stream_.mutex );
5856 pthread_join( stream_.callbackInfo.thread, NULL );
5858 if ( stream_.state == STREAM_RUNNING ) {
5859 stream_.state = STREAM_STOPPED;
5860 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX )
5861 snd_pcm_drop( apiInfo->handles[0] );
5862 if ( stream_.mode == INPUT || stream_.mode == DUPLEX )
5863 snd_pcm_drop( apiInfo->handles[1] );
5867 pthread_cond_destroy( &apiInfo->runnable );
5868 if ( apiInfo->handles[0] ) snd_pcm_close( apiInfo->handles[0] );
5869 if ( apiInfo->handles[1] ) snd_pcm_close( apiInfo->handles[1] );
5871 stream_.apiHandle = 0;
5874 for ( int i=0; i<2; i++ ) {
5875 if ( stream_.userBuffer[i] ) {
5876 free( stream_.userBuffer[i] );
5877 stream_.userBuffer[i] = 0;
5881 if ( stream_.deviceBuffer ) {
5882 free( stream_.deviceBuffer );
5883 stream_.deviceBuffer = 0;
5886 stream_.mode = UNINITIALIZED;
5887 stream_.state = STREAM_CLOSED;
// Start a stopped stream: under the stream mutex, snd_pcm_prepare() each pcm
// that is not already PREPARED (the capture pcm is skipped when it is linked
// to playback via snd_pcm_link — see probeDeviceOpen), mark the stream
// RUNNING, then wake the parked callback thread via the runnable condition.
// NOTE(review): the declaration/initialization of `result`, some `if (result
// < 0)` guards, `goto unlock;`-style error paths, and closing braces are
// elided in this listing (embedded numbering jumps).
5890 void RtApiAlsa :: startStream()
5892 // This method calls snd_pcm_prepare if the device isn't already in that state.
5895 if ( stream_.state == STREAM_RUNNING ) {
5896 errorText_ = "RtApiAlsa::startStream(): the stream is already running!";
5897 error( RtError::WARNING );
5901 MUTEX_LOCK( &stream_.mutex );
5904 snd_pcm_state_t state;
5905 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
5906 snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles;
5907 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
5908 state = snd_pcm_state( handle[0] );
5909 if ( state != SND_PCM_STATE_PREPARED ) {
5910 result = snd_pcm_prepare( handle[0] );
5912 errorStream_ << "RtApiAlsa::startStream: error preparing output pcm device, " << snd_strerror( result ) << ".";
5913 errorText_ = errorStream_.str();
5919 if ( ( stream_.mode == INPUT || stream_.mode == DUPLEX ) && !apiInfo->synchronized ) {
5920 state = snd_pcm_state( handle[1] );
5921 if ( state != SND_PCM_STATE_PREPARED ) {
5922 result = snd_pcm_prepare( handle[1] );
5924 errorStream_ << "RtApiAlsa::startStream: error preparing input pcm device, " << snd_strerror( result ) << ".";
5925 errorText_ = errorStream_.str();
5931 stream_.state = STREAM_RUNNING;
5934 MUTEX_UNLOCK( &stream_.mutex );
5936 pthread_cond_signal( &apiInfo->runnable );
5938 if ( result >= 0 ) return;
5939 error( RtError::SYSTEM_ERROR );
// Stop a running ALSA stream gracefully: drain pending output samples
// (unless the devices are linked, in which case drop), then stop input.
5942 void RtApiAlsa :: stopStream()
5945 if ( stream_.state == STREAM_STOPPED ) {
5946 errorText_ = "RtApiAlsa::stopStream(): the stream is already stopped!";
5947 error( RtError::WARNING );
5951 MUTEX_LOCK( &stream_.mutex );
// The state might have changed while waiting to acquire the mutex.
5953 if ( stream_.state == STREAM_STOPPED ) {
5954 MUTEX_UNLOCK( &stream_.mutex );
5959 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
5960 snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles;
5961 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
// Linked (synchronized) devices are dropped; otherwise drain so queued
// output is played to completion.
5962 if ( apiInfo->synchronized )
5963 result = snd_pcm_drop( handle[0] );
5965 result = snd_pcm_drain( handle[0] );
5967 errorStream_ << "RtApiAlsa::stopStream: error draining output pcm device, " << snd_strerror( result ) << ".";
5968 errorText_ = errorStream_.str();
// Input is always dropped — there is nothing useful to drain on capture.
5973 if ( ( stream_.mode == INPUT || stream_.mode == DUPLEX ) && !apiInfo->synchronized ) {
5974 result = snd_pcm_drop( handle[1] );
5976 errorStream_ << "RtApiAlsa::stopStream: error stopping input pcm device, " << snd_strerror( result ) << ".";
5977 errorText_ = errorStream_.str();
5983 stream_.state = STREAM_STOPPED;
5984 MUTEX_UNLOCK( &stream_.mutex );
5986 if ( result >= 0 ) return;
5987 error( RtError::SYSTEM_ERROR );
// Abort a running ALSA stream immediately: drop (discard) pending
// samples on both devices instead of draining, unlike stopStream().
5990 void RtApiAlsa :: abortStream()
5993 if ( stream_.state == STREAM_STOPPED ) {
5994 errorText_ = "RtApiAlsa::abortStream(): the stream is already stopped!";
5995 error( RtError::WARNING );
5999 MUTEX_LOCK( &stream_.mutex );
// The state might have changed while waiting to acquire the mutex.
6001 if ( stream_.state == STREAM_STOPPED ) {
6002 MUTEX_UNLOCK( &stream_.mutex );
6007 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
6008 snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles;
6009 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
6010 result = snd_pcm_drop( handle[0] );
6012 errorStream_ << "RtApiAlsa::abortStream: error aborting output pcm device, " << snd_strerror( result ) << ".";
6013 errorText_ = errorStream_.str();
// Skip the capture drop when devices are linked; dropping playback
// handles both in that case.
6018 if ( ( stream_.mode == INPUT || stream_.mode == DUPLEX ) && !apiInfo->synchronized ) {
6019 result = snd_pcm_drop( handle[1] );
6021 errorStream_ << "RtApiAlsa::abortStream: error aborting input pcm device, " << snd_strerror( result ) << ".";
6022 errorText_ = errorStream_.str();
6028 stream_.state = STREAM_STOPPED;
6029 MUTEX_UNLOCK( &stream_.mutex );
6031 if ( result >= 0 ) return;
6032 error( RtError::SYSTEM_ERROR );
// One iteration of the ALSA callback loop: wait while stopped, invoke
// the user callback, then read capture data from and/or write playback
// data to the pcm device(s), handling xruns and format conversion.
6035 void RtApiAlsa :: callbackEvent()
6037 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
// While the stream is stopped, block on the condition variable until
// startStream() or closeStream() signals it. If the state is still not
// RUNNING after waking, the stream is being closed — bail out.
6038 if ( stream_.state == STREAM_STOPPED ) {
6039 MUTEX_LOCK( &stream_.mutex );
6040 pthread_cond_wait( &apiInfo->runnable, &stream_.mutex );
6041 if ( stream_.state != STREAM_RUNNING ) {
6042 MUTEX_UNLOCK( &stream_.mutex );
6045 MUTEX_UNLOCK( &stream_.mutex );
6048 if ( stream_.state == STREAM_CLOSED ) {
6049 errorText_ = "RtApiAlsa::callbackEvent(): the stream is closed ... this shouldn't happen!";
6050 error( RtError::WARNING );
// Invoke the user callback, reporting any xrun (underflow/overflow)
// flags recorded by earlier device I/O, then clearing them.
6054 int doStopStream = 0;
6055 RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;
6056 double streamTime = getStreamTime();
6057 RtAudioStreamStatus status = 0;
6058 if ( stream_.mode != INPUT && apiInfo->xrun[0] == true ) {
6059 status |= RTAUDIO_OUTPUT_UNDERFLOW;
6060 apiInfo->xrun[0] = false;
6062 if ( stream_.mode != OUTPUT && apiInfo->xrun[1] == true ) {
6063 status |= RTAUDIO_INPUT_OVERFLOW;
6064 apiInfo->xrun[1] = false;
6066 doStopStream = callback( stream_.userBuffer[0], stream_.userBuffer[1],
6067 stream_.bufferSize, streamTime, status, stream_.callbackInfo.userData );
// A callback return value of 2 requests an immediate abort.
6069 if ( doStopStream == 2 ) {
6074 MUTEX_LOCK( &stream_.mutex );
6076 // The state might change while waiting on a mutex.
6077 if ( stream_.state == STREAM_STOPPED ) goto unlock;
6083 snd_pcm_sframes_t frames;
6084 RtAudioFormat format;
6085 handle = (snd_pcm_t **) apiInfo->handles;
// ---- Capture side ----
6087 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
6089 // Setup parameters.
// Read into the device buffer when conversion is needed, otherwise
// directly into the user buffer.
6090 if ( stream_.doConvertBuffer[1] ) {
6091 buffer = stream_.deviceBuffer;
6092 channels = stream_.nDeviceChannels[1];
6093 format = stream_.deviceFormat[1];
6096 buffer = stream_.userBuffer[1];
6097 channels = stream_.nUserChannels[1];
6098 format = stream_.userFormat;
6101 // Read samples from device in interleaved/non-interleaved format.
6102 if ( stream_.deviceInterleaved[1] )
6103 result = snd_pcm_readi( handle[1], buffer, stream_.bufferSize );
// Non-interleaved: build a per-channel pointer array into the buffer.
6105 void *bufs[channels];
6106 size_t offset = stream_.bufferSize * formatBytes( format );
6107 for ( int i=0; i<channels; i++ )
6108 bufs[i] = (void *) (buffer + (i * offset));
6109 result = snd_pcm_readn( handle[1], bufs, stream_.bufferSize );
6112 if ( result < (int) stream_.bufferSize ) {
6113 // Either an error or overrun occured.
// -EPIPE with state XRUN means a capture overrun: flag it for the next
// callback and re-prepare the device so reads can continue.
6114 if ( result == -EPIPE ) {
6115 snd_pcm_state_t state = snd_pcm_state( handle[1] );
6116 if ( state == SND_PCM_STATE_XRUN ) {
6117 apiInfo->xrun[1] = true;
6118 result = snd_pcm_prepare( handle[1] );
6120 errorStream_ << "RtApiAlsa::callbackEvent: error preparing device after overrun, " << snd_strerror( result ) << ".";
6121 errorText_ = errorStream_.str();
6125 errorStream_ << "RtApiAlsa::callbackEvent: error, current state is " << snd_pcm_state_name( state ) << ", " << snd_strerror( result ) << ".";
6126 errorText_ = errorStream_.str();
6130 errorStream_ << "RtApiAlsa::callbackEvent: audio read error, " << snd_strerror( result ) << ".";
6131 errorText_ = errorStream_.str();
6133 error( RtError::WARNING );
6137 // Do byte swapping if necessary.
6138 if ( stream_.doByteSwap[1] )
6139 byteSwapBuffer( buffer, stream_.bufferSize * channels, format );
6141 // Do buffer conversion if necessary.
6142 if ( stream_.doConvertBuffer[1] )
6143 convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
6145 // Check stream latency
6146 result = snd_pcm_delay( handle[1], &frames );
6147 if ( result == 0 && frames > 0 ) stream_.latency[1] = frames;
// ---- Playback side ----
6152 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
6154 // Setup parameters and do buffer conversion if necessary.
6155 if ( stream_.doConvertBuffer[0] ) {
6156 buffer = stream_.deviceBuffer;
6157 convertBuffer( buffer, stream_.userBuffer[0], stream_.convertInfo[0] );
6158 channels = stream_.nDeviceChannels[0];
6159 format = stream_.deviceFormat[0];
6162 buffer = stream_.userBuffer[0];
6163 channels = stream_.nUserChannels[0];
6164 format = stream_.userFormat;
6167 // Do byte swapping if necessary.
6168 if ( stream_.doByteSwap[0] )
6169 byteSwapBuffer(buffer, stream_.bufferSize * channels, format);
6171 // Write samples to device in interleaved/non-interleaved format.
6172 if ( stream_.deviceInterleaved[0] )
6173 result = snd_pcm_writei( handle[0], buffer, stream_.bufferSize );
// Non-interleaved: build a per-channel pointer array into the buffer.
6175 void *bufs[channels];
6176 size_t offset = stream_.bufferSize * formatBytes( format );
6177 for ( int i=0; i<channels; i++ )
6178 bufs[i] = (void *) (buffer + (i * offset));
6179 result = snd_pcm_writen( handle[0], bufs, stream_.bufferSize );
6182 if ( result < (int) stream_.bufferSize ) {
6183 // Either an error or underrun occured.
// -EPIPE with state XRUN means a playback underrun: flag it and
// re-prepare the device so writes can continue.
6184 if ( result == -EPIPE ) {
6185 snd_pcm_state_t state = snd_pcm_state( handle[0] );
6186 if ( state == SND_PCM_STATE_XRUN ) {
6187 apiInfo->xrun[0] = true;
6188 result = snd_pcm_prepare( handle[0] );
6190 errorStream_ << "RtApiAlsa::callbackEvent: error preparing device after underrun, " << snd_strerror( result ) << ".";
6191 errorText_ = errorStream_.str();
6195 errorStream_ << "RtApiAlsa::callbackEvent: error, current state is " << snd_pcm_state_name( state ) << ", " << snd_strerror( result ) << ".";
6196 errorText_ = errorStream_.str();
6200 errorStream_ << "RtApiAlsa::callbackEvent: audio write error, " << snd_strerror( result ) << ".";
6201 errorText_ = errorStream_.str();
6203 error( RtError::WARNING );
6207 // Check stream latency
6208 result = snd_pcm_delay( handle[0], &frames );
6209 if ( result == 0 && frames > 0 ) stream_.latency[0] = frames;
6213 MUTEX_UNLOCK( &stream_.mutex );
// Advance the stream time by one buffer and honor a callback return
// value of 1 (graceful stop).
6215 RtApi::tickStreamTime();
6216 if ( doStopStream == 1 ) this->stopStream();
// Entry point for the ALSA callback thread (created in probeDeviceOpen).
// Loops invoking callbackEvent() until closeStream() clears isRunning.
6219 extern "C" void *alsaCallbackHandler( void *ptr )
6221 CallbackInfo *info = (CallbackInfo *) ptr;
6222 RtApiAlsa *object = (RtApiAlsa *) info->object;
6223 bool *isRunning = &info->isRunning;
6225 while ( *isRunning == true ) {
// Allow pthread_cancel() to take effect at a well-defined point.
6226 pthread_testcancel();
6227 object->callbackEvent();
6230 pthread_exit( NULL );
6233 //******************** End of __LINUX_ALSA__ *********************//
6237 #if defined(__LINUX_OSS__)
6240 #include <sys/ioctl.h>
6243 #include "soundcard.h"
6247 extern "C" void *ossCallbackHandler(void * ptr);
6249 // A structure to hold various information related to the OSS API
6252 int id[2]; // device ids
6255 pthread_cond_t runnable;
6258 :triggered(false) { id[0] = 0; id[1] = 0; xrun[0] = false; xrun[1] = false; }
// Default constructor — all stream state is initialized by the base class.
6261 RtApiOss :: RtApiOss()
6263 // Nothing to do here.
// Destructor: close any stream still open so device fds and buffers
// are released.
6266 RtApiOss :: ~RtApiOss()
6268 if ( stream_.state != STREAM_CLOSED ) closeStream();
// Return the number of OSS audio devices, queried via the mixer
// SNDCTL_SYSINFO ioctl (requires OSS version >= 4.0).
6271 unsigned int RtApiOss :: getDeviceCount( void )
6273 int mixerfd = open( "/dev/mixer", O_RDWR, 0 );
6274 if ( mixerfd == -1 ) {
6275 errorText_ = "RtApiOss::getDeviceCount: error opening '/dev/mixer'.";
6276 error( RtError::WARNING );
6280 oss_sysinfo sysinfo;
6281 if ( ioctl( mixerfd, SNDCTL_SYSINFO, &sysinfo ) == -1 ) {
6283 errorText_ = "RtApiOss::getDeviceCount: error getting sysinfo, OSS version >= 4.0 is required.";
6284 error( RtError::WARNING );
6289 return sysinfo.numaudios;
// Probe one OSS device (by index) and fill in an RtAudio::DeviceInfo:
// channel capabilities, native data formats, and supported sample rates.
// On failure, info.probed remains false and a WARNING/INVALID_USE error
// is reported.
6292 RtAudio::DeviceInfo RtApiOss :: getDeviceInfo( unsigned int device )
6294 RtAudio::DeviceInfo info;
6295 info.probed = false;
6297 int mixerfd = open( "/dev/mixer", O_RDWR, 0 );
6298 if ( mixerfd == -1 ) {
6299 errorText_ = "RtApiOss::getDeviceInfo: error opening '/dev/mixer'.";
6300 error( RtError::WARNING );
6304 oss_sysinfo sysinfo;
6305 int result = ioctl( mixerfd, SNDCTL_SYSINFO, &sysinfo );
6306 if ( result == -1 ) {
6308 errorText_ = "RtApiOss::getDeviceInfo: error getting sysinfo, OSS version >= 4.0 is required.";
6309 error( RtError::WARNING );
6313 unsigned nDevices = sysinfo.numaudios;
6314 if ( nDevices == 0 ) {
6316 errorText_ = "RtApiOss::getDeviceInfo: no devices found!";
6317 error( RtError::INVALID_USE );
6320 if ( device >= nDevices ) {
6322 errorText_ = "RtApiOss::getDeviceInfo: device ID is invalid!";
6323 error( RtError::INVALID_USE );
// Query per-device capabilities via SNDCTL_AUDIOINFO.
6326 oss_audioinfo ainfo;
6328 result = ioctl( mixerfd, SNDCTL_AUDIOINFO, &ainfo );
6330 if ( result == -1 ) {
6331 errorStream_ << "RtApiOss::getDeviceInfo: error getting device (" << ainfo.name << ") info.";
6332 errorText_ = errorStream_.str();
6333 error( RtError::WARNING );
// Channel counts: duplex channel count is the min of in/out when the
// device reports duplex capability.
6338 if ( ainfo.caps & PCM_CAP_OUTPUT ) info.outputChannels = ainfo.max_channels;
6339 if ( ainfo.caps & PCM_CAP_INPUT ) info.inputChannels = ainfo.max_channels;
6340 if ( ainfo.caps & PCM_CAP_DUPLEX ) {
6341 if ( info.outputChannels > 0 && info.inputChannels > 0 && ainfo.caps & PCM_CAP_DUPLEX )
6342 info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
6345 // Probe data formats ... do for input
// Map OSS AFMT_* bits (either endianness) onto RtAudio format flags.
6346 unsigned long mask = ainfo.iformats;
6347 if ( mask & AFMT_S16_LE || mask & AFMT_S16_BE )
6348 info.nativeFormats |= RTAUDIO_SINT16;
6349 if ( mask & AFMT_S8 )
6350 info.nativeFormats |= RTAUDIO_SINT8;
6351 if ( mask & AFMT_S32_LE || mask & AFMT_S32_BE )
6352 info.nativeFormats |= RTAUDIO_SINT32;
6353 if ( mask & AFMT_FLOAT )
6354 info.nativeFormats |= RTAUDIO_FLOAT32;
6355 if ( mask & AFMT_S24_LE || mask & AFMT_S24_BE )
6356 info.nativeFormats |= RTAUDIO_SINT24;
6358 // Check that we have at least one supported format
6359 if ( info.nativeFormats == 0 ) {
6360 errorStream_ << "RtApiOss::getDeviceInfo: device (" << ainfo.name << ") data format not supported by RtAudio.";
6361 errorText_ = errorStream_.str();
6362 error( RtError::WARNING );
6366 // Probe the supported sample rates.
// If the device lists explicit rates, intersect them with RtAudio's
// table; otherwise accept every table entry within [min_rate, max_rate].
6367 info.sampleRates.clear();
6368 if ( ainfo.nrates ) {
6369 for ( unsigned int i=0; i<ainfo.nrates; i++ ) {
6370 for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {
6371 if ( ainfo.rates[i] == SAMPLE_RATES[k] ) {
6372 info.sampleRates.push_back( SAMPLE_RATES[k] );
6379 // Check min and max rate values;
6380 for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {
6381 if ( ainfo.min_rate <= (int) SAMPLE_RATES[k] && ainfo.max_rate >= (int) SAMPLE_RATES[k] )
6382 info.sampleRates.push_back( SAMPLE_RATES[k] );
6386 if ( info.sampleRates.size() == 0 ) {
6387 errorStream_ << "RtApiOss::getDeviceInfo: no supported sample rates found for device (" << ainfo.name << ").";
6388 errorText_ = errorStream_.str();
6389 error( RtError::WARNING );
6393 info.name = ainfo.name;
// Open and configure an OSS device for the requested mode, channels,
// sample rate and format; allocate stream buffers, set up conversion
// flags, and spawn the callback thread (first open only). Returns
// FAILURE (with cleanup) on any error.
6400 bool RtApiOss :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
6401 unsigned int firstChannel, unsigned int sampleRate,
6402 RtAudioFormat format, unsigned int *bufferSize,
6403 RtAudio::StreamOptions *options )
// Validate the device index against the system device list.
6405 int mixerfd = open( "/dev/mixer", O_RDWR, 0 );
6406 if ( mixerfd == -1 ) {
6407 errorText_ = "RtApiOss::probeDeviceOpen: error opening '/dev/mixer'.";
6411 oss_sysinfo sysinfo;
6412 int result = ioctl( mixerfd, SNDCTL_SYSINFO, &sysinfo );
6413 if ( result == -1 ) {
6415 errorText_ = "RtApiOss::probeDeviceOpen: error getting sysinfo, OSS version >= 4.0 is required.";
6419 unsigned nDevices = sysinfo.numaudios;
6420 if ( nDevices == 0 ) {
6421 // This should not happen because a check is made before this function is called.
6423 errorText_ = "RtApiOss::probeDeviceOpen: no devices found!";
6427 if ( device >= nDevices ) {
6428 // This should not happen because a check is made before this function is called.
6430 errorText_ = "RtApiOss::probeDeviceOpen: device ID is invalid!";
6434 oss_audioinfo ainfo;
6436 result = ioctl( mixerfd, SNDCTL_AUDIOINFO, &ainfo );
6438 if ( result == -1 ) {
// NOTE(review): message says "getDeviceInfo" but this is probeDeviceOpen —
// looks like a copy-paste slip in the error text; confirm against upstream.
6439 errorStream_ << "RtApiOss::getDeviceInfo: error getting device (" << ainfo.name << ") info.";
6440 errorText_ = errorStream_.str();
6444 // Check if device supports input or output
6445 if ( ( mode == OUTPUT && !( ainfo.caps & PCM_CAP_OUTPUT ) ) ||
6446 ( mode == INPUT && !( ainfo.caps & PCM_CAP_INPUT ) ) ) {
6447 if ( mode == OUTPUT )
6448 errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support output.";
6450 errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support input.";
6451 errorText_ = errorStream_.str();
// Duplex handling: OSS devices must be reopened O_RDWR when the same
// device is used for both playback and capture.
6456 OssHandle *handle = (OssHandle *) stream_.apiHandle;
6457 if ( mode == OUTPUT )
6459 else { // mode == INPUT
6460 if (stream_.mode == OUTPUT && stream_.device[0] == device) {
6461 // We just set the same device for playback ... close and reopen for duplex (OSS only).
6462 close( handle->id[0] );
6464 if ( !( ainfo.caps & PCM_CAP_DUPLEX ) ) {
6465 errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support duplex mode.";
6466 errorText_ = errorStream_.str();
6469 // Check that the number previously set channels is the same.
6470 if ( stream_.nUserChannels[0] != channels ) {
6471 errorStream_ << "RtApiOss::probeDeviceOpen: input/output channels must be equal for OSS duplex device (" << ainfo.name << ").";
6472 errorText_ = errorStream_.str();
6481 // Set exclusive access if specified.
6482 if ( options && options->flags & RTAUDIO_HOG_DEVICE ) flags |= O_EXCL;
6484 // Try to open the device.
6486 fd = open( ainfo.devnode, flags, 0 );
6488 if ( errno == EBUSY )
6489 errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") is busy.";
6491 errorStream_ << "RtApiOss::probeDeviceOpen: error opening device (" << ainfo.name << ").";
6492 errorText_ = errorStream_.str();
6496 // For duplex operation, specifically set this mode (this doesn't seem to work).
6498 if ( flags | O_RDWR ) {
6499 result = ioctl( fd, SNDCTL_DSP_SETDUPLEX, NULL );
6500 if ( result == -1) {
6501 errorStream_ << "RtApiOss::probeDeviceOpen: error setting duplex mode for device (" << ainfo.name << ").";
6502 errorText_ = errorStream_.str();
6508 // Check the device channel support.
6509 stream_.nUserChannels[mode] = channels;
6510 if ( ainfo.max_channels < (int)(channels + firstChannel) ) {
6512 errorStream_ << "RtApiOss::probeDeviceOpen: the device (" << ainfo.name << ") does not support requested channel parameters.";
6513 errorText_ = errorStream_.str();
6517 // Set the number of channels.
6518 int deviceChannels = channels + firstChannel;
6519 result = ioctl( fd, SNDCTL_DSP_CHANNELS, &deviceChannels );
6520 if ( result == -1 || deviceChannels < (int)(channels + firstChannel) ) {
6522 errorStream_ << "RtApiOss::probeDeviceOpen: error setting channel parameters on device (" << ainfo.name << ").";
6523 errorText_ = errorStream_.str();
6526 stream_.nDeviceChannels[mode] = deviceChannels;
6528 // Get the data format mask
6530 result = ioctl( fd, SNDCTL_DSP_GETFMTS, &mask );
6531 if ( result == -1 ) {
6533 errorStream_ << "RtApiOss::probeDeviceOpen: error getting device (" << ainfo.name << ") data formats.";
6534 errorText_ = errorStream_.str();
6538 // Determine how to set the device format.
// Prefer the native-endian variant of the requested format; fall back
// to the opposite-endian variant with byte swapping enabled.
6539 stream_.userFormat = format;
6540 int deviceFormat = -1;
6541 stream_.doByteSwap[mode] = false;
6542 if ( format == RTAUDIO_SINT8 ) {
6543 if ( mask & AFMT_S8 ) {
6544 deviceFormat = AFMT_S8;
6545 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
6548 else if ( format == RTAUDIO_SINT16 ) {
6549 if ( mask & AFMT_S16_NE ) {
6550 deviceFormat = AFMT_S16_NE;
6551 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
6553 else if ( mask & AFMT_S16_OE ) {
6554 deviceFormat = AFMT_S16_OE;
6555 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
6556 stream_.doByteSwap[mode] = true;
6559 else if ( format == RTAUDIO_SINT24 ) {
6560 if ( mask & AFMT_S24_NE ) {
6561 deviceFormat = AFMT_S24_NE;
6562 stream_.deviceFormat[mode] = RTAUDIO_SINT24;
6564 else if ( mask & AFMT_S24_OE ) {
6565 deviceFormat = AFMT_S24_OE;
6566 stream_.deviceFormat[mode] = RTAUDIO_SINT24;
6567 stream_.doByteSwap[mode] = true;
6570 else if ( format == RTAUDIO_SINT32 ) {
6571 if ( mask & AFMT_S32_NE ) {
6572 deviceFormat = AFMT_S32_NE;
6573 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
6575 else if ( mask & AFMT_S32_OE ) {
6576 deviceFormat = AFMT_S32_OE;
6577 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
6578 stream_.doByteSwap[mode] = true;
6582 if ( deviceFormat == -1 ) {
6583 // The user requested format is not natively supported by the device.
// Pick any supported format (RtAudio will convert), widest-common first.
6584 if ( mask & AFMT_S16_NE ) {
6585 deviceFormat = AFMT_S16_NE;
6586 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
6588 else if ( mask & AFMT_S32_NE ) {
6589 deviceFormat = AFMT_S32_NE;
6590 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
6592 else if ( mask & AFMT_S24_NE ) {
6593 deviceFormat = AFMT_S24_NE;
6594 stream_.deviceFormat[mode] = RTAUDIO_SINT24;
6596 else if ( mask & AFMT_S16_OE ) {
6597 deviceFormat = AFMT_S16_OE;
6598 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
6599 stream_.doByteSwap[mode] = true;
6601 else if ( mask & AFMT_S32_OE ) {
6602 deviceFormat = AFMT_S32_OE;
6603 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
6604 stream_.doByteSwap[mode] = true;
6606 else if ( mask & AFMT_S24_OE ) {
6607 deviceFormat = AFMT_S24_OE;
6608 stream_.deviceFormat[mode] = RTAUDIO_SINT24;
6609 stream_.doByteSwap[mode] = true;
6611 else if ( mask & AFMT_S8) {
6612 deviceFormat = AFMT_S8;
6613 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
6617 if ( stream_.deviceFormat[mode] == 0 ) {
6618 // This really shouldn't happen ...
6620 errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") data format not supported by RtAudio.";
6621 errorText_ = errorStream_.str();
6625 // Set the data format.
6626 int temp = deviceFormat;
6627 result = ioctl( fd, SNDCTL_DSP_SETFMT, &deviceFormat );
6628 if ( result == -1 || deviceFormat != temp ) {
6630 errorStream_ << "RtApiOss::probeDeviceOpen: error setting data format on device (" << ainfo.name << ").";
6631 errorText_ = errorStream_.str();
6635 // Attempt to set the buffer size. According to OSS, the minimum
6636 // number of buffers is two. The supposed minimum buffer size is 16
6637 // bytes, so that will be our lower bound. The argument to this
6638 // call is in the form 0xMMMMSSSS (hex), where the buffer size (in
6639 // bytes) is given as 2^SSSS and the number of buffers as 2^MMMM.
6640 // We'll check the actual value used near the end of the setup
6642 int ossBufferBytes = *bufferSize * formatBytes( stream_.deviceFormat[mode] ) * deviceChannels;
6643 if ( ossBufferBytes < 16 ) ossBufferBytes = 16;
6645 if ( options ) buffers = options->numberOfBuffers;
6646 if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) buffers = 2;
6647 if ( buffers < 2 ) buffers = 3;
6648 temp = ((int) buffers << 16) + (int)( log10( (double)ossBufferBytes ) / log10( 2.0 ) );
6649 result = ioctl( fd, SNDCTL_DSP_SETFRAGMENT, &temp );
6650 if ( result == -1 ) {
6652 errorStream_ << "RtApiOss::probeDeviceOpen: error setting buffer size on device (" << ainfo.name << ").";
6653 errorText_ = errorStream_.str();
6656 stream_.nBuffers = buffers;
6658 // Save buffer size (in sample frames).
6659 *bufferSize = ossBufferBytes / ( formatBytes(stream_.deviceFormat[mode]) * deviceChannels );
6660 stream_.bufferSize = *bufferSize;
6662 // Set the sample rate.
6663 int srate = sampleRate;
6664 result = ioctl( fd, SNDCTL_DSP_SPEED, &srate );
6665 if ( result == -1 ) {
6667 errorStream_ << "RtApiOss::probeDeviceOpen: error setting sample rate (" << sampleRate << ") on device (" << ainfo.name << ").";
6668 errorText_ = errorStream_.str();
6672 // Verify the sample rate setup worked.
// A small deviation (<= 100 Hz) from the requested rate is tolerated.
6673 if ( abs( srate - sampleRate ) > 100 ) {
6675 errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support sample rate (" << sampleRate << ").";
6676 errorText_ = errorStream_.str();
6679 stream_.sampleRate = sampleRate;
6681 if ( mode == INPUT && stream_.mode == OUTPUT && stream_.device[0] == device) {
6682 // We're doing duplex setup here.
// Mirror the input settings onto the output side since a single fd
// now serves both directions.
6683 stream_.deviceFormat[0] = stream_.deviceFormat[1];
6684 stream_.nDeviceChannels[0] = deviceChannels;
6687 // Set interleaving parameters.
6688 stream_.userInterleaved = true;
6689 stream_.deviceInterleaved[mode] = true;
6690 if ( options && options->flags & RTAUDIO_NONINTERLEAVED )
6691 stream_.userInterleaved = false;
6693 // Set flags for buffer conversion
// Conversion is needed on format, channel-count, or interleaving mismatch.
6694 stream_.doConvertBuffer[mode] = false;
6695 if ( stream_.userFormat != stream_.deviceFormat[mode] )
6696 stream_.doConvertBuffer[mode] = true;
6697 if ( stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode] )
6698 stream_.doConvertBuffer[mode] = true;
6699 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
6700 stream_.nUserChannels[mode] > 1 )
6701 stream_.doConvertBuffer[mode] = true;
6703 // Allocate the stream handles if necessary and then save.
6704 if ( stream_.apiHandle == 0 ) {
6706 handle = new OssHandle;
6708 catch ( std::bad_alloc& ) {
6709 errorText_ = "RtApiOss::probeDeviceOpen: error allocating OssHandle memory.";
6713 if ( pthread_cond_init( &handle->runnable, NULL ) ) {
6714 errorText_ = "RtApiOss::probeDeviceOpen: error initializing pthread condition variable.";
6718 stream_.apiHandle = (void *) handle;
6721 handle = (OssHandle *) stream_.apiHandle;
6723 handle->id[mode] = fd;
6725 // Allocate necessary internal buffers.
6726 unsigned long bufferBytes;
6727 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
6728 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
6729 if ( stream_.userBuffer[mode] == NULL ) {
6730 errorText_ = "RtApiOss::probeDeviceOpen: error allocating user buffer memory.";
6734 if ( stream_.doConvertBuffer[mode] ) {
6736 bool makeBuffer = true;
6737 bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
// The device buffer is shared between input and output; only reallocate
// if the existing one (from output setup) is too small.
6738 if ( mode == INPUT ) {
6739 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
6740 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
6741 if ( bufferBytes <= bytesOut ) makeBuffer = false;
6746 bufferBytes *= *bufferSize;
6747 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
6748 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
6749 if ( stream_.deviceBuffer == NULL ) {
6750 errorText_ = "RtApiOss::probeDeviceOpen: error allocating device buffer memory.";
6756 stream_.device[mode] = device;
6757 stream_.state = STREAM_STOPPED;
6759 // Setup the buffer conversion information structure.
6760 if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, firstChannel );
6762 // Setup thread if necessary.
6763 if ( stream_.mode == OUTPUT && mode == INPUT ) {
6764 // We had already set up an output stream.
6765 stream_.mode = DUPLEX;
6766 if ( stream_.device[0] == device ) handle->id[0] = fd;
6769 stream_.mode = mode;
6771 // Setup callback thread.
6772 stream_.callbackInfo.object = (void *) this;
6774 // Set the thread attributes for joinable and realtime scheduling
6775 // priority. The higher priority will only take affect if the
6776 // program is run as root or suid.
6777 pthread_attr_t attr;
6778 pthread_attr_init( &attr );
6779 pthread_attr_setdetachstate( &attr, PTHREAD_CREATE_JOINABLE );
6780 #ifdef SCHED_RR // Undefined with some OSes (eg: NetBSD 1.6.x with GNU Pthread)
6781 if ( options && options->flags & RTAUDIO_SCHEDULE_REALTIME ) {
6782 struct sched_param param;
6783 int priority = options->priority;
6784 int min = sched_get_priority_min( SCHED_RR );
6785 int max = sched_get_priority_max( SCHED_RR );
6786 if ( priority < min ) priority = min;
6787 else if ( priority > max ) priority = max;
6788 param.sched_priority = priority;
6789 pthread_attr_setschedparam( &attr, &param );
6790 pthread_attr_setschedpolicy( &attr, SCHED_RR );
6793 pthread_attr_setschedpolicy( &attr, SCHED_OTHER );
6795 pthread_attr_setschedpolicy( &attr, SCHED_OTHER );
6798 stream_.callbackInfo.isRunning = true;
6799 result = pthread_create( &stream_.callbackInfo.thread, &attr, ossCallbackHandler, &stream_.callbackInfo );
6800 pthread_attr_destroy( &attr );
6802 stream_.callbackInfo.isRunning = false;
6803 errorText_ = "RtApiOss::error creating callback thread!";
// Error cleanup path: tear down the handle, close fds, free buffers.
6812 pthread_cond_destroy( &handle->runnable );
6813 if ( handle->id[0] ) close( handle->id[0] );
6814 if ( handle->id[1] ) close( handle->id[1] );
6816 stream_.apiHandle = 0;
6819 for ( int i=0; i<2; i++ ) {
6820 if ( stream_.userBuffer[i] ) {
6821 free( stream_.userBuffer[i] );
6822 stream_.userBuffer[i] = 0;
6826 if ( stream_.deviceBuffer ) {
6827 free( stream_.deviceBuffer );
6828 stream_.deviceBuffer = 0;
// Close an open OSS stream: stop the callback thread, halt the device(s)
// if still running, close the fds, and free all stream buffers.
6834 void RtApiOss :: closeStream()
6836 if ( stream_.state == STREAM_CLOSED ) {
6837 errorText_ = "RtApiOss::closeStream(): no open stream to close!";
6838 error( RtError::WARNING );
// Clear the running flag and wake the callback thread if it is blocked
// on the condition variable, then join it.
6842 OssHandle *handle = (OssHandle *) stream_.apiHandle;
6843 stream_.callbackInfo.isRunning = false;
6844 MUTEX_LOCK( &stream_.mutex );
6845 if ( stream_.state == STREAM_STOPPED )
6846 pthread_cond_signal( &handle->runnable );
6847 MUTEX_UNLOCK( &stream_.mutex );
6848 pthread_join( stream_.callbackInfo.thread, NULL );
// If still running, halt the device(s) immediately.
6850 if ( stream_.state == STREAM_RUNNING ) {
6851 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX )
6852 ioctl( handle->id[0], SNDCTL_DSP_HALT, 0 );
6854 ioctl( handle->id[1], SNDCTL_DSP_HALT, 0 );
6855 stream_.state = STREAM_STOPPED;
// Release the handle and close the device fds (id[0] = output, id[1] = input).
6859 pthread_cond_destroy( &handle->runnable );
6860 if ( handle->id[0] ) close( handle->id[0] );
6861 if ( handle->id[1] ) close( handle->id[1] );
6863 stream_.apiHandle = 0;
// Free user and device conversion buffers.
6866 for ( int i=0; i<2; i++ ) {
6867 if ( stream_.userBuffer[i] ) {
6868 free( stream_.userBuffer[i] );
6869 stream_.userBuffer[i] = 0;
6873 if ( stream_.deviceBuffer ) {
6874 free( stream_.deviceBuffer );
6875 stream_.deviceBuffer = 0;
6878 stream_.mode = UNINITIALIZED;
6879 stream_.state = STREAM_CLOSED;
// Start a stopped OSS stream. OSS devices begin playing/capturing as
// soon as they are fed/read, so this only flips the state and wakes
// the callback thread.
6882 void RtApiOss :: startStream()
6885 if ( stream_.state == STREAM_RUNNING ) {
6886 errorText_ = "RtApiOss::startStream(): the stream is already running!";
6887 error( RtError::WARNING );
6891 MUTEX_LOCK( &stream_.mutex );
6893 stream_.state = STREAM_RUNNING;
6895 // No need to do anything else here ... OSS automatically starts
6896 // when fed samples.
6898 MUTEX_UNLOCK( &stream_.mutex );
// Wake the callback thread waiting on the condition variable.
6900 OssHandle *handle = (OssHandle *) stream_.apiHandle;
6901 pthread_cond_signal( &handle->runnable );
// Stop a running OSS stream: flush the output with silence so queued
// samples finish playing, then halt both devices.
6904 void RtApiOss :: stopStream()
6907 if ( stream_.state == STREAM_STOPPED ) {
6908 errorText_ = "RtApiOss::stopStream(): the stream is already stopped!";
6909 error( RtError::WARNING );
6913 MUTEX_LOCK( &stream_.mutex );
6915 // The state might change while waiting on a mutex.
6916 if ( stream_.state == STREAM_STOPPED ) {
6917 MUTEX_UNLOCK( &stream_.mutex );
6922 OssHandle *handle = (OssHandle *) stream_.apiHandle;
6923 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
6925 // Flush the output with zeros a few times.
// Use whichever buffer (device or user) matches the format the device
// is actually being fed.
6928 RtAudioFormat format;
6930 if ( stream_.doConvertBuffer[0] ) {
6931 buffer = stream_.deviceBuffer;
6932 samples = stream_.bufferSize * stream_.nDeviceChannels[0];
6933 format = stream_.deviceFormat[0];
6936 buffer = stream_.userBuffer[0];
6937 samples = stream_.bufferSize * stream_.nUserChannels[0];
6938 format = stream_.userFormat;
// Write nBuffers+1 buffers of silence to push out any pending audio.
6941 memset( buffer, 0, samples * formatBytes(format) );
6942 for ( unsigned int i=0; i<stream_.nBuffers+1; i++ ) {
6943 result = write( handle->id[0], buffer, samples * formatBytes(format) );
6944 if ( result == -1 ) {
6945 errorText_ = "RtApiOss::stopStream: audio write error.";
6946 error( RtError::WARNING );
6950 result = ioctl( handle->id[0], SNDCTL_DSP_HALT, 0 );
6951 if ( result == -1 ) {
6952 errorStream_ << "RtApiOss::stopStream: system error stopping callback procedure on device (" << stream_.device[0] << ").";
6953 errorText_ = errorStream_.str();
6956 handle->triggered = false;
// Halt the input fd too, unless duplex shares a single fd already halted.
6959 if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && handle->id[0] != handle->id[1] ) ) {
6960 result = ioctl( handle->id[1], SNDCTL_DSP_HALT, 0 );
6961 if ( result == -1 ) {
6962 errorStream_ << "RtApiOss::stopStream: system error stopping input callback procedure on device (" << stream_.device[0] << ").";
6963 errorText_ = errorStream_.str();
6969 stream_.state = STREAM_STOPPED;
6970 MUTEX_UNLOCK( &stream_.mutex );
6972 if ( result != -1 ) return;
6973 error( RtError::SYSTEM_ERROR );
// Abort a running OSS stream immediately: halt the device(s) without
// flushing pending output (unlike stopStream()).
6976 void RtApiOss :: abortStream()
6979 if ( stream_.state == STREAM_STOPPED ) {
6980 errorText_ = "RtApiOss::abortStream(): the stream is already stopped!";
6981 error( RtError::WARNING );
6985 MUTEX_LOCK( &stream_.mutex );
6987 // The state might change while waiting on a mutex.
6988 if ( stream_.state == STREAM_STOPPED ) {
6989 MUTEX_UNLOCK( &stream_.mutex );
6994 OssHandle *handle = (OssHandle *) stream_.apiHandle;
6995 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
6996 result = ioctl( handle->id[0], SNDCTL_DSP_HALT, 0 );
6997 if ( result == -1 ) {
6998 errorStream_ << "RtApiOss::abortStream: system error stopping callback procedure on device (" << stream_.device[0] << ").";
6999 errorText_ = errorStream_.str();
7002 handle->triggered = false;
// Halt the input fd too, unless duplex shares a single fd already halted.
7005 if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && handle->id[0] != handle->id[1] ) ) {
7006 result = ioctl( handle->id[1], SNDCTL_DSP_HALT, 0 );
7007 if ( result == -1 ) {
7008 errorStream_ << "RtApiOss::abortStream: system error stopping input callback procedure on device (" << stream_.device[0] << ").";
7009 errorText_ = errorStream_.str();
7015 stream_.state = STREAM_STOPPED;
7016 MUTEX_UNLOCK( &stream_.mutex );
7018 if ( result != -1 ) return;
7019 error( RtError::SYSTEM_ERROR );
7022 void RtApiOss :: callbackEvent()
7024 OssHandle *handle = (OssHandle *) stream_.apiHandle;
7025 if ( stream_.state == STREAM_STOPPED ) {
7026 MUTEX_LOCK( &stream_.mutex );
7027 pthread_cond_wait( &handle->runnable, &stream_.mutex );
7028 if ( stream_.state != STREAM_RUNNING ) {
7029 MUTEX_UNLOCK( &stream_.mutex );
7032 MUTEX_UNLOCK( &stream_.mutex );
7035 if ( stream_.state == STREAM_CLOSED ) {
7036 errorText_ = "RtApiOss::callbackEvent(): the stream is closed ... this shouldn't happen!";
7037 error( RtError::WARNING );
7041 // Invoke user callback to get fresh output data.
7042 int doStopStream = 0;
7043 RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;
7044 double streamTime = getStreamTime();
7045 RtAudioStreamStatus status = 0;
7046 if ( stream_.mode != INPUT && handle->xrun[0] == true ) {
7047 status |= RTAUDIO_OUTPUT_UNDERFLOW;
7048 handle->xrun[0] = false;
7050 if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {
7051 status |= RTAUDIO_INPUT_OVERFLOW;
7052 handle->xrun[1] = false;
7054 doStopStream = callback( stream_.userBuffer[0], stream_.userBuffer[1],
7055 stream_.bufferSize, streamTime, status, stream_.callbackInfo.userData );
7056 if ( doStopStream == 2 ) {
7057 this->abortStream();
7061 MUTEX_LOCK( &stream_.mutex );
7063 // The state might change while waiting on a mutex.
7064 if ( stream_.state == STREAM_STOPPED ) goto unlock;
7069 RtAudioFormat format;
7071 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
7073 // Setup parameters and do buffer conversion if necessary.
7074 if ( stream_.doConvertBuffer[0] ) {
7075 buffer = stream_.deviceBuffer;
7076 convertBuffer( buffer, stream_.userBuffer[0], stream_.convertInfo[0] );
7077 samples = stream_.bufferSize * stream_.nDeviceChannels[0];
7078 format = stream_.deviceFormat[0];
7081 buffer = stream_.userBuffer[0];
7082 samples = stream_.bufferSize * stream_.nUserChannels[0];
7083 format = stream_.userFormat;
7086 // Do byte swapping if necessary.
7087 if ( stream_.doByteSwap[0] )
7088 byteSwapBuffer( buffer, samples, format );
7090 if ( stream_.mode == DUPLEX && handle->triggered == false ) {
7092 ioctl( handle->id[0], SNDCTL_DSP_SETTRIGGER, &trig );
7093 result = write( handle->id[0], buffer, samples * formatBytes(format) );
7094 trig = PCM_ENABLE_INPUT|PCM_ENABLE_OUTPUT;
7095 ioctl( handle->id[0], SNDCTL_DSP_SETTRIGGER, &trig );
7096 handle->triggered = true;
7099 // Write samples to device.
7100 result = write( handle->id[0], buffer, samples * formatBytes(format) );
7102 if ( result == -1 ) {
7103 // We'll assume this is an underrun, though there isn't a
7104 // specific means for determining that.
7105 handle->xrun[0] = true;
7106 errorText_ = "RtApiOss::callbackEvent: audio write error.";
7107 error( RtError::WARNING );
7108 // Continue on to input section.
7112 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
7114 // Setup parameters.
7115 if ( stream_.doConvertBuffer[1] ) {
7116 buffer = stream_.deviceBuffer;
7117 samples = stream_.bufferSize * stream_.nDeviceChannels[1];
7118 format = stream_.deviceFormat[1];
7121 buffer = stream_.userBuffer[1];
7122 samples = stream_.bufferSize * stream_.nUserChannels[1];
7123 format = stream_.userFormat;
7126 // Read samples from device.
7127 result = read( handle->id[1], buffer, samples * formatBytes(format) );
7129 if ( result == -1 ) {
7130 // We'll assume this is an overrun, though there isn't a
7131 // specific means for determining that.
7132 handle->xrun[1] = true;
7133 errorText_ = "RtApiOss::callbackEvent: audio read error.";
7134 error( RtError::WARNING );
7138 // Do byte swapping if necessary.
7139 if ( stream_.doByteSwap[1] )
7140 byteSwapBuffer( buffer, samples, format );
7142 // Do buffer conversion if necessary.
7143 if ( stream_.doConvertBuffer[1] )
7144 convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
7148 MUTEX_UNLOCK( &stream_.mutex );
7150 RtApi::tickStreamTime();
7151 if ( doStopStream == 1 ) this->stopStream();
7154 extern "C" void *ossCallbackHandler( void *ptr )
7156 CallbackInfo *info = (CallbackInfo *) ptr;
7157 RtApiOss *object = (RtApiOss *) info->object;
7158 bool *isRunning = &info->isRunning;
7160 while ( *isRunning == true ) {
7161 pthread_testcancel();
7162 object->callbackEvent();
7165 pthread_exit( NULL );
7168 //******************** End of __LINUX_OSS__ *********************//
7172 // *************************************************** //
7174 // Protected common (OS-independent) RtAudio methods.
7176 // *************************************************** //
7178 // This method can be modified to control the behavior of error
7179 // message printing.
7180 void RtApi :: error( RtError::Type type )
7182 errorStream_.str(""); // clear the ostringstream
7183 if ( type == RtError::WARNING && showWarnings_ == true )
7184 std::cerr << '\n' << errorText_ << "\n\n";
7186 throw( RtError( errorText_, type ) );
7189 void RtApi :: verifyStream()
7191 if ( stream_.state == STREAM_CLOSED ) {
7192 errorText_ = "RtApi:: a stream is not open!";
7193 error( RtError::INVALID_USE );
7197 void RtApi :: clearStreamInfo()
7199 stream_.mode = UNINITIALIZED;
7200 stream_.state = STREAM_CLOSED;
7201 stream_.sampleRate = 0;
7202 stream_.bufferSize = 0;
7203 stream_.nBuffers = 0;
7204 stream_.userFormat = 0;
7205 stream_.userInterleaved = true;
7206 stream_.streamTime = 0.0;
7207 stream_.apiHandle = 0;
7208 stream_.deviceBuffer = 0;
7209 stream_.callbackInfo.callback = 0;
7210 stream_.callbackInfo.userData = 0;
7211 stream_.callbackInfo.isRunning = false;
7212 for ( int i=0; i<2; i++ ) {
7213 stream_.device[i] = 11111;
7214 stream_.doConvertBuffer[i] = false;
7215 stream_.deviceInterleaved[i] = true;
7216 stream_.doByteSwap[i] = false;
7217 stream_.nUserChannels[i] = 0;
7218 stream_.nDeviceChannels[i] = 0;
7219 stream_.channelOffset[i] = 0;
7220 stream_.deviceFormat[i] = 0;
7221 stream_.latency[i] = 0;
7222 stream_.userBuffer[i] = 0;
7223 stream_.convertInfo[i].channels = 0;
7224 stream_.convertInfo[i].inJump = 0;
7225 stream_.convertInfo[i].outJump = 0;
7226 stream_.convertInfo[i].inFormat = 0;
7227 stream_.convertInfo[i].outFormat = 0;
7228 stream_.convertInfo[i].inOffset.clear();
7229 stream_.convertInfo[i].outOffset.clear();
7233 unsigned int RtApi :: formatBytes( RtAudioFormat format )
7235 if ( format == RTAUDIO_SINT16 )
7237 else if ( format == RTAUDIO_SINT24 || format == RTAUDIO_SINT32 ||
7238 format == RTAUDIO_FLOAT32 )
7240 else if ( format == RTAUDIO_FLOAT64 )
7242 else if ( format == RTAUDIO_SINT8 )
7245 errorText_ = "RtApi::formatBytes: undefined format.";
7246 error( RtError::WARNING );
7251 void RtApi :: setConvertInfo( StreamMode mode, unsigned int firstChannel )
7253 if ( mode == INPUT ) { // convert device to user buffer
7254 stream_.convertInfo[mode].inJump = stream_.nDeviceChannels[1];
7255 stream_.convertInfo[mode].outJump = stream_.nUserChannels[1];
7256 stream_.convertInfo[mode].inFormat = stream_.deviceFormat[1];
7257 stream_.convertInfo[mode].outFormat = stream_.userFormat;
7259 else { // convert user to device buffer
7260 stream_.convertInfo[mode].inJump = stream_.nUserChannels[0];
7261 stream_.convertInfo[mode].outJump = stream_.nDeviceChannels[0];
7262 stream_.convertInfo[mode].inFormat = stream_.userFormat;
7263 stream_.convertInfo[mode].outFormat = stream_.deviceFormat[0];
7266 if ( stream_.convertInfo[mode].inJump < stream_.convertInfo[mode].outJump )
7267 stream_.convertInfo[mode].channels = stream_.convertInfo[mode].inJump;
7269 stream_.convertInfo[mode].channels = stream_.convertInfo[mode].outJump;
7271 // Set up the interleave/deinterleave offsets.
7272 if ( stream_.deviceInterleaved[mode] != stream_.userInterleaved ) {
7273 if ( ( mode == OUTPUT && stream_.deviceInterleaved[mode] ) ||
7274 ( mode == INPUT && stream_.userInterleaved ) ) {
7275 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {
7276 stream_.convertInfo[mode].inOffset.push_back( k * stream_.bufferSize );
7277 stream_.convertInfo[mode].outOffset.push_back( k );
7278 stream_.convertInfo[mode].inJump = 1;
7282 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {
7283 stream_.convertInfo[mode].inOffset.push_back( k );
7284 stream_.convertInfo[mode].outOffset.push_back( k * stream_.bufferSize );
7285 stream_.convertInfo[mode].outJump = 1;
7289 else { // no (de)interleaving
7290 if ( stream_.userInterleaved ) {
7291 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {
7292 stream_.convertInfo[mode].inOffset.push_back( k );
7293 stream_.convertInfo[mode].outOffset.push_back( k );
7297 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {
7298 stream_.convertInfo[mode].inOffset.push_back( k * stream_.bufferSize );
7299 stream_.convertInfo[mode].outOffset.push_back( k * stream_.bufferSize );
7300 stream_.convertInfo[mode].inJump = 1;
7301 stream_.convertInfo[mode].outJump = 1;
7306 // Add channel offset.
7307 if ( firstChannel > 0 ) {
7308 if ( stream_.deviceInterleaved[mode] ) {
7309 if ( mode == OUTPUT ) {
7310 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
7311 stream_.convertInfo[mode].outOffset[k] += firstChannel;
7314 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
7315 stream_.convertInfo[mode].inOffset[k] += firstChannel;
7319 if ( mode == OUTPUT ) {
7320 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
7321 stream_.convertInfo[mode].outOffset[k] += ( firstChannel * stream_.bufferSize );
7324 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
7325 stream_.convertInfo[mode].inOffset[k] += ( firstChannel * stream_.bufferSize );
7331 void RtApi :: convertBuffer( char *outBuffer, char *inBuffer, ConvertInfo &info )
7333 // This function does format conversion, input/output channel compensation, and
7334 // data interleaving/deinterleaving. 24-bit integers are assumed to occupy
7335 // the upper three bytes of a 32-bit integer.
7337 // Clear our device buffer when in/out duplex device channels are different
7338 if ( outBuffer == stream_.deviceBuffer && stream_.mode == DUPLEX &&
7339 ( stream_.nDeviceChannels[0] < stream_.nDeviceChannels[1] ) )
7340 memset( outBuffer, 0, stream_.bufferSize * info.outJump * formatBytes( info.outFormat ) );
7343 if (info.outFormat == RTAUDIO_FLOAT64) {
7345 Float64 *out = (Float64 *)outBuffer;
7347 if (info.inFormat == RTAUDIO_SINT8) {
7348 signed char *in = (signed char *)inBuffer;
7349 scale = 1.0 / 127.5;
7350 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7351 for (j=0; j<info.channels; j++) {
7352 out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
7353 out[info.outOffset[j]] += 0.5;
7354 out[info.outOffset[j]] *= scale;
7357 out += info.outJump;
7360 else if (info.inFormat == RTAUDIO_SINT16) {
7361 Int16 *in = (Int16 *)inBuffer;
7362 scale = 1.0 / 32767.5;
7363 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7364 for (j=0; j<info.channels; j++) {
7365 out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
7366 out[info.outOffset[j]] += 0.5;
7367 out[info.outOffset[j]] *= scale;
7370 out += info.outJump;
7373 else if (info.inFormat == RTAUDIO_SINT24) {
7374 Int32 *in = (Int32 *)inBuffer;
7375 scale = 1.0 / 8388607.5;
7376 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7377 for (j=0; j<info.channels; j++) {
7378 out[info.outOffset[j]] = (Float64) (in[info.inOffset[j]] & 0x00ffffff);
7379 out[info.outOffset[j]] += 0.5;
7380 out[info.outOffset[j]] *= scale;
7383 out += info.outJump;
7386 else if (info.inFormat == RTAUDIO_SINT32) {
7387 Int32 *in = (Int32 *)inBuffer;
7388 scale = 1.0 / 2147483647.5;
7389 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7390 for (j=0; j<info.channels; j++) {
7391 out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
7392 out[info.outOffset[j]] += 0.5;
7393 out[info.outOffset[j]] *= scale;
7396 out += info.outJump;
7399 else if (info.inFormat == RTAUDIO_FLOAT32) {
7400 Float32 *in = (Float32 *)inBuffer;
7401 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7402 for (j=0; j<info.channels; j++) {
7403 out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
7406 out += info.outJump;
7409 else if (info.inFormat == RTAUDIO_FLOAT64) {
7410 // Channel compensation and/or (de)interleaving only.
7411 Float64 *in = (Float64 *)inBuffer;
7412 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7413 for (j=0; j<info.channels; j++) {
7414 out[info.outOffset[j]] = in[info.inOffset[j]];
7417 out += info.outJump;
7421 else if (info.outFormat == RTAUDIO_FLOAT32) {
7423 Float32 *out = (Float32 *)outBuffer;
7425 if (info.inFormat == RTAUDIO_SINT8) {
7426 signed char *in = (signed char *)inBuffer;
7427 scale = (Float32) ( 1.0 / 127.5 );
7428 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7429 for (j=0; j<info.channels; j++) {
7430 out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
7431 out[info.outOffset[j]] += 0.5;
7432 out[info.outOffset[j]] *= scale;
7435 out += info.outJump;
7438 else if (info.inFormat == RTAUDIO_SINT16) {
7439 Int16 *in = (Int16 *)inBuffer;
7440 scale = (Float32) ( 1.0 / 32767.5 );
7441 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7442 for (j=0; j<info.channels; j++) {
7443 out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
7444 out[info.outOffset[j]] += 0.5;
7445 out[info.outOffset[j]] *= scale;
7448 out += info.outJump;
7451 else if (info.inFormat == RTAUDIO_SINT24) {
7452 Int32 *in = (Int32 *)inBuffer;
7453 scale = (Float32) ( 1.0 / 8388607.5 );
7454 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7455 for (j=0; j<info.channels; j++) {
7456 out[info.outOffset[j]] = (Float32) (in[info.inOffset[j]] & 0x00ffffff);
7457 out[info.outOffset[j]] += 0.5;
7458 out[info.outOffset[j]] *= scale;
7461 out += info.outJump;
7464 else if (info.inFormat == RTAUDIO_SINT32) {
7465 Int32 *in = (Int32 *)inBuffer;
7466 scale = (Float32) ( 1.0 / 2147483647.5 );
7467 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7468 for (j=0; j<info.channels; j++) {
7469 out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
7470 out[info.outOffset[j]] += 0.5;
7471 out[info.outOffset[j]] *= scale;
7474 out += info.outJump;
7477 else if (info.inFormat == RTAUDIO_FLOAT32) {
7478 // Channel compensation and/or (de)interleaving only.
7479 Float32 *in = (Float32 *)inBuffer;
7480 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7481 for (j=0; j<info.channels; j++) {
7482 out[info.outOffset[j]] = in[info.inOffset[j]];
7485 out += info.outJump;
7488 else if (info.inFormat == RTAUDIO_FLOAT64) {
7489 Float64 *in = (Float64 *)inBuffer;
7490 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7491 for (j=0; j<info.channels; j++) {
7492 out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
7495 out += info.outJump;
7499 else if (info.outFormat == RTAUDIO_SINT32) {
7500 Int32 *out = (Int32 *)outBuffer;
7501 if (info.inFormat == RTAUDIO_SINT8) {
7502 signed char *in = (signed char *)inBuffer;
7503 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7504 for (j=0; j<info.channels; j++) {
7505 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7506 out[info.outOffset[j]] <<= 24;
7509 out += info.outJump;
7512 else if (info.inFormat == RTAUDIO_SINT16) {
7513 Int16 *in = (Int16 *)inBuffer;
7514 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7515 for (j=0; j<info.channels; j++) {
7516 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7517 out[info.outOffset[j]] <<= 16;
7520 out += info.outJump;
7523 else if (info.inFormat == RTAUDIO_SINT24) {
7524 Int32 *in = (Int32 *)inBuffer;
7525 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7526 for (j=0; j<info.channels; j++) {
7527 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7528 out[info.outOffset[j]] <<= 8;
7531 out += info.outJump;
7534 else if (info.inFormat == RTAUDIO_SINT32) {
7535 // Channel compensation and/or (de)interleaving only.
7536 Int32 *in = (Int32 *)inBuffer;
7537 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7538 for (j=0; j<info.channels; j++) {
7539 out[info.outOffset[j]] = in[info.inOffset[j]];
7542 out += info.outJump;
7545 else if (info.inFormat == RTAUDIO_FLOAT32) {
7546 Float32 *in = (Float32 *)inBuffer;
7547 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7548 for (j=0; j<info.channels; j++) {
7549 out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 2147483647.5 - 0.5);
7552 out += info.outJump;
7555 else if (info.inFormat == RTAUDIO_FLOAT64) {
7556 Float64 *in = (Float64 *)inBuffer;
7557 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7558 for (j=0; j<info.channels; j++) {
7559 out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 2147483647.5 - 0.5);
7562 out += info.outJump;
7566 else if (info.outFormat == RTAUDIO_SINT24) {
7567 Int32 *out = (Int32 *)outBuffer;
7568 if (info.inFormat == RTAUDIO_SINT8) {
7569 signed char *in = (signed char *)inBuffer;
7570 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7571 for (j=0; j<info.channels; j++) {
7572 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7573 out[info.outOffset[j]] <<= 16;
7576 out += info.outJump;
7579 else if (info.inFormat == RTAUDIO_SINT16) {
7580 Int16 *in = (Int16 *)inBuffer;
7581 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7582 for (j=0; j<info.channels; j++) {
7583 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7584 out[info.outOffset[j]] <<= 8;
7587 out += info.outJump;
7590 else if (info.inFormat == RTAUDIO_SINT24) {
7591 // Channel compensation and/or (de)interleaving only.
7592 Int32 *in = (Int32 *)inBuffer;
7593 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7594 for (j=0; j<info.channels; j++) {
7595 out[info.outOffset[j]] = in[info.inOffset[j]];
7598 out += info.outJump;
7601 else if (info.inFormat == RTAUDIO_SINT32) {
7602 Int32 *in = (Int32 *)inBuffer;
7603 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7604 for (j=0; j<info.channels; j++) {
7605 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7606 out[info.outOffset[j]] >>= 8;
7609 out += info.outJump;
7612 else if (info.inFormat == RTAUDIO_FLOAT32) {
7613 Float32 *in = (Float32 *)inBuffer;
7614 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7615 for (j=0; j<info.channels; j++) {
7616 out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 8388607.5 - 0.5);
7619 out += info.outJump;
7622 else if (info.inFormat == RTAUDIO_FLOAT64) {
7623 Float64 *in = (Float64 *)inBuffer;
7624 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7625 for (j=0; j<info.channels; j++) {
7626 out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 8388607.5 - 0.5);
7629 out += info.outJump;
7633 else if (info.outFormat == RTAUDIO_SINT16) {
7634 Int16 *out = (Int16 *)outBuffer;
7635 if (info.inFormat == RTAUDIO_SINT8) {
7636 signed char *in = (signed char *)inBuffer;
7637 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7638 for (j=0; j<info.channels; j++) {
7639 out[info.outOffset[j]] = (Int16) in[info.inOffset[j]];
7640 out[info.outOffset[j]] <<= 8;
7643 out += info.outJump;
7646 else if (info.inFormat == RTAUDIO_SINT16) {
7647 // Channel compensation and/or (de)interleaving only.
7648 Int16 *in = (Int16 *)inBuffer;
7649 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7650 for (j=0; j<info.channels; j++) {
7651 out[info.outOffset[j]] = in[info.inOffset[j]];
7654 out += info.outJump;
7657 else if (info.inFormat == RTAUDIO_SINT24) {
7658 Int32 *in = (Int32 *)inBuffer;
7659 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7660 for (j=0; j<info.channels; j++) {
7661 out[info.outOffset[j]] = (Int16) ((in[info.inOffset[j]] >> 8) & 0x0000ffff);
7664 out += info.outJump;
7667 else if (info.inFormat == RTAUDIO_SINT32) {
7668 Int32 *in = (Int32 *)inBuffer;
7669 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7670 for (j=0; j<info.channels; j++) {
7671 out[info.outOffset[j]] = (Int16) ((in[info.inOffset[j]] >> 16) & 0x0000ffff);
7674 out += info.outJump;
7677 else if (info.inFormat == RTAUDIO_FLOAT32) {
7678 Float32 *in = (Float32 *)inBuffer;
7679 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7680 for (j=0; j<info.channels; j++) {
7681 out[info.outOffset[j]] = (Int16) (in[info.inOffset[j]] * 32767.5 - 0.5);
7684 out += info.outJump;
7687 else if (info.inFormat == RTAUDIO_FLOAT64) {
7688 Float64 *in = (Float64 *)inBuffer;
7689 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7690 for (j=0; j<info.channels; j++) {
7691 out[info.outOffset[j]] = (Int16) (in[info.inOffset[j]] * 32767.5 - 0.5);
7694 out += info.outJump;
7698 else if (info.outFormat == RTAUDIO_SINT8) {
7699 signed char *out = (signed char *)outBuffer;
7700 if (info.inFormat == RTAUDIO_SINT8) {
7701 // Channel compensation and/or (de)interleaving only.
7702 signed char *in = (signed char *)inBuffer;
7703 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7704 for (j=0; j<info.channels; j++) {
7705 out[info.outOffset[j]] = in[info.inOffset[j]];
7708 out += info.outJump;
7711 if (info.inFormat == RTAUDIO_SINT16) {
7712 Int16 *in = (Int16 *)inBuffer;
7713 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7714 for (j=0; j<info.channels; j++) {
7715 out[info.outOffset[j]] = (signed char) ((in[info.inOffset[j]] >> 8) & 0x00ff);
7718 out += info.outJump;
7721 else if (info.inFormat == RTAUDIO_SINT24) {
7722 Int32 *in = (Int32 *)inBuffer;
7723 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7724 for (j=0; j<info.channels; j++) {
7725 out[info.outOffset[j]] = (signed char) ((in[info.inOffset[j]] >> 16) & 0x000000ff);
7728 out += info.outJump;
7731 else if (info.inFormat == RTAUDIO_SINT32) {
7732 Int32 *in = (Int32 *)inBuffer;
7733 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7734 for (j=0; j<info.channels; j++) {
7735 out[info.outOffset[j]] = (signed char) ((in[info.inOffset[j]] >> 24) & 0x000000ff);
7738 out += info.outJump;
7741 else if (info.inFormat == RTAUDIO_FLOAT32) {
7742 Float32 *in = (Float32 *)inBuffer;
7743 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7744 for (j=0; j<info.channels; j++) {
7745 out[info.outOffset[j]] = (signed char) (in[info.inOffset[j]] * 127.5 - 0.5);
7748 out += info.outJump;
7751 else if (info.inFormat == RTAUDIO_FLOAT64) {
7752 Float64 *in = (Float64 *)inBuffer;
7753 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7754 for (j=0; j<info.channels; j++) {
7755 out[info.outOffset[j]] = (signed char) (in[info.inOffset[j]] * 127.5 - 0.5);
7758 out += info.outJump;
7764 //static inline uint16_t bswap_16(uint16_t x) { return (x>>8) | (x<<8); }
7765 //static inline uint32_t bswap_32(uint32_t x) { return (bswap_16(x&0xffff)<<16) | (bswap_16(x>>16)); }
7766 //static inline uint64_t bswap_64(uint64_t x) { return (((unsigned long long)bswap_32(x&0xffffffffull))<<32) | (bswap_32(x>>32)); }
7768 void RtApi :: byteSwapBuffer( char *buffer, unsigned int samples, RtAudioFormat format )
7774 if ( format == RTAUDIO_SINT16 ) {
7775 for ( unsigned int i=0; i<samples; i++ ) {
7776 // Swap 1st and 2nd bytes.
7781 // Increment 2 bytes.
7785 else if ( format == RTAUDIO_SINT24 ||
7786 format == RTAUDIO_SINT32 ||
7787 format == RTAUDIO_FLOAT32 ) {
7788 for ( unsigned int i=0; i<samples; i++ ) {
7789 // Swap 1st and 4th bytes.
7794 // Swap 2nd and 3rd bytes.
7800 // Increment 3 more bytes.
7804 else if ( format == RTAUDIO_FLOAT64 ) {
7805 for ( unsigned int i=0; i<samples; i++ ) {
7806 // Swap 1st and 8th bytes
7811 // Swap 2nd and 7th bytes
7817 // Swap 3rd and 6th bytes
7823 // Swap 4th and 5th bytes
7829 // Increment 5 more bytes.
7835 // Indentation settings for Vim and Emacs
7838 // c-basic-offset: 2
7839 // indent-tabs-mode: nil
7842 // vim: et sts=2 sw=2