1 /************************************************************************/
3 \brief Realtime audio i/o C++ classes.
5 RtAudio provides a common API (Application Programming Interface)
6 for realtime audio input/output across Linux (native ALSA, Jack,
7 and OSS), SGI, Macintosh OS X (CoreAudio and Jack), and Windows
8 (DirectSound and ASIO) operating systems.
10 RtAudio WWW site: http://www.music.mcgill.ca/~gary/rtaudio/
12 RtAudio: realtime audio i/o C++ classes
13 Copyright (c) 2001-2007 Gary P. Scavone
15 Permission is hereby granted, free of charge, to any person
16 obtaining a copy of this software and associated documentation files
17 (the "Software"), to deal in the Software without restriction,
18 including without limitation the rights to use, copy, modify, merge,
19 publish, distribute, sublicense, and/or sell copies of the Software,
20 and to permit persons to whom the Software is furnished to do so,
21 subject to the following conditions:
23 The above copyright notice and this permission notice shall be
24 included in all copies or substantial portions of the Software.
26 Any person wishing to distribute modifications to the Software is
27 asked to send the modifications to the original developer so that
28 they can be incorporated into the canonical version. This is,
29 however, not a binding provision of this license.
31 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
32 EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
33 MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
34 IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
35 ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
36 CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
37 WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
39 /************************************************************************/
41 // RtAudio: Version 4.0.4pre
46 // Static variable definitions.
47 const unsigned int RtApi::MAX_SAMPLE_RATES = 14;
48 const unsigned int RtApi::SAMPLE_RATES[] = {
49 4000, 5512, 8000, 9600, 11025, 16000, 22050,
50 32000, 44100, 48000, 88200, 96000, 176400, 192000
// Platform-dependent mutex wrappers: Windows critical sections,
// pthread mutexes on unix-like APIs, and no-op dummies otherwise.
#if defined(__WINDOWS_DS__) || defined(__WINDOWS_ASIO__)
  #define MUTEX_INITIALIZE(A) InitializeCriticalSection(A)
  #define MUTEX_DESTROY(A)    DeleteCriticalSection(A)
  #define MUTEX_LOCK(A)       EnterCriticalSection(A)
  #define MUTEX_UNLOCK(A)     LeaveCriticalSection(A)
#elif defined(__LINUX_ALSA__) || defined(__UNIX_JACK__) || defined(__LINUX_OSS__) || defined(__MACOSX_CORE__)
  // pthread API
  #define MUTEX_INITIALIZE(A) pthread_mutex_init(A, NULL)
  #define MUTEX_DESTROY(A)    pthread_mutex_destroy(A)
  #define MUTEX_LOCK(A)       pthread_mutex_lock(A)
  #define MUTEX_UNLOCK(A)     pthread_mutex_unlock(A)
#else
  #define MUTEX_INITIALIZE(A) abs(*A) // dummy definitions
  #define MUTEX_DESTROY(A)    abs(*A) // dummy definitions
#endif
69 // *************************************************** //
71 // RtAudio definitions.
73 // *************************************************** //
75 void RtAudio :: getCompiledApi( std::vector<RtAudio::Api> &apis ) throw()
79 // The order here will control the order of RtAudio's API search in
81 #if defined(__UNIX_JACK__)
82 apis.push_back( UNIX_JACK );
84 #if defined(__LINUX_ALSA__)
85 apis.push_back( LINUX_ALSA );
87 #if defined(__LINUX_OSS__)
88 apis.push_back( LINUX_OSS );
90 #if defined(__WINDOWS_ASIO__)
91 apis.push_back( WINDOWS_ASIO );
93 #if defined(__WINDOWS_DS__)
94 apis.push_back( WINDOWS_DS );
96 #if defined(__MACOSX_CORE__)
97 apis.push_back( MACOSX_CORE );
99 #if defined(__RTAUDIO_DUMMY__)
100 apis.push_back( RTAUDIO_DUMMY );
104 void RtAudio :: openRtApi( RtAudio::Api api )
106 #if defined(__UNIX_JACK__)
107 if ( api == UNIX_JACK )
108 rtapi_ = new RtApiJack();
110 #if defined(__LINUX_ALSA__)
111 if ( api == LINUX_ALSA )
112 rtapi_ = new RtApiAlsa();
114 #if defined(__LINUX_OSS__)
115 if ( api == LINUX_OSS )
116 rtapi_ = new RtApiOss();
118 #if defined(__WINDOWS_ASIO__)
119 if ( api == WINDOWS_ASIO )
120 rtapi_ = new RtApiAsio();
122 #if defined(__WINDOWS_DS__)
123 if ( api == WINDOWS_DS )
124 rtapi_ = new RtApiDs();
126 #if defined(__MACOSX_CORE__)
127 if ( api == MACOSX_CORE )
128 rtapi_ = new RtApiCore();
130 #if defined(__RTAUDIO_DUMMY__)
131 if ( api == RTAUDIO_DUMMY )
132 rtapi_ = new RtApiDummy();
136 RtAudio :: RtAudio( RtAudio::Api api ) throw()
140 if ( api != UNSPECIFIED ) {
141 // Attempt to open the specified API.
143 if ( rtapi_ ) return;
145 // No compiled support for specified API value. Issue a debug
146 // warning and continue as if no API was specified.
147 std::cerr << "\nRtAudio: no compiled support for specified API argument!\n" << std::endl;
150 // Iterate through the compiled APIs and return as soon as we find
151 // one with at least one device or we reach the end of the list.
152 std::vector< RtAudio::Api > apis;
153 getCompiledApi( apis );
154 for ( unsigned int i=0; i<apis.size(); i++ ) {
155 openRtApi( apis[i] );
156 if ( rtapi_->getDeviceCount() ) break;
159 if ( rtapi_ ) return;
161 // It should not be possible to get here because the preprocessor
162 // definition __RTAUDIO_DUMMY__ is automatically defined if no
163 // API-specific definitions are passed to the compiler. But just in
164 // case something weird happens, we'll print out an error message.
165 std::cerr << "\nRtAudio: no compiled API support found ... critical error!!\n\n";
168 RtAudio :: ~RtAudio() throw()
173 void RtAudio :: openStream( RtAudio::StreamParameters *outputParameters,
174 RtAudio::StreamParameters *inputParameters,
175 RtAudioFormat format, unsigned int sampleRate,
176 unsigned int *bufferFrames,
177 RtAudioCallback callback, void *userData,
178 RtAudio::StreamOptions *options )
180 return rtapi_->openStream( outputParameters, inputParameters, format,
181 sampleRate, bufferFrames, callback,
185 // *************************************************** //
187 // Public RtApi definitions (see end of file for
188 // private or protected utility functions).
190 // *************************************************** //
194 stream_.state = STREAM_CLOSED;
195 stream_.mode = UNINITIALIZED;
196 stream_.apiHandle = 0;
197 stream_.userBuffer[0] = 0;
198 stream_.userBuffer[1] = 0;
199 MUTEX_INITIALIZE( &stream_.mutex );
200 showWarnings_ = true;
205 MUTEX_DESTROY( &stream_.mutex );
208 void RtApi :: openStream( RtAudio::StreamParameters *oParams,
209 RtAudio::StreamParameters *iParams,
210 RtAudioFormat format, unsigned int sampleRate,
211 unsigned int *bufferFrames,
212 RtAudioCallback callback, void *userData,
213 RtAudio::StreamOptions *options )
215 if ( stream_.state != STREAM_CLOSED ) {
216 errorText_ = "RtApi::openStream: a stream is already open!";
217 error( RtError::INVALID_USE );
220 if ( oParams && oParams->nChannels < 1 ) {
221 errorText_ = "RtApi::openStream: a non-NULL output StreamParameters structure cannot have an nChannels value less than one.";
222 error( RtError::INVALID_USE );
225 if ( iParams && iParams->nChannels < 1 ) {
226 errorText_ = "RtApi::openStream: a non-NULL input StreamParameters structure cannot have an nChannels value less than one.";
227 error( RtError::INVALID_USE );
230 if ( oParams == NULL && iParams == NULL ) {
231 errorText_ = "RtApi::openStream: input and output StreamParameters structures are both NULL!";
232 error( RtError::INVALID_USE );
235 if ( formatBytes(format) == 0 ) {
236 errorText_ = "RtApi::openStream: 'format' parameter value is undefined.";
237 error( RtError::INVALID_USE );
240 unsigned int nDevices = getDeviceCount();
241 unsigned int oChannels = 0;
243 oChannels = oParams->nChannels;
244 if ( oParams->deviceId >= nDevices ) {
245 errorText_ = "RtApi::openStream: output device parameter value is invalid.";
246 error( RtError::INVALID_USE );
250 unsigned int iChannels = 0;
252 iChannels = iParams->nChannels;
253 if ( iParams->deviceId >= nDevices ) {
254 errorText_ = "RtApi::openStream: input device parameter value is invalid.";
255 error( RtError::INVALID_USE );
262 if ( oChannels > 0 ) {
264 result = probeDeviceOpen( oParams->deviceId, OUTPUT, oChannels, oParams->firstChannel,
265 sampleRate, format, bufferFrames, options );
266 if ( result == false ) error( RtError::SYSTEM_ERROR );
269 if ( iChannels > 0 ) {
271 result = probeDeviceOpen( iParams->deviceId, INPUT, iChannels, iParams->firstChannel,
272 sampleRate, format, bufferFrames, options );
273 if ( result == false ) {
274 if ( oChannels > 0 ) closeStream();
275 error( RtError::SYSTEM_ERROR );
279 stream_.callbackInfo.callback = (void *) callback;
280 stream_.callbackInfo.userData = userData;
282 if ( options ) options->numberOfBuffers = stream_.nBuffers;
283 stream_.state = STREAM_STOPPED;
286 unsigned int RtApi :: getDefaultInputDevice( void )
288 // Should be implemented in subclasses if possible.
292 unsigned int RtApi :: getDefaultOutputDevice( void )
294 // Should be implemented in subclasses if possible.
298 void RtApi :: closeStream( void )
300 // MUST be implemented in subclasses!
304 bool RtApi :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
305 unsigned int firstChannel, unsigned int sampleRate,
306 RtAudioFormat format, unsigned int *bufferSize,
307 RtAudio::StreamOptions *options )
309 // MUST be implemented in subclasses!
313 void RtApi :: tickStreamTime( void )
315 // Subclasses that do not provide their own implementation of
316 // getStreamTime should call this function once per buffer I/O to
317 // provide basic stream time support.
319 stream_.streamTime += ( stream_.bufferSize * 1.0 / stream_.sampleRate );
321 #if defined( HAVE_GETTIMEOFDAY )
322 gettimeofday( &stream_.lastTickTimestamp, NULL );
326 long RtApi :: getStreamLatency( void )
330 long totalLatency = 0;
331 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX )
332 totalLatency = stream_.latency[0];
333 if ( stream_.mode == INPUT || stream_.mode == DUPLEX )
334 totalLatency += stream_.latency[1];
339 double RtApi :: getStreamTime( void )
343 #if defined( HAVE_GETTIMEOFDAY )
344 // Return a very accurate estimate of the stream time by
345 // adding in the elapsed time since the last tick.
349 if ( stream_.state != STREAM_RUNNING || stream_.streamTime == 0.0 )
350 return stream_.streamTime;
352 gettimeofday( &now, NULL );
353 then = stream_.lastTickTimestamp;
354 return stream_.streamTime +
355 ((now.tv_sec + 0.000001 * now.tv_usec) -
356 (then.tv_sec + 0.000001 * then.tv_usec));
358 return stream_.streamTime;
363 // *************************************************** //
365 // OS/API-specific methods.
367 // *************************************************** //
369 #if defined(__MACOSX_CORE__)
371 // The OS X CoreAudio API is designed to use a separate callback
372 // procedure for each of its audio devices. A single RtAudio duplex
373 // stream using two different devices is supported here, though it
374 // cannot be guaranteed to always behave correctly because we cannot
375 // synchronize these two callbacks.
377 // A property listener is installed for over/underrun information.
378 // However, no functionality is currently provided to allow property
379 // listeners to trigger user handlers because it is unclear what could
380 // be done if a critical stream parameter (buffer size, sample rate,
381 // device disconnect) notification arrived. The listeners entail
382 // quite a bit of extra code and most likely, a user program wouldn't
383 // be prepared for the result anyway. However, we do provide a flag
384 // to the client callback function to inform of an over/underrun.
386 // The mechanism for querying and setting system parameters was
387 // updated (and perhaps simplified) in OS-X version 10.4. However,
388 // since 10.4 support is not necessarily available to all users, I've
389 // decided not to update the respective code at this time. Perhaps
390 // this will happen when Apple makes 10.4 free for everyone. :-)
392 // A structure to hold various information related to the CoreAudio API
395 AudioDeviceID id[2]; // device ids
396 UInt32 iStream[2]; // device stream index (first for mono mode)
399 pthread_cond_t condition;
400 int drainCounter; // Tracks callback counts when draining
401 bool internalDrain; // Indicates if stop is initiated from callback or not.
404 :deviceBuffer(0), drainCounter(0), internalDrain(false) { id[0] = 0; id[1] = 0; xrun[0] = false; xrun[1] = false; }
407 RtApiCore :: RtApiCore()
409 // Nothing to do here.
412 RtApiCore :: ~RtApiCore()
414 // The subclass destructor gets called before the base class
415 // destructor, so close an existing stream before deallocating
416 // apiDeviceId memory.
417 if ( stream_.state != STREAM_CLOSED ) closeStream();
420 unsigned int RtApiCore :: getDeviceCount( void )
422 // Find out how many audio devices there are, if any.
424 OSStatus result = AudioHardwareGetPropertyInfo( kAudioHardwarePropertyDevices, &dataSize, NULL );
425 if ( result != noErr ) {
426 errorText_ = "RtApiCore::getDeviceCount: OS-X error getting device info!";
427 error( RtError::WARNING );
431 return dataSize / sizeof( AudioDeviceID );
434 unsigned int RtApiCore :: getDefaultInputDevice( void )
436 unsigned int nDevices = getDeviceCount();
437 if ( nDevices <= 1 ) return 0;
440 UInt32 dataSize = sizeof( AudioDeviceID );
441 OSStatus result = AudioHardwareGetProperty( kAudioHardwarePropertyDefaultInputDevice,
444 if ( result != noErr ) {
445 errorText_ = "RtApiCore::getDefaultInputDevice: OS-X system error getting device.";
446 error( RtError::WARNING );
450 dataSize *= nDevices;
451 AudioDeviceID deviceList[ nDevices ];
452 result = AudioHardwareGetProperty( kAudioHardwarePropertyDevices, &dataSize, (void *) &deviceList );
453 if ( result != noErr ) {
454 errorText_ = "RtApiCore::getDefaultInputDevice: OS-X system error getting device IDs.";
455 error( RtError::WARNING );
459 for ( unsigned int i=0; i<nDevices; i++ )
460 if ( id == deviceList[i] ) return i;
462 errorText_ = "RtApiCore::getDefaultInputDevice: No default device found!";
463 error( RtError::WARNING );
467 unsigned int RtApiCore :: getDefaultOutputDevice( void )
469 unsigned int nDevices = getDeviceCount();
470 if ( nDevices <= 1 ) return 0;
473 UInt32 dataSize = sizeof( AudioDeviceID );
474 OSStatus result = AudioHardwareGetProperty( kAudioHardwarePropertyDefaultOutputDevice,
477 if ( result != noErr ) {
478 errorText_ = "RtApiCore::getDefaultOutputDevice: OS-X system error getting device.";
479 error( RtError::WARNING );
483 dataSize *= nDevices;
484 AudioDeviceID deviceList[ nDevices ];
485 result = AudioHardwareGetProperty( kAudioHardwarePropertyDevices, &dataSize, (void *) &deviceList );
486 if ( result != noErr ) {
487 errorText_ = "RtApiCore::getDefaultOutputDevice: OS-X system error getting device IDs.";
488 error( RtError::WARNING );
492 for ( unsigned int i=0; i<nDevices; i++ )
493 if ( id == deviceList[i] ) return i;
495 errorText_ = "RtApiCore::getDefaultOutputDevice: No default device found!";
496 error( RtError::WARNING );
500 RtAudio::DeviceInfo RtApiCore :: getDeviceInfo( unsigned int device )
502 RtAudio::DeviceInfo info;
506 unsigned int nDevices = getDeviceCount();
507 if ( nDevices == 0 ) {
508 errorText_ = "RtApiCore::getDeviceInfo: no devices found!";
509 error( RtError::INVALID_USE );
512 if ( device >= nDevices ) {
513 errorText_ = "RtApiCore::getDeviceInfo: device ID is invalid!";
514 error( RtError::INVALID_USE );
517 AudioDeviceID deviceList[ nDevices ];
518 UInt32 dataSize = sizeof( AudioDeviceID ) * nDevices;
519 OSStatus result = AudioHardwareGetProperty( kAudioHardwarePropertyDevices, &dataSize, (void *) &deviceList );
520 if ( result != noErr ) {
521 errorText_ = "RtApiCore::getDeviceInfo: OS-X system error getting device IDs.";
522 error( RtError::WARNING );
526 AudioDeviceID id = deviceList[ device ];
528 // Get the device name.
532 result = AudioDeviceGetProperty( id, 0, false,
533 kAudioDevicePropertyDeviceManufacturer,
536 if ( result != noErr ) {
537 errorStream_ << "RtApiCore::probeDeviceInfo: system error (" << getErrorCode( result ) << ") getting device manufacturer.";
538 errorText_ = errorStream_.str();
539 error( RtError::WARNING );
542 info.name.append( (const char *)name, strlen(name) );
543 info.name.append( ": " );
546 result = AudioDeviceGetProperty( id, 0, false,
547 kAudioDevicePropertyDeviceName,
549 if ( result != noErr ) {
550 errorStream_ << "RtApiCore::probeDeviceInfo: system error (" << getErrorCode( result ) << ") getting device name.";
551 errorText_ = errorStream_.str();
552 error( RtError::WARNING );
555 info.name.append( (const char *)name, strlen(name) );
557 // Get the output stream "configuration".
558 AudioBufferList *bufferList = nil;
559 result = AudioDeviceGetPropertyInfo( id, 0, false,
560 kAudioDevicePropertyStreamConfiguration,
562 if (result != noErr || dataSize == 0) {
563 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting output stream configuration info for device (" << device << ").";
564 errorText_ = errorStream_.str();
565 error( RtError::WARNING );
569 // Allocate the AudioBufferList.
570 bufferList = (AudioBufferList *) malloc( dataSize );
571 if ( bufferList == NULL ) {
572 errorText_ = "RtApiCore::getDeviceInfo: memory error allocating output AudioBufferList.";
573 error( RtError::WARNING );
577 result = AudioDeviceGetProperty( id, 0, false,
578 kAudioDevicePropertyStreamConfiguration,
579 &dataSize, bufferList );
580 if ( result != noErr ) {
582 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting output stream configuration for device (" << device << ").";
583 errorText_ = errorStream_.str();
584 error( RtError::WARNING );
588 // Get output channel information.
589 unsigned int i, nStreams = bufferList->mNumberBuffers;
590 for ( i=0; i<nStreams; i++ )
591 info.outputChannels += bufferList->mBuffers[i].mNumberChannels;
594 // Get the input stream "configuration".
595 result = AudioDeviceGetPropertyInfo( id, 0, true,
596 kAudioDevicePropertyStreamConfiguration,
598 if (result != noErr || dataSize == 0) {
599 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting input stream configuration info for device (" << device << ").";
600 errorText_ = errorStream_.str();
601 error( RtError::WARNING );
605 // Allocate the AudioBufferList.
606 bufferList = (AudioBufferList *) malloc( dataSize );
607 if ( bufferList == NULL ) {
608 errorText_ = "RtApiCore::getDeviceInfo: memory error allocating input AudioBufferList.";
609 error( RtError::WARNING );
613 result = AudioDeviceGetProperty( id, 0, true,
614 kAudioDevicePropertyStreamConfiguration,
615 &dataSize, bufferList );
616 if ( result != noErr ) {
618 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting input stream configuration for device (" << device << ").";
619 errorText_ = errorStream_.str();
620 error( RtError::WARNING );
624 // Get input channel information.
625 nStreams = bufferList->mNumberBuffers;
626 for ( i=0; i<nStreams; i++ )
627 info.inputChannels += bufferList->mBuffers[i].mNumberChannels;
630 // If device opens for both playback and capture, we determine the channels.
631 if ( info.outputChannels > 0 && info.inputChannels > 0 )
632 info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
634 // Probe the device sample rates.
635 bool isInput = false;
636 if ( info.outputChannels == 0 ) isInput = true;
638 // Determine the supported sample rates.
639 result = AudioDeviceGetPropertyInfo( id, 0, isInput,
640 kAudioDevicePropertyAvailableNominalSampleRates,
643 if ( result != kAudioHardwareNoError || dataSize == 0 ) {
644 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting sample rate info.";
645 errorText_ = errorStream_.str();
646 error( RtError::WARNING );
650 UInt32 nRanges = dataSize / sizeof( AudioValueRange );
651 AudioValueRange rangeList[ nRanges ];
652 result = AudioDeviceGetProperty( id, 0, isInput,
653 kAudioDevicePropertyAvailableNominalSampleRates,
654 &dataSize, &rangeList );
656 if ( result != kAudioHardwareNoError ) {
657 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting sample rates.";
658 errorText_ = errorStream_.str();
659 error( RtError::WARNING );
663 Float64 minimumRate = 100000000.0, maximumRate = 0.0;
664 for ( UInt32 i=0; i<nRanges; i++ ) {
665 if ( rangeList[i].mMinimum < minimumRate ) minimumRate = rangeList[i].mMinimum;
666 if ( rangeList[i].mMaximum > maximumRate ) maximumRate = rangeList[i].mMaximum;
669 info.sampleRates.clear();
670 for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {
671 if ( SAMPLE_RATES[k] >= (unsigned int) minimumRate && SAMPLE_RATES[k] <= (unsigned int) maximumRate )
672 info.sampleRates.push_back( SAMPLE_RATES[k] );
675 if ( info.sampleRates.size() == 0 ) {
676 errorStream_ << "RtApiCore::probeDeviceInfo: No supported sample rates found for device (" << device << ").";
677 errorText_ = errorStream_.str();
678 error( RtError::WARNING );
682 // CoreAudio always uses 32-bit floating point data for PCM streams.
683 // Thus, any other "physical" formats supported by the device are of
684 // no interest to the client.
685 info.nativeFormats = RTAUDIO_FLOAT32;
687 if ( getDefaultOutputDevice() == device )
688 info.isDefaultOutput = true;
689 if ( getDefaultInputDevice() == device )
690 info.isDefaultInput = true;
696 OSStatus callbackHandler( AudioDeviceID inDevice,
697 const AudioTimeStamp* inNow,
698 const AudioBufferList* inInputData,
699 const AudioTimeStamp* inInputTime,
700 AudioBufferList* outOutputData,
701 const AudioTimeStamp* inOutputTime,
704 CallbackInfo *info = (CallbackInfo *) infoPointer;
706 RtApiCore *object = (RtApiCore *) info->object;
707 if ( object->callbackEvent( inDevice, inInputData, outOutputData ) == false )
708 return kAudioHardwareUnspecifiedError;
710 return kAudioHardwareNoError;
713 OSStatus deviceListener( AudioDeviceID inDevice,
716 AudioDevicePropertyID propertyID,
717 void* handlePointer )
719 CoreHandle *handle = (CoreHandle *) handlePointer;
720 if ( propertyID == kAudioDeviceProcessorOverload ) {
722 handle->xrun[1] = true;
724 handle->xrun[0] = true;
727 return kAudioHardwareNoError;
730 static bool hasProperty( AudioDeviceID id, UInt32 channel, bool isInput, AudioDevicePropertyID property )
732 OSStatus result = AudioDeviceGetPropertyInfo( id, channel, isInput, property, NULL, NULL );
736 bool RtApiCore :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
737 unsigned int firstChannel, unsigned int sampleRate,
738 RtAudioFormat format, unsigned int *bufferSize,
739 RtAudio::StreamOptions *options )
742 unsigned int nDevices = getDeviceCount();
743 if ( nDevices == 0 ) {
744 // This should not happen because a check is made before this function is called.
745 errorText_ = "RtApiCore::probeDeviceOpen: no devices found!";
749 if ( device >= nDevices ) {
750 // This should not happen because a check is made before this function is called.
751 errorText_ = "RtApiCore::probeDeviceOpen: device ID is invalid!";
755 AudioDeviceID deviceList[ nDevices ];
756 UInt32 dataSize = sizeof( AudioDeviceID ) * nDevices;
757 OSStatus result = AudioHardwareGetProperty( kAudioHardwarePropertyDevices, &dataSize, (void *) &deviceList );
758 if ( result != noErr ) {
759 errorText_ = "RtApiCore::probeDeviceOpen: OS-X system error getting device IDs.";
763 AudioDeviceID id = deviceList[ device ];
765 // Setup for stream mode.
766 bool isInput = false;
767 if ( mode == INPUT ) isInput = true;
769 // Set or disable "hog" mode.
770 dataSize = sizeof( UInt32 );
772 if ( options && options->flags & RTAUDIO_HOG_DEVICE ) doHog = 1;
773 result = AudioHardwareSetProperty( kAudioHardwarePropertyHogModeIsAllowed, dataSize, &doHog );
774 if ( result != noErr ) {
775 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting 'hog' state!";
776 errorText_ = errorStream_.str();
780 // Get the stream "configuration".
781 AudioBufferList *bufferList;
782 result = AudioDeviceGetPropertyInfo( id, 0, isInput,
783 kAudioDevicePropertyStreamConfiguration,
785 if (result != noErr || dataSize == 0) {
786 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream configuration info for device (" << device << ").";
787 errorText_ = errorStream_.str();
791 // Allocate the AudioBufferList.
792 bufferList = (AudioBufferList *) malloc( dataSize );
793 if ( bufferList == NULL ) {
794 errorText_ = "RtApiCore::probeDeviceOpen: memory error allocating AudioBufferList.";
798 result = AudioDeviceGetProperty( id, 0, isInput,
799 kAudioDevicePropertyStreamConfiguration,
800 &dataSize, bufferList );
801 if ( result != noErr ) {
803 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream configuration for device (" << device << ").";
804 errorText_ = errorStream_.str();
808 // Search for a stream that contains the desired number of
809 // channels. CoreAudio devices can have an arbitrary number of
810 // streams and each stream can have an arbitrary number of channels.
811 // For each stream, a single buffer of interleaved samples is
812 // provided. RtAudio currently only supports the use of one stream
813 // of interleaved data or multiple consecutive single-channel
814 // streams. Thus, our search below is limited to these two
816 unsigned int streamChannels = 0, nStreams = 0;
817 UInt32 iChannel = 0, iStream = 0;
818 unsigned int offsetCounter = firstChannel;
819 stream_.deviceInterleaved[mode] = true;
820 nStreams = bufferList->mNumberBuffers;
821 bool foundStream = false;
823 for ( iStream=0; iStream<nStreams; iStream++ ) {
824 streamChannels = bufferList->mBuffers[iStream].mNumberChannels;
825 if ( streamChannels >= channels + offsetCounter ) {
826 iChannel += offsetCounter;
830 if ( streamChannels > offsetCounter ) break;
831 offsetCounter -= streamChannels;
832 iChannel += streamChannels;
835 // If we didn't find a single stream above, see if we can meet
836 // the channel specification in mono mode (i.e. using separate
837 // non-interleaved buffers). This can only work if there are N
838 // consecutive one-channel streams, where N is the number of
839 // desired channels (+ channel offset).
840 if ( foundStream == false ) {
841 unsigned int counter = 0;
842 offsetCounter = firstChannel;
844 for ( iStream=0; iStream<nStreams; iStream++ ) {
845 streamChannels = bufferList->mBuffers[iStream].mNumberChannels;
846 if ( offsetCounter ) {
847 if ( streamChannels > offsetCounter ) break;
848 offsetCounter -= streamChannels;
850 else if ( streamChannels == 1 )
854 if ( counter == channels ) {
855 iStream -= channels - 1;
856 iChannel -= channels - 1;
857 stream_.deviceInterleaved[mode] = false;
861 iChannel += streamChannels;
866 if ( foundStream == false ) {
867 errorStream_ << "RtApiCore::probeDeviceOpen: unable to find OS-X stream on device (" << device << ") for requested channels.";
868 errorText_ = errorStream_.str();
872 // Determine the buffer size.
873 AudioValueRange bufferRange;
874 dataSize = sizeof( AudioValueRange );
875 result = AudioDeviceGetProperty( id, 0, isInput,
876 kAudioDevicePropertyBufferFrameSizeRange,
877 &dataSize, &bufferRange );
878 if ( result != noErr ) {
879 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting buffer size range for device (" << device << ").";
880 errorText_ = errorStream_.str();
884 if ( bufferRange.mMinimum > *bufferSize ) *bufferSize = (unsigned long) bufferRange.mMinimum;
885 else if ( bufferRange.mMaximum < *bufferSize ) *bufferSize = (unsigned long) bufferRange.mMaximum;
886 if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) *bufferSize = (unsigned long) bufferRange.mMinimum;
888 // Set the buffer size. For mono mode, I'm assuming we only need to
889 // make this setting for the master channel.
890 UInt32 theSize = (UInt32) *bufferSize;
891 dataSize = sizeof( UInt32 );
892 result = AudioDeviceSetProperty( id, NULL, 0, isInput,
893 kAudioDevicePropertyBufferFrameSize,
894 dataSize, &theSize );
896 if ( result != noErr ) {
897 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting the buffer size for device (" << device << ").";
898 errorText_ = errorStream_.str();
902 // If attempting to setup a duplex stream, the bufferSize parameter
903 // MUST be the same in both directions!
904 *bufferSize = theSize;
905 if ( stream_.mode == OUTPUT && mode == INPUT && *bufferSize != stream_.bufferSize ) {
906 errorStream_ << "RtApiCore::probeDeviceOpen: system error setting buffer size for duplex stream on device (" << device << ").";
907 errorText_ = errorStream_.str();
911 stream_.bufferSize = *bufferSize;
912 stream_.nBuffers = 1;
914 // Get the stream ID(s) so we can set the stream format. In mono
915 // mode, we'll have to do this for each stream (channel).
916 AudioStreamID streamIDs[ nStreams ];
917 dataSize = nStreams * sizeof( AudioStreamID );
918 result = AudioDeviceGetProperty( id, 0, isInput,
919 kAudioDevicePropertyStreams,
920 &dataSize, &streamIDs );
921 if ( result != noErr ) {
922 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream ID(s) for device (" << device << ").";
923 errorText_ = errorStream_.str();
927 // Now set the stream format. Also, check the physical format of the
928 // device and change that if necessary.
929 AudioStreamBasicDescription description;
930 dataSize = sizeof( AudioStreamBasicDescription );
931 if ( stream_.deviceInterleaved[mode] ) nStreams = 1;
932 else nStreams = channels;
935 for ( unsigned int i=0; i<nStreams; i++ ) {
937 result = AudioStreamGetProperty( streamIDs[iStream+i], 0,
938 kAudioStreamPropertyVirtualFormat,
939 &dataSize, &description );
941 if ( result != noErr ) {
// NOTE(review): fragment of RtApiCore::probeDeviceOpen — the function header and
// several interleaved lines (closing braces, error( ... ) / return FAILURE
// paths) are outside this listing, so only comments are added here.
942 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream format for device (" << device << ").";
943 errorText_ = errorStream_.str();
// Coerce the stream's virtual format to linear PCM at the requested rate,
// but only touch the hardware when something actually needs to change.
947 // Set the sample rate and data format id. However, only make the
948 // change if the sample rate is not within 1.0 of the desired
949 // rate and the format is not linear pcm.
950 updateFormat = false;
951 if ( fabs( description.mSampleRate - (double)sampleRate ) > 1.0 ) {
952 description.mSampleRate = (double) sampleRate;
956 if ( description.mFormatID != kAudioFormatLinearPCM ) {
957 description.mFormatID = kAudioFormatLinearPCM;
961 if ( updateFormat ) {
962 result = AudioStreamSetProperty( streamIDs[iStream+i], NULL, 0,
963 kAudioStreamPropertyVirtualFormat,
964 dataSize, &description );
965 if ( result != noErr ) {
966 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting sample rate or data format for device (" << device << ").";
967 errorText_ = errorStream_.str();
// Query the physical (hardware) format; if it is not linear PCM with at
// least 24 bits per channel, probe for the best supported depth below.
972 // Now check the physical format.
973 result = AudioStreamGetProperty( streamIDs[iStream+i], 0,
974 kAudioStreamPropertyPhysicalFormat,
975 &dataSize, &description );
976 if ( result != noErr ) {
977 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream physical format for device (" << device << ").";
978 errorText_ = errorStream_.str();
982 if ( description.mFormatID != kAudioFormatLinearPCM || description.mBitsPerChannel < 24 ) {
983 description.mFormatID = kAudioFormatLinearPCM;
984 AudioStreamBasicDescription testDescription = description;
985 unsigned long formatFlags;
// First attempt: 32-bit float.
987 // We'll try higher bit rates first and then work our way down.
988 testDescription.mBitsPerChannel = 32;
// NOTE(review): '&' binds tighter than '|', so this evaluates as
// flags | (IsFloat & ~IsSignedInteger).  The intent is almost certainly
// (flags | IsFloat) & ~IsSignedInteger; later RtAudio releases parenthesize
// exactly that way — flag for correction upstream.
989 formatFlags = description.mFormatFlags | kLinearPCMFormatFlagIsFloat & ~kLinearPCMFormatFlagIsSignedInteger;
990 testDescription.mFormatFlags = formatFlags;
991 result = AudioStreamSetProperty( streamIDs[iStream+i], NULL, 0, kAudioStreamPropertyPhysicalFormat, dataSize, &testDescription );
992 if ( result == noErr ) continue;
// Second attempt: 32-bit signed integer.
994 testDescription = description;
995 testDescription.mBitsPerChannel = 32;
996 formatFlags = (description.mFormatFlags | kLinearPCMFormatFlagIsSignedInteger) & ~kLinearPCMFormatFlagIsFloat;
997 testDescription.mFormatFlags = formatFlags;
998 result = AudioStreamSetProperty( streamIDs[iStream+i], NULL, 0, kAudioStreamPropertyPhysicalFormat, dataSize, &testDescription );
999 if ( result == noErr ) continue;
// Remaining attempts (24/16/8-bit) reuse the signed-integer flags computed above.
1001 testDescription = description;
1002 testDescription.mBitsPerChannel = 24;
1003 testDescription.mFormatFlags = formatFlags;
1004 result = AudioStreamSetProperty( streamIDs[iStream+i], NULL, 0, kAudioStreamPropertyPhysicalFormat, dataSize, &testDescription );
1005 if ( result == noErr ) continue;
1007 testDescription = description;
1008 testDescription.mBitsPerChannel = 16;
1009 testDescription.mFormatFlags = formatFlags;
1010 result = AudioStreamSetProperty( streamIDs[iStream+i], NULL, 0, kAudioStreamPropertyPhysicalFormat, dataSize, &testDescription );
1011 if ( result == noErr ) continue;
1013 testDescription = description;
1014 testDescription.mBitsPerChannel = 8;
1015 testDescription.mFormatFlags = formatFlags;
1016 result = AudioStreamSetProperty( streamIDs[iStream+i], NULL, 0, kAudioStreamPropertyPhysicalFormat, dataSize, &testDescription );
1017 if ( result != noErr ) {
1018 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting physical data format for device (" << device << ").";
1019 errorText_ = errorStream_.str();
// Latency probing: device latency (master or first open channel) plus
// stream latency.  Failures here are reported as warnings only.
1025 // Get the stream latency. There can be latency in both the device
1026 // and the stream. First, attempt to get the device latency on the
1027 // master channel or the first open channel. Errors that might
1028 // occur here are not deemed critical.
1029 UInt32 latency, channel = 0;
1030 dataSize = sizeof( UInt32 );
1031 AudioDevicePropertyID property = kAudioDevicePropertyLatency;
1032 for ( int i=0; i<2; i++ ) {
1033 if ( hasProperty( id, channel, isInput, property ) == true ) break;
1034 channel = iChannel + 1 + i;
1036 if ( channel <= iChannel + 1 ) {
1037 result = AudioDeviceGetProperty( id, channel, isInput, property, &dataSize, &latency );
1038 if ( result == kAudioHardwareNoError ) stream_.latency[ mode ] = latency;
1040 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting device latency for device (" << device << ").";
1041 errorText_ = errorStream_.str();
1042 error( RtError::WARNING );
1046 // Now try to get the stream latency. For "mono" mode, I assume the
1047 // latency is equal for all single-channel streams.
1048 result = AudioStreamGetProperty( streamIDs[iStream], 0, property, &dataSize, &latency );
1049 if ( result == kAudioHardwareNoError ) stream_.latency[ mode ] += latency;
1051 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream latency for device (" << device << ").";
1052 errorText_ = errorStream_.str();
1053 error( RtError::WARNING );
1056 // Byte-swapping: According to AudioHardware.h, the stream data will
1057 // always be presented in native-endian format, so we should never
1058 // need to byte swap.
1059 stream_.doByteSwap[mode] = false;
1061 // From the CoreAudio documentation, PCM data must be supplied as
1063 stream_.userFormat = format;
1064 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
// Channel bookkeeping: an interleaved device exposes all channels in one
// stream; "mono" (non-interleaved) devices get one channel per stream.
1066 if ( stream_.deviceInterleaved[mode] )
1067 stream_.nDeviceChannels[mode] = description.mChannelsPerFrame;
1069 stream_.nDeviceChannels[mode] = channels;
1070 stream_.nUserChannels[mode] = channels;
1071 stream_.channelOffset[mode] = iChannel; // offset within a CoreAudio stream
1072 if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;
1073 else stream_.userInterleaved = true;
// Conversion is needed when user/device formats, channel counts, or
// interleaving disagree.
1075 // Set flags for buffer conversion.
1076 stream_.doConvertBuffer[mode] = false;
1077 if ( stream_.userFormat != stream_.deviceFormat[mode] )
1078 stream_.doConvertBuffer[mode] = true;
1079 if ( stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode] )
1080 stream_.doConvertBuffer[mode] = true;
1081 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
1082 stream_.nUserChannels[mode] > 1 )
1083 stream_.doConvertBuffer[mode] = true;
1085 // Allocate our CoreHandle structure for the stream.
1086 CoreHandle *handle = 0;
1087 if ( stream_.apiHandle == 0 ) {
1089 handle = new CoreHandle;
1091 catch ( std::bad_alloc& ) {
1092 errorText_ = "RtApiCore::probeDeviceOpen: error allocating CoreHandle memory.";
1096 if ( pthread_cond_init( &handle->condition, NULL ) ) {
1097 errorText_ = "RtApiCore::probeDeviceOpen: error initializing pthread condition variable.";
1100 stream_.apiHandle = (void *) handle;
1103 handle = (CoreHandle *) stream_.apiHandle;
1104 handle->iStream[mode] = iStream;
1105 handle->id[mode] = id;
1107 // Allocate necessary internal buffers.
1108 unsigned long bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
1109 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
1110 if ( stream_.userBuffer[mode] == NULL ) {
1111 errorText_ = "RtApiCore::probeDeviceOpen: error allocating user buffer memory.";
1115 // If possible, we will make use of the CoreAudio stream buffers as
1116 // "device buffers". However, we can't do this if the device
1117 // buffers are non-interleaved ("mono" mode).
1118 if ( !stream_.deviceInterleaved[mode] && stream_.doConvertBuffer[mode] ) {
// For duplex streams, reuse an already-allocated output device buffer
// when it is at least as large as what input needs.
1120 bool makeBuffer = true;
1121 bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
1122 if ( mode == INPUT ) {
1123 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
1124 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
1125 if ( bufferBytes <= bytesOut ) makeBuffer = false;
1130 bufferBytes *= *bufferSize;
1131 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
1132 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
1133 if ( stream_.deviceBuffer == NULL ) {
1134 errorText_ = "RtApiCore::probeDeviceOpen: error allocating device buffer memory.";
1138 // Save a pointer to our own device buffer in the CoreHandle
1139 // structure because we may need to use the stream_.deviceBuffer
1140 // variable to point to the CoreAudio buffer before buffer
1141 // conversion (if we have a duplex stream with two different
1142 // conversion schemes).
1143 handle->deviceBuffer = stream_.deviceBuffer;
1147 stream_.sampleRate = sampleRate;
1148 stream_.device[mode] = device;
1149 stream_.state = STREAM_STOPPED;
1150 stream_.callbackInfo.object = (void *) this;
1152 // Setup the buffer conversion information structure. We override
1153 // the channel offset value and perform our own setting for that
1155 if ( stream_.doConvertBuffer[mode] ) {
1156 setConvertInfo( mode, 0 );
1158 // Add channel offset for interleaved channels.
1159 if ( firstChannel > 0 && stream_.deviceInterleaved[mode] ) {
1160 if ( mode == OUTPUT ) {
1161 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
1162 stream_.convertInfo[mode].outOffset[k] += firstChannel;
1165 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
1166 stream_.convertInfo[mode].inOffset[k] += firstChannel;
// Register the CoreAudio IOProc; one callback per device even in duplex.
1171 if ( mode == INPUT && stream_.mode == OUTPUT && stream_.device[0] == device )
1172 // Only one callback procedure per device.
1173 stream_.mode = DUPLEX;
1175 result = AudioDeviceAddIOProc( id, callbackHandler, (void *) &stream_.callbackInfo );
1176 if ( result != noErr ) {
1177 errorStream_ << "RtApiCore::probeDeviceOpen: system error setting callback for device (" << device << ").";
1178 errorText_ = errorStream_.str();
1181 if ( stream_.mode == OUTPUT && mode == INPUT )
1182 stream_.mode = DUPLEX;
1184 stream_.mode = mode;
1187 // Setup the device property listener for over/underload.
1188 result = AudioDeviceAddPropertyListener( id, 0, isInput,
1189 kAudioDeviceProcessorOverload,
1190 deviceListener, (void *) handle );
// Error-unwind path: release the condition variable, handle, and any
// buffers allocated above before reporting failure.
1196 pthread_cond_destroy( &handle->condition );
1198 stream_.apiHandle = 0;
1201 for ( int i=0; i<2; i++ ) {
1202 if ( stream_.userBuffer[i] ) {
1203 free( stream_.userBuffer[i] );
1204 stream_.userBuffer[i] = 0;
1208 if ( stream_.deviceBuffer ) {
1209 free( stream_.deviceBuffer );
1210 stream_.deviceBuffer = 0;
// Close the CoreAudio stream: stop/remove the IOProc(s), free user and
// device buffers, and tear down the pthread condition variable.
// (Interleaved lines of the original — braces, early returns — are
// missing from this listing.)
1216 void RtApiCore :: closeStream( void )
1218 if ( stream_.state == STREAM_CLOSED ) {
1219 errorText_ = "RtApiCore::closeStream(): no open stream to close!";
1220 error( RtError::WARNING );
1224 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
// Output side: handle->id[0] owns the output IOProc.
1225 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
1226 if ( stream_.state == STREAM_RUNNING )
1227 AudioDeviceStop( handle->id[0], callbackHandler );
1228 AudioDeviceRemoveIOProc( handle->id[0], callbackHandler );
// Input side: only if it is a distinct device (duplex on a single device
// shares one IOProc).
1231 if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1] ) ) {
1232 if ( stream_.state == STREAM_RUNNING )
1233 AudioDeviceStop( handle->id[1], callbackHandler );
1234 AudioDeviceRemoveIOProc( handle->id[1], callbackHandler );
1237 for ( int i=0; i<2; i++ ) {
1238 if ( stream_.userBuffer[i] ) {
1239 free( stream_.userBuffer[i] );
1240 stream_.userBuffer[i] = 0;
// Free via the handle's saved pointer: stream_.deviceBuffer may have been
// repointed at a CoreAudio-owned buffer during callbacks.
1244 if ( handle->deviceBuffer ) {
1245 free( handle->deviceBuffer );
1246 stream_.deviceBuffer = 0;
1249 // Destroy pthread condition variable.
1250 pthread_cond_destroy( &handle->condition );
1252 stream_.apiHandle = 0;
1254 stream_.mode = UNINITIALIZED;
1255 stream_.state = STREAM_CLOSED;
// Start the CoreAudio IOProc(s) under the stream mutex; a second device is
// started only for duplex streams that span two different devices.
// Errors from AudioDeviceStart are reported via error(SYSTEM_ERROR) at the
// bottom.  (Some interleaved lines are missing from this listing.)
1258 void RtApiCore :: startStream( void )
1261 if ( stream_.state == STREAM_RUNNING ) {
1262 errorText_ = "RtApiCore::startStream(): the stream is already running!";
1263 error( RtError::WARNING );
1267 MUTEX_LOCK( &stream_.mutex );
1269 OSStatus result = noErr;
1270 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
1271 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
1273 result = AudioDeviceStart( handle->id[0], callbackHandler );
1274 if ( result != noErr ) {
1275 errorStream_ << "RtApiCore::startStream: system error (" << getErrorCode( result ) << ") starting callback procedure on device (" << stream_.device[0] << ").";
1276 errorText_ = errorStream_.str();
1281 if ( stream_.mode == INPUT ||
1282 ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1] ) ) {
1284 result = AudioDeviceStart( handle->id[1], callbackHandler );
1285 if ( result != noErr ) {
1286 errorStream_ << "RtApiCore::startStream: system error starting input callback procedure on device (" << stream_.device[1] << ").";
1287 errorText_ = errorStream_.str();
// Reset drain bookkeeping so stopStream()/callback draining starts clean.
1292 handle->drainCounter = 0;
1293 handle->internalDrain = false;
1294 stream_.state = STREAM_RUNNING;
1297 MUTEX_UNLOCK( &stream_.mutex );
1299 if ( result == noErr ) return;
1300 error( RtError::SYSTEM_ERROR );
// Stop the CoreAudio stream.  For output/duplex the output is first
// drained: set drainCounter and block on the condition variable until
// callbackEvent() signals completion; then the device(s) are stopped.
// (Some interleaved lines are missing from this listing.)
1303 void RtApiCore :: stopStream( void )
1306 if ( stream_.state == STREAM_STOPPED ) {
1307 errorText_ = "RtApiCore::stopStream(): the stream is already stopped!";
1308 error( RtError::WARNING );
1312 MUTEX_LOCK( &stream_.mutex );
1314 OSStatus result = noErr;
1315 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
1316 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
// drainCounter != 0 means a drain is already in progress (e.g. requested
// by the user callback or abortStream()), so don't wait again.
1318 if ( handle->drainCounter == 0 ) {
1319 handle->drainCounter = 1;
1320 pthread_cond_wait( &handle->condition, &stream_.mutex ); // block until signaled
1323 result = AudioDeviceStop( handle->id[0], callbackHandler );
1324 if ( result != noErr ) {
1325 errorStream_ << "RtApiCore::stopStream: system error (" << getErrorCode( result ) << ") stopping callback procedure on device (" << stream_.device[0] << ").";
1326 errorText_ = errorStream_.str();
// Separate input device (if any) is stopped independently.
1331 if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1] ) ) {
1333 result = AudioDeviceStop( handle->id[1], callbackHandler );
1334 if ( result != noErr ) {
1335 errorStream_ << "RtApiCore::stopStream: system error (" << getErrorCode( result ) << ") stopping input callback procedure on device (" << stream_.device[1] << ").";
1336 errorText_ = errorStream_.str();
1342 MUTEX_UNLOCK( &stream_.mutex );
1344 stream_.state = STREAM_STOPPED;
1345 if ( result == noErr ) return;
1346 error( RtError::SYSTEM_ERROR );
// Stop the stream without draining buffered output.  Setting drainCounter
// nonzero makes stopStream() skip its condition-variable drain wait; the
// tail of this function (presumably the stopStream() call) is missing from
// this listing — TODO confirm against the full source.
1349 void RtApiCore :: abortStream( void )
1352 if ( stream_.state == STREAM_STOPPED ) {
1353 errorText_ = "RtApiCore::abortStream(): the stream is already stopped!";
1354 error( RtError::WARNING );
1358 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
1359 handle->drainCounter = 1;
// Per-device CoreAudio render callback.  Fills the output buffer list from
// the user callback (with format/interleaving conversion as configured)
// and copies/converts the input buffer list into the user input buffer.
// (Interleaved lines — braces, early-return paths, final return — are
// missing from this listing.)
1364 bool RtApiCore :: callbackEvent( AudioDeviceID deviceId,
1365 const AudioBufferList *inBufferList,
1366 const AudioBufferList *outBufferList )
1368 if ( stream_.state == STREAM_STOPPED ) return SUCCESS;
1369 if ( stream_.state == STREAM_CLOSED ) {
1370 errorText_ = "RtApiCore::callbackEvent(): the stream is closed ... this shouldn't happen!";
1371 error( RtError::WARNING );
1375 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
1376 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
// Drain handshake: once drainCounter (incremented below on each draining
// callback) passes 3, wake the thread blocked in stopStream() — unless the
// drain was initiated from inside the callback itself (internalDrain).
1378 // Check if we were draining the stream and signal is finished.
1379 if ( handle->drainCounter > 3 ) {
1380 if ( handle->internalDrain == false )
1381 pthread_cond_signal( &handle->condition );
1387 MUTEX_LOCK( &stream_.mutex );
1389 AudioDeviceID outputDevice = handle->id[0];
1391 // Invoke user callback to get fresh output data UNLESS we are
1392 // draining stream or duplex mode AND the input/output devices are
1393 // different AND this function is called for the input device.
1394 if ( handle->drainCounter == 0 && ( stream_.mode != DUPLEX || deviceId == outputDevice ) ) {
1395 RtAudioCallback callback = (RtAudioCallback) info->callback;
1396 double streamTime = getStreamTime();
1397 RtAudioStreamStatus status = 0;
// Report (and clear) any over/underflow flags raised by the device
// property listener.
1398 if ( stream_.mode != INPUT && handle->xrun[0] == true ) {
1399 status |= RTAUDIO_OUTPUT_UNDERFLOW;
1400 handle->xrun[0] = false;
1402 if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {
1403 status |= RTAUDIO_INPUT_OVERFLOW;
1404 handle->xrun[1] = false;
// The user callback's return value doubles as a drain request:
// 1 = stop after draining output, 2 = abort immediately.
1406 handle->drainCounter = callback( stream_.userBuffer[0], stream_.userBuffer[1],
1407 stream_.bufferSize, streamTime, status, info->userData );
1408 if ( handle->drainCounter == 2 ) {
1409 MUTEX_UNLOCK( &stream_.mutex );
1413 else if ( handle->drainCounter == 1 )
1414 handle->internalDrain = true;
1417 if ( stream_.mode == OUTPUT || ( stream_.mode == DUPLEX && deviceId == outputDevice ) ) {
1419 if ( handle->drainCounter > 1 ) { // write zeros to the output stream
1421 if ( stream_.deviceInterleaved[0] ) {
1422 memset( outBufferList->mBuffers[handle->iStream[0]].mData,
1424 outBufferList->mBuffers[handle->iStream[0]].mDataByteSize );
// "Mono" mode: zero each single-channel stream individually.
1427 for ( unsigned int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
1428 memset( outBufferList->mBuffers[handle->iStream[0]+i].mData,
1430 outBufferList->mBuffers[handle->iStream[0]+i].mDataByteSize );
// Conversion path: convert the user buffer into the device layout, either
// directly into the CoreAudio buffer (interleaved) or via our own device
// buffer, which is then scattered across the per-channel streams.
1434 else if ( stream_.doConvertBuffer[0] ) {
1436 if ( stream_.deviceInterleaved[0] )
1437 stream_.deviceBuffer = (char *) outBufferList->mBuffers[handle->iStream[0]].mData;
1439 stream_.deviceBuffer = handle->deviceBuffer;
1441 convertBuffer( stream_.deviceBuffer, stream_.userBuffer[0], stream_.convertInfo[0] );
1443 if ( !stream_.deviceInterleaved[0] ) {
1444 UInt32 bufferBytes = outBufferList->mBuffers[handle->iStream[0]].mDataByteSize;
1445 for ( unsigned int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
1446 memcpy( outBufferList->mBuffers[handle->iStream[0]+i].mData,
1447 &stream_.deviceBuffer[i*bufferBytes], bufferBytes );
// No-conversion path: straight memcpy from the user buffer.
1453 if ( stream_.deviceInterleaved[0] ) {
1454 memcpy( outBufferList->mBuffers[handle->iStream[0]].mData,
1455 stream_.userBuffer[0],
1456 outBufferList->mBuffers[handle->iStream[0]].mDataByteSize );
1459 UInt32 bufferBytes = outBufferList->mBuffers[handle->iStream[0]].mDataByteSize;
1460 for ( unsigned int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
1461 memcpy( outBufferList->mBuffers[handle->iStream[0]+i].mData,
1462 &stream_.userBuffer[0][i*bufferBytes], bufferBytes );
// While draining, count callbacks so the handshake above can complete.
1467 if ( handle->drainCounter ) {
1468 handle->drainCounter++;
1473 AudioDeviceID inputDevice = handle->id[1];
1474 if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && deviceId == inputDevice ) ) {
1476 if ( stream_.doConvertBuffer[1] ) {
1478 if ( stream_.deviceInterleaved[1] )
1479 stream_.deviceBuffer = (char *) inBufferList->mBuffers[handle->iStream[1]].mData;
1481 stream_.deviceBuffer = (char *) handle->deviceBuffer;
// "Mono"-mode input: gather each single-channel stream into our device
// buffer before conversion.
1482 UInt32 bufferBytes = inBufferList->mBuffers[handle->iStream[1]].mDataByteSize;
1483 for ( unsigned int i=0; i<stream_.nDeviceChannels[1]; i++ ) {
1484 memcpy( &stream_.deviceBuffer[i*bufferBytes],
1485 inBufferList->mBuffers[handle->iStream[1]+i].mData, bufferBytes );
1489 convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
// No-conversion input path: straight memcpy into the user buffer.
1493 memcpy( stream_.userBuffer[1],
1494 inBufferList->mBuffers[handle->iStream[1]].mData,
1495 inBufferList->mBuffers[handle->iStream[1]].mDataByteSize );
1500 MUTEX_UNLOCK( &stream_.mutex );
1502 RtApi::tickStreamTime();
// Translate a CoreAudio OSStatus into the corresponding symbolic constant
// name for use in error messages.  (The opening 'switch ( code ) {' line
// is missing from this listing.)
1506 const char* RtApiCore :: getErrorCode( OSStatus code )
1510 case kAudioHardwareNotRunningError:
1511 return "kAudioHardwareNotRunningError";
1513 case kAudioHardwareUnspecifiedError:
1514 return "kAudioHardwareUnspecifiedError";
1516 case kAudioHardwareUnknownPropertyError:
1517 return "kAudioHardwareUnknownPropertyError";
1519 case kAudioHardwareBadPropertySizeError:
1520 return "kAudioHardwareBadPropertySizeError";
1522 case kAudioHardwareIllegalOperationError:
1523 return "kAudioHardwareIllegalOperationError";
1525 case kAudioHardwareBadObjectError:
1526 return "kAudioHardwareBadObjectError";
1528 case kAudioHardwareBadDeviceError:
1529 return "kAudioHardwareBadDeviceError";
1531 case kAudioHardwareBadStreamError:
1532 return "kAudioHardwareBadStreamError";
1534 case kAudioHardwareUnsupportedOperationError:
1535 return "kAudioHardwareUnsupportedOperationError";
1537 case kAudioDeviceUnsupportedFormatError:
1538 return "kAudioDeviceUnsupportedFormatError";
1540 case kAudioDevicePermissionsError:
1541 return "kAudioDevicePermissionsError";
// Fallback for any code not enumerated above.
1544 return "CoreAudio unknown error";
1548 //******************** End of __MACOSX_CORE__ *********************//
1551 #if defined(__UNIX_JACK__)
1553 // JACK is a low-latency audio server, originally written for the
1554 // GNU/Linux operating system and now also ported to OS-X. It can
1555 // connect a number of different applications to an audio device, as
1556 // well as allowing them to share audio between themselves.
1558 // When using JACK with RtAudio, "devices" refer to JACK clients that
1559 // have ports connected to the server. The JACK server is typically
1560 // started in a terminal as follows:
1562 // .jackd -d alsa -d hw:0
1564 // or through an interface program such as qjackctl. Many of the
1565 // parameters normally set for a stream are fixed by the JACK server
1566 // and can be specified when the JACK server is started. In
1569 // .jackd -d alsa -d hw:0 -r 44100 -p 512 -n 4
1571 // specifies a sample rate of 44100 Hz, a buffer size of 512 sample
1572 // frames, and number of buffers = 4. Once the server is running, it
1573 // is not possible to override these values. If the values are not
1574 // specified in the command-line, the JACK server uses default values.
1576 // The JACK server does not have to be running when an instance of
1577 // RtApiJack is created, though the function getDeviceCount() will
1578 // report 0 devices found until JACK has been started. When no
1579 // devices are available (i.e., the JACK server is not running), a
1580 // stream cannot be opened.
1582 #include <jack/jack.h>
// Per-stream bookkeeping for the JACK backend.  Index convention for the
// two-element arrays: [0] = output/playback, [1] = input/capture.
// (The 'struct JackHandle {' line, the xrun member referenced by the
// constructor, and the closing brace fall in gaps of this listing.)
1585 // A structure to hold various information related to the Jack API
1588 jack_client_t *client;
1589 jack_port_t **ports[2];
1590 std::string deviceName[2];
1592 pthread_cond_t condition;
1593 int drainCounter; // Tracks callback counts when draining
1594 bool internalDrain; // Indicates if stop is initiated from callback or not.
1597 :client(0), drainCounter(0), internalDrain(false) { ports[0] = 0; ports[1] = 0; xrun[0] = false; xrun[1] = false; }
// Default constructor: all stream setup happens later in probeDeviceOpen().
1600 RtApiJack :: RtApiJack()
1602 // Nothing to do here.
// Destructor: ensure any open stream is shut down before the object dies.
1605 RtApiJack :: ~RtApiJack()
1607 if ( stream_.state != STREAM_CLOSED ) closeStream();
// Count JACK "devices" by connecting as a temporary client and grouping
// port names by their client prefix (the text before the first ':').
// Returns 0 when the JACK server is not running.  (Several interleaved
// lines — the ports NULL check, nDevices increments, and the return — fall
// in gaps of this listing.)
1610 unsigned int RtApiJack :: getDeviceCount( void )
1612 // See if we can become a jack client.
1613 jack_client_t *client = jack_client_new( "RtApiJackCount" );
1614 if ( client == 0 ) return 0;
1617 std::string port, previousPort;
1618 unsigned int nChannels = 0, nDevices = 0;
1619 ports = jack_get_ports( client, NULL, NULL, 0 );
1621 // Parse the port names up to the first colon (:).
1622 unsigned int iColon = 0;
1624 port = (char *) ports[ nChannels ];
1625 iColon = port.find(":");
1626 if ( iColon != std::string::npos ) {
// Note: keeps the trailing ':' (length iColon + 1), unlike the parser in
// getDeviceInfo(); harmless here since the prefix is only compared against
// the previous one to detect client boundaries.
1627 port = port.substr( 0, iColon + 1 );
1628 if ( port != previousPort ) {
1630 previousPort = port;
1633 } while ( ports[++nChannels] );
1637 jack_client_close( client );
// Build a DeviceInfo for JACK "device" (client) index 'device': the name
// comes from the port-prefix enumeration, the single supported sample rate
// from the server, and channel counts from the client's ports.
// (Interleaved lines — ports NULL checks, free( ports ) calls, nDevices
// increments, returns — fall in gaps of this listing.)
1641 RtAudio::DeviceInfo RtApiJack :: getDeviceInfo( unsigned int device )
1643 RtAudio::DeviceInfo info;
1644 info.probed = false;
1646 jack_client_t *client = jack_client_new( "RtApiJackInfo" );
1647 if ( client == 0 ) {
1648 errorText_ = "RtApiJack::getDeviceInfo: Jack server not found or connection error!";
1649 error( RtError::WARNING );
// Enumerate distinct client-name prefixes to resolve 'device' to a name.
1654 std::string port, previousPort;
1655 unsigned int nPorts = 0, nDevices = 0;
1656 ports = jack_get_ports( client, NULL, NULL, 0 );
1658 // Parse the port names up to the first colon (:).
1659 unsigned int iColon = 0;
1661 port = (char *) ports[ nPorts ];
1662 iColon = port.find(":");
1663 if ( iColon != std::string::npos ) {
1664 port = port.substr( 0, iColon );
1665 if ( port != previousPort ) {
1666 if ( nDevices == device ) info.name = port;
1668 previousPort = port;
1671 } while ( ports[++nPorts] );
1675 if ( device >= nDevices ) {
1676 errorText_ = "RtApiJack::getDeviceInfo: device ID is invalid!";
1677 error( RtError::INVALID_USE );
// JACK streams must run at the server's rate, so exactly one rate is listed.
1680 // Get the current jack server sample rate.
1681 info.sampleRates.clear();
1682 info.sampleRates.push_back( jack_get_sample_rate( client ) );
1684 // Count the available ports containing the client name as device
1685 // channels. Jack "input ports" equal RtAudio output channels.
1686 unsigned int nChannels = 0;
1687 ports = jack_get_ports( client, info.name.c_str(), NULL, JackPortIsInput );
1689 while ( ports[ nChannels ] ) nChannels++;
1691 info.outputChannels = nChannels;
1694 // Jack "output ports" equal RtAudio input channels.
1696 ports = jack_get_ports( client, info.name.c_str(), NULL, JackPortIsOutput );
1698 while ( ports[ nChannels ] ) nChannels++;
1700 info.inputChannels = nChannels;
1703 if ( info.outputChannels == 0 && info.inputChannels == 0 ) {
1704 jack_client_close(client);
1705 errorText_ = "RtApiJack::getDeviceInfo: error determining Jack input/output channels!";
1706 error( RtError::WARNING );
1710 // If device opens for both playback and capture, we determine the channels.
1711 if ( info.outputChannels > 0 && info.inputChannels > 0 )
1712 info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
1714 // Jack always uses 32-bit floats.
1715 info.nativeFormats = RTAUDIO_FLOAT32;
1717 // Jack doesn't provide default devices so we'll use the first available one.
1718 if ( device == 0 && info.outputChannels > 0 )
1719 info.isDefaultOutput = true;
1720 if ( device == 0 && info.inputChannels > 0 )
1721 info.isDefaultInput = true;
1723 jack_client_close(client);
// JACK process-callback trampoline: forwards the frame count to
// RtApiJack::callbackEvent(); a false return maps to a nonzero JACK return
// value (error).  (Closing lines fall in a gap of this listing.)
1728 int jackCallbackHandler( jack_nframes_t nframes, void *infoPointer )
1730 CallbackInfo *info = (CallbackInfo *) infoPointer;
1732 RtApiJack *object = (RtApiJack *) info->object;
1733 if ( object->callbackEvent( (unsigned long) nframes ) == false ) return 1;
// JACK shutdown callback: invoked when the server deactivates this client,
// either because we asked (stopStream) or because the server is going away.
1738 void jackShutdown( void *infoPointer )
1740 CallbackInfo *info = (CallbackInfo *) infoPointer;
1741 RtApiJack *object = (RtApiJack *) info->object;
1743 // Check current stream state. If stopped, then we'll assume this
1744 // was called as a result of a call to RtApiJack::stopStream (the
1745 // deactivation of a client handle causes this function to be called).
1746 // If not, we'll assume the Jack server is shutting down or some
1747 // other problem occurred and we should close the stream.
1748 if ( object->isStreamRunning() == false ) return;
1750 object->closeStream();
1751 std::cerr << "\nRtApiJack: the Jack server is shutting down this client ... stream stopped and closed!!\n" << std::endl;
// JACK xrun callback: records an over/underrun flag for each active
// direction so callbackEvent() can report it to the user callback.
// NOTE(review): probeDeviceOpen() registers this callback with '&handle'
// — the address of a *local* JackHandle* variable — so infoPointer is
// really a JackHandle** into a stack frame that is dead by the time an
// xrun occurs; the plain JackHandle* cast below then reads stale memory.
// The registration site should pass a stable JackHandle* (later RtAudio
// releases fix this).  Not changed here because the fix belongs at the
// registration site.
1754 int jackXrun( void *infoPointer )
1756 JackHandle *handle = (JackHandle *) infoPointer;
1758 if ( handle->ports[0] ) handle->xrun[0] = true;
1759 if ( handle->ports[1] ) handle->xrun[1] = true;
// Open (or extend to duplex) a JACK stream for the given "device" (a JACK
// client-name prefix).  Connects to the JACK server, validates channel
// count and sample rate against the server, allocates the JackHandle plus
// user/device buffers and port arrays, then registers callbacks and ports.
// (Interleaved lines — braces, FAILURE returns, free( ports ) calls — fall
// in gaps of this listing.)
1764 bool RtApiJack :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
1765 unsigned int firstChannel, unsigned int sampleRate,
1766 RtAudioFormat format, unsigned int *bufferSize,
1767 RtAudio::StreamOptions *options )
1769 JackHandle *handle = (JackHandle *) stream_.apiHandle;
1771 // Look for jack server and try to become a client (only do once per stream).
1772 jack_client_t *client = 0;
1773 if ( mode == OUTPUT || ( mode == INPUT && stream_.mode != OUTPUT ) ) {
1774 if ( options && !options->streamName.empty() )
1775 client = jack_client_new( options->streamName.c_str() );
1777 client = jack_client_new( "RtApiJack" );
1778 if ( client == 0 ) {
1779 errorText_ = "RtApiJack::probeDeviceOpen: Jack server not found or connection error!";
1780 error( RtError::WARNING );
1785 // The handle must have been created on an earlier pass.
1786 client = handle->client;
// Resolve the device index to a JACK client-name prefix, mirroring the
// enumeration order used by getDeviceCount()/getDeviceInfo().
1790 std::string port, previousPort, deviceName;
1791 unsigned int nPorts = 0, nDevices = 0;
1792 ports = jack_get_ports( client, NULL, NULL, 0 );
1794 // Parse the port names up to the first colon (:).
1795 unsigned int iColon = 0;
1797 port = (char *) ports[ nPorts ];
1798 iColon = port.find(":");
1799 if ( iColon != std::string::npos ) {
1800 port = port.substr( 0, iColon );
1801 if ( port != previousPort ) {
1802 if ( nDevices == device ) deviceName = port;
1804 previousPort = port;
1807 } while ( ports[++nPorts] );
1811 if ( device >= nDevices ) {
1812 errorText_ = "RtApiJack::probeDeviceOpen: device ID is invalid!";
1816 // Count the available ports containing the client name as device
1817 // channels. Jack "input ports" equal RtAudio output channels.
1818 unsigned int nChannels = 0;
1819 unsigned long flag = JackPortIsInput;
1820 if ( mode == INPUT ) flag = JackPortIsOutput;
1821 ports = jack_get_ports( client, deviceName.c_str(), NULL, flag );
1823 while ( ports[ nChannels ] ) nChannels++;
1827 // Compare the jack ports for specified client to the requested number of channels.
1828 if ( nChannels < (channels + firstChannel) ) {
1829 errorStream_ << "RtApiJack::probeDeviceOpen: requested number of channels (" << channels << ") + offset (" << firstChannel << ") not found for specified device (" << device << ":" << deviceName << ").";
1830 errorText_ = errorStream_.str();
// The stream must run at the server's rate; RtAudio does not resample.
1834 // Check the jack server sample rate.
1835 unsigned int jackRate = jack_get_sample_rate( client );
1836 if ( sampleRate != jackRate ) {
1837 jack_client_close( client );
1838 errorStream_ << "RtApiJack::probeDeviceOpen: the requested sample rate (" << sampleRate << ") is different than the JACK server rate (" << jackRate << ").";
1839 errorText_ = errorStream_.str();
1842 stream_.sampleRate = jackRate;
1844 // Get the latency of the JACK port.
1845 ports = jack_get_ports( client, deviceName.c_str(), NULL, flag );
1846 if ( ports[ firstChannel ] )
1847 stream_.latency[mode] = jack_port_get_latency( jack_port_by_name( client, ports[ firstChannel ] ) );
1850 // The jack server always uses 32-bit floating-point data.
1851 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
1852 stream_.userFormat = format;
1854 if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;
1855 else stream_.userInterleaved = true;
1857 // Jack always uses non-interleaved buffers.
1858 stream_.deviceInterleaved[mode] = false;
1860 // Jack always provides host byte-ordered data.
1861 stream_.doByteSwap[mode] = false;
1863 // Get the buffer size. The buffer size and number of buffers
1864 // (periods) is set when the jack server is started.
1865 stream_.bufferSize = (int) jack_get_buffer_size( client );
1866 *bufferSize = stream_.bufferSize;
1868 stream_.nDeviceChannels[mode] = channels;
1869 stream_.nUserChannels[mode] = channels;
// Conversion is needed only for format or interleaving mismatches
// (user and device channel counts always match for JACK).
1871 // Set flags for buffer conversion.
1872 stream_.doConvertBuffer[mode] = false;
1873 if ( stream_.userFormat != stream_.deviceFormat[mode] )
1874 stream_.doConvertBuffer[mode] = true;
1875 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
1876 stream_.nUserChannels[mode] > 1 )
1877 stream_.doConvertBuffer[mode] = true;
1879 // Allocate our JackHandle structure for the stream.
1880 if ( handle == 0 ) {
1882 handle = new JackHandle;
1884 catch ( std::bad_alloc& ) {
1885 errorText_ = "RtApiJack::probeDeviceOpen: error allocating JackHandle memory.";
1889 if ( pthread_cond_init(&handle->condition, NULL) ) {
1890 errorText_ = "RtApiJack::probeDeviceOpen: error initializing pthread condition variable.";
1893 stream_.apiHandle = (void *) handle;
1894 handle->client = client;
1896 handle->deviceName[mode] = deviceName;
1898 // Allocate necessary internal buffers.
1899 unsigned long bufferBytes;
1900 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
1901 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
1902 if ( stream_.userBuffer[mode] == NULL ) {
1903 errorText_ = "RtApiJack::probeDeviceOpen: error allocating user buffer memory.";
1907 if ( stream_.doConvertBuffer[mode] ) {
// Reuse an existing (output) device buffer for duplex input when it is
// already large enough.
1909 bool makeBuffer = true;
1910 if ( mode == OUTPUT )
1911 bufferBytes = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
1912 else { // mode == INPUT
1913 bufferBytes = stream_.nDeviceChannels[1] * formatBytes( stream_.deviceFormat[1] );
1914 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
1915 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
1916 if ( bufferBytes < bytesOut ) makeBuffer = false;
1921 bufferBytes *= *bufferSize;
1922 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
1923 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
1924 if ( stream_.deviceBuffer == NULL ) {
1925 errorText_ = "RtApiJack::probeDeviceOpen: error allocating device buffer memory.";
1931 // Allocate memory for the Jack ports (channels) identifiers.
1932 handle->ports[mode] = (jack_port_t **) malloc ( sizeof (jack_port_t *) * channels );
1933 if ( handle->ports[mode] == NULL ) {
1934 errorText_ = "RtApiJack::probeDeviceOpen: error allocating port memory.";
1938 stream_.device[mode] = device;
1939 stream_.channelOffset[mode] = firstChannel;
1940 stream_.state = STREAM_STOPPED;
1941 stream_.callbackInfo.object = (void *) this;
1943 if ( stream_.mode == OUTPUT && mode == INPUT )
1944 // We had already set up the stream for output.
1945 stream_.mode = DUPLEX;
1947 stream_.mode = mode;
1948 jack_set_process_callback( handle->client, jackCallbackHandler, (void *) &stream_.callbackInfo );
// NOTE(review): this registers the ADDRESS of the local pointer variable
// 'handle' (a JackHandle**) as the xrun-callback argument.  That address
// is a stack slot that dangles once this function returns, and jackXrun()
// casts the argument as a plain JackHandle* — so the callback reads stale
// stack memory.  It should pass a stable pointer (e.g. 'handle' itself, or
// &stream_.apiHandle with a matching double-dereference in jackXrun);
// later RtAudio releases fix this.  Not changed here because surrounding
// lines are missing from this listing.
1949 jack_set_xrun_callback( handle->client, jackXrun, (void *) &handle );
1950 jack_on_shutdown( handle->client, jackShutdown, (void *) &stream_.callbackInfo );
1953 // Register our ports.
1955 if ( mode == OUTPUT ) {
1956 for ( unsigned int i=0; i<stream_.nUserChannels[0]; i++ ) {
1957 snprintf( label, 64, "outport %d", i );
1958 handle->ports[0][i] = jack_port_register( handle->client, (const char *)label,
1959 JACK_DEFAULT_AUDIO_TYPE, JackPortIsOutput, 0 );
1963 for ( unsigned int i=0; i<stream_.nUserChannels[1]; i++ ) {
1964 snprintf( label, 64, "inport %d", i );
1965 handle->ports[1][i] = jack_port_register( handle->client, (const char *)label,
1966 JACK_DEFAULT_AUDIO_TYPE, JackPortIsInput, 0 );
1970 // Setup the buffer conversion information structure. We don't use
1971 // buffers to do channel offsets, so we override that parameter
1973 if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, 0 );
// Error-unwind path: destroy the condition variable, close the client,
// and free port arrays plus buffers allocated above.
1979 pthread_cond_destroy( &handle->condition );
1980 jack_client_close( handle->client );
1982 if ( handle->ports[0] ) free( handle->ports[0] );
1983 if ( handle->ports[1] ) free( handle->ports[1] );
1986 stream_.apiHandle = 0;
1989 for ( int i=0; i<2; i++ ) {
1990 if ( stream_.userBuffer[i] ) {
1991 free( stream_.userBuffer[i] );
1992 stream_.userBuffer[i] = 0;
1996 if ( stream_.deviceBuffer ) {
1997 free( stream_.deviceBuffer );
1998 stream_.deviceBuffer = 0;
// Close an open JACK stream: deactivate (if running) and close the
// JACK client, free the port-identifier arrays and the drain condition
// variable, then release the user/device conversion buffers and mark
// the stream CLOSED.  Issues only a WARNING if no stream is open.
2004 void RtApiJack :: closeStream( void )
2006 if ( stream_.state == STREAM_CLOSED ) {
2007 errorText_ = "RtApiJack::closeStream(): no open stream to close!";
2008 error( RtError::WARNING );
2012 JackHandle *handle = (JackHandle *) stream_.apiHandle;
// Only deactivate a client that is actually running; jack_client_close
// also disconnects any remaining ports.
2015 if ( stream_.state == STREAM_RUNNING )
2016 jack_deactivate( handle->client );
2018 jack_client_close( handle->client );
// ports[0]/ports[1] are the malloc'ed output/input jack_port_t* arrays
// allocated in probeDeviceOpen().
2022 if ( handle->ports[0] ) free( handle->ports[0] );
2023 if ( handle->ports[1] ) free( handle->ports[1] );
2024 pthread_cond_destroy( &handle->condition );
2026 stream_.apiHandle = 0;
// Release the per-mode (0 = output, 1 = input) user buffers ...
2029 for ( int i=0; i<2; i++ ) {
2030 if ( stream_.userBuffer[i] ) {
2031 free( stream_.userBuffer[i] );
2032 stream_.userBuffer[i] = 0;
// ... and the shared format/interleave conversion buffer.
2036 if ( stream_.deviceBuffer ) {
2037 free( stream_.deviceBuffer );
2038 stream_.deviceBuffer = 0;
2041 stream_.mode = UNINITIALIZED;
2042 stream_.state = STREAM_CLOSED;
// Start the stream: activate the JACK client, then connect each of our
// registered ports to the device's ports, honoring the stored channel
// offsets, and mark the stream RUNNING.  A non-zero 'result' at the
// end triggers a SYSTEM_ERROR report.
2045 void RtApiJack :: startStream( void )
2048 if ( stream_.state == STREAM_RUNNING ) {
2049 errorText_ = "RtApiJack::startStream(): the stream is already running!";
2050 error( RtError::WARNING );
2054 MUTEX_LOCK(&stream_.mutex);
2056 JackHandle *handle = (JackHandle *) stream_.apiHandle;
2057 int result = jack_activate( handle->client );
2059 errorText_ = "RtApiJack::startStream(): unable to activate JACK client!";
2065 // Get the list of available ports.
// For playback our ports are sources, so we look up the device's
// *input* (JackPortIsInput) ports to connect into.
2066 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
2068 ports = jack_get_ports( handle->client, handle->deviceName[0].c_str(), NULL, JackPortIsInput);
2069 if ( ports == NULL) {
2070 errorText_ = "RtApiJack::startStream(): error determining available JACK input ports!";
2074 // Now make the port connections. Since RtAudio wasn't designed to
2075 // allow the user to select particular channels of a device, we'll
2076 // just open the first "nChannels" ports with offset.
2077 for ( unsigned int i=0; i<stream_.nUserChannels[0]; i++ ) {
2079 if ( ports[ stream_.channelOffset[0] + i ] )
2080 result = jack_connect( handle->client, jack_port_name( handle->ports[0][i] ), ports[ stream_.channelOffset[0] + i ] );
2083 errorText_ = "RtApiJack::startStream(): error connecting output ports!";
// For capture the device's ports are sources (JackPortIsOutput),
// connected into our registered input ports.
2090 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
2092 ports = jack_get_ports( handle->client, handle->deviceName[1].c_str(), NULL, JackPortIsOutput );
2093 if ( ports == NULL) {
2094 errorText_ = "RtApiJack::startStream(): error determining available JACK output ports!";
2098 // Now make the port connections. See note above.
2099 for ( unsigned int i=0; i<stream_.nUserChannels[1]; i++ ) {
2101 if ( ports[ stream_.channelOffset[1] + i ] )
2102 result = jack_connect( handle->client, ports[ stream_.channelOffset[1] + i ], jack_port_name( handle->ports[1][i] ) );
2105 errorText_ = "RtApiJack::startStream(): error connecting input ports!";
// Reset drain bookkeeping so the process callback starts clean.
2112 handle->drainCounter = 0;
2113 handle->internalDrain = false;
2114 stream_.state = STREAM_RUNNING;
2117 MUTEX_UNLOCK(&stream_.mutex);
2119 if ( result == 0 ) return;
2120 error( RtError::SYSTEM_ERROR );
// Stop the stream.  For an output/duplex stream with no drain already
// in progress, request one (drainCounter = 1) and block on the
// condition variable until the process callback signals that the last
// buffer has played; then deactivate the client and mark STOPPED.
2123 void RtApiJack :: stopStream( void )
2126 if ( stream_.state == STREAM_STOPPED ) {
2127 errorText_ = "RtApiJack::stopStream(): the stream is already stopped!";
2128 error( RtError::WARNING );
2132 MUTEX_LOCK( &stream_.mutex );
2134 JackHandle *handle = (JackHandle *) stream_.apiHandle;
2135 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
// drainCounter == 0 means no drain is pending; the callback signals
// the condition once its drain count passes 3 (see callbackEvent).
2137 if ( handle->drainCounter == 0 ) {
2138 handle->drainCounter = 1;
2139 pthread_cond_wait( &handle->condition, &stream_.mutex ); // block until signaled
2143 jack_deactivate( handle->client );
2144 stream_.state = STREAM_STOPPED;
2146 MUTEX_UNLOCK( &stream_.mutex );
// Abort the stream: pre-set drainCounter so the process callback
// immediately writes silence instead of waiting for a full drain.
// NOTE(review): the remainder of the shutdown (not visible in this
// listing) presumably delegates to stopStream() -- confirm.
2149 void RtApiJack :: abortStream( void )
2152 if ( stream_.state == STREAM_STOPPED ) {
2153 errorText_ = "RtApiJack::abortStream(): the stream is already stopped!";
2154 error( RtError::WARNING );
2158 JackHandle *handle = (JackHandle *) stream_.apiHandle;
2159 handle->drainCounter = 1;
// JACK process-thread hook for this stream.  Invokes the user callback
// (unless draining), moves audio between the user/device buffers and
// the per-channel JACK port buffers (with optional format/interleave
// conversion), and advances the stream time.  Returns SUCCESS
// immediately when the stream is merely stopped.
2164 bool RtApiJack :: callbackEvent( unsigned long nframes )
2166 if ( stream_.state == STREAM_STOPPED ) return SUCCESS;
2167 if ( stream_.state == STREAM_CLOSED ) {
// FIX: these two messages previously reported "RtApiCore::..." -- a
// copy/paste from the CoreAudio backend; name the Jack class instead.
2168 errorText_ = "RtApiJack::callbackEvent(): the stream is closed ... this shouldn't happen!";
2169 error( RtError::WARNING );
2172 if ( stream_.bufferSize != nframes ) {
2173 errorText_ = "RtApiJack::callbackEvent(): the JACK buffer size has changed ... cannot process!";
2174 error( RtError::WARNING );
2178 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
2179 JackHandle *handle = (JackHandle *) stream_.apiHandle;
2181 // Check if we were draining the stream and signal is finished.
2182 if ( handle->drainCounter > 3 ) {
// Only a user-initiated stopStream() is blocked on the condition;
// internal drains (callback-requested) have no waiter to signal.
2183 if ( handle->internalDrain == false )
2184 pthread_cond_signal( &handle->condition );
2190 MUTEX_LOCK( &stream_.mutex );
2192 // Invoke user callback first, to get fresh output data.
2193 if ( handle->drainCounter == 0 ) {
2194 RtAudioCallback callback = (RtAudioCallback) info->callback;
2195 double streamTime = getStreamTime();
2196 RtAudioStreamStatus status = 0;
// Report and clear any xrun flags latched by the jackXrun handler.
2197 if ( stream_.mode != INPUT && handle->xrun[0] == true ) {
2198 status |= RTAUDIO_OUTPUT_UNDERFLOW;
2199 handle->xrun[0] = false;
2201 if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {
2202 status |= RTAUDIO_INPUT_OVERFLOW;
2203 handle->xrun[1] = false;
// Callback return value: 0 = keep running, 1 = drain then stop,
// 2 = abort immediately.
2205 handle->drainCounter = callback( stream_.userBuffer[0], stream_.userBuffer[1],
2206 stream_.bufferSize, streamTime, status, info->userData );
2207 if ( handle->drainCounter == 2 ) {
2208 MUTEX_UNLOCK( &stream_.mutex );
2212 else if ( handle->drainCounter == 1 )
2213 handle->internalDrain = true;
2216 jack_default_audio_sample_t *jackbuffer;
2217 unsigned long bufferBytes = nframes * sizeof( jack_default_audio_sample_t );
2218 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
2220 if ( handle->drainCounter > 0 ) { // write zeros to the output stream
2222 for ( unsigned int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
2223 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[0][i], (jack_nframes_t) nframes );
2224 memset( jackbuffer, 0, bufferBytes );
2228 else if ( stream_.doConvertBuffer[0] ) {
// Convert user format/interleaving into the device buffer, then copy
// each channel's contiguous slice into its JACK port buffer.
2230 convertBuffer( stream_.deviceBuffer, stream_.userBuffer[0], stream_.convertInfo[0] );
2232 for ( unsigned int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
2233 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[0][i], (jack_nframes_t) nframes );
2234 memcpy( jackbuffer, &stream_.deviceBuffer[i*bufferBytes], bufferBytes );
2237 else { // no buffer conversion
2238 for ( unsigned int i=0; i<stream_.nUserChannels[0]; i++ ) {
2239 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[0][i], (jack_nframes_t) nframes );
2240 memcpy( jackbuffer, &stream_.userBuffer[0][i*bufferBytes], bufferBytes );
// A drain in progress is advanced once per process cycle; stopStream's
// waiter is signaled once the counter passes 3 (see top of function).
2244 if ( handle->drainCounter ) {
2245 handle->drainCounter++;
2250 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
2252 if ( stream_.doConvertBuffer[1] ) {
2253 for ( unsigned int i=0; i<stream_.nDeviceChannels[1]; i++ ) {
2254 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[1][i], (jack_nframes_t) nframes );
2255 memcpy( &stream_.deviceBuffer[i*bufferBytes], jackbuffer, bufferBytes );
2257 convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
2259 else { // no buffer conversion
2260 for ( unsigned int i=0; i<stream_.nUserChannels[1]; i++ ) {
2261 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[1][i], (jack_nframes_t) nframes );
2262 memcpy( &stream_.userBuffer[1][i*bufferBytes], jackbuffer, bufferBytes );
2268 MUTEX_UNLOCK(&stream_.mutex);
2270 RtApi::tickStreamTime();
2273 //******************** End of __UNIX_JACK__ *********************//
2276 #if defined(__WINDOWS_ASIO__) // ASIO API on Windows
2278 // The ASIO API is designed around a callback scheme, so this
2279 // implementation is similar to that used for OS-X CoreAudio and Linux
2280 // Jack. The primary constraint with ASIO is that it only allows
2281 // access to a single driver at a time. Thus, it is not possible to
2282 // have more than one simultaneous RtAudio stream.
2284 // This implementation also requires a number of external ASIO files
2285 // and a few global variables. The ASIO callback scheme does not
2286 // allow for the passing of user data, so we must create a global
2287 // pointer to our callbackInfo structure.
2289 // On unix systems, we make use of a pthread condition variable.
2290 // Since there is no equivalent in Windows, I hacked something based
2291 // on information found in
2292 // http://www.cs.wustl.edu/~schmidt/win32-cv-1.html.
2294 #include "asiosys.h"
2296 #include "iasiothiscallresolver.h"
2297 #include "asiodrivers.h"
2300 AsioDrivers drivers;
2301 ASIOCallbacks asioCallbacks;
2302 ASIODriverInfo driverInfo;
2303 CallbackInfo *asioCallbackInfo;
2307 int drainCounter; // Tracks callback counts when draining
2308 bool internalDrain; // Indicates if stop is initiated from callback or not.
2309 ASIOBufferInfo *bufferInfos;
2313 :drainCounter(0), internalDrain(false), bufferInfos(0) {}
2316 // Function declarations (definitions at end of section)
2317 static const char* getAsioErrorString( ASIOError result );
2318 void sampleRateChanged( ASIOSampleRate sRate );
2319 long asioMessages( long selector, long value, void* message, double* opt );
// Construct the ASIO host API object.  ASIO driver COM objects require
// a single-threaded apartment; coInitialized_ records whether our
// CoInitialize succeeded so the destructor can balance it with
// CoUninitialize.
2321 RtApiAsio :: RtApiAsio()
2323 // ASIO cannot run on a multi-threaded appartment. You can call
2324 // CoInitialize beforehand, but it must be for appartment threading
2325 // (in which case, CoInitilialize will return S_FALSE here).
2326 coInitialized_ = false;
2327 HRESULT hr = CoInitialize( NULL );
// Warn (but continue) when COM was already initialized for MTA.
2329 errorText_ = "RtApiAsio::ASIO requires a single-threaded appartment. Call CoInitializeEx(0,COINIT_APARTMENTTHREADED)";
2330 error( RtError::WARNING );
2332 coInitialized_ = true;
// Start with no driver loaded; sysRef is the window handle some ASIO
// drivers need for their control panel.
2334 drivers.removeCurrentDriver();
2335 driverInfo.asioVersion = 2;
2337 // See note in DirectSound implementation about GetDesktopWindow().
2338 driverInfo.sysRef = GetForegroundWindow();
// Destructor: close any open stream, then balance the CoInitialize
// performed in the constructor (only if it succeeded there).
2341 RtApiAsio :: ~RtApiAsio()
2343 if ( stream_.state != STREAM_CLOSED ) closeStream();
2344 if ( coInitialized_ ) CoUninitialize();
// Each installed ASIO driver counts as one device.
2347 unsigned int RtApiAsio :: getDeviceCount( void )
2349 return (unsigned int) drivers.asioGetNumDev();
// Probe one ASIO driver ("device") for its capabilities: channel
// counts, supported sample rates, and native data format.  Loads the
// driver, queries it, and unloads it again.  While a stream is open no
// other driver may be loaded, so saved results from saveDeviceInfo()
// are returned instead.  Failures issue a WARNING and leave
// info.probed == false.
2352 RtAudio::DeviceInfo RtApiAsio :: getDeviceInfo( unsigned int device )
2354 RtAudio::DeviceInfo info;
2355 info.probed = false;
2358 unsigned int nDevices = getDeviceCount();
2359 if ( nDevices == 0 ) {
2360 errorText_ = "RtApiAsio::getDeviceInfo: no devices found!";
2361 error( RtError::INVALID_USE );
2364 if ( device >= nDevices ) {
2365 errorText_ = "RtApiAsio::getDeviceInfo: device ID is invalid!";
2366 error( RtError::INVALID_USE );
2369 // If a stream is already open, we cannot probe other devices. Thus, use the saved results.
2370 if ( stream_.state != STREAM_CLOSED ) {
2371 if ( device >= devices_.size() ) {
2372 errorText_ = "RtApiAsio::getDeviceInfo: device ID was not present before stream was opened.";
2373 error( RtError::WARNING );
2376 return devices_[ device ];
2379 char driverName[32];
2380 ASIOError result = drivers.asioGetDriverName( (int) device, driverName, 32 );
2381 if ( result != ASE_OK ) {
2382 errorStream_ << "RtApiAsio::getDeviceInfo: unable to get driver name (" << getAsioErrorString( result ) << ").";
2383 errorText_ = errorStream_.str();
2384 error( RtError::WARNING );
2388 info.name = driverName;
2390 if ( !drivers.loadDriver( driverName ) ) {
2391 errorStream_ << "RtApiAsio::getDeviceInfo: unable to load driver (" << driverName << ").";
2392 errorText_ = errorStream_.str();
2393 error( RtError::WARNING );
2397 result = ASIOInit( &driverInfo );
2398 if ( result != ASE_OK ) {
2399 errorStream_ << "RtApiAsio::getDeviceInfo: error (" << getAsioErrorString( result ) << ") initializing driver (" << driverName << ").";
2400 errorText_ = errorStream_.str();
2401 error( RtError::WARNING );
2405 // Determine the device channel information.
2406 long inputChannels, outputChannels;
2407 result = ASIOGetChannels( &inputChannels, &outputChannels );
2408 if ( result != ASE_OK ) {
2409 drivers.removeCurrentDriver();
2410 errorStream_ << "RtApiAsio::getDeviceInfo: error (" << getAsioErrorString( result ) << ") getting channel count (" << driverName << ").";
2411 errorText_ = errorStream_.str();
2412 error( RtError::WARNING );
2416 info.outputChannels = outputChannels;
2417 info.inputChannels = inputChannels;
// Duplex capability is the smaller of the two directions.
2418 if ( info.outputChannels > 0 && info.inputChannels > 0 )
2419 info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
2421 // Determine the supported sample rates.
2422 info.sampleRates.clear();
2423 for ( unsigned int i=0; i<MAX_SAMPLE_RATES; i++ ) {
2424 result = ASIOCanSampleRate( (ASIOSampleRate) SAMPLE_RATES[i] );
2425 if ( result == ASE_OK )
2426 info.sampleRates.push_back( SAMPLE_RATES[i] );
2429 // Determine supported data types ... just check first channel and assume rest are the same.
2430 ASIOChannelInfo channelInfo;
2431 channelInfo.channel = 0;
2432 channelInfo.isInput = true;
2433 if ( info.inputChannels <= 0 ) channelInfo.isInput = false;
2434 result = ASIOGetChannelInfo( &channelInfo );
2435 if ( result != ASE_OK ) {
2436 drivers.removeCurrentDriver();
2437 errorStream_ << "RtApiAsio::getDeviceInfo: error (" << getAsioErrorString( result ) << ") getting driver channel info (" << driverName << ").";
2438 errorText_ = errorStream_.str();
2439 error( RtError::WARNING );
// Map the ASIO sample type to RtAudio format flags; MSB and LSB
// variants map to the same flag (byte order is handled at open time).
2443 info.nativeFormats = 0;
2444 if ( channelInfo.type == ASIOSTInt16MSB || channelInfo.type == ASIOSTInt16LSB )
2445 info.nativeFormats |= RTAUDIO_SINT16;
2446 else if ( channelInfo.type == ASIOSTInt32MSB || channelInfo.type == ASIOSTInt32LSB )
2447 info.nativeFormats |= RTAUDIO_SINT32;
2448 else if ( channelInfo.type == ASIOSTFloat32MSB || channelInfo.type == ASIOSTFloat32LSB )
2449 info.nativeFormats |= RTAUDIO_FLOAT32;
2450 else if ( channelInfo.type == ASIOSTFloat64MSB || channelInfo.type == ASIOSTFloat64LSB )
2451 info.nativeFormats |= RTAUDIO_FLOAT64;
2453 if ( getDefaultOutputDevice() == device )
2454 info.isDefaultOutput = true;
2455 if ( getDefaultInputDevice() == device )
2456 info.isDefaultInput = true;
// Unload the driver again; ASIO permits only one loaded at a time.
2459 drivers.removeCurrentDriver();
// Global ASIO buffer-switch callback.  The ASIO callback scheme cannot
// carry user data, so the stream object is recovered through the
// global asioCallbackInfo pointer; 'processNow' is unused here.
2463 void bufferSwitch( long index, ASIOBool processNow )
2465 RtApiAsio *object = (RtApiAsio *) asioCallbackInfo->object;
2466 object->callbackEvent( index );
// Snapshot info for every device so getDeviceInfo() can still answer
// while a stream is open (ASIO allows only one loaded driver at a
// time, so live probing is impossible then).
2469 void RtApiAsio :: saveDeviceInfo( void )
2473 unsigned int nDevices = getDeviceCount();
2474 devices_.resize( nDevices );
2475 for ( unsigned int i=0; i<nDevices; i++ )
2476 devices_[i] = getDeviceInfo( i );
// Open (one direction of) an ASIO stream: load/init the driver, verify
// channel count and sample rate, select a buffer size within the
// driver's limits, create the ASIO buffers, and allocate the user and
// conversion buffers.  For a duplex stream the same driver must serve
// both directions and both opens must agree on buffer size.  The tail
// of the function is the shared error-cleanup path.
2479 bool RtApiAsio :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
2480 unsigned int firstChannel, unsigned int sampleRate,
2481 RtAudioFormat format, unsigned int *bufferSize,
2482 RtAudio::StreamOptions *options )
2484 // For ASIO, a duplex stream MUST use the same driver.
2485 if ( mode == INPUT && stream_.mode == OUTPUT && stream_.device[0] != device ) {
2486 errorText_ = "RtApiAsio::probeDeviceOpen: an ASIO duplex stream must use the same device for input and output!";
2490 char driverName[32];
2491 ASIOError result = drivers.asioGetDriverName( (int) device, driverName, 32 );
2492 if ( result != ASE_OK ) {
2493 errorStream_ << "RtApiAsio::probeDeviceOpen: unable to get driver name (" << getAsioErrorString( result ) << ").";
2494 errorText_ = errorStream_.str();
2498 // The getDeviceInfo() function will not work when a stream is open
2499 // because ASIO does not allow multiple devices to run at the same
2500 // time. Thus, we'll probe the system before opening a stream and
2501 // save the results for use by getDeviceInfo().
2502 this->saveDeviceInfo();
2504 // Only load the driver once for duplex stream.
2505 if ( mode != INPUT || stream_.mode != OUTPUT ) {
2506 if ( !drivers.loadDriver( driverName ) ) {
2507 errorStream_ << "RtApiAsio::probeDeviceOpen: unable to load driver (" << driverName << ").";
2508 errorText_ = errorStream_.str();
2512 result = ASIOInit( &driverInfo );
2513 if ( result != ASE_OK ) {
2514 errorStream_ << "RtApiAsio::probeDeviceOpen: error (" << getAsioErrorString( result ) << ") initializing driver (" << driverName << ").";
2515 errorText_ = errorStream_.str();
2520 // Check the device channel count.
2521 long inputChannels, outputChannels;
2522 result = ASIOGetChannels( &inputChannels, &outputChannels );
2523 if ( result != ASE_OK ) {
2524 drivers.removeCurrentDriver();
2525 errorStream_ << "RtApiAsio::probeDeviceOpen: error (" << getAsioErrorString( result ) << ") getting channel count (" << driverName << ").";
2526 errorText_ = errorStream_.str();
2530 if ( ( mode == OUTPUT && (channels+firstChannel) > (unsigned int) outputChannels) ||
2531 ( mode == INPUT && (channels+firstChannel) > (unsigned int) inputChannels) ) {
2532 drivers.removeCurrentDriver();
2533 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") does not support requested channel count (" << channels << ") + offset (" << firstChannel << ").";
2534 errorText_ = errorStream_.str();
2537 stream_.nDeviceChannels[mode] = channels;
2538 stream_.nUserChannels[mode] = channels;
2539 stream_.channelOffset[mode] = firstChannel;
2541 // Verify the sample rate is supported.
2542 result = ASIOCanSampleRate( (ASIOSampleRate) sampleRate );
2543 if ( result != ASE_OK ) {
2544 drivers.removeCurrentDriver();
2545 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") does not support requested sample rate (" << sampleRate << ").";
2546 errorText_ = errorStream_.str();
2550 // Get the current sample rate
2551 ASIOSampleRate currentRate;
// FIX: the argument had been corrupted to the HTML entity '¤tRate'
// ("&curren" mangled to '¤'); restore the address-of expression.
2552 result = ASIOGetSampleRate( &currentRate );
2553 if ( result != ASE_OK ) {
2554 drivers.removeCurrentDriver();
2555 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error getting sample rate.";
2556 errorText_ = errorStream_.str();
2560 // Set the sample rate only if necessary
2561 if ( currentRate != sampleRate ) {
2562 result = ASIOSetSampleRate( (ASIOSampleRate) sampleRate );
2563 if ( result != ASE_OK ) {
2564 drivers.removeCurrentDriver();
2565 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error setting sample rate (" << sampleRate << ").";
2566 errorText_ = errorStream_.str();
2571 // Determine the driver data type.
2572 ASIOChannelInfo channelInfo;
2573 channelInfo.channel = 0;
2574 if ( mode == OUTPUT ) channelInfo.isInput = false;
2575 else channelInfo.isInput = true;
2576 result = ASIOGetChannelInfo( &channelInfo );
2577 if ( result != ASE_OK ) {
2578 drivers.removeCurrentDriver();
2579 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") getting data format.";
2580 errorText_ = errorStream_.str();
2584 // Assuming WINDOWS host is always little-endian.
2585 stream_.doByteSwap[mode] = false;
2586 stream_.userFormat = format;
2587 stream_.deviceFormat[mode] = 0;
2588 if ( channelInfo.type == ASIOSTInt16MSB || channelInfo.type == ASIOSTInt16LSB ) {
2589 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
2590 if ( channelInfo.type == ASIOSTInt16MSB ) stream_.doByteSwap[mode] = true;
2592 else if ( channelInfo.type == ASIOSTInt32MSB || channelInfo.type == ASIOSTInt32LSB ) {
2593 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
2594 if ( channelInfo.type == ASIOSTInt32MSB ) stream_.doByteSwap[mode] = true;
2596 else if ( channelInfo.type == ASIOSTFloat32MSB || channelInfo.type == ASIOSTFloat32LSB ) {
2597 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
2598 if ( channelInfo.type == ASIOSTFloat32MSB ) stream_.doByteSwap[mode] = true;
2600 else if ( channelInfo.type == ASIOSTFloat64MSB || channelInfo.type == ASIOSTFloat64LSB ) {
2601 stream_.deviceFormat[mode] = RTAUDIO_FLOAT64;
2602 if ( channelInfo.type == ASIOSTFloat64MSB ) stream_.doByteSwap[mode] = true;
2605 if ( stream_.deviceFormat[mode] == 0 ) {
2606 drivers.removeCurrentDriver();
2607 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") data format not supported by RtAudio.";
2608 errorText_ = errorStream_.str();
2612 // Set the buffer size. For a duplex stream, this will end up
2613 // setting the buffer size based on the input constraints, which
2615 long minSize, maxSize, preferSize, granularity;
2616 result = ASIOGetBufferSize( &minSize, &maxSize, &preferSize, &granularity );
2617 if ( result != ASE_OK ) {
2618 drivers.removeCurrentDriver();
2619 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") getting buffer size.";
2620 errorText_ = errorStream_.str();
// Clamp the requested size to [minSize, maxSize]; granularity == -1
// means only powers of two are allowed, any other non-zero value means
// the size must be a multiple of it.
2624 if ( *bufferSize < (unsigned int) minSize ) *bufferSize = (unsigned int) minSize;
2625 else if ( *bufferSize > (unsigned int) maxSize ) *bufferSize = (unsigned int) maxSize;
2626 else if ( granularity == -1 ) {
2627 // Make sure bufferSize is a power of two.
2628 double power = std::log10( (double) *bufferSize ) / log10( 2.0 );
2629 *bufferSize = (int) pow( 2.0, floor(power+0.5) );
2630 if ( *bufferSize < (unsigned int) minSize ) *bufferSize = (unsigned int) minSize;
2631 else if ( *bufferSize > (unsigned int) maxSize ) *bufferSize = (unsigned int) maxSize;
2632 else *bufferSize = preferSize;
2634 else if ( granularity != 0 ) {
2635 // Set to an even multiple of granularity, rounding up.
2636 *bufferSize = (*bufferSize + granularity-1) / granularity * granularity;
2639 if ( mode == INPUT && stream_.mode == OUTPUT && stream_.bufferSize != *bufferSize ) {
2640 drivers.removeCurrentDriver();
2641 errorText_ = "RtApiAsio::probeDeviceOpen: input/output buffersize discrepancy!";
2645 stream_.bufferSize = *bufferSize;
2646 stream_.nBuffers = 2;
2648 if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;
2649 else stream_.userInterleaved = true;
2651 // ASIO always uses non-interleaved buffers.
2652 stream_.deviceInterleaved[mode] = false;
2654 // Allocate, if necessary, our AsioHandle structure for the stream.
2655 AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
2656 if ( handle == 0 ) {
2658 handle = new AsioHandle;
2660 catch ( std::bad_alloc& ) {
2661 //if ( handle == NULL ) {
2662 drivers.removeCurrentDriver();
2663 errorText_ = "RtApiAsio::probeDeviceOpen: error allocating AsioHandle memory.";
2666 handle->bufferInfos = 0;
2668 // Create a manual-reset event.
2669 handle->condition = CreateEvent( NULL, // no security
2670 TRUE, // manual-reset
2671 FALSE, // non-signaled initially
2673 stream_.apiHandle = (void *) handle;
2676 // Create the ASIO internal buffers. Since RtAudio sets up input
2677 // and output separately, we'll have to dispose of previously
2678 // created output buffers for a duplex stream.
2679 long inputLatency, outputLatency;
2680 if ( mode == INPUT && stream_.mode == OUTPUT ) {
2681 ASIODisposeBuffers();
2682 if ( handle->bufferInfos ) free( handle->bufferInfos );
2685 // Allocate, initialize, and save the bufferInfos in our stream callbackInfo structure.
2686 bool buffersAllocated = false;
2687 unsigned int i, nChannels = stream_.nDeviceChannels[0] + stream_.nDeviceChannels[1];
2688 handle->bufferInfos = (ASIOBufferInfo *) malloc( nChannels * sizeof(ASIOBufferInfo) );
2689 if ( handle->bufferInfos == NULL ) {
2690 errorStream_ << "RtApiAsio::probeDeviceOpen: error allocating bufferInfo memory for driver (" << driverName << ").";
2691 errorText_ = errorStream_.str();
// Output channels first, then input channels, each honoring its own
// channel offset; the driver fills in buffers[0]/[1] on create.
2695 ASIOBufferInfo *infos;
2696 infos = handle->bufferInfos;
2697 for ( i=0; i<stream_.nDeviceChannels[0]; i++, infos++ ) {
2698 infos->isInput = ASIOFalse;
2699 infos->channelNum = i + stream_.channelOffset[0];
2700 infos->buffers[0] = infos->buffers[1] = 0;
2702 for ( i=0; i<stream_.nDeviceChannels[1]; i++, infos++ ) {
2703 infos->isInput = ASIOTrue;
2704 infos->channelNum = i + stream_.channelOffset[1];
2705 infos->buffers[0] = infos->buffers[1] = 0;
2708 // Set up the ASIO callback structure and create the ASIO data buffers.
2709 asioCallbacks.bufferSwitch = &bufferSwitch;
2710 asioCallbacks.sampleRateDidChange = &sampleRateChanged;
2711 asioCallbacks.asioMessage = &asioMessages;
2712 asioCallbacks.bufferSwitchTimeInfo = NULL;
2713 result = ASIOCreateBuffers( handle->bufferInfos, nChannels, stream_.bufferSize, &asioCallbacks );
2714 if ( result != ASE_OK ) {
2715 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") creating buffers.";
2716 errorText_ = errorStream_.str();
2719 buffersAllocated = true;
2721 // Set flags for buffer conversion.
2722 stream_.doConvertBuffer[mode] = false;
2723 if ( stream_.userFormat != stream_.deviceFormat[mode] )
2724 stream_.doConvertBuffer[mode] = true;
2725 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
2726 stream_.nUserChannels[mode] > 1 )
2727 stream_.doConvertBuffer[mode] = true;
2729 // Allocate necessary internal buffers
2730 unsigned long bufferBytes;
2731 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
2732 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
2733 if ( stream_.userBuffer[mode] == NULL ) {
2734 errorText_ = "RtApiAsio::probeDeviceOpen: error allocating user buffer memory.";
2738 if ( stream_.doConvertBuffer[mode] ) {
// Reuse an existing (output) device buffer for duplex input when it is
// already at least as large as the input side needs.
2740 bool makeBuffer = true;
2741 bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
2742 if ( mode == INPUT ) {
2743 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
2744 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
2745 if ( bufferBytes <= bytesOut ) makeBuffer = false;
2750 bufferBytes *= *bufferSize;
2751 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
2752 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
2753 if ( stream_.deviceBuffer == NULL ) {
2754 errorText_ = "RtApiAsio::probeDeviceOpen: error allocating device buffer memory.";
2760 stream_.sampleRate = sampleRate;
2761 stream_.device[mode] = device;
2762 stream_.state = STREAM_STOPPED;
2763 asioCallbackInfo = &stream_.callbackInfo;
2764 stream_.callbackInfo.object = (void *) this;
2765 if ( stream_.mode == OUTPUT && mode == INPUT )
2766 // We had already set up an output stream.
2767 stream_.mode = DUPLEX;
2769 stream_.mode = mode;
2771 // Determine device latencies
2772 result = ASIOGetLatencies( &inputLatency, &outputLatency );
2773 if ( result != ASE_OK ) {
2774 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") getting latency.";
2775 errorText_ = errorStream_.str();
2776 error( RtError::WARNING); // warn but don't fail
2779 stream_.latency[0] = outputLatency;
2780 stream_.latency[1] = inputLatency;
2783 // Setup the buffer conversion information structure. We don't use
2784 // buffers to do channel offsets, so we override that parameter
2786 if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, 0 );
// Error cleanup: dispose driver buffers if created, unload the driver,
// and free everything allocated above before reporting failure.
2791 if ( buffersAllocated )
2792 ASIODisposeBuffers();
2793 drivers.removeCurrentDriver();
2796 CloseHandle( handle->condition );
2797 if ( handle->bufferInfos )
2798 free( handle->bufferInfos );
2800 stream_.apiHandle = 0;
2803 for ( int i=0; i<2; i++ ) {
2804 if ( stream_.userBuffer[i] ) {
2805 free( stream_.userBuffer[i] );
2806 stream_.userBuffer[i] = 0;
2810 if ( stream_.deviceBuffer ) {
2811 free( stream_.deviceBuffer );
2812 stream_.deviceBuffer = 0;
// Close an open ASIO stream: stop it if running, dispose the driver's
// buffers, unload the driver, release our handle resources (Windows
// event + bufferInfos), then free the user/device conversion buffers
// and mark the stream CLOSED.
2818 void RtApiAsio :: closeStream()
2820 if ( stream_.state == STREAM_CLOSED ) {
2821 errorText_ = "RtApiAsio::closeStream(): no open stream to close!";
2822 error( RtError::WARNING );
2826 if ( stream_.state == STREAM_RUNNING ) {
2827 stream_.state = STREAM_STOPPED;
2830 ASIODisposeBuffers();
2831 drivers.removeCurrentDriver();
2833 AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
2835 CloseHandle( handle->condition );
2836 if ( handle->bufferInfos )
2837 free( handle->bufferInfos );
2839 stream_.apiHandle = 0;
// Release the per-mode (0 = output, 1 = input) user buffers ...
2842 for ( int i=0; i<2; i++ ) {
2843 if ( stream_.userBuffer[i] ) {
2844 free( stream_.userBuffer[i] );
2845 stream_.userBuffer[i] = 0;
// ... and the shared format/interleave conversion buffer.
2849 if ( stream_.deviceBuffer ) {
2850 free( stream_.deviceBuffer );
2851 stream_.deviceBuffer = 0;
2854 stream_.mode = UNINITIALIZED;
2855 stream_.state = STREAM_CLOSED;
// Start the ASIO stream via ASIOStart(), reset drain bookkeeping and
// mark RUNNING.  A non-ASE_OK 'result' at the end triggers a
// SYSTEM_ERROR report.
2858 void RtApiAsio :: startStream()
2861 if ( stream_.state == STREAM_RUNNING ) {
2862 errorText_ = "RtApiAsio::startStream(): the stream is already running!";
2863 error( RtError::WARNING );
2867 MUTEX_LOCK( &stream_.mutex );
2869 AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
2870 ASIOError result = ASIOStart();
2871 if ( result != ASE_OK ) {
2872 errorStream_ << "RtApiAsio::startStream: error (" << getAsioErrorString( result ) << ") starting device.";
2873 errorText_ = errorStream_.str();
// Clean drain state so the buffer-switch callback starts fresh.
2877 handle->drainCounter = 0;
2878 handle->internalDrain = false;
2879 stream_.state = STREAM_RUNNING;
2883 MUTEX_UNLOCK( &stream_.mutex );
2885 if ( result == ASE_OK ) return;
2886 error( RtError::SYSTEM_ERROR );
// Stop the ASIO stream.  For output/duplex with no drain pending,
// request one and block on the Windows event until the callback
// signals the final buffer; the mutex is explicitly released around
// the wait (WaitForMultipleObjects, unlike pthread_cond_wait, does not
// release it atomically).  Then stop the device via ASIOStop().
2889 void RtApiAsio :: stopStream()
2892 if ( stream_.state == STREAM_STOPPED ) {
2893 errorText_ = "RtApiAsio::stopStream(): the stream is already stopped!";
2894 error( RtError::WARNING );
2898 MUTEX_LOCK( &stream_.mutex );
2900 AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
2901 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
2902 if ( handle->drainCounter == 0 ) {
2903 handle->drainCounter = 1;
2904 MUTEX_UNLOCK( &stream_.mutex );
2905 WaitForMultipleObjects( 1, &handle->condition, FALSE, INFINITE ); // block until signaled
// Manual-reset event: must be reset explicitly for the next drain.
2906 ResetEvent( handle->condition );
2907 MUTEX_LOCK( &stream_.mutex );
2911 ASIOError result = ASIOStop();
2912 if ( result != ASE_OK ) {
2913 errorStream_ << "RtApiAsio::stopStream: error (" << getAsioErrorString( result ) << ") stopping device.";
2914 errorText_ = errorStream_.str();
2917 stream_.state = STREAM_STOPPED;
2918 MUTEX_UNLOCK( &stream_.mutex );
2920 if ( result == ASE_OK ) return;
2921 error( RtError::SYSTEM_ERROR );
// Abort the ASIO stream.  The fast-drain shortcut is deliberately
// disabled (see the original comment below): device buffers must be
// zeroed to avoid residual sound, so abort behaves exactly like stop.
2924 void RtApiAsio :: abortStream()
2927 if ( stream_.state == STREAM_STOPPED ) {
2928 errorText_ = "RtApiAsio::abortStream(): the stream is already stopped!";
2929 error( RtError::WARNING );
2933 // The following lines were commented-out because some behavior was
2934 // noted where the device buffers need to be zeroed to avoid
2935 // continuing sound, even when the device buffers are completed
2936 // disposed. So now, calling abort is the same as calling stop.
2937 //AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
2938 //handle->drainCounter = 1;
2942 bool RtApiAsio :: callbackEvent( long bufferIndex )
// Per-buffer-switch worker: runs the user callback, then moves audio between
// the user buffers and the ASIO driver half-buffer selected by bufferIndex.
// Returns SUCCESS when the stream is stopped or after a normal pass.
2944 if ( stream_.state == STREAM_STOPPED ) return SUCCESS;
2945 if ( stream_.state == STREAM_CLOSED ) {
2946 errorText_ = "RtApiAsio::callbackEvent(): the stream is closed ... this shouldn't happen!";
2947 error( RtError::WARNING );
2951 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
2952 AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
2954 // Check if we were draining the stream and signal is finished.
2955 if ( handle->drainCounter > 3 ) {
// An external (non-internal) drain is waiting on handle->condition in
// stopStream(); wake it now that the drain passes are done.
2956 if ( handle->internalDrain == false )
2957 SetEvent( handle->condition );
2963 MUTEX_LOCK( &stream_.mutex );
2965 // The state might change while waiting on a mutex.
2966 if ( stream_.state == STREAM_STOPPED ) goto unlock;
2968 // Invoke user callback to get fresh output data UNLESS we are
// draining (drainCounter != 0), in which case the output is zero-filled
// below instead of asking the user for more data.
2970 if ( handle->drainCounter == 0 ) {
2971 RtAudioCallback callback = (RtAudioCallback) info->callback;
2972 double streamTime = getStreamTime();
2973 RtAudioStreamStatus status = 0;
// Report any xrun flagged by the driver since the last pass.
2974 if ( stream_.mode != INPUT && asioXRun == true ) {
2975 status |= RTAUDIO_OUTPUT_UNDERFLOW;
2978 if ( stream_.mode != OUTPUT && asioXRun == true ) {
2979 status |= RTAUDIO_INPUT_OVERFLOW;
// User return value: 0 = continue, 1 = stop after draining, 2 = abort now.
2982 handle->drainCounter = callback( stream_.userBuffer[0], stream_.userBuffer[1],
2983 stream_.bufferSize, streamTime, status, info->userData );
2984 if ( handle->drainCounter == 2 ) {
2985 MUTEX_UNLOCK( &stream_.mutex );
2989 else if ( handle->drainCounter == 1 )
2990 handle->internalDrain = true;
// --- Playback side: fill each non-input ASIO channel buffer. ---
2993 unsigned int nChannels, bufferBytes, i, j;
2994 nChannels = stream_.nDeviceChannels[0] + stream_.nDeviceChannels[1];
2995 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
2997 bufferBytes = stream_.bufferSize * formatBytes( stream_.deviceFormat[0] );
2999 if ( handle->drainCounter > 1 ) { // write zeros to the output stream
3001 for ( i=0, j=0; i<nChannels; i++ ) {
3002 if ( handle->bufferInfos[i].isInput != ASIOTrue )
3003 memset( handle->bufferInfos[i].buffers[bufferIndex], 0, bufferBytes );
// Conversion needed: user format/layout -> device format, then de-interleave
// one channel at a time into the driver's per-channel buffers.
3007 else if ( stream_.doConvertBuffer[0] ) {
3009 convertBuffer( stream_.deviceBuffer, stream_.userBuffer[0], stream_.convertInfo[0] );
3010 if ( stream_.doByteSwap[0] )
3011 byteSwapBuffer( stream_.deviceBuffer,
3012 stream_.bufferSize * stream_.nDeviceChannels[0],
3013 stream_.deviceFormat[0] );
3015 for ( i=0, j=0; i<nChannels; i++ ) {
3016 if ( handle->bufferInfos[i].isInput != ASIOTrue )
3017 memcpy( handle->bufferInfos[i].buffers[bufferIndex],
3018 &stream_.deviceBuffer[j++*bufferBytes], bufferBytes );
// No conversion: copy straight from the user buffer (byte-swapped in place
// first if required).
3024 if ( stream_.doByteSwap[0] )
3025 byteSwapBuffer( stream_.userBuffer[0],
3026 stream_.bufferSize * stream_.nUserChannels[0],
3027 stream_.userFormat );
3029 for ( i=0, j=0; i<nChannels; i++ ) {
3030 if ( handle->bufferInfos[i].isInput != ASIOTrue )
3031 memcpy( handle->bufferInfos[i].buffers[bufferIndex],
3032 &stream_.userBuffer[0][bufferBytes*j++], bufferBytes );
// Count drain passes; once drainCounter exceeds 3 above, the waiter is
// signalled on a subsequent callback.
3037 if ( handle->drainCounter ) {
3038 handle->drainCounter++;
// --- Capture side: gather each input ASIO channel buffer. ---
3043 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
3045 bufferBytes = stream_.bufferSize * formatBytes(stream_.deviceFormat[1]);
3047 if (stream_.doConvertBuffer[1]) {
3049 // Always interleave ASIO input data.
3050 for ( i=0, j=0; i<nChannels; i++ ) {
3051 if ( handle->bufferInfos[i].isInput == ASIOTrue )
3052 memcpy( &stream_.deviceBuffer[j++*bufferBytes],
3053 handle->bufferInfos[i].buffers[bufferIndex],
// Swap bytes (if required) before converting device -> user format.
3057 if ( stream_.doByteSwap[1] )
3058 byteSwapBuffer( stream_.deviceBuffer,
3059 stream_.bufferSize * stream_.nDeviceChannels[1],
3060 stream_.deviceFormat[1] );
3061 convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
3065 for ( i=0, j=0; i<nChannels; i++ ) {
3066 if ( handle->bufferInfos[i].isInput == ASIOTrue ) {
3067 memcpy( &stream_.userBuffer[1][bufferBytes*j++],
3068 handle->bufferInfos[i].buffers[bufferIndex],
3073 if ( stream_.doByteSwap[1] )
3074 byteSwapBuffer( stream_.userBuffer[1],
3075 stream_.bufferSize * stream_.nUserChannels[1],
3076 stream_.userFormat );
3081 // The following call was suggested by Malte Clasen. While the API
3082 // documentation indicates it should not be required, some device
3083 // drivers apparently do not function correctly without it.
3086 MUTEX_UNLOCK( &stream_.mutex );
3088 RtApi::tickStreamTime();
3092 void sampleRateChanged( ASIOSampleRate sRate )
// ASIO driver callback: the driver reports that the sample rate (or merely
// the rate *status* of a digital input) may have changed.  We respond by
// stopping the stream and informing the user on stderr.
3094 // The ASIO documentation says that this usually only happens during
3095 // external sync. Audio processing is not stopped by the driver,
3096 // actual sample rate might not have even changed, maybe only the
3097 // sample rate status of an AES/EBU or S/PDIF digital input at the
3100 RtApi *object = (RtApi *) asioCallbackInfo->object;
3102 object->stopStream();
3104 catch ( RtError &exception ) {
// Report and swallow -- presumably to avoid letting an exception propagate
// back into the driver callback; verify against upstream intent.
3105 std::cerr << "\nRtApiAsio: sampleRateChanged() error (" << exception.getMessage() << ")!\n" << std::endl;
3109 std::cerr << "\nRtApiAsio: driver reports sample rate changed to " << sRate << " ... stream stopped!!!\n" << std::endl;
3112 long asioMessages( long selector, long value, void* message, double* opt )
// ASIO driver-to-host message dispatcher.  Handles the standard selectors;
// the returned long signals to the driver which requests/features this host
// supports (non-zero = supported/handled).
3116 switch( selector ) {
3117 case kAsioSelectorSupported:
3118 if ( value == kAsioResetRequest
3119 || value == kAsioEngineVersion
3120 || value == kAsioResyncRequest
3121 || value == kAsioLatenciesChanged
3122 // The following three were added for ASIO 2.0, you don't
3123 // necessarily have to support them.
3124 || value == kAsioSupportsTimeInfo
3125 || value == kAsioSupportsTimeCode
3126 || value == kAsioSupportsInputMonitor)
3129 case kAsioResetRequest:
3130 // Defer the task and perform the reset of the driver during the
3131 // next "safe" situation. You cannot reset the driver right now,
3132 // as this code is called from the driver. Resetting the driver is
3133 // done by completely destructing it, i.e. ASIOStop(),
3134 // ASIODisposeBuffers(), Destruction Afterwards you initialize the
3136 std::cerr << "\nRtApiAsio: driver reset requested!!!" << std::endl;
3139 case kAsioResyncRequest:
3140 // This informs the application that the driver encountered some
3141 // non-fatal data loss. It is used for synchronization purposes
3142 // of different media. Added mainly to work around the Win16Mutex
3143 // problems in Windows 95/98 with the Windows Multimedia system,
3144 // which could lose data because the Mutex was held too long by
3145 // another thread. However a driver can issue it in other
3147 // std::cerr << "\nRtApiAsio: driver resync requested!!!" << std::endl;
3151 case kAsioLatenciesChanged:
3152 // This informs the host application that the driver's
3153 // latencies have changed. Beware, this does not mean that the
3154 // buffer sizes have changed! You might need to update internal
3156 std::cerr << "\nRtApiAsio: driver latency may have changed!!!" << std::endl;
3159 case kAsioEngineVersion:
3160 // Return the supported ASIO version of the host application. If
3161 // a host application does not implement this selector, ASIO 1.0
3162 // is assumed by the driver.
3165 case kAsioSupportsTimeInfo:
3166 // Informs the driver whether the
3167 // asioCallbacks.bufferSwitchTimeInfo() callback is supported.
3168 // For compatibility with ASIO 1.0 drivers the host application
3169 // should always support the "old" bufferSwitch method, too.
3172 case kAsioSupportsTimeCode:
3173 // Informs the driver whether application is interested in time
3174 // code info. If an application does not need to know about time
3175 // code, the driver has less work to do.
3182 static const char* getAsioErrorString( ASIOError result )
// Translate an ASIOError code into a human-readable message via a small
// static lookup table; returns "Unknown error." for unrecognized codes.
// The returned pointer is a string literal and never needs freeing.
3190 static Messages m[] =
3192 { ASE_NotPresent, "Hardware input or output is not present or available." },
3193 { ASE_HWMalfunction, "Hardware is malfunctioning." },
3194 { ASE_InvalidParameter, "Invalid input parameter." },
3195 { ASE_InvalidMode, "Invalid mode." },
3196 { ASE_SPNotAdvancing, "Sample position not advancing." },
3197 { ASE_NoClock, "Sample clock or rate cannot be determined or is not present." },
3198 { ASE_NoMemory, "Not enough memory to complete the request." }
// Linear scan is fine: the table has only seven entries.
3201 for ( unsigned int i = 0; i < sizeof(m)/sizeof(m[0]); ++i )
3202 if ( m[i].value == result ) return m[i].message;
3204 return "Unknown error.";
3206 //******************** End of __WINDOWS_ASIO__ *********************//
3210 #if defined(__WINDOWS_DS__) // Windows DirectSound API
3212 // Modified by Robin Davies, October 2005
3213 // - Improvements to DirectX pointer chasing.
3214 // - Backdoor RtDsStatistics hook provides DirectX performance information.
3215 // - Bug fix for non-power-of-two Asio granularity used by Edirol PCR-A30.
3216 // - Auto-call CoInitialize for DSOUND and ASIO platforms.
3217 // Various revisions for RtAudio 4.0 by Gary Scavone, April 2007
3222 #if defined(__MINGW32__)
3223 // missing from latest mingw winapi
3224 #define WAVE_FORMAT_96M08 0x00010000 /* 96 kHz, Mono, 8-bit */
3225 #define WAVE_FORMAT_96S08 0x00020000 /* 96 kHz, Stereo, 8-bit */
3226 #define WAVE_FORMAT_96M16 0x00040000 /* 96 kHz, Mono, 16-bit */
3227 #define WAVE_FORMAT_96S16 0x00080000 /* 96 kHz, Stereo, 16-bit */
3230 #define MINIMUM_DEVICE_BUFFER_SIZE 32768
3232 #ifdef _MSC_VER // if Microsoft Visual C++
3233 #pragma comment( lib, "winmm.lib" ) // then, auto-link winmm.lib. Otherwise, it has to be added manually.
3236 static inline DWORD dsPointerDifference( DWORD laterPointer, DWORD earlierPointer, DWORD bufferSize )
3238 if (laterPointer > earlierPointer)
3239 return laterPointer - earlierPointer;
3241 return laterPointer - earlierPointer + bufferSize;
3244 static inline DWORD dsPointerBetween( DWORD pointer, DWORD laterPointer, DWORD earlierPointer, DWORD bufferSize )
3246 if ( pointer > bufferSize ) pointer -= bufferSize;
3247 if ( laterPointer < earlierPointer ) laterPointer += bufferSize;
3248 if ( pointer < earlierPointer ) pointer += bufferSize;
3249 return pointer >= earlierPointer && pointer < laterPointer;
3252 // A structure to hold various information related to the DirectSound
3253 // API implementation.
3255 unsigned int drainCounter; // Tracks callback counts when draining
3256 bool internalDrain; // Indicates if stop is initiated from callback or not.
// The [2] arrays below appear to follow the stream_ convention used
// elsewhere in this file: index 0 = playback, index 1 = capture -- confirm.
3260 UINT bufferPointer[2];
3261 DWORD dsBufferSize[2];
3262 DWORD dsPointerLeadTime[2]; // the number of bytes ahead of the safe pointer to lead by.
// Default constructor: zero the counters, pointers and xrun flags.
3266 :drainCounter(0), internalDrain(false) { id[0] = 0; id[1] = 0; buffer[0] = 0; buffer[1] = 0; xrun[0] = false; xrun[1] = false; bufferPointer[0] = 0; bufferPointer[1] = 0; }
// Definition of the static statistics object served by getDsStatistics().
3270 RtApiDs::RtDsStatistics RtApiDs::statistics;
3272 // Provides a backdoor hook to monitor for DirectSound read overruns and write underruns.
3273 RtApiDs::RtDsStatistics RtApiDs::getDsStatistics()
// Returns a snapshot copy of the static statistics with the derived
// latency field filled in from the lead-byte counters.
3275 RtDsStatistics s = statistics;
3277 // update the calculated fields.
// Guard against division by zero when a direction was never opened.
3278 if ( s.inputFrameSize != 0 )
3279 s.latency += s.readDeviceSafeLeadBytes * 1.0 / s.inputFrameSize / s.sampleRate;
3281 if ( s.outputFrameSize != 0 )
3282 s.latency += (s.writeDeviceSafeLeadBytes + s.writeDeviceBufferLeadBytes) * 1.0 / s.outputFrameSize / s.sampleRate;
3288 // Declarations for utility functions, callbacks, and structures
3289 // specific to the DirectSound implementation.
// deviceQueryCallback is passed to DirectSound(Capture)Enumerate to count
// devices, find the default, and/or locate a device by index.
3290 static BOOL CALLBACK deviceQueryCallback( LPGUID lpguid,
3291 LPCTSTR description,
3295 static char* getErrorString( int code );
// Thread entry point that drives the DirectSound callback loop.
3297 extern "C" unsigned __stdcall callbackHandler( void *ptr );
// Fields and default constructor of the enumeration-state struct handed to
// deviceQueryCallback (its declaration header is not visible here).
3303 unsigned int counter;
3309 : isInput(false), getDefault(false), findIndex(false), counter(0), index(0) {}
3312 RtApiDs :: RtApiDs()
3314 // Dsound will run both-threaded. If CoInitialize fails, then just
3315 // accept whatever the mainline chose for a threading model.
3316 coInitialized_ = false;
3317 HRESULT hr = CoInitialize( NULL );
// Remember whether we initialized COM so the destructor can balance this
// call with CoUninitialize().
3318 if ( !FAILED( hr ) ) coInitialized_ = true;
3321 RtApiDs :: ~RtApiDs()
// Balance the constructor's CoInitialize() and release any open stream.
3323 if ( coInitialized_ ) CoUninitialize(); // balanced call.
3324 if ( stream_.state != STREAM_CLOSED ) closeStream();
3327 unsigned int RtApiDs :: getDefaultInputDevice( void )
// Returns the combined device index of the system default capture device.
// Output devices are enumerated first because, per the comment in
// getDeviceInfo(), indices in this API count outputs before inputs.
3329 // Count output devices.
3331 HRESULT result = DirectSoundEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &info );
3332 if ( FAILED( result ) ) {
// NOTE(review): this message names getDefaultOutputDevice, but we are in
// getDefaultInputDevice -- looks like a copy/paste slip in the error text.
3333 errorStream_ << "RtApiDs::getDefaultOutputDevice: error (" << getErrorString( result ) << ") counting output devices!";
3334 errorText_ = errorStream_.str();
3335 error( RtError::WARNING );
3339 // Now enumerate input devices until we find the id = NULL.
3340 info.isInput = true;
3341 info.getDefault = true;
3342 result = DirectSoundCaptureEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &info );
3343 if ( FAILED( result ) ) {
3344 errorStream_ << "RtApiDs::getDefaultInputDevice: error (" << getErrorString( result ) << ") enumerating input devices!";
3345 errorText_ = errorStream_.str();
3346 error( RtError::WARNING );
// counter was advanced once past the default device during enumeration.
3350 if ( info.counter > 0 ) return info.counter - 1;
3354 unsigned int RtApiDs :: getDefaultOutputDevice( void )
// Returns the device index of the system default playback device by
// enumerating output devices with the getDefault flag set.
3356 // Enumerate output devices until we find the id = NULL.
3358 info.getDefault = true;
3359 HRESULT result = DirectSoundEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &info );
3360 if ( FAILED( result ) ) {
3361 errorStream_ << "RtApiDs::getDefaultOutputDevice: error (" << getErrorString( result ) << ") enumerating output devices!";
3362 errorText_ = errorStream_.str();
3363 error( RtError::WARNING );
// counter was advanced once past the default device during enumeration.
3367 if ( info.counter > 0 ) return info.counter - 1;
3371 unsigned int RtApiDs :: getDeviceCount( void )
// Returns the total number of DirectSound devices: playback devices plus
// capture devices, counted with the same EnumInfo accumulator so indices
// are contiguous (outputs first, then inputs).
3373 // Count DirectSound devices.
3375 HRESULT result = DirectSoundEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &info );
3376 if ( FAILED( result ) ) {
3377 errorStream_ << "RtApiDs::getDeviceCount: error (" << getErrorString( result ) << ") enumerating output devices!";
3378 errorText_ = errorStream_.str();
3379 error( RtError::WARNING );
3382 // Count DirectSoundCapture devices.
3383 info.isInput = true;
3384 result = DirectSoundCaptureEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &info );
3385 if ( FAILED( result ) ) {
3386 errorStream_ << "RtApiDs::getDeviceCount: error (" << getErrorString( result ) << ") enumerating input devices!";
3387 errorText_ = errorStream_.str();
3388 error( RtError::WARNING );
3391 return info.counter;
3394 RtAudio::DeviceInfo RtApiDs :: getDeviceInfo( unsigned int device )
// Probe the capabilities (channels, sample rates, native formats, default
// status, name) of the DirectSound device at the given combined index.
// Errors are reported as WARNINGs and leave info.probed == false.
3396 // Because DirectSound always enumerates input and output devices
3397 // separately (and because we don't attempt to combine devices
3398 // internally), none of our "devices" will ever be duplex.
3400 RtAudio::DeviceInfo info;
3401 info.probed = false;
3403 // Enumerate through devices to find the id (if it exists). Note
3404 // that we have to do the output enumeration first, even if this is
3405 // an input device, in order for the device counter to be correct.
3407 dsinfo.findIndex = true;
3408 dsinfo.index = device;
3409 HRESULT result = DirectSoundEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &dsinfo );
3410 if ( FAILED( result ) ) {
3411 errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") enumerating output devices!";
3412 errorText_ = errorStream_.str();
3413 error( RtError::WARNING );
// Empty name => the index did not match an output device; try inputs.
3416 if ( dsinfo.name.empty() ) goto probeInput;
3418 LPDIRECTSOUND output;
3420 result = DirectSoundCreate( dsinfo.id, &output, NULL );
3421 if ( FAILED( result ) ) {
3422 errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") opening output device (" << dsinfo.name << ")!";
3423 errorText_ = errorStream_.str();
3424 error( RtError::WARNING );
3428 outCaps.dwSize = sizeof( outCaps );
3429 result = output->GetCaps( &outCaps );
3430 if ( FAILED( result ) ) {
3432 errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") getting capabilities!";
3433 errorText_ = errorStream_.str();
3434 error( RtError::WARNING );
3438 // Get output channel information.
3439 info.outputChannels = ( outCaps.dwFlags & DSCAPS_PRIMARYSTEREO ) ? 2 : 1;
3441 // Get sample rate information.
// Keep only the standard rates that fall inside the device's secondary
// buffer rate range.
3442 info.sampleRates.clear();
3443 for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {
3444 if ( SAMPLE_RATES[k] >= (unsigned int) outCaps.dwMinSecondarySampleRate &&
3445 SAMPLE_RATES[k] <= (unsigned int) outCaps.dwMaxSecondarySampleRate )
3446 info.sampleRates.push_back( SAMPLE_RATES[k] );
3449 // Get format information.
3450 if ( outCaps.dwFlags & DSCAPS_PRIMARY16BIT ) info.nativeFormats |= RTAUDIO_SINT16;
3451 if ( outCaps.dwFlags & DSCAPS_PRIMARY8BIT ) info.nativeFormats |= RTAUDIO_SINT8;
3455 if ( getDefaultOutputDevice() == device )
3456 info.isDefaultOutput = true;
3458 // Copy name and return.
3459 info.name = dsinfo.name;
// --- Capture probe (reached via the probeInput label for input indices) ---
3466 dsinfo.isInput = true;
3467 result = DirectSoundCaptureEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &dsinfo );
3468 if ( FAILED( result ) ) {
3469 errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") enumerating input devices!";
3470 errorText_ = errorStream_.str();
3471 error( RtError::WARNING );
3474 if ( dsinfo.name.empty() ) return info;
3476 LPDIRECTSOUNDCAPTURE input;
3477 result = DirectSoundCaptureCreate( dsinfo.id, &input, NULL );
3478 if ( FAILED( result ) ) {
3479 errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") opening input device (" << dsinfo.name << ")!";
3480 errorText_ = errorStream_.str();
3481 error( RtError::WARNING );
3486 inCaps.dwSize = sizeof( inCaps );
3487 result = input->GetCaps( &inCaps );
3488 if ( FAILED( result ) ) {
3490 errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") getting object capabilities (" << dsinfo.name << ")!";
3491 errorText_ = errorStream_.str();
3492 error( RtError::WARNING );
3496 // Get input channel information.
3497 info.inputChannels = inCaps.dwChannels;
3499 // Get sample rate and format information.
// dwFormats is a bitmask of WAVE_FORMAT_* capability flags; decode the
// stereo ("S") variants here and the mono ("M") variants below.
3500 if ( inCaps.dwChannels == 2 ) {
3501 if ( inCaps.dwFormats & WAVE_FORMAT_1S16 ) info.nativeFormats |= RTAUDIO_SINT16;
3502 if ( inCaps.dwFormats & WAVE_FORMAT_2S16 ) info.nativeFormats |= RTAUDIO_SINT16;
3503 if ( inCaps.dwFormats & WAVE_FORMAT_4S16 ) info.nativeFormats |= RTAUDIO_SINT16;
3504 if ( inCaps.dwFormats & WAVE_FORMAT_96S16 ) info.nativeFormats |= RTAUDIO_SINT16;
3505 if ( inCaps.dwFormats & WAVE_FORMAT_1S08 ) info.nativeFormats |= RTAUDIO_SINT8;
3506 if ( inCaps.dwFormats & WAVE_FORMAT_2S08 ) info.nativeFormats |= RTAUDIO_SINT8;
3507 if ( inCaps.dwFormats & WAVE_FORMAT_4S08 ) info.nativeFormats |= RTAUDIO_SINT8;
3508 if ( inCaps.dwFormats & WAVE_FORMAT_96S08 ) info.nativeFormats |= RTAUDIO_SINT8;
3510 if ( info.nativeFormats & RTAUDIO_SINT16 ) {
3511 if ( inCaps.dwFormats & WAVE_FORMAT_1S16 ) info.sampleRates.push_back( 11025 );
3512 if ( inCaps.dwFormats & WAVE_FORMAT_2S16 ) info.sampleRates.push_back( 22050 );
3513 if ( inCaps.dwFormats & WAVE_FORMAT_4S16 ) info.sampleRates.push_back( 44100 );
3514 if ( inCaps.dwFormats & WAVE_FORMAT_96S16 ) info.sampleRates.push_back( 96000 );
3516 else if ( info.nativeFormats & RTAUDIO_SINT8 ) {
3517 if ( inCaps.dwFormats & WAVE_FORMAT_1S08 ) info.sampleRates.push_back( 11025 );
3518 if ( inCaps.dwFormats & WAVE_FORMAT_2S08 ) info.sampleRates.push_back( 22050 );
3519 if ( inCaps.dwFormats & WAVE_FORMAT_4S08 ) info.sampleRates.push_back( 44100 );
// NOTE(review): WAVE_FORMAT_96S08 is the 96 kHz stereo 8-bit flag, yet
// 44100 is pushed here, while the mono path below pushes 96000 for the
// corresponding flag.  Looks like this should be 96000 -- confirm against
// upstream RtAudio before changing.
3520 if ( inCaps.dwFormats & WAVE_FORMAT_96S08 ) info.sampleRates.push_back( 44100 );
3523 else if ( inCaps.dwChannels == 1 ) {
3524 if ( inCaps.dwFormats & WAVE_FORMAT_1M16 ) info.nativeFormats |= RTAUDIO_SINT16;
3525 if ( inCaps.dwFormats & WAVE_FORMAT_2M16 ) info.nativeFormats |= RTAUDIO_SINT16;
3526 if ( inCaps.dwFormats & WAVE_FORMAT_4M16 ) info.nativeFormats |= RTAUDIO_SINT16;
3527 if ( inCaps.dwFormats & WAVE_FORMAT_96M16 ) info.nativeFormats |= RTAUDIO_SINT16;
3528 if ( inCaps.dwFormats & WAVE_FORMAT_1M08 ) info.nativeFormats |= RTAUDIO_SINT8;
3529 if ( inCaps.dwFormats & WAVE_FORMAT_2M08 ) info.nativeFormats |= RTAUDIO_SINT8;
3530 if ( inCaps.dwFormats & WAVE_FORMAT_4M08 ) info.nativeFormats |= RTAUDIO_SINT8;
3531 if ( inCaps.dwFormats & WAVE_FORMAT_96M08 ) info.nativeFormats |= RTAUDIO_SINT8;
3533 if ( info.nativeFormats & RTAUDIO_SINT16 ) {
3534 if ( inCaps.dwFormats & WAVE_FORMAT_1M16 ) info.sampleRates.push_back( 11025 );
3535 if ( inCaps.dwFormats & WAVE_FORMAT_2M16 ) info.sampleRates.push_back( 22050 );
3536 if ( inCaps.dwFormats & WAVE_FORMAT_4M16 ) info.sampleRates.push_back( 44100 );
3537 if ( inCaps.dwFormats & WAVE_FORMAT_96M16 ) info.sampleRates.push_back( 96000 );
3539 else if ( info.nativeFormats & RTAUDIO_SINT8 ) {
3540 if ( inCaps.dwFormats & WAVE_FORMAT_1M08 ) info.sampleRates.push_back( 11025 );
3541 if ( inCaps.dwFormats & WAVE_FORMAT_2M08 ) info.sampleRates.push_back( 22050 );
3542 if ( inCaps.dwFormats & WAVE_FORMAT_4M08 ) info.sampleRates.push_back( 44100 );
3543 if ( inCaps.dwFormats & WAVE_FORMAT_96M08 ) info.sampleRates.push_back( 96000 );
3546 else info.inputChannels = 0; // technically, this would be an error
3550 if ( info.inputChannels == 0 ) return info;
3552 if ( getDefaultInputDevice() == device )
3553 info.isDefaultInput = true;
3555 // Copy name and return.
3556 info.name = dsinfo.name;
3561 bool RtApiDs :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
3562 unsigned int firstChannel, unsigned int sampleRate,
3563 RtAudioFormat format, unsigned int *bufferSize,
3564 RtAudio::StreamOptions *options )
3566 if ( channels + firstChannel > 2 ) {
3567 errorText_ = "RtApiDs::probeDeviceOpen: DirectSound does not support more than 2 channels per device.";
3571 // Enumerate through devices to find the id (if it exists). Note
3572 // that we have to do the output enumeration first, even if this is
3573 // an input device, in order for the device counter to be correct.
3575 dsinfo.findIndex = true;
3576 dsinfo.index = device;
3577 HRESULT result = DirectSoundEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &dsinfo );
3578 if ( FAILED( result ) ) {
3579 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") enumerating output devices!";
3580 errorText_ = errorStream_.str();
3584 if ( mode == OUTPUT ) {
3585 if ( dsinfo.name.empty() ) {
3586 errorStream_ << "RtApiDs::probeDeviceOpen: device (" << device << ") does not support output!";
3587 errorText_ = errorStream_.str();
3591 else { // mode == INPUT
3592 dsinfo.isInput = true;
3593 HRESULT result = DirectSoundCaptureEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &dsinfo );
3594 if ( FAILED( result ) ) {
3595 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") enumerating input devices!";
3596 errorText_ = errorStream_.str();
3599 if ( dsinfo.name.empty() ) {
3600 errorStream_ << "RtApiDs::probeDeviceOpen: device (" << device << ") does not support input!";
3601 errorText_ = errorStream_.str();
3606 // According to a note in PortAudio, using GetDesktopWindow()
3607 // instead of GetForegroundWindow() is supposed to avoid problems
3608 // that occur when the application's window is not the foreground
3609 // window. Also, if the application window closes before the
3610 // DirectSound buffer, DirectSound can crash. However, for console
3611 // applications, no sound was produced when using GetDesktopWindow().
3612 HWND hWnd = GetForegroundWindow();
3614 // Check the numberOfBuffers parameter and limit the lowest value to
3615 // two. This is a judgement call and a value of two is probably too
3616 // low for capture, but it should work for playback.
3618 if ( options ) nBuffers = options->numberOfBuffers;
3619 if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) nBuffers = 2;
3620 if ( nBuffers < 2 ) nBuffers = 3;
3622 // Create the wave format structure. The data format setting will
3623 // be determined later.
3624 WAVEFORMATEX waveFormat;
3625 ZeroMemory( &waveFormat, sizeof(WAVEFORMATEX) );
3626 waveFormat.wFormatTag = WAVE_FORMAT_PCM;
3627 waveFormat.nChannels = channels + firstChannel;
3628 waveFormat.nSamplesPerSec = (unsigned long) sampleRate;
3630 // Determine the device buffer size. By default, 32k, but we will
3631 // grow it to make allowances for very large software buffer sizes.
3632 DWORD dsBufferSize = 0;
3633 DWORD dsPointerLeadTime = 0;
3634 long bufferBytes = MINIMUM_DEVICE_BUFFER_SIZE; // sound cards will always *knock wood* support this
3636 void *ohandle = 0, *bhandle = 0;
3637 if ( mode == OUTPUT ) {
3639 LPDIRECTSOUND output;
3640 result = DirectSoundCreate( dsinfo.id, &output, NULL );
3641 if ( FAILED( result ) ) {
3642 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") opening output device (" << dsinfo.name << ")!";
3643 errorText_ = errorStream_.str();
3648 outCaps.dwSize = sizeof( outCaps );
3649 result = output->GetCaps( &outCaps );
3650 if ( FAILED( result ) ) {
3652 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") getting capabilities (" << dsinfo.name << ")!";
3653 errorText_ = errorStream_.str();
3657 // Check channel information.
3658 if ( channels + firstChannel == 2 && !( outCaps.dwFlags & DSCAPS_PRIMARYSTEREO ) ) {
3659 errorStream_ << "RtApiDs::getDeviceInfo: the output device (" << dsinfo.name << ") does not support stereo playback.";
3660 errorText_ = errorStream_.str();
3664 // Check format information. Use 16-bit format unless not
3665 // supported or user requests 8-bit.
3666 if ( outCaps.dwFlags & DSCAPS_PRIMARY16BIT &&
3667 !( format == RTAUDIO_SINT8 && outCaps.dwFlags & DSCAPS_PRIMARY8BIT ) ) {
3668 waveFormat.wBitsPerSample = 16;
3669 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
3672 waveFormat.wBitsPerSample = 8;
3673 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
3675 stream_.userFormat = format;
3677 // Update wave format structure and buffer information.
3678 waveFormat.nBlockAlign = waveFormat.nChannels * waveFormat.wBitsPerSample / 8;
3679 waveFormat.nAvgBytesPerSec = waveFormat.nSamplesPerSec * waveFormat.nBlockAlign;
3680 dsPointerLeadTime = nBuffers * (*bufferSize) * (waveFormat.wBitsPerSample / 8) * channels;
3682 // If the user wants an even bigger buffer, increase the device buffer size accordingly.
3683 while ( dsPointerLeadTime * 2U > (DWORD) bufferBytes )
3686 // Set cooperative level to DSSCL_EXCLUSIVE
3687 result = output->SetCooperativeLevel( hWnd, DSSCL_EXCLUSIVE );
3688 if ( FAILED( result ) ) {
3690 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") setting cooperative level (" << dsinfo.name << ")!";
3691 errorText_ = errorStream_.str();
3695 // Even though we will write to the secondary buffer, we need to
3696 // access the primary buffer to set the correct output format
3697 // (since the default is 8-bit, 22 kHz!). Setup the DS primary
3698 // buffer description.
3699 DSBUFFERDESC bufferDescription;
3700 ZeroMemory( &bufferDescription, sizeof( DSBUFFERDESC ) );
3701 bufferDescription.dwSize = sizeof( DSBUFFERDESC );
3702 bufferDescription.dwFlags = DSBCAPS_PRIMARYBUFFER;
3704 // Obtain the primary buffer
3705 LPDIRECTSOUNDBUFFER buffer;
3706 result = output->CreateSoundBuffer( &bufferDescription, &buffer, NULL );
3707 if ( FAILED( result ) ) {
3709 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") accessing primary buffer (" << dsinfo.name << ")!";
3710 errorText_ = errorStream_.str();
3714 // Set the primary DS buffer sound format.
3715 result = buffer->SetFormat( &waveFormat );
3716 if ( FAILED( result ) ) {
3718 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") setting primary buffer format (" << dsinfo.name << ")!";
3719 errorText_ = errorStream_.str();
3723 // Setup the secondary DS buffer description.
3724 dsBufferSize = (DWORD) bufferBytes;
3725 ZeroMemory( &bufferDescription, sizeof( DSBUFFERDESC ) );
3726 bufferDescription.dwSize = sizeof( DSBUFFERDESC );
3727 bufferDescription.dwFlags = ( DSBCAPS_STICKYFOCUS |
3728 DSBCAPS_GLOBALFOCUS |
3729 DSBCAPS_GETCURRENTPOSITION2 |
3730 DSBCAPS_LOCHARDWARE ); // Force hardware mixing
3731 bufferDescription.dwBufferBytes = bufferBytes;
3732 bufferDescription.lpwfxFormat = &waveFormat;
3734 // Try to create the secondary DS buffer. If that doesn't work,
3735 // try to use software mixing. Otherwise, there's a problem.
3736 result = output->CreateSoundBuffer( &bufferDescription, &buffer, NULL );
3737 if ( FAILED( result ) ) {
3738 bufferDescription.dwFlags = ( DSBCAPS_STICKYFOCUS |
3739 DSBCAPS_GLOBALFOCUS |
3740 DSBCAPS_GETCURRENTPOSITION2 |
3741 DSBCAPS_LOCSOFTWARE ); // Force software mixing
3742 result = output->CreateSoundBuffer( &bufferDescription, &buffer, NULL );
3743 if ( FAILED( result ) ) {
3745 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") creating secondary buffer (" << dsinfo.name << ")!";
3746 errorText_ = errorStream_.str();
3751 // Get the buffer size ... might be different from what we specified.
3753 dsbcaps.dwSize = sizeof( DSBCAPS );
3754 result = buffer->GetCaps( &dsbcaps );
3755 if ( FAILED( result ) ) {
3758 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") getting buffer settings (" << dsinfo.name << ")!";
3759 errorText_ = errorStream_.str();
3763 bufferBytes = dsbcaps.dwBufferBytes;
3765 // Lock the DS buffer
3768 result = buffer->Lock( 0, bufferBytes, &audioPtr, &dataLen, NULL, NULL, 0 );
3769 if ( FAILED( result ) ) {
3772 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") locking buffer (" << dsinfo.name << ")!";
3773 errorText_ = errorStream_.str();
3777 // Zero the DS buffer
3778 ZeroMemory( audioPtr, dataLen );
3780 // Unlock the DS buffer
3781 result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );
3782 if ( FAILED( result ) ) {
3785 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") unlocking buffer (" << dsinfo.name << ")!";
3786 errorText_ = errorStream_.str();
3790 dsBufferSize = bufferBytes;
3791 ohandle = (void *) output;
3792 bhandle = (void *) buffer;
3795 if ( mode == INPUT ) {
3797 LPDIRECTSOUNDCAPTURE input;
3798 result = DirectSoundCaptureCreate( dsinfo.id, &input, NULL );
3799 if ( FAILED( result ) ) {
3800 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") opening input device (" << dsinfo.name << ")!";
3801 errorText_ = errorStream_.str();
3806 inCaps.dwSize = sizeof( inCaps );
3807 result = input->GetCaps( &inCaps );
3808 if ( FAILED( result ) ) {
3810 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") getting input capabilities (" << dsinfo.name << ")!";
3811 errorText_ = errorStream_.str();
3815 // Check channel information.
3816 if ( inCaps.dwChannels < channels + firstChannel ) {
3817 errorText_ = "RtApiDs::getDeviceInfo: the input device does not support requested input channels.";
3821 // Check format information. Use 16-bit format unless user
3823 DWORD deviceFormats;
3824 if ( channels + firstChannel == 2 ) {
3825 deviceFormats = WAVE_FORMAT_1S08 | WAVE_FORMAT_2S08 | WAVE_FORMAT_4S08 | WAVE_FORMAT_96S08;
3826 if ( format == RTAUDIO_SINT8 && inCaps.dwFormats & deviceFormats ) {
3827 waveFormat.wBitsPerSample = 8;
3828 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
3830 else { // assume 16-bit is supported
3831 waveFormat.wBitsPerSample = 16;
3832 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
3835 else { // channel == 1
3836 deviceFormats = WAVE_FORMAT_1M08 | WAVE_FORMAT_2M08 | WAVE_FORMAT_4M08 | WAVE_FORMAT_96M08;
3837 if ( format == RTAUDIO_SINT8 && inCaps.dwFormats & deviceFormats ) {
3838 waveFormat.wBitsPerSample = 8;
3839 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
3841 else { // assume 16-bit is supported
3842 waveFormat.wBitsPerSample = 16;
3843 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
3846 stream_.userFormat = format;
3848 // Update wave format structure and buffer information.
3849 waveFormat.nBlockAlign = waveFormat.nChannels * waveFormat.wBitsPerSample / 8;
3850 waveFormat.nAvgBytesPerSec = waveFormat.nSamplesPerSec * waveFormat.nBlockAlign;
3852 // Setup the secondary DS buffer description.
3853 dsBufferSize = bufferBytes;
3854 DSCBUFFERDESC bufferDescription;
3855 ZeroMemory( &bufferDescription, sizeof( DSCBUFFERDESC ) );
3856 bufferDescription.dwSize = sizeof( DSCBUFFERDESC );
3857 bufferDescription.dwFlags = 0;
3858 bufferDescription.dwReserved = 0;
3859 bufferDescription.dwBufferBytes = bufferBytes;
3860 bufferDescription.lpwfxFormat = &waveFormat;
3862 // Create the capture buffer.
3863 LPDIRECTSOUNDCAPTUREBUFFER buffer;
3864 result = input->CreateCaptureBuffer( &bufferDescription, &buffer, NULL );
3865 if ( FAILED( result ) ) {
3867 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") creating input buffer (" << dsinfo.name << ")!";
3868 errorText_ = errorStream_.str();
3872 // Lock the capture buffer
3875 result = buffer->Lock( 0, bufferBytes, &audioPtr, &dataLen, NULL, NULL, 0 );
3876 if ( FAILED( result ) ) {
3879 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") locking input buffer (" << dsinfo.name << ")!";
3880 errorText_ = errorStream_.str();
3885 ZeroMemory( audioPtr, dataLen );
3887 // Unlock the buffer
3888 result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );
3889 if ( FAILED( result ) ) {
3892 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") unlocking input buffer (" << dsinfo.name << ")!";
3893 errorText_ = errorStream_.str();
3897 dsBufferSize = bufferBytes;
3898 ohandle = (void *) input;
3899 bhandle = (void *) buffer;
3902 // Set various stream parameters
3903 DsHandle *handle = 0;
3904 stream_.nDeviceChannels[mode] = channels + firstChannel;
3905 stream_.nUserChannels[mode] = channels;
3906 stream_.bufferSize = *bufferSize;
3907 stream_.channelOffset[mode] = firstChannel;
3908 stream_.deviceInterleaved[mode] = true;
3909 if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;
3910 else stream_.userInterleaved = true;
3912 // Set flag for buffer conversion
3913 stream_.doConvertBuffer[mode] = false;
3914 if (stream_.nUserChannels[mode] != stream_.nDeviceChannels[mode])
3915 stream_.doConvertBuffer[mode] = true;
3916 if (stream_.userFormat != stream_.deviceFormat[mode])
3917 stream_.doConvertBuffer[mode] = true;
3918 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
3919 stream_.nUserChannels[mode] > 1 )
3920 stream_.doConvertBuffer[mode] = true;
3922 // Allocate necessary internal buffers
3923 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
3924 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
3925 if ( stream_.userBuffer[mode] == NULL ) {
3926 errorText_ = "RtApiDs::probeDeviceOpen: error allocating user buffer memory.";
3930 if ( stream_.doConvertBuffer[mode] ) {
3932 bool makeBuffer = true;
3933 bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
3934 if ( mode == INPUT ) {
3935 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
3936 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
3937 if ( bufferBytes <= (long) bytesOut ) makeBuffer = false;
3942 bufferBytes *= *bufferSize;
3943 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
3944 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
3945 if ( stream_.deviceBuffer == NULL ) {
3946 errorText_ = "RtApiDs::probeDeviceOpen: error allocating device buffer memory.";
3952 // Allocate our DsHandle structures for the stream.
3953 if ( stream_.apiHandle == 0 ) {
3955 handle = new DsHandle;
3957 catch ( std::bad_alloc& ) {
3958 errorText_ = "RtApiDs::probeDeviceOpen: error allocating AsioHandle memory.";
3962 // Create a manual-reset event.
3963 handle->condition = CreateEvent( NULL, // no security
3964 TRUE, // manual-reset
3965 FALSE, // non-signaled initially
3967 stream_.apiHandle = (void *) handle;
3970 handle = (DsHandle *) stream_.apiHandle;
3971 handle->id[mode] = ohandle;
3972 handle->buffer[mode] = bhandle;
3973 handle->dsBufferSize[mode] = dsBufferSize;
3974 handle->dsPointerLeadTime[mode] = dsPointerLeadTime;
3976 stream_.device[mode] = device;
3977 stream_.state = STREAM_STOPPED;
3978 if ( stream_.mode == OUTPUT && mode == INPUT )
3979 // We had already set up an output stream.
3980 stream_.mode = DUPLEX;
3982 stream_.mode = mode;
3983 stream_.nBuffers = nBuffers;
3984 stream_.sampleRate = sampleRate;
3986 // Setup the buffer conversion information structure.
3987 if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, firstChannel );
3989 // Setup the callback thread.
3991 stream_.callbackInfo.object = (void *) this;
3992 stream_.callbackInfo.isRunning = true;
3993 stream_.callbackInfo.thread = _beginthreadex( NULL, 0, &callbackHandler,
3994 &stream_.callbackInfo, 0, &threadId );
3995 if ( stream_.callbackInfo.thread == 0 ) {
3996 errorText_ = "RtApiDs::probeDeviceOpen: error creating callback thread!";
4000 // Boost DS thread priority
4001 SetThreadPriority( (HANDLE) stream_.callbackInfo.thread, THREAD_PRIORITY_HIGHEST );
4006 if ( handle->buffer[0] ) { // the object pointer can be NULL and valid
4007 LPDIRECTSOUND object = (LPDIRECTSOUND) handle->id[0];
4008 LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4009 if ( buffer ) buffer->Release();
4012 if ( handle->buffer[1] ) {
4013 LPDIRECTSOUNDCAPTURE object = (LPDIRECTSOUNDCAPTURE) handle->id[1];
4014 LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
4015 if ( buffer ) buffer->Release();
4018 CloseHandle( handle->condition );
4020 stream_.apiHandle = 0;
4023 for ( int i=0; i<2; i++ ) {
4024 if ( stream_.userBuffer[i] ) {
4025 free( stream_.userBuffer[i] );
4026 stream_.userBuffer[i] = 0;
4030 if ( stream_.deviceBuffer ) {
4031 free( stream_.deviceBuffer );
4032 stream_.deviceBuffer = 0;
// Close the open DirectSound stream: shut down the callback thread,
// release the DS playback/capture buffer and device COM objects, destroy
// the signalling event, and free all internal user/device buffers.
// Issues only a WARNING if no stream is open (callers need not check first).
// NOTE(review): this excerpt elides some original lines (the left-hand
// numbers are the original file's line numbers), so braces/statements
// appear unbalanced here.
4038 void RtApiDs :: closeStream()
4040 if ( stream_.state == STREAM_CLOSED ) {
4041 errorText_ = "RtApiDs::closeStream(): no open stream to close!";
4042 error( RtError::WARNING );
// Ask the callback thread loop to exit, then join it and release the
// thread handle before tearing down the buffers it was using.
4046 // Stop the callback thread.
4047 stream_.callbackInfo.isRunning = false;
4048 WaitForSingleObject( (HANDLE) stream_.callbackInfo.thread, INFINITE );
4049 CloseHandle( (HANDLE) stream_.callbackInfo.thread );
4051 DsHandle *handle = (DsHandle *) stream_.apiHandle;
// Index 0 = output (playback), index 1 = input (capture). The Release()
// calls on these COM objects fall on lines elided from this excerpt.
4053 if ( handle->buffer[0] ) { // the object pointer can be NULL and valid
4054 LPDIRECTSOUND object = (LPDIRECTSOUND) handle->id[0];
4055 LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4062 if ( handle->buffer[1] ) {
4063 LPDIRECTSOUNDCAPTURE object = (LPDIRECTSOUNDCAPTURE) handle->id[1];
4064 LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
// Destroy the manual-reset event used for drain signalling.
4071 CloseHandle( handle->condition );
4073 stream_.apiHandle = 0;
// Free the per-mode user buffers (calloc'd in probeDeviceOpen).
4076 for ( int i=0; i<2; i++ ) {
4077 if ( stream_.userBuffer[i] ) {
4078 free( stream_.userBuffer[i] );
4079 stream_.userBuffer[i] = 0;
// Free the shared device-format conversion buffer, if any.
4083 if ( stream_.deviceBuffer ) {
4084 free( stream_.deviceBuffer );
4085 stream_.deviceBuffer = 0;
// Mark the stream fully closed so subsequent calls are rejected above.
4088 stream_.mode = UNINITIALIZED;
4089 stream_.state = STREAM_CLOSED;
// Start the open DirectSound stream: begin looping playback on the output
// buffer and/or looping capture on the input buffer, reset drain state,
// and mark the stream RUNNING. Warns (and returns) if already running;
// escalates to SYSTEM_ERROR if a DS Play/Start call fails.
// NOTE(review): this excerpt elides some original lines (left-hand numbers
// are the original file's line numbers), so braces appear unbalanced here.
4092 void RtApiDs :: startStream()
4095 if ( stream_.state == STREAM_RUNNING ) {
4096 errorText_ = "RtApiDs::startStream(): the stream is already running!";
4097 error( RtError::WARNING );
4101 // Increase scheduler frequency on lesser windows (a side-effect of
4102 // increasing timer accuracy). On greater windows (Win2K or later),
4103 // this is already in effect.
4105 MUTEX_LOCK( &stream_.mutex );
4107 DsHandle *handle = (DsHandle *) stream_.apiHandle;
// timeBeginPeriod(1) requests 1 ms timer resolution; matched by
// timeEndPeriod(1) in stopStream().
4109 timeBeginPeriod( 1 );
4112 memset( &statistics, 0, sizeof( statistics ) );
4113 statistics.sampleRate = stream_.sampleRate;
4114 statistics.writeDeviceBufferLeadBytes = handle->dsPointerLeadTime[0];
// buffersRolling/duplexPrerollBytes are re-armed each start so that
// callbackEvent() re-synchronizes the read/write pointers from scratch.
4117 buffersRolling = false;
4118 duplexPrerollBytes = 0;
4120 if ( stream_.mode == DUPLEX ) {
4121 // 0.5 seconds of silence in DUPLEX mode while the devices spin up and synchronize.
4122 duplexPrerollBytes = (int) ( 0.5 * stream_.sampleRate * formatBytes( stream_.deviceFormat[1] ) * stream_.nDeviceChannels[1] );
// Start the output (playback) buffer in looping mode.
4126 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
4127 //statistics.outputFrameSize = formatBytes( stream_.deviceFormat[0] ) * stream_.nDeviceChannels[0];
4129 LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4130 result = buffer->Play( 0, 0, DSBPLAY_LOOPING );
4131 if ( FAILED( result ) ) {
4132 errorStream_ << "RtApiDs::startStream: error (" << getErrorString( result ) << ") starting output buffer!";
4133 errorText_ = errorStream_.str();
// Start the input (capture) buffer in looping mode.
4138 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
4139 //statistics.inputFrameSize = formatBytes( stream_.deviceFormat[1]) * stream_.nDeviceChannels[1];
4141 LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
4142 result = buffer->Start( DSCBSTART_LOOPING );
4143 if ( FAILED( result ) ) {
4144 errorStream_ << "RtApiDs::startStream: error (" << getErrorString( result ) << ") starting input buffer!";
4145 errorText_ = errorStream_.str();
// Clear any stale drain state from a previous stop, then go RUNNING.
4150 handle->drainCounter = 0;
4151 handle->internalDrain = false;
4152 stream_.state = STREAM_RUNNING;
4155 MUTEX_UNLOCK( &stream_.mutex );
// Report any DS failure recorded above only after the mutex is released.
4157 if ( FAILED( result ) ) error( RtError::SYSTEM_ERROR );
// Stop a running DirectSound stream. For output, optionally waits for a
// drain (signalled by callbackEvent via handle->condition), then stops
// the DS buffer, zeroes it so a restart does not replay stale audio, and
// rewinds the internal buffer pointer. The input buffer gets the same
// stop/zero/rewind treatment. Warns if already stopped.
// NOTE(review): this excerpt elides some original lines (left-hand numbers
// are the original file's line numbers), so braces appear unbalanced here.
// NOTE(review): several error strings below say "RtApiDs::abortStream" —
// presumably copy-paste from abortStream; confirm against upstream before
// changing the user-visible text.
4160 void RtApiDs :: stopStream()
4163 if ( stream_.state == STREAM_STOPPED ) {
4164 errorText_ = "RtApiDs::stopStream(): the stream is already stopped!";
4165 error( RtError::WARNING );
4169 MUTEX_LOCK( &stream_.mutex );
4174 DsHandle *handle = (DsHandle *) stream_.apiHandle;
4175 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
// drainCounter == 0 means no drain in progress: request one (set to 1)
// and block until callbackEvent signals the condition event. The mutex
// is dropped across the wait so the callback thread can run.
4176 if ( handle->drainCounter == 0 ) {
4177 handle->drainCounter = 1;
4178 MUTEX_UNLOCK( &stream_.mutex );
4179 WaitForMultipleObjects( 1, &handle->condition, FALSE, INFINITE ); // block until signaled
4180 ResetEvent( handle->condition );
4181 MUTEX_LOCK( &stream_.mutex );
4184 // Stop the buffer and clear memory
4185 LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4186 result = buffer->Stop();
4187 if ( FAILED( result ) ) {
4188 errorStream_ << "RtApiDs::abortStream: error (" << getErrorString( result ) << ") stopping output buffer!";
4189 errorText_ = errorStream_.str();
4193 // Lock the buffer and clear it so that if we start to play again,
4194 // we won't have old data playing.
4195 result = buffer->Lock( 0, handle->dsBufferSize[0], &audioPtr, &dataLen, NULL, NULL, 0 );
4196 if ( FAILED( result ) ) {
4197 errorStream_ << "RtApiDs::abortStream: error (" << getErrorString( result ) << ") locking output buffer!";
4198 errorText_ = errorStream_.str();
4202 // Zero the DS buffer
4203 ZeroMemory( audioPtr, dataLen );
4205 // Unlock the DS buffer
4206 result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );
4207 if ( FAILED( result ) ) {
4208 errorStream_ << "RtApiDs::abortStream: error (" << getErrorString( result ) << ") unlocking output buffer!";
4209 errorText_ = errorStream_.str();
4213 // If we start playing again, we must begin at beginning of buffer.
4214 handle->bufferPointer[0] = 0;
4217 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
4218 LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
4222 result = buffer->Stop();
4223 if ( FAILED( result ) ) {
4224 errorStream_ << "RtApiDs::abortStream: error (" << getErrorString( result ) << ") stopping input buffer!";
4225 errorText_ = errorStream_.str();
4229 // Lock the buffer and clear it so that if we start to play again,
4230 // we won't have old data playing.
4231 result = buffer->Lock( 0, handle->dsBufferSize[1], &audioPtr, &dataLen, NULL, NULL, 0 );
4232 if ( FAILED( result ) ) {
4233 errorStream_ << "RtApiDs::abortStream: error (" << getErrorString( result ) << ") locking input buffer!";
4234 errorText_ = errorStream_.str();
4238 // Zero the DS buffer
4239 ZeroMemory( audioPtr, dataLen );
4241 // Unlock the DS buffer
4242 result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );
4243 if ( FAILED( result ) ) {
4244 errorStream_ << "RtApiDs::abortStream: error (" << getErrorString( result ) << ") unlocking input buffer!";
4245 errorText_ = errorStream_.str();
4249 // If we start recording again, we must begin at beginning of buffer.
4250 handle->bufferPointer[1] = 0;
// Restore the normal timer resolution requested in startStream().
4254 timeEndPeriod( 1 ); // revert to normal scheduler frequency on lesser windows.
4255 stream_.state = STREAM_STOPPED;
4256 MUTEX_UNLOCK( &stream_.mutex );
// Report any DS failure recorded above only after the mutex is released.
4257 if ( FAILED( result ) ) error( RtError::SYSTEM_ERROR );
// Abort the stream without waiting for a drain: setting drainCounter to 1
// puts the handle into drain mode so playback winds down on the next
// callback passes. Warns if the stream is already stopped.
// NOTE(review): this excerpt elides some original lines (left-hand numbers
// are the original file's line numbers) — the trailing statements/braces
// of this function are not visible here.
4260 void RtApiDs :: abortStream()
4263 if ( stream_.state == STREAM_STOPPED ) {
4264 errorText_ = "RtApiDs::abortStream(): the stream is already stopped!";
4265 error( RtError::WARNING );
4269 DsHandle *handle = (DsHandle *) stream_.apiHandle;
4270 handle->drainCounter = 1;
// Per-tick engine of the DirectSound backend, run repeatedly by the
// callback thread (see callbackHandler). One invocation:
//   1. invokes the user callback to produce/consume one buffer of audio
//      (unless draining),
//   2. for DUPLEX, waits on first use for both DS ring buffers to start
//      moving and aligns the internal read/write pointers,
//   3. copies output data into the playback ring (sleeping until the
//      write region is safely behind the play cursor, resyncing on
//      underrun), and
//   4. copies capture data out of the input ring (backing off or
//      pre-rolling zeros in DUPLEX, sleeping in pure INPUT mode).
// The statement ordering here is load-bearing (lock/unlock pairing,
// pointer unwrapping, cursor math) — do not reorder casually.
// NOTE(review): this excerpt elides some original lines (left-hand
// numbers are the original file's line numbers), so braces appear
// unbalanced here. Several lines contain mojibake: "¤t..." is the
// HTML entity &curren; corruption of "&current..." — restore
// "&currentWritePos"/"&currentReadPos" from upstream source control.
4275 void RtApiDs :: callbackEvent()
// A stopped stream idles the callback thread instead of spinning.
4277 if ( stream_.state == STREAM_STOPPED ) {
4278 Sleep(50); // sleep 50 milliseconds
4282 if ( stream_.state == STREAM_CLOSED ) {
4283 errorText_ = "RtApiDs::callbackEvent(): the stream is closed ... this shouldn't happen!";
4284 error( RtError::WARNING );
4288 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
4289 DsHandle *handle = (DsHandle *) stream_.apiHandle;
4291 // Check if we were draining the stream and signal is finished.
// Drain is complete once the counter has been incremented past the
// number of queued buffers (+2 slack); wake stopStream() if it is waiting.
4292 if ( handle->drainCounter > stream_.nBuffers + 2 ) {
4293 if ( handle->internalDrain == false )
4294 SetEvent( handle->condition );
4300 MUTEX_LOCK( &stream_.mutex );
4302 // Invoke user callback to get fresh output data UNLESS we are
4304 if ( handle->drainCounter == 0 ) {
4305 RtAudioCallback callback = (RtAudioCallback) info->callback;
4306 double streamTime = getStreamTime();
4307 RtAudioStreamStatus status = 0;
// Report (and clear) any under/overflow flags set on previous passes.
4308 if ( stream_.mode != INPUT && handle->xrun[0] == true ) {
4309 status |= RTAUDIO_OUTPUT_UNDERFLOW;
4310 handle->xrun[0] = false;
4312 if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {
4313 status |= RTAUDIO_INPUT_OVERFLOW;
4314 handle->xrun[1] = false;
// The callback's return value feeds drainCounter: 1 = drain-then-stop,
// 2 = abort immediately (handled just below).
4316 handle->drainCounter = callback( stream_.userBuffer[0], stream_.userBuffer[1],
4317 stream_.bufferSize, streamTime, status, info->userData );
4318 if ( handle->drainCounter == 2 ) {
4319 MUTEX_UNLOCK( &stream_.mutex );
4323 else if ( handle->drainCounter == 1 )
4324 handle->internalDrain = true;
// DS cursor positions: "current" = play/capture cursor, "safe" = the
// position the app may write/read without racing the hardware.
4328 DWORD currentWritePos, safeWritePos;
4329 DWORD currentReadPos, safeReadPos;
4333 #ifdef GENERATE_DEBUG_LOG
4334 DWORD writeTime, readTime;
// Lock() may return a split (wrap-around) region as two spans.
4337 LPVOID buffer1 = NULL;
4338 LPVOID buffer2 = NULL;
4339 DWORD bufferSize1 = 0;
4340 DWORD bufferSize2 = 0;
// One-time DUPLEX startup synchronization (buffersRolling is reset by
// startStream).
4345 if ( stream_.mode == DUPLEX && !buffersRolling ) {
4346 assert( handle->dsBufferSize[0] == handle->dsBufferSize[1] );
4348 // It takes a while for the devices to get rolling. As a result,
4349 // there's no guarantee that the capture and write device pointers
4350 // will move in lockstep. Wait here for both devices to start
4351 // rolling, and then set our buffer pointers accordingly.
4352 // e.g. Crystal Drivers: the capture buffer starts up 5700 to 9600
4353 // bytes later than the write buffer.
4355 // Stub: a serious risk of having a pre-emptive scheduling round
4356 // take place between the two GetCurrentPosition calls... but I'm
4357 // really not sure how to solve the problem. Temporarily boost to
4358 // Realtime priority, maybe; but I'm not sure what priority the
4359 // DirectSound service threads run at. We *should* be roughly
4360 // within a ms or so of correct.
4362 LPDIRECTSOUNDBUFFER dsWriteBuffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4363 LPDIRECTSOUNDCAPTUREBUFFER dsCaptureBuffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
4365 DWORD initialWritePos, initialSafeWritePos;
4366 DWORD initialReadPos, initialSafeReadPos;
// Snapshot both cursors, then poll (loop body elided here) until both
// have moved away from their initial positions.
4368 result = dsWriteBuffer->GetCurrentPosition( &initialWritePos, &initialSafeWritePos );
4369 if ( FAILED( result ) ) {
4370 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current write position!";
4371 errorText_ = errorStream_.str();
4372 error( RtError::SYSTEM_ERROR );
4374 result = dsCaptureBuffer->GetCurrentPosition( &initialReadPos, &initialSafeReadPos );
4375 if ( FAILED( result ) ) {
4376 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";
4377 errorText_ = errorStream_.str();
4378 error( RtError::SYSTEM_ERROR );
4381 result = dsWriteBuffer->GetCurrentPosition( ¤tWritePos, &safeWritePos );
4382 if ( FAILED( result ) ) {
4383 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current write position!";
4384 errorText_ = errorStream_.str();
4385 error( RtError::SYSTEM_ERROR );
4387 result = dsCaptureBuffer->GetCurrentPosition( ¤tReadPos, &safeReadPos );
4388 if ( FAILED( result ) ) {
4389 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";
4390 errorText_ = errorStream_.str();
4391 error( RtError::SYSTEM_ERROR );
4393 if ( safeWritePos != initialSafeWritePos && safeReadPos != initialSafeReadPos ) break;
4397 assert( handle->dsBufferSize[0] == handle->dsBufferSize[1] );
// Both devices are rolling: seed our pointers. The write pointer leads
// the safe cursor by dsPointerLeadTime to leave headroom.
4399 buffersRolling = true;
4400 handle->bufferPointer[0] = ( safeWritePos + handle->dsPointerLeadTime[0] );
4401 handle->bufferPointer[1] = safeReadPos;
// ---------------- OUTPUT (playback) half ----------------
4404 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
4406 LPDIRECTSOUNDBUFFER dsBuffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
// While draining, feed silence instead of fresh callback data.
4408 if ( handle->drainCounter > 1 ) { // write zeros to the output stream
4409 bufferBytes = stream_.bufferSize * stream_.nUserChannels[0];
4410 bufferBytes *= formatBytes( stream_.userFormat );
4411 memset( stream_.userBuffer[0], 0, bufferBytes );
4414 // Setup parameters and do buffer conversion if necessary.
4415 if ( stream_.doConvertBuffer[0] ) {
4416 buffer = stream_.deviceBuffer;
4417 convertBuffer( buffer, stream_.userBuffer[0], stream_.convertInfo[0] );
4418 bufferBytes = stream_.bufferSize * stream_.nDeviceChannels[0];
4419 bufferBytes *= formatBytes( stream_.deviceFormat[0] );
4422 buffer = stream_.userBuffer[0];
4423 bufferBytes = stream_.bufferSize * stream_.nUserChannels[0];
4424 bufferBytes *= formatBytes( stream_.userFormat );
4427 // No byte swapping necessary in DirectSound implementation.
4429 // Ahhh ... windoze. 16-bit data is signed but 8-bit data is
4430 // unsigned. So, we need to convert our signed 8-bit data here to
4432 if ( stream_.deviceFormat[0] == RTAUDIO_SINT8 )
4433 for ( int i=0; i<bufferBytes; i++ ) buffer[i] = (unsigned char) ( buffer[i] + 128 );
4435 DWORD dsBufferSize = handle->dsBufferSize[0];
4436 nextWritePos = handle->bufferPointer[0];
// Spin/sleep until the whole write region is behind the play cursor.
4440 // Find out where the read and "safe write" pointers are.
4441 result = dsBuffer->GetCurrentPosition( ¤tWritePos, &safeWritePos );
4442 if ( FAILED( result ) ) {
4443 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current write position!";
4444 errorText_ = errorStream_.str();
4445 error( RtError::SYSTEM_ERROR );
4448 leadPos = safeWritePos + handle->dsPointerLeadTime[0];
4449 if ( leadPos > dsBufferSize ) leadPos -= dsBufferSize;
4450 if ( leadPos < nextWritePos ) leadPos += dsBufferSize; // unwrap offset
4451 endWrite = nextWritePos + bufferBytes;
4453 // Check whether the entire write region is behind the play pointer.
4454 if ( leadPos >= endWrite ) break;
4456 // If we are here, then we must wait until the play pointer gets
4457 // beyond the write region. The approach here is to use the
4458 // Sleep() function to suspend operation until safePos catches
4459 // up. Calculate number of milliseconds to wait as:
4460 // time = distance * (milliseconds/second) * fudgefactor /
4461 // ((bytes/sample) * (samples/second))
4462 // A "fudgefactor" less than 1 is used because it was found
4463 // that sleeping too long was MUCH worse than sleeping for
4464 // several shorter periods.
4465 double millis = ( endWrite - leadPos ) * 900.0;
4466 millis /= ( formatBytes( stream_.deviceFormat[0]) * stream_.nDeviceChannels[0] * stream_.sampleRate);
4467 if ( millis < 1.0 ) millis = 1.0;
4468 if ( millis > 50.0 ) {
4469 static int nOverruns = 0;
4472 Sleep( (DWORD) millis );
4475 //if ( statistics.writeDeviceSafeLeadBytes < dsPointerDifference( safeWritePos, currentWritePos, handle->dsBufferSize[0] ) ) {
4476 // statistics.writeDeviceSafeLeadBytes = dsPointerDifference( safeWritePos, currentWritePos, handle->dsBufferSize[0] );
// Underrun detection: if our write region overlaps the hardware's
// forbidden zone, flag the xrun and jump ahead to a safe position.
4479 if ( dsPointerBetween( nextWritePos, safeWritePos, currentWritePos, dsBufferSize )
4480 || dsPointerBetween( endWrite, safeWritePos, currentWritePos, dsBufferSize ) ) {
4481 // We've strayed into the forbidden zone ... resync the read pointer.
4482 //++statistics.numberOfWriteUnderruns;
4483 handle->xrun[0] = true;
4484 nextWritePos = safeWritePos + handle->dsPointerLeadTime[0] - bufferBytes + dsBufferSize;
4485 while ( nextWritePos >= dsBufferSize ) nextWritePos -= dsBufferSize;
4486 handle->bufferPointer[0] = nextWritePos;
4487 endWrite = nextWritePos + bufferBytes;
4490 // Lock free space in the buffer
4491 result = dsBuffer->Lock( nextWritePos, bufferBytes, &buffer1,
4492 &bufferSize1, &buffer2, &bufferSize2, 0 );
4493 if ( FAILED( result ) ) {
4494 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") locking buffer during playback!";
4495 errorText_ = errorStream_.str();
4496 error( RtError::SYSTEM_ERROR );
4499 // Copy our buffer into the DS buffer
// buffer2 is non-NULL when the locked region wraps around the ring end.
4500 CopyMemory( buffer1, buffer, bufferSize1 );
4501 if ( buffer2 != NULL ) CopyMemory( buffer2, buffer+bufferSize1, bufferSize2 );
4503 // Update our buffer offset and unlock sound buffer
4504 dsBuffer->Unlock( buffer1, bufferSize1, buffer2, bufferSize2 );
// NOTE(review): this checks the Lock()'s stale `result` — Unlock's return
// value above is discarded; presumably `result =` was intended. Confirm
// against upstream.
4505 if ( FAILED( result ) ) {
4506 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") unlocking buffer during playback!";
4507 errorText_ = errorStream_.str();
4508 error( RtError::SYSTEM_ERROR );
4510 nextWritePos = ( nextWritePos + bufferSize1 + bufferSize2 ) % dsBufferSize;
4511 handle->bufferPointer[0] = nextWritePos;
// Advance the drain counter once per pass while draining.
4513 if ( handle->drainCounter ) {
4514 handle->drainCounter++;
// ---------------- INPUT (capture) half ----------------
4519 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
4521 // Setup parameters.
4522 if ( stream_.doConvertBuffer[1] ) {
4523 buffer = stream_.deviceBuffer;
4524 bufferBytes = stream_.bufferSize * stream_.nDeviceChannels[1];
4525 bufferBytes *= formatBytes( stream_.deviceFormat[1] );
4528 buffer = stream_.userBuffer[1];
4529 bufferBytes = stream_.bufferSize * stream_.nUserChannels[1];
4530 bufferBytes *= formatBytes( stream_.userFormat );
4533 LPDIRECTSOUNDCAPTUREBUFFER dsBuffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
4534 long nextReadPos = handle->bufferPointer[1];
4535 DWORD dsBufferSize = handle->dsBufferSize[1];
4537 // Find out where the write and "safe read" pointers are.
4538 result = dsBuffer->GetCurrentPosition( ¤tReadPos, &safeReadPos );
4539 if ( FAILED( result ) ) {
4540 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";
4541 errorText_ = errorStream_.str();
4542 error( RtError::SYSTEM_ERROR );
4545 if ( safeReadPos < (DWORD)nextReadPos ) safeReadPos += dsBufferSize; // unwrap offset
4546 DWORD endRead = nextReadPos + bufferBytes;
4548 // Handling depends on whether we are INPUT or DUPLEX.
4549 // If we're in INPUT mode then waiting is a good thing. If we're in DUPLEX mode,
4550 // then a wait here will drag the write pointers into the forbidden zone.
4552 // In DUPLEX mode, rather than wait, we will back off the read pointer until
4553 // it's in a safe position. This causes dropouts, but it seems to be the only
4554 // practical way to sync up the read and write pointers reliably, given the
4555 // the very complex relationship between phase and increment of the read and write
4558 // In order to minimize audible dropouts in DUPLEX mode, we will
4559 // provide a pre-roll period of 0.5 seconds in which we return
4560 // zeros from the read buffer while the pointers sync up.
4562 if ( stream_.mode == DUPLEX ) {
4563 if ( safeReadPos < endRead ) {
4564 if ( duplexPrerollBytes <= 0 ) {
4565 // Pre-roll time over. Be more agressive.
4566 int adjustment = endRead-safeReadPos;
4568 handle->xrun[1] = true;
4569 //++statistics.numberOfReadOverruns;
4571 // - large adjustments: we've probably run out of CPU cycles, so just resync exactly,
4572 // and perform fine adjustments later.
4573 // - small adjustments: back off by twice as much.
4574 if ( adjustment >= 2*bufferBytes )
4575 nextReadPos = safeReadPos-2*bufferBytes;
4577 nextReadPos = safeReadPos-bufferBytes-adjustment;
4579 //statistics.readDeviceSafeLeadBytes = currentReadPos-nextReadPos;
4580 //if ( statistics.readDeviceSafeLeadBytes < 0) statistics.readDeviceSafeLeadBytes += dsBufferSize;
4581 if ( nextReadPos < 0 ) nextReadPos += dsBufferSize;
4585 // In pre=roll time. Just do it.
4586 nextReadPos = safeReadPos-bufferBytes;
4587 while ( nextReadPos < 0 ) nextReadPos += dsBufferSize;
4589 endRead = nextReadPos + bufferBytes;
4592 else { // mode == INPUT
// Pure capture: sleeping until enough data accumulates is harmless.
4593 while ( safeReadPos < endRead ) {
4594 // See comments for playback.
4595 double millis = (endRead - safeReadPos) * 900.0;
4596 millis /= ( formatBytes(stream_.deviceFormat[1]) * stream_.nDeviceChannels[1] * stream_.sampleRate);
4597 if ( millis < 1.0 ) millis = 1.0;
4598 Sleep( (DWORD) millis );
4600 // Wake up, find out where we are now
4601 result = dsBuffer->GetCurrentPosition( ¤tReadPos, &safeReadPos );
4602 if ( FAILED( result ) ) {
4603 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";
4604 errorText_ = errorStream_.str();
4605 error( RtError::SYSTEM_ERROR );
4608 if ( safeReadPos < (DWORD)nextReadPos ) safeReadPos += dsBufferSize; // unwrap offset
4612 //if (statistics.readDeviceSafeLeadBytes < dsPointerDifference( currentReadPos, nextReadPos, dsBufferSize ) )
4613 // statistics.readDeviceSafeLeadBytes = dsPointerDifference( currentReadPos, nextReadPos, dsBufferSize );
4615 // Lock free space in the buffer
4616 result = dsBuffer->Lock( nextReadPos, bufferBytes, &buffer1,
4617 &bufferSize1, &buffer2, &bufferSize2, 0 );
4618 if ( FAILED( result ) ) {
4619 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") locking capture buffer!";
4620 errorText_ = errorStream_.str();
4621 error( RtError::SYSTEM_ERROR );
// During DUPLEX pre-roll, deliver silence instead of captured data.
4624 if ( duplexPrerollBytes <= 0 ) {
4625 // Copy our buffer into the DS buffer
4626 CopyMemory( buffer, buffer1, bufferSize1 );
4627 if ( buffer2 != NULL ) CopyMemory( buffer+bufferSize1, buffer2, bufferSize2 );
4630 memset( buffer, 0, bufferSize1 );
4631 if ( buffer2 != NULL ) memset( buffer + bufferSize1, 0, bufferSize2 );
4632 duplexPrerollBytes -= bufferSize1 + bufferSize2;
4635 // Update our buffer offset and unlock sound buffer
4636 nextReadPos = ( nextReadPos + bufferSize1 + bufferSize2 ) % dsBufferSize;
4637 dsBuffer->Unlock( buffer1, bufferSize1, buffer2, bufferSize2 );
// NOTE(review): as on the playback side, this tests the stale Lock()
// `result`, not the Unlock return value.
4638 if ( FAILED( result ) ) {
4639 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") unlocking capture buffer!";
4640 errorText_ = errorStream_.str();
4641 error( RtError::SYSTEM_ERROR );
4643 handle->bufferPointer[1] = nextReadPos;
4645 // No byte swapping necessary in DirectSound implementation.
4647 // If necessary, convert 8-bit data from unsigned to signed.
4648 if ( stream_.deviceFormat[1] == RTAUDIO_SINT8 )
4649 for ( int j=0; j<bufferBytes; j++ ) buffer[j] = (signed char) ( buffer[j] - 128 );
4651 // Do buffer conversion if necessary.
4652 if ( stream_.doConvertBuffer[1] )
4653 convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
4655 #ifdef GENERATE_DEBUG_LOG
4656 if ( currentDebugLogEntry < debugLog.size() )
4658 TTickRecord &r = debugLog[currentDebugLogEntry++];
4659 r.currentReadPointer = currentReadPos;
4660 r.safeReadPointer = safeReadPos;
4661 r.currentWritePointer = currentWritePos;
4662 r.safeWritePointer = safeWritePos;
4663 r.readTime = readTime;
4664 r.writeTime = writeTime;
4665 r.nextReadPointer = handles[1].bufferPointer;
4666 r.nextWritePointer = handles[0].bufferPointer;
4671 MUTEX_UNLOCK( &stream_.mutex );
// Advance the stream's running time by one buffer.
4673 RtApi::tickStreamTime();
4676 // Definitions for utility functions and callbacks
4677 // specific to the DirectSound implementation.
// Thread entry point started by _beginthreadex in probeDeviceOpen. Loops
// calling RtApiDs::callbackEvent() until closeStream() clears
// info->isRunning; ptr is the stream's CallbackInfo.
// NOTE(review): this excerpt elides some original lines (left-hand numbers
// are the original file's line numbers) — the function's tail (return,
// braces) is not visible here.
4679 extern "C" unsigned __stdcall callbackHandler( void *ptr )
4681 CallbackInfo *info = (CallbackInfo *) ptr;
4682 RtApiDs *object = (RtApiDs *) info->object;
4683 bool* isRunning = &info->isRunning;
4685 while ( *isRunning == true ) {
4686 object->callbackEvent();
// Convert a Win32 TCHAR string (wide under UNICODE builds, narrow
// otherwise) to a std::string of one-byte chars. Under UNICODE each
// wide character is narrowed by truncation — lossy for non-ASCII names,
// as the original comment below acknowledges.
// NOTE(review): this excerpt elides some original lines (left-hand numbers
// are the original file's line numbers) — the declaration of `s`, the
// #else/#endif, and the return are not visible here.
4695 std::string convertTChar( LPCTSTR name )
4699 #if defined( UNICODE ) || defined( _UNICODE )
4700 // Yes, this conversion doesn't make sense for two-byte characters
4701 // but RtAudio is currently written to return an std::string of
4702 // one-byte chars for the device name.
4703 for ( unsigned int i=0; i<wcslen( name ); i++ )
4704 s.push_back( name[i] );
4706 s.append( std::string( name ) );
// DirectSound device-enumeration callback (passed to
// DirectSoundEnumerate/DirectSoundCaptureEnumerate). For each device it
// opens the capture or playback object, queries its caps to decide
// whether the device is usable, and updates the EnumInfo in lpContext
// (counting devices, capturing the name of the one at info->index, or
// stopping at the first/default NULL-GUID device). Returning TRUE
// continues enumeration; FALSE stops it.
// NOTE(review): this excerpt elides some original lines (left-hand numbers
// are the original file's line numbers) — declarations of hr/caps, the
// object Release() calls, counter increments, and the final return are
// not visible here.
4712 static BOOL CALLBACK deviceQueryCallback( LPGUID lpguid,
4713 LPCTSTR description,
4717 EnumInfo *info = (EnumInfo *) lpContext;
// Capture-side probe: a device counts if it reports channels and formats.
4720 if ( info->isInput == true ) {
4722 LPDIRECTSOUNDCAPTURE object;
4724 hr = DirectSoundCaptureCreate( lpguid, &object, NULL );
4725 if ( hr != DS_OK ) return TRUE;
4727 caps.dwSize = sizeof(caps);
4728 hr = object->GetCaps( &caps );
4729 if ( hr == DS_OK ) {
4730 if ( caps.dwChannels > 0 && caps.dwFormats > 0 )
// Playback-side probe: a device counts if its primary buffer supports
// mono or stereo output.
4737 LPDIRECTSOUND object;
4738 hr = DirectSoundCreate( lpguid, &object, NULL );
4739 if ( hr != DS_OK ) return TRUE;
4741 caps.dwSize = sizeof(caps);
4742 hr = object->GetCaps( &caps );
4743 if ( hr == DS_OK ) {
4744 if ( caps.dwFlags & DSCAPS_PRIMARYMONO || caps.dwFlags & DSCAPS_PRIMARYSTEREO )
// A NULL GUID denotes the default device; stop here when only the
// default was requested.
4750 if ( info->getDefault && lpguid == NULL ) return FALSE;
// When searching by index, record the name once past the target index.
4752 if ( info->findIndex && info->counter > info->index ) {
4754 info->name = convertTChar( description );
// Map a DirectSound HRESULT error code to a short human-readable string
// (string literals with static storage; ideally the return type would be
// const char*, but changing it is out of scope for a comment-only pass).
// Unrecognized codes fall through to "DirectSound unknown error".
// NOTE(review): this excerpt elides some original lines (left-hand numbers
// are the original file's line numbers) — the switch header, some case
// labels, and closing braces are not visible here.
4761 static char* getErrorString( int code )
4765 case DSERR_ALLOCATED:
4766 return "Already allocated";
4768 case DSERR_CONTROLUNAVAIL:
4769 return "Control unavailable";
4771 case DSERR_INVALIDPARAM:
4772 return "Invalid parameter";
4774 case DSERR_INVALIDCALL:
4775 return "Invalid call";
4778 return "Generic error";
4780 case DSERR_PRIOLEVELNEEDED:
4781 return "Priority level needed";
4783 case DSERR_OUTOFMEMORY:
4784 return "Out of memory";
4786 case DSERR_BADFORMAT:
4787 return "The sample rate or the channel format is not supported";
4789 case DSERR_UNSUPPORTED:
4790 return "Not supported";
4792 case DSERR_NODRIVER:
4795 case DSERR_ALREADYINITIALIZED:
4796 return "Already initialized";
4798 case DSERR_NOAGGREGATION:
4799 return "No aggregation";
4801 case DSERR_BUFFERLOST:
4802 return "Buffer lost";
4804 case DSERR_OTHERAPPHASPRIO:
4805 return "Another application already has priority";
4807 case DSERR_UNINITIALIZED:
4808 return "Uninitialized";
4811 return "DirectSound unknown error";
4814 //******************** End of __WINDOWS_DS__ *********************//
4818 #if defined(__LINUX_ALSA__)
4820 #include <alsa/asoundlib.h>
4823 // A structure to hold various information related to the ALSA API
4826 snd_pcm_t *handles[2];
4831 :synchronized(false) { xrun[0] = false; xrun[1] = false; }
4834 extern "C" void *alsaCallbackHandler( void * ptr );
// Default constructor — all real initialization happens in the RtApi base
// class; the ALSA backend needs no constructor-time work of its own.
4836 RtApiAlsa :: RtApiAlsa()
4838 // Nothing to do here.
// Destructor: ensure any still-open stream is shut down so ALSA handles
// and internal buffers are released before the object goes away.
4841 RtApiAlsa :: ~RtApiAlsa()
4843 if ( stream_.state != STREAM_CLOSED ) closeStream();
// Count available ALSA PCM devices by walking every sound card
// ("hw:0", "hw:1", ...) with snd_card_next and, on each card, iterating
// its PCM subdevices with snd_ctl_pcm_next_device. Cards whose control
// interface fails to open are skipped with a WARNING rather than aborting
// the count.
// NOTE(review): this excerpt elides some original lines (left-hand numbers
// are the original file's line numbers) — declarations of `name`/`handle`,
// the inner loop braces, nDevices increments, and the final return are not
// visible here.
4846 unsigned int RtApiAlsa :: getDeviceCount( void )
4848 unsigned nDevices = 0;
4849 int result, subdevice, card;
4853 // Count cards and devices
// snd_card_next(-1 → first card); card becomes -1 when exhausted.
4855 snd_card_next( &card );
4856 while ( card >= 0 ) {
4857 sprintf( name, "hw:%d", card );
4858 result = snd_ctl_open( &handle, name, 0 );
4860 errorStream_ << "RtApiAlsa::getDeviceCount: control open, card = " << card << ", " << snd_strerror( result ) << ".";
4861 errorText_ = errorStream_.str();
4862 error( RtError::WARNING );
// Enumerate this card's PCM devices; subdevice goes negative when done.
4867 result = snd_ctl_pcm_next_device( handle, &subdevice );
4869 errorStream_ << "RtApiAlsa::getDeviceCount: control next device, card = " << card << ", " << snd_strerror( result ) << ".";
4870 errorText_ = errorStream_.str();
4871 error( RtError::WARNING );
4874 if ( subdevice < 0 )
4879 snd_ctl_close( handle );
4880 snd_card_next( &card );
// Probe a single ALSA device (identified by its zero-based enumeration
// index) and fill in an RtAudio::DeviceInfo: output/input/duplex channel
// counts, supported sample rates, native data formats, default flags and
// the human-readable name.
//
// Fix applied: `snd_pcm_hw_params_alloca( ¶ms )` was encoding
// corruption (HTML entity &para; swallowing "&pa") of the original
// `snd_pcm_hw_params_alloca( &params )` — restored so the alloca macro
// receives the address of the params pointer. All other tokens are
// unchanged.
4886 RtAudio::DeviceInfo RtApiAlsa :: getDeviceInfo( unsigned int device )
4888 RtAudio::DeviceInfo info;
4889 info.probed = false;
4891 unsigned nDevices = 0;
4892 int result, subdevice, card;
4896 // Count cards and devices
// Re-enumerate cards/devices (same walk as getDeviceCount) until the
// requested index is reached; `name` is left holding "hw:card,subdevice".
4898 snd_card_next( &card );
4899 while ( card >= 0 ) {
4900 sprintf( name, "hw:%d", card );
4901 result = snd_ctl_open( &chandle, name, SND_CTL_NONBLOCK );
4903 errorStream_ << "RtApiAlsa::getDeviceInfo: control open, card = " << card << ", " << snd_strerror( result ) << ".";
4904 errorText_ = errorStream_.str();
4905 error( RtError::WARNING );
4910 result = snd_ctl_pcm_next_device( chandle, &subdevice );
4912 errorStream_ << "RtApiAlsa::getDeviceInfo: control next device, card = " << card << ", " << snd_strerror( result ) << ".";
4913 errorText_ = errorStream_.str();
4914 error( RtError::WARNING );
4917 if ( subdevice < 0 ) break;
4918 if ( nDevices == device ) {
4919 sprintf( name, "hw:%d,%d", card, subdevice );
4925 snd_ctl_close( chandle );
4926 snd_card_next( &card );
4929 if ( nDevices == 0 ) {
4930 errorText_ = "RtApiAlsa::getDeviceInfo: no devices found!";
4931 error( RtError::INVALID_USE );
4934 if ( device >= nDevices ) {
4935 errorText_ = "RtApiAlsa::getDeviceInfo: device ID is invalid!";
4936 error( RtError::INVALID_USE );
// Stack-allocated ALSA query structures (alloca macros — no free needed).
4941 int openMode = SND_PCM_ASYNC;
4942 snd_pcm_stream_t stream;
4943 snd_pcm_info_t *pcminfo;
4944 snd_pcm_info_alloca( &pcminfo );
4946 snd_pcm_hw_params_t *params;
4947 snd_pcm_hw_params_alloca( &params );
4949 // First try for playback
4950 stream = SND_PCM_STREAM_PLAYBACK;
4951 snd_pcm_info_set_device( pcminfo, subdevice );
4952 snd_pcm_info_set_subdevice( pcminfo, 0 );
4953 snd_pcm_info_set_stream( pcminfo, stream );
4955 result = snd_ctl_pcm_info( chandle, pcminfo );
4957 // Device probably doesn't support playback.
4961 result = snd_pcm_open( &phandle, name, stream, openMode | SND_PCM_NONBLOCK );
4963 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_open error for device (" << name << "), " << snd_strerror( result ) << ".";
4964 errorText_ = errorStream_.str();
4965 error( RtError::WARNING );
4969 // The device is open ... fill the parameter structure.
4970 result = snd_pcm_hw_params_any( phandle, params );
4972 snd_pcm_close( phandle );
4973 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_hw_params error for device (" << name << "), " << snd_strerror( result ) << ".";
4974 errorText_ = errorStream_.str();
4975 error( RtError::WARNING );
4979 // Get output channel information.
4981 result = snd_pcm_hw_params_get_channels_max( params, &value );
4983 snd_pcm_close( phandle );
4984 errorStream_ << "RtApiAlsa::getDeviceInfo: error getting device (" << name << ") output channels, " << snd_strerror( result ) << ".";
4985 errorText_ = errorStream_.str();
4986 error( RtError::WARNING );
4989 info.outputChannels = value;
4990 snd_pcm_close( phandle );
4993 // Now try for capture
4994 stream = SND_PCM_STREAM_CAPTURE;
4995 snd_pcm_info_set_stream( pcminfo, stream );
// The control handle is no longer needed past this query; close it here
// on every capture-side path (success or failure).
4997 result = snd_ctl_pcm_info( chandle, pcminfo );
4998 snd_ctl_close( chandle );
5000 // Device probably doesn't support capture.
5001 if ( info.outputChannels == 0 ) return info;
5002 goto probeParameters;
5005 result = snd_pcm_open( &phandle, name, stream, openMode | SND_PCM_NONBLOCK);
5007 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_open error for device (" << name << "), " << snd_strerror( result ) << ".";
5008 errorText_ = errorStream_.str();
5009 error( RtError::WARNING );
// If playback probing succeeded earlier we can still report a useful
// (output-only) result; otherwise fall through to parameter probing.
5010 if ( info.outputChannels == 0 ) return info;
5011 goto probeParameters;
5014 // The device is open ... fill the parameter structure.
5015 result = snd_pcm_hw_params_any( phandle, params );
5017 snd_pcm_close( phandle );
5018 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_hw_params error for device (" << name << "), " << snd_strerror( result ) << ".";
5019 errorText_ = errorStream_.str();
5020 error( RtError::WARNING );
5021 if ( info.outputChannels == 0 ) return info;
5022 goto probeParameters;
5025 result = snd_pcm_hw_params_get_channels_max( params, &value );
5027 snd_pcm_close( phandle );
5028 errorStream_ << "RtApiAlsa::getDeviceInfo: error getting device (" << name << ") input channels, " << snd_strerror( result ) << ".";
5029 errorText_ = errorStream_.str();
5030 error( RtError::WARNING );
5031 if ( info.outputChannels == 0 ) return info;
5032 goto probeParameters;
5034 info.inputChannels = value;
5035 snd_pcm_close( phandle );
5037 // If device opens for both playback and capture, we determine the channels.
5038 if ( info.outputChannels > 0 && info.inputChannels > 0 )
5039 info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
5041 // ALSA doesn't provide default devices so we'll use the first available one.
5042 if ( device == 0 && info.outputChannels > 0 )
5043 info.isDefaultOutput = true;
5044 if ( device == 0 && info.inputChannels > 0 )
5045 info.isDefaultInput = true;
5048 // At this point, we just need to figure out the supported data
5049 // formats and sample rates. We'll proceed by opening the device in
5050 // the direction with the maximum number of channels, or playback if
5051 // they are equal. This might limit our sample rate options, but so
5054 if ( info.outputChannels >= info.inputChannels )
5055 stream = SND_PCM_STREAM_PLAYBACK;
5057 stream = SND_PCM_STREAM_CAPTURE;
5058 snd_pcm_info_set_stream( pcminfo, stream );
5060 result = snd_pcm_open( &phandle, name, stream, openMode | SND_PCM_NONBLOCK);
5062 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_open error for device (" << name << "), " << snd_strerror( result ) << ".";
5063 errorText_ = errorStream_.str();
5064 error( RtError::WARNING );
5068 // The device is open ... fill the parameter structure.
5069 result = snd_pcm_hw_params_any( phandle, params );
5071 snd_pcm_close( phandle );
5072 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_hw_params error for device (" << name << "), " << snd_strerror( result ) << ".";
5073 errorText_ = errorStream_.str();
5074 error( RtError::WARNING );
5078 // Test our discrete set of sample rate values.
5079 info.sampleRates.clear();
5080 for ( unsigned int i=0; i<MAX_SAMPLE_RATES; i++ ) {
5081 if ( snd_pcm_hw_params_test_rate( phandle, params, SAMPLE_RATES[i], 0 ) == 0 )
5082 info.sampleRates.push_back( SAMPLE_RATES[i] );
5084 if ( info.sampleRates.size() == 0 ) {
5085 snd_pcm_close( phandle );
5086 errorStream_ << "RtApiAlsa::getDeviceInfo: no supported sample rates found for device (" << name << ").";
5087 errorText_ = errorStream_.str();
5088 error( RtError::WARNING );
5092 // Probe the supported data formats ... we don't care about endian-ness just yet
// Each snd_pcm_hw_params_test_format() success OR-s the matching RtAudio
// format flag into the bitmask.
5093 snd_pcm_format_t format;
5094 info.nativeFormats = 0;
5095 format = SND_PCM_FORMAT_S8;
5096 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5097 info.nativeFormats |= RTAUDIO_SINT8;
5098 format = SND_PCM_FORMAT_S16;
5099 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5100 info.nativeFormats |= RTAUDIO_SINT16;
5101 format = SND_PCM_FORMAT_S24;
5102 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5103 info.nativeFormats |= RTAUDIO_SINT24;
5104 format = SND_PCM_FORMAT_S32;
5105 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5106 info.nativeFormats |= RTAUDIO_SINT32;
5107 format = SND_PCM_FORMAT_FLOAT;
5108 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5109 info.nativeFormats |= RTAUDIO_FLOAT32;
5110 format = SND_PCM_FORMAT_FLOAT64;
5111 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5112 info.nativeFormats |= RTAUDIO_FLOAT64;
5114 // Check that we have at least one supported format
5115 if ( info.nativeFormats == 0 ) {
5116 errorStream_ << "RtApiAlsa::getDeviceInfo: pcm device (" << name << ") data format not supported by RtAudio.";
5117 errorText_ = errorStream_.str();
5118 error( RtError::WARNING );
5122 // Get the device name
// NOTE(review): snd_card_get_name() heap-allocates `cardname` and the
// sprintf below writes it into a fixed-size `name` buffer (declared on a
// line not visible in this excerpt) — potential leak/overflow to confirm
// against the full source.
5124 result = snd_card_get_name( card, &cardname );
5126 sprintf( name, "hw:%s,%d", cardname, subdevice );
5129 // That's all ... close the device and return
5130 snd_pcm_close( phandle );
// Open and configure an ALSA PCM device for one stream direction.
//
// Parameters:
//   device       - zero-based device index (same enumeration as getDeviceCount).
//   mode         - OUTPUT or INPUT; duplex streams call this twice.
//   channels     - number of channels requested by the user.
//   firstChannel - channel offset within the device.
//   sampleRate   - requested rate (nearest supported rate is installed).
//   format       - requested RtAudio sample format.
//   bufferSize   - in/out: requested period size in frames; updated to the
//                  value actually granted by the hardware.
//   options      - optional flags (non-interleaved, minimize-latency,
//                  number of buffers/periods).
// Returns true on success, FAILURE after cleanup on any error.
//
// NOTE(review): many control-flow lines (braces, `if ( result < 0 ) {`,
// local declarations such as `name`, `phandle`, `chandle`, `value`, `dir`)
// are missing from this excerpt; the code below is annotated as-is and
// left byte-identical.
5135 bool RtApiAlsa :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
5136 unsigned int firstChannel, unsigned int sampleRate,
5137 RtAudioFormat format, unsigned int *bufferSize,
5138 RtAudio::StreamOptions *options )
5141 #if defined(__RTAUDIO_DEBUG__)
5143 snd_output_stdio_attach(&out, stderr, 0);
5146 // I'm not using the "plug" interface ... too much inconsistent behavior.
// Re-enumerate cards to translate the device index into an "hw:card,dev"
// name, exactly as getDeviceInfo does.
5148 unsigned nDevices = 0;
5149 int result, subdevice, card;
5153 // Count cards and devices
5155 snd_card_next( &card );
5156 while ( card >= 0 ) {
5157 sprintf( name, "hw:%d", card );
5158 result = snd_ctl_open( &chandle, name, SND_CTL_NONBLOCK );
5160 errorStream_ << "RtApiAlsa::probeDeviceOpen: control open, card = " << card << ", " << snd_strerror( result ) << ".";
5161 errorText_ = errorStream_.str();
5166 result = snd_ctl_pcm_next_device( chandle, &subdevice );
5167 if ( result < 0 ) break;
5168 if ( subdevice < 0 ) break;
5169 if ( nDevices == device ) {
5170 sprintf( name, "hw:%d,%d", card, subdevice );
5171 snd_ctl_close( chandle );
5176 snd_ctl_close( chandle );
5177 snd_card_next( &card );
5180 if ( nDevices == 0 ) {
5181 // This should not happen because a check is made before this function is called.
5182 errorText_ = "RtApiAlsa::probeDeviceOpen: no devices found!";
5186 if ( device >= nDevices ) {
5187 // This should not happen because a check is made before this function is called.
5188 errorText_ = "RtApiAlsa::probeDeviceOpen: device ID is invalid!";
// Map the RtAudio stream mode onto the ALSA stream direction.
5194 snd_pcm_stream_t stream;
5195 if ( mode == OUTPUT )
5196 stream = SND_PCM_STREAM_PLAYBACK;
5198 stream = SND_PCM_STREAM_CAPTURE;
5201 int openMode = SND_PCM_ASYNC;
5202 result = snd_pcm_open( &phandle, name, stream, openMode );
5204 if ( mode == OUTPUT )
5205 errorStream_ << "RtApiAlsa::probeDeviceOpen: pcm device (" << name << ") won't open for output.";
5207 errorStream_ << "RtApiAlsa::probeDeviceOpen: pcm device (" << name << ") won't open for input.";
5208 errorText_ = errorStream_.str();
5212 // Fill the parameter structure.
5213 snd_pcm_hw_params_t *hw_params;
5214 snd_pcm_hw_params_alloca( &hw_params );
5215 result = snd_pcm_hw_params_any( phandle, hw_params );
5217 snd_pcm_close( phandle );
5218 errorStream_ << "RtApiAlsa::probeDeviceOpen: error getting pcm device (" << name << ") parameters, " << snd_strerror( result ) << ".";
5219 errorText_ = errorStream_.str();
5223 #if defined(__RTAUDIO_DEBUG__)
5224 fprintf( stderr, "\nRtApiAlsa: dump hardware params just after device open:\n\n" );
5225 snd_pcm_hw_params_dump( hw_params, out );
5228 // Set access ... check user preference.
// Try the user's interleaving preference first; if the hardware rejects
// it, fall back to the other access mode and record the device's actual
// layout so callbackEvent can convert buffers.
5229 if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) {
5230 stream_.userInterleaved = false;
5231 result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_NONINTERLEAVED );
5233 result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_INTERLEAVED );
5234 stream_.deviceInterleaved[mode] = true;
5237 stream_.deviceInterleaved[mode] = false;
5240 stream_.userInterleaved = true;
5241 result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_INTERLEAVED );
5243 result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_NONINTERLEAVED );
5244 stream_.deviceInterleaved[mode] = false;
5247 stream_.deviceInterleaved[mode] = true;
5251 snd_pcm_close( phandle );
5252 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting pcm device (" << name << ") access, " << snd_strerror( result ) << ".";
5253 errorText_ = errorStream_.str();
5257 // Determine how to set the device format.
// First try the user's format natively; otherwise fall back through the
// format list from widest (FLOAT64) to narrowest (S8), letting the
// conversion machinery bridge user format <-> device format.
5258 stream_.userFormat = format;
5259 snd_pcm_format_t deviceFormat = SND_PCM_FORMAT_UNKNOWN;
5261 if ( format == RTAUDIO_SINT8 )
5262 deviceFormat = SND_PCM_FORMAT_S8;
5263 else if ( format == RTAUDIO_SINT16 )
5264 deviceFormat = SND_PCM_FORMAT_S16;
5265 else if ( format == RTAUDIO_SINT24 )
5266 deviceFormat = SND_PCM_FORMAT_S24;
5267 else if ( format == RTAUDIO_SINT32 )
5268 deviceFormat = SND_PCM_FORMAT_S32;
5269 else if ( format == RTAUDIO_FLOAT32 )
5270 deviceFormat = SND_PCM_FORMAT_FLOAT;
5271 else if ( format == RTAUDIO_FLOAT64 )
5272 deviceFormat = SND_PCM_FORMAT_FLOAT64;
5274 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat) == 0) {
5275 stream_.deviceFormat[mode] = format;
5279 // The user requested format is not natively supported by the device.
5280 deviceFormat = SND_PCM_FORMAT_FLOAT64;
5281 if ( snd_pcm_hw_params_test_format( phandle, hw_params, deviceFormat ) == 0 ) {
5282 stream_.deviceFormat[mode] = RTAUDIO_FLOAT64;
5286 deviceFormat = SND_PCM_FORMAT_FLOAT;
5287 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
5288 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
5292 deviceFormat = SND_PCM_FORMAT_S32;
5293 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
5294 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
5298 deviceFormat = SND_PCM_FORMAT_S24;
5299 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
5300 stream_.deviceFormat[mode] = RTAUDIO_SINT24;
5304 deviceFormat = SND_PCM_FORMAT_S16;
5305 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
5306 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
5310 deviceFormat = SND_PCM_FORMAT_S8;
5311 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
5312 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
5316 // If we get here, no supported format was found.
5317 errorStream_ << "RtApiAlsa::probeDeviceOpen: pcm device " << device << " data format not supported by RtAudio.";
5318 errorText_ = errorStream_.str();
5322 result = snd_pcm_hw_params_set_format( phandle, hw_params, deviceFormat );
5324 snd_pcm_close( phandle );
5325 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting pcm device (" << name << ") data format, " << snd_strerror( result ) << ".";
5326 errorText_ = errorStream_.str();
5330 // Determine whether byte-swaping is necessary.
// S8 never needs swapping; for wider formats ask ALSA whether the chosen
// format matches CPU endianness (1 = native, 0 = swapped, <0 = error).
5331 stream_.doByteSwap[mode] = false;
5332 if ( deviceFormat != SND_PCM_FORMAT_S8 ) {
5333 result = snd_pcm_format_cpu_endian( deviceFormat );
5335 stream_.doByteSwap[mode] = true;
5336 else if (result < 0) {
5337 snd_pcm_close( phandle );
5338 errorStream_ << "RtApiAlsa::probeDeviceOpen: error getting pcm device (" << name << ") endian-ness, " << snd_strerror( result ) << ".";
5339 errorText_ = errorStream_.str();
5344 // Set the sample rate.
5345 result = snd_pcm_hw_params_set_rate_near( phandle, hw_params, (unsigned int*) &sampleRate, 0 );
5347 snd_pcm_close( phandle );
5348 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting sample rate on device (" << name << "), " << snd_strerror( result ) << ".";
5349 errorText_ = errorStream_.str();
5353 // Determine the number of channels for this device. We support a possible
5354 // minimum device channel number > than the value requested by the user.
5355 stream_.nUserChannels[mode] = channels;
5357 result = snd_pcm_hw_params_get_channels_max( hw_params, &value );
5358 unsigned int deviceChannels = value;
5359 if ( result < 0 || deviceChannels < channels + firstChannel ) {
5360 snd_pcm_close( phandle );
5361 errorStream_ << "RtApiAlsa::probeDeviceOpen: requested channel parameters not supported by device (" << name << "), " << snd_strerror( result ) << ".";
5362 errorText_ = errorStream_.str();
5366 result = snd_pcm_hw_params_get_channels_min( hw_params, &value );
5368 snd_pcm_close( phandle );
5369 errorStream_ << "RtApiAlsa::probeDeviceOpen: error getting minimum channels for device (" << name << "), " << snd_strerror( result ) << ".";
5370 errorText_ = errorStream_.str();
5373 deviceChannels = value;
5374 if ( deviceChannels < channels + firstChannel ) deviceChannels = channels + firstChannel;
5375 stream_.nDeviceChannels[mode] = deviceChannels;
5377 // Set the device channels.
5378 result = snd_pcm_hw_params_set_channels( phandle, hw_params, deviceChannels );
5380 snd_pcm_close( phandle );
5381 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting channels for device (" << name << "), " << snd_strerror( result ) << ".";
5382 errorText_ = errorStream_.str();
5386 // Set the buffer number, which in ALSA is referred to as the "period".
5388 unsigned int periods = 0;
5389 if ( options ) periods = options->numberOfBuffers;
5390 if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) periods = 2;
5391 // Even though the hardware might allow 1 buffer, it won't work reliably.
5392 if ( periods < 2 ) periods = 2;
5393 result = snd_pcm_hw_params_set_periods_near( phandle, hw_params, &periods, &dir );
5395 snd_pcm_close( phandle );
5396 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting periods for device (" << name << "), " << snd_strerror( result ) << ".";
5397 errorText_ = errorStream_.str();
5401 // Set the buffer (or period) size.
5402 snd_pcm_uframes_t periodSize = *bufferSize;
5403 result = snd_pcm_hw_params_set_period_size_near( phandle, hw_params, &periodSize, &dir );
5405 snd_pcm_close( phandle );
5406 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting period size for device (" << name << "), " << snd_strerror( result ) << ".";
5407 errorText_ = errorStream_.str();
// Report the granted period size back to the caller.
5410 *bufferSize = periodSize;
5412 // If attempting to setup a duplex stream, the bufferSize parameter
5413 // MUST be the same in both directions!
5414 if ( stream_.mode == OUTPUT && mode == INPUT && *bufferSize != stream_.bufferSize ) {
5415 errorStream_ << "RtApiAlsa::probeDeviceOpen: system error setting buffer size for duplex stream on device (" << name << ").";
5416 errorText_ = errorStream_.str();
5420 stream_.bufferSize = *bufferSize;
5422 // Install the hardware configuration
5423 result = snd_pcm_hw_params( phandle, hw_params );
5425 snd_pcm_close( phandle );
5426 errorStream_ << "RtApiAlsa::probeDeviceOpen: error installing hardware configuration on device (" << name << "), " << snd_strerror( result ) << ".";
5427 errorText_ = errorStream_.str();
5431 #if defined(__RTAUDIO_DEBUG__)
5432 fprintf(stderr, "\nRtApiAlsa: dump hardware params after installation:\n\n");
5433 snd_pcm_hw_params_dump( hw_params, out );
5436 // Set the software configuration to fill buffers with zeros and prevent device stopping on xruns.
5437 snd_pcm_sw_params_t *sw_params = NULL;
5438 snd_pcm_sw_params_alloca( &sw_params );
5439 snd_pcm_sw_params_current( phandle, sw_params );
5440 snd_pcm_sw_params_set_start_threshold( phandle, sw_params, *bufferSize );
5441 snd_pcm_sw_params_set_stop_threshold( phandle, sw_params, 0x7fffffff );
5442 snd_pcm_sw_params_set_silence_threshold( phandle, sw_params, 0 );
5443 snd_pcm_sw_params_set_silence_size( phandle, sw_params, INT_MAX );
5444 result = snd_pcm_sw_params( phandle, sw_params );
5446 snd_pcm_close( phandle );
5447 errorStream_ << "RtApiAlsa::probeDeviceOpen: error installing software configuration on device (" << name << "), " << snd_strerror( result ) << ".";
5448 errorText_ = errorStream_.str();
5452 #if defined(__RTAUDIO_DEBUG__)
5453 fprintf(stderr, "\nRtApiAlsa: dump software params after installation:\n\n");
5454 snd_pcm_sw_params_dump( sw_params, out );
5457 // Set flags for buffer conversion
// Conversion is required when format, channel count, or interleaving
// differs between the user view and the device view of the stream.
5458 stream_.doConvertBuffer[mode] = false;
5459 if ( stream_.userFormat != stream_.deviceFormat[mode] )
5460 stream_.doConvertBuffer[mode] = true;
5461 if ( stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode] )
5462 stream_.doConvertBuffer[mode] = true;
5463 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
5464 stream_.nUserChannels[mode] > 1 )
5465 stream_.doConvertBuffer[mode] = true;
5467 // Allocate the ApiHandle if necessary and then save.
5468 AlsaHandle *apiInfo = 0;
5469 if ( stream_.apiHandle == 0 ) {
5471 apiInfo = (AlsaHandle *) new AlsaHandle;
5473 catch ( std::bad_alloc& ) {
5474 errorText_ = "RtApiAlsa::probeDeviceOpen: error allocating AlsaHandle memory.";
5477 stream_.apiHandle = (void *) apiInfo;
5478 apiInfo->handles[0] = 0;
5479 apiInfo->handles[1] = 0;
5482 apiInfo = (AlsaHandle *) stream_.apiHandle;
5484 apiInfo->handles[mode] = phandle;
5486 // Allocate necessary internal buffers.
5487 unsigned long bufferBytes;
5488 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
5489 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
5490 if ( stream_.userBuffer[mode] == NULL ) {
5491 errorText_ = "RtApiAlsa::probeDeviceOpen: error allocating user buffer memory.";
5495 if ( stream_.doConvertBuffer[mode] ) {
// Reuse the existing device buffer for duplex streams when the output
// side already allocated one that is large enough.
5497 bool makeBuffer = true;
5498 bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
5499 if ( mode == INPUT ) {
5500 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
5501 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
5502 if ( bufferBytes <= bytesOut ) makeBuffer = false;
5507 bufferBytes *= *bufferSize;
5508 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
5509 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
5510 if ( stream_.deviceBuffer == NULL ) {
5511 errorText_ = "RtApiAlsa::probeDeviceOpen: error allocating device buffer memory.";
5517 stream_.sampleRate = sampleRate;
5518 stream_.nBuffers = periods;
5519 stream_.device[mode] = device;
5520 stream_.state = STREAM_STOPPED;
5522 // Setup the buffer conversion information structure.
5523 if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, firstChannel );
5525 // Setup thread if necessary.
5526 if ( stream_.mode == OUTPUT && mode == INPUT ) {
5527 // We had already set up an output stream.
5528 stream_.mode = DUPLEX;
5529 // Link the streams if possible.
// snd_pcm_link makes start/stop atomic across the two PCM handles.
5530 apiInfo->synchronized = false;
5531 if ( snd_pcm_link( apiInfo->handles[0], apiInfo->handles[1] ) == 0 )
5532 apiInfo->synchronized = true;
5534 errorText_ = "RtApiAlsa::probeDeviceOpen: unable to synchronize input and output devices.";
5535 error( RtError::WARNING );
5539 stream_.mode = mode;
5541 // Setup callback thread.
5542 stream_.callbackInfo.object = (void *) this;
5544 // Set the thread attributes for joinable and realtime scheduling
5545 // priority. The higher priority will only take affect if the
5546 // program is run as root or suid.
5547 pthread_attr_t attr;
5548 pthread_attr_init( &attr );
5549 pthread_attr_setdetachstate( &attr, PTHREAD_CREATE_JOINABLE );
5550 #ifdef SCHED_RR // Undefined with some OSes (eg: NetBSD 1.6.x with GNU Pthread)
5551 pthread_attr_setschedpolicy( &attr, SCHED_RR );
5553 pthread_attr_setschedpolicy( &attr, SCHED_OTHER );
5556 stream_.callbackInfo.isRunning = true;
5557 result = pthread_create( &stream_.callbackInfo.thread, &attr, alsaCallbackHandler, &stream_.callbackInfo );
5558 pthread_attr_destroy( &attr );
5560 stream_.callbackInfo.isRunning = false;
5561 errorText_ = "RtApiAlsa::error creating callback thread!";
// Shared error-exit cleanup: close PCM handles, free the AlsaHandle and
// all stream buffers before returning FAILURE.
5570 if ( apiInfo->handles[0] ) snd_pcm_close( apiInfo->handles[0] );
5571 if ( apiInfo->handles[1] ) snd_pcm_close( apiInfo->handles[1] );
5573 stream_.apiHandle = 0;
5576 for ( int i=0; i<2; i++ ) {
5577 if ( stream_.userBuffer[i] ) {
5578 free( stream_.userBuffer[i] );
5579 stream_.userBuffer[i] = 0;
5583 if ( stream_.deviceBuffer ) {
5584 free( stream_.deviceBuffer );
5585 stream_.deviceBuffer = 0;
// Close the open stream: stop the callback thread, drop any running PCM
// transfers, close both PCM handles, free the AlsaHandle and all user /
// device buffers, then mark the stream UNINITIALIZED/CLOSED.
5591 void RtApiAlsa :: closeStream()
5593 if ( stream_.state == STREAM_CLOSED ) {
5594 errorText_ = "RtApiAlsa::closeStream(): no open stream to close!";
5595 error( RtError::WARNING );
// Signal the callback loop to exit, then wait for the thread to finish
// before tearing down the resources it uses.
5599 stream_.callbackInfo.isRunning = false;
5600 pthread_join( stream_.callbackInfo.thread, NULL );
5602 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
5603 if ( stream_.state == STREAM_RUNNING ) {
5604 stream_.state = STREAM_STOPPED;
5605 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX )
5606 snd_pcm_drop( apiInfo->handles[0] );
5607 if ( stream_.mode == INPUT || stream_.mode == DUPLEX )
5608 snd_pcm_drop( apiInfo->handles[1] );
// handles[0] = playback, handles[1] = capture; either may be unopened.
5612 if ( apiInfo->handles[0] ) snd_pcm_close( apiInfo->handles[0] );
5613 if ( apiInfo->handles[1] ) snd_pcm_close( apiInfo->handles[1] );
5615 stream_.apiHandle = 0;
5618 for ( int i=0; i<2; i++ ) {
5619 if ( stream_.userBuffer[i] ) {
5620 free( stream_.userBuffer[i] );
5621 stream_.userBuffer[i] = 0;
5625 if ( stream_.deviceBuffer ) {
5626 free( stream_.deviceBuffer );
5627 stream_.deviceBuffer = 0;
5630 stream_.mode = UNINITIALIZED;
5631 stream_.state = STREAM_CLOSED;
// Start (or restart) the stream: prepare each PCM handle that is not
// already in the PREPARED state, then flip the state to RUNNING. Raises
// SYSTEM_ERROR if any snd_pcm_prepare() call fails.
5634 void RtApiAlsa :: startStream()
5636 // This method calls snd_pcm_prepare if the device isn't already in that state.
5639 if ( stream_.state == STREAM_RUNNING ) {
5640 errorText_ = "RtApiAlsa::startStream(): the stream is already running!";
5641 error( RtError::WARNING );
5645 MUTEX_LOCK( &stream_.mutex );
5648 snd_pcm_state_t state;
5649 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
5650 snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles;
5651 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
5652 state = snd_pcm_state( handle[0] );
5653 if ( state != SND_PCM_STATE_PREPARED ) {
5654 result = snd_pcm_prepare( handle[0] );
5656 errorStream_ << "RtApiAlsa::startStream: error preparing output pcm device, " << snd_strerror( result ) << ".";
5657 errorText_ = errorStream_.str();
// When the handles are linked (synchronized), preparing the output side
// covers the input side too — hence the !synchronized guard.
5663 if ( ( stream_.mode == INPUT || stream_.mode == DUPLEX ) && !apiInfo->synchronized ) {
5664 state = snd_pcm_state( handle[1] );
5665 if ( state != SND_PCM_STATE_PREPARED ) {
5666 result = snd_pcm_prepare( handle[1] );
5668 errorStream_ << "RtApiAlsa::startStream: error preparing input pcm device, " << snd_strerror( result ) << ".";
5669 errorText_ = errorStream_.str();
5675 stream_.state = STREAM_RUNNING;
// Unlock before raising so the mutex is never held through error().
5678 MUTEX_UNLOCK( &stream_.mutex );
5680 if ( result >= 0 ) return;
5681 error( RtError::SYSTEM_ERROR );
// Stop the stream gracefully: drain pending output samples (or drop them
// when the handles are linked), drop capture, and mark the stream STOPPED.
5684 void RtApiAlsa :: stopStream()
5687 if ( stream_.state == STREAM_STOPPED ) {
5688 errorText_ = "RtApiAlsa::stopStream(): the stream is already stopped!";
5689 error( RtError::WARNING );
5693 // Change the state before the lock to improve shutdown response
5694 // when using a callback.
5695 stream_.state = STREAM_STOPPED;
5696 MUTEX_LOCK( &stream_.mutex );
5699 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
5700 snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles;
5701 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
// Linked handles: drop immediately (drain on a linked pair can wedge);
// otherwise drain so already-queued output is played out.
5702 if ( apiInfo->synchronized )
5703 result = snd_pcm_drop( handle[0] );
5705 result = snd_pcm_drain( handle[0] );
5707 errorStream_ << "RtApiAlsa::stopStream: error draining output pcm device, " << snd_strerror( result ) << ".";
5708 errorText_ = errorStream_.str();
5713 if ( ( stream_.mode == INPUT || stream_.mode == DUPLEX ) && !apiInfo->synchronized ) {
5714 result = snd_pcm_drop( handle[1] );
5716 errorStream_ << "RtApiAlsa::stopStream: error stopping input pcm device, " << snd_strerror( result ) << ".";
5717 errorText_ = errorStream_.str();
5723 MUTEX_UNLOCK( &stream_.mutex );
5725 if ( result >= 0 ) return;
5726 error( RtError::SYSTEM_ERROR );
// Abort the stream immediately: like stopStream() but always uses
// snd_pcm_drop(), discarding any samples still queued for playback.
5729 void RtApiAlsa :: abortStream()
5732 if ( stream_.state == STREAM_STOPPED ) {
5733 errorText_ = "RtApiAlsa::abortStream(): the stream is already stopped!";
5734 error( RtError::WARNING );
5738 // Change the state before the lock to improve shutdown response
5739 // when using a callback.
5740 stream_.state = STREAM_STOPPED;
5741 MUTEX_LOCK( &stream_.mutex );
5744 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
5745 snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles;
5746 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
5747 result = snd_pcm_drop( handle[0] );
5749 errorStream_ << "RtApiAlsa::abortStream: error aborting output pcm device, " << snd_strerror( result ) << ".";
5750 errorText_ = errorStream_.str();
5755 if ( ( stream_.mode == INPUT || stream_.mode == DUPLEX ) && !apiInfo->synchronized ) {
5756 result = snd_pcm_drop( handle[1] );
5758 errorStream_ << "RtApiAlsa::abortStream: error aborting input pcm device, " << snd_strerror( result ) << ".";
5759 errorText_ = errorStream_.str();
5765 MUTEX_UNLOCK( &stream_.mutex );
// NOTE(review): this re-assignment is redundant — the state was already
// set to STREAM_STOPPED above, before taking the lock.
5767 stream_.state = STREAM_STOPPED;
5768 if ( result >= 0 ) return;
5769 error( RtError::SYSTEM_ERROR );
// One iteration of the callback thread loop: invoke the user callback,
// then read a period from the capture handle and/or write a period to the
// playback handle, performing byte-swap and format/interleave conversion
// as flagged during probeDeviceOpen. EPIPE (xrun) results in re-preparing
// the affected handle and setting the xrun flag reported on the next tick.
5772 void RtApiAlsa :: callbackEvent()
5774 if ( stream_.state == STREAM_STOPPED ) {
5775 if ( stream_.callbackInfo.isRunning ) usleep( 50000 ); // sleep 50 milliseconds
5779 if ( stream_.state == STREAM_CLOSED ) {
5780 errorText_ = "RtApiAlsa::callbackEvent(): the stream is closed ... this shouldn't happen!";
5781 error( RtError::WARNING );
// Report any xruns recorded since the last callback via the status mask.
5785 int doStopStream = 0;
5786 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
5787 RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;
5788 double streamTime = getStreamTime();
5789 RtAudioStreamStatus status = 0;
5790 if ( stream_.mode != INPUT && apiInfo->xrun[0] == true ) {
5791 status |= RTAUDIO_OUTPUT_UNDERFLOW;
5792 apiInfo->xrun[0] = false;
5794 if ( stream_.mode != OUTPUT && apiInfo->xrun[1] == true ) {
5795 status |= RTAUDIO_INPUT_OVERFLOW;
5796 apiInfo->xrun[1] = false;
5798 doStopStream = callback( stream_.userBuffer[0], stream_.userBuffer[1],
5799 stream_.bufferSize, streamTime, status, stream_.callbackInfo.userData );
5801 MUTEX_LOCK( &stream_.mutex );
5803 // The state might change while waiting on a mutex.
5804 if ( stream_.state == STREAM_STOPPED ) goto unlock;
5810 snd_pcm_sframes_t frames;
5811 RtAudioFormat format;
5812 handle = (snd_pcm_t **) apiInfo->handles;
5814 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
5816 // Setup parameters.
// Capture into the device buffer when conversion is needed, otherwise
// straight into the user buffer.
5817 if ( stream_.doConvertBuffer[1] ) {
5818 buffer = stream_.deviceBuffer;
5819 channels = stream_.nDeviceChannels[1];
5820 format = stream_.deviceFormat[1];
5823 buffer = stream_.userBuffer[1];
5824 channels = stream_.nUserChannels[1];
5825 format = stream_.userFormat;
5828 // Read samples from device in interleaved/non-interleaved format.
5829 if ( stream_.deviceInterleaved[1] )
5830 result = snd_pcm_readi( handle[1], buffer, stream_.bufferSize );
5832 void *bufs[channels];
5833 size_t offset = stream_.bufferSize * formatBytes( format );
5834 for ( int i=0; i<channels; i++ )
5835 bufs[i] = (void *) (buffer + (i * offset));
5836 result = snd_pcm_readn( handle[1], bufs, stream_.bufferSize );
5839 if ( result < (int) stream_.bufferSize ) {
5840 // Either an error or underrun occured.
5841 if ( result == -EPIPE ) {
5842 snd_pcm_state_t state = snd_pcm_state( handle[1] );
5843 if ( state == SND_PCM_STATE_XRUN ) {
5844 apiInfo->xrun[1] = true;
5845 result = snd_pcm_prepare( handle[1] );
5847 errorStream_ << "RtApiAlsa::callbackEvent: error preparing device after overrun, " << snd_strerror( result ) << ".";
5848 errorText_ = errorStream_.str();
5852 errorStream_ << "RtApiAlsa::callbackEvent: error, current state is " << snd_pcm_state_name( state ) << ", " << snd_strerror( result ) << ".";
5853 errorText_ = errorStream_.str();
5857 errorStream_ << "RtApiAlsa::callbackEvent: audio read error, " << snd_strerror( result ) << ".";
5858 errorText_ = errorStream_.str();
5860 error( RtError::WARNING );
5864 // Do byte swapping if necessary.
5865 if ( stream_.doByteSwap[1] )
5866 byteSwapBuffer( buffer, stream_.bufferSize * channels, format );
5868 // Do buffer conversion if necessary.
5869 if ( stream_.doConvertBuffer[1] )
5870 convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
5872 // Check stream latency
5873 result = snd_pcm_delay( handle[1], &frames );
5874 if ( result == 0 && frames > 0 ) stream_.latency[1] = frames;
5877 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
5879 // Setup parameters and do buffer conversion if necessary.
5880 if ( stream_.doConvertBuffer[0] ) {
5881 buffer = stream_.deviceBuffer;
5882 convertBuffer( buffer, stream_.userBuffer[0], stream_.convertInfo[0] );
5883 channels = stream_.nDeviceChannels[0];
5884 format = stream_.deviceFormat[0];
5887 buffer = stream_.userBuffer[0];
5888 channels = stream_.nUserChannels[0];
5889 format = stream_.userFormat;
5892 // Do byte swapping if necessary.
5893 if ( stream_.doByteSwap[0] )
5894 byteSwapBuffer(buffer, stream_.bufferSize * channels, format);
5896 // Write samples to device in interleaved/non-interleaved format.
5897 if ( stream_.deviceInterleaved[0] )
5898 result = snd_pcm_writei( handle[0], buffer, stream_.bufferSize );
5900 void *bufs[channels];
5901 size_t offset = stream_.bufferSize * formatBytes( format );
5902 for ( int i=0; i<channels; i++ )
5903 bufs[i] = (void *) (buffer + (i * offset));
5904 result = snd_pcm_writen( handle[0], bufs, stream_.bufferSize );
5907 if ( result < (int) stream_.bufferSize ) {
5908 // Either an error or underrun occured.
5909 if ( result == -EPIPE ) {
5910 snd_pcm_state_t state = snd_pcm_state( handle[0] );
5911 if ( state == SND_PCM_STATE_XRUN ) {
5912 apiInfo->xrun[0] = true;
5913 result = snd_pcm_prepare( handle[0] );
5915 errorStream_ << "RtApiAlsa::callbackEvent: error preparing device after underrun, " << snd_strerror( result ) << ".";
5916 errorText_ = errorStream_.str();
5920 errorStream_ << "RtApiAlsa::callbackEvent: error, current state is " << snd_pcm_state_name( state ) << ", " << snd_strerror( result ) << ".";
5921 errorText_ = errorStream_.str();
5925 errorStream_ << "RtApiAlsa::callbackEvent: audio write error, " << snd_strerror( result ) << ".";
5926 errorText_ = errorStream_.str();
5928 error( RtError::WARNING );
5932 // Check stream latency
5933 result = snd_pcm_delay( handle[0], &frames );
5934 if ( result == 0 && frames > 0 ) stream_.latency[0] = frames;
5938 MUTEX_UNLOCK( &stream_.mutex );
// Advance the stream clock, then honor the callback's return value
// (1 = graceful stop, 2 = immediate abort).
5940 RtApi::tickStreamTime();
5941 if ( doStopStream == 1 ) this->stopStream();
5942 else if ( doStopStream == 2 ) this->abortStream();
5945 extern "C" void *alsaCallbackHandler( void *ptr )
5947 CallbackInfo *info = (CallbackInfo *) ptr;
5948 RtApiAlsa *object = (RtApiAlsa *) info->object;
5949 bool *isRunning = &info->isRunning;
5952 // Set a higher scheduler priority (P.J. Leonard)
5953 struct sched_param param;
5954 int min = sched_get_priority_min( SCHED_RR );
5955 int max = sched_get_priority_max( SCHED_RR );
5956 param.sched_priority = min + ( max - min ) / 2; // Is this the best number?
5957 sched_setscheduler( 0, SCHED_RR, ¶m );
5960 while ( *isRunning == true ) {
5961 pthread_testcancel();
5962 object->callbackEvent();
5965 pthread_exit( NULL );
5968 //******************** End of __LINUX_ALSA__ *********************//
5972 #if defined(__LINUX_OSS__)
5975 #include <sys/ioctl.h>
5978 #include "soundcard.h"
5982 extern "C" void *ossCallbackHandler(void * ptr);
// A structure to hold various information related to the OSS API
// NOTE(review): this excerpt is missing several lines of the struct
// (the 'struct OssHandle {' header, the xrun[2]/triggered members and
// the constructor signature appear to be elided) — confirm against the
// full source before editing.
int id[2];    // device ids
// Constructor initializer/body: no xruns recorded yet, device ids
// zeroed, duplex input/output not yet triggered.
:triggered(false) { id[0] = 0; id[1] = 0; xrun[0] = false; xrun[1] = false; }
// Default constructor.  All stream bookkeeping lives in the RtApi base
// class; the OSS backend needs no per-object setup until a stream is
// actually opened.
RtApiOss :: RtApiOss()
// Nothing to do here.
// Destructor: ensure any still-open stream is closed (device fds,
// buffers and the callback thread are released inside closeStream()).
RtApiOss :: ~RtApiOss()
if ( stream_.state != STREAM_CLOSED ) closeStream();
// Return the number of OSS audio devices reported by the system.
// Queries the OSS v4 SNDCTL_SYSINFO ioctl via '/dev/mixer'; on any
// failure a WARNING is raised through error().
// NOTE(review): the lines that close mixerfd and return on the error
// paths appear to be elided from this excerpt — confirm the fd is not
// leaked in the full source.
unsigned int RtApiOss :: getDeviceCount( void )
int mixerfd = open( "/dev/mixer", O_RDWR, 0 );
if ( mixerfd == -1 ) {
errorText_ = "RtApiOss::getDeviceCount: error opening '/dev/mixer'.";
error( RtError::WARNING );
// SNDCTL_SYSINFO only exists in OSS >= 4.0; failure implies an older driver.
oss_sysinfo sysinfo;
if ( ioctl( mixerfd, SNDCTL_SYSINFO, &sysinfo ) == -1 ) {
errorText_ = "RtApiOss::getDeviceCount: error getting sysinfo, OSS version >= 4.0 is required.";
error( RtError::WARNING );
return sysinfo.numaudios;
// Probe one OSS device and fill in an RtAudio::DeviceInfo record:
// channel counts, native data formats and supported sample rates.
// @param device  zero-based index into the SNDCTL_SYSINFO device list.
// @return        info with probed == true on success; on failure a
//                WARNING (or INVALID_USE for bad arguments) is raised
//                and a partially-filled record is returned.
RtAudio::DeviceInfo RtApiOss :: getDeviceInfo( unsigned int device )
RtAudio::DeviceInfo info;
info.probed = false;
int mixerfd = open( "/dev/mixer", O_RDWR, 0 );
if ( mixerfd == -1 ) {
errorText_ = "RtApiOss::getDeviceInfo: error opening '/dev/mixer'.";
error( RtError::WARNING );
oss_sysinfo sysinfo;
int result = ioctl( mixerfd, SNDCTL_SYSINFO, &sysinfo );
if ( result == -1 ) {
errorText_ = "RtApiOss::getDeviceInfo: error getting sysinfo, OSS version >= 4.0 is required.";
error( RtError::WARNING );
unsigned nDevices = sysinfo.numaudios;
if ( nDevices == 0 ) {
errorText_ = "RtApiOss::getDeviceInfo: no devices found!";
error( RtError::INVALID_USE );
if ( device >= nDevices ) {
errorText_ = "RtApiOss::getDeviceInfo: device ID is invalid!";
error( RtError::INVALID_USE );
// Query the per-device capabilities (name, caps, formats, rates).
oss_audioinfo ainfo;
result = ioctl( mixerfd, SNDCTL_AUDIOINFO, &ainfo );
if ( result == -1 ) {
errorStream_ << "RtApiOss::getDeviceInfo: error getting device (" << ainfo.name << ") info.";
errorText_ = errorStream_.str();
error( RtError::WARNING );
if ( ainfo.caps & PCM_CAP_OUTPUT ) info.outputChannels = ainfo.max_channels;
if ( ainfo.caps & PCM_CAP_INPUT ) info.inputChannels = ainfo.max_channels;
if ( ainfo.caps & PCM_CAP_DUPLEX ) {
// NOTE(review): the PCM_CAP_DUPLEX re-test below is redundant — the
// enclosing 'if' has already established it.
if ( info.outputChannels > 0 && info.inputChannels > 0 && ainfo.caps & PCM_CAP_DUPLEX )
info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
// Probe data formats ... do for input
unsigned long mask = ainfo.iformats;
if ( mask & AFMT_S16_LE || mask & AFMT_S16_BE )
info.nativeFormats |= RTAUDIO_SINT16;
if ( mask & AFMT_S8 )
info.nativeFormats |= RTAUDIO_SINT8;
if ( mask & AFMT_S32_LE || mask & AFMT_S32_BE )
info.nativeFormats |= RTAUDIO_SINT32;
if ( mask & AFMT_FLOAT )
info.nativeFormats |= RTAUDIO_FLOAT32;
if ( mask & AFMT_S24_LE || mask & AFMT_S24_BE )
info.nativeFormats |= RTAUDIO_SINT24;
// Check that we have at least one supported format
if ( info.nativeFormats == 0 ) {
errorStream_ << "RtApiOss::getDeviceInfo: device (" << ainfo.name << ") data format not supported by RtAudio.";
errorText_ = errorStream_.str();
error( RtError::WARNING );
// Probe the supported sample rates.
info.sampleRates.clear();
if ( ainfo.nrates ) {
// The driver reports an explicit rate list: keep only the rates that
// also appear in RtAudio's SAMPLE_RATES table.
for ( unsigned int i=0; i<ainfo.nrates; i++ ) {
for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {
if ( ainfo.rates[i] == SAMPLE_RATES[k] ) {
info.sampleRates.push_back( SAMPLE_RATES[k] );
// Check min and max rate values;
// (no explicit list: accept every table rate within [min_rate, max_rate])
for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {
if ( ainfo.min_rate <= (int) SAMPLE_RATES[k] && ainfo.max_rate >= (int) SAMPLE_RATES[k] )
info.sampleRates.push_back( SAMPLE_RATES[k] );
if ( info.sampleRates.size() == 0 ) {
errorStream_ << "RtApiOss::getDeviceInfo: no supported sample rates found for device (" << ainfo.name << ").";
errorText_ = errorStream_.str();
error( RtError::WARNING );
info.name = ainfo.name;
// Open and configure one direction (OUTPUT or INPUT) of an OSS stream.
//
// @param device       zero-based OSS device index.
// @param mode         OUTPUT or INPUT (also indexes the stream_ arrays).
// @param channels     number of user channels requested.
// @param firstChannel channel offset within the device.
// @param sampleRate   requested rate in Hz (must match within 100 Hz).
// @param format       requested RtAudio sample format.
// @param bufferSize   in/out: requested frames; updated to the actual
//                     fragment-derived value on return.
// @param options      optional flags (hog device, minimize latency,
//                     non-interleaved) and buffer count.
// @return             presumably true on success, false on failure
//                     (return statements are elided from this excerpt).
//
// Sequence: validate device via /dev/mixer sysinfo, open the dsp node,
// negotiate channels/format/fragment-size/rate, allocate the OssHandle
// and conversion buffers, then spawn the callback thread (or promote
// an existing OUTPUT stream to DUPLEX).
bool RtApiOss :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
unsigned int firstChannel, unsigned int sampleRate,
RtAudioFormat format, unsigned int *bufferSize,
RtAudio::StreamOptions *options )
int mixerfd = open( "/dev/mixer", O_RDWR, 0 );
if ( mixerfd == -1 ) {
errorText_ = "RtApiOss::probeDeviceOpen: error opening '/dev/mixer'.";
oss_sysinfo sysinfo;
int result = ioctl( mixerfd, SNDCTL_SYSINFO, &sysinfo );
if ( result == -1 ) {
errorText_ = "RtApiOss::probeDeviceOpen: error getting sysinfo, OSS version >= 4.0 is required.";
unsigned nDevices = sysinfo.numaudios;
if ( nDevices == 0 ) {
// This should not happen because a check is made before this function is called.
errorText_ = "RtApiOss::probeDeviceOpen: no devices found!";
if ( device >= nDevices ) {
// This should not happen because a check is made before this function is called.
errorText_ = "RtApiOss::probeDeviceOpen: device ID is invalid!";
oss_audioinfo ainfo;
result = ioctl( mixerfd, SNDCTL_AUDIOINFO, &ainfo );
if ( result == -1 ) {
// NOTE(review): this error message names "getDeviceInfo" but we are in
// probeDeviceOpen — looks like a copy/paste slip in the string.
errorStream_ << "RtApiOss::getDeviceInfo: error getting device (" << ainfo.name << ") info.";
errorText_ = errorStream_.str();
// Check if device supports input or output
if ( ( mode == OUTPUT && !( ainfo.caps & PCM_CAP_OUTPUT ) ) ||
( mode == INPUT && !( ainfo.caps & PCM_CAP_INPUT ) ) ) {
if ( mode == OUTPUT )
errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support output.";
errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support input.";
errorText_ = errorStream_.str();
OssHandle *handle = (OssHandle *) stream_.apiHandle;
if ( mode == OUTPUT )
else { // mode == INPUT
if (stream_.mode == OUTPUT && stream_.device[0] == device) {
// We just set the same device for playback ... close and reopen for duplex (OSS only).
close( handle->id[0] );
if ( !( ainfo.caps & PCM_CAP_DUPLEX ) ) {
errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support duplex mode.";
errorText_ = errorStream_.str();
// Check that the number previously set channels is the same.
if ( stream_.nUserChannels[0] != channels ) {
errorStream_ << "RtApiOss::probeDeviceOpen: input/output channels must be equal for OSS duplex device (" << ainfo.name << ").";
errorText_ = errorStream_.str();
// Set exclusive access if specified.
if ( options && options->flags & RTAUDIO_HOG_DEVICE ) flags |= O_EXCL;
// Try to open the device.
fd = open( ainfo.devnode, flags, 0 );
if ( errno == EBUSY )
errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") is busy.";
errorStream_ << "RtApiOss::probeDeviceOpen: error opening device (" << ainfo.name << ").";
errorText_ = errorStream_.str();
// For duplex operation, specifically set this mode (this doesn't seem to work).
// NOTE(review): 'flags | O_RDWR' is bitwise-OR and therefore always
// non-zero; the intent was presumably 'flags & O_RDWR' or
// '(flags & O_ACCMODE) == O_RDWR' — confirm against upstream.
if ( flags | O_RDWR ) {
result = ioctl( fd, SNDCTL_DSP_SETDUPLEX, NULL );
if ( result == -1) {
errorStream_ << "RtApiOss::probeDeviceOpen: error setting duplex mode for device (" << ainfo.name << ").";
errorText_ = errorStream_.str();
// Check the device channel support.
stream_.nUserChannels[mode] = channels;
if ( ainfo.max_channels < (int)(channels + firstChannel) ) {
errorStream_ << "RtApiOss::probeDeviceOpen: the device (" << ainfo.name << ") does not support requested channel parameters.";
errorText_ = errorStream_.str();
// Set the number of channels.
int deviceChannels = channels + firstChannel;
result = ioctl( fd, SNDCTL_DSP_CHANNELS, &deviceChannels );
if ( result == -1 || deviceChannels < (int)(channels + firstChannel) ) {
errorStream_ << "RtApiOss::probeDeviceOpen: error setting channel parameters on device (" << ainfo.name << ").";
errorText_ = errorStream_.str();
stream_.nDeviceChannels[mode] = deviceChannels;
// Get the data format mask
result = ioctl( fd, SNDCTL_DSP_GETFMTS, &mask );
if ( result == -1 ) {
errorStream_ << "RtApiOss::probeDeviceOpen: error getting device (" << ainfo.name << ") data formats.";
errorText_ = errorStream_.str();
// Determine how to set the device format: prefer the user's requested
// format natively; _NE is native-endian, _OE is opposite-endian (the
// latter sets doByteSwap so buffers are swapped in callbackEvent).
stream_.userFormat = format;
int deviceFormat = -1;
stream_.doByteSwap[mode] = false;
if ( format == RTAUDIO_SINT8 ) {
if ( mask & AFMT_S8 ) {
deviceFormat = AFMT_S8;
stream_.deviceFormat[mode] = RTAUDIO_SINT8;
else if ( format == RTAUDIO_SINT16 ) {
if ( mask & AFMT_S16_NE ) {
deviceFormat = AFMT_S16_NE;
stream_.deviceFormat[mode] = RTAUDIO_SINT16;
else if ( mask & AFMT_S16_OE ) {
deviceFormat = AFMT_S16_OE;
stream_.deviceFormat[mode] = RTAUDIO_SINT16;
stream_.doByteSwap[mode] = true;
else if ( format == RTAUDIO_SINT24 ) {
if ( mask & AFMT_S24_NE ) {
deviceFormat = AFMT_S24_NE;
stream_.deviceFormat[mode] = RTAUDIO_SINT24;
else if ( mask & AFMT_S24_OE ) {
deviceFormat = AFMT_S24_OE;
stream_.deviceFormat[mode] = RTAUDIO_SINT24;
stream_.doByteSwap[mode] = true;
else if ( format == RTAUDIO_SINT32 ) {
if ( mask & AFMT_S32_NE ) {
deviceFormat = AFMT_S32_NE;
stream_.deviceFormat[mode] = RTAUDIO_SINT32;
else if ( mask & AFMT_S32_OE ) {
deviceFormat = AFMT_S32_OE;
stream_.deviceFormat[mode] = RTAUDIO_SINT32;
stream_.doByteSwap[mode] = true;
if ( deviceFormat == -1 ) {
// The user requested format is not natively supported by the device.
// Fall back to the "best" available device format (16 -> 32 -> 24 bit,
// native-endian before opposite-endian); convertBuffer() will translate.
if ( mask & AFMT_S16_NE ) {
deviceFormat = AFMT_S16_NE;
stream_.deviceFormat[mode] = RTAUDIO_SINT16;
else if ( mask & AFMT_S32_NE ) {
deviceFormat = AFMT_S32_NE;
stream_.deviceFormat[mode] = RTAUDIO_SINT32;
else if ( mask & AFMT_S24_NE ) {
deviceFormat = AFMT_S24_NE;
stream_.deviceFormat[mode] = RTAUDIO_SINT24;
else if ( mask & AFMT_S16_OE ) {
deviceFormat = AFMT_S16_OE;
stream_.deviceFormat[mode] = RTAUDIO_SINT16;
stream_.doByteSwap[mode] = true;
else if ( mask & AFMT_S32_OE ) {
deviceFormat = AFMT_S32_OE;
stream_.deviceFormat[mode] = RTAUDIO_SINT32;
stream_.doByteSwap[mode] = true;
else if ( mask & AFMT_S24_OE ) {
deviceFormat = AFMT_S24_OE;
stream_.deviceFormat[mode] = RTAUDIO_SINT24;
stream_.doByteSwap[mode] = true;
else if ( mask & AFMT_S8) {
deviceFormat = AFMT_S8;
stream_.deviceFormat[mode] = RTAUDIO_SINT8;
if ( stream_.deviceFormat[mode] == 0 ) {
// This really shouldn't happen ...
errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") data format not supported by RtAudio.";
errorText_ = errorStream_.str();
// Set the data format.
int temp = deviceFormat;
result = ioctl( fd, SNDCTL_DSP_SETFMT, &deviceFormat );
if ( result == -1 || deviceFormat != temp ) {
errorStream_ << "RtApiOss::probeDeviceOpen: error setting data format on device (" << ainfo.name << ").";
errorText_ = errorStream_.str();
// Attempt to set the buffer size.  According to OSS, the minimum
// number of buffers is two.  The supposed minimum buffer size is 16
// bytes, so that will be our lower bound.  The argument to this
// call is in the form 0xMMMMSSSS (hex), where the buffer size (in
// bytes) is given as 2^SSSS and the number of buffers as 2^MMMM.
// We'll check the actual value used near the end of the setup
int ossBufferBytes = *bufferSize * formatBytes( stream_.deviceFormat[mode] ) * deviceChannels;
if ( ossBufferBytes < 16 ) ossBufferBytes = 16;
if ( options ) buffers = options->numberOfBuffers;
if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) buffers = 2;
if ( buffers < 2 ) buffers = 3;
temp = ((int) buffers << 16) + (int)( log10( (double)ossBufferBytes ) / log10( 2.0 ) );
result = ioctl( fd, SNDCTL_DSP_SETFRAGMENT, &temp );
if ( result == -1 ) {
errorStream_ << "RtApiOss::probeDeviceOpen: error setting buffer size on device (" << ainfo.name << ").";
errorText_ = errorStream_.str();
stream_.nBuffers = buffers;
// Save buffer size (in sample frames).
*bufferSize = ossBufferBytes / ( formatBytes(stream_.deviceFormat[mode]) * deviceChannels );
stream_.bufferSize = *bufferSize;
// Set the sample rate.
int srate = sampleRate;
result = ioctl( fd, SNDCTL_DSP_SPEED, &srate );
if ( result == -1 ) {
errorStream_ << "RtApiOss::probeDeviceOpen: error setting sample rate (" << sampleRate << ") on device (" << ainfo.name << ").";
errorText_ = errorStream_.str();
// Verify the sample rate setup worked.
// (a tolerance of 100 Hz accommodates slightly inexact hardware clocks)
if ( abs( srate - sampleRate ) > 100 ) {
errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support sample rate (" << sampleRate << ").";
errorText_ = errorStream_.str();
stream_.sampleRate = sampleRate;
if ( mode == INPUT && stream_.mode == OUTPUT && stream_.device[0] == device) {
// We're doing duplex setup here.
stream_.deviceFormat[0] = stream_.deviceFormat[1];
stream_.nDeviceChannels[0] = deviceChannels;
// Set interleaving parameters.
stream_.userInterleaved = true;
stream_.deviceInterleaved[mode] = true;
if ( options && options->flags & RTAUDIO_NONINTERLEAVED )
stream_.userInterleaved = false;
// Set flags for buffer conversion: needed whenever format, channel
// count, or (multi-channel) interleaving differs between user and device.
stream_.doConvertBuffer[mode] = false;
if ( stream_.userFormat != stream_.deviceFormat[mode] )
stream_.doConvertBuffer[mode] = true;
if ( stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode] )
stream_.doConvertBuffer[mode] = true;
if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
stream_.nUserChannels[mode] > 1 )
stream_.doConvertBuffer[mode] = true;
// Allocate the stream handles if necessary and then save.
if ( stream_.apiHandle == 0 ) {
handle = new OssHandle;
catch ( std::bad_alloc& ) {
errorText_ = "RtApiOss::probeDeviceOpen: error allocating OssHandle memory.";
stream_.apiHandle = (void *) handle;
handle = (OssHandle *) stream_.apiHandle;
handle->id[mode] = fd;
// Allocate necessary internal buffers.
unsigned long bufferBytes;
bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
if ( stream_.userBuffer[mode] == NULL ) {
errorText_ = "RtApiOss::probeDeviceOpen: error allocating user buffer memory.";
if ( stream_.doConvertBuffer[mode] ) {
bool makeBuffer = true;
bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
if ( mode == INPUT ) {
// Reuse the output-side device buffer for duplex if it is big enough.
if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
if ( bufferBytes <= bytesOut ) makeBuffer = false;
bufferBytes *= *bufferSize;
if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
if ( stream_.deviceBuffer == NULL ) {
errorText_ = "RtApiOss::probeDeviceOpen: error allocating device buffer memory.";
stream_.device[mode] = device;
stream_.state = STREAM_STOPPED;
// Setup the buffer conversion information structure.
if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, firstChannel );
// Setup thread if necessary.
if ( stream_.mode == OUTPUT && mode == INPUT ) {
// We had already set up an output stream.
stream_.mode = DUPLEX;
if ( stream_.device[0] == device ) handle->id[0] = fd;
stream_.mode = mode;
// Setup callback thread.
stream_.callbackInfo.object = (void *) this;
// Set the thread attributes for joinable and realtime scheduling
// priority.  The higher priority will only take affect if the
// program is run as root or suid.
pthread_attr_t attr;
pthread_attr_init( &attr );
pthread_attr_setdetachstate( &attr, PTHREAD_CREATE_JOINABLE );
#ifdef SCHED_RR // Undefined with some OSes (eg: NetBSD 1.6.x with GNU Pthread)
pthread_attr_setschedpolicy( &attr, SCHED_RR );
pthread_attr_setschedpolicy( &attr, SCHED_OTHER );
stream_.callbackInfo.isRunning = true;
result = pthread_create( &stream_.callbackInfo.thread, &attr, ossCallbackHandler, &stream_.callbackInfo );
pthread_attr_destroy( &attr );
stream_.callbackInfo.isRunning = false;
// NOTE(review): message reads "RtApiOss::error creating..."; most other
// messages use the "RtApiOss::probeDeviceOpen: ..." prefix.
errorText_ = "RtApiOss::error creating callback thread!";
// Error-cleanup path: close device fds, drop the handle and free all
// stream buffers so a failed open leaves no leaked resources.
if ( handle->id[0] ) close( handle->id[0] );
if ( handle->id[1] ) close( handle->id[1] );
stream_.apiHandle = 0;
for ( int i=0; i<2; i++ ) {
if ( stream_.userBuffer[i] ) {
free( stream_.userBuffer[i] );
stream_.userBuffer[i] = 0;
if ( stream_.deviceBuffer ) {
free( stream_.deviceBuffer );
stream_.deviceBuffer = 0;
// Close the open stream: stop the callback thread, halt any running
// device, close the OSS file descriptors, free the handle and all
// buffers, and mark the stream CLOSED.  A WARNING is raised if no
// stream is open.
void RtApiOss :: closeStream()
if ( stream_.state == STREAM_CLOSED ) {
errorText_ = "RtApiOss::closeStream(): no open stream to close!";
error( RtError::WARNING );
// Signal the callback thread to exit, then wait for it.
stream_.callbackInfo.isRunning = false;
pthread_join( stream_.callbackInfo.thread, NULL );
OssHandle *handle = (OssHandle *) stream_.apiHandle;
if ( stream_.state == STREAM_RUNNING ) {
// Halt playback and/or capture before closing the descriptors.
if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX )
ioctl( handle->id[0], SNDCTL_DSP_HALT, 0 );
ioctl( handle->id[1], SNDCTL_DSP_HALT, 0 );
stream_.state = STREAM_STOPPED;
if ( handle->id[0] ) close( handle->id[0] );
if ( handle->id[1] ) close( handle->id[1] );
stream_.apiHandle = 0;
for ( int i=0; i<2; i++ ) {
if ( stream_.userBuffer[i] ) {
free( stream_.userBuffer[i] );
stream_.userBuffer[i] = 0;
if ( stream_.deviceBuffer ) {
free( stream_.deviceBuffer );
stream_.deviceBuffer = 0;
stream_.mode = UNINITIALIZED;
stream_.state = STREAM_CLOSED;
// Mark the stream RUNNING.  OSS needs no explicit start command: the
// device begins as soon as the callback thread feeds it samples, so
// this only flips the state under the stream mutex.
void RtApiOss :: startStream()
if ( stream_.state == STREAM_RUNNING ) {
errorText_ = "RtApiOss::startStream(): the stream is already running!";
error( RtError::WARNING );
MUTEX_LOCK( &stream_.mutex );
stream_.state = STREAM_RUNNING;
// No need to do anything else here ... OSS automatically starts
// when fed samples.
MUTEX_UNLOCK( &stream_.mutex );
// Stop the stream gracefully: flush the output with silence so queued
// audio drains, then halt playback and capture via SNDCTL_DSP_HALT.
// Raises SYSTEM_ERROR if any halt ioctl failed.
void RtApiOss :: stopStream()
if ( stream_.state == STREAM_STOPPED ) {
errorText_ = "RtApiOss::stopStream(): the stream is already stopped!";
error( RtError::WARNING );
// Change the state before the lock to improve shutdown response
// when using a callback.
stream_.state = STREAM_STOPPED;
MUTEX_LOCK( &stream_.mutex );
OssHandle *handle = (OssHandle *) stream_.apiHandle;
if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
// Flush the output with zeros a few times.
RtAudioFormat format;
if ( stream_.doConvertBuffer[0] ) {
buffer = stream_.deviceBuffer;
samples = stream_.bufferSize * stream_.nDeviceChannels[0];
format = stream_.deviceFormat[0];
buffer = stream_.userBuffer[0];
samples = stream_.bufferSize * stream_.nUserChannels[0];
format = stream_.userFormat;
memset( buffer, 0, samples * formatBytes(format) );
// Write nBuffers+1 buffers of silence to push out pending audio.
for ( unsigned int i=0; i<stream_.nBuffers+1; i++ ) {
result = write( handle->id[0], buffer, samples * formatBytes(format) );
if ( result == -1 ) {
errorText_ = "RtApiOss::stopStream: audio write error.";
error( RtError::WARNING );
result = ioctl( handle->id[0], SNDCTL_DSP_HALT, 0 );
if ( result == -1 ) {
errorStream_ << "RtApiOss::stopStream: system error stopping callback procedure on device (" << stream_.device[0] << ").";
errorText_ = errorStream_.str();
handle->triggered = false;
if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && handle->id[0] != handle->id[1] ) ) {
result = ioctl( handle->id[1], SNDCTL_DSP_HALT, 0 );
if ( result == -1 ) {
// NOTE(review): this input-side message reports stream_.device[0];
// stream_.device[1] would seem to be the intended device — confirm.
errorStream_ << "RtApiOss::stopStream: system error stopping input callback procedure on device (" << stream_.device[0] << ").";
errorText_ = errorStream_.str();
MUTEX_UNLOCK( &stream_.mutex );
stream_.state = STREAM_STOPPED;
if ( result != -1 ) return;
error( RtError::SYSTEM_ERROR );
// Stop the stream immediately: like stopStream() but without flushing
// queued output — SNDCTL_DSP_HALT is issued right away, discarding any
// samples still buffered.  Raises SYSTEM_ERROR if a halt ioctl failed.
void RtApiOss :: abortStream()
if ( stream_.state == STREAM_STOPPED ) {
errorText_ = "RtApiOss::abortStream(): the stream is already stopped!";
error( RtError::WARNING );
// Change the state before the lock to improve shutdown response
// when using a callback.
stream_.state = STREAM_STOPPED;
MUTEX_LOCK( &stream_.mutex );
OssHandle *handle = (OssHandle *) stream_.apiHandle;
if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
result = ioctl( handle->id[0], SNDCTL_DSP_HALT, 0 );
if ( result == -1 ) {
errorStream_ << "RtApiOss::abortStream: system error stopping callback procedure on device (" << stream_.device[0] << ").";
errorText_ = errorStream_.str();
handle->triggered = false;
if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && handle->id[0] != handle->id[1] ) ) {
result = ioctl( handle->id[1], SNDCTL_DSP_HALT, 0 );
if ( result == -1 ) {
// NOTE(review): reports stream_.device[0] for the input device;
// stream_.device[1] would seem to be intended — confirm.
errorStream_ << "RtApiOss::abortStream: system error stopping input callback procedure on device (" << stream_.device[0] << ").";
errorText_ = errorStream_.str();
MUTEX_UNLOCK( &stream_.mutex );
stream_.state = STREAM_STOPPED;
if ( result != -1 ) return;
error( RtError::SYSTEM_ERROR );
// One iteration of the OSS callback loop (invoked repeatedly from
// ossCallbackHandler): report any xruns to the user callback, fetch a
// fresh buffer of output data / deliver the last input buffer, then
// write to and read from the device under the stream mutex.  A
// doStopStream return of 1 stops, 2 aborts, the stream afterwards.
void RtApiOss :: callbackEvent()
if ( stream_.state == STREAM_STOPPED ) {
// Stream stopped but thread still alive: idle politely.
if ( stream_.callbackInfo.isRunning ) usleep( 50000 ); // sleep 50 milliseconds
if ( stream_.state == STREAM_CLOSED ) {
errorText_ = "RtApiOss::callbackEvent(): the stream is closed ... this shouldn't happen!";
error( RtError::WARNING );
// Invoke user callback to get fresh output data.
int doStopStream = 0;
RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;
double streamTime = getStreamTime();
RtAudioStreamStatus status = 0;
OssHandle *handle = (OssHandle *) stream_.apiHandle;
// Report (and clear) any under/overflow flags recorded by earlier writes/reads.
if ( stream_.mode != INPUT && handle->xrun[0] == true ) {
status |= RTAUDIO_OUTPUT_UNDERFLOW;
handle->xrun[0] = false;
if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {
status |= RTAUDIO_INPUT_OVERFLOW;
handle->xrun[1] = false;
doStopStream = callback( stream_.userBuffer[0], stream_.userBuffer[1],
stream_.bufferSize, streamTime, status, stream_.callbackInfo.userData );
MUTEX_LOCK( &stream_.mutex );
// The state might change while waiting on a mutex.
if ( stream_.state == STREAM_STOPPED ) goto unlock;
RtAudioFormat format;
if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
// Setup parameters and do buffer conversion if necessary.
if ( stream_.doConvertBuffer[0] ) {
buffer = stream_.deviceBuffer;
convertBuffer( buffer, stream_.userBuffer[0], stream_.convertInfo[0] );
samples = stream_.bufferSize * stream_.nDeviceChannels[0];
format = stream_.deviceFormat[0];
buffer = stream_.userBuffer[0];
samples = stream_.bufferSize * stream_.nUserChannels[0];
format = stream_.userFormat;
// Do byte swapping if necessary.
if ( stream_.doByteSwap[0] )
byteSwapBuffer( buffer, samples, format );
if ( stream_.mode == DUPLEX && handle->triggered == false ) {
// First duplex pass: prime the output, then enable input and output
// triggers together so capture and playback start in sync.
ioctl( handle->id[0], SNDCTL_DSP_SETTRIGGER, &trig );
result = write( handle->id[0], buffer, samples * formatBytes(format) );
trig = PCM_ENABLE_INPUT|PCM_ENABLE_OUTPUT;
ioctl( handle->id[0], SNDCTL_DSP_SETTRIGGER, &trig );
handle->triggered = true;
// Write samples to device.
result = write( handle->id[0], buffer, samples * formatBytes(format) );
if ( result == -1 ) {
// We'll assume this is an underrun, though there isn't a
// specific means for determining that.
handle->xrun[0] = true;
errorText_ = "RtApiOss::callbackEvent: audio write error.";
error( RtError::WARNING );
if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
// Setup parameters.
if ( stream_.doConvertBuffer[1] ) {
buffer = stream_.deviceBuffer;
samples = stream_.bufferSize * stream_.nDeviceChannels[1];
format = stream_.deviceFormat[1];
buffer = stream_.userBuffer[1];
samples = stream_.bufferSize * stream_.nUserChannels[1];
format = stream_.userFormat;
// Read samples from device.
result = read( handle->id[1], buffer, samples * formatBytes(format) );
if ( result == -1 ) {
// We'll assume this is an overrun, though there isn't a
// specific means for determining that.
handle->xrun[1] = true;
errorText_ = "RtApiOss::callbackEvent: audio read error.";
error( RtError::WARNING );
// Do byte swapping if necessary.
if ( stream_.doByteSwap[1] )
byteSwapBuffer( buffer, samples, format );
// Do buffer conversion if necessary.
if ( stream_.doConvertBuffer[1] )
convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
MUTEX_UNLOCK( &stream_.mutex );
RtApi::tickStreamTime();
if ( doStopStream == 1 ) this->stopStream();
else if ( doStopStream == 2 ) this->abortStream();
6848 extern "C" void *ossCallbackHandler( void *ptr )
6850 CallbackInfo *info = (CallbackInfo *) ptr;
6851 RtApiOss *object = (RtApiOss *) info->object;
6852 bool *isRunning = &info->isRunning;
6855 // Set a higher scheduler priority (P.J. Leonard)
6856 struct sched_param param;
6857 param.sched_priority = 39; // Is this the best number?
6858 sched_setscheduler( 0, SCHED_RR, ¶m );
6861 while ( *isRunning == true ) {
6862 pthread_testcancel();
6863 object->callbackEvent();
6866 pthread_exit( NULL );
6869 //******************** End of __LINUX_OSS__ *********************//
6873 // *************************************************** //
6875 // Protected common (OS-independent) RtAudio methods.
6877 // *************************************************** //
// This method can be modified to control the behavior of error
// message printing.
// Central error reporter: clears the shared ostringstream, prints the
// message to stderr for WARNINGs (when warnings are enabled), and
// throws an RtError for other types.
// NOTE(review): the 'else' line before the throw appears to be elided
// from this excerpt — confirm warnings do not also throw.
void RtApi :: error( RtError::Type type )
errorStream_.str(""); // clear the ostringstream
if ( type == RtError::WARNING && showWarnings_ == true )
std::cerr << '\n' << errorText_ << "\n\n";
throw( RtError( errorText_, type ) );
// Guard used by public stream methods: raise INVALID_USE (which throws
// via error()) when no stream is currently open.
void RtApi :: verifyStream()
if ( stream_.state == STREAM_CLOSED ) {
errorText_ = "RtApi:: a stream is not open!";
error( RtError::INVALID_USE );
// Reset every field of the stream_ structure to its pristine
// (CLOSED/zeroed) state — called when a stream is (re)initialized so
// that no values from a previous stream leak into the next one.
void RtApi :: clearStreamInfo()
stream_.mode = UNINITIALIZED;
stream_.state = STREAM_CLOSED;
stream_.sampleRate = 0;
stream_.bufferSize = 0;
stream_.nBuffers = 0;
stream_.userFormat = 0;
stream_.userInterleaved = true;
stream_.streamTime = 0.0;
stream_.apiHandle = 0;
stream_.deviceBuffer = 0;
stream_.callbackInfo.callback = 0;
stream_.callbackInfo.userData = 0;
stream_.callbackInfo.isRunning = false;
// Per-direction fields: index 0 = OUTPUT, index 1 = INPUT.
for ( int i=0; i<2; i++ ) {
stream_.device[i] = 0;
stream_.doConvertBuffer[i] = false;
stream_.deviceInterleaved[i] = true;
stream_.doByteSwap[i] = false;
stream_.nUserChannels[i] = 0;
stream_.nDeviceChannels[i] = 0;
stream_.channelOffset[i] = 0;
stream_.deviceFormat[i] = 0;
stream_.latency[i] = 0;
stream_.userBuffer[i] = 0;
stream_.convertInfo[i].channels = 0;
stream_.convertInfo[i].inJump = 0;
stream_.convertInfo[i].outJump = 0;
stream_.convertInfo[i].inFormat = 0;
stream_.convertInfo[i].outFormat = 0;
stream_.convertInfo[i].inOffset.clear();
stream_.convertInfo[i].outOffset.clear();
// Return the number of bytes occupied by one sample of the given
// RtAudioFormat; warns and falls through for an unrecognized format.
// NOTE(review): the return-value lines for each branch are elided from
// this excerpt.  Per the convertBuffer() comment elsewhere in this
// file, SINT24 samples occupy the upper three bytes of a 32-bit int,
// which is why it is grouped with SINT32/FLOAT32 here.
unsigned int RtApi :: formatBytes( RtAudioFormat format )
if ( format == RTAUDIO_SINT16 )
else if ( format == RTAUDIO_SINT24 || format == RTAUDIO_SINT32 ||
format == RTAUDIO_FLOAT32 )
else if ( format == RTAUDIO_FLOAT64 )
else if ( format == RTAUDIO_SINT8 )
errorText_ = "RtApi::formatBytes: undefined format.";
error( RtError::WARNING );
// Populate stream_.convertInfo[mode] for convertBuffer(): jump sizes,
// in/out formats, the number of channels actually converted (the
// smaller of user/device channel counts), and the per-channel offset
// tables that implement interleaving/deinterleaving plus any
// firstChannel offset.
// @param mode          OUTPUT (user->device) or INPUT (device->user).
// @param firstChannel  channel offset applied on the device side.
void RtApi :: setConvertInfo( StreamMode mode, unsigned int firstChannel )
if ( mode == INPUT ) { // convert device to user buffer
stream_.convertInfo[mode].inJump = stream_.nDeviceChannels[1];
stream_.convertInfo[mode].outJump = stream_.nUserChannels[1];
stream_.convertInfo[mode].inFormat = stream_.deviceFormat[1];
stream_.convertInfo[mode].outFormat = stream_.userFormat;
else { // convert user to device buffer
stream_.convertInfo[mode].inJump = stream_.nUserChannels[0];
stream_.convertInfo[mode].outJump = stream_.nDeviceChannels[0];
stream_.convertInfo[mode].inFormat = stream_.userFormat;
stream_.convertInfo[mode].outFormat = stream_.deviceFormat[0];
// Convert only as many channels as both sides have.
if ( stream_.convertInfo[mode].inJump < stream_.convertInfo[mode].outJump )
stream_.convertInfo[mode].channels = stream_.convertInfo[mode].inJump;
stream_.convertInfo[mode].channels = stream_.convertInfo[mode].outJump;
// Set up the interleave/deinterleave offsets.
if ( stream_.deviceInterleaved[mode] != stream_.userInterleaved ) {
// One side interleaved, the other planar: the non-interleaved side
// advances by bufferSize per channel and uses jump 1.
if ( ( mode == OUTPUT && stream_.deviceInterleaved[mode] ) ||
( mode == INPUT && stream_.userInterleaved ) ) {
for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {
stream_.convertInfo[mode].inOffset.push_back( k * stream_.bufferSize );
stream_.convertInfo[mode].outOffset.push_back( k );
stream_.convertInfo[mode].inJump = 1;
for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {
stream_.convertInfo[mode].inOffset.push_back( k );
stream_.convertInfo[mode].outOffset.push_back( k * stream_.bufferSize );
stream_.convertInfo[mode].outJump = 1;
else { // no (de)interleaving
if ( stream_.userInterleaved ) {
for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {
stream_.convertInfo[mode].inOffset.push_back( k );
stream_.convertInfo[mode].outOffset.push_back( k );
for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {
stream_.convertInfo[mode].inOffset.push_back( k * stream_.bufferSize );
stream_.convertInfo[mode].outOffset.push_back( k * stream_.bufferSize );
stream_.convertInfo[mode].inJump = 1;
stream_.convertInfo[mode].outJump = 1;
// Add channel offset.
// The firstChannel shift lands on the device-facing side of the
// conversion: out offsets for OUTPUT, in offsets for INPUT; scaled by
// bufferSize when the device side is non-interleaved.
if ( firstChannel > 0 ) {
if ( stream_.deviceInterleaved[mode] ) {
if ( mode == OUTPUT ) {
for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
stream_.convertInfo[mode].outOffset[k] += firstChannel;
for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
stream_.convertInfo[mode].inOffset[k] += firstChannel;
if ( mode == OUTPUT ) {
for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
stream_.convertInfo[mode].outOffset[k] += ( firstChannel * stream_.bufferSize );
for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
stream_.convertInfo[mode].inOffset[k] += ( firstChannel * stream_.bufferSize );
7032 void RtApi :: convertBuffer( char *outBuffer, char *inBuffer, ConvertInfo &info )
7034 // This function does format conversion, input/output channel compensation, and
7035 // data interleaving/deinterleaving. 24-bit integers are assumed to occupy
7036 // the upper three bytes of a 32-bit integer.
7038 // Clear our device buffer when in/out duplex device channels are different
7039 if ( outBuffer == stream_.deviceBuffer && stream_.mode == DUPLEX &&
7040 ( stream_.nDeviceChannels[0] < stream_.nDeviceChannels[1] ) )
7041 memset( outBuffer, 0, stream_.bufferSize * info.outJump * formatBytes( info.outFormat ) );
7044 if (info.outFormat == RTAUDIO_FLOAT64) {
7046 Float64 *out = (Float64 *)outBuffer;
7048 if (info.inFormat == RTAUDIO_SINT8) {
7049 signed char *in = (signed char *)inBuffer;
7050 scale = 1.0 / 128.0;
7051 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7052 for (j=0; j<info.channels; j++) {
7053 out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
7054 out[info.outOffset[j]] *= scale;
7057 out += info.outJump;
7060 else if (info.inFormat == RTAUDIO_SINT16) {
7061 Int16 *in = (Int16 *)inBuffer;
7062 scale = 1.0 / 32768.0;
7063 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7064 for (j=0; j<info.channels; j++) {
7065 out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
7066 out[info.outOffset[j]] *= scale;
7069 out += info.outJump;
7072 else if (info.inFormat == RTAUDIO_SINT24) {
7073 Int32 *in = (Int32 *)inBuffer;
7074 scale = 1.0 / 8388608.0;
7075 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7076 for (j=0; j<info.channels; j++) {
7077 out[info.outOffset[j]] = (Float64) (in[info.inOffset[j]] & 0x00ffffff);
7078 out[info.outOffset[j]] *= scale;
7081 out += info.outJump;
7084 else if (info.inFormat == RTAUDIO_SINT32) {
7085 Int32 *in = (Int32 *)inBuffer;
7086 scale = 1.0 / 2147483648.0;
7087 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7088 for (j=0; j<info.channels; j++) {
7089 out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
7090 out[info.outOffset[j]] *= scale;
7093 out += info.outJump;
7096 else if (info.inFormat == RTAUDIO_FLOAT32) {
7097 Float32 *in = (Float32 *)inBuffer;
7098 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7099 for (j=0; j<info.channels; j++) {
7100 out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
7103 out += info.outJump;
7106 else if (info.inFormat == RTAUDIO_FLOAT64) {
7107 // Channel compensation and/or (de)interleaving only.
7108 Float64 *in = (Float64 *)inBuffer;
7109 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7110 for (j=0; j<info.channels; j++) {
7111 out[info.outOffset[j]] = in[info.inOffset[j]];
7114 out += info.outJump;
7118 else if (info.outFormat == RTAUDIO_FLOAT32) {
7120 Float32 *out = (Float32 *)outBuffer;
7122 if (info.inFormat == RTAUDIO_SINT8) {
7123 signed char *in = (signed char *)inBuffer;
7124 scale = 1.0 / 128.0;
7125 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7126 for (j=0; j<info.channels; j++) {
7127 out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
7128 out[info.outOffset[j]] *= scale;
7131 out += info.outJump;
7134 else if (info.inFormat == RTAUDIO_SINT16) {
7135 Int16 *in = (Int16 *)inBuffer;
7136 scale = 1.0 / 32768.0;
7137 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7138 for (j=0; j<info.channels; j++) {
7139 out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
7140 out[info.outOffset[j]] *= scale;
7143 out += info.outJump;
7146 else if (info.inFormat == RTAUDIO_SINT24) {
7147 Int32 *in = (Int32 *)inBuffer;
7148 scale = 1.0 / 8388608.0;
7149 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7150 for (j=0; j<info.channels; j++) {
7151 out[info.outOffset[j]] = (Float32) (in[info.inOffset[j]] & 0x00ffffff);
7152 out[info.outOffset[j]] *= scale;
7155 out += info.outJump;
7158 else if (info.inFormat == RTAUDIO_SINT32) {
7159 Int32 *in = (Int32 *)inBuffer;
7160 scale = 1.0 / 2147483648.0;
7161 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7162 for (j=0; j<info.channels; j++) {
7163 out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
7164 out[info.outOffset[j]] *= scale;
7167 out += info.outJump;
7170 else if (info.inFormat == RTAUDIO_FLOAT32) {
7171 // Channel compensation and/or (de)interleaving only.
7172 Float32 *in = (Float32 *)inBuffer;
7173 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7174 for (j=0; j<info.channels; j++) {
7175 out[info.outOffset[j]] = in[info.inOffset[j]];
7178 out += info.outJump;
7181 else if (info.inFormat == RTAUDIO_FLOAT64) {
7182 Float64 *in = (Float64 *)inBuffer;
7183 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7184 for (j=0; j<info.channels; j++) {
7185 out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
7188 out += info.outJump;
7192 else if (info.outFormat == RTAUDIO_SINT32) {
7193 Int32 *out = (Int32 *)outBuffer;
7194 if (info.inFormat == RTAUDIO_SINT8) {
7195 signed char *in = (signed char *)inBuffer;
7196 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7197 for (j=0; j<info.channels; j++) {
7198 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7199 out[info.outOffset[j]] <<= 24;
7202 out += info.outJump;
7205 else if (info.inFormat == RTAUDIO_SINT16) {
7206 Int16 *in = (Int16 *)inBuffer;
7207 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7208 for (j=0; j<info.channels; j++) {
7209 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7210 out[info.outOffset[j]] <<= 16;
7213 out += info.outJump;
7216 else if (info.inFormat == RTAUDIO_SINT24) {
7217 Int32 *in = (Int32 *)inBuffer;
7218 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7219 for (j=0; j<info.channels; j++) {
7220 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7221 out[info.outOffset[j]] <<= 8;
7224 out += info.outJump;
7227 else if (info.inFormat == RTAUDIO_SINT32) {
7228 // Channel compensation and/or (de)interleaving only.
7229 Int32 *in = (Int32 *)inBuffer;
7230 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7231 for (j=0; j<info.channels; j++) {
7232 out[info.outOffset[j]] = in[info.inOffset[j]];
7235 out += info.outJump;
7238 else if (info.inFormat == RTAUDIO_FLOAT32) {
7239 Float32 *in = (Float32 *)inBuffer;
7240 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7241 for (j=0; j<info.channels; j++) {
7242 out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 2147483647.0);
7245 out += info.outJump;
7248 else if (info.inFormat == RTAUDIO_FLOAT64) {
7249 Float64 *in = (Float64 *)inBuffer;
7250 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7251 for (j=0; j<info.channels; j++) {
7252 out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 2147483647.0);
7255 out += info.outJump;
7259 else if (info.outFormat == RTAUDIO_SINT24) {
7260 Int32 *out = (Int32 *)outBuffer;
7261 if (info.inFormat == RTAUDIO_SINT8) {
7262 signed char *in = (signed char *)inBuffer;
7263 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7264 for (j=0; j<info.channels; j++) {
7265 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7266 out[info.outOffset[j]] <<= 16;
7269 out += info.outJump;
7272 else if (info.inFormat == RTAUDIO_SINT16) {
7273 Int16 *in = (Int16 *)inBuffer;
7274 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7275 for (j=0; j<info.channels; j++) {
7276 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7277 out[info.outOffset[j]] <<= 8;
7280 out += info.outJump;
7283 else if (info.inFormat == RTAUDIO_SINT24) {
7284 // Channel compensation and/or (de)interleaving only.
7285 Int32 *in = (Int32 *)inBuffer;
7286 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7287 for (j=0; j<info.channels; j++) {
7288 out[info.outOffset[j]] = in[info.inOffset[j]];
7291 out += info.outJump;
7294 else if (info.inFormat == RTAUDIO_SINT32) {
7295 Int32 *in = (Int32 *)inBuffer;
7296 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7297 for (j=0; j<info.channels; j++) {
7298 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7299 out[info.outOffset[j]] >>= 8;
7302 out += info.outJump;
7305 else if (info.inFormat == RTAUDIO_FLOAT32) {
7306 Float32 *in = (Float32 *)inBuffer;
7307 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7308 for (j=0; j<info.channels; j++) {
7309 out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 8388608.0);
7312 out += info.outJump;
7315 else if (info.inFormat == RTAUDIO_FLOAT64) {
7316 Float64 *in = (Float64 *)inBuffer;
7317 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7318 for (j=0; j<info.channels; j++) {
7319 out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 2147483647.0);
7322 out += info.outJump;
7326 else if (info.outFormat == RTAUDIO_SINT16) {
7327 Int16 *out = (Int16 *)outBuffer;
7328 if (info.inFormat == RTAUDIO_SINT8) {
7329 signed char *in = (signed char *)inBuffer;
7330 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7331 for (j=0; j<info.channels; j++) {
7332 out[info.outOffset[j]] = (Int16) in[info.inOffset[j]];
7333 out[info.outOffset[j]] <<= 8;
7336 out += info.outJump;
7339 else if (info.inFormat == RTAUDIO_SINT16) {
7340 // Channel compensation and/or (de)interleaving only.
7341 Int16 *in = (Int16 *)inBuffer;
7342 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7343 for (j=0; j<info.channels; j++) {
7344 out[info.outOffset[j]] = in[info.inOffset[j]];
7347 out += info.outJump;
7350 else if (info.inFormat == RTAUDIO_SINT24) {
7351 Int32 *in = (Int32 *)inBuffer;
7352 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7353 for (j=0; j<info.channels; j++) {
7354 out[info.outOffset[j]] = (Int16) ((in[info.inOffset[j]] >> 8) & 0x0000ffff);
7357 out += info.outJump;
7360 else if (info.inFormat == RTAUDIO_SINT32) {
7361 Int32 *in = (Int32 *)inBuffer;
7362 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7363 for (j=0; j<info.channels; j++) {
7364 out[info.outOffset[j]] = (Int16) ((in[info.inOffset[j]] >> 16) & 0x0000ffff);
7367 out += info.outJump;
7370 else if (info.inFormat == RTAUDIO_FLOAT32) {
7371 Float32 *in = (Float32 *)inBuffer;
7372 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7373 for (j=0; j<info.channels; j++) {
7374 out[info.outOffset[j]] = (Int16) (in[info.inOffset[j]] * 32767.0);
7377 out += info.outJump;
7380 else if (info.inFormat == RTAUDIO_FLOAT64) {
7381 Float64 *in = (Float64 *)inBuffer;
7382 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7383 for (j=0; j<info.channels; j++) {
7384 out[info.outOffset[j]] = (Int16) (in[info.inOffset[j]] * 32767.0);
7387 out += info.outJump;
7391 else if (info.outFormat == RTAUDIO_SINT8) {
7392 signed char *out = (signed char *)outBuffer;
7393 if (info.inFormat == RTAUDIO_SINT8) {
7394 // Channel compensation and/or (de)interleaving only.
7395 signed char *in = (signed char *)inBuffer;
7396 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7397 for (j=0; j<info.channels; j++) {
7398 out[info.outOffset[j]] = in[info.inOffset[j]];
7401 out += info.outJump;
7404 if (info.inFormat == RTAUDIO_SINT16) {
7405 Int16 *in = (Int16 *)inBuffer;
7406 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7407 for (j=0; j<info.channels; j++) {
7408 out[info.outOffset[j]] = (signed char) ((in[info.inOffset[j]] >> 8) & 0x00ff);
7411 out += info.outJump;
7414 else if (info.inFormat == RTAUDIO_SINT24) {
7415 Int32 *in = (Int32 *)inBuffer;
7416 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7417 for (j=0; j<info.channels; j++) {
7418 out[info.outOffset[j]] = (signed char) ((in[info.inOffset[j]] >> 16) & 0x000000ff);
7421 out += info.outJump;
7424 else if (info.inFormat == RTAUDIO_SINT32) {
7425 Int32 *in = (Int32 *)inBuffer;
7426 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7427 for (j=0; j<info.channels; j++) {
7428 out[info.outOffset[j]] = (signed char) ((in[info.inOffset[j]] >> 24) & 0x000000ff);
7431 out += info.outJump;
7434 else if (info.inFormat == RTAUDIO_FLOAT32) {
7435 Float32 *in = (Float32 *)inBuffer;
7436 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7437 for (j=0; j<info.channels; j++) {
7438 out[info.outOffset[j]] = (signed char) (in[info.inOffset[j]] * 127.0);
7441 out += info.outJump;
7444 else if (info.inFormat == RTAUDIO_FLOAT64) {
7445 Float64 *in = (Float64 *)inBuffer;
7446 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7447 for (j=0; j<info.channels; j++) {
7448 out[info.outOffset[j]] = (signed char) (in[info.inOffset[j]] * 127.0);
7451 out += info.outJump;
7457 void RtApi :: byteSwapBuffer( char *buffer, unsigned int samples, RtAudioFormat format )
7463 if ( format == RTAUDIO_SINT16 ) {
7464 for ( unsigned int i=0; i<samples; i++ ) {
7465 // Swap 1st and 2nd bytes.
7470 // Increment 2 bytes.
7474 else if ( format == RTAUDIO_SINT24 ||
7475 format == RTAUDIO_SINT32 ||
7476 format == RTAUDIO_FLOAT32 ) {
7477 for ( unsigned int i=0; i<samples; i++ ) {
7478 // Swap 1st and 4th bytes.
7483 // Swap 2nd and 3rd bytes.
7489 // Increment 4 bytes.
7493 else if ( format == RTAUDIO_FLOAT64 ) {
7494 for ( unsigned int i=0; i<samples; i++ ) {
7495 // Swap 1st and 8th bytes
7500 // Swap 2nd and 7th bytes
7506 // Swap 3rd and 6th bytes
7512 // Swap 4th and 5th bytes
7518 // Increment 8 bytes.
7524 // Indentation settings for Vim and Emacs
7527 // c-basic-offset: 2
7528 // indent-tabs-mode: nil
7531 // vim: et sts=2 sw=2