1 /************************************************************************/
3 \brief Realtime audio i/o C++ classes.
5 RtAudio provides a common API (Application Programming Interface)
6 for realtime audio input/output across Linux (native ALSA, Jack,
7 and OSS), SGI, Macintosh OS X (CoreAudio and Jack), and Windows
8 (DirectSound and ASIO) operating systems.
10 RtAudio WWW site: http://www.music.mcgill.ca/~gary/rtaudio/
12 RtAudio: realtime audio i/o C++ classes
13 Copyright (c) 2001-2008 Gary P. Scavone
15 Permission is hereby granted, free of charge, to any person
16 obtaining a copy of this software and associated documentation files
17 (the "Software"), to deal in the Software without restriction,
18 including without limitation the rights to use, copy, modify, merge,
19 publish, distribute, sublicense, and/or sell copies of the Software,
20 and to permit persons to whom the Software is furnished to do so,
21 subject to the following conditions:
23 The above copyright notice and this permission notice shall be
24 included in all copies or substantial portions of the Software.
26 Any person wishing to distribute modifications to the Software is
27 asked to send the modifications to the original developer so that
28 they can be incorporated into the canonical version. This is,
29 however, not a binding provision of this license.
31 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
32 EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
33 MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
34 IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
35 ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
36 CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
37 WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
39 /************************************************************************/
41 // RtAudio: Version 4.0.4
46 // Static variable definitions.
// Number of entries in the SAMPLE_RATES table below (14 rates listed).
47 const unsigned int RtApi::MAX_SAMPLE_RATES = 14;
// Canonical list of sample rates probed when querying device capabilities
// (see RtApiCore::getDeviceInfo, which filters this table against the
// device's min/max nominal rates).
// NOTE(review): the array's closing "};" is outside this view.
48 const unsigned int RtApi::SAMPLE_RATES[] = {
49 4000, 5512, 8000, 9600, 11025, 16000, 22050,
50 32000, 44100, 48000, 88200, 96000, 176400, 192000
53 #if defined(__WINDOWS_DS__) || defined(__WINDOWS_ASIO__)
// Windows builds: map the portable MUTEX_* macros onto Win32 critical sections.
54 #define MUTEX_INITIALIZE(A) InitializeCriticalSection(A)
55 #define MUTEX_DESTROY(A) DeleteCriticalSection(A)
56 #define MUTEX_LOCK(A) EnterCriticalSection(A)
57 #define MUTEX_UNLOCK(A) LeaveCriticalSection(A)
58 #elif defined(__LINUX_ALSA__) || defined(__UNIX_JACK__) || defined(__LINUX_OSS__) || defined(__MACOSX_CORE__)
// POSIX builds: map the same macros onto pthread mutex calls.
60 #define MUTEX_INITIALIZE(A) pthread_mutex_init(A, NULL)
61 #define MUTEX_DESTROY(A) pthread_mutex_destroy(A)
62 #define MUTEX_LOCK(A) pthread_mutex_lock(A)
63 #define MUTEX_UNLOCK(A) pthread_mutex_unlock(A)
// No-API (dummy) builds: expand to a harmless expression so call sites compile.
// NOTE(review): the "#else" and the dummy MUTEX_LOCK/UNLOCK plus "#endif" are
// outside this view.
65 #define MUTEX_INITIALIZE(A) abs(*A) // dummy definitions
66 #define MUTEX_DESTROY(A) abs(*A) // dummy definitions
69 // *************************************************** //
71 // RtAudio definitions.
73 // *************************************************** //
// Appends to 'apis' one enum value for each audio backend compiled into
// this build. Never throws; the vector is only appended to, not cleared.
75 void RtAudio :: getCompiledApi( std::vector<RtAudio::Api> &apis ) throw()
79 // The order here will control the order of RtAudio's API search in
// the constructor's fallback loop: earlier entries are tried first.
81 #if defined(__UNIX_JACK__)
82 apis.push_back( UNIX_JACK );
84 #if defined(__LINUX_ALSA__)
85 apis.push_back( LINUX_ALSA );
87 #if defined(__LINUX_OSS__)
88 apis.push_back( LINUX_OSS );
90 #if defined(__WINDOWS_ASIO__)
91 apis.push_back( WINDOWS_ASIO );
93 #if defined(__WINDOWS_DS__)
94 apis.push_back( WINDOWS_DS );
96 #if defined(__MACOSX_CORE__)
97 apis.push_back( MACOSX_CORE );
99 #if defined(__RTAUDIO_DUMMY__)
100 apis.push_back( RTAUDIO_DUMMY );
// Instantiates the RtApi subclass matching 'api' and stores it in rtapi_.
// Each branch exists only when its backend was compiled in; if 'api' matches
// no compiled backend, rtapi_ is left unchanged (the caller checks for this).
104 void RtAudio :: openRtApi( RtAudio::Api api )
106 #if defined(__UNIX_JACK__)
107 if ( api == UNIX_JACK )
108 rtapi_ = new RtApiJack();
110 #if defined(__LINUX_ALSA__)
111 if ( api == LINUX_ALSA )
112 rtapi_ = new RtApiAlsa();
114 #if defined(__LINUX_OSS__)
115 if ( api == LINUX_OSS )
116 rtapi_ = new RtApiOss();
118 #if defined(__WINDOWS_ASIO__)
119 if ( api == WINDOWS_ASIO )
120 rtapi_ = new RtApiAsio();
122 #if defined(__WINDOWS_DS__)
123 if ( api == WINDOWS_DS )
124 rtapi_ = new RtApiDs();
126 #if defined(__MACOSX_CORE__)
127 if ( api == MACOSX_CORE )
128 rtapi_ = new RtApiCore();
130 #if defined(__RTAUDIO_DUMMY__)
131 if ( api == RTAUDIO_DUMMY )
132 rtapi_ = new RtApiDummy();
// Constructor: selects and instantiates a backend. If a specific 'api' is
// requested it is tried first; otherwise (or on failure) every compiled API
// is tried in getCompiledApi() order until one reports at least one device.
136 RtAudio :: RtAudio( RtAudio::Api api ) throw()
140 if ( api != UNSPECIFIED ) {
141 // Attempt to open the specified API.
143 if ( rtapi_ ) return;
145 // No compiled support for specified API value. Issue a debug
146 // warning and continue as if no API was specified.
147 std::cerr << "\nRtAudio: no compiled support for specified API argument!\n" << std::endl;
150 // Iterate through the compiled APIs and return as soon as we find
151 // one with at least one device or we reach the end of the list.
152 std::vector< RtAudio::Api > apis;
153 getCompiledApi( apis );
154 for ( unsigned int i=0; i<apis.size(); i++ ) {
155 openRtApi( apis[i] );
// The last API tried is kept even if it has zero devices.
156 if ( rtapi_->getDeviceCount() ) break;
159 if ( rtapi_ ) return;
161 // It should not be possible to get here because the preprocessor
162 // definition __RTAUDIO_DUMMY__ is automatically defined if no
163 // API-specific definitions are passed to the compiler. But just in
164 // case something weird happens, we'll print out an error message.
165 std::cerr << "\nRtAudio: no compiled API support found ... critical error!!\n\n";
// Destructor. Body not visible in this chunk — presumably releases the
// rtapi_ backend allocated by openRtApi(); TODO confirm against full source.
168 RtAudio :: ~RtAudio() throw()
// Thin forwarding wrapper: delegates stream creation to the selected
// backend's RtApi::openStream (argument validation happens there).
173 void RtAudio :: openStream( RtAudio::StreamParameters *outputParameters,
174 RtAudio::StreamParameters *inputParameters,
175 RtAudioFormat format, unsigned int sampleRate,
176 unsigned int *bufferFrames,
177 RtAudioCallback callback, void *userData,
178 RtAudio::StreamOptions *options )
180 return rtapi_->openStream( outputParameters, inputParameters, format,
181 sampleRate, bufferFrames, callback,
185 // *************************************************** //
187 // Public RtApi definitions (see end of file for
188 // private or protected utility functions).
190 // *************************************************** //
// RtApi constructor body (the signature line is not visible in this chunk):
// put the stream bookkeeping into a known "closed" state and create the
// mutex that guards stream operations.
194 stream_.state = STREAM_CLOSED;
195 stream_.mode = UNINITIALIZED;
196 stream_.apiHandle = 0;
197 stream_.userBuffer[0] = 0;
198 stream_.userBuffer[1] = 0;
199 MUTEX_INITIALIZE( &stream_.mutex );
200 showWarnings_ = true;
// RtApi destructor body: release the mutex created above.
205 MUTEX_DESTROY( &stream_.mutex );
// Validates all user-supplied stream parameters, then asks the subclass
// (via probeDeviceOpen) to open the output and/or input halves of the
// stream. On success, records the callback/user data and marks the
// stream STREAM_STOPPED. Invalid arguments raise RtError::INVALID_USE;
// device-open failures raise RtError::SYSTEM_ERROR.
208 void RtApi :: openStream( RtAudio::StreamParameters *oParams,
209 RtAudio::StreamParameters *iParams,
210 RtAudioFormat format, unsigned int sampleRate,
211 unsigned int *bufferFrames,
212 RtAudioCallback callback, void *userData,
213 RtAudio::StreamOptions *options )
// --- Argument validation ---
215 if ( stream_.state != STREAM_CLOSED ) {
216 errorText_ = "RtApi::openStream: a stream is already open!";
217 error( RtError::INVALID_USE );
220 if ( oParams && oParams->nChannels < 1 ) {
221 errorText_ = "RtApi::openStream: a non-NULL output StreamParameters structure cannot have an nChannels value less than one.";
222 error( RtError::INVALID_USE );
225 if ( iParams && iParams->nChannels < 1 ) {
226 errorText_ = "RtApi::openStream: a non-NULL input StreamParameters structure cannot have an nChannels value less than one.";
227 error( RtError::INVALID_USE );
230 if ( oParams == NULL && iParams == NULL ) {
231 errorText_ = "RtApi::openStream: input and output StreamParameters structures are both NULL!";
232 error( RtError::INVALID_USE );
// formatBytes() returns 0 for unrecognized RtAudioFormat values.
235 if ( formatBytes(format) == 0 ) {
236 errorText_ = "RtApi::openStream: 'format' parameter value is undefined.";
237 error( RtError::INVALID_USE );
// --- Device-id range checks (guarding "if (oParams)" / "if (iParams)"
// --- lines are not visible in this chunk) ---
240 unsigned int nDevices = getDeviceCount();
241 unsigned int oChannels = 0;
243 oChannels = oParams->nChannels;
244 if ( oParams->deviceId >= nDevices ) {
245 errorText_ = "RtApi::openStream: output device parameter value is invalid.";
246 error( RtError::INVALID_USE );
250 unsigned int iChannels = 0;
252 iChannels = iParams->nChannels;
253 if ( iParams->deviceId >= nDevices ) {
254 errorText_ = "RtApi::openStream: input device parameter value is invalid.";
255 error( RtError::INVALID_USE );
// --- Open the requested stream direction(s) via the subclass ---
262 if ( oChannels > 0 ) {
264 result = probeDeviceOpen( oParams->deviceId, OUTPUT, oChannels, oParams->firstChannel,
265 sampleRate, format, bufferFrames, options );
266 if ( result == false ) error( RtError::SYSTEM_ERROR );
269 if ( iChannels > 0 ) {
271 result = probeDeviceOpen( iParams->deviceId, INPUT, iChannels, iParams->firstChannel,
272 sampleRate, format, bufferFrames, options );
273 if ( result == false ) {
// Duplex case: if the output half already opened, tear it down before
// reporting the input failure so no half-open stream is left behind.
274 if ( oChannels > 0 ) closeStream();
275 error( RtError::SYSTEM_ERROR );
279 stream_.callbackInfo.callback = (void *) callback;
280 stream_.callbackInfo.userData = userData;
// Report back the actual buffer count chosen by the backend.
282 if ( options ) options->numberOfBuffers = stream_.nBuffers;
283 stream_.state = STREAM_STOPPED;
// Base-class defaults for the "default device" queries. The return
// statements are not visible in this chunk — presumably both return 0
// (the first device); TODO confirm against full source.
286 unsigned int RtApi :: getDefaultInputDevice( void )
288 // Should be implemented in subclasses if possible.
292 unsigned int RtApi :: getDefaultOutputDevice( void )
294 // Should be implemented in subclasses if possible.
// Pure-virtual-style placeholder: every concrete API class overrides this.
298 void RtApi :: closeStream( void )
300 // MUST be implemented in subclasses!
// Placeholder for the per-backend device-open logic; the bool return
// indicates success. Overridden by every concrete API class.
304 bool RtApi :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
305 unsigned int firstChannel, unsigned int sampleRate,
306 RtAudioFormat format, unsigned int *bufferSize,
307 RtAudio::StreamOptions *options )
309 // MUST be implemented in subclasses!
// Advances the running stream clock by one buffer's duration (in seconds).
313 void RtApi :: tickStreamTime( void )
315 // Subclasses that do not provide their own implementation of
316 // getStreamTime should call this function once per buffer I/O to
317 // provide basic stream time support.
319 stream_.streamTime += ( stream_.bufferSize * 1.0 / stream_.sampleRate );
// Record the wall-clock time of this tick so getStreamTime() can
// interpolate between buffer boundaries.
321 #if defined( HAVE_GETTIMEOFDAY )
322 gettimeofday( &stream_.lastTickTimestamp, NULL );
// Returns the stream's total latency in frames: the output latency
// (latency[0]) plus, for input/duplex streams, the input latency
// (latency[1]).
326 long RtApi :: getStreamLatency( void )
330 long totalLatency = 0;
331 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX )
332 totalLatency = stream_.latency[0];
333 if ( stream_.mode == INPUT || stream_.mode == DUPLEX )
334 totalLatency += stream_.latency[1];
// Returns the number of seconds of processed stream time. When
// gettimeofday() is available, the value is refined by adding the
// wall-clock time elapsed since tickStreamTime() last ran.
339 double RtApi :: getStreamTime( void )
343 #if defined( HAVE_GETTIMEOFDAY )
344 // Return a very accurate estimate of the stream time by
345 // adding in the elapsed time since the last tick.
// No interpolation before the stream has started ticking.
349 if ( stream_.state != STREAM_RUNNING || stream_.streamTime == 0.0 )
350 return stream_.streamTime;
352 gettimeofday( &now, NULL );
353 then = stream_.lastTickTimestamp;
354 return stream_.streamTime +
355 ((now.tv_sec + 0.000001 * now.tv_usec) -
356 (then.tv_sec + 0.000001 * then.tv_usec));
// Fallback (no gettimeofday): coarse, buffer-granularity time only.
358 return stream_.streamTime;
363 // *************************************************** //
365 // OS/API-specific methods.
367 // *************************************************** //
369 #if defined(__MACOSX_CORE__)
371 // The OS X CoreAudio API is designed to use a separate callback
372 // procedure for each of its audio devices. A single RtAudio duplex
373 // stream using two different devices is supported here, though it
374 // cannot be guaranteed to always behave correctly because we cannot
375 // synchronize these two callbacks.
377 // A property listener is installed for over/underrun information.
378 // However, no functionality is currently provided to allow property
379 // listeners to trigger user handlers because it is unclear what could
380 // be done if a critical stream parameter (buffer size, sample rate,
381 // device disconnect) notification arrived. The listeners entail
382 // quite a bit of extra code and most likely, a user program wouldn't
383 // be prepared for the result anyway. However, we do provide a flag
384 // to the client callback function to inform of an over/underrun.
386 // The mechanism for querying and setting system parameters was
387 // updated (and perhaps simplified) in OS-X version 10.4. However,
388 // since 10.4 support is not necessarily available to all users, I've
389 // decided not to update the respective code at this time. Perhaps
390 // this will happen when Apple makes 10.4 free for everyone. :-)
392 // A structure to hold various information related to the CoreAudio API
// implementation. One instance is stored in stream_.apiHandle; index 0 is
// the output device/direction and index 1 the input, matching the xrun
// flags set by deviceListener().
395 AudioDeviceID id[2]; // device ids
396 UInt32 iStream[2]; // device stream index (first for mono mode)
399 pthread_cond_t condition;
400 int drainCounter; // Tracks callback counts when draining
401 bool internalDrain; // Indicates if stop is initiated from callback or not.
// Default constructor: zero the buffer/drain state and clear both
// device ids and over/underrun flags.
404 :deviceBuffer(0), drainCounter(0), internalDrain(false) { id[0] = 0; id[1] = 0; xrun[0] = false; xrun[1] = false; }
// CoreAudio backend constructor: all real setup happens in probeDeviceOpen.
407 RtApiCore :: RtApiCore()
409 // Nothing to do here.
// Destructor: ensure any open stream is shut down before base-class cleanup.
412 RtApiCore :: ~RtApiCore()
414 // The subclass destructor gets called before the base class
415 // destructor, so close an existing stream before deallocating
416 // apiDeviceId memory.
417 if ( stream_.state != STREAM_CLOSED ) closeStream();
// Counts audio devices by querying the size of the system device-ID list
// and dividing by sizeof(AudioDeviceID). Emits a WARNING on failure.
// NOTE(review): AudioHardwareGetPropertyInfo is deprecated in later OS X
// SDKs (replaced by AudioObjectGetPropertyDataSize) — confirm the target
// SDK before modernizing.
420 unsigned int RtApiCore :: getDeviceCount( void )
422 // Find out how many audio devices there are, if any.
424 OSStatus result = AudioHardwareGetPropertyInfo( kAudioHardwarePropertyDevices, &dataSize, NULL );
425 if ( result != noErr ) {
426 errorText_ = "RtApiCore::getDeviceCount: OS-X error getting device info!";
427 error( RtError::WARNING );
431 return dataSize / sizeof( AudioDeviceID );
// Returns the INDEX (into the system device list) of the default input
// device, not its AudioDeviceID. With zero or one device the answer is
// trivially index 0. On any query failure a WARNING is emitted.
434 unsigned int RtApiCore :: getDefaultInputDevice( void )
436 unsigned int nDevices = getDeviceCount();
437 if ( nDevices <= 1 ) return 0;
// Fetch the default input device's AudioDeviceID.
440 UInt32 dataSize = sizeof( AudioDeviceID );
441 OSStatus result = AudioHardwareGetProperty( kAudioHardwarePropertyDefaultInputDevice,
444 if ( result != noErr ) {
445 errorText_ = "RtApiCore::getDefaultInputDevice: OS-X system error getting device.";
446 error( RtError::WARNING );
// Fetch the full device list and translate the ID into a list index.
450 dataSize *= nDevices;
451 AudioDeviceID deviceList[ nDevices ];
452 result = AudioHardwareGetProperty( kAudioHardwarePropertyDevices, &dataSize, (void *) &deviceList );
453 if ( result != noErr ) {
454 errorText_ = "RtApiCore::getDefaultInputDevice: OS-X system error getting device IDs.";
455 error( RtError::WARNING );
459 for ( unsigned int i=0; i<nDevices; i++ )
460 if ( id == deviceList[i] ) return i;
// Reached only if the default ID is absent from the device list.
462 errorText_ = "RtApiCore::getDefaultInputDevice: No default device found!";
463 error( RtError::WARNING );
// Mirror of getDefaultInputDevice for the output side: resolves the
// default output device's AudioDeviceID to an index in the system
// device list. Emits a WARNING on any failure.
467 unsigned int RtApiCore :: getDefaultOutputDevice( void )
469 unsigned int nDevices = getDeviceCount();
470 if ( nDevices <= 1 ) return 0;
// Fetch the default output device's AudioDeviceID.
473 UInt32 dataSize = sizeof( AudioDeviceID );
474 OSStatus result = AudioHardwareGetProperty( kAudioHardwarePropertyDefaultOutputDevice,
477 if ( result != noErr ) {
478 errorText_ = "RtApiCore::getDefaultOutputDevice: OS-X system error getting device.";
479 error( RtError::WARNING );
// Fetch the full device list and translate the ID into a list index.
483 dataSize *= nDevices;
484 AudioDeviceID deviceList[ nDevices ];
485 result = AudioHardwareGetProperty( kAudioHardwarePropertyDevices, &dataSize, (void *) &deviceList );
486 if ( result != noErr ) {
487 errorText_ = "RtApiCore::getDefaultOutputDevice: OS-X system error getting device IDs.";
488 error( RtError::WARNING );
492 for ( unsigned int i=0; i<nDevices; i++ )
493 if ( id == deviceList[i] ) return i;
// Reached only if the default ID is absent from the device list.
495 errorText_ = "RtApiCore::getDefaultOutputDevice: No default device found!";
496 error( RtError::WARNING );
// Builds an RtAudio::DeviceInfo for device index 'device': name
// ("manufacturer: name"), output/input/duplex channel counts, supported
// sample rates (intersection of SAMPLE_RATES with the device's nominal
// range), native format, and default-device flags. Most failures are
// non-fatal WARNINGs; a bad device index is INVALID_USE.
// NOTE(review): several error-path lines (returns, free(bufferList)) are
// not visible in this chunk — verify against full source before assuming
// the malloc'd buffer lists are released on every path.
500 RtAudio::DeviceInfo RtApiCore :: getDeviceInfo( unsigned int device )
502 RtAudio::DeviceInfo info;
506 unsigned int nDevices = getDeviceCount();
507 if ( nDevices == 0 ) {
508 errorText_ = "RtApiCore::getDeviceInfo: no devices found!";
509 error( RtError::INVALID_USE );
512 if ( device >= nDevices ) {
513 errorText_ = "RtApiCore::getDeviceInfo: device ID is invalid!";
514 error( RtError::INVALID_USE );
// Translate the device index into an AudioDeviceID via the system list.
517 AudioDeviceID deviceList[ nDevices ];
518 UInt32 dataSize = sizeof( AudioDeviceID ) * nDevices;
519 OSStatus result = AudioHardwareGetProperty( kAudioHardwarePropertyDevices, &dataSize, (void *) &deviceList );
520 if ( result != noErr ) {
521 errorText_ = "RtApiCore::getDeviceInfo: OS-X system error getting device IDs.";
522 error( RtError::WARNING );
526 AudioDeviceID id = deviceList[ device ];
528 // Get the device name.
// Name is built as "<manufacturer>: <device name>".
532 result = AudioDeviceGetProperty( id, 0, false,
533 kAudioDevicePropertyDeviceManufacturer,
536 if ( result != noErr ) {
537 errorStream_ << "RtApiCore::probeDeviceInfo: system error (" << getErrorCode( result ) << ") getting device manufacturer.";
538 errorText_ = errorStream_.str();
539 error( RtError::WARNING );
542 info.name.append( (const char *)name, strlen(name) );
543 info.name.append( ": " );
546 result = AudioDeviceGetProperty( id, 0, false,
547 kAudioDevicePropertyDeviceName,
549 if ( result != noErr ) {
550 errorStream_ << "RtApiCore::probeDeviceInfo: system error (" << getErrorCode( result ) << ") getting device name.";
551 errorText_ = errorStream_.str();
552 error( RtError::WARNING );
555 info.name.append( (const char *)name, strlen(name) );
557 // Get the output stream "configuration".
558 AudioBufferList *bufferList = nil;
// First query the required size, then allocate and fetch the list.
559 result = AudioDeviceGetPropertyInfo( id, 0, false,
560 kAudioDevicePropertyStreamConfiguration,
562 if (result != noErr || dataSize == 0) {
563 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting output stream configuration info for device (" << device << ").";
564 errorText_ = errorStream_.str();
565 error( RtError::WARNING );
569 // Allocate the AudioBufferList.
570 bufferList = (AudioBufferList *) malloc( dataSize );
571 if ( bufferList == NULL ) {
572 errorText_ = "RtApiCore::getDeviceInfo: memory error allocating output AudioBufferList.";
573 error( RtError::WARNING );
577 result = AudioDeviceGetProperty( id, 0, false,
578 kAudioDevicePropertyStreamConfiguration,
579 &dataSize, bufferList );
580 if ( result != noErr ) {
582 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting output stream configuration for device (" << device << ").";
583 errorText_ = errorStream_.str();
584 error( RtError::WARNING );
588 // Get output channel information.
// Output channels = sum of channels over all output streams.
589 unsigned int i, nStreams = bufferList->mNumberBuffers;
590 for ( i=0; i<nStreams; i++ )
591 info.outputChannels += bufferList->mBuffers[i].mNumberChannels;
594 // Get the input stream "configuration".
// Same size-query / allocate / fetch sequence, with isInput = true.
595 result = AudioDeviceGetPropertyInfo( id, 0, true,
596 kAudioDevicePropertyStreamConfiguration,
598 if (result != noErr || dataSize == 0) {
599 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting input stream configuration info for device (" << device << ").";
600 errorText_ = errorStream_.str();
601 error( RtError::WARNING );
605 // Allocate the AudioBufferList.
606 bufferList = (AudioBufferList *) malloc( dataSize );
607 if ( bufferList == NULL ) {
608 errorText_ = "RtApiCore::getDeviceInfo: memory error allocating input AudioBufferList.";
609 error( RtError::WARNING );
613 result = AudioDeviceGetProperty( id, 0, true,
614 kAudioDevicePropertyStreamConfiguration,
615 &dataSize, bufferList );
616 if ( result != noErr ) {
618 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting input stream configuration for device (" << device << ").";
619 errorText_ = errorStream_.str();
620 error( RtError::WARNING );
624 // Get input channel information.
625 nStreams = bufferList->mNumberBuffers;
626 for ( i=0; i<nStreams; i++ )
627 info.inputChannels += bufferList->mBuffers[i].mNumberChannels;
630 // If device opens for both playback and capture, we determine the channels.
// Duplex channels = min(output, input).
631 if ( info.outputChannels > 0 && info.inputChannels > 0 )
632 info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
634 // Probe the device sample rates.
// Rates are probed on the output side unless the device is input-only.
635 bool isInput = false;
636 if ( info.outputChannels == 0 ) isInput = true;
638 // Determine the supported sample rates.
639 result = AudioDeviceGetPropertyInfo( id, 0, isInput,
640 kAudioDevicePropertyAvailableNominalSampleRates,
643 if ( result != kAudioHardwareNoError || dataSize == 0 ) {
644 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting sample rate info.";
645 errorText_ = errorStream_.str();
646 error( RtError::WARNING );
650 UInt32 nRanges = dataSize / sizeof( AudioValueRange );
651 AudioValueRange rangeList[ nRanges ];
652 result = AudioDeviceGetProperty( id, 0, isInput,
653 kAudioDevicePropertyAvailableNominalSampleRates,
654 &dataSize, &rangeList );
656 if ( result != kAudioHardwareNoError ) {
657 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting sample rates.";
658 errorText_ = errorStream_.str();
659 error( RtError::WARNING );
// Collapse all reported ranges into one overall [min, max] span.
663 Float64 minimumRate = 100000000.0, maximumRate = 0.0;
664 for ( UInt32 i=0; i<nRanges; i++ ) {
665 if ( rangeList[i].mMinimum < minimumRate ) minimumRate = rangeList[i].mMinimum;
666 if ( rangeList[i].mMaximum > maximumRate ) maximumRate = rangeList[i].mMaximum;
// Report every canonical SAMPLE_RATES entry inside that span.
669 info.sampleRates.clear();
670 for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {
671 if ( SAMPLE_RATES[k] >= (unsigned int) minimumRate && SAMPLE_RATES[k] <= (unsigned int) maximumRate )
672 info.sampleRates.push_back( SAMPLE_RATES[k] );
675 if ( info.sampleRates.size() == 0 ) {
676 errorStream_ << "RtApiCore::probeDeviceInfo: No supported sample rates found for device (" << device << ").";
677 errorText_ = errorStream_.str();
678 error( RtError::WARNING );
682 // CoreAudio always uses 32-bit floating point data for PCM streams.
683 // Thus, any other "physical" formats supported by the device are of
684 // no interest to the client.
685 info.nativeFormats = RTAUDIO_FLOAT32;
687 if ( getDefaultOutputDevice() == device )
688 info.isDefaultOutput = true;
689 if ( getDefaultInputDevice() == device )
690 info.isDefaultInput = true;
// CoreAudio IOProc trampoline: recovers the RtApiCore object from the
// user-data pointer and forwards to its callbackEvent(). A false return
// from callbackEvent is mapped to kAudioHardwareUnspecifiedError so the
// HAL stops calling us.
696 OSStatus callbackHandler( AudioDeviceID inDevice,
697 const AudioTimeStamp* inNow,
698 const AudioBufferList* inInputData,
699 const AudioTimeStamp* inInputTime,
700 AudioBufferList* outOutputData,
701 const AudioTimeStamp* inOutputTime,
704 CallbackInfo *info = (CallbackInfo *) infoPointer;
706 RtApiCore *object = (RtApiCore *) info->object;
707 if ( object->callbackEvent( inDevice, inInputData, outOutputData ) == false )
708 return kAudioHardwareUnspecifiedError;
710 return kAudioHardwareNoError;
// Property listener installed on the device: on a processor-overload
// notification, latches the over/underrun flag in the CoreHandle
// (xrun[1] for input, xrun[0] for output — the guarding direction test
// between the two assignments is not visible in this chunk).
713 OSStatus deviceListener( AudioDeviceID inDevice,
716 AudioDevicePropertyID propertyID,
717 void* handlePointer )
719 CoreHandle *handle = (CoreHandle *) handlePointer;
720 if ( propertyID == kAudioDeviceProcessorOverload ) {
722 handle->xrun[1] = true;
724 handle->xrun[0] = true;
727 return kAudioHardwareNoError;
// File-local helper: true if the device exposes 'property' on the given
// channel/direction, determined by whether the size query succeeds.
// (The return statement is not visible in this chunk.)
730 static bool hasProperty( AudioDeviceID id, UInt32 channel, bool isInput, AudioDevicePropertyID property )
732 OSStatus result = AudioDeviceGetPropertyInfo( id, channel, isInput, property, NULL, NULL );
736 bool RtApiCore :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
737 unsigned int firstChannel, unsigned int sampleRate,
738 RtAudioFormat format, unsigned int *bufferSize,
739 RtAudio::StreamOptions *options )
742 unsigned int nDevices = getDeviceCount();
743 if ( nDevices == 0 ) {
744 // This should not happen because a check is made before this function is called.
745 errorText_ = "RtApiCore::probeDeviceOpen: no devices found!";
749 if ( device >= nDevices ) {
750 // This should not happen because a check is made before this function is called.
751 errorText_ = "RtApiCore::probeDeviceOpen: device ID is invalid!";
755 AudioDeviceID deviceList[ nDevices ];
756 UInt32 dataSize = sizeof( AudioDeviceID ) * nDevices;
757 OSStatus result = AudioHardwareGetProperty( kAudioHardwarePropertyDevices, &dataSize, (void *) &deviceList );
758 if ( result != noErr ) {
759 errorText_ = "RtApiCore::probeDeviceOpen: OS-X system error getting device IDs.";
763 AudioDeviceID id = deviceList[ device ];
765 // Setup for stream mode.
766 bool isInput = false;
767 if ( mode == INPUT ) isInput = true;
769 // Set or disable "hog" mode.
770 dataSize = sizeof( UInt32 );
772 if ( options && options->flags & RTAUDIO_HOG_DEVICE ) doHog = 1;
773 result = AudioHardwareSetProperty( kAudioHardwarePropertyHogModeIsAllowed, dataSize, &doHog );
774 if ( result != noErr ) {
775 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting 'hog' state!";
776 errorText_ = errorStream_.str();
780 // Get the stream "configuration".
781 AudioBufferList *bufferList;
782 result = AudioDeviceGetPropertyInfo( id, 0, isInput,
783 kAudioDevicePropertyStreamConfiguration,
785 if (result != noErr || dataSize == 0) {
786 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream configuration info for device (" << device << ").";
787 errorText_ = errorStream_.str();
791 // Allocate the AudioBufferList.
792 bufferList = (AudioBufferList *) malloc( dataSize );
793 if ( bufferList == NULL ) {
794 errorText_ = "RtApiCore::probeDeviceOpen: memory error allocating AudioBufferList.";
798 result = AudioDeviceGetProperty( id, 0, isInput,
799 kAudioDevicePropertyStreamConfiguration,
800 &dataSize, bufferList );
801 if ( result != noErr ) {
803 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream configuration for device (" << device << ").";
804 errorText_ = errorStream_.str();
808 // Search for a stream that contains the desired number of
809 // channels. CoreAudio devices can have an arbitrary number of
810 // streams and each stream can have an arbitrary number of channels.
811 // For each stream, a single buffer of interleaved samples is
812 // provided. RtAudio currently only supports the use of one stream
813 // of interleaved data or multiple consecutive single-channel
814 // streams. Thus, our search below is limited to these two
816 unsigned int streamChannels = 0, nStreams = 0;
817 UInt32 iChannel = 0, iStream = 0;
818 unsigned int offsetCounter = firstChannel;
819 stream_.deviceInterleaved[mode] = true;
820 nStreams = bufferList->mNumberBuffers;
821 bool foundStream = false;
823 for ( iStream=0; iStream<nStreams; iStream++ ) {
824 streamChannels = bufferList->mBuffers[iStream].mNumberChannels;
825 if ( streamChannels >= channels + offsetCounter ) {
826 iChannel += offsetCounter;
830 if ( streamChannels > offsetCounter ) break;
831 offsetCounter -= streamChannels;
832 iChannel += streamChannels;
835 // If we didn't find a single stream above, see if we can meet
836 // the channel specification in mono mode (i.e. using separate
837 // non-interleaved buffers). This can only work if there are N
838 // consecutive one-channel streams, where N is the number of
839 // desired channels (+ channel offset).
840 if ( foundStream == false ) {
841 unsigned int counter = 0;
842 offsetCounter = firstChannel;
844 for ( iStream=0; iStream<nStreams; iStream++ ) {
845 streamChannels = bufferList->mBuffers[iStream].mNumberChannels;
846 if ( offsetCounter ) {
847 if ( streamChannels > offsetCounter ) break;
848 offsetCounter -= streamChannels;
850 else if ( streamChannels == 1 )
854 if ( counter == channels ) {
855 iStream -= channels - 1;
856 iChannel -= channels - 1;
857 stream_.deviceInterleaved[mode] = false;
861 iChannel += streamChannels;
866 if ( foundStream == false ) {
867 errorStream_ << "RtApiCore::probeDeviceOpen: unable to find OS-X stream on device (" << device << ") for requested channels.";
868 errorText_ = errorStream_.str();
872 // Determine the buffer size.
873 AudioValueRange bufferRange;
874 dataSize = sizeof( AudioValueRange );
875 result = AudioDeviceGetProperty( id, 0, isInput,
876 kAudioDevicePropertyBufferFrameSizeRange,
877 &dataSize, &bufferRange );
878 if ( result != noErr ) {
879 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting buffer size range for device (" << device << ").";
880 errorText_ = errorStream_.str();
884 if ( bufferRange.mMinimum > *bufferSize ) *bufferSize = (unsigned long) bufferRange.mMinimum;
885 else if ( bufferRange.mMaximum < *bufferSize ) *bufferSize = (unsigned long) bufferRange.mMaximum;
886 if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) *bufferSize = (unsigned long) bufferRange.mMinimum;
888 // Set the buffer size. For mono mode, I'm assuming we only need to
889 // make this setting for the master channel.
890 UInt32 theSize = (UInt32) *bufferSize;
891 dataSize = sizeof( UInt32 );
892 result = AudioDeviceSetProperty( id, NULL, 0, isInput,
893 kAudioDevicePropertyBufferFrameSize,
894 dataSize, &theSize );
896 if ( result != noErr ) {
897 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting the buffer size for device (" << device << ").";
898 errorText_ = errorStream_.str();
902 // If attempting to setup a duplex stream, the bufferSize parameter
903 // MUST be the same in both directions!
904 *bufferSize = theSize;
905 if ( stream_.mode == OUTPUT && mode == INPUT && *bufferSize != stream_.bufferSize ) {
906 errorStream_ << "RtApiCore::probeDeviceOpen: system error setting buffer size for duplex stream on device (" << device << ").";
907 errorText_ = errorStream_.str();
911 stream_.bufferSize = *bufferSize;
912 stream_.nBuffers = 1;
914 // Get the stream ID(s) so we can set the stream format. In mono
915 // mode, we'll have to do this for each stream (channel).
916 AudioStreamID streamIDs[ nStreams ];
917 dataSize = nStreams * sizeof( AudioStreamID );
918 result = AudioDeviceGetProperty( id, 0, isInput,
919 kAudioDevicePropertyStreams,
920 &dataSize, &streamIDs );
921 if ( result != noErr ) {
922 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream ID(s) for device (" << device << ").";
923 errorText_ = errorStream_.str();
927 // Now set the stream format. Also, check the physical format of the
928 // device and change that if necessary.
929 AudioStreamBasicDescription description;
930 dataSize = sizeof( AudioStreamBasicDescription );
931 if ( stream_.deviceInterleaved[mode] ) nStreams = 1;
932 else nStreams = channels;
935 for ( unsigned int i=0; i<nStreams; i++ ) {
937 result = AudioStreamGetProperty( streamIDs[iStream+i], 0,
938 kAudioStreamPropertyVirtualFormat,
939 &dataSize, &description );
941 if ( result != noErr ) {
// --- Continuation of RtApiCore::probeDeviceOpen() (function head above) ---
// Error path for the preceding virtual-format query on this stream.
942 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream format for device (" << device << ").";
943 errorText_ = errorStream_.str();
947 // Set the sample rate and data format id. However, only make the
948 // change if the sample rate is not within 1.0 of the desired
949 // rate and the format is not linear pcm.
950 updateFormat = false;
951 if ( fabs( description.mSampleRate - (double)sampleRate ) > 1.0 ) {
952 description.mSampleRate = (double) sampleRate;
956 if ( description.mFormatID != kAudioFormatLinearPCM ) {
957 description.mFormatID = kAudioFormatLinearPCM;
// Push the modified virtual format back to the stream only when needed.
961 if ( updateFormat ) {
962 result = AudioStreamSetProperty( streamIDs[iStream+i], NULL, 0,
963 kAudioStreamPropertyVirtualFormat,
964 dataSize, &description );
965 if ( result != noErr ) {
966 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting sample rate or data format for device (" << device << ").";
967 errorText_ = errorStream_.str();
972 // Now check the physical format.
973 result = AudioStreamGetProperty( streamIDs[iStream+i], 0,
974 kAudioStreamPropertyPhysicalFormat,
975 &dataSize, &description );
976 if ( result != noErr ) {
977 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream physical format for device (" << device << ").";
978 errorText_ = errorStream_.str();
// If the physical format is not linear PCM at >= 24 bits, probe for a
// better one, trying deeper formats first and stepping down.
982 if ( description.mFormatID != kAudioFormatLinearPCM || description.mBitsPerChannel < 24 ) {
983 description.mFormatID = kAudioFormatLinearPCM;
984 AudioStreamBasicDescription testDescription = description;
985 unsigned long formatFlags;
987 // We'll try higher bit rates first and then work our way down.
// First attempt: 32-bit float.
// BUG(review): '&' binds tighter than '|', so the next statement parses as
//   description.mFormatFlags | (kLinearPCMFormatFlagIsFloat & ~kLinearPCMFormatFlagIsSignedInteger)
// which never clears a pre-existing signed-integer flag. Intended (compare
// the correctly parenthesized integer case just below):
//   (description.mFormatFlags | kLinearPCMFormatFlagIsFloat) & ~kLinearPCMFormatFlagIsSignedInteger
988 testDescription.mBitsPerChannel = 32;
989 formatFlags = description.mFormatFlags | kLinearPCMFormatFlagIsFloat & ~kLinearPCMFormatFlagIsSignedInteger;
990 testDescription.mFormatFlags = formatFlags;
991 result = AudioStreamSetProperty( streamIDs[iStream+i], NULL, 0, kAudioStreamPropertyPhysicalFormat, dataSize, &testDescription );
992 if ( result == noErr ) continue;
// Next attempt: 32-bit signed integer.
994 testDescription = description;
995 testDescription.mBitsPerChannel = 32;
996 formatFlags = (description.mFormatFlags | kLinearPCMFormatFlagIsSignedInteger) & ~kLinearPCMFormatFlagIsFloat;
997 testDescription.mFormatFlags = formatFlags;
998 result = AudioStreamSetProperty( streamIDs[iStream+i], NULL, 0, kAudioStreamPropertyPhysicalFormat, dataSize, &testDescription );
999 if ( result == noErr ) continue;
// The 24-, 16- and 8-bit attempts below deliberately reuse the
// signed-integer formatFlags computed above.
1001 testDescription = description;
1002 testDescription.mBitsPerChannel = 24;
1003 testDescription.mFormatFlags = formatFlags;
1004 result = AudioStreamSetProperty( streamIDs[iStream+i], NULL, 0, kAudioStreamPropertyPhysicalFormat, dataSize, &testDescription );
1005 if ( result == noErr ) continue;
1007 testDescription = description;
1008 testDescription.mBitsPerChannel = 16;
1009 testDescription.mFormatFlags = formatFlags;
1010 result = AudioStreamSetProperty( streamIDs[iStream+i], NULL, 0, kAudioStreamPropertyPhysicalFormat, dataSize, &testDescription );
1011 if ( result == noErr ) continue;
// Last resort: 8-bit; if even that fails, the open is aborted.
1013 testDescription = description;
1014 testDescription.mBitsPerChannel = 8;
1015 testDescription.mFormatFlags = formatFlags;
1016 result = AudioStreamSetProperty( streamIDs[iStream+i], NULL, 0, kAudioStreamPropertyPhysicalFormat, dataSize, &testDescription );
1017 if ( result != noErr ) {
1018 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting physical data format for device (" << device << ").";
1019 errorText_ = errorStream_.str();
1025 // Get the stream latency. There can be latency in both the device
1026 // and the stream. First, attempt to get the device latency on the
1027 // master channel or the first open channel. Errors that might
1028 // occur here are not deemed critical.
1029 UInt32 latency, channel = 0;
1030 dataSize = sizeof( UInt32 );
1031 AudioDevicePropertyID property = kAudioDevicePropertyLatency;
// Try the master channel (0) first, then the first opened channel(s).
1032 for ( int i=0; i<2; i++ ) {
1033 if ( hasProperty( id, channel, isInput, property ) == true ) break;
1034 channel = iChannel + 1 + i;
1036 if ( channel <= iChannel + 1 ) {
1037 result = AudioDeviceGetProperty( id, channel, isInput, property, &dataSize, &latency );
1038 if ( result == kAudioHardwareNoError ) stream_.latency[ mode ] = latency;
// Latency-query failures are reported only as warnings (non-fatal).
1040 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting device latency for device (" << device << ").";
1041 errorText_ = errorStream_.str();
1042 error( RtError::WARNING );
1046 // Now try to get the stream latency. For "mono" mode, I assume the
1047 // latency is equal for all single-channel streams.
1048 result = AudioStreamGetProperty( streamIDs[iStream], 0, property, &dataSize, &latency );
1049 if ( result == kAudioHardwareNoError ) stream_.latency[ mode ] += latency;
1051 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream latency for device (" << device << ").";
1052 errorText_ = errorStream_.str();
1053 error( RtError::WARNING );
1056 // Byte-swapping: According to AudioHardware.h, the stream data will
1057 // always be presented in native-endian format, so we should never
1058 // need to byte swap.
1059 stream_.doByteSwap[mode] = false;
1061 // From the CoreAudio documentation, PCM data must be supplied as
// 32-bit floats — the device-side format is therefore fixed to FLOAT32.
1063 stream_.userFormat = format;
1064 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
1066 if ( stream_.deviceInterleaved[mode] )
1067 stream_.nDeviceChannels[mode] = description.mChannelsPerFrame;
// Non-interleaved ("mono" mode): one single-channel stream per channel.
1069 stream_.nDeviceChannels[mode] = channels;
1070 stream_.nUserChannels[mode] = channels;
1071 stream_.channelOffset[mode] = iChannel; // offset within a CoreAudio stream
1072 if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;
1073 else stream_.userInterleaved = true;
1075 // Set flags for buffer conversion.
1076 stream_.doConvertBuffer[mode] = false;
1077 if ( stream_.userFormat != stream_.deviceFormat[mode] )
1078 stream_.doConvertBuffer[mode] = true;
1079 if ( stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode] )
1080 stream_.doConvertBuffer[mode] = true;
1081 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
1082 stream_.nUserChannels[mode] > 1 )
1083 stream_.doConvertBuffer[mode] = true;
1085 // Allocate our CoreHandle structure for the stream.
1086 CoreHandle *handle = 0;
1087 if ( stream_.apiHandle == 0 ) {
1089 handle = new CoreHandle;
1091 catch ( std::bad_alloc& ) {
1092 errorText_ = "RtApiCore::probeDeviceOpen: error allocating CoreHandle memory.";
1096 if ( pthread_cond_init( &handle->condition, NULL ) ) {
1097 errorText_ = "RtApiCore::probeDeviceOpen: error initializing pthread condition variable.";
1100 stream_.apiHandle = (void *) handle;
// Second (duplex) pass: reuse the handle created on the first pass.
1103 handle = (CoreHandle *) stream_.apiHandle;
1104 handle->iStream[mode] = iStream;
1105 handle->id[mode] = id;
1107 // Allocate necessary internal buffers.
1108 unsigned long bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
1109 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
1110 if ( stream_.userBuffer[mode] == NULL ) {
1111 errorText_ = "RtApiCore::probeDeviceOpen: error allocating user buffer memory.";
1115 // If possible, we will make use of the CoreAudio stream buffers as
1116 // "device buffers". However, we can't do this if the device
1117 // buffers are non-interleaved ("mono" mode).
1118 if ( !stream_.deviceInterleaved[mode] && stream_.doConvertBuffer[mode] ) {
1120 bool makeBuffer = true;
1121 bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
1122 if ( mode == INPUT ) {
// For duplex streams, reuse the output device buffer when it is large enough.
1123 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
1124 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
1125 if ( bufferBytes <= bytesOut ) makeBuffer = false;
1130 bufferBytes *= *bufferSize;
1131 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
1132 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
1133 if ( stream_.deviceBuffer == NULL ) {
1134 errorText_ = "RtApiCore::probeDeviceOpen: error allocating device buffer memory.";
1138 // Save a pointer to our own device buffer in the CoreHandle
1139 // structure because we may need to use the stream_.deviceBuffer
1140 // variable to point to the CoreAudio buffer before buffer
1141 // conversion (if we have a duplex stream with two different
1142 // conversion schemes).
1143 handle->deviceBuffer = stream_.deviceBuffer;
1147 stream_.sampleRate = sampleRate;
1148 stream_.device[mode] = device;
1149 stream_.state = STREAM_STOPPED;
1150 stream_.callbackInfo.object = (void *) this;
1152 // Setup the buffer conversion information structure. We override
1153 // the channel offset value and perform our own setting for that
// (channel offsets are then applied to the convertInfo tables below).
1155 if ( stream_.doConvertBuffer[mode] ) {
1156 setConvertInfo( mode, 0 );
1158 // Add channel offset for interleaved channels.
1159 if ( firstChannel > 0 && stream_.deviceInterleaved[mode] ) {
1160 if ( mode == OUTPUT ) {
1161 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
1162 stream_.convertInfo[mode].outOffset[k] += firstChannel;
1165 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
1166 stream_.convertInfo[mode].inOffset[k] += firstChannel;
// Same device for input and output of a duplex stream: a single ioProc
// serves both directions.
1171 if ( mode == INPUT && stream_.mode == OUTPUT && stream_.device[0] == device )
1172 // Only one callback procedure per device.
1173 stream_.mode = DUPLEX;
1175 result = AudioDeviceAddIOProc( id, callbackHandler, (void *) &stream_.callbackInfo );
1176 if ( result != noErr ) {
// NOTE(review): unlike the sibling messages above, this one omits
// getErrorCode( result ) — consider adding it for consistency.
1177 errorStream_ << "RtApiCore::probeDeviceOpen: system error setting callback for device (" << device << ").";
1178 errorText_ = errorStream_.str();
1181 if ( stream_.mode == OUTPUT && mode == INPUT )
1182 stream_.mode = DUPLEX;
1184 stream_.mode = mode;
1187 // Setup the device property listener for over/underload.
// deviceListener records xruns in the handle; callbackEvent() reports them.
1188 result = AudioDeviceAddPropertyListener( id, 0, isInput,
1189 kAudioDeviceProcessorOverload,
1190 deviceListener, (void *) handle );
// Error cleanup path: release the condition variable, handle and buffers.
1196 pthread_cond_destroy( &handle->condition );
1198 stream_.apiHandle = 0;
1201 for ( int i=0; i<2; i++ ) {
1202 if ( stream_.userBuffer[i] ) {
1203 free( stream_.userBuffer[i] );
1204 stream_.userBuffer[i] = 0;
1208 if ( stream_.deviceBuffer ) {
1209 free( stream_.deviceBuffer );
1210 stream_.deviceBuffer = 0;
// Close the CoreAudio stream: stop the device(s) if running, remove the
// ioProc(s), free all buffers and destroy the per-stream CoreHandle state.
1216 void RtApiCore :: closeStream( void )
1218 if ( stream_.state == STREAM_CLOSED ) {
1219 errorText_ = "RtApiCore::closeStream(): no open stream to close!";
1220 error( RtError::WARNING );
1224 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
// Tear down the output side (also covers duplex on a single device).
1225 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
1226 if ( stream_.state == STREAM_RUNNING )
1227 AudioDeviceStop( handle->id[0], callbackHandler );
1228 AudioDeviceRemoveIOProc( handle->id[0], callbackHandler );
// Tear down the input side only when it uses a different device.
1231 if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1] ) ) {
1232 if ( stream_.state == STREAM_RUNNING )
1233 AudioDeviceStop( handle->id[1], callbackHandler );
1234 AudioDeviceRemoveIOProc( handle->id[1], callbackHandler );
1237 for ( int i=0; i<2; i++ ) {
1238 if ( stream_.userBuffer[i] ) {
1239 free( stream_.userBuffer[i] );
1240 stream_.userBuffer[i] = 0;
// Free via the handle's saved pointer: stream_.deviceBuffer may have been
// repointed at a CoreAudio-owned buffer during the last callback.
1244 if ( handle->deviceBuffer ) {
1245 free( handle->deviceBuffer );
1246 stream_.deviceBuffer = 0;
1249 // Destroy pthread condition variable.
1250 pthread_cond_destroy( &handle->condition );
1252 stream_.apiHandle = 0;
1254 stream_.mode = UNINITIALIZED;
1255 stream_.state = STREAM_CLOSED;
// Start the CoreAudio device(s) for this stream and mark it RUNNING.
// On failure, raises RtError::SYSTEM_ERROR after releasing the mutex.
1258 void RtApiCore :: startStream( void )
1261 if ( stream_.state == STREAM_RUNNING ) {
1262 errorText_ = "RtApiCore::startStream(): the stream is already running!";
1263 error( RtError::WARNING );
1267 MUTEX_LOCK( &stream_.mutex );
1269 OSStatus result = noErr;
1270 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
1271 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
1273 result = AudioDeviceStart( handle->id[0], callbackHandler );
1274 if ( result != noErr ) {
1275 errorStream_ << "RtApiCore::startStream: system error (" << getErrorCode( result ) << ") starting callback procedure on device (" << stream_.device[0] << ").";
1276 errorText_ = errorStream_.str();
// Start the input device separately only when it differs from the output.
1281 if ( stream_.mode == INPUT ||
1282 ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1] ) ) {
1284 result = AudioDeviceStart( handle->id[1], callbackHandler );
1285 if ( result != noErr ) {
// NOTE(review): omits getErrorCode( result ), unlike the output-side message.
1286 errorStream_ << "RtApiCore::startStream: system error starting input callback procedure on device (" << stream_.device[1] << ").";
1287 errorText_ = errorStream_.str();
// Reset the drain bookkeeping used by stopStream()/abortStream().
1292 handle->drainCounter = 0;
1293 handle->internalDrain = false;
1294 stream_.state = STREAM_RUNNING;
1297 MUTEX_UNLOCK( &stream_.mutex );
1299 if ( result == noErr ) return;
1300 error( RtError::SYSTEM_ERROR );
// Stop the stream after letting queued output drain: blocks on the stream
// condition variable until callbackEvent() signals that draining finished.
1303 void RtApiCore :: stopStream( void )
1306 if ( stream_.state == STREAM_STOPPED ) {
1307 errorText_ = "RtApiCore::stopStream(): the stream is already stopped!";
1308 error( RtError::WARNING );
1312 MUTEX_LOCK( &stream_.mutex );
1314 OSStatus result = noErr;
1315 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
1316 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
// drainCounter == 0 means no drain has been requested yet: request one and
// wait for the audio callback to signal completion before stopping.
1318 if ( handle->drainCounter == 0 ) {
1319 handle->drainCounter = 1;
1320 pthread_cond_wait( &handle->condition, &stream_.mutex ); // block until signaled
1323 result = AudioDeviceStop( handle->id[0], callbackHandler );
1324 if ( result != noErr ) {
1325 errorStream_ << "RtApiCore::stopStream: system error (" << getErrorCode( result ) << ") stopping callback procedure on device (" << stream_.device[0] << ").";
1326 errorText_ = errorStream_.str();
// Stop the input device separately only when it differs from the output.
1331 if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1] ) ) {
1333 result = AudioDeviceStop( handle->id[1], callbackHandler );
1334 if ( result != noErr ) {
1335 errorStream_ << "RtApiCore::stopStream: system error (" << getErrorCode( result ) << ") stopping input callback procedure on device (" << stream_.device[1] << ").";
1336 errorText_ = errorStream_.str();
1342 MUTEX_UNLOCK( &stream_.mutex );
1344 stream_.state = STREAM_STOPPED;
1345 if ( result == noErr ) return;
1346 error( RtError::SYSTEM_ERROR );
// Stop the stream as soon as possible without draining queued output.
// Setting a non-zero drainCounter makes callbackEvent() stop invoking the
// user callback; zeros are written once the counter exceeds 1.
1349 void RtApiCore :: abortStream( void )
1352 if ( stream_.state == STREAM_STOPPED ) {
1353 errorText_ = "RtApiCore::abortStream(): the stream is already stopped!";
1354 error( RtError::WARNING );
1358 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
1359 handle->drainCounter = 1;
// Per-device ioProc work, invoked (via callbackHandler) for each CoreAudio
// device attached to the stream. Runs the user callback, converts/copies
// audio between user and CoreAudio buffers, and manages drain signaling.
1364 bool RtApiCore :: callbackEvent( AudioDeviceID deviceId,
1365 const AudioBufferList *inBufferList,
1366 const AudioBufferList *outBufferList )
1368 if ( stream_.state == STREAM_STOPPED ) return SUCCESS;
1369 if ( stream_.state == STREAM_CLOSED ) {
1370 errorText_ = "RtApiCore::callbackEvent(): the stream is closed ... this shouldn't happen!";
1371 error( RtError::WARNING );
1375 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
1376 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
1378 // Check if we were draining the stream and signal is finished.
// (drainCounter is incremented once per callback below, so > 3 gives the
// device a few zero-filled buffers before waking the stopStream() waiter.)
1379 if ( handle->drainCounter > 3 ) {
1380 if ( handle->internalDrain == false )
1381 pthread_cond_signal( &handle->condition );
1387 MUTEX_LOCK( &stream_.mutex );
1389 AudioDeviceID outputDevice = handle->id[0];
1391 // Invoke user callback to get fresh output data UNLESS we are
1392 // draining stream or duplex mode AND the input/output devices are
1393 // different AND this function is called for the input device.
1394 if ( handle->drainCounter == 0 && ( stream_.mode != DUPLEX || deviceId == outputDevice ) ) {
1395 RtAudioCallback callback = (RtAudioCallback) info->callback;
1396 double streamTime = getStreamTime();
1397 RtAudioStreamStatus status = 0;
// Report (and clear) any over/underload flags set by deviceListener.
1398 if ( stream_.mode != INPUT && handle->xrun[0] == true ) {
1399 status |= RTAUDIO_OUTPUT_UNDERFLOW;
1400 handle->xrun[0] = false;
1402 if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {
1403 status |= RTAUDIO_INPUT_OVERFLOW;
1404 handle->xrun[1] = false;
// RtAudio callback contract: return 0 = continue, 1 = drain then stop,
// 2 = abort immediately.
1406 handle->drainCounter = callback( stream_.userBuffer[0], stream_.userBuffer[1],
1407 stream_.bufferSize, streamTime, status, info->userData );
1408 if ( handle->drainCounter == 2 ) {
1409 MUTEX_UNLOCK( &stream_.mutex );
1413 else if ( handle->drainCounter == 1 )
1414 handle->internalDrain = true;
// ----- Output side: fill the CoreAudio output buffers. -----
1417 if ( stream_.mode == OUTPUT || ( stream_.mode == DUPLEX && deviceId == outputDevice ) ) {
1419 if ( handle->drainCounter > 1 ) { // write zeros to the output stream
1421 if ( stream_.deviceInterleaved[0] ) {
1422 memset( outBufferList->mBuffers[handle->iStream[0]].mData,
1424 outBufferList->mBuffers[handle->iStream[0]].mDataByteSize );
// Non-interleaved ("mono" mode): zero each single-channel stream buffer.
1427 for ( unsigned int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
1428 memset( outBufferList->mBuffers[handle->iStream[0]+i].mData,
1430 outBufferList->mBuffers[handle->iStream[0]+i].mDataByteSize );
1434 else if ( stream_.doConvertBuffer[0] ) {
// Convert directly into the CoreAudio buffer when interleaved; otherwise
// convert into our own device buffer and de-interleave afterwards.
1436 if ( stream_.deviceInterleaved[0] )
1437 stream_.deviceBuffer = (char *) outBufferList->mBuffers[handle->iStream[0]].mData;
1439 stream_.deviceBuffer = handle->deviceBuffer;
1441 convertBuffer( stream_.deviceBuffer, stream_.userBuffer[0], stream_.convertInfo[0] );
1443 if ( !stream_.deviceInterleaved[0] ) {
1444 UInt32 bufferBytes = outBufferList->mBuffers[handle->iStream[0]].mDataByteSize;
1445 for ( unsigned int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
1446 memcpy( outBufferList->mBuffers[handle->iStream[0]+i].mData,
1447 &stream_.deviceBuffer[i*bufferBytes], bufferBytes );
// No conversion needed: copy the user buffer straight across.
1453 if ( stream_.deviceInterleaved[0] ) {
1454 memcpy( outBufferList->mBuffers[handle->iStream[0]].mData,
1455 stream_.userBuffer[0],
1456 outBufferList->mBuffers[handle->iStream[0]].mDataByteSize );
1459 UInt32 bufferBytes = outBufferList->mBuffers[handle->iStream[0]].mDataByteSize;
1460 for ( unsigned int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
1461 memcpy( outBufferList->mBuffers[handle->iStream[0]+i].mData,
1462 &stream_.userBuffer[0][i*bufferBytes], bufferBytes );
// While draining, count callbacks so the > 3 test above eventually fires.
1467 if ( handle->drainCounter ) {
1468 handle->drainCounter++;
// ----- Input side: capture from the CoreAudio input buffers. -----
1473 AudioDeviceID inputDevice = handle->id[1];
1474 if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && deviceId == inputDevice ) ) {
1476 if ( stream_.doConvertBuffer[1] ) {
1478 if ( stream_.deviceInterleaved[1] )
1479 stream_.deviceBuffer = (char *) inBufferList->mBuffers[handle->iStream[1]].mData;
// Non-interleaved: gather the single-channel streams into our own device
// buffer before format conversion.
1481 stream_.deviceBuffer = (char *) handle->deviceBuffer;
1482 UInt32 bufferBytes = inBufferList->mBuffers[handle->iStream[1]].mDataByteSize;
1483 for ( unsigned int i=0; i<stream_.nDeviceChannels[1]; i++ ) {
1484 memcpy( &stream_.deviceBuffer[i*bufferBytes],
1485 inBufferList->mBuffers[handle->iStream[1]+i].mData, bufferBytes );
1489 convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
// No conversion needed: copy the CoreAudio buffer straight across.
1493 memcpy( stream_.userBuffer[1],
1494 inBufferList->mBuffers[handle->iStream[1]].mData,
1495 inBufferList->mBuffers[handle->iStream[1]].mDataByteSize );
1500 MUTEX_UNLOCK( &stream_.mutex );
1502 RtApi::tickStreamTime();
1506 const char* RtApiCore :: getErrorCode( OSStatus code )
1510 case kAudioHardwareNotRunningError:
1511 return "kAudioHardwareNotRunningError";
1513 case kAudioHardwareUnspecifiedError:
1514 return "kAudioHardwareUnspecifiedError";
1516 case kAudioHardwareUnknownPropertyError:
1517 return "kAudioHardwareUnknownPropertyError";
1519 case kAudioHardwareBadPropertySizeError:
1520 return "kAudioHardwareBadPropertySizeError";
1522 case kAudioHardwareIllegalOperationError:
1523 return "kAudioHardwareIllegalOperationError";
1525 case kAudioHardwareBadObjectError:
1526 return "kAudioHardwareBadObjectError";
1528 case kAudioHardwareBadDeviceError:
1529 return "kAudioHardwareBadDeviceError";
1531 case kAudioHardwareBadStreamError:
1532 return "kAudioHardwareBadStreamError";
1534 case kAudioHardwareUnsupportedOperationError:
1535 return "kAudioHardwareUnsupportedOperationError";
1537 case kAudioDeviceUnsupportedFormatError:
1538 return "kAudioDeviceUnsupportedFormatError";
1540 case kAudioDevicePermissionsError:
1541 return "kAudioDevicePermissionsError";
1544 return "CoreAudio unknown error";
1548 //******************** End of __MACOSX_CORE__ *********************//
1551 #if defined(__UNIX_JACK__)
1553 // JACK is a low-latency audio server, originally written for the
1554 // GNU/Linux operating system and now also ported to OS-X. It can
1555 // connect a number of different applications to an audio device, as
1556 // well as allowing them to share audio between themselves.
1558 // When using JACK with RtAudio, "devices" refer to JACK clients that
1559 // have ports connected to the server. The JACK server is typically
1560 // started in a terminal as follows:
1562 // .jackd -d alsa -d hw:0
1564 // or through an interface program such as qjackctl. Many of the
1565 // parameters normally set for a stream are fixed by the JACK server
1566 // and can be specified when the JACK server is started. In
1569 // .jackd -d alsa -d hw:0 -r 44100 -p 512 -n 4
1571 // specifies a sample rate of 44100 Hz, a buffer size of 512 sample
1572 // frames, and number of buffers = 4. Once the server is running, it
1573 // is not possible to override these values. If the values are not
1574 // specified in the command-line, the JACK server uses default values.
1576 // The JACK server does not have to be running when an instance of
1577 // RtApiJack is created, though the function getDeviceCount() will
1578 // report 0 devices found until JACK has been started. When no
1579 // devices are available (i.e., the JACK server is not running), a
1580 // stream cannot be opened.
1582 #include <jack/jack.h>
1585 // A structure to hold various information related to the Jack API
// implementation (one instance per open stream, kept in stream_.apiHandle).
1588 jack_client_t *client; // connection to the Jack server, shared by both directions
1589 jack_port_t **ports[2]; // registered ports; [0] = playback, [1] = capture
1590 std::string deviceName[2]; // Jack port-name prefix ("device") per direction
1592 pthread_cond_t condition; // signals stopStream() when draining completes
1593 int drainCounter; // Tracks callback counts when draining
1594 bool internalDrain; // Indicates if stop is initiated from callback or not.
// Default state: no client, not draining, no ports, no xruns recorded.
1597 :client(0), drainCounter(0), internalDrain(false) { ports[0] = 0; ports[1] = 0; xrun[0] = false; xrun[1] = false; }
// Default constructor: no per-stream state is created here; everything is
// set up lazily in probeDeviceOpen().
1600 RtApiJack :: RtApiJack()
1602 // Nothing to do here.
// Destructor: ensure any open stream is shut down and its resources freed.
1605 RtApiJack :: ~RtApiJack()
1607 if ( stream_.state != STREAM_CLOSED ) closeStream();
// Count Jack "devices": the number of distinct port-name prefixes (text
// before the first ':') among all ports registered with the Jack server.
// Returns 0 when no Jack server can be contacted.
1610 unsigned int RtApiJack :: getDeviceCount( void )
1612 // See if we can become a jack client.
// NOTE(review): jack_client_new() is deprecated in current Jack releases in
// favour of jack_client_open() — verify against the targeted Jack version.
1613 jack_client_t *client = jack_client_new( "RtApiJackCount" );
1614 if ( client == 0 ) return 0;
1617 std::string port, previousPort;
1618 unsigned int nChannels = 0, nDevices = 0;
1619 ports = jack_get_ports( client, NULL, NULL, 0 );
1621 // Parse the port names up to the first colon (:).
// NOTE(review): iColon should be size_t — std::string::find() returns
// size_t, and on 64-bit builds a truncated npos assigned to unsigned int
// no longer compares equal to std::string::npos.
1622 unsigned int iColon = 0;
1624 port = (char *) ports[ nChannels ];
1625 iColon = port.find(":");
1626 if ( iColon != std::string::npos ) {
// Keep the colon in the prefix; each new prefix is a new "device".
1627 port = port.substr( 0, iColon + 1 );
1628 if ( port != previousPort ) {
1630 previousPort = port;
1633 } while ( ports[++nChannels] );
1637 jack_client_close( client );
// Fill an RtAudio::DeviceInfo for the given Jack "device" (distinct
// port-name prefix): probes channel counts, the fixed server sample rate
// and the default-device flags.
1641 RtAudio::DeviceInfo RtApiJack :: getDeviceInfo( unsigned int device )
1643 RtAudio::DeviceInfo info;
1644 info.probed = false;
1646 jack_client_t *client = jack_client_new( "RtApiJackInfo" );
1647 if ( client == 0 ) {
1648 errorText_ = "RtApiJack::getDeviceInfo: Jack server not found or connection error!";
1649 error( RtError::WARNING );
1654 std::string port, previousPort;
1655 unsigned int nPorts = 0, nDevices = 0;
1656 ports = jack_get_ports( client, NULL, NULL, 0 );
1658 // Parse the port names up to the first colon (:).
// NOTE(review): iColon should be size_t (see getDeviceCount) — a truncated
// npos defeats the != npos test on 64-bit builds.
1659 unsigned int iColon = 0;
1661 port = (char *) ports[ nPorts ];
1662 iColon = port.find(":");
1663 if ( iColon != std::string::npos ) {
1664 port = port.substr( 0, iColon );
1665 if ( port != previousPort ) {
// The device-th distinct prefix is the one we are probing.
1666 if ( nDevices == device ) info.name = port;
1668 previousPort = port;
1671 } while ( ports[++nPorts] );
1675 if ( device >= nDevices ) {
1676 errorText_ = "RtApiJack::getDeviceInfo: device ID is invalid!";
1677 error( RtError::INVALID_USE );
1680 // Get the current jack server sample rate.
// The rate is fixed server-wide, so only a single rate is reported.
1681 info.sampleRates.clear();
1682 info.sampleRates.push_back( jack_get_sample_rate( client ) );
1684 // Count the available ports containing the client name as device
1685 // channels. Jack "input ports" equal RtAudio output channels.
1686 unsigned int nChannels = 0;
1687 ports = jack_get_ports( client, info.name.c_str(), NULL, JackPortIsInput );
1689 while ( ports[ nChannels ] ) nChannels++;
1691 info.outputChannels = nChannels;
1694 // Jack "output ports" equal RtAudio input channels.
1696 ports = jack_get_ports( client, info.name.c_str(), NULL, JackPortIsOutput );
1698 while ( ports[ nChannels ] ) nChannels++;
1700 info.inputChannels = nChannels;
1703 if ( info.outputChannels == 0 && info.inputChannels == 0 ) {
1704 jack_client_close(client);
1705 errorText_ = "RtApiJack::getDeviceInfo: error determining Jack input/output channels!";
1706 error( RtError::WARNING );
1710 // If device opens for both playback and capture, we determine the channels.
1711 if ( info.outputChannels > 0 && info.inputChannels > 0 )
1712 info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
1714 // Jack always uses 32-bit floats.
1715 info.nativeFormats = RTAUDIO_FLOAT32;
1717 // Jack doesn't provide default devices so we'll use the first available one.
1718 if ( device == 0 && info.outputChannels > 0 )
1719 info.isDefaultOutput = true;
1720 if ( device == 0 && info.inputChannels > 0 )
1721 info.isDefaultInput = true;
1723 jack_client_close(client);
// Process-thread trampoline registered via jack_set_process_callback():
// forwards each audio period to RtApiJack::callbackEvent(). A non-zero
// return tells the Jack server to remove this client from the graph.
1728 int jackCallbackHandler( jack_nframes_t nframes, void *infoPointer )
1730 CallbackInfo *info = (CallbackInfo *) infoPointer;
1732 RtApiJack *object = (RtApiJack *) info->object;
1733 if ( object->callbackEvent( (unsigned long) nframes ) == false ) return 1;
// Shutdown handler registered via jack_on_shutdown(): invoked when the Jack
// server disconnects this client (server exit or forced removal).
1738 void jackShutdown( void *infoPointer )
1740 CallbackInfo *info = (CallbackInfo *) infoPointer;
1741 RtApiJack *object = (RtApiJack *) info->object;
1743 // Check current stream state. If stopped, then we'll assume this
1744 // was called as a result of a call to RtApiJack::stopStream (the
1745 // deactivation of a client handle causes this function to be called).
1746 // If not, we'll assume the Jack server is shutting down or some
1747 // other problem occurred and we should close the stream.
1748 if ( object->isStreamRunning() == false ) return;
1750 object->closeStream();
1751 std::cerr << "\nRtApiJack: the Jack server is shutting down this client ... stream stopped and closed!!\n" << std::endl;
// Xrun handler registered via jack_set_xrun_callback(): records an
// over/underrun flag per direction for later reporting to the user callback.
// NOTE(review): this casts its argument to JackHandle*, but probeDeviceOpen()
// registers it with `(void *) &handle` — the address of a local JackHandle*
// (i.e. a JackHandle**). The registration should pass `(void *) handle`.
1754 int jackXrun( void *infoPointer )
1756 JackHandle *handle = (JackHandle *) infoPointer;
1758 if ( handle->ports[0] ) handle->xrun[0] = true;
1759 if ( handle->ports[1] ) handle->xrun[1] = true;
// Open one direction (OUTPUT or INPUT) of a Jack stream: connect to the
// server, resolve the requested "device" (port-name prefix), validate
// channel counts and the server-fixed sample rate, allocate buffers and
// the JackHandle, register callbacks and our ports.
1764 bool RtApiJack :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
1765 unsigned int firstChannel, unsigned int sampleRate,
1766 RtAudioFormat format, unsigned int *bufferSize,
1767 RtAudio::StreamOptions *options )
1769 JackHandle *handle = (JackHandle *) stream_.apiHandle;
1771 // Look for jack server and try to become a client (only do once per stream).
1772 jack_client_t *client = 0;
1773 if ( mode == OUTPUT || ( mode == INPUT && stream_.mode != OUTPUT ) ) {
1774 if ( options && !options->streamName.empty() )
1775 client = jack_client_new( options->streamName.c_str() );
1777 client = jack_client_new( "RtApiJack" );
1778 if ( client == 0 ) {
1779 errorText_ = "RtApiJack::probeDeviceOpen: Jack server not found or connection error!";
1780 error( RtError::WARNING );
1785 // The handle must have been created on an earlier pass.
1786 client = handle->client;
1790 std::string port, previousPort, deviceName;
1791 unsigned int nPorts = 0, nDevices = 0;
1792 ports = jack_get_ports( client, NULL, NULL, 0 );
1794 // Parse the port names up to the first colon (:).
// NOTE(review): iColon should be size_t (see getDeviceCount) — a truncated
// npos defeats the != npos test on 64-bit builds.
1795 unsigned int iColon = 0;
1797 port = (char *) ports[ nPorts ];
1798 iColon = port.find(":");
1799 if ( iColon != std::string::npos ) {
1800 port = port.substr( 0, iColon );
1801 if ( port != previousPort ) {
1802 if ( nDevices == device ) deviceName = port;
1804 previousPort = port;
1807 } while ( ports[++nPorts] );
1811 if ( device >= nDevices ) {
1812 errorText_ = "RtApiJack::probeDeviceOpen: device ID is invalid!";
1816 // Count the available ports containing the client name as device
1817 // channels. Jack "input ports" equal RtAudio output channels.
1818 unsigned int nChannels = 0;
1819 unsigned long flag = JackPortIsInput;
1820 if ( mode == INPUT ) flag = JackPortIsOutput;
1821 ports = jack_get_ports( client, deviceName.c_str(), NULL, flag );
1823 while ( ports[ nChannels ] ) nChannels++;
1827 // Compare the jack ports for specified client to the requested number of channels.
1828 if ( nChannels < (channels + firstChannel) ) {
1829 errorStream_ << "RtApiJack::probeDeviceOpen: requested number of channels (" << channels << ") + offset (" << firstChannel << ") not found for specified device (" << device << ":" << deviceName << ").";
1830 errorText_ = errorStream_.str();
1834 // Check the jack server sample rate.
// Jack fixes the rate when the server is started; RtAudio cannot resample.
1835 unsigned int jackRate = jack_get_sample_rate( client );
1836 if ( sampleRate != jackRate ) {
1837 jack_client_close( client );
1838 errorStream_ << "RtApiJack::probeDeviceOpen: the requested sample rate (" << sampleRate << ") is different than the JACK server rate (" << jackRate << ").";
1839 errorText_ = errorStream_.str();
1842 stream_.sampleRate = jackRate;
1844 // Get the latency of the JACK port.
1845 ports = jack_get_ports( client, deviceName.c_str(), NULL, flag );
1846 if ( ports[ firstChannel ] )
1847 stream_.latency[mode] = jack_port_get_latency( jack_port_by_name( client, ports[ firstChannel ] ) );
1850 // The jack server always uses 32-bit floating-point data.
1851 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
1852 stream_.userFormat = format;
1854 if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;
1855 else stream_.userInterleaved = true;
1857 // Jack always uses non-interleaved buffers.
1858 stream_.deviceInterleaved[mode] = false;
1860 // Jack always provides host byte-ordered data.
1861 stream_.doByteSwap[mode] = false;
1863 // Get the buffer size. The buffer size and number of buffers
1864 // (periods) is set when the jack server is started.
1865 stream_.bufferSize = (int) jack_get_buffer_size( client );
1866 *bufferSize = stream_.bufferSize;
1868 stream_.nDeviceChannels[mode] = channels;
1869 stream_.nUserChannels[mode] = channels;
1871 // Set flags for buffer conversion.
1872 stream_.doConvertBuffer[mode] = false;
1873 if ( stream_.userFormat != stream_.deviceFormat[mode] )
1874 stream_.doConvertBuffer[mode] = true;
1875 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
1876 stream_.nUserChannels[mode] > 1 )
1877 stream_.doConvertBuffer[mode] = true;
1879 // Allocate our JackHandle structure for the stream.
1880 if ( handle == 0 ) {
1882 handle = new JackHandle;
1884 catch ( std::bad_alloc& ) {
1885 errorText_ = "RtApiJack::probeDeviceOpen: error allocating JackHandle memory.";
1889 if ( pthread_cond_init(&handle->condition, NULL) ) {
1890 errorText_ = "RtApiJack::probeDeviceOpen: error initializing pthread condition variable.";
1893 stream_.apiHandle = (void *) handle;
1894 handle->client = client;
1896 handle->deviceName[mode] = deviceName;
1898 // Allocate necessary internal buffers.
1899 unsigned long bufferBytes;
1900 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
1901 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
1902 if ( stream_.userBuffer[mode] == NULL ) {
1903 errorText_ = "RtApiJack::probeDeviceOpen: error allocating user buffer memory.";
1907 if ( stream_.doConvertBuffer[mode] ) {
// For duplex, reuse the output device buffer when it is large enough.
1909 bool makeBuffer = true;
1910 if ( mode == OUTPUT )
1911 bufferBytes = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
1912 else { // mode == INPUT
1913 bufferBytes = stream_.nDeviceChannels[1] * formatBytes( stream_.deviceFormat[1] );
1914 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
1915 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
1916 if ( bufferBytes < bytesOut ) makeBuffer = false;
1921 bufferBytes *= *bufferSize;
1922 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
1923 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
1924 if ( stream_.deviceBuffer == NULL ) {
1925 errorText_ = "RtApiJack::probeDeviceOpen: error allocating device buffer memory.";
1931 // Allocate memory for the Jack ports (channels) identifiers.
1932 handle->ports[mode] = (jack_port_t **) malloc ( sizeof (jack_port_t *) * channels );
1933 if ( handle->ports[mode] == NULL ) {
1934 errorText_ = "RtApiJack::probeDeviceOpen: error allocating port memory.";
1938 stream_.device[mode] = device;
1939 stream_.channelOffset[mode] = firstChannel;
1940 stream_.state = STREAM_STOPPED;
1941 stream_.callbackInfo.object = (void *) this;
1943 if ( stream_.mode == OUTPUT && mode == INPUT )
1944 // We had already set up the stream for output.
1945 stream_.mode = DUPLEX;
// First pass: register the per-period, xrun and shutdown callbacks.
1947 stream_.mode = mode;
1948 jack_set_process_callback( handle->client, jackCallbackHandler, (void *) &stream_.callbackInfo );
// BUG(review): `&handle` is the address of the local JackHandle* variable
// (a JackHandle**), but jackXrun() casts its argument directly to
// JackHandle*. This should be `(void *) handle` (fixed in later RtAudio
// releases); as written the xrun flags are never set correctly.
1949 jack_set_xrun_callback( handle->client, jackXrun, (void *) &handle );
1950 jack_on_shutdown( handle->client, jackShutdown, (void *) &stream_.callbackInfo );
1953 // Register our ports.
1955 if ( mode == OUTPUT ) {
1956 for ( unsigned int i=0; i<stream_.nUserChannels[0]; i++ ) {
1957 snprintf( label, 64, "outport %d", i );
1958 handle->ports[0][i] = jack_port_register( handle->client, (const char *)label,
1959 JACK_DEFAULT_AUDIO_TYPE, JackPortIsOutput, 0 );
1963 for ( unsigned int i=0; i<stream_.nUserChannels[1]; i++ ) {
1964 snprintf( label, 64, "inport %d", i );
1965 handle->ports[1][i] = jack_port_register( handle->client, (const char *)label,
1966 JACK_DEFAULT_AUDIO_TYPE, JackPortIsInput, 0 );
1970 // Setup the buffer conversion information structure. We don't use
1971 // buffers to do channel offsets, so we override that parameter
// here (the offset is handled through the Jack port selection instead).
1973 if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, 0 );
// Error cleanup path: release the condition variable, client, port arrays
// and any allocated buffers.
1979 pthread_cond_destroy( &handle->condition );
1980 jack_client_close( handle->client );
1982 if ( handle->ports[0] ) free( handle->ports[0] );
1983 if ( handle->ports[1] ) free( handle->ports[1] );
1986 stream_.apiHandle = 0;
1989 for ( int i=0; i<2; i++ ) {
1990 if ( stream_.userBuffer[i] ) {
1991 free( stream_.userBuffer[i] );
1992 stream_.userBuffer[i] = 0;
1996 if ( stream_.deviceBuffer ) {
1997 free( stream_.deviceBuffer );
1998 stream_.deviceBuffer = 0;
// Close the JACK stream: deactivate and close the client, release the
// jack_port_t* arrays allocated in probeDeviceOpen(), free user/device
// buffers, and reset the stream bookkeeping to CLOSED.
// NOTE(review): some original lines (braces/returns) are missing from
// this extract; code text is left untouched.
2004 void RtApiJack :: closeStream( void )
2006 if ( stream_.state == STREAM_CLOSED ) {
2007 errorText_ = "RtApiJack::closeStream(): no open stream to close!";
2008 error( RtError::WARNING );
2012 JackHandle *handle = (JackHandle *) stream_.apiHandle;
// Deactivate first so the process callback stops before the client is closed.
2015 if ( stream_.state == STREAM_RUNNING )
2016 jack_deactivate( handle->client );
2018 jack_client_close( handle->client );
// Free the per-direction port-identifier arrays (0 = output, 1 = input).
2022 if ( handle->ports[0] ) free( handle->ports[0] );
2023 if ( handle->ports[1] ) free( handle->ports[1] );
2024 pthread_cond_destroy( &handle->condition );
2026 stream_.apiHandle = 0;
// Release the per-direction user buffers and the shared device buffer.
2029 for ( int i=0; i<2; i++ ) {
2030 if ( stream_.userBuffer[i] ) {
2031 free( stream_.userBuffer[i] );
2032 stream_.userBuffer[i] = 0;
2036 if ( stream_.deviceBuffer ) {
2037 free( stream_.deviceBuffer );
2038 stream_.deviceBuffer = 0;
2041 stream_.mode = UNINITIALIZED;
2042 stream_.state = STREAM_CLOSED;
// Start the JACK stream: activate the client, then connect our
// registered ports to the device's ports (output ports to the device's
// JackPortIsInput ports and vice versa), offset by channelOffset.
// On success the stream state becomes RUNNING; on failure a
// SYSTEM_ERROR is raised after unlocking the mutex.
2045 void RtApiJack :: startStream( void )
2048 if ( stream_.state == STREAM_RUNNING ) {
2049 errorText_ = "RtApiJack::startStream(): the stream is already running!";
2050 error( RtError::WARNING );
2054 MUTEX_LOCK(&stream_.mutex);
2056 JackHandle *handle = (JackHandle *) stream_.apiHandle;
2057 int result = jack_activate( handle->client );
2059 errorText_ = "RtApiJack::startStream(): unable to activate JACK client!";
2065 // Get the list of available ports.
2066 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
// Our outputs connect to the device's *input* ports.
2068 ports = jack_get_ports( handle->client, handle->deviceName[0].c_str(), NULL, JackPortIsInput);
2069 if ( ports == NULL) {
2070 errorText_ = "RtApiJack::startStream(): error determining available JACK input ports!";
2074 // Now make the port connections. Since RtAudio wasn't designed to
2075 // allow the user to select particular channels of a device, we'll
2076 // just open the first "nChannels" ports with offset.
2077 for ( unsigned int i=0; i<stream_.nUserChannels[0]; i++ ) {
2079 if ( ports[ stream_.channelOffset[0] + i ] )
2080 result = jack_connect( handle->client, jack_port_name( handle->ports[0][i] ), ports[ stream_.channelOffset[0] + i ] );
2083 errorText_ = "RtApiJack::startStream(): error connecting output ports!";
2090 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
// Our inputs connect to the device's *output* (capture) ports.
2092 ports = jack_get_ports( handle->client, handle->deviceName[1].c_str(), NULL, JackPortIsOutput );
2093 if ( ports == NULL) {
2094 errorText_ = "RtApiJack::startStream(): error determining available JACK output ports!";
2098 // Now make the port connections. See note above.
2099 for ( unsigned int i=0; i<stream_.nUserChannels[1]; i++ ) {
2101 if ( ports[ stream_.channelOffset[1] + i ] )
2102 result = jack_connect( handle->client, ports[ stream_.channelOffset[1] + i ], jack_port_name( handle->ports[1][i] ) );
2105 errorText_ = "RtApiJack::startStream(): error connecting input ports!";
// Reset drain state so a previous stop/drain doesn't carry over.
2112 handle->drainCounter = 0;
2113 handle->internalDrain = false;
2114 stream_.state = STREAM_RUNNING;
2117 MUTEX_UNLOCK(&stream_.mutex);
2119 if ( result == 0 ) return;
2120 error( RtError::SYSTEM_ERROR );
// Stop the JACK stream. For output/duplex streams, first request a
// drain (drainCounter = 1) and block on the condition variable until
// the process callback signals that pending output has played out,
// then deactivate the client and mark the stream STOPPED.
2123 void RtApiJack :: stopStream( void )
2126 if ( stream_.state == STREAM_STOPPED ) {
2127 errorText_ = "RtApiJack::stopStream(): the stream is already stopped!";
2128 error( RtError::WARNING );
2132 MUTEX_LOCK( &stream_.mutex );
2134 JackHandle *handle = (JackHandle *) stream_.apiHandle;
2135 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
2137 if ( handle->drainCounter == 0 ) {
2138 handle->drainCounter = 1;
// pthread_cond_wait atomically releases stream_.mutex while waiting,
// letting callbackEvent() run and signal completion.
2139 pthread_cond_wait( &handle->condition, &stream_.mutex ); // block until signaled
2143 jack_deactivate( handle->client );
2144 stream_.state = STREAM_STOPPED;
2146 MUTEX_UNLOCK( &stream_.mutex );
// Abort the JACK stream: set drainCounter directly so the callback
// writes silence instead of draining pending output, then stop.
// (The call into stopStream() presumably follows in lines missing
// from this extract -- confirm against the full source.)
2149 void RtApiJack :: abortStream( void )
2152 if ( stream_.state == STREAM_STOPPED ) {
2153 errorText_ = "RtApiJack::abortStream(): the stream is already stopped!";
2154 error( RtError::WARNING );
2158 JackHandle *handle = (JackHandle *) stream_.apiHandle;
2159 handle->drainCounter = 1;
// JACK process callback (invoked from jackCallbackHandler with the
// period size in frames). Responsibilities:
//   - report xrun status and invoke the user callback for fresh data,
//   - copy/convert between user buffers and the per-channel JACK port
//     buffers (JACK ports are always non-interleaved float),
//   - manage the drain handshake used by stopStream().
// Returns SUCCESS to keep the stream running.
// FIX(review): the two error messages below wrongly said
// "RtApiCore::callbackEvent()" -- a copy-paste from the CoreAudio
// section; corrected to RtApiJack.
2164 bool RtApiJack :: callbackEvent( unsigned long nframes )
2166 if ( stream_.state == STREAM_STOPPED ) return SUCCESS;
2167 if ( stream_.state == STREAM_CLOSED ) {
2168 errorText_ = "RtApiJack::callbackEvent(): the stream is closed ... this shouldn't happen!";
2169 error( RtError::WARNING );
// The stream was opened for a fixed buffer size; bail if JACK changed it.
2172 if ( stream_.bufferSize != nframes ) {
2173 errorText_ = "RtApiJack::callbackEvent(): the JACK buffer size has changed ... cannot process!";
2174 error( RtError::WARNING );
2178 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
2179 JackHandle *handle = (JackHandle *) stream_.apiHandle;
2181 // Check if we were draining the stream and signal is finished.
2182 if ( handle->drainCounter > 3 ) {
// Only signal when the drain was requested externally (stopStream()).
2183 if ( handle->internalDrain == false )
2184 pthread_cond_signal( &handle->condition );
2190 MUTEX_LOCK( &stream_.mutex );
2192 // Invoke user callback first, to get fresh output data.
2193 if ( handle->drainCounter == 0 ) {
2194 RtAudioCallback callback = (RtAudioCallback) info->callback;
2195 double streamTime = getStreamTime();
2196 RtAudioStreamStatus status = 0;
// Report (and clear) any xrun flagged by jackXrun for each direction.
2197 if ( stream_.mode != INPUT && handle->xrun[0] == true ) {
2198 status |= RTAUDIO_OUTPUT_UNDERFLOW;
2199 handle->xrun[0] = false;
2201 if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {
2202 status |= RTAUDIO_INPUT_OVERFLOW;
2203 handle->xrun[1] = false;
// User return value: 0 = continue, 1 = drain then stop, 2 = abort now.
2205 handle->drainCounter = callback( stream_.userBuffer[0], stream_.userBuffer[1],
2206 stream_.bufferSize, streamTime, status, info->userData );
2207 if ( handle->drainCounter == 2 ) {
2208 MUTEX_UNLOCK( &stream_.mutex );
2212 else if ( handle->drainCounter == 1 )
2213 handle->internalDrain = true;
2216 jack_default_audio_sample_t *jackbuffer;
2217 unsigned long bufferBytes = nframes * sizeof( jack_default_audio_sample_t );
2218 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
2220 if ( handle->drainCounter > 0 ) { // write zeros to the output stream
2222 for ( unsigned int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
2223 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[0][i], (jack_nframes_t) nframes );
2224 memset( jackbuffer, 0, bufferBytes );
2228 else if ( stream_.doConvertBuffer[0] ) {
// Convert user format/interleaving into the device buffer, then
// scatter each channel into its JACK port buffer.
2230 convertBuffer( stream_.deviceBuffer, stream_.userBuffer[0], stream_.convertInfo[0] );
2232 for ( unsigned int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
2233 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[0][i], (jack_nframes_t) nframes );
2234 memcpy( jackbuffer, &stream_.deviceBuffer[i*bufferBytes], bufferBytes );
2237 else { // no buffer conversion
2238 for ( unsigned int i=0; i<stream_.nUserChannels[0]; i++ ) {
2239 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[0][i], (jack_nframes_t) nframes );
2240 memcpy( jackbuffer, &stream_.userBuffer[0][i*bufferBytes], bufferBytes );
// Count drain callbacks so the > 3 check above eventually fires.
2244 if ( handle->drainCounter ) {
2245 handle->drainCounter++;
2250 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
2252 if ( stream_.doConvertBuffer[1] ) {
// Gather each JACK port buffer into the device buffer, then convert
// to the user's format/interleaving.
2253 for ( unsigned int i=0; i<stream_.nDeviceChannels[1]; i++ ) {
2254 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[1][i], (jack_nframes_t) nframes );
2255 memcpy( &stream_.deviceBuffer[i*bufferBytes], jackbuffer, bufferBytes );
2257 convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
2259 else { // no buffer conversion
2260 for ( unsigned int i=0; i<stream_.nUserChannels[1]; i++ ) {
2261 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[1][i], (jack_nframes_t) nframes );
2262 memcpy( &stream_.userBuffer[1][i*bufferBytes], jackbuffer, bufferBytes );
2268 MUTEX_UNLOCK(&stream_.mutex);
2270 RtApi::tickStreamTime();
2273 //******************** End of __UNIX_JACK__ *********************//
2276 #if defined(__WINDOWS_ASIO__) // ASIO API on Windows
2278 // The ASIO API is designed around a callback scheme, so this
2279 // implementation is similar to that used for OS-X CoreAudio and Linux
2280 // Jack. The primary constraint with ASIO is that it only allows
2281 // access to a single driver at a time. Thus, it is not possible to
2282 // have more than one simultaneous RtAudio stream.
2284 // This implementation also requires a number of external ASIO files
2285 // and a few global variables. The ASIO callback scheme does not
2286 // allow for the passing of user data, so we must create a global
2287 // pointer to our callbackInfo structure.
2289 // On unix systems, we make use of a pthread condition variable.
2290 // Since there is no equivalent in Windows, I hacked something based
2291 // on information found in
2292 // http://www.cs.wustl.edu/~schmidt/win32-cv-1.html.
2294 #include "asiosys.h"
2296 #include "iasiothiscallresolver.h"
2297 #include "asiodrivers.h"
// File-scope ASIO state. The ASIO callback API passes no user-data
// pointer, so the driver list, callback table, driver info, and the
// active stream's CallbackInfo must live at global scope.
2300 AsioDrivers drivers;
2301 ASIOCallbacks asioCallbacks;
2302 ASIODriverInfo driverInfo;
2303 CallbackInfo *asioCallbackInfo;
2307 int drainCounter; // Tracks callback counts when draining
2308 bool internalDrain; // Indicates if stop is initiated from callback or not.
2309 ASIOBufferInfo *bufferInfos;
2313 :drainCounter(0), internalDrain(false), bufferInfos(0) {}
2316 // Function declarations (definitions at end of section)
2317 static const char* getAsioErrorString( ASIOError result );
2318 void sampleRateChanged( ASIOSampleRate sRate );
2319 long asioMessages( long selector, long value, void* message, double* opt );
// Constructor: initialize COM (apartment threading required by ASIO),
// clear any currently-loaded driver, and prepare driverInfo for
// ASIOInit() calls made during probing.
2321 RtApiAsio :: RtApiAsio()
2323 // ASIO cannot run on a multi-threaded apartment. You can call
2324 // CoInitialize beforehand, but it must be for apartment threading
2325 // (in which case, CoInitialize will return S_FALSE here).
2326 coInitialized_ = false;
2327 HRESULT hr = CoInitialize( NULL );
2329 errorText_ = "RtApiAsio::ASIO requires a single-threaded appartment. Call CoInitializeEx(0,COINIT_APARTMENTTHREADED)";
2330 error( RtError::WARNING );
// Remember that we own the CoInitialize so the destructor can balance it.
2332 coInitialized_ = true;
2334 drivers.removeCurrentDriver();
2335 driverInfo.asioVersion = 2;
2337 // See note in DirectSound implementation about GetDesktopWindow().
2338 driverInfo.sysRef = GetForegroundWindow();
// Destructor: close any open stream and balance the constructor's
// CoInitialize only if it succeeded there.
2341 RtApiAsio :: ~RtApiAsio()
2343 if ( stream_.state != STREAM_CLOSED ) closeStream();
2344 if ( coInitialized_ ) CoUninitialize();
// Return the number of installed ASIO drivers (each driver == one device).
2347 unsigned int RtApiAsio :: getDeviceCount( void )
2349 return (unsigned int) drivers.asioGetNumDev();
// Probe one ASIO device: load its driver, query channel counts,
// supported sample rates, and native data format, then unload the
// driver. If a stream is already open (ASIO allows only one active
// driver), return the results cached by saveDeviceInfo() instead.
// Returns a DeviceInfo with probed == false on any failure path.
2352 RtAudio::DeviceInfo RtApiAsio :: getDeviceInfo( unsigned int device )
2354 RtAudio::DeviceInfo info;
2355 info.probed = false;
2358 unsigned int nDevices = getDeviceCount();
2359 if ( nDevices == 0 ) {
2360 errorText_ = "RtApiAsio::getDeviceInfo: no devices found!";
2361 error( RtError::INVALID_USE );
2364 if ( device >= nDevices ) {
2365 errorText_ = "RtApiAsio::getDeviceInfo: device ID is invalid!";
2366 error( RtError::INVALID_USE );
2369 // If a stream is already open, we cannot probe other devices. Thus, use the saved results.
2370 if ( stream_.state != STREAM_CLOSED ) {
2371 if ( device >= devices_.size() ) {
2372 errorText_ = "RtApiAsio::getDeviceInfo: device ID was not present before stream was opened.";
2373 error( RtError::WARNING );
2376 return devices_[ device ];
2379 char driverName[32];
2380 ASIOError result = drivers.asioGetDriverName( (int) device, driverName, 32 );
2381 if ( result != ASE_OK ) {
2382 errorStream_ << "RtApiAsio::getDeviceInfo: unable to get driver name (" << getAsioErrorString( result ) << ").";
2383 errorText_ = errorStream_.str();
2384 error( RtError::WARNING );
2388 info.name = driverName;
2390 if ( !drivers.loadDriver( driverName ) ) {
2391 errorStream_ << "RtApiAsio::getDeviceInfo: unable to load driver (" << driverName << ").";
2392 errorText_ = errorStream_.str();
2393 error( RtError::WARNING );
2397 result = ASIOInit( &driverInfo );
2398 if ( result != ASE_OK ) {
2399 errorStream_ << "RtApiAsio::getDeviceInfo: error (" << getAsioErrorString( result ) << ") initializing driver (" << driverName << ").";
2400 errorText_ = errorStream_.str();
2401 error( RtError::WARNING );
2405 // Determine the device channel information.
2406 long inputChannels, outputChannels;
2407 result = ASIOGetChannels( &inputChannels, &outputChannels );
2408 if ( result != ASE_OK ) {
2409 drivers.removeCurrentDriver();
2410 errorStream_ << "RtApiAsio::getDeviceInfo: error (" << getAsioErrorString( result ) << ") getting channel count (" << driverName << ").";
2411 errorText_ = errorStream_.str();
2412 error( RtError::WARNING );
2416 info.outputChannels = outputChannels;
2417 info.inputChannels = inputChannels;
// Duplex capacity is limited by the smaller of the two directions.
2418 if ( info.outputChannels > 0 && info.inputChannels > 0 )
2419 info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
2421 // Determine the supported sample rates.
2422 info.sampleRates.clear();
2423 for ( unsigned int i=0; i<MAX_SAMPLE_RATES; i++ ) {
2424 result = ASIOCanSampleRate( (ASIOSampleRate) SAMPLE_RATES[i] );
2425 if ( result == ASE_OK )
2426 info.sampleRates.push_back( SAMPLE_RATES[i] );
2429 // Determine supported data types ... just check first channel and assume rest are the same.
2430 ASIOChannelInfo channelInfo;
2431 channelInfo.channel = 0;
2432 channelInfo.isInput = true;
2433 if ( info.inputChannels <= 0 ) channelInfo.isInput = false;
2434 result = ASIOGetChannelInfo( &channelInfo );
2435 if ( result != ASE_OK ) {
2436 drivers.removeCurrentDriver();
2437 errorStream_ << "RtApiAsio::getDeviceInfo: error (" << getAsioErrorString( result ) << ") getting driver channel info (" << driverName << ").";
2438 errorText_ = errorStream_.str();
2439 error( RtError::WARNING );
// Map the ASIO sample type to the RtAudio format flags; both byte
// orders of each width map to the same RtAudio format here.
2443 info.nativeFormats = 0;
2444 if ( channelInfo.type == ASIOSTInt16MSB || channelInfo.type == ASIOSTInt16LSB )
2445 info.nativeFormats |= RTAUDIO_SINT16;
2446 else if ( channelInfo.type == ASIOSTInt32MSB || channelInfo.type == ASIOSTInt32LSB )
2447 info.nativeFormats |= RTAUDIO_SINT32;
2448 else if ( channelInfo.type == ASIOSTFloat32MSB || channelInfo.type == ASIOSTFloat32LSB )
2449 info.nativeFormats |= RTAUDIO_FLOAT32;
2450 else if ( channelInfo.type == ASIOSTFloat64MSB || channelInfo.type == ASIOSTFloat64LSB )
2451 info.nativeFormats |= RTAUDIO_FLOAT64;
2453 if ( getDefaultOutputDevice() == device )
2454 info.isDefaultOutput = true;
2455 if ( getDefaultInputDevice() == device )
2456 info.isDefaultInput = true;
// Unload the driver so another device can be probed or opened.
2459 drivers.removeCurrentDriver();
// ASIO buffer-switch callback trampoline: the ASIO API supplies no
// user data, so recover the RtApiAsio object through the global
// asioCallbackInfo and forward the half-buffer index to it.
2463 void bufferSwitch( long index, ASIOBool processNow )
2465 RtApiAsio *object = (RtApiAsio *) asioCallbackInfo->object;
2466 object->callbackEvent( index );
// Cache DeviceInfo for every device. Called before opening a stream
// because getDeviceInfo() cannot probe while an ASIO driver is active;
// it serves the cached devices_ entries instead.
2469 void RtApiAsio :: saveDeviceInfo( void )
2473 unsigned int nDevices = getDeviceCount();
2474 devices_.resize( nDevices );
2475 for ( unsigned int i=0; i<nDevices; i++ )
2476 devices_[i] = getDeviceInfo( i );
// Open (or extend to duplex) an ASIO stream on the given device:
// load/init the driver, validate channel count and sample rate, read
// the driver's native format, negotiate the buffer size, allocate the
// AsioHandle + ASIOBufferInfo array, create the ASIO buffers, and set
// up conversion/user/device buffers. Error paths fall through to the
// cleanup tail at the bottom.
// FIX(review): the ASIOGetSampleRate() argument had been mangled by an
// HTML-entity corruption ("&curren" -> "¤"), producing the
// non-compiling token `¤tRate`; restored to `&currentRate` to
// match the declaration on the preceding line.
2479 bool RtApiAsio :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
2480 unsigned int firstChannel, unsigned int sampleRate,
2481 RtAudioFormat format, unsigned int *bufferSize,
2482 RtAudio::StreamOptions *options )
2484 // For ASIO, a duplex stream MUST use the same driver.
2485 if ( mode == INPUT && stream_.mode == OUTPUT && stream_.device[0] != device ) {
2486 errorText_ = "RtApiAsio::probeDeviceOpen: an ASIO duplex stream must use the same device for input and output!";
2490 char driverName[32];
2491 ASIOError result = drivers.asioGetDriverName( (int) device, driverName, 32 );
2492 if ( result != ASE_OK ) {
2493 errorStream_ << "RtApiAsio::probeDeviceOpen: unable to get driver name (" << getAsioErrorString( result ) << ").";
2494 errorText_ = errorStream_.str();
2498 // The getDeviceInfo() function will not work when a stream is open
2499 // because ASIO does not allow multiple devices to run at the same
2500 // time. Thus, we'll probe the system before opening a stream and
2501 // save the results for use by getDeviceInfo().
2502 this->saveDeviceInfo();
2504 // Only load the driver once for duplex stream.
2505 if ( mode != INPUT || stream_.mode != OUTPUT ) {
2506 if ( !drivers.loadDriver( driverName ) ) {
2507 errorStream_ << "RtApiAsio::probeDeviceOpen: unable to load driver (" << driverName << ").";
2508 errorText_ = errorStream_.str();
2512 result = ASIOInit( &driverInfo );
2513 if ( result != ASE_OK ) {
2514 errorStream_ << "RtApiAsio::probeDeviceOpen: error (" << getAsioErrorString( result ) << ") initializing driver (" << driverName << ").";
2515 errorText_ = errorStream_.str();
2520 // Check the device channel count.
2521 long inputChannels, outputChannels;
2522 result = ASIOGetChannels( &inputChannels, &outputChannels );
2523 if ( result != ASE_OK ) {
2524 drivers.removeCurrentDriver();
2525 errorStream_ << "RtApiAsio::probeDeviceOpen: error (" << getAsioErrorString( result ) << ") getting channel count (" << driverName << ").";
2526 errorText_ = errorStream_.str();
2530 if ( ( mode == OUTPUT && (channels+firstChannel) > (unsigned int) outputChannels) ||
2531 ( mode == INPUT && (channels+firstChannel) > (unsigned int) inputChannels) ) {
2532 drivers.removeCurrentDriver();
2533 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") does not support requested channel count (" << channels << ") + offset (" << firstChannel << ").";
2534 errorText_ = errorStream_.str();
2537 stream_.nDeviceChannels[mode] = channels;
2538 stream_.nUserChannels[mode] = channels;
2539 stream_.channelOffset[mode] = firstChannel;
2541 // Verify the sample rate is supported.
2542 result = ASIOCanSampleRate( (ASIOSampleRate) sampleRate );
2543 if ( result != ASE_OK ) {
2544 drivers.removeCurrentDriver();
2545 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") does not support requested sample rate (" << sampleRate << ").";
2546 errorText_ = errorStream_.str();
2550 // Get the current sample rate
2551 ASIOSampleRate currentRate;
2552 result = ASIOGetSampleRate( &currentRate );
2553 if ( result != ASE_OK ) {
2554 drivers.removeCurrentDriver();
2555 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error getting sample rate.";
2556 errorText_ = errorStream_.str();
2560 // Set the sample rate only if necessary
2561 if ( currentRate != sampleRate ) {
2562 result = ASIOSetSampleRate( (ASIOSampleRate) sampleRate );
2563 if ( result != ASE_OK ) {
2564 drivers.removeCurrentDriver();
2565 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error setting sample rate (" << sampleRate << ").";
2566 errorText_ = errorStream_.str();
2571 // Determine the driver data type.
2572 ASIOChannelInfo channelInfo;
2573 channelInfo.channel = 0;
2574 if ( mode == OUTPUT ) channelInfo.isInput = false;
2575 else channelInfo.isInput = true;
2576 result = ASIOGetChannelInfo( &channelInfo );
2577 if ( result != ASE_OK ) {
2578 drivers.removeCurrentDriver();
2579 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") getting data format.";
2580 errorText_ = errorStream_.str();
2584 // Assuming WINDOWS host is always little-endian.
2585 stream_.doByteSwap[mode] = false;
2586 stream_.userFormat = format;
2587 stream_.deviceFormat[mode] = 0;
2588 if ( channelInfo.type == ASIOSTInt16MSB || channelInfo.type == ASIOSTInt16LSB ) {
2589 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
2590 if ( channelInfo.type == ASIOSTInt16MSB ) stream_.doByteSwap[mode] = true;
2592 else if ( channelInfo.type == ASIOSTInt32MSB || channelInfo.type == ASIOSTInt32LSB ) {
2593 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
2594 if ( channelInfo.type == ASIOSTInt32MSB ) stream_.doByteSwap[mode] = true;
2596 else if ( channelInfo.type == ASIOSTFloat32MSB || channelInfo.type == ASIOSTFloat32LSB ) {
2597 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
2598 if ( channelInfo.type == ASIOSTFloat32MSB ) stream_.doByteSwap[mode] = true;
2600 else if ( channelInfo.type == ASIOSTFloat64MSB || channelInfo.type == ASIOSTFloat64LSB ) {
2601 stream_.deviceFormat[mode] = RTAUDIO_FLOAT64;
2602 if ( channelInfo.type == ASIOSTFloat64MSB ) stream_.doByteSwap[mode] = true;
2605 if ( stream_.deviceFormat[mode] == 0 ) {
2606 drivers.removeCurrentDriver();
2607 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") data format not supported by RtAudio.";
2608 errorText_ = errorStream_.str();
2612 // Set the buffer size. For a duplex stream, this will end up
2613 // setting the buffer size based on the input constraints, which
2615 long minSize, maxSize, preferSize, granularity;
2616 result = ASIOGetBufferSize( &minSize, &maxSize, &preferSize, &granularity );
2617 if ( result != ASE_OK ) {
2618 drivers.removeCurrentDriver();
2619 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") getting buffer size.";
2620 errorText_ = errorStream_.str();
// Clamp the requested size into [minSize, maxSize]; granularity == -1
// means the driver only accepts power-of-two sizes.
2624 if ( *bufferSize < (unsigned int) minSize ) *bufferSize = (unsigned int) minSize;
2625 else if ( *bufferSize > (unsigned int) maxSize ) *bufferSize = (unsigned int) maxSize;
2626 else if ( granularity == -1 ) {
2627 // Make sure bufferSize is a power of two.
2628 double power = std::log10( (double) *bufferSize ) / log10( 2.0 );
2629 *bufferSize = (int) pow( 2.0, floor(power+0.5) );
2630 if ( *bufferSize < (unsigned int) minSize ) *bufferSize = (unsigned int) minSize;
2631 else if ( *bufferSize > (unsigned int) maxSize ) *bufferSize = (unsigned int) maxSize;
2632 else *bufferSize = preferSize;
2634 else if ( granularity != 0 ) {
2635 // Set to an even multiple of granularity, rounding up.
2636 *bufferSize = (*bufferSize + granularity-1) / granularity * granularity;
// The input half of a duplex stream must match the output's size.
2639 if ( mode == INPUT && stream_.mode == OUTPUT && stream_.bufferSize != *bufferSize ) {
2640 drivers.removeCurrentDriver();
2641 errorText_ = "RtApiAsio::probeDeviceOpen: input/output buffersize discrepancy!";
2645 stream_.bufferSize = *bufferSize;
2646 stream_.nBuffers = 2;
2648 if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;
2649 else stream_.userInterleaved = true;
2651 // ASIO always uses non-interleaved buffers.
2652 stream_.deviceInterleaved[mode] = false;
2654 // Allocate, if necessary, our AsioHandle structure for the stream.
2655 AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
2656 if ( handle == 0 ) {
2658 handle = new AsioHandle;
2660 catch ( std::bad_alloc& ) {
2661 //if ( handle == NULL ) {
2662 drivers.removeCurrentDriver();
2663 errorText_ = "RtApiAsio::probeDeviceOpen: error allocating AsioHandle memory.";
2666 handle->bufferInfos = 0;
2668 // Create a manual-reset event.
2669 handle->condition = CreateEvent( NULL, // no security
2670 TRUE, // manual-reset
2671 FALSE, // non-signaled initially
2673 stream_.apiHandle = (void *) handle;
2676 // Create the ASIO internal buffers. Since RtAudio sets up input
2677 // and output separately, we'll have to dispose of previously
2678 // created output buffers for a duplex stream.
2679 long inputLatency, outputLatency;
2680 if ( mode == INPUT && stream_.mode == OUTPUT ) {
2681 ASIODisposeBuffers();
2682 if ( handle->bufferInfos ) free( handle->bufferInfos );
2685 // Allocate, initialize, and save the bufferInfos in our stream callbackInfo structure.
2686 bool buffersAllocated = false;
2687 unsigned int i, nChannels = stream_.nDeviceChannels[0] + stream_.nDeviceChannels[1];
2688 handle->bufferInfos = (ASIOBufferInfo *) malloc( nChannels * sizeof(ASIOBufferInfo) );
2689 if ( handle->bufferInfos == NULL ) {
2690 errorStream_ << "RtApiAsio::probeDeviceOpen: error allocating bufferInfo memory for driver (" << driverName << ").";
2691 errorText_ = errorStream_.str();
// Output channel entries first, then input entries.
2695 ASIOBufferInfo *infos;
2696 infos = handle->bufferInfos;
2697 for ( i=0; i<stream_.nDeviceChannels[0]; i++, infos++ ) {
2698 infos->isInput = ASIOFalse;
2699 infos->channelNum = i + stream_.channelOffset[0];
2700 infos->buffers[0] = infos->buffers[1] = 0;
2702 for ( i=0; i<stream_.nDeviceChannels[1]; i++, infos++ ) {
2703 infos->isInput = ASIOTrue;
2704 infos->channelNum = i + stream_.channelOffset[1];
2705 infos->buffers[0] = infos->buffers[1] = 0;
2708 // Set up the ASIO callback structure and create the ASIO data buffers.
2709 asioCallbacks.bufferSwitch = &bufferSwitch;
2710 asioCallbacks.sampleRateDidChange = &sampleRateChanged;
2711 asioCallbacks.asioMessage = &asioMessages;
2712 asioCallbacks.bufferSwitchTimeInfo = NULL;
2713 result = ASIOCreateBuffers( handle->bufferInfos, nChannels, stream_.bufferSize, &asioCallbacks );
2714 if ( result != ASE_OK ) {
2715 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") creating buffers.";
2716 errorText_ = errorStream_.str();
2719 buffersAllocated = true;
2721 // Set flags for buffer conversion.
2722 stream_.doConvertBuffer[mode] = false;
2723 if ( stream_.userFormat != stream_.deviceFormat[mode] )
2724 stream_.doConvertBuffer[mode] = true;
2725 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
2726 stream_.nUserChannels[mode] > 1 )
2727 stream_.doConvertBuffer[mode] = true;
2729 // Allocate necessary internal buffers
2730 unsigned long bufferBytes;
2731 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
2732 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
2733 if ( stream_.userBuffer[mode] == NULL ) {
2734 errorText_ = "RtApiAsio::probeDeviceOpen: error allocating user buffer memory.";
2738 if ( stream_.doConvertBuffer[mode] ) {
// Reuse an existing (output) device buffer for duplex input if it is
// already large enough.
2740 bool makeBuffer = true;
2741 bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
2742 if ( mode == INPUT ) {
2743 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
2744 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
2745 if ( bufferBytes <= bytesOut ) makeBuffer = false;
2750 bufferBytes *= *bufferSize;
2751 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
2752 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
2753 if ( stream_.deviceBuffer == NULL ) {
2754 errorText_ = "RtApiAsio::probeDeviceOpen: error allocating device buffer memory.";
2760 stream_.sampleRate = sampleRate;
2761 stream_.device[mode] = device;
2762 stream_.state = STREAM_STOPPED;
2763 asioCallbackInfo = &stream_.callbackInfo;
2764 stream_.callbackInfo.object = (void *) this;
2765 if ( stream_.mode == OUTPUT && mode == INPUT )
2766 // We had already set up an output stream.
2767 stream_.mode = DUPLEX;
2769 stream_.mode = mode;
2771 // Determine device latencies
2772 result = ASIOGetLatencies( &inputLatency, &outputLatency );
2773 if ( result != ASE_OK ) {
2774 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") getting latency.";
2775 errorText_ = errorStream_.str();
2776 error( RtError::WARNING); // warn but don't fail
2779 stream_.latency[0] = outputLatency;
2780 stream_.latency[1] = inputLatency;
2783 // Setup the buffer conversion information structure. We don't use
2784 // buffers to do channel offsets, so we override that parameter
2786 if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, 0 );
// ---- Error-cleanup tail: dispose ASIO buffers, unload the driver,
// and free everything allocated above before returning FAILURE.
2791 if ( buffersAllocated )
2792 ASIODisposeBuffers();
2793 drivers.removeCurrentDriver();
2796 CloseHandle( handle->condition );
2797 if ( handle->bufferInfos )
2798 free( handle->bufferInfos );
2800 stream_.apiHandle = 0;
2803 for ( int i=0; i<2; i++ ) {
2804 if ( stream_.userBuffer[i] ) {
2805 free( stream_.userBuffer[i] );
2806 stream_.userBuffer[i] = 0;
2810 if ( stream_.deviceBuffer ) {
2811 free( stream_.deviceBuffer );
2812 stream_.deviceBuffer = 0;
// Close the ASIO stream: stop if running, dispose the ASIO buffers,
// unload the driver, destroy the Win32 event and AsioHandle resources,
// and free user/device buffers.
2818 void RtApiAsio :: closeStream()
2820 if ( stream_.state == STREAM_CLOSED ) {
2821 errorText_ = "RtApiAsio::closeStream(): no open stream to close!";
2822 error( RtError::WARNING );
2826 if ( stream_.state == STREAM_RUNNING ) {
2827 stream_.state = STREAM_STOPPED;
2830 ASIODisposeBuffers();
2831 drivers.removeCurrentDriver();
2833 AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
2835 CloseHandle( handle->condition );
2836 if ( handle->bufferInfos )
2837 free( handle->bufferInfos );
2839 stream_.apiHandle = 0;
// Release the per-direction user buffers and the shared device buffer.
2842 for ( int i=0; i<2; i++ ) {
2843 if ( stream_.userBuffer[i] ) {
2844 free( stream_.userBuffer[i] );
2845 stream_.userBuffer[i] = 0;
2849 if ( stream_.deviceBuffer ) {
2850 free( stream_.deviceBuffer );
2851 stream_.deviceBuffer = 0;
2854 stream_.mode = UNINITIALIZED;
2855 stream_.state = STREAM_CLOSED;
// Start the ASIO stream via ASIOStart() and reset the drain state.
// On failure, SYSTEM_ERROR is raised after the mutex is released.
2858 void RtApiAsio :: startStream()
2861 if ( stream_.state == STREAM_RUNNING ) {
2862 errorText_ = "RtApiAsio::startStream(): the stream is already running!";
2863 error( RtError::WARNING );
2867 MUTEX_LOCK( &stream_.mutex );
2869 AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
2870 ASIOError result = ASIOStart();
2871 if ( result != ASE_OK ) {
2872 errorStream_ << "RtApiAsio::startStream: error (" << getAsioErrorString( result ) << ") starting device.";
2873 errorText_ = errorStream_.str();
// Clear any leftover drain request from a previous stop.
2877 handle->drainCounter = 0;
2878 handle->internalDrain = false;
2879 stream_.state = STREAM_RUNNING;
2883 MUTEX_UNLOCK( &stream_.mutex );
2885 if ( result == ASE_OK ) return;
2886 error( RtError::SYSTEM_ERROR );
// Stop the ASIO stream. For output/duplex streams, request a drain and
// wait on the manual-reset Win32 event (released by callbackEvent()
// once pending output has been played), then call ASIOStop().
2889 void RtApiAsio :: stopStream()
2892 if ( stream_.state == STREAM_STOPPED ) {
2893 errorText_ = "RtApiAsio::stopStream(): the stream is already stopped!";
2894 error( RtError::WARNING );
2898 MUTEX_LOCK( &stream_.mutex );
2900 AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
2901 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
2902 if ( handle->drainCounter == 0 ) {
2903 handle->drainCounter = 1;
// Unlock while waiting so the ASIO callback can run and signal us.
2904 MUTEX_UNLOCK( &stream_.mutex );
2905 WaitForMultipleObjects( 1, &handle->condition, FALSE, INFINITE );  // block until signaled
2906 ResetEvent( handle->condition );
2907 MUTEX_LOCK( &stream_.mutex );
2911 ASIOError result = ASIOStop();
2912 if ( result != ASE_OK ) {
2913 errorStream_ << "RtApiAsio::stopStream: error (" << getAsioErrorString( result ) << ") stopping device.";
2914 errorText_ = errorStream_.str();
2917 stream_.state = STREAM_STOPPED;
2918 MUTEX_UNLOCK( &stream_.mutex );
2920 if ( result == ASE_OK ) return;
2921 error( RtError::SYSTEM_ERROR );
// Abort the ASIO stream. Deliberately identical to stopStream() (the
// immediate-drain shortcut below is commented out): disposed device
// buffers were observed to keep sounding, so a full stop is performed.
2924 void RtApiAsio :: abortStream()
2927 if ( stream_.state == STREAM_STOPPED ) {
2928 errorText_ = "RtApiAsio::abortStream(): the stream is already stopped!";
2929 error( RtError::WARNING );
2933 // The following lines were commented-out because some behavior was
2934 // noted where the device buffers need to be zeroed to avoid
2935 // continuing sound, even when the device buffers are completed
2936 // disposed. So now, calling abort is the same as calling stop.
2937 //AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
2938 //handle->drainCounter = 1;
2942 bool RtApiAsio :: callbackEvent( long bufferIndex )
// Per-buffer ASIO driver callback: runs the user callback, moves audio
// between the user buffers and the driver's bufferInfos for the given
// half-buffer index, and manages drain/stop signalling.
// NOTE(review): many lines (braces, early returns, some memcpy length
// arguments, the unlock label) are missing from this excerpt.
2944 if ( stream_.state == STREAM_STOPPED ) return SUCCESS;
2945 if ( stream_.state == STREAM_CLOSED ) {
2946 errorText_ = "RtApiAsio::callbackEvent(): the stream is closed ... this shouldn't happen!";
2947 error( RtError::WARNING );
2951 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
2952 AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
2954 // Check if we were draining the stream and signal is finished.
2955 if ( handle->drainCounter > 3 ) {
// An externally initiated drain (internalDrain == false) is waiting on
// this event in stopStream(); wake it up.
2956 if ( handle->internalDrain == false )
2957 SetEvent( handle->condition );
2963 MUTEX_LOCK( &stream_.mutex );
2965 // The state might change while waiting on a mutex.
2966 if ( stream_.state == STREAM_STOPPED ) goto unlock;
2968 // Invoke user callback to get fresh output data UNLESS we are
// draining (drainCounter != 0), in which case the previous data is reused.
2970 if ( handle->drainCounter == 0 ) {
2971 RtAudioCallback callback = (RtAudioCallback) info->callback;
2972 double streamTime = getStreamTime();
2973 RtAudioStreamStatus status = 0;
// Report any xrun flagged by the driver in the appropriate direction(s).
2974 if ( stream_.mode != INPUT && asioXRun == true ) {
2975 status |= RTAUDIO_OUTPUT_UNDERFLOW;
2978 if ( stream_.mode != OUTPUT && asioXRun == true ) {
2979 status |= RTAUDIO_INPUT_OVERFLOW;
// The callback's return value drives draining: 0 = continue, 1 = drain
// then stop, 2 = abort immediately.
2982 handle->drainCounter = callback( stream_.userBuffer[0], stream_.userBuffer[1],
2983 stream_.bufferSize, streamTime, status, info->userData );
2984 if ( handle->drainCounter == 2 ) {
2985 MUTEX_UNLOCK( &stream_.mutex );
2989 else if ( handle->drainCounter == 1 )
2990 handle->internalDrain = true;
2993 unsigned int nChannels, bufferBytes, i, j;
2994 nChannels = stream_.nDeviceChannels[0] + stream_.nDeviceChannels[1];
2995 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
2997 bufferBytes = stream_.bufferSize * formatBytes( stream_.deviceFormat[0] );
2999 if ( handle->drainCounter > 1 ) { // write zeros to the output stream
3001 for ( i=0, j=0; i<nChannels; i++ ) {
3002 if ( handle->bufferInfos[i].isInput != ASIOTrue )
3003 memset( handle->bufferInfos[i].buffers[bufferIndex], 0, bufferBytes );
// Conversion needed: convert/byte-swap into deviceBuffer, then
// de-interleave into the per-channel ASIO output buffers.
3007 else if ( stream_.doConvertBuffer[0] ) {
3009 convertBuffer( stream_.deviceBuffer, stream_.userBuffer[0], stream_.convertInfo[0] );
3010 if ( stream_.doByteSwap[0] )
3011 byteSwapBuffer( stream_.deviceBuffer,
3012 stream_.bufferSize * stream_.nDeviceChannels[0],
3013 stream_.deviceFormat[0] );
3015 for ( i=0, j=0; i<nChannels; i++ ) {
3016 if ( handle->bufferInfos[i].isInput != ASIOTrue )
3017 memcpy( handle->bufferInfos[i].buffers[bufferIndex],
3018 &stream_.deviceBuffer[j++*bufferBytes], bufferBytes );
// No conversion: copy straight from the user buffer per channel.
3024 if ( stream_.doByteSwap[0] )
3025 byteSwapBuffer( stream_.userBuffer[0],
3026 stream_.bufferSize * stream_.nUserChannels[0],
3027 stream_.userFormat );
3029 for ( i=0, j=0; i<nChannels; i++ ) {
3030 if ( handle->bufferInfos[i].isInput != ASIOTrue )
3031 memcpy( handle->bufferInfos[i].buffers[bufferIndex],
3032 &stream_.userBuffer[0][bufferBytes*j++], bufferBytes );
// While draining, count buffers so the drain is detected above after a
// few callbacks have flushed the device.
3037 if ( handle->drainCounter ) {
3038 handle->drainCounter++;
3043 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
3045 bufferBytes = stream_.bufferSize * formatBytes(stream_.deviceFormat[1]);
3047 if (stream_.doConvertBuffer[1]) {
3049 // Always interleave ASIO input data.
3050 for ( i=0, j=0; i<nChannels; i++ ) {
3051 if ( handle->bufferInfos[i].isInput == ASIOTrue )
3052 memcpy( &stream_.deviceBuffer[j++*bufferBytes],
3053 handle->bufferInfos[i].buffers[bufferIndex],
3057 if ( stream_.doByteSwap[1] )
3058 byteSwapBuffer( stream_.deviceBuffer,
3059 stream_.bufferSize * stream_.nDeviceChannels[1],
3060 stream_.deviceFormat[1] );
3061 convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
// No conversion: copy each ASIO input channel into the user buffer.
3065 for ( i=0, j=0; i<nChannels; i++ ) {
3066 if ( handle->bufferInfos[i].isInput == ASIOTrue ) {
3067 memcpy( &stream_.userBuffer[1][bufferBytes*j++],
3068 handle->bufferInfos[i].buffers[bufferIndex],
3073 if ( stream_.doByteSwap[1] )
3074 byteSwapBuffer( stream_.userBuffer[1],
3075 stream_.bufferSize * stream_.nUserChannels[1],
3076 stream_.userFormat );
3081 // The following call was suggested by Malte Clasen. While the API
3082 // documentation indicates it should not be required, some device
3083 // drivers apparently do not function correctly without it.
3086 MUTEX_UNLOCK( &stream_.mutex );
3088 RtApi::tickStreamTime();
3092 void sampleRateChanged( ASIOSampleRate sRate )
// ASIO driver callback invoked when the driver detects a sample-rate
// change. The stream is stopped because its buffers/conversions were
// configured for the old rate. NOTE(review): the try-block opener and
// the function's closing lines are missing from this excerpt.
3094 // The ASIO documentation says that this usually only happens during
3095 // external sync. Audio processing is not stopped by the driver,
3096 // actual sample rate might not have even changed, maybe only the
3097 // sample rate status of an AES/EBU or S/PDIF digital input at the
3100 RtApi *object = (RtApi *) asioCallbackInfo->object;
3102 object->stopStream();
3104 catch ( RtError &exception ) {
3105 std::cerr << "\nRtApiAsio: sampleRateChanged() error (" << exception.getMessage() << ")!\n" << std::endl;
3109 std::cerr << "\nRtApiAsio: driver reports sample rate changed to " << sRate << " ... stream stopped!!!\n" << std::endl;
3112 long asioMessages( long selector, long value, void* message, double* opt )
// ASIO driver-to-host message callback. Dispatches on the selector and
// answers which features this host supports. NOTE(review): the `ret`
// variable, its assignments, the break/return statements, and some case
// labels are missing from this excerpt.
3116 switch( selector ) {
3117 case kAsioSelectorSupported:
// Answer whether we handle a given selector when queried.
3118 if ( value == kAsioResetRequest
3119 || value == kAsioEngineVersion
3120 || value == kAsioResyncRequest
3121 || value == kAsioLatenciesChanged
3122 // The following three were added for ASIO 2.0, you don't
3123 // necessarily have to support them.
3124 || value == kAsioSupportsTimeInfo
3125 || value == kAsioSupportsTimeCode
3126 || value == kAsioSupportsInputMonitor)
3129 case kAsioResetRequest:
3130 // Defer the task and perform the reset of the driver during the
3131 // next "safe" situation. You cannot reset the driver right now,
3132 // as this code is called from the driver. Resetting the driver
3133 // is done by completely destructing it, i.e. ASIOStop(),
3134 // ASIODisposeBuffers(), destruction. Afterwards you re-initialize
3136 std::cerr << "\nRtApiAsio: driver reset requested!!!" << std::endl;
3139 case kAsioResyncRequest:
3140 // This informs the application that the driver encountered some
3141 // non-fatal data loss. It is used for synchronization purposes
3142 // of different media. Added mainly to work around the Win16Mutex
3143 // problems in Windows 95/98 with the Windows Multimedia system,
3144 // which could lose data because the Mutex was held too long by
3145 // another thread. However a driver can issue it in other
3147 // std::cerr << "\nRtApiAsio: driver resync requested!!!" << std::endl;
3151 case kAsioLatenciesChanged:
3152 // This will inform the host application that the driver's
3153 // latencies have changed. Beware, this does not mean that the
3154 // buffer sizes have changed! You might need to update internal
3156 std::cerr << "\nRtApiAsio: driver latency may have changed!!!" << std::endl;
3159 case kAsioEngineVersion:
3160 // Return the supported ASIO version of the host application. If
3161 // a host application does not implement this selector, ASIO 1.0
3162 // is assumed by the driver.
3165 case kAsioSupportsTimeInfo:
3166 // Informs the driver whether the
3167 // asioCallbacks.bufferSwitchTimeInfo() callback is supported.
3168 // For compatibility with ASIO 1.0 drivers the host application
3169 // should always support the "old" bufferSwitch method, too.
3172 case kAsioSupportsTimeCode:
3173 // Informs the driver whether application is interested in time
3174 // code info. If an application does not need to know about time
3175 // code, the driver has less work to do.
3182 static const char* getAsioErrorString( ASIOError result )
3190 static Messages m[] =
3192 { ASE_NotPresent, "Hardware input or output is not present or available." },
3193 { ASE_HWMalfunction, "Hardware is malfunctioning." },
3194 { ASE_InvalidParameter, "Invalid input parameter." },
3195 { ASE_InvalidMode, "Invalid mode." },
3196 { ASE_SPNotAdvancing, "Sample position not advancing." },
3197 { ASE_NoClock, "Sample clock or rate cannot be determined or is not present." },
3198 { ASE_NoMemory, "Not enough memory to complete the request." }
3201 for ( unsigned int i = 0; i < sizeof(m)/sizeof(m[0]); ++i )
3202 if ( m[i].value == result ) return m[i].message;
3204 return "Unknown error.";
3206 //******************** End of __WINDOWS_ASIO__ *********************//
3210 #if defined(__WINDOWS_DS__) // Windows DirectSound API
3212 // Modified by Robin Davies, October 2005
3213 // - Improvements to DirectX pointer chasing.
3214 // - Backdoor RtDsStatistics hook provides DirectX performance information.
3215 // - Bug fix for non-power-of-two Asio granularity used by Edirol PCR-A30.
3216 // - Auto-call CoInitialize for DSOUND and ASIO platforms.
3217 // Various revisions for RtAudio 4.0 by Gary Scavone, April 2007
3222 #if defined(__MINGW32__)
3223 // missing from latest mingw winapi
3224 #define WAVE_FORMAT_96M08 0x00010000 /* 96 kHz, Mono, 8-bit */
3225 #define WAVE_FORMAT_96S08 0x00020000 /* 96 kHz, Stereo, 8-bit */
3226 #define WAVE_FORMAT_96M16 0x00040000 /* 96 kHz, Mono, 16-bit */
3227 #define WAVE_FORMAT_96S16 0x00080000 /* 96 kHz, Stereo, 16-bit */
3230 #define MINIMUM_DEVICE_BUFFER_SIZE 32768
3232 #ifdef _MSC_VER // if Microsoft Visual C++
3233 #pragma comment( lib, "winmm.lib" ) // then, auto-link winmm.lib. Otherwise, it has to be added manually.
3236 static inline DWORD dsPointerDifference( DWORD laterPointer, DWORD earlierPointer, DWORD bufferSize )
3238 if (laterPointer > earlierPointer)
3239 return laterPointer - earlierPointer;
3241 return laterPointer - earlierPointer + bufferSize;
3244 static inline DWORD dsPointerBetween( DWORD pointer, DWORD laterPointer, DWORD earlierPointer, DWORD bufferSize )
3246 if ( pointer > bufferSize ) pointer -= bufferSize;
3247 if ( laterPointer < earlierPointer ) laterPointer += bufferSize;
3248 if ( pointer < earlierPointer ) pointer += bufferSize;
3249 return pointer >= earlierPointer && pointer < laterPointer;
3252 // A structure to hold various information related to the DirectSound
3253 // API implementation.
// NOTE(review): the "struct DsHandle {" opener and several members
// (id, buffer, xrun, condition, closing brace) are missing from this
// excerpt; only some fields and the constructor remain visible.
3255 unsigned int drainCounter; // Tracks callback counts when draining
3256 bool internalDrain; // Indicates if stop is initiated from callback or not.
3260 UINT bufferPointer[2];
3261 DWORD dsBufferSize[2];
3262 DWORD dsPointerLeadTime[2]; // the number of bytes ahead of the safe pointer to lead by.
// Constructor: zero the drain state and all per-direction bookkeeping.
3266 :drainCounter(0), internalDrain(false) { id[0] = 0; id[1] = 0; buffer[0] = 0; buffer[1] = 0; xrun[0] = false; xrun[1] = false; bufferPointer[0] = 0; bufferPointer[1] = 0; }
// Definition of the static statistics member declared in the class.
3270 RtApiDs::RtDsStatistics statistics;
3272 // Provides a backdoor hook to monitor for DirectSound read overruns and write underruns.
3273 RtApiDs::RtDsStatistics RtApiDs::getDsStatistics()
// Returns a copy of the static statistics with the latency field
// derived from the raw byte counters. NOTE(review): the opening brace
// and the trailing "return s;" / closing brace are missing from this
// excerpt.
3275 RtDsStatistics s = statistics;
3277 // update the calculated fields.
// Latency contribution = lead bytes / frame size (frames), divided by
// the sample rate to get seconds.
3278 if ( s.inputFrameSize != 0 )
3279 s.latency += s.readDeviceSafeLeadBytes * 1.0 / s.inputFrameSize / s.sampleRate;
3281 if ( s.outputFrameSize != 0 )
3282 s.latency += (s.writeDeviceSafeLeadBytes + s.writeDeviceBufferLeadBytes) * 1.0 / s.outputFrameSize / s.sampleRate;
3288 // Declarations for utility functions, callbacks, and structures
3289 // specific to the DirectSound implementation.
3290 static BOOL CALLBACK deviceQueryCallback( LPGUID lpguid,
3291 LPCTSTR description,
// NOTE(review): the remaining parameters of this forward declaration
// were dropped by the paste.
3295 static char* getErrorString( int code );
3297 extern "C" unsigned __stdcall callbackHandler( void *ptr );
// Fields of the device-enumeration info struct used by the query
// callback (its opener and other members are missing from this
// excerpt). `counter` tracks how many devices have been enumerated.
3303 unsigned int counter;
// Constructor: start with all search flags off and counters at zero.
3309 : isInput(false), getDefault(false), findIndex(false), counter(0), index(0) {}
3312 RtApiDs :: RtApiDs()
3314 // Dsound will run both-threaded. If CoInitialize fails, then just
3315 // accept whatever the mainline chose for a threading model.
3316 coInitialized_ = false;
3317 HRESULT hr = CoInitialize( NULL );
3318 if ( !FAILED( hr ) ) coInitialized_ = true;
3321 RtApiDs :: ~RtApiDs()
3323 if ( coInitialized_ ) CoUninitialize(); // balanced call.
3324 if ( stream_.state != STREAM_CLOSED ) closeStream();
3327 unsigned int RtApiDs :: getDefaultInputDevice( void )
// Determine the index of the system default capture device. Output
// devices are enumerated first so the combined device index matches
// getDeviceCount()'s numbering; the default capture device stops the
// enumeration, leaving counter - 1 as its index. NOTE(review): the
// enumeration-info declaration, early returns, and closing lines are
// missing from this excerpt.
3329 // Count output devices.
3331 HRESULT result = DirectSoundEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &info );
3332 if ( FAILED( result ) ) {
// NOTE(review): the message below names "getDefaultOutputDevice"
// although we are in getDefaultInputDevice -- looks like a copy/paste
// slip; runtime string left untouched here.
3333 errorStream_ << "RtApiDs::getDefaultOutputDevice: error (" << getErrorString( result ) << ") counting output devices!";
3334 errorText_ = errorStream_.str();
3335 error( RtError::WARNING );
3339 // Now enumerate input devices until we find the id = NULL.
3340 info.isInput = true;
3341 info.getDefault = true;
3342 result = DirectSoundCaptureEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &info );
3343 if ( FAILED( result ) ) {
3344 errorStream_ << "RtApiDs::getDefaultInputDevice: error (" << getErrorString( result ) << ") enumerating input devices!";
3345 errorText_ = errorStream_.str();
3346 error( RtError::WARNING );
3350 if ( info.counter > 0 ) return info.counter - 1;
3354 unsigned int RtApiDs :: getDefaultOutputDevice( void )
// Determine the index of the system default playback device: enumerate
// until the default (id == NULL) is seen, then counter - 1 is its
// index. NOTE(review): the enumeration-info declaration, early returns,
// and closing lines are missing from this excerpt.
3356 // Enumerate output devices until we find the id = NULL.
3358 info.getDefault = true;
3359 HRESULT result = DirectSoundEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &info );
3360 if ( FAILED( result ) ) {
3361 errorStream_ << "RtApiDs::getDefaultOutputDevice: error (" << getErrorString( result ) << ") enumerating output devices!";
3362 errorText_ = errorStream_.str();
3363 error( RtError::WARNING );
3367 if ( info.counter > 0 ) return info.counter - 1;
3371 unsigned int RtApiDs :: getDeviceCount( void )
// Total device count = playback devices + capture devices; the same
// info.counter accumulates across both enumerations. NOTE(review): the
// enumeration-info declaration, early returns, and braces are missing
// from this excerpt.
3373 // Count DirectSound devices.
3375 HRESULT result = DirectSoundEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &info );
3376 if ( FAILED( result ) ) {
3377 errorStream_ << "RtApiDs::getDeviceCount: error (" << getErrorString( result ) << ") enumerating output devices!";
3378 errorText_ = errorStream_.str();
3379 error( RtError::WARNING );
3382 // Count DirectSoundCapture devices.
3383 info.isInput = true;
3384 result = DirectSoundCaptureEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &info );
3385 if ( FAILED( result ) ) {
3386 errorStream_ << "RtApiDs::getDeviceCount: error (" << getErrorString( result ) << ") enumerating input devices!";
3387 errorText_ = errorStream_.str();
3388 error( RtError::WARNING );
3391 return info.counter;
3394 RtAudio::DeviceInfo RtApiDs :: getDeviceInfo( unsigned int device )
// Probe one DirectSound device: locate it by index, open it, and fill
// in channel counts, supported sample rates, and native formats.
// NOTE(review): many lines (the dsinfo declaration, early returns,
// Release() calls, the probeInput label, closing braces) are missing
// from this excerpt.
3396 // Because DirectSound always enumerates input and output devices
3397 // separately (and because we don't attempt to combine devices
3398 // internally), none of our "devices" will ever be duplex.
3400 RtAudio::DeviceInfo info;
3401 info.probed = false;
3403 // Enumerate through devices to find the id (if it exists). Note
3404 // that we have to do the output enumeration first, even if this is
3405 // an input device, in order for the device counter to be correct.
3407 dsinfo.findIndex = true;
3408 dsinfo.index = device;
3409 HRESULT result = DirectSoundEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &dsinfo );
3410 if ( FAILED( result ) ) {
3411 errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") enumerating output devices!";
3412 errorText_ = errorStream_.str();
3413 error( RtError::WARNING );
// An empty name means the index was not an output device; probe inputs.
3416 if ( dsinfo.name.empty() ) goto probeInput;
3418 LPDIRECTSOUND output;
3420 result = DirectSoundCreate( dsinfo.id, &output, NULL );
3421 if ( FAILED( result ) ) {
3422 errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") opening output device (" << dsinfo.name << ")!";
3423 errorText_ = errorStream_.str();
3424 error( RtError::WARNING );
3428 outCaps.dwSize = sizeof( outCaps );
3429 result = output->GetCaps( &outCaps );
3430 if ( FAILED( result ) ) {
3432 errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") getting capabilities!";
3433 errorText_ = errorStream_.str();
3434 error( RtError::WARNING );
3438 // Get output channel information.
3439 info.outputChannels = ( outCaps.dwFlags & DSCAPS_PRIMARYSTEREO ) ? 2 : 1;
3441 // Get sample rate information.
3442 info.sampleRates.clear();
3443 for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {
3444 if ( SAMPLE_RATES[k] >= (unsigned int) outCaps.dwMinSecondarySampleRate &&
3445 SAMPLE_RATES[k] <= (unsigned int) outCaps.dwMaxSecondarySampleRate )
3446 info.sampleRates.push_back( SAMPLE_RATES[k] );
3449 // Get format information.
3450 if ( outCaps.dwFlags & DSCAPS_PRIMARY16BIT ) info.nativeFormats |= RTAUDIO_SINT16;
3451 if ( outCaps.dwFlags & DSCAPS_PRIMARY8BIT ) info.nativeFormats |= RTAUDIO_SINT8;
3455 if ( getDefaultOutputDevice() == device )
3456 info.isDefaultOutput = true;
3458 // Copy name and return.
3459 info.name = dsinfo.name;
// probeInput: capture-side probe (label line missing from excerpt).
3466 dsinfo.isInput = true;
3467 result = DirectSoundCaptureEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &dsinfo );
3468 if ( FAILED( result ) ) {
3469 errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") enumerating input devices!";
3470 errorText_ = errorStream_.str();
3471 error( RtError::WARNING );
3474 if ( dsinfo.name.empty() ) return info;
3476 LPDIRECTSOUNDCAPTURE input;
3477 result = DirectSoundCaptureCreate( dsinfo.id, &input, NULL );
3478 if ( FAILED( result ) ) {
3479 errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") opening input device (" << dsinfo.name << ")!";
3480 errorText_ = errorStream_.str();
3481 error( RtError::WARNING );
3486 inCaps.dwSize = sizeof( inCaps );
3487 result = input->GetCaps( &inCaps );
3488 if ( FAILED( result ) ) {
3490 errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") getting object capabilities (" << dsinfo.name << ")!";
3491 errorText_ = errorStream_.str();
3492 error( RtError::WARNING );
3496 // Get input channel information.
3497 info.inputChannels = inCaps.dwChannels;
3499 // Get sample rate and format information.
3500 if ( inCaps.dwChannels == 2 ) {
3501 if ( inCaps.dwFormats & WAVE_FORMAT_1S16 ) info.nativeFormats |= RTAUDIO_SINT16;
3502 if ( inCaps.dwFormats & WAVE_FORMAT_2S16 ) info.nativeFormats |= RTAUDIO_SINT16;
3503 if ( inCaps.dwFormats & WAVE_FORMAT_4S16 ) info.nativeFormats |= RTAUDIO_SINT16;
3504 if ( inCaps.dwFormats & WAVE_FORMAT_96S16 ) info.nativeFormats |= RTAUDIO_SINT16;
3505 if ( inCaps.dwFormats & WAVE_FORMAT_1S08 ) info.nativeFormats |= RTAUDIO_SINT8;
3506 if ( inCaps.dwFormats & WAVE_FORMAT_2S08 ) info.nativeFormats |= RTAUDIO_SINT8;
3507 if ( inCaps.dwFormats & WAVE_FORMAT_4S08 ) info.nativeFormats |= RTAUDIO_SINT8;
3508 if ( inCaps.dwFormats & WAVE_FORMAT_96S08 ) info.nativeFormats |= RTAUDIO_SINT8;
3510 if ( info.nativeFormats & RTAUDIO_SINT16 ) {
3511 if ( inCaps.dwFormats & WAVE_FORMAT_1S16 ) info.sampleRates.push_back( 11025 );
3512 if ( inCaps.dwFormats & WAVE_FORMAT_2S16 ) info.sampleRates.push_back( 22050 );
3513 if ( inCaps.dwFormats & WAVE_FORMAT_4S16 ) info.sampleRates.push_back( 44100 );
3514 if ( inCaps.dwFormats & WAVE_FORMAT_96S16 ) info.sampleRates.push_back( 96000 );
3516 else if ( info.nativeFormats & RTAUDIO_SINT8 ) {
3517 if ( inCaps.dwFormats & WAVE_FORMAT_1S08 ) info.sampleRates.push_back( 11025 );
3518 if ( inCaps.dwFormats & WAVE_FORMAT_2S08 ) info.sampleRates.push_back( 22050 );
3519 if ( inCaps.dwFormats & WAVE_FORMAT_4S08 ) info.sampleRates.push_back( 44100 );
// NOTE(review): 96 kHz 8-bit stereo pushes 44100 here; presumably this
// should be 96000 (cf. the mono branch below) -- confirm upstream.
3520 if ( inCaps.dwFormats & WAVE_FORMAT_96S08 ) info.sampleRates.push_back( 44100 );
3523 else if ( inCaps.dwChannels == 1 ) {
3524 if ( inCaps.dwFormats & WAVE_FORMAT_1M16 ) info.nativeFormats |= RTAUDIO_SINT16;
3525 if ( inCaps.dwFormats & WAVE_FORMAT_2M16 ) info.nativeFormats |= RTAUDIO_SINT16;
3526 if ( inCaps.dwFormats & WAVE_FORMAT_4M16 ) info.nativeFormats |= RTAUDIO_SINT16;
3527 if ( inCaps.dwFormats & WAVE_FORMAT_96M16 ) info.nativeFormats |= RTAUDIO_SINT16;
3528 if ( inCaps.dwFormats & WAVE_FORMAT_1M08 ) info.nativeFormats |= RTAUDIO_SINT8;
3529 if ( inCaps.dwFormats & WAVE_FORMAT_2M08 ) info.nativeFormats |= RTAUDIO_SINT8;
3530 if ( inCaps.dwFormats & WAVE_FORMAT_4M08 ) info.nativeFormats |= RTAUDIO_SINT8;
3531 if ( inCaps.dwFormats & WAVE_FORMAT_96M08 ) info.nativeFormats |= RTAUDIO_SINT8;
3533 if ( info.nativeFormats & RTAUDIO_SINT16 ) {
3534 if ( inCaps.dwFormats & WAVE_FORMAT_1M16 ) info.sampleRates.push_back( 11025 );
3535 if ( inCaps.dwFormats & WAVE_FORMAT_2M16 ) info.sampleRates.push_back( 22050 );
3536 if ( inCaps.dwFormats & WAVE_FORMAT_4M16 ) info.sampleRates.push_back( 44100 );
3537 if ( inCaps.dwFormats & WAVE_FORMAT_96M16 ) info.sampleRates.push_back( 96000 );
3539 else if ( info.nativeFormats & RTAUDIO_SINT8 ) {
3540 if ( inCaps.dwFormats & WAVE_FORMAT_1M08 ) info.sampleRates.push_back( 11025 );
3541 if ( inCaps.dwFormats & WAVE_FORMAT_2M08 ) info.sampleRates.push_back( 22050 );
3542 if ( inCaps.dwFormats & WAVE_FORMAT_4M08 ) info.sampleRates.push_back( 44100 );
3543 if ( inCaps.dwFormats & WAVE_FORMAT_96M08 ) info.sampleRates.push_back( 96000 );
3546 else info.inputChannels = 0; // technically, this would be an error
3550 if ( info.inputChannels == 0 ) return info;
3552 if ( getDefaultInputDevice() == device )
3553 info.isDefaultInput = true;
3555 // Copy name and return.
3556 info.name = dsinfo.name;
3561 bool RtApiDs :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
3562 unsigned int firstChannel, unsigned int sampleRate,
3563 RtAudioFormat format, unsigned int *bufferSize,
3564 RtAudio::StreamOptions *options )
3566 if ( channels + firstChannel > 2 ) {
3567 errorText_ = "RtApiDs::probeDeviceOpen: DirectSound does not support more than 2 channels per device.";
3571 // Enumerate through devices to find the id (if it exists). Note
3572 // that we have to do the output enumeration first, even if this is
3573 // an input device, in order for the device counter to be correct.
3575 dsinfo.findIndex = true;
3576 dsinfo.index = device;
3577 HRESULT result = DirectSoundEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &dsinfo );
3578 if ( FAILED( result ) ) {
3579 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") enumerating output devices!";
3580 errorText_ = errorStream_.str();
3584 if ( mode == OUTPUT ) {
3585 if ( dsinfo.name.empty() ) {
3586 errorStream_ << "RtApiDs::probeDeviceOpen: device (" << device << ") does not support output!";
3587 errorText_ = errorStream_.str();
3591 else { // mode == INPUT
3592 dsinfo.isInput = true;
3593 HRESULT result = DirectSoundCaptureEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &dsinfo );
3594 if ( FAILED( result ) ) {
3595 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") enumerating input devices!";
3596 errorText_ = errorStream_.str();
3599 if ( dsinfo.name.empty() ) {
3600 errorStream_ << "RtApiDs::probeDeviceOpen: device (" << device << ") does not support input!";
3601 errorText_ = errorStream_.str();
3606 // According to a note in PortAudio, using GetDesktopWindow()
3607 // instead of GetForegroundWindow() is supposed to avoid problems
3608 // that occur when the application's window is not the foreground
3609 // window. Also, if the application window closes before the
3610 // DirectSound buffer, DirectSound can crash. However, for console
3611 // applications, no sound was produced when using GetDesktopWindow().
3612 HWND hWnd = GetForegroundWindow();
3614 // Check the numberOfBuffers parameter and limit the lowest value to
3615 // two. This is a judgement call and a value of two is probably too
3616 // low for capture, but it should work for playback.
3618 if ( options ) nBuffers = options->numberOfBuffers;
3619 if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) nBuffers = 2;
3620 if ( nBuffers < 2 ) nBuffers = 3;
3622 // Create the wave format structure. The data format setting will
3623 // be determined later.
3624 WAVEFORMATEX waveFormat;
3625 ZeroMemory( &waveFormat, sizeof(WAVEFORMATEX) );
3626 waveFormat.wFormatTag = WAVE_FORMAT_PCM;
3627 waveFormat.nChannels = channels + firstChannel;
3628 waveFormat.nSamplesPerSec = (unsigned long) sampleRate;
3630 // Determine the device buffer size. By default, 32k, but we will
3631 // grow it to make allowances for very large software buffer sizes.
3632 DWORD dsBufferSize = 0;
3633 DWORD dsPointerLeadTime = 0;
3634 long bufferBytes = MINIMUM_DEVICE_BUFFER_SIZE; // sound cards will always *knock wood* support this
3636 void *ohandle = 0, *bhandle = 0;
3637 if ( mode == OUTPUT ) {
3639 LPDIRECTSOUND output;
3640 result = DirectSoundCreate( dsinfo.id, &output, NULL );
3641 if ( FAILED( result ) ) {
3642 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") opening output device (" << dsinfo.name << ")!";
3643 errorText_ = errorStream_.str();
3648 outCaps.dwSize = sizeof( outCaps );
3649 result = output->GetCaps( &outCaps );
3650 if ( FAILED( result ) ) {
3652 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") getting capabilities (" << dsinfo.name << ")!";
3653 errorText_ = errorStream_.str();
3657 // Check channel information.
3658 if ( channels + firstChannel == 2 && !( outCaps.dwFlags & DSCAPS_PRIMARYSTEREO ) ) {
3659 errorStream_ << "RtApiDs::getDeviceInfo: the output device (" << dsinfo.name << ") does not support stereo playback.";
3660 errorText_ = errorStream_.str();
3664 // Check format information. Use 16-bit format unless not
3665 // supported or user requests 8-bit.
3666 if ( outCaps.dwFlags & DSCAPS_PRIMARY16BIT &&
3667 !( format == RTAUDIO_SINT8 && outCaps.dwFlags & DSCAPS_PRIMARY8BIT ) ) {
3668 waveFormat.wBitsPerSample = 16;
3669 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
3672 waveFormat.wBitsPerSample = 8;
3673 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
3675 stream_.userFormat = format;
3677 // Update wave format structure and buffer information.
3678 waveFormat.nBlockAlign = waveFormat.nChannels * waveFormat.wBitsPerSample / 8;
3679 waveFormat.nAvgBytesPerSec = waveFormat.nSamplesPerSec * waveFormat.nBlockAlign;
3680 dsPointerLeadTime = nBuffers * (*bufferSize) * (waveFormat.wBitsPerSample / 8) * channels;
3682 // If the user wants an even bigger buffer, increase the device buffer size accordingly.
3683 while ( dsPointerLeadTime * 2U > (DWORD) bufferBytes )
3686 // Set cooperative level to DSSCL_EXCLUSIVE ... sound stops when window focus changes.
3687 //result = output->SetCooperativeLevel( hWnd, DSSCL_EXCLUSIVE );
3688 // Set cooperative level to DSSCL_PRIORITY ... sound remains when window focus changes.
3689 result = output->SetCooperativeLevel( hWnd, DSSCL_PRIORITY );
3690 if ( FAILED( result ) ) {
3692 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") setting cooperative level (" << dsinfo.name << ")!";
3693 errorText_ = errorStream_.str();
3697 // Even though we will write to the secondary buffer, we need to
3698 // access the primary buffer to set the correct output format
3699 // (since the default is 8-bit, 22 kHz!). Setup the DS primary
3700 // buffer description.
3701 DSBUFFERDESC bufferDescription;
3702 ZeroMemory( &bufferDescription, sizeof( DSBUFFERDESC ) );
3703 bufferDescription.dwSize = sizeof( DSBUFFERDESC );
3704 bufferDescription.dwFlags = DSBCAPS_PRIMARYBUFFER;
3706 // Obtain the primary buffer
3707 LPDIRECTSOUNDBUFFER buffer;
3708 result = output->CreateSoundBuffer( &bufferDescription, &buffer, NULL );
3709 if ( FAILED( result ) ) {
3711 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") accessing primary buffer (" << dsinfo.name << ")!";
3712 errorText_ = errorStream_.str();
3716 // Set the primary DS buffer sound format.
3717 result = buffer->SetFormat( &waveFormat );
3718 if ( FAILED( result ) ) {
3720 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") setting primary buffer format (" << dsinfo.name << ")!";
3721 errorText_ = errorStream_.str();
3725 // Setup the secondary DS buffer description.
3726 dsBufferSize = (DWORD) bufferBytes;
3727 ZeroMemory( &bufferDescription, sizeof( DSBUFFERDESC ) );
3728 bufferDescription.dwSize = sizeof( DSBUFFERDESC );
3729 bufferDescription.dwFlags = ( DSBCAPS_STICKYFOCUS |
3730 DSBCAPS_GLOBALFOCUS |
3731 DSBCAPS_GETCURRENTPOSITION2 |
3732 DSBCAPS_LOCHARDWARE ); // Force hardware mixing
3733 bufferDescription.dwBufferBytes = bufferBytes;
3734 bufferDescription.lpwfxFormat = &waveFormat;
3736 // Try to create the secondary DS buffer. If that doesn't work,
3737 // try to use software mixing. Otherwise, there's a problem.
3738 result = output->CreateSoundBuffer( &bufferDescription, &buffer, NULL );
3739 if ( FAILED( result ) ) {
3740 bufferDescription.dwFlags = ( DSBCAPS_STICKYFOCUS |
3741 DSBCAPS_GLOBALFOCUS |
3742 DSBCAPS_GETCURRENTPOSITION2 |
3743 DSBCAPS_LOCSOFTWARE ); // Force software mixing
3744 result = output->CreateSoundBuffer( &bufferDescription, &buffer, NULL );
3745 if ( FAILED( result ) ) {
3747 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") creating secondary buffer (" << dsinfo.name << ")!";
3748 errorText_ = errorStream_.str();
3753 // Get the buffer size ... might be different from what we specified.
3755 dsbcaps.dwSize = sizeof( DSBCAPS );
3756 result = buffer->GetCaps( &dsbcaps );
3757 if ( FAILED( result ) ) {
3760 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") getting buffer settings (" << dsinfo.name << ")!";
3761 errorText_ = errorStream_.str();
3765 bufferBytes = dsbcaps.dwBufferBytes;
3767 // Lock the DS buffer
3770 result = buffer->Lock( 0, bufferBytes, &audioPtr, &dataLen, NULL, NULL, 0 );
3771 if ( FAILED( result ) ) {
3774 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") locking buffer (" << dsinfo.name << ")!";
3775 errorText_ = errorStream_.str();
3779 // Zero the DS buffer
3780 ZeroMemory( audioPtr, dataLen );
3782 // Unlock the DS buffer
3783 result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );
3784 if ( FAILED( result ) ) {
3787 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") unlocking buffer (" << dsinfo.name << ")!";
3788 errorText_ = errorStream_.str();
3792 dsBufferSize = bufferBytes;
3793 ohandle = (void *) output;
3794 bhandle = (void *) buffer;
3797 if ( mode == INPUT ) {
3799 LPDIRECTSOUNDCAPTURE input;
3800 result = DirectSoundCaptureCreate( dsinfo.id, &input, NULL );
3801 if ( FAILED( result ) ) {
3802 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") opening input device (" << dsinfo.name << ")!";
3803 errorText_ = errorStream_.str();
3808 inCaps.dwSize = sizeof( inCaps );
3809 result = input->GetCaps( &inCaps );
3810 if ( FAILED( result ) ) {
3812 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") getting input capabilities (" << dsinfo.name << ")!";
3813 errorText_ = errorStream_.str();
3817 // Check channel information.
3818 if ( inCaps.dwChannels < channels + firstChannel ) {
3819 errorText_ = "RtApiDs::getDeviceInfo: the input device does not support requested input channels.";
3823 // Check format information. Use 16-bit format unless user
3825 DWORD deviceFormats;
3826 if ( channels + firstChannel == 2 ) {
3827 deviceFormats = WAVE_FORMAT_1S08 | WAVE_FORMAT_2S08 | WAVE_FORMAT_4S08 | WAVE_FORMAT_96S08;
3828 if ( format == RTAUDIO_SINT8 && inCaps.dwFormats & deviceFormats ) {
3829 waveFormat.wBitsPerSample = 8;
3830 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
3832 else { // assume 16-bit is supported
3833 waveFormat.wBitsPerSample = 16;
3834 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
3837 else { // channel == 1
3838 deviceFormats = WAVE_FORMAT_1M08 | WAVE_FORMAT_2M08 | WAVE_FORMAT_4M08 | WAVE_FORMAT_96M08;
3839 if ( format == RTAUDIO_SINT8 && inCaps.dwFormats & deviceFormats ) {
3840 waveFormat.wBitsPerSample = 8;
3841 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
3843 else { // assume 16-bit is supported
3844 waveFormat.wBitsPerSample = 16;
3845 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
3848 stream_.userFormat = format;
3850 // Update wave format structure and buffer information.
3851 waveFormat.nBlockAlign = waveFormat.nChannels * waveFormat.wBitsPerSample / 8;
3852 waveFormat.nAvgBytesPerSec = waveFormat.nSamplesPerSec * waveFormat.nBlockAlign;
3854 // Setup the secondary DS buffer description.
3855 dsBufferSize = bufferBytes;
3856 DSCBUFFERDESC bufferDescription;
3857 ZeroMemory( &bufferDescription, sizeof( DSCBUFFERDESC ) );
3858 bufferDescription.dwSize = sizeof( DSCBUFFERDESC );
3859 bufferDescription.dwFlags = 0;
3860 bufferDescription.dwReserved = 0;
3861 bufferDescription.dwBufferBytes = bufferBytes;
3862 bufferDescription.lpwfxFormat = &waveFormat;
3864 // Create the capture buffer.
3865 LPDIRECTSOUNDCAPTUREBUFFER buffer;
3866 result = input->CreateCaptureBuffer( &bufferDescription, &buffer, NULL );
3867 if ( FAILED( result ) ) {
3869 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") creating input buffer (" << dsinfo.name << ")!";
3870 errorText_ = errorStream_.str();
3874 // Lock the capture buffer
3877 result = buffer->Lock( 0, bufferBytes, &audioPtr, &dataLen, NULL, NULL, 0 );
3878 if ( FAILED( result ) ) {
3881 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") locking input buffer (" << dsinfo.name << ")!";
3882 errorText_ = errorStream_.str();
3887 ZeroMemory( audioPtr, dataLen );
3889 // Unlock the buffer
3890 result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );
3891 if ( FAILED( result ) ) {
3894 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") unlocking input buffer (" << dsinfo.name << ")!";
3895 errorText_ = errorStream_.str();
3899 dsBufferSize = bufferBytes;
3900 ohandle = (void *) input;
3901 bhandle = (void *) buffer;
3904 // Set various stream parameters
3905 DsHandle *handle = 0;
3906 stream_.nDeviceChannels[mode] = channels + firstChannel;
3907 stream_.nUserChannels[mode] = channels;
3908 stream_.bufferSize = *bufferSize;
3909 stream_.channelOffset[mode] = firstChannel;
3910 stream_.deviceInterleaved[mode] = true;
3911 if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;
3912 else stream_.userInterleaved = true;
3914 // Set flag for buffer conversion
3915 stream_.doConvertBuffer[mode] = false;
3916 if (stream_.nUserChannels[mode] != stream_.nDeviceChannels[mode])
3917 stream_.doConvertBuffer[mode] = true;
3918 if (stream_.userFormat != stream_.deviceFormat[mode])
3919 stream_.doConvertBuffer[mode] = true;
3920 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
3921 stream_.nUserChannels[mode] > 1 )
3922 stream_.doConvertBuffer[mode] = true;
3924 // Allocate necessary internal buffers
3925 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
3926 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
3927 if ( stream_.userBuffer[mode] == NULL ) {
3928 errorText_ = "RtApiDs::probeDeviceOpen: error allocating user buffer memory.";
3932 if ( stream_.doConvertBuffer[mode] ) {
3934 bool makeBuffer = true;
3935 bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
3936 if ( mode == INPUT ) {
3937 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
3938 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
3939 if ( bufferBytes <= (long) bytesOut ) makeBuffer = false;
3944 bufferBytes *= *bufferSize;
3945 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
3946 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
3947 if ( stream_.deviceBuffer == NULL ) {
3948 errorText_ = "RtApiDs::probeDeviceOpen: error allocating device buffer memory.";
3954 // Allocate our DsHandle structures for the stream.
3955 if ( stream_.apiHandle == 0 ) {
3957 handle = new DsHandle;
3959 catch ( std::bad_alloc& ) {
3960 errorText_ = "RtApiDs::probeDeviceOpen: error allocating AsioHandle memory.";
3964 // Create a manual-reset event.
3965 handle->condition = CreateEvent( NULL, // no security
3966 TRUE, // manual-reset
3967 FALSE, // non-signaled initially
3969 stream_.apiHandle = (void *) handle;
3972 handle = (DsHandle *) stream_.apiHandle;
3973 handle->id[mode] = ohandle;
3974 handle->buffer[mode] = bhandle;
3975 handle->dsBufferSize[mode] = dsBufferSize;
3976 handle->dsPointerLeadTime[mode] = dsPointerLeadTime;
3978 stream_.device[mode] = device;
3979 stream_.state = STREAM_STOPPED;
3980 if ( stream_.mode == OUTPUT && mode == INPUT )
3981 // We had already set up an output stream.
3982 stream_.mode = DUPLEX;
3984 stream_.mode = mode;
3985 stream_.nBuffers = nBuffers;
3986 stream_.sampleRate = sampleRate;
3988 // Setup the buffer conversion information structure.
3989 if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, firstChannel );
3991 // Setup the callback thread.
3993 stream_.callbackInfo.object = (void *) this;
3994 stream_.callbackInfo.isRunning = true;
3995 stream_.callbackInfo.thread = _beginthreadex( NULL, 0, &callbackHandler,
3996 &stream_.callbackInfo, 0, &threadId );
3997 if ( stream_.callbackInfo.thread == 0 ) {
3998 errorText_ = "RtApiDs::probeDeviceOpen: error creating callback thread!";
4002 // Boost DS thread priority
4003 SetThreadPriority( (HANDLE) stream_.callbackInfo.thread, THREAD_PRIORITY_HIGHEST );
4008 if ( handle->buffer[0] ) { // the object pointer can be NULL and valid
4009 LPDIRECTSOUND object = (LPDIRECTSOUND) handle->id[0];
4010 LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4011 if ( buffer ) buffer->Release();
4014 if ( handle->buffer[1] ) {
4015 LPDIRECTSOUNDCAPTURE object = (LPDIRECTSOUNDCAPTURE) handle->id[1];
4016 LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
4017 if ( buffer ) buffer->Release();
4020 CloseHandle( handle->condition );
4022 stream_.apiHandle = 0;
4025 for ( int i=0; i<2; i++ ) {
4026 if ( stream_.userBuffer[i] ) {
4027 free( stream_.userBuffer[i] );
4028 stream_.userBuffer[i] = 0;
4032 if ( stream_.deviceBuffer ) {
4033 free( stream_.deviceBuffer );
4034 stream_.deviceBuffer = 0;
// Close an open DirectSound stream: stop and join the callback thread,
// release the DirectSound playback/capture objects and their buffers,
// destroy the signalling event, and free all internal audio buffers.
// NOTE(review): several original lines are missing from this view (the
// buffer->Release()/object->Release() calls after the casts, and what is
// presumably an "if ( handle )" guard before dereferencing it); the
// comments below describe only what is visible here.
4040 void RtApiDs :: closeStream()
4042 if ( stream_.state == STREAM_CLOSED ) {
4043 errorText_ = "RtApiDs::closeStream(): no open stream to close!";
4044 error( RtError::WARNING );
// Ask the callback thread to exit its loop, wait for it, then reclaim
// the thread handle.
4048 // Stop the callback thread.
4049 stream_.callbackInfo.isRunning = false;
4050 WaitForSingleObject( (HANDLE) stream_.callbackInfo.thread, INFINITE );
4051 CloseHandle( (HANDLE) stream_.callbackInfo.thread );
4053 DsHandle *handle = (DsHandle *) stream_.apiHandle;
// Tear down the playback side (index 0), if it was opened.
4055 if ( handle->buffer[0] ) { // the object pointer can be NULL and valid
4056 LPDIRECTSOUND object = (LPDIRECTSOUND) handle->id[0];
4057 LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
// Tear down the capture side (index 1), if it was opened.
4064 if ( handle->buffer[1] ) {
4065 LPDIRECTSOUNDCAPTURE object = (LPDIRECTSOUNDCAPTURE) handle->id[1];
4066 LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
// Destroy the drain-signalling event and drop the API handle.
4073 CloseHandle( handle->condition );
4075 stream_.apiHandle = 0;
// Free the per-mode user buffers and the shared format-conversion buffer.
4078 for ( int i=0; i<2; i++ ) {
4079 if ( stream_.userBuffer[i] ) {
4080 free( stream_.userBuffer[i] );
4081 stream_.userBuffer[i] = 0;
4085 if ( stream_.deviceBuffer ) {
4086 free( stream_.deviceBuffer );
4087 stream_.deviceBuffer = 0;
// Reset the stream bookkeeping so a subsequent open starts clean.
4090 stream_.mode = UNINITIALIZED;
4091 stream_.state = STREAM_CLOSED;
// Start a previously opened (stopped) stream: reset drain/preroll state,
// start the DS playback buffer looping and/or the capture buffer, and
// mark the stream RUNNING. `result` is declared on a line not visible in
// this view; on any DS failure the error is raised after the unlock.
4094 void RtApiDs :: startStream()
4097 if ( stream_.state == STREAM_RUNNING ) {
4098 errorText_ = "RtApiDs::startStream(): the stream is already running!";
4099 error( RtError::WARNING );
4103 // Increase scheduler frequency on lesser windows (a side-effect of
4104 // increasing timer accuracy). On greater windows (Win2K or later),
4105 // this is already in effect.
4107 MUTEX_LOCK( &stream_.mutex );
4109 DsHandle *handle = (DsHandle *) stream_.apiHandle;
// Request 1 ms timer resolution (paired with timeEndPeriod in stopStream).
4111 timeBeginPeriod( 1 );
4114 memset( &statistics, 0, sizeof( statistics ) );
4115 statistics.sampleRate = stream_.sampleRate;
4116 statistics.writeDeviceBufferLeadBytes = handle->dsPointerLeadTime[0];
// The buffers are not "rolling" until callbackEvent observes both device
// pointers moving (see the DUPLEX startup dance there).
4119 buffersRolling = false;
4120 duplexPrerollBytes = 0;
4122 if ( stream_.mode == DUPLEX ) {
4123 // 0.5 seconds of silence in DUPLEX mode while the devices spin up and synchronize.
4124 duplexPrerollBytes = (int) ( 0.5 * stream_.sampleRate * formatBytes( stream_.deviceFormat[1] ) * stream_.nDeviceChannels[1] );
// Kick off looping playback on the output secondary buffer.
4128 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
4129 //statistics.outputFrameSize = formatBytes( stream_.deviceFormat[0] ) * stream_.nDeviceChannels[0];
4131 LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4132 result = buffer->Play( 0, 0, DSBPLAY_LOOPING );
4133 if ( FAILED( result ) ) {
4134 errorStream_ << "RtApiDs::startStream: error (" << getErrorString( result ) << ") starting output buffer!";
4135 errorText_ = errorStream_.str();
// Kick off looping capture on the input buffer.
4140 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
4141 //statistics.inputFrameSize = formatBytes( stream_.deviceFormat[1]) * stream_.nDeviceChannels[1];
4143 LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
4144 result = buffer->Start( DSCBSTART_LOOPING );
4145 if ( FAILED( result ) ) {
4146 errorStream_ << "RtApiDs::startStream: error (" << getErrorString( result ) << ") starting input buffer!";
4147 errorText_ = errorStream_.str();
// Clear drain state and flip to RUNNING before releasing the mutex.
4152 handle->drainCounter = 0;
4153 handle->internalDrain = false;
4154 stream_.state = STREAM_RUNNING;
4157 MUTEX_UNLOCK( &stream_.mutex );
// Raise any deferred DS error outside the lock.
4159 if ( FAILED( result ) ) error( RtError::SYSTEM_ERROR );
// Stop a running stream. For output: optionally wait for a drain to
// complete (signalled by callbackEvent via handle->condition), then Stop
// the DS buffer and zero it so stale audio is not replayed on restart.
// For input: Stop and zero the capture buffer likewise. Both pointers
// are rewound to the start of their buffers.
// NOTE(review): the error-message prefixes below say "abortStream" even
// though this is stopStream (abortStream() delegates here) — worth
// normalizing in the canonical source; strings are left untouched in
// this documentation-only pass.
4162 void RtApiDs :: stopStream()
4165 if ( stream_.state == STREAM_STOPPED ) {
4166 errorText_ = "RtApiDs::stopStream(): the stream is already stopped!";
4167 error( RtError::WARNING );
4171 MUTEX_LOCK( &stream_.mutex );
4176 DsHandle *handle = (DsHandle *) stream_.apiHandle;
4177 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
// drainCounter == 0 means no drain was requested yet: request one and
// block (mutex released) until callbackEvent signals the condition.
4178 if ( handle->drainCounter == 0 ) {
4179 handle->drainCounter = 1;
4180 MUTEX_UNLOCK( &stream_.mutex );
4181 WaitForMultipleObjects( 1, &handle->condition, FALSE, INFINITE ); // block until signaled
4182 ResetEvent( handle->condition );
4183 MUTEX_LOCK( &stream_.mutex );
4186 // Stop the buffer and clear memory
4187 LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4188 result = buffer->Stop();
4189 if ( FAILED( result ) ) {
4190 errorStream_ << "RtApiDs::abortStream: error (" << getErrorString( result ) << ") stopping output buffer!";
4191 errorText_ = errorStream_.str();
4195 // Lock the buffer and clear it so that if we start to play again,
4196 // we won't have old data playing.
4197 result = buffer->Lock( 0, handle->dsBufferSize[0], &audioPtr, &dataLen, NULL, NULL, 0 );
4198 if ( FAILED( result ) ) {
4199 errorStream_ << "RtApiDs::abortStream: error (" << getErrorString( result ) << ") locking output buffer!";
4200 errorText_ = errorStream_.str();
4204 // Zero the DS buffer
4205 ZeroMemory( audioPtr, dataLen );
4207 // Unlock the DS buffer
4208 result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );
4209 if ( FAILED( result ) ) {
4210 errorStream_ << "RtApiDs::abortStream: error (" << getErrorString( result ) << ") unlocking output buffer!";
4211 errorText_ = errorStream_.str();
4215 // If we start playing again, we must begin at beginning of buffer.
4216 handle->bufferPointer[0] = 0;
// Same stop/zero/rewind sequence for the capture side.
4219 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
4220 LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
4224 result = buffer->Stop();
4225 if ( FAILED( result ) ) {
4226 errorStream_ << "RtApiDs::abortStream: error (" << getErrorString( result ) << ") stopping input buffer!";
4227 errorText_ = errorStream_.str();
4231 // Lock the buffer and clear it so that if we start to play again,
4232 // we won't have old data playing.
4233 result = buffer->Lock( 0, handle->dsBufferSize[1], &audioPtr, &dataLen, NULL, NULL, 0 );
4234 if ( FAILED( result ) ) {
4235 errorStream_ << "RtApiDs::abortStream: error (" << getErrorString( result ) << ") locking input buffer!";
4236 errorText_ = errorStream_.str();
4240 // Zero the DS buffer
4241 ZeroMemory( audioPtr, dataLen );
4243 // Unlock the DS buffer
4244 result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );
4245 if ( FAILED( result ) ) {
4246 errorStream_ << "RtApiDs::abortStream: error (" << getErrorString( result ) << ") unlocking input buffer!";
4247 errorText_ = errorStream_.str();
4251 // If we start recording again, we must begin at beginning of buffer.
4252 handle->bufferPointer[1] = 0;
// Restore normal timer resolution (paired with timeBeginPeriod in
// startStream), mark STOPPED, and raise any deferred error.
4256 timeEndPeriod( 1 ); // revert to normal scheduler frequency on lesser windows.
4257 stream_.state = STREAM_STOPPED;
4258 MUTEX_UNLOCK( &stream_.mutex );
4259 if ( FAILED( result ) ) error( RtError::SYSTEM_ERROR );
// Abort a running stream immediately: pre-setting drainCounter to 1
// makes the subsequent stop skip the output-drain wait in stopStream().
// NOTE(review): the tail of this function (presumably the stopStream()
// call) is on lines not visible in this view.
4262 void RtApiDs :: abortStream()
4265 if ( stream_.state == STREAM_STOPPED ) {
4266 errorText_ = "RtApiDs::abortStream(): the stream is already stopped!";
4267 error( RtError::WARNING );
4271 DsHandle *handle = (DsHandle *) stream_.apiHandle;
4272 handle->drainCounter = 1;
// One iteration of the DirectSound streaming engine, called repeatedly
// by the callback thread (callbackHandler): invokes the user callback,
// then copies audio between the user/device buffers and the circular DS
// buffers, pacing itself with Sleep() based on the device play/read
// cursors. Handles the DUPLEX start-up synchronization dance and the
// 0.5 s duplex pre-roll.
// NOTE(review): this view of the file contains mojibake — several
// GetCurrentPosition calls read "¤tWritePos"/"¤tReadPos",
// which is an encoding corruption of "&currentWritePos"/"&currentReadPos"
// (the HTML entity "&curren;" swallowed "&curren"). The bytes are left
// untouched here but must be repaired in the canonical source.
4277 void RtApiDs :: callbackEvent()
// When stopped, just idle; when closed, this thread should already have
// been torn down, so merely warn.
4279 if ( stream_.state == STREAM_STOPPED ) {
4280 Sleep(50); // sleep 50 milliseconds
4284 if ( stream_.state == STREAM_CLOSED ) {
4285 errorText_ = "RtApiDs::callbackEvent(): the stream is closed ... this shouldn't happen!";
4286 error( RtError::WARNING );
4290 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
4291 DsHandle *handle = (DsHandle *) stream_.apiHandle;
4293 // Check if we were draining the stream and signal is finished.
// Drain complete once drainCounter has been incremented past
// nBuffers + 2 by the output path below; wake a blocked stopStream()
// unless the drain was initiated by the callback itself.
4294 if ( handle->drainCounter > stream_.nBuffers + 2 ) {
4295 if ( handle->internalDrain == false )
4296 SetEvent( handle->condition );
4302 MUTEX_LOCK( &stream_.mutex );
4304 // Invoke user callback to get fresh output data UNLESS we are
// ... draining (drainCounter != 0). The callback's return value becomes
// the new drainCounter: 1 = drain then stop, 2 = abort immediately.
4306 if ( handle->drainCounter == 0 ) {
4307 RtAudioCallback callback = (RtAudioCallback) info->callback;
4308 double streamTime = getStreamTime();
4309 RtAudioStreamStatus status = 0;
// Report and clear any under/overflow flags set by the code below.
4310 if ( stream_.mode != INPUT && handle->xrun[0] == true ) {
4311 status |= RTAUDIO_OUTPUT_UNDERFLOW;
4312 handle->xrun[0] = false;
4314 if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {
4315 status |= RTAUDIO_INPUT_OVERFLOW;
4316 handle->xrun[1] = false;
4318 handle->drainCounter = callback( stream_.userBuffer[0], stream_.userBuffer[1],
4319 stream_.bufferSize, streamTime, status, info->userData );
4320 if ( handle->drainCounter == 2 ) {
4321 MUTEX_UNLOCK( &stream_.mutex );
4325 else if ( handle->drainCounter == 1 )
4326 handle->internalDrain = true;
// Working storage for device cursor positions and DS Lock() regions.
4330 DWORD currentWritePos, safeWritePos;
4331 DWORD currentReadPos, safeReadPos;
4335 #ifdef GENERATE_DEBUG_LOG
4336 DWORD writeTime, readTime;
4339 LPVOID buffer1 = NULL;
4340 LPVOID buffer2 = NULL;
4341 DWORD bufferSize1 = 0;
4342 DWORD bufferSize2 = 0;
// ---- DUPLEX start-up: wait until BOTH device cursors are moving, then
// seed our write/read pointers relative to the observed safe positions.
4347 if ( stream_.mode == DUPLEX && !buffersRolling ) {
4348 assert( handle->dsBufferSize[0] == handle->dsBufferSize[1] );
4350 // It takes a while for the devices to get rolling. As a result,
4351 // there's no guarantee that the capture and write device pointers
4352 // will move in lockstep. Wait here for both devices to start
4353 // rolling, and then set our buffer pointers accordingly.
4354 // e.g. Crystal Drivers: the capture buffer starts up 5700 to 9600
4355 // bytes later than the write buffer.
4357 // Stub: a serious risk of having a pre-emptive scheduling round
4358 // take place between the two GetCurrentPosition calls... but I'm
4359 // really not sure how to solve the problem. Temporarily boost to
4360 // Realtime priority, maybe; but I'm not sure what priority the
4361 // DirectSound service threads run at. We *should* be roughly
4362 // within a ms or so of correct.
4364 LPDIRECTSOUNDBUFFER dsWriteBuffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4365 LPDIRECTSOUNDCAPTUREBUFFER dsCaptureBuffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
4367 DWORD initialWritePos, initialSafeWritePos;
4368 DWORD initialReadPos, initialSafeReadPos;
4370 result = dsWriteBuffer->GetCurrentPosition( &initialWritePos, &initialSafeWritePos );
4371 if ( FAILED( result ) ) {
4372 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current write position!";
4373 errorText_ = errorStream_.str();
4374 error( RtError::SYSTEM_ERROR );
4376 result = dsCaptureBuffer->GetCurrentPosition( &initialReadPos, &initialSafeReadPos );
4377 if ( FAILED( result ) ) {
4378 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";
4379 errorText_ = errorStream_.str();
4380 error( RtError::SYSTEM_ERROR );
// Poll (loop header not visible in this view) until both safe cursors
// have advanced from their initial readings.
// NOTE(review): mojibake — should read "&currentWritePos".
4383 result = dsWriteBuffer->GetCurrentPosition( ¤tWritePos, &safeWritePos );
4384 if ( FAILED( result ) ) {
4385 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current write position!";
4386 errorText_ = errorStream_.str();
4387 error( RtError::SYSTEM_ERROR );
// NOTE(review): mojibake — should read "&currentReadPos".
4389 result = dsCaptureBuffer->GetCurrentPosition( ¤tReadPos, &safeReadPos );
4390 if ( FAILED( result ) ) {
4391 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";
4392 errorText_ = errorStream_.str();
4393 error( RtError::SYSTEM_ERROR );
4395 if ( safeWritePos != initialSafeWritePos && safeReadPos != initialSafeReadPos ) break;
4399 assert( handle->dsBufferSize[0] == handle->dsBufferSize[1] );
4401 buffersRolling = true;
4402 handle->bufferPointer[0] = ( safeWritePos + handle->dsPointerLeadTime[0] );
4403 handle->bufferPointer[1] = safeReadPos;
// ---- OUTPUT path: convert/copy one buffer of user audio into the
// circular DS playback buffer at nextWritePos.
4406 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
4408 LPDIRECTSOUNDBUFFER dsBuffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4410 if ( handle->drainCounter > 1 ) { // write zeros to the output stream
4411 bufferBytes = stream_.bufferSize * stream_.nUserChannels[0];
4412 bufferBytes *= formatBytes( stream_.userFormat );
4413 memset( stream_.userBuffer[0], 0, bufferBytes );
4416 // Setup parameters and do buffer conversion if necessary.
4417 if ( stream_.doConvertBuffer[0] ) {
4418 buffer = stream_.deviceBuffer;
4419 convertBuffer( buffer, stream_.userBuffer[0], stream_.convertInfo[0] );
4420 bufferBytes = stream_.bufferSize * stream_.nDeviceChannels[0];
4421 bufferBytes *= formatBytes( stream_.deviceFormat[0] );
4424 buffer = stream_.userBuffer[0];
4425 bufferBytes = stream_.bufferSize * stream_.nUserChannels[0];
4426 bufferBytes *= formatBytes( stream_.userFormat );
4429 // No byte swapping necessary in DirectSound implementation.
4431 // Ahhh ... windoze. 16-bit data is signed but 8-bit data is
4432 // unsigned. So, we need to convert our signed 8-bit data here to
4434 if ( stream_.deviceFormat[0] == RTAUDIO_SINT8 )
4435 for ( int i=0; i<bufferBytes; i++ ) buffer[i] = (unsigned char) ( buffer[i] + 128 );
4437 DWORD dsBufferSize = handle->dsBufferSize[0];
4438 nextWritePos = handle->bufferPointer[0];
// Pace ourselves: sleep until the device's safe-write cursor (plus our
// lead time) has cleared the region we intend to write.
4442 // Find out where the read and "safe write" pointers are.
// NOTE(review): mojibake — should read "&currentWritePos".
4443 result = dsBuffer->GetCurrentPosition( ¤tWritePos, &safeWritePos );
4444 if ( FAILED( result ) ) {
4445 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current write position!";
4446 errorText_ = errorStream_.str();
4447 error( RtError::SYSTEM_ERROR );
4450 leadPos = safeWritePos + handle->dsPointerLeadTime[0];
4451 if ( leadPos > dsBufferSize ) leadPos -= dsBufferSize;
4452 if ( leadPos < nextWritePos ) leadPos += dsBufferSize; // unwrap offset
4453 endWrite = nextWritePos + bufferBytes;
4455 // Check whether the entire write region is behind the play pointer.
4456 if ( leadPos >= endWrite ) break;
4458 // If we are here, then we must wait until the play pointer gets
4459 // beyond the write region. The approach here is to use the
4460 // Sleep() function to suspend operation until safePos catches
4461 // up. Calculate number of milliseconds to wait as:
4462 // time = distance * (milliseconds/second) * fudgefactor /
4463 // ((bytes/sample) * (samples/second))
4464 // A "fudgefactor" less than 1 is used because it was found
4465 // that sleeping too long was MUCH worse than sleeping for
4466 // several shorter periods.
4467 double millis = ( endWrite - leadPos ) * 900.0;
4468 millis /= ( formatBytes( stream_.deviceFormat[0]) * stream_.nDeviceChannels[0] * stream_.sampleRate);
4469 if ( millis < 1.0 ) millis = 1.0;
4470 if ( millis > 50.0 ) {
4471 static int nOverruns = 0;
4474 Sleep( (DWORD) millis );
4477 //if ( statistics.writeDeviceSafeLeadBytes < dsPointerDifference( safeWritePos, currentWritePos, handle->dsBufferSize[0] ) ) {
4478 // statistics.writeDeviceSafeLeadBytes = dsPointerDifference( safeWritePos, currentWritePos, handle->dsBufferSize[0] );
// If our write region has strayed between the device cursors, we
// underran: flag the xrun and resynchronize nextWritePos.
4481 if ( dsPointerBetween( nextWritePos, safeWritePos, currentWritePos, dsBufferSize )
4482 || dsPointerBetween( endWrite, safeWritePos, currentWritePos, dsBufferSize ) ) {
4483 // We've strayed into the forbidden zone ... resync the read pointer.
4484 //++statistics.numberOfWriteUnderruns;
4485 handle->xrun[0] = true;
4486 nextWritePos = safeWritePos + handle->dsPointerLeadTime[0] - bufferBytes + dsBufferSize;
4487 while ( nextWritePos >= dsBufferSize ) nextWritePos -= dsBufferSize;
4488 handle->bufferPointer[0] = nextWritePos;
4489 endWrite = nextWritePos + bufferBytes;
4492 // Lock free space in the buffer
4493 result = dsBuffer->Lock( nextWritePos, bufferBytes, &buffer1,
4494 &bufferSize1, &buffer2, &bufferSize2, 0 );
4495 if ( FAILED( result ) ) {
4496 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") locking buffer during playback!";
4497 errorText_ = errorStream_.str();
4498 error( RtError::SYSTEM_ERROR );
4501 // Copy our buffer into the DS buffer
// The Lock may return two regions when the request wraps the circular
// buffer; buffer2 is NULL when it does not wrap.
4502 CopyMemory( buffer1, buffer, bufferSize1 );
4503 if ( buffer2 != NULL ) CopyMemory( buffer2, buffer+bufferSize1, bufferSize2 );
4505 // Update our buffer offset and unlock sound buffer
// NOTE(review): the Unlock return value is not assigned to `result`,
// so the FAILED() test below re-checks the stale Lock result — should
// be "result = dsBuffer->Unlock( ... );".
4506 dsBuffer->Unlock( buffer1, bufferSize1, buffer2, bufferSize2 );
4507 if ( FAILED( result ) ) {
4508 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") unlocking buffer during playback!";
4509 errorText_ = errorStream_.str();
4510 error( RtError::SYSTEM_ERROR );
4512 nextWritePos = ( nextWritePos + bufferSize1 + bufferSize2 ) % dsBufferSize;
4513 handle->bufferPointer[0] = nextWritePos;
// While draining, count buffers written so the check at the top can
// tell when the queued audio has actually played out.
4515 if ( handle->drainCounter ) {
4516 handle->drainCounter++;
// ---- INPUT path: copy one buffer out of the circular DS capture
// buffer at nextReadPos, then convert into the user buffer.
4521 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
4523 // Setup parameters.
4524 if ( stream_.doConvertBuffer[1] ) {
4525 buffer = stream_.deviceBuffer;
4526 bufferBytes = stream_.bufferSize * stream_.nDeviceChannels[1];
4527 bufferBytes *= formatBytes( stream_.deviceFormat[1] );
4530 buffer = stream_.userBuffer[1];
4531 bufferBytes = stream_.bufferSize * stream_.nUserChannels[1];
4532 bufferBytes *= formatBytes( stream_.userFormat );
4535 LPDIRECTSOUNDCAPTUREBUFFER dsBuffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
4536 long nextReadPos = handle->bufferPointer[1];
4537 DWORD dsBufferSize = handle->dsBufferSize[1];
4539 // Find out where the write and "safe read" pointers are.
// NOTE(review): mojibake — should read "&currentReadPos".
4540 result = dsBuffer->GetCurrentPosition( ¤tReadPos, &safeReadPos );
4541 if ( FAILED( result ) ) {
4542 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";
4543 errorText_ = errorStream_.str();
4544 error( RtError::SYSTEM_ERROR );
4547 if ( safeReadPos < (DWORD)nextReadPos ) safeReadPos += dsBufferSize; // unwrap offset
4548 DWORD endRead = nextReadPos + bufferBytes;
4550 // Handling depends on whether we are INPUT or DUPLEX.
4551 // If we're in INPUT mode then waiting is a good thing. If we're in DUPLEX mode,
4552 // then a wait here will drag the write pointers into the forbidden zone.
4554 // In DUPLEX mode, rather than wait, we will back off the read pointer until
4555 // it's in a safe position. This causes dropouts, but it seems to be the only
4556 // practical way to sync up the read and write pointers reliably, given the
4557 // the very complex relationship between phase and increment of the read and write
4560 // In order to minimize audible dropouts in DUPLEX mode, we will
4561 // provide a pre-roll period of 0.5 seconds in which we return
4562 // zeros from the read buffer while the pointers sync up.
4564 if ( stream_.mode == DUPLEX ) {
4565 if ( safeReadPos < endRead ) {
4566 if ( duplexPrerollBytes <= 0 ) {
4567 // Pre-roll time over. Be more agressive.
4568 int adjustment = endRead-safeReadPos;
4570 handle->xrun[1] = true;
4571 //++statistics.numberOfReadOverruns;
4573 // - large adjustments: we've probably run out of CPU cycles, so just resync exactly,
4574 // and perform fine adjustments later.
4575 // - small adjustments: back off by twice as much.
4576 if ( adjustment >= 2*bufferBytes )
4577 nextReadPos = safeReadPos-2*bufferBytes;
4579 nextReadPos = safeReadPos-bufferBytes-adjustment;
4581 //statistics.readDeviceSafeLeadBytes = currentReadPos-nextReadPos;
4582 //if ( statistics.readDeviceSafeLeadBytes < 0) statistics.readDeviceSafeLeadBytes += dsBufferSize;
4583 if ( nextReadPos < 0 ) nextReadPos += dsBufferSize;
4587 // In pre=roll time. Just do it.
4588 nextReadPos = safeReadPos-bufferBytes;
4589 while ( nextReadPos < 0 ) nextReadPos += dsBufferSize;
4591 endRead = nextReadPos + bufferBytes;
4594 else { // mode == INPUT
4595 while ( safeReadPos < endRead ) {
4596 // See comments for playback.
4597 double millis = (endRead - safeReadPos) * 900.0;
4598 millis /= ( formatBytes(stream_.deviceFormat[1]) * stream_.nDeviceChannels[1] * stream_.sampleRate);
4599 if ( millis < 1.0 ) millis = 1.0;
4600 Sleep( (DWORD) millis );
4602 // Wake up, find out where we are now
// NOTE(review): mojibake — should read "&currentReadPos".
4603 result = dsBuffer->GetCurrentPosition( ¤tReadPos, &safeReadPos );
4604 if ( FAILED( result ) ) {
4605 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";
4606 errorText_ = errorStream_.str();
4607 error( RtError::SYSTEM_ERROR );
4610 if ( safeReadPos < (DWORD)nextReadPos ) safeReadPos += dsBufferSize; // unwrap offset
4614 //if (statistics.readDeviceSafeLeadBytes < dsPointerDifference( currentReadPos, nextReadPos, dsBufferSize ) )
4615 // statistics.readDeviceSafeLeadBytes = dsPointerDifference( currentReadPos, nextReadPos, dsBufferSize );
4617 // Lock free space in the buffer
4618 result = dsBuffer->Lock( nextReadPos, bufferBytes, &buffer1,
4619 &bufferSize1, &buffer2, &bufferSize2, 0 );
4620 if ( FAILED( result ) ) {
4621 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") locking capture buffer!";
4622 errorText_ = errorStream_.str();
4623 error( RtError::SYSTEM_ERROR );
// During duplex pre-roll we hand the user zeros instead of the (not yet
// synchronized) captured audio.
4626 if ( duplexPrerollBytes <= 0 ) {
4627 // Copy our buffer into the DS buffer
4628 CopyMemory( buffer, buffer1, bufferSize1 );
4629 if ( buffer2 != NULL ) CopyMemory( buffer+bufferSize1, buffer2, bufferSize2 );
4632 memset( buffer, 0, bufferSize1 );
4633 if ( buffer2 != NULL ) memset( buffer + bufferSize1, 0, bufferSize2 );
4634 duplexPrerollBytes -= bufferSize1 + bufferSize2;
4637 // Update our buffer offset and unlock sound buffer
4638 nextReadPos = ( nextReadPos + bufferSize1 + bufferSize2 ) % dsBufferSize;
// NOTE(review): as with playback, the Unlock result is not assigned to
// `result`; the FAILED() test below re-checks the stale Lock result.
4639 dsBuffer->Unlock( buffer1, bufferSize1, buffer2, bufferSize2 );
4640 if ( FAILED( result ) ) {
4641 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") unlocking capture buffer!";
4642 errorText_ = errorStream_.str();
4643 error( RtError::SYSTEM_ERROR );
4645 handle->bufferPointer[1] = nextReadPos;
4647 // No byte swapping necessary in DirectSound implementation.
4649 // If necessary, convert 8-bit data from unsigned to signed.
4650 if ( stream_.deviceFormat[1] == RTAUDIO_SINT8 )
4651 for ( int j=0; j<bufferBytes; j++ ) buffer[j] = (signed char) ( buffer[j] - 128 );
4653 // Do buffer conversion if necessary.
4654 if ( stream_.doConvertBuffer[1] )
4655 convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
// Optional diagnostic trace of cursor positions per tick.
4657 #ifdef GENERATE_DEBUG_LOG
4658 if ( currentDebugLogEntry < debugLog.size() )
4660 TTickRecord &r = debugLog[currentDebugLogEntry++];
4661 r.currentReadPointer = currentReadPos;
4662 r.safeReadPointer = safeReadPos;
4663 r.currentWritePointer = currentWritePos;
4664 r.safeWritePointer = safeWritePos;
4665 r.readTime = readTime;
4666 r.writeTime = writeTime;
4667 r.nextReadPointer = handles[1].bufferPointer;
4668 r.nextWritePointer = handles[0].bufferPointer;
4673 MUTEX_UNLOCK( &stream_.mutex );
// Advance the stream's logical clock by one buffer.
4675 RtApi::tickStreamTime();
4678 // Definitions for utility functions and callbacks
4679 // specific to the DirectSound implementation.
// Entry point for the DS callback thread created by probeDeviceOpen via
// _beginthreadex. Spins calling callbackEvent() until closeStream()
// clears info->isRunning. (The thread-exit lines are not visible in
// this view.)
4681 extern "C" unsigned __stdcall callbackHandler( void *ptr )
4683 CallbackInfo *info = (CallbackInfo *) ptr;
4684 RtApiDs *object = (RtApiDs *) info->object;
4685 bool* isRunning = &info->isRunning;
4687 while ( *isRunning == true ) {
4688 object->callbackEvent();
// Convert a Windows TCHAR string (char or wchar_t depending on the
// UNICODE build setting) to a std::string of one-byte chars. In UNICODE
// builds each wide character is narrowed by value — lossy for non-ASCII
// device names, as the original comment admits. (The declaration of `s`
// and the return are on lines not visible in this view.)
4697 std::string convertTChar( LPCTSTR name )
4701 #if defined( UNICODE ) || defined( _UNICODE )
4702 // Yes, this conversion doesn't make sense for two-byte characters
4703 // but RtAudio is currently written to return an std::string of
4704 // one-byte chars for the device name.
4705 for ( unsigned int i=0; i<wcslen( name ); i++ )
4706 s.push_back( name[i] );
4708 s.append( std::string( name ) );
// DirectSound device-enumeration callback (passed to
// DirectSound(Capture)EnumerateW/A). Probes each enumerated device for
// usable capabilities, counts the valid ones, and — depending on the
// flags in the EnumInfo context — records a name or stops at the
// default (NULL-GUID) device. Returning TRUE continues enumeration,
// FALSE stops it.
4714 static BOOL CALLBACK deviceQueryCallback( LPGUID lpguid,
4715 LPCTSTR description,
4719 EnumInfo *info = (EnumInfo *) lpContext;
// Capture-side probe: a device counts only if it reports at least one
// channel and one supported format.
4722 if ( info->isInput == true ) {
4724 LPDIRECTSOUNDCAPTURE object;
4726 hr = DirectSoundCaptureCreate( lpguid, &object, NULL );
4727 if ( hr != DS_OK ) return TRUE;
4729 caps.dwSize = sizeof(caps);
4730 hr = object->GetCaps( &caps );
4731 if ( hr == DS_OK ) {
4732 if ( caps.dwChannels > 0 && caps.dwFormats > 0 )
// Playback-side probe: a device counts if its primary buffer supports
// mono or stereo.
4739 LPDIRECTSOUND object;
4740 hr = DirectSoundCreate( lpguid, &object, NULL );
4741 if ( hr != DS_OK ) return TRUE;
4743 caps.dwSize = sizeof(caps);
4744 hr = object->GetCaps( &caps );
4745 if ( hr == DS_OK ) {
4746 if ( caps.dwFlags & DSCAPS_PRIMARYMONO || caps.dwFlags & DSCAPS_PRIMARYSTEREO )
// The NULL GUID is always the default device; stop here when only the
// default was requested.
4752 if ( info->getDefault && lpguid == NULL ) return FALSE;
// When searching for the device at a given index, capture its
// description once the running counter passes that index.
4754 if ( info->findIndex && info->counter > info->index ) {
4756 info->name = convertTChar( description );
// Map a DirectSound HRESULT error code to a short human-readable
// message used when composing errorStream_ text. The strings are
// literals, so the returned pointer is valid for the program lifetime.
// NOTE(review): returning non-const char* to string literals is
// deprecated C++ — should be const char* in the canonical source.
4763 static char* getErrorString( int code )
4767 case DSERR_ALLOCATED:
4768 return "Already allocated";
4770 case DSERR_CONTROLUNAVAIL:
4771 return "Control unavailable";
4773 case DSERR_INVALIDPARAM:
4774 return "Invalid parameter";
4776 case DSERR_INVALIDCALL:
4777 return "Invalid call";
4780 return "Generic error";
4782 case DSERR_PRIOLEVELNEEDED:
4783 return "Priority level needed";
4785 case DSERR_OUTOFMEMORY:
4786 return "Out of memory";
4788 case DSERR_BADFORMAT:
4789 return "The sample rate or the channel format is not supported";
4791 case DSERR_UNSUPPORTED:
4792 return "Not supported";
4794 case DSERR_NODRIVER:
4797 case DSERR_ALREADYINITIALIZED:
4798 return "Already initialized";
4800 case DSERR_NOAGGREGATION:
4801 return "No aggregation";
4803 case DSERR_BUFFERLOST:
4804 return "Buffer lost";
4806 case DSERR_OTHERAPPHASPRIO:
4807 return "Another application already has priority";
4809 case DSERR_UNINITIALIZED:
4810 return "Uninitialized";
// Fallback for any code not handled above.
4813 return "DirectSound unknown error";
4816 //******************** End of __WINDOWS_DS__ *********************//
4820 #if defined(__LINUX_ALSA__)
4822 #include <alsa/asoundlib.h>
4825 // A structure to hold various information related to the ALSA API
// (implementation). The struct's opening declaration is on lines not
// visible in this view; the visible members are the two PCM handles
// (playback = index 0, capture = index 1) and a constructor initializer
// that clears the synchronized flag and both xrun flags.
4828 snd_pcm_t *handles[2];
4833 :synchronized(false) { xrun[0] = false; xrun[1] = false; }
4836 extern "C" void *alsaCallbackHandler( void * ptr );
// Default constructor — all state is initialized by the RtApi base
// class; the ALSA-specific handle is created lazily in probeDeviceOpen.
4838 RtApiAlsa :: RtApiAlsa()
4840 // Nothing to do here.
// Destructor — make sure any still-open stream is shut down so ALSA
// handles and internal buffers are released.
4843 RtApiAlsa :: ~RtApiAlsa()
4845 if ( stream_.state != STREAM_CLOSED ) closeStream();
// Count the PCM devices available through ALSA by walking every sound
// card (snd_card_next) and, within each card, every PCM device
// (snd_ctl_pcm_next_device). Failures to open or query a card are
// reported as warnings and that card is skipped. (The per-device
// counter increment and the final return are on lines not visible in
// this view.)
4848 unsigned int RtApiAlsa :: getDeviceCount( void )
4850 unsigned nDevices = 0;
4851 int result, subdevice, card;
4855 // Count cards and devices
// Start at card -1 so snd_card_next yields the first card.
4857 snd_card_next( &card );
4858 while ( card >= 0 ) {
4859 sprintf( name, "hw:%d", card );
4860 result = snd_ctl_open( &handle, name, 0 );
4862 errorStream_ << "RtApiAlsa::getDeviceCount: control open, card = " << card << ", " << snd_strerror( result ) << ".";
4863 errorText_ = errorStream_.str();
4864 error( RtError::WARNING );
// Enumerate this card's PCM devices until -1 signals the end.
4869 result = snd_ctl_pcm_next_device( handle, &subdevice );
4871 errorStream_ << "RtApiAlsa::getDeviceCount: control next device, card = " << card << ", " << snd_strerror( result ) << ".";
4872 errorText_ = errorStream_.str();
4873 error( RtError::WARNING );
4876 if ( subdevice < 0 )
4881 snd_ctl_close( handle );
4882 snd_card_next( &card );
4888 RtAudio::DeviceInfo RtApiAlsa :: getDeviceInfo( unsigned int device )
4890 RtAudio::DeviceInfo info;
4891 info.probed = false;
4893 unsigned nDevices = 0;
4894 int result, subdevice, card;
4898 // Count cards and devices
4900 snd_card_next( &card );
4901 while ( card >= 0 ) {
4902 sprintf( name, "hw:%d", card );
4903 result = snd_ctl_open( &chandle, name, SND_CTL_NONBLOCK );
4905 errorStream_ << "RtApiAlsa::getDeviceInfo: control open, card = " << card << ", " << snd_strerror( result ) << ".";
4906 errorText_ = errorStream_.str();
4907 error( RtError::WARNING );
4912 result = snd_ctl_pcm_next_device( chandle, &subdevice );
4914 errorStream_ << "RtApiAlsa::getDeviceInfo: control next device, card = " << card << ", " << snd_strerror( result ) << ".";
4915 errorText_ = errorStream_.str();
4916 error( RtError::WARNING );
4919 if ( subdevice < 0 ) break;
4920 if ( nDevices == device ) {
4921 sprintf( name, "hw:%d,%d", card, subdevice );
4927 snd_ctl_close( chandle );
4928 snd_card_next( &card );
4931 if ( nDevices == 0 ) {
4932 errorText_ = "RtApiAlsa::getDeviceInfo: no devices found!";
4933 error( RtError::INVALID_USE );
4936 if ( device >= nDevices ) {
4937 errorText_ = "RtApiAlsa::getDeviceInfo: device ID is invalid!";
4938 error( RtError::INVALID_USE );
4943 // If a stream is already open, we cannot probe the stream devices.
4944 // Thus, use the saved results.
4945 if ( stream_.state != STREAM_CLOSED &&
4946 ( stream_.device[0] == device || stream_.device[1] == device ) ) {
4947 if ( device >= devices_.size() ) {
4948 errorText_ = "RtApiAlsa::getDeviceInfo: device ID was not present before stream was opened.";
4949 error( RtError::WARNING );
4952 return devices_[ device ];
4955 int openMode = SND_PCM_ASYNC;
4956 snd_pcm_stream_t stream;
4957 snd_pcm_info_t *pcminfo;
4958 snd_pcm_info_alloca( &pcminfo );
4960 snd_pcm_hw_params_t *params;
4961 snd_pcm_hw_params_alloca( ¶ms );
4963 // First try for playback
4964 stream = SND_PCM_STREAM_PLAYBACK;
4965 snd_pcm_info_set_device( pcminfo, subdevice );
4966 snd_pcm_info_set_subdevice( pcminfo, 0 );
4967 snd_pcm_info_set_stream( pcminfo, stream );
4969 result = snd_ctl_pcm_info( chandle, pcminfo );
4971 // Device probably doesn't support playback.
4975 result = snd_pcm_open( &phandle, name, stream, openMode | SND_PCM_NONBLOCK );
4977 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_open error for device (" << name << "), " << snd_strerror( result ) << ".";
4978 errorText_ = errorStream_.str();
4979 error( RtError::WARNING );
4983 // The device is open ... fill the parameter structure.
4984 result = snd_pcm_hw_params_any( phandle, params );
4986 snd_pcm_close( phandle );
4987 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_hw_params error for device (" << name << "), " << snd_strerror( result ) << ".";
4988 errorText_ = errorStream_.str();
4989 error( RtError::WARNING );
4993 // Get output channel information.
4995 result = snd_pcm_hw_params_get_channels_max( params, &value );
4997 snd_pcm_close( phandle );
4998 errorStream_ << "RtApiAlsa::getDeviceInfo: error getting device (" << name << ") output channels, " << snd_strerror( result ) << ".";
4999 errorText_ = errorStream_.str();
5000 error( RtError::WARNING );
5003 info.outputChannels = value;
5004 snd_pcm_close( phandle );
5007 // Now try for capture
5008 stream = SND_PCM_STREAM_CAPTURE;
5009 snd_pcm_info_set_stream( pcminfo, stream );
5011 result = snd_ctl_pcm_info( chandle, pcminfo );
5012 snd_ctl_close( chandle );
5014 // Device probably doesn't support capture.
5015 if ( info.outputChannels == 0 ) return info;
5016 goto probeParameters;
5019 result = snd_pcm_open( &phandle, name, stream, openMode | SND_PCM_NONBLOCK);
5021 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_open error for device (" << name << "), " << snd_strerror( result ) << ".";
5022 errorText_ = errorStream_.str();
5023 error( RtError::WARNING );
5024 if ( info.outputChannels == 0 ) return info;
5025 goto probeParameters;
5028 // The device is open ... fill the parameter structure.
5029 result = snd_pcm_hw_params_any( phandle, params );
5031 snd_pcm_close( phandle );
5032 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_hw_params error for device (" << name << "), " << snd_strerror( result ) << ".";
5033 errorText_ = errorStream_.str();
5034 error( RtError::WARNING );
5035 if ( info.outputChannels == 0 ) return info;
5036 goto probeParameters;
5039 result = snd_pcm_hw_params_get_channels_max( params, &value );
5041 snd_pcm_close( phandle );
5042 errorStream_ << "RtApiAlsa::getDeviceInfo: error getting device (" << name << ") input channels, " << snd_strerror( result ) << ".";
5043 errorText_ = errorStream_.str();
5044 error( RtError::WARNING );
5045 if ( info.outputChannels == 0 ) return info;
5046 goto probeParameters;
5048 info.inputChannels = value;
5049 snd_pcm_close( phandle );
5051 // If device opens for both playback and capture, we determine the channels.
5052 if ( info.outputChannels > 0 && info.inputChannels > 0 )
5053 info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
5055 // ALSA doesn't provide default devices so we'll use the first available one.
5056 if ( device == 0 && info.outputChannels > 0 )
5057 info.isDefaultOutput = true;
5058 if ( device == 0 && info.inputChannels > 0 )
5059 info.isDefaultInput = true;
5062 // At this point, we just need to figure out the supported data
5063 // formats and sample rates. We'll proceed by opening the device in
5064 // the direction with the maximum number of channels, or playback if
5065 // they are equal. This might limit our sample rate options, but so
5068 if ( info.outputChannels >= info.inputChannels )
5069 stream = SND_PCM_STREAM_PLAYBACK;
5071 stream = SND_PCM_STREAM_CAPTURE;
5072 snd_pcm_info_set_stream( pcminfo, stream );
5074 result = snd_pcm_open( &phandle, name, stream, openMode | SND_PCM_NONBLOCK);
5076 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_open error for device (" << name << "), " << snd_strerror( result ) << ".";
5077 errorText_ = errorStream_.str();
5078 error( RtError::WARNING );
5082 // The device is open ... fill the parameter structure.
5083 result = snd_pcm_hw_params_any( phandle, params );
5085 snd_pcm_close( phandle );
5086 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_hw_params error for device (" << name << "), " << snd_strerror( result ) << ".";
5087 errorText_ = errorStream_.str();
5088 error( RtError::WARNING );
5092 // Test our discrete set of sample rate values.
5093 info.sampleRates.clear();
5094 for ( unsigned int i=0; i<MAX_SAMPLE_RATES; i++ ) {
5095 if ( snd_pcm_hw_params_test_rate( phandle, params, SAMPLE_RATES[i], 0 ) == 0 )
5096 info.sampleRates.push_back( SAMPLE_RATES[i] );
5098 if ( info.sampleRates.size() == 0 ) {
5099 snd_pcm_close( phandle );
5100 errorStream_ << "RtApiAlsa::getDeviceInfo: no supported sample rates found for device (" << name << ").";
5101 errorText_ = errorStream_.str();
5102 error( RtError::WARNING );
5106 // Probe the supported data formats ... we don't care about endian-ness just yet
5107 snd_pcm_format_t format;
5108 info.nativeFormats = 0;
5109 format = SND_PCM_FORMAT_S8;
5110 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5111 info.nativeFormats |= RTAUDIO_SINT8;
5112 format = SND_PCM_FORMAT_S16;
5113 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5114 info.nativeFormats |= RTAUDIO_SINT16;
5115 format = SND_PCM_FORMAT_S24;
5116 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5117 info.nativeFormats |= RTAUDIO_SINT24;
5118 format = SND_PCM_FORMAT_S32;
5119 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5120 info.nativeFormats |= RTAUDIO_SINT32;
5121 format = SND_PCM_FORMAT_FLOAT;
5122 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5123 info.nativeFormats |= RTAUDIO_FLOAT32;
5124 format = SND_PCM_FORMAT_FLOAT64;
5125 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5126 info.nativeFormats |= RTAUDIO_FLOAT64;
5128 // Check that we have at least one supported format
5129 if ( info.nativeFormats == 0 ) {
5130 errorStream_ << "RtApiAlsa::getDeviceInfo: pcm device (" << name << ") data format not supported by RtAudio.";
5131 errorText_ = errorStream_.str();
5132 error( RtError::WARNING );
5136 // Get the device name
5138 result = snd_card_get_name( card, &cardname );
5140 sprintf( name, "hw:%s,%d", cardname, subdevice );
5143 // That's all ... close the device and return
5144 snd_pcm_close( phandle );
5149 void RtApiAlsa :: saveDeviceInfo( void )
5153 unsigned int nDevices = getDeviceCount();
5154 devices_.resize( nDevices );
5155 for ( unsigned int i=0; i<nDevices; i++ )
5156 devices_[i] = getDeviceInfo( i );
5159 bool RtApiAlsa :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
5160 unsigned int firstChannel, unsigned int sampleRate,
5161 RtAudioFormat format, unsigned int *bufferSize,
5162 RtAudio::StreamOptions *options )
5165 #if defined(__RTAUDIO_DEBUG__)
5167 snd_output_stdio_attach(&out, stderr, 0);
5170 // I'm not using the "plug" interface ... too much inconsistent behavior.
5172 unsigned nDevices = 0;
5173 int result, subdevice, card;
5177 // Count cards and devices
5179 snd_card_next( &card );
5180 while ( card >= 0 ) {
5181 sprintf( name, "hw:%d", card );
5182 result = snd_ctl_open( &chandle, name, SND_CTL_NONBLOCK );
5184 errorStream_ << "RtApiAlsa::probeDeviceOpen: control open, card = " << card << ", " << snd_strerror( result ) << ".";
5185 errorText_ = errorStream_.str();
5190 result = snd_ctl_pcm_next_device( chandle, &subdevice );
5191 if ( result < 0 ) break;
5192 if ( subdevice < 0 ) break;
5193 if ( nDevices == device ) {
5194 sprintf( name, "hw:%d,%d", card, subdevice );
5195 snd_ctl_close( chandle );
5200 snd_ctl_close( chandle );
5201 snd_card_next( &card );
5204 if ( nDevices == 0 ) {
5205 // This should not happen because a check is made before this function is called.
5206 errorText_ = "RtApiAlsa::probeDeviceOpen: no devices found!";
5210 if ( device >= nDevices ) {
5211 // This should not happen because a check is made before this function is called.
5212 errorText_ = "RtApiAlsa::probeDeviceOpen: device ID is invalid!";
5218 // The getDeviceInfo() function will not work for a device that is
5219 // already open. Thus, we'll probe the system before opening a
5220 // stream and save the results for use by getDeviceInfo().
5221 if ( mode == OUTPUT || ( mode == INPUT && stream_.mode != OUTPUT ) ) // only do once
5222 this->saveDeviceInfo();
5224 snd_pcm_stream_t stream;
5225 if ( mode == OUTPUT )
5226 stream = SND_PCM_STREAM_PLAYBACK;
5228 stream = SND_PCM_STREAM_CAPTURE;
5231 int openMode = SND_PCM_ASYNC;
5232 result = snd_pcm_open( &phandle, name, stream, openMode );
5234 if ( mode == OUTPUT )
5235 errorStream_ << "RtApiAlsa::probeDeviceOpen: pcm device (" << name << ") won't open for output.";
5237 errorStream_ << "RtApiAlsa::probeDeviceOpen: pcm device (" << name << ") won't open for input.";
5238 errorText_ = errorStream_.str();
5242 // Fill the parameter structure.
5243 snd_pcm_hw_params_t *hw_params;
5244 snd_pcm_hw_params_alloca( &hw_params );
5245 result = snd_pcm_hw_params_any( phandle, hw_params );
5247 snd_pcm_close( phandle );
5248 errorStream_ << "RtApiAlsa::probeDeviceOpen: error getting pcm device (" << name << ") parameters, " << snd_strerror( result ) << ".";
5249 errorText_ = errorStream_.str();
5253 #if defined(__RTAUDIO_DEBUG__)
5254 fprintf( stderr, "\nRtApiAlsa: dump hardware params just after device open:\n\n" );
5255 snd_pcm_hw_params_dump( hw_params, out );
5258 // Set access ... check user preference.
5259 if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) {
5260 stream_.userInterleaved = false;
5261 result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_NONINTERLEAVED );
5263 result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_INTERLEAVED );
5264 stream_.deviceInterleaved[mode] = true;
5267 stream_.deviceInterleaved[mode] = false;
5270 stream_.userInterleaved = true;
5271 result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_INTERLEAVED );
5273 result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_NONINTERLEAVED );
5274 stream_.deviceInterleaved[mode] = false;
5277 stream_.deviceInterleaved[mode] = true;
5281 snd_pcm_close( phandle );
5282 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting pcm device (" << name << ") access, " << snd_strerror( result ) << ".";
5283 errorText_ = errorStream_.str();
5287 // Determine how to set the device format.
5288 stream_.userFormat = format;
5289 snd_pcm_format_t deviceFormat = SND_PCM_FORMAT_UNKNOWN;
5291 if ( format == RTAUDIO_SINT8 )
5292 deviceFormat = SND_PCM_FORMAT_S8;
5293 else if ( format == RTAUDIO_SINT16 )
5294 deviceFormat = SND_PCM_FORMAT_S16;
5295 else if ( format == RTAUDIO_SINT24 )
5296 deviceFormat = SND_PCM_FORMAT_S24;
5297 else if ( format == RTAUDIO_SINT32 )
5298 deviceFormat = SND_PCM_FORMAT_S32;
5299 else if ( format == RTAUDIO_FLOAT32 )
5300 deviceFormat = SND_PCM_FORMAT_FLOAT;
5301 else if ( format == RTAUDIO_FLOAT64 )
5302 deviceFormat = SND_PCM_FORMAT_FLOAT64;
5304 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat) == 0) {
5305 stream_.deviceFormat[mode] = format;
5309 // The user requested format is not natively supported by the device.
5310 deviceFormat = SND_PCM_FORMAT_FLOAT64;
5311 if ( snd_pcm_hw_params_test_format( phandle, hw_params, deviceFormat ) == 0 ) {
5312 stream_.deviceFormat[mode] = RTAUDIO_FLOAT64;
5316 deviceFormat = SND_PCM_FORMAT_FLOAT;
5317 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
5318 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
5322 deviceFormat = SND_PCM_FORMAT_S32;
5323 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
5324 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
5328 deviceFormat = SND_PCM_FORMAT_S24;
5329 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
5330 stream_.deviceFormat[mode] = RTAUDIO_SINT24;
5334 deviceFormat = SND_PCM_FORMAT_S16;
5335 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
5336 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
5340 deviceFormat = SND_PCM_FORMAT_S8;
5341 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
5342 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
5346 // If we get here, no supported format was found.
5347 errorStream_ << "RtApiAlsa::probeDeviceOpen: pcm device " << device << " data format not supported by RtAudio.";
5348 errorText_ = errorStream_.str();
5352 result = snd_pcm_hw_params_set_format( phandle, hw_params, deviceFormat );
5354 snd_pcm_close( phandle );
5355 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting pcm device (" << name << ") data format, " << snd_strerror( result ) << ".";
5356 errorText_ = errorStream_.str();
5360 // Determine whether byte-swaping is necessary.
5361 stream_.doByteSwap[mode] = false;
5362 if ( deviceFormat != SND_PCM_FORMAT_S8 ) {
5363 result = snd_pcm_format_cpu_endian( deviceFormat );
5365 stream_.doByteSwap[mode] = true;
5366 else if (result < 0) {
5367 snd_pcm_close( phandle );
5368 errorStream_ << "RtApiAlsa::probeDeviceOpen: error getting pcm device (" << name << ") endian-ness, " << snd_strerror( result ) << ".";
5369 errorText_ = errorStream_.str();
5374 // Set the sample rate.
5375 result = snd_pcm_hw_params_set_rate_near( phandle, hw_params, (unsigned int*) &sampleRate, 0 );
5377 snd_pcm_close( phandle );
5378 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting sample rate on device (" << name << "), " << snd_strerror( result ) << ".";
5379 errorText_ = errorStream_.str();
5383 // Determine the number of channels for this device. We support a possible
5384 // minimum device channel number > than the value requested by the user.
5385 stream_.nUserChannels[mode] = channels;
5387 result = snd_pcm_hw_params_get_channels_max( hw_params, &value );
5388 unsigned int deviceChannels = value;
5389 if ( result < 0 || deviceChannels < channels + firstChannel ) {
5390 snd_pcm_close( phandle );
5391 errorStream_ << "RtApiAlsa::probeDeviceOpen: requested channel parameters not supported by device (" << name << "), " << snd_strerror( result ) << ".";
5392 errorText_ = errorStream_.str();
5396 result = snd_pcm_hw_params_get_channels_min( hw_params, &value );
5398 snd_pcm_close( phandle );
5399 errorStream_ << "RtApiAlsa::probeDeviceOpen: error getting minimum channels for device (" << name << "), " << snd_strerror( result ) << ".";
5400 errorText_ = errorStream_.str();
5403 deviceChannels = value;
5404 if ( deviceChannels < channels + firstChannel ) deviceChannels = channels + firstChannel;
5405 stream_.nDeviceChannels[mode] = deviceChannels;
5407 // Set the device channels.
5408 result = snd_pcm_hw_params_set_channels( phandle, hw_params, deviceChannels );
5410 snd_pcm_close( phandle );
5411 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting channels for device (" << name << "), " << snd_strerror( result ) << ".";
5412 errorText_ = errorStream_.str();
5416 // Set the buffer number, which in ALSA is referred to as the "period".
5418 unsigned int periods = 0;
5419 if ( options ) periods = options->numberOfBuffers;
5420 if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) periods = 2;
5421 // Even though the hardware might allow 1 buffer, it won't work reliably.
5422 if ( periods < 2 ) periods = 2;
5423 result = snd_pcm_hw_params_set_periods_near( phandle, hw_params, &periods, &dir );
5425 snd_pcm_close( phandle );
5426 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting periods for device (" << name << "), " << snd_strerror( result ) << ".";
5427 errorText_ = errorStream_.str();
5431 // Set the buffer (or period) size.
5432 snd_pcm_uframes_t periodSize = *bufferSize;
5433 result = snd_pcm_hw_params_set_period_size_near( phandle, hw_params, &periodSize, &dir );
5435 snd_pcm_close( phandle );
5436 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting period size for device (" << name << "), " << snd_strerror( result ) << ".";
5437 errorText_ = errorStream_.str();
5440 *bufferSize = periodSize;
5442 // If attempting to setup a duplex stream, the bufferSize parameter
5443 // MUST be the same in both directions!
5444 if ( stream_.mode == OUTPUT && mode == INPUT && *bufferSize != stream_.bufferSize ) {
5445 errorStream_ << "RtApiAlsa::probeDeviceOpen: system error setting buffer size for duplex stream on device (" << name << ").";
5446 errorText_ = errorStream_.str();
5450 stream_.bufferSize = *bufferSize;
5452 // Install the hardware configuration
5453 result = snd_pcm_hw_params( phandle, hw_params );
5455 snd_pcm_close( phandle );
5456 errorStream_ << "RtApiAlsa::probeDeviceOpen: error installing hardware configuration on device (" << name << "), " << snd_strerror( result ) << ".";
5457 errorText_ = errorStream_.str();
5461 #if defined(__RTAUDIO_DEBUG__)
5462 fprintf(stderr, "\nRtApiAlsa: dump hardware params after installation:\n\n");
5463 snd_pcm_hw_params_dump( hw_params, out );
5466 // Set the software configuration to fill buffers with zeros and prevent device stopping on xruns.
5467 snd_pcm_sw_params_t *sw_params = NULL;
5468 snd_pcm_sw_params_alloca( &sw_params );
5469 snd_pcm_sw_params_current( phandle, sw_params );
5470 snd_pcm_sw_params_set_start_threshold( phandle, sw_params, *bufferSize );
5471 snd_pcm_sw_params_set_stop_threshold( phandle, sw_params, 0x7fffffff );
5472 snd_pcm_sw_params_set_silence_threshold( phandle, sw_params, 0 );
5473 snd_pcm_sw_params_set_silence_size( phandle, sw_params, INT_MAX );
5474 result = snd_pcm_sw_params( phandle, sw_params );
5476 snd_pcm_close( phandle );
5477 errorStream_ << "RtApiAlsa::probeDeviceOpen: error installing software configuration on device (" << name << "), " << snd_strerror( result ) << ".";
5478 errorText_ = errorStream_.str();
5482 #if defined(__RTAUDIO_DEBUG__)
5483 fprintf(stderr, "\nRtApiAlsa: dump software params after installation:\n\n");
5484 snd_pcm_sw_params_dump( sw_params, out );
5487 // Set flags for buffer conversion
5488 stream_.doConvertBuffer[mode] = false;
5489 if ( stream_.userFormat != stream_.deviceFormat[mode] )
5490 stream_.doConvertBuffer[mode] = true;
5491 if ( stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode] )
5492 stream_.doConvertBuffer[mode] = true;
5493 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
5494 stream_.nUserChannels[mode] > 1 )
5495 stream_.doConvertBuffer[mode] = true;
5497 // Allocate the ApiHandle if necessary and then save.
5498 AlsaHandle *apiInfo = 0;
5499 if ( stream_.apiHandle == 0 ) {
5501 apiInfo = (AlsaHandle *) new AlsaHandle;
5503 catch ( std::bad_alloc& ) {
5504 errorText_ = "RtApiAlsa::probeDeviceOpen: error allocating AlsaHandle memory.";
5507 stream_.apiHandle = (void *) apiInfo;
5508 apiInfo->handles[0] = 0;
5509 apiInfo->handles[1] = 0;
5512 apiInfo = (AlsaHandle *) stream_.apiHandle;
5514 apiInfo->handles[mode] = phandle;
5516 // Allocate necessary internal buffers.
5517 unsigned long bufferBytes;
5518 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
5519 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
5520 if ( stream_.userBuffer[mode] == NULL ) {
5521 errorText_ = "RtApiAlsa::probeDeviceOpen: error allocating user buffer memory.";
5525 if ( stream_.doConvertBuffer[mode] ) {
5527 bool makeBuffer = true;
5528 bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
5529 if ( mode == INPUT ) {
5530 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
5531 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
5532 if ( bufferBytes <= bytesOut ) makeBuffer = false;
5537 bufferBytes *= *bufferSize;
5538 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
5539 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
5540 if ( stream_.deviceBuffer == NULL ) {
5541 errorText_ = "RtApiAlsa::probeDeviceOpen: error allocating device buffer memory.";
5547 stream_.sampleRate = sampleRate;
5548 stream_.nBuffers = periods;
5549 stream_.device[mode] = device;
5550 stream_.state = STREAM_STOPPED;
5552 // Setup the buffer conversion information structure.
5553 if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, firstChannel );
5555 // Setup thread if necessary.
5556 if ( stream_.mode == OUTPUT && mode == INPUT ) {
5557 // We had already set up an output stream.
5558 stream_.mode = DUPLEX;
5559 // Link the streams if possible.
5560 apiInfo->synchronized = false;
5561 if ( snd_pcm_link( apiInfo->handles[0], apiInfo->handles[1] ) == 0 )
5562 apiInfo->synchronized = true;
5564 errorText_ = "RtApiAlsa::probeDeviceOpen: unable to synchronize input and output devices.";
5565 error( RtError::WARNING );
5569 stream_.mode = mode;
5571 // Setup callback thread.
5572 stream_.callbackInfo.object = (void *) this;
5574 // Set the thread attributes for joinable and realtime scheduling
5575 // priority. The higher priority will only take affect if the
5576 // program is run as root or suid.
5577 pthread_attr_t attr;
5578 pthread_attr_init( &attr );
5579 pthread_attr_setdetachstate( &attr, PTHREAD_CREATE_JOINABLE );
5580 #ifdef SCHED_RR // Undefined with some OSes (eg: NetBSD 1.6.x with GNU Pthread)
5581 pthread_attr_setschedpolicy( &attr, SCHED_RR );
5583 pthread_attr_setschedpolicy( &attr, SCHED_OTHER );
5586 stream_.callbackInfo.isRunning = true;
5587 result = pthread_create( &stream_.callbackInfo.thread, &attr, alsaCallbackHandler, &stream_.callbackInfo );
5588 pthread_attr_destroy( &attr );
5590 stream_.callbackInfo.isRunning = false;
5591 errorText_ = "RtApiAlsa::error creating callback thread!";
5600 if ( apiInfo->handles[0] ) snd_pcm_close( apiInfo->handles[0] );
5601 if ( apiInfo->handles[1] ) snd_pcm_close( apiInfo->handles[1] );
5603 stream_.apiHandle = 0;
5606 for ( int i=0; i<2; i++ ) {
5607 if ( stream_.userBuffer[i] ) {
5608 free( stream_.userBuffer[i] );
5609 stream_.userBuffer[i] = 0;
5613 if ( stream_.deviceBuffer ) {
5614 free( stream_.deviceBuffer );
5615 stream_.deviceBuffer = 0;
5621 void RtApiAlsa :: closeStream()
5623 if ( stream_.state == STREAM_CLOSED ) {
5624 errorText_ = "RtApiAlsa::closeStream(): no open stream to close!";
5625 error( RtError::WARNING );
5629 stream_.callbackInfo.isRunning = false;
5630 pthread_join( stream_.callbackInfo.thread, NULL );
5632 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
5633 if ( stream_.state == STREAM_RUNNING ) {
5634 stream_.state = STREAM_STOPPED;
5635 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX )
5636 snd_pcm_drop( apiInfo->handles[0] );
5637 if ( stream_.mode == INPUT || stream_.mode == DUPLEX )
5638 snd_pcm_drop( apiInfo->handles[1] );
5642 if ( apiInfo->handles[0] ) snd_pcm_close( apiInfo->handles[0] );
5643 if ( apiInfo->handles[1] ) snd_pcm_close( apiInfo->handles[1] );
5645 stream_.apiHandle = 0;
5648 for ( int i=0; i<2; i++ ) {
5649 if ( stream_.userBuffer[i] ) {
5650 free( stream_.userBuffer[i] );
5651 stream_.userBuffer[i] = 0;
5655 if ( stream_.deviceBuffer ) {
5656 free( stream_.deviceBuffer );
5657 stream_.deviceBuffer = 0;
5660 stream_.mode = UNINITIALIZED;
5661 stream_.state = STREAM_CLOSED;
5664 void RtApiAlsa :: startStream()
5666 // This method calls snd_pcm_prepare if the device isn't already in that state.
5669 if ( stream_.state == STREAM_RUNNING ) {
5670 errorText_ = "RtApiAlsa::startStream(): the stream is already running!";
5671 error( RtError::WARNING );
5675 MUTEX_LOCK( &stream_.mutex );
5678 snd_pcm_state_t state;
5679 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
5680 snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles;
5681 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
5682 state = snd_pcm_state( handle[0] );
5683 if ( state != SND_PCM_STATE_PREPARED ) {
5684 result = snd_pcm_prepare( handle[0] );
5686 errorStream_ << "RtApiAlsa::startStream: error preparing output pcm device, " << snd_strerror( result ) << ".";
5687 errorText_ = errorStream_.str();
5693 if ( ( stream_.mode == INPUT || stream_.mode == DUPLEX ) && !apiInfo->synchronized ) {
5694 state = snd_pcm_state( handle[1] );
5695 if ( state != SND_PCM_STATE_PREPARED ) {
5696 result = snd_pcm_prepare( handle[1] );
5698 errorStream_ << "RtApiAlsa::startStream: error preparing input pcm device, " << snd_strerror( result ) << ".";
5699 errorText_ = errorStream_.str();
5705 stream_.state = STREAM_RUNNING;
5708 MUTEX_UNLOCK( &stream_.mutex );
5710 if ( result >= 0 ) return;
5711 error( RtError::SYSTEM_ERROR );
5714 void RtApiAlsa :: stopStream()
5717 if ( stream_.state == STREAM_STOPPED ) {
5718 errorText_ = "RtApiAlsa::stopStream(): the stream is already stopped!";
5719 error( RtError::WARNING );
5723 // Change the state before the lock to improve shutdown response
5724 // when using a callback.
5725 stream_.state = STREAM_STOPPED;
5726 MUTEX_LOCK( &stream_.mutex );
5729 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
5730 snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles;
5731 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
5732 if ( apiInfo->synchronized )
5733 result = snd_pcm_drop( handle[0] );
5735 result = snd_pcm_drain( handle[0] );
5737 errorStream_ << "RtApiAlsa::stopStream: error draining output pcm device, " << snd_strerror( result ) << ".";
5738 errorText_ = errorStream_.str();
5743 if ( ( stream_.mode == INPUT || stream_.mode == DUPLEX ) && !apiInfo->synchronized ) {
5744 result = snd_pcm_drop( handle[1] );
5746 errorStream_ << "RtApiAlsa::stopStream: error stopping input pcm device, " << snd_strerror( result ) << ".";
5747 errorText_ = errorStream_.str();
5753 MUTEX_UNLOCK( &stream_.mutex );
5755 if ( result >= 0 ) return;
5756 error( RtError::SYSTEM_ERROR );
5759 void RtApiAlsa :: abortStream()
5762 if ( stream_.state == STREAM_STOPPED ) {
5763 errorText_ = "RtApiAlsa::abortStream(): the stream is already stopped!";
5764 error( RtError::WARNING );
5768 // Change the state before the lock to improve shutdown response
5769 // when using a callback.
5770 stream_.state = STREAM_STOPPED;
5771 MUTEX_LOCK( &stream_.mutex );
5774 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
5775 snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles;
5776 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
5777 result = snd_pcm_drop( handle[0] );
5779 errorStream_ << "RtApiAlsa::abortStream: error aborting output pcm device, " << snd_strerror( result ) << ".";
5780 errorText_ = errorStream_.str();
5785 if ( ( stream_.mode == INPUT || stream_.mode == DUPLEX ) && !apiInfo->synchronized ) {
5786 result = snd_pcm_drop( handle[1] );
5788 errorStream_ << "RtApiAlsa::abortStream: error aborting input pcm device, " << snd_strerror( result ) << ".";
5789 errorText_ = errorStream_.str();
5795 MUTEX_UNLOCK( &stream_.mutex );
5797 stream_.state = STREAM_STOPPED;
5798 if ( result >= 0 ) return;
5799 error( RtError::SYSTEM_ERROR );
5802 void RtApiAlsa :: callbackEvent()
5804 if ( stream_.state == STREAM_STOPPED ) {
5805 if ( stream_.callbackInfo.isRunning ) usleep( 50000 ); // sleep 50 milliseconds
5809 if ( stream_.state == STREAM_CLOSED ) {
5810 errorText_ = "RtApiAlsa::callbackEvent(): the stream is closed ... this shouldn't happen!";
5811 error( RtError::WARNING );
5815 int doStopStream = 0;
5816 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
5817 RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;
5818 double streamTime = getStreamTime();
5819 RtAudioStreamStatus status = 0;
5820 if ( stream_.mode != INPUT && apiInfo->xrun[0] == true ) {
5821 status |= RTAUDIO_OUTPUT_UNDERFLOW;
5822 apiInfo->xrun[0] = false;
5824 if ( stream_.mode != OUTPUT && apiInfo->xrun[1] == true ) {
5825 status |= RTAUDIO_INPUT_OVERFLOW;
5826 apiInfo->xrun[1] = false;
5828 doStopStream = callback( stream_.userBuffer[0], stream_.userBuffer[1],
5829 stream_.bufferSize, streamTime, status, stream_.callbackInfo.userData );
5831 MUTEX_LOCK( &stream_.mutex );
5833 // The state might change while waiting on a mutex.
5834 if ( stream_.state == STREAM_STOPPED ) goto unlock;
5840 snd_pcm_sframes_t frames;
5841 RtAudioFormat format;
5842 handle = (snd_pcm_t **) apiInfo->handles;
5844 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
5846 // Setup parameters.
5847 if ( stream_.doConvertBuffer[1] ) {
5848 buffer = stream_.deviceBuffer;
5849 channels = stream_.nDeviceChannels[1];
5850 format = stream_.deviceFormat[1];
5853 buffer = stream_.userBuffer[1];
5854 channels = stream_.nUserChannels[1];
5855 format = stream_.userFormat;
5858 // Read samples from device in interleaved/non-interleaved format.
5859 if ( stream_.deviceInterleaved[1] )
5860 result = snd_pcm_readi( handle[1], buffer, stream_.bufferSize );
5862 void *bufs[channels];
5863 size_t offset = stream_.bufferSize * formatBytes( format );
5864 for ( int i=0; i<channels; i++ )
5865 bufs[i] = (void *) (buffer + (i * offset));
5866 result = snd_pcm_readn( handle[1], bufs, stream_.bufferSize );
5869 if ( result < (int) stream_.bufferSize ) {
5870 // Either an error or underrun occured.
5871 if ( result == -EPIPE ) {
5872 snd_pcm_state_t state = snd_pcm_state( handle[1] );
5873 if ( state == SND_PCM_STATE_XRUN ) {
5874 apiInfo->xrun[1] = true;
5875 result = snd_pcm_prepare( handle[1] );
5877 errorStream_ << "RtApiAlsa::callbackEvent: error preparing device after overrun, " << snd_strerror( result ) << ".";
5878 errorText_ = errorStream_.str();
5882 errorStream_ << "RtApiAlsa::callbackEvent: error, current state is " << snd_pcm_state_name( state ) << ", " << snd_strerror( result ) << ".";
5883 errorText_ = errorStream_.str();
5887 errorStream_ << "RtApiAlsa::callbackEvent: audio read error, " << snd_strerror( result ) << ".";
5888 errorText_ = errorStream_.str();
5890 error( RtError::WARNING );
5894 // Do byte swapping if necessary.
5895 if ( stream_.doByteSwap[1] )
5896 byteSwapBuffer( buffer, stream_.bufferSize * channels, format );
5898 // Do buffer conversion if necessary.
5899 if ( stream_.doConvertBuffer[1] )
5900 convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
5902 // Check stream latency
5903 result = snd_pcm_delay( handle[1], &frames );
5904 if ( result == 0 && frames > 0 ) stream_.latency[1] = frames;
5907 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
5909 // Setup parameters and do buffer conversion if necessary.
5910 if ( stream_.doConvertBuffer[0] ) {
5911 buffer = stream_.deviceBuffer;
5912 convertBuffer( buffer, stream_.userBuffer[0], stream_.convertInfo[0] );
5913 channels = stream_.nDeviceChannels[0];
5914 format = stream_.deviceFormat[0];
5917 buffer = stream_.userBuffer[0];
5918 channels = stream_.nUserChannels[0];
5919 format = stream_.userFormat;
5922 // Do byte swapping if necessary.
5923 if ( stream_.doByteSwap[0] )
5924 byteSwapBuffer(buffer, stream_.bufferSize * channels, format);
5926 // Write samples to device in interleaved/non-interleaved format.
5927 if ( stream_.deviceInterleaved[0] )
5928 result = snd_pcm_writei( handle[0], buffer, stream_.bufferSize );
5930 void *bufs[channels];
5931 size_t offset = stream_.bufferSize * formatBytes( format );
5932 for ( int i=0; i<channels; i++ )
5933 bufs[i] = (void *) (buffer + (i * offset));
5934 result = snd_pcm_writen( handle[0], bufs, stream_.bufferSize );
5937 if ( result < (int) stream_.bufferSize ) {
5938     // Either an error or underrun occurred.
5939 if ( result == -EPIPE ) {
5940 snd_pcm_state_t state = snd_pcm_state( handle[0] );
5941 if ( state == SND_PCM_STATE_XRUN ) {
5942 apiInfo->xrun[0] = true;
5943 result = snd_pcm_prepare( handle[0] );
5945 errorStream_ << "RtApiAlsa::callbackEvent: error preparing device after underrun, " << snd_strerror( result ) << ".";
5946 errorText_ = errorStream_.str();
5950 errorStream_ << "RtApiAlsa::callbackEvent: error, current state is " << snd_pcm_state_name( state ) << ", " << snd_strerror( result ) << ".";
5951 errorText_ = errorStream_.str();
5955 errorStream_ << "RtApiAlsa::callbackEvent: audio write error, " << snd_strerror( result ) << ".";
5956 errorText_ = errorStream_.str();
5958 error( RtError::WARNING );
5962 // Check stream latency
5963 result = snd_pcm_delay( handle[0], &frames );
5964 if ( result == 0 && frames > 0 ) stream_.latency[0] = frames;
5968 MUTEX_UNLOCK( &stream_.mutex );
5970 RtApi::tickStreamTime();
5971 if ( doStopStream == 1 ) this->stopStream();
5972 else if ( doStopStream == 2 ) this->abortStream();
5975 extern "C" void *alsaCallbackHandler( void *ptr )
5977 CallbackInfo *info = (CallbackInfo *) ptr;
5978 RtApiAlsa *object = (RtApiAlsa *) info->object;
5979 bool *isRunning = &info->isRunning;
5982 // Set a higher scheduler priority (P.J. Leonard)
5983 struct sched_param param;
5984 int min = sched_get_priority_min( SCHED_RR );
5985 int max = sched_get_priority_max( SCHED_RR );
5986 param.sched_priority = min + ( max - min ) / 2; // Is this the best number?
5987 sched_setscheduler( 0, SCHED_RR, ¶m );
5990 while ( *isRunning == true ) {
5991 pthread_testcancel();
5992 object->callbackEvent();
5995 pthread_exit( NULL );
5998 //******************** End of __LINUX_ALSA__ *********************//
6002 #if defined(__LINUX_OSS__)
6005 #include <sys/ioctl.h>
6008 #include "soundcard.h"
6012 extern "C" void *ossCallbackHandler(void * ptr);
// A structure to hold various information related to the OSS API
// implementation.
struct OssHandle {
  int id[2];       // device ids (index 0 = playback, 1 = capture)
  bool xrun[2];    // over/underrun flags, reported back via the user callback
  bool triggered;  // duplex trigger state (set once output has been primed)

  // Restored the member declarations (xrun, triggered) that the mangled
  // listing dropped — the constructor below initializes both.
  OssHandle()
    :triggered(false) { id[0] = 0; id[1] = 0; xrun[0] = false; xrun[1] = false; }
};
6025 RtApiOss :: RtApiOss()
6027 // Nothing to do here.
6030 RtApiOss :: ~RtApiOss()
6032 if ( stream_.state != STREAM_CLOSED ) closeStream();
6035 unsigned int RtApiOss :: getDeviceCount( void )
6037 int mixerfd = open( "/dev/mixer", O_RDWR, 0 );
6038 if ( mixerfd == -1 ) {
6039 errorText_ = "RtApiOss::getDeviceCount: error opening '/dev/mixer'.";
6040 error( RtError::WARNING );
6044 oss_sysinfo sysinfo;
6045 if ( ioctl( mixerfd, SNDCTL_SYSINFO, &sysinfo ) == -1 ) {
6047 errorText_ = "RtApiOss::getDeviceCount: error getting sysinfo, OSS version >= 4.0 is required.";
6048 error( RtError::WARNING );
6053 return sysinfo.numaudios;
6056 RtAudio::DeviceInfo RtApiOss :: getDeviceInfo( unsigned int device )
6058 RtAudio::DeviceInfo info;
6059 info.probed = false;
6061 int mixerfd = open( "/dev/mixer", O_RDWR, 0 );
6062 if ( mixerfd == -1 ) {
6063 errorText_ = "RtApiOss::getDeviceInfo: error opening '/dev/mixer'.";
6064 error( RtError::WARNING );
6068 oss_sysinfo sysinfo;
6069 int result = ioctl( mixerfd, SNDCTL_SYSINFO, &sysinfo );
6070 if ( result == -1 ) {
6072 errorText_ = "RtApiOss::getDeviceInfo: error getting sysinfo, OSS version >= 4.0 is required.";
6073 error( RtError::WARNING );
6077 unsigned nDevices = sysinfo.numaudios;
6078 if ( nDevices == 0 ) {
6080 errorText_ = "RtApiOss::getDeviceInfo: no devices found!";
6081 error( RtError::INVALID_USE );
6084 if ( device >= nDevices ) {
6086 errorText_ = "RtApiOss::getDeviceInfo: device ID is invalid!";
6087 error( RtError::INVALID_USE );
6090 oss_audioinfo ainfo;
6092 result = ioctl( mixerfd, SNDCTL_AUDIOINFO, &ainfo );
6094 if ( result == -1 ) {
6095 errorStream_ << "RtApiOss::getDeviceInfo: error getting device (" << ainfo.name << ") info.";
6096 errorText_ = errorStream_.str();
6097 error( RtError::WARNING );
6102 if ( ainfo.caps & PCM_CAP_OUTPUT ) info.outputChannels = ainfo.max_channels;
6103 if ( ainfo.caps & PCM_CAP_INPUT ) info.inputChannels = ainfo.max_channels;
6104 if ( ainfo.caps & PCM_CAP_DUPLEX ) {
6105 if ( info.outputChannels > 0 && info.inputChannels > 0 && ainfo.caps & PCM_CAP_DUPLEX )
6106 info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
6109 // Probe data formats ... do for input
6110 unsigned long mask = ainfo.iformats;
6111 if ( mask & AFMT_S16_LE || mask & AFMT_S16_BE )
6112 info.nativeFormats |= RTAUDIO_SINT16;
6113 if ( mask & AFMT_S8 )
6114 info.nativeFormats |= RTAUDIO_SINT8;
6115 if ( mask & AFMT_S32_LE || mask & AFMT_S32_BE )
6116 info.nativeFormats |= RTAUDIO_SINT32;
6117 if ( mask & AFMT_FLOAT )
6118 info.nativeFormats |= RTAUDIO_FLOAT32;
6119 if ( mask & AFMT_S24_LE || mask & AFMT_S24_BE )
6120 info.nativeFormats |= RTAUDIO_SINT24;
6122 // Check that we have at least one supported format
6123 if ( info.nativeFormats == 0 ) {
6124 errorStream_ << "RtApiOss::getDeviceInfo: device (" << ainfo.name << ") data format not supported by RtAudio.";
6125 errorText_ = errorStream_.str();
6126 error( RtError::WARNING );
6130 // Probe the supported sample rates.
6131 info.sampleRates.clear();
6132 if ( ainfo.nrates ) {
6133 for ( unsigned int i=0; i<ainfo.nrates; i++ ) {
6134 for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {
6135 if ( ainfo.rates[i] == SAMPLE_RATES[k] ) {
6136 info.sampleRates.push_back( SAMPLE_RATES[k] );
6143 // Check min and max rate values;
6144 for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {
6145 if ( ainfo.min_rate <= (int) SAMPLE_RATES[k] && ainfo.max_rate >= (int) SAMPLE_RATES[k] )
6146 info.sampleRates.push_back( SAMPLE_RATES[k] );
6150 if ( info.sampleRates.size() == 0 ) {
6151 errorStream_ << "RtApiOss::getDeviceInfo: no supported sample rates found for device (" << ainfo.name << ").";
6152 errorText_ = errorStream_.str();
6153 error( RtError::WARNING );
6157 info.name = ainfo.name;
// Open the OSS device for the requested direction/channels/rate/format and
// populate all stream_ bookkeeping: device format negotiation, fragment
// (buffer) sizing, conversion flags, internal buffers, and the callback
// thread.  Returns SUCCESS/FAILURE; failure details go through errorText_.
// NOTE(review): this listing has elided lines (the embedded line numbers
// jump, e.g. missing braces/else/return lines) — comments below describe
// only the code that is visible here.
6164 bool RtApiOss :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
6165 unsigned int firstChannel, unsigned int sampleRate,
6166 RtAudioFormat format, unsigned int *bufferSize,
6167 RtAudio::StreamOptions *options )
// Re-validate the device via the mixer, as in getDeviceCount/getDeviceInfo.
6169 int mixerfd = open( "/dev/mixer", O_RDWR, 0 );
6170 if ( mixerfd == -1 ) {
6171 errorText_ = "RtApiOss::probeDeviceOpen: error opening '/dev/mixer'.";
6175 oss_sysinfo sysinfo;
6176 int result = ioctl( mixerfd, SNDCTL_SYSINFO, &sysinfo );
6177 if ( result == -1 ) {
6179 errorText_ = "RtApiOss::probeDeviceOpen: error getting sysinfo, OSS version >= 4.0 is required.";
6183 unsigned nDevices = sysinfo.numaudios;
6184 if ( nDevices == 0 ) {
6185 // This should not happen because a check is made before this function is called.
6187 errorText_ = "RtApiOss::probeDeviceOpen: no devices found!";
6191 if ( device >= nDevices ) {
6192 // This should not happen because a check is made before this function is called.
6194 errorText_ = "RtApiOss::probeDeviceOpen: device ID is invalid!";
6198 oss_audioinfo ainfo;
6200 result = ioctl( mixerfd, SNDCTL_AUDIOINFO, &ainfo );
6202 if ( result == -1 ) {
// NOTE(review): message says "getDeviceInfo" but we are in probeDeviceOpen —
// looks like a copy/paste slip in the original; left as-is (runtime string).
6203 errorStream_ << "RtApiOss::getDeviceInfo: error getting device (" << ainfo.name << ") info.";
6204 errorText_ = errorStream_.str();
6208 // Check if device supports input or output
6209 if ( ( mode == OUTPUT && !( ainfo.caps & PCM_CAP_OUTPUT ) ) ||
6210 ( mode == INPUT && !( ainfo.caps & PCM_CAP_INPUT ) ) ) {
6211 if ( mode == OUTPUT )
6212 errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support output.";
6214 errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support input.";
6215 errorText_ = errorStream_.str();
// Decide the open() flags.  For INPUT on a device already opened for OUTPUT,
// OSS requires closing and reopening the same node in duplex (O_RDWR) mode.
6220 OssHandle *handle = (OssHandle *) stream_.apiHandle;
6221 if ( mode == OUTPUT )
6223 else { // mode == INPUT
6224 if (stream_.mode == OUTPUT && stream_.device[0] == device) {
6225 // We just set the same device for playback ... close and reopen for duplex (OSS only).
6226 close( handle->id[0] );
6228 if ( !( ainfo.caps & PCM_CAP_DUPLEX ) ) {
6229 errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support duplex mode.";
6230 errorText_ = errorStream_.str();
6233 // Check that the number previously set channels is the same.
6234 if ( stream_.nUserChannels[0] != channels ) {
6235 errorStream_ << "RtApiOss::probeDeviceOpen: input/output channels must be equal for OSS duplex device (" << ainfo.name << ").";
6236 errorText_ = errorStream_.str();
6245 // Set exclusive access if specified.
6246 if ( options && options->flags & RTAUDIO_HOG_DEVICE ) flags |= O_EXCL;
6248 // Try to open the device.
6250 fd = open( ainfo.devnode, flags, 0 );
6252 if ( errno == EBUSY )
6253 errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") is busy.";
6255 errorStream_ << "RtApiOss::probeDeviceOpen: error opening device (" << ainfo.name << ").";
6256 errorText_ = errorStream_.str();
6260 // For duplex operation, specifically set this mode (this doesn't seem to work).
// NOTE(review): "flags | O_RDWR" is always true (bitwise OR used where a
// test was presumably intended) — confirm against the canonical source.
6262 if ( flags | O_RDWR ) {
6263 result = ioctl( fd, SNDCTL_DSP_SETDUPLEX, NULL );
6264 if ( result == -1) {
6265 errorStream_ << "RtApiOss::probeDeviceOpen: error setting duplex mode for device (" << ainfo.name << ").";
6266 errorText_ = errorStream_.str();
6272 // Check the device channel support.
6273 stream_.nUserChannels[mode] = channels;
6274 if ( ainfo.max_channels < (int)(channels + firstChannel) ) {
6276 errorStream_ << "RtApiOss::probeDeviceOpen: the device (" << ainfo.name << ") does not support requested channel parameters.";
6277 errorText_ = errorStream_.str();
6281 // Set the number of channels.
6282 int deviceChannels = channels + firstChannel;
6283 result = ioctl( fd, SNDCTL_DSP_CHANNELS, &deviceChannels );
6284 if ( result == -1 || deviceChannels < (int)(channels + firstChannel) ) {
6286 errorStream_ << "RtApiOss::probeDeviceOpen: error setting channel parameters on device (" << ainfo.name << ").";
6287 errorText_ = errorStream_.str();
6290 stream_.nDeviceChannels[mode] = deviceChannels;
6292 // Get the data format mask
6294 result = ioctl( fd, SNDCTL_DSP_GETFMTS, &mask );
6295 if ( result == -1 ) {
6297 errorStream_ << "RtApiOss::probeDeviceOpen: error getting device (" << ainfo.name << ") data formats.";
6298 errorText_ = errorStream_.str();
6302 // Determine how to set the device format.
// Try the user's requested format first; _NE = native endian, _OE = opposite
// endian (the latter sets doByteSwap so buffers are swapped in the callback).
6303 stream_.userFormat = format;
6304 int deviceFormat = -1;
6305 stream_.doByteSwap[mode] = false;
6306 if ( format == RTAUDIO_SINT8 ) {
6307 if ( mask & AFMT_S8 ) {
6308 deviceFormat = AFMT_S8;
6309 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
6312 else if ( format == RTAUDIO_SINT16 ) {
6313 if ( mask & AFMT_S16_NE ) {
6314 deviceFormat = AFMT_S16_NE;
6315 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
6317 else if ( mask & AFMT_S16_OE ) {
6318 deviceFormat = AFMT_S16_OE;
6319 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
6320 stream_.doByteSwap[mode] = true;
6323 else if ( format == RTAUDIO_SINT24 ) {
6324 if ( mask & AFMT_S24_NE ) {
6325 deviceFormat = AFMT_S24_NE;
6326 stream_.deviceFormat[mode] = RTAUDIO_SINT24;
6328 else if ( mask & AFMT_S24_OE ) {
6329 deviceFormat = AFMT_S24_OE;
6330 stream_.deviceFormat[mode] = RTAUDIO_SINT24;
6331 stream_.doByteSwap[mode] = true;
6334 else if ( format == RTAUDIO_SINT32 ) {
6335 if ( mask & AFMT_S32_NE ) {
6336 deviceFormat = AFMT_S32_NE;
6337 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
6339 else if ( mask & AFMT_S32_OE ) {
6340 deviceFormat = AFMT_S32_OE;
6341 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
6342 stream_.doByteSwap[mode] = true;
// Fallback chain: requested format unavailable, so pick the "best" format
// the device does support and let convertBuffer() translate.
6346 if ( deviceFormat == -1 ) {
6347 // The user requested format is not natively supported by the device.
6348 if ( mask & AFMT_S16_NE ) {
6349 deviceFormat = AFMT_S16_NE;
6350 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
6352 else if ( mask & AFMT_S32_NE ) {
6353 deviceFormat = AFMT_S32_NE;
6354 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
6356 else if ( mask & AFMT_S24_NE ) {
6357 deviceFormat = AFMT_S24_NE;
6358 stream_.deviceFormat[mode] = RTAUDIO_SINT24;
6360 else if ( mask & AFMT_S16_OE ) {
6361 deviceFormat = AFMT_S16_OE;
6362 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
6363 stream_.doByteSwap[mode] = true;
6365 else if ( mask & AFMT_S32_OE ) {
6366 deviceFormat = AFMT_S32_OE;
6367 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
6368 stream_.doByteSwap[mode] = true;
6370 else if ( mask & AFMT_S24_OE ) {
6371 deviceFormat = AFMT_S24_OE;
6372 stream_.deviceFormat[mode] = RTAUDIO_SINT24;
6373 stream_.doByteSwap[mode] = true;
6375 else if ( mask & AFMT_S8) {
6376 deviceFormat = AFMT_S8;
6377 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
6381 if ( stream_.deviceFormat[mode] == 0 ) {
6382 // This really shouldn't happen ...
6384 errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") data format not supported by RtAudio.";
6385 errorText_ = errorStream_.str();
6389 // Set the data format.
6390 int temp = deviceFormat;
6391 result = ioctl( fd, SNDCTL_DSP_SETFMT, &deviceFormat );
6392 if ( result == -1 || deviceFormat != temp ) {
6394 errorStream_ << "RtApiOss::probeDeviceOpen: error setting data format on device (" << ainfo.name << ").";
6395 errorText_ = errorStream_.str();
6399 // Attempt to set the buffer size. According to OSS, the minimum
6400 // number of buffers is two. The supposed minimum buffer size is 16
6401 // bytes, so that will be our lower bound. The argument to this
6402 // call is in the form 0xMMMMSSSS (hex), where the buffer size (in
6403 // bytes) is given as 2^SSSS and the number of buffers as 2^MMMM.
6404 // We'll check the actual value used near the end of the setup
6406 int ossBufferBytes = *bufferSize * formatBytes( stream_.deviceFormat[mode] ) * deviceChannels;
6407 if ( ossBufferBytes < 16 ) ossBufferBytes = 16;
6409 if ( options ) buffers = options->numberOfBuffers;
6410 if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) buffers = 2;
6411 if ( buffers < 2 ) buffers = 3;
6412 temp = ((int) buffers << 16) + (int)( log10( (double)ossBufferBytes ) / log10( 2.0 ) );
6413 result = ioctl( fd, SNDCTL_DSP_SETFRAGMENT, &temp );
6414 if ( result == -1 ) {
6416 errorStream_ << "RtApiOss::probeDeviceOpen: error setting buffer size on device (" << ainfo.name << ").";
6417 errorText_ = errorStream_.str();
6420 stream_.nBuffers = buffers;
6422 // Save buffer size (in sample frames).
6423 *bufferSize = ossBufferBytes / ( formatBytes(stream_.deviceFormat[mode]) * deviceChannels );
6424 stream_.bufferSize = *bufferSize;
6426 // Set the sample rate.
6427 int srate = sampleRate;
6428 result = ioctl( fd, SNDCTL_DSP_SPEED, &srate );
6429 if ( result == -1 ) {
6431 errorStream_ << "RtApiOss::probeDeviceOpen: error setting sample rate (" << sampleRate << ") on device (" << ainfo.name << ").";
6432 errorText_ = errorStream_.str();
6436 // Verify the sample rate setup worked.
// NOTE(review): sampleRate is unsigned, so (srate - sampleRate) is computed
// in unsigned arithmetic before abs() — worth a cast to int; confirm.
6437 if ( abs( srate - sampleRate ) > 100 ) {
6439 errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support sample rate (" << sampleRate << ").";
6440 errorText_ = errorStream_.str();
6443 stream_.sampleRate = sampleRate;
6445 if ( mode == INPUT && stream_.mode == OUTPUT && stream_.device[0] == device) {
6446 // We're doing duplex setup here.
6447 stream_.deviceFormat[0] = stream_.deviceFormat[1];
6448 stream_.nDeviceChannels[0] = deviceChannels;
6451 // Set interleaving parameters.
6452 stream_.userInterleaved = true;
6453 stream_.deviceInterleaved[mode] = true;
6454 if ( options && options->flags & RTAUDIO_NONINTERLEAVED )
6455 stream_.userInterleaved = false;
6457 // Set flags for buffer conversion
6458 stream_.doConvertBuffer[mode] = false;
6459 if ( stream_.userFormat != stream_.deviceFormat[mode] )
6460 stream_.doConvertBuffer[mode] = true;
6461 if ( stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode] )
6462 stream_.doConvertBuffer[mode] = true;
6463 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
6464 stream_.nUserChannels[mode] > 1 )
6465 stream_.doConvertBuffer[mode] = true;
6467 // Allocate the stream handles if necessary and then save.
6468 if ( stream_.apiHandle == 0 ) {
6470 handle = new OssHandle;
6472 catch ( std::bad_alloc& ) {
6473 errorText_ = "RtApiOss::probeDeviceOpen: error allocating OssHandle memory.";
6477 stream_.apiHandle = (void *) handle;
6480 handle = (OssHandle *) stream_.apiHandle;
6482 handle->id[mode] = fd;
6484 // Allocate necessary internal buffers.
6485 unsigned long bufferBytes;
6486 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
6487 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
6488 if ( stream_.userBuffer[mode] == NULL ) {
6489 errorText_ = "RtApiOss::probeDeviceOpen: error allocating user buffer memory.";
6493 if ( stream_.doConvertBuffer[mode] ) {
// Reuse an existing (output) device buffer for input when it is big enough.
6495 bool makeBuffer = true;
6496 bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
6497 if ( mode == INPUT ) {
6498 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
6499 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
6500 if ( bufferBytes <= bytesOut ) makeBuffer = false;
6505 bufferBytes *= *bufferSize;
6506 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
6507 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
6508 if ( stream_.deviceBuffer == NULL ) {
6509 errorText_ = "RtApiOss::probeDeviceOpen: error allocating device buffer memory.";
6515 stream_.device[mode] = device;
6516 stream_.state = STREAM_STOPPED;
6518 // Setup the buffer conversion information structure.
6519 if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, firstChannel );
6521 // Setup thread if necessary.
6522 if ( stream_.mode == OUTPUT && mode == INPUT ) {
6523 // We had already set up an output stream.
6524 stream_.mode = DUPLEX;
6525 if ( stream_.device[0] == device ) handle->id[0] = fd;
6528 stream_.mode = mode;
6530 // Setup callback thread.
6531 stream_.callbackInfo.object = (void *) this;
6533 // Set the thread attributes for joinable and realtime scheduling
6534 // priority. The higher priority will only take effect if the
6535 // program is run as root or suid.
6536 pthread_attr_t attr;
6537 pthread_attr_init( &attr );
6538 pthread_attr_setdetachstate( &attr, PTHREAD_CREATE_JOINABLE );
6539 #ifdef SCHED_RR // Undefined with some OSes (eg: NetBSD 1.6.x with GNU Pthread)
6540 pthread_attr_setschedpolicy( &attr, SCHED_RR );
6542 pthread_attr_setschedpolicy( &attr, SCHED_OTHER );
6545 stream_.callbackInfo.isRunning = true;
6546 result = pthread_create( &stream_.callbackInfo.thread, &attr, ossCallbackHandler, &stream_.callbackInfo );
6547 pthread_attr_destroy( &attr );
6549 stream_.callbackInfo.isRunning = false;
6550 errorText_ = "RtApiOss::error creating callback thread!";
// Error-exit cleanup: close descriptors, free the handle and all buffers.
6559 if ( handle->id[0] ) close( handle->id[0] );
6560 if ( handle->id[1] ) close( handle->id[1] );
6562 stream_.apiHandle = 0;
6565 for ( int i=0; i<2; i++ ) {
6566 if ( stream_.userBuffer[i] ) {
6567 free( stream_.userBuffer[i] );
6568 stream_.userBuffer[i] = 0;
6572 if ( stream_.deviceBuffer ) {
6573 free( stream_.deviceBuffer );
6574 stream_.deviceBuffer = 0;
// Tear down an open stream: stop the callback thread, halt any running
// device i/o, close the file descriptors, and free all buffers/handles.
// NOTE(review): this listing has elided lines (numbering gaps); comments
// describe only the visible code.
6580 void RtApiOss :: closeStream()
6582 if ( stream_.state == STREAM_CLOSED ) {
6583 errorText_ = "RtApiOss::closeStream(): no open stream to close!";
6584 error( RtError::WARNING );
// Signal the callback thread to exit and wait for it to finish.
6588 stream_.callbackInfo.isRunning = false;
6589 pthread_join( stream_.callbackInfo.thread, NULL );
6591 OssHandle *handle = (OssHandle *) stream_.apiHandle;
6592 if ( stream_.state == STREAM_RUNNING ) {
// Halt whichever direction(s) are active before closing.
6593 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX )
6594 ioctl( handle->id[0], SNDCTL_DSP_HALT, 0 );
6596 ioctl( handle->id[1], SNDCTL_DSP_HALT, 0 );
6597 stream_.state = STREAM_STOPPED;
6601 if ( handle->id[0] ) close( handle->id[0] );
6602 if ( handle->id[1] ) close( handle->id[1] );
6604 stream_.apiHandle = 0;
// Release the per-direction user buffers and the shared device buffer.
6607 for ( int i=0; i<2; i++ ) {
6608 if ( stream_.userBuffer[i] ) {
6609 free( stream_.userBuffer[i] );
6610 stream_.userBuffer[i] = 0;
6614 if ( stream_.deviceBuffer ) {
6615 free( stream_.deviceBuffer );
6616 stream_.deviceBuffer = 0;
// Mark the stream fully closed.
6619 stream_.mode = UNINITIALIZED;
6620 stream_.state = STREAM_CLOSED;
// Mark the stream running.  OSS needs no explicit start trigger here: the
// device begins playing/recording as soon as samples are written/read by
// the callback thread.
6623 void RtApiOss :: startStream()
6626 if ( stream_.state == STREAM_RUNNING ) {
6627 errorText_ = "RtApiOss::startStream(): the stream is already running!";
6628 error( RtError::WARNING );
6632 MUTEX_LOCK( &stream_.mutex );
6634 stream_.state = STREAM_RUNNING;
6636 // No need to do anything else here ... OSS automatically starts
6637 // when fed samples.
6639 MUTEX_UNLOCK( &stream_.mutex );
// Stop a running stream gracefully: flush the output with zeros so the
// last real buffers drain audibly, then halt the device(s).
// NOTE(review): listing has elided lines (numbering gaps); comments
// describe only the visible code.
6642 void RtApiOss :: stopStream()
6645 if ( stream_.state == STREAM_STOPPED ) {
6646 errorText_ = "RtApiOss::stopStream(): the stream is already stopped!";
6647 error( RtError::WARNING );
6651 // Change the state before the lock to improve shutdown response
6652 // when using a callback.
6653 stream_.state = STREAM_STOPPED;
6654 MUTEX_LOCK( &stream_.mutex );
6657 OssHandle *handle = (OssHandle *) stream_.apiHandle;
6658 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
6660 // Flush the output with zeros a few times.
6663 RtAudioFormat format;
// Pick whichever buffer the device is actually fed from (converted or user).
6665 if ( stream_.doConvertBuffer[0] ) {
6666 buffer = stream_.deviceBuffer;
6667 samples = stream_.bufferSize * stream_.nDeviceChannels[0];
6668 format = stream_.deviceFormat[0];
6671 buffer = stream_.userBuffer[0];
6672 samples = stream_.bufferSize * stream_.nUserChannels[0];
6673 format = stream_.userFormat;
6676 memset( buffer, 0, samples * formatBytes(format) );
// Write one extra buffer of silence beyond nBuffers to drain the queue.
6677 for ( unsigned int i=0; i<stream_.nBuffers+1; i++ ) {
6678 result = write( handle->id[0], buffer, samples * formatBytes(format) );
6679 if ( result == -1 ) {
6680 errorText_ = "RtApiOss::stopStream: audio write error.";
6681 error( RtError::WARNING );
6685 result = ioctl( handle->id[0], SNDCTL_DSP_HALT, 0 );
6686 if ( result == -1 ) {
6687 errorStream_ << "RtApiOss::stopStream: system error stopping callback procedure on device (" << stream_.device[0] << ").";
6688 errorText_ = errorStream_.str();
6691 handle->triggered = false;
// Halt the input side too, unless it shares the output descriptor (duplex).
6694 if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && handle->id[0] != handle->id[1] ) ) {
6695 result = ioctl( handle->id[1], SNDCTL_DSP_HALT, 0 );
6696 if ( result == -1 ) {
6697 errorStream_ << "RtApiOss::stopStream: system error stopping input callback procedure on device (" << stream_.device[0] << ").";
6698 errorText_ = errorStream_.str();
6704 MUTEX_UNLOCK( &stream_.mutex );
6706 stream_.state = STREAM_STOPPED;
// Raise SYSTEM_ERROR only after releasing the lock.
6707 if ( result != -1 ) return;
6708 error( RtError::SYSTEM_ERROR );
// Stop a running stream immediately (no output flush, unlike stopStream):
// halt the device(s) and mark the stream stopped.
// NOTE(review): listing has elided lines (numbering gaps); comments
// describe only the visible code.
6711 void RtApiOss :: abortStream()
6714 if ( stream_.state == STREAM_STOPPED ) {
6715 errorText_ = "RtApiOss::abortStream(): the stream is already stopped!";
6716 error( RtError::WARNING );
6720 // Change the state before the lock to improve shutdown response
6721 // when using a callback.
6722 stream_.state = STREAM_STOPPED;
6723 MUTEX_LOCK( &stream_.mutex );
6726 OssHandle *handle = (OssHandle *) stream_.apiHandle;
6727 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
6728 result = ioctl( handle->id[0], SNDCTL_DSP_HALT, 0 );
6729 if ( result == -1 ) {
6730 errorStream_ << "RtApiOss::abortStream: system error stopping callback procedure on device (" << stream_.device[0] << ").";
6731 errorText_ = errorStream_.str();
6734 handle->triggered = false;
// Halt the input side too, unless it shares the output descriptor (duplex).
6737 if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && handle->id[0] != handle->id[1] ) ) {
6738 result = ioctl( handle->id[1], SNDCTL_DSP_HALT, 0 );
6739 if ( result == -1 ) {
6740 errorStream_ << "RtApiOss::abortStream: system error stopping input callback procedure on device (" << stream_.device[0] << ").";
6741 errorText_ = errorStream_.str();
6747 MUTEX_UNLOCK( &stream_.mutex );
6749 stream_.state = STREAM_STOPPED;
// Raise SYSTEM_ERROR only after releasing the lock.
6750 if ( result != -1 ) return;
6751 error( RtError::SYSTEM_ERROR );
// One iteration of the OSS callback loop: invoke the user callback, then
// write the produced output buffer to the device and/or read the next
// input buffer from it, converting/byte-swapping as configured.
// NOTE(review): listing has elided lines (numbering gaps); comments
// describe only the visible code.
6754 void RtApiOss :: callbackEvent()
6756 if ( stream_.state == STREAM_STOPPED ) {
// Idle politely while stopped but not closed.
6757 if ( stream_.callbackInfo.isRunning ) usleep( 50000 ); // sleep 50 milliseconds
6761 if ( stream_.state == STREAM_CLOSED ) {
6762 errorText_ = "RtApiOss::callbackEvent(): the stream is closed ... this shouldn't happen!";
6763 error( RtError::WARNING );
6767 // Invoke user callback to get fresh output data.
6768 int doStopStream = 0;
6769 RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;
6770 double streamTime = getStreamTime();
6771 RtAudioStreamStatus status = 0;
6772 OssHandle *handle = (OssHandle *) stream_.apiHandle;
// Report (and clear) any xrun flags recorded by previous read/write errors.
6773 if ( stream_.mode != INPUT && handle->xrun[0] == true ) {
6774 status |= RTAUDIO_OUTPUT_UNDERFLOW;
6775 handle->xrun[0] = false;
6777 if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {
6778 status |= RTAUDIO_INPUT_OVERFLOW;
6779 handle->xrun[1] = false;
6781 doStopStream = callback( stream_.userBuffer[0], stream_.userBuffer[1],
6782 stream_.bufferSize, streamTime, status, stream_.callbackInfo.userData );
6784 MUTEX_LOCK( &stream_.mutex );
6786 // The state might change while waiting on a mutex.
6787 if ( stream_.state == STREAM_STOPPED ) goto unlock;
6792 RtAudioFormat format;
6794 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
6796 // Setup parameters and do buffer conversion if necessary.
6797 if ( stream_.doConvertBuffer[0] ) {
6798 buffer = stream_.deviceBuffer;
6799 convertBuffer( buffer, stream_.userBuffer[0], stream_.convertInfo[0] );
6800 samples = stream_.bufferSize * stream_.nDeviceChannels[0];
6801 format = stream_.deviceFormat[0];
6804 buffer = stream_.userBuffer[0];
6805 samples = stream_.bufferSize * stream_.nUserChannels[0];
6806 format = stream_.userFormat;
6809 // Do byte swapping if necessary.
6810 if ( stream_.doByteSwap[0] )
6811 byteSwapBuffer( buffer, samples, format );
// First duplex pass: prime the output, then trigger both directions at once
// so input and output start in sync.
6813 if ( stream_.mode == DUPLEX && handle->triggered == false ) {
6815 ioctl( handle->id[0], SNDCTL_DSP_SETTRIGGER, &trig );
6816 result = write( handle->id[0], buffer, samples * formatBytes(format) );
6817 trig = PCM_ENABLE_INPUT|PCM_ENABLE_OUTPUT;
6818 ioctl( handle->id[0], SNDCTL_DSP_SETTRIGGER, &trig );
6819 handle->triggered = true;
6822 // Write samples to device.
6823 result = write( handle->id[0], buffer, samples * formatBytes(format) );
6825 if ( result == -1 ) {
6826 // We'll assume this is an underrun, though there isn't a
6827 // specific means for determining that.
6828 handle->xrun[0] = true;
6829 errorText_ = "RtApiOss::callbackEvent: audio write error.";
6830 error( RtError::WARNING );
6835 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
6837 // Setup parameters.
6838 if ( stream_.doConvertBuffer[1] ) {
6839 buffer = stream_.deviceBuffer;
6840 samples = stream_.bufferSize * stream_.nDeviceChannels[1];
6841 format = stream_.deviceFormat[1];
6844 buffer = stream_.userBuffer[1];
6845 samples = stream_.bufferSize * stream_.nUserChannels[1];
6846 format = stream_.userFormat;
6849 // Read samples from device.
6850 result = read( handle->id[1], buffer, samples * formatBytes(format) );
6852 if ( result == -1 ) {
6853 // We'll assume this is an overrun, though there isn't a
6854 // specific means for determining that.
6855 handle->xrun[1] = true;
6856 errorText_ = "RtApiOss::callbackEvent: audio read error.";
6857 error( RtError::WARNING );
6861 // Do byte swapping if necessary.
6862 if ( stream_.doByteSwap[1] )
6863 byteSwapBuffer( buffer, samples, format );
6865 // Do buffer conversion if necessary.
6866 if ( stream_.doConvertBuffer[1] )
6867 convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
6871 MUTEX_UNLOCK( &stream_.mutex );
// Advance stream time and honor the callback's stop/abort request.
6873 RtApi::tickStreamTime();
6874 if ( doStopStream == 1 ) this->stopStream();
6875 else if ( doStopStream == 2 ) this->abortStream();
6878 extern "C" void *ossCallbackHandler( void *ptr )
6880 CallbackInfo *info = (CallbackInfo *) ptr;
6881 RtApiOss *object = (RtApiOss *) info->object;
6882 bool *isRunning = &info->isRunning;
6885 // Set a higher scheduler priority (P.J. Leonard)
6886 struct sched_param param;
6887 param.sched_priority = 39; // Is this the best number?
6888 sched_setscheduler( 0, SCHED_RR, ¶m );
6891 while ( *isRunning == true ) {
6892 pthread_testcancel();
6893 object->callbackEvent();
6896 pthread_exit( NULL );
6899 //******************** End of __LINUX_OSS__ *********************//
6903 // *************************************************** //
6905 // Protected common (OS-independent) RtAudio methods.
6907 // *************************************************** //
6909 // This method can be modified to control the behavior of error
6910 // message printing.
// Central error reporting: prints warnings to stderr (when enabled) and
// raises an RtError exception for fatal error types.
// NOTE(review): a line is elided between the print and the throw (numbering
// gap) — presumably an "else"/type test so warnings do not throw; confirm
// against the canonical source.
6911 void RtApi :: error( RtError::Type type )
6913 errorStream_.str(""); // clear the ostringstream
6914 if ( type == RtError::WARNING && showWarnings_ == true )
6915 std::cerr << '\n' << errorText_ << "\n\n";
6917 throw( RtError( errorText_, type ) );
// Guard used by public stream methods: raises INVALID_USE if no stream
// has been opened.
6920 void RtApi :: verifyStream()
6922 if ( stream_.state == STREAM_CLOSED ) {
6923 errorText_ = "RtApi:: a stream is not open!";
6924 error( RtError::INVALID_USE );
// Reset every field of stream_ to its "no stream open" default, including
// the per-direction (playback = 0, capture = 1) arrays and conversion info.
6928 void RtApi :: clearStreamInfo()
6930 stream_.mode = UNINITIALIZED;
6931 stream_.state = STREAM_CLOSED;
6932 stream_.sampleRate = 0;
6933 stream_.bufferSize = 0;
6934 stream_.nBuffers = 0;
6935 stream_.userFormat = 0;
6936 stream_.userInterleaved = true;
6937 stream_.streamTime = 0.0;
6938 stream_.apiHandle = 0;
6939 stream_.deviceBuffer = 0;
6940 stream_.callbackInfo.callback = 0;
6941 stream_.callbackInfo.userData = 0;
6942 stream_.callbackInfo.isRunning = false;
6943 for ( int i=0; i<2; i++ ) {
// 11111 is a sentinel meaning "no device selected".
6944 stream_.device[i] = 11111;
6945 stream_.doConvertBuffer[i] = false;
6946 stream_.deviceInterleaved[i] = true;
6947 stream_.doByteSwap[i] = false;
6948 stream_.nUserChannels[i] = 0;
6949 stream_.nDeviceChannels[i] = 0;
6950 stream_.channelOffset[i] = 0;
6951 stream_.deviceFormat[i] = 0;
6952 stream_.latency[i] = 0;
6953 stream_.userBuffer[i] = 0;
6954 stream_.convertInfo[i].channels = 0;
6955 stream_.convertInfo[i].inJump = 0;
6956 stream_.convertInfo[i].outJump = 0;
6957 stream_.convertInfo[i].inFormat = 0;
6958 stream_.convertInfo[i].outFormat = 0;
6959 stream_.convertInfo[i].inOffset.clear();
6960 stream_.convertInfo[i].outOffset.clear();
6964 unsigned int RtApi :: formatBytes( RtAudioFormat format )
6966 if ( format == RTAUDIO_SINT16 )
6968 else if ( format == RTAUDIO_SINT24 || format == RTAUDIO_SINT32 ||
6969 format == RTAUDIO_FLOAT32 )
6971 else if ( format == RTAUDIO_FLOAT64 )
6973 else if ( format == RTAUDIO_SINT8 )
6976 errorText_ = "RtApi::formatBytes: undefined format.";
6977 error( RtError::WARNING );
// Build the ConvertInfo structure used by convertBuffer(): jump sizes,
// channel count, and per-channel in/out offsets for (de)interleaving and
// firstChannel adjustment.  mode selects direction (INPUT: device -> user
// buffer; otherwise user -> device buffer).
// NOTE(review): listing has elided closing-brace lines (numbering gaps);
// comments describe only the visible code.
6982 void RtApi :: setConvertInfo( StreamMode mode, unsigned int firstChannel )
6984 if ( mode == INPUT ) { // convert device to user buffer
6985 stream_.convertInfo[mode].inJump = stream_.nDeviceChannels[1];
6986 stream_.convertInfo[mode].outJump = stream_.nUserChannels[1];
6987 stream_.convertInfo[mode].inFormat = stream_.deviceFormat[1];
6988 stream_.convertInfo[mode].outFormat = stream_.userFormat;
6990 else { // convert user to device buffer
6991 stream_.convertInfo[mode].inJump = stream_.nUserChannels[0];
6992 stream_.convertInfo[mode].outJump = stream_.nDeviceChannels[0];
6993 stream_.convertInfo[mode].inFormat = stream_.userFormat;
6994 stream_.convertInfo[mode].outFormat = stream_.deviceFormat[0];
// Convert only as many channels as both sides have.
6997 if ( stream_.convertInfo[mode].inJump < stream_.convertInfo[mode].outJump )
6998 stream_.convertInfo[mode].channels = stream_.convertInfo[mode].inJump;
7000 stream_.convertInfo[mode].channels = stream_.convertInfo[mode].outJump;
7002 // Set up the interleave/deinterleave offsets.
7003 if ( stream_.deviceInterleaved[mode] != stream_.userInterleaved ) {
// Non-interleaved side uses planar offsets (k * bufferSize) and a jump of 1.
7004 if ( ( mode == OUTPUT && stream_.deviceInterleaved[mode] ) ||
7005 ( mode == INPUT && stream_.userInterleaved ) ) {
7006 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {
7007 stream_.convertInfo[mode].inOffset.push_back( k * stream_.bufferSize );
7008 stream_.convertInfo[mode].outOffset.push_back( k );
7009 stream_.convertInfo[mode].inJump = 1;
7013 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {
7014 stream_.convertInfo[mode].inOffset.push_back( k );
7015 stream_.convertInfo[mode].outOffset.push_back( k * stream_.bufferSize );
7016 stream_.convertInfo[mode].outJump = 1;
7020 else { // no (de)interleaving
7021 if ( stream_.userInterleaved ) {
7022 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {
7023 stream_.convertInfo[mode].inOffset.push_back( k );
7024 stream_.convertInfo[mode].outOffset.push_back( k );
7028 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {
7029 stream_.convertInfo[mode].inOffset.push_back( k * stream_.bufferSize );
7030 stream_.convertInfo[mode].outOffset.push_back( k * stream_.bufferSize );
7031 stream_.convertInfo[mode].inJump = 1;
7032 stream_.convertInfo[mode].outJump = 1;
7037 // Add channel offset.
// Shift offsets by firstChannel: +firstChannel samples when the buffer is
// interleaved, +firstChannel whole planes (bufferSize) when planar.
7038 if ( firstChannel > 0 ) {
7039 if ( stream_.deviceInterleaved[mode] ) {
7040 if ( mode == OUTPUT ) {
7041 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
7042 stream_.convertInfo[mode].outOffset[k] += firstChannel;
7045 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
7046 stream_.convertInfo[mode].inOffset[k] += firstChannel;
7050 if ( mode == OUTPUT ) {
7051 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
7052 stream_.convertInfo[mode].outOffset[k] += ( firstChannel * stream_.bufferSize );
7055 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
7056 stream_.convertInfo[mode].inOffset[k] += ( firstChannel * stream_.bufferSize );
7062 void RtApi :: convertBuffer( char *outBuffer, char *inBuffer, ConvertInfo &info )
7064 // This function does format conversion, input/output channel compensation, and
7065 // data interleaving/deinterleaving. 24-bit integers are assumed to occupy
7066 // the upper three bytes of a 32-bit integer.
7068 // Clear our device buffer when in/out duplex device channels are different
7069 if ( outBuffer == stream_.deviceBuffer && stream_.mode == DUPLEX &&
7070 ( stream_.nDeviceChannels[0] < stream_.nDeviceChannels[1] ) )
7071 memset( outBuffer, 0, stream_.bufferSize * info.outJump * formatBytes( info.outFormat ) );
7074 if (info.outFormat == RTAUDIO_FLOAT64) {
7076 Float64 *out = (Float64 *)outBuffer;
7078 if (info.inFormat == RTAUDIO_SINT8) {
7079 signed char *in = (signed char *)inBuffer;
7080 scale = 1.0 / 128.0;
7081 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7082 for (j=0; j<info.channels; j++) {
7083 out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
7084 out[info.outOffset[j]] *= scale;
7087 out += info.outJump;
7090 else if (info.inFormat == RTAUDIO_SINT16) {
7091 Int16 *in = (Int16 *)inBuffer;
7092 scale = 1.0 / 32768.0;
7093 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7094 for (j=0; j<info.channels; j++) {
7095 out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
7096 out[info.outOffset[j]] *= scale;
7099 out += info.outJump;
7102 else if (info.inFormat == RTAUDIO_SINT24) {
7103 Int32 *in = (Int32 *)inBuffer;
7104 scale = 1.0 / 8388608.0;
7105 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7106 for (j=0; j<info.channels; j++) {
7107 out[info.outOffset[j]] = (Float64) (in[info.inOffset[j]] & 0x00ffffff);
7108 out[info.outOffset[j]] *= scale;
7111 out += info.outJump;
7114 else if (info.inFormat == RTAUDIO_SINT32) {
7115 Int32 *in = (Int32 *)inBuffer;
7116 scale = 1.0 / 2147483648.0;
7117 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7118 for (j=0; j<info.channels; j++) {
7119 out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
7120 out[info.outOffset[j]] *= scale;
7123 out += info.outJump;
7126 else if (info.inFormat == RTAUDIO_FLOAT32) {
7127 Float32 *in = (Float32 *)inBuffer;
7128 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7129 for (j=0; j<info.channels; j++) {
7130 out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
7133 out += info.outJump;
7136 else if (info.inFormat == RTAUDIO_FLOAT64) {
7137 // Channel compensation and/or (de)interleaving only.
7138 Float64 *in = (Float64 *)inBuffer;
7139 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7140 for (j=0; j<info.channels; j++) {
7141 out[info.outOffset[j]] = in[info.inOffset[j]];
7144 out += info.outJump;
7148 else if (info.outFormat == RTAUDIO_FLOAT32) {
7150 Float32 *out = (Float32 *)outBuffer;
7152 if (info.inFormat == RTAUDIO_SINT8) {
7153 signed char *in = (signed char *)inBuffer;
7154 scale = 1.0 / 128.0;
7155 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7156 for (j=0; j<info.channels; j++) {
7157 out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
7158 out[info.outOffset[j]] *= scale;
7161 out += info.outJump;
7164 else if (info.inFormat == RTAUDIO_SINT16) {
7165 Int16 *in = (Int16 *)inBuffer;
7166 scale = 1.0 / 32768.0;
7167 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7168 for (j=0; j<info.channels; j++) {
7169 out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
7170 out[info.outOffset[j]] *= scale;
7173 out += info.outJump;
7176 else if (info.inFormat == RTAUDIO_SINT24) {
7177 Int32 *in = (Int32 *)inBuffer;
7178 scale = 1.0 / 8388608.0;
7179 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7180 for (j=0; j<info.channels; j++) {
7181 out[info.outOffset[j]] = (Float32) (in[info.inOffset[j]] & 0x00ffffff);
7182 out[info.outOffset[j]] *= scale;
7185 out += info.outJump;
7188 else if (info.inFormat == RTAUDIO_SINT32) {
7189 Int32 *in = (Int32 *)inBuffer;
7190 scale = 1.0 / 2147483648.0;
7191 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7192 for (j=0; j<info.channels; j++) {
7193 out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
7194 out[info.outOffset[j]] *= scale;
7197 out += info.outJump;
7200 else if (info.inFormat == RTAUDIO_FLOAT32) {
7201 // Channel compensation and/or (de)interleaving only.
7202 Float32 *in = (Float32 *)inBuffer;
7203 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7204 for (j=0; j<info.channels; j++) {
7205 out[info.outOffset[j]] = in[info.inOffset[j]];
7208 out += info.outJump;
7211 else if (info.inFormat == RTAUDIO_FLOAT64) {
7212 Float64 *in = (Float64 *)inBuffer;
7213 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7214 for (j=0; j<info.channels; j++) {
7215 out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
7218 out += info.outJump;
7222 else if (info.outFormat == RTAUDIO_SINT32) {
7223 Int32 *out = (Int32 *)outBuffer;
7224 if (info.inFormat == RTAUDIO_SINT8) {
7225 signed char *in = (signed char *)inBuffer;
7226 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7227 for (j=0; j<info.channels; j++) {
7228 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7229 out[info.outOffset[j]] <<= 24;
7232 out += info.outJump;
7235 else if (info.inFormat == RTAUDIO_SINT16) {
7236 Int16 *in = (Int16 *)inBuffer;
7237 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7238 for (j=0; j<info.channels; j++) {
7239 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7240 out[info.outOffset[j]] <<= 16;
7243 out += info.outJump;
7246 else if (info.inFormat == RTAUDIO_SINT24) {
7247 Int32 *in = (Int32 *)inBuffer;
7248 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7249 for (j=0; j<info.channels; j++) {
7250 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7251 out[info.outOffset[j]] <<= 8;
7254 out += info.outJump;
7257 else if (info.inFormat == RTAUDIO_SINT32) {
7258 // Channel compensation and/or (de)interleaving only.
7259 Int32 *in = (Int32 *)inBuffer;
7260 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7261 for (j=0; j<info.channels; j++) {
7262 out[info.outOffset[j]] = in[info.inOffset[j]];
7265 out += info.outJump;
7268 else if (info.inFormat == RTAUDIO_FLOAT32) {
7269 Float32 *in = (Float32 *)inBuffer;
7270 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7271 for (j=0; j<info.channels; j++) {
7272 out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 2147483647.0);
7275 out += info.outJump;
7278 else if (info.inFormat == RTAUDIO_FLOAT64) {
7279 Float64 *in = (Float64 *)inBuffer;
7280 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7281 for (j=0; j<info.channels; j++) {
7282 out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 2147483647.0);
7285 out += info.outJump;
7289 else if (info.outFormat == RTAUDIO_SINT24) {
7290 Int32 *out = (Int32 *)outBuffer;
7291 if (info.inFormat == RTAUDIO_SINT8) {
7292 signed char *in = (signed char *)inBuffer;
7293 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7294 for (j=0; j<info.channels; j++) {
7295 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7296 out[info.outOffset[j]] <<= 16;
7299 out += info.outJump;
7302 else if (info.inFormat == RTAUDIO_SINT16) {
7303 Int16 *in = (Int16 *)inBuffer;
7304 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7305 for (j=0; j<info.channels; j++) {
7306 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7307 out[info.outOffset[j]] <<= 8;
7310 out += info.outJump;
7313 else if (info.inFormat == RTAUDIO_SINT24) {
7314 // Channel compensation and/or (de)interleaving only.
7315 Int32 *in = (Int32 *)inBuffer;
7316 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7317 for (j=0; j<info.channels; j++) {
7318 out[info.outOffset[j]] = in[info.inOffset[j]];
7321 out += info.outJump;
7324 else if (info.inFormat == RTAUDIO_SINT32) {
7325 Int32 *in = (Int32 *)inBuffer;
7326 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7327 for (j=0; j<info.channels; j++) {
7328 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7329 out[info.outOffset[j]] >>= 8;
7332 out += info.outJump;
7335 else if (info.inFormat == RTAUDIO_FLOAT32) {
7336 Float32 *in = (Float32 *)inBuffer;
7337 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7338 for (j=0; j<info.channels; j++) {
7339 out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 8388608.0);
7342 out += info.outJump;
7345 else if (info.inFormat == RTAUDIO_FLOAT64) {
7346 Float64 *in = (Float64 *)inBuffer;
7347 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7348 for (j=0; j<info.channels; j++) {
7349 out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 2147483647.0);
7352 out += info.outJump;
7356 else if (info.outFormat == RTAUDIO_SINT16) {
7357 Int16 *out = (Int16 *)outBuffer;
7358 if (info.inFormat == RTAUDIO_SINT8) {
7359 signed char *in = (signed char *)inBuffer;
7360 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7361 for (j=0; j<info.channels; j++) {
7362 out[info.outOffset[j]] = (Int16) in[info.inOffset[j]];
7363 out[info.outOffset[j]] <<= 8;
7366 out += info.outJump;
7369 else if (info.inFormat == RTAUDIO_SINT16) {
7370 // Channel compensation and/or (de)interleaving only.
7371 Int16 *in = (Int16 *)inBuffer;
7372 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7373 for (j=0; j<info.channels; j++) {
7374 out[info.outOffset[j]] = in[info.inOffset[j]];
7377 out += info.outJump;
7380 else if (info.inFormat == RTAUDIO_SINT24) {
7381 Int32 *in = (Int32 *)inBuffer;
7382 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7383 for (j=0; j<info.channels; j++) {
7384 out[info.outOffset[j]] = (Int16) ((in[info.inOffset[j]] >> 8) & 0x0000ffff);
7387 out += info.outJump;
7390 else if (info.inFormat == RTAUDIO_SINT32) {
7391 Int32 *in = (Int32 *)inBuffer;
7392 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7393 for (j=0; j<info.channels; j++) {
7394 out[info.outOffset[j]] = (Int16) ((in[info.inOffset[j]] >> 16) & 0x0000ffff);
7397 out += info.outJump;
7400 else if (info.inFormat == RTAUDIO_FLOAT32) {
7401 Float32 *in = (Float32 *)inBuffer;
7402 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7403 for (j=0; j<info.channels; j++) {
7404 out[info.outOffset[j]] = (Int16) (in[info.inOffset[j]] * 32767.0);
7407 out += info.outJump;
7410 else if (info.inFormat == RTAUDIO_FLOAT64) {
7411 Float64 *in = (Float64 *)inBuffer;
7412 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7413 for (j=0; j<info.channels; j++) {
7414 out[info.outOffset[j]] = (Int16) (in[info.inOffset[j]] * 32767.0);
7417 out += info.outJump;
7421 else if (info.outFormat == RTAUDIO_SINT8) {
7422 signed char *out = (signed char *)outBuffer;
7423 if (info.inFormat == RTAUDIO_SINT8) {
7424 // Channel compensation and/or (de)interleaving only.
7425 signed char *in = (signed char *)inBuffer;
7426 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7427 for (j=0; j<info.channels; j++) {
7428 out[info.outOffset[j]] = in[info.inOffset[j]];
7431 out += info.outJump;
7434 if (info.inFormat == RTAUDIO_SINT16) {
7435 Int16 *in = (Int16 *)inBuffer;
7436 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7437 for (j=0; j<info.channels; j++) {
7438 out[info.outOffset[j]] = (signed char) ((in[info.inOffset[j]] >> 8) & 0x00ff);
7441 out += info.outJump;
7444 else if (info.inFormat == RTAUDIO_SINT24) {
7445 Int32 *in = (Int32 *)inBuffer;
7446 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7447 for (j=0; j<info.channels; j++) {
7448 out[info.outOffset[j]] = (signed char) ((in[info.inOffset[j]] >> 16) & 0x000000ff);
7451 out += info.outJump;
7454 else if (info.inFormat == RTAUDIO_SINT32) {
7455 Int32 *in = (Int32 *)inBuffer;
7456 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7457 for (j=0; j<info.channels; j++) {
7458 out[info.outOffset[j]] = (signed char) ((in[info.inOffset[j]] >> 24) & 0x000000ff);
7461 out += info.outJump;
7464 else if (info.inFormat == RTAUDIO_FLOAT32) {
7465 Float32 *in = (Float32 *)inBuffer;
7466 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7467 for (j=0; j<info.channels; j++) {
7468 out[info.outOffset[j]] = (signed char) (in[info.inOffset[j]] * 127.0);
7471 out += info.outJump;
7474 else if (info.inFormat == RTAUDIO_FLOAT64) {
7475 Float64 *in = (Float64 *)inBuffer;
7476 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7477 for (j=0; j<info.channels; j++) {
7478 out[info.outOffset[j]] = (signed char) (in[info.inOffset[j]] * 127.0);
7481 out += info.outJump;
7487 void RtApi :: byteSwapBuffer( char *buffer, unsigned int samples, RtAudioFormat format )
7493 if ( format == RTAUDIO_SINT16 ) {
7494 for ( unsigned int i=0; i<samples; i++ ) {
7495 // Swap 1st and 2nd bytes.
7500 // Increment 2 bytes.
7504 else if ( format == RTAUDIO_SINT24 ||
7505 format == RTAUDIO_SINT32 ||
7506 format == RTAUDIO_FLOAT32 ) {
7507 for ( unsigned int i=0; i<samples; i++ ) {
7508 // Swap 1st and 4th bytes.
7513 // Swap 2nd and 3rd bytes.
7519 // Increment 4 bytes.
7523 else if ( format == RTAUDIO_FLOAT64 ) {
7524 for ( unsigned int i=0; i<samples; i++ ) {
7525 // Swap 1st and 8th bytes
7530 // Swap 2nd and 7th bytes
7536 // Swap 3rd and 6th bytes
7542 // Swap 4th and 5th bytes
7548 // Increment 8 bytes.
7554 // Indentation settings for Vim and Emacs
7557 // c-basic-offset: 2
7558 // indent-tabs-mode: nil
7561 // vim: et sts=2 sw=2