and OSS), Macintosh OS X (CoreAudio and Jack), and Windows
(DirectSound, ASIO and WASAPI) operating systems.
+ RtAudio GitHub site: https://github.com/thestk/rtaudio
RtAudio WWW site: http://www.music.mcgill.ca/~gary/rtaudio/
RtAudio: realtime audio i/o C++ classes
- Copyright (c) 2001-2017 Gary P. Scavone
+ Copyright (c) 2001-2019 Gary P. Scavone
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation files
*/
/************************************************************************/
-// RtAudio: Version 5.0.0
+// RtAudio: Version 5.1.0
#include "RtAudio.h"
#include <iostream>
return RTAUDIO_VERSION;
}
-void RtAudio :: getCompiledApi( std::vector<RtAudio::Api> &apis )
-{
- apis.clear();
+// Define API names and display names.
+// Must be in same order as API enum.
+extern "C" {
+const char* rtaudio_api_names[][2] = {
+ { "unspecified" , "Unknown" },
+ { "alsa" , "ALSA" },
+ { "pulse" , "Pulse" },
+ { "oss" , "OpenSoundSystem" },
+ { "jack" , "Jack" },
+ { "core" , "CoreAudio" },
+ { "wasapi" , "WASAPI" },
+ { "asio" , "ASIO" },
+ { "ds" , "DirectSound" },
+ { "dummy" , "Dummy" },
+};
+
+const unsigned int rtaudio_num_api_names =
+ sizeof(rtaudio_api_names)/sizeof(rtaudio_api_names[0]);
- // The order here will control the order of RtAudio's API search in
- // the constructor.
+// The order here will control the order of RtAudio's API search in
+// the constructor.
+extern "C" const RtAudio::Api rtaudio_compiled_apis[] = {
#if defined(__UNIX_JACK__)
- apis.push_back( UNIX_JACK );
+ RtAudio::UNIX_JACK,
#endif
#if defined(__LINUX_PULSE__)
- apis.push_back( LINUX_PULSE );
+ RtAudio::LINUX_PULSE,
#endif
#if defined(__LINUX_ALSA__)
- apis.push_back( LINUX_ALSA );
+ RtAudio::LINUX_ALSA,
#endif
#if defined(__LINUX_OSS__)
- apis.push_back( LINUX_OSS );
+ RtAudio::LINUX_OSS,
#endif
#if defined(__WINDOWS_ASIO__)
- apis.push_back( WINDOWS_ASIO );
+ RtAudio::WINDOWS_ASIO,
#endif
#if defined(__WINDOWS_WASAPI__)
- apis.push_back( WINDOWS_WASAPI );
+ RtAudio::WINDOWS_WASAPI,
#endif
#if defined(__WINDOWS_DS__)
- apis.push_back( WINDOWS_DS );
+ RtAudio::WINDOWS_DS,
#endif
#if defined(__MACOSX_CORE__)
- apis.push_back( MACOSX_CORE );
+ RtAudio::MACOSX_CORE,
#endif
#if defined(__RTAUDIO_DUMMY__)
- apis.push_back( RTAUDIO_DUMMY );
+ RtAudio::RTAUDIO_DUMMY,
#endif
+ RtAudio::UNSPECIFIED,
+};
+extern "C" const unsigned int rtaudio_num_compiled_apis =
+ sizeof(rtaudio_compiled_apis)/sizeof(rtaudio_compiled_apis[0])-1;
+}
+
+// This is a compile-time check that rtaudio_num_api_names == RtAudio::NUM_APIS.
+// If the build breaks here, check that they match.
+template<bool b> class StaticAssert { private: StaticAssert() {} };
+template<> class StaticAssert<true>{ public: StaticAssert() {} };
+class StaticAssertions { StaticAssertions() {
+ StaticAssert<rtaudio_num_api_names == RtAudio::NUM_APIS>();
+}};
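
For reference, on a C++11 compiler the guard above collapses to the built-in `static_assert`; a minimal sketch of the equivalent, kept out of the patch itself since RtAudio still supports pre-C++11 compilers:

```cpp
// C++11-or-later equivalent of the StaticAssert idiom above (sketch only).
static_assert( rtaudio_num_api_names == RtAudio::NUM_APIS,
               "rtaudio_api_names must have one entry per RtAudio::Api value" );
```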
+
+void RtAudio :: getCompiledApi( std::vector<RtAudio::Api> &apis )
+{
+ apis = std::vector<RtAudio::Api>(rtaudio_compiled_apis,
+ rtaudio_compiled_apis + rtaudio_num_compiled_apis);
+}
+
+std::string RtAudio :: getApiName( RtAudio::Api api )
+{
+ if (api < 0 || api >= RtAudio::NUM_APIS)
+ return "";
+ return rtaudio_api_names[api][0];
+}
+
+std::string RtAudio :: getApiDisplayName( RtAudio::Api api )
+{
+ if (api < 0 || api >= RtAudio::NUM_APIS)
+ return "Unknown";
+ return rtaudio_api_names[api][1];
+}
+
+RtAudio::Api RtAudio :: getCompiledApiByName( const std::string &name )
+{
+ unsigned int i=0;
+ for (i = 0; i < rtaudio_num_compiled_apis; ++i)
+ if (name == rtaudio_api_names[rtaudio_compiled_apis[i]][0])
+ return rtaudio_compiled_apis[i];
+ return RtAudio::UNSPECIFIED;
}
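
A minimal client-side sketch of the new name-mapping functions (the program is illustrative and not part of the patch):

```cpp
#include "RtAudio.h"
#include <iostream>
#include <vector>

int main()
{
  // Enumerate the APIs compiled into this build with short and display names.
  std::vector<RtAudio::Api> apis;
  RtAudio::getCompiledApi( apis );
  for ( size_t i = 0; i < apis.size(); i++ )
    std::cout << RtAudio::getApiName( apis[i] ) << " : "
              << RtAudio::getApiDisplayName( apis[i] ) << std::endl;

  // Map a short name (e.g. from a command-line flag) back to an Api value;
  // unknown or uncompiled names yield RtAudio::UNSPECIFIED.
  if ( RtAudio::getCompiledApiByName( "jack" ) == RtAudio::UNSPECIFIED )
    std::cout << "jack support was not compiled in" << std::endl;
  return 0;
}
```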
void RtAudio :: openRtApi( RtAudio::Api api )
RtAudio :: RtAudio( RtAudio::Api api )
{
rtapi_ = 0;
-
+
if ( api != UNSPECIFIED ) {
// Attempt to open the specified API.
openRtApi( api );
if ( rtapi_ ) return;
// It should not be possible to get here because the preprocessor
- // definition __RTAUDIO_DUMMY__ is automatically defined if no
- // API-specific definitions are passed to the compiler. But just in
- // case something weird happens, we'll thow an error.
+ // definition __RTAUDIO_DUMMY__ is automatically defined in RtAudio.h
+ // if no API-specific definitions are passed to the compiler. But just
+ // in case something weird happens, we'll throw an error.
std::string errorText = "\nRtAudio: no compiled API support found ... critical error!!\n\n";
throw( RtAudioError( errorText, RtAudioError::UNSPECIFIED ) );
}
RtApi :: RtApi()
{
- stream_.state = STREAM_CLOSED;
- stream_.mode = UNINITIALIZED;
- stream_.apiHandle = 0;
- stream_.userBuffer[0] = 0;
- stream_.userBuffer[1] = 0;
+ clearStreamInfo();
MUTEX_INITIALIZE( &stream_.mutex );
showWarnings_ = true;
firstErrorOccurred_ = false;
stream_.streamTime += ( stream_.bufferSize * 1.0 / stream_.sampleRate );
+ /*
#if defined( HAVE_GETTIMEOFDAY )
gettimeofday( &stream_.lastTickTimestamp, NULL );
#endif
+ */
}
long RtApi :: getStreamLatency( void )
{
- verifyStream();
-
long totalLatency = 0;
if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX )
totalLatency = stream_.latency[0];
return totalLatency;
}
+/*
double RtApi :: getStreamTime( void )
{
- verifyStream();
-
#if defined( HAVE_GETTIMEOFDAY )
// Return a very accurate estimate of the stream time by
// adding in the elapsed time since the last tick.
(then.tv_sec + 0.000001 * then.tv_usec));
#else
return stream_.streamTime;
-#endif
+ #endif
}
+*/
void RtApi :: setStreamTime( double time )
{
- verifyStream();
+ // verifyStream();
if ( time >= 0.0 )
stream_.streamTime = time;
+ /*
#if defined( HAVE_GETTIMEOFDAY )
gettimeofday( &stream_.lastTickTimestamp, NULL );
#endif
+ */
}
+/*
unsigned int RtApi :: getStreamSampleRate( void )
{
- verifyStream();
+ verifyStream();
return stream_.sampleRate;
}
+*/
// *************************************************** //
return info;
}
+ // Probe the currently configured sample rate
+ Float64 nominalRate;
+ dataSize = sizeof( Float64 );
+ property.mSelector = kAudioDevicePropertyNominalSampleRate;
+ result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, &nominalRate );
+ if ( result == noErr ) info.currentSampleRate = (unsigned int) nominalRate;
+
// CoreAudio always uses 32-bit floating point data for PCM streams.
// Thus, any other "physical" formats supported by the device are of
// no interest to the client.
return kAudioHardwareNoError;
}
+static OSStatus disconnectListener( AudioObjectID /*inDevice*/,
+ UInt32 nAddresses,
+ const AudioObjectPropertyAddress properties[],
+ void* infoPointer )
+{
+ for ( UInt32 i=0; i<nAddresses; i++ ) {
+ if ( properties[i].mSelector == kAudioDevicePropertyDeviceIsAlive ) {
+ CallbackInfo *info = (CallbackInfo *) infoPointer;
+ RtApiCore *object = (RtApiCore *) info->object;
+ info->deviceDisconnected = true;
+ object->closeStream();
+ return kAudioHardwareUnspecifiedError;
+ }
+ }
+
+ return kAudioHardwareNoError;
+}
+
static OSStatus xrunListener( AudioObjectID /*inDevice*/,
UInt32 nAddresses,
const AudioObjectPropertyAddress properties[],
return kAudioHardwareNoError;
}
-static OSStatus rateListener( AudioObjectID inDevice,
- UInt32 /*nAddresses*/,
- const AudioObjectPropertyAddress /*properties*/[],
- void* ratePointer )
-{
- Float64 *rate = (Float64 *) ratePointer;
- UInt32 dataSize = sizeof( Float64 );
- AudioObjectPropertyAddress property = { kAudioDevicePropertyNominalSampleRate,
- kAudioObjectPropertyScopeGlobal,
- kAudioObjectPropertyElementMaster };
- AudioObjectGetPropertyData( inDevice, &property, 0, NULL, &dataSize, rate );
- return kAudioHardwareNoError;
-}
-
bool RtApiCore :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
unsigned int firstChannel, unsigned int sampleRate,
RtAudioFormat format, unsigned int *bufferSize,
return FAILURE;
}
- // Only change the sample rate if off by more than 1 Hz.
+ // Only try to change the sample rate if off by more than 1 Hz.
if ( fabs( nominalRate - (double)sampleRate ) > 1.0 ) {
- // Set a property listener for the sample rate change
- Float64 reportedRate = 0.0;
- AudioObjectPropertyAddress tmp = { kAudioDevicePropertyNominalSampleRate, kAudioObjectPropertyScopeGlobal, kAudioObjectPropertyElementMaster };
- result = AudioObjectAddPropertyListener( id, &tmp, rateListener, (void *) &reportedRate );
- if ( result != noErr ) {
- errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting sample rate property listener for device (" << device << ").";
- errorText_ = errorStream_.str();
- return FAILURE;
- }
-
nominalRate = (Float64) sampleRate;
result = AudioObjectSetPropertyData( id, &property, 0, NULL, dataSize, &nominalRate );
if ( result != noErr ) {
- AudioObjectRemovePropertyListener( id, &tmp, rateListener, (void *) &reportedRate );
errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting sample rate for device (" << device << ").";
errorText_ = errorStream_.str();
return FAILURE;
// Now wait until the reported nominal rate is what we just set.
UInt32 microCounter = 0;
+ Float64 reportedRate = 0.0;
while ( reportedRate != nominalRate ) {
microCounter += 5000;
- if ( microCounter > 5000000 ) break;
+ if ( microCounter > 2000000 ) break;
usleep( 5000 );
+ result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, &reportedRate );
}
- // Remove the property listener.
- AudioObjectRemovePropertyListener( id, &tmp, rateListener, (void *) &reportedRate );
-
- if ( microCounter > 5000000 ) {
+ if ( microCounter > 2000000 ) {
errorStream_ << "RtApiCore::probeDeviceOpen: timeout waiting for sample rate update for device (" << device << ").";
errorText_ = errorStream_.str();
return FAILURE;
property.mSelector = kAudioDeviceProcessorOverload;
property.mScope = kAudioObjectPropertyScopeGlobal;
result = AudioObjectAddPropertyListener( id, &property, xrunListener, (void *) handle );
+ if ( result != noErr ) {
+ errorStream_ << "RtApiCore::probeDeviceOpen: system error setting xrun listener for device (" << device << ").";
+ errorText_ = errorStream_.str();
+ goto error;
+ }
+
+ // Setup a listener to detect a possible device disconnect.
+ property.mSelector = kAudioDevicePropertyDeviceIsAlive;
+ property.mScope = kAudioObjectPropertyScopeGlobal;
+ result = AudioObjectAddPropertyListener( id, &property, disconnectListener, (void *) &stream_.callbackInfo );
+ if ( result != noErr ) {
+ errorStream_ << "RtApiCore::probeDeviceOpen: system error setting disconnect listener for device (" << device << ").";
+ errorText_ = errorStream_.str();
+ goto error;
+ }
return SUCCESS;
stream_.deviceBuffer = 0;
}
- stream_.state = STREAM_CLOSED;
+ clearStreamInfo();
+ //stream_.state = STREAM_CLOSED;
return FAILURE;
}
}
// Destroy pthread condition variable.
+ pthread_cond_signal( &handle->condition ); // signal condition variable in case stopStream is blocked
pthread_cond_destroy( &handle->condition );
delete handle;
stream_.apiHandle = 0;
- stream_.mode = UNINITIALIZED;
- stream_.state = STREAM_CLOSED;
+ CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
+ if ( info->deviceDisconnected ) {
+ errorText_ = "RtApiCore: the stream device was disconnected (and closed)!";
+ error( RtAudioError::DEVICE_DISCONNECT );
+ }
+
+ clearStreamInfo();
+ //stream_.mode = UNINITIALIZED;
+ //stream_.state = STREAM_CLOSED;
}
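
Client code can observe this new disconnect path through the error callback passed to openStream(); a hedged sketch, where the handler name and flag are illustrative and not part of RtAudio:

```cpp
#include "RtAudio.h"
#include <iostream>

// Illustrative handler: RtAudio has already closed the stream by the time
// DEVICE_DISCONNECT is reported, so the client only needs to note the
// condition and, for example, re-scan devices before reopening.
static volatile bool deviceLost = false;

void onRtAudioError( RtAudioError::Type type, const std::string &errorText )
{
  if ( type == RtAudioError::DEVICE_DISCONNECT )
    deviceLost = true;
  std::cerr << errorText << std::endl;
}
// Install by passing onRtAudioError as the errorCallback argument of
// RtAudio::openStream().
```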
void RtApiCore :: startStream( void )
{
- verifyStream();
- if ( stream_.state == STREAM_RUNNING ) {
- errorText_ = "RtApiCore::startStream(): the stream is already running!";
+ //verifyStream();
+ if ( stream_.state != STREAM_STOPPED ) {
+ if ( stream_.state == STREAM_RUNNING )
+ errorText_ = "RtApiCore::startStream(): the stream is already running!";
+ else if ( stream_.state == STREAM_STOPPING || stream_.state == STREAM_CLOSED )
+ errorText_ = "RtApiCore::startStream(): the stream is stopping or closed!";
error( RtAudioError::WARNING );
return;
}
+ /*
+ #if defined( HAVE_GETTIMEOFDAY )
+ gettimeofday( &stream_.lastTickTimestamp, NULL );
+ #endif
+ */
+
OSStatus result = noErr;
CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
void RtApiCore :: stopStream( void )
{
- verifyStream();
- if ( stream_.state == STREAM_STOPPED ) {
- errorText_ = "RtApiCore::stopStream(): the stream is already stopped!";
+ //verifyStream();
+ if ( stream_.state != STREAM_RUNNING && stream_.state != STREAM_STOPPING ) {
+ if ( stream_.state == STREAM_STOPPED )
+ errorText_ = "RtApiCore::stopStream(): the stream is already stopped!";
+ else if ( stream_.state == STREAM_CLOSED )
+ errorText_ = "RtApiCore::stopStream(): the stream is closed!";
error( RtAudioError::WARNING );
return;
}
}
stream_.state = STREAM_STOPPED;
+ // set stream time to zero?
unlock:
if ( result == noErr ) return;
void RtApiCore :: abortStream( void )
{
- verifyStream();
- if ( stream_.state == STREAM_STOPPED ) {
- errorText_ = "RtApiCore::abortStream(): the stream is already stopped!";
+ //verifyStream();
+ if ( stream_.state != STREAM_RUNNING ) {
+ if ( stream_.state == STREAM_STOPPED )
+ errorText_ = "RtApiCore::abortStream(): the stream is already stopped!";
+ else if ( stream_.state == STREAM_STOPPING || stream_.state == STREAM_CLOSED )
+ errorText_ = "RtApiCore::abortStream(): the stream is stopping or closed!";
error( RtAudioError::WARNING );
return;
}
CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
handle->drainCounter = 2;
+ stream_.state = STREAM_STOPPING;
stopStream();
}
int cbReturnValue = callback( stream_.userBuffer[0], stream_.userBuffer[1],
stream_.bufferSize, streamTime, status, info->userData );
if ( cbReturnValue == 2 ) {
- stream_.state = STREAM_STOPPING;
- handle->drainCounter = 2;
abortStream();
return SUCCESS;
}
unlock:
//MUTEX_UNLOCK( &stream_.mutex );
- RtApi::tickStreamTime();
+ // Make sure to only tick duplex stream time once if using two devices
+ if ( stream_.mode == DUPLEX ) {
+   if ( handle->id[0] == handle->id[1] ) // same device: one callback serves both directions
+     RtApi::tickStreamTime();
+   else if ( deviceId == handle->id[0] ) // two devices: tick only on the output callback
+     RtApi::tickStreamTime();
+ }
+ else
+   RtApi::tickStreamTime(); // input-only or output-only stream
+
return SUCCESS;
}
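
For reference, the cbReturnValue == 2 branch above is driven entirely by the user callback's return value (0 = continue, 1 = stop after draining, 2 = abort immediately). A minimal sketch of a callback exercising it; the frame counter passed through userData is illustrative:

```cpp
#include "RtAudio.h"
#include <cstring>

// Sketch: output silence, then abort after roughly two seconds. Assumes a
// mono RTAUDIO_FLOAT32 output stream running at 44100 Hz.
int silenceCallback( void *outputBuffer, void * /*inputBuffer*/,
                     unsigned int nBufferFrames, double /*streamTime*/,
                     RtAudioStreamStatus /*status*/, void *userData )
{
  unsigned int *frames = (unsigned int *) userData;
  std::memset( outputBuffer, 0, nBufferFrames * sizeof( float ) );
  *frames += nBufferFrames;
  if ( *frames >= 2 * 44100 )
    return 2; // triggers the abortStream() path shown above
  return 0;   // keep streaming
}
```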
return;
}
+ #if defined( HAVE_GETTIMEOFDAY )
+ gettimeofday( &stream_.lastTickTimestamp, NULL );
+ #endif
+
JackHandle *handle = (JackHandle *) stream_.apiHandle;
int result = jack_activate( handle->client );
if ( result ) {
result = ASIOCreateBuffers( handle->bufferInfos, nChannels, stream_.bufferSize, &asioCallbacks );
if ( result != ASE_OK ) {
// Standard method failed. This can happen with strict/misbehaving drivers that return valid buffer size ranges
- // but only accept the preferred buffer size as parameter for ASIOCreateBuffers. eg. Creatives ASIO driver
- // in that case, let's be naïve and try that instead
+ // but only accept the preferred buffer size as parameter for ASIOCreateBuffers (e.g. Creative's ASIO driver).
+ // In that case, let's be naïve and try that instead.
*bufferSize = preferSize;
stream_.bufferSize = *bufferSize;
result = ASIOCreateBuffers( handle->bufferInfos, nChannels, stream_.bufferSize, &asioCallbacks );
return;
}
+ #if defined( HAVE_GETTIMEOFDAY )
+ gettimeofday( &stream_.lastTickTimestamp, NULL );
+ #endif
+
AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
ASIOError result = ASIOStart();
if ( result != ASE_OK ) {
#include <mmdeviceapi.h>
#include <functiondiscoverykeys_devpkey.h>
+#ifndef MF_E_TRANSFORM_NEED_MORE_INPUT
+ #define MF_E_TRANSFORM_NEED_MORE_INPUT _HRESULT_TYPEDEF_(0xc00d6d72)
+#endif
+
+#ifndef MFSTARTUP_NOSOCKET
+ #define MFSTARTUP_NOSOCKET 0x1
+#endif
+
#ifdef _MSC_VER
#pragma comment( lib, "ksuser" )
#pragma comment( lib, "mfplat.lib" )
relOutIndex += bufferSize_;
}
- // "in" index can end on the "out" index but cannot begin at it
- if ( inIndex_ <= relOutIndex && inIndexEnd > relOutIndex ) {
+ // the "IN" index CAN BEGIN at the "OUT" index
+ // the "IN" index CANNOT END at the "OUT" index
+ if ( inIndex_ < relOutIndex && inIndexEnd >= relOutIndex ) {
return false; // not enough space between "in" index and "out" index
}
relInIndex += bufferSize_;
}
- // "out" index can begin at and end on the "in" index
- if ( outIndex_ < relInIndex && outIndexEnd > relInIndex ) {
+ // the "OUT" index CANNOT BEGIN at the "IN" index
+ // the "OUT" index CAN END at the "IN" index
+ if ( outIndex_ <= relInIndex && outIndexEnd > relInIndex ) {
return false; // not enough space between "out" index and "in" index
}
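
A worked example of the corrected boundary rules, assuming bufferSize_ = 8 (the numbers are illustrative):

```cpp
// Push with inIndex_ = 6, outIndex_ = 2, count = 4:
//   relOutIndex = 2 + 8 = 10, inIndexEnd = 6 + 4 = 10
//   inIndex_ (6) < relOutIndex (10) and inIndexEnd (10) >= relOutIndex (10)
//   -> rejected: the write would end exactly on the read position, leaving a
//      full buffer indistinguishable from an empty one.
//
// Push with inIndex_ = outIndex_ = 2 (empty buffer), count = 4:
//   relOutIndex = 2, and inIndex_ (2) is not less than relOutIndex (2)
//   -> allowed: the "in" index may begin at the "out" index.
```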
// 4. Send stream start messages to Resampler
- _transform->ProcessMessage( MFT_MESSAGE_COMMAND_FLUSH, NULL );
- _transform->ProcessMessage( MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, NULL );
- _transform->ProcessMessage( MFT_MESSAGE_NOTIFY_START_OF_STREAM, NULL );
+ _transform->ProcessMessage( MFT_MESSAGE_COMMAND_FLUSH, 0 );
+ _transform->ProcessMessage( MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0 );
+ _transform->ProcessMessage( MFT_MESSAGE_NOTIFY_START_OF_STREAM, 0 );
}
~WasapiResampler()
{
// 8. Send stream stop messages to Resampler
- _transform->ProcessMessage( MFT_MESSAGE_NOTIFY_END_OF_STREAM, NULL );
- _transform->ProcessMessage( MFT_MESSAGE_NOTIFY_END_STREAMING, NULL );
+ _transform->ProcessMessage( MFT_MESSAGE_NOTIFY_END_OF_STREAM, 0 );
+ _transform->ProcessMessage( MFT_MESSAGE_NOTIFY_END_STREAMING, 0 );
// 9. Cleanup
CLSCTX_ALL, __uuidof( IMMDeviceEnumerator ),
( void** ) &deviceEnumerator_ );
- if ( FAILED( hr ) ) {
- errorText_ = "RtApiWasapi::RtApiWasapi: Unable to instantiate device enumerator";
- error( RtAudioError::DRIVER_ERROR );
- }
+ // This can fail on an old Windows version; ignore it and proceed (the
+ // device count will then be reported as zero).
+ if ( FAILED( hr ) )
+ deviceEnumerator_ = NULL;
}
//-----------------------------------------------------------------------------
IMMDeviceCollection* captureDevices = NULL;
IMMDeviceCollection* renderDevices = NULL;
+ if ( !deviceEnumerator_ )
+ return 0;
+
// Count capture devices
errorText_.clear();
HRESULT hr = deviceEnumerator_->EnumAudioEndpoints( eCapture, DEVICE_STATE_ACTIVE, &captureDevices );
return;
}
+ #if defined( HAVE_GETTIMEOFDAY )
+ gettimeofday( &stream_.lastTickTimestamp, NULL );
+ #endif
+
// update stream state
stream_.state = STREAM_RUNNING;
// Wait for the last buffer to play before stopping.
Sleep( 1000 * stream_.bufferSize / stream_.sampleRate );
- // stop capture client if applicable
- if ( ( ( WasapiHandle* ) stream_.apiHandle )->captureAudioClient ) {
- HRESULT hr = ( ( WasapiHandle* ) stream_.apiHandle )->captureAudioClient->Stop();
- if ( FAILED( hr ) ) {
- errorText_ = "RtApiWasapi::stopStream: Unable to stop capture stream.";
- error( RtAudioError::DRIVER_ERROR );
- return;
- }
- }
-
- // stop render client if applicable
- if ( ( ( WasapiHandle* ) stream_.apiHandle )->renderAudioClient ) {
- HRESULT hr = ( ( WasapiHandle* ) stream_.apiHandle )->renderAudioClient->Stop();
- if ( FAILED( hr ) ) {
- errorText_ = "RtApiWasapi::stopStream: Unable to stop render stream.";
- error( RtAudioError::DRIVER_ERROR );
- return;
- }
- }
-
// close thread handle
if ( stream_.callbackInfo.thread && !CloseHandle( ( void* ) stream_.callbackInfo.thread ) ) {
errorText_ = "RtApiWasapi::stopStream: Unable to close callback thread.";
Sleep( 1 );
}
- // stop capture client if applicable
- if ( ( ( WasapiHandle* ) stream_.apiHandle )->captureAudioClient ) {
- HRESULT hr = ( ( WasapiHandle* ) stream_.apiHandle )->captureAudioClient->Stop();
- if ( FAILED( hr ) ) {
- errorText_ = "RtApiWasapi::abortStream: Unable to stop capture stream.";
- error( RtAudioError::DRIVER_ERROR );
- return;
- }
- }
-
- // stop render client if applicable
- if ( ( ( WasapiHandle* ) stream_.apiHandle )->renderAudioClient ) {
- HRESULT hr = ( ( WasapiHandle* ) stream_.apiHandle )->renderAudioClient->Stop();
- if ( FAILED( hr ) ) {
- errorText_ = "RtApiWasapi::abortStream: Unable to stop render stream.";
- error( RtAudioError::DRIVER_ERROR );
- return;
- }
- }
-
// close thread handle
if ( stream_.callbackInfo.thread && !CloseHandle( ( void* ) stream_.callbackInfo.thread ) ) {
errorText_ = "RtApiWasapi::abortStream: Unable to close callback thread.";
goto Exit;
}
- // determine whether index falls within capture or render devices
+ // if device index falls within capture devices
if ( device >= renderDeviceCount ) {
if ( mode != INPUT ) {
errorType = RtAudioError::INVALID_USE;
hr = devicePtr->Activate( __uuidof( IAudioClient ), CLSCTX_ALL,
NULL, ( void** ) &captureAudioClient );
if ( FAILED( hr ) ) {
- errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve device audio client.";
+ errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve capture device audio client.";
goto Exit;
}
hr = captureAudioClient->GetMixFormat( &deviceFormat );
if ( FAILED( hr ) ) {
- errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve device mix format.";
+ errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve capture device mix format.";
goto Exit;
}
stream_.nDeviceChannels[mode] = deviceFormat->nChannels;
captureAudioClient->GetStreamLatency( ( long long* ) &stream_.latency[mode] );
}
- else {
- if ( mode != OUTPUT ) {
- errorType = RtAudioError::INVALID_USE;
- errorText_ = "RtApiWasapi::probeDeviceOpen: Render device selected as input device.";
+
+ // if device index falls within render devices and is configured for loopback
+ if ( device < renderDeviceCount && mode == INPUT )
+ {
+ // if renderAudioClient is not initialised, initialise it now
+ IAudioClient*& renderAudioClient = ( ( WasapiHandle* ) stream_.apiHandle )->renderAudioClient;
+ if ( !renderAudioClient )
+ {
+ probeDeviceOpen( device, OUTPUT, channels, firstChannel, sampleRate, format, bufferSize, options );
+ }
+
+ // retrieve captureAudioClient from devicePtr
+ IAudioClient*& captureAudioClient = ( ( WasapiHandle* ) stream_.apiHandle )->captureAudioClient;
+
+ hr = renderDevices->Item( device, &devicePtr );
+ if ( FAILED( hr ) ) {
+ errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve render device handle.";
+ goto Exit;
+ }
+
+ hr = devicePtr->Activate( __uuidof( IAudioClient ), CLSCTX_ALL,
+ NULL, ( void** ) &captureAudioClient );
+ if ( FAILED( hr ) ) {
+ errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve render device audio client.";
goto Exit;
}
- // retrieve renderAudioClient from devicePtr
+ hr = captureAudioClient->GetMixFormat( &deviceFormat );
+ if ( FAILED( hr ) ) {
+ errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve render device mix format.";
+ goto Exit;
+ }
+
+ stream_.nDeviceChannels[mode] = deviceFormat->nChannels;
+ captureAudioClient->GetStreamLatency( ( long long* ) &stream_.latency[mode] );
+ }
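
The upshot of this branch: passing a render-device index as the input device now opens a WASAPI loopback capture of that device's output. A minimal client sketch under that assumption (device choice, callback body, and the omitted error handling are illustrative):

```cpp
#include "RtAudio.h"

// Sketch: capture whatever the default render device is currently playing.
int loopbackCallback( void * /*outputBuffer*/, void *inputBuffer,
                      unsigned int nBufferFrames, double /*streamTime*/,
                      RtAudioStreamStatus /*status*/, void * /*userData*/ )
{
  // inputBuffer carries the render device's output signal.
  (void) inputBuffer; (void) nBufferFrames;
  return 0;
}

void openLoopbackStream()
{
  RtAudio audio( RtAudio::WINDOWS_WASAPI );
  RtAudio::StreamParameters iParams;
  iParams.deviceId = audio.getDefaultOutputDevice(); // a render device used as input
  iParams.nChannels = 2;
  unsigned int bufferFrames = 512;
  audio.openStream( NULL, &iParams, RTAUDIO_FLOAT32, 44100,
                    &bufferFrames, &loopbackCallback );
  audio.startStream();
}
```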
+
+ // if device index falls within render devices and is configured for output
+ if ( device < renderDeviceCount && mode == OUTPUT )
+ {
+ // if renderAudioClient is already initialised, don't initialise it again
IAudioClient*& renderAudioClient = ( ( WasapiHandle* ) stream_.apiHandle )->renderAudioClient;
+ if ( renderAudioClient )
+ {
+ methodResult = SUCCESS;
+ goto Exit;
+ }
hr = renderDevices->Item( device, &devicePtr );
if ( FAILED( hr ) ) {
hr = devicePtr->Activate( __uuidof( IAudioClient ), CLSCTX_ALL,
NULL, ( void** ) &renderAudioClient );
if ( FAILED( hr ) ) {
- errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve device audio client.";
+ errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve render device audio client.";
goto Exit;
}
hr = renderAudioClient->GetMixFormat( &deviceFormat );
if ( FAILED( hr ) ) {
- errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve device mix format.";
+ errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve render device mix format.";
goto Exit;
}
unsigned int bufferFrameCount = 0;
unsigned int numFramesPadding = 0;
unsigned int convBufferSize = 0;
+ bool loopbackEnabled = stream_.device[INPUT] == stream_.device[OUTPUT];
bool callbackPushed = true;
bool callbackPulled = false;
bool callbackStopped = false;
unsigned int convBuffSize = 0;
unsigned int deviceBuffSize = 0;
- errorText_.clear();
+ std::string errorText;
RtAudioError::Type errorType = RtAudioError::DRIVER_ERROR;
// Attempt to assign "Pro Audio" characteristic to thread
HMODULE AvrtDll = LoadLibrary( (LPCTSTR) "AVRT.dll" );
if ( AvrtDll ) {
DWORD taskIndex = 0;
- TAvSetMmThreadCharacteristicsPtr AvSetMmThreadCharacteristicsPtr = ( TAvSetMmThreadCharacteristicsPtr ) GetProcAddress( AvrtDll, "AvSetMmThreadCharacteristicsW" );
+ TAvSetMmThreadCharacteristicsPtr AvSetMmThreadCharacteristicsPtr =
+ ( TAvSetMmThreadCharacteristicsPtr ) (void(*)()) GetProcAddress( AvrtDll, "AvSetMmThreadCharacteristicsW" );
AvSetMmThreadCharacteristicsPtr( L"Pro Audio", &taskIndex );
FreeLibrary( AvrtDll );
}
if ( captureAudioClient ) {
hr = captureAudioClient->GetMixFormat( &captureFormat );
if ( FAILED( hr ) ) {
- errorText_ = "RtApiWasapi::wasapiThread: Unable to retrieve device mix format.";
+ errorText = "RtApiWasapi::wasapiThread: Unable to retrieve device mix format.";
goto Exit;
}
captureSrRatio = ( ( float ) captureFormat->nSamplesPerSec / stream_.sampleRate );
- // initialize capture stream according to desire buffer size
- float desiredBufferSize = stream_.bufferSize * captureSrRatio;
- REFERENCE_TIME desiredBufferPeriod = ( REFERENCE_TIME ) ( ( float ) desiredBufferSize * 10000000 / captureFormat->nSamplesPerSec );
-
if ( !captureClient ) {
hr = captureAudioClient->Initialize( AUDCLNT_SHAREMODE_SHARED,
- AUDCLNT_STREAMFLAGS_EVENTCALLBACK,
- desiredBufferPeriod,
- desiredBufferPeriod,
+ loopbackEnabled ? AUDCLNT_STREAMFLAGS_LOOPBACK : AUDCLNT_STREAMFLAGS_EVENTCALLBACK,
+ 0,
+ 0,
captureFormat,
NULL );
if ( FAILED( hr ) ) {
- errorText_ = "RtApiWasapi::wasapiThread: Unable to initialize capture audio client.";
+ errorText = "RtApiWasapi::wasapiThread: Unable to initialize capture audio client.";
goto Exit;
}
hr = captureAudioClient->GetService( __uuidof( IAudioCaptureClient ),
( void** ) &captureClient );
if ( FAILED( hr ) ) {
- errorText_ = "RtApiWasapi::wasapiThread: Unable to retrieve capture client handle.";
+ errorText = "RtApiWasapi::wasapiThread: Unable to retrieve capture client handle.";
goto Exit;
}
- // configure captureEvent to trigger on every available capture buffer
- captureEvent = CreateEvent( NULL, FALSE, FALSE, NULL );
- if ( !captureEvent ) {
- errorType = RtAudioError::SYSTEM_ERROR;
- errorText_ = "RtApiWasapi::wasapiThread: Unable to create capture event.";
- goto Exit;
+ // don't configure captureEvent if in loopback mode
+ if ( !loopbackEnabled )
+ {
+ // configure captureEvent to trigger on every available capture buffer
+ captureEvent = CreateEvent( NULL, FALSE, FALSE, NULL );
+ if ( !captureEvent ) {
+ errorType = RtAudioError::SYSTEM_ERROR;
+ errorText = "RtApiWasapi::wasapiThread: Unable to create capture event.";
+ goto Exit;
+ }
+
+ hr = captureAudioClient->SetEventHandle( captureEvent );
+ if ( FAILED( hr ) ) {
+ errorText = "RtApiWasapi::wasapiThread: Unable to set capture event handle.";
+ goto Exit;
+ }
+
+ ( ( WasapiHandle* ) stream_.apiHandle )->captureEvent = captureEvent;
}
- hr = captureAudioClient->SetEventHandle( captureEvent );
+ ( ( WasapiHandle* ) stream_.apiHandle )->captureClient = captureClient;
+
+ // reset the capture stream
+ hr = captureAudioClient->Reset();
if ( FAILED( hr ) ) {
- errorText_ = "RtApiWasapi::wasapiThread: Unable to set capture event handle.";
+ errorText = "RtApiWasapi::wasapiThread: Unable to reset capture stream.";
goto Exit;
}
- ( ( WasapiHandle* ) stream_.apiHandle )->captureClient = captureClient;
- ( ( WasapiHandle* ) stream_.apiHandle )->captureEvent = captureEvent;
+ // start the capture stream
+ hr = captureAudioClient->Start();
+ if ( FAILED( hr ) ) {
+ errorText = "RtApiWasapi::wasapiThread: Unable to start capture stream.";
+ goto Exit;
+ }
}
unsigned int inBufferSize = 0;
hr = captureAudioClient->GetBufferSize( &inBufferSize );
if ( FAILED( hr ) ) {
- errorText_ = "RtApiWasapi::wasapiThread: Unable to get capture buffer size.";
+ errorText = "RtApiWasapi::wasapiThread: Unable to get capture buffer size.";
goto Exit;
}
// set captureBuffer size
captureBuffer.setBufferSize( inBufferSize + outBufferSize, formatBytes( stream_.deviceFormat[INPUT] ) );
-
- // reset the capture stream
- hr = captureAudioClient->Reset();
- if ( FAILED( hr ) ) {
- errorText_ = "RtApiWasapi::wasapiThread: Unable to reset capture stream.";
- goto Exit;
- }
-
- // start the capture stream
- hr = captureAudioClient->Start();
- if ( FAILED( hr ) ) {
- errorText_ = "RtApiWasapi::wasapiThread: Unable to start capture stream.";
- goto Exit;
- }
}
// start render stream if applicable
if ( renderAudioClient ) {
hr = renderAudioClient->GetMixFormat( &renderFormat );
if ( FAILED( hr ) ) {
- errorText_ = "RtApiWasapi::wasapiThread: Unable to retrieve device mix format.";
+ errorText = "RtApiWasapi::wasapiThread: Unable to retrieve device mix format.";
goto Exit;
}
renderSrRatio = ( ( float ) renderFormat->nSamplesPerSec / stream_.sampleRate );
- // initialize render stream according to desire buffer size
- float desiredBufferSize = stream_.bufferSize * renderSrRatio;
- REFERENCE_TIME desiredBufferPeriod = ( REFERENCE_TIME ) ( ( float ) desiredBufferSize * 10000000 / renderFormat->nSamplesPerSec );
-
if ( !renderClient ) {
hr = renderAudioClient->Initialize( AUDCLNT_SHAREMODE_SHARED,
AUDCLNT_STREAMFLAGS_EVENTCALLBACK,
- desiredBufferPeriod,
- desiredBufferPeriod,
+ 0,
+ 0,
renderFormat,
NULL );
if ( FAILED( hr ) ) {
- errorText_ = "RtApiWasapi::wasapiThread: Unable to initialize render audio client.";
+ errorText = "RtApiWasapi::wasapiThread: Unable to initialize render audio client.";
goto Exit;
}
hr = renderAudioClient->GetService( __uuidof( IAudioRenderClient ),
( void** ) &renderClient );
if ( FAILED( hr ) ) {
- errorText_ = "RtApiWasapi::wasapiThread: Unable to retrieve render client handle.";
+ errorText = "RtApiWasapi::wasapiThread: Unable to retrieve render client handle.";
goto Exit;
}
renderEvent = CreateEvent( NULL, FALSE, FALSE, NULL );
if ( !renderEvent ) {
errorType = RtAudioError::SYSTEM_ERROR;
- errorText_ = "RtApiWasapi::wasapiThread: Unable to create render event.";
+ errorText = "RtApiWasapi::wasapiThread: Unable to create render event.";
goto Exit;
}
hr = renderAudioClient->SetEventHandle( renderEvent );
if ( FAILED( hr ) ) {
- errorText_ = "RtApiWasapi::wasapiThread: Unable to set render event handle.";
+ errorText = "RtApiWasapi::wasapiThread: Unable to set render event handle.";
goto Exit;
}
( ( WasapiHandle* ) stream_.apiHandle )->renderClient = renderClient;
( ( WasapiHandle* ) stream_.apiHandle )->renderEvent = renderEvent;
+
+ // reset the render stream
+ hr = renderAudioClient->Reset();
+ if ( FAILED( hr ) ) {
+ errorText = "RtApiWasapi::wasapiThread: Unable to reset render stream.";
+ goto Exit;
+ }
+
+ // start the render stream
+ hr = renderAudioClient->Start();
+ if ( FAILED( hr ) ) {
+ errorText = "RtApiWasapi::wasapiThread: Unable to start render stream.";
+ goto Exit;
+ }
}
unsigned int outBufferSize = 0;
hr = renderAudioClient->GetBufferSize( &outBufferSize );
if ( FAILED( hr ) ) {
- errorText_ = "RtApiWasapi::wasapiThread: Unable to get render buffer size.";
+ errorText = "RtApiWasapi::wasapiThread: Unable to get render buffer size.";
goto Exit;
}
// set renderBuffer size
renderBuffer.setBufferSize( inBufferSize + outBufferSize, formatBytes( stream_.deviceFormat[OUTPUT] ) );
-
- // reset the render stream
- hr = renderAudioClient->Reset();
- if ( FAILED( hr ) ) {
- errorText_ = "RtApiWasapi::wasapiThread: Unable to reset render stream.";
- goto Exit;
- }
-
- // start the render stream
- hr = renderAudioClient->Start();
- if ( FAILED( hr ) ) {
- errorText_ = "RtApiWasapi::wasapiThread: Unable to start render stream.";
- goto Exit;
- }
}
// malloc buffer memory
}
convBuffSize *= 2; // allow overflow for *SrRatio remainders
- convBuffer = ( char* ) malloc( convBuffSize );
- stream_.deviceBuffer = ( char* ) malloc( deviceBuffSize );
+ convBuffer = ( char* ) calloc( convBuffSize, 1 );
+ stream_.deviceBuffer = ( char* ) calloc( deviceBuffSize, 1 );
if ( !convBuffer || !stream_.deviceBuffer ) {
errorType = RtAudioError::MEMORY_ERROR;
- errorText_ = "RtApiWasapi::wasapiThread: Error allocating device buffer memory.";
+ errorText = "RtApiWasapi::wasapiThread: Error allocating device buffer memory.";
goto Exit;
}
captureFlags & AUDCLNT_BUFFERFLAGS_DATA_DISCONTINUITY ? RTAUDIO_INPUT_OVERFLOW : 0,
stream_.callbackInfo.userData );
+ // tick stream time
+ RtApi::tickStreamTime();
+
// Handle return value from callback
if ( callbackResult == 1 ) {
// instantiate a thread to stop this thread
HANDLE threadHandle = CreateThread( NULL, 0, stopWasapiThread, this, 0, NULL );
if ( !threadHandle ) {
errorType = RtAudioError::THREAD_ERROR;
- errorText_ = "RtApiWasapi::wasapiThread: Unable to instantiate stream stop thread.";
+ errorText = "RtApiWasapi::wasapiThread: Unable to instantiate stream stop thread.";
goto Exit;
}
else if ( !CloseHandle( threadHandle ) ) {
errorType = RtAudioError::THREAD_ERROR;
- errorText_ = "RtApiWasapi::wasapiThread: Unable to close stream stop thread handle.";
+ errorText = "RtApiWasapi::wasapiThread: Unable to close stream stop thread handle.";
goto Exit;
}
HANDLE threadHandle = CreateThread( NULL, 0, abortWasapiThread, this, 0, NULL );
if ( !threadHandle ) {
errorType = RtAudioError::THREAD_ERROR;
- errorText_ = "RtApiWasapi::wasapiThread: Unable to instantiate stream abort thread.";
+ errorText = "RtApiWasapi::wasapiThread: Unable to instantiate stream abort thread.";
goto Exit;
}
else if ( !CloseHandle( threadHandle ) ) {
errorType = RtAudioError::THREAD_ERROR;
- errorText_ = "RtApiWasapi::wasapiThread: Unable to close stream abort thread handle.";
+ errorText = "RtApiWasapi::wasapiThread: Unable to close stream abort thread handle.";
goto Exit;
}
stream_.convertInfo[OUTPUT] );
}
+ else {
+ // no further conversion needed; simply copy userBuffer to deviceBuffer
+ memcpy( stream_.deviceBuffer,
+ stream_.userBuffer[OUTPUT],
+ stream_.bufferSize * stream_.nUserChannels[OUTPUT] * formatBytes( stream_.userFormat ) );
+ }
// Convert callback buffer to stream sample rate
renderResampler->Convert( convBuffer,
if ( captureAudioClient ) {
// if the callback input buffer was not pulled from captureBuffer, wait for next capture event
if ( !callbackPulled ) {
- WaitForSingleObject( captureEvent, INFINITE );
+ WaitForSingleObject( loopbackEnabled ? renderEvent : captureEvent, INFINITE );
}
// Get capture buffer from stream
&bufferFrameCount,
&captureFlags, NULL, NULL );
if ( FAILED( hr ) ) {
- errorText_ = "RtApiWasapi::wasapiThread: Unable to retrieve capture buffer.";
+ errorText = "RtApiWasapi::wasapiThread: Unable to retrieve capture buffer.";
goto Exit;
}
// Release capture buffer
hr = captureClient->ReleaseBuffer( bufferFrameCount );
if ( FAILED( hr ) ) {
- errorText_ = "RtApiWasapi::wasapiThread: Unable to release capture buffer.";
+ errorText = "RtApiWasapi::wasapiThread: Unable to release capture buffer.";
goto Exit;
}
}
// Inform WASAPI that capture was unsuccessful
hr = captureClient->ReleaseBuffer( 0 );
if ( FAILED( hr ) ) {
- errorText_ = "RtApiWasapi::wasapiThread: Unable to release capture buffer.";
+ errorText = "RtApiWasapi::wasapiThread: Unable to release capture buffer.";
goto Exit;
}
}
// Inform WASAPI that capture was unsuccessful
hr = captureClient->ReleaseBuffer( 0 );
if ( FAILED( hr ) ) {
- errorText_ = "RtApiWasapi::wasapiThread: Unable to release capture buffer.";
+ errorText = "RtApiWasapi::wasapiThread: Unable to release capture buffer.";
goto Exit;
}
}
// Get render buffer from stream
hr = renderAudioClient->GetBufferSize( &bufferFrameCount );
if ( FAILED( hr ) ) {
- errorText_ = "RtApiWasapi::wasapiThread: Unable to retrieve render buffer size.";
+ errorText = "RtApiWasapi::wasapiThread: Unable to retrieve render buffer size.";
goto Exit;
}
hr = renderAudioClient->GetCurrentPadding( &numFramesPadding );
if ( FAILED( hr ) ) {
- errorText_ = "RtApiWasapi::wasapiThread: Unable to retrieve render buffer padding.";
+ errorText = "RtApiWasapi::wasapiThread: Unable to retrieve render buffer padding.";
goto Exit;
}
if ( bufferFrameCount != 0 ) {
hr = renderClient->GetBuffer( bufferFrameCount, &streamBuffer );
if ( FAILED( hr ) ) {
- errorText_ = "RtApiWasapi::wasapiThread: Unable to retrieve render buffer.";
+ errorText = "RtApiWasapi::wasapiThread: Unable to retrieve render buffer.";
goto Exit;
}
// Release render buffer
hr = renderClient->ReleaseBuffer( bufferFrameCount, 0 );
if ( FAILED( hr ) ) {
- errorText_ = "RtApiWasapi::wasapiThread: Unable to release render buffer.";
+ errorText = "RtApiWasapi::wasapiThread: Unable to release render buffer.";
goto Exit;
}
}
// Inform WASAPI that render was unsuccessful
hr = renderClient->ReleaseBuffer( 0, 0 );
if ( FAILED( hr ) ) {
- errorText_ = "RtApiWasapi::wasapiThread: Unable to release render buffer.";
+ errorText = "RtApiWasapi::wasapiThread: Unable to release render buffer.";
goto Exit;
}
}
// Inform WASAPI that render was unsuccessful
hr = renderClient->ReleaseBuffer( 0, 0 );
if ( FAILED( hr ) ) {
- errorText_ = "RtApiWasapi::wasapiThread: Unable to release render buffer.";
+ errorText = "RtApiWasapi::wasapiThread: Unable to release render buffer.";
goto Exit;
}
}
// unsetting the callbackPulled flag lets the stream know that
// the audio device is ready for another callback output buffer.
callbackPulled = false;
-
- // tick stream time
- RtApi::tickStreamTime();
}
}
CoUninitialize();
- if ( !errorText_.empty() )
- error( errorType );
-
// update stream state
stream_.state = STREAM_STOPPED;
+
+ if ( !errorText.empty() )
+ {
+ errorText_ = errorText;
+ error( errorType );
+ }
}
//******************** End of __WINDOWS_WASAPI__ *********************//
return;
}
+ #if defined( HAVE_GETTIMEOFDAY )
+ gettimeofday( &stream_.lastTickTimestamp, NULL );
+ #endif
+
DsHandle *handle = (DsHandle *) stream_.apiHandle;
// Increase scheduler frequency on lesser windows (a side-effect of
unsigned nDevices = 0;
int result, subdevice, card;
char name[64];
- snd_ctl_t *handle;
+ snd_ctl_t *handle = 0;
// Count cards and devices
card = -1;
sprintf( name, "hw:%d", card );
result = snd_ctl_open( &handle, name, 0 );
if ( result < 0 ) {
+ handle = 0;
errorStream_ << "RtApiAlsa::getDeviceCount: control open, card = " << card << ", " << snd_strerror( result ) << ".";
errorText_ = errorStream_.str();
error( RtAudioError::WARNING );
nDevices++;
}
nextcard:
- snd_ctl_close( handle );
+ if ( handle )
+ snd_ctl_close( handle );
snd_card_next( &card );
}
unsigned nDevices = 0;
int result, subdevice, card;
char name[64];
- snd_ctl_t *chandle;
+ snd_ctl_t *chandle = 0;
// Count cards and devices
card = -1;
sprintf( name, "hw:%d", card );
result = snd_ctl_open( &chandle, name, SND_CTL_NONBLOCK );
if ( result < 0 ) {
+ chandle = 0;
errorStream_ << "RtApiAlsa::getDeviceInfo: control open, card = " << card << ", " << snd_strerror( result ) << ".";
errorText_ = errorStream_.str();
error( RtAudioError::WARNING );
nDevices++;
}
nextcard:
- snd_ctl_close( chandle );
+ if ( chandle )
+ snd_ctl_close( chandle );
snd_card_next( &card );
}
if ( result == 0 ) {
if ( nDevices == device ) {
strcpy( name, "default" );
+ snd_ctl_close( chandle );
goto foundDevice;
}
nDevices++;
}
+ snd_ctl_close( chandle );
if ( nDevices == 0 ) {
// This should not happen because a check is made before this function is called.
pthread_attr_t attr;
pthread_attr_init( &attr );
pthread_attr_setdetachstate( &attr, PTHREAD_CREATE_JOINABLE );
-#ifdef SCHED_RR // Undefined with some OSes (eg: NetBSD 1.6.x with GNU Pthread)
+#ifdef SCHED_RR // Undefined with some OSes (e.g. NetBSD 1.6.x with GNU Pthread)
if ( options && options->flags & RTAUDIO_SCHEDULE_REALTIME ) {
stream_.callbackInfo.doRealtime = true;
struct sched_param param;
MUTEX_LOCK( &stream_.mutex );
+ #if defined( HAVE_GETTIMEOFDAY )
+ gettimeofday( &stream_.lastTickTimestamp, NULL );
+ #endif
+
int result = 0;
snd_pcm_state_t state;
AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
RtApiAlsa *object = (RtApiAlsa *) info->object;
bool *isRunning = &info->isRunning;
-#ifdef SCHED_RR // Undefined with some OSes (eg: NetBSD 1.6.x with GNU Pthread)
+#ifdef SCHED_RR // Undefined with some OSes (e.g. NetBSD 1.6.x with GNU Pthread)
if ( info->doRealtime ) {
std::cerr << "RtAudio alsa: " <<
(sched_getscheduler(0) == SCHED_RR ? "" : "_NOT_ ") <<
RtApiPulse *context = static_cast<RtApiPulse *>( cbi->object );
volatile bool *isRunning = &cbi->isRunning;
-#ifdef SCHED_RR // Undefined with some OSes (eg: NetBSD 1.6.x with GNU Pthread)
+#ifdef SCHED_RR // Undefined with some OSes (e.g. NetBSD 1.6.x with GNU Pthread)
if (cbi->doRealtime) {
std::cerr << "RtAudio pulse: " <<
(sched_getscheduler(0) == SCHED_RR ? "" : "_NOT_ ") <<
MUTEX_LOCK( &stream_.mutex );
+ #if defined( HAVE_GETTIMEOFDAY )
+ gettimeofday( &stream_.lastTickTimestamp, NULL );
+ #endif
+
stream_.state = STREAM_RUNNING;
pah->runnable = true;
pthread_attr_t attr;
pthread_attr_init( &attr );
pthread_attr_setdetachstate( &attr, PTHREAD_CREATE_JOINABLE );
-#ifdef SCHED_RR // Undefined with some OSes (eg: NetBSD 1.6.x with GNU Pthread)
+#ifdef SCHED_RR // Undefined with some OSes (e.g. NetBSD 1.6.x with GNU Pthread)
if ( options && options->flags & RTAUDIO_SCHEDULE_REALTIME ) {
stream_.callbackInfo.doRealtime = true;
struct sched_param param;
pthread_attr_t attr;
pthread_attr_init( &attr );
pthread_attr_setdetachstate( &attr, PTHREAD_CREATE_JOINABLE );
-#ifdef SCHED_RR // Undefined with some OSes (eg: NetBSD 1.6.x with GNU Pthread)
+#ifdef SCHED_RR // Undefined with some OSes (e.g. NetBSD 1.6.x with GNU Pthread)
if ( options && options->flags & RTAUDIO_SCHEDULE_REALTIME ) {
stream_.callbackInfo.doRealtime = true;
struct sched_param param;
MUTEX_LOCK( &stream_.mutex );
+ #if defined( HAVE_GETTIMEOFDAY )
+ gettimeofday( &stream_.lastTickTimestamp, NULL );
+ #endif
+
stream_.state = STREAM_RUNNING;
// No need to do anything else here ... OSS automatically starts
RtApiOss *object = (RtApiOss *) info->object;
bool *isRunning = &info->isRunning;
-#ifdef SCHED_RR // Undefined with some OSes (eg: NetBSD 1.6.x with GNU Pthread)
+#ifdef SCHED_RR // Undefined with some OSes (e.g. NetBSD 1.6.x with GNU Pthread)
if (info->doRealtime) {
std::cerr << "RtAudio oss: " <<
(sched_getscheduler(0) == SCHED_RR ? "" : "_NOT_ ") <<
RtAudioErrorCallback errorCallback = (RtAudioErrorCallback) stream_.callbackInfo.errorCallback;
if ( errorCallback ) {
- // abortStream() can generate new error messages. Ignore them. Just keep original one.
-
- if ( firstErrorOccurred_ )
- return;
-
- firstErrorOccurred_ = true;
const std::string errorMessage = errorText_;
-
- if ( type != RtAudioError::WARNING && stream_.state != STREAM_STOPPED) {
- stream_.callbackInfo.isRunning = false; // exit from the thread
- abortStream();
- }
-
errorCallback( type, errorMessage );
- firstErrorOccurred_ = false;
- return;
}
-
- if ( type == RtAudioError::WARNING && showWarnings_ == true )
- std::cerr << '\n' << errorText_ << "\n\n";
- else if ( type != RtAudioError::WARNING )
- throw( RtAudioError( errorText_, type ) );
+ else {
+   if ( type == RtAudioError::WARNING && showWarnings_ == true )
+     std::cerr << '\n' << errorText_ << "\n\n";
+   else if ( type != RtAudioError::WARNING )
+     throw( RtAudioError( errorText_, type ) );
+ }
}
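
With this simplification, an installed RtAudioErrorCallback receives every warning and error, while console output and exceptions remain the fallback only when no callback is set. A brief sketch of such a callback (the handler name is illustrative):

```cpp
#include "RtAudio.h"
#include <iostream>

// Sketch: a single sink for all RtAudio diagnostics, installed by passing it
// as the errorCallback argument of RtAudio::openStream().
void logRtAudioError( RtAudioError::Type type, const std::string &errorText )
{
  if ( type == RtAudioError::WARNING )
    std::cerr << "[rtaudio warning] " << errorText << std::endl;
  else
    std::cerr << "[rtaudio error] " << errorText << std::endl;
}
```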
+/*
void RtApi :: verifyStream()
{
if ( stream_.state == STREAM_CLOSED ) {
error( RtAudioError::INVALID_USE );
}
}
+*/
void RtApi :: clearStreamInfo()
{