X-Git-Url: https://git.carlh.net/gitweb/?a=blobdiff_plain;f=RtAudio.cpp;h=e7c3ad12cce4542e6aa5663c7feaae279ba92cac;hb=50e9bba7559500f1ba01f3d8bc29ce15969d49ca;hp=7c87572fd79964819720eab69ccbce4cbaf1dd45;hpb=fa8b676a577de663b8622d0e8f6ce3605576b977;p=rtaudio.git

diff --git a/RtAudio.cpp b/RtAudio.cpp
index 7c87572..e7c3ad1 100644
--- a/RtAudio.cpp
+++ b/RtAudio.cpp
@@ -7,10 +7,11 @@
     and OSS), Macintosh OS X (CoreAudio and Jack), and Windows
     (DirectSound, ASIO and WASAPI) operating systems.
 
+    RtAudio GitHub site: https://github.com/thestk/rtaudio
     RtAudio WWW site: http://www.music.mcgill.ca/~gary/rtaudio/
 
     RtAudio: realtime audio i/o C++ classes
-    Copyright (c) 2001-2017 Gary P. Scavone
+    Copyright (c) 2001-2019 Gary P. Scavone
 
     Permission is hereby granted, free of charge, to any person
     obtaining a copy of this software and associated documentation files
@@ -38,7 +39,7 @@
 */
 /************************************************************************/
 
-// RtAudio: Version 5.0.0
+// RtAudio: Version 5.1.0
 
 #include "RtAudio.h"
 #include <iostream>
@@ -1541,6 +1542,10 @@ void RtApiCore :: startStream( void )
     return;
   }
 
+  #if defined( HAVE_GETTIMEOFDAY )
+  gettimeofday( &stream_.lastTickTimestamp, NULL );
+  #endif
+
   OSStatus result = noErr;
   CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
   if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
@@ -1901,7 +1906,10 @@ bool RtApiCore :: callbackEvent( AudioDeviceID deviceId,
  unlock:
   //MUTEX_UNLOCK( &stream_.mutex );
 
-  RtApi::tickStreamTime();
+  // Make sure to only tick duplex stream time once if using two devices
+  if ( stream_.mode != DUPLEX || (stream_.mode == DUPLEX && handle->id[0] != handle->id[1] && deviceId == handle->id[0] ) )
+    RtApi::tickStreamTime();
+
   return SUCCESS;
 }
 
@@ -2499,6 +2507,10 @@ void RtApiJack :: startStream( void )
     return;
   }
 
+  #if defined( HAVE_GETTIMEOFDAY )
+  gettimeofday( &stream_.lastTickTimestamp, NULL );
+  #endif
+
   JackHandle *handle = (JackHandle *) stream_.apiHandle;
   int result = jack_activate( handle->client );
   if ( result ) {
@@ -3378,6 +3390,10 @@ void RtApiAsio :: startStream()
     return;
   }
 
+  #if defined( HAVE_GETTIMEOFDAY )
+  gettimeofday( &stream_.lastTickTimestamp, NULL );
+  #endif
+
   AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
   ASIOError result = ASIOStart();
   if ( result != ASE_OK ) {
@@ -3829,8 +3845,9 @@ public:
       relOutIndex += bufferSize_;
     }
 
-    // "in" index can end on the "out" index but cannot begin at it
-    if ( inIndex_ <= relOutIndex && inIndexEnd > relOutIndex ) {
+    // the "IN" index CAN BEGIN at the "OUT" index
+    // the "IN" index CANNOT END at the "OUT" index
+    if ( inIndex_ < relOutIndex && inIndexEnd >= relOutIndex ) {
       return false; // not enough space between "in" index and "out" index
     }
 
@@ -3890,8 +3907,9 @@ public:
       relInIndex += bufferSize_;
     }
 
-    // "out" index can begin at and end on the "in" index
-    if ( outIndex_ < relInIndex && outIndexEnd > relInIndex ) {
+    // the "OUT" index CANNOT BEGIN at the "IN" index
+    // the "OUT" index CAN END at the "IN" index
+    if ( outIndex_ <= relInIndex && outIndexEnd > relInIndex ) {
       return false; // not enough space between "out" index and "in" index
     }
 
@@ -4551,6 +4569,10 @@ void RtApiWasapi::startStream( void )
     return;
   }
 
+  #if defined( HAVE_GETTIMEOFDAY )
+  gettimeofday( &stream_.lastTickTimestamp, NULL );
+  #endif
+
   // update stream state
   stream_.state = STREAM_RUNNING;
 
@@ -4590,26 +4612,6 @@ void RtApiWasapi::stopStream( void )
   // Wait for the last buffer to play before stopping.
   Sleep( 1000 * stream_.bufferSize / stream_.sampleRate );
 
-  // stop capture client if applicable
-  if ( ( ( WasapiHandle* ) stream_.apiHandle )->captureAudioClient ) {
-    HRESULT hr = ( ( WasapiHandle* ) stream_.apiHandle )->captureAudioClient->Stop();
-    if ( FAILED( hr ) ) {
-      errorText_ = "RtApiWasapi::stopStream: Unable to stop capture stream.";
-      error( RtAudioError::DRIVER_ERROR );
-      return;
-    }
-  }
-
-  // stop render client if applicable
-  if ( ( ( WasapiHandle* ) stream_.apiHandle )->renderAudioClient ) {
-    HRESULT hr = ( ( WasapiHandle* ) stream_.apiHandle )->renderAudioClient->Stop();
-    if ( FAILED( hr ) ) {
-      errorText_ = "RtApiWasapi::stopStream: Unable to stop render stream.";
-      error( RtAudioError::DRIVER_ERROR );
-      return;
-    }
-  }
-
   // close thread handle
   if ( stream_.callbackInfo.thread && !CloseHandle( ( void* ) stream_.callbackInfo.thread ) ) {
     errorText_ = "RtApiWasapi::stopStream: Unable to close callback thread.";
@@ -4640,26 +4642,6 @@ void RtApiWasapi::abortStream( void )
     Sleep( 1 );
   }
 
-  // stop capture client if applicable
-  if ( ( ( WasapiHandle* ) stream_.apiHandle )->captureAudioClient ) {
-    HRESULT hr = ( ( WasapiHandle* ) stream_.apiHandle )->captureAudioClient->Stop();
-    if ( FAILED( hr ) ) {
-      errorText_ = "RtApiWasapi::abortStream: Unable to stop capture stream.";
-      error( RtAudioError::DRIVER_ERROR );
-      return;
-    }
-  }
-
-  // stop render client if applicable
-  if ( ( ( WasapiHandle* ) stream_.apiHandle )->renderAudioClient ) {
-    HRESULT hr = ( ( WasapiHandle* ) stream_.apiHandle )->renderAudioClient->Stop();
-    if ( FAILED( hr ) ) {
-      errorText_ = "RtApiWasapi::abortStream: Unable to stop render stream.";
-      error( RtAudioError::DRIVER_ERROR );
-      return;
-    }
-  }
-
   // close thread handle
   if ( stream_.callbackInfo.thread && !CloseHandle( ( void* ) stream_.callbackInfo.thread ) ) {
     errorText_ = "RtApiWasapi::abortStream: Unable to close callback thread.";
@@ -4860,15 +4842,14 @@ bool RtApiWasapi::probeDeviceOpen( unsigned int device, StreamMode mode, unsigne
   stream_.doConvertBuffer[mode] = false;
   if ( stream_.userFormat != stream_.deviceFormat[mode] ||
        stream_.nUserChannels[0] != stream_.nDeviceChannels[0] ||
-       stream_.nUserChannels[1] != stream_.nDeviceChannels[1] ||
-       stream_.userInterleaved )
+       stream_.nUserChannels[1] != stream_.nDeviceChannels[1] )
     stream_.doConvertBuffer[mode] = true;
   else if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
             stream_.nUserChannels[mode] > 1 )
     stream_.doConvertBuffer[mode] = true;
 
   if ( stream_.doConvertBuffer[mode] )
-    setConvertInfo( mode, 0 );
+    setConvertInfo( mode, firstChannel );
 
   // Allocate necessary internal buffers
   bufferBytes = stream_.nUserChannels[mode] * stream_.bufferSize * formatBytes( stream_.userFormat );
@@ -4982,7 +4963,8 @@ void RtApiWasapi::wasapiThread()
   HMODULE AvrtDll = LoadLibrary( (LPCTSTR) "AVRT.dll" );
   if ( AvrtDll ) {
     DWORD taskIndex = 0;
-    TAvSetMmThreadCharacteristicsPtr AvSetMmThreadCharacteristicsPtr = ( TAvSetMmThreadCharacteristicsPtr ) GetProcAddress( AvrtDll, "AvSetMmThreadCharacteristicsW" );
+    TAvSetMmThreadCharacteristicsPtr AvSetMmThreadCharacteristicsPtr =
+      ( TAvSetMmThreadCharacteristicsPtr ) (void(*)()) GetProcAddress( AvrtDll, "AvSetMmThreadCharacteristicsW" );
     AvSetMmThreadCharacteristicsPtr( L"Pro Audio", &taskIndex );
     FreeLibrary( AvrtDll );
   }
@@ -5042,6 +5024,20 @@
     }
 
     ( ( WasapiHandle* ) stream_.apiHandle )->captureClient = captureClient;
+
+    // reset the capture stream
+    hr = captureAudioClient->Reset();
+    if ( FAILED( hr ) ) {
+      errorText = "RtApiWasapi::wasapiThread: Unable to reset capture stream.";
+      goto Exit;
+    }
+
+    // start the capture stream
+    hr = captureAudioClient->Start();
+    if ( FAILED( hr ) ) {
+      errorText = "RtApiWasapi::wasapiThread: Unable to start capture stream.";
+      goto Exit;
+    }
   }
 
   unsigned int inBufferSize = 0;
@@ -5057,20 +5053,6 @@
 
     // set captureBuffer size
     captureBuffer.setBufferSize( inBufferSize + outBufferSize, formatBytes( stream_.deviceFormat[INPUT] ) );
-
-    // reset the capture stream
-    hr = captureAudioClient->Reset();
-    if ( FAILED( hr ) ) {
-      errorText = "RtApiWasapi::wasapiThread: Unable to reset capture stream.";
-      goto Exit;
-    }
-
-    // start the capture stream
-    hr = captureAudioClient->Start();
-    if ( FAILED( hr ) ) {
-      errorText = "RtApiWasapi::wasapiThread: Unable to start capture stream.";
-      goto Exit;
-    }
   }
 
   // start render stream if applicable
@@ -5123,6 +5105,20 @@
 
     ( ( WasapiHandle* ) stream_.apiHandle )->renderClient = renderClient;
     ( ( WasapiHandle* ) stream_.apiHandle )->renderEvent = renderEvent;
+
+    // reset the render stream
+    hr = renderAudioClient->Reset();
+    if ( FAILED( hr ) ) {
+      errorText = "RtApiWasapi::wasapiThread: Unable to reset render stream.";
+      goto Exit;
+    }
+
+    // start the render stream
+    hr = renderAudioClient->Start();
+    if ( FAILED( hr ) ) {
+      errorText = "RtApiWasapi::wasapiThread: Unable to start render stream.";
+      goto Exit;
+    }
   }
 
   unsigned int outBufferSize = 0;
@@ -5138,20 +5134,6 @@
 
     // set renderBuffer size
     renderBuffer.setBufferSize( inBufferSize + outBufferSize, formatBytes( stream_.deviceFormat[OUTPUT] ) );
-
-    // reset the render stream
-    hr = renderAudioClient->Reset();
-    if ( FAILED( hr ) ) {
-      errorText = "RtApiWasapi::wasapiThread: Unable to reset render stream.";
-      goto Exit;
-    }
-
-    // start the render stream
-    hr = renderAudioClient->Start();
-    if ( FAILED( hr ) ) {
-      errorText = "RtApiWasapi::wasapiThread: Unable to start render stream.";
-      goto Exit;
-    }
   }
 
   // malloc buffer memory
@@ -5175,8 +5157,8 @@
   }
 
   convBuffSize *= 2; // allow overflow for *SrRatio remainders
-  convBuffer = ( char* ) malloc( convBuffSize );
-  stream_.deviceBuffer = ( char* ) malloc( deviceBuffSize );
+  convBuffer = ( char* ) calloc( convBuffSize, 1 );
+  stream_.deviceBuffer = ( char* ) calloc( deviceBuffSize, 1 );
   if ( !convBuffer || !stream_.deviceBuffer ) {
     errorType = RtAudioError::MEMORY_ERROR;
     errorText = "RtApiWasapi::wasapiThread: Error allocating device buffer memory.";
@@ -5263,6 +5245,9 @@
                                  captureFlags & AUDCLNT_BUFFERFLAGS_DATA_DISCONTINUITY ? RTAUDIO_INPUT_OVERFLOW : 0,
                                  stream_.callbackInfo.userData );
 
+      // tick stream time
+      RtApi::tickStreamTime();
+
       // Handle return value from callback
       if ( callbackResult == 1 ) {
         // instantiate a thread to stop this thread
@@ -5318,6 +5303,12 @@
                        stream_.convertInfo[OUTPUT] );
       }
+      else {
+        // no further conversion, simple copy userBuffer to deviceBuffer
+        memcpy( stream_.deviceBuffer,
+                stream_.userBuffer[OUTPUT],
+                stream_.bufferSize * stream_.nUserChannels[OUTPUT] * formatBytes( stream_.userFormat ) );
+      }
 
       // Convert callback buffer to stream sample rate
       renderResampler->Convert( convBuffer,
@@ -5465,9 +5456,6 @@
       // unsetting the callbackPulled flag lets the stream know that
       // the audio device is ready for another callback output buffer.
       callbackPulled = false;
-
-      // tick stream time
-      RtApi::tickStreamTime();
     }
   }
 
@@ -6387,6 +6375,10 @@ void RtApiDs :: startStream()
     return;
   }
 
+  #if defined( HAVE_GETTIMEOFDAY )
+  gettimeofday( &stream_.lastTickTimestamp, NULL );
+  #endif
+
   DsHandle *handle = (DsHandle *) stream_.apiHandle;
 
   // Increase scheduler frequency on lesser windows (a side-effect of
@@ -7154,7 +7146,7 @@ unsigned int RtApiAlsa :: getDeviceCount( void )
   unsigned nDevices = 0;
   int result, subdevice, card;
   char name[64];
-  snd_ctl_t *handle;
+  snd_ctl_t *handle = 0;
 
   // Count cards and devices
   card = -1;
@@ -7163,6 +7155,7 @@ unsigned int RtApiAlsa :: getDeviceCount( void )
     sprintf( name, "hw:%d", card );
     result = snd_ctl_open( &handle, name, 0 );
     if ( result < 0 ) {
+      handle = 0;
      errorStream_ << "RtApiAlsa::getDeviceCount: control open, card = " << card << ", " << snd_strerror( result ) << ".";
      errorText_ = errorStream_.str();
      error( RtAudioError::WARNING );
@@ -7182,7 +7175,8 @@ unsigned int RtApiAlsa :: getDeviceCount( void )
       nDevices++;
     }
   nextcard:
-    snd_ctl_close( handle );
+    if ( handle )
+      snd_ctl_close( handle );
     snd_card_next( &card );
   }
 
@@ -7203,7 +7197,7 @@ RtAudio::DeviceInfo RtApiAlsa :: getDeviceInfo( unsigned int device )
   unsigned nDevices = 0;
   int result, subdevice, card;
   char name[64];
-  snd_ctl_t *chandle;
+  snd_ctl_t *chandle = 0;
 
   // Count cards and devices
   card = -1;
@@ -7213,6 +7207,7 @@ RtAudio::DeviceInfo RtApiAlsa :: getDeviceInfo( unsigned int device )
     sprintf( name, "hw:%d", card );
     result = snd_ctl_open( &chandle, name, SND_CTL_NONBLOCK );
     if ( result < 0 ) {
+      chandle = 0;
      errorStream_ << "RtApiAlsa::getDeviceInfo: control open, card = " << card << ", " << snd_strerror( result ) << ".";
      errorText_ = errorStream_.str();
      error( RtAudioError::WARNING );
@@ -7235,7 +7230,8 @@ RtAudio::DeviceInfo RtApiAlsa :: getDeviceInfo( unsigned int device )
       nDevices++;
     }
   nextcard:
-    snd_ctl_close( chandle );
+    if ( chandle )
+      snd_ctl_close( chandle );
     snd_card_next( &card );
   }
 
@@ -8079,6 +8075,10 @@ void RtApiAlsa :: startStream()
 
   MUTEX_LOCK( &stream_.mutex );
 
+  #if defined( HAVE_GETTIMEOFDAY )
+  gettimeofday( &stream_.lastTickTimestamp, NULL );
+  #endif
+
   int result = 0;
   snd_pcm_state_t state;
   AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
@@ -8650,6 +8650,10 @@ void RtApiPulse::startStream( void )
 
   MUTEX_LOCK( &stream_.mutex );
 
+  #if defined( HAVE_GETTIMEOFDAY )
+  gettimeofday( &stream_.lastTickTimestamp, NULL );
+  #endif
+
   stream_.state = STREAM_RUNNING;
   pah->runnable = true;
 
@@ -9634,6 +9638,10 @@ void RtApiOss :: startStream()
 
   MUTEX_LOCK( &stream_.mutex );
 
+  #if defined( HAVE_GETTIMEOFDAY )
+  gettimeofday( &stream_.lastTickTimestamp, NULL );
+  #endif
+
   stream_.state = STREAM_RUNNING;
 
   // No need to do anything else here ... OSS automatically starts
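
One recurring change above is the gettimeofday( &stream_.lastTickTimestamp, NULL ) call added to every backend's startStream(). It stamps a wall-clock reference at the moment the stream starts so that, presumably, a stream-time query can interpolate between the discrete per-buffer ticks whenever HAVE_GETTIMEOFDAY is defined. A minimal sketch under that assumption follows; the helper name estimatedStreamTime() is illustrative and not part of RtAudio.

// Illustrative sketch only, not RtAudio code. Shows how a wall-clock stamp taken at
// startStream() (and presumably refreshed by tickStreamTime()) can refine a stream time
// that is otherwise advanced one buffer at a time.
#include <sys/time.h>   // gettimeofday(), struct timeval

double estimatedStreamTime( double streamTime,              // seconds accumulated by ticks
                            const struct timeval &lastTick ) // stamp from the last start/tick
{
  struct timeval now;
  gettimeofday( &now, NULL );
  double elapsed = ( now.tv_sec  - lastTick.tv_sec )
                 + ( now.tv_usec - lastTick.tv_usec ) * 0.000001;
  return streamTime + elapsed;   // estimated seconds since the stream was started
}

Stamping the timestamp in startStream() anchors such an estimate at the moment the stream starts rather than at the first callback tick.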
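The WasapiBuffer hunks likewise tighten the circular-buffer boundary rule: per the rewritten comments, a write (the "in" index) may now begin exactly at the read ("out") index but may not end there, and a read is the mirror image. A small self-contained sketch of that rule; the helper names writeWouldCollide() and readWouldUnderrun() are hypothetical and not RtAudio API.

// Illustrative sketch only: the full/empty boundary rule used by the patched checks.
// Indices are byte offsets into a ring of 'size' bytes; 'count' is the transfer size.
#include <cstddef>

// True if writing 'count' bytes at 'in' would overwrite unread data starting at 'out'.
bool writeWouldCollide( std::size_t in, std::size_t out, std::size_t count, std::size_t size )
{
  std::size_t relOut = ( out < in ) ? out + size : out;  // unwrap so relOut >= in
  return in < relOut && in + count >= relOut;            // may begin at 'out', must not end at it
}

// True if reading 'count' bytes at 'out' would run past the last byte written at 'in'.
bool readWouldUnderrun( std::size_t out, std::size_t in, std::size_t count, std::size_t size )
{
  std::size_t relIn = ( in < out ) ? in + size : in;     // unwrap so relIn >= out
  return out <= relIn && out + count > relIn;            // may end at 'in', must not begin at it
}

These mirror the patched conditions ( inIndex_ < relOutIndex && inIndexEnd >= relOutIndex for pushBuffer, and outIndex_ <= relInIndex && outIndexEnd > relInIndex for pullBuffer ) after the same wrap adjustment.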