summary | refs | log | tree | commit | diff
path: root/RtAudio.cpp
diff options
context:
space:
mode:
author    Gary Scavone <gary@music.mcgill.ca>  2014-04-26 09:26:44 -0400
committer Gary Scavone <gary@music.mcgill.ca>  2014-04-26 09:26:44 -0400
commit  1bf562b79a9a4940b2da19f7adbd6f065837c4d7 (patch)
tree    7b7d3f52dfd87a57ddf49b1bdb0732efe0101d2a /RtAudio.cpp
parent  154e121e500dc2e9f6e6277db5b4f35c4222a216 (diff)
parent  bd6177260dcaa1eeb16cbd73b1097ab989ceed68 (diff)
Merge branch 'master' of https://github.com/thestk/rtaudio
Diffstat (limited to 'RtAudio.cpp')
-rw-r--r--  RtAudio.cpp | 80
1 file changed, 35 insertions, 45 deletions
diff --git a/RtAudio.cpp b/RtAudio.cpp
index a31a54b..9f12016 100644
--- a/RtAudio.cpp
+++ b/RtAudio.cpp
@@ -3585,7 +3585,7 @@ static const char* getAsioErrorString( ASIOError result )
// - Introduces support for the Windows WASAPI API
// - Aims to deliver bit streams to and from hardware at the lowest possible latency, via the absolute minimum buffer sizes required
// - Provides flexible stream configuration to an otherwise strict and inflexible WASAPI interface
-// - Includes automatic internal conversion of sample rate, buffer size and channel count
+// - Includes automatic internal conversion of sample rate and buffer size between hardware and the user
#ifndef INITGUID
#define INITGUID
@@ -3767,15 +3767,14 @@ private:
//-----------------------------------------------------------------------------
-// In order to satisfy WASAPI's buffer requirements, we need a means of converting sample rate and
-// channel counts between HW and the user. The convertBufferWasapi function is used to perform
-// these conversions between HwIn->UserIn and UserOut->HwOut during the stream callback loop.
+// In order to satisfy WASAPI's buffer requirements, we need a means of converting sample rate
+// between HW and the user. The convertBufferWasapi function is used to perform this conversion
+// between HwIn->UserIn and UserOut->HwOut during the stream callback loop.
// This sample rate converter favors speed over quality, and works best with conversions between
// one rate and its multiple.
void convertBufferWasapi( char* outBuffer,
const char* inBuffer,
- const unsigned int& inChannelCount,
- const unsigned int& outChannelCount,
+ const unsigned int& channelCount,
const unsigned int& inSampleRate,
const unsigned int& outSampleRate,
const unsigned int& inSampleCount,
@@ -3786,7 +3785,6 @@ void convertBufferWasapi( char* outBuffer,
float sampleRatio = ( float ) outSampleRate / inSampleRate;
float sampleStep = 1.0f / sampleRatio;
float inSampleFraction = 0.0f;
- unsigned int commonChannelCount = std::min( inChannelCount, outChannelCount );
outSampleCount = ( unsigned int ) ( inSampleCount * sampleRatio );
@@ -3798,22 +3796,22 @@ void convertBufferWasapi( char* outBuffer,
switch ( format )
{
case RTAUDIO_SINT8:
- memcpy( &( ( char* ) outBuffer )[ outSample * outChannelCount ], &( ( char* ) inBuffer )[ inSample * inChannelCount ], commonChannelCount * sizeof( char ) );
+ memcpy( &( ( char* ) outBuffer )[ outSample * channelCount ], &( ( char* ) inBuffer )[ inSample * channelCount ], channelCount * sizeof( char ) );
break;
case RTAUDIO_SINT16:
- memcpy( &( ( short* ) outBuffer )[ outSample * outChannelCount ], &( ( short* ) inBuffer )[ inSample * inChannelCount ], commonChannelCount * sizeof( short ) );
+ memcpy( &( ( short* ) outBuffer )[ outSample * channelCount ], &( ( short* ) inBuffer )[ inSample * channelCount ], channelCount * sizeof( short ) );
break;
case RTAUDIO_SINT24:
- memcpy( &( ( S24* ) outBuffer )[ outSample * outChannelCount ], &( ( S24* ) inBuffer )[ inSample * inChannelCount ], commonChannelCount * sizeof( S24 ) );
+ memcpy( &( ( S24* ) outBuffer )[ outSample * channelCount ], &( ( S24* ) inBuffer )[ inSample * channelCount ], channelCount * sizeof( S24 ) );
break;
case RTAUDIO_SINT32:
- memcpy( &( ( int* ) outBuffer )[ outSample * outChannelCount ], &( ( int* ) inBuffer )[ inSample * inChannelCount ], commonChannelCount * sizeof( int ) );
+ memcpy( &( ( int* ) outBuffer )[ outSample * channelCount ], &( ( int* ) inBuffer )[ inSample * channelCount ], channelCount * sizeof( int ) );
break;
case RTAUDIO_FLOAT32:
- memcpy( &( ( float* ) outBuffer )[ outSample * outChannelCount ], &( ( float* ) inBuffer )[ inSample * inChannelCount ], commonChannelCount * sizeof( float ) );
+ memcpy( &( ( float* ) outBuffer )[ outSample * channelCount ], &( ( float* ) inBuffer )[ inSample * channelCount ], channelCount * sizeof( float ) );
break;
case RTAUDIO_FLOAT64:
- memcpy( &( ( double* ) outBuffer )[ outSample * outChannelCount ], &( ( double* ) inBuffer )[ inSample * inChannelCount ], commonChannelCount * sizeof( double ) );
+ memcpy( &( ( double* ) outBuffer )[ outSample * channelCount ], &( ( double* ) inBuffer )[ inSample * channelCount ], channelCount * sizeof( double ) );
break;
}
@@ -4508,10 +4506,11 @@ bool RtApiWasapi::probeDeviceOpen( unsigned int device, StreamMode mode, unsigne
// Set flags for buffer conversion.
stream_.doConvertBuffer[mode] = false;
- if ( stream_.userFormat != stream_.deviceFormat[mode] )
+ if ( stream_.userFormat != stream_.deviceFormat[mode] ||
+ stream_.nUserChannels != stream_.nDeviceChannels )
stream_.doConvertBuffer[mode] = true;
- if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
- stream_.nUserChannels[mode] > 1 )
+ else if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
+ stream_.nUserChannels[mode] > 1 )
stream_.doConvertBuffer[mode] = true;
if ( stream_.doConvertBuffer[mode] )
@@ -4830,11 +4829,10 @@ void RtApiWasapi::wasapiThread()
stream_.deviceFormat[INPUT] );
if ( callbackPulled ) {
- // Convert callback buffer to user sample rate and channel count
+ // Convert callback buffer to user sample rate
convertBufferWasapi( stream_.deviceBuffer,
convBuffer,
stream_.nDeviceChannels[INPUT],
- stream_.nUserChannels[INPUT],
captureFormat->nSamplesPerSec,
stream_.sampleRate,
( unsigned int ) ( stream_.bufferSize * captureSrRatio ),
@@ -4848,7 +4846,7 @@ void RtApiWasapi::wasapiThread()
stream_.convertInfo[INPUT] );
}
else {
- // no conversion, simple copy deviceBuffer to userBuffer
+ // no further conversion, simple copy deviceBuffer to userBuffer
memcpy( stream_.userBuffer[INPUT],
stream_.deviceBuffer,
stream_.bufferSize * stream_.nUserChannels[INPUT] * formatBytes( stream_.userFormat ) );
@@ -4924,35 +4922,27 @@ void RtApiWasapi::wasapiThread()
stream_.userBuffer[OUTPUT],
stream_.convertInfo[OUTPUT] );
- // Convert callback buffer to stream sample rate and channel count
- convertBufferWasapi( convBuffer,
- stream_.deviceBuffer,
- stream_.nUserChannels[OUTPUT],
- stream_.nDeviceChannels[OUTPUT],
- stream_.sampleRate,
- renderFormat->nSamplesPerSec,
- stream_.bufferSize,
- convBufferSize,
- stream_.deviceFormat[OUTPUT] );
- }
- else {
- // Convert callback buffer to stream sample rate and channel count
- convertBufferWasapi( convBuffer,
- stream_.userBuffer[OUTPUT],
- stream_.nUserChannels[OUTPUT],
- stream_.nDeviceChannels[OUTPUT],
- stream_.sampleRate,
- renderFormat->nSamplesPerSec,
- stream_.bufferSize,
- convBufferSize,
- stream_.deviceFormat[OUTPUT] );
}
+ // Convert callback buffer to stream sample rate
+ convertBufferWasapi( convBuffer,
+ stream_.deviceBuffer,
+ stream_.nDeviceChannels[OUTPUT],
+ stream_.sampleRate,
+ renderFormat->nSamplesPerSec,
+ stream_.bufferSize,
+ convBufferSize,
+ stream_.deviceFormat[OUTPUT] );
+
// Push callback buffer into outputBuffer
callbackPushed = renderBuffer.pushBuffer( convBuffer,
convBufferSize * stream_.nDeviceChannels[OUTPUT],
stream_.deviceFormat[OUTPUT] );
}
+ else {
+ // if there is no render stream, set callbackPushed flag
+ callbackPushed = true;
+ }
// Stream Capture
// ==============
@@ -4978,8 +4968,8 @@ void RtApiWasapi::wasapiThread()
if ( bufferFrameCount != 0 ) {
// Push capture buffer into inputBuffer
if ( captureBuffer.pushBuffer( ( char* ) streamBuffer,
- bufferFrameCount * stream_.nDeviceChannels[INPUT],
- stream_.deviceFormat[INPUT] ) )
+ bufferFrameCount * stream_.nDeviceChannels[INPUT],
+ stream_.deviceFormat[INPUT] ) )
{
// Release capture buffer
hr = captureClient->ReleaseBuffer( bufferFrameCount );
@@ -5047,8 +5037,8 @@ void RtApiWasapi::wasapiThread()
// Pull next buffer from outputBuffer
// Fill render buffer with next buffer
if ( renderBuffer.pullBuffer( ( char* ) streamBuffer,
- bufferFrameCount * stream_.nDeviceChannels[OUTPUT],
- stream_.deviceFormat[OUTPUT] ) )
+ bufferFrameCount * stream_.nDeviceChannels[OUTPUT],
+ stream_.deviceFormat[OUTPUT] ) )
{
// Release render buffer
hr = renderClient->ReleaseBuffer( bufferFrameCount, 0 );