More fixes
[rtaudio-cdist.git] / RtAudio.cpp
index 7fb2798f90092f7333e147f036da8141900a9105..b3192de3ff6e6c43ade1c5a7ac983d9877351e1a 100644
@@ -1,4 +1,4 @@
-/************************************************************************/
+/************************************************************************/
 /*! \class RtAudio
     \brief Realtime audio i/o C++ classes.
 
@@ -3689,18 +3689,28 @@ static const char* getAsioErrorString( ASIOError result )
 #ifndef INITGUID
   #define INITGUID
 #endif
+
+#include <mfapi.h>
+#include <mferror.h>
+#include <mfplay.h>
+#include <mftransform.h>
+#include <wmcodecdsp.h>
+
 #include <audioclient.h>
 #include <avrt.h>
 #include <mmdeviceapi.h>
 #include <functiondiscoverykeys_devpkey.h>
 
-#include <mfapi.h>
-#include <mferror.h>
-#include <mfplay.h>
-#include <Wmcodecdsp.h>
+#ifndef MF_E_TRANSFORM_NEED_MORE_INPUT
+  #define MF_E_TRANSFORM_NEED_MORE_INPUT _HRESULT_TYPEDEF_(0xc00d6d72)
+#endif
 
-#pragma comment( lib, "mfplat.lib" )
-#pragma comment( lib, "wmcodecdspuuid" )
+#ifdef _MSC_VER
+  #pragma comment( lib, "ksuser" )
+  #pragma comment( lib, "mfplat.lib" )
+  #pragma comment( lib, "mfuuid.lib" )
+  #pragma comment( lib, "wmcodecdspuuid" )
+#endif
 
 //=============================================================================
 
@@ -3887,14 +3897,17 @@ public:
     , _sampleRatio( ( float ) outSampleRate / inSampleRate )
     , _transformUnk( NULL )
     , _transform( NULL )
-    , _resamplerProps( NULL )
     , _mediaType( NULL )
     , _inputMediaType( NULL )
     , _outputMediaType( NULL )
+
+    #ifdef __IWMResamplerProps_FWD_DEFINED__
+      , _resamplerProps( NULL )
+    #endif
   {
     // 1. Initialization
 
-    MFStartup( MF_VERSION, MFSTARTUP_NOSOCKET );
+    MFStartup( MF_VERSION, MFSTARTUP_LITE );
 
     // 2. Create Resampler Transform Object
 
@@ -3903,10 +3916,12 @@ public:
 
     _transformUnk->QueryInterface( IID_PPV_ARGS( &_transform ) );
 
-    _transformUnk->QueryInterface( IID_PPV_ARGS( &_resamplerProps ) );
-    _resamplerProps->SetHalfFilterLength( 60 ); // best conversion quality
+    #ifdef __IWMResamplerProps_FWD_DEFINED__
+      _transformUnk->QueryInterface( IID_PPV_ARGS( &_resamplerProps ) );
+      _resamplerProps->SetHalfFilterLength( 60 ); // best conversion quality
+    #endif
 
-                                                // 3. Specify input / output format
+    // 3. Specify input / output format
 
     MFCreateMediaType( &_mediaType );
     _mediaType->SetGUID( MF_MT_MAJOR_TYPE, MFMediaType_Audio );
@@ -3933,17 +3948,17 @@ public:
 
     // 4. Send stream start messages to Resampler
 
-    _transform->ProcessMessage( MFT_MESSAGE_COMMAND_FLUSH, NULL );
-    _transform->ProcessMessage( MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, NULL );
-    _transform->ProcessMessage( MFT_MESSAGE_NOTIFY_START_OF_STREAM, NULL );
+    _transform->ProcessMessage( MFT_MESSAGE_COMMAND_FLUSH, 0 );
+    _transform->ProcessMessage( MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0 );
+    _transform->ProcessMessage( MFT_MESSAGE_NOTIFY_START_OF_STREAM, 0 );
   }
 
   ~WasapiResampler()
   {
     // 8. Send stream stop messages to Resampler
 
-    _transform->ProcessMessage( MFT_MESSAGE_NOTIFY_END_OF_STREAM, NULL );
-    _transform->ProcessMessage( MFT_MESSAGE_NOTIFY_END_STREAMING, NULL );
+    _transform->ProcessMessage( MFT_MESSAGE_NOTIFY_END_OF_STREAM, 0 );
+    _transform->ProcessMessage( MFT_MESSAGE_NOTIFY_END_STREAMING, 0 );
 
     // 9. Cleanup
 
@@ -3951,10 +3966,13 @@ public:
 
     SAFE_RELEASE( _transformUnk );
     SAFE_RELEASE( _transform );
-    SAFE_RELEASE( _resamplerProps );
     SAFE_RELEASE( _mediaType );
     SAFE_RELEASE( _inputMediaType );
     SAFE_RELEASE( _outputMediaType );
+
+    #ifdef __IWMResamplerProps_FWD_DEFINED__
+      SAFE_RELEASE( _resamplerProps );
+    #endif
   }
 
   void Convert( char* outBuffer, const char* inBuffer, unsigned int inSampleCount, unsigned int& outSampleCount )
@@ -4004,7 +4022,7 @@ public:
     DWORD rStatus;
     DWORD rBytes = outputBufferSize; // maximum bytes accepted per ProcessOutput
 
-                                     // 7.1 Create Sample object for output data
+    // 7.1 Create Sample object for output data
 
     memset( &rOutDataBuffer, 0, sizeof rOutDataBuffer );
     MFCreateSample( &( rOutDataBuffer.pSample ) );
@@ -4047,10 +4065,13 @@ private:
 
   IUnknown* _transformUnk;
   IMFTransform* _transform;
-  IWMResamplerProps* _resamplerProps;
   IMFMediaType* _mediaType;
   IMFMediaType* _inputMediaType;
   IMFMediaType* _outputMediaType;
+
+  #ifdef __IWMResamplerProps_FWD_DEFINED__
+    IWMResamplerProps* _resamplerProps;
+  #endif
 };
 
 //-----------------------------------------------------------------------------
@@ -4738,7 +4759,8 @@ bool RtApiWasapi::probeDeviceOpen( unsigned int device, StreamMode mode, unsigne
   // Set flags for buffer conversion.
   stream_.doConvertBuffer[mode] = false;
   if ( stream_.userFormat != stream_.deviceFormat[mode] ||
-       stream_.nUserChannels != stream_.nDeviceChannels )
+       stream_.nUserChannels[0] != stream_.nDeviceChannels[0] ||
+       stream_.nUserChannels[1] != stream_.nDeviceChannels[1] )
     stream_.doConvertBuffer[mode] = true;
   else if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
             stream_.nUserChannels[mode] > 1 )
@@ -5071,23 +5093,43 @@ void RtApiWasapi::wasapiThread()
       // 2. If 1. was successful: Convert callback buffer to user sample rate and channel count
       //                          Convert callback buffer to user format
 
-      if ( captureAudioClient ) {
-        // Pull callback buffer from inputBuffer
-        callbackPulled = captureBuffer.pullBuffer( convBuffer,
-                                                   ( unsigned int ) ( stream_.bufferSize * captureSrRatio ) * stream_.nDeviceChannels[INPUT],
-                                                   stream_.deviceFormat[INPUT] );
+      if ( captureAudioClient )
+      {
+        int samplesToPull = ( unsigned int ) floorf( stream_.bufferSize * captureSrRatio );
+        if ( captureSrRatio != 1 )
+        {
+          // account for remainders
+          samplesToPull--;
+        }
+
+        convBufferSize = 0;
+        while ( convBufferSize < stream_.bufferSize )
+        {
+          // Pull callback buffer from inputBuffer
+          callbackPulled = captureBuffer.pullBuffer( convBuffer,
+                                                     samplesToPull * stream_.nDeviceChannels[INPUT],
+                                                     stream_.deviceFormat[INPUT] );
+
+          if ( !callbackPulled )
+          {
+            break;
+          }
 
-        if ( callbackPulled ) {
           // Convert callback buffer to user sample rate
-          convertBufferWasapi( stream_.deviceBuffer,
-                               convBuffer,
-                               stream_.nDeviceChannels[INPUT],
-                               captureFormat->nSamplesPerSec,
-                               stream_.sampleRate,
-                               ( unsigned int ) ( stream_.bufferSize * captureSrRatio ),
-                               convBufferSize,
-                               stream_.deviceFormat[INPUT] );
+          unsigned int deviceBufferOffset = convBufferSize * stream_.nDeviceChannels[INPUT] * formatBytes( stream_.deviceFormat[INPUT] );
+          unsigned int convSamples = 0;
 
+          captureResampler->Convert( stream_.deviceBuffer + deviceBufferOffset,
+                                     convBuffer,
+                                     samplesToPull,
+                                     convSamples );
+
+          convBufferSize += convSamples;
+          samplesToPull = 1; // now pull one sample at a time until we have stream_.bufferSize samples
+        }
+
+        if ( callbackPulled )
+        {
           if ( stream_.doConvertBuffer[INPUT] ) {
             // Convert callback buffer to user format
             convertBuffer( stream_.userBuffer[INPUT],
@@ -5164,24 +5206,26 @@ void RtApiWasapi::wasapiThread()
     // 2. Convert callback buffer to stream sample rate and channel count
     // 3. Push callback buffer into outputBuffer
 
-    if ( renderAudioClient && callbackPulled ) {
-      if ( stream_.doConvertBuffer[OUTPUT] ) {
-        // Convert callback buffer to stream format
-        convertBuffer( stream_.deviceBuffer,
-                       stream_.userBuffer[OUTPUT],
-                       stream_.convertInfo[OUTPUT] );
+    if ( renderAudioClient && callbackPulled )
+    {
+      // if the last call to renderBuffer.PushBuffer() was successful
+      if ( callbackPushed || convBufferSize == 0 )
+      {
+        if ( stream_.doConvertBuffer[OUTPUT] )
+        {
+          // Convert callback buffer to stream format
+          convertBuffer( stream_.deviceBuffer,
+                         stream_.userBuffer[OUTPUT],
+                         stream_.convertInfo[OUTPUT] );
 
-      }
+        }
 
-      // Convert callback buffer to stream sample rate
-      convertBufferWasapi( convBuffer,
-                           stream_.deviceBuffer,
-                           stream_.nDeviceChannels[OUTPUT],
-                           stream_.sampleRate,
-                           renderFormat->nSamplesPerSec,
-                           stream_.bufferSize,
-                           convBufferSize,
-                           stream_.deviceFormat[OUTPUT] );
+        // Convert callback buffer to stream sample rate
+        renderResampler->Convert( convBuffer,
+                                  stream_.deviceBuffer,
+                                  stream_.bufferSize,
+                                  convBufferSize );
+      }
 
       // Push callback buffer into outputBuffer
       callbackPushed = renderBuffer.pushBuffer( convBuffer,
@@ -5319,7 +5363,10 @@ void RtApiWasapi::wasapiThread()
 
     // if the callback buffer was pushed renderBuffer reset callbackPulled flag
     if ( callbackPushed ) {
+      // unsetting the callbackPulled flag lets the stream know that
+      // the audio device is ready for another callback output buffer.
       callbackPulled = false;
+
       // tick stream time
       RtApi::tickStreamTime();
     }
@@ -5332,16 +5379,16 @@ Exit:
   CoTaskMemFree( renderFormat );
 
   free ( convBuffer );
+  delete renderResampler;
+  delete captureResampler;
 
   CoUninitialize();
 
+  if ( !errorText_.empty() )
+    error( errorType );
+
   // update stream state
   stream_.state = STREAM_STOPPED;
-
-  if ( errorText_.empty() )
-    return;
-  else
-    error( errorType );
 }
 
 //******************** End of __WINDOWS_WASAPI__ *********************//
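
Note: the hunks above replace the old convertBufferWasapi() sample-rate conversion with a WasapiResampler class built on the Media Foundation resampler MFT. For reference, the condensed sketch below walks the same MFT lifecycle that the patch's numbered steps (1-4, 8-9) describe: create CLSID_CResamplerMediaObject, set the input/output media types, send the streaming messages, then tear down. It is a sketch only, not RtAudio code: the 44100 -> 48000 Hz float-stereo format values are illustrative assumptions, error checking is omitted, and the per-buffer ProcessInput()/ProcessOutput() work (steps 5-7) is left as a comment since WasapiResampler::Convert() above shows it in full.

// Minimal sketch of the Media Foundation resampler lifecycle (assumed format values).
#ifndef INITGUID
  #define INITGUID
#endif
#include <windows.h>
#include <mfapi.h>
#include <mftransform.h>
#include <mferror.h>
#include <wmcodecdsp.h>   // CLSID_CResamplerMediaObject

#ifdef _MSC_VER
  #pragma comment( lib, "mfplat.lib" )
  #pragma comment( lib, "mfuuid.lib" )
  #pragma comment( lib, "wmcodecdspuuid" )
  #pragma comment( lib, "ole32.lib" )
#endif

int main()
{
  // 1. Initialization
  CoInitializeEx( NULL, COINIT_MULTITHREADED );
  MFStartup( MF_VERSION, MFSTARTUP_LITE );

  // 2. Create Resampler Transform Object
  IUnknown* transformUnk = NULL;
  IMFTransform* transform = NULL;
  CoCreateInstance( CLSID_CResamplerMediaObject, NULL, CLSCTX_INPROC_SERVER,
                    IID_IUnknown, ( void** ) &transformUnk );
  transformUnk->QueryInterface( IID_PPV_ARGS( &transform ) );

  // 3. Specify input / output format (assumed: 32-bit float, 2 channels, 8-byte frames)
  IMFMediaType* inputMediaType = NULL;
  IMFMediaType* outputMediaType = NULL;
  MFCreateMediaType( &inputMediaType );
  inputMediaType->SetGUID( MF_MT_MAJOR_TYPE, MFMediaType_Audio );
  inputMediaType->SetGUID( MF_MT_SUBTYPE, MFAudioFormat_Float );
  inputMediaType->SetUINT32( MF_MT_AUDIO_NUM_CHANNELS, 2 );
  inputMediaType->SetUINT32( MF_MT_AUDIO_BITS_PER_SAMPLE, 32 );
  inputMediaType->SetUINT32( MF_MT_AUDIO_BLOCK_ALIGNMENT, 8 );
  inputMediaType->SetUINT32( MF_MT_AUDIO_SAMPLES_PER_SECOND, 44100 );
  inputMediaType->SetUINT32( MF_MT_AUDIO_AVG_BYTES_PER_SECOND, 44100 * 8 );
  transform->SetInputType( 0, inputMediaType, 0 );

  MFCreateMediaType( &outputMediaType );
  inputMediaType->CopyAllItems( outputMediaType );
  outputMediaType->SetUINT32( MF_MT_AUDIO_SAMPLES_PER_SECOND, 48000 );
  outputMediaType->SetUINT32( MF_MT_AUDIO_AVG_BYTES_PER_SECOND, 48000 * 8 );
  transform->SetOutputType( 0, outputMediaType, 0 );

  // 4. Send stream start messages to Resampler
  transform->ProcessMessage( MFT_MESSAGE_COMMAND_FLUSH, 0 );
  transform->ProcessMessage( MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0 );
  transform->ProcessMessage( MFT_MESSAGE_NOTIFY_START_OF_STREAM, 0 );

  // 5.-7. Per buffer: wrap the input samples in an IMFSample / IMFMediaBuffer,
  // call ProcessInput(), then drain ProcessOutput() until it returns
  // MF_E_TRANSFORM_NEED_MORE_INPUT (see WasapiResampler::Convert above).

  // 8. Send stream stop messages to Resampler
  transform->ProcessMessage( MFT_MESSAGE_NOTIFY_END_OF_STREAM, 0 );
  transform->ProcessMessage( MFT_MESSAGE_NOTIFY_END_STREAMING, 0 );

  // 9. Cleanup
  inputMediaType->Release();
  outputMediaType->Release();
  transform->Release();
  transformUnk->Release();
  MFShutdown();
  CoUninitialize();
  return 0;
}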