Merge master.
diff --git a/src/lib/dcp_decoder.cc b/src/lib/dcp_decoder.cc
index 14672a23ba03d28a3fa680821f3028b1bca5fc07..d0642d8b6ca9f6fe06edd76a6f1b3611aa7a5496 100644
 #include <dcp/mono_picture_mxf.h>
 #include <dcp/stereo_picture_mxf.h>
 #include <dcp/reel_picture_asset.h>
+#include <dcp/reel_sound_asset.h>
 #include <dcp/mono_picture_frame.h>
 #include <dcp/stereo_picture_frame.h>
+#include <dcp/sound_frame.h>
 #include "dcp_decoder.h"
 #include "dcp_content.h"
-#include "image_proxy.h"
+#include "j2k_image_proxy.h"
 #include "image.h"
 
 using std::list;
@@ -57,37 +59,52 @@ DCPDecoder::pass ()
        }
 
        float const vfr = _dcp_content->video_frame_rate ();
+       int64_t const frame = _next.frames (vfr);
        
        if ((*_reel)->main_picture ()) {
                shared_ptr<dcp::PictureMXF> mxf = (*_reel)->main_picture()->mxf ();
                shared_ptr<dcp::MonoPictureMXF> mono = dynamic_pointer_cast<dcp::MonoPictureMXF> (mxf);
                shared_ptr<dcp::StereoPictureMXF> stereo = dynamic_pointer_cast<dcp::StereoPictureMXF> (mxf);
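+               /* Frame indices into the picture MXF are offset by the reel's entry point,
+                  since a reel may use only part of its asset. */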
+               int64_t const entry_point = (*_reel)->main_picture()->entry_point ();
                if (mono) {
-                       shared_ptr<Image> image (new Image (PIX_FMT_RGB24, mxf->size(), false));
-                       mono->get_frame (_next.frames (vfr))->rgb_frame (image->data()[0]);
-                       shared_ptr<Image> aligned (new Image (image, true));
-                       video (shared_ptr<ImageProxy> (new RawImageProxy (aligned, _log)), _next.frames (vfr));
+                       video (shared_ptr<ImageProxy> (new J2KImageProxy (mono->get_frame (entry_point + frame), mxf->size(), _log)), frame);
                } else {
+                       video (
+                               shared_ptr<ImageProxy> (new J2KImageProxy (stereo->get_frame (entry_point + frame), mxf->size(), dcp::EYE_LEFT, _log)),
+                               frame
+                               );
+                       
+                       video (
+                               shared_ptr<ImageProxy> (new J2KImageProxy (stereo->get_frame (entry_point + frame), mxf->size(), dcp::EYE_RIGHT, _log)),
+                               frame
+                               );
+               }
+       }
 
-                       shared_ptr<Image> left (new Image (PIX_FMT_RGB24, mxf->size(), false));
-                       stereo->get_frame (_next.frames (vfr))->rgb_frame (dcp::EYE_LEFT, left->data()[0]);
-                       shared_ptr<Image> aligned_left (new Image (left, true));
-                       video (shared_ptr<ImageProxy> (new RawImageProxy (aligned_left, _log)), _next.frames (vfr));
-
-                       shared_ptr<Image> right (new Image (PIX_FMT_RGB24, mxf->size(), false));
-                       stereo->get_frame (_next.frames (vfr))->rgb_frame (dcp::EYE_RIGHT, right->data()[0]);
-                       shared_ptr<Image> aligned_right (new Image (right, true));
-                       video (shared_ptr<ImageProxy> (new RawImageProxy (aligned_right, _log)), _next.frames (vfr));
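+       /* Decode one frame's worth of sound, if this reel has any: the sound MXF holds
+          interleaved 24-bit PCM, which is unpacked into floating-point AudioBuffers. */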
+       if ((*_reel)->main_sound ()) {
+               int64_t const entry_point = (*_reel)->main_sound()->entry_point ();
+               shared_ptr<const dcp::SoundFrame> sf = (*_reel)->main_sound()->mxf()->get_frame (entry_point + frame);
+               uint8_t const * from = sf->data ();
+
+               int const channels = _dcp_content->audio_channels ();
+               int const frames = sf->size() / (3 * channels);
+               shared_ptr<AudioBuffers> data (new AudioBuffers (channels, frames));
+               for (int i = 0; i < frames; ++i) {
+                       for (int j = 0; j < channels; ++j) {
+                               /* 24-bit signed PCM, least significant byte first: shift each sample into
+                                  the top of a 32-bit int so the sign bit is kept, then scale to [-1, 1) */
+                               data->data()[j][i] = static_cast<int> ((from[0] << 8) | (from[1] << 16) | (from[2] << 24)) / 2147483648.0f;
+                               from += 3;
+                       }
                }
+
+               audio (data, _next);
        }
 
-       /* XXX: sound */
        /* XXX: subtitle */
 
        _next += ContentTime::from_frames (1, vfr);
 
        if ((*_reel)->main_picture ()) {
-               if ((*_reel)->main_picture()->duration() >= _next.frames (vfr)) {
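+               /* Move on to the next reel once we have passed the end of this one's picture asset */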
+               if (_next.frames (vfr) >= (*_reel)->main_picture()->duration()) {
                        ++_reel;
                }
        }
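
For reference, the new sound path above unpacks each reel frame's interleaved 24-bit PCM into floating-point AudioBuffers. Below is a minimal standalone sketch of the same conversion, assuming (as the decoder does) three bytes per sample with the least significant byte first and samples interleaved by channel; deinterleave_24le() is a hypothetical helper written for illustration, not part of DCP-o-matic or libdcp.

#include <stdint.h>
#include <vector>

/* Hypothetical helper: convert interleaved 24-bit little-endian signed PCM
   into one vector of floats per channel, scaled to [-1, 1). */
std::vector<std::vector<float> >
deinterleave_24le (uint8_t const * data, int frames, int channels)
{
	std::vector<std::vector<float> > out (channels, std::vector<float> (frames));
	for (int i = 0; i < frames; ++i) {
		for (int j = 0; j < channels; ++j) {
			/* Assemble the sample in the top 24 bits of a 32-bit int so the
			   sign bit lands in the right place, then scale down. */
			int32_t const s = int32_t ((uint32_t (data[0]) << 8) | (uint32_t (data[1]) << 16) | (uint32_t (data[2]) << 24));
			out[j][i] = s / 2147483648.0f;
			data += 3;
		}
	}
	return out;
}

In the decoder itself the result is written straight into the AudioBuffers' per-channel arrays rather than into vectors, but the bit manipulation is the same.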