Adapt DCPDecoder to libdcp's MXF → Asset class renaming, take the channel count from the content's audio stream and pass that stream to audio(), split subtitles_during() into image and text variants, and remove trailing whitespace.
diff --git a/src/lib/dcp_decoder.cc b/src/lib/dcp_decoder.cc
index 51d16b43c6d767f03c2b2b85237fc06a657c8ea3..0ec50e0cd24ff9a52523707a121ddb50acb22544 100644
--- a/src/lib/dcp_decoder.cc
+++ b/src/lib/dcp_decoder.cc
@@ -25,8 +25,8 @@
 #include <dcp/dcp.h>
 #include <dcp/cpl.h>
 #include <dcp/reel.h>
-#include <dcp/mono_picture_mxf.h>
-#include <dcp/stereo_picture_mxf.h>
+#include <dcp/mono_picture_asset.h>
+#include <dcp/stereo_picture_asset.h>
 #include <dcp/reel_picture_asset.h>
 #include <dcp/reel_sound_asset.h>
 #include <dcp/mono_picture_frame.h>
@@ -63,22 +63,22 @@ DCPDecoder::pass ()
 
        float const vfr = _dcp_content->video_frame_rate ();
        int64_t const frame = _next.frames (vfr);
-       
+
        if ((*_reel)->main_picture ()) {
-               shared_ptr<dcp::PictureMXF> mxf = (*_reel)->main_picture()->mxf ();
-               shared_ptr<dcp::MonoPictureMXF> mono = dynamic_pointer_cast<dcp::MonoPictureMXF> (mxf);
-               shared_ptr<dcp::StereoPictureMXF> stereo = dynamic_pointer_cast<dcp::StereoPictureMXF> (mxf);
+               shared_ptr<dcp::PictureAsset> asset = (*_reel)->main_picture()->asset ();
+               shared_ptr<dcp::MonoPictureAsset> mono = dynamic_pointer_cast<dcp::MonoPictureAsset> (asset);
+               shared_ptr<dcp::StereoPictureAsset> stereo = dynamic_pointer_cast<dcp::StereoPictureAsset> (asset);
                int64_t const entry_point = (*_reel)->main_picture()->entry_point ();
                if (mono) {
-                       video (shared_ptr<ImageProxy> (new J2KImageProxy (mono->get_frame (entry_point + frame), mxf->size())), frame);
+                       video (shared_ptr<ImageProxy> (new J2KImageProxy (mono->get_frame (entry_point + frame), asset->size())), frame);
                } else {
                        video (
-                               shared_ptr<ImageProxy> (new J2KImageProxy (stereo->get_frame (entry_point + frame), mxf->size(), dcp::EYE_LEFT)),
+                               shared_ptr<ImageProxy> (new J2KImageProxy (stereo->get_frame (entry_point + frame), asset->size(), dcp::EYE_LEFT)),
                                frame
                                );
-                       
+
                        video (
-                               shared_ptr<ImageProxy> (new J2KImageProxy (stereo->get_frame (entry_point + frame), mxf->size(), dcp::EYE_RIGHT)),
+                               shared_ptr<ImageProxy> (new J2KImageProxy (stereo->get_frame (entry_point + frame), asset->size(), dcp::EYE_RIGHT)),
                                frame
                                );
                }
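
Review note: the MXF → Asset renaming tracks libdcp's own class rename (dcp::MonoPictureMXF becomes dcp::MonoPictureAsset, and so on), so the decoder down-casts the reel's generic picture asset to decide between the 2D and 3D paths. A minimal, self-contained sketch of that dispatch pattern, using hypothetical stand-in types rather than the real libdcp classes:

#include <iostream>
#include <memory>

/* Hypothetical stand-ins for dcp::PictureAsset and its subclasses. */
struct PictureAsset { virtual ~PictureAsset () {} };
struct MonoPictureAsset : PictureAsset {};
struct StereoPictureAsset : PictureAsset {};

/* Exactly one of the two casts yields a non-null pointer, which is what
 * lets pass() above choose between one video() call (2D) and two (3D). */
void
decode (std::shared_ptr<PictureAsset> asset)
{
	auto mono = std::dynamic_pointer_cast<MonoPictureAsset> (asset);
	auto stereo = std::dynamic_pointer_cast<StereoPictureAsset> (asset);
	if (mono) {
		std::cout << "2D: emit one frame\n";
	} else if (stereo) {
		std::cout << "3D: emit a left-eye and a right-eye frame\n";
	}
}

int
main ()
{
	decode (std::make_shared<MonoPictureAsset> ());
	decode (std::make_shared<StereoPictureAsset> ());
}
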
@@ -86,10 +86,10 @@ DCPDecoder::pass ()
 
        if ((*_reel)->main_sound ()) {
                int64_t const entry_point = (*_reel)->main_sound()->entry_point ();
-               shared_ptr<const dcp::SoundFrame> sf = (*_reel)->main_sound()->mxf()->get_frame (entry_point + frame);
+               shared_ptr<const dcp::SoundFrame> sf = (*_reel)->main_sound()->asset()->get_frame (entry_point + frame);
                uint8_t const * from = sf->data ();
 
-               int const channels = _dcp_content->audio_channels ();
+               int const channels = _dcp_content->audio_stream()->channels ();
                int const frames = sf->size() / (3 * channels);
                shared_ptr<AudioBuffers> data (new AudioBuffers (channels, frames));
                for (int i = 0; i < frames; ++i) {
@@ -99,7 +99,7 @@ DCPDecoder::pass ()
                        }
                }
 
-               audio (data, _next);
+               audio (_dcp_content->audio_stream(), data, _next);
        }
 
        /* XXX: subtitle */
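
Review note: the hunk boundary hides the body of the conversion loop. From the context above, the frame count is sf->size() / (3 * channels), i.e. packed 24-bit PCM, and the channel count now comes from the DCP's own audio stream rather than from the content's channel setting. A minimal sketch of unpacking such a frame into per-channel floats, assuming interleaved little-endian samples scaled to [-1, 1) (byte order and scaling are assumptions, not taken from this diff):

#include <cstdint>
#include <vector>

/* Unpack interleaved 24-bit PCM into per-channel floats in [-1, 1).
 * frames is expected to be size / (3 * channels), as in pass() above. */
std::vector<std::vector<float>>
unpack_24bit_pcm (uint8_t const * from, int channels, int frames)
{
	std::vector<std::vector<float>> out (channels, std::vector<float> (frames));
	for (int i = 0; i < frames; ++i) {
		for (int j = 0; j < channels; ++j) {
			/* Assemble the 24-bit sample into the top bits of a signed 32-bit word */
			int32_t const s = static_cast<int32_t> (
				(static_cast<uint32_t> (from[0]) << 8) |
				(static_cast<uint32_t> (from[1]) << 16) |
				(static_cast<uint32_t> (from[2]) << 24)
				);
			out[j][i] = s / 2147483648.0f;
			from += 3;
		}
	}
	return out;
}
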
@@ -111,7 +111,7 @@ DCPDecoder::pass ()
                        ++_reel;
                }
        }
-       
+
        return false;
 }
 
@@ -133,7 +133,13 @@ DCPDecoder::seek (ContentTime t, bool accurate)
 
 
 list<ContentTimePeriod>
-DCPDecoder::subtitles_during (ContentTimePeriod, bool) const
+DCPDecoder::image_subtitles_during (ContentTimePeriod, bool) const
+{
+       return list<ContentTimePeriod> ();
+}
+
+list<ContentTimePeriod>
+DCPDecoder::text_subtitles_during (ContentTimePeriod, bool) const
 {
        /* XXX */
        return list<ContentTimePeriod> ();
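
Review note: subtitles_during() is split into image_subtitles_during() and text_subtitles_during(), presumably so image-based and text-based subtitles can be handled separately; both still return empty lists, matching the "XXX: subtitle" note in pass(). A hypothetical caller that merges the two queries (a sketch with a stand-in ContentTimePeriod, not code from this project) therefore sees no subtitles from a DCP for now:

#include <list>

/* Stand-in for the project's ContentTimePeriod type (hypothetical, just enough to compile). */
struct ContentTimePeriod {};

/* Merge the image and text query results; with this diff both inputs are
 * always empty, so the merged list is empty too. */
std::list<ContentTimePeriod>
all_subtitles_during (std::list<ContentTimePeriod> image, std::list<ContentTimePeriod> text)
{
	image.splice (image.end (), text);
	return image;
}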