Merge master.
diff --git a/src/lib/player.cc b/src/lib/player.cc
index 817a390d6fcabf44db00a8633a79ec36d8d89ead..d0eb27aa31cc4ab0eb404deec5ac47157dbf330f 100644
 #include "playlist.h"
 #include "job.h"
 #include "image.h"
+#include "image_proxy.h"
 #include "ratio.h"
 #include "log.h"
 #include "scaler.h"
 #include "render_subtitles.h"
-#include "dcp_video.h"
 #include "config.h"
 #include "content_video.h"
+#include "player_video_frame.h"
+#include "frame_rate_change.h"
+
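+/** Shorthand for writing a Log::TYPE_GENERAL message to the film's log */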
+#define LOG_GENERAL(...) _film->log()->log (String::compose (__VA_ARGS__), Log::TYPE_GENERAL);
 
 using std::list;
 using std::cout;
@@ -188,6 +192,7 @@ Player::content_changed (weak_ptr<Content> w, int property, bool frequent)
        }
 }
 
+/** Called when the content of our playlist changes */
 void
 Player::playlist_changed ()
 {
@@ -218,7 +223,7 @@ Player::film_changed (Film::Property p)
 }
 
 list<PositionImage>
-Player::process_content_image_subtitles (shared_ptr<SubtitleContent> content, list<shared_ptr<ContentImageSubtitle> > subs)
+Player::process_content_image_subtitles (shared_ptr<SubtitleContent> content, list<shared_ptr<ContentImageSubtitle> > subs) const
 {
        list<PositionImage> all;
        
@@ -269,7 +274,7 @@ Player::process_content_image_subtitles (shared_ptr<SubtitleContent> content, li
 }
 
 list<PositionImage>
-Player::process_content_text_subtitles (list<shared_ptr<ContentTextSubtitle> > sub)
+Player::process_content_text_subtitles (list<shared_ptr<ContentTextSubtitle> > sub) const
 {
        list<PositionImage> all;
        for (list<shared_ptr<ContentTextSubtitle> >::const_iterator i = sub.begin(); i != sub.end(); ++i) {
@@ -287,111 +292,139 @@ Player::set_approximate_size ()
        _approximate_size = true;
 }
 
-shared_ptr<DCPVideo>
-Player::black_dcp_video (DCPTime time) const
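+/** @return A PlayerVideoFrame containing a black frame at the size of the video container, for use when there is no video at the requested time */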
+shared_ptr<PlayerVideoFrame>
+Player::black_player_video_frame () const
 {
-       return shared_ptr<DCPVideo> (
-               new DCPVideo (
-                       _black_image,
-                       EYES_BOTH,
+       return shared_ptr<PlayerVideoFrame> (
+               new PlayerVideoFrame (
+                       shared_ptr<const ImageProxy> (new RawImageProxy (_black_image, _film->log ())),
                        Crop (),
                        _video_container_size,
                        _video_container_size,
                        Scaler::from_id ("bicubic"),
-                       Config::instance()->colour_conversions().front().conversion,
-                       time
+                       EYES_BOTH,
+                       PART_WHOLE,
+                       Config::instance()->colour_conversions().front().conversion
                )
        );
 }
 
-shared_ptr<DCPVideo>
-Player::get_video (DCPTime time, bool accurate)
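+/** Make a PlayerVideoFrame from a single piece of content video, adding any image
+ *  subtitles (and, if _burn_subtitles is set, text subtitles) that overlap `time'.
+ */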
+shared_ptr<PlayerVideoFrame>
+Player::content_to_player_video_frame (
+       shared_ptr<VideoContent> content,
+       ContentVideo content_video,
+       list<shared_ptr<Piece> > subs,
+       DCPTime time,
+       dcp::Size image_size) const
 {
-       if (!_have_valid_pieces) {
-               setup_pieces ();
-       }
-       
-       list<shared_ptr<Piece> > ov = overlaps<VideoContent> (
-               time,
-               time + DCPTime::from_frames (1, _film->video_frame_rate ())
-               );
-               
-       if (ov.empty ()) {
-               /* No video content at this time */
-               return black_dcp_video (time);
-       }
-
-       /* Create a DCPVideo from the content's video at this time */
-
-       shared_ptr<Piece> piece = ov.back ();
-       shared_ptr<VideoDecoder> decoder = dynamic_pointer_cast<VideoDecoder> (piece->decoder);
-       assert (decoder);
-       shared_ptr<VideoContent> content = dynamic_pointer_cast<VideoContent> (piece->content);
-       assert (content);
-
-       optional<ContentVideo> dec = decoder->get_video (dcp_to_content_video (piece, time), accurate);
-       if (!dec) {
-               return black_dcp_video (time);
-       }
-
-       dcp::Size image_size = content->scale().size (content, _video_container_size, _film->frame_size ());
-       if (_approximate_size) {
-               image_size.width &= ~3;
-               image_size.height &= ~3;
-       }
-
-       shared_ptr<DCPVideo> dcp_video (
-               new DCPVideo (
-                       dec->image,
-                       dec->eyes,
+       shared_ptr<PlayerVideoFrame> pvf (
+               new PlayerVideoFrame (
+                       content_video.image,
                        content->crop (),
                        image_size,
                        _video_container_size,
                        _film->scaler(),
-                       content->colour_conversion (),
-                       time
+                       content_video.eyes,
+                       content_video.part,
+                       content->colour_conversion ()
                        )
                );
-
+
        /* Add subtitles */
-
-       ov = overlaps<SubtitleContent> (
-               time,
-               time + DCPTime::from_frames (1, _film->video_frame_rate ())
-               );
        
        list<PositionImage> sub_images;
        
-       for (list<shared_ptr<Piece> >::const_iterator i = ov.begin(); i != ov.end(); ++i) {
+       for (list<shared_ptr<Piece> >::const_iterator i = subs.begin(); i != subs.end(); ++i) {
                shared_ptr<SubtitleDecoder> subtitle_decoder = dynamic_pointer_cast<SubtitleDecoder> ((*i)->decoder);
                shared_ptr<SubtitleContent> subtitle_content = dynamic_pointer_cast<SubtitleContent> ((*i)->content);
                ContentTime const from = dcp_to_content_subtitle (*i, time);
                ContentTime const to = from + ContentTime::from_frames (1, content->video_frame_rate ());
                
-               list<shared_ptr<ContentImageSubtitle> > image_subtitles = subtitle_decoder->get_image_subtitles (from, to);
+               list<shared_ptr<ContentImageSubtitle> > image_subtitles = subtitle_decoder->get_image_subtitles (ContentTimePeriod (from, to));
                if (!image_subtitles.empty ()) {
                        list<PositionImage> im = process_content_image_subtitles (
                                subtitle_content,
                                image_subtitles
                                );
-
+                       
                        copy (im.begin(), im.end(), back_inserter (sub_images));
                }
-
+               
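+               /* Text subtitles are only rendered here if they are to be burnt into the image */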
                if (_burn_subtitles) {
-                       list<shared_ptr<ContentTextSubtitle> > text_subtitles = subtitle_decoder->get_text_subtitles (from, to);
+                       list<shared_ptr<ContentTextSubtitle> > text_subtitles = subtitle_decoder->get_text_subtitles (ContentTimePeriod (from, to));
                        if (!text_subtitles.empty ()) {
                                list<PositionImage> im = process_content_text_subtitles (text_subtitles);
                                copy (im.begin(), im.end(), back_inserter (sub_images));
                        }
                }
        }
-
+       
        if (!sub_images.empty ()) {
-               dcp_video->set_subtitle (merge (sub_images));
+               pvf->set_subtitle (merge (sub_images));
        }
 
-       return dcp_video;
+       return pvf;
+}
+
+/** @return All PlayerVideoFrames at the given time (there may be two frames for 3D) */
+list<shared_ptr<PlayerVideoFrame> >
+Player::get_video (DCPTime time, bool accurate)
+{
+       if (!_have_valid_pieces) {
+               setup_pieces ();
+       }
+       
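+       /* Find the video content which overlaps this one-frame period */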
+       list<shared_ptr<Piece> > ov = overlaps<VideoContent> (
+               time,
+               time + DCPTime::from_frames (1, _film->video_frame_rate ())
+               );
+
+       list<shared_ptr<PlayerVideoFrame> > pvf;
+               
+       if (ov.empty ()) {
+               /* No video content at this time */
+               pvf.push_back (black_player_video_frame ());
+               return pvf;
+       }
+
+       /* Create a PlayerVideoFrame from the content's video at this time */
+
+       shared_ptr<Piece> piece = ov.back ();
+       shared_ptr<VideoDecoder> decoder = dynamic_pointer_cast<VideoDecoder> (piece->decoder);
+       assert (decoder);
+       shared_ptr<VideoContent> content = dynamic_pointer_cast<VideoContent> (piece->content);
+       assert (content);
+
+       list<ContentVideo> content_video = decoder->get_video (dcp_to_content_video (piece, time), accurate);
+       if (content_video.empty ()) {
+               pvf.push_back (black_player_video_frame ());
+               return pvf;
+       }
+
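+       /* Work out the size at which this content's video will be scaled into the container */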
+       dcp::Size image_size = content->scale().size (content, _video_container_size, _film->frame_size ());
+       if (_approximate_size) {
+               image_size.width &= ~3;
+               image_size.height &= ~3;
+       }
+
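+       /* There may be more than one ContentVideo for this frame (e.g. one per eye for 3D) */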
+       for (list<ContentVideo>::const_iterator i = content_video.begin(); i != content_video.end(); ++i) {
+               list<shared_ptr<Piece> > subs = overlaps<SubtitleContent> (
+                       time,
+                       time + DCPTime::from_frames (1, _film->video_frame_rate ())
+                       );
+               
+               pvf.push_back (content_to_player_video_frame (content, *i, subs, time, image_size));
+       }
+               
+       return pvf;
 }
 
 shared_ptr<AudioBuffers>
@@ -418,7 +451,7 @@ Player::get_audio (DCPTime time, DCPTime length, bool accurate)
                shared_ptr<AudioDecoder> decoder = dynamic_pointer_cast<AudioDecoder> ((*i)->decoder);
                assert (decoder);
 
-               if (content->content_audio_frame_rate() == 0) {
+               if (content->audio_frame_rate() == 0) {
                        /* This AudioContent has no audio (e.g. if it is an FFmpegContent with no
                         * audio stream).
                         */
@@ -474,8 +507,8 @@ Player::get_audio (DCPTime time, DCPTime length, bool accurate)
                        min (AudioFrame (all->audio->frames()), length_frames) - offset.frames (_film->audio_frame_rate ())
                        );
        }
 
        return audio;
 }
 
 VideoFrame
@@ -510,14 +543,14 @@ Player::dcp_to_content_subtitle (shared_ptr<const Piece> piece, DCPTime t) const
        s = DCPTime (max (int64_t (0), s.get ()));
        s = DCPTime (min (piece->content->length_after_trim().get(), s.get()));
 
-       return ContentTime (s, piece->frc);
+       return ContentTime (s + piece->content->trim_start(), piece->frc);
 }
 
 void
 PlayerStatistics::dump (shared_ptr<Log> log) const
 {
-       log->log (String::compose ("Video: %1 good %2 skipped %3 black %4 repeat", video.good, video.skip, video.black, video.repeat));
-       log->log (String::compose ("Audio: %1 good %2 skipped %3 silence", audio.good, audio.skip, audio.silence.seconds()));
+       log->log (String::compose ("Video: %1 good %2 skipped %3 black %4 repeat", video.good, video.skip, video.black, video.repeat), Log::TYPE_GENERAL);
+       log->log (String::compose ("Audio: %1 good %2 skipped %3 silence", audio.good, audio.skip, audio.silence.seconds()), Log::TYPE_GENERAL);
 }
 
 PlayerStatistics const &