Purge rint() and use llrint() and friends.
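
The point of the change, briefly: rint() rounds but still returns a double,
so call sites that want an integer (frame counts, pixel positions) end up
doing a second, implicit narrowing conversion anyway.  lrint() and llrint()
round and convert in one step, to long and long long respectively.  A
minimal sketch of the difference (not part of this diff, just an
illustration):

	#include <cmath>
	#include <cstdio>

	int main ()
	{
		double const width = 1998;
		double const x = 0.4375;

		/* rint() returns double; storing it in an int needs a
		   separate (implicit or explicit) conversion. */
		int const a = static_cast<int> (std::rint (width * x));

		/* lrint() rounds and converts to long in one step;
		   llrint() does the same for long long. */
		int const b = static_cast<int> (std::lrint (width * x));

		std::printf ("%d %d\n", a, b);
		return 0;
	}
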
diff --git a/src/lib/player.cc b/src/lib/player.cc
index b0ba415979a7ba61e8ab55499ee06d49a5871f1a..a40c65cd54027e7a553ea416098793302f490533 100644
--- a/src/lib/player.cc
+++ b/src/lib/player.cc
@@ -213,11 +213,13 @@ Player::playlist_content_changed (weak_ptr<Content> w, int property, bool freque
                property == SubtitleContentProperty::SUBTITLE_Y_OFFSET ||
                property == SubtitleContentProperty::SUBTITLE_X_SCALE ||
                property == SubtitleContentProperty::SUBTITLE_Y_SCALE ||
+               property == SubtitleContentProperty::FONTS ||
                property == VideoContentProperty::VIDEO_CROP ||
                property == VideoContentProperty::VIDEO_SCALE ||
                property == VideoContentProperty::VIDEO_FRAME_RATE ||
                property == VideoContentProperty::VIDEO_FADE_IN ||
-               property == VideoContentProperty::VIDEO_FADE_OUT
+               property == VideoContentProperty::VIDEO_FADE_OUT ||
+               property == VideoContentProperty::COLOUR_CONVERSION
                ) {
 
                Changed (frequent);
@@ -291,8 +293,8 @@ Player::transform_image_subtitles (list<ImageSubtitle> subs) const
                                        true
                                        ),
                                Position<int> (
-                                       rint (_video_container_size.width * i->rectangle.x),
-                                       rint (_video_container_size.height * i->rectangle.y)
+                                       lrint (_video_container_size.width * i->rectangle.x),
+                                       lrint (_video_container_size.height * i->rectangle.y)
                                        )
                                )
                        );
@@ -309,7 +311,7 @@ Player::black_player_video_frame (DCPTime time) const
                        shared_ptr<const ImageProxy> (new RawImageProxy (_black_image)),
                        time,
                        Crop (),
-                       optional<float> (),
+                       optional<double> (),
                        _video_container_size,
                        _video_container_size,
                        EYES_BOTH,
@@ -339,7 +341,7 @@ Player::get_video (DCPTime time, bool accurate)
 
        /* Text subtitles (rendered to an image) */
        if (!ps.text.empty ()) {
-               list<PositionImage> s = render_subtitles (ps.text, _video_container_size);
+               list<PositionImage> s = render_subtitles (ps.text, ps.fonts, _video_container_size);
                copy (s.begin (), s.end (), back_inserter (sub_images));
        }
 
@@ -348,7 +350,7 @@ Player::get_video (DCPTime time, bool accurate)
                subtitles = merge (sub_images);
        }
 
-       /* Find video */
+       /* Find pieces containing video which is happening now */
 
        list<shared_ptr<Piece> > ov = overlaps<VideoContent> (
                time,
@@ -361,56 +363,54 @@ Player::get_video (DCPTime time, bool accurate)
                /* No video content at this time */
                pvf.push_back (black_player_video_frame (time));
        } else {
-               /* Decide which pieces of content to use */
-               list<shared_ptr<Piece> > ov_to_use;
-
-               /* Always use the last one */
-               list<shared_ptr<Piece> >::reverse_iterator i = ov.rbegin ();
-               ov_to_use.push_back (*i);
-               VideoFrameType const first_type = dynamic_pointer_cast<VideoContent> ((*i)->content)->video_frame_type ();
-
-               ++i;
-               if (i != ov.rend ()) {
-                       shared_ptr<VideoContent> vc = dynamic_pointer_cast<VideoContent> ((*i)->content);
-                       /* Use the second to last if it's the other part of a 3D content pair */
-                       if (
-                               (first_type == EYES_LEFT && vc->video_frame_type() == EYES_RIGHT) ||
-                               (first_type == EYES_RIGHT && vc->video_frame_type() == EYES_LEFT)
-                               ) {
-                               /* Other part of a pair of 3D content */
-                               ov_to_use.push_back (*i);
-                       }
-               }
+               /* Some video content at this time */
+               shared_ptr<Piece> last = *(ov.rbegin ());
+               VideoFrameType const last_type = dynamic_pointer_cast<VideoContent> (last->content)->video_frame_type ();
+
+               /* Get video from appropriate piece(s) */
+               BOOST_FOREACH (shared_ptr<Piece> piece, ov) {
 
-               BOOST_FOREACH (shared_ptr<Piece> piece, ov_to_use) {
                        shared_ptr<VideoDecoder> decoder = dynamic_pointer_cast<VideoDecoder> (piece->decoder);
                        DCPOMATIC_ASSERT (decoder);
                        shared_ptr<VideoContent> video_content = dynamic_pointer_cast<VideoContent> (piece->content);
                        DCPOMATIC_ASSERT (video_content);
 
-                       list<ContentVideo> content_video = decoder->get_video (dcp_to_content_video (piece, time), accurate);
-                       if (content_video.empty ()) {
-                               pvf.push_back (black_player_video_frame (time));
-                       } else {
-                               dcp::Size image_size = video_content->scale().size (video_content, _video_container_size, _film->frame_size ());
-
-                               for (list<ContentVideo>::const_iterator i = content_video.begin(); i != content_video.end(); ++i) {
-                                       pvf.push_back (
-                                               shared_ptr<PlayerVideo> (
-                                                       new PlayerVideo (
-                                                               i->image,
-                                                               content_video_to_dcp (piece, i->frame),
-                                                               video_content->crop (),
-                                                               video_content->fade (i->frame),
-                                                               image_size,
-                                                               _video_container_size,
-                                                               i->eyes,
-                                                               i->part,
-                                                               video_content->colour_conversion ()
+                       bool const use =
+                               /* always use the last video */
+                               piece == last ||
+                               /* with a corresponding L/R eye if appropriate */
+                               (last_type == VIDEO_FRAME_TYPE_3D_LEFT && video_content->video_frame_type() == VIDEO_FRAME_TYPE_3D_RIGHT) ||
+                               (last_type == VIDEO_FRAME_TYPE_3D_RIGHT && video_content->video_frame_type() == VIDEO_FRAME_TYPE_3D_LEFT);
+
+                       if (use) {
+                               /* We want to use this piece */
+                               list<ContentVideo> content_video = decoder->get_video (dcp_to_content_video (piece, time), accurate);
+                               if (content_video.empty ()) {
+                                       pvf.push_back (black_player_video_frame (time));
+                               } else {
+                                       dcp::Size image_size = video_content->scale().size (video_content, _video_container_size, _film->frame_size ());
+
+                                       for (list<ContentVideo>::const_iterator i = content_video.begin(); i != content_video.end(); ++i) {
+                                               pvf.push_back (
+                                                       shared_ptr<PlayerVideo> (
+                                                               new PlayerVideo (
+                                                                       i->image,
+                                                                       content_video_to_dcp (piece, i->frame),
+                                                                       video_content->crop (),
+                                                                       video_content->fade (i->frame),
+                                                                       image_size,
+                                                                       _video_container_size,
+                                                                       i->eyes,
+                                                                       i->part,
+                                                                       video_content->colour_conversion ()
+                                                                       )
                                                                )
-                                                       )
-                                               );
+                                                       );
+                                       }
                                }
+                       } else {
+                               /* Discard unused video */
+                               decoder->get_video (dcp_to_content_video (piece, time), accurate);
                        }
                }
        }
@@ -431,7 +431,7 @@ Player::get_audio (DCPTime time, DCPTime length, bool accurate)
                setup_pieces ();
        }
 
-       Frame const length_frames = length.frames (_film->audio_frame_rate ());
+       Frame const length_frames = length.frames_round (_film->audio_frame_rate ());
 
        shared_ptr<AudioBuffers> audio (new AudioBuffers (_film->audio_channels(), length_frames));
        audio->make_silent ();
@@ -457,14 +457,14 @@ Player::get_audio (DCPTime time, DCPTime length, bool accurate)
                           the stuff we get back.
                        */
                        offset = -request;
-                       request_frames += request.frames (_film->audio_frame_rate ());
+                       request_frames += request.frames_round (_film->audio_frame_rate ());
                        if (request_frames < 0) {
                                request_frames = 0;
                        }
                        request = DCPTime ();
                }
 
-               Frame const content_frame = dcp_to_content_audio (*i, request);
+               Frame const content_frame = dcp_to_resampled_audio (*i, request);
 
                BOOST_FOREACH (AudioStreamPtr j, content->audio_streams ()) {
 
@@ -509,7 +509,7 @@ Player::get_audio (DCPTime time, DCPTime length, bool accurate)
                        audio->accumulate_frames (
                                all.audio.get(),
                                content_frame - all.frame,
-                               offset.frames (_film->audio_frame_rate()),
+                               offset.frames_round (_film->audio_frame_rate()),
                                min (Frame (all.audio->frames()), request_frames)
                                );
                }
@@ -521,60 +521,35 @@ Player::get_audio (DCPTime time, DCPTime length, bool accurate)
 Frame
 Player::dcp_to_content_video (shared_ptr<const Piece> piece, DCPTime t) const
 {
-       /* s is the offset of t from the start position of this content */
+       shared_ptr<const VideoContent> vc = dynamic_pointer_cast<const VideoContent> (piece->content);
        DCPTime s = t - piece->content->position ();
-       s = DCPTime (max (DCPTime::Type (0), s.get ()));
-       s = DCPTime (min (piece->content->length_after_trim().get(), s.get()));
-
-       /* Convert this to the content frame */
-       return DCPTime (s + piece->content->trim_start()).frames (_film->video_frame_rate()) / piece->frc.factor ();
+       s = min (piece->content->length_after_trim(), s);
+       return max (ContentTime (), ContentTime (s, piece->frc) + piece->content->trim_start ()).frames_round (vc->video_frame_rate ());
 }
 
 DCPTime
 Player::content_video_to_dcp (shared_ptr<const Piece> piece, Frame f) const
 {
-       DCPTime t = DCPTime::from_frames (f * piece->frc.factor (), _film->video_frame_rate()) - piece->content->trim_start () + piece->content->position ();
-       if (t < DCPTime ()) {
-               t = DCPTime ();
-       }
-
-       return t;
+       shared_ptr<const VideoContent> vc = dynamic_pointer_cast<const VideoContent> (piece->content);
+       ContentTime const c = ContentTime::from_frames (f, vc->video_frame_rate ()) - piece->content->trim_start ();
+       return max (DCPTime (), DCPTime (c, piece->frc) + piece->content->position ());
 }
 
 Frame
-Player::dcp_to_content_audio (shared_ptr<const Piece> piece, DCPTime t) const
+Player::dcp_to_resampled_audio (shared_ptr<const Piece> piece, DCPTime t) const
 {
-       /* s is the offset of t from the start position of this content */
        DCPTime s = t - piece->content->position ();
-       s = DCPTime (max (DCPTime::Type (0), s.get ()));
-       s = DCPTime (min (piece->content->length_after_trim().get(), s.get()));
-
-       /* Convert this to the content frame */
-       return DCPTime (s + piece->content->trim_start()).frames (_film->audio_frame_rate());
+       s = min (piece->content->length_after_trim(), s);
+       /* See notes in dcp_to_content_video */
+       return max (DCPTime (), DCPTime (piece->content->trim_start (), piece->frc) + s).frames_floor (_film->audio_frame_rate ());
 }
 
 ContentTime
 Player::dcp_to_content_subtitle (shared_ptr<const Piece> piece, DCPTime t) const
 {
-       /* s is the offset of t from the start position of this content */
        DCPTime s = t - piece->content->position ();
-       s = DCPTime (max (DCPTime::Type (0), s.get ()));
-       s = DCPTime (min (piece->content->length_after_trim().get(), s.get()));
-
-       return ContentTime (s + piece->content->trim_start(), piece->frc);
-}
-
-void
-PlayerStatistics::dump (shared_ptr<Log> log) const
-{
-       log->log (String::compose ("Video: %1 good %2 skipped %3 black %4 repeat", video.good, video.skip, video.black, video.repeat), Log::TYPE_GENERAL);
-       log->log (String::compose ("Audio: %1 good %2 skipped %3 silence", audio.good, audio.skip, audio.silence.seconds()), Log::TYPE_GENERAL);
-}
-
-PlayerStatistics const &
-Player::statistics () const
-{
-       return _statistics;
+       s = min (piece->content->length_after_trim(), s);
+       return max (ContentTime (), ContentTime (s, piece->frc) + piece->content->trim_start());
 }
 
 /** @param burnt true to return only subtitles to be burnt, false to return only
@@ -630,6 +605,7 @@ Player::get_subtitles (DCPTime time, DCPTime length, bool starting, bool burnt)
                                        s.set_aspect_adjust (xs / ys);
                                }
                                ps.text.push_back (s);
+                               ps.add_fonts (subtitle_content->fonts ());
                        }
                }
        }