Use an enum for the effect in SubtitleContent.
[dcpomatic.git] / src / lib / player.cc
index 496153b0e445d001a416236c0ef57de10d057302..8e56991f816eb10a843fdf90261d561937fcaa5d 100644 (file)
@@ -151,6 +151,7 @@ Player::setup_pieces ()
                }
        }
 
+       _stream_states.clear ();
        BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
                if (i->content->audio) {
                        BOOST_FOREACH (AudioStreamPtr j, i->content->audio->streams()) {
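The added _stream_states.clear() matters because setup_pieces() can run more than once: without it, streams that have since disappeared from the playlist would keep stale entries in the map. A minimal sketch of the rebuild pattern, using stand-in types rather than dcpomatic's real AudioStreamPtr/StreamState:

#include <map>
#include <vector>

struct StreamState {
	double last_push_end = 0;   /* stand-in for the real per-stream state */
};

static std::map<int, StreamState> stream_states;   /* keyed by a stand-in stream id */

static void
rebuild (std::vector<int> const& current_streams)
{
	/* Drop entries left over from a previous call; later lookups should only
	   ever find streams belonging to the current set of pieces. */
	stream_states.clear ();
	for (size_t i = 0; i < current_streams.size(); ++i) {
		stream_states[current_streams[i]] = StreamState ();
	}
}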
@@ -185,10 +186,10 @@ Player::playlist_content_changed (weak_ptr<Content> w, int property, bool freque
                property == DCPContentProperty::NEEDS_ASSETS ||
                property == DCPContentProperty::NEEDS_KDM ||
                property == SubtitleContentProperty::COLOUR ||
-               property == SubtitleContentProperty::OUTLINE ||
-               property == SubtitleContentProperty::SHADOW ||
+               property == SubtitleContentProperty::EFFECT ||
                property == SubtitleContentProperty::EFFECT_COLOUR ||
                property == FFmpegContentProperty::SUBTITLE_STREAM ||
+               property == FFmpegContentProperty::FILTERS ||
                property == VideoContentProperty::COLOUR_CONVERSION
                ) {
 
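This hunk is the change the commit title describes: the separate OUTLINE and SHADOW properties collapse into a single EFFECT property (alongside the unrelated addition of FFmpegContentProperty::FILTERS as a change that requires new pieces). The effect itself presumably becomes a three-way enum along the lines below; the enumerator names are an assumption, modelled on libdcp's dcp::Effect rather than copied from subtitle_content.h:

/* Sketch only: the real code most likely reuses dcp::Effect from libdcp. */
enum Effect {
	EFFECT_NONE,
	EFFECT_BORDER,   /* the old OUTLINE case */
	EFFECT_SHADOW    /* the old SHADOW case */
};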
@@ -345,8 +346,8 @@ DCPTime
 Player::content_video_to_dcp (shared_ptr<const Piece> piece, Frame f) const
 {
        /* See comment in dcp_to_content_video */
-       DCPTime const d = DCPTime::from_frames (f * piece->frc.factor(), piece->frc.dcp) - DCPTime (piece->content->trim_start (), piece->frc);
-       return max (DCPTime (), d + piece->content->position ());
+       DCPTime const d = DCPTime::from_frames (f * piece->frc.factor(), piece->frc.dcp) - DCPTime(piece->content->trim_start(), piece->frc);
+       return d + piece->content->position();
 }
 
 Frame
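The rewritten return drops the max(DCPTime(), ...) clamp, so a frame that maps before DCP time zero is now returned as-is rather than being pinned to zero. A worked sketch of the arithmetic with assumed numbers (content and DCP at the same rate, so frc.factor() is 1):

/* Assumed example values, not from a real project. */
int const dcp_rate = 24;            /* DCP video frame rate */
int const frame = 12;               /* content frame index passed in */
int const trim_start_frames = 48;   /* 2 s trimmed off the head */
int const position_frames = 240;    /* content placed 10 s into the DCP */

/* d = from_frames(frame * factor) - trim_start  =>  12 - 48 = -36 frames */
int const d = frame - trim_start_frames;

/* result = d + position  =>  204 frames */
int const dcp_frame = d + position_frames;
double const seconds = double (dcp_frame) / dcp_rate;   /* 8.5 s; the old code
	would have returned the same value here, but would have clamped any
	result falling before 0 s up to 0 s */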
@@ -525,8 +526,14 @@ Player::pass ()
        optional<DCPTime> earliest_time;
 
        BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
-               if (!i->done) {
-                       DCPTime const t = content_time_to_dcp (i, i->decoder->position());
+               if (i->done) {
+                       continue;
+               }
+
+               DCPTime const t = content_time_to_dcp (i, i->decoder->position());
+               if (t > i->content->end()) {
+                       i->done = true;
+               } else {
                        /* Given two choices at the same time, pick the one with a subtitle so we see it before
                           the video.
                        */
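pass() now retires a piece as soon as its decoder's position, mapped to DCP time, is past the content's end, instead of only skipping pieces that are already marked done. The selection loop follows a pattern roughly like this (simplified stand-in types, not the real Piece/Decoder interfaces):

#include <vector>

struct PieceSketch {
	bool done = false;
	double position = 0;   /* decoder position mapped to DCP time, seconds */
	double end = 0;        /* end of the content in the DCP, seconds */
};

static PieceSketch*
earliest_piece (std::vector<PieceSketch>& pieces)
{
	PieceSketch* earliest = 0;
	bool have_time = false;
	double earliest_time = 0;
	for (size_t i = 0; i < pieces.size(); ++i) {
		PieceSketch& p = pieces[i];
		if (p.done) {
			continue;
		}
		if (p.position > p.end) {
			/* The decoder has run off the end of this content; retire it
			   here rather than asking it for more data. */
			p.done = true;
		} else if (!have_time || p.position < earliest_time) {
			earliest_time = p.position;
			earliest = &p;
			have_time = true;
		}
	}
	return earliest;
}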
@@ -637,17 +644,17 @@ Player::subtitles_for_frame (DCPTime time) const
        return merge (subtitles);
 }
 
-bool
+void
 Player::video (weak_ptr<Piece> wp, ContentVideo video)
 {
        shared_ptr<Piece> piece = wp.lock ();
        if (!piece) {
-               return false;
+               return;
        }
 
        FrameRateChange frc(piece->content->active_video_frame_rate(), _film->video_frame_rate());
        if (frc.skip && (video.frame % 2) == 1) {
-               return false;
+               return;
        }
 
        /* Time of the first frame we will emit */
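Player::video no longer reports whether the frame was used, so both early exits become bare returns. The frc.skip test is the 2:1 frame-rate case: when the content runs at roughly twice the DCP rate, every odd-numbered source frame is dropped. A sketch of that decision with assumed rates (FrameRateChange itself works out skip/repeat; the thresholds below are illustrative):

struct FRCSketch {
	bool skip;    /* source runs at roughly twice the DCP rate */
	bool repeat;  /* source runs at roughly half the DCP rate */
};

static FRCSketch
make_frc (double source_rate, double dcp_rate)
{
	FRCSketch f;
	f.skip = source_rate > dcp_rate * 1.5;    /* e.g. 48 fps content into a 24 fps DCP */
	f.repeat = dcp_rate > source_rate * 1.5;  /* e.g. 24 fps content into a 48 fps DCP */
	return f;
}

static bool
use_frame (FRCSketch const& frc, int frame)
{
	/* With skip set, 48 source frames per second become 24 emitted frames. */
	return !(frc.skip && (frame % 2) == 1);
}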
@@ -658,7 +665,7 @@ Player::video (weak_ptr<Piece> wp, ContentVideo video)
                time < piece->content->position() ||
                time >= piece->content->end() ||
                (_last_video_time && time < *_last_video_time)) {
-               return false;
+               return;
        }
 
        /* Fill gaps that we discover now that we have some video which needs to be emitted */
@@ -696,21 +703,16 @@ Player::video (weak_ptr<Piece> wp, ContentVideo video)
                emit_video (_last_video[wp], t);
                t += one_video_frame ();
        }
-
-       return true;
 }
 
-/** @return Number of input frames that were `accepted'.  This is the number of frames passed in
- *  unless some were discarded at the end of the block.
- */
-Frame
+void
 Player::audio (weak_ptr<Piece> wp, AudioStreamPtr stream, ContentAudio content_audio)
 {
        DCPOMATIC_ASSERT (content_audio.audio->frames() > 0);
 
        shared_ptr<Piece> piece = wp.lock ();
        if (!piece) {
-               return 0;
+               return;
        }
 
        shared_ptr<AudioContent> content = piece->content->audio;
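Player::audio likewise becomes void, and the old `accepted' frame count disappears along with its doc comment. What remains is the trimming of the block against the content's extent; the head trim that discard_audio() performs looks roughly like this, with simplified types standing in for AudioBuffers and DCPTime (times here are plain audio frames):

#include <utility>
#include <vector>

static std::pair<std::vector<float>, long>
discard_head (std::vector<float> const& block, long time, long position)
{
	if (position <= time) {
		/* Block starts at or after the content: keep it all */
		return std::make_pair (block, time);
	}
	long const discard = position - time;
	if (discard >= static_cast<long> (block.size ())) {
		/* The whole block lies before the content starts */
		return std::make_pair (std::vector<float> (), position);
	}
	/* Keep only the samples from the content's start onwards */
	std::vector<float> kept (block.begin () + discard, block.end ());
	return std::make_pair (kept, position);
}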
@@ -721,33 +723,26 @@ Player::audio (weak_ptr<Piece> wp, AudioStreamPtr stream, ContentAudio content_a
        /* And the end of this block in the DCP */
        DCPTime end = time + DCPTime::from_frames(content_audio.audio->frames(), content->resampled_frame_rate());
 
-       /* We consider frames trimmed off the beginning to nevertheless be `accepted'; it's only frames trimmed
-          off the end that are considered as discarded.  This logic is necessary to ensure correct reel lengths,
-          although the precise details escape me at the moment.
-       */
-       Frame accepted = content_audio.audio->frames();
-
        /* Remove anything that comes before the start or after the end of the content */
        if (time < piece->content->position()) {
                pair<shared_ptr<AudioBuffers>, DCPTime> cut = discard_audio (content_audio.audio, time, piece->content->position());
                if (!cut.first) {
                        /* This audio is entirely discarded */
-                       return 0;
+                       return;
                }
                content_audio.audio = cut.first;
                time = cut.second;
        } else if (time > piece->content->end()) {
                /* Discard it all */
-               return 0;
+               return;
        } else if (end > piece->content->end()) {
                Frame const remaining_frames = DCPTime(piece->content->end() - time).frames_round(_film->audio_frame_rate());
                if (remaining_frames == 0) {
-                       return 0;
+                       return;
                }
                shared_ptr<AudioBuffers> cut (new AudioBuffers (content_audio.audio->channels(), remaining_frames));
                cut->copy_from (content_audio.audio.get(), remaining_frames, 0, 0);
                content_audio.audio = cut;
-               accepted = content_audio.audio->frames();
        }
 
        DCPOMATIC_ASSERT (content_audio.audio->frames() > 0);
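At the other end, a block that straddles the content's end is simply cut down to remaining_frames; there is no longer an `accepted' total to adjust. A worked sketch of that computation with assumed numbers:

/* Assumed: 48 kHz DCP audio, a block starting 0.1 s before the content ends. */
int const audio_frame_rate = 48000;
double const block_start = 100.0;   /* DCP time of the block, seconds */
double const content_end = 100.1;   /* piece->content->end(), seconds */

/* remaining_frames = DCPTime(end - time).frames_round(rate) */
long const remaining_frames =
	static_cast<long> ((content_end - block_start) * audio_frame_rate + 0.5);   /* 4800 */

/* Only the first 4800 frames are copied into `cut' and merged; everything
   after the content's end is dropped, and a block starting exactly at the
   end (remaining_frames == 0) is dropped outright. */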
@@ -775,7 +770,6 @@ Player::audio (weak_ptr<Piece> wp, AudioStreamPtr stream, ContentAudio content_a
        _audio_merger.push (content_audio.audio, time);
        DCPOMATIC_ASSERT (_stream_states.find (stream) != _stream_states.end ());
        _stream_states[stream].last_push_end = time + DCPTime::from_frames (content_audio.audio->frames(), _film->audio_frame_rate());
-       return accepted;
 }
 
 void
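Finally, the push into the audio merger records how far this stream has now been filled; pass() presumably consults these per-stream positions later, for example to decide how far the merger can safely be pulled. The book-keeping reduces to something like this (stand-in key and time types):

#include <map>
#include <string>

struct StreamStateSketch {
	double last_push_end = 0;   /* DCP time, seconds, up to which audio has been pushed */
};

static std::map<std::string, StreamStateSketch> states;

static void
record_push (std::string const& stream, double block_time, int frames, int rate)
{
	/* last_push_end = time of this block + its length */
	states[stream].last_push_end = block_time + double (frames) / rate;
}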