Basics of forced reduction of JPEG2000 decode resolution.
[dcpomatic.git] / src / lib / player.cc
index adaee931e1dcb19347672a59a575df96c8fac614..7e21ef937b82a98f5ff3198ac07c2ea0a469cbfb 100644 (file)
@@ -127,7 +127,8 @@ Player::setup_pieces ()
 
                shared_ptr<DCPDecoder> dcp = dynamic_pointer_cast<DCPDecoder> (decoder);
                if (dcp && _play_referenced) {
                        dcp->set_decode_referenced ();
+                       dcp->set_forced_reduction (_dcp_decode_reduction);
                }
 
                shared_ptr<Piece> piece (new Piece (i, decoder, frc));
@@ -156,8 +159,8 @@ Player::setup_pieces ()
                }
        }
 
-       _black = Empty (_playlist, bind(&Content::video, _1));
-       _silent = Empty (_playlist, bind(&Content::audio, _1));
+       _black = Empty (_film->content(), _film->length(), bind(&Content::video, _1));
+       _silent = Empty (_film->content(), _film->length(), bind(&Content::audio, _1));
 
        _last_video_time = DCPTime ();
        _last_audio_time = DCPTime ();
@@ -509,10 +512,16 @@ Player::pass ()
                setup_pieces ();
        }
 
+       if (_playlist->length() == DCPTime()) {
+               /* Special case of an empty Film; just give one black frame */
+               emit_video (black_player_video_frame(), DCPTime());
+               return true;
+       }
+
        /* Find the decoder or empty which is farthest behind where we are and make it emit some data */
 
-       shared_ptr<Piece> earliest;
-       DCPTime earliest_content;
+       shared_ptr<Piece> earliest_content;
+       optional<DCPTime> earliest_time;
 
        BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
                if (!i->done) {
@@ -520,39 +529,62 @@ Player::pass ()
                        /* Given two choices at the same time, pick the one with a subtitle so we see it before
                           the video.
                        */
-                       if (!earliest || t < earliest_content || (t == earliest_content && i->decoder->subtitle)) {
-                               earliest_content = t;
-                               earliest = i;
+                       if (!earliest_time || t < *earliest_time || (t == *earliest_time && i->decoder->subtitle)) {
+                               earliest_time = t;
+                               earliest_content = i;
                        }
                }
        }
 
        bool done = false;
 
-       if (!_black.done() && (!earliest || _black.position() < earliest_content)) {
-               /* There is some black that must be emitted */
+       enum {
+               NONE,
+               CONTENT,
+               BLACK,
+               SILENT
+       } which = NONE;
+
+       if (earliest_content) {
+               which = CONTENT;
+       }
+
+       if (!_black.done() && (!earliest_time || _black.position() < *earliest_time)) {
+               earliest_time = _black.position ();
+               which = BLACK;
+       }
+
+       if (!_silent.done() && (!earliest_time || _silent.position() < *earliest_time)) {
+               earliest_time = _silent.position ();
+               which = SILENT;
+       }
+
+       switch (which) {
+       case CONTENT:
+               earliest_content->done = earliest_content->decoder->pass ();
+               break;
+       case BLACK:
                emit_video (black_player_video_frame(), _black.position());
                _black.set_position (_black.position() + one_video_frame());
-       } else if (!_silent.done() && (!earliest || _silent.position() < earliest_content)) {
-               /* There is some silence that must be emitted */
+               break;
+       case SILENT:
+       {
                DCPTimePeriod period (_silent.period_at_position());
                if (period.duration() > one_video_frame()) {
                        period.to = period.from + one_video_frame();
                }
                fill_audio (period);
                _silent.set_position (period.to);
-       } else if (_playlist->length() == DCPTime()) {
-               /* Special case of an empty Film; just give one black frame */
-               emit_video (black_player_video_frame(), DCPTime());
-       } else if (earliest) {
-               earliest->done = earliest->decoder->pass ();
-       } else {
+               break;
+       }
+       case NONE:
                done = true;
+               break;
        }
 
        /* Emit any audio that is ready */
 
-       DCPTime pull_to = _playlist->length ();
+       DCPTime pull_to = _film->length ();
        for (map<AudioStreamPtr, StreamState>::const_iterator i = _stream_states.begin(); i != _stream_states.end(); ++i) {
                if (!i->second.piece->done && i->second.last_push_end < pull_to) {
                        pull_to = i->second.last_push_end;
@@ -616,9 +648,16 @@ Player::video (weak_ptr<Piece> wp, ContentVideo video)
                return;
        }
 
-       /* Time and period of the frame we will emit */
+       /* Time of the first frame we will emit */
        DCPTime const time = content_video_to_dcp (piece, video.frame);
-       DCPTimePeriod const period (time, time + one_video_frame());
+
+       /* Discard if it's outside the content's period or if it's before the last accurate seek */
+       if (
+               time < piece->content->position() ||
+               time >= piece->content->end() ||
+               (_last_video_time && time < *_last_video_time)) {
+               return;
+       }
 
        /* Fill gaps that we discover now that we have some video which needs to be emitted */
 
@@ -635,14 +674,6 @@ Player::video (weak_ptr<Piece> wp, ContentVideo video)
                }
        }
 
-       /* Discard if it's outside the content's period or if it's before the last accurate seek */
-       if (
-               time < piece->content->position() ||
-               time >= piece->content->end() ||
-               (_last_video_time && time < *_last_video_time)) {
-               return;
-       }
-
        _last_video[wp].reset (
                new PlayerVideo (
                        video.image,
@@ -658,7 +689,11 @@ Player::video (weak_ptr<Piece> wp, ContentVideo video)
                        )
                );
 
-       emit_video (_last_video[wp], time);
+       DCPTime t = time;
+       for (int i = 0; i < frc.repeat; ++i) {
+               emit_video (_last_video[wp], t);
+               t += one_video_frame ();
+       }
 }
 
 void
@@ -818,6 +853,10 @@ Player::subtitle_stop (weak_ptr<Piece> wp, ContentTime to)
 void
 Player::seek (DCPTime time, bool accurate)
 {
+       if (!_have_valid_pieces) {
+               setup_pieces ();
+       }
+
        if (_audio_processor) {
                _audio_processor->flush ();
        }
@@ -918,3 +957,15 @@ Player::discard_audio (shared_ptr<const AudioBuffers> audio, DCPTime time, DCPTi
        cut->copy_from (audio.get(), remaining_frames, discard_frames, 0);
        return make_pair(cut, time + discard_time);
 }
+
+void
+Player::set_dcp_decode_reduction (optional<int> reduction)
+{
+       if (reduction == _dcp_decode_reduction) {
+               return;
+       }
+
+       _dcp_decode_reduction = reduction;
+       _have_valid_pieces = false;
+       Changed (false);
+}