Note that a newer libsub version is required.
diff --git a/src/lib/player.cc b/src/lib/player.cc
index 610d7748dd131bd062d812bb3862281850ed5841..75f6b7919fd6c02fe0143e9d152917fc7f17ee41 100644
--- a/src/lib/player.cc
+++ b/src/lib/player.cc
@@ -143,7 +143,7 @@ Player::construct ()
        connect();
        set_video_container_size(film->frame_size());
 
-       film_change (ChangeType::DONE, Film::Property::AUDIO_PROCESSOR);
+       film_change(ChangeType::DONE, FilmProperty::AUDIO_PROCESSOR);
 
        setup_pieces ();
        seek (DCPTime (), true);
@@ -191,6 +191,7 @@ Player::Player(Player&& other)
        , _silent(std::move(other._silent))
        , _active_texts(std::move(other._active_texts))
        , _audio_processor(std::move(other._audio_processor))
+       , _disable_audio_processor(other._disable_audio_processor)
        , _playback_length(other._playback_length.load())
        , _subtitle_alignment(other._subtitle_alignment)
 {
@@ -230,6 +231,7 @@ Player::operator=(Player&& other)
        _silent = std::move(other._silent);
        _active_texts = std::move(other._active_texts);
        _audio_processor = std::move(other._audio_processor);
+       _disable_audio_processor = other._disable_audio_processor;
        _playback_length = other._playback_length.load();
        _subtitle_alignment = other._subtitle_alignment;
 
@@ -249,7 +251,7 @@ have_video (shared_ptr<const Content> content)
 bool
 have_audio (shared_ptr<const Content> content)
 {
-       return static_cast<bool>(content->audio) && content->can_be_played();
+       return content->has_mapped_audio() && content->can_be_played();
 }
 
 
@@ -367,7 +369,7 @@ Player::setup_pieces ()
 
        _stream_states.clear ();
        for (auto i: _pieces) {
-               if (i->content->audio) {
+               if (i->content->has_mapped_audio()) {
                        for (auto j: i->content->audio->streams()) {
                                _stream_states[j] = StreamState(i);
                        }
@@ -481,7 +483,7 @@ Player::playlist_change (ChangeType type)
 
 
 void
-Player::film_change (ChangeType type, Film::Property p)
+Player::film_change(ChangeType type, FilmProperty p)
 {
        /* Here we should notice Film properties that affect our output, and
           alert listeners that our output now would be different to how it was
@@ -493,9 +495,9 @@ Player::film_change (ChangeType type, Film::Property p)
                return;
        }
 
-       if (p == Film::Property::CONTAINER) {
+       if (p == FilmProperty::CONTAINER) {
                Change (type, PlayerProperty::FILM_CONTAINER, false);
-       } else if (p == Film::Property::VIDEO_FRAME_RATE) {
+       } else if (p == FilmProperty::VIDEO_FRAME_RATE) {
                /* Pieces contain a FrameRateChange which contains the DCP frame rate,
                   so we need new pieces here.
                */
@@ -503,12 +505,12 @@ Player::film_change (ChangeType type, Film::Property p)
                        setup_pieces ();
                }
                Change (type, PlayerProperty::FILM_VIDEO_FRAME_RATE, false);
-       } else if (p == Film::Property::AUDIO_PROCESSOR) {
+       } else if (p == FilmProperty::AUDIO_PROCESSOR) {
                if (type == ChangeType::DONE && film->audio_processor ()) {
                        boost::mutex::scoped_lock lm (_mutex);
                        _audio_processor = film->audio_processor()->clone(film->audio_frame_rate());
                }
-       } else if (p == Film::Property::AUDIO_CHANNELS) {
+       } else if (p == FilmProperty::AUDIO_CHANNELS) {
                if (type == ChangeType::DONE) {
                        boost::mutex::scoped_lock lm (_mutex);
                        _audio_merger.clear ();
@@ -686,6 +688,39 @@ Player::set_play_referenced ()
 }
 
 
+pair<shared_ptr<Piece>, optional<DCPTime>>
+Player::earliest_piece_and_time() const
+{
+       auto film = _film.lock();
+       DCPOMATIC_ASSERT(film);
+
+       shared_ptr<Piece> earliest_content;
+       optional<DCPTime> earliest_time;
+
+       for (auto const& piece: _pieces) {
+               if (piece->done) {
+                       continue;
+               }
+
+               auto const t = content_time_to_dcp(piece, max(piece->decoder->position(), piece->content->trim_start()));
+               if (t > piece->content->end(film)) {
+                       piece->done = true;
+               } else {
+
+                       /* Given two choices at the same time, pick the one with texts so we see it before
+                          the video.
+                       */
+                       if (!earliest_time || t < *earliest_time || (t == *earliest_time && !piece->decoder->text.empty())) {
+                               earliest_time = t;
+                               earliest_content = piece;
+                       }
+               }
+       }
+
+       return { earliest_content, earliest_time };
+}
+
+
 bool
 Player::pass ()
 {
@@ -709,26 +744,7 @@ Player::pass ()
 
        shared_ptr<Piece> earliest_content;
        optional<DCPTime> earliest_time;
-
-       for (auto i: _pieces) {
-               if (i->done) {
-                       continue;
-               }
-
-               auto const t = content_time_to_dcp (i, max(i->decoder->position(), i->content->trim_start()));
-               if (t > i->content->end(film)) {
-                       i->done = true;
-               } else {
-
-                       /* Given two choices at the same time, pick the one with texts so we see it before
-                          the video.
-                       */
-                       if (!earliest_time || t < *earliest_time || (t == *earliest_time && !i->decoder->text.empty())) {
-                               earliest_time = t;
-                               earliest_content = i;
-                       }
-               }
-       }
+       std::tie(earliest_content, earliest_time) = earliest_piece_and_time();
 
        bool done = false;
 
@@ -832,17 +848,18 @@ Player::pass ()
                [](state_pair const& a, state_pair const& b) { return a.second.last_push_end.get() < b.second.last_push_end.get(); }
                );
 
+       std::map<AudioStreamPtr, StreamState> alive_stream_states;
+
        if (latest_last_push_end != have_pushed.end()) {
                LOG_DEBUG_PLAYER("Leading audio stream is in %1 at %2", latest_last_push_end->second.piece->content->path(0), to_string(latest_last_push_end->second.last_push_end.get()));
-       }
 
-       /* Now make a list of those streams that are less than ignore_streams_behind behind the leader */
-       std::map<AudioStreamPtr, StreamState> alive_stream_states;
-       for (auto const& i: _stream_states) {
-               if (!i.second.last_push_end || (latest_last_push_end->second.last_push_end.get() - i.second.last_push_end.get()) < dcpomatic::DCPTime::from_seconds(ignore_streams_behind)) {
-                       alive_stream_states.insert(i);
-               } else {
-                       LOG_DEBUG_PLAYER("Ignoring stream %1 because it is too far behind", i.second.piece->content->path(0));
+               /* Now make a list of those streams that are less than ignore_streams_behind behind the leader */
+               for (auto const& i: _stream_states) {
+                       if (!i.second.last_push_end || (latest_last_push_end->second.last_push_end.get() - i.second.last_push_end.get()) < dcpomatic::DCPTime::from_seconds(ignore_streams_behind)) {
+                               alive_stream_states.insert(i);
+                       } else {
+                               LOG_DEBUG_PLAYER("Ignoring stream %1 because it is too far behind", i.second.piece->content->path(0));
+                       }
                }
        }
 
@@ -920,7 +937,7 @@ Player::open_subtitles_for_frame (DCPTime time) const
 
                /* Bitmap subtitles */
                for (auto i: j.bitmap) {
-                       if (!i.image) {
+                       if (!i.image || i.image->size().width == 0 || i.image->size().height == 0) {
                                continue;
                        }
 
@@ -941,7 +958,10 @@ Player::open_subtitles_for_frame (DCPTime time) const
                /* String subtitles (rendered to an image) */
                if (!j.string.empty()) {
                        auto s = render_text(j.string, _video_container_size, time, vfr);
-                       copy (s.begin(), s.end(), back_inserter (captions));
+                       copy_if(s.begin(), s.end(), back_inserter(captions), [](PositionImage const& image) {
+                               return image.image->size().width && image.image->size().height;
+                       });
+
                }
        }
 
@@ -1088,13 +1108,16 @@ Player::video (weak_ptr<Piece> weak_piece, ContentVideo video)
 
        auto const content_video = piece->content->video;
 
+       auto scaled_size = content_video->scaled_size(film->frame_size());
+       DCPOMATIC_ASSERT(scaled_size);
+
        for (auto eyes: eyes_to_emit) {
                _last_video[weak_piece] = std::make_shared<PlayerVideo>(
                        video.image,
                        content_video->actual_crop(),
                        content_video->fade(film, video.frame),
                        scale_for_display(
-                               content_video->scaled_size(film->frame_size()),
+                               *scaled_size,
                                _video_container_size,
                                film->frame_size(),
                                content_video->pixel_quanta()
@@ -1203,7 +1226,7 @@ Player::audio (weak_ptr<Piece> weak_piece, AudioStreamPtr stream, ContentAudio c
 
        /* Process */
 
-       if (_audio_processor) {
+       if (_audio_processor && !_disable_audio_processor) {
                content_audio.audio = _audio_processor->run(content_audio.audio, film->audio_channels());
        }
 
@@ -1627,3 +1650,38 @@ Player::signal_change(ChangeType type, int property)
        Change(type, property, false);
 }
 
+
+/** Must be called from the same thread that calls ::pass() */
+void
+Player::set_disable_audio_processor()
+{
+       _disable_audio_processor = true;
+}
+
+
+Frame
+Player::frames_done() const
+{
+       auto film = _film.lock();
+       DCPOMATIC_ASSERT(film);
+
+       shared_ptr<Piece> earliest_content;
+       optional<DCPTime> earliest_time;
+       std::tie(earliest_content, earliest_time) = earliest_piece_and_time();
+
+       return earliest_time.get_value_or({}).frames_round(film->video_frame_rate());
+}
+
+
+float
+Player::progress() const
+{
+       auto film = _film.lock();
+       DCPOMATIC_ASSERT(film);
+
+       shared_ptr<Piece> earliest_content;
+       optional<DCPTime> earliest_time;
+       std::tie(earliest_content, earliest_time) = earliest_piece_and_time();
+
+       return static_cast<float>(earliest_time.get_value_or({}).get()) / film->length().get();
+}