Make EmptyVideo work with stereo a little better.
[dcpomatic.git] / src/lib/player.cc
index 981078636bf87764fe172783a371668b7fcbe380..392c929a31f7923f505207d2db72c69c1330b649 100644
@@ -144,20 +144,6 @@ Player::setup_pieces ()
 }
 
 
-bool
-have_video (shared_ptr<const Content> content)
-{
-       return static_cast<bool>(content->video) && content->video->use() && content->can_be_played();
-}
-
-
-bool
-have_audio (shared_ptr<const Content> content)
-{
-       return static_cast<bool>(content->audio);
-}
-
-
 void
 Player::setup_pieces_unlocked ()
 {
@@ -166,8 +152,19 @@ Player::setup_pieces_unlocked ()
        auto old_pieces = _pieces;
        _pieces.clear ();
 
-       _shuffler.reset (new Shuffler());
-       _shuffler->Video.connect(bind(&Player::video, this, _1, _2));
+       auto playlist_content = playlist()->content();
+       bool const have_threed = std::any_of(
+               playlist_content.begin(),
+               playlist_content.end(),
+               [](shared_ptr<const Content> c) {
+                       return c->video && (c->video->frame_type() == VideoFrameType::THREE_D_LEFT || c->video->frame_type() == VideoFrameType::THREE_D_RIGHT);
+               });
+
+
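+       /* Only make a Shuffler if some content is 3D with separate L and R frames, since those are the frames that can arrive out of order */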
+       if (have_threed) {
+               _shuffler.reset(new Shuffler());
+               _shuffler->Video.connect(bind(&Player::video, this, _1, _2));
+       }
 
        for (auto i: playlist()->content()) {
 
@@ -219,7 +216,7 @@ Player::setup_pieces_unlocked ()
                _pieces.push_back (piece);
 
                if (decoder->video) {
-                       if (i->video->frame_type() == VideoFrameType::THREE_D_LEFT || i->video->frame_type() == VideoFrameType::THREE_D_RIGHT) {
+                       if (have_threed) {
                                /* We need a Shuffler to cope with 3D L/R video data arriving out of sequence */
                                decoder->video->Data.connect (bind(&Shuffler::video, _shuffler.get(), weak_ptr<Piece>(piece), _1));
                        } else {
@@ -261,24 +258,24 @@ Player::setup_pieces_unlocked ()
                }
        }
 
+       auto ignore_overlap = [](shared_ptr<VideoContent> v) {
+               return v && v->use() && v->frame_type() != VideoFrameType::THREE_D_LEFT && v->frame_type() != VideoFrameType::THREE_D_RIGHT;
+       };
+
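+       /* Only video that is in use and is not one eye of a 3D pair takes part in the overlapping-video check below */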
        for (auto i = _pieces.begin(); i != _pieces.end(); ++i) {
-               if (auto video = (*i)->content->video) {
-                       if (video->use() && video->frame_type() != VideoFrameType::THREE_D_LEFT && video->frame_type() != VideoFrameType::THREE_D_RIGHT) {
-                               /* Look for content later in the content list with in-use video that overlaps this */
-                               auto period = DCPTimePeriod((*i)->content->position(), (*i)->content->end(_film));
-                               auto j = i;
-                               ++j;
-                               for (; j != _pieces.end(); ++j) {
-                                       if ((*j)->content->video && (*j)->content->video->use()) {
-                                               (*i)->ignore_video = DCPTimePeriod((*j)->content->position(), (*j)->content->end(_film)).overlap(period);
-                                       }
+               if (ignore_overlap((*i)->content->video)) {
+                       /* Look for content later in the content list with in-use video that overlaps this */
+                       auto const period = DCPTimePeriod((*i)->content->position(), (*i)->content->end(_film));
+                       for (auto j = std::next(i); j != _pieces.end(); ++j) {
+                               if ((*j)->content->video && ignore_overlap((*j)->content->video)) {
+                                       (*i)->ignore_video = DCPTimePeriod((*j)->content->position(), (*j)->content->end(_film)).overlap(period);
                                }
                        }
                }
        }
 
-       _black = Empty (_film, playlist(), bind(&have_video, _1), _playback_length);
-       _silent = Empty (_film, playlist(), bind(&have_audio, _1), _playback_length);
+       _black = EmptyVideo (_film, playlist(), _playback_length);
+       _silent = EmptyAudio (_film, playlist(), _playback_length);
 
        _next_video_time = boost::none;
        _next_video_eyes = Eyes::BOTH;
@@ -713,10 +710,13 @@ Player::pass ()
                break;
        }
        case BLACK:
+       {
                LOG_DEBUG_PLAYER ("Emit black for gap at %1", to_string(_black.position()));
-               emit_video (black_player_video_frame(Eyes::BOTH), _black.position());
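+               /* period_at_position().second is the eyes for which black should be emitted at this position */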
+               auto period = _black.period_at_position();
+               emit_video (black_player_video_frame(period.second), _black.position());
                _black.set_position (_black.position() + one_video_frame());
                break;
+       }
        case SILENT:
        {
                LOG_DEBUG_PLAYER ("Emit silence for gap at %1", to_string(_silent.position()));
@@ -810,7 +810,9 @@ Player::pass ()
        }
 
        if (done) {
-               _shuffler->flush ();
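+               /* There may be no Shuffler if no content is 3D L/R */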
+               if (_shuffler) {
+                       _shuffler->flush ();
+               }
                for (auto const& i: _delay) {
                        do_emit_video(i.first, i.second);
                }
@@ -903,7 +905,11 @@ Player::video (weak_ptr<Piece> weak_piece, ContentVideo video)
 
        /* Time of the first frame we will emit */
        DCPTime const time = content_video_to_dcp (piece, video.frame);
-       LOG_DEBUG_PLAYER("Received video frame %1 at %2", video.frame, to_string(time));
+       if (video.eyes == Eyes::BOTH) {
+               LOG_DEBUG_PLAYER("Received video frame %1 at %2", video.frame, to_string(time));
+       } else {
+               LOG_DEBUG_PLAYER("Received video frame %1 %2 at %3", (video.eyes == Eyes::LEFT ? "L" : "R"), video.frame, to_string(time));
+       }
 
        /* Discard if it's before the content's period or the last accurate seek.  We can't discard
           if it's after the content's period here as in that case we still need to fill any gap between
@@ -1104,32 +1110,35 @@ Player::bitmap_text_start (weak_ptr<Piece> weak_piece, weak_ptr<const TextConten
                return;
        }
 
-       /* Apply content's subtitle offsets */
-       subtitle.sub.rectangle.x += content->x_offset ();
-       subtitle.sub.rectangle.y += content->y_offset ();
+       PlayerText ps;
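+       /* subtitle may contain several bitmaps; offset and scale each one and gather them all into a single PlayerText */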
+       for (auto& sub: subtitle.subs)
+       {
+               /* Apply content's subtitle offsets */
+               sub.rectangle.x += content->x_offset ();
+               sub.rectangle.y += content->y_offset ();
 
-       /* Apply a corrective translation to keep the subtitle centred after the scale that is coming up */
-       subtitle.sub.rectangle.x -= subtitle.sub.rectangle.width * ((content->x_scale() - 1) / 2);
-       subtitle.sub.rectangle.y -= subtitle.sub.rectangle.height * ((content->y_scale() - 1) / 2);
+               /* Apply a corrective translation to keep the subtitle centred after the scale that is coming up */
+               sub.rectangle.x -= sub.rectangle.width * ((content->x_scale() - 1) / 2);
+               sub.rectangle.y -= sub.rectangle.height * ((content->y_scale() - 1) / 2);
 
-       /* Apply content's subtitle scale */
-       subtitle.sub.rectangle.width *= content->x_scale ();
-       subtitle.sub.rectangle.height *= content->y_scale ();
+               /* Apply content's subtitle scale */
+               sub.rectangle.width *= content->x_scale ();
+               sub.rectangle.height *= content->y_scale ();
 
-       PlayerText ps;
-       auto image = subtitle.sub.image;
+               auto image = sub.image;
 
-       /* We will scale the subtitle up to fit _video_container_size */
-       int const width = subtitle.sub.rectangle.width * _video_container_size.width;
-       int const height = subtitle.sub.rectangle.height * _video_container_size.height;
-       if (width == 0 || height == 0) {
-               return;
-       }
+               /* We will scale the subtitle up to fit _video_container_size */
+               int const width = sub.rectangle.width * _video_container_size.width;
+               int const height = sub.rectangle.height * _video_container_size.height;
+               if (width == 0 || height == 0) {
+                       /* Skip an empty bitmap but keep processing the rest of this subtitle */
+                       continue;
+               }
 
-       dcp::Size scaled_size (width, height);
-       ps.bitmap.push_back (BitmapText(image->scale(scaled_size, dcp::YUVToRGB::REC601, image->pixel_format(), Image::Alignment::PADDED, _fast), subtitle.sub.rectangle));
-       DCPTime from (content_time_to_dcp (piece, subtitle.from()));
+               dcp::Size scaled_size (width, height);
+               ps.bitmap.push_back (BitmapText(image->scale(scaled_size, dcp::YUVToRGB::REC601, image->pixel_format(), Image::Alignment::PADDED, _fast), sub.rectangle));
+       }
 
+       DCPTime from(content_time_to_dcp(piece, subtitle.from()));
        _active_texts[static_cast<int>(content->type())].add_from(weak_content, ps, from);
 }
 
@@ -1328,6 +1337,7 @@ Player::do_emit_video (shared_ptr<PlayerVideo> pv, DCPTime time)
                pv->set_text (subtitles.get ());
        }
 
+       LOG_DEBUG_PLAYER("Player --> Video %1 %2", to_string(time), static_cast<int>(pv->eyes()));
        Video (pv, time);
 }
 
@@ -1429,6 +1439,22 @@ Player::content_time_to_dcp (shared_ptr<const Content> content, ContentTime t)
 }
 
 
+optional<ContentTime>
+Player::dcp_to_content_time (shared_ptr<const Content> content, DCPTime t)
+{
+       boost::mutex::scoped_lock lm (_mutex);
+
+       for (auto i: _pieces) {
+               if (i->content == content) {
+                       return dcp_to_content_time (i, t);
+               }
+       }
+
+       /* We couldn't find this content; perhaps things are being changed over */
+       return {};
+}
+
+
 shared_ptr<const Playlist>
 Player::playlist () const
 {