X-Git-Url: https://git.carlh.net/gitweb/?p=dcpomatic.git;a=blobdiff_plain;f=src%2Flib%2Fplayer.cc;h=abcefcab5c57fcae7afbc5db48589cd12088d343;hp=732493f3b065d1c51bbbe339f315552229fd65e5;hb=86515d34200fa2a387e48b117eed9b02cabde30d;hpb=20da60db1c5a388629b054ed6e1b143decbbd5de

diff --git a/src/lib/player.cc b/src/lib/player.cc
index 732493f3b..abcefcab5 100644
--- a/src/lib/player.cc
+++ b/src/lib/player.cc
@@ -369,7 +369,7 @@ Player::setup_pieces ()
 	for (auto i: _pieces) {
 		if (i->content->audio) {
 			for (auto j: i->content->audio->streams()) {
-				_stream_states[j] = StreamState (i, i->content->position ());
+				_stream_states[j] = StreamState(i);
 			}
 		}
 	}
@@ -378,13 +378,29 @@ Player::setup_pieces ()
 		return v && v->use() && v->frame_type() != VideoFrameType::THREE_D_LEFT && v->frame_type() != VideoFrameType::THREE_D_RIGHT;
 	};
 
-	for (auto i = _pieces.begin(); i != _pieces.end(); ++i) {
-		if (ignore_overlap((*i)->content->video)) {
-			/* Look for content later in the content list with in-use video that overlaps this */
-			auto const period = DCPTimePeriod((*i)->content->position(), (*i)->content->end(film));
-			for (auto j = std::next(i); j != _pieces.end(); ++j) {
-				if ((*j)->content->video && ignore_overlap((*j)->content->video)) {
-					(*i)->ignore_video = DCPTimePeriod((*j)->content->position(), (*j)->content->end(film)).overlap(period);
+	for (auto piece = _pieces.begin(); piece != _pieces.end(); ++piece) {
+		if (ignore_overlap((*piece)->content->video)) {
+			/* Look for content later in the content list with in-use video that overlaps this */
+			auto const period = (*piece)->content->period(film);
+			for (auto later_piece = std::next(piece); later_piece != _pieces.end(); ++later_piece) {
+				if (ignore_overlap((*later_piece)->content->video)) {
+					if (auto overlap = (*later_piece)->content->period(film).overlap(period)) {
+						(*piece)->ignore_video.push_back(*overlap);
+					}
+				}
+			}
+		}
+	}
+
+	for (auto piece = _pieces.begin(); piece != _pieces.end(); ++piece) {
+		if ((*piece)->content->atmos) {
+			/* Look for content later in the content list with ATMOS that overlaps this */
+			auto const period = (*piece)->content->period(film);
+			for (auto later_piece = std::next(piece); later_piece != _pieces.end(); ++later_piece) {
+				if ((*later_piece)->content->atmos) {
+					if (auto overlap = (*later_piece)->content->period(film).overlap(period)) {
+						(*piece)->ignore_atmos.push_back(*overlap);
+					}
 				}
 			}
 		}
@@ -754,7 +770,12 @@ Player::pass ()
 	}
 	case BLACK:
 		LOG_DEBUG_PLAYER ("Emit black for gap at %1", to_string(_black.position()));
-		emit_video (black_player_video_frame(Eyes::BOTH), _black.position());
+		if (film->three_d()) {
+			emit_video(black_player_video_frame(Eyes::LEFT), _black.position());
+			emit_video(black_player_video_frame(Eyes::RIGHT), _black.position());
+		} else {
+			emit_video(black_player_video_frame(Eyes::BOTH), _black.position());
+		}
 		_black.set_position (_black.position() + one_video_frame());
 		break;
 	case SILENT:
@@ -800,21 +821,25 @@ Player::pass ()
 
 	using state_pair = std::pair<AudioStreamPtr, StreamState>;
 
+	/* Find streams that have pushed */
+	std::vector<state_pair> have_pushed;
+	std::copy_if(_stream_states.begin(), _stream_states.end(), std::back_inserter(have_pushed), [](state_pair const& a) { return static_cast<bool>(a.second.last_push_end); });
+
 	/* Find the 'leading' stream (i.e. the one that pushed data most recently) */
 	auto latest_last_push_end = std::max_element(
-		_stream_states.begin(),
-		_stream_states.end(),
-		[](state_pair const& a, state_pair const& b) { return a.second.last_push_end < b.second.last_push_end; }
+		have_pushed.begin(),
+		have_pushed.end(),
+		[](state_pair const& a, state_pair const& b) { return a.second.last_push_end.get() < b.second.last_push_end.get(); }
 	);
 
-	if (latest_last_push_end != _stream_states.end()) {
-		LOG_DEBUG_PLAYER("Leading audio stream is in %1 at %2", latest_last_push_end->second.piece->content->path(0), to_string(latest_last_push_end->second.last_push_end));
+	if (latest_last_push_end != have_pushed.end()) {
+		LOG_DEBUG_PLAYER("Leading audio stream is in %1 at %2", latest_last_push_end->second.piece->content->path(0), to_string(latest_last_push_end->second.last_push_end.get()));
 	}
 
 	/* Now make a list of those streams that are less than ignore_streams_behind behind the leader */
 	std::map<AudioStreamPtr, StreamState> alive_stream_states;
 	for (auto const& i: _stream_states) {
-		if ((latest_last_push_end->second.last_push_end - i.second.last_push_end) < dcpomatic::DCPTime::from_seconds(ignore_streams_behind)) {
+		if (!i.second.last_push_end || (latest_last_push_end->second.last_push_end.get() - i.second.last_push_end.get()) < dcpomatic::DCPTime::from_seconds(ignore_streams_behind)) {
 			alive_stream_states.insert(i);
 		} else {
 			LOG_DEBUG_PLAYER("Ignoring stream %1 because it is too far behind", i.second.piece->content->path(0));
@@ -823,8 +848,9 @@ Player::pass ()
 
 	auto pull_to = _playback_length.load();
 	for (auto const& i: alive_stream_states) {
-		if (!i.second.piece->done && i.second.last_push_end < pull_to) {
-			pull_to = i.second.last_push_end;
+		auto position = i.second.last_push_end.get_value_or(i.second.piece->content->position());
+		if (!i.second.piece->done && position < pull_to) {
+			pull_to = position;
 		}
 	}
 	if (!_silent.done() && _silent.position() < pull_to) {
@@ -927,6 +953,18 @@ Player::open_subtitles_for_frame (DCPTime time) const
 }
 
 
+static
+Eyes
+increment_eyes (Eyes e)
+{
+	if (e == Eyes::LEFT) {
+		return Eyes::RIGHT;
+	}
+
+	return Eyes::LEFT;
+}
+
+
 void
 Player::video (weak_ptr<Piece> weak_piece, ContentVideo video)
 {
@@ -953,6 +991,28 @@ Player::video (weak_ptr<Piece> weak_piece, ContentVideo video)
 		return;
 	}
 
+	vector<Eyes> eyes_to_emit;
+
+	if (!film->three_d()) {
+		if (video.eyes == Eyes::RIGHT) {
+			/* 2D film, 3D content: discard right */
+			return;
+		} else if (video.eyes == Eyes::LEFT) {
+			/* 2D film, 3D content: emit left as "both" */
+			video.eyes = Eyes::BOTH;
+			eyes_to_emit = { Eyes::BOTH };
+		}
+	} else {
+		if (video.eyes == Eyes::BOTH) {
+			/* 3D film, 2D content; emit "both" for left and right */
+			eyes_to_emit = { Eyes::LEFT, Eyes::RIGHT };
+		}
+	}
+
+	if (eyes_to_emit.empty()) {
+		eyes_to_emit = { video.eyes };
+	}
+
 	/* Time of the first frame we will emit */
 	DCPTime const time = content_video_to_dcp (piece, video.frame);
 	LOG_DEBUG_PLAYER("Received video frame %1 at %2", video.frame, to_string(time));
@@ -965,7 +1025,12 @@ Player::video (weak_ptr<Piece> weak_piece, ContentVideo video)
 		return;
 	}
 
-	if (piece->ignore_video && piece->ignore_video->contains(time)) {
+	auto ignore_video = std::find_if(
+		piece->ignore_video.begin(),
+		piece->ignore_video.end(),
+		[time](DCPTimePeriod period) { return period.contains(time); }
+	);
+	if (ignore_video != piece->ignore_video.end()) {
 		return;
 	}
 
@@ -981,7 +1046,7 @@ Player::video (weak_ptr<Piece> weak_piece, ContentVideo video)
 	if ((fill_to - fill_from) > one_video_frame() / 2) {
 		auto last = _last_video.find (weak_piece);
 		if (film->three_d()) {
-			auto fill_to_eyes = video.eyes;
+			auto fill_to_eyes = eyes_to_emit[0];
 			if (fill_to_eyes == Eyes::BOTH) {
 				fill_to_eyes = Eyes::LEFT;
 			}
@@ -1023,32 +1088,34 @@ Player::video (weak_ptr<Piece> weak_piece, ContentVideo video)
 
 	auto const content_video = piece->content->video;
 
-	_last_video[weak_piece] = std::make_shared<PlayerVideo>(
-		video.image,
-		content_video->actual_crop(),
-		content_video->fade(film, video.frame),
-		scale_for_display(
-			content_video->scaled_size(film->frame_size()),
+	for (auto eyes: eyes_to_emit) {
+		_last_video[weak_piece] = std::make_shared<PlayerVideo>(
+			video.image,
+			content_video->actual_crop(),
+			content_video->fade(film, video.frame),
+			scale_for_display(
+				content_video->scaled_size(film->frame_size()),
+				_video_container_size,
+				film->frame_size(),
+				content_video->pixel_quanta()
+			),
 			_video_container_size,
-			film->frame_size(),
-			content_video->pixel_quanta()
-		),
-		_video_container_size,
-		video.eyes,
-		video.part,
-		content_video->colour_conversion(),
-		content_video->range(),
-		piece->content,
-		video.frame,
-		false
-	);
-
-	DCPTime t = time;
-	for (int i = 0; i < frc.repeat; ++i) {
-		if (t < piece->content->end(film)) {
-			emit_video (_last_video[weak_piece], t);
+			eyes,
+			video.part,
+			content_video->colour_conversion(),
+			content_video->range(),
+			piece->content,
+			video.frame,
+			false
+		);
+
+		DCPTime t = time;
+		for (int i = 0; i < frc.repeat; ++i) {
+			if (t < piece->content->end(film)) {
+				emit_video (_last_video[weak_piece], t);
+			}
+			t += one_video_frame ();
 		}
-		t += one_video_frame ();
 	}
 }
 
@@ -1079,10 +1146,10 @@ Player::audio (weak_ptr<Piece> weak_piece, AudioStreamPtr stream, ContentAudio c
 
 	/* Compute time in the DCP */
 	auto time = resampled_audio_to_dcp (piece, content_audio.frame);
-	LOG_DEBUG_PLAYER("Received audio frame %1 at %2", content_audio.frame, to_string(time));
 
 	/* And the end of this block in the DCP */
 	auto end = time + DCPTime::from_frames(content_audio.audio->frames(), rfr);
+	LOG_DEBUG_PLAYER("Received audio frame %1 covering %2 to %3 (%4)", content_audio.frame, to_string(time), to_string(end), piece->content->path(0).filename());
 
 	/* Remove anything that comes before the start or after the end of the content */
 	if (time < piece->content->position()) {
@@ -1342,6 +1409,10 @@ Player::seek (DCPTime time, bool accurate)
 	_silent.set_position (time);
 
 	_last_video.clear ();
+
+	for (auto& state: _stream_states) {
+		state.second.last_push_end = boost::none;
+	}
 }
 
 
@@ -1547,6 +1618,15 @@ Player::atmos (weak_ptr<Piece> weak_piece, ContentAtmos data)
 		return;
 	}
 
+	auto ignore_atmos = std::find_if(
+		piece->ignore_atmos.begin(),
+		piece->ignore_atmos.end(),
+		[dcp_time](DCPTimePeriod period) { return period.contains(dcp_time); }
+	);
+	if (ignore_atmos != piece->ignore_atmos.end()) {
+		return;
+	}
+
 	Atmos (data.data, dcp_time, data.metadata);
 }