, _always_burn_subtitles (false)
, _fast (false)
, _play_referenced (false)
+ , _last_seek_accurate (true)
, _audio_merger (_film->audio_frame_rate())
{
_film_changed_connection = _film->Changed.connect (bind (&Player::film_changed, this, _1));
return a;
}
-list<shared_ptr<Piece> >
-Player::overlaps (DCPTime from, DCPTime to, boost::function<bool (Content *)> valid)
-{
- if (!_have_valid_pieces) {
- setup_pieces ();
- }
-
- list<shared_ptr<Piece> > overlaps;
- BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
- if (valid (i->content.get ()) && i->content->position() < to && i->content->end() > from) {
- overlaps.push_back (i);
- }
- }
-
- return overlaps;
-}
-
bool
Player::pass ()
{
}
}
- if (!earliest) {
- /* No more content; fill up with silent black */
- DCPTimePeriod remaining_video (DCPTime(), _playlist->length());
- if (_last_video_time) {
- remaining_video.from = _last_video_time.get();
- }
- fill_video (remaining_video);
- DCPTimePeriod remaining_audio (DCPTime(), _playlist->length());
- if (_last_audio_time) {
- remaining_audio.from = _last_audio_time.get();
+ if (earliest) {
+ earliest->done = earliest->decoder->pass ();
+ if (earliest->done && earliest->content->audio) {
+ /* Flush the Player audio system for this piece */
+ BOOST_FOREACH (AudioStreamPtr i, earliest->content->audio->streams()) {
+ audio_flush (earliest, i);
+ }
}
- fill_audio (remaining_audio);
- return true;
}
- earliest->done = earliest->decoder->pass ();
- if (earliest->done && earliest->content->audio) {
- /* Flush the Player audio system for this piece */
- BOOST_FOREACH (AudioStreamPtr i, earliest->content->audio->streams()) {
- audio_flush (earliest, i);
- }
+ DCPTime fill_towards = earliest ? earliest_content : _playlist->length();
+
+ optional<DCPTime> fill_from;
+ if (_last_video_time) {
+ /* No seek; fill towards the next thing that might happen (or the end of the playlist) */
+ fill_from = _last_video_time;
+ } else if (_last_seek_time && !_playlist->video_content_at(_last_seek_time.get())) {
+ /* Seek into an empty area; fill from the seek time */
+ fill_from = _last_seek_time;
+ }
+
+ if (fill_from && ((fill_towards - fill_from.get())) > one_video_frame()) {
+ emit_video (black_player_video_frame(), fill_from.get());
+ } else if (_playlist->length() == DCPTime()) {
+ emit_video (black_player_video_frame(), DCPTime());
+ }
+
+ if (!earliest && !fill_from) {
+ return true;
}
/* Emit any audio that is ready */
return false;
}
+/** Work out the subtitles that should be burnt into a video frame at a given time.
+ *  @param time DCP time of the frame.
+ *  @return all active burnt-in subtitles (image subtitles, and text subtitles
+ *  rendered to images) merged into a single PositionImage, or nothing if there
+ *  are no subtitles to burn in at this time.
+ */
+optional<PositionImage>
+Player::subtitles_for_frame (DCPTime time) const
+{
+	list<PositionImage> subtitles;
+
+	/* _active_subtitles.get_burnt() gives the subtitles active at `time' which
+	   should be burnt in; _always_burn_subtitles forces burn-in of all of them.
+	*/
+	BOOST_FOREACH (PlayerSubtitles i, _active_subtitles.get_burnt (time, _always_burn_subtitles)) {
+
+		/* Image subtitles */
+		list<PositionImage> c = transform_image_subtitles (i.image);
+		copy (c.begin(), c.end(), back_inserter (subtitles));
+
+		/* Text subtitles (rendered to an image) */
+		if (!i.text.empty ()) {
+			list<PositionImage> s = render_subtitles (i.text, i.fonts, _video_container_size, time);
+			copy (s.begin(), s.end(), back_inserter (subtitles));
+		}
+	}
+
+	if (subtitles.empty ()) {
+		return optional<PositionImage> ();
+	}
+
+	/* Flatten everything into one image */
+	return merge (subtitles);
+}
+
void
Player::video (weak_ptr<Piece> wp, ContentVideo video)
{
DCPTimePeriod const period (time, time + one_video_frame());
/* Discard if it's outside the content's period or if it's before the last accurate seek */
- if (time < piece->content->position() || time >= piece->content->end() || (_last_video_time && time < _last_video_time)) {
+ if (
+ time < piece->content->position() ||
+ time >= piece->content->end() ||
+ (_last_seek_time && _last_seek_accurate && time < _last_seek_time.get())) {
return;
}
- /* Get any subtitles */
-
- optional<PositionImage> subtitles;
-
- for (ActiveSubtitles::const_iterator i = _active_subtitles.begin(); i != _active_subtitles.end(); ++i) {
-
- shared_ptr<Piece> sub_piece = i->first.lock ();
- if (!sub_piece) {
- continue;
- }
-
- if (!sub_piece->content->subtitle->use() || (!_always_burn_subtitles && !piece->content->subtitle->burn())) {
- continue;
- }
-
- pair<PlayerSubtitles, DCPTime> sub = i->second;
-
- list<PositionImage> sub_images;
-
- /* Image subtitles */
- list<PositionImage> c = transform_image_subtitles (sub.first.image);
- copy (c.begin(), c.end(), back_inserter (sub_images));
-
- /* Text subtitles (rendered to an image) */
- if (!sub.first.text.empty ()) {
- list<PositionImage> s = render_subtitles (sub.first.text, sub.first.fonts, _video_container_size, time);
- copy (s.begin (), s.end (), back_inserter (sub_images));
- }
-
- if (!sub_images.empty ()) {
- subtitles = merge (sub_images);
- }
- }
-
- /* Fill gaps */
+ /* Fill gaps caused by (the hopefully rare event of) a decoder not emitting contiguous video. We have to do this here
+ as in the problematic case we are about to emit a frame which is not contiguous with the previous.
+ */
if (_last_video_time) {
- fill_video (DCPTimePeriod (_last_video_time.get(), time));
+ /* XXX: this may not work for 3D */
+ BOOST_FOREACH (DCPTimePeriod i, subtract(DCPTimePeriod (_last_video_time.get(), time), _no_video)) {
+ for (DCPTime j = i.from; j < i.to; j += one_video_frame()) {
+ if (_last_video) {
+ emit_video (shared_ptr<PlayerVideo> (new PlayerVideo (*_last_video)), j);
+ } else {
+ emit_video (black_player_video_frame(), j);
+ }
+ }
+ }
}
_last_video.reset (
)
);
- if (subtitles) {
- _last_video->set_subtitle (subtitles.get ());
- }
-
- Video (_last_video, time);
-
- _last_video_time = time + one_video_frame ();
+ emit_video (_last_video, time);
}
void
ps.image.push_back (subtitle.sub);
DCPTime from (content_time_to_dcp (piece, subtitle.from()));
- _active_subtitles[wp] = make_pair (ps, from);
+ _active_subtitles.add_from (wp, ps, from);
}
void
ps.add_fonts (piece->content->subtitle->fonts ());
}
- _active_subtitles[wp] = make_pair (ps, from);
+ _active_subtitles.add_from (wp, ps, from);
}
void
Player::subtitle_stop (weak_ptr<Piece> wp, ContentTime to)
{
- if (_active_subtitles.find (wp) == _active_subtitles.end ()) {
+ if (!_active_subtitles.have (wp)) {
return;
}
return;
}
+ DCPTime const dcp_to = content_time_to_dcp (piece, to);
+
+ pair<PlayerSubtitles, DCPTime> from = _active_subtitles.add_to (wp, dcp_to);
+
if (piece->content->subtitle->use() && !_always_burn_subtitles && !piece->content->subtitle->burn()) {
- Subtitle (_active_subtitles[wp].first, DCPTimePeriod (_active_subtitles[wp].second, content_time_to_dcp (piece, to)));
+ Subtitle (from.first, DCPTimePeriod (from.second, dcp_to));
}
-
- _active_subtitles.erase (wp);
}
void
}
}
- if (accurate) {
- _last_video_time = time;
- _last_audio_time = time;
- } else {
- _last_video_time = optional<DCPTime> ();
- _last_audio_time = optional<DCPTime> ();
- }
+ _last_video_time = optional<DCPTime> ();
+ _last_audio_time = optional<DCPTime> ();
+ _last_seek_time = time;
+ _last_seek_accurate = accurate;
}
shared_ptr<Resampler>
}
void
-Player::fill_video (DCPTimePeriod period)
-{
-	/* XXX: this may not work for 3D */
-	BOOST_FOREACH (DCPTimePeriod i, subtract(period, _no_video)) {
-		for (DCPTime j = i.from; j < i.to; j += one_video_frame()) {
-			if (_playlist->video_content_at(j) && _last_video) {
-				Video (shared_ptr<PlayerVideo> (new PlayerVideo (*_last_video)), j);
-			} else {
-				Video (black_player_video_frame(), j);
-			}
-		}
+/** Emit a frame to the Video signal, burning in any active subtitles and
+ *  updating our record of how far video has been emitted.
+ *  @param pv Frame to emit.
+ *  @param time DCP time at which to emit it.
+ */
+Player::emit_video (shared_ptr<PlayerVideo> pv, DCPTime time)
+{
+	/* Burn in whatever subtitles apply at this time */
+	optional<PositionImage> subtitles = subtitles_for_frame (time);
+	if (subtitles) {
+		pv->set_subtitle (subtitles.get ());
	}
+	Video (pv, time);
+	/* The next contiguous frame would start here */
+	_last_video_time = time + one_video_frame();
+	/* Stored subtitle state from before this time is no longer needed */
+	_active_subtitles.clear_before (time);
}
void