X-Git-Url: https://git.carlh.net/gitweb/?a=blobdiff_plain;f=src%2Flib%2Fplayer.cc;h=c484ecc9a2de03243ebf2bbd3e091615ee4751a0;hb=f9068dcbfbb09082e29e2a779ef1a7a2f6ee849e;hp=a44d916d814dc2f58551c2b80ecf2203511e72db;hpb=df16b07cb1802c8f4485d75db3b2dec67521e599;p=dcpomatic.git

diff --git a/src/lib/player.cc b/src/lib/player.cc
index a44d916d8..c484ecc9a 100644
--- a/src/lib/player.cc
+++ b/src/lib/player.cc
@@ -46,13 +46,19 @@
 #include "dcp_subtitle_decoder.h"
 #include "audio_processor.h"
 #include "playlist.h"
+#include "referenced_reel_asset.h"
+#include
+#include
+#include
+#include
 #include
 #include
 #include
+#include
 
 #include "i18n.h"
 
-#define LOG_GENERAL(...) _film->log()->log (String::compose (__VA_ARGS__), Log::TYPE_GENERAL);
+#define LOG_GENERAL(...) _film->log()->log (String::compose (__VA_ARGS__), LogEntry::TYPE_GENERAL);
 
 using std::list;
 using std::cout;
@@ -74,7 +80,10 @@ Player::Player (shared_ptr<const Film> film, shared_ptr<const Playlist> playlist
 	, _playlist (playlist)
 	, _have_valid_pieces (false)
 	, _ignore_video (false)
+	, _ignore_audio (false)
 	, _always_burn_subtitles (false)
+	, _fast (false)
+	, _play_referenced (false)
 {
 	_film_changed_connection = _film->Changed.connect (bind (&Player::film_changed, this, _1));
 	_playlist_changed_connection = _playlist->Changed.connect (bind (&Player::playlist_changed, this));
@@ -99,40 +108,16 @@ Player::setup_pieces ()
 		shared_ptr<Decoder> decoder;
 		optional<FrameRateChange> frc;
 
-		/* Work out a FrameRateChange for the best overlap video for this content, in case we need it below */
-		DCPTime best_overlap_t;
-		shared_ptr<VideoContent> best_overlap;
-		BOOST_FOREACH (shared_ptr<Content> j, _playlist->content ()) {
-			shared_ptr<VideoContent> vc = dynamic_pointer_cast<VideoContent> (j);
-			if (!vc) {
-				continue;
-			}
-
-			DCPTime const overlap = max (vc->position(), i->position()) - min (vc->end(), i->end());
-			if (overlap > best_overlap_t) {
-				best_overlap = vc;
-				best_overlap_t = overlap;
-			}
-		}
-
-		optional<FrameRateChange> best_overlap_frc;
-		if (best_overlap) {
-			best_overlap_frc = FrameRateChange (best_overlap->video_frame_rate(), _film->video_frame_rate ());
-		} else {
-			/* No video overlap; e.g. if the DCP is just audio */
-			best_overlap_frc = FrameRateChange (_film->video_frame_rate(), _film->video_frame_rate ());
-		}
-
 		/* FFmpeg */
 		shared_ptr<FFmpegContent> fc = dynamic_pointer_cast<FFmpegContent> (i);
 		if (fc) {
-			decoder.reset (new FFmpegDecoder (fc, _film->log()));
+			decoder.reset (new FFmpegDecoder (fc, _film->log(), _fast));
 			frc = FrameRateChange (fc->video_frame_rate(), _film->video_frame_rate());
 		}
 
 		shared_ptr<DCPContent> dc = dynamic_pointer_cast<DCPContent> (i);
 		if (dc) {
-			decoder.reset (new DCPDecoder (dc));
+			decoder.reset (new DCPDecoder (dc, _fast));
 			frc = FrameRateChange (dc->video_frame_rate(), _film->video_frame_rate());
 		}
 
@@ -157,22 +142,49 @@ Player::setup_pieces ()
 		/* SndfileContent */
 		shared_ptr<SndfileContent> sc = dynamic_pointer_cast<SndfileContent> (i);
 		if (sc) {
-			decoder.reset (new SndfileDecoder (sc));
-			frc = best_overlap_frc;
+			decoder.reset (new SndfileDecoder (sc, _fast));
+
+			/* Work out a FrameRateChange for the best overlap video for this content */
+			DCPTime best_overlap_t;
+			shared_ptr<VideoContent> best_overlap;
+			BOOST_FOREACH (shared_ptr<Content> j, _playlist->content ()) {
+				shared_ptr<VideoContent> vc = dynamic_pointer_cast<VideoContent> (j);
+				if (!vc) {
+					continue;
+				}
+
+				DCPTime const overlap = min (vc->end(), i->end()) - max (vc->position(), i->position());
+				if (overlap > best_overlap_t) {
+					best_overlap = vc;
+					best_overlap_t = overlap;
+				}
+			}
+
+			if (best_overlap) {
+				frc = FrameRateChange (best_overlap->video_frame_rate(), _film->video_frame_rate ());
+			} else {
+				/* No video overlap; e.g. if the DCP is just audio */
+				frc = FrameRateChange (_film->video_frame_rate(), _film->video_frame_rate ());
+			}
 		}
 
+		/* It's questionable whether subtitle content should have a video frame rate; perhaps
+		   it should be assumed that any subtitle content has been prepared at the same rate
+		   as simultaneous video content (like we do with audio).
+		*/
+
 		/* SubRipContent */
 		shared_ptr<SubRipContent> rc = dynamic_pointer_cast<SubRipContent> (i);
 		if (rc) {
 			decoder.reset (new SubRipDecoder (rc));
-			frc = best_overlap_frc;
+			frc = FrameRateChange (rc->subtitle_video_frame_rate(), _film->video_frame_rate());
 		}
 
 		/* DCPSubtitleContent */
 		shared_ptr<DCPSubtitleContent> dsc = dynamic_pointer_cast<DCPSubtitleContent> (i);
 		if (dsc) {
 			decoder.reset (new DCPSubtitleDecoder (dsc));
-			frc = best_overlap_frc;
+			frc = FrameRateChange (dsc->subtitle_video_frame_rate(), _film->video_frame_rate());
 		}
 
 		shared_ptr<VideoDecoder> vd = dynamic_pointer_cast<VideoDecoder> (decoder);
@@ -180,6 +192,11 @@ Player::setup_pieces ()
 			vd->set_ignore_video ();
 		}
 
+		shared_ptr<AudioDecoder> ad = dynamic_pointer_cast<AudioDecoder> (decoder);
+		if (ad && _ignore_audio) {
+			ad->set_ignore_audio ();
+		}
+
 		_pieces.push_back (shared_ptr<Piece> (new Piece (i, decoder, frc.get ())));
 	}
 
@@ -213,11 +230,13 @@ Player::playlist_content_changed (weak_ptr<Content> w, int property, bool freque
 		property == SubtitleContentProperty::SUBTITLE_Y_OFFSET ||
 		property == SubtitleContentProperty::SUBTITLE_X_SCALE ||
 		property == SubtitleContentProperty::SUBTITLE_Y_SCALE ||
+		property == SubtitleContentProperty::FONTS ||
 		property == VideoContentProperty::VIDEO_CROP ||
 		property == VideoContentProperty::VIDEO_SCALE ||
 		property == VideoContentProperty::VIDEO_FRAME_RATE ||
 		property == VideoContentProperty::VIDEO_FADE_IN ||
-		property == VideoContentProperty::VIDEO_FADE_OUT
+		property == VideoContentProperty::VIDEO_FADE_OUT ||
+		property == VideoContentProperty::COLOUR_CONVERSION
 		) {
 
 		Changed (frequent);
@@ -248,7 +267,13 @@ Player::film_changed (Film::Property p)
 	   last time we were run.
 	*/
 
-	if (p == Film::CONTAINER || p == Film::VIDEO_FRAME_RATE) {
+	if (p == Film::CONTAINER) {
+		Changed (false);
+	} else if (p == Film::VIDEO_FRAME_RATE) {
+		/* Pieces contain a FrameRateChange which contains the DCP frame rate,
+		   so we need new pieces here.
+		*/
+		_have_valid_pieces = false;
 		Changed (false);
 	} else if (p == Film::AUDIO_PROCESSOR) {
 		if (_film->audio_processor ()) {
@@ -291,8 +316,8 @@ Player::transform_image_subtitles (list<ImageSubtitle> subs) const
 					true
 					),
 				Position<int> (
-					rint (_video_container_size.width * i->rectangle.x),
-					rint (_video_container_size.height * i->rectangle.y)
+					lrint (_video_container_size.width * i->rectangle.x),
+					lrint (_video_container_size.height * i->rectangle.y)
 					)
 				)
 			);
@@ -319,7 +344,10 @@ Player::black_player_video_frame (DCPTime time) const
 		);
 }
 
-/** @return All PlayerVideos at the given time (there may be two frames for 3D) */
+/** @return All PlayerVideos at the given time.  There may be none if the content
+ *  at `time' is a DCP which we are passing through (i.e. referring to by reference)
+ *  or 2 if we have 3D.
+ */
 list<shared_ptr<PlayerVideo> >
 Player::get_video (DCPTime time, bool accurate)
 {
@@ -339,7 +367,7 @@ Player::get_video (DCPTime time, bool accurate)
 
 	/* Text subtitles (rendered to an image) */
 	if (!ps.text.empty ()) {
-		list<PositionImage> s = render_subtitles (ps.text, _video_container_size);
+		list<PositionImage> s = render_subtitles (ps.text, ps.fonts, _video_container_size);
 		copy (s.begin (), s.end (), back_inserter (sub_images));
 	}
 
@@ -348,7 +376,7 @@ Player::get_video (DCPTime time, bool accurate)
 		subtitles = merge (sub_images);
 	}
 
-	/* Find video */
+	/* Find pieces containing video which is happening now */
 
 	list<shared_ptr<Piece> > ov = overlaps<VideoContent> (
 		time,
@@ -361,56 +389,59 @@ Player::get_video (DCPTime time, bool accurate)
 		/* No video content at this time */
 		pvf.push_back (black_player_video_frame (time));
 	} else {
-		/* Decide which pieces of content to use */
-		list<shared_ptr<Piece> > ov_to_use;
-
-		/* Always use the last one */
-		list<shared_ptr<Piece> >::reverse_iterator i = ov.rbegin ();
-		ov_to_use.push_back (*i);
-		VideoFrameType const first_type = dynamic_pointer_cast<VideoContent> ((*i)->content)->video_frame_type ();
-
-		++i;
-		if (i != ov.rend ()) {
-			shared_ptr<VideoContent> vc = dynamic_pointer_cast<VideoContent> ((*i)->content);
-			/* Use the second to last if it's the other part of a 3D content pair */
-			if (
-				(first_type == VIDEO_FRAME_TYPE_3D_LEFT && vc->video_frame_type() == VIDEO_FRAME_TYPE_3D_RIGHT) ||
-				(first_type == VIDEO_FRAME_TYPE_3D_RIGHT && vc->video_frame_type() == VIDEO_FRAME_TYPE_3D_LEFT)
-				) {
-				/* Other part of a pair of 3D content */
-				ov_to_use.push_back (*i);
-			}
-		}
+		/* Some video content at this time */
+		shared_ptr<Piece> last = *(ov.rbegin ());
+		VideoFrameType const last_type = dynamic_pointer_cast<VideoContent> (last->content)->video_frame_type ();
+
+		/* Get video from appropriate piece(s) */
+		BOOST_FOREACH (shared_ptr<Piece> piece, ov) {
 
-		BOOST_FOREACH (shared_ptr<Piece> piece, ov_to_use) {
 			shared_ptr<VideoDecoder> decoder = dynamic_pointer_cast<VideoDecoder> (piece->decoder);
 			DCPOMATIC_ASSERT (decoder);
 			shared_ptr<VideoContent> video_content = dynamic_pointer_cast<VideoContent> (piece->content);
 			DCPOMATIC_ASSERT (video_content);
 
-			list<ContentVideo> content_video = decoder->get_video (dcp_to_content_video (piece, time), accurate);
-			if (content_video.empty ()) {
-				pvf.push_back (black_player_video_frame (time));
-			} else {
-				dcp::Size image_size = video_content->scale().size (video_content, _video_container_size, _film->frame_size ());
-
-				for (list<ContentVideo>::const_iterator i = content_video.begin(); i != content_video.end(); ++i) {
-					pvf.push_back (
-						shared_ptr<PlayerVideo> (
-							new PlayerVideo (
-								i->image,
-								content_video_to_dcp (piece, i->frame),
-								video_content->crop (),
-								video_content->fade (i->frame),
-								image_size,
-								_video_container_size,
-								i->eyes,
-								i->part,
-								video_content->colour_conversion ()
+			shared_ptr<DCPContent> dcp_content = dynamic_pointer_cast<DCPContent> (video_content);
+			if (dcp_content && dcp_content->reference_video () && !_play_referenced) {
+				continue;
+			}
+
+			bool const use =
+				/* always use the last video */
+				piece == last ||
+				/* with a corresponding L/R eye if appropriate */
+				(last_type == VIDEO_FRAME_TYPE_3D_LEFT && video_content->video_frame_type() == VIDEO_FRAME_TYPE_3D_RIGHT) ||
+				(last_type == VIDEO_FRAME_TYPE_3D_RIGHT && video_content->video_frame_type() == VIDEO_FRAME_TYPE_3D_LEFT);
+
+			if (use) {
+				/* We want to use this piece */
+				list<ContentVideo> content_video = decoder->get_video (dcp_to_content_video (piece, time), accurate);
+				if (content_video.empty ()) {
+					pvf.push_back (black_player_video_frame (time));
+				} else {
+					dcp::Size image_size = video_content->scale().size (video_content, _video_container_size, _film->frame_size ());
+
+					for (list<ContentVideo>::const_iterator i = content_video.begin(); i != content_video.end(); ++i) {
+						pvf.push_back (
+							shared_ptr<PlayerVideo> (
+								new PlayerVideo (
+									i->image,
+									content_video_to_dcp (piece, i->frame),
+									video_content->crop (),
+									video_content->fade (i->frame),
+									image_size,
+									_video_container_size,
+									i->eyes,
+									i->part,
+									video_content->colour_conversion ()
+									)
 								)
-							)
-						);
+							);
+					}
 				}
+			} else {
+				/* Discard unused video */
+				decoder->get_video (dcp_to_content_video (piece, time), accurate);
 			}
 		}
 	}
@@ -424,6 +455,7 @@ Player::get_video (DCPTime time, bool accurate)
 	return pvf;
 }
 
+/** @return Audio data or 0 if the only audio data here is referenced DCP data */
 shared_ptr<AudioBuffers>
 Player::get_audio (DCPTime time, DCPTime length, bool accurate)
 {
@@ -431,7 +463,7 @@ Player::get_audio (DCPTime time, DCPTime length, bool accurate)
 		setup_pieces ();
 	}
 
-	Frame const length_frames = length.frames (_film->audio_frame_rate ());
+	Frame const length_frames = length.frames_round (_film->audio_frame_rate ());
 
 	shared_ptr<AudioBuffers> audio (new AudioBuffers (_film->audio_channels(), length_frames));
 	audio->make_silent ();
@@ -441,11 +473,25 @@ Player::get_audio (DCPTime time, DCPTime length, bool accurate)
 		return audio;
 	}
 
-	for (list<shared_ptr<Piece> >::iterator i = ov.begin(); i != ov.end(); ++i) {
+	bool all_referenced = true;
+	BOOST_FOREACH (shared_ptr<Piece> i, ov) {
+		shared_ptr<AudioContent> audio_content = dynamic_pointer_cast<AudioContent> (i->content);
+		shared_ptr<DCPContent> dcp_content = dynamic_pointer_cast<DCPContent> (i->content);
+		if (audio_content && (!dcp_content || !dcp_content->reference_audio ())) {
+			/* There is audio content which is not from a DCP or not set to be referenced */
+			all_referenced = false;
+		}
+	}
+
+	if (all_referenced && !_play_referenced) {
+		return shared_ptr<AudioBuffers> ();
+	}
 
-		shared_ptr<AudioContent> content = dynamic_pointer_cast<AudioContent> ((*i)->content);
+	BOOST_FOREACH (shared_ptr<Piece> i, ov) {
+
+		shared_ptr<AudioContent> content = dynamic_pointer_cast<AudioContent> (i->content);
 		DCPOMATIC_ASSERT (content);
-		shared_ptr<AudioDecoder> decoder = dynamic_pointer_cast<AudioDecoder> ((*i)->decoder);
+		shared_ptr<AudioDecoder> decoder = dynamic_pointer_cast<AudioDecoder> (i->decoder);
 		DCPOMATIC_ASSERT (decoder);
 
 		/* The time that we should request from the content */
@@ -457,14 +503,14 @@ Player::get_audio (DCPTime time, DCPTime length, bool accurate)
 			   the stuff we get back.
 			*/
 			offset = -request;
-			request_frames += request.frames (_film->audio_frame_rate ());
+			request_frames += request.frames_round (_film->audio_frame_rate ());
 			if (request_frames < 0) {
 				request_frames = 0;
 			}
 			request = DCPTime ();
 		}
 
-		Frame const content_frame = dcp_to_resampled_audio (*i, request);
+		Frame const content_frame = dcp_to_resampled_audio (i, request);
 
 		BOOST_FOREACH (AudioStreamPtr j, content->audio_streams ()) {
 
@@ -501,7 +547,7 @@ Player::get_audio (DCPTime time, DCPTime length, bool accurate)
 			}
 
 			if (_audio_processor) {
-				dcp_mapped = _audio_processor->run (dcp_mapped);
+				dcp_mapped = _audio_processor->run (dcp_mapped, _film->audio_channels ());
 			}
 
 			all.audio = dcp_mapped;
@@ -509,7 +555,7 @@ Player::get_audio (DCPTime time, DCPTime length, bool accurate)
 			audio->accumulate_frames (
 				all.audio.get(),
 				content_frame - all.frame,
-				offset.frames (_film->audio_frame_rate()),
+				offset.frames_round (_film->audio_frame_rate()),
 				min (Frame (all.audio->frames()), request_frames)
 				);
 		}
@@ -524,23 +570,34 @@ Player::dcp_to_content_video (shared_ptr<const Piece> piece, DCPTime t) const
 	shared_ptr<const VideoContent> vc = dynamic_pointer_cast<const VideoContent> (piece->content);
 	DCPTime s = t - piece->content->position ();
 	s = min (piece->content->length_after_trim(), s);
-	return max (ContentTime (), ContentTime (s, piece->frc) + piece->content->trim_start ()).frames (vc->video_frame_rate ());
+	s = max (DCPTime(), s + DCPTime (piece->content->trim_start(), piece->frc));
+
+	/* It might seem more logical here to convert s to a ContentTime (using the FrameRateChange)
+	   then convert that ContentTime to frames at the content's rate.  However this fails for
+	   situations like content at 29.9978733fps, DCP at 30fps.  The accuracy of the Time type is not
+	   enough to distinguish between the two with low values of time (e.g. 3200 in Time units).
+
+	   Instead we convert the DCPTime using the DCP video rate then account for any skip/repeat.
+	*/
+	return s.frames_floor (piece->frc.dcp) / piece->frc.factor ();
 }
 
 DCPTime
 Player::content_video_to_dcp (shared_ptr<const Piece> piece, Frame f) const
 {
 	shared_ptr<const VideoContent> vc = dynamic_pointer_cast<const VideoContent> (piece->content);
-	ContentTime const c = ContentTime::from_frames (f, vc->video_frame_rate ()) - piece->content->trim_start ();
-	return max (DCPTime (), DCPTime (c, piece->frc) + piece->content->position ());
+	/* See comment in dcp_to_content_video */
+	DCPTime const d = DCPTime::from_frames (f * piece->frc.factor(), piece->frc.dcp) - DCPTime (piece->content->trim_start (), piece->frc);
+	return max (DCPTime (), d + piece->content->position ());
 }
 
 Frame
 Player::dcp_to_resampled_audio (shared_ptr<const Piece> piece, DCPTime t) const
 {
-	DCPTime s = t - piece->content->position () + DCPTime (piece->content->trim_start (), piece->frc);
-	s = max (DCPTime (), min (piece->content->length_after_trim(), s));
-	return s.frames (_film->audio_frame_rate ());
+	DCPTime s = t - piece->content->position ();
+	s = min (piece->content->length_after_trim(), s);
+	/* See notes in dcp_to_content_video */
+	return max (DCPTime (), DCPTime (piece->content->trim_start (), piece->frc) + s).frames_floor (_film->audio_frame_rate ());
 }
 
 ContentTime
@@ -551,6 +608,12 @@ Player::dcp_to_content_subtitle (shared_ptr<const Piece> piece, DCPTime t) const
 	return max (ContentTime (), ContentTime (s, piece->frc) + piece->content->trim_start());
 }
 
+DCPTime
+Player::content_subtitle_to_dcp (shared_ptr<const Piece> piece, ContentTime t) const
+{
+	return max (DCPTime (), DCPTime (t - piece->content->trim_start(), piece->frc) + piece->content->position());
+}
+
 /** @param burnt true to return only subtitles to be burnt, false to return only
  *  subtitles that should not be burnt.  This parameter will be ignored if
  *  _always_burn_subtitles is true; in this case, all subtitles will be returned.
@@ -568,6 +631,11 @@ Player::get_subtitles (DCPTime time, DCPTime length, bool starting, bool burnt) continue; } + shared_ptr dcp_content = dynamic_pointer_cast (subtitle_content); + if (dcp_content && dcp_content->reference_subtitle () && !_play_referenced) { + continue; + } + shared_ptr subtitle_decoder = dynamic_pointer_cast ((*j)->decoder); ContentTime const from = dcp_to_content_subtitle (*j, time); /* XXX: this video_frame_rate() should be the rate that the subtitle content has been prepared for */ @@ -593,7 +661,7 @@ Player::get_subtitles (DCPTime time, DCPTime length, bool starting, bool burnt) list text = subtitle_decoder->get_text_subtitles (ContentTimePeriod (from, to), starting); BOOST_FOREACH (ContentTextSubtitle& ts, text) { - BOOST_FOREACH (dcp::SubtitleString& s, ts.subs) { + BOOST_FOREACH (dcp::SubtitleString s, ts.subs) { s.set_h_position (s.h_position() + subtitle_content->subtitle_x_offset ()); s.set_v_position (s.v_position() + subtitle_content->subtitle_y_offset ()); float const xs = subtitle_content->subtitle_x_scale(); @@ -603,7 +671,10 @@ Player::get_subtitles (DCPTime time, DCPTime length, bool starting, bool burnt) if (fabs (1.0 - xs / ys) > dcp::ASPECT_ADJUST_EPSILON) { s.set_aspect_adjust (xs / ys); } + s.set_in (dcp::Time(content_subtitle_to_dcp (*j, ts.period().from).seconds())); + s.set_out (dcp::Time(content_subtitle_to_dcp (*j, ts.period().to).seconds())); ps.text.push_back (s); + ps.add_fonts (subtitle_content->fonts ()); } } } @@ -640,6 +711,13 @@ Player::set_ignore_video () _ignore_video = true; } +/** Set this player never to produce any audio data */ +void +Player::set_ignore_audio () +{ + _ignore_audio = true; +} + /** Set whether or not this player should always burn text subtitles into the image, * regardless of the content settings. * @param burn true to always burn subtitles, false to obey content settings. @@ -649,3 +727,66 @@ Player::set_always_burn_subtitles (bool burn) { _always_burn_subtitles = burn; } + +void +Player::set_fast () +{ + _fast = true; + _have_valid_pieces = false; +} + +void +Player::set_play_referenced () +{ + _play_referenced = true; + _have_valid_pieces = false; +} + +list +Player::get_reel_assets () +{ + list a; + + BOOST_FOREACH (shared_ptr i, _playlist->content ()) { + shared_ptr j = dynamic_pointer_cast (i); + if (!j) { + continue; + } + DCPDecoder decoder (j, false); + int64_t offset = 0; + BOOST_FOREACH (shared_ptr k, decoder.reels()) { + DCPTime const from = i->position() + DCPTime::from_frames (offset, _film->video_frame_rate()); + if (j->reference_video ()) { + a.push_back ( + ReferencedReelAsset ( + k->main_picture (), + DCPTimePeriod (from, from + DCPTime::from_frames (k->main_picture()->duration(), _film->video_frame_rate())) + ) + ); + } + + if (j->reference_audio ()) { + a.push_back ( + ReferencedReelAsset ( + k->main_sound (), + DCPTimePeriod (from, from + DCPTime::from_frames (k->main_sound()->duration(), _film->video_frame_rate())) + ) + ); + } + + if (j->reference_subtitle ()) { + a.push_back ( + ReferencedReelAsset ( + k->main_subtitle (), + DCPTimePeriod (from, from + DCPTime::from_frames (k->main_subtitle()->duration(), _film->video_frame_rate())) + ) + ); + } + + /* Assume that main picture duration is the length of the reel */ + offset += k->main_picture()->duration (); + } + } + + return a; +}