More string -> boost::filesystem::path.
[dcpomatic.git] / src / lib / player.cc
index bf5bc6489eb1e5f6c61009ff9cd0769c5e43cd37..f792655586362cac6b637e91358be4f2dd0ebfd6 100644
--- a/src/lib/player.cc
+++ b/src/lib/player.cc
@@ -48,22 +48,20 @@ using boost::shared_ptr;
 using boost::weak_ptr;
 using boost::dynamic_pointer_cast;
 
-//#define DEBUG_PLAYER 1
-
 class Piece
 {
 public:
        Piece (shared_ptr<Content> c)
                : content (c)
-               , video_position (c->start ())
-               , audio_position (c->start ())
+               , video_position (c->position ())
+               , audio_position (c->position ())
        {}
        
        Piece (shared_ptr<Content> c, shared_ptr<Decoder> d)
                : content (c)
                , decoder (d)
-               , video_position (c->start ())
-               , audio_position (c->start ())
+               , video_position (c->position ())
+               , audio_position (c->position ())
        {}
        
        shared_ptr<Content> content;
@@ -72,23 +70,6 @@ public:
        Time audio_position;
 };
 
-#ifdef DEBUG_PLAYER
-std::ostream& operator<<(std::ostream& s, Piece const & p)
-{
-       if (dynamic_pointer_cast<FFmpegContent> (p.content)) {
-               s << "\tffmpeg     ";
-       } else if (dynamic_pointer_cast<StillImageContent> (p.content)) {
-               s << "\tstill image";
-       } else if (dynamic_pointer_cast<SndfileContent> (p.content)) {
-               s << "\tsndfile    ";
-       }
-       
-       s << " at " << p.content->start() << " until " << p.content->end();
-       
-       return s;
-}
-#endif 
-
 Player::Player (shared_ptr<const Film> f, shared_ptr<const Playlist> p)
        : _film (f)
        , _playlist (p)
@@ -100,10 +81,10 @@ Player::Player (shared_ptr<const Film> f, shared_ptr<const Playlist> p)
        , _audio_merger (f->audio_channels(), bind (&Film::time_to_audio_frames, f.get(), _1), bind (&Film::audio_frames_to_time, f.get(), _1))
        , _last_emit_was_black (false)
 {
-       _playlist->Changed.connect (bind (&Player::playlist_changed, this));
-       _playlist->ContentChanged.connect (bind (&Player::content_changed, this, _1, _2, _3));
-       _film->Changed.connect (bind (&Player::film_changed, this, _1));
-       set_video_container_size (_film->container()->size (_film->full_frame ()));
+       _playlist_changed_connection = _playlist->Changed.connect (bind (&Player::playlist_changed, this));
+       _playlist_content_changed_connection = _playlist->ContentChanged.connect (bind (&Player::content_changed, this, _1, _2, _3));
+       _film_changed_connection = _film->Changed.connect (bind (&Player::film_changed, this, _1));
+       set_video_container_size (fit_ratio_within (_film->container()->ratio (), _film->full_frame ()));
 }
 
 void
@@ -126,10 +107,6 @@ Player::pass ()
                _have_valid_pieces = true;
        }
 
-#ifdef DEBUG_PLAYER
-       cout << "= PASS\n";
-#endif 
-
        Time earliest_t = TIME_MAX;
        shared_ptr<Piece> earliest;
        enum {
@@ -160,10 +137,6 @@ Player::pass ()
        }
 
        if (!earliest) {
-#ifdef DEBUG_PLAYER
-               cout << "no earliest piece.\n";
-#endif         
-               
                flush ();
                return true;
        }
@@ -171,28 +144,16 @@ Player::pass ()
        switch (type) {
        case VIDEO:
                if (earliest_t > _video_position) {
-#ifdef DEBUG_PLAYER
-                       cout << "no video here; emitting black frame (earliest=" << earliest_t << ", video_position=" << _video_position << ").\n";
-#endif
                        emit_black ();
                } else {
-#ifdef DEBUG_PLAYER
-                       cout << "Pass " << *earliest << "\n";
-#endif                 
                        earliest->decoder->pass ();
                }
                break;
 
        case AUDIO:
                if (earliest_t > _audio_position) {
-#ifdef DEBUG_PLAYER
-                       cout << "no audio here; emitting silence.\n";
-#endif
                        emit_silence (_film->time_to_audio_frames (earliest_t - _audio_position));
                } else {
-#ifdef DEBUG_PLAYER
-                       cout << "Pass " << *earliest << "\n";
-#endif
                        earliest->decoder->pass ();
 
                        if (earliest->decoder->done()) {
@@ -206,35 +167,36 @@ Player::pass ()
                                        }
                                }
                        }
-
-                       
                }
+               break;
+       }
 
-               Time done_up_to = TIME_MAX;
+       if (_audio) {
+               Time audio_done_up_to = TIME_MAX;
                for (list<shared_ptr<Piece> >::iterator i = _pieces.begin(); i != _pieces.end(); ++i) {
-                       if (dynamic_pointer_cast<AudioContent> ((*i)->content)) {
-                               done_up_to = min (done_up_to, (*i)->audio_position);
+                       if (dynamic_pointer_cast<AudioDecoder> ((*i)->decoder)) {
+                               audio_done_up_to = min (audio_done_up_to, (*i)->audio_position);
                        }
                }
 
-               TimedAudioBuffers<Time> tb = _audio_merger.pull (done_up_to);
+               TimedAudioBuffers<Time> tb = _audio_merger.pull (audio_done_up_to);
                Audio (tb.audio, tb.time);
                _audio_position += _film->audio_frames_to_time (tb.audio->frames ());
-               break;
        }
-
-       
-
-#ifdef DEBUG_PLAYER
-       cout << "\tpost pass " << _video_position << " " << _audio_position << "\n";
-#endif 
-
+               
        return false;
 }
 
 void
 Player::process_video (weak_ptr<Piece> weak_piece, shared_ptr<const Image> image, Eyes eyes, bool same, VideoContent::Frame frame)
 {
+       /* Keep a note of what came in so that we can repeat it if required */
+       _last_process_video.weak_piece = weak_piece;
+       _last_process_video.image = image;
+       _last_process_video.eyes = eyes;
+       _last_process_video.same = same;
+       _last_process_video.frame = frame;
+       
        shared_ptr<Piece> piece = weak_piece.lock ();
        if (!piece) {
                return;
@@ -248,14 +210,23 @@ Player::process_video (weak_ptr<Piece> weak_piece, shared_ptr<const Image> image
                return;
        }
 
-       shared_ptr<Image> work_image = image->crop (content->crop(), true);
+       Time const relative_time = (frame * frc.factor() * TIME_HZ / _film->video_frame_rate());
+       if (content->trimmed (relative_time)) {
+               return;
+       }
+
+       /* Convert to RGB first, as FFmpeg doesn't seem to like handling YUV images with odd widths */
+       shared_ptr<Image> work_image = image->scale (image->size (), _film->scaler(), PIX_FMT_RGB24, true);
 
-       libdcp::Size const image_size = content->ratio()->size (_video_container_size);
-       
-       work_image = work_image->scale_and_convert_to_rgb (image_size, _film->scaler(), true);
+       work_image = work_image->crop (content->crop(), true);
 
-       Time time = content->start() + (frame * frc.factor() * TIME_HZ / _film->video_frame_rate());
+       float const ratio = content->ratio() ? content->ratio()->ratio() : content->video_size_after_crop().ratio();
+       libdcp::Size image_size = fit_ratio_within (ratio, _video_container_size);
        
+       work_image = work_image->scale (image_size, _film->scaler(), PIX_FMT_RGB24, true);
+
+       Time time = content->position() + relative_time - content->trim_start ();
+           
        if (_film->with_subtitles () && _out_subtitle.image && time >= _out_subtitle.from && time <= _out_subtitle.to) {
                work_image->alpha_blend (_out_subtitle.image, _out_subtitle.position);
        }
@@ -273,11 +244,11 @@ Player::process_video (weak_ptr<Piece> weak_piece, shared_ptr<const Image> image
        _last_video = piece->content;
 #endif
 
-       Video (work_image, eyes, same, time);
+       Video (work_image, eyes, content->colour_conversion(), same, time);
        time += TIME_HZ / _film->video_frame_rate();
 
        if (frc.repeat) {
-               Video (work_image, eyes, true, time);
+               Video (work_image, eyes, content->colour_conversion(), true, time);
                time += TIME_HZ / _film->video_frame_rate();
        }
 
@@ -297,6 +268,13 @@ Player::process_audio (weak_ptr<Piece> weak_piece, shared_ptr<const AudioBuffers
        shared_ptr<AudioContent> content = dynamic_pointer_cast<AudioContent> (piece->content);
        assert (content);
 
+       /* Gain */
+       if (content->audio_gain() != 0) {
+               shared_ptr<AudioBuffers> gain (new AudioBuffers (audio));
+               gain->apply_gain (content->audio_gain ());
+               audio = gain;
+       }
+
        /* Resample */
        if (content->content_audio_frame_rate() != content->output_audio_frame_rate()) {
                shared_ptr<Resampler> r = resampler (content, true);
@@ -305,6 +283,14 @@ Player::process_audio (weak_ptr<Piece> weak_piece, shared_ptr<const AudioBuffers
                frame = ro.second;
        }
        
+       Time const relative_time = _film->audio_frames_to_time (frame);
+
+       if (content->trimmed (relative_time)) {
+               return;
+       }
+
+       Time time = content->position() + (content->audio_delay() * TIME_HZ / 1000) + relative_time;
+       
        /* Remap channels */
        shared_ptr<AudioBuffers> dcp_mapped (new AudioBuffers (_film->audio_channels(), audio->frames()));
        dcp_mapped->make_silent ();
@@ -317,10 +303,6 @@ Player::process_audio (weak_ptr<Piece> weak_piece, shared_ptr<const AudioBuffers
 
        audio = dcp_mapped;
 
-       Time time = content->start()
-               + _film->audio_frames_to_time (frame)
-               + (content->audio_delay() * TIME_HZ / 1000);
-
        /* We must cut off anything that comes before the start of all time */
        if (time < 0) {
                int const frames = - time * _film->audio_frame_rate() / TIME_HZ;
@@ -380,18 +362,18 @@ Player::seek (Time t, bool accurate)
                        continue;
                }
                
-               Time s = t - vc->start ();
+               Time s = t - vc->position ();
                s = max (static_cast<Time> (0), s);
-               s = min (vc->length(), s);
+               s = min (vc->length_after_trim(), s);
 
-               (*i)->video_position = (*i)->audio_position = vc->start() + s;
+               (*i)->video_position = (*i)->audio_position = vc->position() + s;
 
                FrameRateConversion frc (vc->video_frame_rate(), _film->video_frame_rate());
                /* Here we are converting from time (in the DCP) to a frame number in the content.
                   Hence we need to use the DCP's frame rate and the double/skip correction, not
                   the source's rate.
                */
-               VideoContent::Frame f = s * _film->video_frame_rate() / (frc.factor() * TIME_HZ);
+               VideoContent::Frame f = (s + vc->trim_start ()) * _film->video_frame_rate() / (frc.factor() * TIME_HZ);
                dynamic_pointer_cast<VideoDecoder>((*i)->decoder)->seek (f, accurate);
        }
 
@@ -469,13 +451,6 @@ Player::setup_pieces ()
 
                _pieces.push_back (piece);
        }
-
-#ifdef DEBUG_PLAYER
-       cout << "=== Player setup:\n";
-       for (list<shared_ptr<Piece> >::iterator i = _pieces.begin(); i != _pieces.end(); ++i) {
-               cout << *(i->get()) << "\n";
-       }
-#endif 
 }
 
 void
@@ -487,17 +462,23 @@ Player::content_changed (weak_ptr<Content> w, int property, bool frequent)
        }
 
        if (
-               property == ContentProperty::START || property == ContentProperty::LENGTH ||
-               property == VideoContentProperty::VIDEO_CROP || property == VideoContentProperty::VIDEO_RATIO
+               property == ContentProperty::POSITION || property == ContentProperty::LENGTH ||
+               property == ContentProperty::TRIM_START || property == ContentProperty::TRIM_END
                ) {
                
                _have_valid_pieces = false;
                Changed (frequent);
 
        } else if (property == SubtitleContentProperty::SUBTITLE_OFFSET || property == SubtitleContentProperty::SUBTITLE_SCALE) {
+
                update_subtitle ();
                Changed (frequent);
-       } else if (property == VideoContentProperty::VIDEO_FRAME_TYPE) {
+
+       } else if (
+               property == VideoContentProperty::VIDEO_FRAME_TYPE || property == VideoContentProperty::VIDEO_CROP ||
+               property == VideoContentProperty::VIDEO_RATIO
+               ) {
+               
                Changed (frequent);
        }
 }
@@ -541,7 +522,7 @@ Player::emit_black ()
        _last_video.reset ();
 #endif
        
-       Video (_black_frame, EYES_BOTH, _last_emit_was_black, _video_position);
+       Video (_black_frame, EYES_BOTH, ColourConversion(), _last_emit_was_black, _video_position);
        _video_position += _film->video_frames_to_time (1);
        _last_emit_was_black = true;
 }
@@ -549,6 +530,10 @@ Player::emit_black ()
 void
 Player::emit_silence (OutputAudioFrame most)
 {
+       if (most == 0) {
+               return;
+       }
+       
        OutputAudioFrame N = min (most, _film->audio_frame_rate() / 2);
        shared_ptr<AudioBuffers> silence (new AudioBuffers (_film->audio_channels(), N));
        silence->make_silent ();
@@ -621,7 +606,33 @@ Player::update_subtitle ()
        _out_subtitle.position.x = rint (_video_container_size.width * (in_rect.x + (in_rect.width * (1 - sc->subtitle_scale ()) / 2)));
        _out_subtitle.position.y = rint (_video_container_size.height * (in_rect.y + (in_rect.height * (1 - sc->subtitle_scale ()) / 2)));
        
-       _out_subtitle.image = _in_subtitle.image->scale (libdcp::Size (scaled_size.width, scaled_size.height), Scaler::from_id ("bicubic"), true);
-       _out_subtitle.from = _in_subtitle.from + piece->content->start ();
-       _out_subtitle.to = _in_subtitle.to + piece->content->start ();
+       _out_subtitle.image = _in_subtitle.image->scale (
+               scaled_size,
+               Scaler::from_id ("bicubic"),
+               _in_subtitle.image->pixel_format (),
+               true
+               );
+       _out_subtitle.from = _in_subtitle.from + piece->content->position ();
+       _out_subtitle.to = _in_subtitle.to + piece->content->position ();
+}
+
+/** Re-emit the last frame that was emitted, using current settings for crop, ratio, scaler and subtitles.
+ *  @return false if this could not be done.
+ */
+bool
+Player::repeat_last_video ()
+{
+       if (!_last_process_video.image) {
+               return false;
+       }
+
+       process_video (
+               _last_process_video.weak_piece,
+               _last_process_video.image,
+               _last_process_video.eyes,
+               _last_process_video.same,
+               _last_process_video.frame
+               );
+
+       return true;
 }