X-Git-Url: https://git.carlh.net/gitweb/?a=blobdiff_plain;f=src%2Flib%2Fffmpeg_examiner.cc;h=48d85da6f11113cb6cabc83215505c8df766110a;hb=5e4f001bf32e3cdf65efa34803d70e6c1c00c66b;hp=013799d036c2fd20a3b60cb4539868b2e025b34f;hpb=d156fe45ee21fc416ce6b9e43ceed95bf42fde41;p=dcpomatic.git

diff --git a/src/lib/ffmpeg_examiner.cc b/src/lib/ffmpeg_examiner.cc
index 013799d03..48d85da6f 100644
--- a/src/lib/ffmpeg_examiner.cc
+++ b/src/lib/ffmpeg_examiner.cc
@@ -25,13 +25,14 @@ extern "C" {
 #include "ffmpeg_content.h"
 #include "ffmpeg_audio_stream.h"
 #include "ffmpeg_subtitle_stream.h"
+#include "util.h"
+#include "safe_stringstream.h"
 
 #include "i18n.h"
 
 using std::string;
 using std::cout;
 using std::max;
-using std::stringstream;
 using boost::shared_ptr;
 using boost::optional;
 
@@ -63,9 +64,16 @@ FFmpegExaminer::FFmpegExaminer (shared_ptr<const FFmpegContent> c)
 		}
 	}
 
-	/* Run through until we find the first audio (for each stream) and video */
-
-	while (1) {
+	/* Run through until we find:
+	 *   - the first video.
+	 *   - the first audio for each stream.
+	 *   - the subtitle periods for each stream.
+	 *
+	 * We have to note subtitle periods as otherwise we have no way of knowing
+	 * where we should look for subtitles (video and audio are always present,
+	 * so they are ok).
+	 */
+	while (true) {
 		int r = av_read_frame (_format_context, &_packet);
 		if (r < 0) {
 			break;
@@ -122,7 +130,17 @@ FFmpegExaminer::audio_packet (AVCodecContext* context, shared_ptr<FFmpegAudioStream> stream)
 void
 FFmpegExaminer::subtitle_packet (AVCodecContext* context, shared_ptr<FFmpegSubtitleStream> stream)
 {
-
+	int frame_finished;
+	AVSubtitle sub;
+	if (avcodec_decode_subtitle2 (context, &sub, &frame_finished, &_packet) >= 0 && frame_finished) {
+		ContentTimePeriod const period = subtitle_period (sub);
+		if (sub.num_rects == 0 && !stream->periods.empty () && stream->periods.back().to > period.from) {
+			/* Finish the last subtitle */
+			stream->periods.back().to = period.from;
+		} else if (sub.num_rects == 1) {
+			stream->periods.push_back (period);
+		}
+	}
 }
 
 optional<ContentTime>
@@ -159,13 +177,13 @@ ContentTime
 FFmpegExaminer::video_length () const
 {
 	ContentTime const length = ContentTime::from_seconds (double (_format_context->duration) / AV_TIME_BASE);
-	return ContentTime (max (int64_t (1), length.get ()));
+	return ContentTime (max (ContentTime::Type (1), length.get ()));
 }
 
 string
 FFmpegExaminer::audio_stream_name (AVStream* s) const
 {
-	stringstream n;
+	SafeStringStream n;
 
 	n << stream_name (s);
 
@@ -181,7 +199,7 @@ FFmpegExaminer::audio_stream_name (AVStream* s) const
 string
 FFmpegExaminer::subtitle_stream_name (AVStream* s) const
 {
-	stringstream n;
+	SafeStringStream n;
 
 	n << stream_name (s);
 
@@ -195,7 +213,7 @@ FFmpegExaminer::subtitle_stream_name (AVStream* s) const
 string
 FFmpegExaminer::stream_name (AVStream* s) const
 {
-	stringstream n;
+	SafeStringStream n;
 
 	if (s->metadata) {
 		AVDictionaryEntry const * lang = av_dict_get (s->metadata, "language", 0, 0);
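Note: the new FFmpegExaminer::subtitle_packet() body above calls a subtitle_period() helper that is not part of this diff. As a rough, self-contained illustration only (not dcpomatic's actual helper; the name example_subtitle_period and the plain double return type are invented for this sketch), one decoded AVSubtitle's timing fields can be turned into a from/to period along these lines:

extern "C" {
#include <libavcodec/avcodec.h>
}
#include <utility>

/* Illustrative sketch only: derive a (from, to) period in seconds from one
 * decoded AVSubtitle.  AVSubtitle::pts is in AV_TIME_BASE units and the
 * display times are milliseconds relative to it.
 */
static std::pair<double, double>
example_subtitle_period (AVSubtitle const & sub)
{
	double const packet_time = static_cast<double> (sub.pts) / AV_TIME_BASE;
	return std::make_pair (
		packet_time + sub.start_display_time / 1e3,
		packet_time + sub.end_display_time / 1e3
		);
}

An AVSubtitle with num_rects == 0 is typically a "clear the display" event, which is why the examiner above uses it to finish the previous period rather than to start a new one.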