/*
- Copyright (C) 2013-2016 Carl Hetherington <cth@carlh.net>
+ Copyright (C) 2013-2021 Carl Hetherington <cth@carlh.net>
This file is part of DCP-o-matic.
You should have received a copy of the GNU General Public License
along with DCP-o-matic. If not, see <http://www.gnu.org/licenses/>.
+
*/
#include "ffmpeg_content.h"
#include "filter.h"
#include "film.h"
#include "log.h"
+#include "config.h"
#include "exceptions.h"
#include "frame_rate_change.h"
-#include "caption_content.h"
+#include "text_content.h"
#include <dcp/raw_convert.h>
#include <libcxml/cxml.h>
extern "C" {
#include <libavutil/pixdesc.h>
}
#include <libxml++/libxml++.h>
-#include <boost/foreach.hpp>
#include <iostream>
#include "i18n.h"
-#define LOG_GENERAL(...) film->log()->log (String::compose (__VA_ARGS__), LogEntry::TYPE_GENERAL);
using std::string;
using std::vector;
using std::pair;
using std::make_pair;
using std::max;
-using boost::shared_ptr;
-using boost::dynamic_pointer_cast;
+using std::make_shared;
+using std::shared_ptr;
+using std::dynamic_pointer_cast;
using boost::optional;
using dcp::raw_convert;
+using namespace dcpomatic;
+
/* Property identifiers passed to ContentChangeSignaller when aspects of this
 * content change (see their use in examine() / set_subtitle_stream() etc. below).
 */
int const FFmpegContentProperty::SUBTITLE_STREAMS = 100;
int const FFmpegContentProperty::SUBTITLE_STREAM = 101;
int const FFmpegContentProperty::FILTERS = 102;
int const FFmpegContentProperty::KDM = 103;
-FFmpegContent::FFmpegContent (shared_ptr<const Film> film, boost::filesystem::path p)
- : Content (film, p)
+
+FFmpegContent::FFmpegContent (boost::filesystem::path p)
+ : Content (p)
{
}
+
/** Read an optional integer child of an XML node and cast it to an enum type.
 *  @param node Node to read from.
 *  @param name Name of the child element.
 *  @return The enum value, or an empty optional if the child is not present.
 */
template <class T>
optional<T>
get_optional_enum (cxml::ConstNodePtr node, string name)
{
	auto const v = node->optional_number_child<int>(name);
	if (!v) {
		return optional<T>();
	}
	return static_cast<T>(*v);
}
-FFmpegContent::FFmpegContent (shared_ptr<const Film> film, cxml::ConstNodePtr node, int version, list<string>& notes)
- : Content (film, node)
+
+FFmpegContent::FFmpegContent (cxml::ConstNodePtr node, int version, list<string>& notes)
+ : Content (node)
{
video = VideoContent::from_xml (this, node, version);
audio = AudioContent::from_xml (this, node, version);
- caption = CaptionContent::from_xml (this, node, version);
+ text = TextContent::from_xml (this, node, version, notes);
- list<cxml::NodePtr> c = node->node_children ("SubtitleStream");
- for (list<cxml::NodePtr>::const_iterator i = c.begin(); i != c.end(); ++i) {
- _subtitle_streams.push_back (shared_ptr<FFmpegSubtitleStream> (new FFmpegSubtitleStream (*i, version)));
- if ((*i)->optional_number_child<int> ("Selected")) {
+ for (auto i: node->node_children("SubtitleStream")) {
+ _subtitle_streams.push_back (make_shared<FFmpegSubtitleStream>(i, version));
+ if (i->optional_number_child<int>("Selected")) {
_subtitle_stream = _subtitle_streams.back ();
}
}
- c = node->node_children ("AudioStream");
- for (list<cxml::NodePtr>::const_iterator i = c.begin(); i != c.end(); ++i) {
- shared_ptr<FFmpegAudioStream> as (new FFmpegAudioStream (*i, version));
+ for (auto i: node->node_children("AudioStream")) {
+ auto as = make_shared<FFmpegAudioStream>(i, version);
audio->add_stream (as);
- if (version < 11 && !(*i)->optional_node_child ("Selected")) {
+ if (version < 11 && !i->optional_node_child ("Selected")) {
/* This is an old file and this stream is not selected, so un-map it */
as->set_mapping (AudioMapping (as->channels (), MAX_DCP_AUDIO_CHANNELS));
}
}
- c = node->node_children ("Filter");
- for (list<cxml::NodePtr>::iterator i = c.begin(); i != c.end(); ++i) {
- Filter const * f = Filter::from_id ((*i)->content ());
+ for (auto i: node->node_children("Filter")) {
+ Filter const * f = Filter::from_id(i->content());
if (f) {
_filters.push_back (f);
} else {
- notes.push_back (String::compose (_("DCP-o-matic no longer supports the `%1' filter, so it has been turned off."), (*i)->content()));
+ notes.push_back (String::compose (_("DCP-o-matic no longer supports the `%1' filter, so it has been turned off."), i->content()));
}
}
- optional<ContentTime::Type> const f = node->optional_number_child<ContentTime::Type> ("FirstVideo");
+ auto const f = node->optional_number_child<ContentTime::Type> ("FirstVideo");
if (f) {
_first_video = ContentTime (f.get ());
}
_color_trc = get_optional_enum<AVColorTransferCharacteristic>(node, "ColorTransferCharacteristic");
_colorspace = get_optional_enum<AVColorSpace>(node, "Colorspace");
_bits_per_pixel = node->optional_number_child<int> ("BitsPerPixel");
+}
+
+
+FFmpegContent::FFmpegContent (vector<shared_ptr<Content>> c)
+ : Content (c)
+{
+ auto i = c.begin ();
+
+ bool need_video = false;
+ bool need_audio = false;
+ bool need_text = false;
+
+ if (i != c.end ()) {
+ need_video = static_cast<bool> ((*i)->video);
+ need_audio = static_cast<bool> ((*i)->audio);
+ need_text = !(*i)->text.empty();
+ }
+
+ while (i != c.end ()) {
+ if (need_video != static_cast<bool> ((*i)->video)) {
+ throw JoinError (_("Content to be joined must all have or not have video"));
+ }
+ if (need_audio != static_cast<bool> ((*i)->audio)) {
+ throw JoinError (_("Content to be joined must all have or not have audio"));
+ }
+ if (need_text != !(*i)->text.empty()) {
+ throw JoinError (_("Content to be joined must all have or not have subtitles or captions"));
+ }
+ ++i;
+ }
+
+ if (need_video) {
+ video = make_shared<VideoContent>(this, c);
+ }
+ if (need_audio) {
+ audio = make_shared<AudioContent>(this, c);
+ }
+ if (need_text) {
+ text.push_back (make_shared<TextContent>(this, c));
+ }
+
+ auto ref = dynamic_pointer_cast<FFmpegContent> (c[0]);
+ DCPOMATIC_ASSERT (ref);
+
+ for (size_t i = 0; i < c.size(); ++i) {
+ auto fc = dynamic_pointer_cast<FFmpegContent>(c[i]);
+ if (fc->only_text() && fc->only_text()->use() && *(fc->_subtitle_stream.get()) != *(ref->_subtitle_stream.get())) {
+ throw JoinError (_("Content to be joined must use the same subtitle stream."));
+ }
+ }
+
+ /* XXX: should probably check that more of the stuff below is the same in *this and ref */
+ _subtitle_streams = ref->subtitle_streams ();
+ _subtitle_stream = ref->subtitle_stream ();
+ _first_video = ref->_first_video;
+ _filters = ref->_filters;
+ _color_range = ref->_color_range;
+ _color_primaries = ref->_color_primaries;
+ _color_trc = ref->_color_trc;
+ _colorspace = ref->_colorspace;
+ _bits_per_pixel = ref->_bits_per_pixel;
}
+
/* NOTE(review): this block still contains unresolved patch markers ('-'/'+'),
 * and the hunk appears truncated: after `if (video) {` the serialisation of the
 * video itself (and its closing brace) seems to have been elided by diff
 * context, leaving one more open brace than close.  Resolve against the
 * upstream file before compiling.
 */
/* Serialise this content's description (type, streams, filters, colour
 * metadata) as XML children of `node'. */
void
FFmpegContent::as_xml (xmlpp::Node* node, bool with_paths) const
{
- node->add_child("Type")->add_child_text ("FFmpeg");
+ node->add_child("Type")->add_child_text("FFmpeg");
Content::as_xml (node, with_paths);
if (video) {
if (audio) {
audio->as_xml (node);
- BOOST_FOREACH (AudioStreamPtr i, audio->streams ()) {
- shared_ptr<FFmpegAudioStream> f = dynamic_pointer_cast<FFmpegAudioStream> (i);
+ for (auto i: audio->streams()) {
+ auto f = dynamic_pointer_cast<FFmpegAudioStream> (i);
DCPOMATIC_ASSERT (f);
f->as_xml (node->add_child("AudioStream"));
}
}
- if (caption) {
- caption->as_xml (node);
+ if (only_text()) {
+ only_text()->as_xml (node);
}
boost::mutex::scoped_lock lm (_mutex);
- for (vector<shared_ptr<FFmpegSubtitleStream> >::const_iterator i = _subtitle_streams.begin(); i != _subtitle_streams.end(); ++i) {
- xmlpp::Node* t = node->add_child("SubtitleStream");
- if (_subtitle_stream && *i == _subtitle_stream) {
+ for (auto i: _subtitle_streams) {
+ auto t = node->add_child("SubtitleStream");
+ if (_subtitle_stream && i == _subtitle_stream) {
t->add_child("Selected")->add_child_text("1");
}
- (*i)->as_xml (t);
+ i->as_xml (t);
}
- for (vector<Filter const *>::const_iterator i = _filters.begin(); i != _filters.end(); ++i) {
- node->add_child("Filter")->add_child_text ((*i)->id ());
+ for (auto i: _filters) {
+ node->add_child("Filter")->add_child_text(i->id());
}
if (_first_video) {
- node->add_child("FirstVideo")->add_child_text (raw_convert<string> (_first_video.get().get()));
+ node->add_child("FirstVideo")->add_child_text(raw_convert<string>(_first_video.get().get()))
}
if (_color_range) {
- node->add_child("ColorRange")->add_child_text (raw_convert<string> (static_cast<int> (*_color_range)));
+ node->add_child("ColorRange")->add_child_text(raw_convert<string>(static_cast<int>(*_color_range)));
}
if (_color_primaries) {
- node->add_child("ColorPrimaries")->add_child_text (raw_convert<string> (static_cast<int> (*_color_primaries)));
+ node->add_child("ColorPrimaries")->add_child_text(raw_convert<string>(static_cast<int>(*_color_primaries)));
}
if (_color_trc) {
- node->add_child("ColorTransferCharacteristic")->add_child_text (raw_convert<string> (static_cast<int> (*_color_trc)));
+ node->add_child("ColorTransferCharacteristic")->add_child_text(raw_convert<string>(static_cast<int>(*_color_trc)));
}
if (_colorspace) {
- node->add_child("Colorspace")->add_child_text (raw_convert<string> (static_cast<int> (*_colorspace)));
+ node->add_child("Colorspace")->add_child_text(raw_convert<string>(static_cast<int>(*_colorspace)));
}
if (_bits_per_pixel) {
- node->add_child("BitsPerPixel")->add_child_text (raw_convert<string> (*_bits_per_pixel));
+ node->add_child("BitsPerPixel")->add_child_text(raw_convert<string>(*_bits_per_pixel));
}
}
+
void
-FFmpegContent::examine (shared_ptr<Job> job)
+FFmpegContent::examine (shared_ptr<const Film> film, shared_ptr<Job> job)
{
- job->set_progress_unknown ();
+ ContentChangeSignaller cc1 (this, FFmpegContentProperty::SUBTITLE_STREAMS);
+ ContentChangeSignaller cc2 (this, FFmpegContentProperty::SUBTITLE_STREAM);
- Content::examine (job);
+ if (job) {
+ job->set_progress_unknown ();
+ }
+
+ Content::examine (film, job);
- shared_ptr<FFmpegExaminer> examiner (new FFmpegExaminer (shared_from_this (), job));
+ auto examiner = make_shared<FFmpegExaminer>(shared_from_this (), job);
if (examiner->has_video ()) {
video.reset (new VideoContent (this));
video->take_from_examiner (examiner);
}
- boost::filesystem::path first_path = path (0);
+ auto first_path = path (0);
{
boost::mutex::scoped_lock lm (_mutex);
_bits_per_pixel = examiner->bits_per_pixel ();
if (examiner->rotation()) {
- double rot = *examiner->rotation ();
+ auto rot = *examiner->rotation ();
if (fabs (rot - 180) < 1.0) {
_filters.push_back (Filter::from_id ("vflip"));
_filters.push_back (Filter::from_id ("hflip"));
}
}
- if (!examiner->audio_streams().empty ()) {
- audio.reset (new AudioContent (this));
+ if (!examiner->audio_streams().empty()) {
+ audio = make_shared<AudioContent>(this);
- BOOST_FOREACH (shared_ptr<FFmpegAudioStream> i, examiner->audio_streams ()) {
+ for (auto i: examiner->audio_streams()) {
audio->add_stream (i);
}
- AudioStreamPtr as = audio->streams().front();
- AudioMapping m = as->mapping ();
- film()->make_audio_mapping_default (m, first_path);
+ auto as = audio->streams().front();
+ auto m = as->mapping ();
+ m.make_default (film ? film->audio_processor() : 0, first_path);
as->set_mapping (m);
}
_subtitle_streams = examiner->subtitle_streams ();
if (!_subtitle_streams.empty ()) {
- caption.reset (new CaptionContent (this));
+ text.clear ();
+ text.push_back (make_shared<TextContent>(this, TextType::OPEN_SUBTITLE, TextType::UNKNOWN));
_subtitle_stream = _subtitle_streams.front ();
}
-
}
if (examiner->has_video ()) {
set_default_colour_conversion ();
}
- signal_changed (FFmpegContentProperty::SUBTITLE_STREAMS);
- signal_changed (FFmpegContentProperty::SUBTITLE_STREAM);
+ if (examiner->has_video() && examiner->pulldown() && video_frame_rate() && fabs(*video_frame_rate() - 29.97) < 0.001) {
+ /* FFmpeg has detected this file as 29.97 and the examiner thinks it is using "soft" 2:3 pulldown (telecine).
+ * This means we can treat it as a 23.976fps file.
+ */
+ set_video_frame_rate (24000.0 / 1001);
+ video->set_length (video->length() * 24.0 / 30);
+ }
}
+
/** @return a short user-visible summary of this content: its path plus a tag
 *  describing whether it carries video, audio or both.
 */
string
FFmpegContent::summary () const
{
	if (video && audio) {
		return String::compose (_("%1 [movie]"), path_summary());
	} else if (video) {
		return String::compose (_("%1 [video]"), path_summary());
	} else if (audio) {
		return String::compose (_("%1 [audio]"), path_summary());
	}

	return path_summary ();
}
+
/* NOTE(review): this block contains unresolved patch markers ('-'/'+') and is
 * visibly truncated: `ss' is used without a visible declaration, and the
 * `);' line below is an orphan fragment — the middle of the function was
 * elided by diff context.  Resolve against the upstream file before compiling.
 */
/* Builds a technical description string from the audio streams, subtitle
 * stream, filters and the base Content summary. */
string
FFmpegContent::technical_summary () const
{
string as = "";
- BOOST_FOREACH (shared_ptr<FFmpegAudioStream> i, ffmpeg_audio_streams ()) {
+ for (auto i: ffmpeg_audio_streams ()) {
as += i->technical_summary () + " " ;
}
ss = _subtitle_stream->technical_summary ();
}
- string filt = Filter::ffmpeg_string (_filters);
+ auto filt = Filter::ffmpeg_string (_filters);
- string s = Content::technical_summary ();
+ auto s = Content::technical_summary ();
if (video) {
s += " - " + video->technical_summary ();
);
}
+
/** Select the subtitle stream to use, signalling the change to observers.
 *  @param s Stream to use.
 */
void
FFmpegContent::set_subtitle_stream (shared_ptr<FFmpegSubtitleStream> s)
{
	/* Signals SUBTITLE_STREAM change when it goes out of scope */
	ContentChangeSignaller cc (this, FFmpegContentProperty::SUBTITLE_STREAM);

	{
		boost::mutex::scoped_lock lm (_mutex);
		_subtitle_stream = s;
	}
}
+
/** @return true if the two streams have the same id, i.e. refer to the same
 *  stream within their file.
 */
bool
operator== (FFmpegStream const & a, FFmpegStream const & b)
{
	return a._id == b._id;
}
+
/** @return true if the two streams have different ids, i.e. refer to
 *  different streams.
 */
bool
operator!= (FFmpegStream const & a, FFmpegStream const & b)
{
	/* Inequality is just the negation of id equality */
	return !(a._id == b._id);
}
+
/** @param film Film that this content is in.
 *  @return the full length of this content in the DCP: derived from the video
 *  length if there is video, otherwise from the longest audio stream, adjusted
 *  by the frame-rate change between content and film.
 */
DCPTime
FFmpegContent::full_length (shared_ptr<const Film> film) const
{
	FrameRateChange const frc (film, shared_from_this());
	if (video) {
		return DCPTime::from_frames (llrint (video->length_after_3d_combine() * frc.factor()), film->video_frame_rate());
	}

	if (audio) {
		DCPTime longest;
		for (auto i: audio->streams()) {
			longest = max (longest, DCPTime::from_frames(llrint(i->length() / frc.speed_up), i->frame_rate()));
		}
		return longest;
	}

	/* XXX: subtitle content? */

	return {};
}
+
+
+DCPTime
+FFmpegContent::approximate_length () const
+{
+ if (video) {
+ return DCPTime::from_frames (video->length_after_3d_combine(), 24);
}
DCPOMATIC_ASSERT (audio);
- DCPTime longest;
- BOOST_FOREACH (AudioStreamPtr i, audio->streams ()) {
- longest = max (longest, DCPTime::from_frames (llrint (i->length() / frc.speed_up), i->frame_rate()));
+ Frame longest = 0;
+ for (auto i: audio->streams()) {
+ longest = max (longest, Frame(llrint(i->length())));
}
- return longest;
+ return DCPTime::from_frames (longest, 24);
}
+
/** Set the FFmpeg filters to be applied to this content, signalling the change
 *  to observers.
 *  @param filters New filter list.
 */
void
FFmpegContent::set_filters (vector<Filter const *> const & filters)
{
	/* Signals FILTERS change when it goes out of scope */
	ContentChangeSignaller cc (this, FFmpegContentProperty::FILTERS);

	{
		boost::mutex::scoped_lock lm (_mutex);
		_filters = filters;
	}
}
+
/* NOTE(review): this block contains unresolved patch markers ('-'/'+') and is
 * truncated at the start: `s' is appended to before any visible declaration,
 * so the opening of the function body was elided by diff context.  Resolve
 * against the upstream file before compiling.
 */
/* Builds an identifier string from the video, burnt-in text, subtitle stream
 * and filter ids. */
string
FFmpegContent::identifier () const
{
s += "_" + video->identifier();
}
- if (caption && caption->use() && caption->burn()) {
- s += "_" + caption->identifier();
+ if (only_text() && only_text()->use() && only_text()->burn()) {
+ s += "_" + only_text()->identifier();
}
boost::mutex::scoped_lock lm (_mutex);
s += "_" + _subtitle_stream->identifier ();
}
- for (vector<Filter const *>::const_iterator i = _filters.begin(); i != _filters.end(); ++i) {
- s += "_" + (*i)->id ();
+ for (auto i: _filters) {
+ s += "_" + i->id();
}
return s;
}
+
/* NOTE(review): this block contains unresolved patch markers ('-'/'+') and the
 * body that actually uses `s' (presumably choosing a conversion from the video
 * size) has been elided by diff context.  Resolve against the upstream file
 * before compiling.
 */
void
FFmpegContent::set_default_colour_conversion ()
{
DCPOMATIC_ASSERT (video);
- dcp::Size const s = video->size ();
+ auto const s = video->size ();
boost::mutex::scoped_lock lm (_mutex);
}
}
+
/* NOTE(review): this block contains unresolved patch markers ('-'/'+') and the
 * switch over _color_range is truncated (other case labels and part of a
 * comment are missing).  Resolve against the upstream file before compiling.
 */
/* Appends user-visible properties (colour range etc.) for display in the UI,
 * delegating to Content / video / audio for their own properties. */
void
- FFmpegContent::add_properties (list<UserProperty>& p) const
+ FFmpegContent::add_properties (shared_ptr<const Film> film, list<UserProperty>& p) const
{
- Content::add_properties (p);
+ Content::add_properties (film, p);
if (video) {
video->add_properties (p);
if (_bits_per_pixel) {
- int const sub = 219 * pow (2, _bits_per_pixel.get() - 8);
- int const total = pow (2, _bits_per_pixel.get());
+ /* Assuming there's three components, so bits per pixel component is _bits_per_pixel / 3 */
+ int const lim_start = pow(2, _bits_per_pixel.get() / 3 - 4);
+ int const lim_end = 235 * pow(2, _bits_per_pixel.get() / 3 - 8);
+ int const total = pow(2, _bits_per_pixel.get() / 3);
switch (_color_range.get_value_or(AVCOL_RANGE_UNSPECIFIED)) {
case AVCOL_RANGE_UNSPECIFIED:
/// file is limited, so that not all possible values are valid.
p.push_back (
UserProperty (
- UserProperty::VIDEO, _("Colour range"), String::compose (_("Limited (%1-%2)"), (total - sub) / 2, (total + sub) / 2)
+ UserProperty::VIDEO, _("Colour range"), String::compose(_("Limited (%1-%2)"), lim_start, lim_end)
)
);
break;
}
if (audio) {
- audio->add_properties (p);
+ audio->add_properties (film, p);
}
}
+
/** Our subtitle streams have colour maps, which can be changed, but
 *  they have no way of signalling that change.  As a hack, we have this
 *  method which callers can use when they've modified one of our subtitle
 *  streams.
 */
/** Signal that one of our subtitle streams has been modified externally
 *  (see the comment above for why this hack exists).
 */
void
FFmpegContent::signal_subtitle_stream_changed ()
{
	/* XXX: this is too late; really it should be before the change */
	ContentChangeSignaller cc (this, FFmpegContentProperty::SUBTITLE_STREAM);
}
-vector<shared_ptr<FFmpegAudioStream> >
+
+vector<shared_ptr<FFmpegAudioStream>>
FFmpegContent::ffmpeg_audio_streams () const
{
- vector<shared_ptr<FFmpegAudioStream> > fa;
+ vector<shared_ptr<FFmpegAudioStream>> fa;
if (audio) {
- BOOST_FOREACH (AudioStreamPtr i, audio->streams()) {
- fa.push_back (dynamic_pointer_cast<FFmpegAudioStream> (i));
+ for (auto i: audio->streams()) {
+ fa.push_back (dynamic_pointer_cast<FFmpegAudioStream>(i));
}
}
return fa;
}
+
/** Copy user settings from another piece of content; a no-op unless `c' is
 *  also FFmpegContent.
 *  @param c Content to copy from.
 */
void
FFmpegContent::take_settings_from (shared_ptr<const Content> c)
{
	auto fc = dynamic_pointer_cast<const FFmpegContent> (c);
	if (!fc) {
		return;
	}

	Content::take_settings_from (c);
	_filters = fc->_filters;
}
+