You should have received a copy of the GNU General Public License
along with DCP-o-matic. If not, see <http://www.gnu.org/licenses/>.
+
*/
-#include "ffmpeg_content.h"
-#include "video_content.h"
#include "audio_content.h"
-#include "ffmpeg_examiner.h"
-#include "ffmpeg_subtitle_stream.h"
-#include "ffmpeg_audio_stream.h"
#include "compose.hpp"
-#include "job.h"
-#include "util.h"
-#include "filter.h"
-#include "film.h"
-#include "log.h"
#include "config.h"
+#include "constants.h"
#include "exceptions.h"
+#include "ffmpeg_audio_stream.h"
+#include "ffmpeg_content.h"
+#include "ffmpeg_examiner.h"
+#include "ffmpeg_subtitle_stream.h"
+#include "film.h"
+#include "filter.h"
#include "frame_rate_change.h"
+#include "job.h"
+#include "log.h"
#include "text_content.h"
+#include "video_content.h"
#include <dcp/raw_convert.h>
#include <libcxml/cxml.h>
extern "C" {
#include "i18n.h"
+
using std::string;
using std::vector;
using std::list;
using dcp::raw_convert;
using namespace dcpomatic;
+
int const FFmpegContentProperty::SUBTITLE_STREAMS = 100;
int const FFmpegContentProperty::SUBTITLE_STREAM = 101;
int const FFmpegContentProperty::FILTERS = 102;
int const FFmpegContentProperty::KDM = 103;
+
/** Construct an FFmpegContent for a media file on disk.
 *  @param p Path to the file; the file is not opened or probed here — see examine().
 */
FFmpegContent::FFmpegContent (boost::filesystem::path p)
	: Content (p)
{
}
+
template <class T>
optional<T>
get_optional_enum (cxml::ConstNodePtr node, string name)
return static_cast<T>(*v);
}
+
FFmpegContent::FFmpegContent (cxml::ConstNodePtr node, int version, list<string>& notes)
: Content (node)
{
- video = VideoContent::from_xml (this, node, version);
+ _color_range = get_optional_enum<AVColorRange>(node, "ColorRange");
+
+ VideoRange const video_range_hint = (_color_range && *_color_range == AVCOL_RANGE_JPEG) ? VideoRange::FULL : VideoRange::VIDEO;
+
+ video = VideoContent::from_xml (this, node, version, video_range_hint);
audio = AudioContent::from_xml (this, node, version);
- text = TextContent::from_xml (this, node, version);
+ text = TextContent::from_xml (this, node, version, notes);
for (auto i: node->node_children("SubtitleStream")) {
_subtitle_streams.push_back (make_shared<FFmpegSubtitleStream>(i, version));
}
for (auto i: node->node_children("Filter")) {
- Filter const * f = Filter::from_id(i->content());
- if (f) {
- _filters.push_back (f);
+ if (auto filter = Filter::from_id(i->content())) {
+ _filters.push_back(*filter);
} else {
notes.push_back (String::compose (_("DCP-o-matic no longer supports the `%1' filter, so it has been turned off."), i->content()));
}
_first_video = ContentTime (f.get ());
}
- _color_range = get_optional_enum<AVColorRange>(node, "ColorRange");
_color_primaries = get_optional_enum<AVColorPrimaries>(node, "ColorPrimaries");
_color_trc = get_optional_enum<AVColorTransferCharacteristic>(node, "ColorTransferCharacteristic");
_colorspace = get_optional_enum<AVColorSpace>(node, "Colorspace");
_bits_per_pixel = node->optional_number_child<int> ("BitsPerPixel");
}
-FFmpegContent::FFmpegContent (vector<shared_ptr<Content> > c)
+
+FFmpegContent::FFmpegContent (vector<shared_ptr<Content>> c)
: Content (c)
{
auto i = c.begin ();
_bits_per_pixel = ref->_bits_per_pixel;
}
+
void
FFmpegContent::as_xml (xmlpp::Node* node, bool with_paths) const
{
- node->add_child("Type")->add_child_text ("FFmpeg");
+ node->add_child("Type")->add_child_text("FFmpeg");
Content::as_xml (node, with_paths);
if (video) {
if (audio) {
audio->as_xml (node);
- for (auto i: audio->streams ()) {
+ for (auto i: audio->streams()) {
auto f = dynamic_pointer_cast<FFmpegAudioStream> (i);
DCPOMATIC_ASSERT (f);
f->as_xml (node->add_child("AudioStream"));
}
for (auto i: _filters) {
- node->add_child("Filter")->add_child_text(i->id());
+ node->add_child("Filter")->add_child_text(i.id());
}
if (_first_video) {
- node->add_child("FirstVideo")->add_child_text (raw_convert<string> (_first_video.get().get()));
+ node->add_child("FirstVideo")->add_child_text(raw_convert<string>(_first_video.get().get()));
}
if (_color_range) {
- node->add_child("ColorRange")->add_child_text (raw_convert<string> (static_cast<int> (*_color_range)));
+ node->add_child("ColorRange")->add_child_text(raw_convert<string>(static_cast<int>(*_color_range)));
}
if (_color_primaries) {
- node->add_child("ColorPrimaries")->add_child_text (raw_convert<string> (static_cast<int> (*_color_primaries)));
+ node->add_child("ColorPrimaries")->add_child_text(raw_convert<string>(static_cast<int>(*_color_primaries)));
}
if (_color_trc) {
- node->add_child("ColorTransferCharacteristic")->add_child_text (raw_convert<string> (static_cast<int> (*_color_trc)));
+ node->add_child("ColorTransferCharacteristic")->add_child_text(raw_convert<string>(static_cast<int>(*_color_trc)));
}
if (_colorspace) {
- node->add_child("Colorspace")->add_child_text (raw_convert<string> (static_cast<int> (*_colorspace)));
+ node->add_child("Colorspace")->add_child_text(raw_convert<string>(static_cast<int>(*_colorspace)));
}
if (_bits_per_pixel) {
- node->add_child("BitsPerPixel")->add_child_text (raw_convert<string> (*_bits_per_pixel));
+ node->add_child("BitsPerPixel")->add_child_text(raw_convert<string>(*_bits_per_pixel));
}
}
+
void
FFmpegContent::examine (shared_ptr<const Film> film, shared_ptr<Job> job)
{
- ChangeSignaller<Content> cc1 (this, FFmpegContentProperty::SUBTITLE_STREAMS);
- ChangeSignaller<Content> cc2 (this, FFmpegContentProperty::SUBTITLE_STREAM);
+ ContentChangeSignaller cc1 (this, FFmpegContentProperty::SUBTITLE_STREAMS);
+ ContentChangeSignaller cc2 (this, FFmpegContentProperty::SUBTITLE_STREAM);
if (job) {
job->set_progress_unknown ();
if (examiner->has_video ()) {
video.reset (new VideoContent (this));
- video->take_from_examiner (examiner);
+ video->take_from_examiner(film, examiner);
}
auto first_path = path (0);
if (examiner->rotation()) {
auto rot = *examiner->rotation ();
if (fabs (rot - 180) < 1.0) {
- _filters.push_back (Filter::from_id ("vflip"));
- _filters.push_back (Filter::from_id ("hflip"));
+ _filters.push_back(*Filter::from_id("vflip"));
+ _filters.push_back(*Filter::from_id("hflip"));
} else if (fabs (rot - 90) < 1.0) {
- _filters.push_back (Filter::from_id ("90clock"));
+ _filters.push_back(*Filter::from_id("90clock"));
+ video->rotate_size();
} else if (fabs (rot - 270) < 1.0) {
- _filters.push_back (Filter::from_id ("90anticlock"));
+ _filters.push_back(*Filter::from_id("90anticlock"));
+ video->rotate_size();
}
}
+ if (examiner->has_alpha()) {
+ _filters.push_back(*Filter::from_id("premultiply"));
+ }
}
- if (!examiner->audio_streams().empty ()) {
+ if (!examiner->audio_streams().empty()) {
audio = make_shared<AudioContent>(this);
for (auto i: examiner->audio_streams()) {
text.clear ();
text.push_back (make_shared<TextContent>(this, TextType::OPEN_SUBTITLE, TextType::UNKNOWN));
_subtitle_stream = _subtitle_streams.front ();
+ text.front()->add_font(make_shared<dcpomatic::Font>(""));
}
}
/* FFmpeg has detected this file as 29.97 and the examiner thinks it is using "soft" 2:3 pulldown (telecine).
* This means we can treat it as a 23.976fps file.
*/
- set_video_frame_rate (24000.0 / 1001);
+ set_video_frame_rate(film, 24000.0 / 1001);
video->set_length (video->length() * 24.0 / 30);
}
}
+
/** @return a short human-readable summary of this content: the path summary
 *  tagged as [movie], [video] or [audio] depending on which streams are
 *  present, or just the path summary if neither video nor audio is set.
 */
string
FFmpegContent::summary () const
{
	if (video && audio) {
		return String::compose (_("%1 [movie]"), path_summary());
	} else if (video) {
		return String::compose (_("%1 [video]"), path_summary());
	} else if (audio) {
		return String::compose (_("%1 [audio]"), path_summary());
	}

	return path_summary ();
}
+
string
FFmpegContent::technical_summary () const
{
);
}
+
void
FFmpegContent::set_subtitle_stream (shared_ptr<FFmpegSubtitleStream> s)
{
- ChangeSignaller<Content> cc (this, FFmpegContentProperty::SUBTITLE_STREAM);
+ ContentChangeSignaller cc (this, FFmpegContentProperty::SUBTITLE_STREAM);
{
boost::mutex::scoped_lock lm (_mutex);
}
}
+
/** Streams compare equal iff their FFmpeg stream ids (_id) match;
 *  no other stream properties take part in the comparison.
 */
bool
operator== (FFmpegStream const & a, FFmpegStream const & b)
{
	return a._id == b._id;
}
+
/** Inverse of operator==: streams differ iff their FFmpeg stream ids (_id) differ. */
bool
operator!= (FFmpegStream const & a, FFmpegStream const & b)
{
	return a._id != b._id;
}
+
DCPTime
FFmpegContent::full_length (shared_ptr<const Film> film) const
{
/* XXX: subtitle content? */
- return DCPTime();
+ return {};
}
+
DCPTime
FFmpegContent::approximate_length () const
{
return DCPTime::from_frames (longest, 24);
}
+
void
-FFmpegContent::set_filters (vector<Filter const *> const & filters)
+FFmpegContent::set_filters(vector<Filter> const& filters)
{
- ChangeSignaller<Content> cc (this, FFmpegContentProperty::FILTERS);
+ ContentChangeSignaller cc (this, FFmpegContentProperty::FILTERS);
{
boost::mutex::scoped_lock lm (_mutex);
}
}
+
string
FFmpegContent::identifier () const
{
}
for (auto i: _filters) {
- s += "_" + i->id();
+ s += "_" + i.id();
}
return s;
}
+
void
FFmpegContent::set_default_colour_conversion ()
{
video->set_colour_conversion (PresetColourConversion::from_id ("rec2020").conversion);
break;
default:
- if (s.width < 1080) {
+ if (s && s->width < 1080) {
video->set_colour_conversion (PresetColourConversion::from_id ("rec601").conversion);
} else {
video->set_colour_conversion (PresetColourConversion::from_id ("rec709").conversion);
}
}
+
void
FFmpegContent::add_properties (shared_ptr<const Film> film, list<UserProperty>& p) const
{
video->add_properties (p);
if (_bits_per_pixel) {
- /* Assuming there's three components, so bits per pixel component is _bits_per_pixel / 3 */
- int const lim_start = pow(2, _bits_per_pixel.get() / 3 - 4);
- int const lim_end = 235 * pow(2, _bits_per_pixel.get() / 3 - 8);
- int const total = pow(2, _bits_per_pixel.get() / 3);
+ auto pixel_quanta_product = video->pixel_quanta().x * video->pixel_quanta().y;
+ auto bits_per_main_pixel = pixel_quanta_product * _bits_per_pixel.get() / (pixel_quanta_product + 2);
+
+ int const lim_start = pow(2, bits_per_main_pixel - 4);
+ int const lim_end = 235 * pow(2, bits_per_main_pixel - 8);
+ int const total = pow(2, bits_per_main_pixel);
switch (_color_range.get_value_or(AVCOL_RANGE_UNSPECIFIED)) {
case AVCOL_RANGE_UNSPECIFIED:
/// file is limited, so that not all possible values are valid.
p.push_back (
UserProperty (
- UserProperty::VIDEO, _("Colour range"), String::compose(_("Limited (%1-%2)"), lim_start, lim_end)
+ UserProperty::VIDEO, _("Colour range"), String::compose(_("Limited / video (%1-%2)"), lim_start, lim_end)
)
);
break;
case AVCOL_RANGE_JPEG:
/// TRANSLATORS: this means that the range of pixel values used in this
/// file is full, so that all possible pixel values are valid.
- p.push_back (UserProperty (UserProperty::VIDEO, _("Colour range"), String::compose (_("Full (0-%1)"), total)));
+ p.push_back(UserProperty(UserProperty::VIDEO, _("Colour range"), String::compose(_("Full (0-%1)"), total - 1)));
break;
default:
DCPOMATIC_ASSERT (false);
}
}
+
/** Our subtitle streams have colour maps, which can be changed, but
* they have no way of signalling that change. As a hack, we have this
* method which callers can use when they've modified one of our subtitle
FFmpegContent::signal_subtitle_stream_changed ()
{
/* XXX: this is too late; really it should be before the change */
- ChangeSignaller<Content> cc (this, FFmpegContentProperty::SUBTITLE_STREAM);
+ ContentChangeSignaller cc (this, FFmpegContentProperty::SUBTITLE_STREAM);
}
-vector<shared_ptr<FFmpegAudioStream> >
+
+vector<shared_ptr<FFmpegAudioStream>>
FFmpegContent::ffmpeg_audio_streams () const
{
- vector<shared_ptr<FFmpegAudioStream> > fa;
+ vector<shared_ptr<FFmpegAudioStream>> fa;
if (audio) {
for (auto i: audio->streams()) {
- fa.push_back (dynamic_pointer_cast<FFmpegAudioStream> (i));
+ fa.push_back (dynamic_pointer_cast<FFmpegAudioStream>(i));
}
}
return fa;
}
+
void
FFmpegContent::take_settings_from (shared_ptr<const Content> c)
{