You should have received a copy of the GNU General Public License
along with DCP-o-matic. If not, see <http://www.gnu.org/licenses/>.
+
*/
#include "ffmpeg_content.h"
#include "i18n.h"
+
using std::string;
using std::vector;
using std::list;
using dcp::raw_convert;
using namespace dcpomatic;
+
/* Property identifiers emitted via the content change-signalling machinery
 * (see the ContentChangeSignaller uses below) when parts of an FFmpegContent
 * change.  Values are arbitrary but must be distinct from each other and
 * from the base Content property ids.
 */
int const FFmpegContentProperty::SUBTITLE_STREAMS = 100;
int const FFmpegContentProperty::SUBTITLE_STREAM = 101;
int const FFmpegContentProperty::FILTERS = 102;
int const FFmpegContentProperty::KDM = 103;
+
/** Construct an FFmpegContent from a media file on disk.
 *  @param p Path to the file; all other state is left to be filled in
 *  later by examine().
 */
FFmpegContent::FFmpegContent (boost::filesystem::path p)
: Content (p)
{
}
+
template <class T>
optional<T>
get_optional_enum (cxml::ConstNodePtr node, string name)
return static_cast<T>(*v);
}
+
FFmpegContent::FFmpegContent (cxml::ConstNodePtr node, int version, list<string>& notes)
: Content (node)
{
- video = VideoContent::from_xml (this, node, version);
+ _color_range = get_optional_enum<AVColorRange>(node, "ColorRange");
+
+ VideoRange const video_range_hint = (_color_range && *_color_range == AVCOL_RANGE_JPEG) ? VideoRange::FULL : VideoRange::VIDEO;
+
+ video = VideoContent::from_xml (this, node, version, video_range_hint);
audio = AudioContent::from_xml (this, node, version);
- text = TextContent::from_xml (this, node, version);
+ text = TextContent::from_xml (this, node, version, notes);
for (auto i: node->node_children("SubtitleStream")) {
_subtitle_streams.push_back (make_shared<FFmpegSubtitleStream>(i, version));
_first_video = ContentTime (f.get ());
}
- _color_range = get_optional_enum<AVColorRange>(node, "ColorRange");
_color_primaries = get_optional_enum<AVColorPrimaries>(node, "ColorPrimaries");
_color_trc = get_optional_enum<AVColorTransferCharacteristic>(node, "ColorTransferCharacteristic");
_colorspace = get_optional_enum<AVColorSpace>(node, "Colorspace");
_bits_per_pixel = node->optional_number_child<int> ("BitsPerPixel");
}
-FFmpegContent::FFmpegContent (vector<shared_ptr<Content> > c)
+
+FFmpegContent::FFmpegContent (vector<shared_ptr<Content>> c)
: Content (c)
{
auto i = c.begin ();
_bits_per_pixel = ref->_bits_per_pixel;
}
+
void
FFmpegContent::as_xml (xmlpp::Node* node, bool with_paths) const
{
- node->add_child("Type")->add_child_text ("FFmpeg");
+ node->add_child("Type")->add_child_text("FFmpeg");
Content::as_xml (node, with_paths);
if (video) {
if (audio) {
audio->as_xml (node);
- for (auto i: audio->streams ()) {
+ for (auto i: audio->streams()) {
auto f = dynamic_pointer_cast<FFmpegAudioStream> (i);
DCPOMATIC_ASSERT (f);
f->as_xml (node->add_child("AudioStream"));
}
if (_first_video) {
- node->add_child("FirstVideo")->add_child_text (raw_convert<string> (_first_video.get().get()));
+ node->add_child("FirstVideo")->add_child_text(raw_convert<string>(_first_video.get().get()));
}
if (_color_range) {
- node->add_child("ColorRange")->add_child_text (raw_convert<string> (static_cast<int> (*_color_range)));
+ node->add_child("ColorRange")->add_child_text(raw_convert<string>(static_cast<int>(*_color_range)));
}
if (_color_primaries) {
- node->add_child("ColorPrimaries")->add_child_text (raw_convert<string> (static_cast<int> (*_color_primaries)));
+ node->add_child("ColorPrimaries")->add_child_text(raw_convert<string>(static_cast<int>(*_color_primaries)));
}
if (_color_trc) {
- node->add_child("ColorTransferCharacteristic")->add_child_text (raw_convert<string> (static_cast<int> (*_color_trc)));
+ node->add_child("ColorTransferCharacteristic")->add_child_text(raw_convert<string>(static_cast<int>(*_color_trc)));
}
if (_colorspace) {
- node->add_child("Colorspace")->add_child_text (raw_convert<string> (static_cast<int> (*_colorspace)));
+ node->add_child("Colorspace")->add_child_text(raw_convert<string>(static_cast<int>(*_colorspace)));
}
if (_bits_per_pixel) {
- node->add_child("BitsPerPixel")->add_child_text (raw_convert<string> (*_bits_per_pixel));
+ node->add_child("BitsPerPixel")->add_child_text(raw_convert<string>(*_bits_per_pixel));
}
}
+
void
FFmpegContent::examine (shared_ptr<const Film> film, shared_ptr<Job> job)
{
- ChangeSignaller<Content> cc1 (this, FFmpegContentProperty::SUBTITLE_STREAMS);
- ChangeSignaller<Content> cc2 (this, FFmpegContentProperty::SUBTITLE_STREAM);
+ ContentChangeSignaller cc1 (this, FFmpegContentProperty::SUBTITLE_STREAMS);
+ ContentChangeSignaller cc2 (this, FFmpegContentProperty::SUBTITLE_STREAM);
if (job) {
job->set_progress_unknown ();
}
}
- if (!examiner->audio_streams().empty ()) {
+ if (!examiner->audio_streams().empty()) {
audio = make_shared<AudioContent>(this);
for (auto i: examiner->audio_streams()) {
_subtitle_streams = examiner->subtitle_streams ();
if (!_subtitle_streams.empty ()) {
text.clear ();
- text.push_back (make_shared<TextContent>(this, TEXT_OPEN_SUBTITLE, TEXT_UNKNOWN));
+ text.push_back (make_shared<TextContent>(this, TextType::OPEN_SUBTITLE, TextType::UNKNOWN));
_subtitle_stream = _subtitle_streams.front ();
}
}
}
}
+
/** @return a human-readable summary of this content, of the form
 *  "<path> [movie]", "<path> [video]" or "<path> [audio]" depending on
 *  which stream types are present, or just the path summary if neither.
 */
string
FFmpegContent::summary () const
{
	auto const path = path_summary ();

	/* Guard-clause dispatch on the stream types we found */
	if (video && audio) {
		return String::compose (_("%1 [movie]"), path);
	}
	if (video) {
		return String::compose (_("%1 [video]"), path);
	}
	if (audio) {
		return String::compose (_("%1 [audio]"), path);
	}

	return path;
}
+
string
FFmpegContent::technical_summary () const
{
);
}
+
void
FFmpegContent::set_subtitle_stream (shared_ptr<FFmpegSubtitleStream> s)
{
- ChangeSignaller<Content> cc (this, FFmpegContentProperty::SUBTITLE_STREAM);
+ ContentChangeSignaller cc (this, FFmpegContentProperty::SUBTITLE_STREAM);
{
boost::mutex::scoped_lock lm (_mutex);
}
}
+
/** Equality for FFmpegStream: stream identity is defined entirely by the
 *  stored id.
 */
bool
operator== (FFmpegStream const & a, FFmpegStream const & b)
{
	return b._id == a._id;
}
+
/** Inequality for FFmpegStream: two streams differ exactly when their
 *  ids differ.
 */
bool
operator!= (FFmpegStream const & a, FFmpegStream const & b)
{
	return !(a._id == b._id);
}
+
DCPTime
FFmpegContent::full_length (shared_ptr<const Film> film) const
{
/* XXX: subtitle content? */
- return DCPTime();
+ return {};
}
+
DCPTime
FFmpegContent::approximate_length () const
{
return DCPTime::from_frames (longest, 24);
}
+
void
FFmpegContent::set_filters (vector<Filter const *> const & filters)
{
- ChangeSignaller<Content> cc (this, FFmpegContentProperty::FILTERS);
+ ContentChangeSignaller cc (this, FFmpegContentProperty::FILTERS);
{
boost::mutex::scoped_lock lm (_mutex);
}
}
+
string
FFmpegContent::identifier () const
{
return s;
}
+
void
FFmpegContent::set_default_colour_conversion ()
{
}
}
+
void
FFmpegContent::add_properties (shared_ptr<const Film> film, list<UserProperty>& p) const
{
}
}
+
/** Our subtitle streams have colour maps, which can be changed, but
* they have no way of signalling that change. As a hack, we have this
* method which callers can use when they've modified one of our subtitle
FFmpegContent::signal_subtitle_stream_changed ()
{
/* XXX: this is too late; really it should be before the change */
- ChangeSignaller<Content> cc (this, FFmpegContentProperty::SUBTITLE_STREAM);
+ ContentChangeSignaller cc (this, FFmpegContentProperty::SUBTITLE_STREAM);
}
-vector<shared_ptr<FFmpegAudioStream> >
+
+vector<shared_ptr<FFmpegAudioStream>>
FFmpegContent::ffmpeg_audio_streams () const
{
- vector<shared_ptr<FFmpegAudioStream> > fa;
+ vector<shared_ptr<FFmpegAudioStream>> fa;
if (audio) {
for (auto i: audio->streams()) {
- fa.push_back (dynamic_pointer_cast<FFmpegAudioStream> (i));
+ fa.push_back (dynamic_pointer_cast<FFmpegAudioStream>(i));
}
}
return fa;
}
+
void
FFmpegContent::take_settings_from (shared_ptr<const Content> c)
{