Basic grunt-work, untested and unfinished, but it compiles.
[dcpomatic.git] / src / lib / ffmpeg_content.cc
/*
    Copyright (C) 2013-2016 Carl Hetherington <cth@carlh.net>

    This file is part of DCP-o-matic.

    DCP-o-matic is free software; you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation; either version 2 of the License, or
    (at your option) any later version.

    DCP-o-matic is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with DCP-o-matic.  If not, see <http://www.gnu.org/licenses/>.

*/

#include "ffmpeg_content.h"
#include "video_content.h"
#include "audio_content.h"
#include "ffmpeg_examiner.h"
#include "ffmpeg_subtitle_stream.h"
#include "ffmpeg_audio_stream.h"
#include "compose.hpp"
#include "job.h"
#include "util.h"
#include "filter.h"
#include "film.h"
#include "log.h"
#include "exceptions.h"
#include "frame_rate_change.h"
#include "subtitle_content.h"
#include <dcp/raw_convert.h>
#include <libcxml/cxml.h>
extern "C" {
#include <libavformat/avformat.h>
#include <libavutil/pixdesc.h>
}
#include <libxml++/libxml++.h>
#include <boost/foreach.hpp>
#include <iostream>

#include "i18n.h"

#define LOG_GENERAL(...) film->log()->log (String::compose (__VA_ARGS__), LogEntry::TYPE_GENERAL);

using std::string;
using std::vector;
using std::list;
using std::cout;
using std::pair;
using std::make_pair;
using std::max;
using boost::shared_ptr;
using boost::dynamic_pointer_cast;
using boost::optional;
using dcp::raw_convert;

int const FFmpegContentProperty::SUBTITLE_STREAMS = 100;
int const FFmpegContentProperty::SUBTITLE_STREAM = 101;
int const FFmpegContentProperty::FILTERS = 102;

FFmpegContent::FFmpegContent (shared_ptr<const Film> film, boost::filesystem::path p)
	: Content (film, p)
{

}

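/** Construct an FFmpegContent from an XML description written by as_xml().
 *  @param node <Content> node to read from.
 *  @param version Version of the metadata format being read.
 *  @param notes List to which notes for the user (e.g. about filters that are
 *  no longer supported) will be added.
 */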
FFmpegContent::FFmpegContent (shared_ptr<const Film> film, cxml::ConstNodePtr node, int version, list<string>& notes)
	: Content (film, node)
{
	video = VideoContent::from_xml (this, node, version);
	audio = AudioContent::from_xml (this, node, version);
	subtitle = SubtitleContent::from_xml (this, node, version);

	list<cxml::NodePtr> c = node->node_children ("SubtitleStream");
	for (list<cxml::NodePtr>::const_iterator i = c.begin(); i != c.end(); ++i) {
		_subtitle_streams.push_back (shared_ptr<FFmpegSubtitleStream> (new FFmpegSubtitleStream (*i, version)));
		if ((*i)->optional_number_child<int> ("Selected")) {
			_subtitle_stream = _subtitle_streams.back ();
		}
	}

	c = node->node_children ("AudioStream");
	for (list<cxml::NodePtr>::const_iterator i = c.begin(); i != c.end(); ++i) {
		shared_ptr<FFmpegAudioStream> as (new FFmpegAudioStream (*i, version));
		audio->add_stream (as);
		if (version < 11 && !(*i)->optional_node_child ("Selected")) {
			/* This is an old file and this stream is not selected, so un-map it */
			as->set_mapping (AudioMapping (as->channels (), MAX_DCP_AUDIO_CHANNELS));
		}
	}

	c = node->node_children ("Filter");
	for (list<cxml::NodePtr>::iterator i = c.begin(); i != c.end(); ++i) {
		Filter const * f = Filter::from_id ((*i)->content ());
		if (f) {
			_filters.push_back (f);
		} else {
			notes.push_back (String::compose (_("DCP-o-matic no longer supports the `%1' filter, so it has been turned off."), (*i)->content()));
		}
	}

	optional<ContentTime::Type> const f = node->optional_number_child<ContentTime::Type> ("FirstVideo");
	if (f) {
		_first_video = ContentTime (f.get ());
	}

	_color_range = static_cast<AVColorRange> (node->optional_number_child<int>("ColorRange").get_value_or (AVCOL_RANGE_UNSPECIFIED));
	_color_primaries = static_cast<AVColorPrimaries> (node->optional_number_child<int>("ColorPrimaries").get_value_or (AVCOL_PRI_UNSPECIFIED));
	_color_trc = static_cast<AVColorTransferCharacteristic> (
		node->optional_number_child<int>("ColorTransferCharacteristic").get_value_or (AVCOL_TRC_UNSPECIFIED)
		);
	_colorspace = static_cast<AVColorSpace> (node->optional_number_child<int>("Colorspace").get_value_or (AVCOL_SPC_UNSPECIFIED));
	_bits_per_pixel = node->optional_number_child<int> ("BitsPerPixel");

}

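/** Construct an FFmpegContent by joining several other pieces of content.
 *  The pieces must consistently have (or not have) video, audio and subtitles,
 *  and any piece which uses subtitles must use the same subtitle stream as the
 *  first; filters and colour metadata are taken from the first piece.
 */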
FFmpegContent::FFmpegContent (shared_ptr<const Film> film, vector<shared_ptr<Content> > c)
	: Content (film, c)
{
	vector<shared_ptr<Content> >::const_iterator i = c.begin ();

	bool need_video = false;
	bool need_audio = false;
	bool need_subtitle = false;

	if (i != c.end ()) {
		need_video = static_cast<bool> ((*i)->video);
		need_audio = static_cast<bool> ((*i)->audio);
		need_subtitle = static_cast<bool> ((*i)->subtitle);
	}

	while (i != c.end ()) {
		if (need_video != static_cast<bool> ((*i)->video)) {
			throw JoinError (_("Content to be joined must all have or not have video"));
		}
		if (need_audio != static_cast<bool> ((*i)->audio)) {
			throw JoinError (_("Content to be joined must all have or not have audio"));
		}
		if (need_subtitle != static_cast<bool> ((*i)->subtitle)) {
			throw JoinError (_("Content to be joined must all have or not have subtitles"));
		}
		++i;
	}

	if (need_video) {
		video.reset (new VideoContent (this, c));
	}
	if (need_audio) {
		audio.reset (new AudioContent (this, c));
	}
	if (need_subtitle) {
		subtitle.reset (new SubtitleContent (this, c));
	}

	shared_ptr<FFmpegContent> ref = dynamic_pointer_cast<FFmpegContent> (c[0]);
	DCPOMATIC_ASSERT (ref);

	for (size_t i = 0; i < c.size(); ++i) {
		shared_ptr<FFmpegContent> fc = dynamic_pointer_cast<FFmpegContent> (c[i]);
		if (fc->subtitle && fc->subtitle->use() && *(fc->_subtitle_stream.get()) != *(ref->_subtitle_stream.get())) {
			throw JoinError (_("Content to be joined must use the same subtitle stream."));
		}
	}

	/* XXX: should probably check that more of the stuff below is the same in *this and ref */

	_subtitle_streams = ref->subtitle_streams ();
	_subtitle_stream = ref->subtitle_stream ();
	_first_video = ref->_first_video;
	_filters = ref->_filters;
	_color_range = ref->_color_range;
	_color_primaries = ref->_color_primaries;
	_color_trc = ref->_color_trc;
	_colorspace = ref->_colorspace;
	_bits_per_pixel = ref->_bits_per_pixel;
}

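/** Write an XML description of this content, in the form read back by the
 *  deserializing constructor above.
 *  @param node Node to write to.
 *  @param with_paths Passed to Content::as_xml(); true to include content file paths.
 */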
void
FFmpegContent::as_xml (xmlpp::Node* node, bool with_paths) const
{
	node->add_child("Type")->add_child_text ("FFmpeg");
	Content::as_xml (node, with_paths);

	if (video) {
		video->as_xml (node);
	}

	if (audio) {
		audio->as_xml (node);

		BOOST_FOREACH (AudioStreamPtr i, audio->streams ()) {
			shared_ptr<FFmpegAudioStream> f = dynamic_pointer_cast<FFmpegAudioStream> (i);
			DCPOMATIC_ASSERT (f);
			f->as_xml (node->add_child("AudioStream"));
		}
	}

	if (subtitle) {
		subtitle->as_xml (node);
	}

	boost::mutex::scoped_lock lm (_mutex);

	for (vector<shared_ptr<FFmpegSubtitleStream> >::const_iterator i = _subtitle_streams.begin(); i != _subtitle_streams.end(); ++i) {
		xmlpp::Node* t = node->add_child("SubtitleStream");
		if (_subtitle_stream && *i == _subtitle_stream) {
			t->add_child("Selected")->add_child_text("1");
		}
		(*i)->as_xml (t);
	}

	for (vector<Filter const *>::const_iterator i = _filters.begin(); i != _filters.end(); ++i) {
		node->add_child("Filter")->add_child_text ((*i)->id ());
	}

	if (_first_video) {
		node->add_child("FirstVideo")->add_child_text (raw_convert<string> (_first_video.get().get()));
	}

	node->add_child("ColorRange")->add_child_text (raw_convert<string> (static_cast<int> (_color_range)));
	node->add_child("ColorPrimaries")->add_child_text (raw_convert<string> (static_cast<int> (_color_primaries)));
	node->add_child("ColorTransferCharacteristic")->add_child_text (raw_convert<string> (static_cast<int> (_color_trc)));
	node->add_child("Colorspace")->add_child_text (raw_convert<string> (static_cast<int> (_colorspace)));
	if (_bits_per_pixel) {
		node->add_child("BitsPerPixel")->add_child_text (raw_convert<string> (_bits_per_pixel.get ()));
	}
}

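/** Examine the content with FFmpegExaminer and fill in our video, audio and
 *  subtitle details, along with the colour metadata of the video stream.
 *  @param job Job that this examination is part of; its progress is set to `unknown'.
 */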
void
FFmpegContent::examine (shared_ptr<Job> job)
{
	job->set_progress_unknown ();

	Content::examine (job);

	shared_ptr<FFmpegExaminer> examiner (new FFmpegExaminer (shared_from_this (), job));

	if (examiner->has_video ()) {
		video.reset (new VideoContent (this));
		video->take_from_examiner (examiner);
		set_default_colour_conversion ();
	}

	boost::filesystem::path first_path = path (0);

	{
		boost::mutex::scoped_lock lm (_mutex);

		if (examiner->has_video ()) {
			_first_video = examiner->first_video ();
			_color_range = examiner->color_range ();
			_color_primaries = examiner->color_primaries ();
			_color_trc = examiner->color_trc ();
			_colorspace = examiner->colorspace ();
			_bits_per_pixel = examiner->bits_per_pixel ();
		}

		if (!examiner->audio_streams().empty ()) {
			audio.reset (new AudioContent (this));

			BOOST_FOREACH (shared_ptr<FFmpegAudioStream> i, examiner->audio_streams ()) {
				audio->add_stream (i);
			}

			AudioStreamPtr as = audio->streams().front();
			AudioMapping m = as->mapping ();
			film()->make_audio_mapping_default (m, first_path);
			as->set_mapping (m);
		}

		_subtitle_streams = examiner->subtitle_streams ();
		if (!_subtitle_streams.empty ()) {
			subtitle.reset (new SubtitleContent (this));
			_subtitle_stream = _subtitle_streams.front ();
		}

	}

	signal_changed (FFmpegContentProperty::SUBTITLE_STREAMS);
	signal_changed (FFmpegContentProperty::SUBTITLE_STREAM);
}

string
FFmpegContent::summary () const
{
	if (video && audio) {
		return String::compose (_("%1 [movie]"), path_summary ());
	} else if (video) {
		return String::compose (_("%1 [video]"), path_summary ());
	} else if (audio) {
		return String::compose (_("%1 [audio]"), path_summary ());
	}

	return path_summary ();
}

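/** @return a technical summary: the base Content summary plus details of the
 *  audio streams, the selected subtitle stream and any video filters in use.
 */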
string
FFmpegContent::technical_summary () const
{
	string as = "";
	BOOST_FOREACH (shared_ptr<FFmpegAudioStream> i, ffmpeg_audio_streams ()) {
		as += i->technical_summary () + " " ;
	}

	if (as.empty ()) {
		as = "none";
	}

	string ss = "none";
	if (_subtitle_stream) {
		ss = _subtitle_stream->technical_summary ();
	}

	string filt = Filter::ffmpeg_string (_filters);

	string s = Content::technical_summary ();

	if (video) {
		s += " - " + video->technical_summary ();
	}

	if (audio) {
		s += " - " + audio->technical_summary ();
	}

	return s + String::compose (
		"ffmpeg: audio %1 subtitle %2 filters %3", as, ss, filt
		);
}

void
FFmpegContent::set_subtitle_stream (shared_ptr<FFmpegSubtitleStream> s)
{
	{
		boost::mutex::scoped_lock lm (_mutex);
		_subtitle_stream = s;
	}

	signal_changed (FFmpegContentProperty::SUBTITLE_STREAM);
}

bool
operator== (FFmpegStream const & a, FFmpegStream const & b)
{
	return a._id == b._id;
}

bool
operator!= (FFmpegStream const & a, FFmpegStream const & b)
{
	return a._id != b._id;
}

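/** @return the full length of this content in the DCP: the video length scaled
 *  by the frame-rate-change factor if there is video, otherwise the length of
 *  the longest audio stream after any speed-up.
 */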
DCPTime
FFmpegContent::full_length () const
{
	FrameRateChange const frc (active_video_frame_rate (), film()->video_frame_rate ());
	if (video) {
		return DCPTime::from_frames (llrint (video->length_after_3d_combine() * frc.factor()), film()->video_frame_rate());
	}

	DCPOMATIC_ASSERT (audio);

	DCPTime longest;
	BOOST_FOREACH (AudioStreamPtr i, audio->streams ()) {
		longest = max (longest, DCPTime::from_frames (llrint (i->length() / frc.speed_up), i->frame_rate()));
	}

	return longest;
}

void
FFmpegContent::set_filters (vector<Filter const *> const & filters)
{
	{
		boost::mutex::scoped_lock lm (_mutex);
		_filters = filters;
	}

	signal_changed (FFmpegContentProperty::FILTERS);
}

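/** @return an identifier built from the base Content identifier plus the video,
 *  subtitle, subtitle stream and filter settings.
 */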
string
FFmpegContent::identifier () const
{
	string s = Content::identifier();

	if (video) {
		s += "_" + video->identifier();
	}

	if (subtitle) {
		s += "_" + subtitle->identifier();
	}

	boost::mutex::scoped_lock lm (_mutex);

	if (_subtitle_stream) {
		s += "_" + _subtitle_stream->identifier ();
	}

	for (vector<Filter const *>::const_iterator i = _filters.begin(); i != _filters.end(); ++i) {
		s += "_" + (*i)->id ();
	}

	return s;
}

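/** Set a sensible default colour conversion for the video: Rec. 601 for material
 *  narrower than 1080 pixels, Rec. 709 otherwise.
 */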
void
FFmpegContent::set_default_colour_conversion ()
{
	DCPOMATIC_ASSERT (video);

	dcp::Size const s = video->size ();

	boost::mutex::scoped_lock lm (_mutex);

	if (s.width < 1080) {
		video->set_colour_conversion (PresetColourConversion::from_id ("rec601").conversion);
	} else {
		video->set_colour_conversion (PresetColourConversion::from_id ("rec709").conversion);
	}
}

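/** Add user-visible properties (colour range, primaries, transfer characteristic,
 *  colourspace and bits per pixel) describing this content.  The string tables
 *  below are indexed by the corresponding FFmpeg enum values, so the asserts
 *  guard against new enum entries appearing without a matching name.
 *  @param p List of properties to add to.
 */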
void
FFmpegContent::add_properties (list<UserProperty>& p) const
{
	Content::add_properties (p);

	if (video) {
		video->add_properties (p);

		if (_bits_per_pixel) {
			int const sub = 219 * pow (2, _bits_per_pixel.get() - 8);
			int const total = pow (2, _bits_per_pixel.get());

			switch (_color_range) {
			case AVCOL_RANGE_UNSPECIFIED:
				/// TRANSLATORS: this means that the range of pixel values used in this
				/// file is unknown (not specified in the file).
				p.push_back (UserProperty (UserProperty::VIDEO, _("Colour range"), _("Unspecified")));
				break;
			case AVCOL_RANGE_MPEG:
				/// TRANSLATORS: this means that the range of pixel values used in this
				/// file is limited, so that not all possible values are valid.
				p.push_back (
					UserProperty (
						UserProperty::VIDEO, _("Colour range"), String::compose (_("Limited (%1-%2)"), (total - sub) / 2, (total + sub) / 2)
						)
					);
				break;
			case AVCOL_RANGE_JPEG:
				/// TRANSLATORS: this means that the range of pixel values used in this
				/// file is full, so that all possible pixel values are valid.
				p.push_back (UserProperty (UserProperty::VIDEO, _("Colour range"), String::compose (_("Full (0-%1)"), total)));
				break;
			default:
				DCPOMATIC_ASSERT (false);
			}
		} else {
			switch (_color_range) {
			case AVCOL_RANGE_UNSPECIFIED:
				/// TRANSLATORS: this means that the range of pixel values used in this
				/// file is unknown (not specified in the file).
				p.push_back (UserProperty (UserProperty::VIDEO, _("Colour range"), _("Unspecified")));
				break;
			case AVCOL_RANGE_MPEG:
				/// TRANSLATORS: this means that the range of pixel values used in this
				/// file is limited, so that not all possible values are valid.
				p.push_back (UserProperty (UserProperty::VIDEO, _("Colour range"), _("Limited")));
				break;
			case AVCOL_RANGE_JPEG:
				/// TRANSLATORS: this means that the range of pixel values used in this
				/// file is full, so that all possible pixel values are valid.
				p.push_back (UserProperty (UserProperty::VIDEO, _("Colour range"), _("Full")));
				break;
			default:
				DCPOMATIC_ASSERT (false);
			}
		}

		char const * primaries[] = {
			_("Unspecified"),
			_("BT709"),
			_("Unspecified"),
			_("Unspecified"),
			_("BT470M"),
			_("BT470BG"),
			_("SMPTE 170M (BT601)"),
			_("SMPTE 240M"),
			_("Film"),
			_("BT2020"),
			_("SMPTE ST 428-1 (CIE 1931 XYZ)"),
			_("SMPTE ST 431-2 (2011)"),
			_("SMPTE ST 432-1 D65 (2010)")
		};

		DCPOMATIC_ASSERT (AVCOL_PRI_NB <= 13);
		p.push_back (UserProperty (UserProperty::VIDEO, _("Colour primaries"), primaries[_color_primaries]));

		char const * transfers[] = {
			_("Unspecified"),
			_("BT709"),
			_("Unspecified"),
			_("Unspecified"),
			_("Gamma 22 (BT470M)"),
			_("Gamma 28 (BT470BG)"),
			_("SMPTE 170M (BT601)"),
			_("SMPTE 240M"),
			_("Linear"),
			_("Logarithmic (100:1 range)"),
			_("Logarithmic (316:1 range)"),
			_("IEC61966-2-4"),
			_("BT1361 extended colour gamut"),
			_("IEC61966-2-1 (sRGB or sYCC)"),
			_("BT2020 for a 10-bit system"),
			_("BT2020 for a 12-bit system"),
			_("SMPTE ST 2084 for 10, 12, 14 and 16 bit systems"),
			_("SMPTE ST 428-1"),
			_("ARIB STD-B67 ('Hybrid log-gamma')")
		};

		DCPOMATIC_ASSERT (AVCOL_TRC_NB <= 19);
		p.push_back (UserProperty (UserProperty::VIDEO, _("Colour transfer characteristic"), transfers[_color_trc]));

		char const * spaces[] = {
			_("RGB / sRGB (IEC61966-2-1)"),
			_("BT709"),
			_("Unspecified"),
			_("Unspecified"),
			_("FCC"),
			_("BT470BG (BT601-6)"),
			_("SMPTE 170M (BT601-6)"),
			_("SMPTE 240M"),
			_("YCOCG"),
			_("BT2020 non-constant luminance"),
			_("BT2020 constant luminance"),
			_("SMPTE 2085, Y'D'zD'x"),
		};

		DCPOMATIC_ASSERT (AVCOL_SPC_NB == 12);
		p.push_back (UserProperty (UserProperty::VIDEO, _("Colourspace"), spaces[_colorspace]));

		if (_bits_per_pixel) {
			p.push_back (UserProperty (UserProperty::VIDEO, _("Bits per pixel"), _bits_per_pixel.get ()));
		}
	}

	if (audio) {
		audio->add_properties (p);
	}
}

/** Our subtitle streams have colour maps, which can be changed, but
 *  they have no way of signalling that change.  As a hack, we have this
 *  method which callers can use when they've modified one of our subtitle
 *  streams.
 */
void
FFmpegContent::signal_subtitle_stream_changed ()
{
	signal_changed (FFmpegContentProperty::SUBTITLE_STREAM);
}

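/** @return our audio streams, downcast to FFmpegAudioStream. */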
vector<shared_ptr<FFmpegAudioStream> >
FFmpegContent::ffmpeg_audio_streams () const
{
	vector<shared_ptr<FFmpegAudioStream> > fa;

	if (audio) {
		BOOST_FOREACH (AudioStreamPtr i, audio->streams()) {
			fa.push_back (dynamic_pointer_cast<FFmpegAudioStream> (i));
		}
	}

	return fa;
}

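/** Take settings from a piece of content which is being used as a template:
 *  the base Content settings plus our filters.
 */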
void
FFmpegContent::use_template (shared_ptr<const Content> c)
{
	Content::use_template (c);

	shared_ptr<const FFmpegContent> fc = dynamic_pointer_cast<const FFmpegContent> (c);
	_filters = fc->_filters;
}