X-Git-Url: https://git.carlh.net/gitweb/?a=blobdiff_plain;f=src%2Flib%2Freel_writer.cc;h=7a2c9c67087b68d3b500817de039303042ca293b;hb=7ec6c86c913fba820870565ee757fdf43ae47433;hp=136d2405e3aabff47f266b27409a751b5babdc67;hpb=f5fd225decd41e5716fcc5aaf972c3b05d114e0d;p=dcpomatic.git diff --git a/src/lib/reel_writer.cc b/src/lib/reel_writer.cc index 136d2405e..7a2c9c670 100644 --- a/src/lib/reel_writer.cc +++ b/src/lib/reel_writer.cc @@ -18,64 +18,70 @@ */ -#include "reel_writer.h" -#include "film.h" + +#include "audio_buffers.h" +#include "compose.hpp" +#include "config.h" #include "cross.h" -#include "job.h" -#include "log.h" #include "dcpomatic_log.h" #include "digester.h" +#include "film.h" #include "font_data.h" -#include "compose.hpp" -#include "config.h" -#include "audio_buffers.h" #include "image.h" +#include "image_png.h" +#include "job.h" +#include "log.h" +#include "reel_writer.h" #include #include +#include +#include +#include +#include #include -#include -#include -#include +#include #include #include +#include +#include +#include #include -#include +#include +#include #include -#include -#include -#include -#include -#include -#include -#include +#include #include -#include +#include +#include +#include #include #include "i18n.h" -using std::list; -using std::string; -using std::cout; + +using std::dynamic_pointer_cast; using std::exception; +using std::list; +using std::make_shared; using std::map; using std::set; -using std::vector; using std::shared_ptr; -using std::make_shared; +using std::string; +using std::vector; +using std::weak_ptr; using boost::optional; -using std::dynamic_pointer_cast; #if BOOST_VERSION >= 106100 using namespace boost::placeholders; #endif -using std::weak_ptr; using dcp::ArrayData; using dcp::Data; using dcp::raw_convert; using namespace dcpomatic; + int const ReelWriter::_info_size = 48; + static dcp::MXFMetadata mxf_metadata () { @@ -93,6 +99,7 @@ mxf_metadata () return meta; } + /** @param job Related job, or 0. * @param text_only true to enable a special mode where the writer will expect only subtitles and closed captions to be written * (no picture nor sound) and not give errors in that case. This is used by the hints system to check the potential sizes of @@ -166,8 +173,13 @@ ReelWriter::ReelWriter ( } if (film()->audio_channels()) { + auto lang = film()->audio_language(); _sound_asset = make_shared ( - dcp::Fraction(film()->video_frame_rate(), 1), film()->audio_frame_rate(), film()->audio_channels(), film()->audio_language(), standard + dcp::Fraction(film()->video_frame_rate(), 1), + film()->audio_frame_rate(), + film()->audio_channels(), + lang ? *lang : dcp::LanguageTag("en-US"), + standard ); _sound_asset->set_metadata (mxf_metadata()); @@ -178,17 +190,11 @@ ReelWriter::ReelWriter ( DCPOMATIC_ASSERT (film()->directory()); - vector active; - for (auto i: film()->mapped_audio_channels()) { - active.push_back (static_cast(i)); - } - /* Write the sound asset into the film directory so that we leave the creation of the DCP directory until the last minute. 
*/ _sound_asset_writer = _sound_asset->start_write ( film()->directory().get() / audio_asset_filename (_sound_asset, _reel_index, _reel_count, _content_summary), - active, film()->contains_atmos_content() ); } @@ -196,6 +202,7 @@ ReelWriter::ReelWriter ( _default_font = dcp::ArrayData(default_font_file()); } + /** @param frame reel-relative frame */ void ReelWriter::write_frame_info (Frame frame, Eyes eyes, dcp::FrameInfo info) const @@ -207,6 +214,7 @@ ReelWriter::write_frame_info (Frame frame, Eyes eyes, dcp::FrameInfo info) const checked_fwrite (info.hash.c_str(), info.hash.size(), handle->get(), handle->file()); } + dcp::FrameInfo ReelWriter::read_frame_info (shared_ptr info, Frame frame, Eyes eyes) const { @@ -223,6 +231,7 @@ ReelWriter::read_frame_info (shared_ptr info, Frame frame, Eyes return frame_info; } + long ReelWriter::frame_info_position (Frame frame, Eyes eyes) const { @@ -240,6 +249,7 @@ ReelWriter::frame_info_position (Frame frame, Eyes eyes) const DCPOMATIC_ASSERT (false); } + Frame ReelWriter::check_existing_picture_asset (boost::filesystem::path asset) { @@ -298,6 +308,7 @@ ReelWriter::check_existing_picture_asset (boost::filesystem::path asset) return first_nonexistant_frame; } + void ReelWriter::write (shared_ptr encoded, Frame frame, Eyes eyes) { @@ -339,6 +350,7 @@ ReelWriter::fake_write (int size) _picture_asset_writer->fake_write (size); } + void ReelWriter::repeat_write (Frame frame, Eyes eyes) { @@ -354,6 +366,7 @@ ReelWriter::repeat_write (Frame frame, Eyes eyes) write_frame_info (frame, eyes, fin); } + void ReelWriter::finish (boost::filesystem::path output_dcp) { @@ -439,8 +452,12 @@ ReelWriter::finish (boost::filesystem::path output_dcp) } } -template -shared_ptr + +/** Try to make a ReelAsset for a subtitles or closed captions in a given period in the DCP. + * A SubtitleAsset can be provided, or we will use one from @ref refs if not. + */ +template +shared_ptr maybe_add_text ( shared_ptr asset, int64_t picture_duration, @@ -456,7 +473,7 @@ maybe_add_text ( { Frame const period_duration = period.duration().frames_round(film->video_frame_rate()); - shared_ptr reel_asset; + shared_ptr reel_asset; if (asset) { /* Add the font to the subtitle content */ @@ -464,33 +481,38 @@ maybe_add_text ( asset->add_font (j.id, j.data.get_value_or(default_font)); } - if (dynamic_pointer_cast (asset)) { - auto directory = output_dcp / asset->id (); + if (auto interop = dynamic_pointer_cast(asset)) { + auto directory = output_dcp / interop->id (); boost::filesystem::create_directories (directory); - asset->write (directory / ("sub_" + asset->id() + ".xml")); - } else { + interop->write (directory / ("sub_" + interop->id() + ".xml")); + reel_asset = make_shared ( + interop, + dcp::Fraction(film->video_frame_rate(), 1), + picture_duration, + 0 + ); + } else if (auto smpte = dynamic_pointer_cast(asset)) { /* All our assets should be the same length; use the picture asset length here as a reference to set the subtitle one. We'll use the duration rather than the intrinsic duration; we don't care if the picture asset has been trimmed, we're just interested in its presentation length. 
*/ - dynamic_pointer_cast(asset)->set_intrinsic_duration (picture_duration); - - asset->write ( + smpte->set_intrinsic_duration(picture_duration); + smpte->write ( output_dcp / ("sub_" + asset->id() + ".mxf") ); + reel_asset = make_shared ( + smpte, + dcp::Fraction(film->video_frame_rate(), 1), + picture_duration, + 0 + ); } - reel_asset = make_shared ( - asset, - dcp::Fraction(film->video_frame_rate(), 1), - picture_duration, - 0 - ); } else { /* We don't have a subtitle asset of our own; hopefully we have one to reference */ for (auto j: refs) { - auto k = dynamic_pointer_cast (j.asset); + auto k = dynamic_pointer_cast (j.asset); if (k && j.period == period) { reel_asset = k; /* If we have a hash for this asset in the CPL, assume that it is correct */ @@ -623,19 +645,19 @@ ReelWriter::create_reel_text ( set ensure_closed_captions ) const { - auto subtitle = maybe_add_text ( + auto subtitle = maybe_add_text ( _subtitle_asset, duration, reel, refs, fonts, _default_font, film(), _period, output_dcp, _text_only ); if (subtitle) { /* We have a subtitle asset that we either made or are referencing */ - if (!film()->subtitle_languages().empty()) { - subtitle->set_language (film()->subtitle_languages().front()); + if (auto main_language = film()->subtitle_languages().first) { + subtitle->set_language (*main_language); } } else if (ensure_subtitles) { /* We had no subtitle asset, but we've been asked to make sure there is one */ - subtitle = maybe_add_text( - empty_text_asset(TextType::OPEN_SUBTITLE, optional()), + subtitle = maybe_add_text ( + empty_text_asset(TextType::OPEN_SUBTITLE, optional(), true), duration, reel, refs, @@ -649,13 +671,13 @@ ReelWriter::create_reel_text ( } for (auto const& i: _closed_caption_assets) { - auto a = maybe_add_text ( + auto a = maybe_add_text ( i.second, duration, reel, refs, fonts, _default_font, film(), _period, output_dcp, _text_only ); DCPOMATIC_ASSERT (a); a->set_annotation_text (i.first.name); - if (!i.first.language.empty()) { - a->set_language (dcp::LanguageTag(i.first.language)); + if (i.first.language) { + a->set_language (i.first.language.get()); } ensure_closed_captions.erase (i.first); @@ -663,19 +685,18 @@ ReelWriter::create_reel_text ( /* Make empty tracks for anything we've been asked to ensure but that we haven't added */ for (auto i: ensure_closed_captions) { - auto a = maybe_add_text ( - empty_text_asset(TextType::CLOSED_CAPTION, i), duration, reel, refs, fonts, _default_font, film(), _period, output_dcp, _text_only + auto a = maybe_add_text ( + empty_text_asset(TextType::CLOSED_CAPTION, i, true), duration, reel, refs, fonts, _default_font, film(), _period, output_dcp, _text_only ); DCPOMATIC_ASSERT (a); a->set_annotation_text (i.name); - if (!i.language.empty()) { - a->set_language (dcp::LanguageTag(i.language)); + if (i.language) { + a->set_language (i.language.get()); } } } - void ReelWriter::create_reel_markers (shared_ptr reel) const { @@ -689,12 +710,11 @@ ReelWriter::create_reel_markers (shared_ptr reel) const } if (!reel_markers.empty ()) { - auto ma = make_shared(dcp::Fraction(film()->video_frame_rate(), 1), reel->duration(), 0); + auto ma = make_shared(dcp::Fraction(film()->video_frame_rate(), 1), reel->duration()); for (auto const& i: reel_markers) { - int h, m, s, f; DCPTime relative = i.second - _period.from; - relative.split (film()->video_frame_rate(), h, m, s, f); - ma->set (i.first, dcp::Time(h, m, s, f, film()->video_frame_rate())); + auto hmsf = relative.split (film()->video_frame_rate()); + ma->set (i.first, 
dcp::Time(hmsf.h, hmsf.m, hmsf.s, hmsf.f, film()->video_frame_rate())); } reel->add (ma); } @@ -739,7 +759,8 @@ ReelWriter::create_reel ( } void -ReelWriter::calculate_digests (boost::function set_progress) +ReelWriter::calculate_digests (std::function set_progress) +try { if (_picture_asset) { _picture_asset->hash (set_progress); @@ -752,8 +773,13 @@ ReelWriter::calculate_digests (boost::function set_progress) if (_atmos_asset) { _atmos_asset->hash (set_progress); } +} catch (boost::thread_interrupted) { + /* set_progress contains an interruption_point, so any of these methods + * may throw thread_interrupted, at which point we just give up. + */ } + Frame ReelWriter::start () const { @@ -774,7 +800,7 @@ ReelWriter::write (shared_ptr audio) shared_ptr -ReelWriter::empty_text_asset (TextType type, optional track) const +ReelWriter::empty_text_asset (TextType type, optional track, bool with_dummy) const { shared_ptr asset; @@ -783,9 +809,9 @@ ReelWriter::empty_text_asset (TextType type, optional track) const auto s = make_shared(); s->set_movie_title (film()->name()); if (type == TextType::OPEN_SUBTITLE) { - s->set_language (lang.empty() ? "Unknown" : lang.front().to_string()); - } else if (!track->language.empty()) { - s->set_language (track->language); + s->set_language (lang.first ? lang.first->to_string() : "Unknown"); + } else if (track->language) { + s->set_language (track->language->to_string()); } s->set_reel_number (raw_convert (_reel_index + 1)); asset = s; @@ -793,10 +819,10 @@ ReelWriter::empty_text_asset (TextType type, optional track) const auto s = make_shared(); s->set_content_title_text (film()->name()); s->set_metadata (mxf_metadata()); - if (type == TextType::OPEN_SUBTITLE && !lang.empty()) { - s->set_language (lang.front()); - } else if (track && !track->language.empty()) { - s->set_language (dcp::LanguageTag(track->language)); + if (type == TextType::OPEN_SUBTITLE && lang.first) { + s->set_language (*lang.first); + } else if (track && track->language) { + s->set_language (dcp::LanguageTag(track->language->to_string())); } s->set_edit_rate (dcp::Fraction (film()->video_frame_rate(), 1)); s->set_reel_number (_reel_index + 1); @@ -805,29 +831,32 @@ ReelWriter::empty_text_asset (TextType type, optional track) const if (film()->encrypted()) { s->set_key (film()->key()); } - s->add ( - std::make_shared( - optional(), - false, - false, - false, - dcp::Colour(), - 42, - 1.0, - dcp::Time(0, 0, 0, 0, 24), - dcp::Time(0, 0, 1, 0, 24), - 0.5, - dcp::HAlign::CENTER, - 0.5, - dcp::VAlign::CENTER, - dcp::Direction::LTR, - "", - dcp::Effect::NONE, - dcp::Colour(), - dcp::Time(), - dcp::Time() - ) - ); + if (with_dummy) { + s->add ( + std::make_shared( + optional(), + false, + false, + false, + dcp::Colour(), + 42, + 1.0, + dcp::Time(0, 0, 0, 0, 24), + dcp::Time(0, 0, 1, 0, 24), + 0.5, + dcp::HAlign::CENTER, + 0.5, + dcp::VAlign::CENTER, + dcp::Direction::LTR, + " ", + dcp::Effect::NONE, + dcp::Colour(), + dcp::Time(), + dcp::Time(), + 0 + ) + ); + } asset = s; } @@ -853,7 +882,7 @@ ReelWriter::write (PlayerText subs, TextType type, optional track, } if (!asset) { - asset = empty_text_asset (type, track); + asset = empty_text_asset (type, track, false); } switch (type) { @@ -868,19 +897,21 @@ ReelWriter::write (PlayerText subs, TextType type, optional track, DCPOMATIC_ASSERT (false); } + /* timecode rate for subtitles we emit; we might as well stick to ms accuracy here, I think */ + auto const tcr = 1000; + for (auto i: subs.string) { - /* XXX: couldn't / shouldn't we use period here 
rather than getting time from the subtitle? */
-		i.set_in (i.in() - dcp::Time (_period.from.seconds(), i.in().tcr));
-		i.set_out (i.out() - dcp::Time (_period.from.seconds(), i.out().tcr));
-		asset->add (shared_ptr<dcp::Subtitle>(new dcp::SubtitleString(i)));
+		i.set_in (dcp::Time(period.from.seconds() - _period.from.seconds(), tcr));
+		i.set_out (dcp::Time(period.to.seconds() - _period.from.seconds(), tcr));
+		asset->add (make_shared<dcp::SubtitleString>(i));
 	}
 
 	for (auto i: subs.bitmap) {
 		asset->add (
 			make_shared<dcp::SubtitleImage>(
-				i.image->as_png(),
-				dcp::Time(period.from.seconds() - _period.from.seconds(), film()->video_frame_rate()),
-				dcp::Time(period.to.seconds() - _period.from.seconds(), film()->video_frame_rate()),
+				image_as_png(i.image),
+				dcp::Time(period.from.seconds() - _period.from.seconds(), tcr),
+				dcp::Time(period.to.seconds() - _period.from.seconds(), tcr),
 				i.rectangle.x, dcp::HAlign::LEFT, i.rectangle.y, dcp::VAlign::TOP,
 				dcp::Time(), dcp::Time()
 			)
 		);
@@ -888,6 +919,7 @@ ReelWriter::write (PlayerText subs, TextType type, optional<DCPTextTrack> track,
 	}
 }
 
+
 bool
 ReelWriter::existing_picture_frame_ok (FILE* asset_file, shared_ptr<InfoFileHandle> info_file, Frame frame) const
 {
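
Note on the final hunk: subtitle and caption timings are now taken from the playback period passed to write(), made relative to the reel's own _period, and expressed against a fixed editable rate of 1000 units per second (millisecond accuracy), instead of being derived from each subtitle's embedded timecode. The standalone sketch below illustrates only that arithmetic; ReelRelativeTime and to_reel_time are illustrative names for this sketch, not part of the dcpomatic or libdcp API.

// Minimal sketch of the reel-relative timing used above, assuming a fixed
// editable rate (tcr) of 1000 units per second, as in the patch.
#include <cassert>
#include <cstdio>

struct ReelRelativeTime {
	int h;
	int m;
	int s;
	int e;    // editable units within the second, at `tcr` units per second
	int tcr;  // editable units per second (1000 here)
};

// Convert an absolute content time (in seconds) to a time relative to the start
// of the current reel, split into h:m:s:e at `tcr` units per second.
ReelRelativeTime
to_reel_time (double content_seconds, double reel_start_seconds, int tcr)
{
	double relative = content_seconds - reel_start_seconds;
	if (relative < 0) {
		relative = 0;
	}
	long long const total_units = static_cast<long long>(relative * tcr + 0.5);
	ReelRelativeTime t;
	t.tcr = tcr;
	t.e = static_cast<int>(total_units % tcr);
	long long const total_seconds = total_units / tcr;
	t.s = static_cast<int>(total_seconds % 60);
	t.m = static_cast<int>((total_seconds / 60) % 60);
	t.h = static_cast<int>(total_seconds / 3600);
	return t;
}

int
main ()
{
	int const tcr = 1000;
	/* A subtitle starting 65.25s into the content, in a reel that starts at 60s,
	   becomes 00:00:05:250 relative to the reel.
	*/
	auto const in = to_reel_time (65.25, 60.0, tcr);
	assert (in.h == 0 && in.m == 0 && in.s == 5 && in.e == 250);
	printf ("%02d:%02d:%02d:%03d @ %d units/s\n", in.h, in.m, in.s, in.e, in.tcr);
	return 0;
}

Using a fixed 1000 Hz rate keeps the emitted subtitle times independent of the video frame rate, which is why the bitmap branch in the hunk also switches from film()->video_frame_rate() to tcr.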