#include "job_manager.h"
#include "transcode_job.h"
#include "upload_job.h"
-#include "log.h"
+#include "null_log.h"
+#include "file_log.h"
#include "exceptions.h"
#include "examine_content_job.h"
#include "config.h"
#include "video_content.h"
#include "subtitle_content.h"
#include "ffmpeg_content.h"
+#include "dcp_content.h"
+#include "screen_kdm.h"
#include <libcxml/cxml.h>
#include <dcp/cpl.h>
#include <dcp/certificate_chain.h>
using std::cout;
using std::list;
using std::set;
+using std::runtime_error;
using boost::shared_ptr;
using boost::weak_ptr;
using boost::dynamic_pointer_cast;
using boost::optional;
using boost::is_any_of;
-#define LOG_GENERAL(...) log()->log (String::compose (__VA_ARGS__), Log::TYPE_GENERAL);
-#define LOG_GENERAL_NC(...) log()->log (__VA_ARGS__, Log::TYPE_GENERAL);
+#define LOG_GENERAL(...) log()->log (String::compose (__VA_ARGS__), LogEntry::TYPE_GENERAL);
+#define LOG_GENERAL_NC(...) log()->log (__VA_ARGS__, LogEntry::TYPE_GENERAL);
/* 5 -> 6
* AudioMapping XML changed.
*
* Bumped to 32 for 2.0 branch; some times are expressed in Times rather
* than frames now.
+ *
+ * 32 -> 33
+ * Changed <Period> to <Subtitle> in FFmpegSubtitleStream
*/
-int const Film::current_state_version = 32;
+int const Film::current_state_version = 33;
/** Construct a Film object in a given directory.
*
, _audio_channels (6)
, _three_d (false)
, _sequence_video (true)
- , _interop (false)
+ , _interop (Config::instance()->default_interop ())
, _audio_processor (0)
+ , _reel_type (REELTYPE_SINGLE)
+ , _reel_length (2000000000)
, _state_version (current_state_version)
, _dirty (false)
{
set_isdcf_date_today ();
_playlist_changed_connection = _playlist->Changed.connect (bind (&Film::playlist_changed, this));
+ _playlist_order_changed_connection = _playlist->OrderChanged.connect (bind (&Film::playlist_order_changed, this));
_playlist_content_changed_connection = _playlist->ContentChanged.connect (bind (&Film::playlist_content_changed, this, _1, _2, _3));
/* Make state.directory a complete path without ..s (where possible)
(Code swiped from Adam Bowen on stackoverflow)
+ XXX: couldn't/shouldn't this just be boost::filesystem::canonical?
*/
boost::filesystem::path p (boost::filesystem::system_complete (dir));
+/* Destructor: sever every signal connection this Film holds so that no
+   callback can fire on a destroyed object. */
Film::~Film ()
{
- for (list<boost::signals2::connection>::const_iterator i = _job_connections.begin(); i != _job_connections.end(); ++i) {
- i->disconnect ();
+ /* Connections made when jobs were attached to this Film */
+ BOOST_FOREACH (boost::signals2::connection& i, _job_connections) {
+ i.disconnect ();
+ }
+
+ /* Connections made for automatically-started audio analyses */
+ BOOST_FOREACH (boost::signals2::connection& i, _audio_analysis_connections) {
+ i.disconnect ();
}
}
/** @return The file to write video frame info to */
boost::filesystem::path
-Film::info_file () const
+/* Frame-info files are now per-reel: the DCPTimePeriod covered by the reel
+   is encoded into the filename, so each reel gets its own info file. */
+Film::info_file (DCPTimePeriod period) const
{
boost::filesystem::path p;
p /= "info";
- p /= video_identifier ();
+ /* <video-identifier>_<period-start>_<period-end> */
+ p /= video_identifier () + "_" + raw_convert<string> (period.from.get()) + "_" + raw_convert<string> (period.to.get());
return file (p);
}
}
+/* Filename of the internal video MXF for the reel covering period p; the
+   period is encoded into the name so each reel has a distinct asset file. */
boost::filesystem::path
-Film::internal_video_asset_filename () const
+Film::internal_video_asset_filename (DCPTimePeriod p) const
{
- return video_identifier() + ".mxf";
+ return video_identifier() + "_" + raw_convert<string> (p.from.get()) + "_" + raw_convert<string> (p.to.get()) + ".mxf";
}
boost::filesystem::path
set_isdcf_date_today ();
- environment_info (log ());
+ BOOST_FOREACH (string i, environment_info ()) {
+ LOG_GENERAL_NC (i);
+ }
BOOST_FOREACH (shared_ptr<const Content> i, content ()) {
LOG_GENERAL ("Content: %1", i->technical_summary());
}
if (content().empty()) {
- throw StringError (_("You must add some content to the DCP before creating it"));
+ throw runtime_error (_("You must add some content to the DCP before creating it"));
}
if (dcp_content_type() == 0) {
if (_audio_processor) {
root->add_child("AudioProcessor")->add_child_text (_audio_processor->id ());
}
+ root->add_child("ReelType")->add_child_text (raw_convert<string> (_reel_type));
+ root->add_child("ReelLength")->add_child_text (raw_convert<string> (_reel_length));
_playlist->as_xml (root->add_child ("Playlist"));
return doc;
Film::read_metadata ()
{
if (boost::filesystem::exists (file ("metadata")) && !boost::filesystem::exists (file ("metadata.xml"))) {
- throw StringError (_("This film was created with an older version of DCP-o-matic, and unfortunately it cannot be loaded into this version. You will need to create a new Film, re-add your content and set it up again. Sorry!"));
+ throw runtime_error (_("This film was created with an older version of DCP-o-matic, and unfortunately it cannot be loaded into this version. You will need to create a new Film, re-add your content and set it up again. Sorry!"));
}
cxml::Document f ("Metadata");
_state_version = f.number_child<int> ("Version");
if (_state_version > current_state_version) {
- throw StringError (_("This film was created with a newer version of DCP-o-matic, and it cannot be loaded into this version. Sorry!"));
+ throw runtime_error (_("This film was created with a newer version of DCP-o-matic, and it cannot be loaded into this version. Sorry!"));
}
_name = f.string_child ("Name");
_audio_processor = 0;
}
+ _reel_type = static_cast<ReelType> (f.optional_number_child<int>("ReelType").get_value_or (static_cast<int>(REELTYPE_SINGLE)));
+ _reel_length = f.optional_number_child<int64_t>("ReelLength").get_value_or (2000000000);
+
list<string> notes;
/* This method is the only one that can return notes (so far) */
_playlist->set_from_xml (shared_from_this(), f.node_child ("Playlist"), _state_version, notes);
d << "_" << container()->isdcf_name();
}
- ContentList cl = content ();
-
/* XXX: this uses the first bit of content only */
/* The standard says we don't do this for trailers, for some strange reason */
if (dcp_content_type() && dcp_content_type()->libdcp_kind() != dcp::TRAILER) {
Ratio const * content_ratio = 0;
- for (ContentList::iterator i = cl.begin(); i != cl.end(); ++i) {
- shared_ptr<VideoContent> vc = dynamic_pointer_cast<VideoContent> (*i);
+ BOOST_FOREACH (shared_ptr<Content> i, content ()) {
+ shared_ptr<VideoContent> vc = dynamic_pointer_cast<VideoContent> (i);
if (vc) {
/* Here's the first piece of video content */
if (vc->scale().ratio ()) {
if (!dm.audio_language.empty ()) {
d << "_" << dm.audio_language;
if (!dm.subtitle_language.empty()) {
- d << "-" << dm.subtitle_language;
+
+ bool burnt_in = false;
+ BOOST_FOREACH (shared_ptr<Content> i, content ()) {
+ shared_ptr<SubtitleContent> sc = dynamic_pointer_cast<SubtitleContent> (i);
+ if (!sc) {
+ continue;
+ }
+
+ if (sc->use_subtitles() && sc->burn_subtitles()) {
+ burnt_in = true;
+ }
+ }
+
+ string language = dm.subtitle_language;
+ if (burnt_in) {
+ transform (language.begin(), language.end(), language.begin(), ::tolower);
+ } else {
+ transform (language.begin(), language.end(), language.begin(), ::toupper);
+ }
+
+ d << "-" << language;
} else {
d << "-XX";
}
}
} else {
list<int> mapped;
- for (ContentList::const_iterator i = cl.begin(); i != cl.end(); ++i) {
- shared_ptr<const AudioContent> ac = dynamic_pointer_cast<const AudioContent> (*i);
+ BOOST_FOREACH (shared_ptr<Content> i, content ()) {
+ shared_ptr<const AudioContent> ac = dynamic_pointer_cast<const AudioContent> (i);
if (ac) {
list<int> c = ac->audio_mapping().mapped_output_channels ();
copy (c.begin(), c.end(), back_inserter (mapped));
d << "-3D";
}
- if (!dm.package_type.empty ()) {
- d << "_" << dm.package_type;
+ bool vf = false;
+ BOOST_FOREACH (shared_ptr<Content> i, content ()) {
+ shared_ptr<const DCPContent> dc = dynamic_pointer_cast<const DCPContent> (i);
+ if (dc && (dc->reference_video() || dc->reference_audio() || dc->reference_subtitle())) {
+ vf = true;
+ }
+ }
+
+ if (vf) {
+ d << "_VF";
+ } else {
+ d << "_OV";
}
return d.str ();
signal_changed (AUDIO_CHANNELS);
}
+/** Set how this Film is to be divided into reels.
+ *  @param t New reel type; emits a REEL_TYPE change signal.
+ */
+void
+Film::set_reel_type (ReelType t)
+{
+ _reel_type = t;
+ signal_changed (REEL_TYPE);
+}
+
+/** @param r Desired reel length in bytes; emits a REEL_LENGTH change signal. */
+void
+Film::set_reel_length (int64_t r)
+{
+ _reel_length = r;
+ signal_changed (REEL_LENGTH);
+}
+
void
Film::signal_changed (Property p)
{
}
boost::filesystem::path
-Film::j2c_path (int f, Eyes e, bool t) const
+Film::j2c_path (int reel, Frame frame, Eyes eyes, bool tmp) const
{
boost::filesystem::path p;
p /= "j2c";
SafeStringStream s;
s.width (8);
- s << setfill('0') << f;
+ s << setfill('0') << reel << "_" << frame;
- if (e == EYES_LEFT) {
+ if (eyes == EYES_LEFT) {
s << ".L";
- } else if (e == EYES_RIGHT) {
+ } else if (eyes == EYES_RIGHT) {
s << ".R";
}
s << ".j2c";
- if (t) {
+ if (tmp) {
s << ".tmp";
}
}
shared_ptr<Content> content = c.lock ();
- if (content) {
- add_content (content);
+ if (!content) {
+ return;
+ }
+
+ add_content (content);
+ if (Config::instance()->automatic_audio_analysis ()) {
+ shared_ptr<Playlist> playlist (new Playlist);
+ playlist->add (content);
+ boost::signals2::connection c;
+ JobManager::instance()->analyse_audio (
+ shared_from_this (), playlist, c, bind (&Film::audio_analysis_finished, this)
+ );
+ _audio_analysis_connections.push_back (c);
}
}
_playlist->move_later (c);
}
+/** @return length of the film from time 0 to the last thing on the playlist */
DCPTime
Film::length () const
{
signal_changed (NAME);
}
+/** Handler for our playlist's OrderChanged signal (connected in the
+ *  constructor); re-emits it as a CONTENT_ORDER change on the Film.
+ */
+void
+Film::playlist_order_changed ()
+{
+ signal_changed (CONTENT_ORDER);
+}
+
int
Film::audio_frame_rate () const
{
dcp::EncryptedKDM
Film::make_kdm (
- dcp::Certificate target,
+ dcp::Certificate recipient,
+ vector<dcp::Certificate> trusted_devices,
boost::filesystem::path cpl_file,
dcp::LocalTime from,
dcp::LocalTime until,
return dcp::DecryptedKDM (
cpl, key(), from, until, "DCP-o-matic", cpl->content_title_text(), dcp::LocalTime().as_string()
- ).encrypt (signer, target, formulation);
+ ).encrypt (signer, recipient, trusted_devices, formulation);
}
-list<dcp::EncryptedKDM>
+list<ScreenKDM>
Film::make_kdms (
list<shared_ptr<Screen> > screens,
boost::filesystem::path dcp,
dcp::Formulation formulation
) const
{
- list<dcp::EncryptedKDM> kdms;
+ list<ScreenKDM> kdms;
- for (list<shared_ptr<Screen> >::iterator i = screens.begin(); i != screens.end(); ++i) {
- if ((*i)->certificate) {
- kdms.push_back (make_kdm ((*i)->certificate.get(), dcp, from, until, formulation));
+ BOOST_FOREACH (shared_ptr<Screen> i, screens) {
+ if (i->recipient) {
+ kdms.push_back (ScreenKDM (i, make_kdm (i->recipient.get(), i->trusted_devices, dcp, from, until, formulation)));
}
}
+/* The disk-space estimate now comes from the playlist, which is passed the
+   audio parameters as well as the J2K bandwidth — presumably so audio data
+   can be included in the estimate (TODO: confirm against Playlist). */
uint64_t
Film::required_disk_space () const
{
- return uint64_t (j2k_bandwidth() / 8) * length().seconds();
+ return _playlist->required_disk_space (j2k_bandwidth(), audio_channels(), audio_frame_rate());
}
/** This method checks the disk that the Film is on and tries to decide whether or not
* there will be enough space to make a DCP for it. If so, true is returned; if not,
- * false is returned and required and availabe are filled in with the amount of disk space
+ * false is returned and required and available are filled in with the amount of disk space
* required and available respectively (in Gb).
*
* Note: the decision made by this method isn't, of course, 100% reliable.
return audio_processor()->input_names ();
}
+ DCPOMATIC_ASSERT (MAX_DCP_AUDIO_CHANNELS == 16);
+
vector<string> n;
n.push_back (_("L"));
n.push_back (_("R"));
n.push_back (_("Rc"));
n.push_back (_("BsL"));
n.push_back (_("BsR"));
+ n.push_back (_("DBP"));
+ n.push_back (_("DBS"));
+ n.push_back ("");
+ n.push_back ("");
return vector<string> (n.begin(), n.begin() + audio_channels ());
}
{
_playlist->remove (c);
}
+
+/** Callback bound when an automatic audio analysis job is started for newly
+ *  added content.  Currently a placeholder with no behaviour.
+ */
+void
+Film::audio_analysis_finished ()
+{
+ /* XXX */
+}
+
+/** @return The periods into which the DCP should be split as reels,
+ *  together covering the whole film from time 0 up to length() rounded
+ *  up to a whole video frame.
+ */
+list<DCPTimePeriod>
+Film::reels () const
+{
+ list<DCPTimePeriod> p;
+ DCPTime const len = length().round_up (video_frame_rate ());
+
+ switch (reel_type ()) {
+ case REELTYPE_SINGLE:
+ /* The whole film in one reel */
+ p.push_back (DCPTimePeriod (DCPTime (), len));
+ break;
+ case REELTYPE_BY_VIDEO_CONTENT:
+ {
+ /* One reel between each pair of consecutive video-content split points */
+ optional<DCPTime> last_split;
+ shared_ptr<VideoContent> last_video;
+ BOOST_FOREACH (shared_ptr<Content> c, content ()) {
+ shared_ptr<VideoContent> v = dynamic_pointer_cast<VideoContent> (c);
+ if (v) {
+ BOOST_FOREACH (DCPTime t, v->reel_split_points()) {
+ if (last_split) {
+ p.push_back (DCPTimePeriod (last_split.get(), t));
+ }
+ last_split = t;
+ }
+ last_video = v;
+ }
+ }
+
+ DCPTime video_end = last_video ? last_video->end() : DCPTime(0);
+ if (last_split) {
+ /* Definitely go from the last split to the end of the video content */
+ p.push_back (DCPTimePeriod (last_split.get(), video_end));
+ }
+
+ if (video_end < len) {
+ /* And maybe go after that as well if there is any non-video hanging over the end */
+ p.push_back (DCPTimePeriod (video_end, len));
+ }
+ break;
+ }
+ case REELTYPE_BY_LENGTH:
+ {
+ DCPTime current;
+ /* Integer-divide reel length by the size of one frame to give the number of frames per reel */
+ Frame const reel_in_frames = _reel_length / ((j2k_bandwidth() / video_frame_rate()) / 8);
+ while (current < len) {
+ DCPTime end = min (len, current + DCPTime::from_frames (reel_in_frames, video_frame_rate ()));
+ p.push_back (DCPTimePeriod (current, end));
+ current = end;
+ }
+ break;
+ }
+ }
+
+ return p;
+}