#include "job_manager.h"
#include "transcode_job.h"
#include "upload_job.h"
-#include "log.h"
+#include "null_log.h"
+#include "file_log.h"
#include "exceptions.h"
#include "examine_content_job.h"
#include "config.h"
#include "playlist.h"
-#include "player.h"
#include "dcp_content_type.h"
#include "ratio.h"
#include "cross.h"
-#include "cinema.h"
#include "safe_stringstream.h"
#include "environment_info.h"
#include "raw_convert.h"
#include "audio_processor.h"
#include "md5_digester.h"
+#include "compose.hpp"
+#include "screen.h"
+#include "audio_content.h"
+#include "video_content.h"
+#include "subtitle_content.h"
+#include "ffmpeg_content.h"
+#include "dcp_content.h"
+#include "screen_kdm.h"
#include <libcxml/cxml.h>
#include <dcp/cpl.h>
-#include <dcp/signer.h>
+#include <dcp/certificate_chain.h>
#include <dcp/util.h>
#include <dcp/local_time.h>
#include <dcp/decrypted_kdm.h>
#include <libxml++/libxml++.h>
#include <boost/filesystem.hpp>
#include <boost/algorithm/string.hpp>
-#include <boost/lexical_cast.hpp>
#include <boost/foreach.hpp>
#include <unistd.h>
#include <stdexcept>
#include <iostream>
#include <algorithm>
-#include <fstream>
#include <cstdlib>
#include <iomanip>
#include <set>
#include "i18n.h"
using std::string;
-using std::multimap;
using std::pair;
-using std::map;
using std::vector;
using std::setfill;
using std::min;
using std::max;
using std::make_pair;
-using std::endl;
using std::cout;
using std::list;
using std::set;
+using std::runtime_error;
using boost::shared_ptr;
using boost::weak_ptr;
using boost::dynamic_pointer_cast;
-using boost::to_upper_copy;
-using boost::ends_with;
-using boost::starts_with;
using boost::optional;
using boost::is_any_of;
-using dcp::Size;
-using dcp::Signer;
-#define LOG_GENERAL(...) log()->log (String::compose (__VA_ARGS__), Log::TYPE_GENERAL);
-#define LOG_GENERAL_NC(...) log()->log (__VA_ARGS__, Log::TYPE_GENERAL);
+#define LOG_GENERAL(...) log()->log (String::compose (__VA_ARGS__), LogEntry::TYPE_GENERAL);
+#define LOG_GENERAL_NC(...) log()->log (__VA_ARGS__, LogEntry::TYPE_GENERAL);
/* 5 -> 6
* AudioMapping XML changed.
, _audio_channels (6)
, _three_d (false)
, _sequence_video (true)
- , _interop (false)
+ , _interop (Config::instance()->default_interop ())
, _audio_processor (0)
+ , _reel_type (REELTYPE_SINGLE)
+ , _reel_length (2000000000)
, _state_version (current_state_version)
, _dirty (false)
{
Film::~Film ()
{
-	for (list<boost::signals2::connection>::const_iterator i = _job_connections.begin(); i != _job_connections.end(); ++i) {
-		i->disconnect ();
+	BOOST_FOREACH (boost::signals2::connection& i, _job_connections) {
+		i.disconnect ();
+	}
+
+	/* Also drop pending automatic-audio-analysis callbacks so they
+	   cannot fire after this Film has been destroyed */
+	BOOST_FOREACH (boost::signals2::connection& i, _audio_analysis_connections) {
+		i.disconnect ();
	}
}
/** @return The file to write video frame info to */
boost::filesystem::path
-Film::info_file () const
+Film::info_file (DCPTimePeriod period) const
{
	boost::filesystem::path p;
	p /= "info";
-	p /= video_identifier ();
+	/* One info file per reel period: <video-identifier>_<from>_<to> */
+	p /= video_identifier () + "_" + raw_convert<string> (period.from.get()) + "_" + raw_convert<string> (period.to.get());
	return file (p);
}
}
boost::filesystem::path
-Film::internal_video_asset_filename () const
+Film::internal_video_asset_filename (DCPTimePeriod p) const
{
-	return video_identifier() + ".mxf";
+	/* Encode the reel period into the filename so that per-reel assets
+	   of the same film do not collide */
+	return video_identifier() + "_" + raw_convert<string> (p.from.get()) + "_" + raw_convert<string> (p.to.get()) + ".mxf";
}
boost::filesystem::path
LOG_GENERAL ("Content: %1", i->technical_summary());
}
LOG_GENERAL ("DCP video rate %1 fps", video_frame_rate());
- LOG_GENERAL ("%1 threads", Config::instance()->num_local_encoding_threads());
+ if (Config::instance()->only_servers_encode ()) {
+ LOG_GENERAL_NC ("0 threads: ONLY SERVERS SET TO ENCODE");
+ } else {
+ LOG_GENERAL ("%1 threads", Config::instance()->num_local_encoding_threads());
+ }
LOG_GENERAL ("J2K bandwidth %1", j2k_bandwidth());
if (container() == 0) {
}
if (content().empty()) {
- throw StringError (_("You must add some content to the DCP before creating it"));
+ throw runtime_error (_("You must add some content to the DCP before creating it"));
}
if (dcp_content_type() == 0) {
if (_audio_processor) {
root->add_child("AudioProcessor")->add_child_text (_audio_processor->id ());
}
+ root->add_child("ReelType")->add_child_text (raw_convert<string> (_reel_type));
+ root->add_child("ReelLength")->add_child_text (raw_convert<string> (_reel_length));
_playlist->as_xml (root->add_child ("Playlist"));
return doc;
Film::read_metadata ()
{
if (boost::filesystem::exists (file ("metadata")) && !boost::filesystem::exists (file ("metadata.xml"))) {
- throw StringError (_("This film was created with an older version of DCP-o-matic, and unfortunately it cannot be loaded into this version. You will need to create a new Film, re-add your content and set it up again. Sorry!"));
+ throw runtime_error (_("This film was created with an older version of DCP-o-matic, and unfortunately it cannot be loaded into this version. You will need to create a new Film, re-add your content and set it up again. Sorry!"));
}
cxml::Document f ("Metadata");
_state_version = f.number_child<int> ("Version");
if (_state_version > current_state_version) {
- throw StringError (_("This film was created with a newer version of DCP-o-matic, and it cannot be loaded into this version. Sorry!"));
+ throw runtime_error (_("This film was created with a newer version of DCP-o-matic, and it cannot be loaded into this version. Sorry!"));
}
_name = f.string_child ("Name");
_audio_processor = 0;
}
+ _reel_type = static_cast<ReelType> (f.optional_number_child<int>("ReelType").get_value_or (static_cast<int>(REELTYPE_SINGLE)));
+ _reel_length = f.optional_number_child<int64_t>("ReelLength").get_value_or (2000000000);
+
list<string> notes;
/* This method is the only one that can return notes (so far) */
_playlist->set_from_xml (shared_from_this(), f.node_child ("Playlist"), _state_version, notes);
/* Split the raw name up into words */
vector<string> words;
- split (words, raw_name, is_any_of (" "));
+ split (words, raw_name, is_any_of (" _-"));
string fixed_name;
d << "_" << container()->isdcf_name();
}
- ContentList cl = content ();
-
/* XXX: this uses the first bit of content only */
/* The standard says we don't do this for trailers, for some strange reason */
if (dcp_content_type() && dcp_content_type()->libdcp_kind() != dcp::TRAILER) {
Ratio const * content_ratio = 0;
- for (ContentList::iterator i = cl.begin(); i != cl.end(); ++i) {
- shared_ptr<VideoContent> vc = dynamic_pointer_cast<VideoContent> (*i);
+ BOOST_FOREACH (shared_ptr<Content> i, content ()) {
+ shared_ptr<VideoContent> vc = dynamic_pointer_cast<VideoContent> (i);
if (vc) {
/* Here's the first piece of video content */
if (vc->scale().ratio ()) {
}
} else {
list<int> mapped;
- for (ContentList::const_iterator i = cl.begin(); i != cl.end(); ++i) {
- shared_ptr<const AudioContent> ac = dynamic_pointer_cast<const AudioContent> (*i);
+ BOOST_FOREACH (shared_ptr<Content> i, content ()) {
+ shared_ptr<const AudioContent> ac = dynamic_pointer_cast<const AudioContent> (i);
if (ac) {
list<int> c = ac->audio_mapping().mapped_output_channels ();
copy (c.begin(), c.end(), back_inserter (mapped));
d << "-3D";
}
- if (!dm.package_type.empty ()) {
- d << "_" << dm.package_type;
+ bool vf = false;
+ BOOST_FOREACH (shared_ptr<Content> i, content ()) {
+ shared_ptr<const DCPContent> dc = dynamic_pointer_cast<const DCPContent> (i);
+ if (dc && (dc->reference_video() || dc->reference_audio() || dc->reference_subtitle())) {
+ vf = true;
+ }
+ }
+
+ if (vf) {
+ d << "_VF";
+ } else {
+ d << "_OV";
}
return d.str ();
{
_three_d = t;
signal_changed (THREE_D);
+
+ if (_three_d && _isdcf_metadata.two_d_version_of_three_d) {
+ _isdcf_metadata.two_d_version_of_three_d = false;
+ signal_changed (ISDCF_METADATA);
+ }
}
void
signal_changed (AUDIO_CHANNELS);
}
+/** Set how the DCP should be split into reels.
+ *  @param t New reel type.
+ */
+void
+Film::set_reel_type (ReelType t)
+{
+	_reel_type = t;
+	signal_changed (REEL_TYPE);
+}
+
+/** Set the maximum reel size used with REELTYPE_BY_LENGTH.
+ *  @param r Reel length in bytes; consumed by reels() — TODO confirm units against callers.
+ */
+void
+Film::set_reel_length (int64_t r)
+{
+	_reel_length = r;
+	signal_changed (REEL_LENGTH);
+}
+
void
Film::signal_changed (Property p)
{
}
boost::filesystem::path
-Film::j2c_path (int f, Eyes e, bool t) const
+Film::j2c_path (int reel, Frame frame, Eyes eyes, bool tmp) const
{
boost::filesystem::path p;
p /= "j2c";
SafeStringStream s;
s.width (8);
- s << setfill('0') << f;
+ s << setfill('0') << reel << "_" << frame;
- if (e == EYES_LEFT) {
+ if (eyes == EYES_LEFT) {
s << ".L";
- } else if (e == EYES_RIGHT) {
+ } else if (eyes == EYES_RIGHT) {
s << ".R";
}
s << ".j2c";
- if (t) {
+ if (tmp) {
s << ".tmp";
}
void
Film::examine_and_add_content (shared_ptr<Content> c)
{
-	if (dynamic_pointer_cast<FFmpegContent> (c)) {
+	/* Only run ffprobe if this Film has a directory to write the log into */
+	if (dynamic_pointer_cast<FFmpegContent> (c) && !_directory.empty ()) {
		run_ffprobe (c->path(0), file ("ffprobe.log"), _log);
	}
	/* Examine in the background; maybe_add_content() adds the content when the job finishes */
	shared_ptr<Job> j (new ExamineContentJob (shared_from_this(), c));
	_job_connections.push_back (
-		j->Finished.connect (bind (&Film::maybe_add_content, this, boost::weak_ptr<Job> (j), boost::weak_ptr<Content> (c)))
+		j->Finished.connect (bind (&Film::maybe_add_content, this, weak_ptr<Job> (j), weak_ptr<Content> (c)))
	);
	JobManager::instance()->add (j);
}
shared_ptr<Content> content = c.lock ();
- if (content) {
- add_content (content);
+ if (!content) {
+ return;
+ }
+
+ add_content (content);
+ if (Config::instance()->automatic_audio_analysis ()) {
+ shared_ptr<Playlist> playlist (new Playlist);
+ playlist->add (content);
+ boost::signals2::connection c;
+ JobManager::instance()->analyse_audio (
+ shared_from_this (), playlist, c, bind (&Film::audio_analysis_finished, this)
+ );
+ _audio_analysis_connections.push_back (c);
}
}
_playlist->move_later (c);
}
+/** @return length of the film from time 0 to the last thing on the playlist */
DCPTime
Film::length () const
{
}
void
-Film::playlist_content_changed (boost::weak_ptr<Content> c, int p, bool frequent)
+Film::playlist_content_changed (weak_ptr<Content> c, int p, bool frequent)
{
_dirty = true;
int
Film::audio_frame_rate () const
{
-	/* XXX */
+	/* Use 96kHz if any piece of audio content has a rate above 48kHz;
+	   otherwise fall back to the standard 48kHz */
+	BOOST_FOREACH (shared_ptr<Content> i, content ()) {
+		shared_ptr<AudioContent> a = dynamic_pointer_cast<AudioContent> (i);
+		if (a && a->has_rate_above_48k ()) {
+			return 96000;
+		}
+	}
+
	return 48000;
}
) const
{
shared_ptr<const dcp::CPL> cpl (new dcp::CPL (cpl_file));
- shared_ptr<const dcp::Signer> signer = Config::instance()->signer();
+ shared_ptr<const dcp::CertificateChain> signer = Config::instance()->signer_chain ();
if (!signer->valid ()) {
throw InvalidSignerError ();
}
).encrypt (signer, target, formulation);
}
-list<dcp::EncryptedKDM>
+list<ScreenKDM>
Film::make_kdms (
list<shared_ptr<Screen> > screens,
boost::filesystem::path dcp,
dcp::Formulation formulation
) const
{
- list<dcp::EncryptedKDM> kdms;
+ list<ScreenKDM> kdms;
- for (list<shared_ptr<Screen> >::iterator i = screens.begin(); i != screens.end(); ++i) {
- if ((*i)->certificate) {
- kdms.push_back (make_kdm ((*i)->certificate.get(), dcp, from, until, formulation));
+ BOOST_FOREACH (shared_ptr<Screen> i, screens) {
+ if (i->certificate) {
+ kdms.push_back (ScreenKDM (i, make_kdm (i->certificate.get(), dcp, from, until, formulation)));
}
}
{
_playlist->remove (c);
}
+
+/** Called when an automatic audio analysis started by maybe_add_content()
+ *  completes.
+ */
+void
+Film::audio_analysis_finished ()
+{
+	/* XXX: intentionally does nothing yet */
+}
+
+/** @return contiguous periods into which the DCP will be split into
+ *  reels, covering time 0 to the film's (rounded-up) length, according
+ *  to the film's reel type.
+ */
+list<DCPTimePeriod>
+Film::reels () const
+{
+	list<DCPTimePeriod> p;
+	DCPTime const len = length().round_up (video_frame_rate ());
+
+	switch (reel_type ()) {
+	case REELTYPE_SINGLE:
+		/* Everything in one reel */
+		p.push_back (DCPTimePeriod (DCPTime (), len));
+		break;
+	case REELTYPE_BY_VIDEO_CONTENT:
+	{
+		/* Split at each point a piece of video content asks for */
+		optional<DCPTime> last_split;
+		shared_ptr<VideoContent> last_video;
+		BOOST_FOREACH (shared_ptr<Content> c, content ()) {
+			shared_ptr<VideoContent> v = dynamic_pointer_cast<VideoContent> (c);
+			if (v) {
+				BOOST_FOREACH (DCPTime t, v->reel_split_points()) {
+					if (last_split) {
+						p.push_back (DCPTimePeriod (last_split.get(), t));
+					}
+					last_split = t;
+				}
+				last_video = v;
+			}
+		}
+
+		DCPTime video_end = last_video ? last_video->end() : DCPTime(0);
+		if (last_split) {
+			/* Definitely go from the last split to the end of the video content */
+			p.push_back (DCPTimePeriod (last_split.get(), video_end));
+		}
+
+		if (video_end < len) {
+			/* And maybe go after that as well if there is any non-video hanging over the end */
+			p.push_back (DCPTimePeriod (video_end, len));
+		}
+		break;
+	}
+	case REELTYPE_BY_LENGTH:
+	{
+		DCPTime current;
+		/* Integer-divide reel length (bytes) by the size of one frame
+		   (J2K bits-per-second / frames-per-second / 8) to give the
+		   number of frames per reel */
+		Frame const reel_in_frames = _reel_length / ((j2k_bandwidth() / video_frame_rate()) / 8);
+		while (current < len) {
+			DCPTime end = min (len, current + DCPTime::from_frames (reel_in_frames, video_frame_rate ()));
+			p.push_back (DCPTimePeriod (current, end));
+			current = end;
+		}
+		break;
+	}
+	}
+
+	return p;
+}