using std::cout;
using std::list;
using std::set;
+using std::runtime_error;
using boost::shared_ptr;
using boost::weak_ptr;
using boost::dynamic_pointer_cast;
using boost::optional;
using boost::is_any_of;
-#define LOG_GENERAL(...) log()->log (String::compose (__VA_ARGS__), Log::TYPE_GENERAL);
-#define LOG_GENERAL_NC(...) log()->log (__VA_ARGS__, Log::TYPE_GENERAL);
+#define LOG_GENERAL(...) log()->log (String::compose (__VA_ARGS__), LogEntry::TYPE_GENERAL);
+#define LOG_GENERAL_NC(...) log()->log (__VA_ARGS__, LogEntry::TYPE_GENERAL);
/* 5 -> 6
* AudioMapping XML changed.
, _sequence_video (true)
, _interop (Config::instance()->default_interop ())
, _audio_processor (0)
+ , _reel_type (REELTYPE_SINGLE)
+ , _reel_length (2000000000)
, _state_version (current_state_version)
, _dirty (false)
{
/** @return The file to write video frame info to */
boost::filesystem::path
-Film::info_file () const
+/* Now takes the reel period: the period's start/end times are embedded in the
+   filename so each reel (DCPTimePeriod) gets a distinct info file.
+   NOTE(review): assumes period.from/period.to expose an integral time via
+   get() — confirm against DCPTimePeriod. */
+Film::info_file (DCPTimePeriod period) const
{
	boost::filesystem::path p;
	p /= "info";
-	p /= video_identifier ();
+	p /= video_identifier () + "_" + raw_convert<string> (period.from.get()) + "_" + raw_convert<string> (period.to.get());
	return file (p);
}
}
boost::filesystem::path
-Film::internal_video_asset_filename () const
+/* Per-reel video MXF filename: the reel period's start/end times are encoded
+   into the name so assets for different reels do not collide. */
+Film::internal_video_asset_filename (DCPTimePeriod p) const
{
-	return video_identifier() + ".mxf";
+	return video_identifier() + "_" + raw_convert<string> (p.from.get()) + "_" + raw_convert<string> (p.to.get()) + ".mxf";
}
boost::filesystem::path
}
if (content().empty()) {
- throw StringError (_("You must add some content to the DCP before creating it"));
+ throw runtime_error (_("You must add some content to the DCP before creating it"));
}
if (dcp_content_type() == 0) {
if (_audio_processor) {
root->add_child("AudioProcessor")->add_child_text (_audio_processor->id ());
}
+ root->add_child("ReelType")->add_child_text (raw_convert<string> (_reel_type));
+ root->add_child("ReelLength")->add_child_text (raw_convert<string> (_reel_length));
_playlist->as_xml (root->add_child ("Playlist"));
return doc;
Film::read_metadata ()
{
if (boost::filesystem::exists (file ("metadata")) && !boost::filesystem::exists (file ("metadata.xml"))) {
- throw StringError (_("This film was created with an older version of DCP-o-matic, and unfortunately it cannot be loaded into this version. You will need to create a new Film, re-add your content and set it up again. Sorry!"));
+ throw runtime_error (_("This film was created with an older version of DCP-o-matic, and unfortunately it cannot be loaded into this version. You will need to create a new Film, re-add your content and set it up again. Sorry!"));
}
cxml::Document f ("Metadata");
_state_version = f.number_child<int> ("Version");
if (_state_version > current_state_version) {
- throw StringError (_("This film was created with a newer version of DCP-o-matic, and it cannot be loaded into this version. Sorry!"));
+ throw runtime_error (_("This film was created with a newer version of DCP-o-matic, and it cannot be loaded into this version. Sorry!"));
}
_name = f.string_child ("Name");
_audio_processor = 0;
}
+ _reel_type = static_cast<ReelType> (f.optional_number_child<int>("ReelType").get_value_or (static_cast<int>(REELTYPE_SINGLE)));
+ _reel_length = f.optional_number_child<int64_t>("ReelLength").get_value_or (2000000000);
+
list<string> notes;
/* This method is the only one that can return notes (so far) */
_playlist->set_from_xml (shared_from_this(), f.node_child ("Playlist"), _state_version, notes);
signal_changed (AUDIO_CHANNELS);
}
+/** Set the way the DCP will be split into reels.
+ *  @param t New reel type; observers are notified via signal_changed (REEL_TYPE).
+ */
+void
+Film::set_reel_type (ReelType t)
+{
+	_reel_type = t;
+	signal_changed (REEL_TYPE);
+}
+
+/** Set the maximum reel length, used when the reel type is REELTYPE_BY_LENGTH.
+ *  @param r New reel length; presumably bytes, matching the 2000000000
+ *  default used elsewhere in this patch — TODO confirm against callers.
+ */
+void
+Film::set_reel_length (int64_t r)
+{
+	_reel_length = r;
+	signal_changed (REEL_LENGTH);
+}
+
void
Film::signal_changed (Property p)
{
	/* NOTE(review): the body appears to be elided in this excerpt — cannot
	   document its behaviour from what is visible here. */
}
boost::filesystem::path
-Film::j2c_path (int f, Eyes e, bool t) const
+Film::j2c_path (int reel, Frame frame, Eyes eyes, bool tmp) const
{
boost::filesystem::path p;
p /= "j2c";
SafeStringStream s;
s.width (8);
- s << setfill('0') << f;
+ s << setfill('0') << reel << "_" << frame;
- if (e == EYES_LEFT) {
+ if (eyes == EYES_LEFT) {
s << ".L";
- } else if (e == EYES_RIGHT) {
+ } else if (eyes == EYES_RIGHT) {
s << ".R";
}
s << ".j2c";
- if (t) {
+ if (tmp) {
s << ".tmp";
}
list<ScreenKDM> kdms;
BOOST_FOREACH (shared_ptr<Screen> i, screens) {
- if (i->certificate) {
- kdms.push_back (ScreenKDM (i, make_kdm (i->certificate.get(), dcp, from, until, formulation)));
+ if (i->recipient) {
+ kdms.push_back (ScreenKDM (i, make_kdm (i->recipient.get(), dcp, from, until, formulation)));
}
}
return audio_processor()->input_names ();
}
+ DCPOMATIC_ASSERT (MAX_DCP_AUDIO_CHANNELS == 16);
+
vector<string> n;
n.push_back (_("L"));
n.push_back (_("R"));
n.push_back (_("Rc"));
n.push_back (_("BsL"));
n.push_back (_("BsR"));
+ n.push_back (_("DBP"));
+ n.push_back (_("DBS"));
+ n.push_back (_("NC"));
+ n.push_back (_("NC"));
return vector<string> (n.begin(), n.begin() + audio_channels ());
}
{
/* XXX */
}
+
+/** @return Periods of the DCP, one per reel, according to reel_type():
+ *   - REELTYPE_SINGLE: one reel covering the whole film.
+ *   - REELTYPE_BY_VIDEO_CONTENT: one reel per span between video-content
+ *     split points, plus a trailing reel for any non-video overhang.
+ *   - REELTYPE_BY_LENGTH: successive reels of at most _reel_length's worth
+ *     of video frames.
+ */
+list<DCPTimePeriod>
+Film::reels () const
+{
+	list<DCPTimePeriod> p;
+	/* Film length rounded up to a whole number of video frames */
+	DCPTime const len = length().round_up (video_frame_rate ());
+
+	switch (reel_type ()) {
+	case REELTYPE_SINGLE:
+		p.push_back (DCPTimePeriod (DCPTime (), len));
+		break;
+	case REELTYPE_BY_VIDEO_CONTENT:
+	{
+		optional<DCPTime> last_split;
+		shared_ptr<VideoContent> last_video;
+		/* Evaluate content() once and iterate over the copy (the local was
+		   previously declared but unused, with content() called again below) */
+		ContentList cl = content ();
+		BOOST_FOREACH (shared_ptr<Content> c, cl) {
+			shared_ptr<VideoContent> v = dynamic_pointer_cast<VideoContent> (c);
+			if (v) {
+				/* Each adjacent pair of split points delimits a reel */
+				BOOST_FOREACH (DCPTime t, v->reel_split_points()) {
+					if (last_split) {
+						p.push_back (DCPTimePeriod (last_split.get(), t));
+					}
+					last_split = t;
+				}
+				last_video = v;
+			}
+		}
+
+		DCPTime video_end = last_video ? last_video->end() : DCPTime(0);
+		if (last_split) {
+			/* Definitely go from the last split to the end of the video content */
+			p.push_back (DCPTimePeriod (last_split.get(), video_end));
+		}
+
+		if (video_end < len) {
+			/* And maybe go after that as well if there is any non-video hanging over the end */
+			p.push_back (DCPTimePeriod (video_end, len));
+		}
+		break;
+	}
+	case REELTYPE_BY_LENGTH:
+	{
+		DCPTime current;
+		/* Integer-divide reel length by the size of one frame to give the number of frames per reel */
+		Frame const reel_in_frames = _reel_length / ((j2k_bandwidth() / video_frame_rate()) / 8);
+		while (current < len) {
+			DCPTime end = min (len, current + DCPTime::from_frames (reel_in_frames, video_frame_rate ()));
+			p.push_back (DCPTimePeriod (current, end));
+			current = end;
+		}
+		break;
+	}
+	}
+
+	return p;
+}