#include "util.h"
#include "job_manager.h"
#include "transcode_job.h"
-#include "scp_dcp_job.h"
-#include "log.h"
+#include "upload_job.h"
+#include "null_log.h"
+#include "file_log.h"
#include "exceptions.h"
#include "examine_content_job.h"
#include "config.h"
#include "playlist.h"
-#include "player.h"
#include "dcp_content_type.h"
#include "ratio.h"
#include "cross.h"
-#include "cinema.h"
#include "safe_stringstream.h"
#include "environment_info.h"
#include "raw_convert.h"
+#include "audio_processor.h"
+#include "md5_digester.h"
+#include "compose.hpp"
+#include "screen.h"
+#include "audio_content.h"
+#include "video_content.h"
+#include "subtitle_content.h"
+#include "ffmpeg_content.h"
+#include "dcp_content.h"
+#include "screen_kdm.h"
#include <libcxml/cxml.h>
#include <dcp/cpl.h>
-#include <dcp/signer.h>
+#include <dcp/certificate_chain.h>
#include <dcp/util.h>
#include <dcp/local_time.h>
#include <dcp/decrypted_kdm.h>
#include <libxml++/libxml++.h>
#include <boost/filesystem.hpp>
#include <boost/algorithm/string.hpp>
-#include <boost/lexical_cast.hpp>
#include <boost/foreach.hpp>
#include <unistd.h>
#include <stdexcept>
#include <iostream>
#include <algorithm>
-#include <fstream>
#include <cstdlib>
#include <iomanip>
#include <set>
#include "i18n.h"
using std::string;
-using std::multimap;
using std::pair;
-using std::map;
using std::vector;
using std::setfill;
using std::min;
+using std::max;
using std::make_pair;
-using std::endl;
using std::cout;
using std::list;
using std::set;
+using std::runtime_error;
using boost::shared_ptr;
using boost::weak_ptr;
using boost::dynamic_pointer_cast;
-using boost::to_upper_copy;
-using boost::ends_with;
-using boost::starts_with;
using boost::optional;
using boost::is_any_of;
-using dcp::Size;
-using dcp::Signer;
-#define LOG_GENERAL(...) log()->log (String::compose (__VA_ARGS__), Log::TYPE_GENERAL);
-#define LOG_GENERAL_NC(...) log()->log (__VA_ARGS__, Log::TYPE_GENERAL);
+#define LOG_GENERAL(...) log()->log (String::compose (__VA_ARGS__), LogEntry::TYPE_GENERAL);
+#define LOG_GENERAL_NC(...) log()->log (__VA_ARGS__, LogEntry::TYPE_GENERAL);
/* 5 -> 6
* AudioMapping XML changed.
, _audio_channels (6)
, _three_d (false)
, _sequence_video (true)
- , _interop (false)
- , _burn_subtitles (false)
+ , _interop (Config::instance()->default_interop ())
+ , _audio_processor (0)
+ , _reel_type (REELTYPE_SINGLE)
+ , _reel_length (2000000000)
, _state_version (current_state_version)
, _dirty (false)
{
set_isdcf_date_today ();
_playlist_changed_connection = _playlist->Changed.connect (bind (&Film::playlist_changed, this));
- _playlist_content_changed_connection = _playlist->ContentChanged.connect (bind (&Film::playlist_content_changed, this, _1, _2));
-
+ _playlist_content_changed_connection = _playlist->ContentChanged.connect (bind (&Film::playlist_content_changed, this, _1, _2, _3));
+
/* Make state.directory a complete path without ..s (where possible)
(Code swiped from Adam Bowen on stackoverflow)
*/
-
+
boost::filesystem::path p (boost::filesystem::system_complete (dir));
boost::filesystem::path result;
for (boost::filesystem::path::iterator i = p.begin(); i != p.end(); ++i) {
Film::~Film ()
{
-	for (list<boost::signals2::connection>::const_iterator i = _job_connections.begin(); i != _job_connections.end(); ++i) {
-		i->disconnect ();
+	/* Sever job and audio-analysis signal connections so that no pending
+	   callback can fire into this Film after it has been destroyed.
+	*/
+	BOOST_FOREACH (boost::signals2::connection& i, _job_connections) {
+		i.disconnect ();
	}
-}
+
+	BOOST_FOREACH (boost::signals2::connection& i, _audio_analysis_connections) {
+		i.disconnect ();
+	}
+}
string
Film::video_identifier () const
SafeStringStream s;
s.imbue (std::locale::classic ());
-
+
s << container()->id()
<< "_" << resolution_to_string (_resolution)
<< "_" << _playlist->video_identifier()
s << "_S";
}
- if (_burn_subtitles) {
- s << "_B";
- }
-
if (_three_d) {
s << "_3D";
}
return s.str ();
}
-
+
-/** @return The file to write video frame info to */
+/** @param period Period within the DCP that the frame info covers.
+ *  @return The file to write video frame info to for the given period.
+ */
boost::filesystem::path
-Film::info_file () const
+Film::info_file (DCPTimePeriod period) const
{
	boost::filesystem::path p;
	p /= "info";
-	p /= video_identifier ();
+	p /= video_identifier () + "_" + raw_convert<string> (period.from.get()) + "_" + raw_convert<string> (period.to.get());
	return file (p);
}
+/** @return Directory in which the film's internal video assets are kept */
boost::filesystem::path
-Film::internal_video_mxf_dir () const
+Film::internal_video_asset_dir () const
{
	return dir ("video");
}
+/** @param p Period within the DCP that the asset covers.
+ *  @return Filename (within internal_video_asset_dir()) for the video asset covering this period.
+ */
boost::filesystem::path
-Film::internal_video_mxf_filename () const
+Film::internal_video_asset_filename (DCPTimePeriod p) const
{
-	return video_identifier() + ".mxf";
+	return video_identifier() + "_" + raw_convert<string> (p.from.get()) + "_" + raw_convert<string> (p.to.get()) + ".mxf";
}
-string
-Film::filename_safe_name () const
+/** @param playlist Playlist to find an audio analysis for.
+ *  @return Path (in the film's "analysis" directory) for the audio analysis of
+ *  the given playlist, named after a digest of its audio content and mapping.
+ */
+boost::filesystem::path
+Film::audio_analysis_path (shared_ptr<const Playlist> playlist) const
{
-	string const n = name ();
-	string o;
-	for (size_t i = 0; i < n.length(); ++i) {
-		if (isalnum (n[i])) {
-			o += n[i];
-		} else {
-			o += "_";
+	boost::filesystem::path p = dir ("analysis");
+
+	MD5Digester digester;
+	BOOST_FOREACH (shared_ptr<Content> i, playlist->content ()) {
+		shared_ptr<AudioContent> ac = dynamic_pointer_cast<AudioContent> (i);
+		if (!ac) {
+			continue;
+		}
+
+		digester.add (ac->digest ());
+		digester.add (ac->audio_mapping().digest ());
+		if (playlist->content().size() != 1) {
+			/* Analyses should be considered equal regardless of gain
+			   if they were made from just one piece of content.  This
+			   is because we can fake any gain change in a single-content
+			   analysis at the plotting stage rather than having to
+			   recompute it.
+			*/
+			digester.add (ac->audio_gain ());
		}
	}
-	return o;
-}
+	if (audio_processor ()) {
+		digester.add (audio_processor()->id ());
+	}
-boost::filesystem::path
-Film::audio_analysis_dir () const
-{
-	return dir ("analysis");
+	p /= digester.get ();
+	return p;
}
/** Add suitable Jobs to the JobManager to create a DCP for this Film */
void
Film::make_dcp ()
{
- set_isdcf_date_today ();
-
if (dcp_name().find ("/") != string::npos) {
throw BadSettingError (_("name"), _("cannot contain slashes"));
}
+ set_isdcf_date_today ();
+
environment_info (log ());
- ContentList cl = content ();
- for (ContentList::const_iterator i = cl.begin(); i != cl.end(); ++i) {
- LOG_GENERAL ("Content: %1", (*i)->technical_summary());
+ BOOST_FOREACH (shared_ptr<const Content> i, content ()) {
+ LOG_GENERAL ("Content: %1", i->technical_summary());
}
LOG_GENERAL ("DCP video rate %1 fps", video_frame_rate());
- LOG_GENERAL ("%1 threads", Config::instance()->num_local_encoding_threads());
+ if (Config::instance()->only_servers_encode ()) {
+ LOG_GENERAL_NC ("0 threads: ONLY SERVERS SET TO ENCODE");
+ } else {
+ LOG_GENERAL ("%1 threads", Config::instance()->num_local_encoding_threads());
+ }
LOG_GENERAL ("J2K bandwidth %1", j2k_bandwidth());
-
+
if (container() == 0) {
throw MissingSettingError (_("container"));
}
if (content().empty()) {
- throw StringError (_("You must add some content to the DCP before creating it"));
+ throw runtime_error (_("You must add some content to the DCP before creating it"));
}
if (dcp_content_type() == 0) {
void
Film::send_dcp_to_tms ()
{
-	shared_ptr<Job> j (new SCPDCPJob (shared_from_this()));
+	/* Queue a background job to upload this film's DCP to the configured TMS */
+	shared_ptr<Job> j (new UploadJob (shared_from_this()));
	JobManager::instance()->add (j);
}
root->add_child("ThreeD")->add_child_text (_three_d ? "1" : "0");
root->add_child("SequenceVideo")->add_child_text (_sequence_video ? "1" : "0");
root->add_child("Interop")->add_child_text (_interop ? "1" : "0");
- root->add_child("BurnSubtitles")->add_child_text (_burn_subtitles ? "1" : "0");
root->add_child("Signed")->add_child_text (_signed ? "1" : "0");
root->add_child("Encrypted")->add_child_text (_encrypted ? "1" : "0");
root->add_child("Key")->add_child_text (_key.hex ());
+ if (_audio_processor) {
+ root->add_child("AudioProcessor")->add_child_text (_audio_processor->id ());
+ }
+ root->add_child("ReelType")->add_child_text (raw_convert<string> (_reel_type));
+ root->add_child("ReelLength")->add_child_text (raw_convert<string> (_reel_length));
_playlist->as_xml (root->add_child ("Playlist"));
return doc;
Film::read_metadata ()
{
if (boost::filesystem::exists (file ("metadata")) && !boost::filesystem::exists (file ("metadata.xml"))) {
- throw StringError (_("This film was created with an older version of DCP-o-matic, and unfortunately it cannot be loaded into this version. You will need to create a new Film, re-add your content and set it up again. Sorry!"));
+ throw runtime_error (_("This film was created with an older version of DCP-o-matic, and unfortunately it cannot be loaded into this version. You will need to create a new Film, re-add your content and set it up again. Sorry!"));
}
cxml::Document f ("Metadata");
_state_version = f.number_child<int> ("Version");
if (_state_version > current_state_version) {
- throw StringError (_("This film was created with a newer version of DCP-o-matic, and it cannot be loaded into this version. Sorry!"));
+ throw runtime_error (_("This film was created with a newer version of DCP-o-matic, and it cannot be loaded into this version. Sorry!"));
}
-
+
_name = f.string_child ("Name");
if (_state_version >= 9) {
_use_isdcf_name = f.bool_child ("UseISDCFName");
_sequence_video = f.bool_child ("SequenceVideo");
_three_d = f.bool_child ("ThreeD");
_interop = f.bool_child ("Interop");
- if (_state_version >= 32) {
- _burn_subtitles = f.bool_child ("BurnSubtitles");
- }
_key = dcp::Key (f.string_child ("Key"));
+ if (f.optional_string_child ("AudioProcessor")) {
+ _audio_processor = AudioProcessor::from_id (f.string_child ("AudioProcessor"));
+ } else {
+ _audio_processor = 0;
+ }
+
+ _reel_type = static_cast<ReelType> (f.optional_number_child<int>("ReelType").get_value_or (static_cast<int>(REELTYPE_SINGLE)));
+ _reel_length = f.optional_number_child<int64_t>("ReelLength").get_value_or (2000000000);
+
list<string> notes;
/* This method is the only one that can return notes (so far) */
_playlist->set_from_xml (shared_from_this(), f.node_child ("Playlist"), _state_version, notes);
boost::filesystem::path p;
p /= _directory;
p /= d;
-
+
boost::filesystem::create_directories (p);
-
+
return p;
}
p /= f;
boost::filesystem::create_directories (p.parent_path ());
-
+
return p;
}
/* Split the raw name up into words */
vector<string> words;
- split (words, raw_name, is_any_of (" "));
+ split (words, raw_name, is_any_of (" _-"));
string fixed_name;
-
+
/* Add each word to fixed_name */
for (vector<string>::const_iterator i = words.begin(); i != words.end(); ++i) {
string w = *i;
++caps;
}
}
-
+
/* If w is all caps make the rest of it lower case, otherwise
leave it alone.
*/
if (dm.temp_version) {
d << "-Temp";
}
-
+
if (dm.pre_release) {
d << "-Pre";
}
-
+
if (dm.red_band) {
d << "-RedBand";
}
-
+
if (!dm.chain.empty ()) {
d << "-" << dm.chain;
}
if (video_frame_rate() != 24) {
d << "-" << video_frame_rate();
}
-
+
if (container()) {
d << "_" << container()->isdcf_name();
}
- ContentList cl = content ();
-
/* XXX: this uses the first bit of content only */
/* The standard says we don't do this for trailers, for some strange reason */
if (dcp_content_type() && dcp_content_type()->libdcp_kind() != dcp::TRAILER) {
Ratio const * content_ratio = 0;
- for (ContentList::iterator i = cl.begin(); i != cl.end(); ++i) {
- shared_ptr<VideoContent> vc = dynamic_pointer_cast<VideoContent> (*i);
+ BOOST_FOREACH (shared_ptr<Content> i, content ()) {
+ shared_ptr<VideoContent> vc = dynamic_pointer_cast<VideoContent> (i);
if (vc) {
/* Here's the first piece of video content */
if (vc->scale().ratio ()) {
break;
}
}
-
+
if (content_ratio && content_ratio != container()) {
d << "-" << content_ratio->isdcf_name();
}
if (!dm.territory.empty ()) {
d << "_" << dm.territory;
- if (!dm.rating.empty ()) {
+ if (dm.rating.empty ()) {
+ d << "-NR";
+ } else {
d << "-" << dm.rating;
}
}
/* Find all mapped channels */
- list<dcp::Channel> mapped;
- for (ContentList::const_iterator i = cl.begin(); i != cl.end(); ++i) {
- shared_ptr<const AudioContent> ac = dynamic_pointer_cast<const AudioContent> (*i);
- if (ac) {
- list<dcp::Channel> c = ac->audio_mapping().mapped_dcp_channels ();
- copy (c.begin(), c.end(), back_inserter (mapped));
- }
- }
-
- mapped.sort ();
- mapped.unique ();
-
- /* Count them */
-
int non_lfe = 0;
int lfe = 0;
- for (list<dcp::Channel>::const_iterator i = mapped.begin(); i != mapped.end(); ++i) {
- if (static_cast<int> (*i) >= audio_channels()) {
- /* This channel is mapped but is not included in the DCP */
- continue;
- }
-
- if ((*i) == dcp::LFE) {
+
+ if (audio_processor ()) {
+ /* Processors are mapped 1:1 to DCP outputs so we can guess the number of LFE/
+ non-LFE from the channel counts.
+ */
+ non_lfe = audio_processor()->out_channels ();
+ if (non_lfe >= 4) {
+ --non_lfe;
++lfe;
- } else {
- ++non_lfe;
+ }
+ } else {
+ list<int> mapped;
+ BOOST_FOREACH (shared_ptr<Content> i, content ()) {
+ shared_ptr<const AudioContent> ac = dynamic_pointer_cast<const AudioContent> (i);
+ if (ac) {
+ list<int> c = ac->audio_mapping().mapped_output_channels ();
+ copy (c.begin(), c.end(), back_inserter (mapped));
+ }
+ }
+
+ mapped.sort ();
+ mapped.unique ();
+
+ /* Count them */
+
+ for (list<int>::const_iterator i = mapped.begin(); i != mapped.end(); ++i) {
+ if (*i >= audio_channels()) {
+ /* This channel is mapped but is not included in the DCP */
+ continue;
+ }
+
+ if (static_cast<dcp::Channel> (*i) == dcp::LFE) {
+ ++lfe;
+ } else {
+ ++non_lfe;
+ }
}
}
/* XXX: HI/VI */
d << "_" << resolution_to_string (_resolution);
-
+
if (!dm.studio.empty ()) {
d << "_" << dm.studio;
}
} else {
d << "_SMPTE";
}
-
+
if (three_d ()) {
d << "-3D";
}
- if (!dm.package_type.empty ()) {
- d << "_" << dm.package_type;
+ bool vf = false;
+ BOOST_FOREACH (shared_ptr<Content> i, content ()) {
+ shared_ptr<const DCPContent> dc = dynamic_pointer_cast<const DCPContent> (i);
+ if (dc && (dc->reference_video() || dc->reference_audio() || dc->reference_subtitle())) {
+ vf = true;
+ }
+ }
+
+ if (vf) {
+ d << "_VF";
+ } else {
+ d << "_OV";
}
return d.str ();
filtered += unfiltered[i];
}
}
-
+
return filtered;
}
{
_three_d = t;
signal_changed (THREE_D);
+
+ if (_three_d && _isdcf_metadata.two_d_version_of_three_d) {
+ _isdcf_metadata.two_d_version_of_three_d = false;
+ signal_changed (ISDCF_METADATA);
+ }
}
void
}
void
-Film::set_burn_subtitles (bool b)
+Film::set_audio_processor (AudioProcessor const * processor)
{
- _burn_subtitles = b;
- signal_changed (BURN_SUBTITLES);
+ _audio_processor = processor;
+ signal_changed (AUDIO_PROCESSOR);
+ signal_changed (AUDIO_CHANNELS);
+}
+
+/** Set how the DCP should be split into reels and emit REEL_TYPE */
+void
+Film::set_reel_type (ReelType t)
+{
+	_reel_type = t;
+	signal_changed (REEL_TYPE);
+}
+
+/** Set the target reel length (used when reel type is REELTYPE_BY_LENGTH; see reels())
+ *  and emit REEL_LENGTH.
+ */
+void
+Film::set_reel_length (int64_t r)
+{
+	_reel_length = r;
+	signal_changed (REEL_LENGTH);
+}
void
}
boost::filesystem::path
-Film::j2c_path (int f, Eyes e, bool t) const
+Film::j2c_path (int reel, Frame frame, Eyes eyes, bool tmp) const
{
boost::filesystem::path p;
p /= "j2c";
SafeStringStream s;
s.width (8);
- s << setfill('0') << f;
+ s << setfill('0') << reel << "_" << frame;
- if (e == EYES_LEFT) {
+ if (eyes == EYES_LEFT) {
s << ".L";
- } else if (e == EYES_RIGHT) {
+ } else if (eyes == EYES_RIGHT) {
s << ".R";
}
-
+
s << ".j2c";
- if (t) {
+ if (tmp) {
s << ".tmp";
}
Film::cpls () const
{
vector<CPLSummary> out;
-
+
boost::filesystem::path const dir = directory ();
for (boost::filesystem::directory_iterator i = boost::filesystem::directory_iterator(dir); i != boost::filesystem::directory_iterator(); ++i) {
if (
}
}
}
-
- return out;
-}
-shared_ptr<Player>
-Film::make_player () const
-{
- return shared_ptr<Player> (new Player (shared_from_this (), _playlist));
+ return out;
}
void
signal_changed (KEY);
}
-shared_ptr<Playlist>
-Film::playlist () const
-{
- return _playlist;
-}
-
ContentList
Film::content () const
{
void
Film::examine_and_add_content (shared_ptr<Content> c)
{
- if (dynamic_pointer_cast<FFmpegContent> (c)) {
+ if (dynamic_pointer_cast<FFmpegContent> (c) && !_directory.empty ()) {
run_ffprobe (c->path(0), file ("ffprobe.log"), _log);
}
-
+
shared_ptr<Job> j (new ExamineContentJob (shared_from_this(), c));
_job_connections.push_back (
- j->Finished.connect (bind (&Film::maybe_add_content, this, boost::weak_ptr<Job> (j), boost::weak_ptr<Content> (c)))
+ j->Finished.connect (bind (&Film::maybe_add_content, this, weak_ptr<Job> (j), weak_ptr<Content> (c)))
);
-
+
JobManager::instance()->add (j);
}
if (!job || !job->finished_ok ()) {
return;
}
-
+
shared_ptr<Content> content = c.lock ();
- if (content) {
- add_content (content);
+ if (!content) {
+ return;
+ }
+
+ add_content (content);
+ if (Config::instance()->automatic_audio_analysis ()) {
+ shared_ptr<Playlist> playlist (new Playlist);
+ playlist->add (content);
+ boost::signals2::connection c;
+ JobManager::instance()->analyse_audio (
+ shared_from_this (), playlist, c, bind (&Film::audio_analysis_finished, this)
+ );
+ _audio_analysis_connections.push_back (c);
}
}
_playlist->move_later (c);
}
+/** @return length of the film from time 0 to the last thing on the playlist */
DCPTime
Film::length () const
{
}
void
-Film::playlist_content_changed (boost::weak_ptr<Content> c, int p)
+Film::playlist_content_changed (weak_ptr<Content> c, int p, bool frequent)
{
+ _dirty = true;
+
if (p == VideoContentProperty::VIDEO_FRAME_RATE) {
set_video_frame_rate (_playlist->best_dcp_frame_rate ());
- } else if (
- p == AudioContentProperty::AUDIO_MAPPING ||
- p == AudioContentProperty::AUDIO_CHANNELS) {
+ } else if (p == AudioContentProperty::AUDIO_STREAMS) {
signal_changed (NAME);
}
- emit (boost::bind (boost::ref (ContentChanged), c, p));
+ emit (boost::bind (boost::ref (ContentChanged), c, p, frequent));
}
void
{
signal_changed (CONTENT);
signal_changed (NAME);
-}
+}
int
Film::audio_frame_rate () const
{
- /* XXX */
+ BOOST_FOREACH (shared_ptr<Content> i, content ()) {
+ shared_ptr<AudioContent> a = dynamic_pointer_cast<AudioContent> (i);
+ if (a && a->has_rate_above_48k ()) {
+ return 96000;
+ }
+ }
+
return 48000;
}
) const
{
shared_ptr<const dcp::CPL> cpl (new dcp::CPL (cpl_file));
- shared_ptr<const dcp::Signer> signer = Config::instance()->signer();
+ shared_ptr<const dcp::CertificateChain> signer = Config::instance()->signer_chain ();
if (!signer->valid ()) {
throw InvalidSignerError ();
}
-
+
return dcp::DecryptedKDM (
cpl, key(), from, until, "DCP-o-matic", cpl->content_title_text(), dcp::LocalTime().as_string()
).encrypt (signer, target, formulation);
}
-list<dcp::EncryptedKDM>
+list<ScreenKDM>
Film::make_kdms (
list<shared_ptr<Screen> > screens,
boost::filesystem::path dcp,
dcp::Formulation formulation
) const
{
- list<dcp::EncryptedKDM> kdms;
+ list<ScreenKDM> kdms;
- for (list<shared_ptr<Screen> >::iterator i = screens.begin(); i != screens.end(); ++i) {
- if ((*i)->certificate) {
- kdms.push_back (make_kdm ((*i)->certificate.get(), dcp, from, until, formulation));
+ BOOST_FOREACH (shared_ptr<Screen> i, screens) {
+ if (i->certificate) {
+ kdms.push_back (ScreenKDM (i, make_kdm (i->certificate.get(), dcp, from, until, formulation)));
}
}
Film::should_be_enough_disk_space (double& required, double& available, bool& can_hard_link) const
{
/* Create a test file and see if we can hard-link it */
- boost::filesystem::path test = internal_video_mxf_dir() / "test";
- boost::filesystem::path test2 = internal_video_mxf_dir() / "test2";
+ boost::filesystem::path test = internal_video_asset_dir() / "test";
+ boost::filesystem::path test2 = internal_video_asset_dir() / "test2";
can_hard_link = true;
FILE* f = fopen_boost (test, "w");
if (f) {
boost::filesystem::remove (test2);
}
- boost::filesystem::space_info s = boost::filesystem::space (internal_video_mxf_dir ());
+ boost::filesystem::space_info s = boost::filesystem::space (internal_video_asset_dir ());
required = double (required_disk_space ()) / 1073741824.0f;
if (!can_hard_link) {
required *= 2;
Film::subtitle_language () const
{
set<string> languages;
-
+
ContentList cl = content ();
BOOST_FOREACH (shared_ptr<Content>& c, cl) {
shared_ptr<SubtitleContent> sc = dynamic_pointer_cast<SubtitleContent> (c);
return all;
}
+
+/** Change the gains of the supplied AudioMapping to make it a default
+ * for this film. The defaults are guessed based on what processor (if any)
+ * is in use and the number of input channels.
+ */
+void
+Film::make_audio_mapping_default (AudioMapping& mapping) const
+{
+	/* If a processor is in use, it knows its own sensible defaults */
+	if (audio_processor ()) {
+		audio_processor()->make_audio_mapping_default (mapping);
+		return;
+	}
+
+	mapping.make_zero ();
+
+	if (mapping.input_channels() == 1) {
+		/* Mono -> Centre */
+		mapping.set (0, static_cast<int> (dcp::CENTRE), 1);
+		return;
+	}
+
+	/* 1:1 mapping */
+	int const channels = min (mapping.input_channels(), mapping.output_channels());
+	for (int c = 0; c < channels; ++c) {
+		mapping.set (c, c, 1);
+	}
+}
+
+/** @return The names of the channels that audio contents' outputs are passed into;
+ * this is either the DCP or a AudioProcessor.
+ */
+vector<string>
+Film::audio_output_names () const
+{
+	if (audio_processor ()) {
+		return audio_processor()->input_names ();
+	}
+
+	vector<string> n;
+	n.push_back (_("L"));
+	n.push_back (_("R"));
+	n.push_back (_("C"));
+	n.push_back (_("Lfe"));
+	n.push_back (_("Ls"));
+	n.push_back (_("Rs"));
+	n.push_back (_("HI"));
+	n.push_back (_("VI"));
+	n.push_back (_("Lc"));
+	n.push_back (_("Rc"));
+	n.push_back (_("BsL"));
+	n.push_back (_("BsR"));
+
+	/* Clamp to the number of names we have: without this, audio_channels()
+	   greater than n.size() would make the range constructor read past the
+	   end of n.
+	*/
+	return vector<string> (n.begin(), n.begin() + min (audio_channels(), int (n.size())));
+}
+
+/** Repeat some content n times, delegating to the playlist */
+void
+Film::repeat_content (ContentList c, int n)
+{
+	_playlist->repeat (c, n);
+}
+
+/** Remove some content from the film, delegating to the playlist */
+void
+Film::remove_content (ContentList c)
+{
+	_playlist->remove (c);
+}
+
+/** Called when an automatically-started audio analysis job finishes;
+ *  currently does nothing.
+ */
+void
+Film::audio_analysis_finished ()
+{
+	/* XXX */
+}
+
+/** @return Period of each reel that the DCP will be split into, according to
+ *  the film's reel type and (for REELTYPE_BY_LENGTH) its target reel length.
+ */
+list<DCPTimePeriod>
+Film::reels () const
+{
+	list<DCPTimePeriod> p;
+	DCPTime const len = length().round_up (video_frame_rate ());
+
+	switch (reel_type ()) {
+	case REELTYPE_SINGLE:
+		p.push_back (DCPTimePeriod (DCPTime (), len));
+		break;
+	case REELTYPE_BY_VIDEO_CONTENT:
+	{
+		optional<DCPTime> last_split;
+		shared_ptr<VideoContent> last_video;
+		BOOST_FOREACH (shared_ptr<Content> c, content ()) {
+			shared_ptr<VideoContent> v = dynamic_pointer_cast<VideoContent> (c);
+			if (v) {
+				BOOST_FOREACH (DCPTime t, v->reel_split_points()) {
+					if (last_split) {
+						p.push_back (DCPTimePeriod (last_split.get(), t));
+					}
+					last_split = t;
+				}
+				last_video = v;
+			}
+		}
+
+		DCPTime video_end = last_video ? last_video->end() : DCPTime(0);
+		if (last_split) {
+			/* Definitely go from the last split to the end of the video content */
+			p.push_back (DCPTimePeriod (last_split.get(), video_end));
+		}
+
+		if (video_end < len) {
+			/* And maybe go after that as well if there is any non-video hanging over the end */
+			p.push_back (DCPTimePeriod (video_end, len));
+		}
+		break;
+	}
+	case REELTYPE_BY_LENGTH:
+	{
+		DCPTime current;
+		/* Integer-divide reel length by the size of one frame to give the number of frames per reel */
+		Frame const reel_in_frames = _reel_length / ((j2k_bandwidth() / video_frame_rate()) / 8);
+		while (current < len) {
+			DCPTime end = min (len, current + DCPTime::from_frames (reel_in_frames, video_frame_rate ()));
+			p.push_back (DCPTimePeriod (current, end));
+			current = end;
+		}
+		break;
+	}
+	}
+
+	return p;
+}