+
+/** Change the gains of the supplied AudioMapping to make it a default
+ * for this film. The defaults are guessed based on what processor (if any)
+ * is in use and the number of input channels.
+ */
+void
+Film::make_audio_mapping_default (AudioMapping& mapping) const
+{
+ if (audio_processor ()) {
+ audio_processor()->make_audio_mapping_default (mapping);
+ } else {
+ mapping.make_zero ();
+ if (mapping.input_channels() == 1) {
+ /* Mono -> Centre */
+ mapping.set (0, static_cast<int> (dcp::CENTRE), 1);
+ } else {
+ /* 1:1 mapping */
+ for (int i = 0; i < min (mapping.input_channels(), mapping.output_channels()); ++i) {
+ mapping.set (i, i, 1);
+ }
+ }
+ }
+}
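+
+/* Usage sketch (illustrative only; the AudioMapping constructor arguments shown
+ * here are assumed rather than part of this change):
+ *
+ *   AudioMapping mapping (2, film->audio_channels ());
+ *   film->make_audio_mapping_default (mapping);
+ *
+ * With two inputs and no audio processor this gives a 1:1 map: input 0 -> L and
+ * input 1 -> R at unity gain.
+ */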
+
+/** @return The names of the channels that audio contents' outputs are passed into;
+ * this is either the DCP itself or an AudioProcessor.
+ */
+vector<string>
+Film::audio_output_names () const
+{
+ if (audio_processor ()) {
+ return audio_processor()->input_names ();
+ }
+
+ vector<string> n;
+ n.push_back (_("L"));
+ n.push_back (_("R"));
+ n.push_back (_("C"));
+ n.push_back (_("Lfe"));
+ n.push_back (_("Ls"));
+ n.push_back (_("Rs"));
+ n.push_back (_("HI"));
+ n.push_back (_("VI"));
+ n.push_back (_("Lc"));
+ n.push_back (_("Rc"));
+ n.push_back (_("BsL"));
+ n.push_back (_("BsR"));
+
+ return vector<string> (n.begin(), n.begin() + audio_channels ());
+}
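+
+/* Illustrative example (values assumed, not part of this change): with no audio
+ * processor and audio_channels() == 6 this returns L, R, C, Lfe, Ls and Rs, which
+ * could be used to label the output columns of an audio mapping view.
+ */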
+
+void
+Film::repeat_content (ContentList c, int n)
+{
+ _playlist->repeat (c, n);
+}
+
+void
+Film::remove_content (ContentList c)
+{
+ _playlist->remove (c);
+}
+
+void
+Film::audio_analysis_finished ()
+{
+ /* XXX */
+}
+
+list<DCPTimePeriod>
+Film::reels () const
+{
+ list<DCPTimePeriod> p;
+ DCPTime const len = length().round_up (video_frame_rate ());
+
+ switch (reel_type ()) {
+ case REELTYPE_SINGLE:
+ p.push_back (DCPTimePeriod (DCPTime (), len));
+ break;
+ case REELTYPE_BY_VIDEO_CONTENT:
+ {
+ optional<DCPTime> last_split;
+ shared_ptr<VideoContent> last_video;
+ ContentList cl = content ();
+ BOOST_FOREACH (shared_ptr<Content> c, cl) {
+ shared_ptr<VideoContent> v = dynamic_pointer_cast<VideoContent> (c);
+ if (v) {
+ BOOST_FOREACH (DCPTime t, v->reel_split_points()) {
+ if (last_split) {
+ p.push_back (DCPTimePeriod (last_split.get(), t));
+ }
+ last_split = t;
+ }
+ last_video = v;
+ }
+ }
+
+ DCPTime video_end = last_video ? last_video->end() : DCPTime(0);
+ if (last_split) {
+ /* Definitely go from the last split to the end of the video content */
+ p.push_back (DCPTimePeriod (last_split.get(), video_end));
+ }
+
+ if (video_end < len) {
+ /* And maybe go after that as well if there is any non-video hanging over the end */
+ p.push_back (DCPTimePeriod (video_end, len));
+ }
+ break;
+ }
+ case REELTYPE_BY_LENGTH:
+ {
+ DCPTime current;
+ /* Integer-divide the reel length (in bytes) by the approximate size of one frame (in bytes) to give the number of frames per reel */
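+ /* For example, at 250 Mbit/s and 24 fps one frame is about 250e6 / 24 / 8 bytes,
+ or roughly 1.3 MB, so a reel length of 1 GB would give around 768 frames per
+ reel (figures illustrative; this assumes j2k_bandwidth() is in bits per second
+ and _reel_length is in bytes). */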
+ Frame const reel_in_frames = _reel_length / ((j2k_bandwidth() / video_frame_rate()) / 8);
+ while (current < len) {
+ DCPTime end = min (len, current + DCPTime::from_frames (reel_in_frames, video_frame_rate ()));
+ p.push_back (DCPTimePeriod (current, end));
+ current = end;
+ }
+ break;
+ }
+ }
+
+ return p;
+}
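+
+/* Sketch of how these periods might be consumed when the DCP is written
+ * (illustrative only; the from/to member names are assumed here, not part of
+ * this change):
+ *
+ *   BOOST_FOREACH (DCPTimePeriod const & period, film->reels ()) {
+ *           // emit one reel covering period.from up to (but not including) period.to
+ *   }
+ */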