+ node->add_child("AudioGain")->add_child_text(raw_convert<string>(_gain));
+ node->add_child("AudioDelay")->add_child_text(raw_convert<string>(_delay));
+ node->add_child("AudioFadeIn")->add_child_text(raw_convert<string>(_fade_in.get()));
+ node->add_child("AudioFadeOut")->add_child_text(raw_convert<string>(_fade_out.get()));
+ node->add_child("AudioUseSameFadesAsVideo")->add_child_text(_use_same_fades_as_video ? "1" : "0");
+}
+
+
+/** Set the gain, in dB, to be applied to this content's audio.
+ *  Emits AudioContentProperty::GAIN via maybe_set (which presumably
+ *  only signals when the value actually changes — confirm in Content).
+ */
+void
+AudioContent::set_gain (double g)
+{
+ maybe_set (_gain, g, AudioContentProperty::GAIN);
+}
+
+
+/** Set the delay to be applied to this content's audio
+ *  (units not visible here — presumably milliseconds; confirm with callers).
+ *  Emits AudioContentProperty::DELAY via maybe_set.
+ */
+void
+AudioContent::set_delay (int d)
+{
+ maybe_set (_delay, d, AudioContentProperty::DELAY);
+}
+
+
+/** @return a short, untranslated, technical description of our audio:
+ *  the channel count and sample rate of every stream.
+ */
+string
+AudioContent::technical_summary () const
+{
+	string summary = "audio: ";
+	for (auto stream: streams()) {
+		summary += String::compose ("stream channels %1 rate %2 ", stream->channels(), stream->frame_rate());
+	}
+	return summary;
+}
+
+
+/** Take a single AudioMapping covering all our streams' input channels
+ *  (in streams() order) and split it up, giving each stream the slice of
+ *  rows that correspond to its own input channels.
+ *  Emits AudioContentProperty::STREAMS when done.
+ */
+void
+AudioContent::set_mapping (AudioMapping mapping)
+{
+	ContentChangeSignaller cc (_parent, AudioContentProperty::STREAMS);
+
+	/* first is the row in `mapping' at which this stream's channels start */
+	int first = 0;
+	for (auto stream: streams()) {
+		AudioMapping stream_mapping (stream->channels(), MAX_DCP_AUDIO_CHANNELS);
+		for (int input = 0; input < stream->channels(); ++input) {
+			for (int output = 0; output < MAX_DCP_AUDIO_CHANNELS; ++output) {
+				stream_mapping.set (input, output, mapping.get(first + input, output));
+			}
+		}
+		stream->set_mapping (stream_mapping);
+		first += stream->channels();
+	}
+}
+
+
+/** @return a single AudioMapping which pastes together the mappings of
+ *  all our streams, their input channels appearing one after another in
+ *  streams() order.  Inverse of set_mapping().
+ */
+AudioMapping
+AudioContent::mapping () const
+{
+	/* Total number of input channels across all streams */
+	int channels = 0;
+	for (auto i: streams()) {
+		channels += i->channels ();
+	}
+
+	AudioMapping merged (channels, MAX_DCP_AUDIO_CHANNELS);
+	merged.make_zero ();
+
+	/* c indexes the input channels of the merged mapping */
+	int c = 0;
+	for (auto i: streams()) {
+		auto mapping = i->mapping ();
+		for (int j = 0; j < mapping.input_channels(); ++j) {
+			for (int k = 0; k < MAX_DCP_AUDIO_CHANNELS; ++k) {
+				/* Outputs beyond those of the stream's own mapping
+				   stay at the zero set by make_zero() above.
+				*/
+				if (k < mapping.output_channels()) {
+					merged.set (c, k, mapping.get(j, k));
+				}
+			}
+			++c;
+		}
+	}
+
+	return merged;
+}
+
+
+/** @return the frame rate that this content should be resampled to in order
+ *  that it is in sync with the active video content at its start time.
+ */
+int
+AudioContent::resampled_frame_rate (shared_ptr<const Film> film) const
+{
+	double rate = film->audio_frame_rate ();
+
+	FrameRateChange const frc (film, _parent);
+
+	/* If the DCP runs the video at a different speed to the source (i.e.
+	   it will look slower or faster than the original) the audio must be
+	   resampled by the inverse factor so that it stays in sync.
+	*/
+	if (frc.change_speed) {
+		rate /= frc.speed_up;
+	}
+
+	return lrint (rate);
+}
+
+/** @return a translated, user-visible summary of what resampling (if any)
+ *  will be applied to this content's audio, or "" if there is no audio.
+ */
+string
+AudioContent::processing_description (shared_ptr<const Film> film) const
+{
+	if (streams().empty()) {
+		return "";
+	}
+
+	/* Possible answers are:
+	   1. all audio will be resampled from x to y.
+	   2. all audio will be resampled to y (from a variety of rates)
+	   3. some audio will be resampled to y (from a variety of rates)
+	   4. nothing will be resampled.
+	*/
+
+	auto const target = resampled_frame_rate (film);
+
+	bool any_unchanged = false;
+	bool any_changed = false;
+	bool all_same_rate = true;
+	optional<int> last_rate;
+
+	for (auto stream: streams()) {
+		if (stream->frame_rate() == target) {
+			any_unchanged = true;
+		} else {
+			any_changed = true;
+		}
+
+		if (last_rate && last_rate.get() != stream->frame_rate()) {
+			all_same_rate = false;
+		}
+		last_rate = stream->frame_rate ();
+	}
+
+	if (!any_changed) {
+		return _("Audio will not be resampled");
+	}
+
+	if (any_unchanged) {
+		return String::compose (_("Some audio will be resampled to %1Hz"), target);
+	}
+
+	/* Everything is being resampled */
+	if (all_same_rate) {
+		return String::compose (_("Audio will be resampled from %1Hz to %2Hz"), last_rate.get(), target);
+	}
+
+	return String::compose (_("Audio will be resampled to %1Hz"), target);
+}
+
+
+/** @return User-visible names of each of our audio channels, in the form
+ *  "stream:channel" with both numbers 1-based, paired with a 0-based index
+ *  which runs over all channels of all streams.
+ */
+vector<NamedChannel>
+AudioContent::channel_names () const
+{
+	vector<NamedChannel> names;
+
+	int index = 0;
+	int stream_number = 1;
+	for (auto stream: streams()) {
+		for (int channel = 0; channel < stream->channels(); ++channel) {
+			names.push_back (NamedChannel(String::compose("%1:%2", stream_number, channel + 1), index));
+			++index;
+		}
+		++stream_number;
+	}
+
+	return names;
+}
+
+
+/** Append user-visible properties of this audio (channel count, sample
+ *  rates, lengths at various rates) to the list p.  Rows that describe a
+ *  single stream are only added when there is exactly one stream, since
+ *  they would be ambiguous otherwise.
+ */
+void
+AudioContent::add_properties (shared_ptr<const Film> film, list<UserProperty>& p) const
+{
+ /* Only report per-stream details when there is exactly one stream */
+ shared_ptr<const AudioStream> stream;
+ if (streams().size() == 1) {
+ stream = streams().front();
+ }
+
+ if (stream) {
+ p.push_back (UserProperty(UserProperty::AUDIO, _("Channels"), stream->channels()));
+ p.push_back (UserProperty(UserProperty::AUDIO, _("Content audio sample rate"), stream->frame_rate(), _("Hz")));
+ }
+
+ /* Full length of the parent content as a ContentTime, using the frame
+    rate change between the content's video rate and the film's.
+ */
+ FrameRateChange const frc (_parent->active_video_frame_rate(film), film->video_frame_rate());
+ ContentTime const c (_parent->full_length(film), frc);
+
+ p.push_back (
+ UserProperty (UserProperty::LENGTH, _("Full length in video frames at content rate"), c.frames_round(frc.source))
+ );
+
+ if (stream) {
+ p.push_back (
+ UserProperty (
+ UserProperty::LENGTH,
+ _("Full length in audio samples at content rate"),
+ c.frames_round (stream->frame_rate ())
+ )
+ );
+ }
+
+ /* Rates and lengths as they will be in the DCP after any resampling */
+ p.push_back (UserProperty(UserProperty::AUDIO, _("DCP sample rate"), resampled_frame_rate(film), _("Hz")));
+ p.push_back (UserProperty(UserProperty::LENGTH, _("Full length in video frames at DCP rate"), c.frames_round (frc.dcp)));
+
+ if (stream) {
+ p.push_back (
+ UserProperty (
+ UserProperty::LENGTH,
+ _("Full length in audio samples at DCP rate"),
+ c.frames_round(resampled_frame_rate(film))
+ )
+ );
+ }
+}
+
+
+/** Replace our list of audio streams, emitting
+ *  AudioContentProperty::STREAMS when done.
+ */
+void
+AudioContent::set_streams (vector<AudioStreamPtr> streams)
+{
+ ContentChangeSignaller cc (_parent, AudioContentProperty::STREAMS);
+
+ /* Inner scope so that the mutex is released before cc goes out of
+    scope — presumably ContentChangeSignaller emits the change signal
+    from its destructor (confirm), and we must not hold _mutex then.
+ */
+ {
+ boost::mutex::scoped_lock lm (_mutex);
+ _streams = streams;
+ }
+}
+
+
+AudioStreamPtr
+AudioContent::stream () const
+{
+ boost::mutex::scoped_lock lm (_mutex);
+ DCPOMATIC_ASSERT (_streams.size() == 1);
+ return _streams.front ();