#include "digester.h"
#include "audio_processor.h"
#include "compose.hpp"
+#include "audio_buffers.h"
+#include <dcp/locale_convert.h>
#include <dcp/util.h>
+#include <dcp/raw_convert.h>
#include <dcp/picture_asset.h>
#include <dcp/sound_asset.h>
#include <dcp/subtitle_asset.h>
extern "C" {
#include <libavfilter/avfilter.h>
+#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
}
#include <curl/curl.h>
#include <glib.h>
#include <pangomm/init.h>
#include <boost/algorithm/string.hpp>
+#include <boost/range/algorithm/replace_if.hpp>
#include <boost/thread.hpp>
#include <boost/filesystem.hpp>
#ifdef DCPOMATIC_WINDOWS
using std::cout;
using std::bad_alloc;
using std::set_terminate;
+using std::make_pair;
using boost::shared_ptr;
using boost::thread;
using boost::optional;
using boost::bad_lexical_cast;
using dcp::Size;
using dcp::raw_convert;
+using dcp::locale_convert;
/** Path to our executable, required by the stacktrace stuff and filled
* in during App::onInit().
return buffer;
}
+string
+time_to_hmsf (DCPTime time, Frame rate)
+{
+ Frame f = time.frames_round (rate);
+ int s = f / rate;
+ f -= (s * rate);
+ int m = s / 60;
+ s -= m * 60;
+ int h = m / 60;
+ m -= h * 60;
+
+ char buffer[64];
+ snprintf (buffer, sizeof(buffer), "%d:%02d:%02d.%d", h, m, s, static_cast<int>(f));
+ return buffer;
+}
+
/** @param s Number of seconds.
* @return String containing an approximate description of s (e.g. "about 2 hours")
*/
if (hours) {
if (m > 30 && !minutes) {
/// TRANSLATORS: h here is an abbreviation for hours
- ap += raw_convert<string>(h + 1) + _("h");
+ ap += locale_convert<string>(h + 1) + _("h");
} else {
/// TRANSLATORS: h here is an abbreviation for hours
- ap += raw_convert<string>(h) + _("h");
+ ap += locale_convert<string>(h) + _("h");
}
if (minutes || seconds) {
/* Minutes */
if (s > 30 && !seconds) {
/// TRANSLATORS: m here is an abbreviation for minutes
- ap += raw_convert<string>(m + 1) + _("m");
+ ap += locale_convert<string>(m + 1) + _("m");
} else {
/// TRANSLATORS: m here is an abbreviation for minutes
- ap += raw_convert<string>(m) + _("m");
+ ap += locale_convert<string>(m) + _("m");
}
if (seconds) {
if (seconds) {
/* Seconds */
/// TRANSLATORS: s here is an abbreviation for seconds
- ap += raw_convert<string>(s) + _("s");
+ ap += locale_convert<string>(s) + _("s");
}
return ap;
SetUnhandledExceptionFilter(exception_handler);
#endif
+ av_register_all ();
avfilter_register_all ();
#ifdef DCPOMATIC_OSX
while (i < int64_t (files.size()) && to_do > 0) {
FILE* f = fopen_boost (files[i], "rb");
if (!f) {
- throw OpenFileError (files[i].string());
+ throw OpenFileError (files[i].string(), errno, true);
}
boost::uintmax_t this_time = min (to_do, boost::filesystem::file_size (files[i]));
while (i >= 0 && to_do > 0) {
FILE* f = fopen_boost (files[i], "rb");
if (!f) {
- throw OpenFileError (files[i].string());
+ throw OpenFileError (files[i].string(), errno, true);
}
boost::uintmax_t this_time = min (to_do, boost::filesystem::file_size (files[i]));
/** Round a number up to the nearest multiple of another number.
* @param c Index.
- * @param s Array of numbers to round, indexed by c.
+ * @param stride Array of numbers to round, indexed by c.
* @param t Multiple to round to.
* @return Rounded number.
*/
DCPOMATIC_ASSERT (MAX_DCP_AUDIO_CHANNELS == 16);
/// TRANSLATORS: these are the names of audio channels; Lfe (sub) is the low-frequency
- /// enhancement channel (sub-woofer). HI is the hearing-impaired audio track and
- /// VI is the visually-impaired audio track (audio describe).
+ /// enhancement channel (sub-woofer).
string const channels[] = {
_("Left"),
_("Right"),
return channels[c];
}
+string
+short_audio_channel_name (int c)
+{
+ DCPOMATIC_ASSERT (MAX_DCP_AUDIO_CHANNELS == 16);
+
+ /// TRANSLATORS: these are short names of audio channels; Lfe is the low-frequency
+ /// enhancement channel (sub-woofer). HI is the hearing-impaired audio track and
+ /// VI is the visually-impaired audio track (audio describe). DBP is the D-BOX
+ /// primary channel and DBS is the D-BOX secondary channel.
+ string const channels[] = {
+ _("L"),
+ _("R"),
+ _("C"),
+ _("Lfe"),
+ _("Ls"),
+ _("Rs"),
+ _("HI"),
+ _("VI"),
+ _("Lc"),
+ _("Rc"),
+ _("BsL"),
+ _("BsR"),
+ _("DBP"),
+ _("DBS"),
+ "",
+ ""
+ };
+
+ return channels[c];
+}
+
+
bool
valid_image_file (boost::filesystem::path f)
{
);
}
+bool
+valid_sound_file (boost::filesystem::path f)
+{
+ if (boost::starts_with (f.leaf().string(), "._")) {
+ return false;
+ }
+
+ string ext = f.extension().string();
+ transform (ext.begin(), ext.end(), ext.begin(), ::tolower);
+ return (ext == ".wav" || ext == ".mp3" || ext == ".aif" || ext == ".aiff");
+}
+
bool
valid_j2k_file (boost::filesystem::path f)
{
/** Replace characters that commonly cause trouble in filenames
 *  (path separators and the drive colon) with underscores,
 *  leaving everything else untouched.
 *  @param f String to tidy.
 *  @return Tidied copy of f.
 */
std::string
tidy_for_filename (std::string f)
{
	for (size_t i = 0; i < f.length(); ++i) {
		if (f[i] == '\\' || f[i] == '/' || f[i] == ':') {
			f[i] = '_';
		}
	}
	return f;
}
dcp::Size
{
dcp::NameFormat::Map values;
values['t'] = "j2c";
- values['i'] = asset->id();
values['r'] = raw_convert<string> (reel_index + 1);
values['n'] = raw_convert<string> (reel_count);
if (summary) {
- values['c'] = summary.get();
+ values['c'] = careful_string_filter (summary.get());
}
- return Config::instance()->dcp_asset_filename_format().get(values) + ".mxf";
+ return Config::instance()->dcp_asset_filename_format().get(values, "_" + asset->id() + ".mxf");
}
string
{
dcp::NameFormat::Map values;
values['t'] = "pcm";
- values['i'] = asset->id();
values['r'] = raw_convert<string> (reel_index + 1);
values['n'] = raw_convert<string> (reel_count);
if (summary) {
- values['c'] = summary.get();
+ values['c'] = careful_string_filter (summary.get());
}
- return Config::instance()->dcp_asset_filename_format().get(values) + ".mxf";
+ return Config::instance()->dcp_asset_filename_format().get(values, "_" + asset->id() + ".mxf");
}
float
}
}
/** Filter a string down to characters believed safe for both DCP names and
 *  filenames.  There is no definitive list of what is really allowed, so this
 *  is a conservative guess: safety first and all that.
 *  @param s String to filter.
 *  @return Copy of s containing only the permitted characters.
 */
std::string
careful_string_filter (std::string s)
{
	std::string const allowed = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-_%.";

	std::string out;
	out.reserve (s.size());
	for (std::string::const_iterator i = s.begin(); i != s.end(); ++i) {
		if (allowed.find (*i) != std::string::npos) {
			out += *i;
		}
	}

	return out;
}
+
+/** @param mapped List of mapped audio channels from a Film.
+ * @param channels Total number of channels in the Film.
+ * @return First: number of non-LFE channels, second: number of LFE channels.
+ */
+pair<int, int>
+audio_channel_types (list<int> mapped, int channels)
{
- return !s.empty ();
+ int non_lfe = 0;
+ int lfe = 0;
+
+ BOOST_FOREACH (int i, mapped) {
+ if (i >= channels) {
+ /* This channel is mapped but is not included in the DCP */
+ continue;
+ }
+
+ if (static_cast<dcp::Channel> (i) == dcp::LFE) {
+ ++lfe;
+ } else {
+ ++non_lfe;
+ }
+ }
+
+ return make_pair (non_lfe, lfe);
+}
+
+shared_ptr<AudioBuffers>
+remap (shared_ptr<const AudioBuffers> input, int output_channels, AudioMapping map)
+{
+ shared_ptr<AudioBuffers> mapped (new AudioBuffers (output_channels, input->frames()));
+ mapped->make_silent ();
+
+ for (int i = 0; i < map.input_channels(); ++i) {
+ for (int j = 0; j < mapped->channels(); ++j) {
+ if (map.get (i, static_cast<dcp::Channel> (j)) > 0) {
+ mapped->accumulate_channel (
+ input.get(),
+ i,
+ static_cast<dcp::Channel> (j),
+ map.get (i, static_cast<dcp::Channel> (j))
+ );
+ }
+ }
+ }
+
+ return mapped;
}