# Use distro-provided FFmpeg on Arch
deps = []
- deps.append(('libdcp', '6c3db78'))
- deps.append(('libsub', 'af2520e'))
+ deps.append(('libdcp', 'e5f89bf'))
+ deps.append(('libsub', '948df33'))
deps.append(('leqm-nrt', '131f971'))
deps.append(('rtaudio', 'f619b76'))
# We get our OpenSSL libraries from the environment, but we
if (!guessed) {
/* If we have no idea, just put it on centre */
- set (0, static_cast<int>(dcp::CENTRE), 1);
+ set (0, static_cast<int>(dcp::Channel::CENTRE), 1);
}
} else {
/* 1:1 mapping */
if (state_version <= 5) {
/* Old-style: on/off mapping */
for (auto i: node->node_children ("Map")) {
- set (i->number_child<int>("ContentIndex"), static_cast<dcp::Channel>(i->number_child<int>("DCP")), 1);
+ set (i->number_child<int>("ContentIndex"), i->number_child<int>("DCP"), 1);
}
} else {
for (auto i: node->node_children("Gain")) {
if (state_version < 32) {
set (
i->number_attribute<int>("Content"),
- static_cast<dcp::Channel>(i->number_attribute<int>("DCP")),
+ i->number_attribute<int>("DCP"),
raw_convert<float>(i->content())
);
} else {
}
}
+
+void
+AudioMapping::set (dcp::Channel input_channel, int output_channel, float g)
+{
+ set (static_cast<int>(input_channel), output_channel, g);
+}
+
+
+void
+AudioMapping::set (int input_channel, dcp::Channel output_channel, float g)
+{
+ set (input_channel, static_cast<int>(output_channel), g);
+}
+
+
void
AudioMapping::set (int input_channel, int output_channel, float g)
{
_gain[input_channel][output_channel] = g;
}
+
+float
+AudioMapping::get (int input_channel, dcp::Channel output_channel) const
+{
+ return get (input_channel, static_cast<int>(output_channel));
+}
+
+
float
AudioMapping::get (int input_channel, int output_channel) const
{
for (auto const& i: _gain) {
for (auto j: dcp::used_audio_channels()) {
- if (abs(i[j]) > minus_96_db) {
- mapped.push_back (j);
+ if (abs(i[static_cast<int>(j)]) > minus_96_db) {
+ mapped.push_back (static_cast<int>(j));
}
}
}
/*
- Copyright (C) 2013-2015 Carl Hetherington <cth@carlh.net>
+ Copyright (C) 2013-2021 Carl Hetherington <cth@carlh.net>
This file is part of DCP-o-matic.
#ifndef DCPOMATIC_AUDIO_MAPPING_H
#define DCPOMATIC_AUDIO_MAPPING_H
+#include <dcp/types.h>
#include <libcxml/cxml.h>
#include <vector>
void make_zero ();
void make_default (AudioProcessor const * processor, boost::optional<boost::filesystem::path> filename = boost::optional<boost::filesystem::path>());
+ void set (dcp::Channel input_channel, int output_channel, float);
void set (int input_channel, int output_channel, float);
+ void set (int input_channel, dcp::Channel output_channel, float);
float get (int input_channel, int output_channel) const;
+ float get (int input_channel, dcp::Channel output_channel) const;
int input_channels () const {
return _input_channels;
}
}
- _yuv_to_rgb = static_cast<dcp::YUVToRGB> (node->optional_number_child<int>("YUVToRGB").get_value_or (dcp::YUV_TO_RGB_REC601));
+ _yuv_to_rgb = static_cast<dcp::YUVToRGB>(node->optional_number_child<int>("YUVToRGB").get_value_or(static_cast<int>(dcp::YUVToRGB::REC601)));
auto m = node->node_children ("Matrix");
if (!m.empty ()) {
Map so that Lt = L(-3dB) + Ls(-3dB) + C(-6dB) + Lfe(-10dB)
Rt = R(-3dB) + Rs(-3dB) + C(-6dB) + Lfe(-10dB)
*/
- _audio_mapping->set (dcp::LEFT, 0, 1 / sqrt(2)); // L -> Lt
- _audio_mapping->set (dcp::RIGHT, 1, 1 / sqrt(2)); // R -> Rt
- _audio_mapping->set (dcp::CENTRE, 0, 1 / 2.0); // C -> Lt
- _audio_mapping->set (dcp::CENTRE, 1, 1 / 2.0); // C -> Rt
- _audio_mapping->set (dcp::LFE, 0, 1 / sqrt(10)); // Lfe -> Lt
- _audio_mapping->set (dcp::LFE, 1, 1 / sqrt(10)); // Lfe -> Rt
- _audio_mapping->set (dcp::LS, 0, 1 / sqrt(2)); // Ls -> Lt
- _audio_mapping->set (dcp::RS, 1, 1 / sqrt(2)); // Rs -> Rt
+ _audio_mapping->set (dcp::Channel::LEFT, 0, 1 / sqrt(2)); // L -> Lt
+ _audio_mapping->set (dcp::Channel::RIGHT, 1, 1 / sqrt(2)); // R -> Rt
+ _audio_mapping->set (dcp::Channel::CENTRE, 0, 1 / 2.0); // C -> Lt
+ _audio_mapping->set (dcp::Channel::CENTRE, 1, 1 / 2.0); // C -> Rt
+ _audio_mapping->set (dcp::Channel::LFE, 0, 1 / sqrt(10)); // Lfe -> Lt
+ _audio_mapping->set (dcp::Channel::LFE, 1, 1 / sqrt(10)); // Lfe -> Rt
+ _audio_mapping->set (dcp::Channel::LS, 0, 1 / sqrt(2)); // Ls -> Lt
+ _audio_mapping->set (dcp::Channel::RS, 1, 1 / sqrt(2)); // Rs -> Rt
} else {
/* 1:1 mapping */
for (int i = 0; i < min (MAX_DCP_AUDIO_CHANNELS, output_channels); ++i) {
}
film->set_encrypted (dcp->encrypted());
film->set_reel_type (REELTYPE_BY_VIDEO_CONTENT);
- film->set_interop (dcp->standard() == dcp::INTEROP);
+ film->set_interop (dcp->standard() == dcp::Standard::INTEROP);
film->set_three_d (dcp->three_d());
if (dcp->video) {
, dcp_content_type (0)
, container_ratio (0)
, still_length (10)
- , standard (dcp::SMPTE)
+ , standard (dcp::Standard::SMPTE)
, no_use_isdcf_name (false)
, fourk (false)
{
}
if (standard_string == "interop") {
- standard = dcp::INTEROP;
+ standard = dcp::Standard::INTEROP;
}
if (content.empty()) {
if (!_tolerant) {
/** We accept and ignore EMPTY_ASSET_PATH and EXTERNAL_ASSET but everything else is bad */
for (auto j: notes) {
- if (j.code() == dcp::VerificationNote::EMPTY_ASSET_PATH || j.code() == dcp::VerificationNote::EXTERNAL_ASSET) {
+ if (j.code() == dcp::VerificationNote::Code::EMPTY_ASSET_PATH || j.code() == dcp::VerificationNote::Code::EXTERNAL_ASSET) {
LOG_WARNING("Empty path in ASSETMAP of %1", i.string());
} else {
boost::throw_exception(dcp::ReadError(dcp::note_to_string(j)));
if (node->optional_string_child("Standard")) {
string const s = node->optional_string_child("Standard").get();
if (s == "Interop") {
- _standard = dcp::INTEROP;
+ _standard = dcp::Standard::INTEROP;
} else if (s == "SMPTE") {
- _standard = dcp::SMPTE;
+ _standard = dcp::Standard::SMPTE;
} else {
DCPOMATIC_ASSERT (false);
}
node->add_child("ReferenceClosedCaption")->add_child_text(_reference_text[TEXT_CLOSED_CAPTION] ? "1" : "0");
if (_standard) {
switch (_standard.get ()) {
- case dcp::INTEROP:
+ case dcp::Standard::INTEROP:
node->add_child("Standard")->add_child_text ("Interop");
break;
- case dcp::SMPTE:
+ case dcp::Standard::SMPTE:
node->add_child("Standard")->add_child_text ("SMPTE");
break;
default:
{
/* We must be using the same standard as the film */
if (_standard) {
- if (_standard.get() == dcp::INTEROP && !film->interop()) {
+ if (_standard.get() == dcp::Standard::INTEROP && !film->interop()) {
/// TRANSLATORS: this string will follow "Cannot reference this DCP: "
why_not = _("it is Interop and the film is set to SMPTE.");
return false;
- } else if (_standard.get() == dcp::SMPTE && film->interop()) {
+ } else if (_standard.get() == dcp::Standard::SMPTE && film->interop()) {
/// TRANSLATORS: this string will follow "Cannot reference this DCP: "
why_not = _("it is SMPTE and the film is set to Interop.");
return false;
void
DCPContentType::setup_dcp_content_types ()
{
- _dcp_content_types.push_back (new DCPContentType (_("Feature"), dcp::FEATURE, N_("FTR")));
- _dcp_content_types.push_back (new DCPContentType (_("Short"), dcp::SHORT, N_("SHR")));
- _dcp_content_types.push_back (new DCPContentType (_("Trailer"), dcp::TRAILER, N_("TLR")));
- _dcp_content_types.push_back (new DCPContentType (_("Test"), dcp::TEST, N_("TST")));
- _dcp_content_types.push_back (new DCPContentType (_("Transitional"), dcp::TRANSITIONAL, N_("XSN")));
- _dcp_content_types.push_back (new DCPContentType (_("Rating"), dcp::RATING, N_("RTG")));
- _dcp_content_types.push_back (new DCPContentType (_("Teaser"), dcp::TEASER, N_("TSR")));
- _dcp_content_types.push_back (new DCPContentType (_("Policy"), dcp::POLICY, N_("POL")));
- _dcp_content_types.push_back (new DCPContentType (_("Public Service Announcement"), dcp::PUBLIC_SERVICE_ANNOUNCEMENT, N_("PSA")));
- _dcp_content_types.push_back (new DCPContentType (_("Advertisement"), dcp::ADVERTISEMENT, N_("ADV")));
- _dcp_content_types.push_back (new DCPContentType (_("Episode"), dcp::EPISODE, N_("EPS")));
- _dcp_content_types.push_back (new DCPContentType (_("Promo"), dcp::PROMO, N_("PRO")));
+ _dcp_content_types.push_back (new DCPContentType(_("Feature"), dcp::ContentKind::FEATURE, N_("FTR")));
+ _dcp_content_types.push_back (new DCPContentType(_("Short"), dcp::ContentKind::SHORT, N_("SHR")));
+ _dcp_content_types.push_back (new DCPContentType(_("Trailer"), dcp::ContentKind::TRAILER, N_("TLR")));
+ _dcp_content_types.push_back (new DCPContentType(_("Test"), dcp::ContentKind::TEST, N_("TST")));
+ _dcp_content_types.push_back (new DCPContentType(_("Transitional"), dcp::ContentKind::TRANSITIONAL, N_("XSN")));
+ _dcp_content_types.push_back (new DCPContentType(_("Rating"), dcp::ContentKind::RATING, N_("RTG")));
+ _dcp_content_types.push_back (new DCPContentType(_("Teaser"), dcp::ContentKind::TEASER, N_("TSR")));
+ _dcp_content_types.push_back (new DCPContentType(_("Policy"), dcp::ContentKind::POLICY, N_("POL")));
+ _dcp_content_types.push_back (new DCPContentType(_("Public Service Announcement"), dcp::ContentKind::PUBLIC_SERVICE_ANNOUNCEMENT, N_("PSA")));
+ _dcp_content_types.push_back (new DCPContentType(_("Advertisement"), dcp::ContentKind::ADVERTISEMENT, N_("ADV")));
+ _dcp_content_types.push_back (new DCPContentType(_("Episode"), dcp::ContentKind::EPISODE, N_("EPS")));
+ _dcp_content_types.push_back (new DCPContentType(_("Promo"), dcp::ContentKind::PROMO, N_("PRO")));
}
DCPContentType const *
new J2KImageProxy (
_stereo_reader->get_frame (entry_point + frame),
picture_asset->size(),
- dcp::EYE_LEFT,
+ dcp::Eye::LEFT,
AV_PIX_FMT_XYZ12LE,
_forced_reduction
)
new J2KImageProxy (
_stereo_reader->get_frame (entry_point + frame),
picture_asset->size(),
- dcp::EYE_RIGHT,
+ dcp::Eye::RIGHT,
AV_PIX_FMT_XYZ12LE,
_forced_reduction
)
/* Check that we can read the first picture, sound and subtitle frames of each reel */
try {
for (auto i: cpl->reels()) {
- shared_ptr<dcp::PictureAsset> pic = i->main_picture()->asset ();
- shared_ptr<dcp::MonoPictureAsset> mono = dynamic_pointer_cast<dcp::MonoPictureAsset> (pic);
- shared_ptr<dcp::StereoPictureAsset> stereo = dynamic_pointer_cast<dcp::StereoPictureAsset> (pic);
+ auto pic = i->main_picture()->asset ();
+ auto mono = dynamic_pointer_cast<dcp::MonoPictureAsset> (pic);
+ auto stereo = dynamic_pointer_cast<dcp::StereoPictureAsset> (pic);
if (mono) {
mono->start_read()->get_frame(0)->xyz_image ();
} else {
- stereo->start_read()->get_frame(0)->xyz_image (dcp::EYE_LEFT);
+ stereo->start_read()->get_frame(0)->xyz_image(dcp::Eye::LEFT);
}
if (i->main_sound()) {
cpl,
begin,
end,
- dcp::MODIFIED_TRANSITIONAL_1,
+ dcp::Formulation::MODIFIED_TRANSITIONAL_1,
true,
0
);
float const overall_gain = 2 / (4 + sqrt(2));
float const minus_3dB = 1 / sqrt(2);
if (ch == 2) {
- map.set (dcp::LEFT, 0, 1);
- map.set (dcp::RIGHT, 1, 1);
+ map.set (dcp::Channel::LEFT, 0, 1);
+ map.set (dcp::Channel::RIGHT, 1, 1);
} else if (ch == 4) {
- map.set (dcp::LEFT, 0, overall_gain);
- map.set (dcp::RIGHT, 1, overall_gain);
- map.set (dcp::CENTRE, 0, overall_gain * minus_3dB);
- map.set (dcp::CENTRE, 1, overall_gain * minus_3dB);
- map.set (dcp::LS, 0, overall_gain);
+ map.set (dcp::Channel::LEFT, 0, overall_gain);
+ map.set (dcp::Channel::RIGHT, 1, overall_gain);
+ map.set (dcp::Channel::CENTRE, 0, overall_gain * minus_3dB);
+ map.set (dcp::Channel::CENTRE, 1, overall_gain * minus_3dB);
+ map.set (dcp::Channel::LS, 0, overall_gain);
} else if (ch >= 6) {
- map.set (dcp::LEFT, 0, overall_gain);
- map.set (dcp::RIGHT, 1, overall_gain);
- map.set (dcp::CENTRE, 0, overall_gain * minus_3dB);
- map.set (dcp::CENTRE, 1, overall_gain * minus_3dB);
- map.set (dcp::LS, 0, overall_gain);
- map.set (dcp::RS, 1, overall_gain);
+ map.set (dcp::Channel::LEFT, 0, overall_gain);
+ map.set (dcp::Channel::RIGHT, 1, overall_gain);
+ map.set (dcp::Channel::CENTRE, 0, overall_gain * minus_3dB);
+ map.set (dcp::Channel::CENTRE, 1, overall_gain * minus_3dB);
+ map.set (dcp::Channel::LS, 0, overall_gain);
+ map.set (dcp::Channel::RS, 1, overall_gain);
}
/* XXX: maybe we should do something better for >6 channel DCPs */
} else {
, _audio_language (dcp::LanguageTag("en-US"))
, _release_territory (dcp::LanguageTag::RegionSubtag("US"))
, _version_number (1)
- , _status (dcp::FINAL)
- , _luminance (dcp::Luminance(4.5, dcp::Luminance::FOOT_LAMBERT))
+ , _status (dcp::Status::FINAL)
+ , _luminance (dcp::Luminance(4.5, dcp::Luminance::Unit::FOOT_LAMBERT))
, _state_version (current_state_version)
, _dirty (false)
, _tolerant (false)
mapped.push_back (i);
}
} else {
- for (auto i: content ()) {
+ for (auto i: content()) {
if (i->audio) {
- auto c = i->audio->mapping().mapped_output_channels ();
- copy (c.begin(), c.end(), back_inserter (mapped));
+ auto c = i->audio->mapping().mapped_output_channels();
+ copy (c.begin(), c.end(), back_inserter(mapped));
}
}
/* XXX: this uses the first bit of content only */
/* Interior aspect ratio. The standard says we don't do this for trailers, for some strange reason */
- if (dcp_content_type() && dcp_content_type()->libdcp_kind() != dcp::TRAILER) {
+ if (dcp_content_type() && dcp_content_type()->libdcp_kind() != dcp::ContentKind::TRAILER) {
Ratio const* content_ratio = nullptr;
for (auto i: content ()) {
if (i->video) {
d += String::compose("_%1%2", ch.first, ch.second);
}
- if (audio_channels() > static_cast<int>(dcp::HI) && find(mapped.begin(), mapped.end(), dcp::HI) != mapped.end()) {
+ if (audio_channels() > static_cast<int>(dcp::Channel::HI) && find(mapped.begin(), mapped.end(), static_cast<int>(dcp::Channel::HI)) != mapped.end()) {
d += "-HI";
}
- if (audio_channels() > static_cast<int>(dcp::VI) && find(mapped.begin(), mapped.end(), dcp::VI) != mapped.end()) {
+ if (audio_channels() > static_cast<int>(dcp::Channel::VI) && find(mapped.begin(), mapped.end(), static_cast<int>(dcp::Channel::VI)) != mapped.end()) {
d += "-VI";
}
Hints::check_ffec_and_ffmc_in_smpte_feature ()
{
shared_ptr<const Film> f = film();
- if (!f->interop() && f->dcp_content_type()->libdcp_kind() == dcp::FEATURE && (!f->marker(dcp::Marker::FFEC) || !f->marker(dcp::Marker::FFMC))) {
+ if (!f->interop() && f->dcp_content_type()->libdcp_kind() == dcp::ContentKind::FEATURE && (!f->marker(dcp::Marker::FFEC) || !f->marker(dcp::Marker::FFMC))) {
hint (_("SMPTE DCPs with the type FTR (feature) should have markers for the first frame of end credits (FFEC) and the first frame of moving credits (FFMC). You should add these markers using the 'Markers' button in the DCP tab."));
}
}
throw runtime_error (N_("Could not allocate SwsContext"));
}
- DCPOMATIC_ASSERT (yuv_to_rgb < dcp::YUV_TO_RGB_COUNT);
- int const lut[dcp::YUV_TO_RGB_COUNT] = {
+ DCPOMATIC_ASSERT (yuv_to_rgb < dcp::YUVToRGB::COUNT);
+ int const lut[static_cast<int>(dcp::YUVToRGB::COUNT)] = {
SWS_CS_ITU601,
SWS_CS_ITU709
};
*/
sws_setColorspaceDetails (
scale_context,
- sws_getCoefficients (lut[yuv_to_rgb]), video_range == VIDEO_RANGE_VIDEO ? 0 : 1,
- sws_getCoefficients (lut[yuv_to_rgb]), out_video_range == VIDEO_RANGE_VIDEO ? 0 : 1,
+ sws_getCoefficients (lut[static_cast<int>(yuv_to_rgb)]), video_range == VIDEO_RANGE_VIDEO ? 0 : 1,
+ sws_getCoefficients (lut[static_cast<int>(yuv_to_rgb)]), out_video_range == VIDEO_RANGE_VIDEO ? 0 : 1,
0, 1 << 16, 1 << 16
);
(fast ? SWS_FAST_BILINEAR : SWS_BICUBIC) | SWS_ACCURATE_RND, 0, 0, 0
);
- DCPOMATIC_ASSERT (yuv_to_rgb < dcp::YUV_TO_RGB_COUNT);
- int const lut[dcp::YUV_TO_RGB_COUNT] = {
+ DCPOMATIC_ASSERT (yuv_to_rgb < dcp::YUVToRGB::COUNT);
+ int const lut[static_cast<int>(dcp::YUVToRGB::COUNT)] = {
SWS_CS_ITU601,
SWS_CS_ITU709
};
*/
sws_setColorspaceDetails (
scale_context,
- sws_getCoefficients (lut[yuv_to_rgb]), 0,
- sws_getCoefficients (lut[yuv_to_rgb]), 0,
+ sws_getCoefficients (lut[static_cast<int>(yuv_to_rgb)]), 0,
+ sws_getCoefficients (lut[static_cast<int>(yuv_to_rgb)]), 0,
0, 1 << 16, 1 << 16
);
}
case AV_PIX_FMT_YUV420P:
{
- shared_ptr<Image> yuv = other->convert_pixel_format (dcp::YUV_TO_RGB_REC709, _pixel_format, false, false);
+ shared_ptr<Image> yuv = other->convert_pixel_format (dcp::YUVToRGB::REC709, _pixel_format, false, false);
dcp::Size const ts = size();
dcp::Size const os = yuv->size();
for (int ty = start_ty, oy = start_oy; ty < ts.height && oy < os.height; ++ty, ++oy) {
}
case AV_PIX_FMT_YUV420P10:
{
- shared_ptr<Image> yuv = other->convert_pixel_format (dcp::YUV_TO_RGB_REC709, _pixel_format, false, false);
+ shared_ptr<Image> yuv = other->convert_pixel_format (dcp::YUVToRGB::REC709, _pixel_format, false, false);
dcp::Size const ts = size();
dcp::Size const os = yuv->size();
for (int ty = start_ty, oy = start_oy; ty < ts.height && oy < os.height; ++ty, ++oy) {
}
case AV_PIX_FMT_YUV422P10LE:
{
- shared_ptr<Image> yuv = other->convert_pixel_format (dcp::YUV_TO_RGB_REC709, _pixel_format, false, false);
+ shared_ptr<Image> yuv = other->convert_pixel_format (dcp::YUVToRGB::REC709, _pixel_format, false, false);
dcp::Size const ts = size();
dcp::Size const os = yuv->size();
for (int ty = start_ty, oy = start_oy; ty < ts.height && oy < os.height; ++ty, ++oy) {
DCPOMATIC_ASSERT (bytes_per_pixel(0) == 4);
DCPOMATIC_ASSERT (planes() == 1);
if (pixel_format() != AV_PIX_FMT_RGBA) {
- return convert_pixel_format(dcp::YUV_TO_RGB_REC709, AV_PIX_FMT_RGBA, true, false)->as_png();
+ return convert_pixel_format(dcp::YUVToRGB::REC709, AV_PIX_FMT_RGBA, true, false)->as_png();
}
/* error handling? */
AVPixelFormat pixel_format,
optional<int> forced_reduction
)
- : _data (eye ? frame->left() : frame->right())
+ : _data (eye == dcp::Eye::LEFT ? frame->left() : frame->right())
, _size (size)
, _eye (eye)
, _pixel_format (pixel_format)
#include "i18n.h"
+
using std::string;
using std::cout;
using std::shared_ptr;
+using std::make_shared;
+
Log::Log ()
: _types (0)
return;
}
- shared_ptr<StringLogEntry> e (new StringLogEntry (type, message));
+ auto e = make_shared<StringLogEntry>(type, message);
do_log (e);
}
Log::dcp_log (dcp::NoteType type, string m)
{
switch (type) {
- case dcp::DCP_PROGRESS:
- do_log (shared_ptr<const LogEntry> (new StringLogEntry (LogEntry::TYPE_GENERAL, m)));
+ case dcp::NoteType::PROGRESS:
+ do_log (make_shared<StringLogEntry>(LogEntry::TYPE_GENERAL, m));
break;
- case dcp::DCP_ERROR:
- do_log (shared_ptr<const LogEntry> (new StringLogEntry (LogEntry::TYPE_ERROR, m)));
+ case dcp::NoteType::ERROR:
+ do_log (make_shared<StringLogEntry>(LogEntry::TYPE_ERROR, m));
break;
- case dcp::DCP_NOTE:
- do_log (shared_ptr<const LogEntry> (new StringLogEntry (LogEntry::TYPE_WARNING, m)));
+ case dcp::NoteType::NOTE:
+ do_log (make_shared<StringLogEntry>(LogEntry::TYPE_WARNING, m));
break;
}
}
}
dcp::Size scaled_size (width, height);
- ps.bitmap.push_back (BitmapText(image->scale(scaled_size, dcp::YUV_TO_RGB_REC601, image->pixel_format(), true, _fast), subtitle.sub.rectangle));
+ ps.bitmap.push_back (BitmapText(image->scale(scaled_size, dcp::YUVToRGB::REC601, image->pixel_format(), true, _fast), subtitle.sub.rectangle));
DCPTime from (content_time_to_dcp (piece, subtitle.from()));
_active_texts[text->type()].add_from (wc, ps, from);
total_crop.bottom /= r;
}
- dcp::YUVToRGB yuv_to_rgb = dcp::YUV_TO_RGB_REC601;
+ dcp::YUVToRGB yuv_to_rgb = dcp::YUVToRGB::REC601;
if (_colour_conversion) {
yuv_to_rgb = _colour_conversion.get().yuv_to_rgb();
}
output. We will hard-link it into the DCP later.
*/
- dcp::Standard const standard = film()->interop() ? dcp::INTEROP : dcp::SMPTE;
+ dcp::Standard const standard = film()->interop() ? dcp::Standard::INTEROP : dcp::Standard::SMPTE;
boost::filesystem::path const asset =
film()->internal_video_asset_dir() / film()->internal_video_asset_filename(_period);
i.image->as_png(),
dcp::Time(period.from.seconds() - _period.from.seconds(), film()->video_frame_rate()),
dcp::Time(period.to.seconds() - _period.from.seconds(), film()->video_frame_rate()),
- i.rectangle.x, dcp::HALIGN_LEFT, i.rectangle.y, dcp::VALIGN_TOP,
+ i.rectangle.x, dcp::HAlign::LEFT, i.rectangle.y, dcp::VAlign::TOP,
dcp::Time(), dcp::Time()
)
)
{
int x = 0;
switch (first.h_align ()) {
- case dcp::HALIGN_LEFT:
+ case dcp::HAlign::LEFT:
/* h_position is distance between left of frame and left of subtitle */
x = first.h_position() * target_width;
break;
- case dcp::HALIGN_CENTER:
+ case dcp::HAlign::CENTER:
/* h_position is distance between centre of frame and centre of subtitle */
x = (0.5 + first.h_position()) * target_width - layout_width / 2;
break;
- case dcp::HALIGN_RIGHT:
+ case dcp::HAlign::RIGHT:
/* h_position is distance between right of frame and right of subtitle */
x = (1.0 - first.h_position()) * target_width - layout_width;
break;
{
int y = 0;
switch (first.v_align ()) {
- case dcp::VALIGN_TOP:
+ case dcp::VAlign::TOP:
/* SMPTE says that v_position is the distance between top
of frame and top of subtitle, but this doesn't always seem to be
the case in practice; Gunnar Ásgeirsson's Dolby server appears
- to put VALIGN_TOP subs with v_position as the distance between top
+ to put VAlign::TOP subs with v_position as the distance between top
of frame and bottom of subtitle.
*/
y = first.v_position() * target_height - layout_height;
break;
- case dcp::VALIGN_CENTER:
+ case dcp::VAlign::CENTER:
/* v_position is distance between centre of frame and centre of subtitle */
y = (0.5 + first.v_position()) * target_height - layout_height / 2;
break;
- case dcp::VALIGN_BOTTOM:
+ case dcp::VAlign::BOTTOM:
/* v_position is distance between bottom of frame and bottom of subtitle */
y = (1.0 - first.v_position()) * target_height - layout_height;
break;
}
}
- float const border_width = first.effect() == dcp::BORDER ? (first.outline_width * target.width / 2048.0) : 0;
+ float const border_width = first.effect() == dcp::Effect::BORDER ? (first.outline_width * target.width / 2048.0) : 0;
size.width += 2 * ceil (border_width);
size.height += 2 * ceil (border_width);
context->scale (x_scale, y_scale);
layout->update_from_cairo_context (context);
- if (first.effect() == dcp::SHADOW) {
+ if (first.effect() == dcp::Effect::SHADOW) {
/* Drop-shadow effect */
set_source_rgba (context, first.effect_colour(), fade_factor);
context->move_to (x_offset + 4, y_offset + 4);
context->fill ();
}
- if (first.effect() == dcp::BORDER) {
+ if (first.effect() == dcp::Effect::BORDER) {
/* Border effect */
set_source_rgba (context, first.effect_colour(), fade_factor);
context->set_line_width (border_width);
name = dcp->name ();
DCPOMATIC_ASSERT (dcp->cpl());
id = *dcp->cpl();
- kind = dcp->content_kind().get_value_or(dcp::FEATURE);
+ kind = dcp->content_kind().get_value_or(dcp::ContentKind::FEATURE);
encrypted = dcp->encrypted ();
} else {
name = content->path(0).filename().string();
- kind = dcp::FEATURE;
+ kind = dcp::ContentKind::FEATURE;
}
}
}
if (node->optional_bool_child("Outline").get_value_or(false)) {
- _effect = dcp::BORDER;
+ _effect = dcp::Effect::BORDER;
} else if (node->optional_bool_child("Shadow").get_value_or(false)) {
- _effect = dcp::SHADOW;
+ _effect = dcp::Effect::SHADOW;
} else {
- _effect = dcp::NONE;
+ _effect = dcp::Effect::NONE;
}
auto effect = node->optional_string_child("Effect");
if (effect) {
if (*effect == "none") {
- _effect = dcp::NONE;
+ _effect = dcp::Effect::NONE;
} else if (*effect == "outline") {
- _effect = dcp::BORDER;
+ _effect = dcp::Effect::BORDER;
} else if (*effect == "shadow") {
- _effect = dcp::SHADOW;
+ _effect = dcp::Effect::SHADOW;
}
}
}
if (_effect) {
switch (*_effect) {
- case dcp::NONE:
+ case dcp::Effect::NONE:
text->add_child("Effect")->add_child_text("none");
break;
- case dcp::BORDER:
+ case dcp::Effect::BORDER:
text->add_child("Effect")->add_child_text("outline");
break;
- case dcp::SHADOW:
+ case dcp::Effect::SHADOW:
text->add_child("Effect")->add_child_text("shadow");
break;
}
+ "_" + raw_convert<string> (fade_out().get_value_or(ContentTime()).get())
+ "_" + raw_convert<string> (outline_width())
+ "_" + raw_convert<string> (colour().get_value_or(dcp::Colour(255, 255, 255)).to_argb_string())
- + "_" + raw_convert<string> (dcp::effect_to_string(effect().get_value_or(dcp::NONE)))
+ + "_" + raw_convert<string> (dcp::effect_to_string(effect().get_value_or(dcp::Effect::NONE)))
+ "_" + raw_convert<string> (effect_colour().get_value_or(dcp::Colour(0, 0, 0)).to_argb_string())
+ "_" + raw_convert<string> (_parent->video_frame_rate().get_value_or(0));
v_position = 1.015 -
(1 + bottom_line.get() - i.vertical_position.line.get()) * multiplier;
- v_align = dcp::VALIGN_TOP;
+ v_align = dcp::VAlign::TOP;
break;
case sub::TOP_OF_SCREEN:
/* This 0.1 is another fudge factor to bring the top line away from the top of the screen a little */
v_position = 0.12 + i.vertical_position.line.get() * multiplier;
- v_align = dcp::VALIGN_TOP;
+ v_align = dcp::VAlign::TOP;
break;
case sub::VERTICAL_CENTRE_OF_SCREEN:
v_position = i.vertical_position.line.get() * multiplier;
- v_align = dcp::VALIGN_CENTER;
+ v_align = dcp::VAlign::CENTER;
break;
}
} else {
switch (i.vertical_position.reference.get()) {
case sub::TOP_OF_SCREEN:
- v_align = dcp::VALIGN_TOP;
+ v_align = dcp::VAlign::TOP;
break;
case sub::VERTICAL_CENTRE_OF_SCREEN:
- v_align = dcp::VALIGN_CENTER;
+ v_align = dcp::VAlign::CENTER;
break;
case sub::BOTTOM_OF_SCREEN:
- v_align = dcp::VALIGN_BOTTOM;
+ v_align = dcp::VAlign::BOTTOM;
break;
default:
- v_align = dcp::VALIGN_TOP;
+ v_align = dcp::VAlign::TOP;
break;
}
}
float h_position = i.horizontal_position.proportional;
switch (i.horizontal_position.reference) {
case sub::LEFT_OF_SCREEN:
- h_align = dcp::HALIGN_LEFT;
+ h_align = dcp::HAlign::LEFT;
h_position = max(h_position, 0.05f);
break;
case sub::HORIZONTAL_CENTRE_OF_SCREEN:
- h_align = dcp::HALIGN_CENTER;
+ h_align = dcp::HAlign::CENTER;
break;
case sub::RIGHT_OF_SCREEN:
- h_align = dcp::HALIGN_RIGHT;
+ h_align = dcp::HAlign::RIGHT;
h_position = max(h_position, 0.05f);
break;
default:
- h_align = dcp::HALIGN_CENTER;
+ h_align = dcp::HAlign::CENTER;
break;
}
h_align,
v_position,
v_align,
- dcp::DIRECTION_LTR,
+ dcp::Direction::LTR,
j.text,
- dcp::NONE,
+ dcp::Effect::NONE,
j.effect_colour.get_value_or(sub::Colour(0, 0, 0)).dcp(),
/* Hack: we should use subtitle.fade_up and subtitle.fade_down here
but the times of these often don't have a frame rate associated
vector<dcp::VerificationNote> notes;
dcp.read (&notes);
for (auto i: notes) {
- if (i.code() != dcp::VerificationNote::EXTERNAL_ASSET) {
+ if (i.code() != dcp::VerificationNote::Code::EXTERNAL_ASSET) {
/* It's not just a warning about this DCP being a VF */
throw dcp::ReadError(dcp::note_to_string(i));
}
/* Render something to fontconfig to create its cache */
list<StringText> subs;
dcp::SubtitleString ss(
- optional<string>(), false, false, false, dcp::Colour(), 42, 1, dcp::Time(), dcp::Time(), 0, dcp::HALIGN_CENTER, 0, dcp::VALIGN_CENTER, dcp::DIRECTION_LTR,
- "Hello dolly", dcp::NONE, dcp::Colour(), dcp::Time(), dcp::Time()
+ optional<string>(), false, false, false, dcp::Colour(), 42, 1, dcp::Time(), dcp::Time(), 0, dcp::HAlign::CENTER, 0, dcp::VAlign::CENTER, dcp::Direction::LTR,
+ "Hello dolly", dcp::Effect::NONE, dcp::Colour(), dcp::Time(), dcp::Time()
);
subs.push_back (StringText(ss, 0));
}
switch (static_cast<dcp::Channel>(i)) {
- case dcp::LFE:
+ case dcp::Channel::LFE:
++lfe;
break;
- case dcp::LEFT:
- case dcp::RIGHT:
- case dcp::CENTRE:
- case dcp::LS:
- case dcp::RS:
- case dcp::BSL:
- case dcp::BSR:
+ case dcp::Channel::LEFT:
+ case dcp::Channel::RIGHT:
+ case dcp::Channel::CENTRE:
+ case dcp::Channel::LS:
+ case dcp::Channel::RS:
+ case dcp::Channel::BSL:
+ case dcp::Channel::BSR:
++non_lfe;
break;
- case dcp::HI:
- case dcp::VI:
- case dcp::MOTION_DATA:
- case dcp::SYNC_SIGNAL:
- case dcp::SIGN_LANGUAGE:
- case dcp::CHANNEL_COUNT:
+ case dcp::Channel::HI:
+ case dcp::Channel::VI:
+ case dcp::Channel::MOTION_DATA:
+ case dcp::Channel::SYNC_SIGNAL:
+ case dcp::Channel::SIGN_LANGUAGE:
+ case dcp::Channel::CHANNEL_COUNT:
break;
}
}
for (int i = 0; i < to_do; ++i) {
for (int j = 0; j < mapped->channels(); ++j) {
- if (map.get (i, static_cast<dcp::Channel> (j)) > 0) {
- mapped->accumulate_channel (
+ if (map.get(i, j) > 0) {
+ mapped->accumulate_channel(
input.get(),
i,
- static_cast<dcp::Channel> (j),
- map.get (i, static_cast<dcp::Channel> (j))
+ j,
+ map.get(i, j)
);
}
}
/* add in position */
switch (sub.h_align()) {
- case dcp::HALIGN_LEFT:
+ case dcp::HAlign::LEFT:
rect.x += sub.h_position();
break;
- case dcp::HALIGN_CENTER:
+ case dcp::HAlign::CENTER:
rect.x += 0.5 + sub.h_position() - rect.width / 2;
break;
- case dcp::HALIGN_RIGHT:
+ case dcp::HAlign::RIGHT:
rect.x += 1 - sub.h_position() - rect.width;
break;
}
switch (sub.v_align()) {
- case dcp::VALIGN_TOP:
+ case dcp::VAlign::TOP:
rect.y += sub.v_position();
break;
- case dcp::VALIGN_CENTER:
+ case dcp::VAlign::CENTER:
rect.y += 0.5 + sub.v_position() - rect.height / 2;
break;
- case dcp::VALIGN_BOTTOM:
+ case dcp::VAlign::BOTTOM:
rect.y += 1 - sub.v_position() - rect.height;
break;
}
bool failed = false;
for (auto i: _notes) {
- if (i.type() == dcp::VerificationNote::VERIFY_ERROR) {
+ if (i.type() == dcp::VerificationNote::Type::ERROR) {
failed = true;
}
}
video->emit (
film(),
shared_ptr<ImageProxy> (
- new J2KImageProxy (_stereo_reader->get_frame(frame), _size, dcp::EYE_LEFT, AV_PIX_FMT_XYZ12LE, optional<int>())
+ new J2KImageProxy (_stereo_reader->get_frame(frame), _size, dcp::Eye::LEFT, AV_PIX_FMT_XYZ12LE, optional<int>())
),
frame
);
video->emit (
film(),
shared_ptr<ImageProxy> (
- new J2KImageProxy (_stereo_reader->get_frame(frame), _size, dcp::EYE_RIGHT, AV_PIX_FMT_XYZ12LE, optional<int>())
+ new J2KImageProxy (_stereo_reader->get_frame(frame), _size, dcp::Eye::RIGHT, AV_PIX_FMT_XYZ12LE, optional<int>())
),
frame
);
cpl->set_facility (film()->facility());
cpl->set_luminance (film()->luminance());
- list<int> ac = film()->mapped_audio_channels();
+ auto ac = film()->mapped_audio_channels();
dcp::MCASoundField field = (
- find(ac.begin(), ac.end(), static_cast<int>(dcp::BSL)) != ac.end() ||
- find(ac.begin(), ac.end(), static_cast<int>(dcp::BSR)) != ac.end()
- ) ? dcp::SEVEN_POINT_ONE : dcp::FIVE_POINT_ONE;
+ find(ac.begin(), ac.end(), static_cast<int>(dcp::Channel::BSL)) != ac.end() ||
+ find(ac.begin(), ac.end(), static_cast<int>(dcp::Channel::BSR)) != ac.end()
+ ) ? dcp::MCASoundField::SEVEN_POINT_ONE : dcp::MCASoundField::FIVE_POINT_ONE;
dcp::MainSoundConfiguration msc (field, film()->audio_channels());
for (auto i: ac) {
- if (i < film()->audio_channels()) {
+ if (static_cast<int>(i) < film()->audio_channels()) {
msc.set_mapping (i, static_cast<dcp::Channel>(i));
}
}
}
dcp.write_xml (
- film()->interop() ? dcp::INTEROP : dcp::SMPTE,
+ film()->interop() ? dcp::Standard::INTEROP : dcp::Standard::SMPTE,
issuer,
creator,
dcp::LocalTime().as_string(),
vector<string>(),
d->cpl (),
from, to,
- dcp::MODIFIED_TRANSITIONAL_1,
+ dcp::Formulation::MODIFIED_TRANSITIONAL_1,
true,
0
);
film->set_container (cc.container_ratio);
}
film->set_dcp_content_type (cc.dcp_content_type);
- film->set_interop (cc.standard == dcp::INTEROP);
+ film->set_interop (cc.standard == dcp::Standard::INTEROP);
film->set_use_isdcf_name (!cc.no_use_isdcf_name);
film->set_encrypted (cc.encrypt);
film->set_three_d (cc.threed);
bool list_dkdm_cpls = false;
optional<string> duration_string;
bool verbose = false;
- dcp::Formulation formulation = dcp::MODIFIED_TRANSITIONAL_1;
+ dcp::Formulation formulation = dcp::Formulation::MODIFIED_TRANSITIONAL_1;
bool disable_forensic_marking_picture = false;
optional<int> disable_forensic_marking_audio;
break;
case 'F':
if (string (optarg) == "modified-transitional-1") {
- formulation = dcp::MODIFIED_TRANSITIONAL_1;
+ formulation = dcp::Formulation::MODIFIED_TRANSITIONAL_1;
} else if (string (optarg) == "multiple-modified-transitional-1") {
- formulation = dcp::MULTIPLE_MODIFIED_TRANSITIONAL_1;
+ formulation = dcp::Formulation::MULTIPLE_MODIFIED_TRANSITIONAL_1;
} else if (string (optarg) == "dci-any") {
- formulation = dcp::DCI_ANY;
+ formulation = dcp::Formulation::DCI_ANY;
} else if (string (optarg) == "dci-specific") {
- formulation = dcp::DCI_SPECIFIC;
+ formulation = dcp::Formulation::DCI_SPECIFIC;
} else {
error ("unrecognised KDM formulation " + string (optarg));
}
float from_top (StringText const & c) const
{
switch (c.v_align()) {
- case dcp::VALIGN_TOP:
+ case dcp::VAlign::TOP:
return c.v_position();
- case dcp::VALIGN_CENTER:
+ case dcp::VAlign::CENTER:
return c.v_position() + 0.5;
- case dcp::VALIGN_BOTTOM:
+ case dcp::VAlign::BOTTOM:
return 1.0 - c.v_position();
}
DCPOMATIC_ASSERT (false);
checked_set (_input, 2);
}
- _yuv_to_rgb->SetSelection (conversion.yuv_to_rgb ());
+ _yuv_to_rgb->SetSelection (static_cast<int>(conversion.yuv_to_rgb()));
_ignore_chromaticity_changed = true;
wxBoxSizer* type = new wxBoxSizer (wxHORIZONTAL);
_type = new wxChoice (this, wxID_ANY);
- _type->Append ("Modified Transitional 1", ((void *) dcp::MODIFIED_TRANSITIONAL_1));
- _type->Append ("Multiple Modified Transitional 1", ((void *) dcp::MULTIPLE_MODIFIED_TRANSITIONAL_1));
- _type->Append ("Modified Transitional 1 (without AuthorizedDeviceInfo)", ((void *) dcp::MODIFIED_TRANSITIONAL_TEST));
+ _type->Append ("Modified Transitional 1", ((void *) dcp::Formulation::MODIFIED_TRANSITIONAL_1));
+ _type->Append ("Multiple Modified Transitional 1", ((void *) dcp::Formulation::MULTIPLE_MODIFIED_TRANSITIONAL_1));
+ _type->Append ("Modified Transitional 1 (without AuthorizedDeviceInfo)", ((void *) dcp::Formulation::MODIFIED_TRANSITIONAL_TEST));
if (!interop) {
- _type->Append ("DCI Any", ((void *) dcp::DCI_ANY));
- _type->Append ("DCI Specific", ((void *) dcp::DCI_SPECIFIC));
+ _type->Append ("DCI Any", ((void *) dcp::Formulation::DCI_ANY));
+ _type->Append ("DCI Specific", ((void *) dcp::Formulation::DCI_SPECIFIC));
}
type->Add (_type, 1, wxEXPAND);
_type->SetSelection (0);
button_sizer->Add (_remove, 0, wxTOP | wxBOTTOM | wxEXPAND, 2);
_choose_subtag_panel = new LanguageSubtagPanel (this);
- _choose_subtag_panel->set (dcp::LanguageTag::LANGUAGE, "");
+ _choose_subtag_panel->set (dcp::LanguageTag::SubtagType::LANGUAGE, "");
wxBoxSizer* ltor_sizer = new wxBoxSizer (wxHORIZONTAL);
ltor_sizer->Add (_current_tag_list, 1, wxALL, 8);
set (tag);
- _add_script->Bind (wxEVT_BUTTON, boost::bind(&LanguageTagDialog::add_to_current_tag, this, dcp::LanguageTag::SCRIPT, boost::optional<dcp::LanguageTag::SubtagData>()));
- _add_region->Bind (wxEVT_BUTTON, boost::bind(&LanguageTagDialog::add_to_current_tag, this, dcp::LanguageTag::REGION, boost::optional<dcp::LanguageTag::SubtagData>()));
- _add_variant->Bind (wxEVT_BUTTON, boost::bind(&LanguageTagDialog::add_to_current_tag, this, dcp::LanguageTag::VARIANT, boost::optional<dcp::LanguageTag::SubtagData>()));
- _add_external->Bind (wxEVT_BUTTON, boost::bind(&LanguageTagDialog::add_to_current_tag, this, dcp::LanguageTag::EXTLANG, boost::optional<dcp::LanguageTag::SubtagData>()));
+ _add_script->Bind (wxEVT_BUTTON, boost::bind(&LanguageTagDialog::add_to_current_tag, this, dcp::LanguageTag::SubtagType::SCRIPT, boost::optional<dcp::LanguageTag::SubtagData>()));
+ _add_region->Bind (wxEVT_BUTTON, boost::bind(&LanguageTagDialog::add_to_current_tag, this, dcp::LanguageTag::SubtagType::REGION, boost::optional<dcp::LanguageTag::SubtagData>()));
+ _add_variant->Bind (wxEVT_BUTTON, boost::bind(&LanguageTagDialog::add_to_current_tag, this, dcp::LanguageTag::SubtagType::VARIANT, boost::optional<dcp::LanguageTag::SubtagData>()));
+ _add_external->Bind (wxEVT_BUTTON, boost::bind(&LanguageTagDialog::add_to_current_tag, this, dcp::LanguageTag::SubtagType::EXTLANG, boost::optional<dcp::LanguageTag::SubtagData>()));
_remove->Bind (wxEVT_BUTTON, boost::bind(&LanguageTagDialog::remove_from_current_tag, this));
_choose_subtag_panel->SelectionChanged.connect(bind(&LanguageTagDialog::chosen_subtag_changed, this, _1));
_choose_subtag_panel->SearchChanged.connect(bind(&LanguageTagDialog::search_changed, this, _1));
continue;
}
switch (i.type) {
- case dcp::LanguageTag::LANGUAGE:
+ case dcp::LanguageTag::SubtagType::LANGUAGE:
tag.set_language (i.subtag->subtag);
break;
- case dcp::LanguageTag::SCRIPT:
+ case dcp::LanguageTag::SubtagType::SCRIPT:
tag.set_script (i.subtag->subtag);
break;
- case dcp::LanguageTag::REGION:
+ case dcp::LanguageTag::SubtagType::REGION:
tag.set_region (i.subtag->subtag);
break;
- case dcp::LanguageTag::VARIANT:
+ case dcp::LanguageTag::SubtagType::VARIANT:
variants.push_back (i.subtag->subtag);
break;
- case dcp::LanguageTag::EXTLANG:
+ case dcp::LanguageTag::SubtagType::EXTLANG:
extlangs.push_back (i.subtag->subtag);
break;
}
_current_tag_list->DeleteAllItems ();
bool have_language = false;
- vector<pair<dcp::LanguageTag::SubtagType, dcp::LanguageTag::SubtagData> > subtags = tag.subtags();
- for (vector<pair<dcp::LanguageTag::SubtagType, dcp::LanguageTag::SubtagData> >::const_iterator i = subtags.begin(); i != subtags.end(); ++i) {
- add_to_current_tag (i->first, i->second);
- if (i->first == dcp::LanguageTag::LANGUAGE) {
+ for (auto const& i: tag.subtags()) {
+ add_to_current_tag (i.first, i.second);
+ if (i.first == dcp::LanguageTag::SubtagType::LANGUAGE) {
have_language = true;
}
}
if (!have_language) {
- add_to_current_tag (dcp::LanguageTag::LANGUAGE, dcp::LanguageTag::SubtagData("en", "English"));
+ add_to_current_tag (dcp::LanguageTag::SubtagType::LANGUAGE, dcp::LanguageTag::SubtagData("en", "English"));
}
}
string LanguageTagDialog::subtag_type_name (dcp::LanguageTag::SubtagType type)
{
switch (type) {
- case dcp::LanguageTag::LANGUAGE:
+ case dcp::LanguageTag::SubtagType::LANGUAGE:
return "Language";
- case dcp::LanguageTag::SCRIPT:
+ case dcp::LanguageTag::SubtagType::SCRIPT:
return "Script";
- case dcp::LanguageTag::REGION:
+ case dcp::LanguageTag::SubtagType::REGION:
return "Region";
- case dcp::LanguageTag::VARIANT:
+ case dcp::LanguageTag::SubtagType::VARIANT:
return "Variant";
- case dcp::LanguageTag::EXTLANG:
+ case dcp::LanguageTag::SubtagType::EXTLANG:
return "External";
}
_add_external->Enable ();
for (auto const& i: _current_tag_subtags) {
switch (i.type) {
- case dcp::LanguageTag::SCRIPT:
+ case dcp::LanguageTag::SubtagType::SCRIPT:
_add_script->Enable (false);
break;
- case dcp::LanguageTag::REGION:
+ case dcp::LanguageTag::SubtagType::REGION:
_add_region->Enable (false);
break;
- case dcp::LanguageTag::VARIANT:
+ case dcp::LanguageTag::SubtagType::VARIANT:
_add_variant->Enable (false);
break;
- case dcp::LanguageTag::EXTLANG:
+ case dcp::LanguageTag::SubtagType::EXTLANG:
_add_external->Enable (false);
break;
default:
SetSizer (sizer);
- _panel->set (dcp::LanguageTag::REGION, "", *dcp::LanguageTag::get_subtag_data(region));
+ _panel->set (dcp::LanguageTag::SubtagType::REGION, "", *dcp::LanguageTag::get_subtag_data(region));
}
if (property == Film::NAME_LANGUAGE) {
_name_language->set (film()->name_language());
} else if (property == Film::RELEASE_TERRITORY) {
- checked_set (_release_territory, std_to_wx(*dcp::LanguageTag::get_subtag_description(dcp::LanguageTag::REGION, film()->release_territory().subtag())));
+ checked_set (_release_territory, std_to_wx(*dcp::LanguageTag::get_subtag_description(dcp::LanguageTag::SubtagType::REGION, film()->release_territory().subtag())));
} else if (property == Film::VERSION_NUMBER) {
checked_set (_version_number, film()->version_number());
} else if (property == Film::STATUS) {
switch (film()->status()) {
- case dcp::TEMP:
+ case dcp::Status::TEMP:
checked_set (_status, 0);
break;
- case dcp::PRE:
+ case dcp::Status::PRE:
checked_set (_status, 1);
break;
- case dcp::FINAL:
+ case dcp::Status::FINAL:
checked_set (_status, 2);
break;
}
} else if (property == Film::LUMINANCE) {
checked_set (_luminance_value, film()->luminance().value());
switch (film()->luminance().unit()) {
- case dcp::Luminance::CANDELA_PER_SQUARE_METRE:
+ case dcp::Luminance::Unit::CANDELA_PER_SQUARE_METRE:
checked_set (_luminance_unit, 0);
break;
- case dcp::Luminance::FOOT_LAMBERT:
+ case dcp::Luminance::Unit::FOOT_LAMBERT:
checked_set (_luminance_unit, 1);
break;
}
{
switch (_status->GetSelection()) {
case 0:
- film()->set_status (dcp::TEMP);
+ film()->set_status(dcp::Status::TEMP);
break;
case 1:
- film()->set_status (dcp::PRE);
+ film()->set_status(dcp::Status::PRE);
break;
case 2:
- film()->set_status (dcp::FINAL);
+ film()->set_status(dcp::Status::FINAL);
break;
}
}
dcp::Luminance::Unit unit;
switch (_luminance_unit->GetSelection()) {
case 0:
- unit = dcp::Luminance::CANDELA_PER_SQUARE_METRE;
+ unit = dcp::Luminance::Unit::CANDELA_PER_SQUARE_METRE;
break;
case 1:
- unit = dcp::Luminance::FOOT_LAMBERT;
+ unit = dcp::Luminance::Unit::FOOT_LAMBERT;
break;
default:
DCPOMATIC_ASSERT (false);
_force_effect->SetValue (static_cast<bool>(effect));
if (effect) {
switch (*effect) {
- case dcp::NONE:
+ case dcp::Effect::NONE:
_effect->SetSelection (NONE);
break;
- case dcp::BORDER:
+ case dcp::Effect::BORDER:
_effect->SetSelection (OUTLINE);
break;
- case dcp::SHADOW:
+ case dcp::Effect::SHADOW:
_effect->SetSelection (SHADOW);
break;
}
if (_force_effect->GetValue()) {
switch (_effect->GetSelection()) {
case NONE:
- _caption->set_effect (dcp::NONE);
+ _caption->set_effect (dcp::Effect::NONE);
break;
case OUTLINE:
- _caption->set_effect (dcp::BORDER);
+ _caption->set_effect (dcp::Effect::BORDER);
break;
case SHADOW:
- _caption->set_effect (dcp::SHADOW);
+ _caption->set_effect (dcp::Effect::SHADOW);
break;
}
} else {
sizer->Add (notebook, 1, wxEXPAND | wxALL, DCPOMATIC_DIALOG_BORDER);
map<dcp::VerificationNote::Type, wxRichTextCtrl*> pages;
- pages[dcp::VerificationNote::VERIFY_ERROR] = new wxRichTextCtrl (notebook, wxID_ANY, wxEmptyString, wxDefaultPosition, {400, 300}, wxRE_READONLY);
- notebook->AddPage (pages[dcp::VerificationNote::VERIFY_ERROR], _("Errors"));
- pages[dcp::VerificationNote::VERIFY_BV21_ERROR] = new wxRichTextCtrl (notebook, wxID_ANY, wxEmptyString, wxDefaultPosition, {400, 300}, wxRE_READONLY);
- notebook->AddPage (pages[dcp::VerificationNote::VERIFY_BV21_ERROR], _("SMPTE Bv2.1 errors"));
- pages[dcp::VerificationNote::VERIFY_WARNING] = new wxRichTextCtrl (notebook, wxID_ANY, wxEmptyString, wxDefaultPosition, {400, 300}, wxRE_READONLY);
- notebook->AddPage (pages[dcp::VerificationNote::VERIFY_WARNING], _("Warnings"));
+ pages[dcp::VerificationNote::Type::ERROR] = new wxRichTextCtrl (notebook, wxID_ANY, wxEmptyString, wxDefaultPosition, {400, 300}, wxRE_READONLY);
+ notebook->AddPage (pages[dcp::VerificationNote::Type::ERROR], _("Errors"));
+ pages[dcp::VerificationNote::Type::BV21_ERROR] = new wxRichTextCtrl (notebook, wxID_ANY, wxEmptyString, wxDefaultPosition, {400, 300}, wxRE_READONLY);
+ notebook->AddPage (pages[dcp::VerificationNote::Type::BV21_ERROR], _("SMPTE Bv2.1 errors"));
+ pages[dcp::VerificationNote::Type::WARNING] = new wxRichTextCtrl (notebook, wxID_ANY, wxEmptyString, wxDefaultPosition, {400, 300}, wxRE_READONLY);
+ notebook->AddPage (pages[dcp::VerificationNote::Type::WARNING], _("Warnings"));
auto summary = new wxStaticText (this, wxID_ANY, wxT(""));
sizer->Add (summary, 0, wxALL, DCPOMATIC_DIALOG_BORDER);
}
map<dcp::VerificationNote::Type, int> counts;
- counts[dcp::VerificationNote::VERIFY_WARNING] = 0;
- counts[dcp::VerificationNote::VERIFY_BV21_ERROR] = 0;
- counts[dcp::VerificationNote::VERIFY_ERROR] = 0;
+ counts[dcp::VerificationNote::Type::WARNING] = 0;
+ counts[dcp::VerificationNote::Type::BV21_ERROR] = 0;
+ counts[dcp::VerificationNote::Type::ERROR] = 0;
auto add_bullet = [&pages](dcp::VerificationNote::Type type, wxString message) {
pages[type]->BeginStandardBullet(N_("standard/diamond"), 1, 50);
if (job->finished_in_error() && job->error_summary() != "") {
/* We have an error that did not come from dcp::verify */
- add_bullet (dcp::VerificationNote::VERIFY_ERROR, std_to_wx(job->error_summary()));
+ add_bullet (dcp::VerificationNote::Type::ERROR, std_to_wx(job->error_summary()));
}
for (auto i: job->notes()) {
switch (i.code()) {
- case dcp::VerificationNote::FAILED_READ:
+ case dcp::VerificationNote::Code::FAILED_READ:
add (i, std_to_wx(*i.note()));
break;
- case dcp::VerificationNote::MISMATCHED_CPL_HASHES:
+ case dcp::VerificationNote::Code::MISMATCHED_CPL_HASHES:
add(i, _("The hash of the CPL %n in the PKL does not agree with the CPL file. This probably means that the CPL file is corrupt."));
break;
- case dcp::VerificationNote::INVALID_PICTURE_FRAME_RATE:
+ case dcp::VerificationNote::Code::INVALID_PICTURE_FRAME_RATE:
add(i, _("The picture in a reel has a frame rate of %n, which is not valid."));
break;
- case dcp::VerificationNote::INCORRECT_PICTURE_HASH:
+ case dcp::VerificationNote::Code::INCORRECT_PICTURE_HASH:
add(i, _("The hash of the picture asset %f does not agree with the PKL file. This probably means that the asset file is corrupt."));
break;
- case dcp::VerificationNote::MISMATCHED_PICTURE_HASHES:
+ case dcp::VerificationNote::Code::MISMATCHED_PICTURE_HASHES:
add(i, _("The PKL and CPL hashes disagree for picture asset %f."));
break;
- case dcp::VerificationNote::INCORRECT_SOUND_HASH:
+ case dcp::VerificationNote::Code::INCORRECT_SOUND_HASH:
add(i, _("The hash of the sound asset %f does not agree with the PKL file. This probably means that the asset file is corrupt."));
break;
- case dcp::VerificationNote::MISMATCHED_SOUND_HASHES:
+ case dcp::VerificationNote::Code::MISMATCHED_SOUND_HASHES:
add(i, _("The PKL and CPL hashes disagree for sound asset %f."));
break;
- case dcp::VerificationNote::EMPTY_ASSET_PATH:
+ case dcp::VerificationNote::Code::EMPTY_ASSET_PATH:
add(i, _("An asset has an empty path in the ASSETMAP."));
break;
- case dcp::VerificationNote::MISSING_ASSET:
+ case dcp::VerificationNote::Code::MISSING_ASSET:
add(i, _("The asset %f is missing."));
break;
- case dcp::VerificationNote::MISMATCHED_STANDARD:
+ case dcp::VerificationNote::Code::MISMATCHED_STANDARD:
add(i, _("Parts of the DCP are written according to the Interop standard and parts according to SMPTE."));
break;
- case dcp::VerificationNote::INVALID_XML:
+ case dcp::VerificationNote::Code::INVALID_XML:
if (i.line()) {
add(i, _("The XML in %f is malformed on line %l (%n)."));
} else {
add(i, _("The XML in %f is malformed (%n)."));
}
break;
- case dcp::VerificationNote::MISSING_ASSETMAP:
+ case dcp::VerificationNote::Code::MISSING_ASSETMAP:
add(i, _("No ASSETMAP or ASSETMAP.xml file was found."));
break;
- case dcp::VerificationNote::INVALID_INTRINSIC_DURATION:
+ case dcp::VerificationNote::Code::INVALID_INTRINSIC_DURATION:
add(i, _("The asset %n has an instrinsic duration of less than 1 second, which is invalid."));
break;
- case dcp::VerificationNote::INVALID_DURATION:
+ case dcp::VerificationNote::Code::INVALID_DURATION:
add(i, _("The asset %n has a duration of less than 1 second, which is invalid."));
break;
- case dcp::VerificationNote::INVALID_PICTURE_FRAME_SIZE_IN_BYTES:
+ case dcp::VerificationNote::Code::INVALID_PICTURE_FRAME_SIZE_IN_BYTES:
add(i, _("At least one frame of the video asset %f is over the limit of 250Mbit/s."));
break;
- case dcp::VerificationNote::NEARLY_INVALID_PICTURE_FRAME_SIZE_IN_BYTES:
+ case dcp::VerificationNote::Code::NEARLY_INVALID_PICTURE_FRAME_SIZE_IN_BYTES:
add(i, _("At least one frame of the video asset %f is close to the limit of 250MBit/s."));
break;
- case dcp::VerificationNote::EXTERNAL_ASSET:
+ case dcp::VerificationNote::Code::EXTERNAL_ASSET:
add(i, _("This DCP refers to at the asset %n in another DCP (and perhaps others), so it is a \"version file\" (VF)"));
break;
- case dcp::VerificationNote::INVALID_STANDARD:
+ case dcp::VerificationNote::Code::INVALID_STANDARD:
add(i, _("This DCP uses the Interop standard, but it should be made with SMPTE."));
break;
- case dcp::VerificationNote::INVALID_LANGUAGE:
+ case dcp::VerificationNote::Code::INVALID_LANGUAGE:
add(i, _("The invalid language tag %n is used."));
break;
- case dcp::VerificationNote::INVALID_PICTURE_SIZE_IN_PIXELS:
+ case dcp::VerificationNote::Code::INVALID_PICTURE_SIZE_IN_PIXELS:
add(i, _("The video asset %f uses the invalid image size %n."));
break;
- case dcp::VerificationNote::INVALID_PICTURE_FRAME_RATE_FOR_2K:
+ case dcp::VerificationNote::Code::INVALID_PICTURE_FRAME_RATE_FOR_2K:
add(i, _("The video asset %f uses the invalid frame rate %n."));
break;
- case dcp::VerificationNote::INVALID_PICTURE_FRAME_RATE_FOR_4K:
+ case dcp::VerificationNote::Code::INVALID_PICTURE_FRAME_RATE_FOR_4K:
add(i, _("The video asset %f uses the frame rate %n which is invalid for 4K video."));
break;
- case dcp::VerificationNote::INVALID_PICTURE_ASSET_RESOLUTION_FOR_3D:
+ case dcp::VerificationNote::Code::INVALID_PICTURE_ASSET_RESOLUTION_FOR_3D:
add(i, _("The video asset %f uses the frame rate %n which is invalid for 3D video."));
break;
- case dcp::VerificationNote::INVALID_CLOSED_CAPTION_XML_SIZE_IN_BYTES:
+ case dcp::VerificationNote::Code::INVALID_CLOSED_CAPTION_XML_SIZE_IN_BYTES:
add(i, _("The XML in the closed caption asset %f takes up %n bytes which is over the 256KB limit."));
break;
- case dcp::VerificationNote::INVALID_TIMED_TEXT_SIZE_IN_BYTES:
+ case dcp::VerificationNote::Code::INVALID_TIMED_TEXT_SIZE_IN_BYTES:
add(i, _("The timed text asset %f takes up %n bytes which is over the 115MB limit."));
break;
- case dcp::VerificationNote::INVALID_TIMED_TEXT_FONT_SIZE_IN_BYTES:
+ case dcp::VerificationNote::Code::INVALID_TIMED_TEXT_FONT_SIZE_IN_BYTES:
add(i, _("The fonts in the timed text asset %f take up %n bytes which is over the 10MB limit."));
break;
- case dcp::VerificationNote::MISSING_SUBTITLE_LANGUAGE:
+ case dcp::VerificationNote::Code::MISSING_SUBTITLE_LANGUAGE:
add(i, _("The subtitle asset %f contains no <Language> tag."));
break;
- case dcp::VerificationNote::MISMATCHED_SUBTITLE_LANGUAGES:
+ case dcp::VerificationNote::Code::MISMATCHED_SUBTITLE_LANGUAGES:
add(i, _("Not all subtitle assets specify the same <Language> tag."));
break;
- case dcp::VerificationNote::MISSING_SUBTITLE_START_TIME:
+ case dcp::VerificationNote::Code::MISSING_SUBTITLE_START_TIME:
add(i, _("The subtitle asset %f contains no <StartTime> tag."));
break;
- case dcp::VerificationNote::INVALID_SUBTITLE_START_TIME:
+ case dcp::VerificationNote::Code::INVALID_SUBTITLE_START_TIME:
add(i, _("The subtitle asset %f has a <StartTime> which is not zero."));
break;
- case dcp::VerificationNote::INVALID_SUBTITLE_FIRST_TEXT_TIME:
+ case dcp::VerificationNote::Code::INVALID_SUBTITLE_FIRST_TEXT_TIME:
add(i, _("The first subtitle or closed caption happens before 4s into the first reel."));
break;
- case dcp::VerificationNote::INVALID_SUBTITLE_DURATION:
+ case dcp::VerificationNote::Code::INVALID_SUBTITLE_DURATION:
add(i, _("At least one subtitle lasts less than 15 frames."));
break;
- case dcp::VerificationNote::INVALID_SUBTITLE_SPACING:
+ case dcp::VerificationNote::Code::INVALID_SUBTITLE_SPACING:
add(i, _("At least one pair of subtitles is separated by less than 2 frames."));
break;
- case dcp::VerificationNote::INVALID_SUBTITLE_LINE_COUNT:
+ case dcp::VerificationNote::Code::INVALID_SUBTITLE_LINE_COUNT:
add(i, _("There are more than 3 subtitle lines in at least one place."));
break;
- case dcp::VerificationNote::NEARLY_INVALID_SUBTITLE_LINE_LENGTH:
+ case dcp::VerificationNote::Code::NEARLY_INVALID_SUBTITLE_LINE_LENGTH:
add(i, _("There are more than 52 characters in at least one subtitle line."));
break;
- case dcp::VerificationNote::INVALID_SUBTITLE_LINE_LENGTH:
+ case dcp::VerificationNote::Code::INVALID_SUBTITLE_LINE_LENGTH:
add(i, _("There are more than 79 characters in at least one subtitle line."));
break;
- case dcp::VerificationNote::INVALID_CLOSED_CAPTION_LINE_COUNT:
+ case dcp::VerificationNote::Code::INVALID_CLOSED_CAPTION_LINE_COUNT:
add(i, _("There are more than 3 closed caption lines in at least one place."));
break;
- case dcp::VerificationNote::INVALID_CLOSED_CAPTION_LINE_LENGTH:
+ case dcp::VerificationNote::Code::INVALID_CLOSED_CAPTION_LINE_LENGTH:
add(i, _("There are more than 32 characters in at least one closed caption line."));
break;
- case dcp::VerificationNote::INVALID_SOUND_FRAME_RATE:
+ case dcp::VerificationNote::Code::INVALID_SOUND_FRAME_RATE:
add(i, _("The sound asset %f has an invalid frame rate of %n."));
break;
- case dcp::VerificationNote::MISSING_CPL_ANNOTATION_TEXT:
+ case dcp::VerificationNote::Code::MISSING_CPL_ANNOTATION_TEXT:
add(i, _("The CPL %n has no <AnnotationText> tag."));
break;
- case dcp::VerificationNote::MISMATCHED_CPL_ANNOTATION_TEXT:
+ case dcp::VerificationNote::Code::MISMATCHED_CPL_ANNOTATION_TEXT:
add(i, _("The CPL %n has an <AnnotationText> which is not the same as its <ContentTitleText>."));
break;
- case dcp::VerificationNote::MISMATCHED_ASSET_DURATION:
+ case dcp::VerificationNote::Code::MISMATCHED_ASSET_DURATION:
add(i, _("At least one asset in a reel does not have the same duration as the others."));
break;
- case dcp::VerificationNote::MISSING_MAIN_SUBTITLE_FROM_SOME_REELS:
+ case dcp::VerificationNote::Code::MISSING_MAIN_SUBTITLE_FROM_SOME_REELS:
add(i, _("The DCP has subtitles but at least one reel has no subtitle asset."));
break;
- case dcp::VerificationNote::MISMATCHED_CLOSED_CAPTION_ASSET_COUNTS:
+ case dcp::VerificationNote::Code::MISMATCHED_CLOSED_CAPTION_ASSET_COUNTS:
add(i, _("The DCP has closed captions but not every reel has the same number of closed caption assets."));
break;
- case dcp::VerificationNote::MISSING_SUBTITLE_ENTRY_POINT:
+ case dcp::VerificationNote::Code::MISSING_SUBTITLE_ENTRY_POINT:
add(i, _("The subtitle asset %n has no <EntryPoint> tag."));
break;
- case dcp::VerificationNote::INCORRECT_SUBTITLE_ENTRY_POINT:
+ case dcp::VerificationNote::Code::INCORRECT_SUBTITLE_ENTRY_POINT:
add(i, _("Subtitle asset %n has a non-zero <EntryPoint>."));
break;
- case dcp::VerificationNote::MISSING_CLOSED_CAPTION_ENTRY_POINT:
+ case dcp::VerificationNote::Code::MISSING_CLOSED_CAPTION_ENTRY_POINT:
add(i, _("The closed caption asset %n has no <EntryPoint> tag."));
break;
- case dcp::VerificationNote::INCORRECT_CLOSED_CAPTION_ENTRY_POINT:
+ case dcp::VerificationNote::Code::INCORRECT_CLOSED_CAPTION_ENTRY_POINT:
add(i, _("Closed caption asset %n has a non-zero <EntryPoint>."));
break;
- case dcp::VerificationNote::MISSING_HASH:
+ case dcp::VerificationNote::Code::MISSING_HASH:
add(i, _("The asset %n has no <Hash> in the CPL."));
break;
- case dcp::VerificationNote::MISSING_FFEC_IN_FEATURE:
+ case dcp::VerificationNote::Code::MISSING_FFEC_IN_FEATURE:
add(i, _("The DCP is a feature but has no FFEC (first frame of end credits) marker."));
break;
- case dcp::VerificationNote::MISSING_FFMC_IN_FEATURE:
+ case dcp::VerificationNote::Code::MISSING_FFMC_IN_FEATURE:
add(i, _("The DCP is a feature but has no FFMC (first frame of moving credits) marker."));
break;
- case dcp::VerificationNote::MISSING_FFOC:
+ case dcp::VerificationNote::Code::MISSING_FFOC:
add(i, _("The DCP has no FFOC (first frame of content) marker."));
break;
- case dcp::VerificationNote::MISSING_LFOC:
+ case dcp::VerificationNote::Code::MISSING_LFOC:
add(i, _("The DCP has no LFOC (last frame of content) marker."));
break;
- case dcp::VerificationNote::INCORRECT_FFOC:
+ case dcp::VerificationNote::Code::INCORRECT_FFOC:
add(i, _("The DCP has a FFOC of %n instead of 1."));
break;
- case dcp::VerificationNote::INCORRECT_LFOC:
+ case dcp::VerificationNote::Code::INCORRECT_LFOC:
add(i, _("The DCP has a LFOC of %n instead of the reel duration minus one."));
break;
- case dcp::VerificationNote::MISSING_CPL_METADATA:
+ case dcp::VerificationNote::Code::MISSING_CPL_METADATA:
add(i, _("The CPL %n has no CPL metadata tag."));
break;
- case dcp::VerificationNote::MISSING_CPL_METADATA_VERSION_NUMBER:
+ case dcp::VerificationNote::Code::MISSING_CPL_METADATA_VERSION_NUMBER:
add(i, _("The CPL %n has no CPL metadata version number tag."));
break;
- case dcp::VerificationNote::MISSING_EXTENSION_METADATA:
+ case dcp::VerificationNote::Code::MISSING_EXTENSION_METADATA:
add(i, _("The CPL %n has no CPL extension metadata tag."));
break;
- case dcp::VerificationNote::INVALID_EXTENSION_METADATA:
+ case dcp::VerificationNote::Code::INVALID_EXTENSION_METADATA:
add(i, _("The CPL %f has an invalid CPL extension metadata tag (%n)"));
break;
- case dcp::VerificationNote::UNSIGNED_CPL_WITH_ENCRYPTED_CONTENT:
+ case dcp::VerificationNote::Code::UNSIGNED_CPL_WITH_ENCRYPTED_CONTENT:
add(i, _("The CPL %n has encrypted content but is not signed."));
break;
- case dcp::VerificationNote::UNSIGNED_PKL_WITH_ENCRYPTED_CONTENT:
+ case dcp::VerificationNote::Code::UNSIGNED_PKL_WITH_ENCRYPTED_CONTENT:
add(i, _("The PKL %n has encrypted content but is not signed."));
break;
- case dcp::VerificationNote::MISMATCHED_PKL_ANNOTATION_TEXT_WITH_CPL:
+ case dcp::VerificationNote::Code::MISMATCHED_PKL_ANNOTATION_TEXT_WITH_CPL:
add(i, _("The PKL %n has an <AnnotationText> which does not match its CPL's <ContentTitleText>."));
break;
- case dcp::VerificationNote::PARTIALLY_ENCRYPTED:
+ case dcp::VerificationNote::Code::PARTIALLY_ENCRYPTED:
add(i, _("The DCP has encrypted content, but not all its assets are encrypted."));
break;
}
wxString summary_text;
- if (counts[dcp::VerificationNote::VERIFY_ERROR] == 1) {
+ if (counts[dcp::VerificationNote::Type::ERROR] == 1) {
/// TRANSLATORS: this will be used at the start of a string like "1 error, 2 Bv2.1 errors and 3 warnings."
summary_text = _("1 error, ");
} else {
/// TRANSLATORS: this will be used at the start of a string like "1 error, 2 Bv2.1 errors and 3 warnings."
- summary_text = wxString::Format("%d errors, ", counts[dcp::VerificationNote::VERIFY_ERROR]);
+ summary_text = wxString::Format("%d errors, ", counts[dcp::VerificationNote::Type::ERROR]);
}
- if (counts[dcp::VerificationNote::VERIFY_BV21_ERROR] == 1) {
+ if (counts[dcp::VerificationNote::Type::BV21_ERROR] == 1) {
/// TRANSLATORS: this will be used in the middle of a string like "1 error, 2 Bv2.1 errors and 3 warnings."
summary_text += _("1 Bv2.1 error, ");
} else {
/// TRANSLATORS: this will be used in the middle of a string like "1 error, 2 Bv2.1 errors and 3 warnings."
- summary_text += wxString::Format("%d Bv2.1 errors, ", counts[dcp::VerificationNote::VERIFY_BV21_ERROR]);
+ summary_text += wxString::Format("%d Bv2.1 errors, ", counts[dcp::VerificationNote::Type::BV21_ERROR]);
}
- if (counts[dcp::VerificationNote::VERIFY_WARNING] == 1) {
+ if (counts[dcp::VerificationNote::Type::WARNING] == 1) {
/// TRANSLATORS: this will be used at the end of a string like "1 error, 2 Bv2.1 errors and 3 warnings."
summary_text += _("and 1 warning.");
} else {
/// TRANSLATORS: this will be used at the end of a string like "1 error, 2 Bv2.1 errors and 3 warnings."
- summary_text += wxString::Format("and %d warnings.", counts[dcp::VerificationNote::VERIFY_WARNING]);
+ summary_text += wxString::Format("and %d warnings.", counts[dcp::VerificationNote::Type::WARNING]);
}
summary->SetLabel(summary_text);
- if (counts[dcp::VerificationNote::VERIFY_ERROR] == 0) {
- add_bullet (dcp::VerificationNote::VERIFY_ERROR, _("No errors found."));
+ if (counts[dcp::VerificationNote::Type::ERROR] == 0) {
+ add_bullet (dcp::VerificationNote::Type::ERROR, _("No errors found."));
}
- if (counts[dcp::VerificationNote::VERIFY_BV21_ERROR] == 0) {
- add_bullet (dcp::VerificationNote::VERIFY_BV21_ERROR, _("No SMPTE Bv2.1 errors found."));
+ if (counts[dcp::VerificationNote::Type::BV21_ERROR] == 0) {
+ add_bullet (dcp::VerificationNote::Type::BV21_ERROR, _("No SMPTE Bv2.1 errors found."));
}
- if (counts[dcp::VerificationNote::VERIFY_WARNING] == 0) {
- add_bullet (dcp::VerificationNote::VERIFY_WARNING, _("No warnings found."));
+ if (counts[dcp::VerificationNote::Type::WARNING] == 0) {
+ add_bullet (dcp::VerificationNote::Type::WARNING, _("No warnings found."));
}
}
_waveform = _waveform->scale (
dcp::Size (GetSize().GetWidth() - _x_axis_width, waveform_height),
- dcp::YUV_TO_RGB_REC709, AV_PIX_FMT_RGB24, false, false
+ dcp::YUVToRGB::REC709, AV_PIX_FMT_RGB24, false, false
);
}
dcp_file(film, "cpl"),
dcp::LocalTime(),
dcp::LocalTime(),
- dcp::MODIFIED_TRANSITIONAL_1,
+ dcp::Formulation::MODIFIED_TRANSITIONAL_1,
false,
optional<int>()
);
/*
- Copyright (C) 2019 Carl Hetherington <cth@carlh.net>
+ Copyright (C) 2019-2021 Carl Hetherington <cth@carlh.net>
This file is part of DCP-o-matic.
#include "lib/create_cli.h"
#include "lib/ratio.h"
#include "lib/dcp_content_type.h"
+#include "test.h"
#include <boost/test/unit_test.hpp>
#include <boost/tokenizer.hpp>
#include <boost/algorithm/string/predicate.hpp>
cc = run ("dcpomatic2_create x --standard SMPTE");
BOOST_CHECK (!cc.error);
- BOOST_CHECK_EQUAL (cc.standard, dcp::SMPTE);
+ BOOST_CHECK_EQUAL (cc.standard, dcp::Standard::SMPTE);
cc = run ("dcpomatic2_create x --standard interop");
BOOST_CHECK (!cc.error);
- BOOST_CHECK_EQUAL (cc.standard, dcp::INTEROP);
+ BOOST_CHECK_EQUAL (cc.standard, dcp::Standard::INTEROP);
cc = run ("dcpomatic2_create x --standard SMPTEX");
BOOST_CHECK (cc.error);
encrypted_dcp.cpls().front()->file().get(),
dcp::LocalTime ("2030-07-21T00:00:00+00:00"),
dcp::LocalTime ("2031-07-21T00:00:00+00:00"),
- dcp::MODIFIED_TRANSITIONAL_1,
+ dcp::Formulation::MODIFIED_TRANSITIONAL_1,
true, 0
);
film->examine_and_add_content (s);
BOOST_REQUIRE (!wait_for_jobs ());
s->only_text()->set_colour (dcp::Colour (255, 255, 0));
- s->only_text()->set_effect (dcp::SHADOW);
+ s->only_text()->set_effect (dcp::Effect::SHADOW);
s->only_text()->set_effect_colour (dcp::Colour (0, 255, 255));
film->write_metadata();
film->examine_and_add_content (s);
BOOST_REQUIRE (!wait_for_jobs ());
s->only_text()->set_colour (dcp::Colour (255, 255, 0));
- s->only_text()->set_effect (dcp::SHADOW);
+ s->only_text()->set_effect (dcp::Effect::SHADOW);
s->only_text()->set_effect_colour (dcp::Colour (0, 255, 255));
shared_ptr<Job> job (new TranscodeJob (film));
film->examine_and_add_content (s);
BOOST_REQUIRE (!wait_for_jobs ());
s->only_text()->set_colour (dcp::Colour (255, 255, 0));
- s->only_text()->set_effect (dcp::SHADOW);
+ s->only_text()->set_effect (dcp::Effect::SHADOW);
s->only_text()->set_effect_colour (dcp::Colour (0, 255, 255));
film->write_metadata();
film->examine_and_add_content (s);
BOOST_REQUIRE (!wait_for_jobs ());
s->only_text()->set_colour (dcp::Colour (255, 255, 0));
- s->only_text()->set_effect (dcp::SHADOW);
+ s->only_text()->set_effect (dcp::Effect::SHADOW);
s->only_text()->set_effect_colour (dcp::Colour (0, 255, 255));
film->write_metadata();
{
shared_ptr<FFmpegImageProxy> proxy (new FFmpegImageProxy (TestPaths::private_data() / "prophet_frame.tiff", VIDEO_RANGE_FULL));
shared_ptr<Image> raw = proxy->image().image;
- shared_ptr<Image> background = raw->convert_pixel_format (dcp::YUV_TO_RGB_REC709, format, true, false);
+ shared_ptr<Image> background = raw->convert_pixel_format (dcp::YUVToRGB::REC709, format, true, false);
shared_ptr<Image> overlay (new Image (AV_PIX_FMT_BGRA, dcp::Size(431, 891), true));
overlay->make_transparent ();
background->alpha_blend (overlay, Position<int> (13, 17));
- shared_ptr<Image> save = background->convert_pixel_format (dcp::YUV_TO_RGB_REC709, AV_PIX_FMT_RGB24, false, false);
+ shared_ptr<Image> save = background->convert_pixel_format (dcp::YUVToRGB::REC709, AV_PIX_FMT_RGB24, false, false);
write_image (save, "build/test/image_test_" + suffix + ".png");
check_image ("build/test/image_test_" + suffix + ".png", TestPaths::private_data() / ("image_test_" + suffix + ".png"));
shared_ptr<FFmpegImageProxy> proxy(new FFmpegImageProxy("test/data/flat_red.png", VIDEO_RANGE_FULL));
shared_ptr<Image> raw = proxy->image().image;
shared_ptr<Image> out = raw->crop_scale_window(
- Crop(), dcp::Size(1998, 836), dcp::Size(1998, 1080), dcp::YUV_TO_RGB_REC709, VIDEO_RANGE_FULL, AV_PIX_FMT_YUV420P, VIDEO_RANGE_FULL, true, false
+ Crop(), dcp::Size(1998, 836), dcp::Size(1998, 1080), dcp::YUVToRGB::REC709, VIDEO_RANGE_FULL, AV_PIX_FMT_YUV420P, VIDEO_RANGE_FULL, true, false
);
- shared_ptr<Image> save = out->scale(dcp::Size(1998, 1080), dcp::YUV_TO_RGB_REC709, AV_PIX_FMT_RGB24, false, false);
+ shared_ptr<Image> save = out->scale(dcp::Size(1998, 1080), dcp::YUVToRGB::REC709, AV_PIX_FMT_RGB24, false, false);
write_image(save, "build/test/crop_scale_window_test.png");
check_image("test/data/crop_scale_window_test.png", "build/test/crop_scale_window_test.png");
}
{
shared_ptr<Image> image (new Image(AV_PIX_FMT_XYZ12LE, dcp::Size(2048, 858), true));
image->crop_scale_window (
- Crop(279, 0, 0, 0), dcp::Size(1069, 448), dcp::Size(1069, 578), dcp::YUV_TO_RGB_REC709, VIDEO_RANGE_FULL, AV_PIX_FMT_RGB24, VIDEO_RANGE_FULL, false, false
+ Crop(279, 0, 0, 0), dcp::Size(1069, 448), dcp::Size(1069, 578), dcp::YUVToRGB::REC709, VIDEO_RANGE_FULL, AV_PIX_FMT_RGB24, VIDEO_RANGE_FULL, false, false
);
image->crop_scale_window (
- Crop(2048, 0, 0, 0), dcp::Size(1069, 448), dcp::Size(1069, 578), dcp::YUV_TO_RGB_REC709, VIDEO_RANGE_FULL, AV_PIX_FMT_RGB24, VIDEO_RANGE_FULL, false, false
+ Crop(2048, 0, 0, 0), dcp::Size(1069, 448), dcp::Size(1069, 578), dcp::YUVToRGB::REC709, VIDEO_RANGE_FULL, AV_PIX_FMT_RGB24, VIDEO_RANGE_FULL, false, false
);
}
BOOST_AUTO_TEST_CASE (crop_scale_window_test3)
{
shared_ptr<FFmpegImageProxy> proxy(new FFmpegImageProxy(TestPaths::private_data() / "player_seek_test_0.png", VIDEO_RANGE_FULL));
- shared_ptr<Image> xyz = proxy->image().image->convert_pixel_format(dcp::YUV_TO_RGB_REC709, AV_PIX_FMT_RGB24, true, false);
+ shared_ptr<Image> xyz = proxy->image().image->convert_pixel_format(dcp::YUVToRGB::REC709, AV_PIX_FMT_RGB24, true, false);
shared_ptr<Image> cropped = xyz->crop_scale_window(
- Crop(512, 0, 0, 0), dcp::Size(1486, 1080), dcp::Size(1998, 1080), dcp::YUV_TO_RGB_REC709, VIDEO_RANGE_FULL, AV_PIX_FMT_RGB24, VIDEO_RANGE_FULL, false, false
+ Crop(512, 0, 0, 0), dcp::Size(1486, 1080), dcp::Size(1998, 1080), dcp::YUVToRGB::REC709, VIDEO_RANGE_FULL, AV_PIX_FMT_RGB24, VIDEO_RANGE_FULL, false, false
);
write_image(cropped, "build/test/crop_scale_window_test3.png");
check_image("test/data/crop_scale_window_test3.png", "build/test/crop_scale_window_test3.png");
BOOST_AUTO_TEST_CASE (crop_scale_window_test4)
{
shared_ptr<FFmpegImageProxy> proxy(new FFmpegImageProxy(TestPaths::private_data() / "player_seek_test_0.png", VIDEO_RANGE_FULL));
- shared_ptr<Image> xyz = proxy->image().image->convert_pixel_format(dcp::YUV_TO_RGB_REC709, AV_PIX_FMT_RGB24, true, false);
+ shared_ptr<Image> xyz = proxy->image().image->convert_pixel_format(dcp::YUVToRGB::REC709, AV_PIX_FMT_RGB24, true, false);
shared_ptr<Image> cropped = xyz->crop_scale_window(
- Crop(512, 0, 0, 0), dcp::Size(1486, 1080), dcp::Size(1998, 1080), dcp::YUV_TO_RGB_REC709, VIDEO_RANGE_FULL, AV_PIX_FMT_XYZ12LE, VIDEO_RANGE_FULL, false, false
+ Crop(512, 0, 0, 0), dcp::Size(1486, 1080), dcp::Size(1998, 1080), dcp::YUVToRGB::REC709, VIDEO_RANGE_FULL, AV_PIX_FMT_XYZ12LE, VIDEO_RANGE_FULL, false, false
);
write_image(cropped, "build/test/crop_scale_window_test4.png");
check_image("test/data/crop_scale_window_test4.png", "build/test/crop_scale_window_test4.png", 35000);
BOOST_AUTO_TEST_CASE (crop_scale_window_test5)
{
shared_ptr<FFmpegImageProxy> proxy(new FFmpegImageProxy(TestPaths::private_data() / "player_seek_test_0.png", VIDEO_RANGE_FULL));
- shared_ptr<Image> xyz = proxy->image().image->convert_pixel_format(dcp::YUV_TO_RGB_REC709, AV_PIX_FMT_XYZ12LE, true, false);
+ shared_ptr<Image> xyz = proxy->image().image->convert_pixel_format(dcp::YUVToRGB::REC709, AV_PIX_FMT_XYZ12LE, true, false);
shared_ptr<Image> cropped = xyz->crop_scale_window(
- Crop(512, 0, 0, 0), dcp::Size(1486, 1080), dcp::Size(1998, 1080), dcp::YUV_TO_RGB_REC709, VIDEO_RANGE_FULL, AV_PIX_FMT_RGB24, VIDEO_RANGE_FULL, false, false
+ Crop(512, 0, 0, 0), dcp::Size(1486, 1080), dcp::Size(1998, 1080), dcp::YUVToRGB::REC709, VIDEO_RANGE_FULL, AV_PIX_FMT_RGB24, VIDEO_RANGE_FULL, false, false
);
write_image(cropped, "build/test/crop_scale_window_test5.png");
check_image("test/data/crop_scale_window_test5.png", "build/test/crop_scale_window_test5.png");
BOOST_AUTO_TEST_CASE (crop_scale_window_test6)
{
shared_ptr<FFmpegImageProxy> proxy(new FFmpegImageProxy(TestPaths::private_data() / "player_seek_test_0.png", VIDEO_RANGE_FULL));
- shared_ptr<Image> xyz = proxy->image().image->convert_pixel_format(dcp::YUV_TO_RGB_REC709, AV_PIX_FMT_XYZ12LE, true, false);
+ shared_ptr<Image> xyz = proxy->image().image->convert_pixel_format(dcp::YUVToRGB::REC709, AV_PIX_FMT_XYZ12LE, true, false);
shared_ptr<Image> cropped = xyz->crop_scale_window(
- Crop(512, 0, 0, 0), dcp::Size(1486, 1080), dcp::Size(1998, 1080), dcp::YUV_TO_RGB_REC709, VIDEO_RANGE_FULL, AV_PIX_FMT_XYZ12LE, VIDEO_RANGE_FULL, false, false
+ Crop(512, 0, 0, 0), dcp::Size(1486, 1080), dcp::Size(1998, 1080), dcp::YUVToRGB::REC709, VIDEO_RANGE_FULL, AV_PIX_FMT_XYZ12LE, VIDEO_RANGE_FULL, false, false
);
write_image(cropped, "build/test/crop_scale_window_test6.png");
check_image("test/data/crop_scale_window_test6.png", "build/test/crop_scale_window_test6.png", 35000);
using namespace boost::filesystem;
for (int left_crop = 0; left_crop < 8; ++left_crop) {
shared_ptr<FFmpegImageProxy> proxy(new FFmpegImageProxy("test/data/rgb_grey_testcard.png", VIDEO_RANGE_FULL));
- shared_ptr<Image> yuv = proxy->image().image->convert_pixel_format(dcp::YUV_TO_RGB_REC709, AV_PIX_FMT_YUV420P, true, false);
+ shared_ptr<Image> yuv = proxy->image().image->convert_pixel_format(dcp::YUVToRGB::REC709, AV_PIX_FMT_YUV420P, true, false);
int rounded = left_crop - (left_crop % 2);
shared_ptr<Image> cropped = yuv->crop_scale_window(
Crop(left_crop, 0, 0, 0),
dcp::Size(1998 - rounded, 1080),
dcp::Size(1998 - rounded, 1080),
- dcp::YUV_TO_RGB_REC709,
+ dcp::YUVToRGB::REC709,
VIDEO_RANGE_VIDEO,
AV_PIX_FMT_RGB24,
VIDEO_RANGE_VIDEO,
{
shared_ptr<FFmpegImageProxy> proxy(new FFmpegImageProxy("test/data/3d_test/000001.png", VIDEO_RANGE_FULL));
shared_ptr<Image> image_rgb = proxy->image().image;
- shared_ptr<Image> image_bgr = image_rgb->convert_pixel_format(dcp::YUV_TO_RGB_REC709, AV_PIX_FMT_BGRA, true, false);
+ shared_ptr<Image> image_bgr = image_rgb->convert_pixel_format(dcp::YUVToRGB::REC709, AV_PIX_FMT_BGRA, true, false);
image_rgb->as_png().write ("build/test/as_png_rgb.png");
image_bgr->as_png().write ("build/test/as_png_bgr.png");
yuv.make_black ();
yuv.fade (0);
string const filename = "fade_test_black_" + name + ".png";
- yuv.convert_pixel_format(dcp::YUV_TO_RGB_REC709, AV_PIX_FMT_RGBA, true, false)->as_png().write("build/test/" + filename);
+ yuv.convert_pixel_format(dcp::YUVToRGB::REC709, AV_PIX_FMT_RGBA, true, false)->as_png().write("build/test/" + filename);
check_image ("test/data/" + filename, "build/test/" + filename);
}
fade_test_format_red (AVPixelFormat f, float amount, string name)
{
shared_ptr<FFmpegImageProxy> proxy(new FFmpegImageProxy("test/data/flat_red.png", VIDEO_RANGE_FULL));
- shared_ptr<Image> red = proxy->image().image->convert_pixel_format(dcp::YUV_TO_RGB_REC709, f, true, false);
+ shared_ptr<Image> red = proxy->image().image->convert_pixel_format(dcp::YUVToRGB::REC709, f, true, false);
red->fade (amount);
string const filename = "fade_test_red_" + name + ".png";
- red->convert_pixel_format(dcp::YUV_TO_RGB_REC709, AV_PIX_FMT_RGBA, true, false)->as_png().write("build/test/" + filename);
+ red->convert_pixel_format(dcp::YUVToRGB::REC709, AV_PIX_FMT_RGBA, true, false)->as_png().write("build/test/" + filename);
check_image ("test/data/" + filename, "build/test/" + filename);
}
for (list<AVPixelFormat>::const_iterator i = pix_fmts.begin(); i != pix_fmts.end(); ++i) {
std::shared_ptr<Image> foo (new Image (*i, in_size, true));
foo->make_black ();
- std::shared_ptr<Image> bar = foo->scale (out_size, dcp::YUV_TO_RGB_REC601, AV_PIX_FMT_RGB24, true, false);
+ std::shared_ptr<Image> bar = foo->scale (out_size, dcp::YUVToRGB::REC601, AV_PIX_FMT_RGB24, true, false);
uint8_t* p = bar->data()[0];
for (int y = 0; y < bar->size().height; ++y) {
shared_ptr<Image> image (new Image (AV_PIX_FMT_RGB24, dcp::Size(128, 128), true));
image->make_black ();
shared_ptr<Image> scaled = image->crop_scale_window (
- Crop(0, 0, 128, 128), dcp::Size(1323, 565), dcp::Size(1349, 565), dcp::YUV_TO_RGB_REC709, VIDEO_RANGE_FULL, AV_PIX_FMT_RGB24, VIDEO_RANGE_FULL, true, true
+ Crop(0, 0, 128, 128), dcp::Size(1323, 565), dcp::Size(1349, 565), dcp::YUVToRGB::REC709, VIDEO_RANGE_FULL, AV_PIX_FMT_RGB24, VIDEO_RANGE_FULL, true, true
);
string const filename = "over_crop_test.png";
write_image (scaled, "build/test/" + filename);
A_dcp.cpls().front()->file().get(),
dcp::LocalTime ("2030-07-21T00:00:00+00:00"),
dcp::LocalTime ("2031-07-21T00:00:00+00:00"),
- dcp::MODIFIED_TRANSITIONAL_1,
+ dcp::Formulation::MODIFIED_TRANSITIONAL_1,
true, 0
);
AudioMapping mapping = sound->audio->mapping ();
- mapping.set (0, dcp::LEFT, 1.0);
+ mapping.set (0, dcp::Channel::LEFT, 1.0);
sound->audio->set_mapping (mapping);
BOOST_CHECK_EQUAL (film->isdcf_name(false), "LikeShouting_XSN-2_F-133_DE-fr_US-R_20_4K_DI_20140704_PP_SMPTE_OV");
- mapping.set (0, dcp::RIGHT, 1.0);
+ mapping.set (0, dcp::Channel::RIGHT, 1.0);
sound->audio->set_mapping (mapping);
BOOST_CHECK_EQUAL (film->isdcf_name(false), "LikeShouting_XSN-2_F-133_DE-fr_US-R_30_4K_DI_20140704_PP_SMPTE_OV");
- mapping.set (0, dcp::LFE, 1.0);
+ mapping.set (0, dcp::Channel::LFE, 1.0);
sound->audio->set_mapping (mapping);
BOOST_CHECK_EQUAL (film->isdcf_name(false), "LikeShouting_XSN-2_F-133_DE-fr_US-R_31_4K_DI_20140704_PP_SMPTE_OV");
- mapping.set (0, dcp::LS, 1.0);
+ mapping.set (0, dcp::Channel::LS, 1.0);
sound->audio->set_mapping (mapping);
BOOST_CHECK_EQUAL (film->isdcf_name(false), "LikeShouting_XSN-2_F-133_DE-fr_US-R_41_4K_DI_20140704_PP_SMPTE_OV");
- mapping.set (0, dcp::RS, 1.0);
+ mapping.set (0, dcp::Channel::RS, 1.0);
sound->audio->set_mapping (mapping);
BOOST_CHECK_EQUAL (film->isdcf_name(false), "LikeShouting_XSN-2_F-133_DE-fr_US-R_51_4K_DI_20140704_PP_SMPTE_OV");
- mapping.set (0, dcp::HI, 1.0);
+ mapping.set (0, dcp::Channel::HI, 1.0);
sound->audio->set_mapping (mapping);
BOOST_CHECK_EQUAL (film->isdcf_name(false), "LikeShouting_XSN-2_F-133_DE-fr_US-R_51_4K_DI_20140704_PP_SMPTE_OV");
film->set_audio_channels (8);
- mapping.set (0, dcp::HI, 1.0);
+ mapping.set (0, dcp::Channel::HI, 1.0);
sound->audio->set_mapping (mapping);
BOOST_CHECK_EQUAL (film->isdcf_name(false), "LikeShouting_XSN-2_F-133_DE-fr_US-R_51-HI_4K_DI_20140704_PP_SMPTE_OV");
- mapping.set (0, dcp::VI, 1.0);
+ mapping.set (0, dcp::Channel::VI, 1.0);
sound->audio->set_mapping (mapping);
BOOST_CHECK_EQUAL (film->isdcf_name(false), "LikeShouting_XSN-2_F-133_DE-fr_US-R_51-HI-VI_4K_DI_20140704_PP_SMPTE_OV");
film->set_audio_channels(10);
- mapping.set (0, dcp::HI, 0.0);
- mapping.set (0, dcp::VI, 0.0);
+ mapping.set (0, dcp::Channel::HI, 0.0);
+ mapping.set (0, dcp::Channel::VI, 0.0);
sound->audio->set_mapping (mapping);
BOOST_CHECK_EQUAL (film->isdcf_name(false), "LikeShouting_XSN-2_F-133_DE-fr_US-R_51_4K_DI_20140704_PP_SMPTE_OV");
- mapping.set (0, dcp::HI, 1.0);
+ mapping.set (0, dcp::Channel::HI, 1.0);
sound->audio->set_mapping (mapping);
BOOST_CHECK_EQUAL (film->isdcf_name(false), "LikeShouting_XSN-2_F-133_DE-fr_US-R_51-HI_4K_DI_20140704_PP_SMPTE_OV");
- mapping.set (0, dcp::VI, 1.0);
+ mapping.set (0, dcp::Channel::VI, 1.0);
sound->audio->set_mapping (mapping);
BOOST_CHECK_EQUAL (film->isdcf_name(false), "LikeShouting_XSN-2_F-133_DE-fr_US-R_51-HI-VI_4K_DI_20140704_PP_SMPTE_OV");
film->set_audio_channels(12);
- mapping.set (0, dcp::BSL, 1.0);
- mapping.set (0, dcp::BSR, 1.0);
- mapping.set (0, dcp::HI, 0.0);
- mapping.set (0, dcp::VI, 0.0);
+ mapping.set (0, dcp::Channel::BSL, 1.0);
+ mapping.set (0, dcp::Channel::BSR, 1.0);
+ mapping.set (0, dcp::Channel::HI, 0.0);
+ mapping.set (0, dcp::Channel::VI, 0.0);
sound->audio->set_mapping (mapping);
BOOST_CHECK_EQUAL (film->isdcf_name(false), "LikeShouting_XSN-2_F-133_DE-fr_US-R_71_4K_DI_20140704_PP_SMPTE_OV");
- mapping.set (0, dcp::HI, 1.0);
+ mapping.set (0, dcp::Channel::HI, 1.0);
sound->audio->set_mapping (mapping);
BOOST_CHECK_EQUAL (film->isdcf_name(false), "LikeShouting_XSN-2_F-133_DE-fr_US-R_71-HI_4K_DI_20140704_PP_SMPTE_OV");
- mapping.set (0, dcp::VI, 1.0);
+ mapping.set (0, dcp::Channel::VI, 1.0);
sound->audio->set_mapping (mapping);
BOOST_CHECK_EQUAL (film->isdcf_name(false), "LikeShouting_XSN-2_F-133_DE-fr_US-R_71-HI-VI_4K_DI_20140704_PP_SMPTE_OV");
}
cinema_a_screen_1,
boost::posix_time::time_from_string(from_string),
boost::posix_time::time_from_string(until_string),
- dcp::MODIFIED_TRANSITIONAL_1,
+ dcp::Formulation::MODIFIED_TRANSITIONAL_1,
false,
optional<int>()
);
i,
boost::posix_time::time_from_string(from_string),
boost::posix_time::time_from_string(until_string),
- dcp::MODIFIED_TRANSITIONAL_1,
+ dcp::Formulation::MODIFIED_TRANSITIONAL_1,
false,
optional<int>()
);
/*
- Copyright (C) 2012-2014 Carl Hetherington <cth@carlh.net>
+ Copyright (C) 2012-2021 Carl Hetherington <cth@carlh.net>
This file is part of DCP-o-matic.
#include <dcp/util.h>
#include "lib/ratio.h"
#include "lib/util.h"
+#include "test.h"
using std::ostream;
static void
note (dcp::NoteType t, string n)
{
- if (t == dcp::DCP_ERROR) {
+ if (t == dcp::NoteType::ERROR) {
cout << n << "\n";
}
}
*cpl,
dcp::LocalTime ("2030-01-01T01:00:00+00:00"),
dcp::LocalTime ("2031-01-01T01:00:00+00:00"),
- dcp::MODIFIED_TRANSITIONAL_1,
+ dcp::Formulation::MODIFIED_TRANSITIONAL_1,
true,
0
);
dcp::Time (),
dcp::Time (),
1,
- dcp::HALIGN_LEFT,
+ dcp::HAlign::LEFT,
1,
- dcp::VALIGN_TOP,
- dcp::DIRECTION_LTR,
+ dcp::VAlign::TOP,
+ dcp::Direction::LTR,
text,
- dcp::NONE,
+ dcp::Effect::NONE,
dcp::Colour (0, 0, 0),
dcp::Time (),
dcp::Time ()
BOOST_CHECK_EQUAL (a.name, "hello there world");
BOOST_CHECK_EQUAL (a.mapping().input_channels(), 2);
- BOOST_CHECK_EQUAL (a.mapping().get (0, static_cast<int> (dcp::LEFT)), 1);
- BOOST_CHECK_EQUAL (a.mapping().get (0, static_cast<int> (dcp::RIGHT)), 0);
- BOOST_CHECK_EQUAL (a.mapping().get (0, static_cast<int> (dcp::CENTRE)), 1);
- BOOST_CHECK_EQUAL (a.mapping().get (1, static_cast<int> (dcp::LEFT)), 0);
- BOOST_CHECK_EQUAL (a.mapping().get (1, static_cast<int> (dcp::RIGHT)), 1);
- BOOST_CHECK_EQUAL (a.mapping().get (1, static_cast<int> (dcp::CENTRE)), 1);
+ BOOST_CHECK_EQUAL (a.mapping().get(0, dcp::Channel::LEFT), 1);
+ BOOST_CHECK_EQUAL (a.mapping().get(0, dcp::Channel::RIGHT), 0);
+ BOOST_CHECK_EQUAL (a.mapping().get(0, dcp::Channel::CENTRE), 1);
+ BOOST_CHECK_EQUAL (a.mapping().get(1, dcp::Channel::LEFT), 0);
+ BOOST_CHECK_EQUAL (a.mapping().get(1, dcp::Channel::RIGHT), 1);
+ BOOST_CHECK_EQUAL (a.mapping().get(1, dcp::Channel::CENTRE), 1);
}
static void
note (dcp::NoteType t, string n)
{
- if (t == dcp::DCP_ERROR) {
+ if (t == dcp::NoteType::ERROR) {
cerr << n << "\n";
}
}
dcpomatic_log = _old;
}
+std::ostream&
+dcp::operator<< (std::ostream& s, dcp::Size i)
+{
+ s << i.width << "x" << i.height;
+ return s;
+}
+
+std::ostream&
+dcp::operator<< (std::ostream& s, Standard t)
+{
+ switch (t) {
+ case Standard::INTEROP:
+ s << "interop";
+ break;
+ case Standard::SMPTE:
+ s << "smpte";
+ break;
+ }
+ return s;
+}
+
/*
- Copyright (C) 2013-2019 Carl Hetherington <cth@carlh.net>
+ Copyright (C) 2013-2021 Carl Hetherington <cth@carlh.net>
This file is part of DCP-o-matic.
#include "lib/warnings.h"
+#include <dcp/types.h>
#include <boost/filesystem.hpp>
std::shared_ptr<Log> _old;
};
+namespace dcp {
+
+std::ostream& operator<< (std::ostream& s, dcp::Size i);
+std::ostream& operator<< (std::ostream& s, Standard t);
+
+}
A_dcp.cpls().front()->file().get(),
dcp::LocalTime ("2030-07-21T00:00:00+00:00"),
dcp::LocalTime ("2031-07-21T00:00:00+00:00"),
- dcp::MODIFIED_TRANSITIONAL_1,
+ dcp::Formulation::MODIFIED_TRANSITIONAL_1,
true, 0
);
B_dcp.cpls().front()->file().get(),
dcp::LocalTime ("2030-07-21T00:00:00+00:00"),
dcp::LocalTime ("2031-07-21T00:00:00+00:00"),
- dcp::MODIFIED_TRANSITIONAL_1,
+ dcp::Formulation::MODIFIED_TRANSITIONAL_1,
true, 0
);
#include "lib/ratio.h"
#include "lib/video_content.h"
+#include "test.h"
#include <boost/test/unit_test.hpp>