}
+/** @return the "from" time of this text, as a DCPTime, if it should be stored in _active_texts; otherwise an empty optional */
+boost::optional<DCPTime>
+Player::should_store(weak_ptr<Piece> weak_piece, weak_ptr<const TextContent> weak_content, ContentTime subtitle_from) const
+{
+ if (_suspended) {
+ return {};
+ }
+
+ auto piece = weak_piece.lock();
+ auto content = weak_content.lock();
+ auto film = _film.lock();
+ if (!piece || !content || !film) {
+ return {};
+ }
+
+ DCPTime const from(content_time_to_dcp(piece, subtitle_from));
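+	/* Ignore texts that start after the end of their (possibly trimmed) content */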
+ if (from > piece->content->end(film)) {
+ return {};
+ }
+
+ return from;
+}
+
+
void
Player::bitmap_text_start(weak_ptr<Piece> weak_piece, weak_ptr<const TextContent> weak_content, ContentBitmapText subtitle)
{
- if (_suspended) {
+ auto from = should_store(weak_piece, weak_content, subtitle.from());
+ if (!from) {
return;
}
- auto piece = weak_piece.lock();
auto content = weak_content.lock();
- if (!piece || !content) {
+ if (!content) {
return;
}
ps.bitmap.push_back(BitmapText(image->scale(scaled_size, dcp::YUVToRGB::REC601, image->pixel_format(), Image::Alignment::PADDED, _fast), sub.rectangle));
}
- DCPTime from(content_time_to_dcp(piece, subtitle.from()));
- _active_texts[content->type()].add_from(weak_content, ps, from);
+ _active_texts[content->type()].add_from(weak_content, ps, *from);
}
void
Player::plain_text_start(weak_ptr<Piece> weak_piece, weak_ptr<const TextContent> weak_content, ContentStringText subtitle)
{
- if (_suspended) {
+ auto from = should_store(weak_piece, weak_content, subtitle.from());
+ if (!from) {
return;
}
- auto piece = weak_piece.lock();
auto content = weak_content.lock();
- auto film = _film.lock();
- if (!piece || !content || !film) {
+ if (!content) {
return;
}
PlayerText ps;
- DCPTime const from(content_time_to_dcp(piece, subtitle.from()));
-
- if (from > piece->content->end(film)) {
- return;
- }
-
for (auto s: subtitle.subs) {
s.set_h_position(s.h_position() + content->x_offset());
s.set_v_position(s.v_position() + content->y_offset());
s.set_aspect_adjust(xs / ys);
}
-		s.set_in(dcp::Time(from.seconds(), 1000));
+		s.set_in(dcp::Time(from->seconds(), 1000));
		ps.string.push_back(s);
}
- _active_texts[content->type()].add_from(weak_content, ps, from);
+ _active_texts[content->type()].add_from(weak_content, ps, *from);
}
return;
}
- DCPTime const dcp_to = content_time_to_dcp(piece, to);
+ auto dcp_to = content_time_to_dcp(piece, to);
+ auto from = _active_texts[content->type()].add_to(weak_content, dcp_to);
+ auto subtitle_period = DCPTimePeriod(from.second, dcp_to);
- if (dcp_to > piece->content->end(film)) {
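+	/* Clamp the text's period to the content's period; if the two do not overlap at all, drop the text */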
+ auto overlap = piece->content->period(film).overlap(subtitle_period);
+ if (!overlap) {
return;
}
- auto from = _active_texts[content->type()].add_to(weak_content, dcp_to);
-
bool const always = (content->type() == TextType::OPEN_SUBTITLE && _always_burn_open_subtitles);
if (content->use() && !always && !content->burn()) {
- Text(from.first, content->type(), content->dcp_track().get_value_or(DCPTextTrack()), DCPTimePeriod(from.second, dcp_to));
+ Text(from.first, content->type(), content->dcp_track().get_value_or(DCPTextTrack()), *overlap);
}
}
boost::optional<PositionImage> open_texts_for_frame(dcpomatic::DCPTime time) const;
void emit_video(std::shared_ptr<PlayerVideo> pv, dcpomatic::DCPTime time);
void use_video(std::shared_ptr<PlayerVideo> pv, dcpomatic::DCPTime time, dcpomatic::DCPTime end);
	void emit_audio(std::shared_ptr<AudioBuffers> data, dcpomatic::DCPTime time);
	std::shared_ptr<const Playlist> playlist() const;
+ boost::optional<dcpomatic::DCPTime> should_store(
+ std::weak_ptr<Piece> weak_piece, std::weak_ptr<const TextContent> weak_content, dcpomatic::ContentTime subtitle_from
+ ) const;
	/** Mutex to protect most of the Player state. When it's used for the preview we have
seek() and pass() called from the Butler thread and lots of other stuff called
BOOST_CHECK(max_X > 100);
}
+
+BOOST_AUTO_TEST_CASE(dcp_subtitle_trim_test)
+{
+ auto content = make_shared<DCPSubtitleContent>("test/data/dcp_sub7.xml");
+	auto film = new_test_film("dcp_subtitle_trim_test", { content });
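+	/* Trim the content at both ends; the texts written to the DCP should be re-timed to match */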
+ content->set_trim_start(film, dcpomatic::ContentTime::from_seconds(10.5));
+ content->set_trim_end(dcpomatic::ContentTime::from_seconds(2.5));
+ content->text[0]->set_language(dcp::LanguageTag("en"));
+
+ make_and_verify_dcp(
+ film,
+ {
+ dcp::VerificationNote::Code::INVALID_SUBTITLE_FIRST_TEXT_TIME,
+ dcp::VerificationNote::Code::MISSING_CPL_METADATA,
+ dcp::VerificationNote::Code::INVALID_SUBTITLE_DURATION,
+ });
+
+ dcp::SMPTETextAsset asset(find_file(film->dir(film->dcp_name()), "sub_"));
+ auto texts = asset.texts();
+ BOOST_REQUIRE_EQUAL(texts.size(), 9U);
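+	/* Check the in/out times of the first and last texts that remain after trimming */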
+ BOOST_CHECK(texts[0]->in() == dcp::Time(0, 0, 0, 0, 24));
+ BOOST_CHECK(texts[0]->out() == dcp::Time(0, 0, 0, 12, 24));
+ BOOST_CHECK(texts[8]->in() == dcp::Time(0, 0, 15, 12, 24));
+ BOOST_CHECK(texts[8]->out() == dcp::Time(0, 0, 16, 0, 24));
+}
+
+
+