Player::Player (shared_ptr<const Film> film, Image::Alignment subtitle_alignment)
: _film (film)
, _suspended (0)
+ , _ignore_video(false)
+ , _ignore_audio(false)
+ , _ignore_text(false)
+ , _always_burn_open_subtitles(false)
+ , _fast(false)
, _tolerant (film->tolerant())
+ , _play_referenced(false)
, _audio_merger (_film->audio_frame_rate())
, _subtitle_alignment (subtitle_alignment)
{
: _film (film)
, _playlist (playlist_)
, _suspended (0)
+ , _ignore_video(false)
+ , _ignore_audio(false)
+ , _ignore_text(false)
+ , _always_burn_open_subtitles(false)
+ , _fast(false)
, _tolerant (film->tolerant())
+ , _play_referenced(false)
, _audio_merger (_film->audio_frame_rate())
{
construct ();
}
-void
-Player::setup_pieces ()
-{
- boost::mutex::scoped_lock lm (_mutex);
- setup_pieces_unlocked ();
-}
-
-
bool
have_video (shared_ptr<const Content> content)
{
- return static_cast<bool>(content->video) && content->video->use();
+ return static_cast<bool>(content->video) && content->video->use() && content->can_be_played();
}
bool
have_audio (shared_ptr<const Content> content)
{
- return static_cast<bool>(content->audio);
+ return static_cast<bool>(content->audio) && content->can_be_played();
}
void
-Player::setup_pieces_unlocked ()
+Player::setup_pieces ()
{
+ boost::mutex::scoped_lock lm (_mutex);
+
_playback_length = _playlist ? _playlist->length(_film) : _film->length();
auto old_pieces = _pieces;
_pieces.clear ();
- _shuffler.reset (new Shuffler());
- _shuffler->Video.connect(bind(&Player::video, this, _1, _2));
+ auto playlist_content = playlist()->content();
+ bool const have_threed = std::any_of(
+ playlist_content.begin(),
+ playlist_content.end(),
+ [](shared_ptr<const Content> c) {
+ return c->video && (c->video->frame_type() == VideoFrameType::THREE_D_LEFT || c->video->frame_type() == VideoFrameType::THREE_D_RIGHT);
+ });
+
+
+ if (have_threed) {
+ _shuffler.reset(new Shuffler());
+ _shuffler->Video.connect(bind(&Player::video, this, _1, _2));
+ }
for (auto i: playlist()->content()) {
_pieces.push_back (piece);
if (decoder->video) {
- if (i->video->frame_type() == VideoFrameType::THREE_D_LEFT || i->video->frame_type() == VideoFrameType::THREE_D_RIGHT) {
+ if (have_threed) {
/* We need a Shuffler to cope with 3D L/R video data arriving out of sequence */
decoder->video->Data.connect (bind(&Shuffler::video, _shuffler.get(), weak_ptr<Piece>(piece), _1));
} else {
}
}
+ auto ignore_overlap = [](shared_ptr<VideoContent> v) {
+ return v && v->use() && v->frame_type() != VideoFrameType::THREE_D_LEFT && v->frame_type() != VideoFrameType::THREE_D_RIGHT;
+ };
+
for (auto i = _pieces.begin(); i != _pieces.end(); ++i) {
- if (auto video = (*i)->content->video) {
- if (video->use() && video->frame_type() != VideoFrameType::THREE_D_LEFT && video->frame_type() != VideoFrameType::THREE_D_RIGHT) {
- /* Look for content later in the content list with in-use video that overlaps this */
- auto period = DCPTimePeriod((*i)->content->position(), (*i)->content->end(_film));
- auto j = i;
- ++j;
- for (; j != _pieces.end(); ++j) {
- if ((*j)->content->video && (*j)->content->video->use()) {
- (*i)->ignore_video = DCPTimePeriod((*j)->content->position(), (*j)->content->end(_film)).overlap(period);
- }
+ if (ignore_overlap((*i)->content->video)) {
+ /* Look for content later in the content list with in-use video that overlaps this */
+ auto const period = DCPTimePeriod((*i)->content->position(), (*i)->content->end(_film));
+ for (auto j = std::next(i); j != _pieces.end(); ++j) {
+ if ((*j)->content->video && ignore_overlap((*j)->content->video)) {
+ (*i)->ignore_video = DCPTimePeriod((*j)->content->position(), (*j)->content->end(_film)).overlap(period);
}
}
}
{
Change (ChangeType::PENDING, PlayerProperty::VIDEO_CONTAINER_SIZE, false);
- {
- boost::mutex::scoped_lock lm (_mutex);
-
- if (s == _video_container_size) {
- lm.unlock ();
- Change (ChangeType::CANCELLED, PlayerProperty::VIDEO_CONTAINER_SIZE, false);
- return;
- }
+ if (s == _video_container_size) {
+ Change(ChangeType::CANCELLED, PlayerProperty::VIDEO_CONTAINER_SIZE, false);
+ return;
+ }
- _video_container_size = s;
+ _video_container_size = s;
+ {
+ boost::mutex::scoped_lock lm (_mutex);
_black_image = make_shared<Image>(AV_PIX_FMT_RGB24, _video_container_size, Image::Alignment::PADDED);
_black_image->make_black ();
}
}
-vector<FontData>
+vector<shared_ptr<Font>>
Player::get_subtitle_fonts ()
{
boost::mutex::scoped_lock lm (_mutex);
- vector<FontData> fonts;
- for (auto i: _pieces) {
- /* XXX: things may go wrong if there are duplicate font IDs
- with different font files.
- */
- auto f = i->decoder->fonts ();
- copy (f.begin(), f.end(), back_inserter(fonts));
+ vector<shared_ptr<Font>> fonts;
+ for (auto piece: _pieces) {
+ for (auto text: piece->content->text) {
+ auto text_fonts = text->fonts();
+ copy (text_fonts.begin(), text_fonts.end(), back_inserter(fonts));
+ }
}
return fonts;
void
Player::set_ignore_video ()
{
-	boost::mutex::scoped_lock lm (_mutex);
+	/* NOTE(review): removing the lock here is only safe if _ignore_video is
+	   now std::atomic — the .load() calls added on _video_container_size
+	   suggest an atomics refactor, but confirm for this member too.
+	*/
	_ignore_video = true;
-	setup_pieces_unlocked ();
+	setup_pieces();
}
void
Player::set_ignore_audio ()
{
- boost::mutex::scoped_lock lm (_mutex);
_ignore_audio = true;
- setup_pieces_unlocked ();
+ setup_pieces();
}
void
Player::set_ignore_text ()
{
- boost::mutex::scoped_lock lm (_mutex);
_ignore_text = true;
- setup_pieces_unlocked ();
+ setup_pieces();
}
void
Player::set_always_burn_open_subtitles ()
{
- boost::mutex::scoped_lock lm (_mutex);
_always_burn_open_subtitles = true;
}
void
Player::set_fast ()
{
- boost::mutex::scoped_lock lm (_mutex);
_fast = true;
- setup_pieces_unlocked ();
+ setup_pieces();
}
void
Player::set_play_referenced ()
{
- boost::mutex::scoped_lock lm (_mutex);
_play_referenced = true;
- setup_pieces_unlocked ();
+ setup_pieces();
}
Frame const reel_trim_start = min(reel_duration, max(int64_t(0), trim_start - offset_from_start));
Frame const reel_trim_end = min(reel_duration, max(int64_t(0), reel_duration - (offset_from_end - trim_end)));
- auto const from = max(DCPTime(), content->position() + DCPTime::from_frames(offset_from_start, frame_rate) - DCPTime::from_frames(trim_start, frame_rate));
+ auto const from = content->position() + std::max(DCPTime(), DCPTime::from_frames(offset_from_start - trim_start, frame_rate));
if (dcp->reference_video()) {
maybe_add_asset (reel_assets, reel->main_picture(), reel_trim_start, reel_trim_end, from, frame_rate);
}
);
if (latest_last_push_end != _stream_states.end()) {
- LOG_DEBUG_PLAYER("Leading stream is in %1 at %2", latest_last_push_end->second.piece->content->path(0), to_string(latest_last_push_end->second.last_push_end));
+ LOG_DEBUG_PLAYER("Leading audio stream is in %1 at %2", latest_last_push_end->second.piece->content->path(0), to_string(latest_last_push_end->second.last_push_end));
}
/* Now make a list of those streams that are less than ignore_streams_behind behind the leader */
}
if (done) {
- _shuffler->flush ();
+ if (_shuffler) {
+ _shuffler->flush ();
+ }
for (auto const& i: _delay) {
do_emit_video(i.first, i.second);
}
}
/* i.image will already have been scaled to fit _video_container_size */
- dcp::Size scaled_size (i.rectangle.width * _video_container_size.width, i.rectangle.height * _video_container_size.height);
+ dcp::Size scaled_size (i.rectangle.width * _video_container_size.load().width, i.rectangle.height * _video_container_size.load().height);
captions.push_back (
PositionImage (
i.image,
Position<int> (
- lrint(_video_container_size.width * i.rectangle.x),
- lrint(_video_container_size.height * i.rectangle.y)
+ lrint(_video_container_size.load().width * i.rectangle.x),
+ lrint(_video_container_size.load().height * i.rectangle.y)
)
)
);
/* String subtitles (rendered to an image) */
if (!j.string.empty()) {
- auto s = render_text (j.string, j.fonts, _video_container_size, time, vfr);
+ auto s = render_text(j.string, _video_container_size, time, vfr);
copy (s.begin(), s.end(), back_inserter (captions));
}
}
void
-Player::video (weak_ptr<Piece> wp, ContentVideo video)
+Player::video (weak_ptr<Piece> weak_piece, ContentVideo video)
{
if (_suspended) {
return;
}
- auto piece = wp.lock ();
+ auto piece = weak_piece.lock ();
if (!piece) {
return;
}
/* Fill if we have more than half a frame to do */
if ((fill_to - fill_from) > one_video_frame() / 2) {
- auto last = _last_video.find (wp);
+ auto last = _last_video.find (weak_piece);
if (_film->three_d()) {
auto fill_to_eyes = video.eyes;
if (fill_to_eyes == Eyes::BOTH) {
auto const content_video = piece->content->video;
- _last_video[wp] = std::make_shared<PlayerVideo>(
+ _last_video[weak_piece] = std::make_shared<PlayerVideo>(
video.image,
content_video->actual_crop(),
content_video->fade (_film, video.frame),
DCPTime t = time;
for (int i = 0; i < frc.repeat; ++i) {
if (t < piece->content->end(_film)) {
- emit_video (_last_video[wp], t);
+ emit_video (_last_video[weak_piece], t);
}
t += one_video_frame ();
}
void
-Player::audio (weak_ptr<Piece> wp, AudioStreamPtr stream, ContentAudio content_audio)
+Player::audio (weak_ptr<Piece> weak_piece, AudioStreamPtr stream, ContentAudio content_audio)
{
if (_suspended) {
return;
DCPOMATIC_ASSERT (content_audio.audio->frames() > 0);
- auto piece = wp.lock ();
+ auto piece = weak_piece.lock ();
if (!piece) {
return;
}
DCPOMATIC_ASSERT (content_audio.audio->frames() > 0);
- /* Gain */
-
- if (content->gain() != 0) {
- auto gain = make_shared<AudioBuffers>(content_audio.audio);
- gain->apply_gain (content->gain());
- content_audio.audio = gain;
+ /* Gain and fade */
+
+ auto const fade_coeffs = content->fade (stream, content_audio.frame, content_audio.audio->frames(), rfr);
+ if (content->gain() != 0 || !fade_coeffs.empty()) {
+ auto gain_buffers = make_shared<AudioBuffers>(content_audio.audio);
+ if (!fade_coeffs.empty()) {
+ /* Apply both fade and gain */
+ DCPOMATIC_ASSERT (fade_coeffs.size() == static_cast<size_t>(gain_buffers->frames()));
+ auto const channels = gain_buffers->channels();
+ auto const frames = fade_coeffs.size();
+ auto data = gain_buffers->data();
+ auto const gain = db_to_linear (content->gain());
+ for (auto channel = 0; channel < channels; ++channel) {
+ for (auto frame = 0U; frame < frames; ++frame) {
+ data[channel][frame] *= gain * fade_coeffs[frame];
+ }
+ }
+ } else {
+ /* Just apply gain */
+ gain_buffers->apply_gain (content->gain());
+ }
+ content_audio.audio = gain_buffers;
}
/* Remap */
void
-Player::bitmap_text_start (weak_ptr<Piece> wp, weak_ptr<const TextContent> wc, ContentBitmapText subtitle)
+Player::bitmap_text_start (weak_ptr<Piece> weak_piece, weak_ptr<const TextContent> weak_content, ContentBitmapText subtitle)
{
	if (_suspended) {
		return;
	}
-	auto piece = wp.lock ();
-	auto text = wc.lock ();
-	if (!piece || !text) {
+	auto piece = weak_piece.lock ();
+	auto content = weak_content.lock ();
+	if (!piece || !content) {
		return;
	}
-	/* Apply content's subtitle offsets */
-	subtitle.sub.rectangle.x += text->x_offset ();
-	subtitle.sub.rectangle.y += text->y_offset ();
+	/* Collect every bitmap subtitle in this batch into one PlayerText */
+	PlayerText ps;
+	for (auto& sub: subtitle.subs)
+	{
+		/* Apply content's subtitle offsets */
+		sub.rectangle.x += content->x_offset ();
+		sub.rectangle.y += content->y_offset ();
-	/* Apply a corrective translation to keep the subtitle centred after the scale that is coming up */
-	subtitle.sub.rectangle.x -= subtitle.sub.rectangle.width * ((text->x_scale() - 1) / 2);
-	subtitle.sub.rectangle.y -= subtitle.sub.rectangle.height * ((text->y_scale() - 1) / 2);
+		/* Apply a corrective translation to keep the subtitle centred after the scale that is coming up */
+		sub.rectangle.x -= sub.rectangle.width * ((content->x_scale() - 1) / 2);
+		sub.rectangle.y -= sub.rectangle.height * ((content->y_scale() - 1) / 2);
-	/* Apply content's subtitle scale */
-	subtitle.sub.rectangle.width *= text->x_scale ();
-	subtitle.sub.rectangle.height *= text->y_scale ();
+		/* Apply content's subtitle scale */
+		sub.rectangle.width *= content->x_scale ();
+		sub.rectangle.height *= content->y_scale ();
-	PlayerText ps;
-	auto image = subtitle.sub.image;
+		auto image = sub.image;
-	/* We will scale the subtitle up to fit _video_container_size */
-	int const width = subtitle.sub.rectangle.width * _video_container_size.width;
-	int const height = subtitle.sub.rectangle.height * _video_container_size.height;
-	if (width == 0 || height == 0) {
-		return;
-	}
+		/* We will scale the subtitle up to fit _video_container_size; take a
+		   single copy of the atomic so width and height come from the same value.
+		*/
+		auto const container = _video_container_size.load();
+		int const width = sub.rectangle.width * container.width;
+		int const height = sub.rectangle.height * container.height;
+		if (width == 0 || height == 0) {
+			/* Degenerate rectangle: skip just this subtitle.  Returning here
+			   (as the pre-loop code did for its single subtitle) would discard
+			   subtitles already pushed into ps and skip add_from() entirely.
+			*/
+			continue;
+		}
-	dcp::Size scaled_size (width, height);
-	ps.bitmap.push_back (BitmapText(image->scale(scaled_size, dcp::YUVToRGB::REC601, image->pixel_format(), Image::Alignment::PADDED, _fast), subtitle.sub.rectangle));
-	DCPTime from (content_time_to_dcp (piece, subtitle.from()));
+		dcp::Size scaled_size (width, height);
+		ps.bitmap.push_back (BitmapText(image->scale(scaled_size, dcp::YUVToRGB::REC601, image->pixel_format(), Image::Alignment::PADDED, _fast), sub.rectangle));
+	}
-	_active_texts[static_cast<int>(text->type())].add_from (wc, ps, from);
+	DCPTime from(content_time_to_dcp(piece, subtitle.from()));
+	_active_texts[static_cast<int>(content->type())].add_from(weak_content, ps, from);
}
void
-Player::plain_text_start (weak_ptr<Piece> wp, weak_ptr<const TextContent> wc, ContentStringText subtitle)
+Player::plain_text_start (weak_ptr<Piece> weak_piece, weak_ptr<const TextContent> weak_content, ContentStringText subtitle)
{
if (_suspended) {
return;
}
- auto piece = wp.lock ();
- auto text = wc.lock ();
- if (!piece || !text) {
+ auto piece = weak_piece.lock ();
+ auto content = weak_content.lock ();
+ if (!piece || !content) {
return;
}
}
for (auto s: subtitle.subs) {
- s.set_h_position (s.h_position() + text->x_offset ());
- s.set_v_position (s.v_position() + text->y_offset ());
- float const xs = text->x_scale();
- float const ys = text->y_scale();
+ s.set_h_position (s.h_position() + content->x_offset());
+ s.set_v_position (s.v_position() + content->y_offset());
+ float const xs = content->x_scale();
+ float const ys = content->y_scale();
float size = s.size();
/* Adjust size to express the common part of the scaling;
}
s.set_in (dcp::Time(from.seconds(), 1000));
- ps.string.push_back (StringText (s, text->outline_width()));
- ps.add_fonts (text->fonts ());
+ ps.string.push_back (s);
}
- _active_texts[static_cast<int>(text->type())].add_from (wc, ps, from);
+ _active_texts[static_cast<int>(content->type())].add_from(weak_content, ps, from);
}
void
-Player::subtitle_stop (weak_ptr<Piece> wp, weak_ptr<const TextContent> wc, ContentTime to)
+Player::subtitle_stop (weak_ptr<Piece> weak_piece, weak_ptr<const TextContent> weak_content, ContentTime to)
{
if (_suspended) {
return;
}
- auto text = wc.lock ();
- if (!text) {
+ auto content = weak_content.lock ();
+ if (!content) {
return;
}
- if (!_active_texts[static_cast<int>(text->type())].have(wc)) {
+ if (!_active_texts[static_cast<int>(content->type())].have(weak_content)) {
return;
}
- shared_ptr<Piece> piece = wp.lock ();
+ auto piece = weak_piece.lock ();
if (!piece) {
return;
}
return;
}
- auto from = _active_texts[static_cast<int>(text->type())].add_to (wc, dcp_to);
+ auto from = _active_texts[static_cast<int>(content->type())].add_to(weak_content, dcp_to);
- bool const always = (text->type() == TextType::OPEN_SUBTITLE && _always_burn_open_subtitles);
- if (text->use() && !always && !text->burn()) {
- Text (from.first, text->type(), text->dcp_track().get_value_or(DCPTextTrack()), DCPTimePeriod (from.second, dcp_to));
+ bool const always = (content->type() == TextType::OPEN_SUBTITLE && _always_burn_open_subtitles);
+ if (content->use() && !always && !content->burn()) {
+ Text (from.first, content->type(), content->dcp_track().get_value_or(DCPTextTrack()), DCPTimePeriod(from.second, dcp_to));
}
}
{
Change (ChangeType::PENDING, PlayerProperty::DCP_DECODE_REDUCTION, false);
- {
- boost::mutex::scoped_lock lm (_mutex);
-
- if (reduction == _dcp_decode_reduction) {
- lm.unlock ();
- Change (ChangeType::CANCELLED, PlayerProperty::DCP_DECODE_REDUCTION, false);
- return;
- }
-
- _dcp_decode_reduction = reduction;
- setup_pieces_unlocked ();
+ if (reduction == _dcp_decode_reduction.load()) {
+ Change(ChangeType::CANCELLED, PlayerProperty::DCP_DECODE_REDUCTION, false);
+ return;
}
+ _dcp_decode_reduction = reduction;
+ setup_pieces();
+
Change (ChangeType::DONE, PlayerProperty::DCP_DECODE_REDUCTION, false);
}
optional<DCPTime>
-Player::content_time_to_dcp (shared_ptr<const Content> content, ContentTime t)
+Player::content_time_to_dcp (shared_ptr<const Content> content, ContentTime t) const
{
boost::mutex::scoped_lock lm (_mutex);
}
+/** Convert a DCP time to a time within a particular piece of content.
+ *  @param content Content to convert for.
+ *  @param t DCP time.
+ *  @return Corresponding ContentTime, or an empty optional if this content is
+ *  not currently in our pieces list (e.g. while the playlist is being changed).
+ */
+optional<ContentTime>
+Player::dcp_to_content_time (shared_ptr<const Content> content, DCPTime t) const
+{
+	boost::mutex::scoped_lock lm (_mutex);
+
+	for (auto i: _pieces) {
+		if (i->content == content) {
+			return dcp_to_content_time (i, t);
+		}
+	}
+
+	/* We couldn't find this content; perhaps things are being changed over */
+	return {};
+}
+
+
shared_ptr<const Playlist>
Player::playlist () const
{