bool
Butler::should_run () const
{
- auto check_video_much_too_big = [this](VideoType type) {
+ for (auto type: _video.indices()) {
if (_video[type].size() >= MAXIMUM_VIDEO_READAHEAD * 10) {
/* This is way too big */
auto pos = _audio.peek();
}
- };
+ }
- check_video_much_too_big(VideoType::MAIN);
- check_video_much_too_big(VideoType::SIGN_LANGUAGE);
-
if (_audio.size() >= MAXIMUM_AUDIO_READAHEAD * 10) {
/* This is way too big */
auto pos = _audio.peek();
}
}
- auto check_video_too_big = [this](VideoType type) {
- if (_video[type].size() >= MAXIMUM_VIDEO_READAHEAD * 2) {
- LOG_WARNING ("Butler video buffers reached %1 frames (audio is %2)", _video[type].size(), _audio.size());
+ for (auto& video: _video) {
+ if (video.size() >= MAXIMUM_VIDEO_READAHEAD * 2) {
+ LOG_WARNING ("Butler video buffers reached %1 frames (audio is %2)", video.size(), _audio.size());
}
- };
+ }
- check_video_too_big(VideoType::MAIN);
- check_video_too_big(VideoType::SIGN_LANGUAGE);
-
if (_audio.size() >= MAXIMUM_AUDIO_READAHEAD * 2) {
LOG_WARNING ("Butler audio buffers reached %1 frames (main video is %2)", _audio.size(), _video[VideoType::MAIN].size());
}
_pending_seek_position = position;
_pending_seek_accurate = accurate;
- _video[VideoType::MAIN].clear();
- _video[VideoType::SIGN_LANGUAGE].clear();
+ for (auto& video: _video) {
+ video.clear();
+ }
_audio.clear ();
_closed_caption.clear ();
if (type == ChangeType::DONE) {
auto film = _film.lock();
if (film) {
- _video[VideoType::MAIN].reset_metadata(film, _player.video_container_size(VideoType::MAIN));
- _video[VideoType::SIGN_LANGUAGE].reset_metadata(film, _player.video_container_size(VideoType::SIGN_LANGUAGE));
+ for (auto type: _video.indices()) {
+ _video[type].reset_metadata(film, _player.video_container_size(type));
+ }
}
}
return;
Player::construct ()
{
connect();
- set_video_container_size(VideoType::MAIN, full_size(VideoType::MAIN));
- set_video_container_size(VideoType::SIGN_LANGUAGE, full_size(VideoType::SIGN_LANGUAGE));
+ for (auto type: video_types()) {
+ set_video_container_size(type, full_size(type));
+ }
_black_image[VideoType::SIGN_LANGUAGE] = make_shared<Image>(AV_PIX_FMT_RGB24, dcp::Size(SIGN_LANGUAGE_WIDTH, SIGN_LANGUAGE_HEIGHT), Image::Alignment::PADDED);
}
_silent = Empty(film, playlist(), bind(&have_audio, _1), _playback_length);
- _next_video_time[VideoType::MAIN] = _next_video_time[VideoType::SIGN_LANGUAGE] = boost::none;
+ for (auto& next: _next_video_time) {
+ next = boost::none;
+ }
_next_audio_time = boost::none;
}
if (_playback_length.load() == DCPTime() || !film) {
/* Special; just give one black frame */
- use_video(black_player_video_frame(VideoType::MAIN, Eyes::BOTH), DCPTime(), one_video_frame());
- use_video(black_player_video_frame(VideoType::SIGN_LANGUAGE, Eyes::BOTH), DCPTime(), one_video_frame());
+ for (auto type: video_types()) {
+ use_video(black_player_video_frame(type, Eyes::BOTH), DCPTime(), one_video_frame());
+ }
return true;
}
which = CONTENT;
}
- auto check_black = [this, &earliest_time, &which, &black_type](VideoType type) {
+ for (auto type: _black.indices()) {
if (!_black[type].done() && !_ignore_video && (!earliest_time || _black[type].position() < *earliest_time)) {
earliest_time = _black[type].position();
which = BLACK;
black_type = type;
}
- };
-
- check_black(VideoType::MAIN);
- check_black(VideoType::SIGN_LANGUAGE);
+ }
if (!_silent.done() && !_ignore_audio && (!earliest_time || _silent.position() < *earliest_time)) {
earliest_time = _silent.position ();
}
}
+ for (auto& next: _next_video_time) {
+ if (accurate) {
+ next = time;
+ } else {
+ next = boost::none;
+ }
+ }
+
+ for (auto& black: _black) {
+ black.set_position(time);
+ }
+
if (accurate) {
- _next_video_time[VideoType::MAIN] = _next_video_time[VideoType::SIGN_LANGUAGE] = time;
_next_audio_time = time;
} else {
- _next_video_time[VideoType::MAIN] = _next_video_time[VideoType::SIGN_LANGUAGE] = boost::none;
_next_audio_time = boost::none;
}
- _black[VideoType::MAIN].set_position(time);
- _black[VideoType::SIGN_LANGUAGE].set_position(time);
_silent.set_position (time);
- _last_video[VideoType::MAIN][Eyes::LEFT] = _last_video[VideoType::MAIN][Eyes::RIGHT] = _last_video[VideoType::MAIN][Eyes::BOTH] = {};
- _last_video[VideoType::SIGN_LANGUAGE][Eyes::LEFT] = _last_video[VideoType::SIGN_LANGUAGE][Eyes::RIGHT] = _last_video[VideoType::SIGN_LANGUAGE][Eyes::BOTH] = {};
+ for (auto& last: _last_video) {
+ last[Eyes::LEFT] = last[Eyes::RIGHT] = last[Eyes::BOTH] = {};
+ }
for (auto& state: _stream_states) {
state.second.last_push_end = boost::none;
using std::string;
+using std::vector;
string
DCPOMATIC_ASSERT(false);
}
+
+vector<VideoType>
+video_types()
+{
+ return { VideoType::MAIN, VideoType::SIGN_LANGUAGE };
+}
+
#include <string>
+#include <vector>
enum class VideoType
std::string video_type_to_string(VideoType type);
+std::vector<VideoType> video_types();
+
+
#endif
_video_view = std::make_shared<SimpleVideoView>(this, p);
#endif
- _video_view[VideoType::MAIN]->Sized.connect(boost::bind(&FilmViewer::video_view_sized, this, VideoType::MAIN));
- _video_view[VideoType::SIGN_LANGUAGE]->Sized.connect(boost::bind(&FilmViewer::video_view_sized, this, VideoType::SIGN_LANGUAGE));
+ for (auto type: _video_view.indices()) {
+ _video_view[type]->Sized.connect(boost::bind(&FilmViewer::video_view_sized, this, type));
+ }
_video_view[VideoType::MAIN]->TooManyDropped.connect(boost::bind(boost::ref(TooManyDropped)));
set_film (shared_ptr<Film>());
_film = film;
- _video_view[VideoType::MAIN]->clear();
- _video_view[VideoType::SIGN_LANGUAGE]->clear();
+ for (auto& view: _video_view) {
+ view->clear();
+ }
_closed_captions_dialog->clear ();
destroy_butler();
if (!_film) {
_player = boost::none;
resume();
- _video_view[VideoType::MAIN]->update();
- _video_view[VideoType::SIGN_LANGUAGE]->update();
+ for (auto& view: _video_view) {
+ view->update();
+ }
return;
}
--_suspended;
if (_playing && !_suspended) {
start_audio_stream_if_open ();
- _video_view[VideoType::MAIN]->start();
- _video_view[VideoType::SIGN_LANGUAGE]->start();
+ for (auto& view: _video_view) {
+ view->start();
+ }
}
}
* happens we want it to come after the Started signal, so do that first.
*/
Started ();
- _video_view[VideoType::MAIN]->start();
- _video_view[VideoType::SIGN_LANGUAGE]->start();
+ for (auto& view: _video_view) {
+ view->start();
+ }
}
}
_playing = false;
- _video_view[VideoType::MAIN]->stop();
- _video_view[VideoType::SIGN_LANGUAGE]->stop();
+ for (auto& view: _video_view) {
+ view->stop();
+ }
+
Stopped ();
- _video_view[VideoType::MAIN]->rethrow();
- _video_view[VideoType::SIGN_LANGUAGE]->rethrow();
+ for (auto& view: _video_view) {
+ view->rethrow();
+ }
return true;
}
if (p == Film::Property::AUDIO_CHANNELS) {
destroy_and_maybe_create_butler();
} else if (p == Film::Property::VIDEO_FRAME_RATE) {
- _video_view[VideoType::MAIN]->set_video_frame_rate(_film->video_frame_rate());
- _video_view[VideoType::SIGN_LANGUAGE]->set_video_frame_rate(_film->video_frame_rate());
+ for (auto& view: _video_view) {
+ view->set_video_frame_rate(_film->video_frame_rate());
+ }
} else if (p == Film::Property::THREE_D) {
_video_view[VideoType::MAIN]->set_three_d(_film->three_d());
} else if (p == Film::Property::CONTENT) {
void
FilmViewer::film_length_change ()
{
- _video_view[VideoType::MAIN]->set_length(_film->length());
- _video_view[VideoType::SIGN_LANGUAGE]->set_length(_film->length());
+ for (auto& view: _video_view) {
+ view->set_length(_film->length());
+ }
}