#include <iostream>
#include <iomanip>
-using std::string;
-using std::pair;
-using std::min;
-using std::max;
+
+using std::bad_alloc;
using std::cout;
+using std::dynamic_pointer_cast;
+using std::exception;
using std::list;
-using std::bad_alloc;
using std::make_pair;
-using std::exception;
+using std::make_shared;
+using std::max;
+using std::min;
+using std::pair;
using std::shared_ptr;
-using std::dynamic_pointer_cast;
+using std::string;
using std::vector;
using std::weak_ptr;
using boost::optional;
using dcp::Size;
using namespace dcpomatic;
+
static
int
rtaudio_callback (void* out, void *, unsigned int frames, double, RtAudioStreamStatus, void* data)
return reinterpret_cast<FilmViewer*>(data)->audio_callback (out, frames);
}
+
FilmViewer::FilmViewer (wxWindow* p)
- : _coalesce_player_changes (false)
- , _audio (DCPOMATIC_RTAUDIO_API)
- , _audio_channels (0)
- , _audio_block_size (1024)
- , _playing (false)
- , _suspended (0)
- , _latency_history_count (0)
+ : _audio (DCPOMATIC_RTAUDIO_API)
, _closed_captions_dialog (new ClosedCaptionsDialog(p, this))
- , _outline_content (false)
- , _pad_black (false)
- , _idle_get (false)
{
switch (Config::instance()->video_view_type()) {
case Config::VIDEO_VIEW_OPENGL:
- _video_view = new GLVideoView (this, p);
+ _video_view = std::make_shared<GLVideoView>(this, p);
break;
case Config::VIDEO_VIEW_SIMPLE:
- _video_view = new SimpleVideoView (this, p);
+ _video_view = std::make_shared<SimpleVideoView>(this, p);
break;
}
_video_view->Sized.connect (boost::bind(&FilmViewer::video_view_sized, this));
- _video_view->TooManyDropped.connect (boost::bind(&FilmViewer::too_many_frames_dropped, this));
+ _video_view->TooManyDropped.connect (boost::bind(boost::ref(TooManyDropped)));
- set_film (shared_ptr<Film> ());
+ set_film (shared_ptr<Film>());
- _config_changed_connection = Config::instance()->Changed.connect (bind (&FilmViewer::config_changed, this, _1));
+ _config_changed_connection = Config::instance()->Changed.connect(bind(&FilmViewer::config_changed, this, _1));
config_changed (Config::SOUND_OUTPUT);
}
+
/** Destructor: stop any ongoing playback before the viewer is destroyed */
FilmViewer::~FilmViewer ()
{
	stop ();
}
+
/** Ask for ::get() to be called next time we are idle */
void
FilmViewer::request_idle_display_next_frame ()
signal_manager->when_idle (boost::bind(&FilmViewer::idle_handler, this));
}
+
void
FilmViewer::idle_handler ()
{
}
}
+
void
FilmViewer::set_film (shared_ptr<Film> film)
{
}
try {
- _player.reset (new Player(_film));
+ _player = make_shared<Player>(_film);
_player->set_fast ();
if (_dcp_decode_reduction) {
_player->set_dcp_decode_reduction (_dcp_decode_reduction);
slow_refresh ();
}
+
void
FilmViewer::recreate_butler ()
{
return;
}
- _butler.reset(
- new Butler(
- _film,
- _player,
- Config::instance()->audio_mapping(_audio_channels),
- _audio_channels,
- bind(&PlayerVideo::force, _1, AV_PIX_FMT_RGB24),
- VideoRange::FULL,
- false,
- true
- )
+ _butler = std::make_shared<Butler>(
+ _film,
+ _player,
+ Config::instance()->audio_mapping(_audio_channels),
+ _audio_channels,
+ bind(&PlayerVideo::force, _1, AV_PIX_FMT_RGB24),
+ VideoRange::FULL,
+ false,
+ true,
+ dynamic_pointer_cast<GLVideoView>(_video_view) && _optimise_for_j2k
);
if (!Config::instance()->sound() && !_audio.isStreamOpen()) {
resume ();
}
+
void
FilmViewer::set_outline_content (bool o)
{
void
-FilmViewer::set_outline_subtitles (optional<dcpomatic::Rect<double> > rect)
+FilmViewer::set_outline_subtitles (optional<dcpomatic::Rect<double>> rect)
{
_outline_subtitles = rect;
_video_view->update ();
slow_refresh ();
}
+
void
FilmViewer::video_view_sized ()
{
}
}
+
void
FilmViewer::calculate_sizes ()
{
return;
}
- Ratio const * container = _film->container ();
+ auto const container = _film->container ();
- float const view_ratio = float(_video_view->get()->GetSize().x) / _video_view->get()->GetSize().y;
- float const film_ratio = container ? container->ratio () : 1.78;
+ auto const view_ratio = float(_video_view->get()->GetSize().x) / _video_view->get()->GetSize().y;
+ auto const film_ratio = container ? container->ratio () : 1.78;
+ dcp::Size out_size;
if (view_ratio < film_ratio) {
		/* panel is less widescreen than the film; clamp width */
- _out_size.width = _video_view->get()->GetSize().x;
- _out_size.height = lrintf (_out_size.width / film_ratio);
+ out_size.width = _video_view->get()->GetSize().x;
+ out_size.height = lrintf (out_size.width / film_ratio);
} else {
/* panel is more widescreen than the film; clamp height */
- _out_size.height = _video_view->get()->GetSize().y;
- _out_size.width = lrintf (_out_size.height * film_ratio);
+ out_size.height = _video_view->get()->GetSize().y;
+ out_size.width = lrintf (out_size.height * film_ratio);
}
/* Catch silly values */
- _out_size.width = max (64, _out_size.width);
- _out_size.height = max (64, _out_size.height);
+ out_size.width = max (64, out_size.width);
+ out_size.height = max (64, out_size.height);
- _player->set_video_container_size (_out_size);
+ _player->set_video_container_size (out_size);
}
+
void
FilmViewer::suspend ()
{
}
}
+
void
FilmViewer::resume ()
{
}
}
+
void
FilmViewer::start ()
{
return;
}
- optional<bool> v = PlaybackPermitted ();
+ auto v = PlaybackPermitted ();
if (v && !*v) {
/* Computer says no */
return;
*/
if (_audio.isStreamOpen()) {
_audio.setStreamTime (_video_view->position().seconds());
- _audio.startStream ();
+ try {
+ _audio.startStream ();
+ } catch (RtAudioError& e) {
+ _audio_channels = 0;
+ error_dialog (
+ _video_view->get(),
+ _("There was a problem starting audio playback. Please try another audio output device in Preferences."), std_to_wx(e.what())
+ );
+ }
}
_playing = true;
_video_view->start ();
}
+
bool
FilmViewer::stop ()
{
return true;
}
+
void
FilmViewer::player_change (ChangeType type, int property, bool frequent)
{
player_change ({property});
}
+
void
FilmViewer::player_change (vector<int> properties)
{
for (auto i: properties) {
if (
i == VideoContentProperty::CROP ||
- i == VideoContentProperty::SCALE ||
+ i == VideoContentProperty::CUSTOM_RATIO ||
+ i == VideoContentProperty::CUSTOM_SIZE ||
i == VideoContentProperty::FADE_IN ||
i == VideoContentProperty::FADE_OUT ||
i == VideoContentProperty::COLOUR_CONVERSION ||
}
}
+
void
FilmViewer::film_change (ChangeType type, Film::Property p)
{
}
}
+
/** Called when our film's length has changed; pass the new length on to the video view */
void
FilmViewer::film_length_change ()
{
	_video_view->set_length (_film->length());
}
+
/** Re-get the current frame slowly by seeking */
void
FilmViewer::slow_refresh ()
seek (_video_view->position(), true);
}
+
/** Try to re-get the current frame quickly by resetting the metadata
* in the PlayerVideo that we used last time.
* @return true if this was possible, false if not.
return _video_view->reset_metadata (_film, _player->video_container_size());
}
+
void
FilmViewer::seek (shared_ptr<Content> content, ContentTime t, bool accurate)
{
- optional<DCPTime> dt = _player->content_time_to_dcp (content, t);
+ auto dt = _player->content_time_to_dcp (content, t);
if (dt) {
seek (*dt, accurate);
}
}
+
void
FilmViewer::set_coalesce_player_changes (bool c)
{
}
}
+
void
FilmViewer::seek (DCPTime t, bool accurate)
{
return;
}
- if (t < DCPTime ()) {
+ if (t < DCPTime()) {
t = DCPTime ();
}
- if (t >= _film->length ()) {
+ if (t >= _film->length()) {
t = _film->length() - one_video_frame();
}
resume ();
}
+
void
FilmViewer::config_changed (Config::Property p)
{
}
}
+
DCPTime
FilmViewer::uncorrected_time () const
{
- if (_audio.isStreamRunning ()) {
+ if (_audio.isStreamRunning()) {
return DCPTime::from_seconds (const_cast<RtAudio*>(&_audio)->getStreamTime());
}
return _video_view->position();
}
+
optional<DCPTime>
FilmViewer::audio_time () const
{
if (!_audio.isStreamRunning()) {
- return optional<DCPTime>();
+ return {};
}
return DCPTime::from_seconds (const_cast<RtAudio*>(&_audio)->getStreamTime ()) -
DCPTime::from_frames (average_latency(), _film->audio_frame_rate());
}
+
/** @return the current playback time: the audio clock's time if it is available
 *  (see audio_time()), otherwise the video view's position.
 */
DCPTime
FilmViewer::time () const
{
	return audio_time().get_value_or(_video_view->position());
}
+
int
FilmViewer::audio_callback (void* out_p, unsigned int frames)
{
while (true) {
- optional<DCPTime> t = _butler->get_audio (reinterpret_cast<float*> (out_p), frames);
+ auto t = _butler->get_audio (reinterpret_cast<float*> (out_p), frames);
if (!t || DCPTime(uncorrected_time() - *t) < one_video_frame()) {
/* There was an underrun or this audio is on time; carry on */
break;
return 0;
}
+
Frame
FilmViewer::average_latency () const
{
return total / _latency_history.size();
}
+
void
FilmViewer::set_dcp_decode_reduction (optional<int> reduction)
{
}
}
+
/** @return the currently-set DCP decode reduction, if any */
optional<int>
FilmViewer::dcp_decode_reduction () const
{
	return _dcp_decode_reduction;
}
+
/** @return the duration of one video frame at the film's video frame rate,
 *  or at 24fps if there is no film.
 */
DCPTime
FilmViewer::one_video_frame () const
{
	return DCPTime::from_frames (1, _film ? _film->video_frame_rate() : 24);
}
+
/** Open a dialog box showing our film's closed captions */
void
FilmViewer::show_closed_captions ()
_closed_captions_dialog->Show();
}
+
/** Seek relative to the current position of the video view.
 *  @param by Amount to seek by, added to the current position.
 *  @param accurate Passed straight through to seek().
 */
void
FilmViewer::seek_by (DCPTime by, bool accurate)
{
	seek (_video_view->position() + by, accurate);
}
+
/** Set whether the image should be padded with black.
 *  Only records the flag in _pad_black here; its effect is presumably applied
 *  elsewhere — confirm with users of _pad_black.
 */
void
FilmViewer::set_pad_black (bool p)
{
	_pad_black = p;
}
+
/** Called when a player has finished the current film.
* May be called from a non-UI thread.
*/
emit (boost::bind(&FilmViewer::ui_finished, this));
}
+
/** Called by finished() in the UI thread */
void
FilmViewer::ui_finished ()
Finished ();
}
+
int
FilmViewer::dropped () const
{
void
-FilmViewer::too_many_frames_dropped ()
+FilmViewer::set_optimise_for_j2k (bool o)
{
- if (!Config::instance()->nagged(Config::NAG_TOO_MANY_DROPPED_FRAMES)) {
- stop ();
- }
-
- bool shown = NagDialog::maybe_nag (
- panel(),
- Config::NAG_TOO_MANY_DROPPED_FRAMES,
- _("The player is dropping a lot of frames, so playback may not be accurate.\n\n"
- "<b>This does not necessarily mean that the DCP you are playing is defective!</b>\n\n"
- "You may be able to improve player performance by:\n"
- "• choosing 'decode at half resolution' or 'decode at quarter resolution' from the View menu\n"
- "• using a more powerful computer.\n"
- )
- );
+ _optimise_for_j2k = o;
+ _video_view->set_optimise_for_j2k (o);
}
+