shift;
build/test/unit-tests --catch_system_errors=no $*
else
-# build/test/unit-tests --catch_system_errors=no --log_level=test_suite $*
- build/test/unit-tests --catch_system_errors=no $*
+ build/test/unit-tests --catch_system_errors=no --log_level=test_suite $*
+# build/test/unit-tests --catch_system_errors=no $*
fi
-/** @param f Film that we are encoding */
+/** @param film Film that we are encoding
+ *  @param writer Writer that we are using
+ */
Encoder::Encoder (shared_ptr<const Film> film, shared_ptr<Writer> writer)
: _film (film)
- , _position (0)
, _writer (writer)
{
servers_list_changed ();
return _history_size / (seconds (now) - seconds (_time_history.back ()));
}
-/** @return Number of video frames that have been sent out */
+/** @return Number of video frames that have been queued for encoding,
+ *  or 0 if nothing has been queued yet.
+ */
int
-Encoder::video_frames_out () const
+Encoder::video_frames_enqueued () const
{
-	boost::mutex::scoped_lock (_state_mutex);
-	return _position;
+	/* Before the first call to encode() there is no last video, so
+	   dereferencing _last_player_video here would be undefined behaviour;
+	   callers poll this while the job is running, so guard against it. */
+	if (!_last_player_video) {
+		return 0;
+	}
+	return _last_player_video->time().frames_floor (_film->video_frame_rate ());
}
/** Should be called when a frame has been encoded successfully.
* for this DCP frame.
*/
void
-Encoder::encode (list<shared_ptr<PlayerVideo> > pv)
-{
- BOOST_FOREACH (shared_ptr<PlayerVideo> i, pv) {
- if (!_film->three_d()) {
- /* 2D DCP */
- if (i->eyes() == EYES_RIGHT) {
- /* Discard right-eye images */
- continue;
- } else if (i->eyes() == EYES_LEFT) {
- /* Use left-eye images for both eyes */
- i->set_eyes (EYES_BOTH);
- }
- }
-
- enqueue (i);
- }
- ++_position;
-}
-
-void
-Encoder::enqueue (shared_ptr<PlayerVideo> pv)
+Encoder::encode (shared_ptr<PlayerVideo> pv)
{
_waker.nudge ();
*/
rethrow ();
- if (_writer->can_fake_write (_position)) {
+ Frame const position = pv->time().frames_floor(_film->video_frame_rate());
+
+ if (_writer->can_fake_write (position)) {
/* We can fake-write this frame */
- _writer->fake_write (_position, pv->eyes ());
+ _writer->fake_write (position, pv->eyes ());
frame_done ();
} else if (pv->has_j2k ()) {
/* This frame already has JPEG2000 data, so just write it */
- _writer->write (pv->j2k(), _position, pv->eyes ());
- } else if (_last_player_video && _writer->can_repeat(_position) && pv->same (_last_player_video)) {
- _writer->repeat (_position, pv->eyes ());
+ _writer->write (pv->j2k(), position, pv->eyes ());
+ } else if (_last_player_video && _writer->can_repeat(position) && pv->same (_last_player_video)) {
+ _writer->repeat (position, pv->eyes ());
} else {
/* Queue this new frame for encoding */
LOG_TIMING ("add-frame-to-queue queue=%1", _queue.size ());
_queue.push_back (shared_ptr<DCPVideo> (
new DCPVideo (
pv,
- _position,
+ position,
_film->video_frame_rate(),
_film->j2k_bandwidth(),
_film->resolution(),
/** Called to indicate that a processing run is about to begin */
void begin ();
- /** Called to pass in zero or more bits of video to be encoded
- * as the next DCP frame.
- */
- void encode (std::list<boost::shared_ptr<PlayerVideo> > f);
+ /** Called to pass a bit of video to be encoded as the next DCP frame */
+ void encode (boost::shared_ptr<PlayerVideo> f);
/** Called when a processing run has finished */
void end ();
float current_encoding_rate () const;
- int video_frames_out () const;
+ int video_frames_enqueued () const;
private:
- void enqueue (boost::shared_ptr<PlayerVideo> f);
void frame_done ();
void encoder_thread (boost::optional<EncodeServerDescription>);
/** Film that we are encoding */
boost::shared_ptr<const Film> _film;
- /** Mutex for _time_history and _video_frames_enqueued */
+ /** Mutex for _time_history */
mutable boost::mutex _state_mutex;
/** List of the times of completion of the last _history_size frames;
first is the most recently completed.
std::list<struct timeval> _time_history;
/** Number of frames that we should keep history for */
static int const _history_size;
- /** Current DCP frame index */
- Frame _position;
/** Mutex for _threads */
mutable boost::mutex _threads_mutex;
shared_ptr<PlayerVideo> (
new PlayerVideo (
i->image,
- content_video_to_dcp (piece, i->frame.index()),
+ time,
piece->content->video->crop (),
piece->content->video->fade (i->frame.index()),
image_size,
float fps = 0;
if (finish.tv_sec != start.tv_sec) {
- fps = _transcoder->video_frames_out() / (finish.tv_sec - start.tv_sec);
+ fps = _transcoder->video_frames_enqueued() / (finish.tv_sec - start.tv_sec);
}
LOG_GENERAL (N_("Transcode job completed successfully: %1 fps"), fps);
if (!finished () && !_transcoder->finishing ()) {
/// TRANSLATORS: fps here is an abbreviation for frames per second
- s << "; " << _transcoder->video_frames_out() << "/"
+ s << "; " << _transcoder->video_frames_enqueued() << "/"
<< _film->length().frames_round (_film->video_frame_rate ()) << " " << _("frames") << "; "
<< fixed << setprecision (1) << fps << " " << _("fps");
}
}
/* Compute approximate proposed length here, as it's only here that we need it */
- return (_film->length().frames_round (_film->video_frame_rate ()) - t->video_frames_out()) / fps;
+ return (_film->length().frames_round (_film->video_frame_rate ()) - t->video_frames_enqueued()) / fps;
}
#include "compose.hpp"
#include "referenced_reel_asset.h"
#include "subtitle_content.h"
+#include "player_video.h"
#include <boost/signals2.hpp>
#include <boost/foreach.hpp>
#include <iostream>
}
for (DCPTime t; t < length; t += frame) {
- _encoder->encode (_player->get_video (t, true));
+
+ BOOST_FOREACH (shared_ptr<PlayerVideo> i, _player->get_video (t, true)) {
+ if (!_film->three_d()) {
+ /* 2D DCP */
+ if (i->eyes() == EYES_RIGHT) {
+ /* Discard right-eye images */
+ continue;
+ } else if (i->eyes() == EYES_LEFT) {
+ /* Use left-eye images for both eyes */
+ i->set_eyes (EYES_BOTH);
+ }
+ }
+
+ _encoder->encode (i);
+ }
+
_writer->write (_player->get_audio (t, frame, true));
if (non_burnt_subtitles) {
}
+/** @return Number of video frames that have been queued for encoding so far */
int
-Transcoder::video_frames_out () const
+Transcoder::video_frames_enqueued () const
{
-	return _encoder->video_frames_out ();
+	return _encoder->video_frames_enqueued ();
}
void go ();
float current_encoding_rate () const;
- int video_frames_out () const;
+ int video_frames_enqueued () const;
/** @return true if we are in the process of calling Encoder::process_end */
bool finishing () const {