*/
+#include "dcp_decoder.h"
+#include "dcp_content.h"
+#include "j2k_image_proxy.h"
+#include "image.h"
+#include "config.h"
#include <dcp/dcp.h>
#include <dcp/cpl.h>
#include <dcp/reel.h>
#include <dcp/mono_picture_mxf.h>
#include <dcp/stereo_picture_mxf.h>
#include <dcp/reel_picture_asset.h>
+#include <dcp/reel_sound_asset.h>
#include <dcp/mono_picture_frame.h>
#include <dcp/stereo_picture_frame.h>
-#include "dcp_decoder.h"
-#include "dcp_content.h"
-#include "j2k_image_proxy.h"
-#include "image.h"
+#include <dcp/sound_frame.h>
using std::list;
using std::cout;
using boost::shared_ptr;
using boost::dynamic_pointer_cast;
-DCPDecoder::DCPDecoder (shared_ptr<const DCPContent> c, shared_ptr<Log> log)
+DCPDecoder::DCPDecoder (shared_ptr<const DCPContent> c)
: VideoDecoder (c)
, AudioDecoder (c)
, SubtitleDecoder (c)
- , _log (log)
, _dcp_content (c)
{
dcp::DCP dcp (c->directory ());
dcp.read ();
- assert (dcp.cpls().size() == 1);
+ if (c->kdm ()) {
+ dcp.add (dcp::DecryptedKDM (c->kdm().get (), Config::instance()->decryption_private_key ()));
+ }
+ DCPOMATIC_ASSERT (dcp.cpls().size() == 1);
_reels = dcp.cpls().front()->reels ();
_reel = _reels.begin ();
}
bool
-DCPDecoder::pass ()
+DCPDecoder::pass (PassReason)
{
- if (_reel == _reels.end ()) {
+ if (_reel == _reels.end () || !_dcp_content->can_be_played ()) {
return true;
}
float const vfr = _dcp_content->video_frame_rate ();
+ int64_t const frame = _next.frames (vfr);
if ((*_reel)->main_picture ()) {
shared_ptr<dcp::PictureMXF> mxf = (*_reel)->main_picture()->mxf ();
shared_ptr<dcp::MonoPictureMXF> mono = dynamic_pointer_cast<dcp::MonoPictureMXF> (mxf);
shared_ptr<dcp::StereoPictureMXF> stereo = dynamic_pointer_cast<dcp::StereoPictureMXF> (mxf);
int64_t const entry_point = (*_reel)->main_picture()->entry_point ();
- int64_t const frame = _next.frames (vfr);
if (mono) {
- video (shared_ptr<ImageProxy> (new J2KImageProxy (mono->get_frame (entry_point + frame), mxf->size(), _log)), frame);
+ video (shared_ptr<ImageProxy> (new J2KImageProxy (mono->get_frame (entry_point + frame), mxf->size())), frame);
} else {
video (
- shared_ptr<ImageProxy> (new J2KImageProxy (stereo->get_frame (entry_point + frame), mxf->size(), dcp::EYE_LEFT, _log)),
+ shared_ptr<ImageProxy> (new J2KImageProxy (stereo->get_frame (entry_point + frame), mxf->size(), dcp::EYE_LEFT)),
frame
);
video (
- shared_ptr<ImageProxy> (new J2KImageProxy (stereo->get_frame (entry_point + frame), mxf->size(), dcp::EYE_RIGHT, _log)),
+ shared_ptr<ImageProxy> (new J2KImageProxy (stereo->get_frame (entry_point + frame), mxf->size(), dcp::EYE_RIGHT)),
frame
);
}
}
- /* XXX: sound */
+ if ((*_reel)->main_sound ()) {
+ int64_t const entry_point = (*_reel)->main_sound()->entry_point ();
+ shared_ptr<const dcp::SoundFrame> sf = (*_reel)->main_sound()->mxf()->get_frame (entry_point + frame);
+ uint8_t const * from = sf->data ();
+
+ int const channels = _dcp_content->audio_stream()->channels ();
+ int const frames = sf->size() / (3 * channels);
+ shared_ptr<AudioBuffers> data (new AudioBuffers (channels, frames));
+ for (int i = 0; i < frames; ++i) {
+ for (int j = 0; j < channels; ++j) {
+ data->data()[j][i] = float (from[0] | (from[1] << 8) | (from[2] << 16)) / (1 << 23);
+ from += 3;
+ }
+ }
+
+ audio (_dcp_content->audio_stream(), data, _next);
+ }
+
/* XXX: subtitle */
_next += ContentTime::from_frames (1, vfr);
/** @return Periods during the given period when image subtitles are present.
 *  DCP image subtitles are not yet supported, so this always reports none.
 */
list<ContentTimePeriod>
DCPDecoder::image_subtitles_during (ContentTimePeriod, bool) const
{
	list<ContentTimePeriod> periods;
	return periods;
}
+
+list<ContentTimePeriod>
+DCPDecoder::text_subtitles_during (ContentTimePeriod, bool) const
{
+ /* XXX */
return list<ContentTimePeriod> ();
}