X-Git-Url: https://git.carlh.net/gitweb/?a=blobdiff_plain;f=src%2Fpicture_frame.cc;h=b4d72f6f45713bd07d4878da5227989a45cac9e5;hb=8a95b38aeadeddd52badd6aa6b0466da2f4ca25e;hp=844f018350c9d7121fe2d612be146faf6b5c3c0e;hpb=753912e0f4a70343b693b1b5aefff101edd28874;p=libdcp.git

diff --git a/src/picture_frame.cc b/src/picture_frame.cc
index 844f0183..b4d72f6f 100644
--- a/src/picture_frame.cc
+++ b/src/picture_frame.cc
@@ -22,124 +22,147 @@
 #include "KM_fileio.h"
 #include "picture_frame.h"
 #include "exceptions.h"
-#include "rgba_frame.h"
+#include "argb_frame.h"
 #include "lut.h"
+#include "util.h"
+#include "gamma_lut.h"
 
-using namespace std;
-using namespace boost;
+#define DCI_GAMMA 2.6
+
+using std::string;
+using boost::shared_ptr;
 using namespace libdcp;
 
-PictureFrame::PictureFrame (string mxf_path, int n)
+/** Make a picture frame from a 2D (monoscopic) asset.
+ *  @param mxf_path Path to the asset's MXF file.
+ *  @param n Frame within the asset, not taking EntryPoint into account.
+ */
+MonoPictureFrame::MonoPictureFrame (string mxf_path, int n, ASDCP::AESDecContext* c)
 {
 	ASDCP::JP2K::MXFReader reader;
 	if (ASDCP_FAILURE (reader.OpenRead (mxf_path.c_str()))) {
-		throw FileError ("could not open MXF file for reading", mxf_path);
+		boost::throw_exception (FileError ("could not open MXF file for reading", mxf_path));
 	}
 
 	/* XXX: unfortunate guesswork on this buffer size */
 	_buffer = new ASDCP::JP2K::FrameBuffer (4 * Kumu::Megabyte);
 
-	if (ASDCP_FAILURE (reader.ReadFrame (n, *_buffer))) {
-		throw DCPReadError ("could not read video frame");
+	if (ASDCP_FAILURE (reader.ReadFrame (n, *_buffer, c))) {
+		boost::throw_exception (DCPReadError ("could not read video frame"));
 	}
 }
 
-PictureFrame::~PictureFrame ()
+MonoPictureFrame::~MonoPictureFrame ()
 {
 	delete _buffer;
 }
 
 uint8_t const *
-PictureFrame::data () const
+MonoPictureFrame::j2k_data () const
 {
-	return _buffer->RoData();
+	return _buffer->RoData ();
 }
 
 int
-PictureFrame::size () const
+MonoPictureFrame::j2k_size () const
 {
 	return _buffer->Size ();
 }
 
-shared_ptr<RGBAFrame>
-PictureFrame::rgba_frame () const
+/** @param reduce a factor by which to reduce the resolution
+ *  of the image, expressed as a power of two (pass 0 for no
+ *  reduction).
+ *
+ *  @return An ARGB representation of this frame.  This is ARGB in the
+ *  Cairo sense, so that each pixel takes up 4 bytes; the first byte
+ *  is blue, second green, third red and fourth alpha (always 255).
+ *
+ */
+shared_ptr<ARGBFrame>
+MonoPictureFrame::argb_frame (int reduce, float srgb_gamma) const
 {
-	/* JPEG2000 -> decompressed XYZ */
-
-	opj_dinfo_t* decoder = opj_create_decompress (CODEC_J2K);
-	opj_dparameters_t parameters;
-	opj_set_default_decoder_parameters (&parameters);
-	opj_setup_decoder (decoder, &parameters);
-	opj_cio_t* cio = opj_cio_open ((opj_common_ptr) decoder, const_cast<unsigned char *> (data()), size());
-	opj_image_t* xyz_frame = opj_decode (decoder, cio);
-	if (!xyz_frame) {
-		opj_destroy_decompress (decoder);
-		opj_cio_close (cio);
-		throw DCPReadError ("could not decode JPEG2000 codestream");
-	}
-
+	opj_image_t* xyz_frame = decompress_j2k (const_cast<uint8_t*> (_buffer->RoData()), _buffer->Size(), reduce);
 	assert (xyz_frame->numcomps == 3);
-
-	/* XYZ -> RGB */
-
-	struct {
-		double x, y, z;
-	} s;
-
-	struct {
-		double r, g, b;
-	} d;
-
-	int* xyz_x = xyz_frame->comps[0].data;
-	int* xyz_y = xyz_frame->comps[1].data;
-	int* xyz_z = xyz_frame->comps[2].data;
+	shared_ptr<ARGBFrame> f = xyz_to_rgb (xyz_frame, GammaLUT::cache.get (12, DCI_GAMMA), GammaLUT::cache.get (12, 1 / srgb_gamma));
+	opj_image_destroy (xyz_frame);
+	return f;
+}
 
-	shared_ptr<RGBAFrame> rgba_frame (new RGBAFrame (xyz_frame->x1, xyz_frame->y1));
-
-	uint8_t* rgba = rgba_frame->data ();
-
-	for (int y = 0; y < xyz_frame->y1; ++y) {
-		uint8_t* rgba_line = rgba;
-		for (int x = 0; x < xyz_frame->x1; ++x) {
-
-			assert (*xyz_x >= 0 && *xyz_y >= 0 && *xyz_z >= 0 && *xyz_x < 4096 && *xyz_x < 4096 && *xyz_z < 4096);
-
-			/* In gamma LUT */
-			s.x = lut_in[*xyz_x++];
-			s.y = lut_in[*xyz_y++];
-			s.z = lut_in[*xyz_z++];
-
-			/* DCI companding */
-			s.x /= DCI_COEFFICIENT;
-			s.y /= DCI_COEFFICIENT;
-			s.z /= DCI_COEFFICIENT;
-
-			/* XYZ to RGB */
-			d.r = ((s.x * color_matrix[0][0]) + (s.y * color_matrix[0][1]) + (s.z * color_matrix[0][2]));
-			d.g = ((s.x * color_matrix[1][0]) + (s.y * color_matrix[1][1]) + (s.z * color_matrix[1][2]));
-			d.b = ((s.x * color_matrix[2][0]) + (s.y * color_matrix[2][1]) + (s.z * color_matrix[2][2]));
-
-			d.r = min (d.r, 1.0);
-			d.r = max (d.r, 0.0);
-
-			d.g = min (d.g, 1.0);
-			d.g = max (d.g, 0.0);
-
-			d.b = min (d.b, 1.0);
-			d.b = max (d.b, 0.0);
-
-			/* Out gamma LUT */
-			*rgba_line++ = lut_out[(int) (d.r * COLOR_DEPTH)];
-			*rgba_line++ = lut_out[(int) (d.g * COLOR_DEPTH)];
-			*rgba_line++ = lut_out[(int) (d.b * COLOR_DEPTH)];
-			*rgba_line++ = 0xff;
-		}
-
-		rgba += rgba_frame->stride ();
+/** Make a picture frame from a 3D (stereoscopic) asset.
+ *  @param mxf_path Path to the asset's MXF file.
+ *  @param n Frame within the asset, not taking EntryPoint into account.
+ */
+StereoPictureFrame::StereoPictureFrame (string mxf_path, int n)
+{
+	ASDCP::JP2K::MXFSReader reader;
+	if (ASDCP_FAILURE (reader.OpenRead (mxf_path.c_str()))) {
+		boost::throw_exception (FileError ("could not open MXF file for reading", mxf_path));
+	}
+
+	/* XXX: unfortunate guesswork on this buffer size */
+	_buffer = new ASDCP::JP2K::SFrameBuffer (4 * Kumu::Megabyte);
+
+	if (ASDCP_FAILURE (reader.ReadFrame (n, *_buffer))) {
+		boost::throw_exception (DCPReadError ("could not read video frame"));
+	}
+}
+
+StereoPictureFrame::~StereoPictureFrame ()
+{
+	delete _buffer;
+}
+
+/** @param reduce a factor by which to reduce the resolution
+ *  of the image, expressed as a power of two (pass 0 for no
+ *  reduction).
+ *
+ *  @param eye Eye to return (LEFT or RIGHT).
+ *
+ *  @return An ARGB representation of one of the eyes (left or right)
+ *  of this frame.  This is ARGB in the Cairo sense, so that each
+ *  pixel takes up 4 bytes; the first byte is blue, second green,
+ *  third red and fourth alpha (always 255).
+ *
+ */
+shared_ptr<ARGBFrame>
+StereoPictureFrame::argb_frame (Eye eye, int reduce, float srgb_gamma) const
+{
+	opj_image_t* xyz_frame = 0;
+	switch (eye) {
+	case LEFT:
+		xyz_frame = decompress_j2k (const_cast<uint8_t*> (_buffer->Left.RoData()), _buffer->Left.Size(), reduce);
+		break;
+	case RIGHT:
+		xyz_frame = decompress_j2k (const_cast<uint8_t*> (_buffer->Right.RoData()), _buffer->Right.Size(), reduce);
+		break;
 	}
 
-	opj_cio_close (cio);
+	assert (xyz_frame->numcomps == 3);
+	shared_ptr<ARGBFrame> f = xyz_to_rgb (xyz_frame, GammaLUT::cache.get (12, DCI_GAMMA), GammaLUT::cache.get (12, 1 / srgb_gamma));
 	opj_image_destroy (xyz_frame);
+	return f;
+}
+
+uint8_t const *
+StereoPictureFrame::left_j2k_data () const
+{
+	return _buffer->Left.RoData ();
+}
+
+int
+StereoPictureFrame::left_j2k_size () const
+{
+	return _buffer->Left.Size ();
+}
 
-	return rgba_frame;
+uint8_t const *
+StereoPictureFrame::right_j2k_data () const
+{
+	return _buffer->Right.RoData ();
+}
+
+int
+StereoPictureFrame::right_j2k_size () const
+{
+	return _buffer->Right.Size ();
 }
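For illustration, a minimal sketch of how a caller might use the classes added in this diff. It is not part of the commit: the MXF paths, frame indices, and output gamma are placeholders, the 2D asset is assumed to be unencrypted (so a null ASDCP::AESDecContext is passed), and it assumes the Eye enumerators are reachable as libdcp::LEFT / libdcp::RIGHT, as the switch in StereoPictureFrame::argb_frame suggests.

/* Hypothetical caller, for illustration only -- not part of this commit. */
#include "picture_frame.h"
#include "argb_frame.h"
#include <boost/shared_ptr.hpp>
#include <iostream>

int main ()
{
	/* 2D: frame 0 of a (placeholder) monoscopic MXF; the final 0 is a null
	   AES decryption context, assuming the asset is not encrypted. */
	libdcp::MonoPictureFrame mono ("video_2d.mxf", 0, 0);
	std::cout << "2D JPEG2000 frame is " << mono.j2k_size () << " bytes\n";

	/* Full resolution (reduce = 0), converted from DCI 2.6 gamma to a
	   2.2-gamma output; each pixel of the result is 4 bytes (B, G, R, A). */
	boost::shared_ptr<libdcp::ARGBFrame> argb = mono.argb_frame (0, 2.2);

	/* 3D: left eye of frame 0, halved in each dimension (reduce = 1). */
	libdcp::StereoPictureFrame stereo ("video_3d.mxf", 0);
	boost::shared_ptr<libdcp::ARGBFrame> left = stereo.argb_frame (libdcp::LEFT, 1, 2.2);

	return 0;
}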