-/** Emit our signal to say that some video data is ready.
- * @param image Video frame.
- * @param same true if `image' is the same as the last one we emitted.
- * @param sub Subtitle for this frame, or 0.
- */
-void
-VideoDecoder::signal_video (shared_ptr<Image> image, bool same, shared_ptr<Subtitle> sub)
-{
- TIMING (N_("Decoder emits %1"), _video_frame);
- Video (image, same, sub);
- ++_video_frame;
+ /* Dispatch the decoded frame as one or more ContentVideo emissions via
+  * Data(), according to how the content packs its 3D frames.
+  *
+  * NOTE(review): the enclosing function is not visible in this hunk;
+  * `vft`, `image` and `time` are presumably the content's frame type,
+  * the decoded frame and its presentation time — confirm against the
+  * full function signature.
+  */
+ switch (vft) {
+ case VideoFrameType::TWO_D:
+ /* Plain 2D: one frame, presented to both eyes. */
+ Data(ContentVideo(image, time, Eyes::BOTH, Part::WHOLE));
+ break;
+ case VideoFrameType::THREE_D:
+ {
+ /* Frame-sequential 3D: a J2K proxy may carry explicit eye metadata;
+  * when it does, use it.  Otherwise `eyes' stays LEFT — NOTE(review):
+  * this default is a guess for non-J2K images or proxies without eye
+  * info; verify that callers only hit this case with tagged frames.
+  */
+ auto eyes = Eyes::LEFT;
+ auto j2k = dynamic_pointer_cast<const J2KImageProxy>(image);
+ if (j2k && j2k->eye()) {
+ eyes = *j2k->eye() == dcp::Eye::LEFT ? Eyes::LEFT : Eyes::RIGHT;
+ }
+
+ Data(ContentVideo(image, time, eyes, Part::WHOLE));
+ break;
+ }
+ case VideoFrameType::THREE_D_ALTERNATE:
+ {
+ /* Alternating-eye 3D: successive frames flip between eyes, so emit
+  * the opposite of whatever we emitted last.
+  */
+ Eyes eyes;
+ if (_last_emitted_eyes) {
+ eyes = _last_emitted_eyes.get() == Eyes::LEFT ? Eyes::RIGHT : Eyes::LEFT;
+ } else {
+ /* We don't know what eye this frame is, so just guess */
+ /* Even frame index -> LEFT, odd -> RIGHT; assumes 24fps when the
+  * content's frame rate is unknown.
+  */
+ auto frame = time.frames_round(_content->video_frame_rate().get_value_or(24));
+ eyes = (frame % 2) ? Eyes::RIGHT : Eyes::LEFT;
+ }
+ Data(ContentVideo(image, time, eyes, Part::WHOLE));
+ /* Remember which eye we emitted so the next frame flips correctly. */
+ _last_emitted_eyes = eyes;
+ break;
+ }
+ case VideoFrameType::THREE_D_LEFT_RIGHT:
+ /* Side-by-side packing: one frame holds both eyes; emit each half. */
+ Data(ContentVideo(image, time, Eyes::LEFT, Part::LEFT_HALF));
+ Data(ContentVideo(image, time, Eyes::RIGHT, Part::RIGHT_HALF));
+ break;
+ case VideoFrameType::THREE_D_TOP_BOTTOM:
+ /* Over/under packing: top half is the left eye, bottom the right. */
+ Data(ContentVideo(image, time, Eyes::LEFT, Part::TOP_HALF));
+ Data(ContentVideo(image, time, Eyes::RIGHT, Part::BOTTOM_HALF));
+ break;
+ case VideoFrameType::THREE_D_LEFT:
+ /* Content supplies only the left eye. */
+ Data(ContentVideo(image, time, Eyes::LEFT, Part::WHOLE));
+ break;
+ case VideoFrameType::THREE_D_RIGHT:
+ /* Content supplies only the right eye. */
+ Data(ContentVideo(image, time, Eyes::RIGHT, Part::WHOLE));
+ break;
+ default:
+ /* An unhandled VideoFrameType is a programming error. */
+ DCPOMATIC_ASSERT (false);
+ }