Restore upmixer_a_test and fix resampler flushing.
[dcpomatic.git] / src / lib / player.cc
/*
    Copyright (C) 2013-2017 Carl Hetherington <cth@carlh.net>

    This file is part of DCP-o-matic.

    DCP-o-matic is free software; you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation; either version 2 of the License, or
    (at your option) any later version.

    DCP-o-matic is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with DCP-o-matic.  If not, see <http://www.gnu.org/licenses/>.

*/

#include "player.h"
#include "film.h"
#include "audio_buffers.h"
#include "content_audio.h"
#include "dcp_content.h"
#include "job.h"
#include "image.h"
#include "raw_image_proxy.h"
#include "ratio.h"
#include "log.h"
#include "render_subtitles.h"
#include "config.h"
#include "content_video.h"
#include "player_video.h"
#include "frame_rate_change.h"
#include "audio_processor.h"
#include "playlist.h"
#include "referenced_reel_asset.h"
#include "decoder_factory.h"
#include "decoder.h"
#include "video_decoder.h"
#include "audio_decoder.h"
#include "subtitle_content.h"
#include "subtitle_decoder.h"
#include "ffmpeg_content.h"
#include "audio_content.h"
#include "content_subtitle.h"
#include "dcp_decoder.h"
#include "image_decoder.h"
#include "resampler.h"
#include "compose.hpp"
#include <dcp/reel.h>
#include <dcp/reel_sound_asset.h>
#include <dcp/reel_subtitle_asset.h>
#include <dcp/reel_picture_asset.h>
#include <boost/foreach.hpp>
#include <stdint.h>
#include <algorithm>
#include <iostream>

#include "i18n.h"

#define LOG_GENERAL(...) _film->log()->log (String::compose (__VA_ARGS__), LogEntry::TYPE_GENERAL);

using std::list;
using std::cout;
using std::min;
using std::max;
using std::vector;
using std::pair;
using std::map;
using std::make_pair;
using std::copy;
using boost::shared_ptr;
using boost::weak_ptr;
using boost::dynamic_pointer_cast;
using boost::optional;
using boost::scoped_ptr;

Player::Player (shared_ptr<const Film> film, shared_ptr<const Playlist> playlist)
	: _film (film)
	, _playlist (playlist)
	, _have_valid_pieces (false)
	, _ignore_video (false)
	, _ignore_audio (false)
	, _always_burn_subtitles (false)
	, _fast (false)
	, _play_referenced (false)
	, _audio_merger (_film->audio_frame_rate())
{
	_film_changed_connection = _film->Changed.connect (bind (&Player::film_changed, this, _1));
	_playlist_changed_connection = _playlist->Changed.connect (bind (&Player::playlist_changed, this));
	_playlist_content_changed_connection = _playlist->ContentChanged.connect (bind (&Player::playlist_content_changed, this, _1, _2, _3));
	set_video_container_size (_film->frame_size ());

	film_changed (Film::AUDIO_PROCESSOR);

	seek (DCPTime (), true);
}

void
Player::setup_pieces ()
{
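	/* Rebuild _pieces from the playlist: create a decoder for each piece of
	   content whose files are present, wire each decoder's video, audio and
	   subtitle outputs to the Player's handlers, and record the periods for
	   which a referenced DCP supplies video/audio so that we do not fill them
	   with black or silence.
	*/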
	_pieces.clear ();

	BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {

		if (!i->paths_valid ()) {
			continue;
		}

		shared_ptr<Decoder> decoder = decoder_factory (i, _film->log());
		FrameRateChange frc (i->active_video_frame_rate(), _film->video_frame_rate());

		if (!decoder) {
			/* Not something that we can decode; e.g. Atmos content */
			continue;
		}

		if (decoder->video && _ignore_video) {
			decoder->video->set_ignore ();
		}

		if (decoder->audio && _ignore_audio) {
			decoder->audio->set_ignore ();
		}

		shared_ptr<DCPDecoder> dcp = dynamic_pointer_cast<DCPDecoder> (decoder);
		if (dcp && _play_referenced) {
			dcp->set_decode_referenced ();
		}

		shared_ptr<Piece> piece (new Piece (i, decoder, frc));
		_pieces.push_back (piece);

		if (decoder->video) {
			decoder->video->Data.connect (bind (&Player::video, this, weak_ptr<Piece> (piece), _1));
		}

		if (decoder->audio) {
			decoder->audio->Data.connect (bind (&Player::audio, this, weak_ptr<Piece> (piece), _1, _2));
		}

		if (decoder->subtitle) {
			decoder->subtitle->ImageData.connect (bind (&Player::image_subtitle, this, weak_ptr<Piece> (piece), _1));
			decoder->subtitle->TextData.connect (bind (&Player::text_subtitle, this, weak_ptr<Piece> (piece), _1));
		}
	}

	BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
		if (i->content->audio) {
			BOOST_FOREACH (AudioStreamPtr j, i->content->audio->streams()) {
				_stream_states[j] = StreamState (i, i->content->position ());
			}
		}
	}

	if (!_play_referenced) {
		BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
			shared_ptr<DCPContent> dc = dynamic_pointer_cast<DCPContent> (i->content);
			if (dc) {
				if (dc->reference_video()) {
					_no_video.push_back (DCPTimePeriod (dc->position(), dc->end()));
				}
				if (dc->reference_audio()) {
					_no_audio.push_back (DCPTimePeriod (dc->position(), dc->end()));
				}
			}
		}
	}

	_have_valid_pieces = true;
}

void
Player::playlist_content_changed (weak_ptr<Content> w, int property, bool frequent)
{
	shared_ptr<Content> c = w.lock ();
	if (!c) {
		return;
	}

	if (
		property == ContentProperty::POSITION ||
		property == ContentProperty::LENGTH ||
		property == ContentProperty::TRIM_START ||
		property == ContentProperty::TRIM_END ||
		property == ContentProperty::PATH ||
		property == VideoContentProperty::FRAME_TYPE ||
		property == DCPContentProperty::NEEDS_ASSETS ||
		property == DCPContentProperty::NEEDS_KDM ||
		property == SubtitleContentProperty::COLOUR ||
		property == SubtitleContentProperty::OUTLINE ||
		property == SubtitleContentProperty::SHADOW ||
		property == SubtitleContentProperty::EFFECT_COLOUR ||
		property == FFmpegContentProperty::SUBTITLE_STREAM ||
		property == VideoContentProperty::COLOUR_CONVERSION
		) {

		_have_valid_pieces = false;
		Changed (frequent);

	} else if (
		property == SubtitleContentProperty::LINE_SPACING ||
		property == SubtitleContentProperty::OUTLINE_WIDTH ||
		property == SubtitleContentProperty::Y_SCALE ||
		property == SubtitleContentProperty::FADE_IN ||
		property == SubtitleContentProperty::FADE_OUT ||
		property == ContentProperty::VIDEO_FRAME_RATE ||
		property == SubtitleContentProperty::USE ||
		property == SubtitleContentProperty::X_OFFSET ||
		property == SubtitleContentProperty::Y_OFFSET ||
		property == SubtitleContentProperty::X_SCALE ||
		property == SubtitleContentProperty::FONTS ||
		property == VideoContentProperty::CROP ||
		property == VideoContentProperty::SCALE ||
		property == VideoContentProperty::FADE_IN ||
		property == VideoContentProperty::FADE_OUT
		) {

		Changed (frequent);
	}
}

void
Player::set_video_container_size (dcp::Size s)
{
	_video_container_size = s;

	_black_image.reset (new Image (AV_PIX_FMT_RGB24, _video_container_size, true));
	_black_image->make_black ();
}

void
Player::playlist_changed ()
{
	_have_valid_pieces = false;
	Changed (false);
}

void
Player::film_changed (Film::Property p)
{
	/* Here we should notice Film properties that affect our output, and
	   alert listeners that our output now would be different to how it was
	   last time we were run.
	*/

	if (p == Film::CONTAINER) {
		Changed (false);
	} else if (p == Film::VIDEO_FRAME_RATE) {
		/* Pieces contain a FrameRateChange which contains the DCP frame rate,
		   so we need new pieces here.
		*/
		_have_valid_pieces = false;
		Changed (false);
	} else if (p == Film::AUDIO_PROCESSOR) {
		if (_film->audio_processor ()) {
			_audio_processor = _film->audio_processor()->clone (_film->audio_frame_rate ());
		}
	}
}

list<PositionImage>
Player::transform_image_subtitles (list<ImageSubtitle> subs) const
{
	list<PositionImage> all;

	for (list<ImageSubtitle>::const_iterator i = subs.begin(); i != subs.end(); ++i) {
		if (!i->image) {
			continue;
		}

		/* We will scale the subtitle up to fit _video_container_size */
		dcp::Size scaled_size (i->rectangle.width * _video_container_size.width, i->rectangle.height * _video_container_size.height);

		/* Then we need a corrective translation, consisting of two parts:
		 *
		 * 1.  that which is the result of the scaling of the subtitle by _video_container_size; this will be
		 *     rect.x * _video_container_size.width and rect.y * _video_container_size.height.
		 *
		 * 2.  that to shift the origin of the scale by subtitle_scale to the centre of the subtitle; this will be
		 *     (width_before_subtitle_scale * (1 - subtitle_x_scale) / 2) and
		 *     (height_before_subtitle_scale * (1 - subtitle_y_scale) / 2).
		 *
		 * Combining these two translations gives these expressions.
		 */
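
		/* Illustrative example (numbers assumed, not taken from any real content):
		   with a 1998x1080 container and a subtitle rectangle of x=0.1, y=0.8,
		   width=0.5, height=0.1, scaled_size comes out as 999x108 and the position
		   below as (lrint(199.8), lrint(864)) = (200, 864).
		*/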

		all.push_back (
			PositionImage (
				i->image->scale (
					scaled_size,
					dcp::YUV_TO_RGB_REC601,
					i->image->pixel_format (),
					true,
					_fast
					),
				Position<int> (
					lrint (_video_container_size.width * i->rectangle.x),
					lrint (_video_container_size.height * i->rectangle.y)
					)
				)
			);
	}

	return all;
}

shared_ptr<PlayerVideo>
Player::black_player_video_frame () const
{
	return shared_ptr<PlayerVideo> (
		new PlayerVideo (
			shared_ptr<const ImageProxy> (new RawImageProxy (_black_image)),
			Crop (),
			optional<double> (),
			_video_container_size,
			_video_container_size,
			EYES_BOTH,
			PART_WHOLE,
			PresetColourConversion::all().front().conversion
		)
	);
}

Frame
Player::dcp_to_content_video (shared_ptr<const Piece> piece, DCPTime t) const
{
	DCPTime s = t - piece->content->position ();
	s = min (piece->content->length_after_trim(), s);
	s = max (DCPTime(), s + DCPTime (piece->content->trim_start(), piece->frc));

	/* It might seem more logical here to convert s to a ContentTime (using the FrameRateChange)
	   then convert that ContentTime to frames at the content's rate.  However this fails for
	   situations like content at 29.9978733fps, DCP at 30fps.  The accuracy of the Time type is not
	   enough to distinguish between the two with low values of time (e.g. 3200 in Time units).

	   Instead we convert the DCPTime using the DCP video rate then account for any skip/repeat.
	*/
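
	/* Worked example with assumed numbers: 25fps content in a 50fps DCP is handled
	   by repeating each frame, so frc.factor() should be 2; a DCPTime of one second
	   then becomes s.frames_floor (50) / 2 == 25 content frames.  The exact
	   skip/repeat behaviour comes from FrameRateChange; treat these numbers as an
	   illustration rather than a specification.
	*/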
	return s.frames_floor (piece->frc.dcp) / piece->frc.factor ();
}

DCPTime
Player::content_video_to_dcp (shared_ptr<const Piece> piece, Frame f) const
{
	/* See comment in dcp_to_content_video */
	DCPTime const d = DCPTime::from_frames (f * piece->frc.factor(), piece->frc.dcp) - DCPTime (piece->content->trim_start (), piece->frc);
	return max (DCPTime (), d + piece->content->position ());
}

Frame
Player::dcp_to_resampled_audio (shared_ptr<const Piece> piece, DCPTime t) const
{
	DCPTime s = t - piece->content->position ();
	s = min (piece->content->length_after_trim(), s);
	/* See notes in dcp_to_content_video */
	return max (DCPTime (), DCPTime (piece->content->trim_start (), piece->frc) + s).frames_floor (_film->audio_frame_rate ());
}

DCPTime
Player::resampled_audio_to_dcp (shared_ptr<const Piece> piece, Frame f) const
{
	/* See comment in dcp_to_content_video */
	DCPTime const d = DCPTime::from_frames (f, _film->audio_frame_rate()) - DCPTime (piece->content->trim_start (), piece->frc);
	return max (DCPTime (), d + piece->content->position ());
}

ContentTime
Player::dcp_to_content_time (shared_ptr<const Piece> piece, DCPTime t) const
{
	DCPTime s = t - piece->content->position ();
	s = min (piece->content->length_after_trim(), s);
	return max (ContentTime (), ContentTime (s, piece->frc) + piece->content->trim_start());
}

DCPTime
Player::content_time_to_dcp (shared_ptr<const Piece> piece, ContentTime t) const
{
	return max (DCPTime (), DCPTime (t - piece->content->trim_start(), piece->frc) + piece->content->position());
}

list<shared_ptr<Font> >
Player::get_subtitle_fonts ()
{
	if (!_have_valid_pieces) {
		setup_pieces ();
	}

	list<shared_ptr<Font> > fonts;
	BOOST_FOREACH (shared_ptr<Piece>& p, _pieces) {
		if (p->content->subtitle) {
			/* XXX: things may go wrong if there are duplicate font IDs
			   with different font files.
			*/
			list<shared_ptr<Font> > f = p->content->subtitle->fonts ();
			copy (f.begin(), f.end(), back_inserter (fonts));
		}
	}

	return fonts;
}

/** Set this player never to produce any video data */
void
Player::set_ignore_video ()
{
	_ignore_video = true;
}

/** Set this player never to produce any audio data */
void
Player::set_ignore_audio ()
{
	_ignore_audio = true;
}

/** Set whether or not this player should always burn text subtitles into the image,
 *  regardless of the content settings.
 *  @param burn true to always burn subtitles, false to obey content settings.
 */
void
Player::set_always_burn_subtitles (bool burn)
{
	_always_burn_subtitles = burn;
}

void
Player::set_fast ()
{
	_fast = true;
	_have_valid_pieces = false;
}

void
Player::set_play_referenced ()
{
	_play_referenced = true;
	_have_valid_pieces = false;
}

list<ReferencedReelAsset>
Player::get_reel_assets ()
{
	list<ReferencedReelAsset> a;

	BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
		shared_ptr<DCPContent> j = dynamic_pointer_cast<DCPContent> (i);
		if (!j) {
			continue;
		}

		scoped_ptr<DCPDecoder> decoder;
		try {
			decoder.reset (new DCPDecoder (j, _film->log()));
		} catch (...) {
			return a;
		}

		int64_t offset = 0;
		BOOST_FOREACH (shared_ptr<dcp::Reel> k, decoder->reels()) {

			DCPOMATIC_ASSERT (j->video_frame_rate ());
			double const cfr = j->video_frame_rate().get();
			Frame const trim_start = j->trim_start().frames_round (cfr);
			Frame const trim_end = j->trim_end().frames_round (cfr);
			int const ffr = _film->video_frame_rate ();

			DCPTime const from = i->position() + DCPTime::from_frames (offset, _film->video_frame_rate());
			if (j->reference_video ()) {
				shared_ptr<dcp::ReelAsset> ra = k->main_picture ();
				DCPOMATIC_ASSERT (ra);
				ra->set_entry_point (ra->entry_point() + trim_start);
				ra->set_duration (ra->duration() - trim_start - trim_end);
				a.push_back (
					ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
					);
			}

			if (j->reference_audio ()) {
				shared_ptr<dcp::ReelAsset> ra = k->main_sound ();
				DCPOMATIC_ASSERT (ra);
				ra->set_entry_point (ra->entry_point() + trim_start);
				ra->set_duration (ra->duration() - trim_start - trim_end);
				a.push_back (
					ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
					);
			}

			if (j->reference_subtitle ()) {
				shared_ptr<dcp::ReelAsset> ra = k->main_subtitle ();
				DCPOMATIC_ASSERT (ra);
				ra->set_entry_point (ra->entry_point() + trim_start);
				ra->set_duration (ra->duration() - trim_start - trim_end);
				a.push_back (
					ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
					);
			}

			/* Assume that main picture duration is the length of the reel */
			offset += k->main_picture()->duration ();
		}
	}

	return a;
}

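/** Return the pieces whose content overlaps the period from `from' to `to' and for
 *  which the supplied predicate returns true.  A hypothetical call (the predicate
 *  named here is not part of this file) might be
 *
 *      overlaps (from, to, bind (&content_is_interesting, _1));
 *
 *  where content_is_interesting is any callable taking a Content* and returning bool.
 */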
list<shared_ptr<Piece> >
Player::overlaps (DCPTime from, DCPTime to, boost::function<bool (Content *)> valid)
{
	if (!_have_valid_pieces) {
		setup_pieces ();
	}

	list<shared_ptr<Piece> > overlaps;
	BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
		if (valid (i->content.get ()) && i->content->position() < to && i->content->end() > from) {
			overlaps.push_back (i);
		}
	}

	return overlaps;
}

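/** Run one pass of the decoder which is earliest in DCP time, emit any audio that
 *  has become ready, and fill the remainder of the playlist with black and silence
 *  once all content is exhausted.
 *  @return true if there is no more content to decode, otherwise false.
 */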
bool
Player::pass ()
{
	if (!_have_valid_pieces) {
		setup_pieces ();
	}

	shared_ptr<Piece> earliest;
	DCPTime earliest_content;

	BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
		if (!i->done) {
			DCPTime const t = i->content->position() + DCPTime (i->decoder->position(), i->frc);
			if (!earliest || t < earliest_content) {
				earliest_content = t;
				earliest = i;
			}
		}
	}

	if (!earliest) {
		/* No more content; fill up with silent black */
		DCPTimePeriod remaining_video (DCPTime(), _playlist->length());
		if (_last_time) {
			remaining_video.from = _last_time.get() + one_video_frame();
		}
		fill_video (remaining_video);
		fill_audio (DCPTimePeriod (_last_audio_time, _playlist->length()));
		return true;
	}

	earliest->done = earliest->decoder->pass ();
	if (earliest->done && earliest->content->audio) {
		/* Flush the Player audio system for this piece */
		BOOST_FOREACH (AudioStreamPtr i, earliest->content->audio->streams()) {
			audio_flush (earliest, i);
		}
	}

	/* Emit any audio that is ready */

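	/* We can only safely emit audio up to the earliest point that every
	   still-active stream has pushed to; anything later might yet have more
	   audio mixed into it by a subsequent push.
	*/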
	DCPTime pull_from = _playlist->length ();
	for (map<AudioStreamPtr, StreamState>::const_iterator i = _stream_states.begin(); i != _stream_states.end(); ++i) {
		if (!i->second.piece->done && i->second.last_push_end < pull_from) {
			pull_from = i->second.last_push_end;
		}
	}

	list<pair<shared_ptr<AudioBuffers>, DCPTime> > audio = _audio_merger.pull (pull_from);
	for (list<pair<shared_ptr<AudioBuffers>, DCPTime> >::iterator i = audio.begin(); i != audio.end(); ++i) {
		DCPOMATIC_ASSERT (i->second >= _last_audio_time);
		fill_audio (DCPTimePeriod (_last_audio_time, i->second));
		Audio (i->first, i->second);
		_last_audio_time = i->second + DCPTime::from_frames(i->first->frames(), _film->audio_frame_rate());
	}

	return false;
}

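/** Handle a frame of video from a decoder: work out its time in the DCP, attach any
 *  subtitles which should be burnt into it, fill any gap since the last frame we
 *  emitted, then emit it via the Video signal.
 */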
void
Player::video (weak_ptr<Piece> wp, ContentVideo video)
{
	shared_ptr<Piece> piece = wp.lock ();
	if (!piece) {
		return;
	}

	FrameRateChange frc(piece->content->active_video_frame_rate(), _film->video_frame_rate());
	if (frc.skip && (video.frame % 2) == 1) {
		return;
	}

	/* Time and period of the frame we will emit */
	DCPTime const time = content_video_to_dcp (piece, video.frame);
	DCPTimePeriod const period (time, time + one_video_frame());

	/* Discard if it's outside the content's period */
	if (time < piece->content->position() || time >= piece->content->end()) {
		return;
	}

	/* Get any subtitles */

	optional<PositionImage> subtitles;

	for (list<pair<PlayerSubtitles, DCPTimePeriod> >::const_iterator i = _subtitles.begin(); i != _subtitles.end(); ++i) {

		if (!i->second.overlap (period)) {
			continue;
		}

		list<PositionImage> sub_images;

		/* Image subtitles */
		list<PositionImage> c = transform_image_subtitles (i->first.image);
		copy (c.begin(), c.end(), back_inserter (sub_images));

		/* Text subtitles (rendered to an image) */
		if (!i->first.text.empty ()) {
			list<PositionImage> s = render_subtitles (i->first.text, i->first.fonts, _video_container_size, time);
			copy (s.begin (), s.end (), back_inserter (sub_images));
		}

		if (!sub_images.empty ()) {
			subtitles = merge (sub_images);
		}
	}

	/* Fill gaps */

	if (_last_time) {
		fill_video (DCPTimePeriod (_last_time.get() + one_video_frame(), time));
	}

	_last_video.reset (
		new PlayerVideo (
			video.image,
			piece->content->video->crop (),
			piece->content->video->fade (video.frame),
			piece->content->video->scale().size (
				piece->content->video, _video_container_size, _film->frame_size ()
				),
			_video_container_size,
			video.eyes,
			video.part,
			piece->content->video->colour_conversion ()
			)
		);

	if (subtitles) {
		_last_video->set_subtitle (subtitles.get ());
	}

	_last_time = time;

	Video (_last_video, *_last_time);

	/* Discard any subtitles we no longer need */

	for (list<pair<PlayerSubtitles, DCPTimePeriod> >::iterator i = _subtitles.begin (); i != _subtitles.end(); ) {
		list<pair<PlayerSubtitles, DCPTimePeriod> >::iterator tmp = i;
		++tmp;

		if (i->second.to < time) {
			_subtitles.erase (i);
		}

		i = tmp;
	}
}

void
Player::audio_flush (shared_ptr<Piece> piece, AudioStreamPtr stream)
{
	shared_ptr<AudioContent> content = piece->content->audio;
	DCPOMATIC_ASSERT (content);

	shared_ptr<Resampler> r = resampler (content, stream);
	pair<shared_ptr<const AudioBuffers>, Frame> ro = r->flush ();
	ContentAudio content_audio;
	content_audio.audio = ro.first;
	content_audio.frame = ro.second;

	/* Compute time in the DCP */
	DCPTime time = resampled_audio_to_dcp (piece, content_audio.frame) + DCPTime::from_seconds (content->delay() / 1000.0);

	audio_transform (content, stream, content_audio, time);
}

/** Do our common processing on some audio */
void
Player::audio_transform (shared_ptr<AudioContent> content, AudioStreamPtr stream, ContentAudio content_audio, DCPTime time)
{
	/* Gain */

	if (content->gain() != 0) {
		shared_ptr<AudioBuffers> gain (new AudioBuffers (content_audio.audio));
		gain->apply_gain (content->gain ());
		content_audio.audio = gain;
	}

	/* Remap */

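	/* Illustration (channel numbers assumed): for a stereo stream mapped into a
	   5.1 DCP, a mapping with L->L = 1.0 and R->R = 1.0 accumulates input channel
	   0 into dcp_mapped channel 0 and input channel 1 into channel 1, leaving the
	   other channels silent.
	*/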
	shared_ptr<AudioBuffers> dcp_mapped (new AudioBuffers (_film->audio_channels(), content_audio.audio->frames()));
	dcp_mapped->make_silent ();

	AudioMapping map = stream->mapping ();
	for (int i = 0; i < map.input_channels(); ++i) {
		for (int j = 0; j < dcp_mapped->channels(); ++j) {
			if (map.get (i, static_cast<dcp::Channel> (j)) > 0) {
				dcp_mapped->accumulate_channel (
					content_audio.audio.get(),
					i,
					static_cast<dcp::Channel> (j),
					map.get (i, static_cast<dcp::Channel> (j))
					);
			}
		}
	}

	content_audio.audio = dcp_mapped;

	/* Process */

	if (_audio_processor) {
		content_audio.audio = _audio_processor->run (content_audio.audio, _film->audio_channels ());
	}

	/* Push */

	_audio_merger.push (content_audio.audio, time);
	DCPOMATIC_ASSERT (_stream_states.find (stream) != _stream_states.end ());
	_stream_states[stream].last_push_end = time + DCPTime::from_frames (content_audio.audio->frames(), _film->audio_frame_rate());
}

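/** Handle some audio from a decoder: resample it if the stream's rate does not match
 *  the content's resampled rate, drop anything which falls before the content's
 *  position, then pass it to audio_transform for gain, remapping, processing and
 *  merging.
 */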
void
Player::audio (weak_ptr<Piece> wp, AudioStreamPtr stream, ContentAudio content_audio)
{
	shared_ptr<Piece> piece = wp.lock ();
	if (!piece) {
		return;
	}

	shared_ptr<AudioContent> content = piece->content->audio;
	DCPOMATIC_ASSERT (content);

	/* Resample */
	if (stream->frame_rate() != content->resampled_frame_rate()) {
		shared_ptr<Resampler> r = resampler (content, stream);
		pair<shared_ptr<const AudioBuffers>, Frame> ro = r->run (content_audio.audio, content_audio.frame);
		content_audio.audio = ro.first;
		content_audio.frame = ro.second;
	}

	/* XXX: end-trimming used to be checked here */

	/* Compute time in the DCP */
	DCPTime time = resampled_audio_to_dcp (piece, content_audio.frame) + DCPTime::from_seconds (content->delay() / 1000.0);

	/* Remove anything that comes before the start of the content */
	if (time < piece->content->position()) {
		DCPTime const discard_time = piece->content->position() - time;
		Frame discard_frames = discard_time.frames_round(_film->audio_frame_rate());
		Frame remaining_frames = content_audio.audio->frames() - discard_frames;
		if (remaining_frames <= 0) {
			/* This audio is entirely discarded */
			return;
		}
		shared_ptr<AudioBuffers> cut (new AudioBuffers (content_audio.audio->channels(), remaining_frames));
		cut->copy_from (content_audio.audio.get(), remaining_frames, discard_frames, 0);
		content_audio.audio = cut;
		time += discard_time;
	}

	audio_transform (content, stream, content_audio, time);
}

void
Player::image_subtitle (weak_ptr<Piece> wp, ContentImageSubtitle subtitle)
{
	shared_ptr<Piece> piece = wp.lock ();
	if (!piece) {
		return;
	}

	/* Apply content's subtitle offsets */
	subtitle.sub.rectangle.x += piece->content->subtitle->x_offset ();
	subtitle.sub.rectangle.y += piece->content->subtitle->y_offset ();

	/* Apply content's subtitle scale */
	subtitle.sub.rectangle.width *= piece->content->subtitle->x_scale ();
	subtitle.sub.rectangle.height *= piece->content->subtitle->y_scale ();

	/* Apply a corrective translation to keep the subtitle centred after that scale */
	subtitle.sub.rectangle.x -= subtitle.sub.rectangle.width * (piece->content->subtitle->x_scale() - 1);
	subtitle.sub.rectangle.y -= subtitle.sub.rectangle.height * (piece->content->subtitle->y_scale() - 1);

	PlayerSubtitles ps;
	ps.image.push_back (subtitle.sub);
	DCPTimePeriod period (content_time_to_dcp (piece, subtitle.period().from), content_time_to_dcp (piece, subtitle.period().to));

	if (piece->content->subtitle->use() && (piece->content->subtitle->burn() || _always_burn_subtitles)) {
		_subtitles.push_back (make_pair (ps, period));
	} else {
		Subtitle (ps, period);
	}
}

void
Player::text_subtitle (weak_ptr<Piece> wp, ContentTextSubtitle subtitle)
{
	shared_ptr<Piece> piece = wp.lock ();
	if (!piece) {
		return;
	}

	PlayerSubtitles ps;
	DCPTimePeriod const period (content_time_to_dcp (piece, subtitle.period().from), content_time_to_dcp (piece, subtitle.period().to));

	BOOST_FOREACH (dcp::SubtitleString s, subtitle.subs) {
		s.set_h_position (s.h_position() + piece->content->subtitle->x_offset ());
		s.set_v_position (s.v_position() + piece->content->subtitle->y_offset ());
		float const xs = piece->content->subtitle->x_scale();
		float const ys = piece->content->subtitle->y_scale();
		float size = s.size();

		/* Adjust size to express the common part of the scaling;
		   e.g. if xs = ys = 0.5 we scale size by 0.5 (the expression below is
		   equivalent to multiplying by max (xs, ys)).
		*/
		if (xs > 1e-5 && ys > 1e-5) {
			size *= 1 / min (1 / xs, 1 / ys);
		}
		s.set_size (size);

		/* Then express aspect ratio changes */
		if (fabs (1.0 - xs / ys) > dcp::ASPECT_ADJUST_EPSILON) {
			s.set_aspect_adjust (xs / ys);
		}

		s.set_in (dcp::Time(period.from.seconds(), 1000));
		s.set_out (dcp::Time(period.to.seconds(), 1000));
		ps.text.push_back (SubtitleString (s, piece->content->subtitle->outline_width()));
		ps.add_fonts (piece->content->subtitle->fonts ());
	}

	if (piece->content->subtitle->use() && (piece->content->subtitle->burn() || _always_burn_subtitles)) {
		_subtitles.push_back (make_pair (ps, period));
	} else {
		Subtitle (ps, period);
	}
}

void
Player::seek (DCPTime time, bool accurate)
{
	if (_audio_processor) {
		_audio_processor->flush ();
	}

	BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
		if (i->content->position() <= time && time < i->content->end()) {
			i->decoder->seek (dcp_to_content_time (i, time), accurate);
			i->done = false;
		}
	}

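	/* For an accurate seek, pretend the last video we emitted was one frame before
	   the seek target, so that gap-filling and the next emitted frame start exactly
	   at `time'.  For an inaccurate seek we don't know where the decoder will end
	   up, so clear _last_time rather than fill from a stale position.
	*/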
	if (accurate) {
		_last_time = time - one_video_frame ();
	} else {
		_last_time = optional<DCPTime> ();
	}
}

shared_ptr<Resampler>
Player::resampler (shared_ptr<const AudioContent> content, AudioStreamPtr stream)
{
	ResamplerMap::const_iterator i = _resamplers.find (make_pair (content, stream));
	if (i != _resamplers.end ()) {
		return i->second;
	}

	LOG_GENERAL (
		"Creating new resampler from %1 to %2 with %3 channels",
		stream->frame_rate(),
		content->resampled_frame_rate(),
		stream->channels()
		);

	shared_ptr<Resampler> r (
		new Resampler (stream->frame_rate(), content->resampled_frame_rate(), stream->channels())
		);

	_resamplers[make_pair(content, stream)] = r;
	return r;
}

void
Player::fill_video (DCPTimePeriod period)
{
	/* XXX: this may not work for 3D */
	BOOST_FOREACH (DCPTimePeriod i, subtract(period, _no_video)) {
		for (DCPTime j = i.from; j < i.to; j += one_video_frame()) {
			if (_playlist->video_content_at(j) && _last_video) {
				Video (shared_ptr<PlayerVideo> (new PlayerVideo (*_last_video)), j);
			} else {
				Video (black_player_video_frame(), j);
			}
		}
	}
}

void
Player::fill_audio (DCPTimePeriod period)
{
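	/* Emit silence in blocks of at most half a second; this bounds the size of
	   each AudioBuffers allocation while still covering the whole period.
	*/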
	BOOST_FOREACH (DCPTimePeriod i, subtract(period, _no_audio)) {
		DCPTime t = i.from;
		while (t < i.to) {
			DCPTime block = min (DCPTime::from_seconds (0.5), i.to - t);
			Frame const samples = block.frames_round(_film->audio_frame_rate());
			if (samples) {
				shared_ptr<AudioBuffers> silence (new AudioBuffers (_film->audio_channels(), samples));
				silence->make_silent ();
				Audio (silence, t);
			}
			t += block;
		}
	}
}

DCPTime
Player::one_video_frame () const
{
	return DCPTime::from_frames (1, _film->video_frame_rate ());
}