Don't emit zero-sample buffers.
[dcpomatic.git] / src / lib / player.cc
1 /*
2     Copyright (C) 2013-2017 Carl Hetherington <cth@carlh.net>
3
4     This file is part of DCP-o-matic.
5
6     DCP-o-matic is free software; you can redistribute it and/or modify
7     it under the terms of the GNU General Public License as published by
8     the Free Software Foundation; either version 2 of the License, or
9     (at your option) any later version.
10
11     DCP-o-matic is distributed in the hope that it will be useful,
12     but WITHOUT ANY WARRANTY; without even the implied warranty of
13     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14     GNU General Public License for more details.
15
16     You should have received a copy of the GNU General Public License
17     along with DCP-o-matic.  If not, see <http://www.gnu.org/licenses/>.
18
19 */
20
21 #include "player.h"
22 #include "film.h"
23 #include "audio_buffers.h"
24 #include "content_audio.h"
25 #include "dcp_content.h"
26 #include "job.h"
27 #include "image.h"
28 #include "raw_image_proxy.h"
29 #include "ratio.h"
30 #include "log.h"
31 #include "render_subtitles.h"
32 #include "config.h"
33 #include "content_video.h"
34 #include "player_video.h"
35 #include "frame_rate_change.h"
36 #include "audio_processor.h"
37 #include "playlist.h"
38 #include "referenced_reel_asset.h"
39 #include "decoder_factory.h"
40 #include "decoder.h"
41 #include "video_decoder.h"
42 #include "audio_decoder.h"
43 #include "subtitle_content.h"
44 #include "subtitle_decoder.h"
45 #include "ffmpeg_content.h"
46 #include "audio_content.h"
47 #include "content_subtitle.h"
48 #include "dcp_decoder.h"
49 #include "image_decoder.h"
50 #include "resampler.h"
51 #include "compose.hpp"
52 #include <dcp/reel.h>
53 #include <dcp/reel_sound_asset.h>
54 #include <dcp/reel_subtitle_asset.h>
55 #include <dcp/reel_picture_asset.h>
56 #include <boost/foreach.hpp>
57 #include <stdint.h>
58 #include <algorithm>
59 #include <iostream>
60
61 #include "i18n.h"
62
63 #define LOG_GENERAL(...) _film->log()->log (String::compose (__VA_ARGS__), LogEntry::TYPE_GENERAL);
64
65 using std::list;
66 using std::cout;
67 using std::min;
68 using std::max;
70 using std::vector;
71 using std::pair;
72 using std::map;
73 using std::make_pair;
74 using std::copy;
75 using boost::shared_ptr;
76 using boost::weak_ptr;
77 using boost::dynamic_pointer_cast;
78 using boost::optional;
79 using boost::scoped_ptr;
80
81 Player::Player (shared_ptr<const Film> film, shared_ptr<const Playlist> playlist)
82         : _film (film)
83         , _playlist (playlist)
84         , _have_valid_pieces (false)
85         , _ignore_video (false)
86         , _ignore_audio (false)
87         , _always_burn_subtitles (false)
88         , _fast (false)
89         , _play_referenced (false)
90         , _audio_merger (_film->audio_channels(), _film->audio_frame_rate())
91 {
92         _film_changed_connection = _film->Changed.connect (bind (&Player::film_changed, this, _1));
93         _playlist_changed_connection = _playlist->Changed.connect (bind (&Player::playlist_changed, this));
94         _playlist_content_changed_connection = _playlist->ContentChanged.connect (bind (&Player::playlist_content_changed, this, _1, _2, _3));
95         set_video_container_size (_film->frame_size ());
96
97         film_changed (Film::AUDIO_PROCESSOR);
98
99         seek (DCPTime (), true);
100 }
101
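/** Rebuild _pieces from the playlist: create a decoder for each bit of content that we
 *  can decode, connect its video, audio and subtitle outputs to our handlers and record
 *  the starting position of each audio stream.
 */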
102 void
103 Player::setup_pieces ()
104 {
105         _pieces.clear ();
106
107         BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
108
109                 if (!i->paths_valid ()) {
110                         continue;
111                 }
112
113                 shared_ptr<Decoder> decoder = decoder_factory (i, _film->log());
114                 FrameRateChange frc (i->active_video_frame_rate(), _film->video_frame_rate());
115
116                 if (!decoder) {
117                         /* Not something that we can decode; e.g. Atmos content */
118                         continue;
119                 }
120
121                 if (decoder->video && _ignore_video) {
122                         decoder->video->set_ignore ();
123                 }
124
125                 if (decoder->audio && _ignore_audio) {
126                         decoder->audio->set_ignore ();
127                 }
128
129                 shared_ptr<DCPDecoder> dcp = dynamic_pointer_cast<DCPDecoder> (decoder);
130                 if (dcp && _play_referenced) {
131                         dcp->set_decode_referenced ();
132                 }
133
134                 shared_ptr<Piece> piece (new Piece (i, decoder, frc));
135                 _pieces.push_back (piece);
136
137                 if (decoder->video) {
138                         decoder->video->Data.connect (bind (&Player::video, this, weak_ptr<Piece> (piece), _1));
139                 }
140
141                 if (decoder->audio) {
142                         decoder->audio->Data.connect (bind (&Player::audio, this, weak_ptr<Piece> (piece), _1, _2));
143                 }
144
145                 if (decoder->subtitle) {
146                         decoder->subtitle->ImageData.connect (bind (&Player::image_subtitle, this, weak_ptr<Piece> (piece), _1));
147                         decoder->subtitle->TextData.connect (bind (&Player::text_subtitle, this, weak_ptr<Piece> (piece), _1));
148                 }
149         }
150
151         BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
152                 if (i->content->audio) {
153                         BOOST_FOREACH (AudioStreamPtr j, i->content->audio->streams()) {
154                                 _stream_states[j] = StreamState (i, i->content->position ());
155                         }
156                 }
157         }
158
159         _have_valid_pieces = true;
160 }
161
162 void
163 Player::playlist_content_changed (weak_ptr<Content> w, int property, bool frequent)
164 {
165         shared_ptr<Content> c = w.lock ();
166         if (!c) {
167                 return;
168         }
169
170         if (
171                 property == ContentProperty::POSITION ||
172                 property == ContentProperty::LENGTH ||
173                 property == ContentProperty::TRIM_START ||
174                 property == ContentProperty::TRIM_END ||
175                 property == ContentProperty::PATH ||
176                 property == VideoContentProperty::FRAME_TYPE ||
177                 property == DCPContentProperty::NEEDS_ASSETS ||
178                 property == DCPContentProperty::NEEDS_KDM ||
179                 property == SubtitleContentProperty::COLOUR ||
180                 property == SubtitleContentProperty::OUTLINE ||
181                 property == SubtitleContentProperty::SHADOW ||
182                 property == SubtitleContentProperty::EFFECT_COLOUR ||
183                 property == FFmpegContentProperty::SUBTITLE_STREAM ||
184                 property == VideoContentProperty::COLOUR_CONVERSION
185                 ) {
186
187                 _have_valid_pieces = false;
188                 Changed (frequent);
189
190         } else if (
191                 property == SubtitleContentProperty::LINE_SPACING ||
192                 property == SubtitleContentProperty::OUTLINE_WIDTH ||
193                 property == SubtitleContentProperty::Y_SCALE ||
194                 property == SubtitleContentProperty::FADE_IN ||
195                 property == SubtitleContentProperty::FADE_OUT ||
196                 property == ContentProperty::VIDEO_FRAME_RATE ||
197                 property == SubtitleContentProperty::USE ||
198                 property == SubtitleContentProperty::X_OFFSET ||
199                 property == SubtitleContentProperty::Y_OFFSET ||
200                 property == SubtitleContentProperty::X_SCALE ||
201                 property == SubtitleContentProperty::FONTS ||
202                 property == VideoContentProperty::CROP ||
203                 property == VideoContentProperty::SCALE ||
204                 property == VideoContentProperty::FADE_IN ||
205                 property == VideoContentProperty::FADE_OUT
206                 ) {
207
208                 Changed (frequent);
209         }
210 }
211
212 void
213 Player::set_video_container_size (dcp::Size s)
214 {
215         _video_container_size = s;
216
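        /* Cache a black frame at the new container size; black_player_video_frame()
           hands this image out whenever we need to emit filler video.
        */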
217         _black_image.reset (new Image (AV_PIX_FMT_RGB24, _video_container_size, true));
218         _black_image->make_black ();
219 }
220
221 void
222 Player::playlist_changed ()
223 {
224         _have_valid_pieces = false;
225         Changed (false);
226 }
227
228 void
229 Player::film_changed (Film::Property p)
230 {
231         /* Here we should notice Film properties that affect our output, and
232            alert listeners that our output now would be different to how it was
233            last time we were run.
234         */
235
236         if (p == Film::CONTAINER) {
237                 Changed (false);
238         } else if (p == Film::VIDEO_FRAME_RATE) {
239                 /* Pieces contain a FrameRateChange which contains the DCP frame rate,
240                    so we need new pieces here.
241                 */
242                 _have_valid_pieces = false;
243                 Changed (false);
244         } else if (p == Film::AUDIO_PROCESSOR) {
245                 if (_film->audio_processor ()) {
246                         _audio_processor = _film->audio_processor()->clone (_film->audio_frame_rate ());
247                 }
248         }
249 }
250
251 list<PositionImage>
252 Player::transform_image_subtitles (list<ImageSubtitle> subs) const
253 {
254         list<PositionImage> all;
255
256         for (list<ImageSubtitle>::const_iterator i = subs.begin(); i != subs.end(); ++i) {
257                 if (!i->image) {
258                         continue;
259                 }
260
261                 /* We will scale the subtitle up to fit _video_container_size */
262                 dcp::Size scaled_size (i->rectangle.width * _video_container_size.width, i->rectangle.height * _video_container_size.height);
263
264                 /* Then we need a corrective translation, consisting of two parts:
265                  *
266                  * 1.  that which is the result of the scaling of the subtitle by _video_container_size; this will be
267                  *     rect.x * _video_container_size.width and rect.y * _video_container_size.height.
268                  *
269                  * 2.  that which shifts the origin of the scale by subtitle_scale to the centre of the subtitle; this will be
270                  *     (width_before_subtitle_scale * (1 - subtitle_x_scale) / 2) and
271                  *     (height_before_subtitle_scale * (1 - subtitle_y_scale) / 2).
272                  *
273                  * Combining these two translations gives these expressions.
274                  */
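                /* A worked example of part 1, with made-up numbers: given a container of
                   1998x1080 and a subtitle rectangle at x = 0.1, y = 0.8, width = 0.5, the
                   image is scaled to 999 pixels wide and positioned at
                   (lrint (199.8), lrint (864)) = (200, 864).
                */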
275
276                 all.push_back (
277                         PositionImage (
278                                 i->image->scale (
279                                         scaled_size,
280                                         dcp::YUV_TO_RGB_REC601,
281                                         i->image->pixel_format (),
282                                         true,
283                                         _fast
284                                         ),
285                                 Position<int> (
286                                         lrint (_video_container_size.width * i->rectangle.x),
287                                         lrint (_video_container_size.height * i->rectangle.y)
288                                         )
289                                 )
290                         );
291         }
292
293         return all;
294 }
295
296 shared_ptr<PlayerVideo>
297 Player::black_player_video_frame () const
298 {
299         return shared_ptr<PlayerVideo> (
300                 new PlayerVideo (
301                         shared_ptr<const ImageProxy> (new RawImageProxy (_black_image)),
302                         Crop (),
303                         optional<double> (),
304                         _video_container_size,
305                         _video_container_size,
306                         EYES_BOTH,
307                         PART_WHOLE,
308                         PresetColourConversion::all().front().conversion
309                 )
310         );
311 }
312
313 Frame
314 Player::dcp_to_content_video (shared_ptr<const Piece> piece, DCPTime t) const
315 {
316         DCPTime s = t - piece->content->position ();
317         s = min (piece->content->length_after_trim(), s);
318         s = max (DCPTime(), s + DCPTime (piece->content->trim_start(), piece->frc));
319
320         /* It might seem more logical here to convert s to a ContentTime (using the FrameRateChange)
321            then convert that ContentTime to frames at the content's rate.  However this fails for
322            situations like content at 29.9978733fps, DCP at 30fps.  The accuracy of the Time type is not
323            enough to distinguish between the two with low values of time (e.g. 3200 in Time units).
324
325            Instead we convert the DCPTime using the DCP video rate then account for any skip/repeat.
326         */
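        /* For example (illustrative numbers only): 25fps content in a 50fps DCP has each
           content frame shown twice, so, assuming FrameRateChange::factor() gives the
           repeat/skip factor (2 here), a DCPTime of 4 seconds becomes 200 DCP frames and
           200 / 2 = 100 content frames.
        */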
327         return s.frames_floor (piece->frc.dcp) / piece->frc.factor ();
328 }
329
330 DCPTime
331 Player::content_video_to_dcp (shared_ptr<const Piece> piece, Frame f) const
332 {
333         /* See comment in dcp_to_content_video */
334         DCPTime const d = DCPTime::from_frames (f * piece->frc.factor(), piece->frc.dcp) - DCPTime (piece->content->trim_start (), piece->frc);
335         return max (DCPTime (), d + piece->content->position ());
336 }
337
338 Frame
339 Player::dcp_to_resampled_audio (shared_ptr<const Piece> piece, DCPTime t) const
340 {
341         DCPTime s = t - piece->content->position ();
342         s = min (piece->content->length_after_trim(), s);
343         /* See notes in dcp_to_content_video */
344         return max (DCPTime (), DCPTime (piece->content->trim_start (), piece->frc) + s).frames_floor (_film->audio_frame_rate ());
345 }
346
347 DCPTime
348 Player::resampled_audio_to_dcp (shared_ptr<const Piece> piece, Frame f) const
349 {
350         /* See comment in dcp_to_content_video */
351         DCPTime const d = DCPTime::from_frames (f, _film->audio_frame_rate()) - DCPTime (piece->content->trim_start (), piece->frc);
352         return max (DCPTime (), d + piece->content->position ());
353 }
354
355 ContentTime
356 Player::dcp_to_content_time (shared_ptr<const Piece> piece, DCPTime t) const
357 {
358         DCPTime s = t - piece->content->position ();
359         s = min (piece->content->length_after_trim(), s);
360         return max (ContentTime (), ContentTime (s, piece->frc) + piece->content->trim_start());
361 }
362
363 DCPTime
364 Player::content_time_to_dcp (shared_ptr<const Piece> piece, ContentTime t) const
365 {
366         return max (DCPTime (), DCPTime (t - piece->content->trim_start(), piece->frc) + piece->content->position());
367 }
368
369 list<shared_ptr<Font> >
370 Player::get_subtitle_fonts ()
371 {
372         if (!_have_valid_pieces) {
373                 setup_pieces ();
374         }
375
376         list<shared_ptr<Font> > fonts;
377         BOOST_FOREACH (shared_ptr<Piece>& p, _pieces) {
378                 if (p->content->subtitle) {
379                         /* XXX: things may go wrong if there are duplicate font IDs
380                            with different font files.
381                         */
382                         list<shared_ptr<Font> > f = p->content->subtitle->fonts ();
383                         copy (f.begin(), f.end(), back_inserter (fonts));
384                 }
385         }
386
387         return fonts;
388 }
389
390 /** Set this player never to produce any video data */
391 void
392 Player::set_ignore_video ()
393 {
394         _ignore_video = true;
395 }
396
397 /** Set this player never to produce any audio data */
398 void
399 Player::set_ignore_audio ()
400 {
401         _ignore_audio = true;
402 }
403
404 /** Set whether or not this player should always burn text subtitles into the image,
405  *  regardless of the content settings.
406  *  @param burn true to always burn subtitles, false to obey content settings.
407  */
408 void
409 Player::set_always_burn_subtitles (bool burn)
410 {
411         _always_burn_subtitles = burn;
412 }
413
414 void
415 Player::set_fast ()
416 {
417         _fast = true;
418         _have_valid_pieces = false;
419 }
420
421 void
422 Player::set_play_referenced ()
423 {
424         _play_referenced = true;
425         _have_valid_pieces = false;
426 }
427
428 list<ReferencedReelAsset>
429 Player::get_reel_assets ()
430 {
431         list<ReferencedReelAsset> a;
432
433         BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
434                 shared_ptr<DCPContent> j = dynamic_pointer_cast<DCPContent> (i);
435                 if (!j) {
436                         continue;
437                 }
438
439                 scoped_ptr<DCPDecoder> decoder;
440                 try {
441                         decoder.reset (new DCPDecoder (j, _film->log()));
442                 } catch (...) {
443                         return a;
444                 }
445
446                 int64_t offset = 0;
447                 BOOST_FOREACH (shared_ptr<dcp::Reel> k, decoder->reels()) {
448
449                         DCPOMATIC_ASSERT (j->video_frame_rate ());
450                         double const cfr = j->video_frame_rate().get();
451                         Frame const trim_start = j->trim_start().frames_round (cfr);
452                         Frame const trim_end = j->trim_end().frames_round (cfr);
453                         int const ffr = _film->video_frame_rate ();
454
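                        /* Trims are applied by moving each referenced asset's entry point
                           forward and shortening its duration; `from' is where this reel
                           lands in the DCP, i.e. the content position plus the length of
                           the reels we have already passed over.
                        */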
455                         DCPTime const from = i->position() + DCPTime::from_frames (offset, _film->video_frame_rate());
456                         if (j->reference_video ()) {
457                                 shared_ptr<dcp::ReelAsset> ra = k->main_picture ();
458                                 DCPOMATIC_ASSERT (ra);
459                                 ra->set_entry_point (ra->entry_point() + trim_start);
460                                 ra->set_duration (ra->duration() - trim_start - trim_end);
461                                 a.push_back (
462                                         ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
463                                         );
464                         }
465
466                         if (j->reference_audio ()) {
467                                 shared_ptr<dcp::ReelAsset> ra = k->main_sound ();
468                                 DCPOMATIC_ASSERT (ra);
469                                 ra->set_entry_point (ra->entry_point() + trim_start);
470                                 ra->set_duration (ra->duration() - trim_start - trim_end);
471                                 a.push_back (
472                                         ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
473                                         );
474                         }
475
476                         if (j->reference_subtitle ()) {
477                                 shared_ptr<dcp::ReelAsset> ra = k->main_subtitle ();
478                                 DCPOMATIC_ASSERT (ra);
479                                 ra->set_entry_point (ra->entry_point() + trim_start);
480                                 ra->set_duration (ra->duration() - trim_start - trim_end);
481                                 a.push_back (
482                                         ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
483                                         );
484                         }
485
486                         /* Assume that main picture duration is the length of the reel */
487                         offset += k->main_picture()->duration ();
488                 }
489         }
490
491         return a;
492 }
493
494 list<shared_ptr<Piece> >
495 Player::overlaps (DCPTime from, DCPTime to, boost::function<bool (Content *)> valid)
496 {
497         if (!_have_valid_pieces) {
498                 setup_pieces ();
499         }
500
501         list<shared_ptr<Piece> > overlaps;
502         BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
503                 if (valid (i->content.get ()) && i->content->position() < to && i->content->end() > from) {
504                         overlaps.push_back (i);
505                 }
506         }
507
508         return overlaps;
509 }
510
511 bool
512 Player::pass ()
513 {
514         if (!_have_valid_pieces) {
515                 setup_pieces ();
516         }
517
518         shared_ptr<Piece> earliest;
519         DCPTime earliest_content;
520
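        /* Find the piece whose decoder is least far through its content, in DCP terms;
           that is the one we will ask to decode some more.
        */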
521         BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
522                 if (!i->done) {
523                         DCPTime const t = i->content->position() + DCPTime (i->decoder->position(), i->frc);
524                         if (!earliest || t < earliest_content) {
525                                 earliest_content = t;
526                                 earliest = i;
527                         }
528                 }
529         }
530
531         if (!earliest) {
532                 /* No more content; fill up to the length of our playlist with silent black */
533
534                 DCPTime const length = _playlist->length ();
535
536                 DCPTime const frame = DCPTime::from_frames (1, _film->video_frame_rate());
537                 DCPTime from;
538                 if (_last_time) {
539                         from = _last_time.get() + frame;
540                 }
541                 for (DCPTime i = from; i < length; i += frame) {
542                         Video (black_player_video_frame (), i);
543                 }
544
545                 DCPTime t = _last_audio_time;
546                 while (t < length) {
547                         DCPTime block = min (DCPTime::from_seconds (0.5), length - t);
548                         Frame const samples = block.frames_round(_film->audio_frame_rate());
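                        /* Only emit a buffer if this block actually contains some samples;
                           the last block before the playlist end can round to zero frames.
                        */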
549                         if (samples) {
550                                 shared_ptr<AudioBuffers> silence (new AudioBuffers (_film->audio_channels(), samples));
551                                 silence->make_silent ();
552                                 Audio (silence, t);
553                         }
554                         t += block;
555                 }
556
557                 return true;
558         }
559
560         earliest->done = earliest->decoder->pass ();
561
562         /* Emit any audio that is ready */
563
564         DCPTime pull_from = _playlist->length ();
565         for (map<AudioStreamPtr, StreamState>::const_iterator i = _stream_states.begin(); i != _stream_states.end(); ++i) {
566                 if (!i->second.piece->done && i->second.last_push_end < pull_from) {
567                         pull_from = i->second.last_push_end;
568                 }
569         }
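        /* Every stream that is still being decoded has pushed audio up to its
           last_push_end, so anything in the merger before pull_from is complete and
           safe to emit.
        */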
570
571 //      cout << "PULL " << to_string(pull_from) << "\n";
572         pair<shared_ptr<AudioBuffers>, DCPTime> audio = _audio_merger.pull (pull_from);
573         if (audio.first->frames() > 0) {
574                 DCPOMATIC_ASSERT (audio.second >= _last_audio_time);
575                 DCPTime t = _last_audio_time;
576                 while (t < audio.second) {
577                         /* Silence up to the time of this new audio */
578                         DCPTime block = min (DCPTime::from_seconds (0.5), audio.second - t);
579                         shared_ptr<AudioBuffers> silence (new AudioBuffers (_film->audio_channels(), block.frames_round(_film->audio_frame_rate())));
580                         silence->make_silent ();
581                         Audio (silence, t);
582                         t += block;
583                 }
584
585                 Audio (audio.first, audio.second);
586                 _last_audio_time = audio.second + DCPTime::from_frames(audio.first->frames(), _film->audio_frame_rate());
587         }
588
589         return false;
590 }
591
592 void
593 Player::video (weak_ptr<Piece> wp, ContentVideo video)
594 {
595         shared_ptr<Piece> piece = wp.lock ();
596         if (!piece) {
597                 return;
598         }
599
600         /* Time and period of the frame we will emit */
601         DCPTime const time = content_video_to_dcp (piece, video.frame);
602         DCPTimePeriod const period (time, time + DCPTime::from_frames (1, _film->video_frame_rate()));
603
604         /* Discard if it's outside the content's period */
605         if (time < piece->content->position() || time >= piece->content->end()) {
606                 return;
607         }
608
609         /* Get any subtitles */
610
611         optional<PositionImage> subtitles;
612
613         for (list<pair<PlayerSubtitles, DCPTimePeriod> >::const_iterator i = _subtitles.begin(); i != _subtitles.end(); ++i) {
614
615                 if (!i->second.overlap (period)) {
616                         continue;
617                 }
618
619                 list<PositionImage> sub_images;
620
621                 /* Image subtitles */
622                 list<PositionImage> c = transform_image_subtitles (i->first.image);
623                 copy (c.begin(), c.end(), back_inserter (sub_images));
624
625                 /* Text subtitles (rendered to an image) */
626                 if (!i->first.text.empty ()) {
627                         list<PositionImage> s = render_subtitles (i->first.text, i->first.fonts, _video_container_size, time);
628                         copy (s.begin (), s.end (), back_inserter (sub_images));
629                 }
630
631                 if (!sub_images.empty ()) {
632                         subtitles = merge (sub_images);
633                 }
634         }
635
636         /* Fill gaps */
637
638         if (_last_time) {
639                 /* XXX: this may not work for 3D */
640                 DCPTime const frame = DCPTime::from_frames (1, _film->video_frame_rate());
641                 for (DCPTime i = _last_time.get() + frame; i < time; i += frame) {
642                         if (_playlist->video_content_at(i) && _last_video) {
643                                 Video (shared_ptr<PlayerVideo> (new PlayerVideo (*_last_video)), i);
644                         } else {
645                                 Video (black_player_video_frame (), i);
646                         }
647                 }
648         }
649
650         _last_video.reset (
651                 new PlayerVideo (
652                         video.image,
653                         piece->content->video->crop (),
654                         piece->content->video->fade (video.frame),
655                         piece->content->video->scale().size (
656                                 piece->content->video, _video_container_size, _film->frame_size ()
657                                 ),
658                         _video_container_size,
659                         video.eyes,
660                         video.part,
661                         piece->content->video->colour_conversion ()
662                         )
663                 );
664
665         if (subtitles) {
666                 _last_video->set_subtitle (subtitles.get ());
667         }
668
669         _last_time = time;
670
671         Video (_last_video, *_last_time);
672
673         /* Discard any subtitles we no longer need */
674
675         for (list<pair<PlayerSubtitles, DCPTimePeriod> >::iterator i = _subtitles.begin (); i != _subtitles.end(); ) {
676                 list<pair<PlayerSubtitles, DCPTimePeriod> >::iterator tmp = i;
677                 ++tmp;
678
679                 if (i->second.to < time) {
680                         _subtitles.erase (i);
681                 }
682
683                 i = tmp;
684         }
685 }
686
687 void
688 Player::audio (weak_ptr<Piece> wp, AudioStreamPtr stream, ContentAudio content_audio)
689 {
690         shared_ptr<Piece> piece = wp.lock ();
691         if (!piece) {
692                 return;
693         }
694
695         shared_ptr<AudioContent> content = piece->content->audio;
696         DCPOMATIC_ASSERT (content);
697
698         /* Gain */
699         if (content->gain() != 0) {
700                 shared_ptr<AudioBuffers> gain (new AudioBuffers (content_audio.audio));
701                 gain->apply_gain (content->gain ());
702                 content_audio.audio = gain;
703         }
704
705         /* Resample */
706         if (stream->frame_rate() != content->resampled_frame_rate()) {
707                 shared_ptr<Resampler> r = resampler (content, stream, true);
708                 pair<shared_ptr<const AudioBuffers>, Frame> ro = r->run (content_audio.audio, content_audio.frame);
709                 content_audio.audio = ro.first;
710                 content_audio.frame = ro.second;
711         }
712
713         /* XXX: end-trimming used to be checked here */
714
715         /* Compute time in the DCP */
716         DCPTime time = resampled_audio_to_dcp (piece, content_audio.frame) + DCPTime::from_seconds (content->delay() / 1000.0);
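        /* (delay() is in milliseconds, hence the division by 1000; e.g. a delay of 500
           shifts this audio half a second later in the DCP.)
        */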
717
718         /* Remove anything that comes before the start of the content */
719         if (time < piece->content->position()) {
720                 DCPTime const discard_time = piece->content->position() - time;
721                 Frame discard_frames = discard_time.frames_round(_film->audio_frame_rate());
722                 Frame remaining_frames = content_audio.audio->frames() - discard_frames;
723                 if (remaining_frames <= 0) {
724                         /* This audio is entirely discarded */
725                         return;
726                 }
727                 shared_ptr<AudioBuffers> cut (new AudioBuffers (content_audio.audio->channels(), remaining_frames));
728                 cut->copy_from (content_audio.audio.get(), remaining_frames, discard_frames, 0);
729                 content_audio.audio = cut;
730                 time += discard_time;
731         }
732
733         /* Remap channels */
734         shared_ptr<AudioBuffers> dcp_mapped (new AudioBuffers (_film->audio_channels(), content_audio.audio->frames()));
735         dcp_mapped->make_silent ();
736
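        /* Mix each content channel into every DCP channel for which its mapping has a
           non-zero gain, scaling by that gain.
        */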
737         AudioMapping map = stream->mapping ();
738         for (int i = 0; i < map.input_channels(); ++i) {
739                 for (int j = 0; j < dcp_mapped->channels(); ++j) {
740                         if (map.get (i, static_cast<dcp::Channel> (j)) > 0) {
741                                 dcp_mapped->accumulate_channel (
742                                         content_audio.audio.get(),
743                                         i,
744                                         static_cast<dcp::Channel> (j),
745                                         map.get (i, static_cast<dcp::Channel> (j))
746                                         );
747                         }
748                 }
749         }
750
751         content_audio.audio = dcp_mapped;
752
753         if (_audio_processor) {
754                 content_audio.audio = _audio_processor->run (content_audio.audio, _film->audio_channels ());
755         }
756
757 //      cout << "PUSH " << content_audio.audio->frames() << " @ " << to_string(time) << "\n";
758         _audio_merger.push (content_audio.audio, time);
759
760         DCPOMATIC_ASSERT (_stream_states.find (stream) != _stream_states.end ());
761         _stream_states[stream].last_push_end = time + DCPTime::from_frames (content_audio.audio->frames(), _film->audio_frame_rate());
762 }
763
764 void
765 Player::image_subtitle (weak_ptr<Piece> wp, ContentImageSubtitle subtitle)
766 {
767         shared_ptr<Piece> piece = wp.lock ();
768         if (!piece) {
769                 return;
770         }
771
772         /* Apply content's subtitle offsets */
773         subtitle.sub.rectangle.x += piece->content->subtitle->x_offset ();
774         subtitle.sub.rectangle.y += piece->content->subtitle->y_offset ();
775
776         /* Apply content's subtitle scale */
777         subtitle.sub.rectangle.width *= piece->content->subtitle->x_scale ();
778         subtitle.sub.rectangle.height *= piece->content->subtitle->y_scale ();
779
780         /* Apply a corrective translation to keep the subtitle centred after that scale */
781         subtitle.sub.rectangle.x -= subtitle.sub.rectangle.width * (piece->content->subtitle->x_scale() - 1);
782         subtitle.sub.rectangle.y -= subtitle.sub.rectangle.height * (piece->content->subtitle->y_scale() - 1);
783
784         PlayerSubtitles ps;
785         ps.image.push_back (subtitle.sub);
786         DCPTimePeriod period (content_time_to_dcp (piece, subtitle.period().from), content_time_to_dcp (piece, subtitle.period().to));
787
788         if (piece->content->subtitle->use() && (piece->content->subtitle->burn() || _always_burn_subtitles)) {
789                 _subtitles.push_back (make_pair (ps, period));
790         } else {
791                 Subtitle (ps, period);
792         }
793 }
794
795 void
796 Player::text_subtitle (weak_ptr<Piece> wp, ContentTextSubtitle subtitle)
797 {
798         shared_ptr<Piece> piece = wp.lock ();
799         if (!piece) {
800                 return;
801         }
802
803         PlayerSubtitles ps;
804         DCPTimePeriod const period (content_time_to_dcp (piece, subtitle.period().from), content_time_to_dcp (piece, subtitle.period().to));
805
806         BOOST_FOREACH (dcp::SubtitleString s, subtitle.subs) {
807                 s.set_h_position (s.h_position() + piece->content->subtitle->x_offset ());
808                 s.set_v_position (s.v_position() + piece->content->subtitle->y_offset ());
809                 float const xs = piece->content->subtitle->x_scale();
810                 float const ys = piece->content->subtitle->y_scale();
811                 float size = s.size();
812
813                 /* Adjust size to express the common part of the scaling;
814                    i.e. we multiply size by max (xs, ys); if xs = ys = 0.5 the size is halved.
815                 */
816                 if (xs > 1e-5 && ys > 1e-5) {
817                         size *= 1 / min (1 / xs, 1 / ys);
818                 }
819                 s.set_size (size);
820
821                 /* Then express aspect ratio changes */
822                 if (fabs (1.0 - xs / ys) > dcp::ASPECT_ADJUST_EPSILON) {
823                         s.set_aspect_adjust (xs / ys);
824                 }
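                /* Illustrative numbers: with xs = 0.8 and ys = 0.5 the size is multiplied
                   by 0.8 and aspect_adjust becomes 1.6, so the residual horizontal stretch
                   is expressed separately from the overall size change.
                */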
825
826                 s.set_in (dcp::Time(period.from.seconds(), 1000));
827                 s.set_out (dcp::Time(period.to.seconds(), 1000));
828                 ps.text.push_back (SubtitleString (s, piece->content->subtitle->outline_width()));
829                 ps.add_fonts (piece->content->subtitle->fonts ());
830         }
831
832         if (piece->content->subtitle->use() && (piece->content->subtitle->burn() || _always_burn_subtitles)) {
833                 _subtitles.push_back (make_pair (ps, period));
834         } else {
835                 Subtitle (ps, period);
836         }
837 }
838
839 void
840 Player::seek (DCPTime time, bool accurate)
841 {
842         BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
843                 if (i->content->position() <= time && time < i->content->end()) {
844                         i->decoder->seek (dcp_to_content_time (i, time), accurate);
845                         i->done = false;
846                 }
847         }
848
849         if (accurate) {
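                /* Pretend we have just emitted a frame one video frame before the seek
                   time, so that gap-filling starts from the seek point rather than from
                   zero.
                */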
850                 _last_time = time - DCPTime::from_frames (1, _film->video_frame_rate ());
851         } else {
852                 _last_time = optional<DCPTime> ();
853         }
854 }
855
856 shared_ptr<Resampler>
857 Player::resampler (shared_ptr<const AudioContent> content, AudioStreamPtr stream, bool create)
858 {
859         ResamplerMap::const_iterator i = _resamplers.find (make_pair (content, stream));
860         if (i != _resamplers.end ()) {
861                 return i->second;
862         }
863
864         if (!create) {
865                 return shared_ptr<Resampler> ();
866         }
867
868         LOG_GENERAL (
869                 "Creating new resampler from %1 to %2 with %3 channels",
870                 stream->frame_rate(),
871                 content->resampled_frame_rate(),
872                 stream->channels()
873                 );
874
875         shared_ptr<Resampler> r (
876                 new Resampler (stream->frame_rate(), content->resampled_frame_rate(), stream->channels())
877                 );
878
879         _resamplers[make_pair(content, stream)] = r;
880         return r;
881 }