X-Git-Url: https://git.carlh.net/gitweb/?a=blobdiff_plain;f=src%2Flib%2Fffmpeg_content.cc;h=ddf4548b41959fe1c8eac9ea304a9d85da77c201;hb=cbd4450197a083bf58bda510e626f73ba583cb66;hp=bb4e022308dc9882dff57d5a0959d0a014cf9cab;hpb=f90d74f7a0382f0dc63eef81bd553d7a7b38edb2;p=dcpomatic.git

diff --git a/src/lib/ffmpeg_content.cc b/src/lib/ffmpeg_content.cc
index bb4e02230..ddf4548b4 100644
--- a/src/lib/ffmpeg_content.cc
+++ b/src/lib/ffmpeg_content.cc
@@ -1,28 +1,26 @@
 /*
-    Copyright (C) 2013-2014 Carl Hetherington
+    Copyright (C) 2013-2016 Carl Hetherington

-    This program is free software; you can redistribute it and/or modify
+    This file is part of DCP-o-matic.
+
+    DCP-o-matic is free software; you can redistribute it and/or modify
     it under the terms of the GNU General Public License as published by
     the Free Software Foundation; either version 2 of the License, or
     (at your option) any later version.

-    This program is distributed in the hope that it will be useful,
+    DCP-o-matic is distributed in the hope that it will be useful,
     but WITHOUT ANY WARRANTY; without even the implied warranty of
     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
     GNU General Public License for more details.

     You should have received a copy of the GNU General Public License
-    along with this program; if not, write to the Free Software
-    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+    along with DCP-o-matic.  If not, see <http://www.gnu.org/licenses/>.

 */

-extern "C" {
-#include
-}
-#include
-#include
 #include "ffmpeg_content.h"
+#include "video_content.h"
+#include "audio_content.h"
 #include "ffmpeg_examiner.h"
 #include "ffmpeg_subtitle_stream.h"
 #include "ffmpeg_audio_stream.h"
@@ -34,45 +32,64 @@ extern "C" {
 #include "log.h"
 #include "exceptions.h"
 #include "frame_rate_change.h"
-#include "safe_stringstream.h"
+#include "caption_content.h"
+#include
+#include
 extern "C" {
+#include
+#include
 }
+#include
+#include
+#include
 #include "i18n.h"

-#define LOG_GENERAL(...) film->log()->log (String::compose (__VA_ARGS__), Log::TYPE_GENERAL);
+#define LOG_GENERAL(...) film->log()->log (String::compose (__VA_ARGS__), LogEntry::TYPE_GENERAL);

 using std::string;
 using std::vector;
 using std::list;
 using std::cout;
 using std::pair;
+using std::make_pair;
+using std::max;
 using boost::shared_ptr;
 using boost::dynamic_pointer_cast;
+using boost::optional;
 using dcp::raw_convert;

 int const FFmpegContentProperty::SUBTITLE_STREAMS = 100;
 int const FFmpegContentProperty::SUBTITLE_STREAM = 101;
-int const FFmpegContentProperty::AUDIO_STREAMS = 102;
-int const FFmpegContentProperty::AUDIO_STREAM = 103;
-int const FFmpegContentProperty::FILTERS = 104;
-
-FFmpegContent::FFmpegContent (shared_ptr<const Film> f, boost::filesystem::path p)
-    : Content (f, p)
-    , VideoContent (f, p)
-    , AudioContent (f, p)
-    , SubtitleContent (f, p)
+int const FFmpegContentProperty::FILTERS = 102;
+
+FFmpegContent::FFmpegContent (shared_ptr<const Film> film, boost::filesystem::path p)
+    : Content (film, p)
 {

 }

-FFmpegContent::FFmpegContent (shared_ptr<const Film> f, cxml::ConstNodePtr node, int version, list<string>& notes)
-    : Content (f, node)
-    , VideoContent (f, node, version)
-    , AudioContent (f, node)
-    , SubtitleContent (f, node, version)
+template <class T>
+optional<T>
+get_optional_enum (cxml::ConstNodePtr node, string name)
+{
+    optional<int> const v = node->optional_number_child<int>(name);
+    if (!v) {
+        return optional<T>();
+    }
+    return static_cast<T>(*v);
+}
+
+FFmpegContent::FFmpegContent (shared_ptr<const Film> film, cxml::ConstNodePtr node, int version, list<string>& notes)
+    : Content (film, node)
 {
+    video = VideoContent::from_xml (this, node, version);
+    audio = AudioContent::from_xml (this, node, version);
+    caption = CaptionContent::from_xml (this, node, version);
+
     list<cxml::NodePtr> c = node->node_children ("SubtitleStream");
     for (list<cxml::NodePtr>::const_iterator i = c.begin(); i != c.end(); ++i) {
-        _subtitle_streams.push_back (shared_ptr<FFmpegSubtitleStream> (new FFmpegSubtitleStream (*i)));
+        _subtitle_streams.push_back (shared_ptr<FFmpegSubtitleStream> (new FFmpegSubtitleStream (*i, version)));
         if ((*i)->optional_number_child<int> ("Selected")) {
             _subtitle_stream = _subtitle_streams.back ();
         }
@@ -80,9 +97,11 @@ FFmpegContent::FFmpegContent (shared_ptr<const Film> f, cxml::ConstNodePtr node,

     c = node->node_children ("AudioStream");
     for (list<cxml::NodePtr>::const_iterator i = c.begin(); i != c.end(); ++i) {
-        _audio_streams.push_back (shared_ptr<FFmpegAudioStream> (new FFmpegAudioStream (*i, version)));
-        if ((*i)->optional_number_child<int> ("Selected")) {
-            _audio_stream = _audio_streams.back ();
+        shared_ptr<FFmpegAudioStream> as (new FFmpegAudioStream (*i, version));
+        audio->add_stream (as);
+        if (version < 11 && !(*i)->optional_node_child ("Selected")) {
+            /* This is an old file and this stream is not selected, so un-map it */
+            as->set_mapping (AudioMapping (as->channels (), MAX_DCP_AUDIO_CHANNELS));
         }
     }

@@ -96,44 +115,42 @@ FFmpegContent::FFmpegContent (shared_ptr<const Film> f, cxml::ConstNodePtr node,
         }
     }

-    _first_video = node->optional_number_child ("FirstVideo");
+    optional<ContentTime::Type> const f = node->optional_number_child<ContentTime::Type> ("FirstVideo");
+    if (f) {
+        _first_video = ContentTime (f.get ());
+    }
+
+    _color_range = get_optional_enum<AVColorRange>(node, "ColorRange");
+    _color_primaries = get_optional_enum<AVColorPrimaries>(node, "ColorPrimaries");
+    _color_trc = get_optional_enum<AVColorTransferCharacteristic>(node, "ColorTransferCharacteristic");
+    _colorspace = get_optional_enum<AVColorSpace>(node, "Colorspace");
+    _bits_per_pixel = node->optional_number_child<int> ("BitsPerPixel");
+
 }

-FFmpegContent::FFmpegContent (shared_ptr<const Film> f, vector<boost::shared_ptr<Content> > c)
-    : Content (f, c)
-    , VideoContent (f, c)
-    , AudioContent (f, c)
-    , SubtitleContent (f, c)
+void
+FFmpegContent::as_xml (xmlpp::Node* node, bool with_paths) const
 {
-    shared_ptr<FFmpegContent> ref = dynamic_pointer_cast<FFmpegContent> (c[0]);
-    assert (ref);
node->add_child("Type")->add_child_text ("FFmpeg"); + Content::as_xml (node, with_paths); - for (size_t i = 0; i < c.size(); ++i) { - shared_ptr fc = dynamic_pointer_cast (c[i]); - if (fc->use_subtitles() && *(fc->_subtitle_stream.get()) != *(ref->_subtitle_stream.get())) { - throw JoinError (_("Content to be joined must use the same subtitle stream.")); - } + if (video) { + video->as_xml (node); + } + + if (audio) { + audio->as_xml (node); - if (*(fc->_audio_stream.get()) != *(ref->_audio_stream.get())) { - throw JoinError (_("Content to be joined must use the same audio stream.")); + BOOST_FOREACH (AudioStreamPtr i, audio->streams ()) { + shared_ptr f = dynamic_pointer_cast (i); + DCPOMATIC_ASSERT (f); + f->as_xml (node->add_child("AudioStream")); } } - _subtitle_streams = ref->subtitle_streams (); - _subtitle_stream = ref->subtitle_stream (); - _audio_streams = ref->audio_streams (); - _audio_stream = ref->audio_stream (); - _first_video = ref->_first_video; -} - -void -FFmpegContent::as_xml (xmlpp::Node* node) const -{ - node->add_child("Type")->add_child_text ("FFmpeg"); - Content::as_xml (node); - VideoContent::as_xml (node); - AudioContent::as_xml (node); - SubtitleContent::as_xml (node); + if (only_caption()) { + only_caption()->as_xml (node); + } boost::mutex::scoped_lock lm (_mutex); @@ -145,14 +162,6 @@ FFmpegContent::as_xml (xmlpp::Node* node) const (*i)->as_xml (t); } - for (vector >::const_iterator i = _audio_streams.begin(); i != _audio_streams.end(); ++i) { - xmlpp::Node* t = node->add_child("AudioStream"); - if (_audio_stream && *i == _audio_stream) { - t->add_child("Selected")->add_child_text("1"); - } - (*i)->as_xml (t); - } - for (vector::const_iterator i = _filters.begin(); i != _filters.end(); ++i) { node->add_child("Filter")->add_child_text ((*i)->id ()); } @@ -160,6 +169,22 @@ FFmpegContent::as_xml (xmlpp::Node* node) const if (_first_video) { node->add_child("FirstVideo")->add_child_text (raw_convert (_first_video.get().get())); } + + if (_color_range) { + node->add_child("ColorRange")->add_child_text (raw_convert (static_cast (*_color_range))); + } + if (_color_primaries) { + node->add_child("ColorPrimaries")->add_child_text (raw_convert (static_cast (*_color_primaries))); + } + if (_color_trc) { + node->add_child("ColorTransferCharacteristic")->add_child_text (raw_convert (static_cast (*_color_trc))); + } + if (_colorspace) { + node->add_child("Colorspace")->add_child_text (raw_convert (static_cast (*_colorspace))); + } + if (_bits_per_pixel) { + node->add_child("BitsPerPixel")->add_child_text (raw_convert (*_bits_per_pixel)); + } } void @@ -169,48 +194,93 @@ FFmpegContent::examine (shared_ptr job) Content::examine (job); - shared_ptr examiner (new FFmpegExaminer (shared_from_this ())); - take_from_video_examiner (examiner); + shared_ptr examiner (new FFmpegExaminer (shared_from_this (), job)); + + if (examiner->has_video ()) { + video.reset (new VideoContent (this)); + video->take_from_examiner (examiner); + } - shared_ptr film = _film.lock (); - assert (film); + boost::filesystem::path first_path = path (0); { boost::mutex::scoped_lock lm (_mutex); + if (examiner->has_video ()) { + _first_video = examiner->first_video (); + _color_range = examiner->color_range (); + _color_primaries = examiner->color_primaries (); + _color_trc = examiner->color_trc (); + _colorspace = examiner->colorspace (); + _bits_per_pixel = examiner->bits_per_pixel (); + + if (examiner->rotation()) { + double rot = *examiner->rotation (); + if (fabs (rot - 180) < 1.0) { + 
+                    _filters.push_back (Filter::from_id ("vflip"));
+                    _filters.push_back (Filter::from_id ("hflip"));
+                } else if (fabs (rot - 90) < 1.0) {
+                    _filters.push_back (Filter::from_id ("90clock"));
+                } else if (fabs (rot - 270) < 1.0) {
+                    _filters.push_back (Filter::from_id ("90anticlock"));
+                }
+            }
+        }
+
+        if (!examiner->audio_streams().empty ()) {
+            audio.reset (new AudioContent (this));
+
+            BOOST_FOREACH (shared_ptr<FFmpegAudioStream> i, examiner->audio_streams ()) {
+                audio->add_stream (i);
+            }
+
+            AudioStreamPtr as = audio->streams().front();
+            AudioMapping m = as->mapping ();
+            film()->make_audio_mapping_default (m, first_path);
+            as->set_mapping (m);
+        }
+
         _subtitle_streams = examiner->subtitle_streams ();
         if (!_subtitle_streams.empty ()) {
+            caption.clear ();
+            caption.push_back (shared_ptr<CaptionContent> (new CaptionContent (this)));
             _subtitle_stream = _subtitle_streams.front ();
         }
-
-        _audio_streams = examiner->audio_streams ();
-        if (!_audio_streams.empty ()) {
-            _audio_stream = _audio_streams.front ();
-        }
-
-        _first_video = examiner->first_video ();
+    }
+
+    if (examiner->has_video ()) {
+        set_default_colour_conversion ();
     }

     signal_changed (FFmpegContentProperty::SUBTITLE_STREAMS);
     signal_changed (FFmpegContentProperty::SUBTITLE_STREAM);
-    signal_changed (FFmpegContentProperty::AUDIO_STREAMS);
-    signal_changed (FFmpegContentProperty::AUDIO_STREAM);
-    signal_changed (AudioContentProperty::AUDIO_CHANNELS);
 }

 string
 FFmpegContent::summary () const
 {
-    /* Get the string() here so that the name does not have quotes around it */
-    return String::compose (_("%1 [movie]"), path_summary ());
+    if (video && audio) {
+        return String::compose (_("%1 [movie]"), path_summary ());
+    } else if (video) {
+        return String::compose (_("%1 [video]"), path_summary ());
+    } else if (audio) {
+        return String::compose (_("%1 [audio]"), path_summary ());
+    }
+
+    return path_summary ();
 }

 string
 FFmpegContent::technical_summary () const
 {
-    string as = "none";
-    if (_audio_stream) {
-        as = _audio_stream->technical_summary ();
+    string as = "";
+    BOOST_FOREACH (shared_ptr<FFmpegAudioStream> i, ffmpeg_audio_streams ()) {
+        as += i->technical_summary () + " " ;
+    }
+
+    if (as.empty ()) {
+        as = "none";
     }

     string ss = "none";
@@ -219,28 +289,20 @@ FFmpegContent::technical_summary () const
     }

     string filt = Filter::ffmpeg_string (_filters);
-
-    return Content::technical_summary() + " - "
-        + VideoContent::technical_summary() + " - "
-        + AudioContent::technical_summary() + " - "
-        + String::compose (
-        "ffmpeg: audio %1, subtitle %2, filters %3", as, ss, filt
-        );
-}

-string
-FFmpegContent::information () const
-{
-    if (video_length() == ContentTime (0) || video_frame_rate() == 0) {
-        return "";
+    string s = Content::technical_summary ();
+
+    if (video) {
+        s += " - " + video->technical_summary ();
+    }
+
+    if (audio) {
+        s += " - " + audio->technical_summary ();
     }
-
-    SafeStringStream s;
-
-    s << String::compose (_("%1 frames; %2 frames per second"), video_length_after_3d_combine().frames (video_frame_rate()), video_frame_rate()) << "\n";
-    s << VideoContent::information ();

-    return s.str ();
+    return s + String::compose (
+        "ffmpeg: audio %1 subtitle %2 filters %3", as, ss, filt
+        );
 }

 void
@@ -254,51 +316,6 @@ FFmpegContent::set_subtitle_stream (shared_ptr<FFmpegSubtitleStream> s)

     signal_changed (FFmpegContentProperty::SUBTITLE_STREAM);
 }

-void
-FFmpegContent::set_audio_stream (shared_ptr<FFmpegAudioStream> s)
-{
-    {
-        boost::mutex::scoped_lock lm (_mutex);
-        _audio_stream = s;
-    }
-
-    signal_changed (FFmpegContentProperty::AUDIO_STREAM);
-}
-
-ContentTime
-FFmpegContent::audio_length () const
-{
-    if (!audio_stream ()) {
-        return ContentTime ();
-    }
-
-    return video_length ();
-}
-
-int
-FFmpegContent::audio_channels () const
-{
-    boost::mutex::scoped_lock lm (_mutex);
-
-    if (!_audio_stream) {
-        return 0;
-    }
-
-    return _audio_stream->channels ();
-}
-
-int
-FFmpegContent::audio_frame_rate () const
-{
-    boost::mutex::scoped_lock lm (_mutex);
-
-    if (!_audio_stream) {
-        return 0;
-    }
-
-    return _audio_stream->frame_rate ();
-}
-
 bool
 operator== (FFmpegStream const & a, FFmpegStream const & b)
 {
@@ -314,21 +331,19 @@ operator!= (FFmpegStream const & a, FFmpegStream const & b)
 DCPTime
 FFmpegContent::full_length () const
 {
-    shared_ptr<const Film> film = _film.lock ();
-    assert (film);
-    return DCPTime (video_length_after_3d_combine(), FrameRateChange (video_frame_rate (), film->video_frame_rate ()));
-}
+    FrameRateChange const frc (active_video_frame_rate (), film()->video_frame_rate ());
+    if (video) {
+        return DCPTime::from_frames (llrint (video->length_after_3d_combine() * frc.factor()), film()->video_frame_rate());
+    }

-AudioMapping
-FFmpegContent::audio_mapping () const
-{
-    boost::mutex::scoped_lock lm (_mutex);
+    DCPOMATIC_ASSERT (audio);

-    if (!_audio_stream) {
-        return AudioMapping ();
+    DCPTime longest;
+    BOOST_FOREACH (AudioStreamPtr i, audio->streams ()) {
+        longest = max (longest, DCPTime::from_frames (llrint (i->length() / frc.speed_up), i->frame_rate()));
     }

-    return _audio_stream->mapping ();
+    return longest;
 }

 void
@@ -342,76 +357,242 @@ FFmpegContent::set_filters (vector<Filter const *> const & filters)

     signal_changed (FFmpegContentProperty::FILTERS);
 }

-void
-FFmpegContent::set_audio_mapping (AudioMapping m)
-{
-    audio_stream()->set_mapping (m);
-    AudioContent::set_audio_mapping (m);
-}
-
 string
 FFmpegContent::identifier () const
 {
-    SafeStringStream s;
+    string s = Content::identifier();
+
+    if (video) {
+        s += "_" + video->identifier();
+    }

-    s << VideoContent::identifier();
+    if (only_caption() && only_caption()->use() && only_caption()->burn()) {
+        s += "_" + only_caption()->identifier();
+    }

     boost::mutex::scoped_lock lm (_mutex);

     if (_subtitle_stream) {
-        s << "_" << _subtitle_stream->identifier ();
+        s += "_" + _subtitle_stream->identifier ();
     }

     for (vector<Filter const *>::const_iterator i = _filters.begin(); i != _filters.end(); ++i) {
-        s << "_" << (*i)->id ();
+        s += "_" + (*i)->id ();
     }

-    return s.str ();
+    return s;
 }

-boost::filesystem::path
-FFmpegContent::audio_analysis_path () const
+void
+FFmpegContent::set_default_colour_conversion ()
 {
-    shared_ptr<const Film> film = _film.lock ();
-    if (!film) {
-        return boost::filesystem::path ();
-    }
+    DCPOMATIC_ASSERT (video);

-    /* We need to include the stream ID in this path so that we get different
-       analyses for each stream.
-    */
+    dcp::Size const s = video->size ();

-    boost::filesystem::path p = film->audio_analysis_dir ();
-    string name = digest ();
-    if (audio_stream ()) {
-        name += "_" + audio_stream()->identifier ();
+    boost::mutex::scoped_lock lm (_mutex);
+
+    switch (_colorspace.get_value_or(AVCOL_SPC_UNSPECIFIED)) {
+    case AVCOL_SPC_RGB:
+        video->set_colour_conversion (PresetColourConversion::from_id ("srgb").conversion);
+        break;
+    case AVCOL_SPC_BT709:
+        video->set_colour_conversion (PresetColourConversion::from_id ("rec709").conversion);
+        break;
+    case AVCOL_SPC_BT470BG:
+    case AVCOL_SPC_SMPTE170M:
+    case AVCOL_SPC_SMPTE240M:
+        video->set_colour_conversion (PresetColourConversion::from_id ("rec601").conversion);
+        break;
+    case AVCOL_SPC_BT2020_CL:
+    case AVCOL_SPC_BT2020_NCL:
+        video->set_colour_conversion (PresetColourConversion::from_id ("rec2020").conversion);
+        break;
+    default:
+        if (s.width < 1080) {
+            video->set_colour_conversion (PresetColourConversion::from_id ("rec601").conversion);
+        } else {
+            video->set_colour_conversion (PresetColourConversion::from_id ("rec709").conversion);
+        }
+        break;
     }
-    p /= name;
-    return p;
 }

-list<ContentTimePeriod>
-FFmpegContent::subtitles_during (ContentTimePeriod period, bool starting) const
+void
+FFmpegContent::add_properties (list<UserProperty>& p) const
 {
-    list<ContentTimePeriod> d;
-
-    shared_ptr<FFmpegSubtitleStream> stream = subtitle_stream ();
-    if (!stream) {
-        return d;
+    Content::add_properties (p);
+
+    if (video) {
+        video->add_properties (p);
+
+        if (_bits_per_pixel) {
+            int const sub = 219 * pow (2, _bits_per_pixel.get() - 8);
+            int const total = pow (2, _bits_per_pixel.get());
+
+            switch (_color_range.get_value_or(AVCOL_RANGE_UNSPECIFIED)) {
+            case AVCOL_RANGE_UNSPECIFIED:
+                /// TRANSLATORS: this means that the range of pixel values used in this
+                /// file is unknown (not specified in the file).
+                p.push_back (UserProperty (UserProperty::VIDEO, _("Colour range"), _("Unspecified")));
+                break;
+            case AVCOL_RANGE_MPEG:
+                /// TRANSLATORS: this means that the range of pixel values used in this
+                /// file is limited, so that not all possible values are valid.
+                p.push_back (
+                    UserProperty (
+                        UserProperty::VIDEO, _("Colour range"), String::compose (_("Limited (%1-%2)"), (total - sub) / 2, (total + sub) / 2)
+                        )
+                    );
+                break;
+            case AVCOL_RANGE_JPEG:
+                /// TRANSLATORS: this means that the range of pixel values used in this
+                /// file is full, so that all possible pixel values are valid.
+                p.push_back (UserProperty (UserProperty::VIDEO, _("Colour range"), String::compose (_("Full (0-%1)"), total)));
+                break;
+            default:
+                DCPOMATIC_ASSERT (false);
+            }
+        } else {
+            switch (_color_range.get_value_or(AVCOL_RANGE_UNSPECIFIED)) {
+            case AVCOL_RANGE_UNSPECIFIED:
+                /// TRANSLATORS: this means that the range of pixel values used in this
+                /// file is unknown (not specified in the file).
+                p.push_back (UserProperty (UserProperty::VIDEO, _("Colour range"), _("Unspecified")));
+                break;
+            case AVCOL_RANGE_MPEG:
+                /// TRANSLATORS: this means that the range of pixel values used in this
+                /// file is limited, so that not all possible values are valid.
+                p.push_back (UserProperty (UserProperty::VIDEO, _("Colour range"), _("Limited")));
+                break;
+            case AVCOL_RANGE_JPEG:
+                /// TRANSLATORS: this means that the range of pixel values used in this
+                /// file is full, so that all possible pixel values are valid.
+                p.push_back (UserProperty (UserProperty::VIDEO, _("Colour range"), _("Full")));
+                break;
+            default:
+                DCPOMATIC_ASSERT (false);
+            }
+        }
+
+        char const * primaries[] = {
+            _("Unspecified"),
+            _("BT709"),
+            _("Unspecified"),
+            _("Unspecified"),
+            _("BT470M"),
+            _("BT470BG"),
+            _("SMPTE 170M (BT601)"),
+            _("SMPTE 240M"),
+            _("Film"),
+            _("BT2020"),
+            _("SMPTE ST 428-1 (CIE 1931 XYZ)"),
+            _("SMPTE ST 431-2 (2011)"),
+            _("SMPTE ST 432-1 D65 (2010)"), // 12
+            "", // 13
+            "", // 14
+            "", // 15
+            "", // 16
+            "", // 17
+            "", // 18
+            "", // 19
+            "", // 20
+            "", // 21
+            _("JEDEC P22")
+        };
+
+        DCPOMATIC_ASSERT (AVCOL_PRI_NB <= 23);
+        p.push_back (UserProperty (UserProperty::VIDEO, _("Colour primaries"), primaries[_color_primaries.get_value_or(AVCOL_PRI_UNSPECIFIED)]));
+
+        char const * transfers[] = {
+            _("Unspecified"),
+            _("BT709"),
+            _("Unspecified"),
+            _("Unspecified"),
+            _("Gamma 22 (BT470M)"),
+            _("Gamma 28 (BT470BG)"),
+            _("SMPTE 170M (BT601)"),
+            _("SMPTE 240M"),
+            _("Linear"),
+            _("Logarithmic (100:1 range)"),
+            _("Logarithmic (316:1 range)"),
+            _("IEC61966-2-4"),
+            _("BT1361 extended colour gamut"),
+            _("IEC61966-2-1 (sRGB or sYCC)"),
+            _("BT2020 for a 10-bit system"),
+            _("BT2020 for a 12-bit system"),
+            _("SMPTE ST 2084 for 10, 12, 14 and 16 bit systems"),
+            _("SMPTE ST 428-1"),
+            _("ARIB STD-B67 ('Hybrid log-gamma')")
+        };
+
+        DCPOMATIC_ASSERT (AVCOL_TRC_NB <= 19);
+        p.push_back (UserProperty (UserProperty::VIDEO, _("Colour transfer characteristic"), transfers[_color_trc.get_value_or(AVCOL_TRC_UNSPECIFIED)]));
+
+        char const * spaces[] = {
+            _("RGB / sRGB (IEC61966-2-1)"),
+            _("BT709"),
+            _("Unspecified"),
+            _("Unspecified"),
+            _("FCC"),
+            _("BT470BG (BT601-6)"),
+            _("SMPTE 170M (BT601-6)"),
+            _("SMPTE 240M"),
+            _("YCOCG"),
+            _("BT2020 non-constant luminance"),
+            _("BT2020 constant luminance"),
+            _("SMPTE 2085, Y'D'zD'x"),
+            _("Chroma-derived non-constant luminance"),
+            _("Chroma-derived constant luminance"),
+            _("BT2100")
+        };
+
+        DCPOMATIC_ASSERT (AVCOL_SPC_NB == 15);
+        p.push_back (UserProperty (UserProperty::VIDEO, _("Colourspace"), spaces[_colorspace.get_value_or(AVCOL_SPC_UNSPECIFIED)]));
+
+        if (_bits_per_pixel) {
+            p.push_back (UserProperty (UserProperty::VIDEO, _("Bits per pixel"), *_bits_per_pixel));
+        }
+    }
+
+    if (audio) {
+        audio->add_properties (p);
     }
+}

-    /* XXX: inefficient */
-    for (vector<ContentTimePeriod>::const_iterator i = stream->periods.begin(); i != stream->periods.end(); ++i) {
-        if ((starting && period.contains (i->from)) || (!starting && period.overlaps (*i))) {
-            d.push_back (*i);
+/** Our subtitle streams have colour maps, which can be changed, but
+ *  they have no way of signalling that change.  As a hack, we have this
+ *  method which callers can use when they've modified one of our subtitle
+ *  streams.
+ */
+void
+FFmpegContent::signal_subtitle_stream_changed ()
+{
+    signal_changed (FFmpegContentProperty::SUBTITLE_STREAM);
+}
+
+vector<shared_ptr<FFmpegAudioStream> >
+FFmpegContent::ffmpeg_audio_streams () const
+{
+    vector<shared_ptr<FFmpegAudioStream> > fa;
+
+    if (audio) {
+        BOOST_FOREACH (AudioStreamPtr i, audio->streams()) {
+            fa.push_back (dynamic_pointer_cast<FFmpegAudioStream> (i));
         }
     }

-    return d;
+    return fa;
 }

-bool
-FFmpegContent::has_subtitles () const
+void
+FFmpegContent::take_settings_from (shared_ptr<const Content> c)
 {
-    return !subtitle_streams().empty ();
+    shared_ptr<const FFmpegContent> fc = dynamic_pointer_cast<const FFmpegContent> (c);
+    if (!fc) {
+        return;
+    }
+
+    Content::take_settings_from (c);
+    _filters = fc->_filters;
 }